Merge branch 'interstitialMerged' of github.com:wayne-chen/hifi into interstitialMerged

Dante Ruiz 2018-10-04 15:01:39 -07:00
commit 651f37aceb
243 changed files with 6943 additions and 2329 deletions


@@ -133,6 +133,10 @@ dependencies {
implementation 'com.android.support.constraint:constraint-layout:1.0.2'
implementation 'com.android.support:design:26.1.0'
compile 'com.android.support:support-v4:26.1.0'
compile 'com.android.support:appcompat-v7:26.1.0'
compile 'com.android.support:support-vector-drawable:26.1.0'
implementation 'com.android.support:appcompat-v7:26.1.0'
compile 'com.android.support:recyclerview-v7:26.1.0'
compile 'com.android.support:cardview-v7:26.1.0'


@@ -26,6 +26,7 @@
QAndroidJniObject __interfaceActivity;
QAndroidJniObject __loginCompletedListener;
QAndroidJniObject __signupCompletedListener;
QAndroidJniObject __loadCompleteListener;
QAndroidJniObject __usernameChangedListener;
void tempMessageHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {
@@ -156,7 +157,7 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnCrea
JavaVM* jvm;
env->GetJavaVM(&jvm);
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::androidActivityRequested, [jvm](const QString& a, const bool backToScene, QList<QString> args) {
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::androidActivityRequested, [jvm](const QString& a, const bool backToScene, QMap<QString, QString> args) {
JNIEnv* myNewEnv;
JavaVMAttachArgs jvmArgs;
jvmArgs.version = JNI_VERSION_1_6; // choose your JNI version
@@ -182,9 +183,11 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnCrea
jmethodID mapClassConstructor = myNewEnv->GetMethodID(hashMapClass, "<init>", "()V");
jobject hashmap = myNewEnv->NewObject(hashMapClass, mapClassConstructor);
jmethodID mapClassPut = myNewEnv->GetMethodID(hashMapClass, "put", "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
for (const QString& arg: args) {
QAndroidJniObject jArg = QAndroidJniObject::fromString(arg);
myNewEnv->CallObjectMethod(hashmap, mapClassPut, QAndroidJniObject::fromString("url").object<jstring>(), jArg.object<jstring>());
QMap<QString, QString>::iterator i;
for (i = args.begin(); i != args.end(); ++i) {
QAndroidJniObject jKey = QAndroidJniObject::fromString(i.key());
QAndroidJniObject jValue = QAndroidJniObject::fromString(i.value());
myNewEnv->CallObjectMethod(hashmap, mapClassPut, jKey.object<jstring>(), jValue.object<jstring>());
}
__interfaceActivity.callMethod<void>("openAndroidActivity", "(Ljava/lang/String;ZLjava/util/HashMap;)V", string.object<jstring>(), jBackToScene, hashmap);
if (attachedHere) {
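The JNI descriptor in the call above, (Ljava/lang/String;ZLjava/util/HashMap;)V, fixes the shape of the Java-side receiver. A minimal sketch of that counterpart, assuming the class wrapper and body (only the signature is implied by the descriptor):

import java.util.HashMap;

// Sketch of the Java method matching "(Ljava/lang/String;ZLjava/util/HashMap;)V";
// the class and body here are illustrative, not the actual InterfaceActivity code.
public class OpenActivitySketch {
    public void openAndroidActivity(String activityName, boolean backToScene,
                                    HashMap<String, String> args) {
        // e.g. for "Login", forward args.get("url") as an Intent extra,
        // as InterfaceActivity does further down in this commit.
    }
}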
@@ -255,6 +258,24 @@ JNIEXPORT jstring JNICALL Java_io_highfidelity_hifiinterface_fragment_HomeFragme
return env->NewStringUTF(lastLocation.toString().toLatin1().data());
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeCancelLogin(JNIEnv *env, jobject instance) {
auto accountManager = DependencyManager::get<AccountManager>();
QObject::disconnect(accountManager.data(), &AccountManager::loginComplete, nullptr, nullptr);
QObject::disconnect(accountManager.data(), &AccountManager::loginFailed, nullptr, nullptr);
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeCancelLogin(JNIEnv *env,
jobject instance) {
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeCancelLogin(env, instance);
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *env, jobject instance,
jstring username_, jstring password_,
@@ -273,23 +294,90 @@ Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *en
QObject::connect(accountManager.data(), &AccountManager::loginComplete, [](const QUrl& authURL) {
jboolean jSuccess = (jboolean) true;
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
});
QObject::connect(accountManager.data(), &AccountManager::loginFailed, []() {
jboolean jSuccess = (jboolean) false;
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
if (__loginCompletedListener.isValid()) {
__loginCompletedListener.callMethod<void>("handleLoginCompleted", "(Z)V", jSuccess);
}
});
QObject::connect(accountManager.data(), &AccountManager::usernameChanged, [](const QString& username) {
QAndroidJniObject string = QAndroidJniObject::fromString(username);
__usernameChangedListener.callMethod<void>("handleUsernameChanged", "(Ljava/lang/String;)V", string.object<jstring>());
if (__usernameChangedListener.isValid()) {
__usernameChangedListener.callMethod<void>("handleUsernameChanged", "(Ljava/lang/String;)V", string.object<jstring>());
}
});
QMetaObject::invokeMethod(accountManager.data(), "requestAccessToken",
Q_ARG(const QString&, username), Q_ARG(const QString&, password));
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeLogin(JNIEnv *env,
jobject instance,
jstring username_,
jstring password_,
jobject usernameChangedListener) {
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(env, instance, username_, password_, usernameChangedListener);
}
JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeInitAfterAppLoaded(JNIEnv* env, jobject obj) {
AndroidHelper::instance().moveToThread(qApp->thread());
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeSignup(JNIEnv *env, jobject instance,
jstring email_, jstring username_,
jstring password_) {
const char *c_email = env->GetStringUTFChars(email_, 0);
const char *c_username = env->GetStringUTFChars(username_, 0);
const char *c_password = env->GetStringUTFChars(password_, 0);
QString email = QString(c_email);
QString username = QString(c_username);
QString password = QString(c_password);
env->ReleaseStringUTFChars(email_, c_email);
env->ReleaseStringUTFChars(username_, c_username);
env->ReleaseStringUTFChars(password_, c_password);
__signupCompletedListener = QAndroidJniObject(instance);
// disconnect any previous callback
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, []() {
if (__signupCompletedListener.isValid()) {
// handleSignupCompleted() takes no arguments, so nothing extra is passed through JNI
__signupCompletedListener.callMethod<void>("handleSignupCompleted", "()V");
}
});
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, [](QString errorString) {
if (__signupCompletedListener.isValid()) {
QAndroidJniObject string = QAndroidJniObject::fromString(errorString);
__signupCompletedListener.callMethod<void>("handleSignupFailed", "(Ljava/lang/String;)V", string.object<jstring>());
}
});
AndroidHelper::instance().signup(email, username, password);
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeCancelSignup(JNIEnv *env, jobject instance) {
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);
__signupCompletedListener = nullptr;
}
JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_fragment_FriendsFragment_nativeIsLoggedIn(JNIEnv *env, jobject instance) {
auto accountManager = DependencyManager::get<AccountManager>();


@@ -39,6 +39,7 @@ import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import io.highfidelity.hifiinterface.fragment.WebViewFragment;
import io.highfidelity.hifiinterface.receiver.HeadsetStateReceiver;
@@ -68,6 +69,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
private native void nativeEnterBackground();
private native void nativeEnterForeground();
private native long nativeOnExitVr();
private native void nativeInitAfterAppLoaded();
private AssetManager assetManager;
@@ -303,14 +305,22 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
switch (activityName) {
case "Home":
case "Privacy Policy":
case "Login": {
nativeBeforeEnterBackground();
Intent intent = new Intent(this, MainActivity.class);
intent.putExtra(MainActivity.EXTRA_FRAGMENT, activityName);
intent.putExtra(MainActivity.EXTRA_BACK_TO_SCENE, backToScene);
startActivity(intent);
break;
}
case "Login":
nativeBeforeEnterBackground();
Intent loginIntent = new Intent(this, MainActivity.class);
loginIntent.putExtra(MainActivity.EXTRA_FRAGMENT, activityName);
loginIntent.putExtra(MainActivity.EXTRA_BACK_TO_SCENE, backToScene);
if (args != null && args.containsKey(DOMAIN_URL)) {
loginIntent.putExtra(DOMAIN_URL, (String) args.get(DOMAIN_URL));
}
startActivity(loginIntent);
break;
case "WebView":
runOnUiThread(() -> {
webSlidingDrawer.setVisibility(View.VISIBLE);
@@ -342,6 +352,9 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
if (nativeEnterBackgroundCallEnqueued) {
nativeEnterBackground();
}
runOnUiThread(() -> {
nativeInitAfterAppLoaded();
});
}
public void performHapticFeedback(int duration) {


@@ -29,22 +29,29 @@ import android.widget.TextView;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;
import java.util.HashMap;
import java.util.Map;
import io.highfidelity.hifiinterface.fragment.FriendsFragment;
import io.highfidelity.hifiinterface.fragment.HomeFragment;
import io.highfidelity.hifiinterface.fragment.LoginFragment;
import io.highfidelity.hifiinterface.fragment.PolicyFragment;
import io.highfidelity.hifiinterface.fragment.SettingsFragment;
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
import io.highfidelity.hifiinterface.fragment.SignedInFragment;
import io.highfidelity.hifiinterface.fragment.SignupFragment;
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener,
LoginFragment.OnLoginInteractionListener,
HomeFragment.OnHomeInteractionListener,
FriendsFragment.OnHomeInteractionListener {
FriendsFragment.OnHomeInteractionListener,
SignupFragment.OnSignupInteractionListener,
SignedInFragment.OnSignedInInteractionListener {
private static final int PROFILE_PICTURE_PLACEHOLDER = R.drawable.default_profile_avatar;
public static final String DEFAULT_FRAGMENT = "Home";
public static final String EXTRA_FRAGMENT = "fragment";
public static final String EXTRA_BACK_TO_SCENE = "backToScene";
public static final String EXTRA_BACK_TO_URL = "url";
private String TAG = "HighFidelity";
@@ -62,6 +69,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
private MenuItem mPeopleMenuItem;
private boolean backToScene;
private String backToUrl;
@Override
protected void onCreate(Bundle savedInstanceState) {
@@ -105,9 +113,8 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
loadFragment(DEFAULT_FRAGMENT);
}
if (getIntent().hasExtra(EXTRA_BACK_TO_SCENE)) {
backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
}
backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
backToUrl = getIntent().getStringExtra(EXTRA_BACK_TO_URL);
}
}
@@ -143,35 +150,44 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
private void loadHomeFragment(boolean addToBackStack) {
Fragment fragment = HomeFragment.newInstance();
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack);
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack, true);
}
private void loadLoginFragment() {
Fragment fragment = LoginFragment.newInstance();
loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true);
loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true, true);
}
private void loadSignedInFragment() {
Fragment fragment = SignedInFragment.newInstance();
loadFragment(fragment, getString(R.string.welcome), getString(R.string.tagFragmentSignedIn), true, true);
}
private void loadSignupFragment() {
Fragment fragment = SignupFragment.newInstance();
loadFragment(fragment, getString(R.string.signup), getString(R.string.tagFragmentSignup), true, false);
}
private void loadPrivacyPolicyFragment() {
Fragment fragment = PolicyFragment.newInstance();
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true);
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true, true);
}
private void loadPeopleFragment() {
Fragment fragment = FriendsFragment.newInstance();
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true);
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true, true);
}
private void loadSettingsFragment() {
SettingsFragment fragment = SettingsFragment.newInstance();
loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true);
loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true, true);
}
private void loadFragment(Fragment fragment, String title, String tag, boolean addToBackStack) {
private void loadFragment(Fragment newFragment, String title, String tag, boolean addToBackStack, boolean goBackUntilHome) {
FragmentManager fragmentManager = getFragmentManager();
// check if it's the same fragment
@@ -183,17 +199,19 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
return; // cancel as we are already in that fragment
}
// go back until first transaction
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
for (int i = 0; i < backStackEntryCount - 1; i++) {
fragmentManager.popBackStackImmediate();
if (goBackUntilHome) {
// go back until first transaction
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
for (int i = 0; i < backStackEntryCount - 1; i++) {
fragmentManager.popBackStackImmediate();
}
}
// This case is when we wanted to go home; the rollback above already did that!
// But asking for a new Home fragment makes it easier to show an updated list, so we let it continue
FragmentTransaction ft = fragmentManager.beginTransaction();
ft.replace(R.id.content_frame, fragment, tag);
ft.replace(R.id.content_frame, newFragment, tag);
if (addToBackStack) {
ft.addToBackStack(title);
@@ -301,7 +319,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}
private void goToLastLocation() {
goToDomain("");
goToDomain(backToUrl != null ? backToUrl : "");
}
private void goToDomain(String domainUrl) {
@@ -330,6 +348,32 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}
}
@Override
public void onGettingStarted() {
loadHomeFragment(false);
if (backToScene) {
backToScene = false;
goToLastLocation();
}
}
@Override
public void onLoginRequested() {
// go back from signup to login
onBackPressed();
}
@Override
public void onSignupRequested() {
loadSignupFragment();
}
@Override
public void onSignupCompleted() {
loadSignedInFragment();
updateLoginMenu();
}
public void handleUsernameChanged(String username) {
runOnUiThread(() -> updateProfileHeader(username));
}


@@ -4,12 +4,10 @@ import android.app.Activity;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
@@ -19,19 +17,26 @@ import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import org.qtproject.qt5.android.QtNative;
import io.highfidelity.hifiinterface.R;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;
public class LoginFragment extends Fragment {
private EditText mUsername;
private EditText mPassword;
private TextView mError;
private TextView mForgotPassword;
private TextView mSignup;
private Button mLoginButton;
private ProgressDialog mDialog;
public native void nativeLogin(String username, String password, Activity usernameChangedListener);
public native void nativeCancelLogin();
private LoginFragment.OnLoginInteractionListener mListener;
@@ -54,48 +59,12 @@ public class LoginFragment extends Fragment {
mError = rootView.findViewById(R.id.error);
mLoginButton = rootView.findViewById(R.id.loginButton);
mForgotPassword = rootView.findViewById(R.id.forgotPassword);
mUsername.addTextChangedListener(new TextWatcher() {
boolean ignoreNextChange = false;
boolean hadBlankSpace = false;
@Override
public void beforeTextChanged(CharSequence charSequence, int start, int count, int after) {
hadBlankSpace = charSequence.length() > 0 && charSequence.charAt(charSequence.length()-1) == ' ';
}
@Override
public void onTextChanged(CharSequence charSequence, int start, int count, int after) {
}
@Override
public void afterTextChanged(Editable editable) {
if (!ignoreNextChange) {
ignoreNextChange = true;
boolean spaceFound = false;
for (int i = 0; i < editable.length(); i++) {
if (editable.charAt(i) == ' ') {
spaceFound=true;
editable.delete(i, i + 1);
i--;
}
}
if (hadBlankSpace && !spaceFound && editable.length() > 0) {
editable.delete(editable.length()-1, editable.length());
}
editable.append(' ');
ignoreNextChange = false;
}
}
});
mSignup = rootView.findViewById(R.id.signupButton);
mLoginButton.setOnClickListener(view -> login());
mForgotPassword.setOnClickListener(view -> forgotPassword());
mSignup.setOnClickListener(view -> signup());
mPassword.setOnEditorActionListener(
(textView, actionId, keyEvent) -> {
@@ -125,10 +94,19 @@ public class LoginFragment extends Fragment {
mListener = null;
}
@Override
public void onResume() {
super.onResume();
// This hack is intended to keep the Qt threads running even after the app comes back from the background
QtNative.setApplicationState(ApplicationActive);
}
@Override
public void onStop() {
super.onStop();
cancelActivityIndicator();
// Leave the Qt app paused
QtNative.setApplicationState(ApplicationInactive);
hideKeyboard();
}
@@ -146,6 +124,12 @@
}
}
public void signup() {
if (mListener != null) {
mListener.onSignupRequested();
}
}
private void hideKeyboard() {
View view = getActivity().getCurrentFocus();
if (view != null) {
@@ -164,7 +148,15 @@
mDialog = new ProgressDialog(getContext());
}
mDialog.setMessage(getString(R.string.logging_in));
mDialog.setCancelable(false);
mDialog.setCancelable(true);
mDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
nativeCancelLogin();
cancelActivityIndicator();
mLoginButton.setEnabled(true);
}
});
mDialog.show();
}
@@ -184,7 +176,6 @@
}
public void handleLoginCompleted(boolean success) {
Log.d("[LOGIN]", "handleLoginCompleted " + success);
getActivity().runOnUiThread(() -> {
mLoginButton.setEnabled(true);
cancelActivityIndicator();
@@ -200,6 +191,7 @@
public interface OnLoginInteractionListener {
void onLoginCompleted();
void onSignupRequested();
}
}


@@ -0,0 +1,73 @@
package io.highfidelity.hifiinterface.fragment;
import android.app.Fragment;
import android.content.Context;
import android.os.Bundle;
import android.text.Html;
import android.text.Spanned;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import java.io.IOException;
import java.io.InputStream;
import io.highfidelity.hifiinterface.R;
public class SignedInFragment extends Fragment {
private Button mGetStartedButton;
private OnSignedInInteractionListener mListener;
public SignedInFragment() {
// Required empty public constructor
}
public static SignedInFragment newInstance() {
SignedInFragment fragment = new SignedInFragment();
return fragment;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_signedin, container, false);
mGetStartedButton = rootView.findViewById(R.id.getStarted);
mGetStartedButton.setOnClickListener(view -> {
getStarted();
});
return rootView;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof SignedInFragment.OnSignedInInteractionListener) {
mListener = (SignedInFragment.OnSignedInInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnSignedInInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public void getStarted() {
if (mListener != null) {
mListener.onGettingStarted();
}
}
public interface OnSignedInInteractionListener {
void onGettingStarted();
}
}


@@ -0,0 +1,217 @@
package io.highfidelity.hifiinterface.fragment;
import android.app.Activity;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import org.qtproject.qt5.android.QtNative;
import io.highfidelity.hifiinterface.R;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;
public class SignupFragment extends Fragment {
private EditText mEmail;
private EditText mUsername;
private EditText mPassword;
private TextView mError;
private TextView mCancelButton;
private Button mSignupButton;
private ProgressDialog mDialog;
public native void nativeSignup(String email, String username, String password);
public native void nativeCancelSignup();
public native void nativeLogin(String username, String password, Activity usernameChangedListener);
public native void nativeCancelLogin();
private SignupFragment.OnSignupInteractionListener mListener;
public SignupFragment() {
// Required empty public constructor
}
public static SignupFragment newInstance() {
SignupFragment fragment = new SignupFragment();
return fragment;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_signup, container, false);
mEmail = rootView.findViewById(R.id.email);
mUsername = rootView.findViewById(R.id.username);
mPassword = rootView.findViewById(R.id.password);
mError = rootView.findViewById(R.id.error);
mSignupButton = rootView.findViewById(R.id.signupButton);
mCancelButton = rootView.findViewById(R.id.cancelButton);
mSignupButton.setOnClickListener(view -> signup());
mCancelButton.setOnClickListener(view -> login());
mPassword.setOnEditorActionListener(
(textView, actionId, keyEvent) -> {
if (actionId == EditorInfo.IME_ACTION_DONE) {
mSignupButton.performClick();
return true;
}
return false;
});
return rootView;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnSignupInteractionListener) {
mListener = (OnSignupInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnSignupInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
@Override
public void onResume() {
super.onResume();
// This hack is intended to keep the Qt threads running even after the app comes back from the background
QtNative.setApplicationState(ApplicationActive);
}
@Override
public void onStop() {
super.onStop();
cancelActivityIndicator();
// Leave the Qt app paused
QtNative.setApplicationState(ApplicationInactive);
hideKeyboard();
}
private void login() {
if (mListener != null) {
mListener.onLoginRequested();
}
}
public void signup() {
String email = mEmail.getText().toString().trim();
String username = mUsername.getText().toString().trim();
String password = mPassword.getText().toString();
hideKeyboard();
if (email.isEmpty() || username.isEmpty() || password.isEmpty()) {
showError(getString(R.string.signup_email_username_or_password_incorrect));
} else {
mSignupButton.setEnabled(false);
hideError();
showActivityIndicator();
nativeSignup(email, username, password);
}
}
private void hideKeyboard() {
View view = getActivity().getCurrentFocus();
if (view != null) {
InputMethodManager imm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
}
private void showActivityIndicator() {
if (mDialog == null) {
mDialog = new ProgressDialog(getContext());
}
mDialog.setMessage(getString(R.string.creating_account));
mDialog.setCancelable(true);
mDialog.setOnCancelListener(dialogInterface -> {
nativeCancelSignup();
cancelActivityIndicator();
mSignupButton.setEnabled(true);
});
mDialog.show();
}
private void cancelActivityIndicator() {
if (mDialog != null) {
mDialog.cancel();
}
}
private void showError(String error) {
mError.setText(error);
mError.setVisibility(View.VISIBLE);
}
private void hideError() {
mError.setText("");
mError.setVisibility(View.INVISIBLE);
}
public interface OnSignupInteractionListener {
void onSignupCompleted();
void onLoginRequested();
}
public void handleSignupCompleted() {
String username = mUsername.getText().toString().trim();
String password = mPassword.getText().toString();
mDialog.setMessage(getString(R.string.logging_in));
mDialog.setCancelable(true);
mDialog.setOnCancelListener(dialogInterface -> {
nativeCancelLogin();
cancelActivityIndicator();
if (mListener != null) {
mListener.onLoginRequested();
}
});
mDialog.show();
nativeLogin(username, password, getActivity());
}
public void handleSignupFailed(String error) {
getActivity().runOnUiThread(() -> {
mSignupButton.setEnabled(true);
cancelActivityIndicator();
mError.setText(error);
mError.setVisibility(View.VISIBLE);
});
}
public void handleLoginCompleted(boolean success) {
getActivity().runOnUiThread(() -> {
mSignupButton.setEnabled(true);
cancelActivityIndicator();
if (success) {
if (mListener != null) {
mListener.onSignupCompleted();
}
} else {
// Registration was successful but login failed.
// Let the user log in manually
mListener.onLoginRequested();
}
});
}
}


@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="36dp"
android:height="22dp"
android:viewportWidth="36"
android:viewportHeight="22">
<path
android:fillColor="#3D3D3D"
android:fillType="evenOdd"
android:pathData="M3.59534,11.0156 C6.16042,13.4128,9.65987,15.5898,13.6042,16.1774 C17.686,16.7856,22.4164,15.7196,27.3057,11.0659 C22.0721,6.07309,17.0642,5.14115,12.9153,5.90073 C8.99427,6.61859,5.69298,8.87688,3.59534,11.0156 Z M12.455,3.27591 C17.7727,2.30235,23.9836,3.74895,30.1053,10.1333 L31,11.0664 L30.1053,11.9994 C24.3636,17.9875,18.4774,19.5983,13.2276,18.8161 C8.06048,18.0463,3.70384,14.9892,0.837069,11.9994 L0,11.1265 L0.778477,10.1986 C3.05338,7.48717,7.2318,4.23217,12.455,3.27591 Z" />
<path
android:fillColor="#3D3D3D"
android:pathData="M15.6539,7.11119 C17.6719,7.11119,19.3078,8.81726,19.3078,10.9218 C19.3078,13.0263,17.6719,14.7324,15.6539,14.7324 C13.6359,14.7324,12,13.0263,12,10.9218 C12,8.81726,13.6359,7.11119,15.6539,7.11119 Z" />
<!--path
android:fillColor="#000000"
android:strokeColor="#ffffff"
android:strokeWidth="2.7"
android:strokeLineCap="round"
android:pathData="M27,2.90919 L8.90919,21" /-->
<path
android:fillColor="#000000"
android:strokeColor="#3D3D3D"
android:strokeWidth="3"
android:strokeLineCap="round"
android:pathData="M25,2.12132 L7.12132,20" />
</vector>


@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="36dp"
android:height="16dp"
android:viewportWidth="36"
android:viewportHeight="16">
<path
android:fillColor="#3D3D3D"
android:fillType="evenOdd"
android:pathData="M3.59534,8.01564 C6.16042,10.4128,9.65987,12.5898,13.6042,13.1774 C17.686,13.7856,22.4164,12.7196,27.3057,8.06585 C22.0721,3.07309,17.0642,2.14115,12.9153,2.90073 C8.99427,3.61859,5.69298,5.87688,3.59534,8.01564 Z M12.455,0.275915 C17.7727,-0.697651,23.9836,0.748949,30.1053,7.13329 L31,8.06636 L30.1053,8.99944 C24.3636,14.9875,18.4774,16.5983,13.2276,15.8161 C8.06048,15.0463,3.70384,11.9892,0.837069,8.99944 L0,8.12646 L0.778477,7.1986 C3.05338,4.48717,7.2318,1.23217,12.455,0.275915 Z" />
<path
android:fillColor="#3D3D3D"
android:pathData="M15.6441,4.11118 C17.6621,4.11118,19.298,5.81725,19.298,7.92179 C19.298,10.0263,17.6621,11.7324,15.6441,11.7324 C13.6261,11.7324,11.9902,10.0263,11.9902,7.92179 C11.9902,5.81725,13.6261,4.11118,15.6441,4.11118 Z" />
</vector>


@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android" >
<item android:state_pressed="true" >
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
<item android:state_focused="true">
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
<item>
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
</selector>


@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_checked="true" android:drawable="@drawable/ic_eye_noshow"/>
<item android:drawable="@drawable/ic_eye_show" />
</selector>


@@ -21,10 +21,12 @@
android:id="@+id/error"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="16dp"
android:layout_marginBottom="25dp"
android:layout_marginLeft="9dp"
android:layout_marginRight="9dp"
android:fontFamily="@font/raleway"
android:textColor="@color/colorLoginError"
android:textSize="12sp"
android:textSize="14sp"
app:layout_constraintBottom_toTopOf="@id/username"
app:layout_constraintLeft_toLeftOf="@id/username"
android:visibility="invisible"/>
@@ -41,72 +43,100 @@
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="14sp"
android:textSize="17sp"
android:inputType="textEmailAddress"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="right|center_vertical"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/header"
android:layout_marginTop="70dp"
android:hint="@string/username_or_email" />
<EditText
<android.support.design.widget.TextInputLayout
android:id="@+id/passwordLayout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
app:passwordToggleTint="@color/showPasswordColor"
app:passwordToggleEnabled="true"
app:hintAnimationEnabled="false"
app:passwordToggleDrawable="@drawable/selector_show_password"
app:hintEnabled="false"
app:layout_constraintTop_toBottomOf="@id/username"
android:layout_marginTop="13dp"
>
<android.support.design.widget.TextInputEditText
android:id="@+id/password"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:drawablePadding="55dp"
android:paddingTop="14dp"
android:drawableEnd="@drawable/ic_eye_noshow"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="14sp"
android:inputType="textPassword"
android:textSize="17sp"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="right|center_vertical"
app:layout_constraintTop_toBottomOf="@id/username"
android:gravity="left|center_vertical"
android:imeOptions="actionDone"
android:hint="@string/password"
android:layout_marginTop="13dp"
android:imeOptions="actionDone"/>
android:inputType="textPassword" />
</android.support.design.widget.TextInputLayout>
<Button
android:id="@+id/loginButton"
android:layout_width="154dp"
android:layout_height="38dp"
android:layout_marginTop="16dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingLeft="55dp"
android:paddingRight="55dp"
android:paddingTop="0dp"
android:text="@string/login"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="15sp"
android:textSize="18sp"
app:layout_constraintRight_toRightOf="@id/username"
app:layout_constraintTop_toBottomOf="@id/password"
app:layout_constraintTop_toBottomOf="@id/forgotPassword"
app:layout_goneMarginTop="4dp"/>
<TextView
android:id="@+id/forgotPassword"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:paddingTop="9dp"
android:paddingBottom="16dp"
android:fontFamily="@font/raleway_semibold"
android:textSize="14dp"
android:text="@string/forgot_password"
android:textStyle="italic"
android:paddingRight="10dp"
app:layout_constraintLeft_toLeftOf="@id/password"
app:layout_constraintTop_toTopOf="@id/loginButton"
app:layout_constraintRight_toLeftOf="@id/loginButton"
app:layout_constraintRight_toRightOf="@id/passwordLayout"
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
android:textColor="@color/colorButton1"/>
<Button
android:id="@+id/signupButton"
android:layout_width="0dp"
app:layout_constraintWidth_default="spread"
android:layout_height="38dp"
android:background="@drawable/rounded_secondary_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:layout_marginRight="15dp"
android:text="@string/signup"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
app:layout_constraintTop_toTopOf="@id/loginButton"
app:layout_constraintRight_toLeftOf="@id/loginButton"
app:layout_goneMarginTop="4dp"/>


@@ -0,0 +1,63 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/backgroundLight">
<ImageView
android:id="@+id/header"
android:layout_width="@dimen/header_hifi_width"
android:layout_height="@dimen/header_hifi_height"
android:layout_marginTop="@dimen/header_hifi_margin_top"
android:contentDescription="HighFidelity"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:srcCompat="@drawable/hifi_header" />
<TextView
android:id="@+id/welcome"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="50dp"
android:paddingLeft="86dp"
android:paddingRight="86dp"
android:fontFamily="@font/raleway"
android:textColor="@color/clearText"
android:textSize="24sp"
android:text="@string/signedin_welcome"
app:layout_constraintTop_toBottomOf="@id/header"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:gravity="center"
/>
<Button
android:id="@+id/getStarted"
android:layout_width="217dp"
android:layout_height="38dp"
android:layout_marginTop="30dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingLeft="25dp"
android:paddingRight="25dp"
android:paddingTop="0dp"
android:text="@string/get_started"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintTop_toBottomOf="@id/welcome"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_goneMarginTop="4dp"/>
</android.support.constraint.ConstraintLayout>


@@ -0,0 +1,151 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/backgroundLight">
<ImageView
android:id="@+id/header"
android:layout_width="@dimen/header_hifi_width"
android:layout_height="@dimen/header_hifi_height"
android:layout_marginTop="@dimen/header_hifi_margin_top"
android:contentDescription="HighFidelity"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:srcCompat="@drawable/hifi_header" />
<TextView
android:id="@+id/error"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_marginBottom="16dp"
android:layout_marginLeft="9dp"
android:layout_marginRight="9dp"
android:fontFamily="@font/raleway"
android:textColor="@color/colorLoginError"
android:textSize="14sp"
app:layout_constraintBottom_toTopOf="@id/email"
app:layout_constraintLeft_toLeftOf="@id/email"
app:layout_constraintRight_toRightOf="@id/email"
android:visibility="invisible"/>
<EditText
android:id="@+id/email"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:inputType="textEmailAddress"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/header"
android:layout_marginTop="70dp"
android:hint="@string/email" />
<EditText
android:id="@+id/username"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:inputType="text"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/email"
android:layout_marginTop="7dp"
android:hint="@string/username" />
<android.support.design.widget.TextInputLayout
android:id="@+id/passwordLayout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
app:passwordToggleTint="@color/showPasswordColor"
app:passwordToggleEnabled="true"
app:hintAnimationEnabled="false"
app:passwordToggleDrawable="@drawable/selector_show_password"
app:hintEnabled="false"
app:layout_constraintTop_toBottomOf="@id/username"
android:layout_marginTop="7dp"
>
<android.support.design.widget.TextInputEditText
android:id="@+id/password"
android:layout_width="match_parent"
android:layout_height="35dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:drawablePadding="55dp"
android:paddingTop="14dp"
android:drawableEnd="@drawable/ic_eye_noshow"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
android:imeOptions="actionDone"
android:hint="@string/password"
android:inputType="textPassword" />
</android.support.design.widget.TextInputLayout>
<Button
android:id="@+id/signupButton"
android:layout_width="154dp"
android:layout_height="38dp"
android:layout_marginTop="44dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:text="@string/signup"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintRight_toRightOf="@id/username"
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
app:layout_goneMarginTop="4dp"/>
<Button
android:id="@+id/cancelButton"
android:layout_width="0dp"
app:layout_constraintWidth_default="spread"
android:layout_height="38dp"
android:background="@drawable/rounded_secondary_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:layout_marginRight="15dp"
android:text="@string/cancel"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
app:layout_constraintTop_toTopOf="@id/signupButton"
app:layout_constraintRight_toLeftOf="@id/signupButton"
app:layout_goneMarginTop="4dp"/>
</android.support.constraint.ConstraintLayout>


@@ -6,8 +6,10 @@
<color name="colorAccent">#54D7FD</color>
<color name="backgroundEditText">#E3E3E3</color>
<color name="editTextColor">#575757</color>
<color name="showPasswordColor">#3D3D3D</color>
<color name="tabs">#1EB5EC</color>
<color name="colorButton1">#00B4EF</color>
<color name="colorButton2">#828282</color>
<color name="backgroundDark">#333333</color>
<color name="backgroundLight">#4F4F4F</color>
<color name="backgroundSearch">#33999999</color>
@@ -22,4 +24,6 @@
<color name="starSelectedTint">#FBD92A</color>
<color name="starUnselectedTint">#8A8A8A</color>
<color name="slidingUpPanelFadeColor">#40000000</color>
<color name="clearText">#F2F2F2</color>
</resources>


@@ -10,11 +10,13 @@
<string name="popular">POPULAR</string>
<string name="bookmarks">BOOKMARKS</string>
<string name="goto_url_hint">Type a domain url</string>
<string name="username_or_email">Username or email\u00A0</string>
<string name="password">Password\u00A0</string>
<string name="email">Email</string>
<string name="username">Username</string>
<string name="username_or_email">Username or email</string>
<string name="password">Password</string>
<string name="login">Login</string>
<string name="logout">Logout</string>
<string name="forgot_password">Forgot password?\u00A0</string>
<string name="forgot_password"><u>Forgot password?</u>\u00A0</string>
<string name="login_username_or_password_incorrect">Username or password incorrect.</string>
<string name="logging_in">Logging into High Fidelity</string>
<string name="search_hint"><i>Search for a place by name</i>\u00A0</string>
@@ -23,13 +25,22 @@
<string name="privacyPolicy">Privacy Policy</string>
<string name="your_last_location">Your Last Location</string>
<string name="online">Online</string>
<string name="signup">Sign Up</string>
<string name="creating_account">Creating your High Fidelity account</string>
<string name="signup_email_username_or_password_incorrect">Email, username or password incorrect.</string>
<string name="signedin_welcome">You are now signed into High Fidelity</string>
<string name="welcome">Welcome</string>
<string name="cancel">Cancel</string>
<string name="get_started">Get Started</string>
<!-- tags -->
<string name="tagFragmentHome">tagFragmentHome</string>
<string name="tagFragmentLogin">tagFragmentLogin</string>
<string name="tagFragmentSignup">tagFragmentSignup</string>
<string name="tagFragmentPolicy">tagFragmentPolicy</string>
<string name="tagFragmentPeople">tagFragmentPeople</string>
<string name="tagSettings">tagSettings</string>
<string name="tagFragmentSignedIn">tagFragmentSignedIn</string>
<string name="settings">Settings</string>
<string name="AEC">AEC</string>
<string name="acoustic_echo_cancellation">Acoustic Echo Cancellation</string>


@@ -38,6 +38,8 @@
#include "AvatarAudioStream.h"
#include "InjectedAudioStream.h"
using namespace std;
static const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
static const int DISABLE_STATIC_JITTER_FRAMES = -1;
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 1.0f;
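As a worked example of the comment above: an attenuation factor of 0.5 per doubling of distance halves the amplitude each time the distance doubles, which is exactly the -6 dB * log2(distance) curve. A small illustrative Java sketch (the constant and formula come from the comment; everything else is made up):

// Worked example of "attenuation = -6dB * log2(distance)".
// 1 m -> 0 dB, 2 m -> -6 dB, 4 m -> -12 dB, 8 m -> -18 dB.
public class AttenuationExample {
    static double attenuationDb(double distance) {
        return -6.0 * (Math.log(distance) / Math.log(2.0)); // -6 dB * log2(d)
    }
    public static void main(String[] args) {
        for (double d : new double[] { 1, 2, 4, 8 }) {
            System.out.printf("%.0f m -> %.1f dB%n", d, attenuationDb(d));
        }
    }
}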
@@ -49,11 +51,11 @@ static const QString AUDIO_THREADING_GROUP_KEY = "audio_threading";
int AudioMixer::_numStaticJitterFrames{ DISABLE_STATIC_JITTER_FRAMES };
float AudioMixer::_noiseMutingThreshold{ DEFAULT_NOISE_MUTING_THRESHOLD };
float AudioMixer::_attenuationPerDoublingInDistance{ DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE };
std::map<QString, std::shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
map<QString, shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
QStringList AudioMixer::_codecPreferenceOrder{};
QHash<QString, AABox> AudioMixer::_audioZones;
QVector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
QVector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
vector<AudioMixer::ZoneDescription> AudioMixer::_audioZones;
vector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
vector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
AudioMixer::AudioMixer(ReceivedMessage& message) :
ThreadedAssignment(message)
@@ -67,7 +69,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
_availableCodecs.clear(); // Make sure struct is clean
auto pluginManager = DependencyManager::set<PluginManager>();
auto codecPlugins = pluginManager->getCodecPlugins();
std::for_each(codecPlugins.cbegin(), codecPlugins.cend(),
for_each(codecPlugins.cbegin(), codecPlugins.cend(),
[&](const CodecPluginPointer& codec) {
_availableCodecs[codec->getName()] = codec;
});
@@ -122,7 +124,7 @@ void AudioMixer::queueAudioPacket(QSharedPointer<ReceivedMessage> message, Share
void AudioMixer::queueReplicatedAudioPacket(QSharedPointer<ReceivedMessage> message) {
// make sure we have a replicated node for the original sender of the packet
auto nodeList = DependencyManager::get<NodeList>();
// Node ID is now part of user data, since replicated audio packets are non-sourced.
QUuid nodeID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
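Because replicated audio packets are non-sourced, the original sender's ID travels in the payload itself; an RFC 4122 UUID is exactly 16 raw bytes. An illustrative Java equivalent of the read above (names hypothetical):

import java.nio.ByteBuffer;
import java.util.UUID;

// Reading a 16-byte RFC 4122 node ID from a packet payload, analogous to
// QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID)) above.
public class NodeIdRead {
    static UUID readNodeId(ByteBuffer payload) {
        return new UUID(payload.getLong(), payload.getLong()); // 8 + 8 = 16 bytes
    }
}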
@@ -173,12 +175,12 @@ void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> mes
}
}
const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vector<QString> codecs) {
const pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(vector<QString> codecs) {
QString selectedCodecName;
CodecPluginPointer selectedCodec;
// read the codecs requested (by the client)
int minPreference = std::numeric_limits<int>::max();
int minPreference = numeric_limits<int>::max();
for (auto& codec : codecs) {
if (_availableCodecs.count(codec) > 0) {
int preference = _codecPreferenceOrder.indexOf(codec);
@@ -191,20 +193,9 @@ const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vec
}
}
return std::make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
return make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
}
void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
// enumerate the connected listeners to remove HRTF objects for the disconnected node
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (clientData) {
clientData->removeNode(killedNode->getUUID());
}
});
}
void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto nodeList = DependencyManager::get<NodeList>();
@@ -223,32 +214,31 @@ void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> pac
}
}
void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(killedNode->getLinkedData());
if (clientData) {
// stage the removal of all streams from this node; workers handle it when preparing mixes for listeners
_workerSharedData.removedNodes.emplace_back(killedNode->getLocalID());
}
}
void AudioMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
if (clientData) {
clientData->removeAgentAvatarAudioStream();
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([sendingNode](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(sendingNode->getUUID());
}
});
// stage the removal of the avatar audio stream from this Agent; workers handle it when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(sendingNode->getUUID(), sendingNode->getLocalID(), QUuid());
}
}
void AudioMixer::removeHRTFsForFinishedInjector(const QUuid& streamID) {
auto injectorClientData = qobject_cast<AudioMixerClientData*>(sender());
if (injectorClientData) {
// enumerate the connected listeners to remove HRTF objects for the disconnected injector
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([injectorClientData, &streamID](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(injectorClientData->getNodeID(), streamID);
}
});
if (injectorClientData) {
// stage the removal of this stream; workers handle it when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(injectorClientData->getNodeID(), injectorClientData->getNodeLocalID(),
streamID);
}
}
@@ -285,7 +275,7 @@ void AudioMixer::sendStatsPacket() {
// timing stats
QJsonObject timingStats;
auto addTiming = [&](Timer& timer, std::string name) {
auto addTiming = [&](Timer& timer, string name) {
uint64_t timing, trailing;
timer.get(timing, trailing);
timingStats[("us_per_" + name).c_str()] = (qint64)(timing / _numStatFrames);
@@ -293,12 +283,12 @@
};
addTiming(_ticTiming, "tic");
addTiming(_checkTimeTiming, "check_time");
addTiming(_sleepTiming, "sleep");
addTiming(_frameTiming, "frame");
addTiming(_prepareTiming, "prepare");
addTiming(_packetsTiming, "packets");
addTiming(_mixTiming, "mix");
addTiming(_eventsTiming, "events");
addTiming(_packetsTiming, "packets");
#ifdef HIFI_AUDIO_MIXER_DEBUG
timingStats["ns_per_mix"] = (_stats.totalMixes > 0) ? (float)(_stats.mixTime / _stats.totalMixes) : 0;
@@ -311,11 +301,24 @@
QJsonObject mixStats;
mixStats["%_hrtf_mixes"] = percentageForMixStats(_stats.hrtfRenders);
mixStats["%_hrtf_silent_mixes"] = percentageForMixStats(_stats.hrtfSilentRenders);
mixStats["%_hrtf_throttle_mixes"] = percentageForMixStats(_stats.hrtfThrottleRenders);
mixStats["%_manual_stereo_mixes"] = percentageForMixStats(_stats.manualStereoMixes);
mixStats["%_manual_echo_mixes"] = percentageForMixStats(_stats.manualEchoMixes);
mixStats["1_hrtf_renders"] = (int)(_stats.hrtfRenders / (float)_numStatFrames);
mixStats["1_hrtf_resets"] = (int)(_stats.hrtfResets / (float)_numStatFrames);
mixStats["1_hrtf_updates"] = (int)(_stats.hrtfUpdates / (float)_numStatFrames);
mixStats["2_skipped_streams"] = (int)(_stats.skipped / (float)_numStatFrames);
mixStats["2_inactive_streams"] = (int)(_stats.inactive / (float)_numStatFrames);
mixStats["2_active_streams"] = (int)(_stats.active / (float)_numStatFrames);
mixStats["3_skippped_to_active"] = (int)(_stats.skippedToActive / (float)_numStatFrames);
mixStats["3_skippped_to_inactive"] = (int)(_stats.skippedToInactive / (float)_numStatFrames);
mixStats["3_inactive_to_skippped"] = (int)(_stats.inactiveToSkipped / (float)_numStatFrames);
mixStats["3_inactive_to_active"] = (int)(_stats.inactiveToActive / (float)_numStatFrames);
mixStats["3_active_to_skippped"] = (int)(_stats.activeToSkipped / (float)_numStatFrames);
mixStats["3_active_to_inactive"] = (int)(_stats.activeToInactive / (float)_numStatFrames);
mixStats["total_mixes"] = _stats.totalMixes;
mixStats["avg_mixes_per_block"] = _stats.totalMixes / _numStatFrames;
@@ -366,7 +369,7 @@ AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (!clientData) {
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
node->setLinkedData(unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}
@@ -393,33 +396,49 @@ void AudioMixer::start() {
// mix state
unsigned int frame = 1;
auto frameTimestamp = p_high_resolution_clock::now();
while (!_isFinished) {
auto ticTimer = _ticTiming.timer();
{
auto timer = _sleepTiming.timer();
auto frameDuration = timeFrame(frameTimestamp);
if (_startFrameTimestamp.time_since_epoch().count() == 0) {
_startFrameTimestamp = _idealFrameTimestamp = p_high_resolution_clock::now();
} else {
auto timer = _checkTimeTiming.timer();
auto frameDuration = timeFrame();
throttle(frameDuration, frame);
}
auto frameTimer = _frameTiming.timer();
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// prepare frames; pop off any new audio from their streams
{
auto prepareTimer = _prepareTiming.timer();
std::for_each(cbegin, cend, [&](const SharedNodePointer& node) {
_stats.sumStreams += prepareFrame(node, frame);
});
}
// process (node-isolated) audio packets across slave threads
{
auto packetsTimer = _packetsTiming.timer();
// first clear the concurrent vector of added streams that the slaves will add to when they process packets
_workerSharedData.addedStreams.clear();
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
_slavePool.processPackets(cbegin, cend);
});
}
// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();
// clear removed nodes and removed streams before we process events that will setup the new set
_workerSharedData.removedNodes.clear();
_workerSharedData.removedStreams.clear();
// since we're in a while loop we need to yield to Qt's event processing
QCoreApplication::processEvents();
}
int numToRetain = nodeList->size() * (1 - _throttlingRatio);
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// mix across slave threads
{
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, _throttlingRatio);
}
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, numToRetain);
});
// gather stats
@@ -431,21 +450,6 @@ void AudioMixer::start() {
++frame;
++_numStatFrames;
// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();
// since we're in a while loop we need to yield to Qt's event processing
QCoreApplication::processEvents();
// process (node-isolated) audio packets across slave threads
{
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
auto packetsTimer = _packetsTiming.timer();
_slavePool.processPackets(cbegin, cend);
});
}
}
if (_isFinished) {
// alert qt eventing that this is finished
@@ -455,26 +459,26 @@
}
}
std::chrono::microseconds AudioMixer::timeFrame(p_high_resolution_clock::time_point& timestamp) {
chrono::microseconds AudioMixer::timeFrame() {
// advance the next frame
auto nextTimestamp = timestamp + std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
auto now = p_high_resolution_clock::now();
// compute how long the last frame took
auto duration = std::chrono::duration_cast<std::chrono::microseconds>(now - timestamp);
auto duration = chrono::duration_cast<chrono::microseconds>(now - _startFrameTimestamp);
// set the new frame timestamp
timestamp = std::max(now, nextTimestamp);
_idealFrameTimestamp += chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
// sleep until the next frame should start
// WIN32 sleep_until is broken until VS2015 Update 2
// instead, std::max (above) guarantees that timestamp >= now, so we can sleep_for
std::this_thread::sleep_for(timestamp - now);
{
auto timer = _sleepTiming.timer();
this_thread::sleep_until(_idealFrameTimestamp);
}
_startFrameTimestamp = p_high_resolution_clock::now();
return duration;
}
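For reference, a minimal standalone sketch of the drift-free timing scheme timeFrame() now uses: advance an ideal timestamp by a fixed period and sleep_until it, so per-frame error does not accumulate. The 10 ms period and loop bound are illustrative assumptions, and std::chrono::steady_clock stands in for p_high_resolution_clock.
#include <chrono>
#include <thread>
void frameLoopSketch() {
    using namespace std::chrono;
    constexpr microseconds FRAME_PERIOD { 10000 }; // assumed 10 ms network frame
    auto ideal = steady_clock::now();
    for (int frame = 0; frame < 100; ++frame) {
        // ... per-frame mixing work would happen here ...
        ideal += FRAME_PERIOD;                // advance the ideal schedule by one period
        std::this_thread::sleep_until(ideal); // sleep to the schedule, not relative to now,
                                              // so jitter in one frame is absorbed by the next
    }
}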
void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
void AudioMixer::throttle(chrono::microseconds duration, int frame) {
// throttle using a modified proportional-integral controller
const float FRAME_TIME = 10000.0f;
float mixRatio = duration.count() / FRAME_TIME;
@ -508,28 +512,19 @@ void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
if (_trailingMixRatio > TARGET) {
int proportionalTerm = 1 + (_trailingMixRatio - TARGET) / 0.1f;
_throttlingRatio += THROTTLE_RATE * proportionalTerm;
_throttlingRatio = std::min(_throttlingRatio, 1.0f);
_throttlingRatio = min(_throttlingRatio, 1.0f);
qCDebug(audio) << "audio-mixer is struggling (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
} else if (_throttlingRatio > 0.0f && _trailingMixRatio <= BACKOFF_TARGET) {
int proportionalTerm = 1 + (TARGET - _trailingMixRatio) / 0.2f;
_throttlingRatio -= BACKOFF_RATE * proportionalTerm;
_throttlingRatio = std::max(_throttlingRatio, 0.0f);
_throttlingRatio = max(_throttlingRatio, 0.0f);
qCDebug(audio) << "audio-mixer is recovering (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
}
}
}
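A worked example of the proportional step above, with illustrative numbers: if _trailingMixRatio exceeds TARGET by 0.15, then proportionalTerm = 1 + 0.15 / 0.1 = 2 (the 2.5 float result truncates on conversion to int), so _throttlingRatio grows by 2 * THROTTLE_RATE that frame before being clamped to 1.0. The actual TARGET and THROTTLE_RATE constants are defined in an elided hunk and are not shown here.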
int AudioMixer::prepareFrame(const SharedNodePointer& node, unsigned int frame) {
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
if (data == nullptr) {
return 0;
}
return data->checkBuffersBeforeFrameSend();
}
void AudioMixer::clearDomainSettings() {
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
_attenuationPerDoublingInDistance = DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE;
@ -661,8 +656,11 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
const QString Y_MAX = "y_max";
const QString Z_MIN = "z_min";
const QString Z_MAX = "z_max";
foreach (const QString& zone, zones.keys()) {
QJsonObject zoneObject = zones[zone].toObject();
auto zoneNames = zones.keys();
_audioZones.reserve(zoneNames.length());
foreach (const QString& zoneName, zoneNames) {
QJsonObject zoneObject = zones[zoneName].toObject();
if (zoneObject.contains(X_MIN) && zoneObject.contains(X_MAX) && zoneObject.contains(Y_MIN) &&
zoneObject.contains(Y_MAX) && zoneObject.contains(Z_MIN) && zoneObject.contains(Z_MAX)) {
@ -686,8 +684,8 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
glm::vec3 corner(xMin, yMin, zMin);
glm::vec3 dimensions(xMax - xMin, yMax - yMin, zMax - zMin);
AABox zoneAABox(corner, dimensions);
_audioZones.insert(zone, zoneAABox);
qCDebug(audio) << "Added zone:" << zone << "(corner:" << corner << ", dimensions:" << dimensions << ")";
_audioZones.push_back({ zoneName, zoneAABox });
qCDebug(audio) << "Added zone:" << zoneName << "(corner:" << corner << ", dimensions:" << dimensions << ")";
}
}
}
@ -707,18 +705,28 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
coefficientObject.contains(LISTENER) &&
coefficientObject.contains(COEFFICIENT)) {
ZoneSettings settings;
auto itSource = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == coefficientObject.value(SOURCE).toString();
});
auto itListener = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == coefficientObject.value(LISTENER).toString();
});
bool ok;
settings.source = coefficientObject.value(SOURCE).toString();
settings.listener = coefficientObject.value(LISTENER).toString();
settings.coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
float coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
if (ok && settings.coefficient >= 0.0f && settings.coefficient <= 1.0f &&
_audioZones.contains(settings.source) && _audioZones.contains(settings.listener)) {
if (ok && coefficient >= 0.0f && coefficient <= 1.0f &&
itSource != end(_audioZones) &&
itListener != end(_audioZones)) {
ZoneSettings settings;
settings.source = itSource - begin(_audioZones);
settings.listener = itListener - begin(_audioZones);
settings.coefficient = coefficient;
_zoneSettings.push_back(settings);
qCDebug(audio) << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
qCDebug(audio) << "Added Coefficient:" << itSource->name << itListener->name << settings.coefficient;
}
}
}
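The parser now resolves zone names to indices into _audioZones via find_if plus iterator subtraction. A minimal sketch of that lookup, assuming only that zones live in a std::vector<ZoneDescription> as declared in the header further down; zoneIndexForName is a hypothetical helper name.
#include <algorithm>
#include <vector>
#include <QtCore/QString>
struct ZoneDescription { QString name; /* AABox area; */ };
// returns the zone's index, or -1 when no zone has that name
int zoneIndexForName(const std::vector<ZoneDescription>& zones, const QString& name) {
    auto it = std::find_if(zones.begin(), zones.end(),
                           [&](const ZoneDescription& d) { return d.name == name; });
    return it != zones.end() ? int(it - zones.begin()) : -1;
}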
@ -739,19 +747,21 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
reverbObject.contains(WET_LEVEL)) {
bool okReverbTime, okWetLevel;
QString zone = reverbObject.value(ZONE).toString();
auto itZone = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == reverbObject.value(ZONE).toString();
});
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);
if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
if (okReverbTime && okWetLevel && itZone != end(_audioZones)) {
ReverbSettings settings;
settings.zone = zone;
settings.zone = itZone - begin(_audioZones);
settings.reverbTime = reverbTime;
settings.wetLevel = wetLevel;
_zoneReverbSettings.push_back(settings);
qCDebug(audio) << "Added Reverb:" << zone << reverbTime << wetLevel;
qCDebug(audio) << "Added Reverb:" << itZone->name << reverbTime << wetLevel;
}
}
}
@ -764,7 +774,7 @@ AudioMixer::Timer::Timing::Timing(uint64_t& sum) : _sum(sum) {
}
AudioMixer::Timer::Timing::~Timing() {
_sum += std::chrono::duration_cast<std::chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
_sum += chrono::duration_cast<chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
}
void AudioMixer::Timer::get(uint64_t& timing, uint64_t& trailing) {

View file

@ -34,13 +34,18 @@ class AudioMixer : public ThreadedAssignment {
public:
AudioMixer(ReceivedMessage& message);
struct ZoneDescription {
QString name;
AABox area;
};
struct ZoneSettings {
QString source;
QString listener;
int source;
int listener;
float coefficient;
};
struct ReverbSettings {
QString zone;
int zone;
float reverbTime;
float wetLevel;
};
@ -48,9 +53,9 @@ public:
static int getStaticJitterFrames() { return _numStaticJitterFrames; }
static bool shouldMute(float quietestFrame) { return quietestFrame > _noiseMutingThreshold; }
static float getAttenuationPerDoublingInDistance() { return _attenuationPerDoublingInDistance; }
static const QHash<QString, AABox>& getAudioZones() { return _audioZones; }
static const QVector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
static const QVector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
static const std::vector<ZoneDescription>& getAudioZones() { return _audioZones; }
static const std::vector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
static const std::vector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
static const std::pair<QString, CodecPluginPointer> negotiateCodec(std::vector<QString> codecs);
static bool shouldReplicateTo(const Node& from, const Node& to) {
@ -79,11 +84,8 @@ private slots:
private:
// mixing helpers
std::chrono::microseconds timeFrame(p_high_resolution_clock::time_point& timestamp);
std::chrono::microseconds timeFrame();
void throttle(std::chrono::microseconds frameDuration, int frame);
// pop a frame from any streams on the node
// returns the number of available streams
int prepareFrame(const SharedNodePointer& node, unsigned int frame);
AudioMixerClientData* getOrCreateClientData(Node* node);
@ -92,6 +94,9 @@ private:
void parseSettingsObject(const QJsonObject& settingsObject);
void clearDomainSettings();
p_high_resolution_clock::time_point _idealFrameTimestamp;
p_high_resolution_clock::time_point _startFrameTimestamp;
float _trailingMixRatio { 0.0f };
float _throttlingRatio { 0.0f };
@ -100,7 +105,7 @@ private:
int _numStatFrames { 0 };
AudioMixerStats _stats;
AudioMixerSlavePool _slavePool;
AudioMixerSlavePool _slavePool { _workerSharedData };
class Timer {
public:
@ -123,7 +128,9 @@ private:
uint64_t _history[TIMER_TRAILING_SECONDS] {};
int _index { 0 };
};
Timer _ticTiming;
Timer _checkTimeTiming;
Timer _sleepTiming;
Timer _frameTiming;
Timer _prepareTiming;
@ -136,10 +143,13 @@ private:
static float _attenuationPerDoublingInDistance;
static std::map<QString, CodecPluginPointer> _availableCodecs;
static QStringList _codecPreferenceOrder;
static QHash<QString, AABox> _audioZones;
static QVector<ZoneSettings> _zoneSettings;
static QVector<ReverbSettings> _zoneReverbSettings;
static std::vector<ZoneDescription> _audioZones;
static std::vector<ZoneSettings> _zoneSettings;
static std::vector<ReverbSettings> _zoneReverbSettings;
AudioMixerSlave::SharedData _workerSharedData;
};
#endif // hifi_AudioMixer_h

View file

@ -13,6 +13,8 @@
#include <random>
#include <glm/detail/func_common.hpp>
#include <QtCore/QDebug>
#include <QtCore/QJsonArray>
@ -28,7 +30,6 @@
AudioMixerClientData::AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID) :
NodeData(nodeID, nodeLocalID),
audioLimiter(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO),
_ignoreZone(*this),
_outgoingMixedAudioSequenceNumber(0),
_downstreamAudioStreamStats()
{
@ -56,7 +57,7 @@ void AudioMixerClientData::queuePacket(QSharedPointer<ReceivedMessage> message,
_packetQueue.push(message);
}
void AudioMixerClientData::processPackets() {
int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
SharedNodePointer node = _packetQueue.node;
assert(_packetQueue.empty() || node);
_packetQueue.node.clear();
@ -69,22 +70,17 @@ void AudioMixerClientData::processPackets() {
case PacketType::MicrophoneAudioWithEcho:
case PacketType::InjectAudio:
case PacketType::SilentAudioFrame: {
if (node->isUpstream()) {
setupCodecForReplicatedAgent(packet);
}
QMutexLocker lock(&getMutex());
parseData(*packet);
processStreamPacket(*packet, addedStreams);
optionallyReplicatePacket(*packet, *node);
break;
}
case PacketType::AudioStreamStats: {
QMutexLocker lock(&getMutex());
parseData(*packet);
break;
}
case PacketType::NegotiateAudioFormat:
@ -109,6 +105,10 @@ void AudioMixerClientData::processPackets() {
_packetQueue.pop();
}
assert(_packetQueue.empty());
// now that we have processed all packets for this frame
// we can prepare the sources from this client to be ready for mixing
return checkBuffersBeforeFrameSend();
}
bool isReplicatedPacket(PacketType packetType) {
@ -186,63 +186,136 @@ void AudioMixerClientData::parseRequestsDomainListData(ReceivedMessage& message)
void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node) {
QUuid uuid = node->getUUID();
// parse the UUID from the packet
QUuid avatarUuid = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
QUuid avatarUUID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
uint8_t packedGain;
message.readPrimitive(&packedGain);
float gain = unpackFloatGainFromByte(packedGain);
if (avatarUuid.isNull()) {
if (avatarUUID.isNull()) {
// set the MASTER avatar gain
setMasterAvatarGain(gain);
qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
} else {
// set the per-source avatar gain
hrtfForStream(avatarUuid, QUuid()).setGainAdjustment(gain);
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUuid << "] to " << gain;
setGainForAvatar(avatarUUID, gain);
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to " << gain;
}
}
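parsePerAvatarGainSet reads the gain as a single byte and expands it with unpackFloatGainFromByte (defined in AudioHelpers.h, not shown in this diff). As a hedged illustration of byte-packed gains only, the pair below uses a simple linear mapping; the real encoding may well be nonlinear, so treat packGainToByte, unpackGainFromByte, and MAX_GAIN as hypothetical stand-ins.
#include <algorithm>
#include <cstdint>
const float MAX_GAIN = 4.0f; // assumed upper bound on avatar gain
uint8_t packGainToByte(float gain) {
    float clamped = std::min(std::max(gain, 0.0f), MAX_GAIN);
    return (uint8_t)(clamped / MAX_GAIN * 255.0f + 0.5f); // quantize to 8 bits, rounding
}
float unpackGainFromByte(uint8_t packed) {
    return packed / 255.0f * MAX_GAIN;
}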
void AudioMixerClientData::setGainForAvatar(QUuid nodeID, float gain) {
auto it = std::find_if(_streams.active.cbegin(), _streams.active.cend(), [nodeID](const MixableStream& mixableStream){
return mixableStream.nodeStreamID.nodeID == nodeID && mixableStream.nodeStreamID.streamID.isNull();
});
if (it != _streams.active.cend()) {
it->hrtf->setGainAdjustment(gain);
}
}
void AudioMixerClientData::parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
node->parseIgnoreRequestMessage(message);
auto ignoredNodesPair = node->parseIgnoreRequestMessage(message);
// we have a vector of ignored or unignored node UUIDs - update our internal data structures so that
// streams can be included or excluded the next time a mix is created
if (ignoredNodesPair.second) {
// we have newly ignored nodes, add them to our vector
_newIgnoredNodeIDs.insert(std::end(_newIgnoredNodeIDs),
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
} else {
// we have newly unignored nodes, add them to our vector
_newUnignoredNodeIDs.insert(std::end(_newUnignoredNodeIDs),
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
}
auto nodeList = DependencyManager::get<NodeList>();
for (auto& nodeID : ignoredNodesPair.first) {
auto otherNode = nodeList->nodeWithUUID(nodeID);
if (otherNode) {
auto otherNodeMixerClientData = static_cast<AudioMixerClientData*>(otherNode->getLinkedData());
if (otherNodeMixerClientData) {
if (ignoredNodesPair.second) {
otherNodeMixerClientData->ignoredByNode(getNodeID());
} else {
otherNodeMixerClientData->unignoredByNode(getNodeID());
}
}
}
}
}
void AudioMixerClientData::ignoredByNode(QUuid nodeID) {
// first add this ID to the concurrent vector for newly ignoring nodes
_newIgnoringNodeIDs.push_back(nodeID);
// now take a lock on the consistent vector of ignoring nodes and make sure this node is in it
std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
if (std::find(_ignoringNodeIDs.begin(), _ignoringNodeIDs.end(), nodeID) == _ignoringNodeIDs.end()) {
_ignoringNodeIDs.push_back(nodeID);
}
}
void AudioMixerClientData::unignoredByNode(QUuid nodeID) {
// first add this ID to the concurrent vector for newly unignoring nodes
_newUnignoringNodeIDs.push_back(nodeID);
// now take a lock on the consistent vector of ignoring nodes and make sure this node isn't in it
std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
auto it = _ignoringNodeIDs.begin();
while (it != _ignoringNodeIDs.end()) {
if (*it == nodeID) {
it = _ignoringNodeIDs.erase(it);
} else {
++it;
}
}
}
void AudioMixerClientData::clearStagedIgnoreChanges() {
_newIgnoredNodeIDs.clear();
_newUnignoredNodeIDs.clear();
_newIgnoringNodeIDs.clear();
_newUnignoringNodeIDs.clear();
}
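The staged ignore changes above follow a per-frame producer/consumer pattern: packet-processing threads append to tbb::concurrent_vector, and a single consumer drains and clears it between frames. A minimal sketch of that pattern, with hypothetical names (StagedIgnores, consumeAndClear):
#include <tbb/concurrent_vector.h>
#include <QtCore/QUuid>
struct StagedIgnores {
    tbb::concurrent_vector<QUuid> newlyIgnoring; // packet threads append concurrently
    void stage(const QUuid& id) { newlyIgnoring.push_back(id); } // thread-safe append
    template <class Fn>
    void consumeAndClear(Fn&& fn) { // called from the single mixing pass, once per frame
        for (const auto& id : newlyIgnoring) {
            fn(id);
        }
        newlyIgnoring.clear(); // clear() is not concurrency-safe; call only while writers are quiescent
    }
};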
void AudioMixerClientData::parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
node->parseIgnoreRadiusRequestMessage(message);
bool enabled;
message->readPrimitive(&enabled);
_isIgnoreRadiusEnabled = enabled;
auto avatarAudioStream = getAvatarAudioStream();
// if we have an avatar audio stream, tell it whether its ignore box should be enabled or disabled
if (avatarAudioStream) {
if (_isIgnoreRadiusEnabled) {
avatarAudioStream->enableIgnoreBox();
} else {
avatarAudioStream->disableIgnoreBox();
}
}
}
AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
QReadLocker readLocker { &_streamsLock };
auto it = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
return stream->getStreamIdentifier().isNull();
});
auto it = _audioStreams.find(QUuid());
if (it != _audioStreams.end()) {
return dynamic_cast<AvatarAudioStream*>(it->second.get());
return dynamic_cast<AvatarAudioStream*>(it->get());
}
// no mic stream found - return NULL
return NULL;
}
void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID) {
auto it = _nodeSourcesHRTFMap.find(nodeID);
if (it != _nodeSourcesHRTFMap.end()) {
// erase the stream with the given ID from the given node
it->second.erase(streamID);
// is the map for this node now empty?
// if so we can remove it
if (it->second.size() == 0) {
_nodeSourcesHRTFMap.erase(it);
}
}
}
void AudioMixerClientData::removeAgentAvatarAudioStream() {
QWriteLocker writeLocker { &_streamsLock };
auto it = _audioStreams.find(QUuid());
auto it = std::remove_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
return stream->getStreamIdentifier().isNull();
});
if (it != _audioStreams.end()) {
_audioStreams.erase(it);
}
writeLocker.unlock();
}
int AudioMixerClientData::parseData(ReceivedMessage& message) {
@ -252,128 +325,186 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
// skip over header, appendFlag, and num stats packed
message.seek(sizeof(quint8) + sizeof(quint16));
if (message.getBytesLeftToRead() != sizeof(AudioStreamStats)) {
qWarning() << "Received AudioStreamStats of wrong size" << message.getBytesLeftToRead()
<< "instead of" << sizeof(AudioStreamStats) << "from"
<< message.getSourceID() << "at" << message.getSenderSockAddr();
return message.getPosition();
}
// read the downstream audio stream stats
message.readPrimitive(&_downstreamAudioStreamStats);
return message.getPosition();
} else {
SharedStreamPointer matchingStream;
bool isMicStream = false;
if (packetType == PacketType::MicrophoneAudioWithEcho
|| packetType == PacketType::ReplicatedMicrophoneAudioWithEcho
|| packetType == PacketType::MicrophoneAudioNoEcho
|| packetType == PacketType::ReplicatedMicrophoneAudioNoEcho
|| packetType == PacketType::SilentAudioFrame
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
QWriteLocker writeLocker { &_streamsLock };
auto micStreamIt = _audioStreams.find(QUuid());
if (micStreamIt == _audioStreams.end()) {
// we don't have a mic stream yet, so add it
// hop past the sequence number that leads the packet
message.seek(sizeof(quint16));
// pull the codec string from the packet
auto codecString = message.readString();
// determine if the stream is stereo or not
bool isStereo;
if (packetType == PacketType::SilentAudioFrame
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
quint16 numSilentSamples;
message.readPrimitive(&numSilentSamples);
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
} else {
quint8 channelFlag;
message.readPrimitive(&channelFlag);
isStereo = channelFlag == 1;
}
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
this, &AudioMixerClientData::handleMismatchAudioFormat);
auto emplaced = _audioStreams.emplace(
QUuid(),
std::unique_ptr<PositionalAudioStream> { avatarAudioStream }
);
micStreamIt = emplaced.first;
}
matchingStream = micStreamIt->second;
writeLocker.unlock();
isMicStream = true;
} else if (packetType == PacketType::InjectAudio
|| packetType == PacketType::ReplicatedInjectAudio) {
// this is injected audio
// grab the stream identifier for this injected audio
message.seek(sizeof(quint16));
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
bool isStereo;
message.readPrimitive(&isStereo);
QWriteLocker writeLock { &_streamsLock };
auto streamIt = _audioStreams.find(streamIdentifier);
if (streamIt == _audioStreams.end()) {
// we don't have this injected stream yet, so add it
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
#if INJECTORS_SUPPORT_CODECS
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
#endif
auto emplaced = _audioStreams.emplace(
streamIdentifier,
std::unique_ptr<InjectedAudioStream> { injectorStream }
);
streamIt = emplaced.first;
}
matchingStream = streamIt->second;
writeLock.unlock();
}
// seek to the beginning of the packet so that the next reader is in the right spot
message.seek(0);
// check the overflow count before we parse data
auto overflowBefore = matchingStream->getOverflowCount();
auto parseResult = matchingStream->parseData(message);
if (matchingStream->getOverflowCount() > overflowBefore) {
qCDebug(audio) << "Just overflowed on stream from" << message.getSourceID() << "at" << message.getSenderSockAddr();
qCDebug(audio) << "This stream is for" << (isMicStream ? "microphone audio" : "injected audio");
}
return parseResult;
}
return 0;
}
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
QWriteLocker writeLocker { &_streamsLock };
bool AudioMixerClientData::containsValidPosition(ReceivedMessage& message) const {
static const int SEQUENCE_NUMBER_BYTES = sizeof(quint16);
auto posBefore = message.getPosition();
message.seek(SEQUENCE_NUMBER_BYTES);
// skip over the codec string
message.readString();
switch (message.getType()) {
case PacketType::MicrophoneAudioNoEcho:
case PacketType::MicrophoneAudioWithEcho: {
// skip over the stereo flag
message.seek(message.getPosition() + sizeof(ChannelFlag));
break;
}
case PacketType::SilentAudioFrame: {
// skip the number of silent samples
message.seek(message.getPosition() + sizeof(SilentSamplesBytes));
break;
}
case PacketType::InjectAudio: {
// skip the stream ID, stereo flag, and loopback flag
message.seek(message.getPosition() + NUM_STREAM_ID_BYTES + sizeof(ChannelFlag) + sizeof(LoopbackFlag));
break;
}
default:
Q_UNREACHABLE();
break;
}
glm::vec3 peekPosition;
message.readPrimitive(&peekPosition);
// reset the position the message was at before we were called
message.seek(posBefore);
if (glm::any(glm::isnan(peekPosition))) {
return false;
}
return true;
}
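containsValidPosition above uses a peek-and-restore read: save the cursor, seek to the field of interest, read it, then seek back so downstream parsers see an untouched message. A self-contained sketch of the same pattern over a hypothetical ByteCursor stand-in (ReceivedMessage's getPosition/seek/readPrimitive have the same shape); bounds checking is omitted for brevity.
#include <cstring>
#include <vector>
struct ByteCursor {
    const std::vector<char>& bytes;
    size_t pos { 0 };
    size_t getPosition() const { return pos; }
    void seek(size_t p) { pos = p; }
    template <class T> void readPrimitive(T* out) {
        std::memcpy(out, bytes.data() + pos, sizeof(T));
        pos += sizeof(T);
    }
};
template <class T>
T peekAt(ByteCursor& cursor, size_t offset) {
    auto posBefore = cursor.getPosition(); // remember where the caller was
    cursor.seek(offset);
    T value;
    cursor.readPrimitive(&value);
    cursor.seek(posBefore);                // restore, so later reads are unaffected
    return value;
}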
void AudioMixerClientData::processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams &addedStreams) {
if (!containsValidPosition(message)) {
qDebug() << "Refusing to process audio stream from" << message.getSourceID() << "with invalid position";
return;
}
SharedStreamPointer matchingStream;
auto packetType = message.getType();
bool newStream = false;
if (packetType == PacketType::MicrophoneAudioWithEcho
|| packetType == PacketType::MicrophoneAudioNoEcho
|| packetType == PacketType::SilentAudioFrame) {
auto micStreamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
return stream->getStreamIdentifier().isNull();
});
if (micStreamIt == _audioStreams.end()) {
// we don't have a mic stream yet, so add it
// hop past the sequence number that leads the packet
message.seek(sizeof(StreamSequenceNumber));
// pull the codec string from the packet
auto codecString = message.readString();
// determine if the stream is stereo or not
bool isStereo;
if (packetType == PacketType::SilentAudioFrame || packetType == PacketType::ReplicatedSilentAudioFrame) {
SilentSamplesBytes numSilentSamples;
message.readPrimitive(&numSilentSamples);
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
} else {
ChannelFlag channelFlag;
message.readPrimitive(&channelFlag);
isStereo = channelFlag == 1;
}
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
if (_isIgnoreRadiusEnabled) {
avatarAudioStream->enableIgnoreBox();
} else {
avatarAudioStream->disableIgnoreBox();
}
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
this, &AudioMixerClientData::handleMismatchAudioFormat);
matchingStream = SharedStreamPointer(avatarAudioStream);
_audioStreams.push_back(matchingStream);
newStream = true;
} else {
matchingStream = *micStreamIt;
}
} else if (packetType == PacketType::InjectAudio) {
// this is injected audio
// skip the sequence number and codec string and grab the stream identifier for this injected audio
message.seek(sizeof(StreamSequenceNumber));
message.readString();
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto streamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [&streamIdentifier](const SharedStreamPointer& stream) {
return stream->getStreamIdentifier() == streamIdentifier;
});
if (streamIt == _audioStreams.end()) {
bool isStereo;
message.readPrimitive(&isStereo);
// we don't have this injected stream yet, so add it
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
#if INJECTORS_SUPPORT_CODECS
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
#endif
matchingStream = SharedStreamPointer(injectorStream);
_audioStreams.push_back(matchingStream);
newStream = true;
} else {
matchingStream = *streamIt;
}
}
// seek to the beginning of the packet so that the next reader is in the right spot
message.seek(0);
// check the overflow count before we parse data
auto overflowBefore = matchingStream->getOverflowCount();
matchingStream->parseData(message);
if (matchingStream->getOverflowCount() > overflowBefore) {
qCDebug(audio) << "Just overflowed on stream" << matchingStream->getStreamIdentifier()
<< "from" << message.getSourceID();
}
if (newStream) {
// whenever a stream is added, push it to the concurrent vector of streams added this frame
addedStreams.emplace_back(getNodeID(), getNodeLocalID(), matchingStream->getStreamIdentifier(), matchingStream.get());
}
}
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
auto it = _audioStreams.begin();
while (it != _audioStreams.end()) {
SharedStreamPointer stream = it->second;
SharedStreamPointer stream = *it;
if (stream->popFrames(1, true) > 0) {
stream->updateLastPopOutputLoudnessAndTrailingLoudness();
@ -388,7 +519,7 @@ int AudioMixerClientData::checkBuffersBeforeFrameSend() {
// this is an inactive injector, pull it from our streams
// first emit that it is finished so that the HRTF objects for this source can be cleaned up
emit injectorStreamFinished(it->second->getStreamIdentifier());
emit injectorStreamFinished(stream->getStreamIdentifier());
// erase the stream to drop our ref to the shared pointer and remove it
it = _audioStreams.erase(it);
@ -441,7 +572,7 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
// pack the calculated number of stream stats
for (int i = 0; i < numStreamStatsToPack; i++) {
PositionalAudioStream* stream = it->second.get();
PositionalAudioStream* stream = it->get();
stream->perSecondCallbackForUpdatingStats();
@ -513,12 +644,12 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
QJsonArray injectorArray;
auto streamsCopy = getAudioStreams();
for (auto& injectorPair : streamsCopy) {
if (injectorPair.second->getType() == PositionalAudioStream::Injector) {
if (injectorPair->getType() == PositionalAudioStream::Injector) {
QJsonObject upstreamStats;
AudioStreamStats streamStats = injectorPair.second->getAudioStreamStats();
AudioStreamStats streamStats = injectorPair->getAudioStreamStats();
upstreamStats["inj.desired"] = streamStats._desiredJitterBufferFrames;
upstreamStats["desired_calc"] = injectorPair.second->getCalculatedJitterBufferFrames();
upstreamStats["desired_calc"] = injectorPair->getCalculatedJitterBufferFrames();
upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
upstreamStats["available"] = (double) streamStats._framesAvailable;
upstreamStats["unplayed"] = (double) streamStats._unplayedMs;
@ -609,99 +740,6 @@ void AudioMixerClientData::cleanupCodec() {
}
}
AudioMixerClientData::IgnoreZone& AudioMixerClientData::IgnoreZoneMemo::get(unsigned int frame) {
// check for a memoized zone
if (frame != _frame.load(std::memory_order_acquire)) {
AvatarAudioStream* stream = _data.getAvatarAudioStream();
// get the initial dimensions from the stream
glm::vec3 corner = stream ? stream->getAvatarBoundingBoxCorner() : glm::vec3(0);
glm::vec3 scale = stream ? stream->getAvatarBoundingBoxScale() : glm::vec3(0);
// enforce a minimum scale
static const glm::vec3 MIN_IGNORE_BOX_SCALE = glm::vec3(0.3f, 1.3f, 0.3f);
if (glm::any(glm::lessThan(scale, MIN_IGNORE_BOX_SCALE))) {
scale = MIN_IGNORE_BOX_SCALE;
}
// (this is an arbitrary number, determined empirically for comfort)
const float IGNORE_BOX_SCALE_FACTOR = 2.4f;
scale *= IGNORE_BOX_SCALE_FACTOR;
// create the box (we use a box for the zone for convenience)
AABox box(corner, scale);
// update the memoized zone
// This may be called by multiple threads concurrently,
// so take a lock and only update the memo if this call is first.
// This prevents concurrent updates from invalidating the returned reference
// (contingent on the preconditions listed in the header).
std::lock_guard<std::mutex> lock(_mutex);
if (frame != _frame.load(std::memory_order_acquire)) {
_zone = box;
unsigned int oldFrame = _frame.exchange(frame, std::memory_order_release);
Q_UNUSED(oldFrame);
}
}
return _zone;
}
void AudioMixerClientData::IgnoreNodeCache::cache(bool shouldIgnore) {
if (!_isCached) {
_shouldIgnore = shouldIgnore;
_isCached = true;
}
}
bool AudioMixerClientData::IgnoreNodeCache::isCached() {
return _isCached;
}
bool AudioMixerClientData::IgnoreNodeCache::shouldIgnore() {
bool ignore = _shouldIgnore;
_isCached = false;
return ignore;
}
bool AudioMixerClientData::shouldIgnore(const SharedNodePointer self, const SharedNodePointer node, unsigned int frame) {
// this is symmetric over self / node; if computed, it is cached in the other
// check the cache to avoid computation
auto& cache = _nodeSourcesIgnoreMap[node->getUUID()];
if (cache.isCached()) {
return cache.shouldIgnore();
}
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
if (!nodeData) {
return false;
}
// compute shouldIgnore
bool shouldIgnore = true;
if ( // the nodes are not ignoring each other explicitly (or are but get data regardless)
(!self->isIgnoringNodeWithID(node->getUUID()) ||
(nodeData->getRequestsDomainListData() && node->getCanKick())) &&
(!node->isIgnoringNodeWithID(self->getUUID()) ||
(getRequestsDomainListData() && self->getCanKick()))) {
// if either node is enabling an ignore radius, check their proximity
if ((self->isIgnoreRadiusEnabled() || node->isIgnoreRadiusEnabled())) {
auto& zone = _ignoreZone.get(frame);
auto& nodeZone = nodeData->_ignoreZone.get(frame);
shouldIgnore = zone.touches(nodeZone);
} else {
shouldIgnore = false;
}
}
// cache in node
nodeData->_nodeSourcesIgnoreMap[self->getUUID()].cache(shouldIgnore);
return shouldIgnore;
}
void AudioMixerClientData::setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message) {
// hop past the sequence number that leads the packet
message->seek(sizeof(quint16));

View file

@ -14,6 +14,8 @@
#include <queue>
#include <tbb/concurrent_vector.h>
#include <QtCore/QJsonObject>
#include <AABox.h>
@ -30,39 +32,34 @@
class AudioMixerClientData : public NodeData {
Q_OBJECT
public:
struct AddedStream {
NodeIDStreamID nodeIDStreamID;
PositionalAudioStream* positionalStream;
AddedStream(QUuid nodeID, Node::LocalID localNodeID,
StreamID streamID, PositionalAudioStream* positionalStream) :
nodeIDStreamID(nodeID, localNodeID, streamID), positionalStream(positionalStream) {};
};
using ConcurrentAddedStreams = tbb::concurrent_vector<AddedStream>;
AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID);
~AudioMixerClientData();
using SharedStreamPointer = std::shared_ptr<PositionalAudioStream>;
using AudioStreamMap = std::unordered_map<QUuid, SharedStreamPointer>;
using AudioStreamVector = std::vector<SharedStreamPointer>;
void queuePacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer node);
void processPackets();
int processPackets(ConcurrentAddedStreams& addedStreams); // returns the number of available streams this frame
// locks the mutex to make a copy
AudioStreamMap getAudioStreams() { QReadLocker readLock { &_streamsLock }; return _audioStreams; }
AudioStreamVector& getAudioStreams() { return _audioStreams; }
AvatarAudioStream* getAvatarAudioStream();
// returns whether self (this data's node) should ignore node, memoized by frame
// precondition: frame is increasing after first call (including overflow wrap)
bool shouldIgnore(SharedNodePointer self, SharedNodePointer node, unsigned int frame);
// the following methods should be called from the AudioMixer assignment thread ONLY
// they are not thread-safe
// returns a new or existing HRTF object for the given stream from the given node
AudioHRTF& hrtfForStream(const QUuid& nodeID, const QUuid& streamID = QUuid()) { return _nodeSourcesHRTFMap[nodeID][streamID]; }
// removes an AudioHRTF object for a given stream
void removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID = QUuid());
// remove all sources and data from this node
void removeNode(const QUuid& nodeID) { _nodeSourcesIgnoreMap.unsafe_erase(nodeID); _nodeSourcesHRTFMap.erase(nodeID); }
void removeAgentAvatarAudioStream();
// packet parsers
int parseData(ReceivedMessage& message) override;
void processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams& addedStreams);
void negotiateAudioFormat(ReceivedMessage& message, const SharedNodePointer& node);
void parseRequestsDomainListData(ReceivedMessage& message);
void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
@ -108,11 +105,56 @@ public:
bool shouldMuteClient() { return _shouldMuteClient; }
void setShouldMuteClient(bool shouldMuteClient) { _shouldMuteClient = shouldMuteClient; }
glm::vec3 getPosition() { return getAvatarAudioStream() ? getAvatarAudioStream()->getPosition() : glm::vec3(0); }
bool getRequestsDomainListData() { return _requestsDomainListData; }
bool getRequestsDomainListData() const { return _requestsDomainListData; }
void setRequestsDomainListData(bool requesting) { _requestsDomainListData = requesting; }
void setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message);
struct MixableStream {
float approximateVolume { 0.0f };
NodeIDStreamID nodeStreamID;
std::unique_ptr<AudioHRTF> hrtf;
PositionalAudioStream* positionalStream;
bool ignoredByListener { false };
bool ignoringListener { false };
MixableStream(NodeIDStreamID nodeIDStreamID, PositionalAudioStream* positionalStream) :
nodeStreamID(nodeIDStreamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
MixableStream(QUuid nodeID, Node::LocalID localNodeID, StreamID streamID, PositionalAudioStream* positionalStream) :
nodeStreamID(nodeID, localNodeID, streamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
};
using MixableStreamsVector = std::vector<MixableStream>;
struct Streams {
MixableStreamsVector active;
MixableStreamsVector inactive;
MixableStreamsVector skipped;
};
Streams& getStreams() { return _streams; }
// thread-safe, called from AudioMixerSlave(s) while processing ignore packets for other nodes
void ignoredByNode(QUuid nodeID);
void unignoredByNode(QUuid nodeID);
// start of methods called non-concurrently from single AudioMixerSlave mixing for the owning node
const Node::IgnoredNodeIDs& getNewIgnoredNodeIDs() const { return _newIgnoredNodeIDs; }
const Node::IgnoredNodeIDs& getNewUnignoredNodeIDs() const { return _newUnignoredNodeIDs; }
using ConcurrentIgnoreNodeIDs = tbb::concurrent_vector<QUuid>;
const ConcurrentIgnoreNodeIDs& getNewIgnoringNodeIDs() const { return _newIgnoringNodeIDs; }
const ConcurrentIgnoreNodeIDs& getNewUnignoringNodeIDs() const { return _newUnignoringNodeIDs; }
void clearStagedIgnoreChanges();
const Node::IgnoredNodeIDs& getIgnoringNodeIDs() const { return _ignoringNodeIDs; }
bool getHasReceivedFirstMix() const { return _hasReceivedFirstMix; }
void setHasReceivedFirstMix(bool hasReceivedFirstMix) { _hasReceivedFirstMix = hasReceivedFirstMix; }
// end of methods called non-concurrently from single AudioMixerSlave
signals:
void injectorStreamFinished(const QUuid& streamIdentifier);
@ -126,52 +168,15 @@ private:
};
PacketQueue _packetQueue;
QReadWriteLock _streamsLock;
AudioStreamMap _audioStreams; // microphone stream from avatar is stored under key of null UUID
AudioStreamVector _audioStreams; // microphone stream from avatar has a null stream ID
void optionallyReplicatePacket(ReceivedMessage& packet, const Node& node);
using IgnoreZone = AABox;
class IgnoreZoneMemo {
public:
IgnoreZoneMemo(AudioMixerClientData& data) : _data(data) {}
void setGainForAvatar(QUuid nodeID, float gain);
// returns an ignore zone, memoized by frame (lockless if the zone is already memoized)
// preconditions:
// - frame is increasing after first call (including overflow wrap)
// - there are no references left from calls to getIgnoreZone(frame - 1)
IgnoreZone& get(unsigned int frame);
bool containsValidPosition(ReceivedMessage& message) const;
private:
AudioMixerClientData& _data;
IgnoreZone _zone;
std::atomic<unsigned int> _frame { 0 };
std::mutex _mutex;
};
IgnoreZoneMemo _ignoreZone;
class IgnoreNodeCache {
public:
// std::atomic is not copyable - always initialize uncached
IgnoreNodeCache() {}
IgnoreNodeCache(const IgnoreNodeCache& other) {}
void cache(bool shouldIgnore);
bool isCached();
bool shouldIgnore();
private:
std::atomic<bool> _isCached { false };
bool _shouldIgnore { false };
};
struct IgnoreNodeCacheHasher { std::size_t operator()(const QUuid& key) const { return qHash(key); } };
using NodeSourcesIgnoreMap = tbb::concurrent_unordered_map<QUuid, IgnoreNodeCache, IgnoreNodeCacheHasher>;
NodeSourcesIgnoreMap _nodeSourcesIgnoreMap;
using HRTFMap = std::unordered_map<QUuid, AudioHRTF>;
using NodeSourcesHRTFMap = std::unordered_map<QUuid, HRTFMap>;
NodeSourcesHRTFMap _nodeSourcesHRTFMap;
Streams _streams;
quint16 _outgoingMixedAudioSequenceNumber;
@ -190,6 +195,21 @@ private:
bool _shouldMuteClient { false };
bool _requestsDomainListData { false };
std::vector<AddedStream> _newAddedStreams;
Node::IgnoredNodeIDs _newIgnoredNodeIDs;
Node::IgnoredNodeIDs _newUnignoredNodeIDs;
tbb::concurrent_vector<QUuid> _newIgnoringNodeIDs;
tbb::concurrent_vector<QUuid> _newUnignoringNodeIDs;
std::mutex _ignoringNodeIDsMutex;
Node::IgnoredNodeIDs _ignoringNodeIDs;
std::atomic_bool _isIgnoreRadiusEnabled { false };
bool _hasReceivedFirstMix { false };
};
#endif // hifi_AudioMixerClientData_h

View file

@ -36,7 +36,10 @@
#include "InjectedAudioStream.h"
#include "AudioHelpers.h"
using AudioStreamMap = AudioMixerClientData::AudioStreamMap;
using namespace std;
using AudioStreamVector = AudioMixerClientData::AudioStreamVector;
using MixableStream = AudioMixerClientData::MixableStream;
using MixableStreamsVector = AudioMixerClientData::MixableStreamsVector;
// packet helpers
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec);
@ -46,9 +49,8 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
// mix helpers
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition);
inline float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd);
inline float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho);
inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition);
@ -56,15 +58,16 @@ inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const
void AudioMixerSlave::processPackets(const SharedNodePointer& node) {
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
if (data) {
data->processPackets();
// process packets and collect the number of streams available for this frame
stats.sumStreams += data->processPackets(_sharedData.addedStreams);
}
}
void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
_begin = begin;
_end = end;
_frame = frame;
_throttlingRatio = throttlingRatio;
_numToRetain = numToRetain;
}
void AudioMixerSlave::mix(const SharedNodePointer& node) {
@ -125,105 +128,338 @@ void AudioMixerSlave::mix(const SharedNodePointer& node) {
}
}
template <class Container, class Predicate>
void erase_if(Container& cont, Predicate&& pred) {
auto it = remove_if(begin(cont), end(cont), std::forward<Predicate>(pred));
cont.erase(it, end(cont));
}
template <class Container>
bool contains(const Container& cont, typename Container::value_type value) {
return std::any_of(begin(cont), end(cont), [&value](const auto& element) {
return value == element;
});
}
// This class lets you do an erase if in several segments
// that use different predicates
template <class Container>
class SegmentedEraseIf {
public:
using iterator = typename Container::iterator;
SegmentedEraseIf(Container& cont) : _cont(cont) {
_first = begin(_cont);
_it = _first;
}
~SegmentedEraseIf() {
assert(_it == end(_cont));
_cont.erase(_first, _it);
}
template <class Predicate>
void iterateTo(iterator last, Predicate pred) {
while (_it != last) {
if (!pred(*_it)) {
if (_first != _it) {
*_first = move(*_it);
}
++_first;
}
++_it;
}
}
private:
iterator _first;
iterator _it;
Container& _cont;
};
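A usage sketch for SegmentedEraseIf, applying a different predicate to each of two segments of one vector while compacting survivors in a single pass; the values and midpoint split are illustrative.
#include <vector>
void segmentedEraseDemo(std::vector<int>& values) {
    auto mid = values.begin() + values.size() / 2;
    {
        SegmentedEraseIf<std::vector<int>> erase(values);
        erase.iterateTo(mid, [](int v) { return v < 0; });            // front segment: drop negatives
        erase.iterateTo(values.end(), [](int v) { return v > 100; }); // back segment: drop large values
    } // the destructor asserts we iterated to end() and erases the compacted-out tail
}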
void AudioMixerSlave::addStreams(Node& listener, AudioMixerClientData& listenerData) {
auto& ignoredNodeIDs = listener.getIgnoredNodeIDs();
auto& ignoringNodeIDs = listenerData.getIgnoringNodeIDs();
auto& streams = listenerData.getStreams();
// add data for newly created streams to our vector
if (!listenerData.getHasReceivedFirstMix()) {
// when this listener is new, we need to fill its added streams object with all available streams
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
if (nodeData) {
for (auto& stream : nodeData->getAudioStreams()) {
bool ignoredByListener = contains(ignoredNodeIDs, node->getUUID());
bool ignoringListener = contains(ignoringNodeIDs, node->getUUID());
if (ignoredByListener || ignoringListener) {
streams.skipped.emplace_back(node->getUUID(), node->getLocalID(),
stream->getStreamIdentifier(), stream.get());
// pre-populate ignored and ignoring flags for this stream
streams.skipped.back().ignoredByListener = ignoredByListener;
streams.skipped.back().ignoringListener = ignoringListener;
} else {
streams.active.emplace_back(node->getUUID(), node->getLocalID(),
stream->getStreamIdentifier(), stream.get());
}
}
}
});
// flag this listener as having received its first mix so we know we don't need to enumerate all nodes again
listenerData.setHasReceivedFirstMix(true);
} else {
for (const auto& newStream : _sharedData.addedStreams) {
bool ignoredByListener = contains(ignoredNodeIDs, newStream.nodeIDStreamID.nodeID);
bool ignoringListener = contains(ignoringNodeIDs, newStream.nodeIDStreamID.nodeID);
if (ignoredByListener || ignoringListener) {
streams.skipped.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
// pre-populate ignored and ignoring flags for this stream
streams.skipped.back().ignoredByListener = ignoredByListener;
streams.skipped.back().ignoringListener = ignoringListener;
} else {
streams.active.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
}
}
}
}
bool shouldBeRemoved(const MixableStream& stream, const AudioMixerSlave::SharedData& sharedData) {
return (contains(sharedData.removedNodes, stream.nodeStreamID.nodeLocalID) ||
contains(sharedData.removedStreams, stream.nodeStreamID));
};
bool shouldBeInactive(MixableStream& stream) {
return (!stream.positionalStream->lastPopSucceeded() ||
stream.positionalStream->getLastPopOutputLoudness() == 0.0f);
};
bool shouldBeSkipped(MixableStream& stream, const Node& listener,
const AvatarAudioStream& listenerAudioStream,
const AudioMixerClientData& listenerData) {
if (stream.nodeStreamID.nodeLocalID == listener.getLocalID()) {
return !stream.positionalStream->shouldLoopbackForNode();
}
// grab the unprocessed ignores and unignores from and for this listener
const auto& nodesIgnoredByListener = listenerData.getNewIgnoredNodeIDs();
const auto& nodesUnignoredByListener = listenerData.getNewUnignoredNodeIDs();
const auto& nodesIgnoringListener = listenerData.getNewIgnoringNodeIDs();
const auto& nodesUnignoringListener = listenerData.getNewUnignoringNodeIDs();
// update this stream's ignored flag against the newly staged changes: if it was ignored,
// check the newly unignored IDs; otherwise check the newly ignored IDs and flag it as such
if (stream.ignoredByListener) {
stream.ignoredByListener = !contains(nodesUnignoredByListener, stream.nodeStreamID.nodeID);
} else {
stream.ignoredByListener = contains(nodesIgnoredByListener, stream.nodeStreamID.nodeID);
}
if (stream.ignoringListener) {
stream.ignoringListener = !contains(nodesUnignoringListener, stream.nodeStreamID.nodeID);
} else {
stream.ignoringListener = contains(nodesIgnoringListener, stream.nodeStreamID.nodeID);
}
bool listenerIsAdmin = listenerData.getRequestsDomainListData() && listener.getCanKick();
if (stream.ignoredByListener || (stream.ignoringListener && !listenerIsAdmin)) {
return true;
}
bool shouldCheckIgnoreBox = (listenerAudioStream.isIgnoreBoxEnabled() ||
stream.positionalStream->isIgnoreBoxEnabled());
if (shouldCheckIgnoreBox &&
listenerAudioStream.getIgnoreBox().touches(stream.positionalStream->getIgnoreBox())) {
return true;
}
return false;
};
float approximateVolume(const MixableStream& stream, const AvatarAudioStream* listenerAudioStream) {
if (stream.positionalStream->getLastPopOutputTrailingLoudness() == 0.0f) {
return 0.0f;
}
if (stream.positionalStream == listenerAudioStream) {
return 1.0f;
}
// approximate the gain
float gain = approximateGain(*listenerAudioStream, *(stream.positionalStream));
// for avatar streams, modify by the set gain adjustment
if (stream.nodeStreamID.streamID.isNull()) {
gain *= stream.hrtf->getGainAdjustment();
}
return stream.positionalStream->getLastPopOutputTrailingLoudness() * gain;
};
bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
AvatarAudioStream* listenerAudioStream = static_cast<AudioMixerClientData*>(listener->getLinkedData())->getAvatarAudioStream();
AudioMixerClientData* listenerData = static_cast<AudioMixerClientData*>(listener->getLinkedData());
// if we received an invalid position from this listener, then refuse to make them a mix
// because we don't know how to do it properly
if (!listenerAudioStream->hasValidPosition()) {
return false;
}
// zero out the mix for this listener
memset(_mixSamples, 0, sizeof(_mixSamples));
bool isThrottling = _throttlingRatio > 0.0f;
std::vector<std::pair<float, SharedNodePointer>> throttledNodes;
bool isThrottling = _numToRetain != -1;
typedef void (AudioMixerSlave::*MixFunctor)(
AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&);
auto forAllStreams = [&](const SharedNodePointer& node, AudioMixerClientData* nodeData, MixFunctor mixFunctor) {
auto nodeID = node->getUUID();
for (auto& streamPair : nodeData->getAudioStreams()) {
auto nodeStream = streamPair.second;
(this->*mixFunctor)(*listenerData, nodeID, *listenerAudioStream, *nodeStream);
}
};
auto& streams = listenerData->getStreams();
#ifdef HIFI_AUDIO_MIXER_DEBUG
auto mixStart = p_high_resolution_clock::now();
#endif
addStreams(*listener, *listenerData);
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
if (!nodeData) {
return;
// Process skipped streams
erase_if(streams.skipped, [&](MixableStream& stream) {
if (shouldBeRemoved(stream, _sharedData)) {
return true;
}
if (*node == *listener) {
// only mix the echo, if requested
for (auto& streamPair : nodeData->getAudioStreams()) {
auto nodeStream = streamPair.second;
if (nodeStream->shouldLoopbackForNode()) {
mixStream(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
}
}
} else if (!listenerData->shouldIgnore(listener, node, _frame)) {
if (!isThrottling) {
forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
if (!shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
if (shouldBeInactive(stream)) {
streams.inactive.push_back(move(stream));
++stats.skippedToInactive;
} else {
auto nodeID = node->getUUID();
streams.active.push_back(move(stream));
++stats.skippedToActive;
}
return true;
}
// compute the node's max relative volume
float nodeVolume = 0.0f;
for (auto& streamPair : nodeData->getAudioStreams()) {
auto nodeStream = streamPair.second;
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
}
return false;
});
// approximate the gain
glm::vec3 relativePosition = nodeStream->getPosition() - listenerAudioStream->getPosition();
float gain = approximateGain(*listenerAudioStream, *nodeStream, relativePosition);
// Process inactive streams
erase_if(streams.inactive, [&](MixableStream& stream) {
if (shouldBeRemoved(stream, _sharedData)) {
return true;
}
// modify by hrtf gain adjustment
auto& hrtf = listenerData->hrtfForStream(nodeID, nodeStream->getStreamIdentifier());
gain *= hrtf.getGainAdjustment();
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
streams.skipped.push_back(move(stream));
++stats.inactiveToSkipped;
return true;
}
auto streamVolume = nodeStream->getLastPopOutputTrailingLoudness() * gain;
nodeVolume = std::max(streamVolume, nodeVolume);
}
if (!shouldBeInactive(stream)) {
streams.active.push_back(move(stream));
++stats.inactiveToActive;
return true;
}
// max-heapify the nodes by relative volume
throttledNodes.push_back({ nodeVolume, node });
std::push_heap(throttledNodes.begin(), throttledNodes.end());
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
}
return false;
});
// Process active streams
erase_if(streams.active, [&](MixableStream& stream) {
if (shouldBeRemoved(stream, _sharedData)) {
return true;
}
if (isThrottling) {
// we're throttling, so we need to update the approximate volume for any un-skipped streams
// unless this is simply for an echo (in which case the approx volume is 1.0)
stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
} else {
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
addStream(stream, *listenerAudioStream, 0.0f);
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
if (shouldBeInactive(stream)) {
// To reduce artifacts we still call render to flush the HRTF for every silent
// source on the first frame where it becomes silent;
// this ensures the correct tail from the last mixed block
streams.inactive.push_back(move(stream));
++stats.activeToInactive;
return true;
}
}
return false;
});
if (isThrottling) {
// pop the loudest nodes off the heap and mix their streams
int numToRetain = (int)(std::distance(_begin, _end) * (1 - _throttlingRatio));
for (int i = 0; i < numToRetain; i++) {
if (throttledNodes.empty()) {
break;
// since we're throttling, we need to partition the mixable streams into throttled and unthrottled sets
int numToRetain = min(_numToRetain, (int)streams.active.size()); // Make sure we don't overflow
auto throttlePoint = begin(streams.active) + numToRetain;
std::nth_element(streams.active.begin(), throttlePoint, streams.active.end(),
[](const auto& a, const auto& b)
{
return a.approximateVolume > b.approximateVolume;
});
SegmentedEraseIf<MixableStreamsVector> erase(streams.active);
erase.iterateTo(throttlePoint, [&](MixableStream& stream) {
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
resetHRTFState(stream);
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
std::pop_heap(throttledNodes.begin(), throttledNodes.end());
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
auto& node = throttledNodes.back().second;
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
if (shouldBeInactive(stream)) {
// To reduce artifacts we still call render to flush the HRTF for every silent
// source on the first frame where it becomes silent;
// this ensures the correct tail from the last mixed block
streams.inactive.push_back(move(stream));
++stats.activeToInactive;
return true;
}
throttledNodes.pop_back();
}
return false;
});
erase.iterateTo(end(streams.active), [&](MixableStream& stream) {
// To reduce artifacts we reset the HRTF state for every throttled
// source on the first frame where it becomes throttled;
// this at least removes the tail from the last mixed block,
// preventing excessive artifacts on the next first block
resetHRTFState(stream);
// throttle the remaining nodes' streams
for (const std::pair<float, SharedNodePointer>& nodePair : throttledNodes) {
auto& node = nodePair.second;
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
forAllStreams(node, nodeData, &AudioMixerSlave::throttleStream);
}
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
if (shouldBeInactive(stream)) {
streams.inactive.push_back(move(stream));
++stats.activeToInactive;
return true;
}
return false;
});
}
stats.skipped += (int)streams.skipped.size();
stats.inactive += (int)streams.inactive.size();
stats.active += (int)streams.active.size();
// clear the newly ignored, un-ignored, ignoring, and un-ignoring streams now that we've processed them
listenerData->clearStagedIgnoreChanges();
#ifdef HIFI_AUDIO_MIXER_DEBUG
auto mixEnd = p_high_resolution_clock::now();
auto mixTime = std::chrono::duration_cast<std::chrono::nanoseconds>(mixEnd - mixStart);
@ -246,51 +482,35 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
return hasAudio;
}
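The throttling branch above selects the _numToRetain loudest streams with std::nth_element rather than a full sort: an average O(n) partition in which ordering within each side is unspecified. A standalone sketch of that selection over a plain volume vector (names are illustrative):
#include <algorithm>
#include <vector>
void keepLoudest(std::vector<float>& approximateVolumes, int numToRetain) {
    numToRetain = std::min(numToRetain, (int)approximateVolumes.size()); // don't run past the end
    auto throttlePoint = approximateVolumes.begin() + numToRetain;
    std::nth_element(approximateVolumes.begin(), throttlePoint, approximateVolumes.end(),
                     [](float a, float b) { return a > b; }); // descending: loudest first
    // [begin, throttlePoint) now holds the numToRetain largest volumes;
    // everything at or past throttlePoint is a candidate for throttling
}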
void AudioMixerSlave::throttleStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
// only throttle this stream if it has a valid position; we won't know how to mix it otherwise
if (streamToAdd.hasValidPosition()) {
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, true);
}
}
void AudioMixerSlave::mixStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
// only add the stream to the mix if it has a valid position; we won't know how to mix it otherwise
if (streamToAdd.hasValidPosition()) {
addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, false);
}
}
void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
bool throttle) {
void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain) {
++stats.totalMixes;
// to reduce artifacts we call the HRTF functor for every source, even if throttled or silent;
// this ensures the correct tail from the last mixed block and the correct spatialization of the next first block
auto streamToAdd = mixableStream.positionalStream;
// check if this is a server echo of a source back to itself
bool isEcho = (&streamToAdd == &listeningNodeStream);
bool isEcho = (streamToAdd == &listeningNodeStream);
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(listenerNodeData, listeningNodeStream, streamToAdd, relativePosition, distance, isEcho);
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
const int HRTF_DATASET_INDEX = 1;
if (!streamToAdd.lastPopSucceeded()) {
if (!streamToAdd->lastPopSucceeded()) {
bool forceSilentBlock = true;
if (!streamToAdd.getLastPopOutput().isNull()) {
bool isInjector = dynamic_cast<const InjectedAudioStream*>(&streamToAdd);
if (!streamToAdd->getLastPopOutput().isNull()) {
bool isInjector = dynamic_cast<const InjectedAudioStream*>(streamToAdd);
// for an injector, just go silent - the injector has likely ended
// for other inputs (microphone, &c.), repeat with a fade to avoid the harsh jump to silence
if (!isInjector) {
// calculate its fade factor, which depends on how many times it's already been repeated.
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd.getConsecutiveNotMixedCount() - 1);
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1);
if (fadeFactor > 0.0f) {
// apply the fadeFactor to the gain
gain *= fadeFactor;
@ -302,15 +522,12 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
if (forceSilentBlock) {
// call renderSilent with a forced silent block to reduce artifacts
// (this is not done for stereo streams since they do not go through the HRTF)
if (!streamToAdd.isStereo() && !isEcho) {
// get the existing listener-source HRTF object, or create a new one
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
if (!streamToAdd->isStereo() && !isEcho) {
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
hrtf.renderSilent(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
mixableStream.hrtf->render(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.hrtfSilentRenders;
++stats.hrtfRenders;
}
return;
@ -318,16 +535,15 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
// grab the stream from the ring buffer
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd.getLastPopOutput();
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
// stereo sources are not passed through HRTF
if (streamToAdd.isStereo()) {
if (streamToAdd->isStereo()) {
// apply the avatar gain adjustment
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
gain *= hrtf.getGainAdjustment();
gain *= mixableStream.hrtf->getGainAdjustment();
const float scale = 1/32768.0f; // int16_t to float
const float scale = 1 / 32768.0f; // int16_t to float
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
_mixSamples[2*i+0] += (float)streamPopOutput[2*i+0] * gain * scale;
@ -335,11 +551,8 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
++stats.manualStereoMixes;
return;
}
// echo sources are not passed through HRTF
if (isEcho) {
} else if (isEcho) {
// echo sources are not passed through HRTF
const float scale = 1/32768.0f; // int16_t to float
@ -350,41 +563,38 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
++stats.manualEchoMixes;
return;
} else {
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
mixableStream.hrtf->render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.hrtfRenders;
}
}
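For reference, the manual stereo path above boils down to a gain-scaled int16-to-float accumulate. A self-contained sketch; FRAME_SAMPLES is an assumed stand-in for AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL:

#include <cstdint>

constexpr int FRAME_SAMPLES = 240; // assumed stand-in for the network frame size

// Accumulate one interleaved stereo frame into the float mix buffer, applying
// gain and the 1/32768 int16_t-to-float normalization used above.
void accumulateStereo(const int16_t* in, float* mix, float gain) {
    constexpr float scale = 1.0f / 32768.0f;
    for (int i = 0; i < FRAME_SAMPLES; ++i) {
        mix[2 * i + 0] += static_cast<float>(in[2 * i + 0]) * gain * scale;
        mix[2 * i + 1] += static_cast<float>(in[2 * i + 1]) * gain * scale;
    }
}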
// get the existing listener-source HRTF object, or create a new one
auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain) {
auto streamToAdd = mixableStream.positionalStream;
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
// check if this is a server echo of a source back to itself
bool isEcho = (streamToAdd == &listeningNodeStream);
if (streamToAdd.getLastPopOutputLoudness() == 0.0f) {
// call renderSilent to reduce artifacts
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
++stats.hrtfSilentRenders;
return;
}
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
if (throttle) {
// call renderSilent with actual frame data and a gain of 0.0f to reduce artifacts
hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, 0.0f,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
mixableStream.hrtf->setParameterHistory(azimuth, distance, gain);
++stats.hrtfThrottleRenders;
return;
}
++stats.hrtfUpdates;
}
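updateHRTFParameters() recomputes azimuth, distance, and gain without rendering any samples. The azimuth itself is the horizontal angle of the source in the listener's frame; a hedged sketch of that computation (not necessarily the exact math inside computeAzimuth()):

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Rotate the listener-to-source vector into the listener's local frame and
// take the horizontal angle; 0 means straight ahead (assuming -z is forward).
float azimuthSketch(const glm::quat& listenerOrientation, const glm::vec3& relativePosition) {
    glm::vec3 local = glm::inverse(listenerOrientation) * relativePosition;
    return std::atan2(local.x, -local.z);
}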
if (streamToAdd.getType() == PositionalAudioStream::Injector) {
// apply per-avatar gain to positional audio injectors, which wouldn't otherwise be affected by PAL sliders
hrtf.setGainAdjustment(listenerNodeData.hrtfForStream(sourceNodeID, QUuid()).getGainAdjustment());
}
hrtf.render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.hrtfRenders;
void AudioMixerSlave::resetHRTFState(AudioMixerClientData::MixableStream& mixableStream) {
mixableStream.hrtf->reset();
++stats.hrtfResets;
}
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec) {
@ -443,12 +653,12 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
glm::vec3 streamPosition = stream->getPosition();
// find reverb properties
for (int i = 0; i < reverbSettings.size(); ++i) {
AABox box = audioZones[reverbSettings[i].zone];
for (const auto& settings : reverbSettings) {
AABox box = audioZones[settings.zone].area;
if (box.contains(streamPosition)) {
hasReverb = true;
reverbTime = reverbSettings[i].reverbTime;
wetLevel = reverbSettings[i].wetLevel;
reverbTime = settings.reverbTime;
wetLevel = settings.wetLevel;
break;
}
}
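The reverb lookup above is a first-match scan over axis-aligned zone boxes. A self-contained sketch of that pattern; ZoneBoxSketch is a stand-in for the real AABox:

#include <glm/glm.hpp>
#include <vector>

struct ZoneBoxSketch {
    glm::vec3 minCorner;
    glm::vec3 maxCorner;
    bool contains(const glm::vec3& p) const {
        return glm::all(glm::greaterThanEqual(p, minCorner)) &&
               glm::all(glm::lessThanEqual(p, maxCorner));
    }
};

// Return the index of the first zone containing the position, or -1; mirrors
// the "break on first match" behavior of the loop above.
int firstContainingZone(const std::vector<ZoneBoxSketch>& zones, const glm::vec3& position) {
    for (int i = 0; i < static_cast<int>(zones.size()); ++i) {
        if (zones[i].contains(position)) {
            return i;
        }
    }
    return -1;
}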
@ -493,8 +703,7 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
}
}
float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition) {
float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
float gain = 1.0f;
// injector: apply attenuation
@ -505,13 +714,14 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
// avatar: skip attenuation - it is too costly to approximate
// distance attenuation: approximate, ignore zone-specific attenuations
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
float distance = glm::length(relativePosition);
return gain / distance;
// avatar: skip master gain - it is constant for all streams
}
float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho) {
float gain = 1.0f;
@ -534,7 +744,7 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
gain *= offAxisCoefficient;
// apply master gain, only to avatars
gain *= listenerNodeData.getMasterAvatarGain();
gain *= masterListenerGain;
}
auto& audioZones = AudioMixer::getAudioZones();
@ -542,10 +752,10 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
// find distance attenuation coefficient
float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
for (int i = 0; i < zoneSettings.length(); ++i) {
if (audioZones[zoneSettings[i].source].contains(streamToAdd.getPosition()) &&
audioZones[zoneSettings[i].listener].contains(listeningNodeStream.getPosition())) {
attenuationPerDoublingInDistance = zoneSettings[i].coefficient;
for (const auto& settings : zoneSettings) {
if (audioZones[settings.source].area.contains(streamToAdd.getPosition()) &&
audioZones[settings.listener].area.contains(listeningNodeStream.getPosition())) {
attenuationPerDoublingInDistance = settings.coefficient;
break;
}
}
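"Attenuation per doubling in distance" means each doubling of distance multiplies the gain by (1 - coefficient). A hedged sketch; the reference distance and the clamping are assumptions here, and the real computeGain() may differ in detail:

#include <cmath>

float distanceAttenuationSketch(float distance, float coefficient, float referenceDistance = 1.0f) {
    if (distance <= referenceDistance || coefficient <= 0.0f) {
        return 1.0f; // no attenuation inside the reference distance
    }
    float doublings = std::log2(distance / referenceDistance);
    return std::pow(1.0f - coefficient, doublings);
}

For example, with a coefficient of 0.5 a source at 4x the reference distance comes through at 0.5^2 = 0.25 of full gain.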

View file

@ -12,29 +12,39 @@
#ifndef hifi_AudioMixerSlave_h
#define hifi_AudioMixerSlave_h
#include <tbb/concurrent_vector.h>
#include <AABox.h>
#include <AudioHRTF.h>
#include <AudioRingBuffer.h>
#include <ThreadedAssignment.h>
#include <UUIDHasher.h>
#include <NodeList.h>
#include <PositionalAudioStream.h>
#include "AudioMixerClientData.h"
#include "AudioMixerStats.h"
class PositionalAudioStream;
class AvatarAudioStream;
class AudioHRTF;
class AudioMixerClientData;
class AudioMixerSlave {
public:
using ConstIter = NodeList::const_iterator;
struct SharedData {
AudioMixerClientData::ConcurrentAddedStreams addedStreams;
std::vector<Node::LocalID> removedNodes;
std::vector<NodeIDStreamID> removedStreams;
};
AudioMixerSlave(SharedData& sharedData) : _sharedData(sharedData) {};
// process packets for a given node (requires no configuration)
void processPackets(const SharedNodePointer& node);
// configure a round of mixing
void configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
void configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);
// mix and broadcast non-ignored streams to the node (requires configuration using configureMix, above)
// returns true if a mixed packet was sent to the node
@ -45,13 +55,15 @@ public:
private:
// create mix, returns true if mix has audio
bool prepareMix(const SharedNodePointer& listener);
void throttleStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
void mixStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
void addStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer,
bool throttle);
void addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain);
void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain);
void resetHRTFState(AudioMixerClientData::MixableStream& mixableStream);
void addStreams(Node& listener, AudioMixerClientData& listenerData);
// mixing buffers
float _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
@ -61,7 +73,9 @@ private:
ConstIter _begin;
ConstIter _end;
unsigned int _frame { 0 };
float _throttlingRatio { 0.0f };
int _numToRetain { -1 };
SharedData& _sharedData;
};
#endif // hifi_AudioMixerSlave_h

View file

@ -74,13 +74,11 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {
run(begin, end);
}
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
_function = &AudioMixerSlave::mix;
_configure = [=](AudioMixerSlave& slave) {
slave.configureMix(_begin, _end, _frame, _throttlingRatio);
slave.configureMix(_begin, _end, frame, numToRetain);
};
_frame = frame;
_throttlingRatio = throttlingRatio;
run(begin, end);
}
@ -167,7 +165,7 @@ void AudioMixerSlavePool::resize(int numThreads) {
if (numThreads > _numThreads) {
// start new slaves
for (int i = 0; i < numThreads - _numThreads; ++i) {
auto slave = new AudioMixerSlaveThread(*this);
auto slave = new AudioMixerSlaveThread(*this, _workerSharedData);
slave->start();
_slaves.emplace_back(slave);
}

View file

@ -31,7 +31,8 @@ class AudioMixerSlaveThread : public QThread, public AudioMixerSlave {
using Lock = std::unique_lock<Mutex>;
public:
AudioMixerSlaveThread(AudioMixerSlavePool& pool) : _pool(pool) {}
AudioMixerSlaveThread(AudioMixerSlavePool& pool, AudioMixerSlave::SharedData& sharedData)
: AudioMixerSlave(sharedData), _pool(pool) {}
void run() override final;
@ -58,14 +59,15 @@ class AudioMixerSlavePool {
public:
using ConstIter = NodeList::const_iterator;
AudioMixerSlavePool(int numThreads = QThread::idealThreadCount()) { setNumThreads(numThreads); }
AudioMixerSlavePool(AudioMixerSlave::SharedData& sharedData, int numThreads = QThread::idealThreadCount())
: _workerSharedData(sharedData) { setNumThreads(numThreads); }
~AudioMixerSlavePool() { resize(0); }
// process packets on slave threads
void processPackets(ConstIter begin, ConstIter end);
// mix on slave threads
void mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
void mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);
// iterate over all slaves
void each(std::function<void(AudioMixerSlave& slave)> functor);
@ -96,10 +98,10 @@ private:
// frame state
Queue _queue;
unsigned int _frame { 0 };
float _throttlingRatio { 0.0f };
ConstIter _begin;
ConstIter _end;
AudioMixerSlave::SharedData& _workerSharedData;
};
#endif // hifi_AudioMixerSlavePool_h

View file

@ -15,12 +15,27 @@ void AudioMixerStats::reset() {
sumStreams = 0;
sumListeners = 0;
sumListenersSilent = 0;
totalMixes = 0;
hrtfRenders = 0;
hrtfSilentRenders = 0;
hrtfThrottleRenders = 0;
hrtfResets = 0;
hrtfUpdates = 0;
manualStereoMixes = 0;
manualEchoMixes = 0;
skippedToActive = 0;
skippedToInactive = 0;
inactiveToSkipped = 0;
inactiveToActive = 0;
activeToSkipped = 0;
activeToInactive = 0;
skipped = 0;
inactive = 0;
active = 0;
#ifdef HIFI_AUDIO_MIXER_DEBUG
mixTime = 0;
#endif
@ -30,12 +45,27 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
sumStreams += otherStats.sumStreams;
sumListeners += otherStats.sumListeners;
sumListenersSilent += otherStats.sumListenersSilent;
totalMixes += otherStats.totalMixes;
hrtfRenders += otherStats.hrtfRenders;
hrtfSilentRenders += otherStats.hrtfSilentRenders;
hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
hrtfResets += otherStats.hrtfResets;
hrtfUpdates += otherStats.hrtfUpdates;
manualStereoMixes += otherStats.manualStereoMixes;
manualEchoMixes += otherStats.manualEchoMixes;
skippedToActive += otherStats.skippedToActive;
skippedToInactive += otherStats.skippedToInactive;
inactiveToSkipped += otherStats.inactiveToSkipped;
inactiveToActive += otherStats.inactiveToActive;
activeToSkipped += otherStats.activeToSkipped;
activeToInactive += otherStats.activeToInactive;
skipped += otherStats.skipped;
inactive += otherStats.inactive;
active += otherStats.active;
#ifdef HIFI_AUDIO_MIXER_DEBUG
mixTime += otherStats.mixTime;
#endif

View file

@ -24,12 +24,23 @@ struct AudioMixerStats {
int totalMixes { 0 };
int hrtfRenders { 0 };
int hrtfSilentRenders { 0 };
int hrtfThrottleRenders { 0 };
int hrtfResets { 0 };
int hrtfUpdates { 0 };
int manualStereoMixes { 0 };
int manualEchoMixes { 0 };
int skippedToActive { 0 };
int skippedToInactive { 0 };
int inactiveToSkipped { 0 };
int inactiveToActive { 0 };
int activeToSkipped { 0 };
int activeToInactive { 0 };
int skipped { 0 };
int inactive { 0 };
int active { 0 };
#ifdef HIFI_AUDIO_MIXER_DEBUG
uint64_t mixTime { 0 };
#endif

View file

@ -23,9 +23,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
if (type == PacketType::SilentAudioFrame) {
const char* dataAt = packetAfterSeqNum.constData();
quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
readBytes += sizeof(quint16);
numAudioSamples = (int)numSilentSamples;
SilentSamplesBytes numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
readBytes += sizeof(SilentSamplesBytes);
numAudioSamples = (int) numSilentSamples;
// read the positional data
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
@ -34,9 +34,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
_shouldLoopbackForNode = (type == PacketType::MicrophoneAudioWithEcho);
// read the channel flag
quint8 channelFlag = packetAfterSeqNum.at(readBytes);
ChannelFlag channelFlag = packetAfterSeqNum.at(readBytes);
bool isStereo = channelFlag == 1;
readBytes += sizeof(quint8);
readBytes += sizeof(ChannelFlag);
// if isStereo value has changed, restart the ring buffer with new frame size
if (isStereo != _isStereo) {
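The SilentSamplesBytes and ChannelFlag aliases introduced in this hunk tie each read size to the field it decodes. A self-contained sketch of the same pattern; names are stand-ins, the two fields are read back to back here whereas the real parser reads positional data between them, and memcpy is used instead of the diff's reinterpret_cast to sidestep alignment concerns:

#include <cstdint>
#include <cstring>

using SilentSamples = uint16_t; // stand-in for the quint16 wire field
using ChannelFlagByte = uint8_t;

struct ParsedHeader {
    SilentSamples numSilentSamples;
    ChannelFlagByte channelFlag;
};

// Read the two header fields in wire order and report how many bytes were consumed.
int parseHeader(const char* data, ParsedHeader& out) {
    int readBytes = 0;
    std::memcpy(&out.numSilentSamples, data + readBytes, sizeof(SilentSamples));
    readBytes += sizeof(SilentSamples);
    std::memcpy(&out.channelFlag, data + readBytes, sizeof(ChannelFlagByte));
    readBytes += sizeof(ChannelFlagByte);
    return readBytes;
}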

View file

@ -16,6 +16,8 @@
#include "PositionalAudioStream.h"
using SilentSamplesBytes = quint16;
class AvatarAudioStream : public PositionalAudioStream {
public:
AvatarAudioStream(bool isStereo, int numStaticJitterFrames = -1);

View file

@ -673,7 +673,13 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
void AvatarMixer::handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto start = usecTimestampNow();
sendingNode->parseIgnoreRadiusRequestMessage(packet);
bool enabled;
packet->readPrimitive(&enabled);
auto avatarData = getOrCreateClientData(sendingNode);
avatarData->setIsIgnoreRadiusEnabled(enabled);
auto end = usecTimestampNow();
_handleRadiusIgnoreRequestPacketElapsedTime += (end - start);
}

View file

@ -227,7 +227,7 @@ void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
addToRadiusIgnoringSet(other->getUUID());
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
killPacket->write(other->getUUID().toRfc4122());
if (self->isIgnoreRadiusEnabled()) {
if (_isIgnoreRadiusEnabled) {
killPacket->writePrimitive(KillAvatarReason::TheirAvatarEnteredYourBubble);
} else {
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);

View file

@ -49,6 +49,9 @@ public:
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
bool isIgnoreRadiusEnabled() const { return _isIgnoreRadiusEnabled; }
void setIsIgnoreRadiusEnabled(bool enabled) { _isIgnoreRadiusEnabled = enabled; }
uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
@ -180,6 +183,8 @@ private:
std::unordered_map<Node::LocalID, TraitsCheckTimestamp> _lastSentTraitsTimestamps;
std::unordered_map<Node::LocalID, AvatarTraits::TraitVersions> _sentTraitVersions;
std::atomic_bool _isIgnoreRadiusEnabled { false };
};
#endif // hifi_AvatarMixerClientData_h

View file

@ -345,7 +345,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
} else {
// Check to see if the space bubble is enabled
// Don't bother with these checks if the other avatar has their bubble enabled and we're getting any ignored avatars (getsAnyIgnored)
if (destinationNode->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
if (nodeData->isIgnoreRadiusEnabled() || (avatarClientNodeData->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Perform the collision check between the two bounding boxes
const float OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR = 2.4f; // magic number determined empirically
AABox otherNodeBox = computeBubbleBox(avatarClientNodeData->getAvatar(), OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR);

View file

@ -1110,7 +1110,36 @@ function moveTableRow(row, move_up) {
}
// we need to fire a change event on one of the remaining inputs so that the sidebar badge is updated
badgeForDifferences($(table))
badgeForDifferences($(table));
// figure out which group this row is in
var panelParentID = row.closest('.panel').attr('id');
// get the short name for the setting from the table
var tableShortName = row.closest('table').data('short-name');
var changed = tableHasChanged(panelParentID, tableShortName);
$(table).find('.' + Settings.DATA_ROW_CLASS).each(function(){
var hiddenInput = $(this).find('td.' + Settings.DATA_COL_CLASS + ' input');
if (changed) {
hiddenInput.attr('data-changed', true);
} else {
hiddenInput.removeAttr('data-changed');
}
});
}
function tableHasChanged(panelParentID, tableShortName) {
// get a JSON representation of that section
var panelSettingJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID][tableShortName]
if (Settings.initialValues[panelParentID]) {
var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
} else {
var initialPanelSettingJSON = {};
}
return !_.isEqual(panelSettingJSON, initialPanelSettingJSON);
}
function updateDataChangedForSiblingRows(row, forceTrue) {
@ -1123,16 +1152,8 @@ function updateDataChangedForSiblingRows(row, forceTrue) {
// get the short name for the setting from the table
var tableShortName = row.closest('table').data('short-name')
// get a JSON representation of that section
var panelSettingJSON = form2js(panelParentID, ".", false, cleanupFormValues, true)[panelParentID][tableShortName]
if (Settings.initialValues[panelParentID]) {
var initialPanelSettingJSON = Settings.initialValues[panelParentID][tableShortName]
} else {
var initialPanelSettingJSON = {};
}
// if they are equal, we don't need data-changed
isTrue = !_.isEqual(panelSettingJSON, initialPanelSettingJSON)
isTrue = tableHasChanged(panelParentID, tableShortName);
} else {
isTrue = true
}
@ -1140,9 +1161,9 @@ function updateDataChangedForSiblingRows(row, forceTrue) {
row.siblings('.' + Settings.DATA_ROW_CLASS).each(function(){
var hiddenInput = $(this).find('td.' + Settings.DATA_COL_CLASS + ' input')
if (isTrue) {
hiddenInput.attr('data-changed', isTrue)
hiddenInput.attr('data-changed', isTrue);
} else {
hiddenInput.removeAttr('data-changed')
hiddenInput.removeAttr('data-changed');
}
})
}

View file

@ -2906,7 +2906,7 @@ void DomainServer::updateReplicationNodes(ReplicationServerDirection direction)
// collect them in a vector to separately remove them with handleKillNode (since eachNode has a read lock and
// we cannot recursively take the write lock required by handleKillNode)
std::vector<SharedNodePointer> nodesToKill;
nodeList->eachNode([this, direction, replicationNodesInSettings, replicationDirection, &nodesToKill](const SharedNodePointer& otherNode) {
nodeList->eachNode([direction, replicationNodesInSettings, replicationDirection, &nodesToKill](const SharedNodePointer& otherNode) {
if ((direction == Upstream && NodeType::isUpstream(otherNode->getType()))
|| (direction == Downstream && NodeType::isDownstream(otherNode->getType()))) {
bool nodeInSettings = find(replicationNodesInSettings.cbegin(), replicationNodesInSettings.cend(),

View file

@ -23,6 +23,7 @@ Item {
property bool failAfterSignUp: false
function login() {
flavorText.visible = false
mainTextContainer.visible = false
toggleLoading(true)
loginDialog.login(usernameField.text, passwordField.text)
@ -43,7 +44,7 @@ Item {
function resize() {
var targetWidth = Math.max(titleWidth, form.contentWidth);
var targetHeight = hifi.dimensions.contentSpacing.y + mainTextContainer.height +
var targetHeight = hifi.dimensions.contentSpacing.y + flavorText.height + mainTextContainer.height +
4 * hifi.dimensions.contentSpacing.y + form.height;
if (additionalInformation.visible) {
@ -106,14 +107,15 @@ Item {
ShortcutText {
id: mainTextContainer
anchors {
top: parent.top
top: flavorText.bottom
left: parent.left
margins: 0
topMargin: hifi.dimensions.contentSpacing.y
topMargin: 1.5 * hifi.dimensions.contentSpacing.y
}
visible: false
text: qsTr("Username or password incorrect.")
height: flavorText.height - 20
wrapMode: Text.WordWrap
color: hifi.colors.redAccent
lineHeight: 1
@ -128,7 +130,7 @@ Item {
anchors {
top: mainTextContainer.bottom
topMargin: 2 * hifi.dimensions.contentSpacing.y
topMargin: 1.5 * hifi.dimensions.contentSpacing.y
}
spacing: 2 * hifi.dimensions.contentSpacing.y
@ -139,6 +141,7 @@ Item {
focus: true
placeholderText: "Username or Email"
activeFocusOnPress: true
onHeightChanged: d.resize(); onWidthChanged: d.resize();
ShortcutText {
z: 10
@ -172,7 +175,7 @@ Item {
width: parent.width
placeholderText: "Password"
activeFocusOnPress: true
echoMode: TextInput.Password
echoMode: passwordFieldMouseArea.showPassword ? TextInput.Normal : TextInput.Password
onHeightChanged: d.resize(); onWidthChanged: d.resize();
ShortcutText {
@ -212,29 +215,28 @@ Item {
Image {
id: showPasswordImage
y: (passwordField.height - (passwordField.height * 16 / 23)) / 2
width: passwordField.width - (passwordField.width - (((passwordField.height) * 31/23)))
width: passwordField.height * 16 / 23
height: passwordField.height * 16 / 23
anchors {
right: parent.right
rightMargin: 3
rightMargin: 8
top: parent.top
topMargin: passwordFieldMouseArea.showPassword ? 6 : 8
bottom: parent.bottom
bottomMargin: passwordFieldMouseArea.showPassword ? 5 : 8
}
source: passwordFieldMouseArea.showPassword ? "../../images/eyeClosed.svg" : "../../images/eyeOpen.svg"
MouseArea {
id: passwordFieldMouseArea
anchors.fill: parent
acceptedButtons: Qt.LeftButton
property bool showPassword: false
onClicked: {
showPassword = !showPassword;
}
}
source: "../../images/eyeOpen.svg"
}
MouseArea {
id: passwordFieldMouseArea
anchors.fill: parent
acceptedButtons: Qt.LeftButton
property bool showPassword: false
onClicked: {
showPassword = !showPassword;
passwordField.echoMode = showPassword ? TextInput.Normal : TextInput.Password;
showPasswordImage.source = showPassword ? "../../images/eyeClosed.svg" : "../../images/eyeOpen.svg";
showPasswordImage.height = showPassword ? passwordField.height : passwordField.height * 16 / 23;
showPasswordImage.y = showPassword ? 0 : (passwordField.height - showPasswordImage.height) / 2;
}
}
}
Keys.onReturnPressed: linkAccountBody.login()
@ -284,7 +286,7 @@ Item {
anchors.verticalCenter: parent.verticalCenter
width: 200
text: qsTr(loginDialog.isSteamRunning() ? "Link Account" : "Login")
text: qsTr(loginDialog.isSteamRunning() ? "Link Account" : "Log in")
color: hifi.buttons.blue
onClicked: linkAccountBody.login()
@ -336,6 +338,7 @@ Item {
if (failAfterSignUp) {
mainTextContainer.text = "Account created successfully."
flavorText.visible = true
mainTextContainer.visible = true
}
@ -374,6 +377,7 @@ Item {
UserActivityLogger.logAction("encourageLoginDialog", data);
Settings.setValue("loginDialogPoppedUp", false);
}
flavorText.visible = true
mainTextContainer.visible = true
toggleLoading(false)
}

View file

@ -34,7 +34,7 @@ Preference {
left: parent.left
right: parent.right
}
height: isFirstCheckBox ? hifi.dimensions.controlInterlineHeight : 0
height: isFirstCheckBox && !preference.indented ? 16 : 2
}
CheckBox {
@ -54,6 +54,7 @@ Preference {
left: parent.left
right: parent.right
bottom: parent.bottom
leftMargin: preference.indented ? 20 : 0
}
text: root.label
colorScheme: hifi.colorSchemes.dark

View file

@ -11,14 +11,27 @@
import QtQuick 2.5
import "../../controls-uit"
import "../../styles-uit"
Preference {
id: root
height: control.height + hifi.dimensions.controlInterlineHeight
property int value: 0
Component.onCompleted: {
repeater.itemAt(preference.value).checked = true
value = preference.value;
repeater.itemAt(preference.value).checked = true;
}
function updateValue() {
for (var i = 0; i < repeater.count; i++) {
if (repeater.itemAt(i).checked) {
value = i;
break;
}
}
}
function save() {
@ -33,24 +46,36 @@ Preference {
preference.save();
}
Row {
Column {
id: control
anchors {
left: parent.left
right: parent.right
bottom: parent.bottom
}
spacing: 5
spacing: 3
RalewaySemiBold {
id: heading
size: hifi.fontSizes.inputLabel
text: preference.heading
color: hifi.colors.lightGrayText
visible: text !== ""
bottomPadding: 3
}
Repeater {
id: repeater
model: preference.items.length
delegate: RadioButton {
text: preference.items[index]
letterSpacing: 0
anchors {
verticalCenter: parent.verticalCenter
left: parent.left
}
leftPadding: 0
colorScheme: hifi.colorSchemes.dark
onClicked: updateValue();
}
}
}

View file

@ -138,11 +138,12 @@ Preference {
break;
case Preference.PrimaryHand:
checkBoxCount++;
checkBoxCount = 0;
builder = primaryHandBuilder;
break;
case Preference.RadioButtons:
checkBoxCount++;
checkBoxCount = 0;
builder = radioButtonsBuilder;
break;
};

View file

@ -48,7 +48,7 @@ Rectangle {
HifiModels.PSFListModel {
id: connectionsUserModel;
http: http;
endpoint: "/api/v1/users?filter=connections";
endpoint: "/api/v1/users/connections";
property var sortColumn: connectionsTable.getColumn(connectionsTable.sortIndicatorColumn);
sortProperty: switch (sortColumn && sortColumn.role) {
case 'placeName':

View file

@ -0,0 +1,43 @@
import QtQuick 2.0
Item {
property alias source: sourceImage.sourceItem
property alias maskSource: sourceMask.sourceItem
anchors.fill: parent
ShaderEffectSource {
id: sourceMask
smooth: true
hideSource: true
}
ShaderEffectSource {
id: sourceImage
hideSource: true
}
ShaderEffect {
id: maskEffect
anchors.fill: parent
property variant source: sourceImage
property variant mask: sourceMask
fragmentShader: {
"
varying highp vec2 qt_TexCoord0;
uniform lowp sampler2D source;
uniform lowp sampler2D mask;
void main() {
highp vec4 maskColor = texture2D(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
highp vec4 sourceColor = texture2D(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
if (maskColor.a > 0.0)
gl_FragColor = sourceColor;
else
gl_FragColor = maskColor;
}
"
}
}
}

View file

@ -314,7 +314,7 @@ Rectangle {
}
};
wearableUpdated(getCurrentWearable().id, jointIndex, properties);
wearableUpdated(getCurrentWearable().id, wearablesCombobox.currentIndex, properties);
}
onCurrentIndexChanged: {

View file

@ -24,19 +24,20 @@ Item {
fragmentShader: {
"
varying highp vec2 qt_TexCoord0;
uniform lowp sampler2D source;
uniform lowp sampler2D mask;
void main() {
highp vec4 maskColor = texture2D(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
highp vec4 sourceColor = texture2D(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
if (maskColor.a > 0.0)
gl_FragColor = sourceColor;
else
gl_FragColor = maskColor;
}
#version 410
in vec2 qt_TexCoord0;
out vec4 color;
uniform sampler2D source;
uniform sampler2D mask;
void main()
{
vec4 maskColor = texture(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
vec4 sourceColor = texture(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
if (maskColor.a > 0.0)
color = sourceColor;
else
color = maskColor;
}
"
}
}

View file

@ -552,6 +552,10 @@ Rectangle {
// Alignment
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignVCenter;
onLinkActivated: {
// The only case is to go to the bank.
sendToScript({method: 'gotoBank'});
}
}
}
@ -1107,25 +1111,32 @@ Rectangle {
}
function handleBuyAgainLogic() {
// If you can buy this item again...
if (canBuyAgain()) {
// If you can't afford another copy of the item...
if (root.balanceAfterPurchase < 0) {
// If you already own the item...
if (root.alreadyOwned) {
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item again.</b>";
// Else if you don't already own the item...
} else {
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item.</b>";
}
buyTextContainer.color = "#FFC3CD";
buyTextContainer.border.color = "#F3808F";
buyGlyph.text = hifi.glyphs.alert;
buyGlyph.size = 54;
// If you CAN afford another copy of the item...
// General rules, implemented in various scattered places in this file:
// 1. If you already own the item, a viewInMyPurchasesButton is visible,
// and the buyButton is visible (and says "Buy it again") ONLY if it is a type you canBuyAgain.
// 2. Separately,
// a. If you don't have enough money to buy, the buyText becomes visible and tells you, and the buyButton is disabled.
// b. Otherwise, if the item is a content set and you don't have rez permission, the buyText becomes visible and tells you so.
// If you can't afford another copy of the item...
if (root.balanceAfterPurchase < 0) {
// If you don't already own the item...
if (!root.alreadyOwned) {
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item.</b>";
// Else if you already own the item and can buy it again...
} else if (canBuyAgain()) {
buyText.text = "<b>Your Wallet does not have sufficient funds to purchase this item again.</b>";
} else {
handleContentSets();
buyText.text = "<b>While you do not have sufficient funds to buy this, you already have this item.</b>"
}
buyText.text += " Visit <a href='#'>Bank of High Fidelity</a> to get more HFC."
buyTextContainer.color = "#FFC3CD";
buyTextContainer.border.color = "#F3808F";
buyGlyph.text = hifi.glyphs.alert;
buyGlyph.size = 54;
// If you CAN afford another copy of the item...
} else {
handleContentSets();
}
}

View file

@ -829,6 +829,7 @@ Rectangle {
Commerce.getWalletAuthenticatedStatus(); // before writing security image, ensures that salt/account password is set.
Commerce.chooseSecurityImage(securityImagePath);
Commerce.generateKeyPair();
followReferrer({ referrer: walletSetup.referrer });
}
function addLeadingZero(n) {
@ -836,7 +837,7 @@ Rectangle {
}
function followReferrer(msg) {
if (msg.referrer === '' || msg.referrer === 'marketplace cta') {
if (msg.referrer === '') {
root.activeView = "initialize";
Commerce.getWalletStatus();
} else if (msg.referrer === 'purchases') {

View file

@ -28,7 +28,7 @@ Item {
property string activeView: "step_1";
property string lastPage;
property bool hasShownSecurityImageTip: false;
property string referrer;
property string referrer: '';
property string keyFilePath;
property date startingTimestamp;
property string setupAttemptID;

View file

@ -122,6 +122,22 @@ Item {
}
}
// Runtime customization of preferences.
var locomotionPreference = findPreference("VR Movement", "Teleporting only / Walking and teleporting");
var flyingPreference = findPreference("VR Movement", "Jumping and flying");
if (locomotionPreference && flyingPreference) {
flyingPreference.visible = (locomotionPreference.value === 1);
locomotionPreference.valueChanged.connect(function () {
flyingPreference.visible = (locomotionPreference.value === 1);
});
}
if (HMD.isHeadControllerAvailable("Oculus")) {
var boundariesPreference = findPreference("VR Movement", "Show room boundaries while teleporting");
if (boundariesPreference) {
boundariesPreference.label = "Show room boundaries and sensors while teleporting";
}
}
if (sections.length) {
// Default sections to expanded/collapsed as appropriate for dialog.
if (sections.length === 1) {
@ -234,4 +250,32 @@ Item {
}
}
}
function findPreference(category, name) {
var section = null;
var preference = null;
var i;
// Find category section.
i = 0;
while (!section && i < sections.length) {
if (sections[i].name === category) {
section = sections[i];
}
i++;
}
// Find named preference.
if (section) {
i = 0;
while (!preference && i < section.preferences.length) {
if (section.preferences[i].preference && section.preferences[i].preference.name === name) {
preference = section.preferences[i];
}
i++;
}
}
return preference;
}
}

View file

@ -153,11 +153,12 @@ Preference {
break;
case Preference.PrimaryHand:
checkBoxCount++;
checkBoxCount = 0;
builder = primaryHandBuilder;
break;
case Preference.RadioButtons:
checkBoxCount++;
checkBoxCount = 0;
builder = radioButtonsBuilder;
break;
};

View file

@ -10,8 +10,11 @@
//
#include "AndroidHelper.h"
#include <QDebug>
#include <AccountManager.h>
#include <AudioClient.h>
#include <src/ui/LoginDialog.h>
#include "Application.h"
#include "Constants.h"
#if defined(qApp)
#undef qApp
@ -25,7 +28,7 @@ AndroidHelper::AndroidHelper() {
AndroidHelper::~AndroidHelper() {
}
void AndroidHelper::requestActivity(const QString &activityName, const bool backToScene, QList<QString> args) {
void AndroidHelper::requestActivity(const QString &activityName, const bool backToScene, QMap<QString, QString> args) {
emit androidActivityRequested(activityName, backToScene, args);
}
@ -49,8 +52,10 @@ void AndroidHelper::performHapticFeedback(int duration) {
emit hapticFeedbackRequested(duration);
}
void AndroidHelper::showLoginDialog() {
emit androidActivityRequested("Login", true);
void AndroidHelper::showLoginDialog(QUrl url) {
QMap<QString, QString> args;
args["url"] = url.toString();
emit androidActivityRequested("Login", true, args);
}
void AndroidHelper::processURL(const QString &url) {
@ -67,3 +72,75 @@ void AndroidHelper::notifyHeadsetOn(bool pluggedIn) {
}
#endif
}
void AndroidHelper::signup(QString email, QString username, QString password) {
JSONCallbackParameters callbackParams;
callbackParams.callbackReceiver = this;
callbackParams.jsonCallbackMethod = "signupCompleted";
callbackParams.errorCallbackMethod = "signupFailed";
QJsonObject payload;
QJsonObject userObject;
userObject.insert("email", email);
userObject.insert("username", username);
userObject.insert("password", password);
payload.insert("user", userObject);
auto accountManager = DependencyManager::get<AccountManager>();
accountManager->sendRequest(API_SIGNUP_PATH, AccountManagerAuth::None,
QNetworkAccessManager::PostOperation, callbackParams,
QJsonDocument(payload).toJson());
}
void AndroidHelper::signupCompleted(QNetworkReply* reply) {
emit handleSignupCompleted();
}
QString AndroidHelper::errorStringFromAPIObject(const QJsonValue& apiObject) {
if (apiObject.isArray()) {
return apiObject.toArray()[0].toString();
} else if (apiObject.isString()) {
return apiObject.toString();
} else {
return "is invalid";
}
}
void AndroidHelper::signupFailed(QNetworkReply* reply) {
// parse the returned JSON to see what the problem was
auto jsonResponse = QJsonDocument::fromJson(reply->readAll());
static const QString RESPONSE_DATA_KEY = "data";
auto dataJsonValue = jsonResponse.object()[RESPONSE_DATA_KEY];
if (dataJsonValue.isObject()) {
auto dataObject = dataJsonValue.toObject();
static const QString EMAIL_DATA_KEY = "email";
static const QString USERNAME_DATA_KEY = "username";
static const QString PASSWORD_DATA_KEY = "password";
QStringList errorStringList;
if (dataObject.contains(EMAIL_DATA_KEY)) {
errorStringList.append(QString("Email %1.").arg(errorStringFromAPIObject(dataObject[EMAIL_DATA_KEY])));
}
if (dataObject.contains(USERNAME_DATA_KEY)) {
errorStringList.append(QString("Username %1.").arg(errorStringFromAPIObject(dataObject[USERNAME_DATA_KEY])));
}
if (dataObject.contains(PASSWORD_DATA_KEY)) {
errorStringList.append(QString("Password %1.").arg(errorStringFromAPIObject(dataObject[PASSWORD_DATA_KEY])));
}
emit handleSignupFailed(errorStringList.join('\n'));
} else {
static const QString DEFAULT_SIGN_UP_FAILURE_MESSAGE = "There was an unknown error while creating your account. Please try again later.";
emit handleSignupFailed(DEFAULT_SIGN_UP_FAILURE_MESSAGE);
}
}

View file

@ -13,6 +13,11 @@
#define hifi_Android_Helper_h
#include <QObject>
#include <QMap>
#include <QUrl>
#include <QNetworkReply>
#include <QtCore/QEventLoop>
class AndroidHelper : public QObject {
Q_OBJECT
@ -21,7 +26,7 @@ public:
static AndroidHelper instance;
return instance;
}
void requestActivity(const QString &activityName, const bool backToScene, QList<QString> args = QList<QString>());
void requestActivity(const QString &activityName, const bool backToScene, QMap<QString, QString> args = QMap<QString, QString>());
void notifyLoadComplete();
void notifyEnterForeground();
void notifyBeforeEnterBackground();
@ -34,11 +39,14 @@ public:
AndroidHelper(AndroidHelper const&) = delete;
void operator=(AndroidHelper const&) = delete;
public slots:
void showLoginDialog();
void signup(QString email, QString username, QString password);
public slots:
void showLoginDialog(QUrl url);
void signupCompleted(QNetworkReply* reply);
void signupFailed(QNetworkReply* reply);
signals:
void androidActivityRequested(const QString &activityName, const bool backToScene, QList<QString> args = QList<QString>());
void androidActivityRequested(const QString &activityName, const bool backToScene, QMap<QString, QString> args = QMap<QString, QString>());
void qtAppLoadComplete();
void enterForeground();
void beforeEnterBackground();
@ -46,9 +54,14 @@ signals:
void hapticFeedbackRequested(int duration);
void handleSignupCompleted();
void handleSignupFailed(QString errorString);
private:
AndroidHelper();
~AndroidHelper();
QString errorStringFromAPIObject(const QJsonValue& apiObject);
};
#endif

View file

@ -270,7 +270,7 @@ class RenderEventHandler : public QObject {
public:
RenderEventHandler() {
// Transfer to a new thread
moveToNewNamedThread(this, "RenderThread", [this](QThread* renderThread) {
moveToNewNamedThread(this, "RenderThread", [](QThread* renderThread) {
hifi::qt::addBlockingForbiddenThread("Render", renderThread);
qApp->_lastTimeRendered.start();
}, std::bind(&RenderEventHandler::initialize, this), QThread::HighestPriority);
@ -410,6 +410,10 @@ public:
});
}
void setMainThreadID(Qt::HANDLE threadID) {
_mainThreadID = threadID;
}
static void updateHeartbeat() {
auto now = usecTimestampNow();
auto elapsed = now - _heartbeat;
@ -417,7 +421,9 @@ public:
_heartbeat = now;
}
static void deadlockDetectionCrash() {
void deadlockDetectionCrash() {
setCrashAnnotation("_mod_faulting_tid", std::to_string((uint64_t)_mainThreadID));
setCrashAnnotation("deadlock", "1");
uint32_t* crashTrigger = nullptr;
*crashTrigger = 0xDEAD10CC;
}
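The watchdog change above records the main thread id so the intentional null write can be attributed to the right thread in crash reports. A minimal sketch of the heartbeat/watchdog pattern; the names are assumed and this is not the real DeadlockWatchdogThread:

#include <atomic>
#include <chrono>
#include <cstdint>
#include <thread>

std::atomic<uint64_t> g_heartbeat{ 0 };

// The watched (main) thread bumps the heartbeat periodically.
void updateHeartbeatSketch() {
    g_heartbeat.fetch_add(1, std::memory_order_relaxed);
}

// The watchdog thread crashes the process deliberately if the heartbeat goes
// stale, so the crash reporter captures a dump at the moment of the deadlock.
void watchdogLoopSketch(std::chrono::milliseconds limit) {
    uint64_t last = g_heartbeat.load();
    for (;;) {
        std::this_thread::sleep_for(limit);
        uint64_t current = g_heartbeat.load();
        if (current == last) {
            // heartbeat went stale: crash on purpose, as the diff above does
            volatile uint32_t* crashTrigger = nullptr;
            *crashTrigger = 0xDEAD10CC;
        }
        last = current;
    }
}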
@ -504,6 +510,8 @@ public:
static ThreadSafeMovingAverage<int, HEARTBEAT_SAMPLES> _movingAverage;
bool _quit { false };
Qt::HANDLE _mainThreadID = nullptr;
};
std::atomic<bool> DeadlockWatchdogThread::_paused;
@ -963,7 +971,7 @@ Q_GUI_EXPORT void qt_gl_set_global_share_context(QOpenGLContext *context);
Setting::Handle<int> sessionRunTime{ "sessionRunTime", 0 };
const float DEFAULT_HMD_TABLET_SCALE_PERCENT = 70.0f;
const float DEFAULT_HMD_TABLET_SCALE_PERCENT = 60.0f;
const float DEFAULT_DESKTOP_TABLET_SCALE_PERCENT = 75.0f;
const bool DEFAULT_DESKTOP_TABLET_BECOMES_TOOLBAR = true;
const bool DEFAULT_HMD_TABLET_BECOMES_TOOLBAR = false;
@ -976,7 +984,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_window(new MainWindow(desktop())),
_sessionRunTimer(startupTimer),
_previousSessionCrashed(setupEssentials(argc, argv, runningMarkerExisted)),
_undoStackScriptingInterface(&_undoStack),
_entitySimulation(new PhysicalEntitySimulation()),
_physicsEngine(new PhysicsEngine(Vectors::ZERO)),
_entityClipboard(new EntityTree()),
@ -996,7 +1003,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
_enableProcessOctreeThread(true),
_lastNackTime(usecTimestampNow()),
_lastSendDownstreamAudioStats(usecTimestampNow()),
_aboutToQuit(false),
_notifiedPacketVersionMismatchThisDomain(false),
_maxOctreePPS(maxOctreePacketsPerSecond.get()),
_lastFaceTrackerUpdate(0),
@ -1055,6 +1061,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
auto controllerScriptingInterface = DependencyManager::get<controller::ScriptingInterface>().data();
_controllerScriptingInterface = dynamic_cast<ControllerScriptingInterface*>(controllerScriptingInterface);
connect(PluginManager::getInstance().data(), &PluginManager::inputDeviceRunningChanged,
controllerScriptingInterface, &controller::ScriptingInterface::updateRunningInputDevices);
_entityClipboard->createRootElement();
@ -1094,7 +1102,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
}
// Set up a watchdog thread to intentionally crash the application on deadlocks
if (!DISABLE_WATCHDOG) {
(new DeadlockWatchdogThread())->start();
auto deadlockWatchdogThread = new DeadlockWatchdogThread();
deadlockWatchdogThread->setMainThreadID(QThread::currentThreadId());
deadlockWatchdogThread->start();
}
// Set File Logger Session UUID
@ -1236,7 +1246,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
auto dialogsManager = DependencyManager::get<DialogsManager>();
#if defined(Q_OS_ANDROID)
connect(accountManager.data(), &AccountManager::authRequired, this, []() {
AndroidHelper::instance().showLoginDialog();
auto addressManager = DependencyManager::get<AddressManager>();
AndroidHelper::instance().showLoginDialog(addressManager->currentAddress());
});
#else
connect(accountManager.data(), &AccountManager::authRequired, dialogsManager.data(), &DialogsManager::showLoginDialog);
@ -1755,7 +1766,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// we can unlock the desktop repositioning code, since all the positions will be
// relative to the desktop size for this plugin
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->getDesktop()->setProperty("repositionLocked", false);
connect(offscreenUi.data(), &OffscreenUi::desktopReady, []() {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto desktop = offscreenUi->getDesktop();
if (desktop) {
desktop->setProperty("repositionLocked", false);
}
});
// Make sure we don't time out during slow operations at startup
updateHeartbeat();
@ -2309,7 +2326,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
QTimer* checkLoginTimer = new QTimer(this);
checkLoginTimer->setInterval(CHECK_LOGIN_TIMER);
checkLoginTimer->setSingleShot(true);
connect(checkLoginTimer, &QTimer::timeout, this, [this]() {
connect(checkLoginTimer, &QTimer::timeout, this, []() {
auto accountManager = DependencyManager::get<AccountManager>();
auto dialogsManager = DependencyManager::get<DialogsManager>();
if (!accountManager->isLoggedIn()) {
@ -3089,7 +3106,6 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {
surfaceContext->setContextProperty("DialogsManager", _dialogsManagerScriptingInterface);
surfaceContext->setContextProperty("FaceTracker", DependencyManager::get<DdeFaceTracker>().data());
surfaceContext->setContextProperty("AvatarManager", DependencyManager::get<AvatarManager>().data());
surfaceContext->setContextProperty("UndoStack", &_undoStackScriptingInterface);
surfaceContext->setContextProperty("LODManager", DependencyManager::get<LODManager>().data());
surfaceContext->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
surfaceContext->setContextProperty("Scene", DependencyManager::get<SceneScriptingInterface>().data());
@ -4673,8 +4689,14 @@ void Application::idle() {
checkChangeCursor();
Stats::getInstance()->updateStats();
AnimStats::getInstance()->updateStats();
auto stats = Stats::getInstance();
if (stats) {
stats->updateStats();
}
auto animStats = AnimStats::getInstance();
if (animStats) {
animStats->updateStats();
}
// Normally we check PipelineWarnings, but since idle will often take more than 10ms we only show these idle timing
// details if we're in ExtraDebugging mode. However, the ::update() and its subcomponents will show their timing
@ -6723,8 +6745,6 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
scriptEngine->registerGlobalObject("AvatarManager", DependencyManager::get<AvatarManager>().data());
scriptEngine->registerGlobalObject("UndoStack", &_undoStackScriptingInterface);
scriptEngine->registerGlobalObject("LODManager", DependencyManager::get<LODManager>().data());
scriptEngine->registerGlobalObject("Paths", DependencyManager::get<PathUtils>().data());

View file

@ -23,7 +23,6 @@
#include <QtGui/QImage>
#include <QtWidgets/QApplication>
#include <QtWidgets/QUndoStack>
#include <ThreadHelpers.h>
#include <AbstractScriptingServicesInterface.h>
@ -70,7 +69,6 @@
#include "ui/OctreeStatsDialog.h"
#include "ui/OverlayConductor.h"
#include "ui/overlays/Overlays.h"
#include "UndoStackScriptingInterface.h"
#include "workload/GameWorkload.h"
@ -189,7 +187,6 @@ public:
const OctreePacketProcessor& getOctreePacketProcessor() const { return _octreeProcessor; }
QSharedPointer<EntityTreeRenderer> getEntities() const { return DependencyManager::get<EntityTreeRenderer>(); }
QUndoStack* getUndoStack() { return &_undoStack; }
MainWindow* getWindow() const { return _window; }
EntityTreePointer getEntityClipboard() const { return _entityClipboard; }
EntityEditPacketSender* getEntityEditPacketSender() { return &_entityEditSender; }
@ -562,6 +559,8 @@ private:
MainWindow* _window;
QElapsedTimer& _sessionRunTimer;
bool _aboutToQuit { false };
bool _previousSessionCrashed;
DisplayPluginPointer _displayPlugin;
@ -571,9 +570,6 @@ private:
bool _activatingDisplayPlugin { false };
QUndoStack _undoStack;
UndoStackScriptingInterface _undoStackScriptingInterface;
uint32_t _renderFrameCount { 0 };
// Frame Rate Measurement
@ -655,8 +651,6 @@ private:
quint64 _lastNackTime;
quint64 _lastSendDownstreamAudioStats;
bool _aboutToQuit;
bool _notifiedPacketVersionMismatchThisDomain;
ConditionalGuard _settingsGuard;

View file

@ -157,7 +157,10 @@ void Application::paintGL() {
renderArgs._context->enableStereo(false);
{
Stats::getInstance()->setRenderDetails(renderArgs._details);
auto stats = Stats::getInstance();
if (stats) {
stats->setRenderDetails(renderArgs._details);
}
}
uint64_t lastPaintDuration = usecTimestampNow() - lastPaintBegin;

interface/src/Constants.h Normal file
View file

@ -0,0 +1,15 @@
//
// Constants.h
// interface
//
// Created by Gabriel Calero on 9/28/18.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <QString>
static const QString API_SIGNUP_PATH = "api/v1/users";

View file

@ -90,19 +90,6 @@ Menu::Menu() {
// Edit menu ----------------------------------
MenuWrapper* editMenu = addMenu("Edit");
// Edit > Undo
QUndoStack* undoStack = qApp->getUndoStack();
QAction* undoAction = undoStack->createUndoAction(editMenu);
undoAction->setShortcut(Qt::CTRL | Qt::Key_Z);
addActionToQMenuAndActionHash(editMenu, undoAction);
// Edit > Redo
QAction* redoAction = undoStack->createRedoAction(editMenu);
redoAction->setShortcut(Qt::CTRL | Qt::SHIFT | Qt::Key_Z);
addActionToQMenuAndActionHash(editMenu, redoAction);
editMenu->addSeparator();
// Edit > Cut
auto cutAction = addActionToQMenuAndActionHash(editMenu, "Cut", QKeySequence::Cut);
connect(cutAction, &QAction::triggered, [] {

View file

@ -48,6 +48,10 @@ AvatarActionHold::~AvatarActionHold() {
myAvatar->removeHoldAction(this);
}
}
auto ownerEntity = _ownerEntity.lock();
if (ownerEntity) {
ownerEntity->setTransitingWithAvatar(false);
}
#if WANT_DEBUG
qDebug() << "AvatarActionHold::~AvatarActionHold" << (void*)this;
@ -131,6 +135,15 @@ bool AvatarActionHold::getTarget(float deltaTimeStep, glm::quat& rotation, glm::
glm::vec3 palmPosition;
glm::quat palmRotation;
bool isTransitingWithAvatar = holdingAvatar->getTransit()->isTransiting();
if (isTransitingWithAvatar != _isTransitingWithAvatar) {
_isTransitingWithAvatar = isTransitingWithAvatar;
auto ownerEntity = _ownerEntity.lock();
if (ownerEntity) {
ownerEntity->setTransitingWithAvatar(_isTransitingWithAvatar);
}
}
if (holdingAvatar->isMyAvatar()) {
std::shared_ptr<MyAvatar> myAvatar = avatarManager->getMyAvatar();
@ -404,11 +417,14 @@ bool AvatarActionHold::updateArguments(QVariantMap arguments) {
_kinematicSetVelocity = kinematicSetVelocity;
_ignoreIK = ignoreIK;
_active = true;
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto ownerEntity = _ownerEntity.lock();
if (ownerEntity) {
ownerEntity->setDynamicDataDirty(true);
ownerEntity->setDynamicDataNeedsTransmit(true);
ownerEntity->setDynamicDataNeedsTransmit(true);
ownerEntity->setTransitingWithAvatar(myAvatar->getTransit()->isTransiting());
}
});
}

View file

@ -59,6 +59,8 @@ private:
bool _kinematicSetVelocity { false };
bool _previousSet { false };
bool _ignoreIK { false };
bool _isTransitingWithAvatar { false };
glm::vec3 _previousPositionalTarget;
glm::quat _previousRotationalTarget;

View file

@ -78,6 +78,15 @@ AvatarManager::AvatarManager(QObject* parent) :
removeAvatar(nodeID, KillAvatarReason::AvatarIgnored);
}
});
const float AVATAR_TRANSIT_TRIGGER_DISTANCE = 1.0f;
const int AVATAR_TRANSIT_FRAME_COUNT = 11; // Based on testing
const int AVATAR_TRANSIT_FRAMES_PER_METER = 1; // Based on testing
_transitConfig._totalFrames = AVATAR_TRANSIT_FRAME_COUNT;
_transitConfig._triggerDistance = AVATAR_TRANSIT_TRIGGER_DISTANCE;
_transitConfig._framesPerMeter = AVATAR_TRANSIT_FRAMES_PER_METER;
_transitConfig._isDistanceBased = true;
}
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
@ -129,6 +138,10 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "AvatarManager::updateMyAvatar()");
AvatarTransit::Status status = _myAvatar->updateTransit(deltaTime, _myAvatar->getNextPosition(), _transitConfig);
bool sendFirstTransitPackage = (status == AvatarTransit::Status::START_TRANSIT);
bool blockTransitData = (status == AvatarTransit::Status::TRANSITING);
_myAvatar->update(deltaTime);
render::Transaction transaction;
_myAvatar->updateRenderItem(transaction);
@ -137,9 +150,13 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
quint64 now = usecTimestampNow();
quint64 dt = now - _lastSendAvatarDataTime;
if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !_myAvatarDataPacketsPaused) {
if (sendFirstTransitPackage || (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !_myAvatarDataPacketsPaused && !blockTransitData)) {
// send head/hand data to the avatar mixer and voxel server
PerformanceTimer perfTimer("send");
if (sendFirstTransitPackage) {
_myAvatar->overrideNextPackagePositionData(_myAvatar->getTransit()->getEndPosition());
}
_myAvatar->sendAvatarDataPacket();
_lastSendAvatarDataTime = now;
_myAvatarSendRate.increment();
@ -258,6 +275,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
if (inView && avatar->hasNewJointData()) {
numAvatarsUpdated++;
}
avatar->_transit.update(deltaTime, avatar->_globalPosition, _transitConfig);
avatar->simulate(deltaTime, inView);
avatar->updateRenderItem(renderTransaction);
avatar->updateSpaceProxy(workloadTransaction);
@ -487,14 +505,16 @@ void AvatarManager::clearOtherAvatars() {
void AvatarManager::deleteAllAvatars() {
assert(_avatarsToChangeInPhysics.empty());
QWriteLocker locker(&_hashLock);
AvatarHash::iterator avatarIterator = _avatarHash.begin();
QReadLocker locker(&_hashLock);
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
auto avatar = std::static_pointer_cast<OtherAvatar>(avatarIterator.value());
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
avatarIterator = _avatarHash.erase(avatarIterator);
avatar->die();
assert(!avatar->_motionState);
if (avatar != _myAvatar) {
auto otherAvatar = std::static_pointer_cast<OtherAvatar>(avatar);
assert(!otherAvatar->_motionState);
}
}
}
@ -811,7 +831,7 @@ void AvatarManager::setAvatarSortCoefficient(const QString& name, const QScriptV
}
}
QVariantMap AvatarManager::getPalData(const QList<QString> specificAvatarIdentifiers) {
QVariantMap AvatarManager::getPalData(const QList<QString> specificAvatarIdentifiers) {
QJsonArray palData;
auto avatarMap = getHashCopy();

View file

@ -31,6 +31,7 @@
#include "MyAvatar.h"
#include "OtherAvatar.h"
using SortedAvatar = std::pair<float, std::shared_ptr<Avatar>>;
/**jsdoc
@ -232,6 +233,8 @@ private:
mutable std::mutex _spaceLock;
workload::SpacePointer _space;
std::vector<int32_t> _spaceProxiesToDelete;
AvatarTransit::TransitConfig _transitConfig;
};
#endif // hifi_AvatarManager_h

View file

@ -106,6 +106,7 @@ MyAvatar::MyAvatar(QThread* thread) :
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_useAdvancedMovementControls("advancedMovementForHandControllersIsChecked", true),
_showPlayArea("showPlayArea", true),
_smoothOrientationTimer(std::numeric_limits<float>::max()),
_smoothOrientationInitial(),
_smoothOrientationTarget(),
@ -529,7 +530,8 @@ void MyAvatar::update(float deltaTime) {
}
if (_goToFeetAjustment && _skeletonModelLoaded) {
auto feetAjustment = getWorldPosition() - getWorldFeetPosition();
goToLocation(getWorldPosition() + feetAjustment);
_goToPosition = getWorldPosition() + feetAjustment;
setWorldPosition(_goToPosition);
_goToFeetAjustment = false;
}
if (_physicsSafetyPending && qApp->isPhysicsEnabled() && _characterController.isEnabledAndReady()) {
@ -638,9 +640,8 @@ void MyAvatar::updateChildCauterization(SpatiallyNestablePointer object, bool ca
void MyAvatar::simulate(float deltaTime) {
PerformanceTimer perfTimer("simulate");
animateScaleChanges(deltaTime);
setFlyingEnabled(getFlyingEnabled());
if (_cauterizationNeedsUpdate) {
@ -928,6 +929,7 @@ void MyAvatar::updateSensorToWorldMatrix() {
updateJointFromController(controller::Action::RIGHT_HAND, _controllerRightHandMatrixCache);
if (hasSensorToWorldScaleChanged) {
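// keep the transit's distance thresholds proportional to avatar scale (see scaledMaxTransitDistance in AvatarTransit::update)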
setTransitScale(sensorToWorldScale);
emit sensorToWorldScaleChanged(sensorToWorldScale);
}
@ -3306,7 +3308,7 @@ float MyAvatar::getRawDriveKey(DriveKeys key) const {
}
void MyAvatar::relayDriveKeysToCharacterController() {
if (getDriveKey(TRANSLATE_Y) > 0.0f) {
if (getDriveKey(TRANSLATE_Y) > 0.0f && (!qApp->isHMDMode() || (useAdvancedMovementControls() && getFlyingHMDPref()))) {
_characterController.jump();
}
}

View file

@ -122,8 +122,10 @@ class MyAvatar : public Avatar {
* zone may disallow collisionless avatars.
* @property {boolean} characterControllerEnabled - Synonym of <code>collisionsEnabled</code>.
* <strong>Deprecated:</strong> Use <code>collisionsEnabled</code> instead.
* @property {boolean} useAdvancedMovementControls - Returns the value of the Interface setting, Settings > Advanced
* Movement for Hand Controller. Note: Setting the value has no effect unless Interface is restarted.
* @property {boolean} useAdvancedMovementControls - Returns and sets the value of the Interface setting, Settings >
* Walking and teleporting. Note: Setting the value has no effect unless Interface is restarted.
* @property {boolean} showPlayArea - Returns and sets the value of the Interface setting, Settings > Show room boundaries
* while teleporting. Note: Setting the value has no effect unless Interface is restarted.
* @property {number} yawSpeed=75
* @property {number} pitchSpeed=50
* @property {boolean} hmdRollControlEnabled=true - If <code>true</code>, the roll angle of your HMD turns your avatar
@ -223,6 +225,7 @@ class MyAvatar : public Avatar {
Q_PROPERTY(bool collisionsEnabled READ getCollisionsEnabled WRITE setCollisionsEnabled)
Q_PROPERTY(bool characterControllerEnabled READ getCharacterControllerEnabled WRITE setCharacterControllerEnabled)
Q_PROPERTY(bool useAdvancedMovementControls READ useAdvancedMovementControls WRITE setUseAdvancedMovementControls)
Q_PROPERTY(bool showPlayArea READ getShowPlayArea WRITE setShowPlayArea)
Q_PROPERTY(float yawSpeed MEMBER _yawSpeed)
Q_PROPERTY(float pitchSpeed MEMBER _pitchSpeed)
@ -542,6 +545,9 @@ public:
void setUseAdvancedMovementControls(bool useAdvancedMovementControls)
{ _useAdvancedMovementControls.set(useAdvancedMovementControls); }
bool getShowPlayArea() const { return _showPlayArea.get(); }
void setShowPlayArea(bool showPlayArea) { _showPlayArea.set(showPlayArea); }
void setHMDRollControlEnabled(bool value) { _hmdRollControlEnabled = value; }
bool getHMDRollControlEnabled() const { return _hmdRollControlEnabled; }
void setHMDRollControlDeadZone(float value) { _hmdRollControlDeadZone = value; }
@ -1115,6 +1121,8 @@ public:
virtual QVariantList getAttachmentsVariant() const override;
virtual void setAttachmentsVariant(const QVariantList& variant) override;
glm::vec3 getNextPosition() { return _goToPending ? _goToPosition : getWorldPosition(); };
public slots:
/**jsdoc
@ -1629,6 +1637,7 @@ private:
Setting::Handle<float> _realWorldFieldOfView;
Setting::Handle<bool> _useAdvancedMovementControls;
Setting::Handle<bool> _showPlayArea;
// Smoothing.
const float SMOOTH_TIME_ORIENTATION = 0.5f;

View file

@ -18,6 +18,7 @@
const glm::vec4 ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR { 1.0f };
const float ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH { 0.01f };
const bool ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA { false };
const bool ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT { false };
gpu::PipelinePointer ParabolaPointer::RenderState::ParabolaRenderItem::_parabolaPipeline { nullptr };
gpu::PipelinePointer ParabolaPointer::RenderState::ParabolaRenderItem::_transparentParabolaPipeline { nullptr };
@ -46,6 +47,7 @@ void ParabolaPointer::editRenderStatePath(const std::string& state, const QVaria
float alpha = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR.a;
float width = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH;
bool isVisibleInSecondaryCamera = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
bool drawInFront = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT;
bool enabled = false;
if (!pathMap.isEmpty()) {
enabled = true;
@ -63,8 +65,11 @@ void ParabolaPointer::editRenderStatePath(const std::string& state, const QVaria
if (pathMap["isVisibleInSecondaryCamera"].isValid()) {
isVisibleInSecondaryCamera = pathMap["isVisibleInSecondaryCamera"].toBool();
}
if (pathMap["drawInFront"].isValid()) {
drawInFront = pathMap["drawInFront"].toBool();
}
}
renderState->editParabola(color, alpha, width, isVisibleInSecondaryCamera, enabled);
renderState->editParabola(color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled);
}
}
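For reference, a hedged sketch of a render-state path map that exercises the new flag; only the isVisibleInSecondaryCamera and drawInFront keys appear in this diff, and parabolaPointer stands in for however the caller obtains the pointer:

    QVariantMap pathMap;
    pathMap["isVisibleInSecondaryCamera"] = false;
    pathMap["drawInFront"] = true;   // new: routes through setDrawInFront() to the LAYER_3D_FRONT key bit
    parabolaPointer->editRenderStatePath("default", pathMap);   // the method edited above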
@ -146,7 +151,7 @@ void ParabolaPointer::setVisualPickResultInternal(PickResultPointer pickResult,
}
ParabolaPointer::RenderState::RenderState(const OverlayID& startID, const OverlayID& endID, const glm::vec3& pathColor, float pathAlpha, float pathWidth,
bool isVisibleInSecondaryCamera, bool pathEnabled) :
bool isVisibleInSecondaryCamera, bool drawInFront, bool pathEnabled) :
StartEndRenderState(startID, endID)
{
render::Transaction transaction;
@ -154,7 +159,7 @@ ParabolaPointer::RenderState::RenderState(const OverlayID& startID, const Overla
_pathID = scene->allocateID();
_pathWidth = pathWidth;
if (render::Item::isValidID(_pathID)) {
auto renderItem = std::make_shared<ParabolaRenderItem>(pathColor, pathAlpha, pathWidth, isVisibleInSecondaryCamera, pathEnabled);
auto renderItem = std::make_shared<ParabolaRenderItem>(pathColor, pathAlpha, pathWidth, isVisibleInSecondaryCamera, drawInFront, pathEnabled);
transaction.resetItem(_pathID, std::make_shared<ParabolaRenderItem::Payload>(renderItem));
scene->enqueueTransaction(transaction);
}
@ -182,15 +187,16 @@ void ParabolaPointer::RenderState::disable() {
}
}
void ParabolaPointer::RenderState::editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool enabled) {
void ParabolaPointer::RenderState::editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled) {
if (render::Item::isValidID(_pathID)) {
render::Transaction transaction;
auto scene = qApp->getMain3DScene();
transaction.updateItem<ParabolaRenderItem>(_pathID, [color, alpha, width, isVisibleInSecondaryCamera, enabled](ParabolaRenderItem& item) {
transaction.updateItem<ParabolaRenderItem>(_pathID, [color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled](ParabolaRenderItem& item) {
item.setColor(color);
item.setAlpha(alpha);
item.setWidth(width);
item.setIsVisibleInSecondaryCamera(isVisibleInSecondaryCamera);
item.setDrawInFront(drawInFront);
item.setEnabled(enabled);
item.updateKey();
});
@ -238,6 +244,7 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
float alpha = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR.a;
float width = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH;
bool isVisibleInSecondaryCamera = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
bool drawInFront = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT;
bool enabled = false;
if (propMap["path"].isValid()) {
enabled = true;
@ -258,6 +265,10 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
if (pathMap["isVisibleInSecondaryCamera"].isValid()) {
isVisibleInSecondaryCamera = pathMap["isVisibleInSecondaryCamera"].toBool();
}
if (pathMap["drawInFront"].isValid()) {
drawInFront = pathMap["drawInFront"].toBool();
}
}
QUuid endID;
@ -269,7 +280,7 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
}
}
return std::make_shared<RenderState>(startID, endID, color, alpha, width, isVisibleInSecondaryCamera, enabled);
return std::make_shared<RenderState>(startID, endID, color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled);
}
PointerEvent ParabolaPointer::buildPointerEvent(const PickedObject& target, const PickResultPointer& pickResult, const std::string& button, bool hover) {
@ -321,8 +332,8 @@ glm::vec3 ParabolaPointer::findIntersection(const PickedObject& pickedObject, co
}
ParabolaPointer::RenderState::ParabolaRenderItem::ParabolaRenderItem(const glm::vec3& color, float alpha, float width,
bool isVisibleInSecondaryCamera, bool enabled) :
_isVisibleInSecondaryCamera(isVisibleInSecondaryCamera), _enabled(enabled)
bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled) :
_isVisibleInSecondaryCamera(isVisibleInSecondaryCamera), _drawInFront(drawInFront), _enabled(enabled)
{
_uniformBuffer->resize(sizeof(ParabolaData));
setColor(color);
@ -358,6 +369,10 @@ void ParabolaPointer::RenderState::ParabolaRenderItem::updateKey() {
builder.withTagBits(render::hifi::TAG_MAIN_VIEW);
}
if (_drawInFront) {
builder.withLayer(render::hifi::LAYER_3D_FRONT);
}
_key = builder.build();
}

View file

@ -21,7 +21,7 @@ public:
using Pointer = Payload::DataPointer;
ParabolaRenderItem(const glm::vec3& color, float alpha, float width,
bool isVisibleInSecondaryCamera, bool enabled);
bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled);
~ParabolaRenderItem() {}
static gpu::PipelinePointer _parabolaPipeline;
@ -46,11 +46,13 @@ public:
void setAcceleration(const glm::vec3& acceleration) { _parabolaData.acceleration = acceleration; }
void setOrigin(const glm::vec3& origin) { _origin = origin; }
void setIsVisibleInSecondaryCamera(const bool& isVisibleInSecondaryCamera) { _isVisibleInSecondaryCamera = isVisibleInSecondaryCamera; }
void setDrawInFront(const bool& drawInFront) { _drawInFront = drawInFront; }
void setEnabled(const bool& enabled) { _enabled = enabled; }
static const glm::vec4 DEFAULT_PARABOLA_COLOR;
static const float DEFAULT_PARABOLA_WIDTH;
static const bool DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
static const bool DEFAULT_PARABOLA_DRAWINFRONT;
private:
render::Item::Bound _bound;
@ -58,6 +60,7 @@ public:
glm::vec3 _origin { 0.0f };
bool _isVisibleInSecondaryCamera { DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA };
bool _drawInFront { DEFAULT_PARABOLA_DRAWINFRONT };
bool _visible { false };
bool _enabled { false };
@ -77,7 +80,7 @@ public:
RenderState() {}
RenderState(const OverlayID& startID, const OverlayID& endID, const glm::vec3& pathColor, float pathAlpha, float pathWidth,
bool isVisibleInSecondaryCamera, bool pathEnabled);
bool isVisibleInSecondaryCamera, bool drawInFront, bool pathEnabled);
void setPathWidth(float width) { _pathWidth = width; }
float getPathWidth() const { return _pathWidth; }
@ -87,7 +90,7 @@ public:
void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult) override;
void editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool enabled);
void editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled);
private:
int _pathID;

View file

@ -27,6 +27,7 @@ class StartEndRenderState {
public:
StartEndRenderState() {}
StartEndRenderState(const OverlayID& startID, const OverlayID& endID);
virtual ~StartEndRenderState() {}
const OverlayID& getStartID() const { return _startID; }
const OverlayID& getEndID() const { return _endID; }

View file

@ -218,6 +218,7 @@ unsigned int PointerScriptingInterface::createLaserPointer(const QVariant& prope
* @property {number} alpha=1.0 The alpha of the parabola.
* @property {number} width=0.01 The width of the parabola, in meters.
* @property {boolean} isVisibleInSecondaryCamera=false If <code>true</code>, the parabola is rendered in the secondary camera.
* @property {boolean} drawInFront=false If <code>true</code>, the parabola is rendered in front of other items in the scene.
*/
/**jsdoc
* A set of properties used to define the visual aspect of a Parabola Pointer in the case that the Pointer is not intersecting something. Same as a {@link Pointers.ParabolaPointerRenderState},
@ -393,4 +394,4 @@ QVariantMap PointerScriptingInterface::getPrevPickResult(unsigned int uid) const
QVariantMap PointerScriptingInterface::getPointerProperties(unsigned int uid) const {
return DependencyManager::get<PointerManager>()->getPointerProperties(uid);
}
}

View file

@ -205,7 +205,7 @@ public:
/**jsdoc
* Returns information about an existing Pointer
* @function Pointers.getPointerState
* @function Pointers.getPointerProperties
* @param {number} uid The ID of the Pointer, as returned by {@link Pointers.createPointer}.
* @returns {Pointers.LaserPointerProperties|Pointers.StylusPointerProperties|Pointers.ParabolaPointerProperties} The information about the Pointer.
* Currently only includes renderStates and defaultRenderStates with associated overlay IDs.

View file

@ -241,4 +241,4 @@ glm::vec2 StylusPointer::findPos2D(const PickedObject& pickedObject, const glm::
default:
return glm::vec2(NAN);
}
}
}

View file

@ -201,3 +201,12 @@ bool HMDScriptingInterface::isKeyboardVisible() {
void HMDScriptingInterface::centerUI() {
QMetaObject::invokeMethod(qApp, "centerUI", Qt::QueuedConnection);
}
QVariant HMDScriptingInterface::getPlayAreaRect() {
auto rect = qApp->getActiveDisplayPlugin()->getPlayAreaRect();
return qRectFToVariant(rect);
}
QVector<glm::vec3> HMDScriptingInterface::getSensorPositions() {
return qApp->getActiveDisplayPlugin()->getSensorPositions();
}

View file

@ -57,18 +57,28 @@ class QScriptEngine;
* @property {Uuid} tabletScreenID - The UUID of the tablet's screen overlay.
* @property {Uuid} homeButtonID - The UUID of the tablet's "home" button overlay.
* @property {Uuid} homeButtonHighlightID - The UUID of the tablet's "home" button highlight overlay.
* @property {Uuid} miniTabletID - The UUID of the mini tablet's body model overlay. <code>null</code> if not in HMD mode.
* @property {Uuid} miniTabletScreenID - The UUID of the mini tablet's screen overlay. <code>null</code> if not in HMD mode.
* @property {number} miniTabletHand - The hand that the mini tablet is displayed on: <code>0</code> for left hand,
* <code>1</code> for right hand, <code>-1</code> if not in HMD mode.
* @property {Rect} playArea=0,0,0,0 - The size and position of the HMD play area in sensor coordinates. <em>Read-only.</em>
* @property {Vec3[]} sensorPositions=[] - The positions of the VR system sensors in sensor coordinates. <em>Read-only.</em>
*/
class HMDScriptingInterface : public AbstractHMDScriptingInterface, public Dependency {
Q_OBJECT
Q_PROPERTY(glm::vec3 position READ getPosition)
Q_PROPERTY(glm::quat orientation READ getOrientation)
Q_PROPERTY(bool mounted READ isMounted NOTIFY mountedChanged)
Q_PROPERTY(bool showTablet READ getShouldShowTablet)
Q_PROPERTY(bool tabletContextualMode READ getTabletContextualMode)
Q_PROPERTY(QUuid tabletID READ getCurrentTabletFrameID WRITE setCurrentTabletFrameID)
Q_PROPERTY(QUuid homeButtonID READ getCurrentHomeButtonID WRITE setCurrentHomeButtonID)
Q_PROPERTY(QUuid tabletScreenID READ getCurrentTabletScreenID WRITE setCurrentTabletScreenID)
Q_PROPERTY(QUuid homeButtonHighlightID READ getCurrentHomeButtonHighlightID WRITE setCurrentHomeButtonHighlightID)
Q_PROPERTY(QUuid miniTabletID READ getCurrentMiniTabletID WRITE setCurrentMiniTabletID)
Q_PROPERTY(QUuid miniTabletScreenID READ getCurrentMiniTabletScreenID WRITE setCurrentMiniTabletScreenID)
Q_PROPERTY(int miniTabletHand READ getCurrentMiniTabletHand WRITE setCurrentMiniTabletHand)
Q_PROPERTY(QVariant playArea READ getPlayAreaRect);
Q_PROPERTY(QVector<glm::vec3> sensorPositions READ getSensorPositions);
public:
@ -350,7 +360,7 @@ public:
static QScriptValue getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine);
static QScriptValue getHUDLookAtPosition3D(QScriptContext* context, QScriptEngine* engine);
bool isMounted() const;
bool isMounted() const override;
void toggleShouldShowTablet();
void setShouldShowTablet(bool value);
@ -369,6 +379,18 @@ public:
void setCurrentTabletScreenID(QUuid tabletID) { _tabletScreenID = tabletID; }
QUuid getCurrentTabletScreenID() const { return _tabletScreenID; }
void setCurrentMiniTabletID(QUuid miniTabletID) { _miniTabletID = miniTabletID; }
QUuid getCurrentMiniTabletID() const { return _miniTabletID; }
void setCurrentMiniTabletScreenID(QUuid miniTabletScreenID) { _miniTabletScreenID = miniTabletScreenID; }
QUuid getCurrentMiniTabletScreenID() const { return _miniTabletScreenID; }
void setCurrentMiniTabletHand(int miniTabletHand) { _miniTabletHand = miniTabletHand; }
int getCurrentMiniTabletHand() const { return _miniTabletHand; }
QVariant getPlayAreaRect();
QVector<glm::vec3> getSensorPositions();
private:
bool _showTablet { false };
bool _tabletContextualMode { false };
@ -377,6 +399,9 @@ private:
QUuid _homeButtonID;
QUuid _tabletEntityID;
QUuid _homeButtonHighlightID;
QUuid _miniTabletID;
QUuid _miniTabletScreenID;
int _miniTabletHand { -1 };
// Get the position of the HMD
glm::vec3 getPosition() const;

View file

@ -135,7 +135,8 @@ void WindowScriptingInterface::openUrl(const QUrl& url) {
DependencyManager::get<AddressManager>()->handleLookupString(url.toString());
} else {
#if defined(Q_OS_ANDROID)
QList<QString> args = { url.toString() };
QMap<QString, QString> args;
args["url"] = url.toString();
AndroidHelper::instance().requestActivity("WebView", true, args);
#else
// address manager did not handle - ask QDesktopServices to handle

View file

@ -27,6 +27,7 @@
#include "Application.h"
#include "scripting/HMDScriptingInterface.h"
#include "Constants.h"
HIFI_QML_DEF(LoginDialog)
@ -220,8 +221,6 @@ void LoginDialog::signup(const QString& email, const QString& username, const QS
payload.insert("user", userObject);
static const QString API_SIGNUP_PATH = "api/v1/users";
qDebug() << "Sending a request to create an account for" << username;
auto accountManager = DependencyManager::get<AccountManager>();

View file

@ -75,7 +75,14 @@ void OverlayConductor::centerUI() {
void OverlayConductor::update(float dt) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
bool currentVisible = !offscreenUi->getDesktop()->property("pinned").toBool();
if (!offscreenUi) {
return;
}
auto desktop = offscreenUi->getDesktop();
if (!desktop) {
return;
}
bool currentVisible = !desktop->property("pinned").toBool();
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
// centerUI when hmd mode is first enabled and mounted

View file

@ -226,18 +226,22 @@ void setupPreferences() {
static const QString VR_MOVEMENT{ "VR Movement" };
{
static const QString movementsControlChannel = QStringLiteral("Hifi-Advanced-Movement-Disabler");
auto getter = [myAvatar]()->bool { return myAvatar->useAdvancedMovementControls(); };
auto setter = [myAvatar](bool value) { myAvatar->setUseAdvancedMovementControls(value); };
preferences->addPreference(new CheckPreference(VR_MOVEMENT,
QStringLiteral("Advanced movement in VR (Teleport movement when unchecked)"),
getter, setter));
auto getter = [myAvatar]()->int { return myAvatar->useAdvancedMovementControls() ? 1 : 0; };
auto setter = [myAvatar](int value) { myAvatar->setUseAdvancedMovementControls(value == 1); };
auto preference =
new RadioButtonsPreference(VR_MOVEMENT, "Teleporting only / Walking and teleporting", getter, setter);
QStringList items;
items << "Teleporting only" << "Walking and teleporting";
preference->setHeading("Movement mode");
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->bool { return myAvatar->getFlyingHMDPref(); };
auto setter = [myAvatar](bool value) { myAvatar->setFlyingHMDPref(value); };
preferences->addPreference(new CheckPreference(VR_MOVEMENT, "Flying & jumping (HMD)", getter, setter));
auto preference = new CheckPreference(VR_MOVEMENT, "Jumping and flying", getter, setter);
preference->setIndented(true);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->int { return myAvatar->getSnapTurn() ? 0 : 1; };
@ -245,9 +249,16 @@ void setupPreferences() {
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Snap turn / Smooth turn", getter, setter);
QStringList items;
items << "Snap turn" << "Smooth turn";
preference->setHeading("Rotation mode");
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->bool { return myAvatar->getShowPlayArea(); };
auto setter = [myAvatar](bool value) { myAvatar->setShowPlayArea(value); };
auto preference = new CheckPreference(VR_MOVEMENT, "Show room boundaries while teleporting", getter, setter);
preferences->addPreference(preference);
}
{
auto getter = [=]()->float { return myAvatar->getUserHeight(); };
auto setter = [=](float value) { myAvatar->setUserHeight(value); };
@ -258,12 +269,6 @@ void setupPreferences() {
preference->setStep(0.001f);
preferences->addPreference(preference);
}
{
auto preference = new ButtonPreference(VR_MOVEMENT, "RESET SENSORS", [] {
qApp->resetSensors();
});
preferences->addPreference(preference);
}
static const QString AVATAR_CAMERA{ "Mouse Sensitivity" };
{

View file

@ -74,7 +74,7 @@ void Circle3DOverlay::render(RenderArgs* args) {
const float FULL_CIRCLE = 360.0f;
const float SLICES = 180.0f; // The number of segments used to build the circle
const float SLICE_ANGLE = FULL_CIRCLE / SLICES;
const float SLICE_ANGLE_RADIANS = glm::radians(FULL_CIRCLE / SLICES);
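// i.e. glm::radians(2.0f), about 0.0349 radians per segment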
const float MAX_COLOR = 255.0f;
auto geometryCache = DependencyManager::get<GeometryCache>();
@ -111,28 +111,38 @@ void Circle3DOverlay::render(RenderArgs* args) {
vec4 innerEndColor = vec4(toGlm(_innerEndColor), _innerEndAlpha) * pulseModifier;
vec4 outerEndColor = vec4(toGlm(_outerEndColor), _outerEndAlpha) * pulseModifier;
const auto startAtRadians = glm::radians(_startAt);
const auto endAtRadians = glm::radians(_endAt);
const auto totalRange = endAtRadians - startAtRadians; // same units as the loop variable below, so range stays in [0, 1]
if (_innerRadius <= 0) {
_solidPrimitive = gpu::TRIANGLE_FAN;
points << vec2();
colors << innerStartColor;
for (float angle = _startAt; angle <= _endAt; angle += SLICE_ANGLE) {
float range = (angle - _startAt) / (_endAt - _startAt);
float angleRadians = glm::radians(angle);
for (float angleRadians = startAtRadians; angleRadians < endAtRadians; angleRadians += SLICE_ANGLE_RADIANS) {
float range = (angleRadians - startAtRadians) / totalRange;
points << glm::vec2(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
colors << glm::mix(outerStartColor, outerEndColor, range);
}
points << glm::vec2(cosf(endAtRadians) * _outerRadius, sinf(endAtRadians) * _outerRadius);
colors << outerEndColor;
} else {
_solidPrimitive = gpu::TRIANGLE_STRIP;
for (float angle = _startAt; angle <= _endAt; angle += SLICE_ANGLE) {
float range = (angle - _startAt) / (_endAt - _startAt);
float angleRadians = glm::radians(angle);
for (float angleRadians = startAtRadians; angleRadians < endAtRadians; angleRadians += SLICE_ANGLE_RADIANS) {
float range = (angleRadians - startAtRadians) / totalRange;
points << glm::vec2(cosf(angleRadians) * _innerRadius, sinf(angleRadians) * _innerRadius);
colors << glm::mix(innerStartColor, innerEndColor, range);
points << glm::vec2(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
colors << glm::mix(outerStartColor, outerEndColor, range);
}
points << glm::vec2(cosf(endAtRadians) * _innerRadius, sinf(endAtRadians) * _innerRadius);
colors << innerEndColor;
points << glm::vec2(cosf(endAtRadians) * _outerRadius, sinf(endAtRadians) * _outerRadius);
colors << outerEndColor;
}
geometryCache->updateVertices(_quadVerticesID, points, colors);
}
@ -147,29 +157,28 @@ void Circle3DOverlay::render(RenderArgs* args) {
if (geometryChanged) {
QVector<glm::vec2> points;
float angle = _startAt;
float angleInRadians = glm::radians(angle);
glm::vec2 firstPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
const auto startAtRadians = glm::radians(_startAt);
const auto endAtRadians = glm::radians(_endAt);
float angleRadians = startAtRadians;
glm::vec2 firstPoint(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
points << firstPoint;
while (angle < _endAt) {
angle += SLICE_ANGLE;
angleInRadians = glm::radians(angle);
glm::vec2 thisPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
while (angleRadians < endAtRadians) {
angleRadians += SLICE_ANGLE_RADIANS;
glm::vec2 thisPoint(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
points << thisPoint;
if (getIsDashedLine()) {
angle += SLICE_ANGLE / 2.0f; // short gap
angleInRadians = glm::radians(angle);
glm::vec2 dashStartPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
angleRadians += SLICE_ANGLE_RADIANS / 2.0f; // short gap
glm::vec2 dashStartPoint(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
points << dashStartPoint;
}
}
// get the last slice portion....
angle = _endAt;
angleInRadians = glm::radians(angle);
glm::vec2 lastPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
angleRadians = endAtRadians;
glm::vec2 lastPoint(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
points << lastPoint;
geometryCache->updateVertices(_lineVerticesID, points, vec4(toGlm(getColor()), getAlpha()));
}

View file

@ -114,7 +114,6 @@ void Text3DOverlay::render(RenderArgs* args) {
float scaleFactor = (maxHeight / FIXED_FONT_SCALING_RATIO) * _lineHeight;
glm::vec2 clipMinimum(0.0f, 0.0f);
glm::vec2 clipDimensions((dimensions.x - (_leftMargin + _rightMargin)) / scaleFactor,
(dimensions.y - (_topMargin + _bottomMargin)) / scaleFactor);
@ -296,4 +295,4 @@ QSizeF Text3DOverlay::textSize(const QString& text) const {
float pointToWorldScale = (maxHeight / FIXED_FONT_SCALING_RATIO) * _lineHeight;
return QSizeF(extents.x, extents.y) * pointToWorldScale;
}
}

View file

@ -1173,20 +1173,5 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
// crossfade old/new output and accumulate
crossfade_4x2(bqBuffer, output, crossfadeTable, HRTF_BLOCK);
_silentState = false;
}
void AudioHRTF::renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames) {
// process the first silent block, to flush internal state
if (!_silentState) {
render(input, output, index, azimuth, distance, gain, numFrames);
}
// new parameters become old
_azimuthState = azimuth;
_distanceState = distance;
_gainState = gain;
_silentState = true;
_resetState = false;
}

View file

@ -47,9 +47,14 @@ public:
void render(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);
//
// Fast path when input is known to be silent
// Fast path when input is known to be silent and state has been flushed
//
void renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);
void setParameterHistory(float azimuth, float distance, float gain) {
// new parameters become old
_azimuthState = azimuth;
_distanceState = distance;
_gainState = gain;
}
//
// HRTF local gain adjustment in amplitude (1.0 == unity)
@ -59,23 +64,25 @@ public:
// clear internal state, but retain settings
void reset() {
// FIR history
memset(_firState, 0, sizeof(_firState));
if (!_resetState) {
// FIR history
memset(_firState, 0, sizeof(_firState));
// integer delay history
memset(_delayState, 0, sizeof(_delayState));
// integer delay history
memset(_delayState, 0, sizeof(_delayState));
// biquad history
memset(_bqState, 0, sizeof(_bqState));
// biquad history
memset(_bqState, 0, sizeof(_bqState));
// parameter history
_azimuthState = 0.0f;
_distanceState = 0.0f;
_gainState = 0.0f;
// parameter history
_azimuthState = 0.0f;
_distanceState = 0.0f;
_gainState = 0.0f;
// _gainAdjust is retained
// _gainAdjust is retained
_silentState = true;
_resetState = true;
}
}
private:
@ -110,7 +117,7 @@ private:
// global and local gain adjustment
float _gainAdjust = HRTF_GAIN;
bool _silentState = true;
bool _resetState = true;
};
#endif // AudioHRTF_h
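With renderSilent() removed, the caller is now responsible for flushing state when a source goes silent. A minimal sketch of that pattern, inferred from this diff (the mixer-side call site is not shown here, and mixOneSource is an illustrative name):

    void mixOneSource(AudioHRTF& hrtf, int16_t* input, float* output, int index,
                      float azimuth, float distance, float gain, int numFrames, bool silent) {
        if (silent) {
            hrtf.setParameterHistory(azimuth, distance, gain);  // keep parameters current for the next crossfade
            hrtf.reset();                                       // flushes once; a no-op afterwards thanks to _resetState
        } else {
            hrtf.render(input, output, index, azimuth, distance, gain, numFrames);  // normal path; render() clears _silentState
        }
    }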

View file

@ -66,4 +66,6 @@ public:
PacketStreamStats _packetStreamWindowStats;
};
static_assert(sizeof(AudioStreamStats) == 152, "AudioStreamStats size isn't right");
#endif // hifi_AudioStreamStats_h

View file

@ -171,7 +171,6 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
} else {
_mismatchedAudioCodecCount++;
qDebug(audio) << "Codec mismatch: expected" << _selectedCodecName << "got" << codecInPacket;
if (packetPCM) {
// If there are PCM packets in-flight after the codec is changed, use them.
@ -191,7 +190,8 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
auto sendingNode = DependencyManager::get<NodeList>()->nodeWithLocalID(message.getSourceID());
if (sendingNode) {
emit mismatchedAudioCodec(sendingNode, _selectedCodecName, codecInPacket);
qDebug(audio) << "Codec mismatch threshold exceeded, SelectedAudioFormat(" << _selectedCodecName << " ) sent";
qDebug(audio) << "Codec mismatch threshold exceeded, sent selected codec"
<< _selectedCodecName << "to" << message.getSenderSockAddr();
}
}
}
@ -208,7 +208,6 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
int framesAvailable = _ringBuffer.framesAvailable();
// if this stream was starved, check if we're still starved.
if (_isStarved && framesAvailable >= _desiredJitterBufferFrames) {
qCInfo(audiostream, "Starve ended");
_isStarved = false;
}
// if the ringbuffer exceeds the desired size by more than the threshold specified,
@ -378,10 +377,6 @@ void InboundAudioStream::framesAvailableChanged() {
}
void InboundAudioStream::setToStarved() {
if (!_isStarved) {
qCInfo(audiostream, "Starved");
}
_consecutiveNotMixedCount = 0;
_starveCount++;
// if we have more than the desired frames when setToStarved() is called, then we'll immediately

View file

@ -30,6 +30,8 @@
// Audio Env bitset
const int HAS_REVERB_BIT = 0; // 1st bit
using StreamSequenceNumber = quint16;
class InboundAudioStream : public NodeData {
Q_OBJECT

View file

@ -50,7 +50,7 @@ int InjectedAudioStream::parseStreamProperties(PacketType type,
}
// pull the loopback flag and set our boolean
uchar shouldLoopback;
LoopbackFlag shouldLoopback;
packetStream >> shouldLoopback;
_shouldLoopbackForNode = (shouldLoopback == 1);

View file

@ -16,6 +16,8 @@
#include "PositionalAudioStream.h"
using LoopbackFlag = uchar;
class InjectedAudioStream : public PositionalAudioStream {
public:
InjectedAudioStream(const QUuid& streamIdentifier, bool isStereo, int numStaticJitterFrames = -1);

View file

@ -14,7 +14,6 @@
#include <cstring>
#include <glm/detail/func_common.hpp>
#include <QtCore/QDataStream>
#include <QtCore/QLoggingCategory>
@ -78,20 +77,15 @@ int PositionalAudioStream::parsePositionalData(const QByteArray& positionalByteA
QDataStream packetStream(positionalByteArray);
packetStream.readRawData(reinterpret_cast<char*>(&_position), sizeof(_position));
// if the client sends us a bad position, flag it so that we don't consider this stream for mixing
if (glm::isnan(_position.x) || glm::isnan(_position.y) || glm::isnan(_position.z)) {
HIFI_FDEBUG("PositionalAudioStream unpacked invalid position for node" << uuidStringWithoutCurlyBraces(getNodeID()) );
_hasValidPosition = false;
} else {
_hasValidPosition = true;
}
packetStream.readRawData(reinterpret_cast<char*>(&_orientation), sizeof(_orientation));
packetStream.readRawData(reinterpret_cast<char*>(&_avatarBoundingBoxCorner), sizeof(_avatarBoundingBoxCorner));
packetStream.readRawData(reinterpret_cast<char*>(&_avatarBoundingBoxScale), sizeof(_avatarBoundingBoxScale));
if (_avatarBoundingBoxCorner != _ignoreBox.getCorner()) {
// if the ignore box corner changes, we need to re-calculate the ignore box
calculateIgnoreBox();
}
// if this node sent us a NaN for first float in orientation then don't consider this good audio and bail
if (glm::isnan(_orientation.x)) {
// NOTE: why would we reset the ring buffer here?
@ -107,3 +101,29 @@ AudioStreamStats PositionalAudioStream::getAudioStreamStats() const {
streamStats._streamType = _type;
return streamStats;
}
void PositionalAudioStream::calculateIgnoreBox() {
if (_avatarBoundingBoxScale != glm::vec3(0)) {
auto scale = _avatarBoundingBoxScale;
// enforce a minimum scale
static const glm::vec3 MIN_IGNORE_BOX_SCALE = glm::vec3(0.3f, 1.3f, 0.3f);
if (glm::any(glm::lessThan(scale, MIN_IGNORE_BOX_SCALE))) {
scale = MIN_IGNORE_BOX_SCALE;
}
// (this is an arbitrary number, determined empirically for comfort)
const float IGNORE_BOX_SCALE_FACTOR = 2.4f;
scale *= IGNORE_BOX_SCALE_FACTOR;
// create the box (we use a box for the zone for convenience)
_ignoreBox.setBox(_avatarBoundingBoxCorner, scale);
}
}
void PositionalAudioStream::enableIgnoreBox() {
// re-calculate the ignore box using the latest values
calculateIgnoreBox();
_isIgnoreBoxEnabled = true;
}
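A hedged sketch of how a mixer slave might consult the box while preparing mixes; PositionalAudioStream::getPosition() and AABox::contains() are assumed from the existing shared API, and this call site is not part of the diff:

    bool shouldIgnore(const PositionalAudioStream& listener, const PositionalAudioStream& other) {
        return listener.isIgnoreBoxEnabled() &&
               listener.getIgnoreBox().contains(other.getPosition());
    }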

View file

@ -19,6 +19,24 @@
const int AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY = 100;
using StreamID = QUuid;
const int NUM_STREAM_ID_BYTES = NUM_BYTES_RFC4122_UUID;
struct NodeIDStreamID {
QUuid nodeID;
Node::LocalID nodeLocalID;
StreamID streamID;
NodeIDStreamID(QUuid nodeID, Node::LocalID nodeLocalID, StreamID streamID)
: nodeID(nodeID), nodeLocalID(nodeLocalID), streamID(streamID) {};
bool operator==(const NodeIDStreamID& other) const {
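// a node may be referenced by its local ID or its full UUID, so either match (plus the same stream ID) counts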
return (nodeLocalID == other.nodeLocalID || nodeID == other.nodeID) && streamID == other.streamID;
}
};
using ChannelFlag = quint8;
class PositionalAudioStream : public InboundAudioStream {
Q_OBJECT
public:
@ -30,7 +48,7 @@ public:
PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, int numStaticJitterFrames = -1);
const QUuid DEFAULT_STREAM_IDENTIFIER = QUuid();
virtual const QUuid& getStreamIdentifier() const { return DEFAULT_STREAM_IDENTIFIER; }
virtual const StreamID& getStreamIdentifier() const { return DEFAULT_STREAM_IDENTIFIER; }
virtual void resetStats() override;
@ -51,7 +69,15 @@ public:
const glm::vec3& getAvatarBoundingBoxCorner() const { return _avatarBoundingBoxCorner; }
const glm::vec3& getAvatarBoundingBoxScale() const { return _avatarBoundingBoxScale; }
bool hasValidPosition() const { return _hasValidPosition; }
using IgnoreBox = AABox;
// called from single AudioMixerSlave while processing packets for node
void enableIgnoreBox();
void disableIgnoreBox() { _isIgnoreBoxEnabled = false; }
// thread-safe, called from AudioMixerSlave(s) while preparing mixes
bool isIgnoreBoxEnabled() const { return _isIgnoreBoxEnabled; }
const IgnoreBox& getIgnoreBox() const { return _ignoreBox; }
protected:
// disallow copying of PositionalAudioStream objects
@ -61,6 +87,8 @@ protected:
int parsePositionalData(const QByteArray& positionalByteArray);
protected:
void calculateIgnoreBox();
Type _type;
glm::vec3 _position;
glm::quat _orientation;
@ -79,7 +107,8 @@ protected:
float _quietestFrameLoudness;
int _frameCounter;
bool _hasValidPosition { false };
bool _isIgnoreBoxEnabled { false };
IgnoreBox _ignoreBox;
};
#endif // hifi_PositionalAudioStream_h

View file

@ -113,6 +113,80 @@ void Avatar::setShowNamesAboveHeads(bool show) {
showNamesAboveHeads = show;
}
AvatarTransit::Status AvatarTransit::update(float deltaTime, const glm::vec3& avatarPosition, const AvatarTransit::TransitConfig& config) {
glm::vec3 currentPosition = _isTransiting ? _currentPosition : avatarPosition;
float oneFrameDistance = glm::length(currentPosition - _lastPosition);
const float MAX_TRANSIT_DISTANCE = 30.0f;
float scaledMaxTransitDistance = MAX_TRANSIT_DISTANCE * _scale;
if (oneFrameDistance > config._triggerDistance && oneFrameDistance < scaledMaxTransitDistance && !_isTransiting) {
start(deltaTime, _lastPosition, currentPosition, config);
}
_lastPosition = currentPosition;
_status = updatePosition(deltaTime);
return _status;
}
void AvatarTransit::start(float deltaTime, const glm::vec3& startPosition, const glm::vec3& endPosition, const AvatarTransit::TransitConfig& config) {
_startPosition = startPosition;
_endPosition = endPosition;
_transitLine = endPosition - startPosition;
_totalDistance = glm::length(_transitLine);
_easeType = config._easeType;
const float REFERENCE_FRAMES_PER_SECOND = 30.0f;
int transitFrames = (!config._isDistanceBased) ? config._totalFrames : config._framesPerMeter * _totalDistance;
_totalTime = (float)transitFrames / REFERENCE_FRAMES_PER_SECOND;
_currentTime = 0.0f;
_isTransiting = true;
}
float AvatarTransit::getEaseValue(AvatarTransit::EaseType type, float value) {
switch (type) {
case EaseType::NONE:
return value;
break;
case EaseType::EASE_IN:
return value * value;
break;
case EaseType::EASE_OUT:
return value * (2.0f - value);
break;
case EaseType::EASE_IN_OUT:
return (value < 0.5f) ? 2.0f * value * value : -1.0f + (4.0f - 2.0f * value) * value;
break;
}
return value;
}
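// Worked values at value = 0.5: NONE -> 0.5, EASE_IN -> 0.25, EASE_OUT -> 0.75, EASE_IN_OUT -> 0.5.
// All four curves map 0 -> 0 and 1 -> 1, so updatePosition() always lands exactly on _endPosition.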
AvatarTransit::Status AvatarTransit::updatePosition(float deltaTime) {
Status status = Status::IDLE;
if (_isTransiting) {
float nextTime = _currentTime + deltaTime;
glm::vec3 newPosition;
if (nextTime >= _totalTime) {
_currentPosition = _endPosition;
_isTransiting = false;
status = Status::END_TRANSIT;
} else {
if (_currentTime == 0) {
status = Status::START_TRANSIT;
} else {
status = Status::TRANSITING;
}
float percentageIntoTransit = nextTime / _totalTime;
_currentPosition = _startPosition + getEaseValue(_easeType, percentageIntoTransit) * _transitLine;
}
_currentTime = nextTime;
}
return status;
}
bool AvatarTransit::getNextPosition(glm::vec3& nextPosition) {
nextPosition = _currentPosition;
return _isTransiting;
}
Avatar::Avatar(QThread* thread) :
_voiceSphereID(GeometryCache::UNKNOWN_ID)
{
@ -449,7 +523,18 @@ void Avatar::relayJointDataToChildren() {
void Avatar::simulate(float deltaTime, bool inView) {
PROFILE_RANGE(simulation, "simulate");
if (_transit.isTransiting()) {
glm::vec3 nextPosition;
if (_transit.getNextPosition(nextPosition)) {
_globalPosition = nextPosition;
_globalPositionChanged = usecTimestampNow();
if (!hasParent()) {
setLocalPosition(nextPosition);
}
}
}
_simulationRate.increment();
if (inView) {
_simulationInViewRate.increment();
@ -460,7 +545,7 @@ void Avatar::simulate(float deltaTime, bool inView) {
PROFILE_RANGE(simulation, "updateJoints");
if (inView) {
Head* head = getHead();
if (_hasNewJointData) {
if (_hasNewJointData || _transit.isTransiting()) {
_skeletonModel->getRig().copyJointsFromJointData(_jointData);
glm::mat4 rootTransform = glm::scale(_skeletonModel->getScale()) * glm::translate(_skeletonModel->getOffset());
_skeletonModel->getRig().computeExternalPoses(rootTransform);
@ -1881,6 +1966,22 @@ float Avatar::getUnscaledEyeHeightFromSkeleton() const {
}
}
AvatarTransit::Status Avatar::updateTransit(float deltaTime, const glm::vec3& avatarPosition, const AvatarTransit::TransitConfig& config) {
std::lock_guard<std::mutex> lock(_transitLock);
return _transit.update(deltaTime, avatarPosition, config);
}
void Avatar::setTransitScale(float scale) {
std::lock_guard<std::mutex> lock(_transitLock);
_transit.setScale(scale);
}
void Avatar::overrideNextPackagePositionData(const glm::vec3& position) {
std::lock_guard<std::mutex> lock(_transitLock);
_overrideGlobalPosition = true;
_globalPositionOverride = position;
}
void Avatar::addMaterial(graphics::MaterialLayer material, const std::string& parentMaterialName) {
std::lock_guard<std::mutex> lock(_materialsLock);
_materials[parentMaterialName].push(material);

View file

@ -50,6 +50,62 @@ enum ScreenTintLayer {
class Texture;
class AvatarTransit {
public:
enum Status {
IDLE = 0,
START_TRANSIT,
TRANSITING,
END_TRANSIT
};
enum EaseType {
NONE = 0,
EASE_IN,
EASE_OUT,
EASE_IN_OUT
};
struct TransitConfig {
TransitConfig() {};
int _totalFrames { 0 };
int _framesPerMeter { 0 };
bool _isDistanceBased { false };
float _triggerDistance { 0 };
EaseType _easeType { EaseType::EASE_OUT };
};
AvatarTransit() {};
Status update(float deltaTime, const glm::vec3& avatarPosition, const TransitConfig& config);
Status getStatus() { return _status; }
bool isTransiting() { return _isTransiting; }
glm::vec3 getCurrentPosition() { return _currentPosition; }
bool getNextPosition(glm::vec3& nextPosition);
glm::vec3 getEndPosition() { return _endPosition; }
float getTransitTime() { return _totalTime; }
void setScale(float scale) { _scale = scale; }
private:
Status updatePosition(float deltaTime);
void start(float deltaTime, const glm::vec3& startPosition, const glm::vec3& endPosition, const TransitConfig& config);
float getEaseValue(AvatarTransit::EaseType type, float value);
bool _isTransiting { false };
glm::vec3 _startPosition;
glm::vec3 _endPosition;
glm::vec3 _currentPosition;
glm::vec3 _lastPosition;
glm::vec3 _transitLine;
float _totalDistance { 0.0f };
float _totalTime { 0.0f };
float _currentTime { 0.0f };
EaseType _easeType { EaseType::EASE_OUT };
Status _status { Status::IDLE };
float _scale { 1.0f };
};
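A minimal sketch of driving an AvatarTransit from an update loop; the tuning values are illustrative only, and in the real code the configuration lives on AvatarManager as _transitConfig:

    void stepTransit(AvatarTransit& transit, float deltaTime, const glm::vec3& avatarPosition) {
        AvatarTransit::TransitConfig config;
        config._isDistanceBased = true;   // transit duration scales with distance travelled
        config._framesPerMeter = 2;       // illustrative tuning, not taken from this diff
        config._triggerDistance = 1.0f;   // per-frame jumps longer than this animate instead of snapping
        if (transit.update(deltaTime, avatarPosition, config) == AvatarTransit::Status::START_TRANSIT) {
            // a transit just began; AvatarManager reacts by sending one packet carrying the end position
        }
    }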
class Avatar : public AvatarData, public scriptable::ModelProvider {
Q_OBJECT
@ -370,6 +426,13 @@ public:
virtual scriptable::ScriptableModelBase getScriptableModel() override;
std::shared_ptr<AvatarTransit> getTransit() { return std::make_shared<AvatarTransit>(_transit); };
AvatarTransit::Status updateTransit(float deltaTime, const glm::vec3& avatarPosition, const AvatarTransit::TransitConfig& config);
void setTransitScale(float scale);
void overrideNextPackagePositionData(const glm::vec3& position);
signals:
void targetScaleChanged(float targetScale);
@ -505,6 +568,7 @@ protected:
RateCounter<> _skeletonModelSimulationRate;
RateCounter<> _jointDataSimulationRate;
protected:
class AvatarEntityDataHash {
public:
@ -528,6 +592,9 @@ protected:
bool _reconstructSoftEntitiesJointMap { false };
float _modelScale { 1.0f };
AvatarTransit _transit;
std::mutex _transitLock;
static int _jointConesID;
int _voiceSphereID;

View file

@ -369,7 +369,12 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
if (hasAvatarGlobalPosition) {
auto startSection = destinationBuffer;
AVATAR_MEMCPY(_globalPosition);
if (_overrideGlobalPosition) {
AVATAR_MEMCPY(_globalPositionOverride);
} else {
AVATAR_MEMCPY(_globalPosition);
}
int numBytes = destinationBuffer - startSection;
@ -2088,6 +2093,10 @@ void AvatarData::sendAvatarDataPacket(bool sendAll) {
}
}
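// the override set by overrideNextPackagePositionData() is consumed by exactly one outgoing packet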
if (_overrideGlobalPosition) {
_overrideGlobalPosition = false;
}
doneEncoding(cullSmallData);
static AvatarDataSequenceNumber sequenceNumber = 0;

View file

@ -1372,7 +1372,8 @@ protected:
// where Entities are located. This is currently only used by the mixer to decide how often to send
// updates about one avatar to another.
glm::vec3 _globalPosition { 0, 0, 0 };
glm::vec3 _globalPositionOverride { 0, 0, 0 };
bool _overrideGlobalPosition { false };
quint64 _globalPositionChanged { 0 };
quint64 _avatarBoundingBoxChanged { 0 };

View file

@ -266,6 +266,7 @@ AvatarSharedPointer AvatarHashMap::parseAvatarData(QSharedPointer<ReceivedMessag
}
}
// have the matching (or new) avatar parse the data from the packet
int bytesRead = avatar->parseDataFromBuffer(byteArray);
message->seek(positionBeforeRead + bytesRead);
@ -316,7 +317,6 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage>
// In this case, the "sendingNode" is the Avatar Mixer.
avatar->processAvatarIdentity(message->getMessage(), identityChanged, displayNameChanged);
_replicas.processAvatarIdentity(identityUUID, message->getMessage(), identityChanged, displayNameChanged);
}
}
@ -329,6 +329,7 @@ void AvatarHashMap::processBulkAvatarTraits(QSharedPointer<ReceivedMessage> mess
// grab the avatar so we can ask it to process trait data
bool isNewAvatar;
auto avatar = newOrExistingAvatar(avatarID, sendingNode, isNewAvatar);
// read the first trait type for this avatar
AvatarTraits::TraitType traitType;
message->readPrimitive(&traitType);

View file

@ -98,6 +98,7 @@ enum Hand {
class InputDevice {
public:
InputDevice(const QString& name) : _name(name) {}
virtual ~InputDevice() {}
using Pointer = std::shared_ptr<InputDevice>;

View file

@ -178,6 +178,17 @@ namespace controller {
return inputRecorder->getSaveDirectory();
}
QStringList ScriptingInterface::getRunningInputDeviceNames() {
QMutexLocker locker(&_runningDevicesMutex);
return _runningInputDeviceNames;
}
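// assumption: device plugins report start/stop from their own threads, hence the mutex around the shared list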
void ScriptingInterface::updateRunningInputDevices(const QString& deviceName, bool isRunning, const QStringList& runningDevices) {
QMutexLocker locker(&_runningDevicesMutex);
_runningInputDeviceNames = runningDevices;
emit inputDeviceRunningChanged(deviceName, isRunning);
}
bool ScriptingInterface::triggerHapticPulseOnDevice(unsigned int device, float strength, float duration, controller::Hand hand) const {
return DependencyManager::get<UserInputMapper>()->triggerHapticPulseOnDevice(device, strength, duration, hand);
}

View file

@ -26,6 +26,7 @@
#include <QThread>
#include <QtCore/QObject>
#include <QtCore/QVariant>
#include <QMutex>
#include <QtQml/QJSValue>
#include <QtScript/QScriptValue>
@ -431,6 +432,13 @@ namespace controller {
*/
Q_INVOKABLE QString getInputRecorderSaveDirectory();
/**jsdoc
* Gets the names of all the input devices that are currently running (active and enabled).
* @function Controller.getRunningInputDeviceNames
* @returns {string[]} The names of the running input devices.
*/
Q_INVOKABLE QStringList getRunningInputDeviceNames();
bool isMouseCaptured() const { return _mouseCaptured; }
bool isTouchCaptured() const { return _touchCaptured; }
bool isWheelCaptured() const { return _wheelCaptured; }
@ -531,6 +539,8 @@ namespace controller {
*/
virtual void releaseActionEvents() { _actionsCaptured = false; }
void updateRunningInputDevices(const QString& deviceName, bool isRunning, const QStringList& runningDevices);
signals:
/**jsdoc
* Triggered when an action occurs.
@ -590,6 +600,17 @@ namespace controller {
*/
void hardwareChanged();
/**jsdoc
* Triggered when an input device is enabled or disabled, for example when Leap Motion is toggled
* in Settings > Controls.
* @function Controller.inputDeviceRunningChanged
* @param {string} deviceName - The name of the device that was enabled or disabled.
* @param {boolean} isRunning - <code>true</code> if the device is now running, <code>false</code> if not.
* @returns {Signal}
*/
void inputDeviceRunningChanged(QString deviceName, bool isRunning);
private:
// Update the exposed variant maps reporting active hardware
void updateMaps();
@ -598,10 +619,14 @@ namespace controller {
QVariantMap _actions;
QVariantMap _standard;
QStringList _runningInputDeviceNames;
std::atomic<bool> _mouseCaptured{ false };
std::atomic<bool> _touchCaptured { false };
std::atomic<bool> _wheelCaptured { false };
std::atomic<bool> _actionsCaptured { false };
QMutex _runningDevicesMutex;
};
}

View file

@ -24,6 +24,8 @@ public:
AndConditional(Conditional::Pointer& first, Conditional::Pointer& second)
: _children({ first, second }) {}
virtual ~AndConditional() {}
virtual bool satisfied() override;
private:

View file

@ -18,6 +18,7 @@ namespace controller {
class EndpointConditional : public Conditional {
public:
EndpointConditional(Endpoint::Pointer endpoint) : _endpoint(endpoint) {}
virtual ~EndpointConditional() {}
virtual bool satisfied() override { return _endpoint && _endpoint->peek() != 0.0f; }
private:
Endpoint::Pointer _endpoint;

View file

@ -19,6 +19,7 @@ namespace controller {
using Pointer = std::shared_ptr<NotConditional>;
NotConditional(Conditional::Pointer operand) : _operand(operand) { }
virtual ~NotConditional() {}
virtual bool satisfied() override;

View file

@ -18,6 +18,7 @@ class ClampFilter : public Filter {
REGISTER_FILTER_CLASS(ClampFilter);
public:
ClampFilter(float min = 0.0, float max = 1.0) : _min(min), _max(max) {};
virtual ~ClampFilter() {}
virtual float apply(float value) const override {
return glm::clamp(value, _min, _max);
}

View file

@ -18,6 +18,7 @@ class ConstrainToIntegerFilter : public Filter {
REGISTER_FILTER_CLASS(ConstrainToIntegerFilter);
public:
ConstrainToIntegerFilter() {};
virtual ~ConstrainToIntegerFilter() {}
virtual float apply(float value) const override {
return glm::sign(value);

View file

@ -18,6 +18,7 @@ class ConstrainToPositiveIntegerFilter : public Filter {
REGISTER_FILTER_CLASS(ConstrainToPositiveIntegerFilter);
public:
ConstrainToPositiveIntegerFilter() {};
virtual ~ConstrainToPositiveIntegerFilter() {};
virtual float apply(float value) const override {
return (value <= 0.0f) ? 0.0f : 1.0f;

View file

@ -18,6 +18,7 @@ class DeadZoneFilter : public Filter {
REGISTER_FILTER_CLASS(DeadZoneFilter);
public:
DeadZoneFilter(float min = 0.0) : _min(min) {};
virtual ~DeadZoneFilter() {}
virtual float apply(float value) const override;

View file

@ -20,6 +20,7 @@ namespace controller {
ExponentialSmoothingFilter() {}
ExponentialSmoothingFilter(float rotationConstant, float translationConstant) :
_translationConstant(translationConstant), _rotationConstant(rotationConstant) {}
virtual ~ExponentialSmoothingFilter() {}
float apply(float value) const override { return value; }
Pose apply(Pose value) const override;

View file

@ -18,6 +18,7 @@ class HysteresisFilter : public Filter {
REGISTER_FILTER_CLASS(HysteresisFilter);
public:
HysteresisFilter(float min = 0.25, float max = 0.75);
virtual ~HysteresisFilter() {}
virtual float apply(float value) const override;
virtual Pose apply(Pose value) const override { return value; }

Some files were not shown because too many files have changed in this diff Show more