());
+ }
+ });
+
+}
+
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeBeforeEnterBackground(JNIEnv *env, jobject obj) {
AndroidHelper::instance().notifyBeforeEnterBackground();
@@ -355,5 +546,11 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_WebViewActivity_nativeProcessU
AndroidHelper::instance().processURL(QString::fromUtf8(nativeString));
}
+JNIEXPORT void JNICALL
+Java_io_highfidelity_hifiinterface_receiver_HeadsetStateReceiver_notifyHeadsetOn(JNIEnv *env,
+ jobject instance,
+ jboolean pluggedIn) {
+ AndroidHelper::instance().notifyHeadsetOn(pluggedIn);
+}
}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/HifiUtils.java b/android/app/src/main/java/io/highfidelity/hifiinterface/HifiUtils.java
index f92cd0a385..a85e18d9a9 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/HifiUtils.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/HifiUtils.java
@@ -64,4 +64,10 @@ public class HifiUtils {
public native String protocolVersionSignature();
+ public native boolean isUserLoggedIn();
+
+ public native void updateHifiSetting(String group, String key, boolean value);
+ public native boolean getHifiSettingBoolean(String group, String key, boolean defaultValue);
+
+ public native boolean isKeepingLoggedIn();
}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/InterfaceActivity.java b/android/app/src/main/java/io/highfidelity/hifiinterface/InterfaceActivity.java
index f161783d6a..50aea59663 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/InterfaceActivity.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/InterfaceActivity.java
@@ -13,6 +13,7 @@ package io.highfidelity.hifiinterface;
import android.content.Context;
import android.content.Intent;
+import android.content.IntentFilter;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
@@ -38,8 +39,10 @@ import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import io.highfidelity.hifiinterface.fragment.WebViewFragment;
+import io.highfidelity.hifiinterface.receiver.HeadsetStateReceiver;
/*import com.google.vr.cardboard.DisplaySynchronizer;
import com.google.vr.cardboard.DisplayUtils;
@@ -55,6 +58,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
private static final int NORMAL_DPI = 160;
private Vibrator mVibrator;
+ private HeadsetStateReceiver headsetStateReceiver;
//public static native void handleHifiURL(String hifiURLString);
private native long nativeOnCreate(InterfaceActivity instance, AssetManager assetManager);
@@ -65,13 +69,14 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
private native void nativeEnterBackground();
private native void nativeEnterForeground();
private native long nativeOnExitVr();
+ private native void nativeInitAfterAppLoaded();
private AssetManager assetManager;
private static boolean inVrMode;
private boolean nativeEnterBackgroundCallEnqueued = false;
- private SlidingDrawer webSlidingDrawer;
+ private SlidingDrawer mWebSlidingDrawer;
// private GvrApi gvrApi;
// Opaque native pointer to the Application C++ object.
// This object is owned by the InterfaceActivity instance and passed to the native methods.
@@ -111,17 +116,6 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
//nativeGvrApi =
nativeOnCreate(this, assetManager /*, gvrApi.getNativeGvrContext()*/);
- Point size = new Point();
- getWindowManager().getDefaultDisplay().getRealSize(size);
-
- try {
- PackageInfo pInfo = this.getPackageManager().getPackageInfo(getPackageName(), 0);
- String version = pInfo.versionName;
-// setAppVersion(version);
- } catch (PackageManager.NameNotFoundException e) {
- Log.e("GVR", "Error getting application version", e);
- }
-
final View rootView = getWindow().getDecorView().findViewById(android.R.id.content);
// This is a workaround to hide the menu bar when the virtual keyboard is shown from Qt
@@ -132,25 +126,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
});
startActivity(new Intent(this, SplashActivity.class));
mVibrator = (Vibrator) this.getSystemService(VIBRATOR_SERVICE);
-
- FrameLayout mainLayout = findViewById(android.R.id.content);
- LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
- webSlidingDrawer = (SlidingDrawer) inflater.inflate(R.layout.web_drawer, mainLayout, false);
- QtLayout qtLayout = (QtLayout) mainLayout.getChildAt(0);
- QtLayout.LayoutParams layoutParams = new QtLayout.LayoutParams(webSlidingDrawer.getLayoutParams());
- webSlidingDrawer.setOnDrawerCloseListener(() -> {
- WebViewFragment webViewFragment = (WebViewFragment) getFragmentManager().findFragmentByTag("webViewFragment");
- webViewFragment.close();
- });
- int widthPx = Math.max(size.x, size.y);
- int heightPx = Math.min(size.x, size.y);
-
- layoutParams.x = (int) (widthPx - WEB_DRAWER_RIGHT_MARGIN * getResources().getDisplayMetrics().xdpi / NORMAL_DPI);
- layoutParams.y = (int) (heightPx - WEB_DRAWER_BOTTOM_MARGIN * getResources().getDisplayMetrics().ydpi / NORMAL_DPI);
-
- layoutParams.resolveLayoutDirection(View.LAYOUT_DIRECTION_RTL);
- qtLayout.addView(webSlidingDrawer, layoutParams);
- webSlidingDrawer.setVisibility(View.GONE);
+ headsetStateReceiver = new HeadsetStateReceiver();
}
@Override
@@ -161,6 +137,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
} else {
nativeEnterBackground();
}
+ unregisterReceiver(headsetStateReceiver);
//gvrApi.pauseTracking();
}
@@ -183,6 +160,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
nativeEnterForeground();
surfacesWorkaround();
keepInterfaceRunning = false;
+ registerReceiver(headsetStateReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
//gvrApi.resumeTracking();
}
@@ -280,14 +258,47 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
protected void onNewIntent(Intent intent) {
super.onNewIntent(intent);
if (intent.hasExtra(DOMAIN_URL)) {
- webSlidingDrawer.setVisibility(View.GONE);
+ hideWebDrawer();
nativeGotoUrl(intent.getStringExtra(DOMAIN_URL));
} else if (intent.hasExtra(EXTRA_GOTO_USERNAME)) {
- webSlidingDrawer.setVisibility(View.GONE);
+ hideWebDrawer();
nativeGoToUser(intent.getStringExtra(EXTRA_GOTO_USERNAME));
}
}
+ private void hideWebDrawer() {
+ if (mWebSlidingDrawer != null) {
+ mWebSlidingDrawer.setVisibility(View.GONE);
+ }
+ }
+
+ public void showWebDrawer() {
+ if (mWebSlidingDrawer == null) {
+ FrameLayout mainLayout = findViewById(android.R.id.content);
+ LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
+ QtLayout qtLayout = (QtLayout) mainLayout.getChildAt(0);
+ mWebSlidingDrawer = (SlidingDrawer) inflater.inflate(R.layout.web_drawer, mainLayout, false);
+
+ QtLayout.LayoutParams layoutParams = new QtLayout.LayoutParams(mWebSlidingDrawer.getLayoutParams());
+ mWebSlidingDrawer.setOnDrawerCloseListener(() -> {
+ WebViewFragment webViewFragment = (WebViewFragment) getFragmentManager().findFragmentByTag("webViewFragment");
+ webViewFragment.close();
+ });
+
+ Point size = new Point();
+ getWindowManager().getDefaultDisplay().getRealSize(size);
+ int widthPx = Math.max(size.x, size.y);
+ int heightPx = Math.min(size.x, size.y);
+
+ layoutParams.x = (int) (widthPx - WEB_DRAWER_RIGHT_MARGIN * getResources().getDisplayMetrics().xdpi / NORMAL_DPI);
+ layoutParams.y = (int) (heightPx - WEB_DRAWER_BOTTOM_MARGIN * getResources().getDisplayMetrics().ydpi / NORMAL_DPI);
+
+ layoutParams.resolveLayoutDirection(View.LAYOUT_DIRECTION_RTL);
+ qtLayout.addView(mWebSlidingDrawer, layoutParams);
+ }
+ mWebSlidingDrawer.setVisibility(View.VISIBLE);
+ }
+
public void openAndroidActivity(String activityName, boolean backToScene) {
openAndroidActivity(activityName, backToScene, null);
}
@@ -296,29 +307,37 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
switch (activityName) {
case "Home":
case "Privacy Policy":
- case "Login": {
nativeBeforeEnterBackground();
Intent intent = new Intent(this, MainActivity.class);
intent.putExtra(MainActivity.EXTRA_FRAGMENT, activityName);
intent.putExtra(MainActivity.EXTRA_BACK_TO_SCENE, backToScene);
startActivity(intent);
break;
- }
+ case "Login":
+ nativeBeforeEnterBackground();
+ Intent loginIntent = new Intent(this, LoginMenuActivity.class);
+ loginIntent.putExtra(LoginMenuActivity.EXTRA_BACK_TO_SCENE, backToScene);
+ loginIntent.putExtra(LoginMenuActivity.EXTRA_BACK_ON_SKIP, true);
+ if (args != null && args.containsKey(DOMAIN_URL)) {
+ loginIntent.putExtra(LoginMenuActivity.EXTRA_DOMAIN_URL, (String) args.get(DOMAIN_URL));
+ }
+ startActivity(loginIntent);
+ break;
case "WebView":
runOnUiThread(() -> {
- webSlidingDrawer.setVisibility(View.VISIBLE);
- if (!webSlidingDrawer.isOpened()) {
- webSlidingDrawer.animateOpen();
+ showWebDrawer();
+ if (!mWebSlidingDrawer.isOpened()) {
+ mWebSlidingDrawer.animateOpen();
}
if (args != null && args.containsKey(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_URL)) {
WebViewFragment webViewFragment = (WebViewFragment) getFragmentManager().findFragmentByTag("webViewFragment");
webViewFragment.loadUrl((String) args.get(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_URL), true);
webViewFragment.setToolbarVisible(true);
webViewFragment.setCloseAction(() -> {
- if (webSlidingDrawer.isOpened()) {
- webSlidingDrawer.animateClose();
+ if (mWebSlidingDrawer.isOpened()) {
+ mWebSlidingDrawer.animateClose();
}
- webSlidingDrawer.setVisibility(View.GONE);
+ hideWebDrawer();
});
}
});
@@ -335,6 +354,9 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
if (nativeEnterBackgroundCallEnqueued) {
nativeEnterBackground();
}
+ runOnUiThread(() -> {
+ nativeInitAfterAppLoaded();
+ });
}
public void performHapticFeedback(int duration) {
@@ -361,4 +383,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
public void onExpand() {
keepInterfaceRunning = true;
}
+
+ @Override
+ public void onOAuthAuthorizeCallback(Uri uri) { }
}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/LoginMenuActivity.java b/android/app/src/main/java/io/highfidelity/hifiinterface/LoginMenuActivity.java
new file mode 100644
index 0000000000..5cb196249d
--- /dev/null
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/LoginMenuActivity.java
@@ -0,0 +1,210 @@
+package io.highfidelity.hifiinterface;
+
+
+import android.app.Fragment;
+import android.app.FragmentManager;
+import android.app.FragmentTransaction;
+import android.content.Intent;
+import android.os.Bundle;
+import android.support.v7.app.AppCompatActivity;
+import android.view.View;
+import io.highfidelity.hifiinterface.fragment.LoginFragment;
+import io.highfidelity.hifiinterface.fragment.OnBackPressedListener;
+import io.highfidelity.hifiinterface.fragment.SignupFragment;
+import io.highfidelity.hifiinterface.fragment.StartMenuFragment;
+
+public class LoginMenuActivity extends AppCompatActivity
+ implements StartMenuFragment.StartMenuInteractionListener,
+ LoginFragment.OnLoginInteractionListener,
+ SignupFragment.OnSignupInteractionListener {
+
+ /**
+ * Set EXTRA_FINISH_ON_BACK to finish the app when back button is pressed
+ */
+ public static final String EXTRA_FINISH_ON_BACK = "finishOnBack";
+
+ /**
+ * Set EXTRA_BACK_TO_SCENE to back to the scene
+ */
+ public static final String EXTRA_BACK_TO_SCENE = "backToScene";
+
+ /**
+ * Set EXTRA_BACK_ON_SKIP to finish this activity when skip button is pressed
+ */
+ public static final String EXTRA_BACK_ON_SKIP = "backOnSkip";
+
+ public static final String EXTRA_DOMAIN_URL = "url";
+
+ private boolean finishOnBack;
+ private boolean backToScene;
+ private boolean backOnSkip;
+ private String domainUrlToBack;
+
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_encourage_login);
+
+ finishOnBack = getIntent().getBooleanExtra(EXTRA_FINISH_ON_BACK, false);
+ backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
+ domainUrlToBack = getIntent().getStringExtra(EXTRA_DOMAIN_URL);
+ backOnSkip = getIntent().getBooleanExtra(EXTRA_BACK_ON_SKIP, false);
+
+ if (savedInstanceState != null) {
+ finishOnBack = savedInstanceState.getBoolean(EXTRA_FINISH_ON_BACK, false);
+ backToScene = savedInstanceState.getBoolean(EXTRA_BACK_TO_SCENE, false);
+ backOnSkip = savedInstanceState.getBoolean(EXTRA_BACK_ON_SKIP, false);
+ domainUrlToBack = savedInstanceState.getString(EXTRA_DOMAIN_URL);
+ }
+
+ loadMenuFragment();
+ }
+
+ @Override
+    protected void onSaveInstanceState(Bundle outState) {
+        super.onSaveInstanceState(outState);
+        outState.putBoolean(EXTRA_FINISH_ON_BACK, finishOnBack);
+        outState.putBoolean(EXTRA_BACK_TO_SCENE, backToScene);
+        outState.putBoolean(EXTRA_BACK_ON_SKIP, backOnSkip);
+        outState.putString(EXTRA_DOMAIN_URL, domainUrlToBack);
+    }
+
+ @Override
+ protected void onRestoreInstanceState(Bundle savedInstanceState) {
+ super.onRestoreInstanceState(savedInstanceState);
+ finishOnBack = savedInstanceState.getBoolean(EXTRA_FINISH_ON_BACK, false);
+ backToScene = savedInstanceState.getBoolean(EXTRA_BACK_TO_SCENE, false);
+ backOnSkip = savedInstanceState.getBoolean(EXTRA_BACK_ON_SKIP, false);
+ domainUrlToBack = savedInstanceState.getString(EXTRA_DOMAIN_URL);
+ }
+
+ private void loadMenuFragment() {
+ FragmentManager fragmentManager = getFragmentManager();
+ FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
+ Fragment fragment = StartMenuFragment.newInstance();
+ fragmentTransaction.replace(R.id.content_frame, fragment);
+ fragmentTransaction.addToBackStack(fragment.toString());
+ fragmentTransaction.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);
+ fragmentTransaction.commit();
+ hideStatusBar();
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+ hideStatusBar();
+ }
+
+ private void hideStatusBar() {
+ View decorView = getWindow().getDecorView();
+ // Hide the status bar.
+ int uiOptions = View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN;
+ decorView.setSystemUiVisibility(uiOptions);
+ }
+
+ @Override
+ public void onSignupButtonClicked() {
+ loadSignupFragment();
+ }
+
+ @Override
+ public void onLoginButtonClicked() {
+ loadLoginFragment(false);
+ }
+
+ @Override
+ public void onSkipLoginClicked() {
+ if (backOnSkip) {
+ onBackPressed();
+ } else {
+ loadMainActivity();
+ }
+ }
+
+ @Override
+ public void onSteamLoginButtonClicked() {
+ loadLoginFragment(true);
+ }
+
+ private void loadSignupFragment() {
+ FragmentManager fragmentManager = getFragmentManager();
+ FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
+ Fragment fragment = SignupFragment.newInstance();
+ String tag = getString(R.string.tagFragmentSignup);
+ fragmentTransaction.replace(R.id.content_frame, fragment, tag);
+ fragmentTransaction.addToBackStack(tag);
+ fragmentTransaction.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);
+ fragmentTransaction.commit();
+ hideStatusBar();
+ }
+
+ private void loadLoginFragment(boolean useOauth) {
+ FragmentManager fragmentManager = getFragmentManager();
+ FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
+ Fragment fragment = LoginFragment.newInstance(useOauth);
+ String tag = getString(R.string.tagFragmentLogin);
+ fragmentTransaction.replace(R.id.content_frame, fragment, tag);
+ fragmentTransaction.addToBackStack(tag);
+ fragmentTransaction.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);
+ fragmentTransaction.commit();
+ hideStatusBar();
+ }
+
+ @Override
+ public void onLoginCompleted() {
+ loadMainActivity();
+ }
+
+ @Override
+ public void onCancelLogin() {
+ getFragmentManager().popBackStack();
+ }
+
+ @Override
+ public void onCancelSignup() {
+ getFragmentManager().popBackStack();
+ }
+
+ private void loadMainActivity() {
+ finish();
+ if (backToScene) {
+ backToScene = false;
+            goToDomain(domainUrlToBack != null ? domainUrlToBack : "");
+ } else {
+ startActivity(new Intent(this, MainActivity.class));
+ }
+ }
+
+ private void goToDomain(String domainUrl) {
+ Intent intent = new Intent(this, InterfaceActivity.class);
+ intent.putExtra(InterfaceActivity.DOMAIN_URL, domainUrl);
+ finish();
+ intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
+ startActivity(intent);
+ }
+
+
+ @Override
+ public void onSignupCompleted() {
+ loadMainActivity();
+ }
+
+ @Override
+ public void onBackPressed() {
+ FragmentManager fm = getFragmentManager();
+ int index = fm.getBackStackEntryCount() - 1;
+ if (index > 0) {
+ FragmentManager.BackStackEntry backEntry = fm.getBackStackEntryAt(index);
+ String tag = backEntry.getName();
+ Fragment topFragment = getFragmentManager().findFragmentByTag(tag);
+ if (!(topFragment instanceof OnBackPressedListener) ||
+ !((OnBackPressedListener) topFragment).doBack()) {
+ super.onBackPressed();
+ }
+        } else if (finishOnBack) {
+ finishAffinity();
+ } else {
+ finish();
+ }
+ }
+}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/MainActivity.java b/android/app/src/main/java/io/highfidelity/hifiinterface/MainActivity.java
index db6f0fca24..e17b530f1c 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/MainActivity.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/MainActivity.java
@@ -1,5 +1,6 @@
package io.highfidelity.hifiinterface;
+import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
@@ -31,12 +32,12 @@ import com.squareup.picasso.Picasso;
import io.highfidelity.hifiinterface.fragment.FriendsFragment;
import io.highfidelity.hifiinterface.fragment.HomeFragment;
-import io.highfidelity.hifiinterface.fragment.LoginFragment;
import io.highfidelity.hifiinterface.fragment.PolicyFragment;
+import io.highfidelity.hifiinterface.fragment.SettingsFragment;
+import io.highfidelity.hifiinterface.fragment.SignupFragment;
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener,
- LoginFragment.OnLoginInteractionListener,
HomeFragment.OnHomeInteractionListener,
FriendsFragment.OnHomeInteractionListener {
@@ -44,12 +45,13 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
public static final String DEFAULT_FRAGMENT = "Home";
public static final String EXTRA_FRAGMENT = "fragment";
public static final String EXTRA_BACK_TO_SCENE = "backToScene";
+ public static final String EXTRA_BACK_TO_URL = "url";
private String TAG = "HighFidelity";
- public native boolean nativeIsLoggedIn();
- public native void nativeLogout();
- public native String nativeGetDisplayName();
+ public native void logout();
+ public native void setUsernameChangedListener(Activity usernameChangedListener);
+ public native String getUsername();
private DrawerLayout mDrawerLayout;
private NavigationView mNavigationView;
@@ -61,6 +63,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
private MenuItem mPeopleMenuItem;
private boolean backToScene;
+ private String backToUrl;
@Override
protected void onCreate(Bundle savedInstanceState) {
@@ -80,6 +83,8 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
mPeopleMenuItem = mNavigationView.getMenu().findItem(R.id.action_people);
+ updateDebugMenu(mNavigationView.getMenu());
+
Toolbar toolbar = findViewById(R.id.toolbar);
toolbar.setTitleTextAppearance(this, R.style.HomeActionBarTitleStyle);
setSupportActionBar(toolbar);
@@ -102,17 +107,23 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
loadFragment(DEFAULT_FRAGMENT);
}
- if (getIntent().hasExtra(EXTRA_BACK_TO_SCENE)) {
- backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
+ backToScene = getIntent().getBooleanExtra(EXTRA_BACK_TO_SCENE, false);
+ backToUrl = getIntent().getStringExtra(EXTRA_BACK_TO_URL);
+ }
+ }
+
+ private void updateDebugMenu(Menu menu) {
+ if (BuildConfig.DEBUG) {
+ for (int i=0; i < menu.size(); i++) {
+ if (menu.getItem(i).getItemId() == R.id.action_debug_settings) {
+ menu.getItem(i).setVisible(true);
+ }
}
}
}
private void loadFragment(String fragment) {
switch (fragment) {
- case "Login":
- loadLoginFragment();
- break;
case "Home":
loadHomeFragment(true);
break;
@@ -130,28 +141,35 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
private void loadHomeFragment(boolean addToBackStack) {
Fragment fragment = HomeFragment.newInstance();
- loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack);
+ loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack, true);
}
- private void loadLoginFragment() {
- Fragment fragment = LoginFragment.newInstance();
-
- loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true);
+ private void startLoginMenuActivity() {
+ Intent intent = new Intent(this, LoginMenuActivity.class);
+ intent.putExtra(LoginMenuActivity.EXTRA_BACK_ON_SKIP, true);
+ startActivity(intent);
}
private void loadPrivacyPolicyFragment() {
Fragment fragment = PolicyFragment.newInstance();
- loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true);
+ loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true, true);
}
private void loadPeopleFragment() {
Fragment fragment = FriendsFragment.newInstance();
- loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true);
+ loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true, true);
}
- private void loadFragment(Fragment fragment, String title, String tag, boolean addToBackStack) {
+ private void loadSettingsFragment() {
+ SettingsFragment fragment = SettingsFragment.newInstance();
+
+ loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true, true);
+ }
+
+
+ private void loadFragment(Fragment newFragment, String title, String tag, boolean addToBackStack, boolean goBackUntilHome) {
FragmentManager fragmentManager = getFragmentManager();
// check if it's the same fragment
@@ -163,17 +181,19 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
return; // cancel as we are already in that fragment
}
- // go back until first transaction
- int backStackEntryCount = fragmentManager.getBackStackEntryCount();
- for (int i = 0; i < backStackEntryCount - 1; i++) {
- fragmentManager.popBackStackImmediate();
+ if (goBackUntilHome) {
+ // go back until first transaction
+ int backStackEntryCount = fragmentManager.getBackStackEntryCount();
+ for (int i = 0; i < backStackEntryCount - 1; i++) {
+ fragmentManager.popBackStackImmediate();
+ }
}
// this case is when we wanted to go home.. rollback already did that!
// But asking for a new Home fragment makes it easier to have an updated list so we let it to continue
FragmentTransaction ft = fragmentManager.beginTransaction();
- ft.replace(R.id.content_frame, fragment, tag);
+ ft.replace(R.id.content_frame, newFragment, tag);
if (addToBackStack) {
ft.addToBackStack(title);
@@ -185,7 +205,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
private void updateLoginMenu() {
- if (nativeIsLoggedIn()) {
+ if (HifiUtils.getInstance().isUserLoggedIn()) {
mLoginPanel.setVisibility(View.GONE);
mProfilePanel.setVisibility(View.VISIBLE);
mLogoutOption.setVisibility(View.VISIBLE);
@@ -201,7 +221,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}
private void updateProfileHeader() {
- updateProfileHeader(nativeGetDisplayName());
+ updateProfileHeader(getUsername());
}
private void updateProfileHeader(String username) {
if (!username.isEmpty()) {
@@ -241,6 +261,9 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
case R.id.action_people:
loadPeopleFragment();
return true;
+ case R.id.action_debug_settings:
+ loadSettingsFragment();
+ return true;
}
return false;
}
@@ -248,15 +271,22 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
@Override
protected void onStart() {
super.onStart();
+ setUsernameChangedListener(this);
updateLoginMenu();
}
+ @Override
+ protected void onStop() {
+ super.onStop();
+ setUsernameChangedListener(null);
+ }
+
public void onLoginClicked(View view) {
- loadLoginFragment();
+ startLoginMenuActivity();
}
public void onLogoutClicked(View view) {
- nativeLogout();
+ logout();
updateLoginMenu();
exitLoggedInFragment();
@@ -278,7 +308,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}
private void goToLastLocation() {
- goToDomain("");
+ goToDomain(backToUrl != null? backToUrl : "");
}
private void goToDomain(String domainUrl) {
@@ -297,16 +327,6 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
startActivity(intent);
}
- @Override
- public void onLoginCompleted() {
- loadHomeFragment(false);
- updateLoginMenu();
- if (backToScene) {
- backToScene = false;
- goToLastLocation();
- }
- }
-
public void handleUsernameChanged(String username) {
runOnUiThread(() -> updateProfileHeader(username));
}
@@ -351,7 +371,6 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
public void onBackPressed() {
// if a fragment needs to internally manage back presses..
FragmentManager fm = getFragmentManager();
- Log.d("[BACK]", "getBackStackEntryCount " + fm.getBackStackEntryCount());
Fragment friendsFragment = fm.findFragmentByTag(getString(R.string.tagFragmentPeople));
if (friendsFragment != null && friendsFragment instanceof FriendsFragment) {
if (((FriendsFragment) friendsFragment).onBackPressed()) {
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/SplashActivity.java b/android/app/src/main/java/io/highfidelity/hifiinterface/SplashActivity.java
index e0aa967aaa..bb42467ace 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/SplashActivity.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/SplashActivity.java
@@ -3,7 +3,6 @@ package io.highfidelity.hifiinterface;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
-import android.os.Handler;
import android.view.View;
public class SplashActivity extends Activity {
@@ -37,7 +36,13 @@ public class SplashActivity extends Activity {
}
public void onAppLoadedComplete() {
- startActivity(new Intent(this, MainActivity.class));
+ if (HifiUtils.getInstance().isUserLoggedIn()) {
+ startActivity(new Intent(this, MainActivity.class));
+ } else {
+ Intent menuIntent = new Intent(this, LoginMenuActivity.class);
+ menuIntent.putExtra(LoginMenuActivity.EXTRA_FINISH_ON_BACK, true);
+ startActivity(menuIntent);
+ }
SplashActivity.this.finish();
}
}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/WebViewActivity.java b/android/app/src/main/java/io/highfidelity/hifiinterface/WebViewActivity.java
index 5d65bcad51..e906c4b734 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/WebViewActivity.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/WebViewActivity.java
@@ -28,11 +28,18 @@ import java.net.MalformedURLException;
import java.net.URL;
import io.highfidelity.hifiinterface.fragment.WebViewFragment;
+import io.highfidelity.hifiinterface.fragment.WebViewFragment.OnWebViewInteractionListener;
-public class WebViewActivity extends Activity implements WebViewFragment.OnWebViewInteractionListener {
+public class WebViewActivity extends Activity implements OnWebViewInteractionListener {
public static final String WEB_VIEW_ACTIVITY_EXTRA_URL = "url";
+ public static final String WEB_VIEW_ACTIVITY_EXTRA_CLEAR_COOKIES = "clear_cookies";
+ public static final String RESULT_OAUTH_CODE = "code";
+ public static final String RESULT_OAUTH_STATE = "state";
+
private static final String FRAGMENT_TAG = "WebViewActivity_WebFragment";
+ private static final String OAUTH_CODE = "code";
+ private static final String OAUTH_STATE = "state";
private native void nativeProcessURL(String url);
@@ -47,14 +54,15 @@ public class WebViewActivity extends Activity implements WebViewFragment.OnWebVi
mActionBar = getActionBar();
mActionBar.setDisplayHomeAsUpEnabled(true);
- loadWebViewFragment(getIntent().getStringExtra(WEB_VIEW_ACTIVITY_EXTRA_URL));
+ loadWebViewFragment(getIntent().getStringExtra(WEB_VIEW_ACTIVITY_EXTRA_URL), getIntent().getBooleanExtra(WEB_VIEW_ACTIVITY_EXTRA_CLEAR_COOKIES, false));
}
- private void loadWebViewFragment(String url) {
+ private void loadWebViewFragment(String url, boolean clearCookies) {
WebViewFragment fragment = WebViewFragment.newInstance();
Bundle bundle = new Bundle();
bundle.putString(WebViewFragment.URL, url);
bundle.putBoolean(WebViewFragment.TOOLBAR_VISIBLE, false);
+ bundle.putBoolean(WebViewFragment.CLEAR_COOKIES, clearCookies);
fragment.setArguments(bundle);
FragmentManager fragmentManager = getFragmentManager();
FragmentTransaction ft = fragmentManager.beginTransaction();
@@ -131,4 +139,13 @@ public class WebViewActivity extends Activity implements WebViewFragment.OnWebVi
@Override
public void onExpand() { }
+ @Override
+ public void onOAuthAuthorizeCallback(Uri uri) {
+ Intent result = new Intent();
+ result.putExtra(RESULT_OAUTH_CODE, uri.getQueryParameter(OAUTH_CODE));
+ result.putExtra(RESULT_OAUTH_STATE, uri.getQueryParameter(OAUTH_STATE));
+ setResult(Activity.RESULT_OK, result);
+ finish();
+ }
+
}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/FriendsFragment.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/FriendsFragment.java
index 2a008d7950..e19a9c5a7a 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/FriendsFragment.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/FriendsFragment.java
@@ -23,8 +23,6 @@ import io.highfidelity.hifiinterface.view.UserListAdapter;
public class FriendsFragment extends Fragment {
- public native boolean nativeIsLoggedIn();
-
public native String nativeGetAccessToken();
private RecyclerView mUsersView;
@@ -98,13 +96,17 @@ public class FriendsFragment extends Fragment {
mUsersAdapter.setListener(new UserListAdapter.AdapterListener() {
@Override
- public void onEmptyAdapter() {
- mSwipeRefreshLayout.setRefreshing(false);
+ public void onEmptyAdapter(boolean shouldStopRefreshing) {
+ if (shouldStopRefreshing) {
+ mSwipeRefreshLayout.setRefreshing(false);
+ }
}
@Override
- public void onNonEmptyAdapter() {
- mSwipeRefreshLayout.setRefreshing(false);
+ public void onNonEmptyAdapter(boolean shouldStopRefreshing) {
+ if (shouldStopRefreshing) {
+ mSwipeRefreshLayout.setRefreshing(false);
+ }
}
@Override
@@ -115,6 +117,8 @@ public class FriendsFragment extends Fragment {
mUsersView.setAdapter(mUsersAdapter);
+ mUsersAdapter.startLoad();
+
mSlidingUpPanelLayout.setFadeOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/HomeFragment.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/HomeFragment.java
index 7bd373cf1d..86b8625cfe 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/HomeFragment.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/HomeFragment.java
@@ -76,18 +76,22 @@ public class HomeFragment extends Fragment {
});
mDomainAdapter.setListener(new DomainAdapter.AdapterListener() {
@Override
- public void onEmptyAdapter() {
+ public void onEmptyAdapter(boolean shouldStopRefreshing) {
searchNoResultsView.setText(R.string.search_no_results);
searchNoResultsView.setVisibility(View.VISIBLE);
mDomainsView.setVisibility(View.GONE);
- mSwipeRefreshLayout.setRefreshing(false);
+ if (shouldStopRefreshing) {
+ mSwipeRefreshLayout.setRefreshing(false);
+ }
}
@Override
- public void onNonEmptyAdapter() {
+ public void onNonEmptyAdapter(boolean shouldStopRefreshing) {
searchNoResultsView.setVisibility(View.GONE);
mDomainsView.setVisibility(View.VISIBLE);
- mSwipeRefreshLayout.setRefreshing(false);
+ if (shouldStopRefreshing) {
+ mSwipeRefreshLayout.setRefreshing(false);
+ }
}
@Override
@@ -96,11 +100,20 @@ public class HomeFragment extends Fragment {
}
});
mDomainsView.setAdapter(mDomainAdapter);
+ mDomainAdapter.startLoad();
mSearchView = rootView.findViewById(R.id.searchView);
mSearchIconView = rootView.findViewById(R.id.search_mag_icon);
mClearSearch = rootView.findViewById(R.id.search_clear);
+ getActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
+
+ return rootView;
+ }
+
+ @Override
+ public void onStart() {
+ super.onStart();
mSearchView.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {}
@@ -142,10 +155,6 @@ public class HomeFragment extends Fragment {
mDomainAdapter.loadDomains(mSearchView.getText().toString(), true);
}
});
-
- getActivity().getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
-
- return rootView;
}
@Override
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/LoginFragment.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/LoginFragment.java
index f29c237ed7..28406d5986 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/LoginFragment.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/LoginFragment.java
@@ -2,36 +2,65 @@ package io.highfidelity.hifiinterface.fragment;
import android.app.Activity;
import android.app.Fragment;
-import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
-import android.text.Editable;
-import android.text.TextWatcher;
+import android.support.annotation.Nullable;
import android.util.Log;
+import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
+import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.TextView;
-import io.highfidelity.hifiinterface.R;
+import org.qtproject.qt5.android.QtNative;
-public class LoginFragment extends Fragment {
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.Random;
+
+import io.highfidelity.hifiinterface.BuildConfig;
+import io.highfidelity.hifiinterface.HifiUtils;
+import io.highfidelity.hifiinterface.R;
+import io.highfidelity.hifiinterface.WebViewActivity;
+
+import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
+import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;
+
+public class LoginFragment extends Fragment
+ implements OnBackPressedListener {
+
+ private static final String ARG_USE_OAUTH = "use_oauth";
+ private static final String TAG = "Interface";
+
+ private final String OAUTH_CLIENT_ID = BuildConfig.OAUTH_CLIENT_ID;
+ private final String OAUTH_REDIRECT_URI = BuildConfig.OAUTH_REDIRECT_URI;
+ private final String OAUTH_AUTHORIZE_BASE_URL = "https://highfidelity.com/oauth/authorize";
+ private static final int OAUTH_AUTHORIZE_REQUEST = 1;
private EditText mUsername;
private EditText mPassword;
private TextView mError;
- private TextView mForgotPassword;
private Button mLoginButton;
+ private CheckBox mKeepMeLoggedInCheckbox;
+ private ViewGroup mLoginForm;
+ private ViewGroup mLoggingInFrame;
+ private ViewGroup mLoggedInFrame;
+ private boolean mLoginInProgress;
+ private boolean mLoginSuccess;
+ private boolean mUseOauth;
+ private String mOauthState;
- private ProgressDialog mDialog;
+ public native void login(String username, String password, boolean keepLoggedIn);
+ private native void retrieveAccessToken(String authCode, String clientId, String clientSecret, String redirectUri);
- public native void nativeLogin(String username, String password, Activity usernameChangedListener);
+ public native void cancelLogin();
private LoginFragment.OnLoginInteractionListener mListener;
@@ -39,11 +68,22 @@ public class LoginFragment extends Fragment {
// Required empty public constructor
}
- public static LoginFragment newInstance() {
+ public static LoginFragment newInstance(boolean useOauth) {
LoginFragment fragment = new LoginFragment();
+ Bundle args = new Bundle();
+ args.putBoolean(ARG_USE_OAUTH, useOauth);
+ fragment.setArguments(args);
return fragment;
}
+ @Override
+ public void onCreate(@Nullable Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ if (getArguments() != null) {
+ mUseOauth = getArguments().getBoolean(ARG_USE_OAUTH, false);
+ }
+ }
+
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
@@ -53,58 +93,29 @@ public class LoginFragment extends Fragment {
mPassword = rootView.findViewById(R.id.password);
mError = rootView.findViewById(R.id.error);
mLoginButton = rootView.findViewById(R.id.loginButton);
- mForgotPassword = rootView.findViewById(R.id.forgotPassword);
+ mLoginForm = rootView.findViewById(R.id.loginForm);
+ mLoggingInFrame = rootView.findViewById(R.id.loggingInFrame);
+ mLoggedInFrame = rootView.findViewById(R.id.loggedInFrame);
+ mKeepMeLoggedInCheckbox = rootView.findViewById(R.id.keepMeLoggedIn);
- mUsername.addTextChangedListener(new TextWatcher() {
- boolean ignoreNextChange = false;
- boolean hadBlankSpace = false;
- @Override
- public void beforeTextChanged(CharSequence charSequence, int start, int count, int after) {
- hadBlankSpace = charSequence.length() > 0 && charSequence.charAt(charSequence.length()-1) == ' ';
- }
+ rootView.findViewById(R.id.forgotPassword).setOnClickListener(view -> onForgotPasswordClicked());
- @Override
- public void onTextChanged(CharSequence charSequence, int start, int count, int after) {
+ rootView.findViewById(R.id.cancel).setOnClickListener(view -> onCancelLogin());
- }
+ rootView.findViewById(R.id.getStarted).setOnClickListener(view -> onGetStartedClicked());
- @Override
- public void afterTextChanged(Editable editable) {
- if (!ignoreNextChange) {
- ignoreNextChange = true;
- boolean spaceFound = false;
- for (int i = 0; i < editable.length(); i++) {
- if (editable.charAt(i) == ' ') {
- spaceFound=true;
- editable.delete(i, i + 1);
- i--;
- }
- }
+ mLoginButton.setOnClickListener(view -> onLoginButtonClicked());
- if (hadBlankSpace && !spaceFound && editable.length() > 0) {
- editable.delete(editable.length()-1, editable.length());
- }
+ rootView.findViewById(R.id.takeMeInWorld).setOnClickListener(view -> skipLogin());
+ mPassword.setOnEditorActionListener((textView, actionId, keyEvent) -> onPasswordEditorAction(textView, actionId, keyEvent));
- editable.append(' ');
- ignoreNextChange = false;
- }
+ mKeepMeLoggedInCheckbox.setChecked(HifiUtils.getInstance().isKeepingLoggedIn());
- }
- });
-
-
- mLoginButton.setOnClickListener(view -> login());
-
- mForgotPassword.setOnClickListener(view -> forgotPassword());
-
- mPassword.setOnEditorActionListener(
- (textView, actionId, keyEvent) -> {
- if (actionId == EditorInfo.IME_ACTION_DONE) {
- mLoginButton.performClick();
- return true;
- }
- return false;
- });
+ if (mUseOauth) {
+ openWebForAuthorization();
+ } else {
+ showLoginForm();
+ }
return rootView;
}
@@ -125,14 +136,67 @@ public class LoginFragment extends Fragment {
mListener = null;
}
+ @Override
+ public void onResume() {
+ super.onResume();
+ // This hack keeps the Qt threads running even after the app returns from the background
+ QtNative.setApplicationState(ApplicationActive);
+ }
+
@Override
public void onStop() {
super.onStop();
- cancelActivityIndicator();
+ // Leave the Qt app paused
+ QtNative.setApplicationState(ApplicationInactive);
hideKeyboard();
}
- public void login() {
+ @Override
+ public void onActivityResult(int requestCode, int resultCode, Intent data) {
+ if (requestCode == OAUTH_AUTHORIZE_REQUEST) {
+ if (resultCode == Activity.RESULT_OK) {
+ String authCode = data.getStringExtra(WebViewActivity.RESULT_OAUTH_CODE);
+ String state = data.getStringExtra(WebViewActivity.RESULT_OAUTH_STATE);
+ if (state != null && state.equals(mOauthState) && mListener != null) {
+ mOauthState = null;
+ showActivityIndicator();
+ mLoginInProgress = true;
+ retrieveAccessToken(authCode, BuildConfig.OAUTH_CLIENT_ID, BuildConfig.OAUTH_CLIENT_SECRET, BuildConfig.OAUTH_REDIRECT_URI);
+ }
+ } else {
+ onCancelLogin();
+ }
+ }
+
+ }
+
+ private void onCancelLogin() {
+ if (mListener != null) {
+ mListener.onCancelLogin();
+ }
+ }
+
+ private boolean onPasswordEditorAction(TextView textView, int actionId, KeyEvent keyEvent) {
+ if (actionId == EditorInfo.IME_ACTION_DONE) {
+ mLoginButton.performClick();
+ return true;
+ }
+ return false;
+ }
+
+ private void skipLogin() {
+ if (mListener != null) {
+ mListener.onSkipLoginClicked();
+ }
+ }
+
+ private void onGetStartedClicked() {
+ if (mListener != null) {
+ mListener.onLoginCompleted();
+ }
+ }
+
+ public void onLoginButtonClicked() {
String username = mUsername.getText().toString().trim();
String password = mPassword.getText().toString();
hideKeyboard();
@@ -142,7 +206,10 @@ public class LoginFragment extends Fragment {
mLoginButton.setEnabled(false);
hideError();
showActivityIndicator();
- nativeLogin(username, password, getActivity());
+ mLoginInProgress = true;
+ mLoginSuccess = false;
+ boolean keepUserLoggedIn = mKeepMeLoggedInCheckbox.isChecked();
+ login(username, password, keepUserLoggedIn);
}
}
@@ -154,25 +221,32 @@ public class LoginFragment extends Fragment {
}
}
- private void forgotPassword() {
+ private void onForgotPasswordClicked() {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://highfidelity.com/users/password/new"));
startActivity(intent);
}
private void showActivityIndicator() {
- if (mDialog == null) {
- mDialog = new ProgressDialog(getContext());
- }
- mDialog.setMessage(getString(R.string.logging_in));
- mDialog.setCancelable(false);
- mDialog.show();
+ mLoginForm.setVisibility(View.GONE);
+ mLoggedInFrame.setVisibility(View.GONE);
+ mLoggingInFrame.setVisibility(View.VISIBLE);
+ mLoggingInFrame.bringToFront();
}
- private void cancelActivityIndicator() {
- if (mDialog != null) {
- mDialog.cancel();
- }
+ private void showLoginForm() {
+ mLoggingInFrame.setVisibility(View.GONE);
+ mLoggedInFrame.setVisibility(View.GONE);
+ mLoginForm.setVisibility(View.VISIBLE);
+ mLoginForm.bringToFront();
}
+
+ private void showLoggedInMessage() {
+ mLoginForm.setVisibility(View.GONE);
+ mLoggingInFrame.setVisibility(View.GONE);
+ mLoggedInFrame.setVisibility(View.VISIBLE);
+ mLoggedInFrame.bringToFront();
+ }
+
private void showError(String error) {
mError.setText(error);
mError.setVisibility(View.VISIBLE);
@@ -184,22 +258,71 @@ public class LoginFragment extends Fragment {
}
public void handleLoginCompleted(boolean success) {
- Log.d("[LOGIN]", "handleLoginCompleted " + success);
+ mLoginInProgress = false;
getActivity().runOnUiThread(() -> {
mLoginButton.setEnabled(true);
- cancelActivityIndicator();
if (success) {
- if (mListener != null) {
- mListener.onLoginCompleted();
- }
+ mLoginSuccess = true;
+ showLoggedInMessage();
} else {
- showError(getString(R.string.login_username_or_password_incorrect));
+ if (!mUseOauth) {
+ showLoginForm();
+ showError(getString(R.string.login_username_or_password_incorrect));
+ } else {
+ openWebForAuthorization();
+ }
}
});
}
+ @Override
+ public boolean doBack() {
+ if (mLoginInProgress) {
+ cancelLogin();
+ showLoginForm();
+ mLoginInProgress = false;
+ mLoginButton.setEnabled(true);
+ return true;
+ } else if (mLoginSuccess) {
+ onGetStartedClicked();
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ private void updateOauthState() {
+ // We currently only use OAuth for Steam login, so this state format is OK for now
+ mOauthState = "steam-" + Long.toString(new Random().nextLong());
+ }
+
+ private String buildAuthorizeUrl() {
+ StringBuilder sb = new StringBuilder(OAUTH_AUTHORIZE_BASE_URL);
+ sb.append("?client_id=").append(OAUTH_CLIENT_ID);
+ try {
+ String redirectUri = URLEncoder.encode(OAUTH_REDIRECT_URI, "utf-8");
+ sb.append("&redirect_uri=").append(redirectUri);
+ } catch (UnsupportedEncodingException e) {
+ Log.e(TAG, "Cannot build oauth authorization url", e);
+ }
+ sb.append("&response_type=code&scope=owner");
+ sb.append("&state=").append(mOauthState);
+ return sb.toString();
+ }
+
+ private void openWebForAuthorization() {
+ Intent openUrlIntent = new Intent(getActivity(), WebViewActivity.class);
+ updateOauthState();
+ openUrlIntent.putExtra(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_URL, buildAuthorizeUrl());
+ openUrlIntent.putExtra(WebViewActivity.WEB_VIEW_ACTIVITY_EXTRA_CLEAR_COOKIES, true);
+ startActivityForResult(openUrlIntent, OAUTH_AUTHORIZE_REQUEST);
+ }
+
+
public interface OnLoginInteractionListener {
void onLoginCompleted();
+ void onCancelLogin();
+ void onSkipLoginClicked();
}
}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/OnBackPressedListener.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/OnBackPressedListener.java
new file mode 100644
index 0000000000..c160138cea
--- /dev/null
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/OnBackPressedListener.java
@@ -0,0 +1,11 @@
+package io.highfidelity.hifiinterface.fragment;
+
+public interface OnBackPressedListener {
+
+ /**
+ * Processes the back pressed event and returns true if it was managed by this Fragment
+ * @return true if the back pressed event was handled by this Fragment, false otherwise
+ */
+ boolean doBack();
+
+}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/SettingsFragment.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/SettingsFragment.java
new file mode 100644
index 0000000000..e32dc3b996
--- /dev/null
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/SettingsFragment.java
@@ -0,0 +1,64 @@
+package io.highfidelity.hifiinterface.fragment;
+
+import android.content.SharedPreferences;
+import android.media.audiofx.AcousticEchoCanceler;
+import android.os.Bundle;
+import android.preference.PreferenceFragment;
+import android.preference.PreferenceManager;
+import android.support.annotation.Nullable;
+
+import io.highfidelity.hifiinterface.HifiUtils;
+import io.highfidelity.hifiinterface.R;
+
+public class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener {
+
+ private final String HIFI_SETTINGS_ANDROID_GROUP = "Android";
+ private final String HIFI_SETTINGS_AEC_KEY = "aec";
+ private final String PREFERENCE_KEY_AEC = "aec";
+
+ private final boolean DEFAULT_AEC_ENABLED = true;
+
+ @Override
+ public void onCreate(@Nullable Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ addPreferencesFromResource(R.xml.settings);
+ boolean aecAvailable = AcousticEchoCanceler.isAvailable();
+ PreferenceManager.setDefaultValues(getContext(), R.xml.settings, false);
+
+ if (!aecAvailable) {
+ findPreference(PREFERENCE_KEY_AEC).setEnabled(false);
+ HifiUtils.getInstance().updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, false);
+ }
+
+ getPreferenceScreen().getSharedPreferences().edit().putBoolean(PREFERENCE_KEY_AEC,
+ aecAvailable && HifiUtils.getInstance().getHifiSettingBoolean(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, DEFAULT_AEC_ENABLED)).commit();
+ }
+
+ public static SettingsFragment newInstance() {
+ SettingsFragment fragment = new SettingsFragment();
+ return fragment;
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ getPreferenceScreen().getSharedPreferences().registerOnSharedPreferenceChangeListener(this);
+ }
+
+ @Override
+ public void onPause() {
+ super.onPause();
+ getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this);
+ }
+
+ @Override
+ public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
+ switch (key) {
+ case "aec":
+ HifiUtils.getInstance().updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, sharedPreferences.getBoolean(key, false));
+ break;
+ default:
+ break;
+ }
+ }
+}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/SignupFragment.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/SignupFragment.java
new file mode 100644
index 0000000000..a11ae31fa1
--- /dev/null
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/SignupFragment.java
@@ -0,0 +1,260 @@
+package io.highfidelity.hifiinterface.fragment;
+
+import android.app.Fragment;
+import android.content.Context;
+import android.os.Bundle;
+import android.view.KeyEvent;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.inputmethod.EditorInfo;
+import android.view.inputmethod.InputMethodManager;
+import android.widget.Button;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.TextView;
+
+import org.qtproject.qt5.android.QtNative;
+
+import io.highfidelity.hifiinterface.HifiUtils;
+import io.highfidelity.hifiinterface.R;
+
+import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
+import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;
+
+public class SignupFragment extends Fragment
+ implements OnBackPressedListener {
+
+ private EditText mEmail;
+ private EditText mUsername;
+ private EditText mPassword;
+ private TextView mError;
+ private TextView mActivityText;
+ private Button mSignupButton;
+ private CheckBox mKeepMeLoggedInCheckbox;
+
+ private ViewGroup mSignupForm;
+ private ViewGroup mLoggingInFrame;
+ private ViewGroup mLoggedInFrame;
+
+ private boolean mLoginInProgress;
+ private boolean mSignupInProgress;
+ private boolean mSignupSuccess;
+
+ public native void signup(String email, String username, String password);
+ public native void cancelSignup();
+ public native void login(String username, String password, boolean keepLoggedIn);
+ public native void cancelLogin();
+
+ private SignupFragment.OnSignupInteractionListener mListener;
+
+ public SignupFragment() {
+ // Required empty public constructor
+ }
+
+ public static SignupFragment newInstance() {
+ SignupFragment fragment = new SignupFragment();
+ return fragment;
+ }
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View rootView = inflater.inflate(R.layout.fragment_signup, container, false);
+
+ mEmail = rootView.findViewById(R.id.email);
+ mUsername = rootView.findViewById(R.id.username);
+ mPassword = rootView.findViewById(R.id.password);
+ mError = rootView.findViewById(R.id.error);
+ mSignupButton = rootView.findViewById(R.id.signupButton);
+ mActivityText = rootView.findViewById(R.id.activityText);
+ mKeepMeLoggedInCheckbox = rootView.findViewById(R.id.keepMeLoggedIn);
+
+ mSignupForm = rootView.findViewById(R.id.signupForm);
+ mLoggedInFrame = rootView.findViewById(R.id.loggedInFrame);
+ mLoggingInFrame = rootView.findViewById(R.id.loggingInFrame);
+
+ rootView.findViewById(R.id.cancel).setOnClickListener(view -> onCancelSignup());
+
+ mSignupButton.setOnClickListener(view -> signup());
+
+ rootView.findViewById(R.id.getStarted).setOnClickListener(view -> onGetStartedClicked());
+
+ mPassword.setOnEditorActionListener((textView, actionId, keyEvent) -> onPasswordEditorAction(textView, actionId, keyEvent));
+
+ mKeepMeLoggedInCheckbox.setChecked(HifiUtils.getInstance().isKeepingLoggedIn());
+
+ return rootView;
+ }
+
+ @Override
+ public void onAttach(Context context) {
+ super.onAttach(context);
+ if (context instanceof OnSignupInteractionListener) {
+ mListener = (OnSignupInteractionListener) context;
+ } else {
+ throw new RuntimeException(context.toString()
+ + " must implement OnSignupInteractionListener");
+ }
+ }
+
+ @Override
+ public void onDetach() {
+ super.onDetach();
+ mListener = null;
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ // This hack keeps the Qt threads running even after the app returns from the background
+ QtNative.setApplicationState(ApplicationActive);
+ }
+
+ @Override
+ public void onStop() {
+ super.onStop();
+ // Leave the Qt app paused
+ QtNative.setApplicationState(ApplicationInactive);
+ hideKeyboard();
+ }
+
+ private boolean onPasswordEditorAction(TextView textView, int actionId, KeyEvent keyEvent) {
+ if (actionId == EditorInfo.IME_ACTION_DONE) {
+ mSignupButton.performClick();
+ return true;
+ }
+ return false;
+ }
+
+ private void onCancelSignup() {
+ if (mListener != null) {
+ mListener.onCancelSignup();
+ }
+ }
+
+ public void signup() {
+ String email = mEmail.getText().toString().trim();
+ String username = mUsername.getText().toString().trim();
+ String password = mPassword.getText().toString();
+ hideKeyboard();
+ if (email.isEmpty() || username.isEmpty() || password.isEmpty()) {
+ showError(getString(R.string.signup_email_username_or_password_incorrect));
+ } else {
+ mSignupButton.setEnabled(false);
+ hideError();
+ mActivityText.setText(R.string.creating_account);
+ showActivityIndicator();
+ mSignupInProgress = true;
+ mSignupSuccess = false;
+ signup(email, username, password);
+ }
+ }
+
+ private void hideKeyboard() {
+ View view = getActivity().getCurrentFocus();
+ if (view != null) {
+ InputMethodManager imm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
+ imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
+ }
+ }
+
+ private void showActivityIndicator() {
+ mSignupForm.setVisibility(View.GONE);
+ mLoggedInFrame.setVisibility(View.GONE);
+ mLoggingInFrame.setVisibility(View.VISIBLE);
+ }
+
+ private void showLoggedInMessage() {
+ mSignupForm.setVisibility(View.GONE);
+ mLoggingInFrame.setVisibility(View.GONE);
+ mLoggedInFrame.setVisibility(View.VISIBLE);
+ }
+
+ private void showSignupForm() {
+ mLoggingInFrame.setVisibility(View.GONE);
+ mLoggedInFrame.setVisibility(View.GONE);
+ mSignupForm.setVisibility(View.VISIBLE);
+ }
+ private void showError(String error) {
+ mError.setText(error);
+ mError.setVisibility(View.VISIBLE);
+ }
+
+ private void hideError() {
+ mError.setText("");
+ mError.setVisibility(View.INVISIBLE);
+ }
+
+ public interface OnSignupInteractionListener {
+ void onSignupCompleted();
+ void onCancelSignup();
+ }
+
+ private void onGetStartedClicked() {
+ if (mListener != null) {
+ mListener.onSignupCompleted();
+ }
+ }
+
+
+ public void handleSignupCompleted() {
+ mSignupInProgress = false;
+ String username = mUsername.getText().toString().trim();
+ String password = mPassword.getText().toString();
+ getActivity().runOnUiThread(() -> {
+ mActivityText.setText(R.string.logging_in);
+ });
+ mLoginInProgress = true;
+ boolean keepUserLoggedIn = mKeepMeLoggedInCheckbox.isChecked();
+ login(username, password, keepUserLoggedIn);
+ }
+
+ public void handleSignupFailed(String error) {
+ mSignupInProgress = false;
+ getActivity().runOnUiThread(() -> {
+ mSignupButton.setEnabled(true);
+ showSignupForm();
+ mError.setText(error);
+ mError.setVisibility(View.VISIBLE);
+ });
+ }
+
+ public void handleLoginCompleted(boolean success) {
+ mLoginInProgress = false;
+ getActivity().runOnUiThread(() -> {
+ mSignupButton.setEnabled(true);
+ if (success) {
+ mSignupSuccess = true;
+ showLoggedInMessage();
+ } else {
+ // Registration was successful but login failed.
+ // Let the user log in manually
+ mListener.onCancelSignup();
+ showSignupForm();
+ }
+ });
+ }
+
+ @Override
+ public boolean doBack() {
+ if (mSignupInProgress) {
+ cancelSignup();
+ } else if (mLoginInProgress) {
+ cancelLogin();
+ }
+
+ if (mSignupInProgress || mLoginInProgress) {
+ showSignupForm();
+ mLoginInProgress = false;
+ mSignupInProgress = false;
+ mSignupButton.setEnabled(true);
+ return true;
+ } else if (mSignupSuccess) {
+ onGetStartedClicked();
+ return true;
+ } else {
+ return false;
+ }
+ }
+}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/StartMenuFragment.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/StartMenuFragment.java
new file mode 100644
index 0000000000..fe77991962
--- /dev/null
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/StartMenuFragment.java
@@ -0,0 +1,93 @@
+package io.highfidelity.hifiinterface.fragment;
+
+import android.app.Fragment;
+import android.content.Context;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+
+import io.highfidelity.hifiinterface.R;
+
+public class StartMenuFragment extends Fragment {
+
+ private String TAG = "HighFidelity";
+ private StartMenuInteractionListener mListener;
+
+ public StartMenuFragment() {
+ // Required empty public constructor
+ }
+
+ public static StartMenuFragment newInstance() {
+ StartMenuFragment fragment = new StartMenuFragment();
+ return fragment;
+ }
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ // Inflate the layout for this fragment
+ View rootView = inflater.inflate(R.layout.fragment_login_menu, container, false);
+ rootView.findViewById(R.id.signupButton).setOnClickListener(view -> {
+ if (mListener != null) {
+ mListener.onSignupButtonClicked();
+ }
+ });
+
+ rootView.findViewById(R.id.loginButton).setOnClickListener(view -> {
+ if (mListener != null) {
+ mListener.onLoginButtonClicked();
+ }
+ });
+
+ rootView.findViewById(R.id.steamLoginButton).setOnClickListener(view -> {
+ if (mListener != null) {
+ mListener.onSteamLoginButtonClicked();
+ }
+ });
+
+ rootView.findViewById(R.id.takeMeInWorld).setOnClickListener(view -> {
+ if (mListener != null) {
+ mListener.onSkipLoginClicked();
+ }
+ });
+
+
+
+ return rootView;
+ }
+
+ @Override
+ public void onAttach(Context context) {
+ super.onAttach(context);
+ if (context instanceof StartMenuInteractionListener) {
+ mListener = (StartMenuInteractionListener) context;
+ } else {
+ throw new RuntimeException(context.toString()
+ + " must implement StartMenuInteractionListener");
+ }
+ }
+
+ @Override
+ public void onDetach() {
+ super.onDetach();
+ mListener = null;
+ }
+
+ /**
+ * This interface must be implemented by activities that contain this
+ * fragment to allow an interaction in this fragment to be communicated
+ * to the activity and potentially other fragments contained in that
+ * activity.
+ *
+ * See the Android Training lesson Communicating with Other Fragments for more information.
+ */
+ public interface StartMenuInteractionListener {
+ void onSignupButtonClicked();
+ void onLoginButtonClicked();
+ void onSkipLoginClicked();
+ void onSteamLoginButtonClicked();
+ }
+}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/WebViewFragment.java b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/WebViewFragment.java
index 2d887d5a19..3614fe47e4 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/WebViewFragment.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/fragment/WebViewFragment.java
@@ -4,9 +4,11 @@ import android.app.Fragment;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
+import android.net.Uri;
import android.net.http.SslError;
import android.os.Bundle;
import android.os.Handler;
+import android.text.TextUtils;
import android.view.GestureDetector;
import android.view.KeyEvent;
import android.view.LayoutInflater;
@@ -14,6 +16,7 @@ import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AlphaAnimation;
+import android.webkit.CookieManager;
import android.webkit.SslErrorHandler;
import android.webkit.WebChromeClient;
import android.webkit.WebResourceError;
@@ -25,6 +28,7 @@ import android.webkit.WebViewClient;
import android.widget.ProgressBar;
import android.widget.Toast;
+import io.highfidelity.hifiinterface.BuildConfig;
import io.highfidelity.hifiinterface.R;
import io.highfidelity.hifiinterface.WebViewActivity;
@@ -32,6 +36,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
public static final String URL = "url";
public static final String TOOLBAR_VISIBLE = "toolbar_visible";
+ public static final String CLEAR_COOKIES = "clear_cookies";
private static final long DELAY_HIDE_TOOLBAR_MILLIS = 3000;
private static final long FADE_OUT_DURATION = 2000;
@@ -41,6 +46,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
private ProgressBar mProgressBar;
private String mUrl;
private boolean mToolbarVisible;
+ private boolean mClearCookies;
private OnWebViewInteractionListener mListener;
private Runnable mCloseAction;
@@ -170,6 +176,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
if (getArguments() != null) {
mUrl = getArguments().getString(URL);
mToolbarVisible = getArguments().getBoolean(TOOLBAR_VISIBLE);
+ mClearCookies = getArguments().getBoolean(CLEAR_COOKIES);
}
}
@@ -179,6 +186,10 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
View rootView = inflater.inflate(R.layout.fragment_web_view, container, false);
mProgressBar = rootView.findViewById(R.id.toolbarProgressBar);
myWebView = rootView.findViewById(R.id.web_view);
+ if (mClearCookies) {
+ CookieManager.getInstance().removeAllCookies(null);
+ }
+
mHandler = new Handler();
gestureDetector = new GestureDetector(this);
gestureDetector.setOnDoubleTapListener(new GestureDetector.OnDoubleTapListener() {
@@ -251,6 +262,7 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
void onWebLoaded(String url, SafenessLevel safenessLevel);
void onTitleReceived(String title);
void onExpand();
+ void onOAuthAuthorizeCallback(Uri uri);
}
@@ -320,6 +332,18 @@ public class WebViewFragment extends Fragment implements GestureDetector.OnGestu
super.onLoadResource(view, url);
}
}
+
+ @Override
+ public boolean shouldOverrideUrlLoading(WebView view, WebResourceRequest request) {
+ if (!TextUtils.isEmpty(BuildConfig.OAUTH_REDIRECT_URI) &&
+ request.getUrl().toString().startsWith(BuildConfig.OAUTH_REDIRECT_URI)) {
+ if (mListener != null) {
+ mListener.onOAuthAuthorizeCallback(request.getUrl());
+ }
+ return true;
+ }
+ return super.shouldOverrideUrlLoading(view, request);
+ }
}
class HiFiWebChromeClient extends WebChromeClient {
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/receiver/HeadsetStateReceiver.java b/android/app/src/main/java/io/highfidelity/hifiinterface/receiver/HeadsetStateReceiver.java
new file mode 100644
index 0000000000..5645912d73
--- /dev/null
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/receiver/HeadsetStateReceiver.java
@@ -0,0 +1,18 @@
+package io.highfidelity.hifiinterface.receiver;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.media.AudioManager;
+import android.util.Log;
+
+public class HeadsetStateReceiver extends BroadcastReceiver {
+
+ private native void notifyHeadsetOn(boolean pluggedIn);
+
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+ notifyHeadsetOn(audioManager.isWiredHeadsetOn());
+ }
+}
diff --git a/android/app/src/main/java/io/highfidelity/hifiinterface/view/DomainAdapter.java b/android/app/src/main/java/io/highfidelity/hifiinterface/view/DomainAdapter.java
index 71d634e9ea..78251ac4a4 100644
--- a/android/app/src/main/java/io/highfidelity/hifiinterface/view/DomainAdapter.java
+++ b/android/app/src/main/java/io/highfidelity/hifiinterface/view/DomainAdapter.java
@@ -12,6 +12,7 @@ import android.widget.TextView;
import com.squareup.picasso.Picasso;
+import java.util.Arrays;
import java.util.List;
import io.highfidelity.hifiinterface.R;
@@ -36,19 +37,41 @@ public class DomainAdapter extends RecyclerView.Adapter 0) {
+ mDomains = Arrays.copyOf(DOMAINS_TMP_CACHE, DOMAINS_TMP_CACHE.length);
+ notifyDataSetChanged();
+ if (mAdapterListener != null) {
+ if (mDomains.length == 0) {
+ mAdapterListener.onEmptyAdapter(false);
+ } else {
+ mAdapterListener.onNonEmptyAdapter(false);
+ }
+ }
+ }
+ }
+ }
+
public void loadDomains(String filterText, boolean forceRefresh) {
domainProvider.retrieve(filterText, new DomainProvider.DomainCallback() {
@Override
@@ -60,13 +83,18 @@ public class DomainAdapter extends RecyclerView.Adapter USERS_TMP_CACHE;
+
public UserListAdapter(Context c, UsersProvider usersProvider) {
mContext = c;
mInflater = LayoutInflater.from(mContext);
mProvider = usersProvider;
- loadUsers();
}
public void setListener(AdapterListener adapterListener) {
mAdapterListener = adapterListener;
}
+ public void startLoad() {
+ useTmpCachedUsers();
+ loadUsers();
+ }
+
+ private void useTmpCachedUsers() {
+ synchronized (this) {
+ if (USERS_TMP_CACHE != null && USERS_TMP_CACHE.size() > 0) {
+ mUsers = new ArrayList<>(USERS_TMP_CACHE.size());
+ mUsers.addAll(USERS_TMP_CACHE);
+ notifyDataSetChanged();
+ if (mAdapterListener != null) {
+ if (mUsers.isEmpty()) {
+ mAdapterListener.onEmptyAdapter(false);
+ } else {
+ mAdapterListener.onNonEmptyAdapter(false);
+ }
+ }
+ }
+ }
+ }
+
public void loadUsers() {
mProvider.retrieve(new UsersProvider.UsersCallback() {
@Override
public void retrieveOk(List users) {
mUsers = new ArrayList<>(users);
notifyDataSetChanged();
- if (mAdapterListener != null) {
- if (mUsers.isEmpty()) {
- mAdapterListener.onEmptyAdapter();
- } else {
- mAdapterListener.onNonEmptyAdapter();
+
+ synchronized (this) {
+ USERS_TMP_CACHE = new ArrayList<>(mUsers.size());
+ USERS_TMP_CACHE.addAll(mUsers);
+
+ if (mAdapterListener != null) {
+ if (mUsers.isEmpty()) {
+ mAdapterListener.onEmptyAdapter(true);
+ } else {
+ mAdapterListener.onNonEmptyAdapter(true);
+ }
}
}
}
@@ -240,8 +269,9 @@ public class UserListAdapter extends RecyclerView.Adapter
diff --git a/android/app/src/main/res/drawable/ic_eye_noshow.xml b/android/app/src/main/res/drawable/ic_eye_noshow.xml
new file mode 100644
index 0000000000..1d5304afac
--- /dev/null
+++ b/android/app/src/main/res/drawable/ic_eye_noshow.xml
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/ic_eye_show.xml b/android/app/src/main/res/drawable/ic_eye_show.xml
new file mode 100644
index 0000000000..273ecc8339
--- /dev/null
+++ b/android/app/src/main/res/drawable/ic_eye_show.xml
@@ -0,0 +1,15 @@
+
+
+
+
+
+
diff --git a/android/app/src/main/res/drawable/ic_right_arrow.xml b/android/app/src/main/res/drawable/ic_right_arrow.xml
new file mode 100644
index 0000000000..e35d1a2733
--- /dev/null
+++ b/android/app/src/main/res/drawable/ic_right_arrow.xml
@@ -0,0 +1,11 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/ic_steam.xml b/android/app/src/main/res/drawable/ic_steam.xml
new file mode 100644
index 0000000000..9b739c1f73
--- /dev/null
+++ b/android/app/src/main/res/drawable/ic_steam.xml
@@ -0,0 +1,11 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/rounded_button.xml b/android/app/src/main/res/drawable/rounded_button_color1.xml
similarity index 100%
rename from android/app/src/main/res/drawable/rounded_button.xml
rename to android/app/src/main/res/drawable/rounded_button_color1.xml
diff --git a/android/app/src/main/res/drawable/rounded_button_color3.xml b/android/app/src/main/res/drawable/rounded_button_color3.xml
new file mode 100644
index 0000000000..6230885b30
--- /dev/null
+++ b/android/app/src/main/res/drawable/rounded_button_color3.xml
@@ -0,0 +1,24 @@
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/rounded_button_color4.xml b/android/app/src/main/res/drawable/rounded_button_color4.xml
new file mode 100644
index 0000000000..679bf24513
--- /dev/null
+++ b/android/app/src/main/res/drawable/rounded_button_color4.xml
@@ -0,0 +1,24 @@
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/rounded_edit.xml b/android/app/src/main/res/drawable/rounded_edit.xml
deleted file mode 100644
index 3c1cac4d1d..0000000000
--- a/android/app/src/main/res/drawable/rounded_edit.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/rounded_secondary_button.xml b/android/app/src/main/res/drawable/rounded_secondary_button.xml
new file mode 100644
index 0000000000..6230885b30
--- /dev/null
+++ b/android/app/src/main/res/drawable/rounded_secondary_button.xml
@@ -0,0 +1,24 @@
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/selector_show_password.xml b/android/app/src/main/res/drawable/selector_show_password.xml
new file mode 100644
index 0000000000..a44092aceb
--- /dev/null
+++ b/android/app/src/main/res/drawable/selector_show_password.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/layout/activity_encourage_login.xml b/android/app/src/main/res/layout/activity_encourage_login.xml
new file mode 100644
index 0000000000..d7c9ff6b4d
--- /dev/null
+++ b/android/app/src/main/res/layout/activity_encourage_login.xml
@@ -0,0 +1,14 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/layout/fragment_login.xml b/android/app/src/main/res/layout/fragment_login.xml
index c50e6c1380..d12b84cc8d 100644
--- a/android/app/src/main/res/layout/fragment_login.xml
+++ b/android/app/src/main/res/layout/fragment_login.xml
@@ -6,6 +6,17 @@
android:layout_height="match_parent"
android:background="@color/backgroundLight">
+
+
+
+
-
-
-
+ app:layout_constraintBottom_toBottomOf="parent"
+ android:visibility="gone">
+
+
+
-
-
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/android/app/src/main/res/layout/fragment_login_menu.xml b/android/app/src/main/res/layout/fragment_login_menu.xml
new file mode 100644
index 0000000000..edfa4dd9fd
--- /dev/null
+++ b/android/app/src/main/res/layout/fragment_login_menu.xml
@@ -0,0 +1,122 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/layout/fragment_signup.xml b/android/app/src/main/res/layout/fragment_signup.xml
new file mode 100644
index 0000000000..1540d26434
--- /dev/null
+++ b/android/app/src/main/res/layout/fragment_signup.xml
@@ -0,0 +1,252 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/android/app/src/main/res/menu/menu_navigation.xml b/android/app/src/main/res/menu/menu_navigation.xml
index 3cce64f9f5..142af5d146 100644
--- a/android/app/src/main/res/menu/menu_navigation.xml
+++ b/android/app/src/main/res/menu/menu_navigation.xml
@@ -9,4 +9,9 @@
android:id="@+id/action_people"
android:title="@string/people"
/>
+
diff --git a/android/app/src/main/res/values-w385dp/dimens.xml b/android/app/src/main/res/values-w385dp/dimens.xml
new file mode 100644
index 0000000000..7463700d2a
--- /dev/null
+++ b/android/app/src/main/res/values-w385dp/dimens.xml
@@ -0,0 +1,22 @@
+
+
+ 35dp
+ 84dp
+ 340dp
+ 171dp
+ 42dp
+ 10dp
+ 14sp
+ 18sp
+ 18sp
+ 72dp
+ 76dp
+ 100dp
+ 27dp
+ 238dp
+ 42dp
+ 270dp
+ 86dp
+ 22sp
+ 16dp
+
\ No newline at end of file
diff --git a/android/app/src/main/res/values/colors.xml b/android/app/src/main/res/values/colors.xml
index e4bbb60544..fc5ce7ba16 100644
--- a/android/app/src/main/res/values/colors.xml
+++ b/android/app/src/main/res/values/colors.xml
@@ -6,8 +6,12 @@
#54D7FD
#E3E3E3
#575757
+ #3D3D3D
#1EB5EC
#00B4EF
+ #828282
+ #8F8F8F
+ #434343
#333333
#4F4F4F
#33999999
@@ -22,4 +26,6 @@
#FBD92A
#8A8A8A
#40000000
+ #F2F2F2
+
diff --git a/android/app/src/main/res/values/dimens.xml b/android/app/src/main/res/values/dimens.xml
index d40132939b..85d79509ba 100644
--- a/android/app/src/main/res/values/dimens.xml
+++ b/android/app/src/main/res/values/dimens.xml
@@ -33,10 +33,26 @@
6dp
64dp
- 56dp
- 101dp
- 425dp
+ 32dp
+ 76dp
+ 306dp
8dp
+ 150dp
+ 38dp
+ 65dp
+ 68dp
+ 90dp
+ 9dp
+ 16sp
+ 16sp
+ 13sp
+ 24dp
+ 214dp
+ 38dp
+ 300dp
+ 77dp
+ 20sp
+ 14dp
diff --git a/android/app/src/main/res/values/strings.xml b/android/app/src/main/res/values/strings.xml
index b158aba59d..671f171c3e 100644
--- a/android/app/src/main/res/values/strings.xml
+++ b/android/app/src/main/res/values/strings.xml
@@ -10,23 +10,48 @@
POPULAR
BOOKMARKS
Type a domain url
- Username or email\u00A0
- Password\u00A0
+ Email
+ Username
+ Username or email
+ Password
Login
Logout
- Forgot password?\u00A0
+ Can\u0027t access your account?
Username or password incorrect.
- Logging into High Fidelity
+ Logging in
Search for a place by name\u00A0
Loading places…
No places exist with that name
Privacy Policy
Your Last Location
Online
+ Sign Up
+ SIGN UP
+ Creating your High Fidelity account
+ Email, username or password incorrect.
+ You are now signed into High Fidelity
+ You are now logged in!
+ Welcome
+ Cancel
+ CANCEL
+ GET STARTED
tagFragmentHome
tagFragmentLogin
+ tagFragmentLogginIn
+ tagFragmentSignup
tagFragmentPolicy
tagFragmentPeople
+ tagSettings
+ tagFragmentSignedIn
+ Settings
+ AEC
+ Acoustic Echo Cancellation
+ Developer
+ LOG IN
+ Keep Me Logged In
+ No thanks, take me in-world!
+ BE ANYWHERE, WITH ANYONE \nRIGHT NOW
+ STEAM LOG IN
diff --git a/android/app/src/main/res/xml/settings.xml b/android/app/src/main/res/xml/settings.xml
new file mode 100644
index 0000000000..934d34ba73
--- /dev/null
+++ b/android/app/src/main/res/xml/settings.xml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/build.gradle b/android/build.gradle
index a6de0d469c..e22c2d877f 100644
--- a/android/build.gradle
+++ b/android/build.gradle
@@ -72,17 +72,17 @@ def jniFolder = new File(appDir, 'src/main/jniLibs/arm64-v8a')
def baseUrl = 'https://hifi-public.s3.amazonaws.com/dependencies/android/'
def breakpadDumpSymsDir = new File("${appDir}/build/tmp/breakpadDumpSyms")
-def qtFile='qt-5.11.1_linux_armv8-libcpp_openssl.tgz'
-def qtChecksum='f312c47cd8b8dbca824c32af4eec5e66'
-def qtVersionId='nyCGcb91S4QbYeJhUkawO5x1lrLdSNB_'
+def qtFile='qt-5.11.1_linux_armv8-libcpp_openssl_patched.tgz'
+def qtChecksum='aa449d4bfa963f3bc9a9dfe558ba29df'
+def qtVersionId='3S97HBM5G5Xw9EfE52sikmgdN3t6C2MN'
if (Os.isFamily(Os.FAMILY_MAC)) {
- qtFile = 'qt-5.11.1_osx_armv8-libcpp_openssl.tgz'
- qtChecksum='a0c8b394aec5b0fcd46714ca3a53278a'
- qtVersionId='QNa.lwNJaPc0eGuIL.xZ8ebeTuLL7rh8'
+ qtFile = 'qt-5.11.1_osx_armv8-libcpp_openssl_patched.tgz'
+ qtChecksum='c83cc477c08a892e00c71764dca051a0'
+ qtVersionId='OxBD7iKINv1HbyOXmAmDrBb8AF3N.Kup'
} else if (Os.isFamily(Os.FAMILY_WINDOWS)) {
- qtFile = 'qt-5.11.1_win_armv8-libcpp_openssl.tgz'
- qtChecksum='d80aed4233ce9e222aae8376e7a94bf9'
- qtVersionId='iDVXu0i3WEXRFIxQCtzcJ2XuKrE8RIqB'
+ qtFile = 'qt-5.11.1_win_armv8-libcpp_openssl_patched.tgz'
+ qtChecksum='0582191cc55431aa4f660848a542883e'
+ qtVersionId='JfWM0P_Mz5Qp0LwpzhrsRwN3fqlLSFeT'
}
def packages = [
@@ -106,11 +106,6 @@ def packages = [
versionId: 'r5Zran.JSCtvrrB6Q4KaqfIoALPw3lYY',
checksum: 'a8ee8584cf1ccd34766c7ddd9d5e5449',
],
- glm: [
- file: 'glm-0.9.8.5-patched.tgz',
- versionId: 'cskfMoJrFlAeqI3WPxemyO_Cxt7rT9EJ',
- checksum: '067b5fe16b220b5b1a1039ba51b062ae',
- ],
gvr: [
file: 'gvrsdk_v1.101.0.tgz',
versionId: 'nqBV_j81Uc31rC7bKIrlya_Hah4v3y5r',
@@ -143,11 +138,9 @@ def packages = [
includeLibs: ['libtbb.so', 'libtbbmalloc.so'],
],
hifiAC: [
- file: 'libplugins_libhifiCodec.zip',
- versionId: 'i31pW.qNbvFOXRxbyiJUxg3sphaFNmZU',
- checksum: '9412a8e12c88a4096c1fc843bb9fe52d',
- sharedLibFolder: '',
- includeLibs: ['libplugins_libhifiCodec.so']
+ baseUrl: 'http://s3.amazonaws.com/hifi-public/dependencies/',
+ file: 'codecSDK-android_armv8-2.0.zip',
+ checksum: '1cbef929675818fc64c4101b72f84a6a'
],
etc2comp: [
file: 'etc2comp-patched-armv8-libcpp.tgz',
@@ -163,33 +156,6 @@ def packages = [
]
]
-
-def scribeLocalFile='scribe' + EXEC_SUFFIX
-def scribeFile='scribe_linux_x86_64'
-def scribeChecksum='ca4b904f52f4f993c29175ba96798fa6'
-def scribeVersion='u_iTrJDaE95i2abTPXOpPZckGBIim53G'
-
-def shreflectLocalFile='shreflect' + EXEC_SUFFIX
-def shreflectFile='shreflect_linux_x86_64'
-def shreflectChecksum='d6094a8580066c0b6f4e80b5adfb1d98'
-def shreflectVersion='jnrpudh6fptIg6T2.Z6fgKP2ultAdKmE'
-
-if (Os.isFamily(Os.FAMILY_MAC)) {
- scribeFile = 'scribe_osx_x86_64'
- scribeChecksum='72db9d32d4e1e50add755570ac5eb749'
- scribeVersion='DAW0DmnjCRib4MD8x93bgc2Z2MpPojZC'
- shreflectFile='shreflect_osx_x86_64'
- shreflectChecksum='d613ef0703c21371fee93fd2e54b964f'
- shreflectVersion='.rYNzjSFq6WtWDnE5KIKRIAGyJtr__ad'
-} else if (Os.isFamily(Os.FAMILY_WINDOWS)) {
- scribeFile = 'scribe_win32_x86_64.exe'
- scribeChecksum='678e43d290c90fda670c6fefe038a06d'
- scribeVersion='PuullrA_bPlO9kXZRt8rLe536X1UI.m7'
- shreflectFile='shreflect_win32_x86_64.exe'
- shreflectChecksum='6f4a77b8cceb3f1bbc655132c3665060'
- shreflectVersion='iIyCyza1nelkbI7ihybF59bBlwrfAC3D'
-}
-
def options = [
files: new TreeSet(),
features: new HashSet(),
@@ -367,7 +333,8 @@ task downloadDependencies {
doLast {
packages.each { entry ->
def filename = entry.value['file'];
- def url = baseUrl + filename;
+ def dependencyBaseUrl = entry.value['baseUrl']
+ def url = (dependencyBaseUrl?.trim() ? dependencyBaseUrl : baseUrl) + filename;
if (entry.value.containsKey('versionId')) {
url = url + '?versionId=' + entry.value['versionId']
}
@@ -447,44 +414,6 @@ task copyDependencies(dependsOn: [ extractDependencies ]) {
}
}
-task downloadScribe(type: Download) {
- src baseUrl + scribeFile + '?versionId=' + scribeVersion
- dest new File(baseFolder, scribeLocalFile)
- onlyIfNewer true
-}
-
-task verifyScribe (type: Verify, dependsOn: downloadScribe) {
- src new File(baseFolder, scribeLocalFile);
- checksum scribeChecksum
-}
-
-task fixScribePermissions(type: Exec, dependsOn: verifyScribe) {
- commandLine 'chmod', 'a+x', HIFI_ANDROID_PRECOMPILED + '/' + scribeLocalFile
-}
-
-task downloadShreflect(type: Download) {
- src baseUrl + shreflectFile + '?versionId=' + shreflectVersion
- dest new File(baseFolder, shreflectLocalFile)
- onlyIfNewer true
-}
-
-task verifyShreflect(type: Verify, dependsOn: downloadShreflect) {
- src new File(baseFolder, shreflectLocalFile);
- checksum shreflectChecksum
-}
-
-task fixShreflectPermissions(type: Exec, dependsOn: verifyShreflect) {
- commandLine 'chmod', 'a+x', HIFI_ANDROID_PRECOMPILED + '/' + shreflectLocalFile
-}
-
-task setupScribe(dependsOn: [verifyScribe, verifyShreflect]) { }
-
-// On Windows, we don't need to set the executable bit, but on OSX and Unix we do
-if (!Os.isFamily(Os.FAMILY_WINDOWS)) {
- setupScribe.dependsOn fixScribePermissions
- setupScribe.dependsOn fixShreflectPermissions
-}
-
task extractGvrBinaries(dependsOn: extractDependencies) {
doLast {
def gvrLibFolder = new File(HIFI_ANDROID_PRECOMPILED, 'gvr/gvr-android-sdk-1.101.0/libraries');
@@ -571,7 +500,7 @@ task qtBundle {
}
}
-task setupDependencies(dependsOn: [setupScribe, copyDependencies, extractGvrBinaries, qtBundle]) { }
+task setupDependencies(dependsOn: [copyDependencies, extractGvrBinaries, qtBundle]) { }
task cleanDependencies(type: Delete) {
delete HIFI_ANDROID_PRECOMPILED
@@ -668,6 +597,21 @@ task uploadBreakpadDumpSymsRelease(type:io.github.httpbuilderng.http.HttpTask, d
}
}
+task renameHifiACTaskDebug() {
+ doLast {
+ def sourceFile = new File("${appDir}/build/intermediates/cmake/debug/obj/arm64-v8a/","libhifiCodec.so")
+ def destinationFile = new File("${appDir}/src/main/jniLibs/arm64-v8a", "libplugins_libhifiCodec.so")
+ copy { from sourceFile; into destinationFile.parent; rename(sourceFile.name, destinationFile.name) }
+ }
+}
+task renameHifiACTaskRelease(type: Copy) {
+ doLast {
+ def sourceFile = new File("${appDir}/build/intermediates/cmake/release/obj/arm64-v8a/","libhifiCodec.so")
+ def destinationFile = new File("${appDir}/src/main/jniLibs/arm64-v8a", "libplugins_libhifiCodec.so")
+ copy { from sourceFile; into destinationFile.parent; rename(sourceFile.name, destinationFile.name) }
+ }
+}
+
// FIXME this code is prototyping the desired functionality for doing build time binary dependency resolution.
// See the comment on the qtBundle task above
/*
diff --git a/android/gradle.properties b/android/gradle.properties
new file mode 100644
index 0000000000..ac639c5ae7
--- /dev/null
+++ b/android/gradle.properties
@@ -0,0 +1 @@
+org.gradle.jvmargs=-Xms2g -Xmx4g
diff --git a/assignment-client/CMakeLists.txt b/assignment-client/CMakeLists.txt
index c73e8e1d34..1500d7b98e 100644
--- a/assignment-client/CMakeLists.txt
+++ b/assignment-client/CMakeLists.txt
@@ -11,7 +11,7 @@ setup_memory_debugger()
# link in the shared libraries
link_hifi_libraries(
- audio avatars octree gpu graphics fbx entities
+ audio avatars octree gpu graphics fbx hfm entities
networking animation recording shared script-engine embedded-webserver
controllers physics plugins midi image
)
diff --git a/assignment-client/src/Agent.cpp b/assignment-client/src/Agent.cpp
index 06a14927d3..88897a0fed 100644
--- a/assignment-client/src/Agent.cpp
+++ b/assignment-client/src/Agent.cpp
@@ -53,6 +53,7 @@
#include // TODO: consider moving to scriptengine.h
#include "entities/AssignmentParentFinder.h"
+#include "AssignmentDynamicFactory.h"
#include "RecordingScriptingInterface.h"
#include "AbstractAudioInterface.h"
#include "AgentScriptingInterface.h"
@@ -67,6 +68,9 @@ Agent::Agent(ReceivedMessage& message) :
{
DependencyManager::set();
+ DependencyManager::registerInheritance();
+ DependencyManager::set();
+
DependencyManager::set();
DependencyManager::set();
DependencyManager::set(false);
@@ -92,7 +96,6 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set();
DependencyManager::set();
- DependencyManager::set();
DependencyManager::set();
DependencyManager::set();
@@ -173,6 +176,8 @@ void Agent::run() {
// Create ScriptEngines on threaded-assignment thread then move to main thread.
DependencyManager::set(ScriptEngine::AGENT_SCRIPT)->moveToThread(qApp->thread());
+ DependencyManager::set();
+
// make sure we request our script once the agent connects to the domain
auto nodeList = DependencyManager::get();
@@ -211,13 +216,14 @@ void Agent::requestScript() {
}
// make sure this is not a script request for the file scheme
- if (scriptURL.scheme() == URL_SCHEME_FILE) {
+ if (scriptURL.scheme() == HIFI_URL_SCHEME_FILE) {
qWarning() << "Cannot load script for Agent from local filesystem.";
scriptRequestFinished();
return;
}
- auto request = DependencyManager::get()->createResourceRequest(this, scriptURL);
+ auto request = DependencyManager::get()->createResourceRequest(
+ this, scriptURL, true, -1, "Agent::requestScript");
if (!request) {
qWarning() << "Could not create ResourceRequest for Agent script at" << scriptURL.toString();
@@ -356,154 +362,173 @@ void Agent::scriptRequestFinished() {
}
void Agent::executeScript() {
- _scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);
+ // the following block is scoped so that any shared pointers we take here
+ // are cleared before we call setFinished at the end of the function
+ {
+ _scriptEngine = scriptEngineFactory(ScriptEngine::AGENT_SCRIPT, _scriptContents, _payload);
- // setup an Avatar for the script to use
- auto scriptedAvatar = DependencyManager::get();
+ // setup an Avatar for the script to use
+ auto scriptedAvatar = DependencyManager::get();
+ scriptedAvatar->setID(getSessionUUID());
+ scriptedAvatar->setForceFaceTrackerConnected(true);
- scriptedAvatar->setID(getSessionUUID());
+ // call model URL setters with empty URLs so our avatar, if user, will have the default models
+ scriptedAvatar->setSkeletonModelURL(QUrl());
- connect(_scriptEngine.data(), SIGNAL(update(float)),
- scriptedAvatar.data(), SLOT(update(float)), Qt::ConnectionType::QueuedConnection);
- scriptedAvatar->setForceFaceTrackerConnected(true);
+ // force lazy initialization of the head data for the scripted avatar
+ // since it is referenced below by computeLoudness and getAudioLoudness
+ scriptedAvatar->getHeadOrientation();
- // call model URL setters with empty URLs so our avatar, if user, will have the default models
- scriptedAvatar->setSkeletonModelURL(QUrl());
+ // give this AvatarData object to the script engine
+ _scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
- // force lazy initialization of the head data for the scripted avatar
- // since it is referenced below by computeLoudness and getAudioLoudness
- scriptedAvatar->getHeadOrientation();
+ // give scripts access to the Users object
+ _scriptEngine->registerGlobalObject("Users", DependencyManager::get().data());
- // give this AvatarData object to the script engine
- _scriptEngine->registerGlobalObject("Avatar", scriptedAvatar.data());
+ auto player = DependencyManager::get();
+ connect(player.data(), &recording::Deck::playbackStateChanged, [&player, &scriptedAvatar] {
+ if (player->isPlaying()) {
+ auto recordingInterface = DependencyManager::get();
+ if (recordingInterface->getPlayFromCurrentLocation()) {
+ scriptedAvatar->setRecordingBasis();
+ }
- // give scripts access to the Users object
- _scriptEngine->registerGlobalObject("Users", DependencyManager::get().data());
+ // these procedural movements are included in the recordings
+ scriptedAvatar->setHasProceduralEyeFaceMovement(false);
+ scriptedAvatar->setHasProceduralBlinkFaceMovement(false);
+ scriptedAvatar->setHasAudioEnabledFaceMovement(false);
+ } else {
+ scriptedAvatar->clearRecordingBasis();
- auto player = DependencyManager::get();
- connect(player.data(), &recording::Deck::playbackStateChanged, [=] {
- if (player->isPlaying()) {
- auto recordingInterface = DependencyManager::get();
- if (recordingInterface->getPlayFromCurrentLocation()) {
- scriptedAvatar->setRecordingBasis();
+ // restore procedural blendshape movement
+ scriptedAvatar->setHasProceduralEyeFaceMovement(true);
+ scriptedAvatar->setHasProceduralBlinkFaceMovement(true);
+ scriptedAvatar->setHasAudioEnabledFaceMovement(true);
}
- } else {
- scriptedAvatar->clearRecordingBasis();
- }
- });
+ });
- using namespace recording;
- static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
- Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {
+ using namespace recording;
+ static const FrameType AVATAR_FRAME_TYPE = Frame::registerFrameType(AvatarData::FRAME_NAME);
+ Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [scriptedAvatar](Frame::ConstPointer frame) {
+
+ auto recordingInterface = DependencyManager::get();
+ bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();
+
+ // FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
+ if (!useFrameSkeleton) {
+ static std::once_flag warning;
+ std::call_once(warning, [] {
+ qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
+ });
+ }
+
+ AvatarData::fromFrame(frame->data, *scriptedAvatar);
+ });
+
+ using namespace recording;
+ static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
+ Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
+ static quint16 audioSequenceNumber{ 0 };
+
+ QByteArray audio(frame->data);
+
+ if (_isNoiseGateEnabled) {
+ int16_t* samples = reinterpret_cast(audio.data());
+ int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
+ _audioGate.render(samples, samples, numSamples);
+ }
+
+ computeLoudness(&audio, scriptedAvatar);
+
+ // state machine to detect gate opening and closing
+ bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
+ bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
+ bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
+ _audioGateOpen = audioGateOpen;
+ Q_UNUSED(openedInLastBlock);
+
+ // the codec must be flushed to silence before sending silent packets,
+ // so delay the transition to silent packets by one packet after becoming silent.
+ auto packetType = PacketType::MicrophoneAudioNoEcho;
+ if (!audioGateOpen && !closedInLastBlock) {
+ packetType = PacketType::SilentAudioFrame;
+ }
+
+ Transform audioTransform;
+ auto headOrientation = scriptedAvatar->getHeadOrientation();
+ audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
+ audioTransform.setRotation(headOrientation);
+
+ QByteArray encodedBuffer;
+ if (_encoder) {
+ _encoder->encode(audio, encodedBuffer);
+ } else {
+ encodedBuffer = audio;
+ }
+
+ AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
+ audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
+ packetType, _selectedCodecName);
+ });
+
+ auto avatarHashMap = DependencyManager::set();
+ _scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());
+
+ // register ourselves to the script engine
+ _scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));
+
+ _scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get().data());
+ _scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get().data());
+
+ QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
+ _scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
+
+ auto entityScriptingInterface = DependencyManager::get();
+
+ _scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
+
+ _scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
+ LocationScriptingInterface::locationSetter);
auto recordingInterface = DependencyManager::get();
- bool useFrameSkeleton = recordingInterface->getPlayerUseSkeletonModel();
+ _scriptEngine->registerGlobalObject("Recording", recordingInterface.data());
- // FIXME - the ability to switch the avatar URL is not actually supported when playing back from a recording
- if (!useFrameSkeleton) {
- static std::once_flag warning;
- std::call_once(warning, [] {
- qWarning() << "Recording.setPlayerUseSkeletonModel(false) is not currently supported.";
- });
+ entityScriptingInterface->init();
+
+ _entityViewer.init();
+
+ entityScriptingInterface->setEntityTree(_entityViewer.getTree());
+
+ DependencyManager::set(_entityViewer.getTree());
+
+ // Agents should run at 45hz
+ static const int AVATAR_DATA_HZ = 45;
+ static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
+ QTimer* avatarDataTimer = new QTimer(this);
+ connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
+ avatarDataTimer->setSingleShot(false);
+ avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
+ avatarDataTimer->setTimerType(Qt::PreciseTimer);
+ avatarDataTimer->start();
+
+ _scriptEngine->run();
+
+ Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
+ Frame::clearFrameHandler(AVATAR_FRAME_TYPE);
+
+ if (recordingInterface->isPlaying()) {
+ recordingInterface->stopPlaying();
}
- AvatarData::fromFrame(frame->data, *scriptedAvatar);
- });
-
- using namespace recording;
- static const FrameType AUDIO_FRAME_TYPE = Frame::registerFrameType(AudioConstants::getAudioFrameName());
- Frame::registerFrameHandler(AUDIO_FRAME_TYPE, [this, &scriptedAvatar](Frame::ConstPointer frame) {
- static quint16 audioSequenceNumber{ 0 };
-
- QByteArray audio(frame->data);
-
- if (_isNoiseGateEnabled) {
- int16_t* samples = reinterpret_cast(audio.data());
- int numSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
- _audioGate.render(samples, samples, numSamples);
+ if (recordingInterface->isRecording()) {
+ recordingInterface->stopRecording();
}
- computeLoudness(&audio, scriptedAvatar);
+ avatarDataTimer->stop();
- // state machine to detect gate opening and closing
- bool audioGateOpen = (scriptedAvatar->getAudioLoudness() != 0.0f);
- bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
- bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
- _audioGateOpen = audioGateOpen;
- Q_UNUSED(openedInLastBlock);
+ setIsAvatar(false); // will stop timers for sending identity packets
+ }
- // the codec must be flushed to silence before sending silent packets,
- // so delay the transition to silent packets by one packet after becoming silent.
- auto packetType = PacketType::MicrophoneAudioNoEcho;
- if (!audioGateOpen && !closedInLastBlock) {
- packetType = PacketType::SilentAudioFrame;
- }
-
- Transform audioTransform;
- auto headOrientation = scriptedAvatar->getHeadOrientation();
- audioTransform.setTranslation(scriptedAvatar->getWorldPosition());
- audioTransform.setRotation(headOrientation);
-
- QByteArray encodedBuffer;
- if (_encoder) {
- _encoder->encode(audio, encodedBuffer);
- } else {
- encodedBuffer = audio;
- }
-
- AbstractAudioInterface::emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), audioSequenceNumber, false,
- audioTransform, scriptedAvatar->getWorldPosition(), glm::vec3(0),
- packetType, _selectedCodecName);
- });
-
- auto avatarHashMap = DependencyManager::set();
- _scriptEngine->registerGlobalObject("AvatarList", avatarHashMap.data());
-
- // register ourselves to the script engine
- _scriptEngine->registerGlobalObject("Agent", new AgentScriptingInterface(this));
-
- _scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get().data());
- _scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get().data());
-
- QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
- _scriptEngine->globalObject().setProperty("WebSocketServer", webSocketServerConstructorValue);
-
- auto entityScriptingInterface = DependencyManager::get();
-
- _scriptEngine->registerGlobalObject("EntityViewer", &_entityViewer);
-
- _scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
- LocationScriptingInterface::locationSetter);
-
- auto recordingInterface = DependencyManager::get();
- _scriptEngine->registerGlobalObject("Recording", recordingInterface.data());
-
- entityScriptingInterface->init();
-
- _entityViewer.init();
-
- entityScriptingInterface->setEntityTree(_entityViewer.getTree());
-
- DependencyManager::set(_entityViewer.getTree());
-
- QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
-
- // Agents should run at 45hz
- static const int AVATAR_DATA_HZ = 45;
- static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
- QTimer* avatarDataTimer = new QTimer(this);
- connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
- avatarDataTimer->setSingleShot(false);
- avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
- avatarDataTimer->setTimerType(Qt::PreciseTimer);
- avatarDataTimer->start();
-
- _scriptEngine->run();
-
- Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
- Frame::clearFrameHandler(AVATAR_FRAME_TYPE);
-
- DependencyManager::destroy();
setFinished(true);
}
@@ -553,28 +578,33 @@ void Agent::setIsAvatar(bool isAvatar) {
}
_isAvatar = isAvatar;
- if (_isAvatar && !_avatarIdentityTimer) {
- // set up the avatar timers
- _avatarIdentityTimer = new QTimer(this);
- _avatarQueryTimer = new QTimer(this);
+ auto scriptableAvatar = DependencyManager::get();
+ if (_isAvatar) {
+ if (!_avatarIdentityTimer) {
+ // set up the avatar timers
+ _avatarIdentityTimer = new QTimer(this);
+ _avatarQueryTimer = new QTimer(this);
- // connect our slot
- connect(_avatarIdentityTimer, &QTimer::timeout, this, &Agent::sendAvatarIdentityPacket);
- connect(_avatarQueryTimer, &QTimer::timeout, this, &Agent::queryAvatars);
+ // connect our slot
+ connect(_avatarIdentityTimer, &QTimer::timeout, this, &Agent::sendAvatarIdentityPacket);
+ connect(_avatarQueryTimer, &QTimer::timeout, this, &Agent::queryAvatars);
- static const int AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS = 1000;
- static const int AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS = 1000;
+ static const int AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS = 1000;
+ static const int AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS = 1000;
- // start the timers
- _avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS); // FIXME - we shouldn't really need to constantly send identity packets
- _avatarQueryTimer->start(AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS);
+ // start the timers
+ _avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS); // FIXME - we shouldn't really need to constantly send identity packets
+ _avatarQueryTimer->start(AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS);
- // tell the avatarAudioTimer to start ticking
- QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
- }
+ connect(_scriptEngine.data(), &ScriptEngine::update,
+ scriptableAvatar.data(), &ScriptableAvatar::update, Qt::QueuedConnection);
- if (!_isAvatar) {
+ // tell the avatarAudioTimer to start ticking
+ QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
+ }
+ _entityEditSender.setMyAvatar(scriptableAvatar.data());
+ } else {
if (_avatarIdentityTimer) {
_avatarIdentityTimer->stop();
delete _avatarIdentityTimer;
@@ -601,14 +631,14 @@ void Agent::setIsAvatar(bool isAvatar) {
packet->writePrimitive(KillAvatarReason::NoReason);
nodeList->sendPacket(std::move(packet), *node);
});
+
+ disconnect(_scriptEngine.data(), &ScriptEngine::update,
+ scriptableAvatar.data(), &ScriptableAvatar::update);
+
+ QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
}
- QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
-
_entityEditSender.setMyAvatar(nullptr);
- } else {
- auto scriptableAvatar = DependencyManager::get();
- _entityEditSender.setMyAvatar(scriptableAvatar.data());
}
}
@@ -724,13 +754,13 @@ void Agent::processAgentAvatarAudio() {
const int16_t* nextSoundOutput = NULL;
if (_avatarSound) {
- const QByteArray& soundByteArray = _avatarSound->getByteArray();
- nextSoundOutput = reinterpret_cast(soundByteArray.data()
+ auto audioData = _avatarSound->getAudioData();
+ nextSoundOutput = reinterpret_cast(audioData->rawData()
+ _numAvatarSoundSentBytes);
- int numAvailableBytes = (soundByteArray.size() - _numAvatarSoundSentBytes) > AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
+ int numAvailableBytes = (audioData->getNumBytes() - _numAvatarSoundSentBytes) > AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
? AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
- : soundByteArray.size() - _numAvatarSoundSentBytes;
+ : audioData->getNumBytes() - _numAvatarSoundSentBytes;
numAvailableSamples = (int16_t)numAvailableBytes / sizeof(int16_t);
@@ -743,7 +773,7 @@ void Agent::processAgentAvatarAudio() {
}
_numAvatarSoundSentBytes += numAvailableBytes;
- if (_numAvatarSoundSentBytes == soundByteArray.size()) {
+ if (_numAvatarSoundSentBytes == (int)audioData->getNumBytes()) {
// we're done with this sound object - so set our pointer back to NULL
// and our sent bytes back to zero
_avatarSound.clear();
@@ -759,7 +789,7 @@ void Agent::processAgentAvatarAudio() {
// seek past the sequence number, will be packed when destination node is known
audioPacket->seek(sizeof(quint16));
- if (silentFrame) {
+ if (silentFrame && !_flushEncoder) {
if (!_isListeningToAudioStream) {
// if we have a silent frame and we're not listening then just send nothing and break out of here
@@ -781,7 +811,7 @@ void Agent::processAgentAvatarAudio() {
// no matter what, the loudness should be set to 0
computeLoudness(nullptr, scriptedAvatar);
- } else if (nextSoundOutput) {
+ } else if (nextSoundOutput || _flushEncoder) {
// write the codec
audioPacket->writeString(_selectedCodecName);
@@ -835,8 +865,6 @@ void Agent::processAgentAvatarAudio() {
}
void Agent::aboutToFinish() {
- setIsAvatar(false);// will stop timers for sending identity packets
-
// our entity tree is going to go away so tell that to the EntityScriptingInterface
DependencyManager::get()->setEntityTree(nullptr);
@@ -855,15 +883,25 @@ void Agent::aboutToFinish() {
DependencyManager::destroy();
DependencyManager::destroy();
+ DependencyManager::destroy();
DependencyManager::destroy();
DependencyManager::destroy();
DependencyManager::destroy();
- DependencyManager::destroy();
+
+ // drop our shared pointer to the script engine, then ask ScriptEngines to shutdown scripting
+ // this ensures that the ScriptEngine goes down before ScriptEngines
+ _scriptEngine.clear();
+
+ {
+ DependencyManager::get()->shutdownScripting();
+ }
+
+ DependencyManager::destroy();
+
+ DependencyManager::destroy();
DependencyManager::destroy();
- QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
-
// cleanup codec & encoder
if (_codec && _encoder) {
_codec->releaseEncoder(_encoder);
diff --git a/assignment-client/src/Agent.h b/assignment-client/src/Agent.h
index 2b5ff51b49..7d47c8e713 100644
--- a/assignment-client/src/Agent.h
+++ b/assignment-client/src/Agent.h
@@ -21,7 +21,6 @@
#include
#include
-#include
#include
#include
#include
diff --git a/assignment-client/src/AssignmentClient.cpp b/assignment-client/src/AssignmentClient.cpp
index 426f3ce6fc..76ff5ab2ed 100644
--- a/assignment-client/src/AssignmentClient.cpp
+++ b/assignment-client/src/AssignmentClient.cpp
@@ -35,6 +35,7 @@
#include "AssignmentClientLogging.h"
#include "AssignmentFactory.h"
+#include "ResourceRequestObserver.h"
const QString ASSIGNMENT_CLIENT_TARGET_NAME = "assignment-client";
const long long ASSIGNMENT_REQUEST_INTERVAL_MSECS = 1 * 1000;
@@ -49,6 +50,7 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
DependencyManager::set();
DependencyManager::set();
DependencyManager::set();
+ DependencyManager::set();
auto addressManager = DependencyManager::set();
diff --git a/assignment-client/src/audio/AudioMixer.cpp b/assignment-client/src/audio/AudioMixer.cpp
index 0d42cc83be..d6f893c42e 100644
--- a/assignment-client/src/audio/AudioMixer.cpp
+++ b/assignment-client/src/audio/AudioMixer.cpp
@@ -38,6 +38,8 @@
#include "AvatarAudioStream.h"
#include "InjectedAudioStream.h"
+using namespace std;
+
static const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
static const int DISABLE_STATIC_JITTER_FRAMES = -1;
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 1.0f;
@@ -49,11 +51,11 @@ static const QString AUDIO_THREADING_GROUP_KEY = "audio_threading";
int AudioMixer::_numStaticJitterFrames{ DISABLE_STATIC_JITTER_FRAMES };
float AudioMixer::_noiseMutingThreshold{ DEFAULT_NOISE_MUTING_THRESHOLD };
float AudioMixer::_attenuationPerDoublingInDistance{ DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE };
-std::map> AudioMixer::_availableCodecs{ };
+map> AudioMixer::_availableCodecs{ };
QStringList AudioMixer::_codecPreferenceOrder{};
-QHash AudioMixer::_audioZones;
-QVector AudioMixer::_zoneSettings;
-QVector AudioMixer::_zoneReverbSettings;
+vector AudioMixer::_audioZones;
+vector AudioMixer::_zoneSettings;
+vector AudioMixer::_zoneReverbSettings;
AudioMixer::AudioMixer(ReceivedMessage& message) :
ThreadedAssignment(message)
@@ -67,7 +69,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
_availableCodecs.clear(); // Make sure struct is clean
auto pluginManager = DependencyManager::set();
auto codecPlugins = pluginManager->getCodecPlugins();
- std::for_each(codecPlugins.cbegin(), codecPlugins.cend(),
+ for_each(codecPlugins.cbegin(), codecPlugins.cend(),
[&](const CodecPluginPointer& codec) {
_availableCodecs[codec->getName()] = codec;
});
@@ -87,7 +89,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
PacketType::NodeIgnoreRequest,
PacketType::RadiusIgnoreRequest,
PacketType::RequestsDomainListData,
- PacketType::PerAvatarGainSet },
+ PacketType::PerAvatarGainSet,
+ PacketType::AudioSoloRequest },
this, "queueAudioPacket");
// packets whose consequences are global should be processed on the main thread
@@ -122,7 +125,7 @@ void AudioMixer::queueAudioPacket(QSharedPointer message, Share
void AudioMixer::queueReplicatedAudioPacket(QSharedPointer message) {
// make sure we have a replicated node for the original sender of the packet
auto nodeList = DependencyManager::get();
-
+
// Node ID is now part of user data, since replicated audio packets are non-sourced.
QUuid nodeID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
@@ -173,12 +176,12 @@ void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer mes
}
}
-const std::pair AudioMixer::negotiateCodec(std::vector codecs) {
+const pair AudioMixer::negotiateCodec(vector codecs) {
QString selectedCodecName;
CodecPluginPointer selectedCodec;
// read the codecs requested (by the client)
- int minPreference = std::numeric_limits::max();
+ int minPreference = numeric_limits::max();
for (auto& codec : codecs) {
if (_availableCodecs.count(codec) > 0) {
int preference = _codecPreferenceOrder.indexOf(codec);
@@ -191,20 +194,9 @@ const std::pair AudioMixer::negotiateCodec(std::vec
}
}
- return std::make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
+ return make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
}
-void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
- // enumerate the connected listeners to remove HRTF objects for the disconnected node
- auto nodeList = DependencyManager::get();
-
- nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
- auto clientData = dynamic_cast(node->getLinkedData());
- if (clientData) {
- clientData->removeNode(killedNode->getUUID());
- }
- });
-}
void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer packet, SharedNodePointer sendingNode) {
auto nodeList = DependencyManager::get();
@@ -223,32 +215,31 @@ void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer pac
}
}
+void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
+ auto clientData = dynamic_cast(killedNode->getLinkedData());
+ if (clientData) {
+ // stage the removal of all streams from this node, workers handle when preparing mixes for listeners
+ _workerSharedData.removedNodes.emplace_back(killedNode->getLocalID());
+ }
+}
+
void AudioMixer::handleKillAvatarPacket(QSharedPointer packet, SharedNodePointer sendingNode) {
auto clientData = dynamic_cast(sendingNode->getLinkedData());
if (clientData) {
clientData->removeAgentAvatarAudioStream();
- auto nodeList = DependencyManager::get();
- nodeList->eachNode([sendingNode](const SharedNodePointer& node){
- auto listenerClientData = dynamic_cast(node->getLinkedData());
- if (listenerClientData) {
- listenerClientData->removeHRTFForStream(sendingNode->getUUID());
- }
- });
+
+ // stage a removal of the avatar audio stream from this Agent, workers handle when preparing mixes for listeners
+ _workerSharedData.removedStreams.emplace_back(sendingNode->getUUID(), sendingNode->getLocalID(), QUuid());
}
}
void AudioMixer::removeHRTFsForFinishedInjector(const QUuid& streamID) {
auto injectorClientData = qobject_cast(sender());
- if (injectorClientData) {
- // enumerate the connected listeners to remove HRTF objects for the disconnected injector
- auto nodeList = DependencyManager::get();
- nodeList->eachNode([injectorClientData, &streamID](const SharedNodePointer& node){
- auto listenerClientData = dynamic_cast(node->getLinkedData());
- if (listenerClientData) {
- listenerClientData->removeHRTFForStream(injectorClientData->getNodeID(), streamID);
- }
- });
+ if (injectorClientData) {
+ // stage the removal of this stream, workers handle when preparing mixes for listeners
+ _workerSharedData.removedStreams.emplace_back(injectorClientData->getNodeID(), injectorClientData->getNodeLocalID(),
+ streamID);
}
}
@@ -285,7 +276,7 @@ void AudioMixer::sendStatsPacket() {
// timing stats
QJsonObject timingStats;
- auto addTiming = [&](Timer& timer, std::string name) {
+ auto addTiming = [&](Timer& timer, string name) {
uint64_t timing, trailing;
timer.get(timing, trailing);
timingStats[("us_per_" + name).c_str()] = (qint64)(timing / _numStatFrames);
@@ -293,12 +284,12 @@ void AudioMixer::sendStatsPacket() {
};
addTiming(_ticTiming, "tic");
+ addTiming(_checkTimeTiming, "check_time");
addTiming(_sleepTiming, "sleep");
addTiming(_frameTiming, "frame");
- addTiming(_prepareTiming, "prepare");
+ addTiming(_packetsTiming, "packets");
addTiming(_mixTiming, "mix");
addTiming(_eventsTiming, "events");
- addTiming(_packetsTiming, "packets");
#ifdef HIFI_AUDIO_MIXER_DEBUG
timingStats["ns_per_mix"] = (_stats.totalMixes > 0) ? (float)(_stats.mixTime / _stats.totalMixes) : 0;
@@ -311,11 +302,24 @@ void AudioMixer::sendStatsPacket() {
QJsonObject mixStats;
mixStats["%_hrtf_mixes"] = percentageForMixStats(_stats.hrtfRenders);
- mixStats["%_hrtf_silent_mixes"] = percentageForMixStats(_stats.hrtfSilentRenders);
- mixStats["%_hrtf_throttle_mixes"] = percentageForMixStats(_stats.hrtfThrottleRenders);
mixStats["%_manual_stereo_mixes"] = percentageForMixStats(_stats.manualStereoMixes);
mixStats["%_manual_echo_mixes"] = percentageForMixStats(_stats.manualEchoMixes);
+ mixStats["1_hrtf_renders"] = (int)(_stats.hrtfRenders / (float)_numStatFrames);
+ mixStats["1_hrtf_resets"] = (int)(_stats.hrtfResets / (float)_numStatFrames);
+ mixStats["1_hrtf_updates"] = (int)(_stats.hrtfUpdates / (float)_numStatFrames);
+
+ mixStats["2_skipped_streams"] = (int)(_stats.skipped / (float)_numStatFrames);
+ mixStats["2_inactive_streams"] = (int)(_stats.inactive / (float)_numStatFrames);
+ mixStats["2_active_streams"] = (int)(_stats.active / (float)_numStatFrames);
+
+ mixStats["3_skippped_to_active"] = (int)(_stats.skippedToActive / (float)_numStatFrames);
+ mixStats["3_skippped_to_inactive"] = (int)(_stats.skippedToInactive / (float)_numStatFrames);
+ mixStats["3_inactive_to_skippped"] = (int)(_stats.inactiveToSkipped / (float)_numStatFrames);
+ mixStats["3_inactive_to_active"] = (int)(_stats.inactiveToActive / (float)_numStatFrames);
+ mixStats["3_active_to_skippped"] = (int)(_stats.activeToSkipped / (float)_numStatFrames);
+ mixStats["3_active_to_inactive"] = (int)(_stats.activeToInactive / (float)_numStatFrames);
+
mixStats["total_mixes"] = _stats.totalMixes;
mixStats["avg_mixes_per_block"] = _stats.totalMixes / _numStatFrames;
@@ -366,7 +370,7 @@ AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
auto clientData = dynamic_cast(node->getLinkedData());
if (!clientData) {
- node->setLinkedData(std::unique_ptr { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
+ node->setLinkedData(unique_ptr { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
clientData = dynamic_cast(node->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}
@@ -393,33 +397,49 @@ void AudioMixer::start() {
// mix state
unsigned int frame = 1;
- auto frameTimestamp = p_high_resolution_clock::now();
while (!_isFinished) {
auto ticTimer = _ticTiming.timer();
- {
- auto timer = _sleepTiming.timer();
- auto frameDuration = timeFrame(frameTimestamp);
+ if (_startFrameTimestamp.time_since_epoch().count() == 0) {
+ _startFrameTimestamp = _idealFrameTimestamp = p_high_resolution_clock::now();
+ } else {
+ auto timer = _checkTimeTiming.timer();
+ auto frameDuration = timeFrame();
throttle(frameDuration, frame);
}
auto frameTimer = _frameTiming.timer();
- nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
- // prepare frames; pop off any new audio from their streams
- {
- auto prepareTimer = _prepareTiming.timer();
- std::for_each(cbegin, cend, [&](const SharedNodePointer& node) {
- _stats.sumStreams += prepareFrame(node, frame);
- });
- }
+ // process (node-isolated) audio packets across slave threads
+ {
+ auto packetsTimer = _packetsTiming.timer();
+ // first clear the concurrent vector of added streams that the slaves will add to when they process packets
+ _workerSharedData.addedStreams.clear();
+
+ nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
+ _slavePool.processPackets(cbegin, cend);
+ });
+ }
+
+ // process queued events (networking, global audio packets, &c.)
+ {
+ auto eventsTimer = _eventsTiming.timer();
+
+ // clear removed nodes and removed streams before we process events that will setup the new set
+ _workerSharedData.removedNodes.clear();
+ _workerSharedData.removedStreams.clear();
+
+ // since we're a while loop we need to yield to qt's event processing
+ QCoreApplication::processEvents();
+ }
+
+ int numToRetain = nodeList->size() * (1 - _throttlingRatio);
+ nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// mix across slave threads
- {
- auto mixTimer = _mixTiming.timer();
- _slavePool.mix(cbegin, cend, frame, _throttlingRatio);
- }
+ auto mixTimer = _mixTiming.timer();
+ _slavePool.mix(cbegin, cend, frame, numToRetain);
});
// gather stats
@@ -431,21 +451,6 @@ void AudioMixer::start() {
++frame;
++_numStatFrames;
- // process queued events (networking, global audio packets, &c.)
- {
- auto eventsTimer = _eventsTiming.timer();
-
- // since we're a while loop we need to yield to qt's event processing
- QCoreApplication::processEvents();
-
- // process (node-isolated) audio packets across slave threads
- {
- nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
- auto packetsTimer = _packetsTiming.timer();
- _slavePool.processPackets(cbegin, cend);
- });
- }
- }
if (_isFinished) {
// alert qt eventing that this is finished
@@ -455,26 +460,26 @@ void AudioMixer::start() {
}
}
-std::chrono::microseconds AudioMixer::timeFrame(p_high_resolution_clock::time_point& timestamp) {
+chrono::microseconds AudioMixer::timeFrame() {
// advance the next frame
- auto nextTimestamp = timestamp + std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
auto now = p_high_resolution_clock::now();
// compute how long the last frame took
- auto duration = std::chrono::duration_cast(now - timestamp);
+ auto duration = chrono::duration_cast(now - _startFrameTimestamp);
- // set the new frame timestamp
- timestamp = std::max(now, nextTimestamp);
+ _idealFrameTimestamp += chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
- // sleep until the next frame should start
- // WIN32 sleep_until is broken until VS2015 Update 2
- // instead, std::max (above) guarantees that timestamp >= now, so we can sleep_for
- std::this_thread::sleep_for(timestamp - now);
+ {
+ auto timer = _sleepTiming.timer();
+ this_thread::sleep_until(_idealFrameTimestamp);
+ }
+
+ _startFrameTimestamp = p_high_resolution_clock::now();
return duration;
}
-void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
+void AudioMixer::throttle(chrono::microseconds duration, int frame) {
// throttle using a modified proportional-integral controller
const float FRAME_TIME = 10000.0f;
float mixRatio = duration.count() / FRAME_TIME;
@@ -508,28 +513,19 @@ void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
if (_trailingMixRatio > TARGET) {
int proportionalTerm = 1 + (_trailingMixRatio - TARGET) / 0.1f;
_throttlingRatio += THROTTLE_RATE * proportionalTerm;
- _throttlingRatio = std::min(_throttlingRatio, 1.0f);
+ _throttlingRatio = min(_throttlingRatio, 1.0f);
qCDebug(audio) << "audio-mixer is struggling (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
} else if (_throttlingRatio > 0.0f && _trailingMixRatio <= BACKOFF_TARGET) {
int proportionalTerm = 1 + (TARGET - _trailingMixRatio) / 0.2f;
_throttlingRatio -= BACKOFF_RATE * proportionalTerm;
- _throttlingRatio = std::max(_throttlingRatio, 0.0f);
+ _throttlingRatio = max(_throttlingRatio, 0.0f);
qCDebug(audio) << "audio-mixer is recovering (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
}
}
}
-int AudioMixer::prepareFrame(const SharedNodePointer& node, unsigned int frame) {
- AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
- if (data == nullptr) {
- return 0;
- }
-
- return data->checkBuffersBeforeFrameSend();
-}
-
void AudioMixer::clearDomainSettings() {
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
_attenuationPerDoublingInDistance = DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE;
@@ -661,8 +657,11 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
const QString Y_MAX = "y_max";
const QString Z_MIN = "z_min";
const QString Z_MAX = "z_max";
- foreach (const QString& zone, zones.keys()) {
- QJsonObject zoneObject = zones[zone].toObject();
+
+ auto zoneNames = zones.keys();
+ _audioZones.reserve(zoneNames.length());
+ foreach (const QString& zoneName, zoneNames) {
+ QJsonObject zoneObject = zones[zoneName].toObject();
if (zoneObject.contains(X_MIN) && zoneObject.contains(X_MAX) && zoneObject.contains(Y_MIN) &&
zoneObject.contains(Y_MAX) && zoneObject.contains(Z_MIN) && zoneObject.contains(Z_MAX)) {
@@ -686,8 +685,8 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
glm::vec3 corner(xMin, yMin, zMin);
glm::vec3 dimensions(xMax - xMin, yMax - yMin, zMax - zMin);
AABox zoneAABox(corner, dimensions);
- _audioZones.insert(zone, zoneAABox);
- qCDebug(audio) << "Added zone:" << zone << "(corner:" << corner << ", dimensions:" << dimensions << ")";
+ _audioZones.push_back({ zoneName, zoneAABox });
+ qCDebug(audio) << "Added zone:" << zoneName << "(corner:" << corner << ", dimensions:" << dimensions << ")";
}
}
}
@@ -707,18 +706,28 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
coefficientObject.contains(LISTENER) &&
coefficientObject.contains(COEFFICIENT)) {
- ZoneSettings settings;
+ auto itSource = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
+ return description.name == coefficientObject.value(SOURCE).toString();
+ });
+ auto itListener = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
+ return description.name == coefficientObject.value(LISTENER).toString();
+ });
bool ok;
- settings.source = coefficientObject.value(SOURCE).toString();
- settings.listener = coefficientObject.value(LISTENER).toString();
- settings.coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
+ float coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
- if (ok && settings.coefficient >= 0.0f && settings.coefficient <= 1.0f &&
- _audioZones.contains(settings.source) && _audioZones.contains(settings.listener)) {
+
+ if (ok && coefficient >= 0.0f && coefficient <= 1.0f &&
+ itSource != end(_audioZones) &&
+ itListener != end(_audioZones)) {
+
+ ZoneSettings settings;
+ settings.source = itSource - begin(_audioZones);
+ settings.listener = itListener - begin(_audioZones);
+ settings.coefficient = coefficient;
_zoneSettings.push_back(settings);
- qCDebug(audio) << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
+ qCDebug(audio) << "Added Coefficient:" << itSource->name << itListener->name << settings.coefficient;
}
}
}
@@ -739,19 +748,21 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
reverbObject.contains(WET_LEVEL)) {
bool okReverbTime, okWetLevel;
- QString zone = reverbObject.value(ZONE).toString();
+ auto itZone = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
+ return description.name == reverbObject.value(ZONE).toString();
+ });
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);
- if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
+ if (okReverbTime && okWetLevel && itZone != end(_audioZones)) {
ReverbSettings settings;
- settings.zone = zone;
+ settings.zone = itZone - begin(_audioZones);
settings.reverbTime = reverbTime;
settings.wetLevel = wetLevel;
_zoneReverbSettings.push_back(settings);
- qCDebug(audio) << "Added Reverb:" << zone << reverbTime << wetLevel;
+ qCDebug(audio) << "Added Reverb:" << itZone->name << reverbTime << wetLevel;
}
}
}
@@ -764,7 +775,7 @@ AudioMixer::Timer::Timing::Timing(uint64_t& sum) : _sum(sum) {
}
AudioMixer::Timer::Timing::~Timing() {
- _sum += std::chrono::duration_cast(p_high_resolution_clock::now() - _timing).count();
+ _sum += chrono::duration_cast(p_high_resolution_clock::now() - _timing).count();
}
void AudioMixer::Timer::get(uint64_t& timing, uint64_t& trailing) {
diff --git a/assignment-client/src/audio/AudioMixer.h b/assignment-client/src/audio/AudioMixer.h
index f9eb18da6d..b8ea0d5c58 100644
--- a/assignment-client/src/audio/AudioMixer.h
+++ b/assignment-client/src/audio/AudioMixer.h
@@ -34,13 +34,18 @@ class AudioMixer : public ThreadedAssignment {
public:
AudioMixer(ReceivedMessage& message);
+
+ struct ZoneDescription {
+ QString name;
+ AABox area;
+ };
struct ZoneSettings {
- QString source;
- QString listener;
+ int source;
+ int listener;
float coefficient;
};
struct ReverbSettings {
- QString zone;
+ int zone;
float reverbTime;
float wetLevel;
};
@@ -48,9 +53,9 @@ public:
static int getStaticJitterFrames() { return _numStaticJitterFrames; }
static bool shouldMute(float quietestFrame) { return quietestFrame > _noiseMutingThreshold; }
static float getAttenuationPerDoublingInDistance() { return _attenuationPerDoublingInDistance; }
- static const QHash& getAudioZones() { return _audioZones; }
- static const QVector& getZoneSettings() { return _zoneSettings; }
- static const QVector& getReverbSettings() { return _zoneReverbSettings; }
+ static const std::vector& getAudioZones() { return _audioZones; }
+ static const std::vector& getZoneSettings() { return _zoneSettings; }
+ static const std::vector& getReverbSettings() { return _zoneReverbSettings; }
static const std::pair negotiateCodec(std::vector codecs);
static bool shouldReplicateTo(const Node& from, const Node& to) {
@@ -79,11 +84,8 @@ private slots:
private:
// mixing helpers
- std::chrono::microseconds timeFrame(p_high_resolution_clock::time_point& timestamp);
+ std::chrono::microseconds timeFrame();
void throttle(std::chrono::microseconds frameDuration, int frame);
- // pop a frame from any streams on the node
- // returns the number of available streams
- int prepareFrame(const SharedNodePointer& node, unsigned int frame);
AudioMixerClientData* getOrCreateClientData(Node* node);
@@ -92,6 +94,9 @@ private:
void parseSettingsObject(const QJsonObject& settingsObject);
void clearDomainSettings();
+ p_high_resolution_clock::time_point _idealFrameTimestamp;
+ p_high_resolution_clock::time_point _startFrameTimestamp;
+
float _trailingMixRatio { 0.0f };
float _throttlingRatio { 0.0f };
@@ -100,7 +105,7 @@ private:
int _numStatFrames { 0 };
AudioMixerStats _stats;
- AudioMixerSlavePool _slavePool;
+ AudioMixerSlavePool _slavePool { _workerSharedData };
class Timer {
public:
@@ -123,7 +128,9 @@ private:
uint64_t _history[TIMER_TRAILING_SECONDS] {};
int _index { 0 };
};
+
Timer _ticTiming;
+ Timer _checkTimeTiming;
Timer _sleepTiming;
Timer _frameTiming;
Timer _prepareTiming;
@@ -136,10 +143,13 @@ private:
static float _attenuationPerDoublingInDistance;
static std::map _availableCodecs;
static QStringList _codecPreferenceOrder;
- static QHash _audioZones;
- static QVector _zoneSettings;
- static QVector _zoneReverbSettings;
+
+ static std::vector _audioZones;
+ static std::vector _zoneSettings;
+ static std::vector _zoneReverbSettings;
+
+ AudioMixerSlave::SharedData _workerSharedData;
};
#endif // hifi_AudioMixer_h
diff --git a/assignment-client/src/audio/AudioMixerClientData.cpp b/assignment-client/src/audio/AudioMixerClientData.cpp
index 07cc5493b0..9a78ba31a2 100644
--- a/assignment-client/src/audio/AudioMixerClientData.cpp
+++ b/assignment-client/src/audio/AudioMixerClientData.cpp
@@ -13,6 +13,8 @@
#include
+#include
+
#include
#include
@@ -28,7 +30,6 @@
AudioMixerClientData::AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID) :
NodeData(nodeID, nodeLocalID),
audioLimiter(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO),
- _ignoreZone(*this),
_outgoingMixedAudioSequenceNumber(0),
_downstreamAudioStreamStats()
{
@@ -56,7 +57,7 @@ void AudioMixerClientData::queuePacket(QSharedPointer message,
_packetQueue.push(message);
}
-void AudioMixerClientData::processPackets() {
+int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
SharedNodePointer node = _packetQueue.node;
assert(_packetQueue.empty() || node);
_packetQueue.node.clear();
@@ -69,22 +70,17 @@ void AudioMixerClientData::processPackets() {
case PacketType::MicrophoneAudioWithEcho:
case PacketType::InjectAudio:
case PacketType::SilentAudioFrame: {
-
if (node->isUpstream()) {
setupCodecForReplicatedAgent(packet);
}
- QMutexLocker lock(&getMutex());
- parseData(*packet);
+ processStreamPacket(*packet, addedStreams);
optionallyReplicatePacket(*packet, *node);
-
break;
}
case PacketType::AudioStreamStats: {
- QMutexLocker lock(&getMutex());
parseData(*packet);
-
break;
}
case PacketType::NegotiateAudioFormat:
@@ -102,6 +98,9 @@ void AudioMixerClientData::processPackets() {
case PacketType::RadiusIgnoreRequest:
parseRadiusIgnoreRequest(packet, node);
break;
+ case PacketType::AudioSoloRequest:
+ parseSoloRequest(packet, node);
+ break;
default:
Q_UNREACHABLE();
}
@@ -109,6 +108,10 @@ void AudioMixerClientData::processPackets() {
_packetQueue.pop();
}
assert(_packetQueue.empty());
+
+ // now that we have processed all packets for this frame
+ // we can prepare the sources from this client to be ready for mixing
+ return checkBuffersBeforeFrameSend();
}
bool isReplicatedPacket(PacketType packetType) {
@@ -186,63 +189,155 @@ void AudioMixerClientData::parseRequestsDomainListData(ReceivedMessage& message)
void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node) {
QUuid uuid = node->getUUID();
// parse the UUID from the packet
- QUuid avatarUuid = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
+ QUuid avatarUUID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
uint8_t packedGain;
message.readPrimitive(&packedGain);
float gain = unpackFloatGainFromByte(packedGain);
- if (avatarUuid.isNull()) {
+ if (avatarUUID.isNull()) {
// set the MASTER avatar gain
setMasterAvatarGain(gain);
qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
} else {
// set the per-source avatar gain
- hrtfForStream(avatarUuid, QUuid()).setGainAdjustment(gain);
- qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUuid << "] to " << gain;
+ setGainForAvatar(avatarUUID, gain);
+ qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to " << gain;
+ }
+}
+
+void AudioMixerClientData::setGainForAvatar(QUuid nodeID, float gain) {
+ auto it = std::find_if(_streams.active.cbegin(), _streams.active.cend(), [nodeID](const MixableStream& mixableStream){
+ return mixableStream.nodeStreamID.nodeID == nodeID && mixableStream.nodeStreamID.streamID.isNull();
+ });
+
+ if (it != _streams.active.cend()) {
+ it->hrtf->setGainAdjustment(gain);
}
}
void AudioMixerClientData::parseNodeIgnoreRequest(QSharedPointer message, const SharedNodePointer& node) {
- node->parseIgnoreRequestMessage(message);
+ auto ignoredNodesPair = node->parseIgnoreRequestMessage(message);
+
+ // we have a vector of ignored or unignored node UUIDs - update our internal data structures so that
+ // streams can be included or excluded next time a mix is being created
+ if (ignoredNodesPair.second) {
+ // we have newly ignored nodes, add them to our vector
+ _newIgnoredNodeIDs.insert(std::end(_newIgnoredNodeIDs),
+ std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
+ } else {
+ // we have newly unignored nodes, add them to our vector
+ _newUnignoredNodeIDs.insert(std::end(_newUnignoredNodeIDs),
+ std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
+ }
+
+ auto nodeList = DependencyManager::get();
+ for (auto& nodeID : ignoredNodesPair.first) {
+ auto otherNode = nodeList->nodeWithUUID(nodeID);
+ if (otherNode) {
+ auto otherNodeMixerClientData = static_cast(otherNode->getLinkedData());
+ if (otherNodeMixerClientData) {
+ if (ignoredNodesPair.second) {
+ otherNodeMixerClientData->ignoredByNode(getNodeID());
+ } else {
+ otherNodeMixerClientData->unignoredByNode(getNodeID());
+ }
+ }
+ }
+ }
+}
+
+void AudioMixerClientData::ignoredByNode(QUuid nodeID) {
+ // first add this ID to the concurrent vector for newly ignoring nodes
+ _newIgnoringNodeIDs.push_back(nodeID);
+
+ // now take a lock on the consistent vector of ignoring nodes and make sure this node is in it
+ std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
+ if (std::find(_ignoringNodeIDs.begin(), _ignoringNodeIDs.end(), nodeID) == _ignoringNodeIDs.end()) {
+ _ignoringNodeIDs.push_back(nodeID);
+ }
+}
+
+void AudioMixerClientData::unignoredByNode(QUuid nodeID) {
+ // first add this ID to the concurrent vector for newly unignoring nodes
+ _newUnignoringNodeIDs.push_back(nodeID);
+
+ // now take a lock on the consistent vector of ignoring nodes and make sure this node isn't in it
+ std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
+ auto it = _ignoringNodeIDs.begin();
+ while (it != _ignoringNodeIDs.end()) {
+ if (*it == nodeID) {
+ it = _ignoringNodeIDs.erase(it);
+ } else {
+ ++it;
+ }
+ }
+}
+
+void AudioMixerClientData::clearStagedIgnoreChanges() {
+ _newIgnoredNodeIDs.clear();
+ _newUnignoredNodeIDs.clear();
+ _newIgnoringNodeIDs.clear();
+ _newUnignoringNodeIDs.clear();
}
void AudioMixerClientData::parseRadiusIgnoreRequest(QSharedPointer message, const SharedNodePointer& node) {
- node->parseIgnoreRadiusRequestMessage(message);
+ bool enabled;
+ message->readPrimitive(&enabled);
+
+ _isIgnoreRadiusEnabled = enabled;
+
+ auto avatarAudioStream = getAvatarAudioStream();
+
+ // if we have an avatar audio stream, tell it wether its ignore box should be enabled or disabled
+ if (avatarAudioStream) {
+ if (_isIgnoreRadiusEnabled) {
+ avatarAudioStream->enableIgnoreBox();
+ } else {
+ avatarAudioStream->disableIgnoreBox();
+ }
+ }
+}
+
+
+void AudioMixerClientData::parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
+
+ uint8_t addToSolo;
+ message->readPrimitive(&addToSolo);
+
+ while (message->getBytesLeftToRead()) {
+ // parse out the UUID being soloed from the packet
+ QUuid soloedUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
+
+ if (addToSolo) {
+ _soloedNodes.push_back(soloedUUID);
+ } else {
+ auto it = std::remove(std::begin(_soloedNodes), std::end(_soloedNodes), soloedUUID);
+ _soloedNodes.erase(it, std::end(_soloedNodes));
+ }
+ }
+}
AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
- QReadLocker readLocker { &_streamsLock };
+ auto it = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
+ return stream->getStreamIdentifier().isNull();
+ });
- auto it = _audioStreams.find(QUuid());
if (it != _audioStreams.end()) {
- return dynamic_cast<AvatarAudioStream*>(it->second.get());
+ return dynamic_cast<AvatarAudioStream*>(it->get());
}
// no mic stream found - return NULL
return NULL;
}
-void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID) {
- auto it = _nodeSourcesHRTFMap.find(nodeID);
- if (it != _nodeSourcesHRTFMap.end()) {
- // erase the stream with the given ID from the given node
- it->second.erase(streamID);
-
- // is the map for this node now empty?
- // if so we can remove it
- if (it->second.size() == 0) {
- _nodeSourcesHRTFMap.erase(it);
- }
- }
-}
-
void AudioMixerClientData::removeAgentAvatarAudioStream() {
- QWriteLocker writeLocker { &_streamsLock };
- auto it = _audioStreams.find(QUuid());
+ auto it = std::remove_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
+ return stream->getStreamIdentifier().isNull();
+ });
+
if (it != _audioStreams.end()) {
_audioStreams.erase(it);
}
- writeLocker.unlock();
}
int AudioMixerClientData::parseData(ReceivedMessage& message) {
@@ -252,128 +347,186 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
// skip over header, appendFlag, and num stats packed
message.seek(sizeof(quint8) + sizeof(quint16));
+ if (message.getBytesLeftToRead() != sizeof(AudioStreamStats)) {
+ qWarning() << "Received AudioStreamStats of wrong size" << message.getBytesLeftToRead()
+ << "instead of" << sizeof(AudioStreamStats) << "from"
+ << message.getSourceID() << "at" << message.getSenderSockAddr();
+
+ return message.getPosition();
+ }
+
// read the downstream audio stream stats
message.readPrimitive(&_downstreamAudioStreamStats);
return message.getPosition();
-
- } else {
- SharedStreamPointer matchingStream;
-
- bool isMicStream = false;
-
- if (packetType == PacketType::MicrophoneAudioWithEcho
- || packetType == PacketType::ReplicatedMicrophoneAudioWithEcho
- || packetType == PacketType::MicrophoneAudioNoEcho
- || packetType == PacketType::ReplicatedMicrophoneAudioNoEcho
- || packetType == PacketType::SilentAudioFrame
- || packetType == PacketType::ReplicatedSilentAudioFrame) {
-
- QWriteLocker writeLocker { &_streamsLock };
-
- auto micStreamIt = _audioStreams.find(QUuid());
- if (micStreamIt == _audioStreams.end()) {
- // we don't have a mic stream yet, so add it
-
- // hop past the sequence number that leads the packet
- message.seek(sizeof(quint16));
-
- // pull the codec string from the packet
- auto codecString = message.readString();
-
- // determine if the stream is stereo or not
- bool isStereo;
- if (packetType == PacketType::SilentAudioFrame
- || packetType == PacketType::ReplicatedSilentAudioFrame) {
- quint16 numSilentSamples;
- message.readPrimitive(&numSilentSamples);
- isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
- } else {
- quint8 channelFlag;
- message.readPrimitive(&channelFlag);
- isStereo = channelFlag == 1;
- }
-
- auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
- avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
- qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
-
- connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
- this, &AudioMixerClientData::handleMismatchAudioFormat);
-
- auto emplaced = _audioStreams.emplace(
- QUuid(),
- std::unique_ptr<PositionalAudioStream> { avatarAudioStream }
- );
-
- micStreamIt = emplaced.first;
- }
-
- matchingStream = micStreamIt->second;
-
- writeLocker.unlock();
-
- isMicStream = true;
- } else if (packetType == PacketType::InjectAudio
- || packetType == PacketType::ReplicatedInjectAudio) {
- // this is injected audio
- // grab the stream identifier for this injected audio
- message.seek(sizeof(quint16));
-
- QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
-
- bool isStereo;
- message.readPrimitive(&isStereo);
-
- QWriteLocker writeLock { &_streamsLock };
-
- auto streamIt = _audioStreams.find(streamIdentifier);
-
- if (streamIt == _audioStreams.end()) {
- // we don't have this injected stream yet, so add it
- auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
-
-#if INJECTORS_SUPPORT_CODECS
- injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
- qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
-#endif
-
- auto emplaced = _audioStreams.emplace(
- streamIdentifier,
- std::unique_ptr<PositionalAudioStream> { injectorStream }
- );
-
- streamIt = emplaced.first;
- }
-
- matchingStream = streamIt->second;
-
- writeLock.unlock();
- }
-
- // seek to the beginning of the packet so that the next reader is in the right spot
- message.seek(0);
-
- // check the overflow count before we parse data
- auto overflowBefore = matchingStream->getOverflowCount();
- auto parseResult = matchingStream->parseData(message);
-
- if (matchingStream->getOverflowCount() > overflowBefore) {
- qCDebug(audio) << "Just overflowed on stream from" << message.getSourceID() << "at" << message.getSenderSockAddr();
- qCDebug(audio) << "This stream is for" << (isMicStream ? "microphone audio" : "injected audio");
- }
-
- return parseResult;
}
+
return 0;
}
-int AudioMixerClientData::checkBuffersBeforeFrameSend() {
- QWriteLocker writeLocker { &_streamsLock };
+bool AudioMixerClientData::containsValidPosition(ReceivedMessage& message) const {
+ static const int SEQUENCE_NUMBER_BYTES = sizeof(quint16);
+ auto posBefore = message.getPosition();
+
+ message.seek(SEQUENCE_NUMBER_BYTES);
+
+ // skip over the codec string
+ message.readString();
+
+ switch (message.getType()) {
+ case PacketType::MicrophoneAudioNoEcho:
+ case PacketType::MicrophoneAudioWithEcho: {
+ // skip over the stereo flag
+ message.seek(message.getPosition() + sizeof(ChannelFlag));
+ break;
+ }
+ case PacketType::SilentAudioFrame: {
+ // skip the number of silent samples
+ message.seek(message.getPosition() + sizeof(SilentSamplesBytes));
+ break;
+ }
+ case PacketType::InjectAudio: {
+ // skip the stream ID, stereo flag, and loopback flag
+ message.seek(message.getPosition() + NUM_STREAM_ID_BYTES + sizeof(ChannelFlag) + sizeof(LoopbackFlag));
+ break;
+ }
+ default:
+ Q_UNREACHABLE();
+ break;
+ }
+
+ glm::vec3 peekPosition;
+ message.readPrimitive(&peekPosition);
+
+ // reset the position the message was at before we were called
+ message.seek(posBefore);
+
+ if (glm::any(glm::isnan(peekPosition))) {
+ return false;
+ }
+
+ return true;
+}
+
+void AudioMixerClientData::processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams &addedStreams) {
+
+ if (!containsValidPosition(message)) {
+ qDebug() << "Refusing to process audio stream from" << message.getSourceID() << "with invalid position";
+ return;
+ }
+
+ SharedStreamPointer matchingStream;
+
+ auto packetType = message.getType();
+ bool newStream = false;
+
+ if (packetType == PacketType::MicrophoneAudioWithEcho
+ || packetType == PacketType::MicrophoneAudioNoEcho
+ || packetType == PacketType::SilentAudioFrame) {
+
+ auto micStreamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
+ return stream->getStreamIdentifier().isNull();
+ });
+
+ if (micStreamIt == _audioStreams.end()) {
+ // we don't have a mic stream yet, so add it
+
+ // hop past the sequence number that leads the packet
+ message.seek(sizeof(StreamSequenceNumber));
+
+ // pull the codec string from the packet
+ auto codecString = message.readString();
+
+ // determine if the stream is stereo or not
+ bool isStereo;
+ if (packetType == PacketType::SilentAudioFrame || packetType == PacketType::ReplicatedSilentAudioFrame) {
+ SilentSamplesBytes numSilentSamples;
+ message.readPrimitive(&numSilentSamples);
+ isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
+ } else {
+ ChannelFlag channelFlag;
+ message.readPrimitive(&channelFlag);
+ isStereo = channelFlag == 1;
+ }
+
+ auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
+ avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
+
+ if (_isIgnoreRadiusEnabled) {
+ avatarAudioStream->enableIgnoreBox();
+ } else {
+ avatarAudioStream->disableIgnoreBox();
+ }
+
+ qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
+
+ connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
+ this, &AudioMixerClientData::handleMismatchAudioFormat);
+
+ matchingStream = SharedStreamPointer(avatarAudioStream);
+ _audioStreams.push_back(matchingStream);
+
+ newStream = true;
+ } else {
+ matchingStream = *micStreamIt;
+ }
+ } else if (packetType == PacketType::InjectAudio) {
+
+ // this is injected audio
+ // skip the sequence number and codec string and grab the stream identifier for this injected audio
+ message.seek(sizeof(StreamSequenceNumber));
+ message.readString();
+
+ QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
+
+ auto streamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [&streamIdentifier](const SharedStreamPointer& stream) {
+ return stream->getStreamIdentifier() == streamIdentifier;
+ });
+
+ if (streamIt == _audioStreams.end()) {
+ bool isStereo;
+ message.readPrimitive(&isStereo);
+
+ // we don't have this injected stream yet, so add it
+ auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
+
+#if INJECTORS_SUPPORT_CODECS
+ injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
+ qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
+#endif
+
+ matchingStream = SharedStreamPointer(injectorStream);
+ _audioStreams.push_back(matchingStream);
+
+ newStream = true;
+ } else {
+ matchingStream = *streamIt;
+ }
+ }
+
+ // seek to the beginning of the packet so that the next reader is in the right spot
+ message.seek(0);
+
+ // check the overflow count before we parse data
+ auto overflowBefore = matchingStream->getOverflowCount();
+ matchingStream->parseData(message);
+
+ if (matchingStream->getOverflowCount() > overflowBefore) {
+ qCDebug(audio) << "Just overflowed on stream" << matchingStream->getStreamIdentifier()
+ << "from" << message.getSourceID();
+ }
+
+ if (newStream) {
+ // whenever a stream is added, push it to the concurrent vector of streams added this frame
+ addedStreams.push_back(AddedStream(getNodeID(), getNodeLocalID(), matchingStream->getStreamIdentifier(), matchingStream.get()));
+ }
+}
+
+int AudioMixerClientData::checkBuffersBeforeFrameSend() {
auto it = _audioStreams.begin();
while (it != _audioStreams.end()) {
- SharedStreamPointer stream = it->second;
+ SharedStreamPointer stream = *it;
if (stream->popFrames(1, true) > 0) {
stream->updateLastPopOutputLoudnessAndTrailingLoudness();
@@ -388,7 +541,7 @@ int AudioMixerClientData::checkBuffersBeforeFrameSend() {
// this is an inactive injector, pull it from our streams
// first emit that it is finished so that the HRTF objects for this source can be cleaned up
- emit injectorStreamFinished(it->second->getStreamIdentifier());
+ emit injectorStreamFinished(stream->getStreamIdentifier());
// erase the stream to drop our ref to the shared pointer and remove it
it = _audioStreams.erase(it);
@@ -441,7 +594,7 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
// pack the calculated number of stream stats
for (int i = 0; i < numStreamStatsToPack; i++) {
- PositionalAudioStream* stream = it->second.get();
+ PositionalAudioStream* stream = it->get();
stream->perSecondCallbackForUpdatingStats();
@@ -513,12 +666,12 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
QJsonArray injectorArray;
auto streamsCopy = getAudioStreams();
for (auto& injectorPair : streamsCopy) {
- if (injectorPair.second->getType() == PositionalAudioStream::Injector) {
+ if (injectorPair->getType() == PositionalAudioStream::Injector) {
QJsonObject upstreamStats;
- AudioStreamStats streamStats = injectorPair.second->getAudioStreamStats();
+ AudioStreamStats streamStats = injectorPair->getAudioStreamStats();
upstreamStats["inj.desired"] = streamStats._desiredJitterBufferFrames;
- upstreamStats["desired_calc"] = injectorPair.second->getCalculatedJitterBufferFrames();
+ upstreamStats["desired_calc"] = injectorPair->getCalculatedJitterBufferFrames();
upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
upstreamStats["available"] = (double) streamStats._framesAvailable;
upstreamStats["unplayed"] = (double) streamStats._unplayedMs;
@@ -609,99 +762,6 @@ void AudioMixerClientData::cleanupCodec() {
}
}
-AudioMixerClientData::IgnoreZone& AudioMixerClientData::IgnoreZoneMemo::get(unsigned int frame) {
- // check for a memoized zone
- if (frame != _frame.load(std::memory_order_acquire)) {
- AvatarAudioStream* stream = _data.getAvatarAudioStream();
-
- // get the initial dimensions from the stream
- glm::vec3 corner = stream ? stream->getAvatarBoundingBoxCorner() : glm::vec3(0);
- glm::vec3 scale = stream ? stream->getAvatarBoundingBoxScale() : glm::vec3(0);
-
- // enforce a minimum scale
- static const glm::vec3 MIN_IGNORE_BOX_SCALE = glm::vec3(0.3f, 1.3f, 0.3f);
- if (glm::any(glm::lessThan(scale, MIN_IGNORE_BOX_SCALE))) {
- scale = MIN_IGNORE_BOX_SCALE;
- }
-
- // (this is arbitrary number determined empirically for comfort)
- const float IGNORE_BOX_SCALE_FACTOR = 2.4f;
- scale *= IGNORE_BOX_SCALE_FACTOR;
-
- // create the box (we use a box for the zone for convenience)
- AABox box(corner, scale);
-
- // update the memoized zone
- // This may be called by multiple threads concurrently,
- // so take a lock and only update the memo if this call is first.
- // This prevents concurrent updates from invalidating the returned reference
- // (contingent on the preconditions listed in the header).
- std::lock_guard<std::mutex> lock(_mutex);
- if (frame != _frame.load(std::memory_order_acquire)) {
- _zone = box;
- unsigned int oldFrame = _frame.exchange(frame, std::memory_order_release);
- Q_UNUSED(oldFrame);
- }
- }
-
- return _zone;
-}
-
-void AudioMixerClientData::IgnoreNodeCache::cache(bool shouldIgnore) {
- if (!_isCached) {
- _shouldIgnore = shouldIgnore;
- _isCached = true;
- }
-}
-
-bool AudioMixerClientData::IgnoreNodeCache::isCached() {
- return _isCached;
-}
-
-bool AudioMixerClientData::IgnoreNodeCache::shouldIgnore() {
- bool ignore = _shouldIgnore;
- _isCached = false;
- return ignore;
-}
-
-bool AudioMixerClientData::shouldIgnore(const SharedNodePointer self, const SharedNodePointer node, unsigned int frame) {
- // this is symmetric over self / node; if computed, it is cached in the other
-
- // check the cache to avoid computation
- auto& cache = _nodeSourcesIgnoreMap[node->getUUID()];
- if (cache.isCached()) {
- return cache.shouldIgnore();
- }
-
- AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
- if (!nodeData) {
- return false;
- }
-
- // compute shouldIgnore
- bool shouldIgnore = true;
- if ( // the nodes are not ignoring each other explicitly (or are but get data regardless)
- (!self->isIgnoringNodeWithID(node->getUUID()) ||
- (nodeData->getRequestsDomainListData() && node->getCanKick())) &&
- (!node->isIgnoringNodeWithID(self->getUUID()) ||
- (getRequestsDomainListData() && self->getCanKick()))) {
-
- // if either node is enabling an ignore radius, check their proximity
- if ((self->isIgnoreRadiusEnabled() || node->isIgnoreRadiusEnabled())) {
- auto& zone = _ignoreZone.get(frame);
- auto& nodeZone = nodeData->_ignoreZone.get(frame);
- shouldIgnore = zone.touches(nodeZone);
- } else {
- shouldIgnore = false;
- }
- }
-
- // cache in node
- nodeData->_nodeSourcesIgnoreMap[self->getUUID()].cache(shouldIgnore);
-
- return shouldIgnore;
-}
-
void AudioMixerClientData::setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message) {
// hop past the sequence number that leads the packet
message->seek(sizeof(quint16));
diff --git a/assignment-client/src/audio/AudioMixerClientData.h b/assignment-client/src/audio/AudioMixerClientData.h
index 82bdc0e5c5..653749f619 100644
--- a/assignment-client/src/audio/AudioMixerClientData.h
+++ b/assignment-client/src/audio/AudioMixerClientData.h
@@ -14,6 +14,8 @@
#include
+#include <tbb/concurrent_vector.h>
+
#include
#include
@@ -30,44 +32,40 @@
class AudioMixerClientData : public NodeData {
Q_OBJECT
public:
+ struct AddedStream {
+ NodeIDStreamID nodeIDStreamID;
+ PositionalAudioStream* positionalStream;
+
+ AddedStream(QUuid nodeID, Node::LocalID localNodeID,
+ StreamID streamID, PositionalAudioStream* positionalStream) :
+ nodeIDStreamID(nodeID, localNodeID, streamID), positionalStream(positionalStream) {};
+ };
+
+ using ConcurrentAddedStreams = tbb::concurrent_vector<AddedStream>;
+
AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID);
~AudioMixerClientData();
using SharedStreamPointer = std::shared_ptr<PositionalAudioStream>;
- using AudioStreamMap = std::unordered_map<QUuid, SharedStreamPointer>;
+ using AudioStreamVector = std::vector<SharedStreamPointer>;
void queuePacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer node);
- void processPackets();
+ int processPackets(ConcurrentAddedStreams& addedStreams); // returns the number of available streams this frame
- // locks the mutex to make a copy
- AudioStreamMap getAudioStreams() { QReadLocker readLock { &_streamsLock }; return _audioStreams; }
+ AudioStreamVector& getAudioStreams() { return _audioStreams; }
AvatarAudioStream* getAvatarAudioStream();
- // returns whether self (this data's node) should ignore node, memoized by frame
- // precondition: frame is increasing after first call (including overflow wrap)
- bool shouldIgnore(SharedNodePointer self, SharedNodePointer node, unsigned int frame);
-
- // the following methods should be called from the AudioMixer assignment thread ONLY
- // they are not thread-safe
-
- // returns a new or existing HRTF object for the given stream from the given node
- AudioHRTF& hrtfForStream(const QUuid& nodeID, const QUuid& streamID = QUuid()) { return _nodeSourcesHRTFMap[nodeID][streamID]; }
-
- // removes an AudioHRTF object for a given stream
- void removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID = QUuid());
-
- // remove all sources and data from this node
- void removeNode(const QUuid& nodeID) { _nodeSourcesIgnoreMap.unsafe_erase(nodeID); _nodeSourcesHRTFMap.erase(nodeID); }
-
void removeAgentAvatarAudioStream();
// packet parsers
int parseData(ReceivedMessage& message) override;
+ void processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams& addedStreams);
void negotiateAudioFormat(ReceivedMessage& message, const SharedNodePointer& node);
void parseRequestsDomainListData(ReceivedMessage& message);
void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
void parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
void parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
+ void parseSoloRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node);
// attempt to pop a frame from each audio stream, and return the number of streams from this client
int checkBuffersBeforeFrameSend();
@@ -108,11 +106,59 @@ public:
bool shouldMuteClient() { return _shouldMuteClient; }
void setShouldMuteClient(bool shouldMuteClient) { _shouldMuteClient = shouldMuteClient; }
glm::vec3 getPosition() { return getAvatarAudioStream() ? getAvatarAudioStream()->getPosition() : glm::vec3(0); }
- bool getRequestsDomainListData() { return _requestsDomainListData; }
+ bool getRequestsDomainListData() const { return _requestsDomainListData; }
void setRequestsDomainListData(bool requesting) { _requestsDomainListData = requesting; }
void setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message);
+ struct MixableStream {
+ float approximateVolume { 0.0f };
+ NodeIDStreamID nodeStreamID;
+ std::unique_ptr<AudioHRTF> hrtf;
+ PositionalAudioStream* positionalStream;
+ bool ignoredByListener { false };
+ bool ignoringListener { false };
+
+ MixableStream(NodeIDStreamID nodeIDStreamID, PositionalAudioStream* positionalStream) :
+ nodeStreamID(nodeIDStreamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
+ MixableStream(QUuid nodeID, Node::LocalID localNodeID, StreamID streamID, PositionalAudioStream* positionalStream) :
+ nodeStreamID(nodeID, localNodeID, streamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
+ };
+
+ using MixableStreamsVector = std::vector<MixableStream>;
+ struct Streams {
+ MixableStreamsVector active;
+ MixableStreamsVector inactive;
+ MixableStreamsVector skipped;
+ };
+
+ Streams& getStreams() { return _streams; }
+
+ // thread-safe, called from AudioMixerSlave(s) while processing ignore packets for other nodes
+ void ignoredByNode(QUuid nodeID);
+ void unignoredByNode(QUuid nodeID);
+
+ // start of methods called non-concurrently from single AudioMixerSlave mixing for the owning node
+
+ const Node::IgnoredNodeIDs& getNewIgnoredNodeIDs() const { return _newIgnoredNodeIDs; }
+ const Node::IgnoredNodeIDs& getNewUnignoredNodeIDs() const { return _newUnignoredNodeIDs; }
+
+ using ConcurrentIgnoreNodeIDs = tbb::concurrent_vector<QUuid>;
+ const ConcurrentIgnoreNodeIDs& getNewIgnoringNodeIDs() const { return _newIgnoringNodeIDs; }
+ const ConcurrentIgnoreNodeIDs& getNewUnignoringNodeIDs() const { return _newUnignoringNodeIDs; }
+
+ void clearStagedIgnoreChanges();
+
+ const Node::IgnoredNodeIDs& getIgnoringNodeIDs() const { return _ignoringNodeIDs; }
+
+
+ const std::vector<QUuid>& getSoloedNodes() const { return _soloedNodes; }
+
+ bool getHasReceivedFirstMix() const { return _hasReceivedFirstMix; }
+ void setHasReceivedFirstMix(bool hasReceivedFirstMix) { _hasReceivedFirstMix = hasReceivedFirstMix; }
+
+ // end of methods called non-concurrently from single AudioMixerSlave
+
signals:
void injectorStreamFinished(const QUuid& streamIdentifier);
@@ -126,52 +172,15 @@ private:
};
PacketQueue _packetQueue;
- QReadWriteLock _streamsLock;
- AudioStreamMap _audioStreams; // microphone stream from avatar is stored under key of null UUID
+ AudioStreamVector _audioStreams; // microphone stream from avatar has a null stream ID
void optionallyReplicatePacket(ReceivedMessage& packet, const Node& node);
- using IgnoreZone = AABox;
- class IgnoreZoneMemo {
- public:
- IgnoreZoneMemo(AudioMixerClientData& data) : _data(data) {}
+ void setGainForAvatar(QUuid nodeID, float gain);
- // returns an ignore zone, memoized by frame (lockless if the zone is already memoized)
- // preconditions:
- // - frame is increasing after first call (including overflow wrap)
- // - there are no references left from calls to getIgnoreZone(frame - 1)
- IgnoreZone& get(unsigned int frame);
+ bool containsValidPosition(ReceivedMessage& message) const;
- private:
- AudioMixerClientData& _data;
- IgnoreZone _zone;
- std::atomic _frame { 0 };
- std::mutex _mutex;
- };
- IgnoreZoneMemo _ignoreZone;
-
- class IgnoreNodeCache {
- public:
- // std::atomic is not copyable - always initialize uncached
- IgnoreNodeCache() {}
- IgnoreNodeCache(const IgnoreNodeCache& other) {}
-
- void cache(bool shouldIgnore);
- bool isCached();
- bool shouldIgnore();
-
- private:
- std::atomic _isCached { false };
- bool _shouldIgnore { false };
- };
- struct IgnoreNodeCacheHasher { std::size_t operator()(const QUuid& key) const { return qHash(key); } };
-
- using NodeSourcesIgnoreMap = tbb::concurrent_unordered_map<QUuid, IgnoreNodeCache, IgnoreNodeCacheHasher>;
- NodeSourcesIgnoreMap _nodeSourcesIgnoreMap;
-
- using HRTFMap = std::unordered_map<QUuid, AudioHRTF>;
- using NodeSourcesHRTFMap = std::unordered_map<QUuid, HRTFMap>;
- NodeSourcesHRTFMap _nodeSourcesHRTFMap;
+ Streams _streams;
quint16 _outgoingMixedAudioSequenceNumber;
@@ -190,6 +199,23 @@ private:
bool _shouldMuteClient { false };
bool _requestsDomainListData { false };
+
+ std::vector<AddedStream> _newAddedStreams;
+
+ Node::IgnoredNodeIDs _newIgnoredNodeIDs;
+ Node::IgnoredNodeIDs _newUnignoredNodeIDs;
+
+ tbb::concurrent_vector<QUuid> _newIgnoringNodeIDs;
+ tbb::concurrent_vector<QUuid> _newUnignoringNodeIDs;
+
+ std::mutex _ignoringNodeIDsMutex;
+ Node::IgnoredNodeIDs _ignoringNodeIDs;
+
+ std::atomic_bool _isIgnoreRadiusEnabled { false };
+
+ std::vector<QUuid> _soloedNodes;
+
+ bool _hasReceivedFirstMix { false };
};
#endif // hifi_AudioMixerClientData_h
diff --git a/assignment-client/src/audio/AudioMixerSlave.cpp b/assignment-client/src/audio/AudioMixerSlave.cpp
index b447048ac9..7a6ab9c3e2 100644
--- a/assignment-client/src/audio/AudioMixerSlave.cpp
+++ b/assignment-client/src/audio/AudioMixerSlave.cpp
@@ -36,7 +36,10 @@
#include "InjectedAudioStream.h"
#include "AudioHelpers.h"
-using AudioStreamMap = AudioMixerClientData::AudioStreamMap;
+using namespace std;
+using AudioStreamVector = AudioMixerClientData::AudioStreamVector;
+using MixableStream = AudioMixerClientData::MixableStream;
+using MixableStreamsVector = AudioMixerClientData::MixableStreamsVector;
// packet helpers
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec);
@@ -46,9 +49,8 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
// mix helpers
-inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
- const glm::vec3& relativePosition);
-inline float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
+inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd);
+inline float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho);
inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition);
@@ -56,15 +58,16 @@ inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const
void AudioMixerSlave::processPackets(const SharedNodePointer& node) {
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
if (data) {
- data->processPackets();
+ // process packets and collect the number of streams available for this frame
+ stats.sumStreams += data->processPackets(_sharedData.addedStreams);
}
}
-void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
+void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
_begin = begin;
_end = end;
_frame = frame;
- _throttlingRatio = throttlingRatio;
+ _numToRetain = numToRetain;
}
void AudioMixerSlave::mix(const SharedNodePointer& node) {
@@ -125,105 +128,345 @@ void AudioMixerSlave::mix(const SharedNodePointer& node) {
}
}
+
+template
+void erase_if(Container& cont, Predicate&& pred) {
+ auto it = remove_if(begin(cont), end(cont), std::forward(pred));
+ cont.erase(it, end(cont));
+}
+
+template
+bool contains(const Container& cont, typename Container::value_type value) {
+ return std::any_of(begin(cont), end(cont), [&value](const auto& element) {
+ return value == element;
+ });
+}
+
+ // This class lets you perform an erase-if over a container in several segments
+ // that each use a different predicate
+template
+class SegmentedEraseIf {
+public:
+ using iterator = typename Container::iterator;
+
+ SegmentedEraseIf(Container& cont) : _cont(cont) {
+ _first = begin(_cont);
+ _it = _first;
+ }
+ ~SegmentedEraseIf() {
+ assert(_it == end(_cont));
+ _cont.erase(_first, _it);
+ }
+
+ template
+ void iterateTo(iterator last, Predicate pred) {
+ while (_it != last) {
+ if (!pred(*_it)) {
+ if (_first != _it) {
+ *_first = move(*_it);
+ }
+ ++_first;
+ }
+ ++_it;
+ }
+ }
+
+private:
+ iterator _first;
+ iterator _it;
+ Container& _cont;
+};
+
+
+void AudioMixerSlave::addStreams(Node& listener, AudioMixerClientData& listenerData) {
+ auto& ignoredNodeIDs = listener.getIgnoredNodeIDs();
+ auto& ignoringNodeIDs = listenerData.getIgnoringNodeIDs();
+
+ auto& streams = listenerData.getStreams();
+
+ // add data for newly created streams to our vector
+ if (!listenerData.getHasReceivedFirstMix()) {
+ // when this listener is new, we need to fill its added streams object with all available streams
+ std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
+ AudioMixerClientData* nodeData = static_cast(node->getLinkedData());
+ if (nodeData) {
+ for (auto& stream : nodeData->getAudioStreams()) {
+ bool ignoredByListener = contains(ignoredNodeIDs, node->getUUID());
+ bool ignoringListener = contains(ignoringNodeIDs, node->getUUID());
+
+ if (ignoredByListener || ignoringListener) {
+ streams.skipped.emplace_back(node->getUUID(), node->getLocalID(),
+ stream->getStreamIdentifier(), stream.get());
+
+ // pre-populate ignored and ignoring flags for this stream
+ streams.skipped.back().ignoredByListener = ignoredByListener;
+ streams.skipped.back().ignoringListener = ignoringListener;
+ } else {
+ streams.active.emplace_back(node->getUUID(), node->getLocalID(),
+ stream->getStreamIdentifier(), stream.get());
+ }
+ }
+ }
+ });
+
+ // flag this listener as having received their first mix so we know we don't need to enumerate all nodes again
+ listenerData.setHasReceivedFirstMix(true);
+ } else {
+ for (const auto& newStream : _sharedData.addedStreams) {
+ bool ignoredByListener = contains(ignoredNodeIDs, newStream.nodeIDStreamID.nodeID);
+ bool ignoringListener = contains(ignoringNodeIDs, newStream.nodeIDStreamID.nodeID);
+
+ if (ignoredByListener || ignoringListener) {
+ streams.skipped.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
+
+ // pre-populate ignored and ignoring flags for this stream
+ streams.skipped.back().ignoredByListener = ignoredByListener;
+ streams.skipped.back().ignoringListener = ignoringListener;
+ } else {
+ streams.active.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
+ }
+ }
+ }
+}
+
+bool shouldBeRemoved(const MixableStream& stream, const AudioMixerSlave::SharedData& sharedData) {
+ return (contains(sharedData.removedNodes, stream.nodeStreamID.nodeLocalID) ||
+ contains(sharedData.removedStreams, stream.nodeStreamID));
+};
+
+bool shouldBeInactive(MixableStream& stream) {
+ return (!stream.positionalStream->lastPopSucceeded() ||
+ stream.positionalStream->getLastPopOutputLoudness() == 0.0f);
+};
+
+bool shouldBeSkipped(MixableStream& stream, const Node& listener,
+ const AvatarAudioStream& listenerAudioStream,
+ const AudioMixerClientData& listenerData) {
+
+ if (stream.nodeStreamID.nodeLocalID == listener.getLocalID()) {
+ return !stream.positionalStream->shouldLoopbackForNode();
+ }
+
+ // grab the unprocessed ignores and unignores from and for this listener
+ const auto& nodesIgnoredByListener = listenerData.getNewIgnoredNodeIDs();
+ const auto& nodesUnignoredByListener = listenerData.getNewUnignoredNodeIDs();
+ const auto& nodesIgnoringListener = listenerData.getNewIgnoringNodeIDs();
+ const auto& nodesUnignoringListener = listenerData.getNewUnignoringNodeIDs();
+
+ // this stream was previously not ignored by the listener and we have some newly ignored streams
+ // check now if it is one of the ignored streams and flag it as such
+ if (stream.ignoredByListener) {
+ stream.ignoredByListener = !contains(nodesUnignoredByListener, stream.nodeStreamID.nodeID);
+ } else {
+ stream.ignoredByListener = contains(nodesIgnoredByListener, stream.nodeStreamID.nodeID);
+ }
+
+ if (stream.ignoringListener) {
+ stream.ignoringListener = !contains(nodesUnignoringListener, stream.nodeStreamID.nodeID);
+ } else {
+ stream.ignoringListener = contains(nodesIgnoringListener, stream.nodeStreamID.nodeID);
+ }
+
+ bool listenerIsAdmin = listenerData.getRequestsDomainListData() && listener.getCanKick();
+ if (stream.ignoredByListener || (stream.ignoringListener && !listenerIsAdmin)) {
+ return true;
+ }
+
+ if (!listenerData.getSoloedNodes().empty()) {
+ return !contains(listenerData.getSoloedNodes(), stream.nodeStreamID.nodeID);
+ }
+
+ bool shouldCheckIgnoreBox = (listenerAudioStream.isIgnoreBoxEnabled() ||
+ stream.positionalStream->isIgnoreBoxEnabled());
+ if (shouldCheckIgnoreBox &&
+ listenerAudioStream.getIgnoreBox().touches(stream.positionalStream->getIgnoreBox())) {
+ return true;
+ }
+
+ return false;
+};
+
+float approximateVolume(const MixableStream& stream, const AvatarAudioStream* listenerAudioStream) {
+ if (stream.positionalStream->getLastPopOutputTrailingLoudness() == 0.0f) {
+ return 0.0f;
+ }
+
+ if (stream.positionalStream == listenerAudioStream) {
+ return 1.0f;
+ }
+
+ // approximate the gain
+ float gain = approximateGain(*listenerAudioStream, *(stream.positionalStream));
+
+ // for avatar streams, modify by the set gain adjustment
+ if (stream.nodeStreamID.streamID.isNull()) {
+ gain *= stream.hrtf->getGainAdjustment();
+ }
+
+ return stream.positionalStream->getLastPopOutputTrailingLoudness() * gain;
+};
+
bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
AvatarAudioStream* listenerAudioStream = static_cast(listener->getLinkedData())->getAvatarAudioStream();
AudioMixerClientData* listenerData = static_cast(listener->getLinkedData());
- // if we received an invalid position from this listener, then refuse to make them a mix
- // because we don't know how to do it properly
- if (!listenerAudioStream->hasValidPosition()) {
- return false;
- }
-
// zero out the mix for this listener
memset(_mixSamples, 0, sizeof(_mixSamples));
- bool isThrottling = _throttlingRatio > 0.0f;
- std::vector> throttledNodes;
+ bool isThrottling = _numToRetain != -1;
+ bool isSoloing = !listenerData->getSoloedNodes().empty();
- typedef void (AudioMixerSlave::*MixFunctor)(
- AudioMixerClientData&, const QUuid&, const AvatarAudioStream&, const PositionalAudioStream&);
- auto forAllStreams = [&](const SharedNodePointer& node, AudioMixerClientData* nodeData, MixFunctor mixFunctor) {
- auto nodeID = node->getUUID();
- for (auto& streamPair : nodeData->getAudioStreams()) {
- auto nodeStream = streamPair.second;
- (this->*mixFunctor)(*listenerData, nodeID, *listenerAudioStream, *nodeStream);
- }
- };
+ auto& streams = listenerData->getStreams();
-#ifdef HIFI_AUDIO_MIXER_DEBUG
- auto mixStart = p_high_resolution_clock::now();
-#endif
+ addStreams(*listener, *listenerData);
- std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
- AudioMixerClientData* nodeData = static_cast(node->getLinkedData());
- if (!nodeData) {
- return;
+ // Process skipped streams
+ erase_if(streams.skipped, [&](MixableStream& stream) {
+ if (shouldBeRemoved(stream, _sharedData)) {
+ return true;
}
- if (*node == *listener) {
- // only mix the echo, if requested
- for (auto& streamPair : nodeData->getAudioStreams()) {
- auto nodeStream = streamPair.second;
- if (nodeStream->shouldLoopbackForNode()) {
- mixStream(*listenerData, node->getUUID(), *listenerAudioStream, *nodeStream);
- }
- }
- } else if (!listenerData->shouldIgnore(listener, node, _frame)) {
- if (!isThrottling) {
- forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
+ if (!shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
+ if (shouldBeInactive(stream)) {
+ streams.inactive.push_back(move(stream));
+ ++stats.skippedToInactive;
} else {
- auto nodeID = node->getUUID();
+ streams.active.push_back(move(stream));
+ ++stats.skippedToActive;
+ }
+ return true;
+ }
- // compute the node's max relative volume
- float nodeVolume = 0.0f;
- for (auto& streamPair : nodeData->getAudioStreams()) {
- auto nodeStream = streamPair.second;
+ if (!isThrottling) {
+ updateHRTFParameters(stream, *listenerAudioStream,
+ listenerData->getMasterAvatarGain());
+ }
+ return false;
+ });
- // approximate the gain
- glm::vec3 relativePosition = nodeStream->getPosition() - listenerAudioStream->getPosition();
- float gain = approximateGain(*listenerAudioStream, *nodeStream, relativePosition);
+ // Process inactive streams
+ erase_if(streams.inactive, [&](MixableStream& stream) {
+ if (shouldBeRemoved(stream, _sharedData)) {
+ return true;
+ }
- // modify by hrtf gain adjustment
- auto& hrtf = listenerData->hrtfForStream(nodeID, nodeStream->getStreamIdentifier());
- gain *= hrtf.getGainAdjustment();
+ if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
+ streams.skipped.push_back(move(stream));
+ ++stats.inactiveToSkipped;
+ return true;
+ }
- auto streamVolume = nodeStream->getLastPopOutputTrailingLoudness() * gain;
- nodeVolume = std::max(streamVolume, nodeVolume);
- }
+ if (!shouldBeInactive(stream)) {
+ streams.active.push_back(move(stream));
+ ++stats.inactiveToActive;
+ return true;
+ }
- // max-heapify the nodes by relative volume
- throttledNodes.push_back({ nodeVolume, node });
- std::push_heap(throttledNodes.begin(), throttledNodes.end());
+ if (!isThrottling) {
+ updateHRTFParameters(stream, *listenerAudioStream,
+ listenerData->getMasterAvatarGain());
+ }
+ return false;
+ });
+
+ // Process active streams
+ erase_if(streams.active, [&](MixableStream& stream) {
+ if (shouldBeRemoved(stream, _sharedData)) {
+ return true;
+ }
+
+ if (isThrottling) {
+ // we're throttling, so we need to update the approximate volume for any un-skipped streams
+ // unless this is simply for an echo (in which case the approx volume is 1.0)
+ stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
+ } else {
+ if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
+ addStream(stream, *listenerAudioStream, 0.0f, isSoloing);
+ streams.skipped.push_back(move(stream));
+ ++stats.activeToSkipped;
+ return true;
+ }
+
+ addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
+ isSoloing);
+
+ if (shouldBeInactive(stream)) {
+ // To reduce artifacts we still call render to flush the HRTF for every silent
+ // source on the first frame where the source becomes silent;
+ // this ensures the correct tail from the last mixed block
+ streams.inactive.push_back(move(stream));
+ ++stats.activeToInactive;
+ return true;
}
}
+
+ return false;
});
if (isThrottling) {
- // pop the loudest nodes off the heap and mix their streams
- int numToRetain = (int)(std::distance(_begin, _end) * (1 - _throttlingRatio));
- for (int i = 0; i < numToRetain; i++) {
- if (throttledNodes.empty()) {
- break;
+ // since we're throttling, we need to partition the mixable into throttled and unthrottled streams
+ int numToRetain = min(_numToRetain, (int)streams.active.size()); // Make sure we don't overflow
+ auto throttlePoint = begin(streams.active) + numToRetain;
+
+ std::nth_element(streams.active.begin(), throttlePoint, streams.active.end(),
+ [](const auto& a, const auto& b)
+ {
+ return a.approximateVolume > b.approximateVolume;
+ });
+
+ SegmentedEraseIf erase(streams.active);
+ erase.iterateTo(throttlePoint, [&](MixableStream& stream) {
+ if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
+ resetHRTFState(stream);
+ streams.skipped.push_back(move(stream));
+ ++stats.activeToSkipped;
+ return true;
}
- std::pop_heap(throttledNodes.begin(), throttledNodes.end());
+ addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain(),
+ isSoloing);
- auto& node = throttledNodes.back().second;
- AudioMixerClientData* nodeData = static_cast(node->getLinkedData());
- forAllStreams(node, nodeData, &AudioMixerSlave::mixStream);
+ if (shouldBeInactive(stream)) {
+ // To reduce artifacts we still call render to flush the HRTF for every silent
+ // source on the first frame where the source becomes silent;
+ // this ensures the correct tail from the last mixed block
+ streams.inactive.push_back(move(stream));
+ ++stats.activeToInactive;
+ return true;
+ }
- throttledNodes.pop_back();
- }
+ return false;
+ });
+ erase.iterateTo(end(streams.active), [&](MixableStream& stream) {
+ // To reduce artifacts we reset the HRTF state for every throttled
+ // source on the first frame where the source becomes throttled;
+ // this ensures we at least remove the tail from the last mixed block,
+ // preventing excessive artifacts on the next first block
+ resetHRTFState(stream);
- // throttle the remaining nodes' streams
- for (const std::pair& nodePair : throttledNodes) {
- auto& node = nodePair.second;
- AudioMixerClientData* nodeData = static_cast(node->getLinkedData());
- forAllStreams(node, nodeData, &AudioMixerSlave::throttleStream);
- }
+ if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
+ streams.skipped.push_back(move(stream));
+ ++stats.activeToSkipped;
+ return true;
+ }
+
+ if (shouldBeInactive(stream)) {
+ streams.inactive.push_back(move(stream));
+ ++stats.activeToInactive;
+ return true;
+ }
+
+ return false;
+ });
}
+ stats.skipped += (int)streams.skipped.size();
+ stats.inactive += (int)streams.inactive.size();
+ stats.active += (int)streams.active.size();
+
+ // clear the newly ignored, un-ignored, ignoring, and un-ignoring streams now that we've processed them
+ listenerData->clearStagedIgnoreChanges();
+
#ifdef HIFI_AUDIO_MIXER_DEBUG
auto mixEnd = p_high_resolution_clock::now();
auto mixTime = std::chrono::duration_cast(mixEnd - mixStart);
@@ -246,51 +489,39 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
return hasAudio;
}
-void AudioMixerSlave::throttleStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
- const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
- // only throttle this stream to the mix if it has a valid position, we won't know how to mix it otherwise
- if (streamToAdd.hasValidPosition()) {
- addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, true);
- }
-}
-
-void AudioMixerSlave::mixStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
- const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
- // only add the stream to the mix if it has a valid position, we won't know how to mix it otherwise
- if (streamToAdd.hasValidPosition()) {
- addStream(listenerNodeData, sourceNodeID, listeningNodeStream, streamToAdd, false);
- }
-}
-
-void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QUuid& sourceNodeID,
- const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
- bool throttle) {
+void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
+ AvatarAudioStream& listeningNodeStream,
+ float masterListenerGain, bool isSoloing) {
++stats.totalMixes;
- // to reduce artifacts we call the HRTF functor for every source, even if throttled or silent
- // this ensures the correct tail from last mixed block and the correct spatialization of next first block
+ auto streamToAdd = mixableStream.positionalStream;
// check if this is a server echo of a source back to itself
- bool isEcho = (&streamToAdd == &listeningNodeStream);
+ bool isEcho = (streamToAdd == &listeningNodeStream);
- glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
+ glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
- float gain = computeGain(listenerNodeData, listeningNodeStream, streamToAdd, relativePosition, distance, isEcho);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
+
+ float gain = 1.0f;
+ if (!isSoloing) {
+ gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
+ }
+
const int HRTF_DATASET_INDEX = 1;
- if (!streamToAdd.lastPopSucceeded()) {
+ if (!streamToAdd->lastPopSucceeded()) {
bool forceSilentBlock = true;
- if (!streamToAdd.getLastPopOutput().isNull()) {
- bool isInjector = dynamic_cast(&streamToAdd);
+ if (!streamToAdd->getLastPopOutput().isNull()) {
+ bool isInjector = dynamic_cast(streamToAdd);
// in an injector, just go silent - the injector has likely ended
// in other inputs (microphone, &c.), repeat with fade to avoid the harsh jump to silence
if (!isInjector) {
// calculate its fade factor, which depends on how many times it's already been repeated.
- float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd.getConsecutiveNotMixedCount() - 1);
+ float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1);
if (fadeFactor > 0.0f) {
// apply the fadeFactor to the gain
gain *= fadeFactor;
@@ -302,15 +533,12 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
if (forceSilentBlock) {
// call renderSilent with a forced silent block to reduce artifacts
// (this is not done for stereo streams since they do not go through the HRTF)
- if (!streamToAdd.isStereo() && !isEcho) {
- // get the existing listener-source HRTF object, or create a new one
- auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
-
+ if (!streamToAdd->isStereo() && !isEcho) {
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
- hrtf.renderSilent(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
- AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
+ mixableStream.hrtf->render(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
+ AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
- ++stats.hrtfSilentRenders;
+ ++stats.hrtfRenders;
}
return;
@@ -318,16 +546,15 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
// grab the stream from the ring buffer
- AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd.getLastPopOutput();
+ AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
// stereo sources are not passed through HRTF
- if (streamToAdd.isStereo()) {
+ if (streamToAdd->isStereo()) {
// apply the avatar gain adjustment
- auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
- gain *= hrtf.getGainAdjustment();
+ gain *= mixableStream.hrtf->getGainAdjustment();
- const float scale = 1/32768.0f; // int16_t to float
+ const float scale = 1 / 32768.0f; // int16_t to float
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
_mixSamples[2*i+0] += (float)streamPopOutput[2*i+0] * gain * scale;
@@ -335,11 +562,8 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
++stats.manualStereoMixes;
- return;
- }
-
- // echo sources are not passed through HRTF
- if (isEcho) {
+ } else if (isEcho) {
+ // echo sources are not passed through HRTF
const float scale = 1/32768.0f; // int16_t to float
@@ -350,41 +574,38 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
++stats.manualEchoMixes;
- return;
+ } else {
+ streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
+
+ mixableStream.hrtf->render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
+ AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
+
+ ++stats.hrtfRenders;
}
+}
- // get the existing listener-source HRTF object, or create a new one
- auto& hrtf = listenerNodeData.hrtfForStream(sourceNodeID, streamToAdd.getStreamIdentifier());
+void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
+ AvatarAudioStream& listeningNodeStream,
+ float masterListenerGain) {
+ auto streamToAdd = mixableStream.positionalStream;
- streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
+ // check if this is a server echo of a source back to itself
+ bool isEcho = (streamToAdd == &listeningNodeStream);
- if (streamToAdd.getLastPopOutputLoudness() == 0.0f) {
- // call renderSilent to reduce artifacts
- hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
- AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
+ glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
- ++stats.hrtfSilentRenders;
- return;
- }
+ float distance = glm::max(glm::length(relativePosition), EPSILON);
+ float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
+ float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
- if (throttle) {
- // call renderSilent with actual frame data and a gain of 0.0f to reduce artifacts
- hrtf.renderSilent(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, 0.0f,
- AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
+ mixableStream.hrtf->setParameterHistory(azimuth, distance, gain);
- ++stats.hrtfThrottleRenders;
- return;
- }
+ ++stats.hrtfUpdates;
+}
- if (streamToAdd.getType() == PositionalAudioStream::Injector) {
- // apply per-avatar gain to positional audio injectors, which wouldn't otherwise be affected by PAL sliders
- hrtf.setGainAdjustment(listenerNodeData.hrtfForStream(sourceNodeID, QUuid()).getGainAdjustment());
- }
-
- hrtf.render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
- AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
-
- ++stats.hrtfRenders;
+void AudioMixerSlave::resetHRTFState(AudioMixerClientData::MixableStream& mixableStream) {
+ mixableStream.hrtf->reset();
+ ++stats.hrtfResets;
}
std::unique_ptr createAudioPacket(PacketType type, int size, quint16 sequence, QString codec) {
@@ -443,12 +664,12 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
glm::vec3 streamPosition = stream->getPosition();
// find reverb properties
- for (int i = 0; i < reverbSettings.size(); ++i) {
- AABox box = audioZones[reverbSettings[i].zone];
+ for (const auto& settings : reverbSettings) {
+ AABox box = audioZones[settings.zone].area;
if (box.contains(streamPosition)) {
hasReverb = true;
- reverbTime = reverbSettings[i].reverbTime;
- wetLevel = reverbSettings[i].wetLevel;
+ reverbTime = settings.reverbTime;
+ wetLevel = settings.wetLevel;
break;
}
}
@@ -493,8 +714,7 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
}
}
-float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
- const glm::vec3& relativePosition) {
+float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
float gain = 1.0f;
// injector: apply attenuation
@@ -505,13 +725,14 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
// avatar: skip attenuation - it is too costly to approximate
// distance attenuation: approximate, ignore zone-specific attenuations
+ glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
float distance = glm::length(relativePosition);
return gain / distance;
// avatar: skip master gain - it is constant for all streams
}
-float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
+float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho) {
float gain = 1.0f;
@@ -534,7 +755,7 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
gain *= offAxisCoefficient;
// apply master gain, only to avatars
- gain *= listenerNodeData.getMasterAvatarGain();
+ gain *= masterListenerGain;
}
auto& audioZones = AudioMixer::getAudioZones();
@@ -542,10 +763,10 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
// find distance attenuation coefficient
float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
- for (int i = 0; i < zoneSettings.length(); ++i) {
- if (audioZones[zoneSettings[i].source].contains(streamToAdd.getPosition()) &&
- audioZones[zoneSettings[i].listener].contains(listeningNodeStream.getPosition())) {
- attenuationPerDoublingInDistance = zoneSettings[i].coefficient;
+ for (const auto& settings : zoneSettings) {
+ if (audioZones[settings.source].area.contains(streamToAdd.getPosition()) &&
+ audioZones[settings.listener].area.contains(listeningNodeStream.getPosition())) {
+ attenuationPerDoublingInDistance = settings.coefficient;
break;
}
}
diff --git a/assignment-client/src/audio/AudioMixerSlave.h b/assignment-client/src/audio/AudioMixerSlave.h
index 074d10ff40..3d979da1fc 100644
--- a/assignment-client/src/audio/AudioMixerSlave.h
+++ b/assignment-client/src/audio/AudioMixerSlave.h
@@ -12,29 +12,39 @@
#ifndef hifi_AudioMixerSlave_h
#define hifi_AudioMixerSlave_h
+#include
+
#include
#include
#include
#include
#include
#include
+#include
+#include "AudioMixerClientData.h"
#include "AudioMixerStats.h"
-class PositionalAudioStream;
class AvatarAudioStream;
class AudioHRTF;
-class AudioMixerClientData;
class AudioMixerSlave {
public:
using ConstIter = NodeList::const_iterator;
+
+ struct SharedData {
+ AudioMixerClientData::ConcurrentAddedStreams addedStreams;
+ std::vector removedNodes;
+ std::vector removedStreams;
+ };
+
+ AudioMixerSlave(SharedData& sharedData) : _sharedData(sharedData) {};
// process packets for a given node (requires no configuration)
void processPackets(const SharedNodePointer& node);
// configure a round of mixing
- void configureMix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
+ void configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);
// mix and broadcast non-ignored streams to the node (requires configuration using configureMix, above)
// returns true if a mixed packet was sent to the node
@@ -45,13 +55,15 @@ public:
private:
// create mix, returns true if mix has audio
bool prepareMix(const SharedNodePointer& listener);
- void throttleStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
- const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
- void mixStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
- const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer);
- void addStream(AudioMixerClientData& listenerData, const QUuid& streamerID,
- const AvatarAudioStream& listenerStream, const PositionalAudioStream& streamer,
- bool throttle);
+ void addStream(AudioMixerClientData::MixableStream& mixableStream,
+ AvatarAudioStream& listeningNodeStream,
+ float masterListenerGain, bool isSoloing);
+ void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
+ AvatarAudioStream& listeningNodeStream,
+ float masterListenerGain);
+ void resetHRTFState(AudioMixerClientData::MixableStream& mixableStream);
+
+ void addStreams(Node& listener, AudioMixerClientData& listenerData);
// mixing buffers
float _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
@@ -61,7 +73,9 @@ private:
ConstIter _begin;
ConstIter _end;
unsigned int _frame { 0 };
- float _throttlingRatio { 0.0f };
+ int _numToRetain { -1 };
+
+ SharedData& _sharedData;
};
#endif // hifi_AudioMixerSlave_h
diff --git a/assignment-client/src/audio/AudioMixerSlavePool.cpp b/assignment-client/src/audio/AudioMixerSlavePool.cpp
index dfe7ef56aa..7cc7ac9f93 100644
--- a/assignment-client/src/audio/AudioMixerSlavePool.cpp
+++ b/assignment-client/src/audio/AudioMixerSlavePool.cpp
@@ -74,13 +74,11 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {
run(begin, end);
}
-void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
+void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
_function = &AudioMixerSlave::mix;
_configure = [=](AudioMixerSlave& slave) {
- slave.configureMix(_begin, _end, _frame, _throttlingRatio);
+ slave.configureMix(_begin, _end, frame, numToRetain);
};
- _frame = frame;
- _throttlingRatio = throttlingRatio;
run(begin, end);
}
@@ -167,7 +165,7 @@ void AudioMixerSlavePool::resize(int numThreads) {
if (numThreads > _numThreads) {
// start new slaves
for (int i = 0; i < numThreads - _numThreads; ++i) {
- auto slave = new AudioMixerSlaveThread(*this);
+ auto slave = new AudioMixerSlaveThread(*this, _workerSharedData);
slave->start();
_slaves.emplace_back(slave);
}
diff --git a/assignment-client/src/audio/AudioMixerSlavePool.h b/assignment-client/src/audio/AudioMixerSlavePool.h
index 25047faa89..82b892123c 100644
--- a/assignment-client/src/audio/AudioMixerSlavePool.h
+++ b/assignment-client/src/audio/AudioMixerSlavePool.h
@@ -31,7 +31,8 @@ class AudioMixerSlaveThread : public QThread, public AudioMixerSlave {
using Lock = std::unique_lock;
public:
- AudioMixerSlaveThread(AudioMixerSlavePool& pool) : _pool(pool) {}
+ AudioMixerSlaveThread(AudioMixerSlavePool& pool, AudioMixerSlave::SharedData& sharedData)
+ : AudioMixerSlave(sharedData), _pool(pool) {}
void run() override final;
@@ -58,14 +59,15 @@ class AudioMixerSlavePool {
public:
using ConstIter = NodeList::const_iterator;
- AudioMixerSlavePool(int numThreads = QThread::idealThreadCount()) { setNumThreads(numThreads); }
+ AudioMixerSlavePool(AudioMixerSlave::SharedData& sharedData, int numThreads = QThread::idealThreadCount())
+ : _workerSharedData(sharedData) { setNumThreads(numThreads); }
~AudioMixerSlavePool() { resize(0); }
// process packets on slave threads
void processPackets(ConstIter begin, ConstIter end);
// mix on slave threads
- void mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio);
+ void mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);
// iterate over all slaves
void each(std::function functor);
@@ -96,10 +98,10 @@ private:
// frame state
Queue _queue;
- unsigned int _frame { 0 };
- float _throttlingRatio { 0.0f };
ConstIter _begin;
ConstIter _end;
+
+ AudioMixerSlave::SharedData& _workerSharedData;
};
#endif // hifi_AudioMixerSlavePool_h
diff --git a/assignment-client/src/audio/AudioMixerStats.cpp b/assignment-client/src/audio/AudioMixerStats.cpp
index 4cfdd55167..bb2daa1d2d 100644
--- a/assignment-client/src/audio/AudioMixerStats.cpp
+++ b/assignment-client/src/audio/AudioMixerStats.cpp
@@ -15,12 +15,27 @@ void AudioMixerStats::reset() {
sumStreams = 0;
sumListeners = 0;
sumListenersSilent = 0;
+
totalMixes = 0;
+
hrtfRenders = 0;
- hrtfSilentRenders = 0;
- hrtfThrottleRenders = 0;
+ hrtfResets = 0;
+ hrtfUpdates = 0;
+
manualStereoMixes = 0;
manualEchoMixes = 0;
+
+ skippedToActive = 0;
+ skippedToInactive = 0;
+ inactiveToSkipped = 0;
+ inactiveToActive = 0;
+ activeToSkipped = 0;
+ activeToInactive = 0;
+
+ skipped = 0;
+ inactive = 0;
+ active = 0;
+
#ifdef HIFI_AUDIO_MIXER_DEBUG
mixTime = 0;
#endif
@@ -30,12 +45,27 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
sumStreams += otherStats.sumStreams;
sumListeners += otherStats.sumListeners;
sumListenersSilent += otherStats.sumListenersSilent;
+
totalMixes += otherStats.totalMixes;
+
hrtfRenders += otherStats.hrtfRenders;
- hrtfSilentRenders += otherStats.hrtfSilentRenders;
- hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
+ hrtfResets += otherStats.hrtfResets;
+ hrtfUpdates += otherStats.hrtfUpdates;
+
manualStereoMixes += otherStats.manualStereoMixes;
manualEchoMixes += otherStats.manualEchoMixes;
+
+ skippedToActive += otherStats.skippedToActive;
+ skippedToInactive += otherStats.skippedToInactive;
+ inactiveToSkipped += otherStats.inactiveToSkipped;
+ inactiveToActive += otherStats.inactiveToActive;
+ activeToSkipped += otherStats.activeToSkipped;
+ activeToInactive += otherStats.activeToInactive;
+
+ skipped += otherStats.skipped;
+ inactive += otherStats.inactive;
+ active += otherStats.active;
+
#ifdef HIFI_AUDIO_MIXER_DEBUG
mixTime += otherStats.mixTime;
#endif
diff --git a/assignment-client/src/audio/AudioMixerStats.h b/assignment-client/src/audio/AudioMixerStats.h
index f4ba9db769..459cbfc970 100644
--- a/assignment-client/src/audio/AudioMixerStats.h
+++ b/assignment-client/src/audio/AudioMixerStats.h
@@ -24,12 +24,23 @@ struct AudioMixerStats {
int totalMixes { 0 };
int hrtfRenders { 0 };
- int hrtfSilentRenders { 0 };
- int hrtfThrottleRenders { 0 };
+ int hrtfResets { 0 };
+ int hrtfUpdates { 0 };
int manualStereoMixes { 0 };
int manualEchoMixes { 0 };
+ int skippedToActive { 0 };
+ int skippedToInactive { 0 };
+ int inactiveToSkipped { 0 };
+ int inactiveToActive { 0 };
+ int activeToSkipped { 0 };
+ int activeToInactive { 0 };
+
+ int skipped { 0 };
+ int inactive { 0 };
+ int active { 0 };
+
#ifdef HIFI_AUDIO_MIXER_DEBUG
uint64_t mixTime { 0 };
#endif
diff --git a/assignment-client/src/audio/AvatarAudioStream.cpp b/assignment-client/src/audio/AvatarAudioStream.cpp
index 22ea8c0617..1b3ca9a8b1 100644
--- a/assignment-client/src/audio/AvatarAudioStream.cpp
+++ b/assignment-client/src/audio/AvatarAudioStream.cpp
@@ -23,9 +23,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
if (type == PacketType::SilentAudioFrame) {
const char* dataAt = packetAfterSeqNum.constData();
- quint16 numSilentSamples = *(reinterpret_cast(dataAt));
- readBytes += sizeof(quint16);
- numAudioSamples = (int)numSilentSamples;
+ SilentSamplesBytes numSilentSamples = *(reinterpret_cast(dataAt));
+ readBytes += sizeof(SilentSamplesBytes);
+ numAudioSamples = (int) numSilentSamples;
// read the positional data
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
@@ -34,9 +34,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
_shouldLoopbackForNode = (type == PacketType::MicrophoneAudioWithEcho);
// read the channel flag
- quint8 channelFlag = packetAfterSeqNum.at(readBytes);
+ ChannelFlag channelFlag = packetAfterSeqNum.at(readBytes);
bool isStereo = channelFlag == 1;
- readBytes += sizeof(quint8);
+ readBytes += sizeof(ChannelFlag);
// if isStereo value has changed, restart the ring buffer with new frame size
if (isStereo != _isStereo) {
diff --git a/assignment-client/src/audio/AvatarAudioStream.h b/assignment-client/src/audio/AvatarAudioStream.h
index 497e522922..de9577099e 100644
--- a/assignment-client/src/audio/AvatarAudioStream.h
+++ b/assignment-client/src/audio/AvatarAudioStream.h
@@ -16,6 +16,8 @@
#include "PositionalAudioStream.h"
+using SilentSamplesBytes = quint16;
+
class AvatarAudioStream : public PositionalAudioStream {
public:
AvatarAudioStream(bool isStereo, int numStaticJitterFrames = -1);
diff --git a/assignment-client/src/avatars/AvatarMixer.cpp b/assignment-client/src/avatars/AvatarMixer.cpp
index 167c1cd29c..53fc13e5cf 100644
--- a/assignment-client/src/avatars/AvatarMixer.cpp
+++ b/assignment-client/src/avatars/AvatarMixer.cpp
@@ -541,7 +541,8 @@ void AvatarMixer::handleRequestsDomainListDataPacket(QSharedPointersetLastBroadcastTime(node->getUUID(), 0);
+ nodeData->setLastBroadcastTime(node->getLocalID(), 0);
+ nodeData->resetSentTraitData(node->getLocalID());
}
);
}
@@ -564,7 +565,8 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer mes
// parse the identity packet and update the change timestamp if appropriate
bool identityChanged = false;
bool displayNameChanged = false;
- avatar.processAvatarIdentity(message->getMessage(), identityChanged, displayNameChanged);
+ QDataStream avatarIdentityStream(message->getMessage());
+ avatar.processAvatarIdentity(avatarIdentityStream, identityChanged, displayNameChanged);
if (identityChanged) {
QMutexLocker nodeDataLocker(&nodeData->getMutex());
@@ -588,10 +590,10 @@ void AvatarMixer::handleAvatarIdentityRequestPacket(QSharedPointergetMessage()) );
if (!avatarID.isNull()) {
auto nodeList = DependencyManager::get();
- auto node = nodeList->nodeWithUUID(avatarID);
- if (node) {
- QMutexLocker lock(&node->getMutex());
- AvatarMixerClientData* avatarClientData = dynamic_cast