Merge branch 'master' into scriptvec3

commit 4e96c3297b
Sam Gondelman, 2018-10-03 17:14:15 -07:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
95 changed files with 4203 additions and 1798 deletions

View file

@@ -26,6 +26,7 @@
QAndroidJniObject __interfaceActivity;
QAndroidJniObject __loginCompletedListener;
QAndroidJniObject __signupCompletedListener;
QAndroidJniObject __loadCompleteListener;
QAndroidJniObject __usernameChangedListener;
void tempMessageHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {
@@ -267,6 +268,14 @@ Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeCancelLogin(JNIE
}
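// SignupFragment reuses the login flow; its JNI entry points below simply delegate to the LoginFragment implementations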
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeCancelLogin(JNIEnv *env,
jobject instance) {
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeCancelLogin(env, instance);
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *env, jobject instance,
jstring username_, jstring password_,
@@ -308,6 +317,67 @@ Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *en
Q_ARG(const QString&, username), Q_ARG(const QString&, password));
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeLogin(JNIEnv *env,
jobject instance,
jstring username_,
jstring password_,
jobject usernameChangedListener) {
Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(env, instance, username_, password_, usernameChangedListener);
}
JNIEXPORT void JNICALL Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeInitAfterAppLoaded(JNIEnv* env, jobject obj) {
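// move the AndroidHelper singleton to the Qt main thread so its queued signals/slots are serviced there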
AndroidHelper::instance().moveToThread(qApp->thread());
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeSignup(JNIEnv *env, jobject instance,
jstring email_, jstring username_,
jstring password_) {
const char *c_email = env->GetStringUTFChars(email_, 0);
const char *c_username = env->GetStringUTFChars(username_, 0);
const char *c_password = env->GetStringUTFChars(password_, 0);
QString email = QString(c_email);
QString username = QString(c_username);
QString password = QString(c_password);
env->ReleaseStringUTFChars(email_, c_email);
env->ReleaseStringUTFChars(username_, c_username);
env->ReleaseStringUTFChars(password_, c_password);
__signupCompletedListener = QAndroidJniObject(instance);
// disconnect any previous callback
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, []() {
if (__signupCompletedListener.isValid()) {
// handleSignupCompleted() takes no arguments on the Java side, so none are passed
__signupCompletedListener.callMethod<void>("handleSignupCompleted", "()V");
}
});
QObject::connect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, [](QString errorString) {
if (__signupCompletedListener.isValid()) {
QAndroidJniObject string = QAndroidJniObject::fromString(errorString);
__signupCompletedListener.callMethod<void>("handleSignupFailed", "(Ljava/lang/String;)V", string.object<jstring>());
}
});
AndroidHelper::instance().signup(email, username, password);
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SignupFragment_nativeCancelSignup(JNIEnv *env, jobject instance) {
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupCompleted, nullptr, nullptr);
QObject::disconnect(&AndroidHelper::instance(), &AndroidHelper::handleSignupFailed, nullptr, nullptr);
__signupCompletedListener = nullptr;
}
JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_fragment_FriendsFragment_nativeIsLoggedIn(JNIEnv *env, jobject instance) {
auto accountManager = DependencyManager::get<AccountManager>();

View file

@@ -69,6 +69,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
private native void nativeEnterBackground();
private native void nativeEnterForeground();
private native long nativeOnExitVr();
private native void nativeInitAfterAppLoaded();
private AssetManager assetManager;
@@ -351,6 +352,9 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
if (nativeEnterBackgroundCallEnqueued) {
nativeEnterBackground();
}
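// the Qt app has finished loading at this point; run the one-time native init on the UI thread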
runOnUiThread(() -> {
nativeInitAfterAppLoaded();
});
}
public void performHapticFeedback(int duration) {

View file

@@ -37,12 +37,15 @@ import io.highfidelity.hifiinterface.fragment.HomeFragment;
import io.highfidelity.hifiinterface.fragment.LoginFragment;
import io.highfidelity.hifiinterface.fragment.PolicyFragment;
import io.highfidelity.hifiinterface.fragment.SettingsFragment;
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
import io.highfidelity.hifiinterface.fragment.SignedInFragment;
import io.highfidelity.hifiinterface.fragment.SignupFragment;
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener,
LoginFragment.OnLoginInteractionListener,
HomeFragment.OnHomeInteractionListener,
FriendsFragment.OnHomeInteractionListener {
FriendsFragment.OnHomeInteractionListener,
SignupFragment.OnSignupInteractionListener,
SignedInFragment.OnSignedInInteractionListener {
private static final int PROFILE_PICTURE_PLACEHOLDER = R.drawable.default_profile_avatar;
public static final String DEFAULT_FRAGMENT = "Home";
@@ -147,35 +150,44 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
private void loadHomeFragment(boolean addToBackStack) {
Fragment fragment = HomeFragment.newInstance();
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack);
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack, true);
}
private void loadLoginFragment() {
Fragment fragment = LoginFragment.newInstance();
loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true, true);
}
loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true);
private void loadSignedInFragment() {
Fragment fragment = SignedInFragment.newInstance();
loadFragment(fragment, getString(R.string.welcome), getString(R.string.tagFragmentSignedIn), true, true);
}
private void loadSignupFragment() {
Fragment fragment = SignupFragment.newInstance();
loadFragment(fragment, getString(R.string.signup), getString(R.string.tagFragmentSignup), true, false);
}
private void loadPrivacyPolicyFragment() {
Fragment fragment = PolicyFragment.newInstance();
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true);
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true, true);
}
private void loadPeopleFragment() {
Fragment fragment = FriendsFragment.newInstance();
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true);
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true, true);
}
private void loadSettingsFragment() {
SettingsFragment fragment = SettingsFragment.newInstance();
loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true);
loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true, true);
}
private void loadFragment(Fragment fragment, String title, String tag, boolean addToBackStack) {
private void loadFragment(Fragment newFragment, String title, String tag, boolean addToBackStack, boolean goBackUntilHome) {
FragmentManager fragmentManager = getFragmentManager();
// check if it's the same fragment
@@ -187,17 +199,19 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
return; // cancel as we are already in that fragment
}
// go back until first transaction
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
for (int i = 0; i < backStackEntryCount - 1; i++) {
fragmentManager.popBackStackImmediate();
if (goBackUntilHome) {
// go back until first transaction
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
for (int i = 0; i < backStackEntryCount - 1; i++) {
fragmentManager.popBackStackImmediate();
}
}
// this case is when we wanted to go home... the rollback above already did that!
// But asking for a new Home fragment makes it easier to show an updated list, so we let it continue
FragmentTransaction ft = fragmentManager.beginTransaction();
ft.replace(R.id.content_frame, fragment, tag);
ft.replace(R.id.content_frame, newFragment, tag);
if (addToBackStack) {
ft.addToBackStack(title);
@@ -334,6 +348,32 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}
}
@Override
public void onGettingStarted() {
loadHomeFragment(false);
if (backToScene) {
backToScene = false;
goToLastLocation();
}
}
@Override
public void onLoginRequested() {
// go back from signup to login
onBackPressed();
}
@Override
public void onSignupRequested() {
loadSignupFragment();
}
@Override
public void onSignupCompleted() {
loadSignedInFragment();
updateLoginMenu();
}
public void handleUsernameChanged(String username) {
runOnUiThread(() -> updateProfileHeader(username));
}

View file

@@ -30,6 +30,7 @@ public class LoginFragment extends Fragment {
private EditText mPassword;
private TextView mError;
private TextView mForgotPassword;
private TextView mSignup;
private Button mLoginButton;
private ProgressDialog mDialog;
@@ -58,10 +59,12 @@
mError = rootView.findViewById(R.id.error);
mLoginButton = rootView.findViewById(R.id.loginButton);
mForgotPassword = rootView.findViewById(R.id.forgotPassword);
mSignup = rootView.findViewById(R.id.signupButton);
mLoginButton.setOnClickListener(view -> login());
mForgotPassword.setOnClickListener(view -> forgotPassword());
mSignup.setOnClickListener(view -> signup());
mPassword.setOnEditorActionListener(
(textView, actionId, keyEvent) -> {
@@ -121,6 +124,12 @@
}
}
public void signup() {
if (mListener != null) {
mListener.onSignupRequested();
}
}
private void hideKeyboard() {
View view = getActivity().getCurrentFocus();
if (view != null) {
@@ -182,6 +191,7 @@
public interface OnLoginInteractionListener {
void onLoginCompleted();
void onSignupRequested();
}
}

View file

@@ -0,0 +1,73 @@
package io.highfidelity.hifiinterface.fragment;
import android.app.Fragment;
import android.content.Context;
import android.os.Bundle;
import android.text.Html;
import android.text.Spanned;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import java.io.IOException;
import java.io.InputStream;
import io.highfidelity.hifiinterface.R;
public class SignedInFragment extends Fragment {
private Button mGetStartedButton;
private OnSignedInInteractionListener mListener;
public SignedInFragment() {
// Required empty public constructor
}
public static SignedInFragment newInstance() {
SignedInFragment fragment = new SignedInFragment();
return fragment;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_signedin, container, false);
mGetStartedButton = rootView.findViewById(R.id.getStarted);
mGetStartedButton.setOnClickListener(view -> {
getStarted();
});
return rootView;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof SignedInFragment.OnSignedInInteractionListener) {
mListener = (SignedInFragment.OnSignedInInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnSignedInInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
public void getStarted() {
if (mListener != null) {
mListener.onGettingStarted();
}
}
public interface OnSignedInInteractionListener {
void onGettingStarted();
}
}

View file

@@ -0,0 +1,217 @@
package io.highfidelity.hifiinterface.fragment;
import android.app.Activity;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import org.qtproject.qt5.android.QtNative;
import io.highfidelity.hifiinterface.R;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationActive;
import static org.qtproject.qt5.android.QtActivityDelegate.ApplicationInactive;
public class SignupFragment extends Fragment {
private EditText mEmail;
private EditText mUsername;
private EditText mPassword;
private TextView mError;
private TextView mCancelButton;
private Button mSignupButton;
private ProgressDialog mDialog;
public native void nativeSignup(String email, String username, String password);
public native void nativeCancelSignup();
public native void nativeLogin(String username, String password, Activity usernameChangedListener);
public native void nativeCancelLogin();
private SignupFragment.OnSignupInteractionListener mListener;
public SignupFragment() {
// Required empty public constructor
}
public static SignupFragment newInstance() {
SignupFragment fragment = new SignupFragment();
return fragment;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_signup, container, false);
mEmail = rootView.findViewById(R.id.email);
mUsername = rootView.findViewById(R.id.username);
mPassword = rootView.findViewById(R.id.password);
mError = rootView.findViewById(R.id.error);
mSignupButton = rootView.findViewById(R.id.signupButton);
mCancelButton = rootView.findViewById(R.id.cancelButton);
mSignupButton.setOnClickListener(view -> signup());
mCancelButton.setOnClickListener(view -> login());
mPassword.setOnEditorActionListener(
(textView, actionId, keyEvent) -> {
if (actionId == EditorInfo.IME_ACTION_DONE) {
mSignupButton.performClick();
return true;
}
return false;
});
return rootView;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof OnSignupInteractionListener) {
mListener = (OnSignupInteractionListener) context;
} else {
throw new RuntimeException(context.toString()
+ " must implement OnSignupInteractionListener");
}
}
@Override
public void onDetach() {
super.onDetach();
mListener = null;
}
@Override
public void onResume() {
super.onResume();
// This hack intends to keep Qt threads running even after the app comes from background
QtNative.setApplicationState(ApplicationActive);
}
@Override
public void onStop() {
super.onStop();
cancelActivityIndicator();
// Leave the Qt app paused
QtNative.setApplicationState(ApplicationInactive);
hideKeyboard();
}
private void login() {
if (mListener != null) {
mListener.onLoginRequested();
}
}
public void signup() {
String email = mEmail.getText().toString().trim();
String username = mUsername.getText().toString().trim();
String password = mPassword.getText().toString();
hideKeyboard();
if (email.isEmpty() || username.isEmpty() || password.isEmpty()) {
showError(getString(R.string.signup_email_username_or_password_incorrect));
} else {
mSignupButton.setEnabled(false);
hideError();
showActivityIndicator();
nativeSignup(email, username, password);
}
}
private void hideKeyboard() {
View view = getActivity().getCurrentFocus();
if (view != null) {
InputMethodManager imm = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
}
private void showActivityIndicator() {
if (mDialog == null) {
mDialog = new ProgressDialog(getContext());
}
mDialog.setMessage(getString(R.string.creating_account));
mDialog.setCancelable(true);
mDialog.setOnCancelListener(dialogInterface -> {
nativeCancelSignup();
cancelActivityIndicator();
mSignupButton.setEnabled(true);
});
mDialog.show();
}
private void cancelActivityIndicator() {
if (mDialog != null) {
mDialog.cancel();
}
}
private void showError(String error) {
mError.setText(error);
mError.setVisibility(View.VISIBLE);
}
private void hideError() {
mError.setText("");
mError.setVisibility(View.INVISIBLE);
}
public interface OnSignupInteractionListener {
void onSignupCompleted();
void onLoginRequested();
}
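// called back from the native side (see the JNI connect to AndroidHelper::handleSignupCompleted); chains signup straight into login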
public void handleSignupCompleted() {
String username = mUsername.getText().toString().trim();
String password = mPassword.getText().toString();
mDialog.setMessage(getString(R.string.logging_in));
mDialog.setCancelable(true);
mDialog.setOnCancelListener(dialogInterface -> {
nativeCancelLogin();
cancelActivityIndicator();
if (mListener != null) {
mListener.onLoginRequested();
}
});
mDialog.show();
nativeLogin(username, password, getActivity());
}
public void handleSignupFailed(String error) {
getActivity().runOnUiThread(() -> {
mSignupButton.setEnabled(true);
cancelActivityIndicator();
mError.setText(error);
mError.setVisibility(View.VISIBLE);
});
}
public void handleLoginCompleted(boolean success) {
getActivity().runOnUiThread(() -> {
mSignupButton.setEnabled(true);
cancelActivityIndicator();
if (success) {
if (mListener != null) {
mListener.onSignupCompleted();
}
} else {
// Registration was successful but login failed.
// Let the user log in manually
mListener.onLoginRequested();
}
});
}
}

View file

@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android" >
<item android:state_pressed="true" >
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
<item android:state_focused="true">
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
<item>
<shape android:shape="rectangle" >
<corners android:radius="4dip" />
<stroke android:width="1dip" android:color="@color/colorButton2" />
<solid android:color="@color/colorButton2"/>
</shape>
</item>
</selector>

View file

@@ -21,10 +21,12 @@
android:id="@+id/error"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="16dp"
android:layout_marginBottom="25dp"
android:layout_marginLeft="9dp"
android:layout_marginRight="9dp"
android:fontFamily="@font/raleway"
android:textColor="@color/colorLoginError"
android:textSize="12sp"
android:textSize="14sp"
app:layout_constraintBottom_toTopOf="@id/username"
app:layout_constraintLeft_toLeftOf="@id/username"
android:visibility="invisible"/>
@@ -91,35 +93,50 @@
android:id="@+id/loginButton"
android:layout_width="154dp"
android:layout_height="38dp"
android:layout_marginTop="16dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingLeft="55dp"
android:paddingRight="55dp"
android:paddingTop="0dp"
android:text="@string/login"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="15sp"
android:textSize="18sp"
app:layout_constraintRight_toRightOf="@id/username"
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
app:layout_constraintTop_toBottomOf="@id/forgotPassword"
app:layout_goneMarginTop="4dp"/>
<TextView
android:id="@+id/forgotPassword"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:paddingTop="9dp"
android:paddingBottom="16dp"
android:fontFamily="@font/raleway_semibold"
android:textSize="14dp"
android:text="@string/forgot_password"
android:textStyle="italic"
android:paddingRight="10dp"
app:layout_constraintRight_toRightOf="@id/passwordLayout"
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
android:textColor="@color/colorButton1"/>
<Button
android:id="@+id/signupButton"
android:layout_width="0dp"
app:layout_constraintWidth_default="spread"
android:layout_height="38dp"
android:background="@drawable/rounded_secondary_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:layout_marginRight="15dp"
android:text="@string/signup"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
app:layout_constraintTop_toTopOf="@id/loginButton"
app:layout_constraintRight_toLeftOf="@id/loginButton"
android:textColor="@color/colorButton1"/>
app:layout_goneMarginTop="4dp"/>

View file

@@ -0,0 +1,63 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/backgroundLight">
<ImageView
android:id="@+id/header"
android:layout_width="@dimen/header_hifi_width"
android:layout_height="@dimen/header_hifi_height"
android:layout_marginTop="@dimen/header_hifi_margin_top"
android:contentDescription="HighFidelity"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:srcCompat="@drawable/hifi_header" />
<TextView
android:id="@+id/welcome"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="50dp"
android:paddingLeft="86dp"
android:paddingRight="86dp"
android:fontFamily="@font/raleway"
android:textColor="@color/clearText"
android:textSize="24sp"
android:text="@string/signedin_welcome"
app:layout_constraintTop_toBottomOf="@id/header"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:gravity="center"
/>
<Button
android:id="@+id/getStarted"
android:layout_width="217dp"
android:layout_height="38dp"
android:layout_marginTop="30dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingLeft="25dp"
android:paddingRight="25dp"
android:paddingTop="0dp"
android:text="@string/get_started"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintTop_toBottomOf="@id/welcome"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_goneMarginTop="4dp"/>
</android.support.constraint.ConstraintLayout>

View file

@@ -0,0 +1,151 @@
<?xml version="1.0" encoding="utf-8"?>
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="@color/backgroundLight">
<ImageView
android:id="@+id/header"
android:layout_width="@dimen/header_hifi_width"
android:layout_height="@dimen/header_hifi_height"
android:layout_marginTop="@dimen/header_hifi_margin_top"
android:contentDescription="HighFidelity"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:srcCompat="@drawable/hifi_header" />
<TextView
android:id="@+id/error"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_marginBottom="16dp"
android:layout_marginLeft="9dp"
android:layout_marginRight="9dp"
android:fontFamily="@font/raleway"
android:textColor="@color/colorLoginError"
android:textSize="14sp"
app:layout_constraintBottom_toTopOf="@id/email"
app:layout_constraintLeft_toLeftOf="@id/email"
app:layout_constraintRight_toRightOf="@id/email"
android:visibility="invisible"/>
<EditText
android:id="@+id/email"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:inputType="textEmailAddress"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/header"
android:layout_marginTop="70dp"
android:hint="@string/email" />
<EditText
android:id="@+id/username"
android:layout_width="match_parent"
android:layout_height="35dp"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:paddingRight="12dp"
android:paddingTop="14dp"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:inputType="text"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
app:layout_constraintTop_toBottomOf="@id/email"
android:layout_marginTop="7dp"
android:hint="@string/username" />
<android.support.design.widget.TextInputLayout
android:id="@+id/passwordLayout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginLeft="46dp"
android:layout_marginRight="46dp"
app:passwordToggleTint="@color/showPasswordColor"
app:passwordToggleEnabled="true"
app:hintAnimationEnabled="false"
app:passwordToggleDrawable="@drawable/selector_show_password"
app:hintEnabled="false"
app:layout_constraintTop_toBottomOf="@id/username"
android:layout_marginTop="7dp"
>
<android.support.design.widget.TextInputEditText
android:id="@+id/password"
android:layout_width="match_parent"
android:layout_height="35dp"
android:background="@drawable/rounded_edit"
android:padding="7dp"
android:drawablePadding="55dp"
android:paddingTop="14dp"
android:drawableEnd="@drawable/ic_eye_noshow"
android:ems="10"
android:fontFamily="@font/raleway"
android:textSize="17sp"
android:textStyle="italic"
android:textColor="@color/editTextColor"
android:textColorHint="@color/editTextColor"
android:gravity="left|center_vertical"
android:imeOptions="actionDone"
android:hint="@string/password"
android:inputType="textPassword" />
</android.support.design.widget.TextInputLayout>
<Button
android:id="@+id/signupButton"
android:layout_width="154dp"
android:layout_height="38dp"
android:layout_marginTop="44dp"
android:background="@drawable/rounded_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:text="@string/signup"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintRight_toRightOf="@id/username"
app:layout_constraintTop_toBottomOf="@id/passwordLayout"
app:layout_goneMarginTop="4dp"/>
<Button
android:id="@+id/cancelButton"
android:layout_width="0dp"
app:layout_constraintWidth_default="spread"
android:layout_height="38dp"
android:background="@drawable/rounded_secondary_button"
android:fontFamily="@font/raleway_semibold"
android:paddingBottom="0dp"
android:paddingTop="0dp"
android:layout_marginRight="15dp"
android:text="@string/cancel"
android:textColor="@color/white_opaque"
android:textAllCaps="false"
android:textSize="18sp"
app:layout_constraintLeft_toLeftOf="@id/passwordLayout"
app:layout_constraintTop_toTopOf="@id/signupButton"
app:layout_constraintRight_toLeftOf="@id/signupButton"
app:layout_goneMarginTop="4dp"/>
</android.support.constraint.ConstraintLayout>

View file

@@ -9,6 +9,7 @@
<color name="showPasswordColor">#3D3D3D</color>
<color name="tabs">#1EB5EC</color>
<color name="colorButton1">#00B4EF</color>
<color name="colorButton2">#828282</color>
<color name="backgroundDark">#333333</color>
<color name="backgroundLight">#4F4F4F</color>
<color name="backgroundSearch">#33999999</color>
@@ -23,4 +24,6 @@
<color name="starSelectedTint">#FBD92A</color>
<color name="starUnselectedTint">#8A8A8A</color>
<color name="slidingUpPanelFadeColor">#40000000</color>
<color name="clearText">#F2F2F2</color>
</resources>

View file

@@ -10,11 +10,13 @@
<string name="popular">POPULAR</string>
<string name="bookmarks">BOOKMARKS</string>
<string name="goto_url_hint">Type a domain url</string>
<string name="email">Email</string>
<string name="username">Username</string>
<string name="username_or_email">Username or email</string>
<string name="password">Password</string>
<string name="login">Login</string>
<string name="logout">Logout</string>
<string name="forgot_password">Forgot password?\u00A0</string>
<string name="forgot_password"><u>Forgot password?</u>\u00A0</string>
<string name="login_username_or_password_incorrect">Username or password incorrect.</string>
<string name="logging_in">Logging into High Fidelity</string>
<string name="search_hint"><i>Search for a place by name</i>\u00A0</string>
@@ -23,13 +25,22 @@
<string name="privacyPolicy">Privacy Policy</string>
<string name="your_last_location">Your Last Location</string>
<string name="online">Online</string>
<string name="signup">Sign Up</string>
<string name="creating_account">Creating your High Fidelity account</string>
<string name="signup_email_username_or_password_incorrect">Email, username or password incorrect.</string>
<string name="signedin_welcome">You are now signed into High Fidelity</string>
<string name="welcome">Welcome</string>
<string name="cancel">Cancel</string>
<string name="get_started">Get Started</string>
<!-- tags -->
<string name="tagFragmentHome">tagFragmentHome</string>
<string name="tagFragmentLogin">tagFragmentLogin</string>
<string name="tagFragmentSignup">tagFragmentSignup</string>
<string name="tagFragmentPolicy">tagFragmentPolicy</string>
<string name="tagFragmentPeople">tagFragmentPeople</string>
<string name="tagSettings">tagSettings</string>
<string name="tagFragmentSignedIn">tagFragmentSignedIn</string>
<string name="settings">Settings</string>
<string name="AEC">AEC</string>
<string name="acoustic_echo_cancellation">Acoustic Echo Cancellation</string>

View file

@@ -38,6 +38,8 @@
#include "AvatarAudioStream.h"
#include "InjectedAudioStream.h"
using namespace std;
static const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
static const int DISABLE_STATIC_JITTER_FRAMES = -1;
static const float DEFAULT_NOISE_MUTING_THRESHOLD = 1.0f;
@@ -49,11 +51,11 @@ static const QString AUDIO_THREADING_GROUP_KEY = "audio_threading";
int AudioMixer::_numStaticJitterFrames{ DISABLE_STATIC_JITTER_FRAMES };
float AudioMixer::_noiseMutingThreshold{ DEFAULT_NOISE_MUTING_THRESHOLD };
float AudioMixer::_attenuationPerDoublingInDistance{ DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE };
std::map<QString, std::shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
map<QString, shared_ptr<CodecPlugin>> AudioMixer::_availableCodecs{ };
QStringList AudioMixer::_codecPreferenceOrder{};
QHash<QString, AABox> AudioMixer::_audioZones;
QVector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
QVector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
vector<AudioMixer::ZoneDescription> AudioMixer::_audioZones;
vector<AudioMixer::ZoneSettings> AudioMixer::_zoneSettings;
vector<AudioMixer::ReverbSettings> AudioMixer::_zoneReverbSettings;
AudioMixer::AudioMixer(ReceivedMessage& message) :
ThreadedAssignment(message)
@@ -67,7 +69,7 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
_availableCodecs.clear(); // Make sure struct is clean
auto pluginManager = DependencyManager::set<PluginManager>();
auto codecPlugins = pluginManager->getCodecPlugins();
std::for_each(codecPlugins.cbegin(), codecPlugins.cend(),
for_each(codecPlugins.cbegin(), codecPlugins.cend(),
[&](const CodecPluginPointer& codec) {
_availableCodecs[codec->getName()] = codec;
});
@@ -122,7 +124,7 @@ void AudioMixer::queueAudioPacket(QSharedPointer<ReceivedMessage> message, Share
void AudioMixer::queueReplicatedAudioPacket(QSharedPointer<ReceivedMessage> message) {
// make sure we have a replicated node for the original sender of the packet
auto nodeList = DependencyManager::get<NodeList>();
// Node ID is now part of user data, since replicated audio packets are non-sourced.
QUuid nodeID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
@@ -173,12 +175,12 @@ void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> mes
}
}
const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vector<QString> codecs) {
const pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(vector<QString> codecs) {
QString selectedCodecName;
CodecPluginPointer selectedCodec;
// read the codecs requested (by the client)
int minPreference = std::numeric_limits<int>::max();
int minPreference = numeric_limits<int>::max();
for (auto& codec : codecs) {
if (_availableCodecs.count(codec) > 0) {
int preference = _codecPreferenceOrder.indexOf(codec);
@@ -191,20 +193,9 @@ const std::pair<QString, CodecPluginPointer> AudioMixer::negotiateCodec(std::vec
}
}
return std::make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
return make_pair(selectedCodecName, _availableCodecs[selectedCodecName]);
}
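The selection rule above is easy to miss in the diff: the mixer walks the codecs the client offered and keeps the one that ranks earliest in the server's own preference order. A minimal standalone sketch of that rule (hypothetical names, no hifi types):

#include <limits>
#include <string>
#include <vector>

std::string pickCodec(const std::vector<std::string>& offered,
                      const std::vector<std::string>& preferenceOrder) {
    std::string selected;
    size_t best = std::numeric_limits<size_t>::max();
    for (const auto& codec : offered) {
        for (size_t rank = 0; rank < preferenceOrder.size(); ++rank) {
            // keep the offered codec that ranks earliest in the server's order
            if (preferenceOrder[rank] == codec && rank < best) {
                best = rank;
                selected = codec;
            }
        }
    }
    return selected; // empty when nothing the client offered is available
}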
void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
// enumerate the connected listeners to remove HRTF objects for the disconnected node
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (clientData) {
clientData->removeNode(killedNode->getUUID());
}
});
}
void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto nodeList = DependencyManager::get<NodeList>();
@@ -223,32 +214,31 @@ void AudioMixer::handleNodeMuteRequestPacket(QSharedPointer<ReceivedMessage> pac
}
}
void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(killedNode->getLinkedData());
if (clientData) {
// stage the removal of all streams from this node, workers handle when preparing mixes for listeners
_workerSharedData.removedNodes.emplace_back(killedNode->getLocalID());
}
}
void AudioMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
if (clientData) {
clientData->removeAgentAvatarAudioStream();
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([sendingNode](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(sendingNode->getUUID());
}
});
// stage a removal of the avatar audio stream from this Agent, workers handle when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(sendingNode->getUUID(), sendingNode->getLocalID(), QUuid());
}
}
void AudioMixer::removeHRTFsForFinishedInjector(const QUuid& streamID) {
auto injectorClientData = qobject_cast<AudioMixerClientData*>(sender());
if (injectorClientData) {
// enumerate the connected listeners to remove HRTF objects for the disconnected injector
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachNode([injectorClientData, &streamID](const SharedNodePointer& node){
auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (listenerClientData) {
listenerClientData->removeHRTFForStream(injectorClientData->getNodeID(), streamID);
}
});
if (injectorClientData) {
// stage the removal of this stream, workers handle when preparing mixes for listeners
_workerSharedData.removedStreams.emplace_back(injectorClientData->getNodeID(), injectorClientData->getNodeLocalID(),
streamID);
}
}
@@ -285,7 +275,7 @@ void AudioMixer::sendStatsPacket() {
// timing stats
QJsonObject timingStats;
auto addTiming = [&](Timer& timer, std::string name) {
auto addTiming = [&](Timer& timer, string name) {
uint64_t timing, trailing;
timer.get(timing, trailing);
timingStats[("us_per_" + name).c_str()] = (qint64)(timing / _numStatFrames);
@@ -293,12 +283,12 @@
};
addTiming(_ticTiming, "tic");
addTiming(_checkTimeTiming, "check_time");
addTiming(_sleepTiming, "sleep");
addTiming(_frameTiming, "frame");
addTiming(_prepareTiming, "prepare");
addTiming(_packetsTiming, "packets");
addTiming(_mixTiming, "mix");
addTiming(_eventsTiming, "events");
addTiming(_packetsTiming, "packets");
#ifdef HIFI_AUDIO_MIXER_DEBUG
timingStats["ns_per_mix"] = (_stats.totalMixes > 0) ? (float)(_stats.mixTime / _stats.totalMixes) : 0;
@@ -311,11 +301,24 @@
QJsonObject mixStats;
mixStats["%_hrtf_mixes"] = percentageForMixStats(_stats.hrtfRenders);
mixStats["%_hrtf_silent_mixes"] = percentageForMixStats(_stats.hrtfSilentRenders);
mixStats["%_hrtf_throttle_mixes"] = percentageForMixStats(_stats.hrtfThrottleRenders);
mixStats["%_manual_stereo_mixes"] = percentageForMixStats(_stats.manualStereoMixes);
mixStats["%_manual_echo_mixes"] = percentageForMixStats(_stats.manualEchoMixes);
mixStats["1_hrtf_renders"] = (int)(_stats.hrtfRenders / (float)_numStatFrames);
mixStats["1_hrtf_resets"] = (int)(_stats.hrtfResets / (float)_numStatFrames);
mixStats["1_hrtf_updates"] = (int)(_stats.hrtfUpdates / (float)_numStatFrames);
mixStats["2_skipped_streams"] = (int)(_stats.skipped / (float)_numStatFrames);
mixStats["2_inactive_streams"] = (int)(_stats.inactive / (float)_numStatFrames);
mixStats["2_active_streams"] = (int)(_stats.active / (float)_numStatFrames);
mixStats["3_skippped_to_active"] = (int)(_stats.skippedToActive / (float)_numStatFrames);
mixStats["3_skippped_to_inactive"] = (int)(_stats.skippedToInactive / (float)_numStatFrames);
mixStats["3_inactive_to_skippped"] = (int)(_stats.inactiveToSkipped / (float)_numStatFrames);
mixStats["3_inactive_to_active"] = (int)(_stats.inactiveToActive / (float)_numStatFrames);
mixStats["3_active_to_skippped"] = (int)(_stats.activeToSkipped / (float)_numStatFrames);
mixStats["3_active_to_inactive"] = (int)(_stats.activeToInactive / (float)_numStatFrames);
mixStats["total_mixes"] = _stats.totalMixes;
mixStats["avg_mixes_per_block"] = _stats.totalMixes / _numStatFrames;
@@ -366,7 +369,7 @@ AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
if (!clientData) {
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
node->setLinkedData(unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID(), node->getLocalID()) });
clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}
@@ -393,33 +396,49 @@ void AudioMixer::start() {
// mix state
unsigned int frame = 1;
auto frameTimestamp = p_high_resolution_clock::now();
while (!_isFinished) {
auto ticTimer = _ticTiming.timer();
{
auto timer = _sleepTiming.timer();
auto frameDuration = timeFrame(frameTimestamp);
if (_startFrameTimestamp.time_since_epoch().count() == 0) {
_startFrameTimestamp = _idealFrameTimestamp = p_high_resolution_clock::now();
} else {
auto timer = _checkTimeTiming.timer();
auto frameDuration = timeFrame();
throttle(frameDuration, frame);
}
auto frameTimer = _frameTiming.timer();
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// prepare frames; pop off any new audio from their streams
{
auto prepareTimer = _prepareTiming.timer();
std::for_each(cbegin, cend, [&](const SharedNodePointer& node) {
_stats.sumStreams += prepareFrame(node, frame);
});
}
// process (node-isolated) audio packets across slave threads
{
auto packetsTimer = _packetsTiming.timer();
// first clear the concurrent vector of added streams that the slaves will add to when they process packets
_workerSharedData.addedStreams.clear();
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
_slavePool.processPackets(cbegin, cend);
});
}
// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();
// clear removed nodes and removed streams before we process events that will setup the new set
_workerSharedData.removedNodes.clear();
_workerSharedData.removedStreams.clear();
// since we're in a while loop we need to yield to qt's event processing
QCoreApplication::processEvents();
}
int numToRetain = nodeList->size() * (1 - _throttlingRatio);
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
// mix across slave threads
{
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, _throttlingRatio);
}
auto mixTimer = _mixTiming.timer();
_slavePool.mix(cbegin, cend, frame, numToRetain);
});
// gather stats
@@ -431,21 +450,6 @@ void AudioMixer::start() {
++frame;
++_numStatFrames;
// process queued events (networking, global audio packets, &c.)
{
auto eventsTimer = _eventsTiming.timer();
// since we're in a while loop we need to yield to qt's event processing
QCoreApplication::processEvents();
// process (node-isolated) audio packets across slave threads
{
nodeList->nestedEach([&](NodeList::const_iterator cbegin, NodeList::const_iterator cend) {
auto packetsTimer = _packetsTiming.timer();
_slavePool.processPackets(cbegin, cend);
});
}
}
if (_isFinished) {
// alert qt eventing that this is finished
@@ -455,26 +459,26 @@
}
}
std::chrono::microseconds AudioMixer::timeFrame(p_high_resolution_clock::time_point& timestamp) {
chrono::microseconds AudioMixer::timeFrame() {
// advance the next frame
auto nextTimestamp = timestamp + std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
auto now = p_high_resolution_clock::now();
// compute how long the last frame took
auto duration = std::chrono::duration_cast<std::chrono::microseconds>(now - timestamp);
auto duration = chrono::duration_cast<chrono::microseconds>(now - _startFrameTimestamp);
// set the new frame timestamp
timestamp = std::max(now, nextTimestamp);
_idealFrameTimestamp += chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
// sleep until the next frame should start
// WIN32 sleep_until is broken until VS2015 Update 2
// instead, std::max (above) guarantees that timestamp >= now, so we can sleep_for
std::this_thread::sleep_for(timestamp - now);
{
auto timer = _sleepTiming.timer();
this_thread::sleep_until(_idealFrameTimestamp);
}
_startFrameTimestamp = p_high_resolution_clock::now();
return duration;
}
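The timing rewrite replaces the old sleep_for arithmetic with an ideal timestamp that advances by exactly one frame period, followed by sleep_until against it, so a late frame no longer pushes every subsequent frame back. A minimal standalone sketch of the idiom (the 10 ms period is assumed from NETWORK_FRAME_USECS and the FRAME_TIME constant below):

#include <chrono>
#include <thread>

int main() {
    using namespace std::chrono;
    const auto period = microseconds(10000); // one network frame, assumed 10 ms
    auto ideal = steady_clock::now();
    for (int frame = 0; frame < 100; ++frame) {
        // ... mix one frame here ...
        ideal += period;                      // ideal start of the next frame
        std::this_thread::sleep_until(ideal); // drift-free, unlike sleep_for
    }
    return 0;
}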
void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
void AudioMixer::throttle(chrono::microseconds duration, int frame) {
// throttle using a modified proportional-integral controller
const float FRAME_TIME = 10000.0f;
float mixRatio = duration.count() / FRAME_TIME;
@@ -508,28 +512,19 @@ void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
if (_trailingMixRatio > TARGET) {
int proportionalTerm = 1 + (_trailingMixRatio - TARGET) / 0.1f;
_throttlingRatio += THROTTLE_RATE * proportionalTerm;
_throttlingRatio = std::min(_throttlingRatio, 1.0f);
_throttlingRatio = min(_throttlingRatio, 1.0f);
qCDebug(audio) << "audio-mixer is struggling (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
} else if (_throttlingRatio > 0.0f && _trailingMixRatio <= BACKOFF_TARGET) {
int proportionalTerm = 1 + (TARGET - _trailingMixRatio) / 0.2f;
_throttlingRatio -= BACKOFF_RATE * proportionalTerm;
_throttlingRatio = std::max(_throttlingRatio, 0.0f);
_throttlingRatio = max(_throttlingRatio, 0.0f);
qCDebug(audio) << "audio-mixer is recovering (" << _trailingMixRatio << "mix/sleep) - throttling"
<< _throttlingRatio << "of streams";
}
}
}
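The controller's output feeds the mix loop earlier in start(): int numToRetain = nodeList->size() * (1 - _throttlingRatio);. A worked example with hypothetical numbers:

#include <cstdio>

int main() {
    int nodeCount = 120;           // hypothetical number of connected listeners
    float throttlingRatio = 0.25f; // as set by the proportional controller above
    int numToRetain = nodeCount * (1 - throttlingRatio);
    std::printf("retain %d of %d streams per mix\n", numToRetain, nodeCount); // 90 of 120
    return 0;
}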
int AudioMixer::prepareFrame(const SharedNodePointer& node, unsigned int frame) {
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
if (data == nullptr) {
return 0;
}
return data->checkBuffersBeforeFrameSend();
}
void AudioMixer::clearDomainSettings() {
_numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
_attenuationPerDoublingInDistance = DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE;
@@ -661,8 +656,11 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
const QString Y_MAX = "y_max";
const QString Z_MIN = "z_min";
const QString Z_MAX = "z_max";
foreach (const QString& zone, zones.keys()) {
QJsonObject zoneObject = zones[zone].toObject();
auto zoneNames = zones.keys();
_audioZones.reserve(zoneNames.length());
foreach (const QString& zoneName, zoneNames) {
QJsonObject zoneObject = zones[zoneName].toObject();
if (zoneObject.contains(X_MIN) && zoneObject.contains(X_MAX) && zoneObject.contains(Y_MIN) &&
zoneObject.contains(Y_MAX) && zoneObject.contains(Z_MIN) && zoneObject.contains(Z_MAX)) {
@@ -686,8 +684,8 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
glm::vec3 corner(xMin, yMin, zMin);
glm::vec3 dimensions(xMax - xMin, yMax - yMin, zMax - zMin);
AABox zoneAABox(corner, dimensions);
_audioZones.insert(zone, zoneAABox);
qCDebug(audio) << "Added zone:" << zone << "(corner:" << corner << ", dimensions:" << dimensions << ")";
_audioZones.push_back({ zoneName, zoneAABox });
qCDebug(audio) << "Added zone:" << zoneName << "(corner:" << corner << ", dimensions:" << dimensions << ")";
}
}
}
@@ -707,18 +705,28 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
coefficientObject.contains(LISTENER) &&
coefficientObject.contains(COEFFICIENT)) {
ZoneSettings settings;
auto itSource = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == coefficientObject.value(SOURCE).toString();
});
auto itListener = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == coefficientObject.value(LISTENER).toString();
});
bool ok;
settings.source = coefficientObject.value(SOURCE).toString();
settings.listener = coefficientObject.value(LISTENER).toString();
settings.coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
float coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
if (ok && settings.coefficient >= 0.0f && settings.coefficient <= 1.0f &&
_audioZones.contains(settings.source) && _audioZones.contains(settings.listener)) {
if (ok && coefficient >= 0.0f && coefficient <= 1.0f &&
itSource != end(_audioZones) &&
itListener != end(_audioZones)) {
ZoneSettings settings;
settings.source = itSource - begin(_audioZones);
settings.listener = itListener - begin(_audioZones);
settings.coefficient = coefficient;
_zoneSettings.push_back(settings);
qCDebug(audio) << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
qCDebug(audio) << "Added Coefficient:" << itSource->name << itListener->name << settings.coefficient;
}
}
}
@@ -739,19 +747,21 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
reverbObject.contains(WET_LEVEL)) {
bool okReverbTime, okWetLevel;
QString zone = reverbObject.value(ZONE).toString();
auto itZone = find_if(begin(_audioZones), end(_audioZones), [&](const ZoneDescription& description) {
return description.name == reverbObject.value(ZONE).toString();
});
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);
if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
if (okReverbTime && okWetLevel && itZone != end(_audioZones)) {
ReverbSettings settings;
settings.zone = zone;
settings.zone = itZone - begin(_audioZones);
settings.reverbTime = reverbTime;
settings.wetLevel = wetLevel;
_zoneReverbSettings.push_back(settings);
qCDebug(audio) << "Added Reverb:" << zone << reverbTime << wetLevel;
qCDebug(audio) << "Added Reverb:" << itZone->name << reverbTime << wetLevel;
}
}
}
@@ -764,7 +774,7 @@ AudioMixer::Timer::Timing::Timing(uint64_t& sum) : _sum(sum) {
}
AudioMixer::Timer::Timing::~Timing() {
_sum += std::chrono::duration_cast<std::chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
_sum += chrono::duration_cast<chrono::microseconds>(p_high_resolution_clock::now() - _timing).count();
}
void AudioMixer::Timer::get(uint64_t& timing, uint64_t& trailing) {

View file

@@ -34,13 +34,18 @@ class AudioMixer : public ThreadedAssignment {
public:
AudioMixer(ReceivedMessage& message);
struct ZoneDescription {
QString name;
AABox area;
};
struct ZoneSettings {
QString source;
QString listener;
int source;
int listener;
float coefficient;
};
struct ReverbSettings {
QString zone;
int zone;
float reverbTime;
float wetLevel;
};
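Zones were previously keyed by name (QHash<QString, AABox>) and the settings structs stored QString names; both now use int indices into the _audioZones vector. A self-contained sketch of the lookup shape this buys, with the AABox member elided:

#include <string>
#include <vector>

struct ZoneDescription { std::string name; /* AABox area; */ };
struct ZoneSettings { int source; int listener; float coefficient; };

float coefficientFor(const std::vector<ZoneSettings>& settings,
                     int sourceZone, int listenerZone) {
    for (const auto& s : settings) {
        if (s.source == sourceZone && s.listener == listenerZone) {
            return s.coefficient; // plain int comparisons, no QString hashing
        }
    }
    return 1.0f; // assumed default: no per-zone attenuation
}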
@@ -48,9 +53,9 @@
static int getStaticJitterFrames() { return _numStaticJitterFrames; }
static bool shouldMute(float quietestFrame) { return quietestFrame > _noiseMutingThreshold; }
static float getAttenuationPerDoublingInDistance() { return _attenuationPerDoublingInDistance; }
static const QHash<QString, AABox>& getAudioZones() { return _audioZones; }
static const QVector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
static const QVector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
static const std::vector<ZoneDescription>& getAudioZones() { return _audioZones; }
static const std::vector<ZoneSettings>& getZoneSettings() { return _zoneSettings; }
static const std::vector<ReverbSettings>& getReverbSettings() { return _zoneReverbSettings; }
static const std::pair<QString, CodecPluginPointer> negotiateCodec(std::vector<QString> codecs);
static bool shouldReplicateTo(const Node& from, const Node& to) {
@@ -79,11 +84,8 @@ private slots:
private:
// mixing helpers
std::chrono::microseconds timeFrame(p_high_resolution_clock::time_point& timestamp);
std::chrono::microseconds timeFrame();
void throttle(std::chrono::microseconds frameDuration, int frame);
// pop a frame from any streams on the node
// returns the number of available streams
int prepareFrame(const SharedNodePointer& node, unsigned int frame);
AudioMixerClientData* getOrCreateClientData(Node* node);
@@ -92,6 +94,9 @@
void parseSettingsObject(const QJsonObject& settingsObject);
void clearDomainSettings();
p_high_resolution_clock::time_point _idealFrameTimestamp;
p_high_resolution_clock::time_point _startFrameTimestamp;
float _trailingMixRatio { 0.0f };
float _throttlingRatio { 0.0f };
@@ -100,7 +105,7 @@ private:
int _numStatFrames { 0 };
AudioMixerStats _stats;
AudioMixerSlavePool _slavePool;
AudioMixerSlavePool _slavePool { _workerSharedData };
class Timer {
public:
@@ -123,7 +128,9 @@
uint64_t _history[TIMER_TRAILING_SECONDS] {};
int _index { 0 };
};
Timer _ticTiming;
Timer _checkTimeTiming;
Timer _sleepTiming;
Timer _frameTiming;
Timer _prepareTiming;
@@ -136,10 +143,13 @@ private:
static float _attenuationPerDoublingInDistance;
static std::map<QString, CodecPluginPointer> _availableCodecs;
static QStringList _codecPreferenceOrder;
static QHash<QString, AABox> _audioZones;
static QVector<ZoneSettings> _zoneSettings;
static QVector<ReverbSettings> _zoneReverbSettings;
static std::vector<ZoneDescription> _audioZones;
static std::vector<ZoneSettings> _zoneSettings;
static std::vector<ReverbSettings> _zoneReverbSettings;
AudioMixerSlave::SharedData _workerSharedData;
};
#endif // hifi_AudioMixer_h

View file

@@ -13,6 +13,8 @@
#include <random>
#include <glm/detail/func_common.hpp>
#include <QtCore/QDebug>
#include <QtCore/QJsonArray>
@@ -28,7 +30,6 @@
AudioMixerClientData::AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID) :
NodeData(nodeID, nodeLocalID),
audioLimiter(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO),
_ignoreZone(*this),
_outgoingMixedAudioSequenceNumber(0),
_downstreamAudioStreamStats()
{
@@ -56,7 +57,7 @@ void AudioMixerClientData::queuePacket(QSharedPointer<ReceivedMessage> message,
_packetQueue.push(message);
}
void AudioMixerClientData::processPackets() {
int AudioMixerClientData::processPackets(ConcurrentAddedStreams& addedStreams) {
SharedNodePointer node = _packetQueue.node;
assert(_packetQueue.empty() || node);
_packetQueue.node.clear();
@@ -69,22 +70,17 @@ void AudioMixerClientData::processPackets() {
case PacketType::MicrophoneAudioWithEcho:
case PacketType::InjectAudio:
case PacketType::SilentAudioFrame: {
if (node->isUpstream()) {
setupCodecForReplicatedAgent(packet);
}
QMutexLocker lock(&getMutex());
parseData(*packet);
processStreamPacket(*packet, addedStreams);
optionallyReplicatePacket(*packet, *node);
break;
}
case PacketType::AudioStreamStats: {
QMutexLocker lock(&getMutex());
parseData(*packet);
break;
}
case PacketType::NegotiateAudioFormat:
@@ -109,6 +105,10 @@ void AudioMixerClientData::processPackets() {
_packetQueue.pop();
}
assert(_packetQueue.empty());
// now that we have processed all packets for this frame
// we can prepare the sources from this client to be ready for mixing
return checkBuffersBeforeFrameSend();
}
bool isReplicatedPacket(PacketType packetType) {
@@ -186,63 +186,136 @@ void AudioMixerClientData::parseRequestsDomainListData(ReceivedMessage& message)
void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node) {
QUuid uuid = node->getUUID();
// parse the UUID from the packet
QUuid avatarUuid = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
QUuid avatarUUID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
uint8_t packedGain;
message.readPrimitive(&packedGain);
float gain = unpackFloatGainFromByte(packedGain);
if (avatarUuid.isNull()) {
if (avatarUUID.isNull()) {
// set the MASTER avatar gain
setMasterAvatarGain(gain);
qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
} else {
// set the per-source avatar gain
hrtfForStream(avatarUuid, QUuid()).setGainAdjustment(gain);
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUuid << "] to " << gain;
setGainForAvatar(avatarUUID, gain);
qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUUID << "] to " << gain;
}
}
void AudioMixerClientData::setGainForAvatar(QUuid nodeID, float gain) {
auto it = std::find_if(_streams.active.cbegin(), _streams.active.cend(), [nodeID](const MixableStream& mixableStream){
return mixableStream.nodeStreamID.nodeID == nodeID && mixableStream.nodeStreamID.streamID.isNull();
});
if (it != _streams.active.cend()) {
it->hrtf->setGainAdjustment(gain);
}
}
void AudioMixerClientData::parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
node->parseIgnoreRequestMessage(message);
auto ignoredNodesPair = node->parseIgnoreRequestMessage(message);
// we have a vector of ignored or unignored node UUIDs - update our internal data structures so that
// streams can be included or excluded next time a mix is being created
if (ignoredNodesPair.second) {
// we have newly ignored nodes, add them to our vector
_newIgnoredNodeIDs.insert(std::end(_newIgnoredNodeIDs),
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
} else {
// we have newly unignored nodes, add them to our vector
_newUnignoredNodeIDs.insert(std::end(_newUnignoredNodeIDs),
std::begin(ignoredNodesPair.first), std::end(ignoredNodesPair.first));
}
auto nodeList = DependencyManager::get<NodeList>();
for (auto& nodeID : ignoredNodesPair.first) {
auto otherNode = nodeList->nodeWithUUID(nodeID);
if (otherNode) {
auto otherNodeMixerClientData = static_cast<AudioMixerClientData*>(otherNode->getLinkedData());
if (otherNodeMixerClientData) {
if (ignoredNodesPair.second) {
otherNodeMixerClientData->ignoredByNode(getNodeID());
} else {
otherNodeMixerClientData->unignoredByNode(getNodeID());
}
}
}
}
}
void AudioMixerClientData::ignoredByNode(QUuid nodeID) {
// first add this ID to the concurrent vector for newly ignoring nodes
_newIgnoringNodeIDs.push_back(nodeID);
// now take a lock on the consistent vector of ignoring nodes and make sure this node is in it
std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
if (std::find(_ignoringNodeIDs.begin(), _ignoringNodeIDs.end(), nodeID) == _ignoringNodeIDs.end()) {
_ignoringNodeIDs.push_back(nodeID);
}
}
void AudioMixerClientData::unignoredByNode(QUuid nodeID) {
// first add this ID to the concurrent vector for newly unignoring nodes
_newUnignoringNodeIDs.push_back(nodeID);
// now take a lock on the consistent vector of ignoring nodes and make sure this node isn't in it
std::lock_guard<std::mutex> lock(_ignoringNodeIDsMutex);
auto it = _ignoringNodeIDs.begin();
while (it != _ignoringNodeIDs.end()) {
if (*it == nodeID) {
it = _ignoringNodeIDs.erase(it);
} else {
++it;
}
}
}
void AudioMixerClientData::clearStagedIgnoreChanges() {
_newIgnoredNodeIDs.clear();
_newUnignoredNodeIDs.clear();
_newIgnoringNodeIDs.clear();
_newUnignoringNodeIDs.clear();
}
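All four staging vectors cleared here follow the same pattern: lock-free appends from any mixer thread into a tbb::concurrent_vector, a mutex-guarded canonical list where one is needed, and a single-threaded clear between frames. A condensed, hypothetical sketch of that pattern (not the real class):

#include <algorithm>
#include <mutex>
#include <vector>
#include <QUuid>
#include <tbb/concurrent_vector.h>

class IgnoreLedger {
public:
    void ignoredBy(const QUuid& id) {
        _staged.push_back(id); // concurrent_vector supports concurrent push_back
        std::lock_guard<std::mutex> lock(_mutex);
        if (std::find(_canonical.begin(), _canonical.end(), id) == _canonical.end()) {
            _canonical.push_back(id);
        }
    }
    // must only run between frames: concurrent_vector::clear() is not thread-safe
    void clearStaged() { _staged.clear(); }
private:
    tbb::concurrent_vector<QUuid> _staged; // per-frame deltas, drained by one consumer
    std::mutex _mutex;
    std::vector<QUuid> _canonical;         // consistent view
};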
void AudioMixerClientData::parseRadiusIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
bool enabled;
message->readPrimitive(&enabled);
_isIgnoreRadiusEnabled = enabled;
auto avatarAudioStream = getAvatarAudioStream();
// if we have an avatar audio stream, tell it whether its ignore box should be enabled or disabled
if (avatarAudioStream) {
if (_isIgnoreRadiusEnabled) {
avatarAudioStream->enableIgnoreBox();
} else {
avatarAudioStream->disableIgnoreBox();
}
}
}
AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
auto it = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
return stream->getStreamIdentifier().isNull();
});
if (it != _audioStreams.end()) {
return dynamic_cast<AvatarAudioStream*>(it->get());
}
// no mic stream found - return NULL
return NULL;
}
void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID) {
auto it = _nodeSourcesHRTFMap.find(nodeID);
if (it != _nodeSourcesHRTFMap.end()) {
// erase the stream with the given ID from the given node
it->second.erase(streamID);
// is the map for this node now empty?
// if so we can remove it
if (it->second.size() == 0) {
_nodeSourcesHRTFMap.erase(it);
}
}
}
void AudioMixerClientData::removeAgentAvatarAudioStream() {
auto it = std::remove_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
return stream->getStreamIdentifier().isNull();
});
if (it != _audioStreams.end()) {
_audioStreams.erase(it);
}
}
int AudioMixerClientData::parseData(ReceivedMessage& message) {
@ -252,128 +325,186 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
// skip over header, appendFlag, and num stats packed
message.seek(sizeof(quint8) + sizeof(quint16));
if (message.getBytesLeftToRead() != sizeof(AudioStreamStats)) {
qWarning() << "Received AudioStreamStats of wrong size" << message.getBytesLeftToRead()
<< "instead of" << sizeof(AudioStreamStats) << "from"
<< message.getSourceID() << "at" << message.getSenderSockAddr();
return message.getPosition();
}
// read the downstream audio stream stats
message.readPrimitive(&_downstreamAudioStreamStats);
return message.getPosition();
} else {
SharedStreamPointer matchingStream;
bool isMicStream = false;
if (packetType == PacketType::MicrophoneAudioWithEcho
|| packetType == PacketType::ReplicatedMicrophoneAudioWithEcho
|| packetType == PacketType::MicrophoneAudioNoEcho
|| packetType == PacketType::ReplicatedMicrophoneAudioNoEcho
|| packetType == PacketType::SilentAudioFrame
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
QWriteLocker writeLocker { &_streamsLock };
auto micStreamIt = _audioStreams.find(QUuid());
if (micStreamIt == _audioStreams.end()) {
// we don't have a mic stream yet, so add it
// hop past the sequence number that leads the packet
message.seek(sizeof(quint16));
// pull the codec string from the packet
auto codecString = message.readString();
// determine if the stream is stereo or not
bool isStereo;
if (packetType == PacketType::SilentAudioFrame
|| packetType == PacketType::ReplicatedSilentAudioFrame) {
quint16 numSilentSamples;
message.readPrimitive(&numSilentSamples);
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
} else {
quint8 channelFlag;
message.readPrimitive(&channelFlag);
isStereo = channelFlag == 1;
}
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
this, &AudioMixerClientData::handleMismatchAudioFormat);
auto emplaced = _audioStreams.emplace(
QUuid(),
std::unique_ptr<PositionalAudioStream> { avatarAudioStream }
);
micStreamIt = emplaced.first;
}
matchingStream = micStreamIt->second;
writeLocker.unlock();
isMicStream = true;
} else if (packetType == PacketType::InjectAudio
|| packetType == PacketType::ReplicatedInjectAudio) {
// this is injected audio
// grab the stream identifier for this injected audio
message.seek(sizeof(quint16));
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
bool isStereo;
message.readPrimitive(&isStereo);
QWriteLocker writeLock { &_streamsLock };
auto streamIt = _audioStreams.find(streamIdentifier);
if (streamIt == _audioStreams.end()) {
// we don't have this injected stream yet, so add it
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
#if INJECTORS_SUPPORT_CODECS
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
#endif
auto emplaced = _audioStreams.emplace(
streamIdentifier,
std::unique_ptr<InjectedAudioStream> { injectorStream }
);
streamIt = emplaced.first;
}
matchingStream = streamIt->second;
writeLock.unlock();
}
// seek to the beginning of the packet so that the next reader is in the right spot
message.seek(0);
// check the overflow count before we parse data
auto overflowBefore = matchingStream->getOverflowCount();
auto parseResult = matchingStream->parseData(message);
if (matchingStream->getOverflowCount() > overflowBefore) {
qCDebug(audio) << "Just overflowed on stream from" << message.getSourceID() << "at" << message.getSenderSockAddr();
qCDebug(audio) << "This stream is for" << (isMicStream ? "microphone audio" : "injected audio");
}
return parseResult;
}
return 0;
}
bool AudioMixerClientData::containsValidPosition(ReceivedMessage& message) const {
static const int SEQUENCE_NUMBER_BYTES = sizeof(quint16);
auto posBefore = message.getPosition();
message.seek(SEQUENCE_NUMBER_BYTES);
// skip over the codec string
message.readString();
switch (message.getType()) {
case PacketType::MicrophoneAudioNoEcho:
case PacketType::MicrophoneAudioWithEcho: {
// skip over the stereo flag
message.seek(message.getPosition() + sizeof(ChannelFlag));
break;
}
case PacketType::SilentAudioFrame: {
// skip the number of silent samples
message.seek(message.getPosition() + sizeof(SilentSamplesBytes));
break;
}
case PacketType::InjectAudio: {
// skip the stream ID, stereo flag, and loopback flag
message.seek(message.getPosition() + NUM_STREAM_ID_BYTES + sizeof(ChannelFlag) + sizeof(LoopbackFlag));
break;
}
default:
Q_UNREACHABLE();
break;
}
glm::vec3 peekPosition;
message.readPrimitive(&peekPosition);
// reset the position the message was at before we were called
message.seek(posBefore);
if (glm::any(glm::isnan(peekPosition))) {
return false;
}
return true;
}
void AudioMixerClientData::processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams &addedStreams) {
if (!containsValidPosition(message)) {
qDebug() << "Refusing to process audio stream from" << message.getSourceID() << "with invalid position";
return;
}
SharedStreamPointer matchingStream;
auto packetType = message.getType();
bool newStream = false;
if (packetType == PacketType::MicrophoneAudioWithEcho
|| packetType == PacketType::MicrophoneAudioNoEcho
|| packetType == PacketType::SilentAudioFrame) {
auto micStreamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [](const SharedStreamPointer& stream){
return stream->getStreamIdentifier().isNull();
});
if (micStreamIt == _audioStreams.end()) {
// we don't have a mic stream yet, so add it
// hop past the sequence number that leads the packet
message.seek(sizeof(StreamSequenceNumber));
// pull the codec string from the packet
auto codecString = message.readString();
// determine if the stream is stereo or not
bool isStereo;
if (packetType == PacketType::SilentAudioFrame || packetType == PacketType::ReplicatedSilentAudioFrame) {
SilentSamplesBytes numSilentSamples;
message.readPrimitive(&numSilentSamples);
isStereo = numSilentSamples == AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
} else {
ChannelFlag channelFlag;
message.readPrimitive(&channelFlag);
isStereo = channelFlag == 1;
}
auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
avatarAudioStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
if (_isIgnoreRadiusEnabled) {
avatarAudioStream->enableIgnoreBox();
} else {
avatarAudioStream->disableIgnoreBox();
}
qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
this, &AudioMixerClientData::handleMismatchAudioFormat);
matchingStream = SharedStreamPointer(avatarAudioStream);
_audioStreams.push_back(matchingStream);
newStream = true;
} else {
matchingStream = *micStreamIt;
}
} else if (packetType == PacketType::InjectAudio) {
// this is injected audio
// skip the sequence number and codec string and grab the stream identifier for this injected audio
message.seek(sizeof(StreamSequenceNumber));
message.readString();
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto streamIt = std::find_if(_audioStreams.begin(), _audioStreams.end(), [&streamIdentifier](const SharedStreamPointer& stream) {
return stream->getStreamIdentifier() == streamIdentifier;
});
if (streamIt == _audioStreams.end()) {
bool isStereo;
message.readPrimitive(&isStereo);
// we don't have this injected stream yet, so add it
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStaticJitterFrames());
#if INJECTORS_SUPPORT_CODECS
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName << "isStereo:" << isStereo;
#endif
matchingStream = SharedStreamPointer(injectorStream);
_audioStreams.push_back(matchingStream);
newStream = true;
} else {
matchingStream = *streamIt;
}
}
// seek to the beginning of the packet so that the next reader is in the right spot
message.seek(0);
// check the overflow count before we parse data
auto overflowBefore = matchingStream->getOverflowCount();
matchingStream->parseData(message);
if (matchingStream->getOverflowCount() > overflowBefore) {
qCDebug(audio) << "Just overflowed on stream" << matchingStream->getStreamIdentifier()
<< "from" << message.getSourceID();
}
if (newStream) {
// whenever a stream is added, push it to the concurrent vector of streams added this frame
addedStreams.emplace_back(getNodeID(), getNodeLocalID(), matchingStream->getStreamIdentifier(), matchingStream.get());
}
}
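processStreamPacket only ever produces into addedStreams. A hypothetical sketch of the consuming side, once all packet-processing threads have finished for the frame (names assumed, not from this file):

#include <tbb/concurrent_vector.h>

struct AddedStreamStub { int placeholder; }; // stands in for the real AddedStream
using ConcurrentAddedStreamStubs = tbb::concurrent_vector<AddedStreamStub>;

void endOfFrame(ConcurrentAddedStreamStubs& addedStreams) {
    // safe to iterate: no producers are running between frames
    for (const auto& added : addedStreams) {
        (void)added; // hand each new stream to every listener's bookkeeping here
    }
    addedStreams.clear(); // clear() is not thread-safe, so it happens here only
}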
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
auto it = _audioStreams.begin();
while (it != _audioStreams.end()) {
SharedStreamPointer stream = *it;
if (stream->popFrames(1, true) > 0) {
stream->updateLastPopOutputLoudnessAndTrailingLoudness();
@ -388,7 +519,7 @@ int AudioMixerClientData::checkBuffersBeforeFrameSend() {
// this is an inactive injector, pull it from our streams
// first emit that it is finished so that the HRTF objects for this source can be cleaned up
emit injectorStreamFinished(stream->getStreamIdentifier());
// erase the stream to drop our ref to the shared pointer and remove it
it = _audioStreams.erase(it);
@ -441,7 +572,7 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
// pack the calculated number of stream stats
for (int i = 0; i < numStreamStatsToPack; i++) {
PositionalAudioStream* stream = it->get();
stream->perSecondCallbackForUpdatingStats();
@ -513,12 +644,12 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
QJsonArray injectorArray;
auto streamsCopy = getAudioStreams();
for (auto& injectorPair : streamsCopy) {
if (injectorPair->getType() == PositionalAudioStream::Injector) {
QJsonObject upstreamStats;
AudioStreamStats streamStats = injectorPair->getAudioStreamStats();
upstreamStats["inj.desired"] = streamStats._desiredJitterBufferFrames;
upstreamStats["desired_calc"] = injectorPair.second->getCalculatedJitterBufferFrames();
upstreamStats["desired_calc"] = injectorPair->getCalculatedJitterBufferFrames();
upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
upstreamStats["available"] = (double) streamStats._framesAvailable;
upstreamStats["unplayed"] = (double) streamStats._unplayedMs;
@ -609,99 +740,6 @@ void AudioMixerClientData::cleanupCodec() {
}
}
AudioMixerClientData::IgnoreZone& AudioMixerClientData::IgnoreZoneMemo::get(unsigned int frame) {
// check for a memoized zone
if (frame != _frame.load(std::memory_order_acquire)) {
AvatarAudioStream* stream = _data.getAvatarAudioStream();
// get the initial dimensions from the stream
glm::vec3 corner = stream ? stream->getAvatarBoundingBoxCorner() : glm::vec3(0);
glm::vec3 scale = stream ? stream->getAvatarBoundingBoxScale() : glm::vec3(0);
// enforce a minimum scale
static const glm::vec3 MIN_IGNORE_BOX_SCALE = glm::vec3(0.3f, 1.3f, 0.3f);
if (glm::any(glm::lessThan(scale, MIN_IGNORE_BOX_SCALE))) {
scale = MIN_IGNORE_BOX_SCALE;
}
// (this is an arbitrary number determined empirically for comfort)
const float IGNORE_BOX_SCALE_FACTOR = 2.4f;
scale *= IGNORE_BOX_SCALE_FACTOR;
// create the box (we use a box for the zone for convenience)
AABox box(corner, scale);
// update the memoized zone
// This may be called by multiple threads concurrently,
// so take a lock and only update the memo if this call is first.
// This prevents concurrent updates from invalidating the returned reference
// (contingent on the preconditions listed in the header).
std::lock_guard<std::mutex> lock(_mutex);
if (frame != _frame.load(std::memory_order_acquire)) {
_zone = box;
unsigned int oldFrame = _frame.exchange(frame, std::memory_order_release);
Q_UNUSED(oldFrame);
}
}
return _zone;
}
void AudioMixerClientData::IgnoreNodeCache::cache(bool shouldIgnore) {
if (!_isCached) {
_shouldIgnore = shouldIgnore;
_isCached = true;
}
}
bool AudioMixerClientData::IgnoreNodeCache::isCached() {
return _isCached;
}
bool AudioMixerClientData::IgnoreNodeCache::shouldIgnore() {
bool ignore = _shouldIgnore;
_isCached = false;
return ignore;
}
bool AudioMixerClientData::shouldIgnore(const SharedNodePointer self, const SharedNodePointer node, unsigned int frame) {
// this is symmetric over self / node; if computed, it is cached in the other
// check the cache to avoid computation
auto& cache = _nodeSourcesIgnoreMap[node->getUUID()];
if (cache.isCached()) {
return cache.shouldIgnore();
}
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
if (!nodeData) {
return false;
}
// compute shouldIgnore
bool shouldIgnore = true;
if ( // the nodes are not ignoring each other explicitly (or are but get data regardless)
(!self->isIgnoringNodeWithID(node->getUUID()) ||
(nodeData->getRequestsDomainListData() && node->getCanKick())) &&
(!node->isIgnoringNodeWithID(self->getUUID()) ||
(getRequestsDomainListData() && self->getCanKick()))) {
// if either node is enabling an ignore radius, check their proximity
if ((self->isIgnoreRadiusEnabled() || node->isIgnoreRadiusEnabled())) {
auto& zone = _ignoreZone.get(frame);
auto& nodeZone = nodeData->_ignoreZone.get(frame);
shouldIgnore = zone.touches(nodeZone);
} else {
shouldIgnore = false;
}
}
// cache in node
nodeData->_nodeSourcesIgnoreMap[self->getUUID()].cache(shouldIgnore);
return shouldIgnore;
}
void AudioMixerClientData::setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message) {
// hop past the sequence number that leads the packet
message->seek(sizeof(quint16));

View file

@ -14,6 +14,8 @@
#include <queue>
#include <tbb/concurrent_vector.h>
#include <QtCore/QJsonObject>
#include <AABox.h>
@ -30,39 +32,34 @@
class AudioMixerClientData : public NodeData {
Q_OBJECT
public:
struct AddedStream {
NodeIDStreamID nodeIDStreamID;
PositionalAudioStream* positionalStream;
AddedStream(QUuid nodeID, Node::LocalID localNodeID,
StreamID streamID, PositionalAudioStream* positionalStream) :
nodeIDStreamID(nodeID, localNodeID, streamID), positionalStream(positionalStream) {};
};
using ConcurrentAddedStreams = tbb::concurrent_vector<AddedStream>;
AudioMixerClientData(const QUuid& nodeID, Node::LocalID nodeLocalID);
~AudioMixerClientData();
using SharedStreamPointer = std::shared_ptr<PositionalAudioStream>;
using AudioStreamMap = std::unordered_map<QUuid, SharedStreamPointer>;
using AudioStreamVector = std::vector<SharedStreamPointer>;
void queuePacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer node);
void processPackets();
int processPackets(ConcurrentAddedStreams& addedStreams); // returns the number of available streams this frame
AudioStreamVector& getAudioStreams() { return _audioStreams; }
AvatarAudioStream* getAvatarAudioStream();
// returns whether self (this data's node) should ignore node, memoized by frame
// precondition: frame is increasing after first call (including overflow wrap)
bool shouldIgnore(SharedNodePointer self, SharedNodePointer node, unsigned int frame);
// the following methods should be called from the AudioMixer assignment thread ONLY
// they are not thread-safe
// returns a new or existing HRTF object for the given stream from the given node
AudioHRTF& hrtfForStream(const QUuid& nodeID, const QUuid& streamID = QUuid()) { return _nodeSourcesHRTFMap[nodeID][streamID]; }
// removes an AudioHRTF object for a given stream
void removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID = QUuid());
// remove all sources and data from this node
void removeNode(const QUuid& nodeID) { _nodeSourcesIgnoreMap.unsafe_erase(nodeID); _nodeSourcesHRTFMap.erase(nodeID); }
void removeAgentAvatarAudioStream();
// packet parsers
int parseData(ReceivedMessage& message) override;
void processStreamPacket(ReceivedMessage& message, ConcurrentAddedStreams& addedStreams);
void negotiateAudioFormat(ReceivedMessage& message, const SharedNodePointer& node);
void parseRequestsDomainListData(ReceivedMessage& message);
void parsePerAvatarGainSet(ReceivedMessage& message, const SharedNodePointer& node);
@ -108,11 +105,56 @@ public:
bool shouldMuteClient() { return _shouldMuteClient; }
void setShouldMuteClient(bool shouldMuteClient) { _shouldMuteClient = shouldMuteClient; }
glm::vec3 getPosition() { return getAvatarAudioStream() ? getAvatarAudioStream()->getPosition() : glm::vec3(0); }
bool getRequestsDomainListData() const { return _requestsDomainListData; }
void setRequestsDomainListData(bool requesting) { _requestsDomainListData = requesting; }
void setupCodecForReplicatedAgent(QSharedPointer<ReceivedMessage> message);
struct MixableStream {
float approximateVolume { 0.0f };
NodeIDStreamID nodeStreamID;
std::unique_ptr<AudioHRTF> hrtf;
PositionalAudioStream* positionalStream;
bool ignoredByListener { false };
bool ignoringListener { false };
MixableStream(NodeIDStreamID nodeIDStreamID, PositionalAudioStream* positionalStream) :
nodeStreamID(nodeIDStreamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
MixableStream(QUuid nodeID, Node::LocalID localNodeID, StreamID streamID, PositionalAudioStream* positionalStream) :
nodeStreamID(nodeID, localNodeID, streamID), hrtf(new AudioHRTF), positionalStream(positionalStream) {};
};
using MixableStreamsVector = std::vector<MixableStream>;
struct Streams {
MixableStreamsVector active;
MixableStreamsVector inactive;
MixableStreamsVector skipped;
};
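These three vectors form a small per-listener state machine; the skippedTo*/inactiveTo*/activeTo* counters added to AudioMixerStats in this commit count the transitions. A condensed sketch of the per-frame rule that AudioMixerSlave::prepareMix() applies (removal and throttling omitted):

enum class Bucket { Active, Inactive, Skipped };

Bucket nextBucket(bool ignoredOrIgnoring, bool poppedAudibleFrame) {
    if (ignoredOrIgnoring) {
        return Bucket::Skipped;                   // not mixed at all
    }
    return poppedAudibleFrame ? Bucket::Active    // rendered through the HRTF
                              : Bucket::Inactive; // silent; HRTF flushed once on entry
}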
Streams& getStreams() { return _streams; }
// thread-safe, called from AudioMixerSlave(s) while processing ignore packets for other nodes
void ignoredByNode(QUuid nodeID);
void unignoredByNode(QUuid nodeID);
// start of methods called non-concurrently from single AudioMixerSlave mixing for the owning node
const Node::IgnoredNodeIDs& getNewIgnoredNodeIDs() const { return _newIgnoredNodeIDs; }
const Node::IgnoredNodeIDs& getNewUnignoredNodeIDs() const { return _newUnignoredNodeIDs; }
using ConcurrentIgnoreNodeIDs = tbb::concurrent_vector<QUuid>;
const ConcurrentIgnoreNodeIDs& getNewIgnoringNodeIDs() const { return _newIgnoringNodeIDs; }
const ConcurrentIgnoreNodeIDs& getNewUnignoringNodeIDs() const { return _newUnignoringNodeIDs; }
void clearStagedIgnoreChanges();
const Node::IgnoredNodeIDs& getIgnoringNodeIDs() const { return _ignoringNodeIDs; }
bool getHasReceivedFirstMix() const { return _hasReceivedFirstMix; }
void setHasReceivedFirstMix(bool hasReceivedFirstMix) { _hasReceivedFirstMix = hasReceivedFirstMix; }
// end of methods called non-concurrently from single AudioMixerSlave
signals:
void injectorStreamFinished(const QUuid& streamIdentifier);
@ -126,52 +168,15 @@ private:
};
PacketQueue _packetQueue;
AudioStreamVector _audioStreams; // microphone stream from avatar has a null stream ID
void optionallyReplicatePacket(ReceivedMessage& packet, const Node& node);
void setGainForAvatar(QUuid nodeID, float gain);
bool containsValidPosition(ReceivedMessage& message) const;
using IgnoreZone = AABox;
class IgnoreZoneMemo {
public:
IgnoreZoneMemo(AudioMixerClientData& data) : _data(data) {}
// returns an ignore zone, memoized by frame (lockless if the zone is already memoized)
// preconditions:
// - frame is increasing after first call (including overflow wrap)
// - there are no references left from calls to getIgnoreZone(frame - 1)
IgnoreZone& get(unsigned int frame);
private:
AudioMixerClientData& _data;
IgnoreZone _zone;
std::atomic<unsigned int> _frame { 0 };
std::mutex _mutex;
};
IgnoreZoneMemo _ignoreZone;
class IgnoreNodeCache {
public:
// std::atomic is not copyable - always initialize uncached
IgnoreNodeCache() {}
IgnoreNodeCache(const IgnoreNodeCache& other) {}
void cache(bool shouldIgnore);
bool isCached();
bool shouldIgnore();
private:
std::atomic<bool> _isCached { false };
bool _shouldIgnore { false };
};
struct IgnoreNodeCacheHasher { std::size_t operator()(const QUuid& key) const { return qHash(key); } };
using NodeSourcesIgnoreMap = tbb::concurrent_unordered_map<QUuid, IgnoreNodeCache, IgnoreNodeCacheHasher>;
NodeSourcesIgnoreMap _nodeSourcesIgnoreMap;
using HRTFMap = std::unordered_map<QUuid, AudioHRTF>;
using NodeSourcesHRTFMap = std::unordered_map<QUuid, HRTFMap>;
NodeSourcesHRTFMap _nodeSourcesHRTFMap;
Streams _streams;
quint16 _outgoingMixedAudioSequenceNumber;
@ -190,6 +195,21 @@ private:
bool _shouldMuteClient { false };
bool _requestsDomainListData { false };
std::vector<AddedStream> _newAddedStreams;
Node::IgnoredNodeIDs _newIgnoredNodeIDs;
Node::IgnoredNodeIDs _newUnignoredNodeIDs;
tbb::concurrent_vector<QUuid> _newIgnoringNodeIDs;
tbb::concurrent_vector<QUuid> _newUnignoringNodeIDs;
std::mutex _ignoringNodeIDsMutex;
Node::IgnoredNodeIDs _ignoringNodeIDs;
std::atomic_bool _isIgnoreRadiusEnabled { false };
bool _hasReceivedFirstMix { false };
};
#endif // hifi_AudioMixerClientData_h

View file

@ -36,7 +36,10 @@
#include "InjectedAudioStream.h"
#include "AudioHelpers.h"
using namespace std;
using AudioStreamVector = AudioMixerClientData::AudioStreamVector;
using MixableStream = AudioMixerClientData::MixableStream;
using MixableStreamsVector = AudioMixerClientData::MixableStreamsVector;
// packet helpers
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec);
@ -46,9 +49,8 @@ void sendMutePacket(const SharedNodePointer& node, AudioMixerClientData&);
void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData& data);
// mix helpers
inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd);
inline float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho);
inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
const glm::vec3& relativePosition);
@ -56,15 +58,16 @@ inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const
void AudioMixerSlave::processPackets(const SharedNodePointer& node) {
AudioMixerClientData* data = (AudioMixerClientData*)node->getLinkedData();
if (data) {
// process packets and collect the number of streams available for this frame
stats.sumStreams += data->processPackets(_sharedData.addedStreams);
}
}
void AudioMixerSlave::configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
_begin = begin;
_end = end;
_frame = frame;
_numToRetain = numToRetain;
}
void AudioMixerSlave::mix(const SharedNodePointer& node) {
@ -125,105 +128,338 @@ void AudioMixerSlave::mix(const SharedNodePointer& node) {
}
}
template <class Container, class Predicate>
void erase_if(Container& cont, Predicate&& pred) {
auto it = remove_if(begin(cont), end(cont), std::forward<Predicate>(pred));
cont.erase(it, end(cont));
}
template <class Container>
bool contains(const Container& cont, typename Container::value_type value) {
return std::any_of(begin(cont), end(cont), [&value](const auto& element) {
return value == element;
});
}
// This class lets you run an erase_if in several segments
// that use different predicates
template <class Container>
class SegmentedEraseIf {
public:
using iterator = typename Container::iterator;
SegmentedEraseIf(Container& cont) : _cont(cont) {
_first = begin(_cont);
_it = _first;
}
~SegmentedEraseIf() {
assert(_it == end(_cont));
_cont.erase(_first, _it);
}
template <class Predicate>
void iterateTo(iterator last, Predicate pred) {
while (_it != last) {
if (!pred(*_it)) {
if (_first != _it) {
*_first = move(*_it);
}
++_first;
}
++_it;
}
}
private:
iterator _first;
iterator _it;
Container& _cont;
};
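A usage sketch of the helper above, on hypothetical data: kept elements are compacted forward by move-assignment and the single erase runs in the destructor, so iterateTo must be driven all the way to end() before the object leaves scope:

#include <cassert>
#include <vector>

void segmentedEraseDemo() {
    std::vector<int> values { -1, 2, 3, -4, 5, 6, 7, 8 };
    {
        SegmentedEraseIf<std::vector<int>> erase(values);
        erase.iterateTo(values.begin() + 4, [](int v) { return v < 0; });  // first half: drop negatives
        erase.iterateTo(values.end(), [](int v) { return v % 2 != 0; });   // second half: drop odds
    } // destructor performs the one-and-only erase
    assert((values == std::vector<int>{ 2, 3, 6, 8 }));
}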
void AudioMixerSlave::addStreams(Node& listener, AudioMixerClientData& listenerData) {
auto& ignoredNodeIDs = listener.getIgnoredNodeIDs();
auto& ignoringNodeIDs = listenerData.getIgnoringNodeIDs();
auto& streams = listenerData.getStreams();
// add data for newly created streams to our vector
if (!listenerData.getHasReceivedFirstMix()) {
// when this listener is new, we need to fill its added streams object with all available streams
std::for_each(_begin, _end, [&](const SharedNodePointer& node) {
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
if (nodeData) {
for (auto& stream : nodeData->getAudioStreams()) {
bool ignoredByListener = contains(ignoredNodeIDs, node->getUUID());
bool ignoringListener = contains(ignoringNodeIDs, node->getUUID());
if (ignoredByListener || ignoringListener) {
streams.skipped.emplace_back(node->getUUID(), node->getLocalID(),
stream->getStreamIdentifier(), stream.get());
// pre-populate ignored and ignoring flags for this stream
streams.skipped.back().ignoredByListener = ignoredByListener;
streams.skipped.back().ignoringListener = ignoringListener;
} else {
streams.active.emplace_back(node->getUUID(), node->getLocalID(),
stream->getStreamIdentifier(), stream.get());
}
}
}
});
// flag this listener as having received their first mix so we know we don't need to enumerate all nodes again
listenerData.setHasReceivedFirstMix(true);
} else {
for (const auto& newStream : _sharedData.addedStreams) {
bool ignoredByListener = contains(ignoredNodeIDs, newStream.nodeIDStreamID.nodeID);
bool ignoringListener = contains(ignoringNodeIDs, newStream.nodeIDStreamID.nodeID);
if (ignoredByListener || ignoringListener) {
streams.skipped.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
// pre-populate ignored and ignoring flags for this stream
streams.skipped.back().ignoredByListener = ignoredByListener;
streams.skipped.back().ignoringListener = ignoringListener;
} else {
streams.active.emplace_back(newStream.nodeIDStreamID, newStream.positionalStream);
}
}
}
}
bool shouldBeRemoved(const MixableStream& stream, const AudioMixerSlave::SharedData& sharedData) {
return (contains(sharedData.removedNodes, stream.nodeStreamID.nodeLocalID) ||
contains(sharedData.removedStreams, stream.nodeStreamID));
};
bool shouldBeInactive(MixableStream& stream) {
return (!stream.positionalStream->lastPopSucceeded() ||
stream.positionalStream->getLastPopOutputLoudness() == 0.0f);
};
bool shouldBeSkipped(MixableStream& stream, const Node& listener,
const AvatarAudioStream& listenerAudioStream,
const AudioMixerClientData& listenerData) {
if (stream.nodeStreamID.nodeLocalID == listener.getLocalID()) {
return !stream.positionalStream->shouldLoopbackForNode();
}
// grab the unprocessed ignores and unignores from and for this listener
const auto& nodesIgnoredByListener = listenerData.getNewIgnoredNodeIDs();
const auto& nodesUnignoredByListener = listenerData.getNewUnignoredNodeIDs();
const auto& nodesIgnoringListener = listenerData.getNewIgnoringNodeIDs();
const auto& nodesUnignoringListener = listenerData.getNewUnignoringNodeIDs();
// this stream was previously not ignored by the listener and we have some newly ignored streams
// check now if it is one of the ignored streams and flag it as such
if (stream.ignoredByListener) {
stream.ignoredByListener = !contains(nodesUnignoredByListener, stream.nodeStreamID.nodeID);
} else {
stream.ignoredByListener = contains(nodesIgnoredByListener, stream.nodeStreamID.nodeID);
}
if (stream.ignoringListener) {
stream.ignoringListener = !contains(nodesUnignoringListener, stream.nodeStreamID.nodeID);
} else {
stream.ignoringListener = contains(nodesIgnoringListener, stream.nodeStreamID.nodeID);
}
bool listenerIsAdmin = listenerData.getRequestsDomainListData() && listener.getCanKick();
if (stream.ignoredByListener || (stream.ignoringListener && !listenerIsAdmin)) {
return true;
}
bool shouldCheckIgnoreBox = (listenerAudioStream.isIgnoreBoxEnabled() ||
stream.positionalStream->isIgnoreBoxEnabled());
if (shouldCheckIgnoreBox &&
listenerAudioStream.getIgnoreBox().touches(stream.positionalStream->getIgnoreBox())) {
return true;
}
return false;
};
float approximateVolume(const MixableStream& stream, const AvatarAudioStream* listenerAudioStream) {
if (stream.positionalStream->getLastPopOutputTrailingLoudness() == 0.0f) {
return 0.0f;
}
if (stream.positionalStream == listenerAudioStream) {
return 1.0f;
}
// approximate the gain
float gain = approximateGain(*listenerAudioStream, *(stream.positionalStream));
// for avatar streams, modify by the set gain adjustment
if (stream.nodeStreamID.streamID.isNull()) {
gain *= stream.hrtf->getGainAdjustment();
}
return stream.positionalStream->getLastPopOutputTrailingLoudness() * gain;
};
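Illustrative numbers for the ordering key above, with assumed values for a mono avatar stream (so the per-avatar gain adjustment applies):

float exampleThrottleKey() {
    float loudness = 0.2f;            // getLastPopOutputTrailingLoudness()
    float distanceGain = 1.0f / 4.0f; // approximateGain's 1/distance falloff at 4 m
    float avatarGain = 0.5f;          // hrtf->getGainAdjustment(), null stream ID only
    return loudness * distanceGain * avatarGain; // 0.025, only ever used to rank streams
}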
bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
AvatarAudioStream* listenerAudioStream = static_cast<AudioMixerClientData*>(listener->getLinkedData())->getAvatarAudioStream();
AudioMixerClientData* listenerData = static_cast<AudioMixerClientData*>(listener->getLinkedData());
// if we received an invalid position from this listener, then refuse to make them a mix
// because we don't know how to do it properly
if (!listenerAudioStream->hasValidPosition()) {
return false;
}
// zero out the mix for this listener
memset(_mixSamples, 0, sizeof(_mixSamples));
bool isThrottling = _numToRetain != -1;
auto& streams = listenerData->getStreams();
#ifdef HIFI_AUDIO_MIXER_DEBUG
auto mixStart = p_high_resolution_clock::now();
#endif
addStreams(*listener, *listenerData);
// Process skipped streams
erase_if(streams.skipped, [&](MixableStream& stream) {
if (shouldBeRemoved(stream, _sharedData)) {
return true;
}
if (!shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
if (shouldBeInactive(stream)) {
streams.inactive.push_back(move(stream));
++stats.skippedToInactive;
} else {
streams.active.push_back(move(stream));
++stats.skippedToActive;
}
return true;
}
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
}
return false;
});
// Process inactive streams
erase_if(streams.inactive, [&](MixableStream& stream) {
if (shouldBeRemoved(stream, _sharedData)) {
return true;
}
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
streams.skipped.push_back(move(stream));
++stats.inactiveToSkipped;
return true;
}
if (!shouldBeInactive(stream)) {
streams.active.push_back(move(stream));
++stats.inactiveToActive;
return true;
}
if (!isThrottling) {
updateHRTFParameters(stream, *listenerAudioStream,
listenerData->getMasterAvatarGain());
}
return false;
});
// Process active streams
erase_if(streams.active, [&](MixableStream& stream) {
if (shouldBeRemoved(stream, _sharedData)) {
return true;
}
if (isThrottling) {
// we're throttling, so we need to update the approximate volume for any un-skipped streams
// unless this is simply for an echo (in which case the approx volume is 1.0)
stream.approximateVolume = approximateVolume(stream, listenerAudioStream);
} else {
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
addStream(stream, *listenerAudioStream, 0.0f);
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
if (shouldBeInactive(stream)) {
// To reduce artifacts we still call render to flush the HRTF for every silent
// source on the first frame where the source becomes silent;
// this ensures the correct tail from the last mixed block.
streams.inactive.push_back(move(stream));
++stats.activeToInactive;
return true;
}
}
return false;
});
if (isThrottling) {
// since we're throttling, we need to partition the mixable streams into throttled and unthrottled sets
int numToRetain = min(_numToRetain, (int)streams.active.size()); // Make sure we don't overflow
auto throttlePoint = begin(streams.active) + numToRetain;
std::nth_element(streams.active.begin(), throttlePoint, streams.active.end(),
[](const auto& a, const auto& b)
{
return a.approximateVolume > b.approximateVolume;
});
SegmentedEraseIf<MixableStreamsVector> erase(streams.active);
erase.iterateTo(throttlePoint, [&](MixableStream& stream) {
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
resetHRTFState(stream);
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
addStream(stream, *listenerAudioStream, listenerData->getMasterAvatarGain());
if (shouldBeInactive(stream)) {
// To reduce artifacts we still call render to flush the HRTF for every silent
// source on the first frame where the source becomes silent;
// this ensures the correct tail from the last mixed block.
streams.inactive.push_back(move(stream));
++stats.activeToInactive;
return true;
}
}
return false;
});
erase.iterateTo(end(streams.active), [&](MixableStream& stream) {
// To reduce artifacts we reset the HRTF state for every throttled
// source on the first frame where the source becomes throttled;
// this removes at least the tail from the last mixed block,
// preventing excessive artifacts on the next first block.
resetHRTFState(stream);
if (shouldBeSkipped(stream, *listener, *listenerAudioStream, *listenerData)) {
streams.skipped.push_back(move(stream));
++stats.activeToSkipped;
return true;
}
if (shouldBeInactive(stream)) {
streams.inactive.push_back(move(stream));
++stats.activeToInactive;
return true;
}
return false;
});
}
stats.skipped += (int)streams.skipped.size();
stats.inactive += (int)streams.inactive.size();
stats.active += (int)streams.active.size();
// clear the newly ignored, un-ignored, ignoring, and un-ignoring streams now that we've processed them
listenerData->clearStagedIgnoreChanges();
#ifdef HIFI_AUDIO_MIXER_DEBUG
auto mixEnd = p_high_resolution_clock::now();
auto mixTime = std::chrono::duration_cast<std::chrono::nanoseconds>(mixEnd - mixStart);
@ -246,51 +482,35 @@ bool AudioMixerSlave::prepareMix(const SharedNodePointer& listener) {
return hasAudio;
}
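prepareMix's throttling path leans on std::nth_element performing an O(n) average-time partition rather than a full sort. A self-contained sketch of the same top-K selection:

#include <algorithm>
#include <vector>

struct Stream { float approximateVolume; };

void selectLoudest(std::vector<Stream>& streams, int k) {
    k = std::min(k, (int)streams.size()); // make sure we don't overflow the range
    auto throttlePoint = streams.begin() + k;
    std::nth_element(streams.begin(), throttlePoint, streams.end(),
                     [](const Stream& a, const Stream& b) {
                         return a.approximateVolume > b.approximateVolume;
                     });
    // streams[0..k) now hold the k loudest (in no particular order);
    // everything at or after throttlePoint is a candidate for throttling.
}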
void AudioMixerSlave::addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain) {
++stats.totalMixes;
// to reduce artifacts we call the HRTF functor for every source, even if throttled or silent;
// this ensures the correct tail from the last mixed block and the correct spatialization of the next first block
auto streamToAdd = mixableStream.positionalStream;
// check if this is a server echo of a source back to itself
bool isEcho = (streamToAdd == &listeningNodeStream);
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
const int HRTF_DATASET_INDEX = 1;
if (!streamToAdd->lastPopSucceeded()) {
bool forceSilentBlock = true;
if (!streamToAdd->getLastPopOutput().isNull()) {
bool isInjector = dynamic_cast<const InjectedAudioStream*>(streamToAdd);
// in an injector, just go silent - the injector has likely ended
// in other inputs (microphone, &c.), repeat with fade to avoid the harsh jump to silence
if (!isInjector) {
// calculate its fade factor, which depends on how many times it's already been repeated.
float fadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1);
if (fadeFactor > 0.0f) {
// apply the fadeFactor to the gain
gain *= fadeFactor;
@ -302,15 +522,12 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
if (forceSilentBlock) {
// call renderSilent with a forced silent block to reduce artifacts
// (this is not done for stereo streams since they do not go through the HRTF)
if (!streamToAdd->isStereo() && !isEcho) {
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
mixableStream.hrtf->render(silentMonoBlock, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.hrtfRenders;
}
return;
@ -318,16 +535,15 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
// grab the stream from the ring buffer
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
// stereo sources are not passed through HRTF
if (streamToAdd->isStereo()) {
// apply the avatar gain adjustment
gain *= mixableStream.hrtf->getGainAdjustment();
const float scale = 1 / 32768.0f; // int16_t to float
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL; i++) {
_mixSamples[2*i+0] += (float)streamPopOutput[2*i+0] * gain * scale;
@ -335,11 +551,8 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
++stats.manualStereoMixes;
} else if (isEcho) {
// echo sources are not passed through HRTF
const float scale = 1/32768.0f; // int16_t to float
@ -350,41 +563,38 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
}
++stats.manualEchoMixes;
} else {
streamPopOutput.readSamples(_bufferSamples, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
mixableStream.hrtf->render(_bufferSamples, _mixSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
++stats.hrtfRenders;
}
}
void AudioMixerSlave::updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain) {
auto streamToAdd = mixableStream.positionalStream;
// check if this is a server echo of a source back to itself
bool isEcho = (streamToAdd == &listeningNodeStream);
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream.getPosition();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = computeGain(masterListenerGain, listeningNodeStream, *streamToAdd, relativePosition, distance, isEcho);
float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
mixableStream.hrtf->setParameterHistory(azimuth, distance, gain);
++stats.hrtfUpdates;
}
void AudioMixerSlave::resetHRTFState(AudioMixerClientData::MixableStream& mixableStream) {
mixableStream.hrtf->reset();
++stats.hrtfResets;
}
std::unique_ptr<NLPacket> createAudioPacket(PacketType type, int size, quint16 sequence, QString codec) {
@ -443,12 +653,12 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
glm::vec3 streamPosition = stream->getPosition();
// find reverb properties
for (const auto& settings : reverbSettings) {
AABox box = audioZones[settings.zone].area;
if (box.contains(streamPosition)) {
hasReverb = true;
reverbTime = settings.reverbTime;
wetLevel = settings.wetLevel;
break;
}
}
@ -493,8 +703,7 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
}
}
float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd) {
float gain = 1.0f;
// injector: apply attenuation
@ -505,13 +714,14 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
// avatar: skip attenuation - it is too costly to approximate
// distance attenuation: approximate, ignore zone-specific attenuations
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
float distance = glm::length(relativePosition);
return gain / distance;
// avatar: skip master gain - it is constant for all streams
}
float computeGain(float masterListenerGain, const AvatarAudioStream& listeningNodeStream,
const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, float distance, bool isEcho) {
float gain = 1.0f;
@ -534,7 +744,7 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
gain *= offAxisCoefficient;
// apply master gain, only to avatars
gain *= masterListenerGain;
}
auto& audioZones = AudioMixer::getAudioZones();
@ -542,10 +752,10 @@ float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudi
// find distance attenuation coefficient
float attenuationPerDoublingInDistance = AudioMixer::getAttenuationPerDoublingInDistance();
for (const auto& settings : zoneSettings) {
if (audioZones[settings.source].area.contains(streamToAdd.getPosition()) &&
audioZones[settings.listener].area.contains(listeningNodeStream.getPosition())) {
attenuationPerDoublingInDistance = settings.coefficient;
break;
}
}

View file

@ -12,29 +12,39 @@
#ifndef hifi_AudioMixerSlave_h
#define hifi_AudioMixerSlave_h
#include <tbb/concurrent_vector.h>
#include <AABox.h>
#include <AudioHRTF.h>
#include <AudioRingBuffer.h>
#include <ThreadedAssignment.h>
#include <UUIDHasher.h>
#include <NodeList.h>
#include <PositionalAudioStream.h>
#include "AudioMixerClientData.h"
#include "AudioMixerStats.h"
class PositionalAudioStream;
class AvatarAudioStream;
class AudioHRTF;
class AudioMixerClientData;
class AudioMixerSlave {
public:
using ConstIter = NodeList::const_iterator;
struct SharedData {
AudioMixerClientData::ConcurrentAddedStreams addedStreams;
std::vector<Node::LocalID> removedNodes;
std::vector<NodeIDStreamID> removedStreams;
};
AudioMixerSlave(SharedData& sharedData) : _sharedData(sharedData) {};
// process packets for a given node (requires no configuration)
void processPackets(const SharedNodePointer& node);
// configure a round of mixing
void configureMix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);
// mix and broadcast non-ignored streams to the node (requires configuration using configureMix, above)
// returns true if a mixed packet was sent to the node
@ -45,13 +55,15 @@ public:
private:
// create mix, returns true if mix has audio
bool prepareMix(const SharedNodePointer& listener);
void addStream(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain);
void updateHRTFParameters(AudioMixerClientData::MixableStream& mixableStream,
AvatarAudioStream& listeningNodeStream,
float masterListenerGain);
void resetHRTFState(AudioMixerClientData::MixableStream& mixableStream);
void addStreams(Node& listener, AudioMixerClientData& listenerData);
// mixing buffers
float _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
@ -61,7 +73,9 @@ private:
ConstIter _begin;
ConstIter _end;
unsigned int _frame { 0 };
int _numToRetain { -1 };
SharedData& _sharedData;
};
#endif // hifi_AudioMixerSlave_h

View file

@ -74,13 +74,11 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {
run(begin, end);
}
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain) {
_function = &AudioMixerSlave::mix;
_configure = [=](AudioMixerSlave& slave) {
slave.configureMix(_begin, _end, frame, numToRetain);
};
run(begin, end);
}
@ -167,7 +165,7 @@ void AudioMixerSlavePool::resize(int numThreads) {
if (numThreads > _numThreads) {
// start new slaves
for (int i = 0; i < numThreads - _numThreads; ++i) {
auto slave = new AudioMixerSlaveThread(*this, _workerSharedData);
slave->start();
_slaves.emplace_back(slave);
}

View file

@ -31,7 +31,8 @@ class AudioMixerSlaveThread : public QThread, public AudioMixerSlave {
using Lock = std::unique_lock<Mutex>;
public:
AudioMixerSlaveThread(AudioMixerSlavePool& pool, AudioMixerSlave::SharedData& sharedData)
: AudioMixerSlave(sharedData), _pool(pool) {}
void run() override final;
@ -58,14 +59,15 @@ class AudioMixerSlavePool {
public:
using ConstIter = NodeList::const_iterator;
AudioMixerSlavePool(AudioMixerSlave::SharedData& sharedData, int numThreads = QThread::idealThreadCount())
: _workerSharedData(sharedData) { setNumThreads(numThreads); }
~AudioMixerSlavePool() { resize(0); }
// process packets on slave threads
void processPackets(ConstIter begin, ConstIter end);
// mix on slave threads
void mix(ConstIter begin, ConstIter end, unsigned int frame, int numToRetain);
// iterate over all slaves
void each(std::function<void(AudioMixerSlave& slave)> functor);
@ -96,10 +98,10 @@ private:
// frame state
Queue _queue;
ConstIter _begin;
ConstIter _end;
AudioMixerSlave::SharedData& _workerSharedData;
};
#endif // hifi_AudioMixerSlavePool_h

View file

@ -15,12 +15,27 @@ void AudioMixerStats::reset() {
sumStreams = 0;
sumListeners = 0;
sumListenersSilent = 0;
totalMixes = 0;
hrtfRenders = 0;
hrtfSilentRenders = 0;
hrtfThrottleRenders = 0;
hrtfResets = 0;
hrtfUpdates = 0;
manualStereoMixes = 0;
manualEchoMixes = 0;
skippedToActive = 0;
skippedToInactive = 0;
inactiveToSkipped = 0;
inactiveToActive = 0;
activeToSkipped = 0;
activeToInactive = 0;
skipped = 0;
inactive = 0;
active = 0;
#ifdef HIFI_AUDIO_MIXER_DEBUG
mixTime = 0;
#endif
@ -30,12 +45,27 @@ void AudioMixerStats::accumulate(const AudioMixerStats& otherStats) {
sumStreams += otherStats.sumStreams;
sumListeners += otherStats.sumListeners;
sumListenersSilent += otherStats.sumListenersSilent;
totalMixes += otherStats.totalMixes;
hrtfRenders += otherStats.hrtfRenders;
hrtfSilentRenders += otherStats.hrtfSilentRenders;
hrtfThrottleRenders += otherStats.hrtfThrottleRenders;
hrtfResets += otherStats.hrtfResets;
hrtfUpdates += otherStats.hrtfUpdates;
manualStereoMixes += otherStats.manualStereoMixes;
manualEchoMixes += otherStats.manualEchoMixes;
skippedToActive += otherStats.skippedToActive;
skippedToInactive += otherStats.skippedToInactive;
inactiveToSkipped += otherStats.inactiveToSkipped;
inactiveToActive += otherStats.inactiveToActive;
activeToSkipped += otherStats.activeToSkipped;
activeToInactive += otherStats.activeToInactive;
skipped += otherStats.skipped;
inactive += otherStats.inactive;
active += otherStats.active;
#ifdef HIFI_AUDIO_MIXER_DEBUG
mixTime += otherStats.mixTime;
#endif

View file

@ -24,12 +24,23 @@ struct AudioMixerStats {
int totalMixes { 0 };
int hrtfRenders { 0 };
int hrtfSilentRenders { 0 };
int hrtfThrottleRenders { 0 };
int hrtfResets { 0 };
int hrtfUpdates { 0 };
int manualStereoMixes { 0 };
int manualEchoMixes { 0 };
int skippedToActive { 0 };
int skippedToInactive { 0 };
int inactiveToSkipped { 0 };
int inactiveToActive { 0 };
int activeToSkipped { 0 };
int activeToInactive { 0 };
int skipped { 0 };
int inactive { 0 };
int active { 0 };
#ifdef HIFI_AUDIO_MIXER_DEBUG
uint64_t mixTime { 0 };
#endif
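The six "XToY" counters and the skipped/inactive/active occupancy counters added above imply a per-stream, per-frame state machine. A minimal sketch of how they might be fed, assuming the mixer classifies each stream every frame (StreamState and recordTransition are illustrative names, not part of this commit):
enum class StreamState { Skipped, Inactive, Active };
// Bump the matching transition counter when a stream changes state between frames.
void recordTransition(AudioMixerStats& stats, StreamState from, StreamState to) {
    if (from == StreamState::Skipped && to == StreamState::Active) {
        ++stats.skippedToActive;
    } else if (from == StreamState::Skipped && to == StreamState::Inactive) {
        ++stats.skippedToInactive;
    } else if (from == StreamState::Inactive && to == StreamState::Skipped) {
        ++stats.inactiveToSkipped;
    } else if (from == StreamState::Inactive && to == StreamState::Active) {
        ++stats.inactiveToActive;
    } else if (from == StreamState::Active && to == StreamState::Skipped) {
        ++stats.activeToSkipped;
    } else if (from == StreamState::Active && to == StreamState::Inactive) {
        ++stats.activeToInactive;
    }
}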

View file

@ -23,9 +23,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
if (type == PacketType::SilentAudioFrame) {
const char* dataAt = packetAfterSeqNum.constData();
quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
readBytes += sizeof(quint16);
numAudioSamples = (int)numSilentSamples;
SilentSamplesBytes numSilentSamples = *(reinterpret_cast<const SilentSamplesBytes*>(dataAt));
readBytes += sizeof(SilentSamplesBytes);
numAudioSamples = (int) numSilentSamples;
// read the positional data
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
@ -34,9 +34,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
_shouldLoopbackForNode = (type == PacketType::MicrophoneAudioWithEcho);
// read the channel flag
quint8 channelFlag = packetAfterSeqNum.at(readBytes);
ChannelFlag channelFlag = packetAfterSeqNum.at(readBytes);
bool isStereo = channelFlag == 1;
readBytes += sizeof(quint8);
readBytes += sizeof(ChannelFlag);
// if isStereo value has changed, restart the ring buffer with new frame size
if (isStereo != _isStereo) {

View file

@ -16,6 +16,8 @@
#include "PositionalAudioStream.h"
using SilentSamplesBytes = quint16;
class AvatarAudioStream : public PositionalAudioStream {
public:
AvatarAudioStream(bool isStereo, int numStaticJitterFrames = -1);

View file

@ -673,7 +673,13 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
void AvatarMixer::handleRadiusIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
auto start = usecTimestampNow();
sendingNode->parseIgnoreRadiusRequestMessage(packet);
bool enabled;
packet->readPrimitive(&enabled);
auto avatarData = getOrCreateClientData(sendingNode);
avatarData->setIsIgnoreRadiusEnabled(enabled);
auto end = usecTimestampNow();
_handleRadiusIgnoreRequestPacketElapsedTime += (end - start);
}

View file

@ -227,7 +227,7 @@ void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
addToRadiusIgnoringSet(other->getUUID());
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
killPacket->write(other->getUUID().toRfc4122());
if (self->isIgnoreRadiusEnabled()) {
if (_isIgnoreRadiusEnabled) {
killPacket->writePrimitive(KillAvatarReason::TheirAvatarEnteredYourBubble);
} else {
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);

View file

@ -49,6 +49,9 @@ public:
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
bool isIgnoreRadiusEnabled() const { return _isIgnoreRadiusEnabled; }
void setIsIgnoreRadiusEnabled(bool enabled) { _isIgnoreRadiusEnabled = enabled; }
uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
@ -180,6 +183,8 @@ private:
std::unordered_map<Node::LocalID, TraitsCheckTimestamp> _lastSentTraitsTimestamps;
std::unordered_map<Node::LocalID, AvatarTraits::TraitVersions> _sentTraitVersions;
std::atomic_bool _isIgnoreRadiusEnabled { false };
};
#endif // hifi_AvatarMixerClientData_h

View file

@ -345,7 +345,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
} else {
// Check to see if the space bubble is enabled
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
if (destinationNode->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
if (nodeData->isIgnoreRadiusEnabled() || (avatarClientNodeData->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Perform the collision check between the two bounding boxes
const float OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR = 2.4f; // magic number determined empirically
AABox otherNodeBox = computeBubbleBox(avatarClientNodeData->getAvatar(), OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR);

View file

@ -34,7 +34,7 @@ Preference {
left: parent.left
right: parent.right
}
height: isFirstCheckBox ? hifi.dimensions.controlInterlineHeight : 0
height: isFirstCheckBox && !preference.indented ? 16 : 2
}
CheckBox {
@ -54,6 +54,7 @@ Preference {
left: parent.left
right: parent.right
bottom: parent.bottom
leftMargin: preference.indented ? 20 : 0
}
text: root.label
colorScheme: hifi.colorSchemes.dark

View file

@ -11,14 +11,27 @@
import QtQuick 2.5
import "../../controls-uit"
import "../../styles-uit"
Preference {
id: root
height: control.height + hifi.dimensions.controlInterlineHeight
property int value: 0
Component.onCompleted: {
repeater.itemAt(preference.value).checked = true
value = preference.value;
repeater.itemAt(preference.value).checked = true;
}
function updateValue() {
for (var i = 0; i < repeater.count; i++) {
if (repeater.itemAt(i).checked) {
value = i;
break;
}
}
}
function save() {
@ -33,24 +46,36 @@ Preference {
preference.save();
}
Row {
Column {
id: control
anchors {
left: parent.left
right: parent.right
bottom: parent.bottom
}
spacing: 5
spacing: 3
RalewaySemiBold {
id: heading
size: hifi.fontSizes.inputLabel
text: preference.heading
color: hifi.colors.lightGrayText
visible: text !== ""
bottomPadding: 3
}
Repeater {
id: repeater
model: preference.items.length
delegate: RadioButton {
text: preference.items[index]
letterSpacing: 0
anchors {
verticalCenter: parent.verticalCenter
left: parent.left
}
leftPadding: 0
colorScheme: hifi.colorSchemes.dark
onClicked: updateValue();
}
}
}

View file

@ -138,11 +138,12 @@ Preference {
break;
case Preference.PrimaryHand:
checkBoxCount++;
checkBoxCount = 0;
builder = primaryHandBuilder;
break;
case Preference.RadioButtons:
checkBoxCount++;
checkBoxCount = 0;
builder = radioButtonsBuilder;
break;
};

View file

@ -48,7 +48,7 @@ Rectangle {
HifiModels.PSFListModel {
id: connectionsUserModel;
http: http;
endpoint: "/api/v1/users?filter=connections";
endpoint: "/api/v1/users/connections";
property var sortColumn: connectionsTable.getColumn(connectionsTable.sortIndicatorColumn);
sortProperty: switch (sortColumn && sortColumn.role) {
case 'placeName':

View file

@ -0,0 +1,43 @@
import QtQuick 2.0
Item {
property alias source: sourceImage.sourceItem
property alias maskSource: sourceMask.sourceItem
anchors.fill: parent
ShaderEffectSource {
id: sourceMask
smooth: true
hideSource: true
}
ShaderEffectSource {
id: sourceImage
hideSource: true
}
ShaderEffect {
id: maskEffect
anchors.fill: parent
property variant source: sourceImage
property variant mask: sourceMask
fragmentShader: {
"
varying highp vec2 qt_TexCoord0;
uniform lowp sampler2D source;
uniform lowp sampler2D mask;
void main() {
highp vec4 maskColor = texture2D(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
highp vec4 sourceColor = texture2D(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
if (maskColor.a > 0.0)
gl_FragColor = sourceColor;
else
gl_FragColor = maskColor;
}
"
}
}
}

View file

@ -24,20 +24,20 @@ Item {
fragmentShader: {
"
#version 150 core
varying highp vec2 qt_TexCoord0;
uniform lowp sampler2D source;
uniform lowp sampler2D mask;
void main() {
highp vec4 maskColor = texture2D(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
highp vec4 sourceColor = texture2D(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
if (maskColor.a > 0.0)
gl_FragColor = sourceColor;
else
gl_FragColor = maskColor;
}
#version 410
in vec2 qt_TexCoord0;
out vec4 color;
uniform sampler2D source;
uniform sampler2D mask;
void main()
{
vec4 maskColor = texture(mask, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
vec4 sourceColor = texture(source, vec2(qt_TexCoord0.x, qt_TexCoord0.y));
if (maskColor.a > 0.0)
color = sourceColor;
else
color = maskColor;
}
"
}
}

View file

@ -122,6 +122,22 @@ Item {
}
}
// Runtime customization of preferences.
var locomotionPreference = findPreference("VR Movement", "Teleporting only / Walking and teleporting");
var flyingPreference = findPreference("VR Movement", "Jumping and flying");
if (locomotionPreference && flyingPreference) {
flyingPreference.visible = (locomotionPreference.value === 1);
locomotionPreference.valueChanged.connect(function () {
flyingPreference.visible = (locomotionPreference.value === 1);
});
}
if (HMD.isHeadControllerAvailable("Oculus")) {
var boundariesPreference = findPreference("VR Movement", "Show room boundaries while teleporting");
if (boundariesPreference) {
boundariesPreference.label = "Show room boundaries and sensors while teleporting";
}
}
if (sections.length) {
// Default sections to expanded/collapsed as appropriate for dialog.
if (sections.length === 1) {
@ -234,4 +250,32 @@ Item {
}
}
}
function findPreference(category, name) {
var section = null;
var preference = null;
var i;
// Find category section.
i = 0;
while (!section && i < sections.length) {
if (sections[i].name === category) {
section = sections[i];
}
i++;
}
// Find named preference.
if (section) {
i = 0;
while (!preference && i < section.preferences.length) {
if (section.preferences[i].preference && section.preferences[i].preference.name === name) {
preference = section.preferences[i];
}
i++;
}
}
return preference;
}
}

View file

@ -153,11 +153,12 @@ Preference {
break;
case Preference.PrimaryHand:
checkBoxCount++;
checkBoxCount = 0;
builder = primaryHandBuilder;
break;
case Preference.RadioButtons:
checkBoxCount++;
checkBoxCount = 0;
builder = radioButtonsBuilder;
break;
};

View file

@ -10,8 +10,11 @@
//
#include "AndroidHelper.h"
#include <QDebug>
#include <AccountManager.h>
#include <AudioClient.h>
#include <src/ui/LoginDialog.h>
#include "Application.h"
#include "Constants.h"
#if defined(qApp)
#undef qApp
@ -69,3 +72,75 @@ void AndroidHelper::notifyHeadsetOn(bool pluggedIn) {
}
#endif
}
void AndroidHelper::signup(QString email, QString username, QString password) {
JSONCallbackParameters callbackParams;
callbackParams.callbackReceiver = this;
callbackParams.jsonCallbackMethod = "signupCompleted";
callbackParams.errorCallbackMethod = "signupFailed";
QJsonObject payload;
QJsonObject userObject;
userObject.insert("email", email);
userObject.insert("username", username);
userObject.insert("password", password);
payload.insert("user", userObject);
auto accountManager = DependencyManager::get<AccountManager>();
accountManager->sendRequest(API_SIGNUP_PATH, AccountManagerAuth::None,
QNetworkAccessManager::PostOperation, callbackParams,
QJsonDocument(payload).toJson());
}
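For reference, the payload assembled above serializes to a JSON body of the following shape (field values here are hypothetical) and is POSTed to API_SIGNUP_PATH ("api/v1/users") without authentication:
// {
//     "user": {
//         "email": "user@example.com",
//         "username": "newuser",
//         "password": "hunter2"
//     }
// }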
void AndroidHelper::signupCompleted(QNetworkReply* reply) {
emit handleSignupCompleted();
}
QString AndroidHelper::errorStringFromAPIObject(const QJsonValue& apiObject) {
if (apiObject.isArray()) {
return apiObject.toArray()[0].toString();
} else if (apiObject.isString()) {
return apiObject.toString();
} else {
return "is invalid";
}
}
void AndroidHelper::signupFailed(QNetworkReply* reply) {
// parse the returned JSON to see what the problem was
auto jsonResponse = QJsonDocument::fromJson(reply->readAll());
static const QString RESPONSE_DATA_KEY = "data";
auto dataJsonValue = jsonResponse.object()[RESPONSE_DATA_KEY];
if (dataJsonValue.isObject()) {
auto dataObject = dataJsonValue.toObject();
static const QString EMAIL_DATA_KEY = "email";
static const QString USERNAME_DATA_KEY = "username";
static const QString PASSWORD_DATA_KEY = "password";
QStringList errorStringList;
if (dataObject.contains(EMAIL_DATA_KEY)) {
errorStringList.append(QString("Email %1.").arg(errorStringFromAPIObject(dataObject[EMAIL_DATA_KEY])));
}
if (dataObject.contains(USERNAME_DATA_KEY)) {
errorStringList.append(QString("Username %1.").arg(errorStringFromAPIObject(dataObject[USERNAME_DATA_KEY])));
}
if (dataObject.contains(PASSWORD_DATA_KEY)) {
errorStringList.append(QString("Password %1.").arg(errorStringFromAPIObject(dataObject[PASSWORD_DATA_KEY])));
}
emit handleSignupFailed(errorStringList.join('\n'));
} else {
static const QString DEFAULT_SIGN_UP_FAILURE_MESSAGE = "There was an unknown error while creating your account. Please try again later.";
emit handleSignupFailed(DEFAULT_SIGN_UP_FAILURE_MESSAGE);
}
}
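To make the error path above concrete, here is a self-contained sketch of how a hypothetical failure response is reduced to the user-facing message; the keys ("data", "username", "password") match the ones parsed above, while the values are invented:
#include <QJsonArray>
#include <QJsonDocument>
#include <QJsonObject>
#include <QStringList>
#include <QDebug>
int main() {
    // Hypothetical API error response body.
    const QByteArray body = R"({
        "data": {
            "username": ["is already taken"],
            "password": ["is too short"]
        }
    })";
    auto data = QJsonDocument::fromJson(body).object()["data"].toObject();
    QStringList errors;
    if (data.contains("username")) {
        errors << QString("Username %1.").arg(data["username"].toArray()[0].toString());
    }
    if (data.contains("password")) {
        errors << QString("Password %1.").arg(data["password"].toArray()[0].toString());
    }
    qDebug().noquote() << errors.join('\n');
    // Prints:
    //   Username is already taken.
    //   Password is too short.
}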

View file

@ -16,6 +16,9 @@
#include <QMap>
#include <QUrl>
#include <QNetworkReply>
#include <QtCore/QEventLoop>
class AndroidHelper : public QObject {
Q_OBJECT
public:
@ -36,9 +39,12 @@ public:
AndroidHelper(AndroidHelper const&) = delete;
void operator=(AndroidHelper const&) = delete;
void signup(QString email, QString username, QString password);
public slots:
void showLoginDialog(QUrl url);
void signupCompleted(QNetworkReply* reply);
void signupFailed(QNetworkReply* reply);
signals:
void androidActivityRequested(const QString &activityName, const bool backToScene, QMap<QString, QString> args = QMap<QString, QString>());
void qtAppLoadComplete();
@ -48,9 +54,14 @@ signals:
void hapticFeedbackRequested(int duration);
void handleSignupCompleted();
void handleSignupFailed(QString errorString);
private:
AndroidHelper();
~AndroidHelper();
QString errorStringFromAPIObject(const QJsonValue& apiObject);
};
#endif

View file

@ -410,6 +410,10 @@ public:
});
}
void setMainThreadID(Qt::HANDLE threadID) {
_mainThreadID = threadID;
}
static void updateHeartbeat() {
auto now = usecTimestampNow();
auto elapsed = now - _heartbeat;
@ -417,7 +421,9 @@ public:
_heartbeat = now;
}
static void deadlockDetectionCrash() {
void deadlockDetectionCrash() {
setCrashAnnotation("_mod_faulting_tid", std::to_string((uint64_t)_mainThreadID));
setCrashAnnotation("deadlock", "1");
// deliberately dereference a null pointer so the crash reporter fires with a recognizable value
uint32_t* crashTrigger = nullptr;
*crashTrigger = 0xDEAD10CC;
}
@ -504,6 +510,8 @@ public:
static ThreadSafeMovingAverage<int, HEARTBEAT_SAMPLES> _movingAverage;
bool _quit { false };
Qt::HANDLE _mainThreadID = nullptr;
};
std::atomic<bool> DeadlockWatchdogThread::_paused;
@ -1053,6 +1061,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
auto controllerScriptingInterface = DependencyManager::get<controller::ScriptingInterface>().data();
_controllerScriptingInterface = dynamic_cast<ControllerScriptingInterface*>(controllerScriptingInterface);
connect(PluginManager::getInstance().data(), &PluginManager::inputDeviceRunningChanged,
controllerScriptingInterface, &controller::ScriptingInterface::updateRunningInputDevices);
_entityClipboard->createRootElement();
@ -1092,7 +1102,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
}
// Set up a watchdog thread to intentionally crash the application on deadlocks
if (!DISABLE_WATCHDOG) {
(new DeadlockWatchdogThread())->start();
auto deadlockWatchdogThread = new DeadlockWatchdogThread();
deadlockWatchdogThread->setMainThreadID(QThread::currentThreadId());
deadlockWatchdogThread->start();
}
// Set File Logger Session UUID

interface/src/Constants.h Normal file
View file

@ -0,0 +1,15 @@
//
// Constants.h
// interface
//
// Created by Gabriel Calero on 9/28/18.
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <QString>
static const QString API_SIGNUP_PATH = "api/v1/users";

View file

@ -505,14 +505,16 @@ void AvatarManager::clearOtherAvatars() {
void AvatarManager::deleteAllAvatars() {
assert(_avatarsToChangeInPhysics.empty());
QWriteLocker locker(&_hashLock);
AvatarHash::iterator avatarIterator = _avatarHash.begin();
QReadLocker locker(&_hashLock);
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
auto avatar = std::static_pointer_cast<OtherAvatar>(avatarIterator.value());
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
avatarIterator = _avatarHash.erase(avatarIterator);
avatar->die();
assert(!avatar->_motionState);
if (avatar != _myAvatar) {
auto otherAvatar = std::static_pointer_cast<OtherAvatar>(avatar);
assert(!otherAvatar->_motionState);
}
}
}

View file

@ -106,6 +106,7 @@ MyAvatar::MyAvatar(QThread* thread) :
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_useAdvancedMovementControls("advancedMovementForHandControllersIsChecked", true),
_showPlayArea("showPlayArea", true),
_smoothOrientationTimer(std::numeric_limits<float>::max()),
_smoothOrientationInitial(),
_smoothOrientationTarget(),
@ -3307,7 +3308,7 @@ float MyAvatar::getRawDriveKey(DriveKeys key) const {
}
void MyAvatar::relayDriveKeysToCharacterController() {
if (getDriveKey(TRANSLATE_Y) > 0.0f) {
if (getDriveKey(TRANSLATE_Y) > 0.0f && (!qApp->isHMDMode() || (useAdvancedMovementControls() && getFlyingHMDPref()))) {
_characterController.jump();
}
}
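The gate added above reads more clearly factored into a predicate; a sketch (shouldJump is an illustrative name, not from this commit): in HMD mode a TRANSLATE_Y press only jumps when the user has opted into walking controls and has the jumping-and-flying preference enabled; on desktop it always jumps.
bool shouldJump(bool translateYPressed, bool isHMDMode,
                bool advancedMovementControls, bool flyingHMDPref) {
    // desktop: jump freely; HMD: require both opt-ins
    return translateYPressed && (!isHMDMode || (advancedMovementControls && flyingHMDPref));
}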

View file

@ -122,8 +122,10 @@ class MyAvatar : public Avatar {
* zone may disallow collisionless avatars.
* @property {boolean} characterControllerEnabled - Synonym of <code>collisionsEnabled</code>.
* <strong>Deprecated:</strong> Use <code>collisionsEnabled</code> instead.
* @property {boolean} useAdvancedMovementControls - Returns the value of the Interface setting, Settings > Advanced
* Movement for Hand Controller. Note: Setting the value has no effect unless Interface is restarted.
* @property {boolean} useAdvancedMovementControls - Returns and sets the value of the Interface setting, Settings >
* Walking and teleporting. Note: Setting the value has no effect unless Interface is restarted.
* @property {boolean} showPlayArea - Returns and sets the value of the Interface setting, Settings > Show room boundaries
* while teleporting. Note: Setting the value has no effect unless Interface is restarted.
* @property {number} yawSpeed=75
* @property {number} pitchSpeed=50
* @property {boolean} hmdRollControlEnabled=true - If <code>true</code>, the roll angle of your HMD turns your avatar
@ -223,6 +225,7 @@ class MyAvatar : public Avatar {
Q_PROPERTY(bool collisionsEnabled READ getCollisionsEnabled WRITE setCollisionsEnabled)
Q_PROPERTY(bool characterControllerEnabled READ getCharacterControllerEnabled WRITE setCharacterControllerEnabled)
Q_PROPERTY(bool useAdvancedMovementControls READ useAdvancedMovementControls WRITE setUseAdvancedMovementControls)
Q_PROPERTY(bool showPlayArea READ getShowPlayArea WRITE setShowPlayArea)
Q_PROPERTY(float yawSpeed MEMBER _yawSpeed)
Q_PROPERTY(float pitchSpeed MEMBER _pitchSpeed)
@ -542,6 +545,9 @@ public:
void setUseAdvancedMovementControls(bool useAdvancedMovementControls)
{ _useAdvancedMovementControls.set(useAdvancedMovementControls); }
bool getShowPlayArea() const { return _showPlayArea.get(); }
void setShowPlayArea(bool showPlayArea) { _showPlayArea.set(showPlayArea); }
void setHMDRollControlEnabled(bool value) { _hmdRollControlEnabled = value; }
bool getHMDRollControlEnabled() const { return _hmdRollControlEnabled; }
void setHMDRollControlDeadZone(float value) { _hmdRollControlDeadZone = value; }
@ -1631,6 +1637,7 @@ private:
Setting::Handle<float> _realWorldFieldOfView;
Setting::Handle<bool> _useAdvancedMovementControls;
Setting::Handle<bool> _showPlayArea;
// Smoothing.
const float SMOOTH_TIME_ORIENTATION = 0.5f;

View file

@ -18,6 +18,7 @@
const glm::vec4 ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR { 1.0f };
const float ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH { 0.01f };
const bool ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA { false };
const bool ParabolaPointer::RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT { false };
gpu::PipelinePointer ParabolaPointer::RenderState::ParabolaRenderItem::_parabolaPipeline { nullptr };
gpu::PipelinePointer ParabolaPointer::RenderState::ParabolaRenderItem::_transparentParabolaPipeline { nullptr };
@ -46,6 +47,7 @@ void ParabolaPointer::editRenderStatePath(const std::string& state, const QVaria
float alpha = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR.a;
float width = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH;
bool isVisibleInSecondaryCamera = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
bool drawInFront = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT;
bool enabled = false;
if (!pathMap.isEmpty()) {
enabled = true;
@ -63,8 +65,11 @@ void ParabolaPointer::editRenderStatePath(const std::string& state, const QVaria
if (pathMap["isVisibleInSecondaryCamera"].isValid()) {
isVisibleInSecondaryCamera = pathMap["isVisibleInSecondaryCamera"].toBool();
}
if (pathMap["drawInFront"].isValid()) {
drawInFront = pathMap["drawInFront"].toBool();
}
}
renderState->editParabola(color, alpha, width, isVisibleInSecondaryCamera, enabled);
renderState->editParabola(color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled);
}
}
@ -146,7 +151,7 @@ void ParabolaPointer::setVisualPickResultInternal(PickResultPointer pickResult,
}
ParabolaPointer::RenderState::RenderState(const OverlayID& startID, const OverlayID& endID, const glm::vec3& pathColor, float pathAlpha, float pathWidth,
bool isVisibleInSecondaryCamera, bool pathEnabled) :
bool isVisibleInSecondaryCamera, bool drawInFront, bool pathEnabled) :
StartEndRenderState(startID, endID)
{
render::Transaction transaction;
@ -154,7 +159,7 @@ ParabolaPointer::RenderState::RenderState(const OverlayID& startID, const Overla
_pathID = scene->allocateID();
_pathWidth = pathWidth;
if (render::Item::isValidID(_pathID)) {
auto renderItem = std::make_shared<ParabolaRenderItem>(pathColor, pathAlpha, pathWidth, isVisibleInSecondaryCamera, pathEnabled);
auto renderItem = std::make_shared<ParabolaRenderItem>(pathColor, pathAlpha, pathWidth, isVisibleInSecondaryCamera, drawInFront, pathEnabled);
transaction.resetItem(_pathID, std::make_shared<ParabolaRenderItem::Payload>(renderItem));
scene->enqueueTransaction(transaction);
}
@ -182,15 +187,16 @@ void ParabolaPointer::RenderState::disable() {
}
}
void ParabolaPointer::RenderState::editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool enabled) {
void ParabolaPointer::RenderState::editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled) {
if (render::Item::isValidID(_pathID)) {
render::Transaction transaction;
auto scene = qApp->getMain3DScene();
transaction.updateItem<ParabolaRenderItem>(_pathID, [color, alpha, width, isVisibleInSecondaryCamera, enabled](ParabolaRenderItem& item) {
transaction.updateItem<ParabolaRenderItem>(_pathID, [color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled](ParabolaRenderItem& item) {
item.setColor(color);
item.setAlpha(alpha);
item.setWidth(width);
item.setIsVisibleInSecondaryCamera(isVisibleInSecondaryCamera);
item.setDrawInFront(drawInFront);
item.setEnabled(enabled);
item.updateKey();
});
@ -238,6 +244,7 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
float alpha = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_COLOR.a;
float width = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_WIDTH;
bool isVisibleInSecondaryCamera = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
bool drawInFront = RenderState::ParabolaRenderItem::DEFAULT_PARABOLA_DRAWINFRONT;
bool enabled = false;
if (propMap["path"].isValid()) {
enabled = true;
@ -258,6 +265,10 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
if (pathMap["isVisibleInSecondaryCamera"].isValid()) {
isVisibleInSecondaryCamera = pathMap["isVisibleInSecondaryCamera"].toBool();
}
if (pathMap["drawInFront"].isValid()) {
drawInFront = pathMap["drawInFront"].toBool();
}
}
QUuid endID;
@ -269,7 +280,7 @@ std::shared_ptr<StartEndRenderState> ParabolaPointer::buildRenderState(const QVa
}
}
return std::make_shared<RenderState>(startID, endID, color, alpha, width, isVisibleInSecondaryCamera, enabled);
return std::make_shared<RenderState>(startID, endID, color, alpha, width, isVisibleInSecondaryCamera, drawInFront, enabled);
}
PointerEvent ParabolaPointer::buildPointerEvent(const PickedObject& target, const PickResultPointer& pickResult, const std::string& button, bool hover) {
@ -321,8 +332,8 @@ glm::vec3 ParabolaPointer::findIntersection(const PickedObject& pickedObject, co
}
ParabolaPointer::RenderState::ParabolaRenderItem::ParabolaRenderItem(const glm::vec3& color, float alpha, float width,
bool isVisibleInSecondaryCamera, bool enabled) :
_isVisibleInSecondaryCamera(isVisibleInSecondaryCamera), _enabled(enabled)
bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled) :
_isVisibleInSecondaryCamera(isVisibleInSecondaryCamera), _drawInFront(drawInFront), _enabled(enabled)
{
_uniformBuffer->resize(sizeof(ParabolaData));
setColor(color);
@ -358,6 +369,10 @@ void ParabolaPointer::RenderState::ParabolaRenderItem::updateKey() {
builder.withTagBits(render::hifi::TAG_MAIN_VIEW);
}
if (_drawInFront) {
builder.withLayer(render::hifi::LAYER_3D_FRONT);
}
_key = builder.build();
}

View file

@ -21,7 +21,7 @@ public:
using Pointer = Payload::DataPointer;
ParabolaRenderItem(const glm::vec3& color, float alpha, float width,
bool isVisibleInSecondaryCamera, bool enabled);
bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled);
~ParabolaRenderItem() {}
static gpu::PipelinePointer _parabolaPipeline;
@ -46,11 +46,13 @@ public:
void setAcceleration(const glm::vec3& acceleration) { _parabolaData.acceleration = acceleration; }
void setOrigin(const glm::vec3& origin) { _origin = origin; }
void setIsVisibleInSecondaryCamera(const bool& isVisibleInSecondaryCamera) { _isVisibleInSecondaryCamera = isVisibleInSecondaryCamera; }
void setDrawInFront(const bool& drawInFront) { _drawInFront = drawInFront; }
void setEnabled(const bool& enabled) { _enabled = enabled; }
static const glm::vec4 DEFAULT_PARABOLA_COLOR;
static const float DEFAULT_PARABOLA_WIDTH;
static const bool DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA;
static const bool DEFAULT_PARABOLA_DRAWINFRONT;
private:
render::Item::Bound _bound;
@ -58,6 +60,7 @@ public:
glm::vec3 _origin { 0.0f };
bool _isVisibleInSecondaryCamera { DEFAULT_PARABOLA_ISVISIBLEINSECONDARYCAMERA };
bool _drawInFront { DEFAULT_PARABOLA_DRAWINFRONT };
bool _visible { false };
bool _enabled { false };
@ -77,7 +80,7 @@ public:
RenderState() {}
RenderState(const OverlayID& startID, const OverlayID& endID, const glm::vec3& pathColor, float pathAlpha, float pathWidth,
bool isVisibleInSecondaryCamera, bool pathEnabled);
bool isVisibleInSecondaryCamera, bool drawInFront, bool pathEnabled);
void setPathWidth(float width) { _pathWidth = width; }
float getPathWidth() const { return _pathWidth; }
@ -87,7 +90,7 @@ public:
void update(const glm::vec3& origin, const glm::vec3& end, const glm::vec3& surfaceNormal, bool scaleWithAvatar, bool distanceScaleEnd, bool centerEndY,
bool faceAvatar, bool followNormal, float followNormalStrength, float distance, const PickResultPointer& pickResult) override;
void editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool enabled);
void editParabola(const glm::vec3& color, float alpha, float width, bool isVisibleInSecondaryCamera, bool drawInFront, bool enabled);
private:
int _pathID;

View file

@ -218,6 +218,7 @@ unsigned int PointerScriptingInterface::createLaserPointer(const QVariant& prope
* @property {number} alpha=1.0 The alpha of the parabola.
* @property {number} width=0.01 The width of the parabola, in meters.
* @property {boolean} isVisibleInSecondaryCamera=false If <code>true</code>, the parabola is visible in the secondary camera.
* @property {boolean} drawInFront=false If <code>true</code>, the parabola is rendered in front of other items in the scene.
*/
/**jsdoc
* A set of properties used to define the visual aspect of a Parabola Pointer in the case that the Pointer is not intersecting something. Same as a {@link Pointers.ParabolaPointerRenderState},
@ -393,4 +394,4 @@ QVariantMap PointerScriptingInterface::getPrevPickResult(unsigned int uid) const
QVariantMap PointerScriptingInterface::getPointerProperties(unsigned int uid) const {
return DependencyManager::get<PointerManager>()->getPointerProperties(uid);
}
}

View file

@ -205,7 +205,7 @@ public:
/**jsdoc
* Returns information about an existing Pointer
* @function Pointers.getPointerState
* @function Pointers.getPointerProperties
* @param {number} uid The ID of the Pointer, as returned by {@link Pointers.createPointer}.
* @returns {Pointers.LaserPointerProperties|Pointers.StylusPointerProperties|Pointers.ParabolaPointerProperties} The information about the Pointer.
* Currently only includes renderStates and defaultRenderStates with associated overlay IDs.

View file

@ -241,4 +241,4 @@ glm::vec2 StylusPointer::findPos2D(const PickedObject& pickedObject, const glm::
default:
return glm::vec2(NAN);
}
}
}

View file

@ -201,3 +201,12 @@ bool HMDScriptingInterface::isKeyboardVisible() {
void HMDScriptingInterface::centerUI() {
QMetaObject::invokeMethod(qApp, "centerUI", Qt::QueuedConnection);
}
QVariant HMDScriptingInterface::getPlayAreaRect() {
auto rect = qApp->getActiveDisplayPlugin()->getPlayAreaRect();
return qRectFToVariant(rect);
}
QVector<glm::vec3> HMDScriptingInterface::getSensorPositions() {
return qApp->getActiveDisplayPlugin()->getSensorPositions();
}

View file

@ -61,6 +61,8 @@ class QScriptEngine;
* @property {Uuid} miniTabletScreenID - The UUID of the mini tablet's screen overlay. <code>null</code> if not in HMD mode.
* @property {number} miniTabletHand - The hand that the mini tablet is displayed on: <code>0</code> for left hand,
* <code>1</code> for right hand, <code>-1</code> if not in HMD mode.
* @property {Rect} playArea=0,0,0,0 - The size and position of the HMD play area in sensor coordinates. <em>Read-only.</em>
* @property {Vec3[]} sensorPositions=[] - The positions of the VR system sensors in sensor coordinates. <em>Read-only.</em>
*/
class HMDScriptingInterface : public AbstractHMDScriptingInterface, public Dependency {
Q_OBJECT
@ -75,6 +77,8 @@ class HMDScriptingInterface : public AbstractHMDScriptingInterface, public Depen
Q_PROPERTY(QUuid miniTabletID READ getCurrentMiniTabletID WRITE setCurrentMiniTabletID)
Q_PROPERTY(QUuid miniTabletScreenID READ getCurrentMiniTabletScreenID WRITE setCurrentMiniTabletScreenID)
Q_PROPERTY(int miniTabletHand READ getCurrentMiniTabletHand WRITE setCurrentMiniTabletHand)
Q_PROPERTY(QVariant playArea READ getPlayAreaRect);
Q_PROPERTY(QVector<glm::vec3> sensorPositions READ getSensorPositions);
public:
@ -384,6 +388,9 @@ public:
void setCurrentMiniTabletHand(int miniTabletHand) { _miniTabletHand = miniTabletHand; }
int getCurrentMiniTabletHand() const { return _miniTabletHand; }
QVariant getPlayAreaRect();
QVector<glm::vec3> getSensorPositions();
private:
bool _showTablet { false };
bool _tabletContextualMode { false };

View file

@ -27,6 +27,7 @@
#include "Application.h"
#include "scripting/HMDScriptingInterface.h"
#include "Constants.h"
HIFI_QML_DEF(LoginDialog)
@ -220,8 +221,6 @@ void LoginDialog::signup(const QString& email, const QString& username, const QS
payload.insert("user", userObject);
static const QString API_SIGNUP_PATH = "api/v1/users";
qDebug() << "Sending a request to create an account for" << username;
auto accountManager = DependencyManager::get<AccountManager>();

View file

@ -226,18 +226,22 @@ void setupPreferences() {
static const QString VR_MOVEMENT{ "VR Movement" };
{
static const QString movementsControlChannel = QStringLiteral("Hifi-Advanced-Movement-Disabler");
auto getter = [myAvatar]()->bool { return myAvatar->useAdvancedMovementControls(); };
auto setter = [myAvatar](bool value) { myAvatar->setUseAdvancedMovementControls(value); };
preferences->addPreference(new CheckPreference(VR_MOVEMENT,
QStringLiteral("Advanced movement in VR (Teleport movement when unchecked)"),
getter, setter));
auto getter = [myAvatar]()->int { return myAvatar->useAdvancedMovementControls() ? 1 : 0; };
auto setter = [myAvatar](int value) { myAvatar->setUseAdvancedMovementControls(value == 1); };
auto preference =
new RadioButtonsPreference(VR_MOVEMENT, "Teleporting only / Walking and teleporting", getter, setter);
QStringList items;
items << "Teleporting only" << "Walking and teleporting";
preference->setHeading("Movement mode");
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->bool { return myAvatar->getFlyingHMDPref(); };
auto setter = [myAvatar](bool value) { myAvatar->setFlyingHMDPref(value); };
preferences->addPreference(new CheckPreference(VR_MOVEMENT, "Flying & jumping (HMD)", getter, setter));
auto preference = new CheckPreference(VR_MOVEMENT, "Jumping and flying", getter, setter);
preference->setIndented(true);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->int { return myAvatar->getSnapTurn() ? 0 : 1; };
@ -245,9 +249,16 @@ void setupPreferences() {
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Snap turn / Smooth turn", getter, setter);
QStringList items;
items << "Snap turn" << "Smooth turn";
preference->setHeading("Rotation mode");
preference->setItems(items);
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->bool { return myAvatar->getShowPlayArea(); };
auto setter = [myAvatar](bool value) { myAvatar->setShowPlayArea(value); };
auto preference = new CheckPreference(VR_MOVEMENT, "Show room boundaries while teleporting", getter, setter);
preferences->addPreference(preference);
}
{
auto getter = [=]()->float { return myAvatar->getUserHeight(); };
auto setter = [=](float value) { myAvatar->setUserHeight(value); };
@ -258,12 +269,6 @@ void setupPreferences() {
preference->setStep(0.001f);
preferences->addPreference(preference);
}
{
auto preference = new ButtonPreference(VR_MOVEMENT, "RESET SENSORS", [] {
qApp->resetSensors();
});
preferences->addPreference(preference);
}
static const QString AVATAR_CAMERA{ "Mouse Sensitivity" };
{

View file

@ -1173,20 +1173,5 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
// crossfade old/new output and accumulate
crossfade_4x2(bqBuffer, output, crossfadeTable, HRTF_BLOCK);
_silentState = false;
}
void AudioHRTF::renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames) {
// process the first silent block, to flush internal state
if (!_silentState) {
render(input, output, index, azimuth, distance, gain, numFrames);
}
// new parameters become old
_azimuthState = azimuth;
_distanceState = distance;
_gainState = gain;
_silentState = true;
_resetState = false;
}

View file

@ -47,9 +47,14 @@ public:
void render(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);
//
// Fast path when input is known to be silent
// Fast path when input is known to be silent and state has been flushed
//
void renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);
void setParameterHistory(float azimuth, float distance, float gain) {
// new parameters become old
_azimuthState = azimuth;
_distanceState = distance;
_gainState = gain;
}
//
// HRTF local gain adjustment in amplitude (1.0 == unity)
@ -59,23 +64,25 @@ public:
// clear internal state, but retain settings
void reset() {
// FIR history
memset(_firState, 0, sizeof(_firState));
if (!_resetState) {
// FIR history
memset(_firState, 0, sizeof(_firState));
// integer delay history
memset(_delayState, 0, sizeof(_delayState));
// integer delay history
memset(_delayState, 0, sizeof(_delayState));
// biquad history
memset(_bqState, 0, sizeof(_bqState));
// biquad history
memset(_bqState, 0, sizeof(_bqState));
// parameter history
_azimuthState = 0.0f;
_distanceState = 0.0f;
_gainState = 0.0f;
// parameter history
_azimuthState = 0.0f;
_distanceState = 0.0f;
_gainState = 0.0f;
// _gainAdjust is retained
// _gainAdjust is retained
_silentState = true;
_resetState = true;
}
}
private:
@ -110,7 +117,7 @@ private:
// global and local gain adjustment
float _gainAdjust = HRTF_GAIN;
bool _silentState = true;
bool _resetState = true;
};
#endif // AudioHRTF_h
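With renderSilent() gone, a silent stream is handled by recording parameter history and resetting once; the new _resetState flag makes repeated reset() calls on an already-flushed HRTF free. A sketch of the assumed caller pattern (mixOneStream is illustrative; the real call sites are the updateHRTFParameters()/resetHRTFState() helpers declared on AudioMixerSlave earlier in this commit):
void mixOneStream(AudioHRTF& hrtf, int16_t* input, float* output, int index,
                  float azimuth, float distance, float gain, int numFrames,
                  bool inputIsSilent) {
    if (inputIsSilent) {
        hrtf.setParameterHistory(azimuth, distance, gain); // parameters still advance
        hrtf.reset();  // flushes FIR/delay/biquad state on the first silent frame only
    } else {
        hrtf.render(input, output, index, azimuth, distance, gain, numFrames);
    }
}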

View file

@ -66,4 +66,6 @@ public:
PacketStreamStats _packetStreamWindowStats;
};
static_assert(sizeof(AudioStreamStats) == 152, "AudioStreamStats size isn't right");
#endif // hifi_AudioStreamStats_h

View file

@ -171,7 +171,6 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
} else {
_mismatchedAudioCodecCount++;
qDebug(audio) << "Codec mismatch: expected" << _selectedCodecName << "got" << codecInPacket;
if (packetPCM) {
// If there are PCM packets in-flight after the codec is changed, use them.
@ -191,7 +190,8 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
auto sendingNode = DependencyManager::get<NodeList>()->nodeWithLocalID(message.getSourceID());
if (sendingNode) {
emit mismatchedAudioCodec(sendingNode, _selectedCodecName, codecInPacket);
qDebug(audio) << "Codec mismatch threshold exceeded, SelectedAudioFormat(" << _selectedCodecName << " ) sent";
qDebug(audio) << "Codec mismatch threshold exceeded, sent selected codec"
<< _selectedCodecName << "to" << message.getSenderSockAddr();
}
}
}
@ -208,7 +208,6 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
int framesAvailable = _ringBuffer.framesAvailable();
// if this stream was starved, check if we're still starved.
if (_isStarved && framesAvailable >= _desiredJitterBufferFrames) {
qCInfo(audiostream, "Starve ended");
_isStarved = false;
}
// if the ringbuffer exceeds the desired size by more than the threshold specified,
@ -378,10 +377,6 @@ void InboundAudioStream::framesAvailableChanged() {
}
void InboundAudioStream::setToStarved() {
if (!_isStarved) {
qCInfo(audiostream, "Starved");
}
_consecutiveNotMixedCount = 0;
_starveCount++;
// if we have more than the desired frames when setToStarved() is called, then we'll immediately

View file

@ -30,6 +30,8 @@
// Audio Env bitset
const int HAS_REVERB_BIT = 0; // 1st bit
using StreamSequenceNumber = quint16;
class InboundAudioStream : public NodeData {
Q_OBJECT

View file

@ -50,7 +50,7 @@ int InjectedAudioStream::parseStreamProperties(PacketType type,
}
// pull the loopback flag and set our boolean
uchar shouldLoopback;
LoopbackFlag shouldLoopback;
packetStream >> shouldLoopback;
_shouldLoopbackForNode = (shouldLoopback == 1);

View file

@ -16,6 +16,8 @@
#include "PositionalAudioStream.h"
using LoopbackFlag = uchar;
class InjectedAudioStream : public PositionalAudioStream {
public:
InjectedAudioStream(const QUuid& streamIdentifier, bool isStereo, int numStaticJitterFrames = -1);

View file

@ -14,7 +14,6 @@
#include <cstring>
#include <glm/detail/func_common.hpp>
#include <QtCore/QDataStream>
#include <QtCore/QLoggingCategory>
@ -78,20 +77,15 @@ int PositionalAudioStream::parsePositionalData(const QByteArray& positionalByteA
QDataStream packetStream(positionalByteArray);
packetStream.readRawData(reinterpret_cast<char*>(&_position), sizeof(_position));
// if the client sends us a bad position, flag it so that we don't consider this stream for mixing
if (glm::isnan(_position.x) || glm::isnan(_position.y) || glm::isnan(_position.z)) {
HIFI_FDEBUG("PositionalAudioStream unpacked invalid position for node" << uuidStringWithoutCurlyBraces(getNodeID()) );
_hasValidPosition = false;
} else {
_hasValidPosition = true;
}
packetStream.readRawData(reinterpret_cast<char*>(&_orientation), sizeof(_orientation));
packetStream.readRawData(reinterpret_cast<char*>(&_avatarBoundingBoxCorner), sizeof(_avatarBoundingBoxCorner));
packetStream.readRawData(reinterpret_cast<char*>(&_avatarBoundingBoxScale), sizeof(_avatarBoundingBoxScale));
if (_avatarBoundingBoxCorner != _ignoreBox.getCorner()) {
// if the ignore box corner changes, we need to re-calculate the ignore box
calculateIgnoreBox();
}
// if this node sent us a NaN for the first float in orientation then don't consider this good audio and bail
if (glm::isnan(_orientation.x)) {
// NOTE: why would we reset the ring buffer here?
@ -107,3 +101,29 @@ AudioStreamStats PositionalAudioStream::getAudioStreamStats() const {
streamStats._streamType = _type;
return streamStats;
}
void PositionalAudioStream::calculateIgnoreBox() {
if (_avatarBoundingBoxScale != glm::vec3(0)) {
auto scale = _avatarBoundingBoxScale;
// enforce a minimum scale
static const glm::vec3 MIN_IGNORE_BOX_SCALE = glm::vec3(0.3f, 1.3f, 0.3f);
if (glm::any(glm::lessThan(scale, MIN_IGNORE_BOX_SCALE))) {
scale = MIN_IGNORE_BOX_SCALE;
}
// (this is an arbitrary number, determined empirically for comfort)
const float IGNORE_BOX_SCALE_FACTOR = 2.4f;
scale *= IGNORE_BOX_SCALE_FACTOR;
// create the box (we use a box for the zone for convenience)
_ignoreBox.setBox(_avatarBoundingBoxCorner, scale);
}
}
void PositionalAudioStream::enableIgnoreBox() {
// re-calculate the ignore box using the latest values
calculateIgnoreBox();
_isIgnoreBoxEnabled = true;
}
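A quick worked example of calculateIgnoreBox(), since the clamp replaces the whole vector when any single component is too small (it does not clamp per component):
// scale = (0.5, 2.0, 0.5): no component is below (0.3, 1.3, 0.3),
//     so the box dimensions are scale * 2.4 = (1.2, 4.8, 1.2)
// scale = (0.2, 1.4, 0.2): 0.2 < 0.3, so the WHOLE vector is replaced by
//     MIN_IGNORE_BOX_SCALE, giving (0.3, 1.3, 0.3) * 2.4 = (0.72, 3.12, 0.72)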

View file

@ -19,6 +19,24 @@
const int AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY = 100;
using StreamID = QUuid;
const int NUM_STREAM_ID_BYTES = NUM_BYTES_RFC4122_UUID;
struct NodeIDStreamID {
QUuid nodeID;
Node::LocalID nodeLocalID;
StreamID streamID;
NodeIDStreamID(QUuid nodeID, Node::LocalID nodeLocalID, StreamID streamID)
: nodeID(nodeID), nodeLocalID(nodeLocalID), streamID(streamID) {};
bool operator==(const NodeIDStreamID& other) const {
return (nodeLocalID == other.nodeLocalID || nodeID == other.nodeID) && streamID == other.streamID;
}
};
using ChannelFlag = quint8;
class PositionalAudioStream : public InboundAudioStream {
Q_OBJECT
public:
@ -30,7 +48,7 @@ public:
PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, int numStaticJitterFrames = -1);
const QUuid DEFAULT_STREAM_IDENTIFIER = QUuid();
virtual const QUuid& getStreamIdentifier() const { return DEFAULT_STREAM_IDENTIFIER; }
virtual const StreamID& getStreamIdentifier() const { return DEFAULT_STREAM_IDENTIFIER; }
virtual void resetStats() override;
@ -51,7 +69,15 @@ public:
const glm::vec3& getAvatarBoundingBoxCorner() const { return _avatarBoundingBoxCorner; }
const glm::vec3& getAvatarBoundingBoxScale() const { return _avatarBoundingBoxScale; }
bool hasValidPosition() const { return _hasValidPosition; }
using IgnoreBox = AABox;
// called from single AudioMixerSlave while processing packets for node
void enableIgnoreBox();
void disableIgnoreBox() { _isIgnoreBoxEnabled = false; }
// thread-safe, called from AudioMixerSlave(s) while preparing mixes
bool isIgnoreBoxEnabled() const { return _isIgnoreBoxEnabled; }
const IgnoreBox& getIgnoreBox() const { return _ignoreBox; }
protected:
// disallow copying of PositionalAudioStream objects
@ -61,6 +87,8 @@ protected:
int parsePositionalData(const QByteArray& positionalByteArray);
protected:
void calculateIgnoreBox();
Type _type;
glm::vec3 _position;
glm::quat _orientation;
@ -79,7 +107,8 @@ protected:
float _quietestFrameLoudness;
int _frameCounter;
bool _hasValidPosition { false };
bool _isIgnoreBoxEnabled { false };
IgnoreBox _ignoreBox;
};
#endif // hifi_PositionalAudioStream_h

View file

@ -178,6 +178,17 @@ namespace controller {
return inputRecorder->getSaveDirectory();
}
QStringList ScriptingInterface::getRunningInputDeviceNames() {
QMutexLocker locker(&_runningDevicesMutex);
return _runningInputDeviceNames;
}
void ScriptingInterface::updateRunningInputDevices(const QString& deviceName, bool isRunning, const QStringList& runningDevices) {
QMutexLocker locker(&_runningDevicesMutex);
_runningInputDeviceNames = runningDevices;
emit inputDeviceRunningChanged(deviceName, isRunning);
}
bool ScriptingInterface::triggerHapticPulseOnDevice(unsigned int device, float strength, float duration, controller::Hand hand) const {
return DependencyManager::get<UserInputMapper>()->triggerHapticPulseOnDevice(device, strength, duration, hand);
}

View file

@ -26,6 +26,7 @@
#include <QThread>
#include <QtCore/QObject>
#include <QtCore/QVariant>
#include <QMutex>
#include <QtQml/QJSValue>
#include <QtScript/QScriptValue>
@ -431,6 +432,13 @@ namespace controller {
*/
Q_INVOKABLE QString getInputRecorderSaveDirectory();
/**jsdoc
* Gets the names of all the active and enabled (running) input devices.
* @function Controller.getRunningInputDeviceNames
* @returns {string[]} The names of the running input devices.
*/
Q_INVOKABLE QStringList getRunningInputDeviceNames();
bool isMouseCaptured() const { return _mouseCaptured; }
bool isTouchCaptured() const { return _touchCaptured; }
bool isWheelCaptured() const { return _wheelCaptured; }
@ -531,6 +539,8 @@ namespace controller {
*/
virtual void releaseActionEvents() { _actionsCaptured = false; }
void updateRunningInputDevices(const QString& deviceName, bool isRunning, const QStringList& runningDevices);
signals:
/**jsdoc
* Triggered when an action occurs.
@ -590,6 +600,17 @@ namespace controller {
*/
void hardwareChanged();
/**jsdoc
* Triggered when an input device is enabled or disabled, for example when Leap Motion is
* toggled in Settings > Controls.
* @function Controller.inputDeviceRunningChanged
* @param {string} deviceName - The name of the device that was enabled or disabled.
* @param {boolean} isRunning - <code>true</code> if the device is now running, <code>false</code> if it is not.
* @returns {Signal}
*/
void inputDeviceRunningChanged(QString deviceName, bool isRunning);
private:
// Update the exposed variant maps reporting active hardware
void updateMaps();
@ -598,10 +619,14 @@ namespace controller {
QVariantMap _actions;
QVariantMap _standard;
QStringList _runningInputDeviceNames;
std::atomic<bool> _mouseCaptured{ false };
std::atomic<bool> _touchCaptured { false };
std::atomic<bool> _wheelCaptured { false };
std::atomic<bool> _actionsCaptured { false };
QMutex _runningDevicesMutex;
};
}

View file

@ -210,7 +210,8 @@ void RenderableModelEntityItem::updateModelBounds() {
}
if (model->getScaleToFitDimensions() != getScaledDimensions() ||
model->getRegistrationPoint() != getRegistrationPoint()) {
model->getRegistrationPoint() != getRegistrationPoint() ||
!model->getIsScaledToFit()) {
// The machinery for updateModelBounds will give existing models the opportunity to fix their
// translation/rotation/scale/registration. The first two are straightforward, but the latter two
// have guards to make sure they don't happen after they've already been set. Here we reset those guards.

View file

@ -96,7 +96,6 @@ Node::Node(const QUuid& uuid, NodeType_t type, const HifiSockAddr& publicSocket,
{
// Update socket's object name
setType(_type);
_ignoreRadiusEnabled = false;
}
void Node::setType(char type) {
@ -114,9 +113,12 @@ void Node::updateClockSkewUsec(qint64 clockSkewSample) {
_clockSkewUsec = (quint64)_clockSkewMovingPercentile.getValueAtPercentile();
}
void Node::parseIgnoreRequestMessage(QSharedPointer<ReceivedMessage> message) {
Node::NodesIgnoredPair Node::parseIgnoreRequestMessage(QSharedPointer<ReceivedMessage> message) {
bool addToIgnore;
message->readPrimitive(&addToIgnore);
std::vector<QUuid> nodesIgnored;
while (message->getBytesLeftToRead()) {
// parse out the UUID being ignored from the packet
QUuid ignoredUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
@ -126,17 +128,23 @@ void Node::parseIgnoreRequestMessage(QSharedPointer<ReceivedMessage> message) {
} else {
removeIgnoredNode(ignoredUUID);
}
nodesIgnored.push_back(ignoredUUID);
}
return { nodesIgnored, addToIgnore };
}
void Node::addIgnoredNode(const QUuid& otherNodeID) {
if (!otherNodeID.isNull() && otherNodeID != _uuid) {
QReadLocker lock { &_ignoredNodeIDSetLock };
QWriteLocker lock { &_ignoredNodeIDSetLock };
qCDebug(networking) << "Adding" << uuidStringWithoutCurlyBraces(otherNodeID) << "to ignore set for"
<< uuidStringWithoutCurlyBraces(_uuid);
<< uuidStringWithoutCurlyBraces(_uuid);
// add the session UUID to the set of ignored ones for this listening node
_ignoredNodeIDSet.insert(otherNodeID);
if (std::find(_ignoredNodeIDs.begin(), _ignoredNodeIDs.end(), otherNodeID) == _ignoredNodeIDs.end()) {
_ignoredNodeIDs.push_back(otherNodeID);
}
} else {
qCWarning(networking) << "Node::addIgnoredNode called with null ID or ID of ignoring node.";
}
@ -144,22 +152,25 @@ void Node::addIgnoredNode(const QUuid& otherNodeID) {
void Node::removeIgnoredNode(const QUuid& otherNodeID) {
if (!otherNodeID.isNull() && otherNodeID != _uuid) {
// insert/find are read locked concurrently. unsafe_erase is not concurrent, and needs a write lock.
QWriteLocker lock { &_ignoredNodeIDSetLock };
qCDebug(networking) << "Removing" << uuidStringWithoutCurlyBraces(otherNodeID) << "from ignore set for"
<< uuidStringWithoutCurlyBraces(_uuid);
<< uuidStringWithoutCurlyBraces(_uuid);
// remove the session UUID from the set of ignored ones for this listening node
_ignoredNodeIDSet.unsafe_erase(otherNodeID);
// remove the session UUID from the list of ignored ones for this listening node, if it exists
auto it = std::remove(_ignoredNodeIDs.begin(), _ignoredNodeIDs.end(), otherNodeID);
_ignoredNodeIDs.erase(it, _ignoredNodeIDs.end());
} else {
qCWarning(networking) << "Node::removeIgnoredNode called with null ID or ID of ignoring node.";
}
}
void Node::parseIgnoreRadiusRequestMessage(QSharedPointer<ReceivedMessage> message) {
bool enabled;
message->readPrimitive(&enabled);
_ignoreRadiusEnabled = enabled;
bool Node::isIgnoringNodeWithID(const QUuid& nodeID) const {
QReadLocker lock { &_ignoredNodeIDSetLock };
// check if this node ID is present in the ignore node ID set
return std::find(_ignoredNodeIDs.begin(), _ignoredNodeIDs.end(), nodeID) != _ignoredNodeIDs.end();
}
QDataStream& operator<<(QDataStream& out, const Node& node) {

View file

@ -15,6 +15,7 @@
#include <memory>
#include <ostream>
#include <stdint.h>
#include <vector>
#include <QtCore/QDebug>
#include <QtCore/QMutex>
@ -80,17 +81,19 @@ public:
bool getCanKick() const { return _permissions.can(NodePermissions::Permission::canKick); }
bool getCanReplaceContent() const { return _permissions.can(NodePermissions::Permission::canReplaceDomainContent); }
void parseIgnoreRequestMessage(QSharedPointer<ReceivedMessage> message);
using NodesIgnoredPair = std::pair<std::vector<QUuid>, bool>;
NodesIgnoredPair parseIgnoreRequestMessage(QSharedPointer<ReceivedMessage> message);
void addIgnoredNode(const QUuid& otherNodeID);
void removeIgnoredNode(const QUuid& otherNodeID);
bool isIgnoringNodeWithID(const QUuid& nodeID) const { QReadLocker lock { &_ignoredNodeIDSetLock }; return _ignoredNodeIDSet.find(nodeID) != _ignoredNodeIDSet.cend(); }
void parseIgnoreRadiusRequestMessage(QSharedPointer<ReceivedMessage> message);
bool isIgnoringNodeWithID(const QUuid& nodeID) const;
using IgnoredNodeIDs = std::vector<QUuid>;
const IgnoredNodeIDs& getIgnoredNodeIDs() const { return _ignoredNodeIDs; }
friend QDataStream& operator<<(QDataStream& out, const Node& node);
friend QDataStream& operator>>(QDataStream& in, Node& node);
bool isIgnoreRadiusEnabled() const { return _ignoreRadiusEnabled; }
private:
// privatize copy and assignment operator to disallow Node copying
Node(const Node &otherNode);
@ -108,11 +111,10 @@ private:
MovingPercentile _clockSkewMovingPercentile;
NodePermissions _permissions;
bool _isUpstream { false };
tbb::concurrent_unordered_set<QUuid, UUIDHasher> _ignoredNodeIDSet;
IgnoredNodeIDs _ignoredNodeIDs;
mutable QReadWriteLock _ignoredNodeIDSetLock;
std::vector<QString> _replicatedUsernames { };
std::atomic_bool _ignoreRadiusEnabled;
};
Q_DECLARE_METATYPE(Node*)

View file

@ -112,6 +112,9 @@ public:
virtual bool suppressKeyboard() { return false; }
virtual void unsuppressKeyboard() {};
virtual bool isKeyboardVisible() { return false; }
virtual QRectF getPlayAreaRect() { return QRectF(); }
virtual QVector<glm::vec3> getSensorPositions() { return QVector<glm::vec3>(); }
};
class DisplayPlugin : public Plugin, public HmdDisplay {

View file

@ -75,8 +75,11 @@ public:
virtual void saveSettings() const {}
virtual void loadSettings() {}
virtual bool isRunning() const { return _active; }
signals:
void deviceStatusChanged(const QString& deviceName, bool isRunning) const;
// These signals should be emitted when a device is first known to be available. In some cases this will
// be in `init()`; in other cases, such as Neuron, it isn't known until activation.
// SDL2 isn't a device itself, but can have 0+ subdevices. subdeviceConnected is used in this case.
@ -85,6 +88,7 @@ signals:
protected:
bool _active { false };
bool _enabled { false };
bool _sessionStatus { false };
PluginContainer* _container { nullptr };
static const char* UNKNOWN_PLUGIN_ID;

View file

@ -225,8 +225,11 @@ void PluginManager::disableDisplayPlugin(const QString& name) {
const InputPluginList& PluginManager::getInputPlugins() {
static std::once_flag once;
static auto deviceAddedCallback = [](QString deviceName) {
static auto deviceAddedCallback = [&](QString deviceName) {
qCDebug(plugins) << "Added device: " << deviceName;
QStringList runningDevices = getRunningInputDeviceNames();
bool isDeviceRunning = runningDevices.indexOf(deviceName) >= 0;
emit inputDeviceRunningChanged(deviceName, isDeviceRunning, runningDevices);
UserActivityLogger::getInstance().connectedDevice("input", deviceName);
};
static auto subdeviceAddedCallback = [](QString pluginName, QString deviceName) {
@ -252,6 +255,9 @@ const InputPluginList& PluginManager::getInputPlugins() {
for (auto plugin : _inputPlugins) {
connect(plugin.get(), &Plugin::deviceConnected, this, deviceAddedCallback, Qt::QueuedConnection);
connect(plugin.get(), &Plugin::subdeviceConnected, this, subdeviceAddedCallback, Qt::QueuedConnection);
connect(plugin.get(), &Plugin::deviceStatusChanged, this, [&](const QString& deviceName, bool isRunning) {
emit inputDeviceRunningChanged(deviceName, isRunning, getRunningInputDeviceNames());
}, Qt::QueuedConnection);
plugin->setContainer(_container);
plugin->init();
}
@ -259,6 +265,16 @@ const InputPluginList& PluginManager::getInputPlugins() {
return _inputPlugins;
}
QStringList PluginManager::getRunningInputDeviceNames() const {
QStringList runningDevices;
for (auto plugin: _inputPlugins) {
if (plugin->isRunning()) {
runningDevices << plugin->getName();
}
}
return runningDevices;
}
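Together with the connections above (and the Application-side connect earlier in this commit), the plumbing is: Plugin::deviceStatusChanged -> PluginManager::inputDeviceRunningChanged -> controller::ScriptingInterface::updateRunningInputDevices. A sketch of the plugin side (MyInputPlugin and setEnabled are hypothetical):
class MyInputPlugin : public InputPlugin {
public:
    void setEnabled(bool enabled) {
        _active = enabled;
        // PluginManager rebroadcasts this with the refreshed running-device list
        emit deviceStatusChanged(getName(), _active);
    }
};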
void PluginManager::setPreferredDisplayPlugins(const QStringList& displays) {
preferredDisplayPlugins = displays;
}

View file

@ -19,6 +19,7 @@ using PluginManagerPointer = QSharedPointer<PluginManager>;
class PluginManager : public QObject, public Dependency {
SINGLETON_DEPENDENCY
Q_OBJECT
public:
static PluginManagerPointer getInstance();
@ -44,6 +45,10 @@ public:
void setInputPluginProvider(const InputPluginProvider& provider);
void setCodecPluginProvider(const CodecPluginProvider& provider);
void setInputPluginSettingsPersister(const InputPluginSettingsPersister& persister);
QStringList getRunningInputDeviceNames() const;
signals:
void inputDeviceRunningChanged(const QString& pluginName, bool isRunning, const QStringList& runningDevices);
private:
PluginManager() = default;

View file

@ -153,4 +153,4 @@ bool PointerManager::isMouse(unsigned int uid) {
return pointer->isMouse();
}
return false;
}
}

View file

@ -26,7 +26,6 @@ layout(location=0) in vec2 varTexCoord0;
layout(location=0) out vec4 outFragColor;
const float FAR_Z = 1.0;
const float LINEAR_DEPTH_BIAS = 5e-3;
const float OPACITY_EPSILON = 5e-3;
<@func main(IS_FILLED)@>
@ -46,7 +45,7 @@ void main(void) {
highlightedDepth = -evalZeyeFromZdb(highlightedDepth);
sceneDepth = -evalZeyeFromZdb(sceneDepth);
if (sceneDepth < (highlightedDepth-LINEAR_DEPTH_BIAS)) {
if (sceneDepth < highlightedDepth) {
outFragColor = vec4(params._fillOccludedColor, params._fillOccludedAlpha);
} else {
outFragColor = vec4(params._fillUnoccludedColor, params._fillUnoccludedAlpha);
@ -107,7 +106,7 @@ void main(void) {
sceneDepth = -evalZeyeFromZdb(sceneDepth);
// Are we occluded?
if (sceneDepth < (outlinedDepth/*-LINEAR_DEPTH_BIAS*/)) {
if (sceneDepth < outlinedDepth) {
outFragColor = vec4(params._outlineOccludedColor, intensity * params._outlineOccludedAlpha);
} else {
outFragColor = vec4(params._outlineUnoccludedColor, intensity * params._outlineUnoccludedAlpha);

View file

@ -340,10 +340,16 @@ public:
class CheckPreference : public BoolPreference {
Q_OBJECT
Q_PROPERTY(bool indented READ getIndented CONSTANT)
public:
CheckPreference(const QString& category, const QString& name, Getter getter, Setter setter)
: BoolPreference(category, name, getter, setter) { }
Type getType() override { return Checkbox; }
bool getIndented() { return _isIndented; }
void setIndented(const bool indented) { _isIndented = indented; }
protected:
bool _isIndented { false };
};
class PrimaryHandPreference : public StringPreference {
@ -356,16 +362,20 @@ public:
class RadioButtonsPreference : public IntPreference {
Q_OBJECT
Q_PROPERTY(QString heading READ getHeading CONSTANT)
Q_PROPERTY(QStringList items READ getItems CONSTANT)
public:
RadioButtonsPreference(const QString& category, const QString& name, Getter getter, Setter setter)
: IntPreference(category, name, getter, setter) { }
Type getType() override { return RadioButtons; }
const QString& getHeading() { return _heading; }
const QStringList& getItems() { return _items; }
void setHeading(const QString& heading) { _heading = heading; }
void setItems(const QStringList& items) { _items = items; }
protected:
QString _heading;
QStringList _items;
};
#endif
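Both preference types gain QML-facing properties in these hunks: CheckPreference can now render indented beneath a parent option, and RadioButtonsPreference carries its heading and item labels. A hypothetical registration showing how the new setters are meant to be used (the category, labels, and getter/setter lambdas are illustrative only):

auto preferences = DependencyManager::get<Preferences>();

auto check = new CheckPreference("Example Plugin", "Enabled", getter, setter);
check->setIndented(true);                 // exposed to QML via the new 'indented' property
preferences->addPreference(check);

auto radios = new RadioButtonsPreference("Example Plugin", "Mode", intGetter, intSetter);
radios->setHeading("Tracking mode");      // exposed via the new 'heading' property
radios->setItems({ "Head only", "Head and hands" });  // exposed via 'items'
preferences->addPreference(radios);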

View file

@ -857,14 +857,6 @@ void qRectFromScriptValue(const QScriptValue &object, QRect& rect) {
rect.setHeight(object.property("height").toVariant().toInt());
}
/**jsdoc
* Defines a rectangular portion of an image or screen.
* @typedef {object} Rect
* @property {number} x - Integer left, x-coordinate value.
* @property {number} y - Integer top, y-coordinate value.
* @property {number} width - Integer width of the rectangle.
* @property {number} height - Integer height of the rectangle.
*/
QVariant qRectToVariant(const QRect& rect) {
QVariantMap obj;
obj["x"] = rect.x();
@ -896,6 +888,53 @@ QRect qRectFromVariant(const QVariant& object) {
return qRectFromVariant(object, valid);
}
QScriptValue qRectFToScriptValue(QScriptEngine* engine, const QRectF& rect) {
QScriptValue obj = engine->newObject();
obj.setProperty("x", rect.x());
obj.setProperty("y", rect.y());
obj.setProperty("width", rect.width());
obj.setProperty("height", rect.height());
return obj;
}
void qRectFFromScriptValue(const QScriptValue &object, QRectF& rect) {
rect.setX(object.property("x").toVariant().toFloat());
rect.setY(object.property("y").toVariant().toFloat());
rect.setWidth(object.property("width").toVariant().toFloat());
rect.setHeight(object.property("height").toVariant().toFloat());
}
QVariant qRectFToVariant(const QRectF& rect) {
QVariantMap obj;
obj["x"] = rect.x();
obj["y"] = rect.y();
obj["width"] = rect.width();
obj["height"] = rect.height();
return obj;
}
QRectF qRectFFromVariant(const QVariant& objectVar, bool& valid) {
QVariantMap object = objectVar.toMap();
QRectF rect;
valid = false;
rect.setX(object["x"].toFloat(&valid));
if (valid) {
rect.setY(object["y"].toFloat(&valid));
}
if (valid) {
rect.setWidth(object["width"].toFloat(&valid));
}
if (valid) {
rect.setHeight(object["height"].toFloat(&valid));
}
return rect;
}
QRectF qRectFFromVariant(const QVariant& object) {
bool valid;
return qRectFFromVariant(object, valid);
}
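These QRectF conversions mirror the existing QRect set one-for-one, with toFloat replacing toInt and the same early-out validity chaining. A short, hypothetical round-trip showing the semantics of the two-argument overload:

#include <QRectF>
#include <QVariantMap>

QVariantMap map;
map["x"] = 1.5;
map["y"] = 2.0;
map["width"] = 3.25;
map["height"] = 4.0;

bool valid = false;
QRectF rect = qRectFFromVariant(QVariant(map), valid);
// valid is true only if all four fields converted; conversion stops at the
// first failure, leaving the remaining components default-initialized (0).
QVariant roundTripped = qRectFToVariant(rect);  // back to {x, y, width, height}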
QScriptValue qColorToScriptValue(QScriptEngine* engine, const QColor& color) {
QScriptValue object = engine->newObject();
object.setProperty("red", color.red());

View file

@ -163,13 +163,26 @@ QVariant quatToVariant(const glm::quat& quat);
glm::quat quatFromVariant(const QVariant &object, bool& isValid);
glm::quat quatFromVariant(const QVariant &object);
// Rect
/**jsdoc
* Defines a rectangular portion of an image or screen, or similar.
* @typedef {object} Rect
* @property {number} x - Left, x-coordinate value.
* @property {number} y - Top, y-coordinate value.
* @property {number} width - Width of the rectangle.
* @property {number} height - Height of the rectangle.
*/
QScriptValue qRectToScriptValue(QScriptEngine* engine, const QRect& rect);
void qRectFromScriptValue(const QScriptValue& object, QRect& rect);
QRect qRectFromVariant(const QVariant& object, bool& isValid);
QRect qRectFromVariant(const QVariant& object);
QVariant qRectToVariant(const QRect& rect);
QScriptValue qRectFToScriptValue(QScriptEngine* engine, const QRectF& rect);
void qRectFFromScriptValue(const QScriptValue& object, QRectF& rect);
QRectF qRectFFromVariant(const QVariant& object, bool& isValid);
QRectF qRectFFromVariant(const QVariant& object);
QVariant qRectFToVariant(const QRectF& rect);
// QColor
QScriptValue qColorToScriptValue(QScriptEngine* engine, const QColor& color);
void qColorFromScriptValue(const QScriptValue& object, QColor& color);

View file

@ -21,7 +21,7 @@
Q_DECLARE_LOGGING_CATEGORY(inputplugins)
Q_LOGGING_CATEGORY(inputplugins, "hifi.inputplugins")
const char* LeapMotionPlugin::NAME = "Leap Motion";
const char* LeapMotionPlugin::NAME = "LeapMotion";
const char* LeapMotionPlugin::LEAPMOTION_ID_STRING = "Leap Motion";
const bool DEFAULT_ENABLED = false;
@ -203,7 +203,6 @@ static const char* getControllerJointName(controller::StandardPoseChannel i) {
return "unknown";
}
void LeapMotionPlugin::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
if (!_enabled) {
return;
@ -312,13 +311,13 @@ void LeapMotionPlugin::InputDevice::update(float deltaTime, const controller::In
void LeapMotionPlugin::init() {
loadSettings();
auto preferences = DependencyManager::get<Preferences>();
static const QString LEAPMOTION_PLUGIN { "Leap Motion" };
{
auto getter = [this]()->bool { return _enabled; };
auto setter = [this](bool value) {
_enabled = value;
emit deviceStatusChanged(getName(), isRunning());
saveSettings();
if (!_enabled) {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
@ -406,6 +405,7 @@ void LeapMotionPlugin::loadSettings() {
settings.beginGroup(idString);
{
_enabled = settings.value(SETTINGS_ENABLED_KEY, QVariant(DEFAULT_ENABLED)).toBool();
emit deviceStatusChanged(getName(), isRunning());
_sensorLocation = settings.value(SETTINGS_SENSOR_LOCATION_KEY, QVariant(DEFAULT_SENSOR_LOCATION)).toString();
_desktopHeightOffset =
settings.value(SETTINGS_DESKTOP_HEIGHT_OFFSET_KEY, QVariant(DEFAULT_DESKTOP_HEIGHT_OFFSET)).toFloat();

View file

@ -30,7 +30,7 @@ public:
// Plugin methods
virtual const QString getName() const override { return NAME; }
const QString getID() const override { return LEAPMOTION_ID_STRING; }
bool isRunning() const override { return _active && _enabled; }
virtual void init() override;
virtual bool activate() override;
@ -43,8 +43,6 @@ protected:
static const char* NAME;
static const char* LEAPMOTION_ID_STRING;
const float DEFAULT_DESKTOP_HEIGHT_OFFSET = 0.2f;
bool _enabled { false };
QString _sensorLocation;
float _desktopHeightOffset { DEFAULT_DESKTOP_HEIGHT_OFFSET };

View file

@ -369,7 +369,11 @@ void NeuronPlugin::init() {
static const QString NEURON_PLUGIN { "Perception Neuron" };
{
auto getter = [this]()->bool { return _enabled; };
auto setter = [this](bool value) { _enabled = value; saveSettings(); };
auto setter = [this](bool value) {
_enabled = value;
saveSettings();
emit deviceStatusChanged(getName(), _enabled && _active);
};
auto preference = new CheckPreference(NEURON_PLUGIN, "Enabled", getter, setter);
preferences->addPreference(preference);
}
@ -493,7 +497,7 @@ void NeuronPlugin::loadSettings() {
{
// enabled
_enabled = settings.value("enabled", QVariant(DEFAULT_ENABLED)).toBool();
emit deviceStatusChanged(getName(), _enabled && _active);
// serverAddress
_serverAddress = settings.value("serverAddress", QVariant(DEFAULT_SERVER_ADDRESS)).toString();

View file

@ -30,7 +30,7 @@ public:
virtual bool isSupported() const override;
virtual const QString getName() const override { return NAME; }
const QString getID() const override { return NEURON_ID_STRING; }
bool isRunning() const override { return _active && _enabled; }
virtual bool activate() override;
virtual void deactivate() override;
@ -67,7 +67,6 @@ protected:
static const char* NAME;
static const char* NEURON_ID_STRING;
bool _enabled;
QString _serverAddress;
int _serverPort;
void* _socketRef;

View file

@ -79,10 +79,11 @@ bool SDL2Manager::activate() {
auto preferences = DependencyManager::get<Preferences>();
static const QString SDL2_PLUGIN { "Game Controller" };
{
auto getter = [this]()->bool { return _isEnabled; };
auto getter = [this]()->bool { return _enabled; };
auto setter = [this](bool value) {
_isEnabled = value;
_enabled = value;
saveSettings();
emit deviceStatusChanged(getName(), isRunning());
};
auto preference = new CheckPreference(SDL2_PLUGIN, "Enabled", getter, setter);
preferences->addPreference(preference);
@ -147,7 +148,7 @@ void SDL2Manager::saveSettings() const {
QString idString = getID();
settings.beginGroup(idString);
{
settings.setValue(QString(SETTINGS_ENABLED_KEY), _isEnabled);
settings.setValue(QString(SETTINGS_ENABLED_KEY), _enabled);
}
settings.endGroup();
}
@ -157,7 +158,8 @@ void SDL2Manager::loadSettings() {
QString idString = getID();
settings.beginGroup(idString);
{
_isEnabled = settings.value(SETTINGS_ENABLED_KEY, QVariant(DEFAULT_ENABLED)).toBool();
_enabled = settings.value(SETTINGS_ENABLED_KEY, QVariant(DEFAULT_ENABLED)).toBool();
emit deviceStatusChanged(getName(), isRunning());
}
settings.endGroup();
}
@ -173,7 +175,7 @@ void SDL2Manager::pluginFocusOutEvent() {
}
void SDL2Manager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
if (!_isEnabled) {
if (!_enabled) {
return;
}

View file

@ -26,7 +26,7 @@ public:
bool isSupported() const override;
const QString getName() const override { return NAME; }
const QString getID() const override { return SDL2_ID_STRING; }
bool isRunning() const override { return _active && _enabled; }
QStringList getSubdeviceNames() override;
void init() override;
@ -81,7 +81,6 @@ private:
int buttonRelease() const { return SDL_RELEASED; }
QMap<SDL_JoystickID, Joystick::Pointer> _openJoysticks;
bool _isEnabled { false };
bool _isInitialized { false };
static const char* NAME;
static const char* SDL2_ID_STRING;

View file

@ -171,3 +171,53 @@ void OculusBaseDisplayPlugin::updatePresentPose() {
_currentPresentFrameInfo.presentPose = ovr::toGlm(trackingState.HeadPose.ThePose);
_currentPresentFrameInfo.renderPose = _currentPresentFrameInfo.presentPose;
}
QRectF OculusBaseDisplayPlugin::getPlayAreaRect() {
if (!_session) {
return QRectF();
}
int floorPointsCount = 0;
auto result = ovr_GetBoundaryGeometry(_session, ovrBoundary_PlayArea, nullptr, &floorPointsCount);
if (!OVR_SUCCESS(result) || floorPointsCount != 4) {
return QRectF();
}
ovrVector3f floorPoints[4]; // floorPointsCount was verified to be 4 above; a stack array avoids leaking the heap allocation
result = ovr_GetBoundaryGeometry(_session, ovrBoundary_PlayArea, floorPoints, nullptr);
if (!OVR_SUCCESS(result)) {
return QRectF();
}
auto minXZ = ovr::toGlm(floorPoints[0]);
auto maxXZ = minXZ;
for (int i = 1; i < floorPointsCount; i++) {
auto point = ovr::toGlm(floorPoints[i]);
minXZ.x = std::min(minXZ.x, point.x);
minXZ.z = std::min(minXZ.z, point.z);
maxXZ.x = std::max(maxXZ.x, point.x);
maxXZ.z = std::max(maxXZ.z, point.z);
}
glm::vec2 center = glm::vec2((minXZ.x + maxXZ.x) / 2, (minXZ.z + maxXZ.z) / 2);
glm::vec2 dimensions = glm::vec2(maxXZ.x - minXZ.x, maxXZ.z - minXZ.z);
return QRectF(center.x, center.y, dimensions.x, dimensions.y);
}
QVector<glm::vec3> OculusBaseDisplayPlugin::getSensorPositions() {
if (!_session) {
return QVector<glm::vec3>();
}
QVector<glm::vec3> result;
auto numTrackers = ovr_GetTrackerCount(_session);
for (uint i = 0; i < numTrackers; i++) {
auto trackerPose = ovr_GetTrackerPose(_session, i);
if (trackerPose.TrackerFlags & ovrTracker_PoseTracked) {
result.append(ovr::toGlm(trackerPose.Pose.Position));
}
}
return result;
}

View file

@ -27,6 +27,9 @@ public:
void resetSensors() override final;
bool beginFrameRender(uint32_t frameIndex) override;
float getTargetFrameRate() const override { return _hmdDesc.DisplayRefreshRate; }
QRectF getPlayAreaRect() override;
QVector<glm::vec3> getSensorPositions() override;
protected:
void customizeContext() override;

View file

@ -750,3 +750,37 @@ QString OpenVrDisplayPlugin::getPreferredAudioOutDevice() const {
}
return device;
}
QRectF OpenVrDisplayPlugin::getPlayAreaRect() {
auto chaperone = vr::VRChaperone();
if (!chaperone) {
qWarning() << "No chaperone";
return QRectF();
}
if (chaperone->GetCalibrationState() >= vr::ChaperoneCalibrationState_Error) {
qWarning() << "Chaperone status =" << chaperone->GetCalibrationState();
return QRectF();
}
vr::HmdQuad_t rect;
if (!chaperone->GetPlayAreaRect(&rect)) {
qWarning() << "Chaperone rect not obtained";
return QRectF();
}
auto minXZ = transformPoint(_sensorResetMat, toGlm(rect.vCorners[0]));
auto maxXZ = minXZ;
for (int i = 1; i < 4; i++) {
auto point = transformPoint(_sensorResetMat, toGlm(rect.vCorners[i]));
minXZ.x = std::min(minXZ.x, point.x);
minXZ.z = std::min(minXZ.z, point.z);
maxXZ.x = std::max(maxXZ.x, point.x);
maxXZ.z = std::max(maxXZ.z, point.z);
}
glm::vec2 center = glm::vec2((minXZ.x + maxXZ.x) / 2, (minXZ.z + maxXZ.z) / 2);
glm::vec2 dimensions = glm::vec2(maxXZ.x - minXZ.x, maxXZ.z - minXZ.z);
return QRectF(center.x, center.y, dimensions.x, dimensions.y);
}
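Both the Oculus and OpenVR implementations perform the same reduction: collapse the boundary corners to axis-aligned bounds in the XZ plane, then pack the result as (centerX, centerZ, width, depth) rather than QRectF's usual top-left convention — the teleport script below reads HMD.playArea.x/.y as a center offset accordingly. A factored-out sketch of that reduction (hypothetical helper, not in this commit):

#include <algorithm>
#include <vector>
#include <QRectF>
#include <glm/glm.hpp>

QRectF xzBoundsToPlayAreaRect(const std::vector<glm::vec3>& corners) {
    if (corners.empty()) {
        return QRectF();
    }
    glm::vec3 minXZ = corners[0];
    glm::vec3 maxXZ = corners[0];
    for (const auto& point : corners) {
        minXZ.x = std::min(minXZ.x, point.x);
        minXZ.z = std::min(minXZ.z, point.z);
        maxXZ.x = std::max(maxXZ.x, point.x);
        maxXZ.z = std::max(maxXZ.z, point.z);
    }
    // x, y hold the center of the play area; width, height hold its extent.
    return QRectF((minXZ.x + maxXZ.x) / 2.0f, (minXZ.z + maxXZ.z) / 2.0f,
                  maxXZ.x - minXZ.x, maxXZ.z - minXZ.z);
}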

View file

@ -64,6 +64,8 @@ public:
QString getPreferredAudioInDevice() const override;
QString getPreferredAudioOutDevice() const override;
QRectF getPlayAreaRect() override;
protected:
bool internalActivate() override;
void internalDeactivate() override;

View file

@ -77,14 +77,10 @@ static glm::mat4 computeOffset(glm::mat4 defaultToReferenceMat, glm::mat4 defaul
return glm::inverse(poseMat) * referenceJointMat;
}
static bool sortPucksYPosition(PuckPosePair firstPuck, PuckPosePair secondPuck) {
static bool sortPucksYPosition(const PuckPosePair& firstPuck, const PuckPosePair& secondPuck) {
return (firstPuck.second.translation.y < secondPuck.second.translation.y);
}
static bool sortPucksXPosition(PuckPosePair firstPuck, PuckPosePair secondPuck) {
return (firstPuck.second.translation.x < secondPuck.second.translation.x);
}
static bool determineLimbOrdering(const controller::Pose& poseA, const controller::Pose& poseB, glm::vec3 axis, glm::vec3 axisOrigin) {
glm::vec3 poseAPosition = poseA.getTranslation();
glm::vec3 poseBPosition = poseB.getTranslation();
@ -568,6 +564,7 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr
return;
}
// Compute the defaultToReferenceMat; this takes the inputCalibration default poses into the reference frame (sensor space).
glm::mat4 defaultToReferenceMat = glm::mat4();
if (_headConfig == HeadConfig::HMD) {
defaultToReferenceMat = calculateDefaultToReferenceForHmd(inputCalibration);
@ -591,7 +588,17 @@ void ViveControllerManager::InputDevice::calibrate(const controller::InputCalibr
}
bool ViveControllerManager::InputDevice::configureHands(const glm::mat4& defaultToReferenceMat, const controller::InputCalibrationData& inputCalibration) {
std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), sortPucksXPosition);
// Sort valid tracked objects in the default frame by the x dimension (left to right).
// Because the sort is in the default frame we guarantee that poses are relative to the head facing,
// i.e. -x will always be to the left of the head, and +x will be to the right.
// This allows the user to be facing in any direction in sensor space while calibrating.
glm::mat4 referenceToDefaultMat = glm::inverse(defaultToReferenceMat);
std::sort(_validTrackedObjects.begin(), _validTrackedObjects.end(), [&referenceToDefaultMat](const PuckPosePair& a, const PuckPosePair& b) {
glm::vec3 aPos = transformPoint(referenceToDefaultMat, a.second.translation);
glm::vec3 bPos = transformPoint(referenceToDefaultMat, b.second.translation);
return (aPos.x < bPos.x);
});
int puckCount = (int)_validTrackedObjects.size();
if (_handConfig == HandConfig::Pucks && puckCount >= MIN_PUCK_COUNT) {
glm::vec3 headXAxis = getReferenceHeadXAxis(defaultToReferenceMat, inputCalibration.defaultHeadMat);
@ -1025,25 +1032,23 @@ void ViveControllerManager::InputDevice::hapticsHelper(float deltaTime, bool lef
void ViveControllerManager::InputDevice::calibrateLeftHand(const glm::mat4& defaultToReferenceMat, const controller::InputCalibrationData& inputCalibration, PuckPosePair& handPair) {
controller::Pose& handPose = handPair.second;
glm::mat4 handPoseAvatarMat = createMatFromQuatAndPos(handPose.getRotation(), handPose.getTranslation());
glm::vec3 handPoseTranslation = extractTranslation(handPoseAvatarMat);
glm::vec3 handPoseZAxis = glmExtractRotation(handPoseAvatarMat) * glm::vec3(0.0f, 0.0f, 1.0f);
glm::vec3 avatarHandYAxis = transformVectorFast(inputCalibration.defaultLeftHand, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 handPoseZAxis = handPose.getRotation() * glm::vec3(0.0f, 0.0f, 1.0f);
glm::vec3 referenceHandYAxis = transformVectorFast(defaultToReferenceMat * inputCalibration.defaultLeftHand, glm::vec3(0.0f, 1.0f, 0.0f));
const float EPSILON = 1.0e-4f;
if (fabsf(fabsf(glm::dot(glm::normalize(avatarHandYAxis), glm::normalize(handPoseZAxis))) - 1.0f) < EPSILON) {
if (fabsf(fabsf(glm::dot(glm::normalize(referenceHandYAxis), glm::normalize(handPoseZAxis))) - 1.0f) < EPSILON) {
handPoseZAxis = glm::vec3(0.0f, 0.0f, 1.0f);
}
// This allows the user to avoid matching the t-pose exactly. We assume that the y facing of the hand lies in the plane of the puck,
// where the plane of the puck is defined by the local z-axis of the puck, which faces out of the vive logo/power button.
glm::vec3 zPrime = handPoseZAxis;
glm::vec3 xPrime = glm::normalize(glm::cross(avatarHandYAxis, handPoseZAxis));
glm::vec3 xPrime = glm::normalize(glm::cross(referenceHandYAxis, handPoseZAxis));
glm::vec3 yPrime = glm::normalize(glm::cross(zPrime, xPrime));
glm::mat4 newHandMat = glm::mat4(glm::vec4(xPrime, 0.0f), glm::vec4(yPrime, 0.0f),
glm::vec4(zPrime, 0.0f), glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
glm::vec3 translationOffset = glm::vec3(0.0f, _handPuckYOffset, _handPuckZOffset);
glm::quat initialRotation = glmExtractRotation(handPoseAvatarMat);
glm::quat initialRotation = handPose.getRotation();
glm::quat finalRotation = glmExtractRotation(newHandMat);
glm::quat rotationOffset = glm::inverse(initialRotation) * finalRotation;
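The re-orthonormalization above is identical in both hand calibrations: keep the puck's z-axis, build x from the reference hand's y facing, then recompute y so the frame is orthogonal and right-handed. As a standalone sketch (hypothetical helper; inputs assumed normalized and non-parallel, which the EPSILON check guards):

#include <glm/glm.hpp>

glm::mat3 buildHandFrame(const glm::vec3& referenceHandYAxis, const glm::vec3& handPoseZAxis) {
    glm::vec3 zPrime = handPoseZAxis;                               // keep the puck's facing
    glm::vec3 xPrime = glm::normalize(glm::cross(referenceHandYAxis, zPrime));
    glm::vec3 yPrime = glm::normalize(glm::cross(zPrime, xPrime));  // completes a right-handed frame
    return glm::mat3(xPrime, yPrime, zPrime);                       // columns are the new basis
}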
@ -1056,25 +1061,23 @@ void ViveControllerManager::InputDevice::calibrateLeftHand(const glm::mat4& defa
void ViveControllerManager::InputDevice::calibrateRightHand(const glm::mat4& defaultToReferenceMat, const controller::InputCalibrationData& inputCalibration, PuckPosePair& handPair) {
controller::Pose& handPose = handPair.second;
glm::mat4 handPoseAvatarMat = createMatFromQuatAndPos(handPose.getRotation(), handPose.getTranslation());
glm::vec3 handPoseTranslation = extractTranslation(handPoseAvatarMat);
glm::vec3 handPoseZAxis = glmExtractRotation(handPoseAvatarMat) * glm::vec3(0.0f, 0.0f, 1.0f);
glm::vec3 avatarHandYAxis = transformVectorFast(inputCalibration.defaultRightHand, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 handPoseZAxis = handPose.getRotation() * glm::vec3(0.0f, 0.0f, 1.0f);
glm::vec3 referenceHandYAxis = transformVectorFast(defaultToReferenceMat * inputCalibration.defaultRightHand, glm::vec3(0.0f, 1.0f, 0.0f));
const float EPSILON = 1.0e-4f;
if (fabsf(fabsf(glm::dot(glm::normalize(avatarHandYAxis), glm::normalize(handPoseZAxis))) - 1.0f) < EPSILON) {
if (fabsf(fabsf(glm::dot(glm::normalize(referenceHandYAxis), glm::normalize(handPoseZAxis))) - 1.0f) < EPSILON) {
handPoseZAxis = glm::vec3(0.0f, 0.0f, 1.0f);
}
// This allows the user to avoid matching the t-pose exactly. We assume that the y facing of the hand lies in the plane of the puck,
// where the plane of the puck is defined by the local z-axis of the puck, which faces out of the vive logo/power button.
glm::vec3 zPrime = handPoseZAxis;
glm::vec3 xPrime = glm::normalize(glm::cross(avatarHandYAxis, handPoseZAxis));
glm::vec3 xPrime = glm::normalize(glm::cross(referenceHandYAxis, handPoseZAxis));
glm::vec3 yPrime = glm::normalize(glm::cross(zPrime, xPrime));
glm::mat4 newHandMat = glm::mat4(glm::vec4(xPrime, 0.0f), glm::vec4(yPrime, 0.0f),
glm::vec4(zPrime, 0.0f), glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
glm::vec3 translationOffset = glm::vec3(0.0f, _handPuckYOffset, _handPuckZOffset);
glm::quat initialRotation = glmExtractRotation(handPoseAvatarMat);
glm::quat initialRotation = handPose.getRotation();
glm::quat finalRotation = glmExtractRotation(newHandMat);
glm::quat rotationOffset = glm::inverse(initialRotation) * finalRotation;
@ -1105,15 +1108,18 @@ void ViveControllerManager::InputDevice::calibrateFeet(const glm::mat4& defaultT
void ViveControllerManager::InputDevice::calibrateFoot(const glm::mat4& defaultToReferenceMat, const controller::InputCalibrationData& inputCalibration, PuckPosePair& footPair, bool isLeftFoot){
controller::Pose footPose = footPair.second;
glm::mat4 puckPoseAvatarMat = createMatFromQuatAndPos(footPose.getRotation(), footPose.getTranslation());
glm::mat4 puckPoseMat = createMatFromQuatAndPos(footPose.getRotation(), footPose.getTranslation());
glm::mat4 defaultFoot = isLeftFoot ? inputCalibration.defaultLeftFoot : inputCalibration.defaultRightFoot;
glm::mat4 footOffset = computeOffset(defaultToReferenceMat, defaultFoot, footPose);
glm::quat rotationOffset = glmExtractRotation(footOffset);
glm::vec3 translationOffset = extractTranslation(footOffset);
glm::vec3 avatarXAxisInPuckFrame = glm::normalize(transformVectorFast(glm::inverse(puckPoseAvatarMat), glm::vec3(-1.0f, 0.0f, 0.0f)));
float distance = glm::dot(translationOffset, avatarXAxisInPuckFrame);
glm::vec3 finalTranslation = translationOffset - (distance * avatarXAxisInPuckFrame);
glm::vec3 localXAxisInPuckFrame = glm::normalize(transformVectorFast(glm::inverse(puckPoseMat) * defaultToReferenceMat, glm::vec3(-1.0f, 0.0f, 0.0f)));
float distance = glm::dot(translationOffset, localXAxisInPuckFrame);
// We ensure the offset vector lies in the sagittal plane of the avatar.
// This helps prevent wide or narrow stances due to the user not matching the t-pose perfectly.
glm::vec3 finalTranslation = translationOffset - (distance * localXAxisInPuckFrame);
glm::mat4 finalOffset = createMatFromQuatAndPos(rotationOffset, finalTranslation);
if (isLeftFoot) {

Binary file not shown.

Binary file not shown.

View file

@ -21,14 +21,10 @@ Script.include("/~/system/libraries/controllers.js");
(function() { // BEGIN LOCAL_SCOPE
var TARGET_MODEL_URL = Script.resolvePath("../../assets/models/teleport-destination.fbx");
var TARGET_MODEL_URL = Script.resolvePath("../../assets/models/teleportationSpotBasev8.fbx");
var SEAT_MODEL_URL = Script.resolvePath("../../assets/models/teleport-seat.fbx");
var TARGET_MODEL_DIMENSIONS = {
x: 1.15,
y: 0.5,
z: 1.15
};
var TARGET_MODEL_DIMENSIONS = { x: 0.6552, y: 0.3063, z: 0.6552 };
var COLORS_TELEPORT_SEAT = {
red: 255,
@ -59,20 +55,23 @@ Script.include("/~/system/libraries/controllers.js");
var cancelPath = {
color: COLORS_TELEPORT_CANCEL,
alpha: 1,
width: 0.025
alpha: 0.3,
width: 0.025,
drawInFront: true
};
var teleportPath = {
color: COLORS_TELEPORT_CAN_TELEPORT,
alpha: 1,
width: 0.025
alpha: 0.7,
width: 0.025,
drawInFront: true
};
var seatPath = {
color: COLORS_TELEPORT_SEAT,
alpha: 1,
width: 0.025
alpha: 0.7,
width: 0.025,
drawInFront: true
};
var teleportEnd = {
@ -150,19 +149,149 @@ Script.include("/~/system/libraries/controllers.js");
this.teleportParabolaHeadVisuals;
this.teleportParabolaHeadCollisions;
this.PLAY_AREA_OVERLAY_MODEL = Script.resolvePath("../../assets/models/trackingSpacev18.fbx");
this.PLAY_AREA_OVERLAY_MODEL_DIMENSIONS = { x: 1.969, y: 0.001, z: 1.969 };
this.PLAY_AREA_FLOAT_ABOVE_FLOOR = 0.005;
this.PLAY_AREA_OVERLAY_OFFSET = // Offset from floor.
{ x: 0, y: this.PLAY_AREA_OVERLAY_MODEL_DIMENSIONS.y / 2 + this.PLAY_AREA_FLOAT_ABOVE_FLOOR, z: 0 };
this.PLAY_AREA_SENSOR_OVERLAY_MODEL = Script.resolvePath("../../assets/models/oculusSensorv11.fbx");
this.PLAY_AREA_SENSOR_OVERLAY_DIMENSIONS = { x: 0.1198, y: 0.2981, z: 0.1198 };
this.PLAY_AREA_SENSOR_OVERLAY_ROTATION = Quat.fromVec3Degrees({ x: 0, y: -90, z: 0 });
this.PLAY_AREA_BOX_ALPHA = 1.0;
this.PLAY_AREA_SENSOR_ALPHA = 0.8;
this.playAreaSensorPositions = [];
this.playArea = { x: 0, y: 0 };
this.playAreaCenterOffset = this.PLAY_AREA_OVERLAY_OFFSET;
this.isPlayAreaVisible = false;
this.wasPlayAreaVisible = false;
this.isPlayAreaAvailable = false;
this.targetOverlayID = null;
this.playAreaOverlay = null;
this.playAreaSensorPositionOverlays = [];
this.TELEPORT_SCALE_DURATION = 130;
this.TELEPORT_SCALE_TIMEOUT = 25;
this.isTeleportVisible = false;
this.teleportScaleTimer = null;
this.teleportScaleStart = 0;
this.teleportScaleFactor = 0;
this.teleportScaleMode = "head";
this.TELEPORTED_FADE_DELAY_DURATION = 900;
this.TELEPORTED_FADE_DURATION = 200;
this.TELEPORTED_FADE_INTERVAL = 25;
this.TELEPORTED_FADE_DELAY_DELTA = this.TELEPORTED_FADE_INTERVAL / this.TELEPORTED_FADE_DELAY_DURATION;
this.TELEPORTED_FADE_DELTA = this.TELEPORTED_FADE_INTERVAL / this.TELEPORTED_FADE_DURATION;
this.teleportedFadeTimer = null;
this.teleportedFadeDelayFactor = 0;
this.teleportedFadeFactor = 0;
this.teleportedPosition = Vec3.ZERO;
this.TELEPORTED_TARGET_ALPHA = 1.0;
this.TELEPORTED_TARGET_ROTATION = Quat.fromVec3Degrees({ x: 0, y: 180, z: 0 });
this.teleportedTargetOverlay = null;
this.setPlayAreaDimensions = function () {
var avatarScale = MyAvatar.sensorToWorldScale;
var playAreaOverlayProperties = {
dimensions:
Vec3.multiply(this.teleportScaleFactor * avatarScale, {
x: this.playArea.width,
y: this.PLAY_AREA_OVERLAY_MODEL_DIMENSIONS.y,
z: this.playArea.height
})
};
if (this.teleportScaleFactor < 1) {
// Adjust the position of playAreaOverlay so that its base is at the correct height.
// Parenting to the teleport target is always good enough for this.
var sensorToWorldMatrix = MyAvatar.sensorToWorldMatrix;
var sensorToWorldRotation = Mat4.extractRotation(MyAvatar.sensorToWorldMatrix);
var worldToSensorMatrix = Mat4.inverse(sensorToWorldMatrix);
var avatarSensorPosition = Mat4.transformPoint(worldToSensorMatrix, MyAvatar.position);
avatarSensorPosition.y = 0;
var targetRotation = Overlays.getProperty(this.targetOverlayID, "rotation");
var relativePlayAreaCenterOffset =
Vec3.sum(this.playAreaCenterOffset, { x: 0, y: -TARGET_MODEL_DIMENSIONS.y / 2, z: 0 });
var localPosition = Vec3.multiplyQbyV(Quat.inverse(targetRotation),
Vec3.multiplyQbyV(sensorToWorldRotation,
Vec3.multiply(avatarScale, Vec3.subtract(relativePlayAreaCenterOffset, avatarSensorPosition))));
localPosition.y = this.teleportScaleFactor * localPosition.y;
playAreaOverlayProperties.parentID = this.targetOverlayID;
playAreaOverlayProperties.localPosition = localPosition;
}
Overlays.editOverlay(this.playAreaOverlay, playAreaOverlayProperties);
for (var i = 0; i < this.playAreaSensorPositionOverlays.length; i++) {
localPosition = this.playAreaSensorPositions[i];
localPosition = Vec3.multiply(avatarScale, localPosition);
// Position relative to the play area.
localPosition.y = avatarScale * (this.PLAY_AREA_SENSOR_OVERLAY_DIMENSIONS.y / 2
- this.PLAY_AREA_OVERLAY_MODEL_DIMENSIONS.y / 2);
Overlays.editOverlay(this.playAreaSensorPositionOverlays[i], {
dimensions: Vec3.multiply(this.teleportScaleFactor * avatarScale, this.PLAY_AREA_SENSOR_OVERLAY_DIMENSIONS),
parentID: this.playAreaOverlay,
localPosition: localPosition
});
}
};
this.updatePlayAreaScale = function () {
if (this.isPlayAreaAvailable) {
this.setPlayAreaDimensions();
}
};
this.teleporterSelectionName = "teleporterSelection" + hand.toString();
this.TELEPORTER_SELECTION_STYLE = {
outlineUnoccludedColor: { red: 0, green: 0, blue: 0 },
outlineUnoccludedAlpha: 0,
outlineOccludedColor: { red: 0, green: 0, blue: 0 },
outlineOccludedAlpha: 0,
fillUnoccludedColor: { red: 0, green: 0, blue: 0 },
fillUnoccludedAlpha: 0,
fillOccludedColor: { red: 0, green: 0, blue: 255 },
fillOccludedAlpha: 0.84,
outlineWidth: 0,
isOutlineSmooth: false
};
this.addToSelectedItemsList = function (properties) {
for (var i = 0, length = teleportRenderStates.length; i < length; i++) {
var state = properties.renderStates[teleportRenderStates[i].name];
if (state && state.end) {
Selection.addToSelectedItemsList(this.teleporterSelectionName, "overlay", state.end);
}
}
};
this.cleanup = function() {
Selection.removeListFromMap(_this.teleporterSelectionName);
Pointers.removePointer(_this.teleportParabolaHandVisuals);
Pointers.removePointer(_this.teleportParabolaHandCollisions);
Pointers.removePointer(_this.teleportParabolaHeadVisuals);
Pointers.removePointer(_this.teleportParabolaHeadCollisions);
Picks.removePick(_this.teleportHandCollisionPick);
Picks.removePick(_this.teleportHeadCollisionPick);
Overlays.deleteOverlay(_this.teleportedTargetOverlay);
Overlays.deleteOverlay(_this.playAreaOverlay);
for (var i = 0; i < _this.playAreaSensorPositionOverlays.length; i++) {
Overlays.deleteOverlay(_this.playAreaSensorPositionOverlays[i]);
}
_this.playAreaSensorPositionOverlays = [];
};
this.initPointers = function () {
this.initPointers = function() {
if (_this.init) {
_this.cleanup();
}
_this.teleportParabolaHandVisuals = Pointers.createPointer(PickType.Parabola, {
joint: (_this.hand === RIGHT_HAND) ? "_CAMERA_RELATIVE_CONTROLLER_RIGHTHAND" : "_CAMERA_RELATIVE_CONTROLLER_LEFTHAND",
dirOffset: { x: 0, y: 1, z: 0.1 },
@ -221,6 +350,9 @@ Script.include("/~/system/libraries/controllers.js");
maxDistance: 8.0
});
_this.addToSelectedItemsList(Pointers.getPointerProperties(_this.teleportParabolaHandVisuals));
_this.addToSelectedItemsList(Pointers.getPointerProperties(_this.teleportParabolaHeadVisuals));
var capsuleData = MyAvatar.getCollisionCapsule();
@ -262,11 +394,264 @@ Script.include("/~/system/libraries/controllers.js");
position: { x: 0, y: offset + height * 0.5, z: 0 },
threshold: _this.capsuleThreshold
});
_this.playAreaOverlay = Overlays.addOverlay("model", {
url: _this.PLAY_AREA_OVERLAY_MODEL,
drawInFront: false,
visible: false
});
_this.teleportedTargetOverlay = Overlays.addOverlay("model", {
url: TARGET_MODEL_URL,
alpha: _this.TELEPORTED_TARGET_ALPHA,
visible: false
});
Selection.addToSelectedItemsList(_this.teleporterSelectionName, "overlay", _this.playAreaOverlay);
Selection.addToSelectedItemsList(_this.teleporterSelectionName, "overlay", _this.teleportedTargetOverlay);
_this.playArea = HMD.playArea;
_this.isPlayAreaAvailable = HMD.active && _this.playArea.width !== 0 && _this.playArea.height !== 0;
if (_this.isPlayAreaAvailable) {
_this.playAreaCenterOffset = Vec3.sum({ x: _this.playArea.x, y: 0, z: _this.playArea.y },
_this.PLAY_AREA_OVERLAY_OFFSET);
_this.playAreaSensorPositions = HMD.sensorPositions;
for (var i = 0; i < _this.playAreaSensorPositions.length; i++) {
if (i > _this.playAreaSensorPositionOverlays.length - 1) {
var overlay = Overlays.addOverlay("model", {
url: _this.PLAY_AREA_SENSOR_OVERLAY_MODEL,
dimensions: _this.PLAY_AREA_SENSOR_OVERLAY_DIMENSIONS,
parentID: _this.playAreaOverlay,
localRotation: _this.PLAY_AREA_SENSOR_OVERLAY_ROTATION,
drawInFront: false,
visible: false
});
_this.playAreaSensorPositionOverlays.push(overlay);
Selection.addToSelectedItemsList(_this.teleporterSelectionName, "overlay", overlay);
}
}
_this.setPlayAreaDimensions();
}
_this.init = true;
}
};
_this.initPointers();
this.translateXAction = Controller.findAction("TranslateX");
this.translateYAction = Controller.findAction("TranslateY");
this.translateZAction = Controller.findAction("TranslateZ");
this.setPlayAreaVisible = function (visible, targetOverlayID, fade) {
if (!this.isPlayAreaAvailable || this.isPlayAreaVisible === visible) {
return;
}
this.wasPlayAreaVisible = this.isPlayAreaVisible;
this.isPlayAreaVisible = visible;
this.targetOverlayID = targetOverlayID;
if (this.teleportedFadeTimer !== null) {
Script.clearTimeout(this.teleportedFadeTimer);
this.teleportedFadeTimer = null;
}
if (visible || !fade) {
// Immediately make visible or invisible.
this.isPlayAreaVisible = visible;
Overlays.editOverlay(this.playAreaOverlay, {
dimensions: Vec3.ZERO,
alpha: this.PLAY_AREA_BOX_ALPHA,
visible: visible
});
for (var i = 0; i < this.playAreaSensorPositionOverlays.length; i++) {
Overlays.editOverlay(this.playAreaSensorPositionOverlays[i], {
dimensions: Vec3.ZERO,
alpha: this.PLAY_AREA_SENSOR_ALPHA,
visible: visible
});
}
Overlays.editOverlay(this.teleportedTargetOverlay, { visible: false });
} else {
// Fading out of overlays is initiated in setTeleportVisible().
}
};
this.updatePlayArea = function (position) {
var sensorToWorldMatrix = MyAvatar.sensorToWorldMatrix;
var sensorToWorldRotation = Mat4.extractRotation(MyAvatar.sensorToWorldMatrix);
var worldToSensorMatrix = Mat4.inverse(sensorToWorldMatrix);
var avatarSensorPosition = Mat4.transformPoint(worldToSensorMatrix, MyAvatar.position);
avatarSensorPosition.y = 0;
var targetXZPosition = { x: position.x, y: 0, z: position.z };
var avatarXZPosition = MyAvatar.position;
avatarXZPosition.y = 0;
var MIN_PARENTING_DISTANCE = 0.2; // Parenting under this distance results in the play area's rotation jittering.
if (Vec3.distance(targetXZPosition, avatarXZPosition) < MIN_PARENTING_DISTANCE) {
// Set play area position and rotation in world coordinates with no parenting.
Overlays.editOverlay(this.playAreaOverlay, {
parentID: Uuid.NULL,
position: Vec3.sum(position,
Vec3.multiplyQbyV(sensorToWorldRotation,
Vec3.multiply(MyAvatar.sensorToWorldScale,
Vec3.subtract(this.playAreaCenterOffset, avatarSensorPosition)))),
rotation: sensorToWorldRotation
});
} else {
// Set play area position and rotation in local coordinates with parenting.
var targetRotation = Overlays.getProperty(this.targetOverlayID, "rotation");
var sensorToTargetRotation = Quat.multiply(Quat.inverse(targetRotation), sensorToWorldRotation);
var relativePlayAreaCenterOffset =
Vec3.sum(this.playAreaCenterOffset, { x: 0, y: -TARGET_MODEL_DIMENSIONS.y / 2, z: 0 });
Overlays.editOverlay(this.playAreaOverlay, {
parentID: this.targetOverlayID,
localPosition: Vec3.multiplyQbyV(Quat.inverse(targetRotation),
Vec3.multiplyQbyV(sensorToWorldRotation,
Vec3.multiply(MyAvatar.sensorToWorldScale,
Vec3.subtract(relativePlayAreaCenterOffset, avatarSensorPosition)))),
localRotation: sensorToTargetRotation
});
}
};
this.scaleInTeleport = function () {
_this.teleportScaleFactor = Math.min((Date.now() - _this.teleportScaleStart) / _this.TELEPORT_SCALE_DURATION, 1);
Pointers.editRenderState(
_this.teleportScaleMode === "head" ? _this.teleportParabolaHeadVisuals : _this.teleportParabolaHandVisuals,
"teleport",
{
path: teleportPath, // Teleport beam disappears if not included.
end: { dimensions: Vec3.multiply(_this.teleportScaleFactor, TARGET_MODEL_DIMENSIONS) }
}
);
if (_this.isPlayAreaVisible) {
_this.setPlayAreaDimensions();
}
if (_this.teleportScaleFactor < 1) {
_this.teleportScaleTimer = Script.setTimeout(_this.scaleInTeleport, _this.TELEPORT_SCALE_TIMEOUT);
} else {
_this.teleportScaleTimer = null;
}
};
this.fadeOutTeleport = function () {
var isAvatarMoving,
i, length;
isAvatarMoving = Controller.getActionValue(_this.translateXAction) !== 0
|| Controller.getActionValue(_this.translateYAction) !== 0
|| Controller.getActionValue(_this.translateZAction) !== 0;
if (_this.teleportedFadeDelayFactor > 0 && !_this.isTeleportVisible && !isAvatarMoving) {
// Delay fade.
_this.teleportedFadeDelayFactor = _this.teleportedFadeDelayFactor - _this.TELEPORTED_FADE_DELAY_DELTA;
_this.teleportedFadeTimer = Script.setTimeout(_this.fadeOutTeleport, _this.TELEPORTED_FADE_INTERVAL);
} else if (_this.teleportedFadeFactor > 0 && !_this.isTeleportVisible && !isAvatarMoving) {
// Fade.
_this.teleportedFadeFactor = _this.teleportedFadeFactor - _this.TELEPORTED_FADE_DELTA;
Overlays.editOverlay(_this.teleportedTargetOverlay, {
alpha: _this.teleportedFadeFactor * _this.TELEPORTED_TARGET_ALPHA
});
if (_this.wasPlayAreaVisible) {
Overlays.editOverlay(_this.playAreaOverlay, {
alpha: _this.teleportedFadeFactor * _this.PLAY_AREA_BOX_ALPHA
});
var sensorAlpha = _this.teleportedFadeFactor * _this.PLAY_AREA_SENSOR_ALPHA;
for (i = 0, length = _this.playAreaSensorPositionOverlays.length; i < length; i++) {
Overlays.editOverlay(_this.playAreaSensorPositionOverlays[i], { alpha: sensorAlpha });
}
}
_this.teleportedFadeTimer = Script.setTimeout(_this.fadeOutTeleport, _this.TELEPORTED_FADE_INTERVAL);
} else {
// Make invisible.
Overlays.editOverlay(_this.teleportedTargetOverlay, { visible: false });
if (_this.wasPlayAreaVisible) {
Overlays.editOverlay(_this.playAreaOverlay, { visible: false });
for (i = 0, length = _this.playAreaSensorPositionOverlays.length; i < length; i++) {
Overlays.editOverlay(_this.playAreaSensorPositionOverlays[i], { visible: false });
}
}
_this.teleportedFadeTimer = null;
Selection.disableListHighlight(this.teleporterSelectionName);
}
};
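// For reference, the fade cadence works out as: TELEPORTED_FADE_DELAY_DURATION / TELEPORTED_FADE_INTERVAL
// = 900 / 25 = 36 delay ticks (TELEPORTED_FADE_DELAY_DELTA ~= 0.028 per tick), followed by
// TELEPORTED_FADE_DURATION / TELEPORTED_FADE_INTERVAL = 200 / 25 = 8 fade ticks (TELEPORTED_FADE_DELTA = 0.125).
// Avatar movement or re-showing the teleport target short-circuits both phases via the final else branch.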
this.cancelFade = function () {
// The other hand may call this to immediately hide fading overlays.
var i, length;
if (this.teleportedFadeTimer) {
Overlays.editOverlay(this.teleportedTargetOverlay, { visible: false });
if (this.wasPlayAreaVisible) {
Overlays.editOverlay(this.playAreaOverlay, { visible: false });
for (i = 0, length = this.playAreaSensorPositionOverlays.length; i < length; i++) {
Overlays.editOverlay(this.playAreaSensorPositionOverlays[i], { visible: false });
}
}
this.teleportedFadeTimer = null;
}
};
this.setTeleportVisible = function (visible, mode, fade) {
// Scales in the teleport target and play area when they start being displayed.
if (visible === this.isTeleportVisible) {
return;
}
if (visible) {
this.teleportScaleMode = mode;
Pointers.editRenderState(
mode === "head" ? _this.teleportParabolaHeadVisuals : _this.teleportParabolaHandVisuals,
"teleport",
{
path: teleportPath, // Teleport beam disappears if not included.
end: { dimensions: Vec3.ZERO }
}
);
this.getOtherModule().cancelFade();
this.teleportScaleStart = Date.now();
this.teleportScaleFactor = 0;
this.scaleInTeleport();
Selection.enableListHighlight(this.teleporterSelectionName, this.TELEPORTER_SELECTION_STYLE);
} else {
if (this.teleportScaleTimer !== null) {
Script.clearTimeout(this.teleportScaleTimer);
this.teleportScaleTimer = null;
}
if (fade) {
// Copy of target at teleported position for fading.
var avatarScale = MyAvatar.sensorToWorldScale;
Overlays.editOverlay(this.teleportedTargetOverlay, {
position: Vec3.sum(this.teleportedPosition, {
x: 0,
y: -getAvatarFootOffset() + avatarScale * TARGET_MODEL_DIMENSIONS.y / 2,
z: 0
}),
rotation: Quat.multiply(this.TELEPORTED_TARGET_ROTATION, MyAvatar.orientation),
dimensions: Vec3.multiply(avatarScale, TARGET_MODEL_DIMENSIONS),
alpha: this.TELEPORTED_TARGET_ALPHA,
visible: true
});
// Fade out over time.
this.teleportedFadeDelayFactor = 1.0;
this.teleportedFadeFactor = 1.0;
this.teleportedFadeTimer = Script.setTimeout(this.fadeOutTeleport, this.TELEPORTED_FADE_INTERVAL); // first tick; fadeOutTeleport reschedules itself
} else {
Selection.disableListHighlight(this.teleporterSelectionName);
}
}
this.isTeleportVisible = visible;
};
this.axisButtonStateX = 0; // Left/right axis button pressed.
this.axisButtonStateY = 0; // Up/down axis button pressed.
this.BUTTON_TRANSITION_DELAY = 100; // Allow time for transition from direction buttons to touch-pad.
@ -379,6 +764,7 @@ Script.include("/~/system/libraries/controllers.js");
this.setTeleportState(mode, "cancel", "collision");
} else if (teleportLocationType === TARGET.SURFACE || teleportLocationType === TARGET.DISCREPANCY) {
this.setTeleportState(mode, "teleport", "collision");
this.updatePlayArea(result.intersection);
} else if (teleportLocationType === TARGET.SEAT) {
this.setTeleportState(mode, "collision", "seat");
}
@ -387,6 +773,7 @@ Script.include("/~/system/libraries/controllers.js");
this.teleport = function(newResult, target) {
var result = newResult;
this.teleportedPosition = newResult.intersection;
if (_this.buttonValue !== 0) {
return makeRunningValues(true, [], []);
}
@ -410,6 +797,8 @@ Script.include("/~/system/libraries/controllers.js");
};
this.disableLasers = function() {
this.setPlayAreaVisible(false, null, true);
this.setTeleportVisible(false, null, true);
Pointers.disablePointer(_this.teleportParabolaHandVisuals);
Pointers.disablePointer(_this.teleportParabolaHandCollisions);
Pointers.disablePointer(_this.teleportParabolaHeadVisuals);
@ -418,14 +807,29 @@ Script.include("/~/system/libraries/controllers.js");
Picks.disablePick(_this.teleportHandCollisionPick);
};
this.setTeleportState = function(mode, visibleState, invisibleState) {
this.teleportState = "";
this.setTeleportState = function (mode, visibleState, invisibleState) {
var teleportState = mode + visibleState + invisibleState;
if (teleportState === this.teleportState) {
return;
}
this.teleportState = teleportState;
var pointerID;
if (mode === 'head') {
Pointers.setRenderState(_this.teleportParabolaHeadVisuals, visibleState);
Pointers.setRenderState(_this.teleportParabolaHeadCollisions, invisibleState);
pointerID = _this.teleportParabolaHeadVisuals;
} else {
Pointers.setRenderState(_this.teleportParabolaHandVisuals, visibleState);
Pointers.setRenderState(_this.teleportParabolaHandCollisions, invisibleState);
pointerID = _this.teleportParabolaHandVisuals;
}
var visible = visibleState === "teleport";
this.setPlayAreaVisible(visible && MyAvatar.showPlayArea,
Pointers.getPointerProperties(pointerID).renderStates.teleport.end, false);
this.setTeleportVisible(visible, mode, false);
};
this.setIgnoreEntities = function(entitiesToIgnore) {
@ -642,4 +1046,9 @@ Script.include("/~/system/libraries/controllers.js");
Messages.subscribe('Hifi-Teleport-Ignore-Remove');
Messages.messageReceived.connect(handleTeleportMessages);
MyAvatar.sensorToWorldScaleChanged.connect(function () {
leftTeleporter.updatePlayAreaScale();
rightTeleporter.updatePlayAreaScale();
});
}()); // END LOCAL_SCOPE

View file

@ -16,7 +16,9 @@
(function () {
var LEAP_MOTION_NAME = "LeapMotion";
var handTouchEnabled = true;
var leapMotionEnabled = Controller.getRunningInputDeviceNames().indexOf(LEAP_MOTION_NAME) >= 0;
var MSECONDS_AFTER_LOAD = 2000;
var updateFingerWithIndex = 0;
var untouchableEntities = [];
@ -870,6 +872,12 @@
handTouchEnabled = !shouldDisable;
});
Controller.inputDeviceRunningChanged.connect(function (deviceName, isEnabled) {
if (deviceName == LEAP_MOTION_NAME) {
leapMotionEnabled = isEnabled;
}
});
MyAvatar.disableHandTouchForIDChanged.connect(function (entityID, disable) {
var entityIndex = untouchableEntities.indexOf(entityID);
if (disable) {
@ -902,7 +910,7 @@
Script.update.connect(function () {
if (!handTouchEnabled) {
if (!handTouchEnabled || leapMotionEnabled) {
return;
}

View file

@ -15,6 +15,7 @@
<script type="text/javascript" src="js/jquery-2.1.4.min.js"></script>
<script type="text/javascript" src="js/eventBridgeLoader.js"></script>
<script type="text/javascript" src="js/spinButtons.js"></script>
<script type="text/javascript" src="js/listView.js"></script>
<script type="text/javascript" src="js/entityList.js"></script>
</head>
<body onload='loaded();'>

View file

@ -15,9 +15,31 @@ const VISIBLE_GLYPH = "&#xe007;";
const TRANSPARENCY_GLYPH = "&#xe00b;";
const BAKED_GLYPH = "&#xe01a;";
const SCRIPT_GLYPH = "k";
const BYTES_PER_MEGABYTE = 1024 * 1024;
const IMAGE_MODEL_NAME = 'default-image-model.fbx';
const COLLAPSE_EXTRA_INFO = "E";
const EXPAND_EXTRA_INFO = "D";
const FILTER_IN_VIEW_ATTRIBUTE = "pressed";
const WINDOW_NONVARIABLE_HEIGHT = 207;
const NUM_COLUMNS = 12;
const EMPTY_ENTITY_ID = "0";
const DELETE = 46; // Key code for the delete key.
const KEY_P = 80; // Key code for letter p used for Parenting hotkey.
const MAX_ITEMS = Number.MAX_VALUE; // Used to set the max length of the list of discovered entities.
const COLUMN_INDEX = {
TYPE: 0,
NAME: 1,
URL: 2,
LOCKED: 3,
VISIBLE: 4,
VERTICLES_COUNT: 5,
TEXTURES_COUNT: 6,
TEXTURES_SIZE: 7,
HAS_TRANSPARENT: 8,
IS_BAKED: 9,
DRAW_CALLS: 10,
HAS_SCRIPT: 11
};
const COMPARE_ASCENDING = function(a, b) {
let va = a[currentSortColumn];
@ -37,17 +59,21 @@ const COMPARE_DESCENDING = function(a, b) {
return COMPARE_ASCENDING(b, a);
}
// List of all entities
let entities = []
var entities = []
// List of all entities, indexed by Entity ID
var entitiesByID = {};
// The filtered and sorted list of entities
// The filtered and sorted list of entities passed to ListView
var visibleEntities = [];
// List of all entities that are currently selected
var selectedEntities = [];
var entityList = null; // The ListView
var currentSortColumn = 'type';
var currentSortOrder = ASCENDING_SORT;
var isFilterInView = false;
var showExtraInfo = false;
const ENABLE_PROFILING = false;
var profileIndent = '';
@ -56,19 +82,25 @@ const PROFILE_NOOP = function(_name, fn, args) {
} ;
const PROFILE = !ENABLE_PROFILING ? PROFILE_NOOP : function(name, fn, args) {
console.log("PROFILE-Web " + profileIndent + "(" + name + ") Begin");
var previousIndent = profileIndent;
let previousIndent = profileIndent;
profileIndent += ' ';
var before = Date.now();
let before = Date.now();
fn.apply(this, args);
var delta = Date.now() - before;
let delta = Date.now() - before;
profileIndent = previousIndent;
console.log("PROFILE-Web " + profileIndent + "(" + name + ") End " + delta + "ms");
};
debugPrint = function (message) {
console.log(message);
};
function loaded() {
openEventBridge(function() {
elEntityTable = document.getElementById("entity-table");
elEntityTableBody = document.getElementById("entity-table-body");
elEntityTableScroll = document.getElementById("entity-table-scroll");
elEntityTableHeaderRow = document.querySelectorAll("#entity-table thead th");
elRefresh = document.getElementById("refresh");
elToggleLocked = document.getElementById("locked");
elToggleVisible = document.getElementById("visible");
@ -78,15 +110,13 @@ function loaded() {
elRadius = document.getElementById("radius");
elExport = document.getElementById("export");
elPal = document.getElementById("pal");
elEntityTable = document.getElementById("entity-table");
elInfoToggle = document.getElementById("info-toggle");
elInfoToggleGlyph = elInfoToggle.firstChild;
elFooter = document.getElementById("footer-text");
elNoEntitiesMessage = document.getElementById("no-entities");
elNoEntitiesInView = document.getElementById("no-entities-in-view");
elNoEntitiesRadius = document.getElementById("no-entities-radius");
elEntityTableScroll = document.getElementById("entity-table-scroll");
document.getElementById("entity-name").onclick = function() {
setSortColumn('name');
};
@ -123,14 +153,46 @@ function loaded() {
document.getElementById("entity-hasScript").onclick = function () {
setSortColumn('hasScript');
};
elRefresh.onclick = function() {
refreshEntities();
}
elToggleLocked.onclick = function() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'toggleLocked' }));
}
elToggleVisible.onclick = function() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'toggleVisible' }));
}
elExport.onclick = function() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'export'}));
}
elPal.onclick = function() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'pal' }));
}
elDelete.onclick = function() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'delete' }));
}
elFilter.onkeyup = refreshEntityList;
elFilter.onpaste = refreshEntityList;
elFilter.onchange = onFilterChange;
elFilter.onblur = refreshFooter;
elInView.onclick = toggleFilterInView;
elRadius.onchange = onRadiusChange;
elInfoToggle.onclick = toggleInfo;
elNoEntitiesInView.style.display = "none";
entityList = new ListView(elEntityTableBody, elEntityTableScroll, elEntityTableHeaderRow,
createRow, updateRow, clearRow, WINDOW_NONVARIABLE_HEIGHT);
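// ListView (js/listView.js, included above) recycles a fixed pool of row elements rather than building
// one <tr> per entity: createRow makes a blank row, updateRow rebinds a recycled row to an entity's data
// as the list scrolls, and clearRow empties it, so the DOM only ever holds the visible rows plus a small
// scroll buffer.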
function onRowClicked(clickEvent) {
let entityID = this.dataset.entityID;
let selection = [entityID];
if (clickEvent.ctrlKey) {
let selectedIndex = selectedEntities.indexOf(entityID);
if (selectedIndex >= 0) {
selection = selectedEntities;
selection = [];
selection = selection.concat(selectedEntities);
selection.splice(selectedIndex, 1)
} else {
selection = selection.concat(selectedEntities);
@ -145,35 +207,26 @@ function loaded() {
} else if (previousItemFound === -1 && selectedEntities[0] === entity.id) {
previousItemFound = i;
}
};
}
if (previousItemFound !== -1 && clickedItemFound !== -1) {
let betweenItems = [];
selection = [];
let toItem = Math.max(previousItemFound, clickedItemFound);
// skip first and last item in this loop, we add them to selection after the loop
for (let i = (Math.min(previousItemFound, clickedItemFound) + 1); i < toItem; i++) {
visibleEntities[i].el.className = 'selected';
betweenItems.push(visibleEntities[i].id);
for (let i = Math.min(previousItemFound, clickedItemFound); i <= toItem; i++) {
selection.push(visibleEntities[i].id);
}
if (previousItemFound > clickedItemFound) {
// always make sure that we add the items in the right order
betweenItems.reverse();
selection.reverse();
}
selection = selection.concat(betweenItems, selectedEntities);
}
} else if (!clickEvent.ctrlKey && !clickEvent.shiftKey && selectedEntities.length === 1) {
// if reselecting the same entity then deselect it
if (selectedEntities[0] === entityID) {
selection = [];
}
}
selectedEntities.forEach(function(entityID) {
if (selection.indexOf(entityID) === -1) {
let entity = entitiesByID[entityID];
if (entity !== undefined) {
entity.el.className = '';
}
}
});
selectedEntities = selection;
this.className = 'selected';
updateSelectedEntities(selection);
EventBridge.emitWebEvent(JSON.stringify({
type: "selectionUpdate",
@ -191,9 +244,7 @@ function loaded() {
entityIds: [this.dataset.entityID],
}));
}
const BYTES_PER_MEGABYTE = 1024 * 1024;
function decimalMegabytes(number) {
return number ? (number / BYTES_PER_MEGABYTE).toFixed(1) : "";
}
@ -206,15 +257,12 @@ function loaded() {
let urlParts = url.split('/');
return urlParts[urlParts.length - 1];
}
// Update the entity list with the new set of data sent from edit.js
function updateEntityList(entityData) {
const IMAGE_MODEL_NAME = 'default-image-model.fbx';
entities = []
function updateEntityData(entityData) {
entities = [];
entitiesByID = {};
visibleEntities = [];
visibleEntities.length = 0; // maintains itemData reference in ListView
PROFILE("map-data", function() {
entityData.forEach(function(entity) {
let type = entity.type;
@ -222,7 +270,7 @@ function loaded() {
if (filename === IMAGE_MODEL_NAME) {
type = "Image";
}
let entityData = {
id: entity.id,
name: entity.name,
@ -231,60 +279,25 @@ function loaded() {
fullUrl: entity.url,
locked: entity.locked,
visible: entity.visible,
verticesCount: entity.verticesCount,
texturesCount: entity.texturesCount,
texturesSize: entity.texturesSize,
verticesCount: displayIfNonZero(entity.verticesCount),
texturesCount: displayIfNonZero(entity.texturesCount),
texturesSize: decimalMegabytes(entity.texturesSize),
hasTransparent: entity.hasTransparent,
isBaked: entity.isBaked,
drawCalls: entity.drawCalls,
drawCalls: displayIfNonZero(entity.drawCalls),
hasScript: entity.hasScript,
elRow: null, // if this entity has a visible row element assigned to it
selected: false // if this entity is selected for edit regardless of having a visible row
}
entities.push(entityData);
entitiesByID[entityData.id] = entityData;
});
});
PROFILE("create-rows", function() {
entities.forEach(function(entity) {
let row = document.createElement('tr');
row.dataset.entityID = entity.id;
row.attributes.title = entity.fullUrl;
function addColumn(cls, text) {
let col = document.createElement('td');
col.className = cls;
col.innerText = text;
row.append(col);
}
function addColumnHTML(cls, text) {
let col = document.createElement('td');
col.className = cls;
col.innerHTML = text;
row.append(col);
}
addColumn('type', entity.type);
addColumn('name', entity.name);
addColumn('url', entity.url);
addColumnHTML('locked glyph', entity.locked ? LOCKED_GLYPH : null);
addColumnHTML('visible glyph', entity.visible ? VISIBLE_GLYPH : null);
addColumn('verticesCount', displayIfNonZero(entity.verticesCount));
addColumn('texturesCount', displayIfNonZero(entity.texturesCount));
addColumn('texturesSize', decimalMegabytes(entity.texturesSize));
addColumnHTML('hasTransparent glyph', entity.hasTransparent ? TRANSPARENCY_GLYPH : null);
addColumnHTML('isBaked glyph', entity.isBaked ? BAKED_GLYPH : null);
addColumn('drawCalls', displayIfNonZero(entity.drawCalls));
addColumn('hasScript glyph', entity.hasScript ? SCRIPT_GLYPH : null);
row.addEventListener('click', onRowClicked);
row.addEventListener('dblclick', onRowDoubleClicked);
entity.el = row;
});
});
refreshEntityList();
updateSelectedEntities(selectedEntities);
}
function refreshEntityList() {
PROFILE("refresh-entity-list", function() {
PROFILE("filter", function() {
@ -300,44 +313,95 @@ function loaded() {
});
}
});
PROFILE("sort", function() {
let cmp = currentSortOrder === ASCENDING_SORT ? COMPARE_ASCENDING : COMPARE_DESCENDING;
visibleEntities.sort(cmp);
});
PROFILE("update-dom", function() {
elEntityTableBody.innerHTML = '';
for (let i = 0, len = visibleEntities.length; i < len; ++i) {
elEntityTableBody.append(visibleEntities[i].el);
}
entityList.itemData = visibleEntities;
entityList.refresh();
});
refreshFooter();
refreshNoEntitiesMessage();
});
}
function removeEntities(deletedIDs) {
// Loop from the back so we can pop items off while iterating
// delete any entities matching deletedIDs list from entities and entitiesByID lists
// if the entity had an associated row element then ensure the row is unselected and clear its entity
for (let j = entities.length - 1; j >= 0; --j) {
let id = entities[j];
let id = entities[j].id;
for (let i = 0, length = deletedIDs.length; i < length; ++i) {
if (id === deletedIDs[i]) {
let elRow = entities[j].elRow;
if (elRow) {
elRow.className = '';
elRow.dataset.entityID = EMPTY_ENTITY_ID;
}
entities.splice(j, 1);
entitiesByID[id].el.remove();
delete entitiesByID[id];
break;
}
}
}
refreshEntities();
}
function clearEntities() {
entities = []
entitiesByID = {};
visibleEntities = [];
elEntityTableBody.innerHTML = '';
// delete any entities matching deletedIDs list from selectedEntities list
for (let j = selectedEntities.length - 1; j >= 0; --j) {
let id = selectedEntities[j].id;
for (let i = 0, length = deletedIDs.length; i < length; ++i) {
if (id === deletedIDs[i]) {
selectedEntities.splice(j, 1);
break;
}
}
}
// delete any entities matching deletedIDs list from visibleEntities list
// if this was a row that was above our current row offset (a hidden top row in the top buffer),
// then decrease row offset accordingly
let firstVisibleRow = entityList.getFirstVisibleRowIndex();
for (let j = visibleEntities.length - 1; j >= 0; --j) {
let id = visibleEntities[j].id;
for (let i = 0, length = deletedIDs.length; i < length; ++i) {
if (id === deletedIDs[i]) {
if (j < firstVisibleRow && entityList.rowOffset > 0) {
entityList.rowOffset--;
}
visibleEntities.splice(j, 1);
break;
}
}
}
entityList.refresh();
refreshFooter();
refreshNoEntitiesMessage();
}
function clearEntities() {
// clear the associated entity ID from all visible row elements
let firstVisibleRow = entityList.getFirstVisibleRowIndex();
let lastVisibleRow = entityList.getLastVisibleRowIndex();
for (let i = firstVisibleRow; i <= lastVisibleRow && i < visibleEntities.length; i++) {
let entity = visibleEntities[i];
entity.elRow.dataset.entityID = EMPTY_ENTITY_ID;
}
entities = [];
entitiesByID = {};
visibleEntities.length = 0; // maintains itemData reference in ListView
entityList.resetToTop();
entityList.clear();
refreshFooter();
refreshNoEntitiesMessage();
}
var elSortOrder = {
@ -363,17 +427,18 @@ function loaded() {
currentSortColumn = column;
currentSortOrder = ASCENDING_SORT;
}
elSortOrder[column].innerHTML = currentSortOrder === ASCENDING_SORT ? ASCENDING_STRING : DESCENDING_STRING;
refreshSortOrder();
refreshEntityList();
});
}
setSortColumn('type');
function refreshSortOrder() {
elSortOrder[currentSortColumn].innerHTML = currentSortOrder === ASCENDING_SORT ? ASCENDING_STRING : DESCENDING_STRING;
}
function refreshEntities() {
clearEntities();
EventBridge.emitWebEvent(JSON.stringify({ type: 'refresh' }));
}
function refreshFooter() {
if (selectedEntities.length > 1) {
elFooter.firstChild.nodeValue = selectedEntities.length + " entities selected";
@@ -385,76 +450,142 @@ function loaded() {
elFooter.firstChild.nodeValue = visibleEntities.length + " entities found";
}
}
function refreshNoEntitiesMessage() {
if (visibleEntities.length > 0) {
elNoEntitiesMessage.style.display = "none";
} else {
elNoEntitiesMessage.style.display = "block";
}
}
function updateSelectedEntities(selectedIDs) {
let notFound = false;
// reset all currently selected entities and their rows first
selectedEntities.forEach(function(id) {
let entity = entitiesByID[id];
if (entity !== undefined) {
entity.selected = false;
if (entity.elRow) {
entity.elRow.className = '';
}
}
});
// then reset selected entities list with newly selected entities and set them selected
selectedEntities = [];
selectedIDs.forEach(function(id) {
selectedEntities.push(id);
let entity = entitiesByID[id];
if (entity !== undefined) {
entity.selected = true;
if (entity.elRow) {
entity.elRow.className = 'selected';
}
} else {
notFound = true;
}
});
refreshFooter();
return notFound;
}
elRefresh.onclick = function() {
refreshEntities();
}
function isGlyphColumn(columnIndex) {
return columnIndex === COLUMN_INDEX.LOCKED || columnIndex === COLUMN_INDEX.VISIBLE ||
columnIndex === COLUMN_INDEX.HAS_TRANSPARENT || columnIndex === COLUMN_INDEX.IS_BAKED ||
columnIndex === COLUMN_INDEX.HAS_SCRIPT;
}
elToggleLocked.onclick = function () {
EventBridge.emitWebEvent(JSON.stringify({ type: 'toggleLocked' }));
}
elToggleVisible.onclick = function () {
EventBridge.emitWebEvent(JSON.stringify({ type: 'toggleVisible' }));
}
elExport.onclick = function() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'export'}));
}
elPal.onclick = function () {
EventBridge.emitWebEvent(JSON.stringify({ type: 'pal' }));
}
elDelete.onclick = function() {
EventBridge.emitWebEvent(JSON.stringify({ type: 'delete' }));
}
function createRow() {
let row = document.createElement("tr");
for (let i = 0; i < NUM_COLUMNS; i++) {
let column = document.createElement("td");
if (isGlyphColumn(i)) {
column.className = 'glyph';
}
row.appendChild(column);
}
row.onclick = onRowClicked;
row.ondblclick = onRowDoubleClicked;
return row;
}
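// onRowClicked and onRowDoubleClicked are defined elsewhere in this file; a handler of
// roughly this shape would fit the wiring above (the name and body here are assumptions):
//     function onRowClicked(clickEvent) {
//         let entityID = this.dataset.entityID; // rows carry their entity ID (see updateRow)
//         // ...update selectedEntities and notify the interface side over the EventBridge...
//     }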
function updateRow(elRow, itemData) {
// update all column texts and glyphs to this entity's data
let typeCell = elRow.childNodes[COLUMN_INDEX.TYPE];
typeCell.innerText = itemData.type;
let nameCell = elRow.childNodes[COLUMN_INDEX.NAME];
nameCell.innerText = itemData.name;
let urlCell = elRow.childNodes[COLUMN_INDEX.URL];
urlCell.innerText = itemData.url;
let lockedCell = elRow.childNodes[COLUMN_INDEX.LOCKED];
lockedCell.innerHTML = itemData.locked ? LOCKED_GLYPH : null;
let visibleCell = elRow.childNodes[COLUMN_INDEX.VISIBLE];
visibleCell.innerHTML = itemData.visible ? VISIBLE_GLYPH : null;
let verticesCountCell = elRow.childNodes[COLUMN_INDEX.VERTICLES_COUNT];
verticesCountCell.innerText = itemData.verticesCount;
let texturesCountCell = elRow.childNodes[COLUMN_INDEX.TEXTURES_COUNT];
texturesCountCell.innerText = itemData.texturesCount;
let texturesSizeCell = elRow.childNodes[COLUMN_INDEX.TEXTURES_SIZE];
texturesSizeCell.innerText = itemData.texturesSize;
let hasTransparentCell = elRow.childNodes[COLUMN_INDEX.HAS_TRANSPARENT];
hasTransparentCell.innerHTML = itemData.hasTransparent ? TRANSPARENCY_GLYPH : null;
let isBakedCell = elRow.childNodes[COLUMN_INDEX.IS_BAKED];
isBakedCell.innerHTML = itemData.isBaked ? BAKED_GLYPH : null;
let drawCallsCell = elRow.childNodes[COLUMN_INDEX.DRAW_CALLS];
drawCallsCell.innerText = itemData.drawCalls;
let hasScriptCell = elRow.childNodes[COLUMN_INDEX.HAS_SCRIPT];
hasScriptCell.innerHTML = itemData.hasScript ? SCRIPT_GLYPH : null;
// if this entity was previously selected, flag its row as selected
if (itemData.selected) {
elRow.className = 'selected';
} else {
elRow.className = '';
}
// if this row previously had an associated entity ID that wasn't the new entity ID then clear
// the ID from the row and the row element from the previous entity's data, then set the new
// entity ID to the row and the row element to the new entity's data
let prevEntityID = elRow.dataset.entityID;
let newEntityID = itemData.id;
let validPrevItemID = prevEntityID !== undefined && prevEntityID !== EMPTY_ENTITY_ID;
if (validPrevItemID && prevEntityID !== newEntityID && entitiesByID[prevEntityID].elRow === elRow) {
elRow.dataset.entityID = EMPTY_ENTITY_ID;
entitiesByID[prevEntityID].elRow = null;
}
if (!validPrevItemID || prevEntityID !== newEntityID) {
elRow.dataset.entityID = newEntityID;
entitiesByID[newEntityID].elRow = elRow;
}
}
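// Worked example of the re-binding above: if this recycled row element previously displayed
// entity A (elRow.dataset.entityID === A.id) and is now updated to show entity B, A's elRow
// is nulled, the row's dataset.entityID becomes B.id, and B.elRow points at this element,
// keeping the row <-> entity mapping one-to-one in both directions.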
function clearRow(elRow) {
// reset all texts and glyphs for each of the row's columns
for (let i = 0; i < NUM_COLUMNS; i++) {
let cell = elRow.childNodes[i];
if (isGlyphColumn(i)) {
cell.innerHTML = "";
} else {
cell.innerText = "";
}
}
// clear the row from any associated entity
let entityID = elRow.dataset.entityID;
if (entityID && entitiesByID[entityID]) {
entitiesByID[entityID].elRow = null;
}
// deselect the row and clear its associated entity ID
elRow.className = '';
elRow.dataset.entityID = EMPTY_ENTITY_ID;
}
var isFilterInView = false;
var FILTER_IN_VIEW_ATTRIBUTE = "pressed";
elNoEntitiesInView.style.display = "none";
elInView.onclick = toggleFilterInView;
function toggleFilterInView() {
isFilterInView = !isFilterInView;
if (isFilterInView) {
elInView.setAttribute(FILTER_IN_VIEW_ATTRIBUTE, FILTER_IN_VIEW_ATTRIBUTE);
@@ -466,100 +597,20 @@ function loaded() {
EventBridge.emitWebEvent(JSON.stringify({ type: "filterInView", filterInView: isFilterInView }));
refreshEntities();
}
elRadius.onchange = onRadiusChange;
function onFilterChange() {
refreshEntityList();
entityList.resize();
}
function onRadiusChange() {
elRadius.value = Math.max(elRadius.value, 0);
elNoEntitiesMessage.style.display = "none";
EventBridge.emitWebEvent(JSON.stringify({ type: 'radius', radius: elRadius.value }));
refreshEntities();
elNoEntitiesRadius.firstChild.nodeValue = elRadius.value;
}
elFilter.onkeyup = refreshEntityList;
elFilter.onpaste = refreshEntityList;
elFilter.onchange = onFilterChange;
elFilter.onblur = refreshFooter;
var showExtraInfo = false;
var COLLAPSE_EXTRA_INFO = "E";
var EXPAND_EXTRA_INFO = "D";
function toggleInfo(event) {
showExtraInfo = !showExtraInfo;
if (showExtraInfo) {
@@ -569,15 +620,62 @@ function loaded() {
elEntityTable.className = "";
elInfoToggleGlyph.innerHTML = EXPAND_EXTRA_INFO;
}
entityList.resize();
event.stopPropagation();
}
elInfoToggle.addEventListener("click", toggleInfo, true);
document.addEventListener("keydown", function (keyDownEvent) {
if (keyDownEvent.target.nodeName === "INPUT") {
return;
}
let keyCode = keyDownEvent.keyCode;
if (keyCode === DELETE) {
EventBridge.emitWebEvent(JSON.stringify({ type: 'delete' }));
}
if (keyDownEvent.keyCode === KEY_P && keyDownEvent.ctrlKey) {
if (keyDownEvent.shiftKey) {
EventBridge.emitWebEvent(JSON.stringify({ type: 'unparent' }));
} else {
EventBridge.emitWebEvent(JSON.stringify({ type: 'parent' }));
}
}
}, false);
if (window.EventBridge !== undefined) {
EventBridge.scriptEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.type === "clearEntityList") {
clearEntities();
} else if (data.type === "selectionUpdate") {
let notFound = updateSelectedEntities(data.selectedIDs);
if (notFound) {
refreshEntities();
}
} else if (data.type === "update" && data.selectedIDs !== undefined) {
PROFILE("update", function() {
let newEntities = data.entities;
if (newEntities) {
if (newEntities.length === 0) {
clearEntities();
} else {
updateEntityData(newEntities);
updateSelectedEntities(data.selectedIDs);
}
}
});
} else if (data.type === "removeEntities" && data.deletedIDs !== undefined && data.selectedIDs !== undefined) {
removeEntities(data.deletedIDs);
updateSelectedEntities(data.selectedIDs);
} else if (data.type === "deleted" && data.ids) {
removeEntities(data.ids);
}
});
}
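// Example payloads for the branches above (shapes taken from the handler; values illustrative):
//     { "type": "clearEntityList" }
//     { "type": "selectionUpdate", "selectedIDs": ["<id>"] }
//     { "type": "update", "entities": [ /* row item data */ ], "selectedIDs": [] }
//     { "type": "removeEntities", "deletedIDs": ["<id>"], "selectedIDs": [] }
//     { "type": "deleted", "ids": ["<id>"] }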
refreshSortOrder();
refreshEntities();
});
augmentSpinButtons();
// Disable right-click context menu which is not visible in the HMD and makes it seem like the app has locked

View file

@@ -0,0 +1,307 @@
// listView.js
//
// Created by David Back on 27 Aug 2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
const SCROLL_ROWS = 2; // number of rows used as scrolling buffer; each time we scroll past this many rows the visible rows shift
const FIRST_ROW_INDEX = 3; // the first elRow element's index in the child nodes of the table body
var debugPrint = function (message) {
console.log(message);
};
function ListView(elTableBody, elTableScroll, elTableHeaderRow, createRowFunction,
updateRowFunction, clearRowFunction, WINDOW_NONVARIABLE_HEIGHT) {
this.elTableBody = elTableBody;
this.elTableScroll = elTableScroll;
this.elTableHeaderRow = elTableHeaderRow;
this.elTopBuffer = null;
this.elBottomBuffer = null;
this.createRowFunction = createRowFunction;
this.updateRowFunction = updateRowFunction;
this.clearRowFunction = clearRowFunction;
// the list of row elements created in the table up to max viewable height plus SCROLL_ROWS rows for scrolling buffer
this.elRows = [];
// the list of all row item data to show in the scrolling table, passed to updateRowFunction to set to each row
this.itemData = [];
// the current index within the itemData list that is set to the top most elRow element
this.rowOffset = 0;
// height of the elRow elements
this.rowHeight = 0;
// the previous elTableScroll.scrollTop value when the elRows were last shifted for scrolling
this.lastRowShiftScrollTop = 0;
// fixed, non-table height in the window (header, footer, etc.), used by resize()
this.WINDOW_NONVARIABLE_HEIGHT = WINDOW_NONVARIABLE_HEIGHT;
this.initialize();
};
ListView.prototype = {
getNumRows: function() {
return this.elRows.length;
},
getScrollHeight: function() {
return this.rowHeight * SCROLL_ROWS;
},
getFirstVisibleRowIndex: function() {
return this.rowOffset;
},
getLastVisibleRowIndex: function() {
return this.getFirstVisibleRowIndex() + this.getNumRows() - 1;
},
resetToTop: function() {
this.rowOffset = 0;
this.lastRowShiftScrollTop = 0;
this.refreshBuffers();
this.elTableScroll.scrollTop = 0;
},
clear: function() {
for (let i = 0; i < this.getNumRows(); i++) {
let elRow = this.elTableBody.childNodes[i + FIRST_ROW_INDEX];
this.clearRowFunction(elRow);
elRow.style.display = "none"; // hide cleared rows
}
},
onScroll: function() {
var that = this.listView;
that.scroll();
},
scroll: function() {
let scrollTop = this.elTableScroll.scrollTop;
let scrollHeight = this.getScrollHeight();
let nextRowChangeScrollTop = this.lastRowShiftScrollTop + scrollHeight;
let totalItems = this.itemData.length;
let numRows = this.getNumRows();
// if the top of the scroll area has passed the next shift point (the last shift point plus the
// scroll buffer height) and there are still rows below, scroll down by the scroll buffer row
// count, clamped to the number of remaining rows below;
// if the top of the scroll area has moved back above the last shift point, scroll up by the
// scroll buffer row count, clamped to the number of rows above
if (scrollTop >= nextRowChangeScrollTop && numRows + this.rowOffset < totalItems) {
let numScrolls = Math.ceil((scrollTop - nextRowChangeScrollTop) / scrollHeight);
let numScrollRows = numScrolls * SCROLL_ROWS;
if (numScrollRows + this.rowOffset + numRows > totalItems) {
numScrollRows = totalItems - this.rowOffset - numRows;
}
this.scrollRows(numScrollRows);
} else if (scrollTop < this.lastRowShiftScrollTop) {
let numScrolls = Math.ceil((this.lastRowShiftScrollTop - scrollTop) / scrollHeight);
let numScrollRows = numScrolls * SCROLL_ROWS;
if (this.rowOffset - numScrollRows < 0) {
numScrollRows = this.rowOffset;
}
this.scrollRows(-numScrollRows);
}
},
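// Worked example: with rowHeight 20 and SCROLL_ROWS 2, getScrollHeight() is 40. If
// lastRowShiftScrollTop is 0 and the user scrolls to scrollTop 85, nextRowChangeScrollTop
// is 40, numScrolls = ceil((85 - 40) / 40) = 2, and scrollRows(4) recycles four rows
// downward (clamped if fewer than four items remain below the current window).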
scrollRows: function(numScrollRows) {
let numScrollRowsAbsolute = Math.abs(numScrollRows);
if (numScrollRowsAbsolute === 0) {
return;
}
let scrollDown = numScrollRows > 0;
let prevTopHeight = parseInt(this.elTopBuffer.getAttribute("height"));
let prevBottomHeight = parseInt(this.elBottomBuffer.getAttribute("height"));
// if the number of rows to scroll at once is greater than the total visible number of row elements,
// then just advance the rowOffset accordingly and allow the refresh below to update all rows
if (numScrollRowsAbsolute > this.getNumRows()) {
this.rowOffset += numScrollRows;
} else {
// for each row scrolled down, move the top row element to the bottom of the table
// (just before the bottom buffer) and reset its row data to the new item;
// for each row scrolled up, move the bottom row element to the top of the table
// (just before the top row) and reset its row data to the new item
for (let i = 0; i < numScrollRowsAbsolute; i++) {
let topRow = this.elTableBody.childNodes[FIRST_ROW_INDEX];
let rowToMove = scrollDown ? topRow : this.elTableBody.childNodes[FIRST_ROW_INDEX + this.getNumRows() - 1];
let rowIndex = scrollDown ? this.getNumRows() + this.rowOffset : this.rowOffset - 1;
let moveRowBefore = scrollDown ? this.elBottomBuffer : topRow;
this.elTableBody.removeChild(rowToMove);
this.elTableBody.insertBefore(rowToMove, moveRowBefore);
this.updateRowFunction(rowToMove, this.itemData[rowIndex]);
this.rowOffset += scrollDown ? 1 : -1;
}
}
// add/remove the row space that was scrolled away to the top buffer height and last scroll point
// add/remove the row space that was scrolled away to the bottom buffer height
let scrolledSpace = this.rowHeight * numScrollRows;
let newTopHeight = prevTopHeight + scrolledSpace;
let newBottomHeight = prevBottomHeight - scrolledSpace;
this.elTopBuffer.setAttribute("height", newTopHeight);
this.elBottomBuffer.setAttribute("height", newBottomHeight);
this.lastRowShiftScrollTop += scrolledSpace;
// if scrolling more than the total number of visible rows at once then refresh all row data
if (numScrollRowsAbsolute > this.getNumRows()) {
this.refresh();
}
},
refresh: function() {
// block refreshing before rows are initialized
let numRows = this.getNumRows();
if (numRows === 0) {
return;
}
let prevScrollTop = this.elTableScroll.scrollTop;
// start with all row data cleared and initially set to invisible
this.clear();
// if we are at the bottom of the list adjust row offset to make sure all rows stay in view
this.refreshRowOffset();
// update all row data and set rows visible until max visible items reached
for (let i = 0; i < numRows; i++) {
let rowIndex = i + this.rowOffset;
if (rowIndex >= this.itemData.length) {
break;
}
let rowElementIndex = i + FIRST_ROW_INDEX;
let elRow = this.elTableBody.childNodes[rowElementIndex];
let itemData = this.itemData[rowIndex];
this.updateRowFunction(elRow, itemData);
elRow.style.display = ""; // make sure the row is visible
}
// update the top and bottom buffer heights to adjust for above changes
this.refreshBuffers();
// adjust the last row shift scroll point based on how much the current scroll point changed
let scrollTopDifference = this.elTableScroll.scrollTop - prevScrollTop;
if (scrollTopDifference !== 0) {
this.lastRowShiftScrollTop += scrollTopDifference;
if (this.lastRowShiftScrollTop < 0) {
this.lastRowShiftScrollTop = 0;
}
}
},
refreshBuffers: function() {
// top buffer height is the number of hidden rows above the top row
let topHiddenRows = this.rowOffset;
let topBufferHeight = this.rowHeight * topHiddenRows;
this.elTopBuffer.setAttribute("height", topBufferHeight);
// bottom buffer height is the number of hidden rows below the bottom row (last scroll buffer row)
let bottomHiddenRows = this.itemData.length - this.getNumRows() - this.rowOffset;
let bottomBufferHeight = this.rowHeight * bottomHiddenRows;
if (bottomHiddenRows < 0) {
bottomBufferHeight = 0;
}
this.elBottomBuffer.setAttribute("height", bottomBufferHeight);
},
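// Worked example: with 100 items, 25 row elements, rowOffset 10, and rowHeight 20, the top
// buffer is 10 * 20 = 200px and the bottom buffer is (100 - 25 - 10) * 20 = 1300px, so the
// scrollbar behaves as if all 100 rows existed while only 25 <tr> elements are in the DOM.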
refreshRowOffset: function() {
// clamp the row offset so the visible rows never run past the end of the item list, and never below 0
var numRows = this.getNumRows();
if (this.rowOffset + numRows > this.itemData.length) {
this.rowOffset = this.itemData.length - numRows;
}
if (this.rowOffset < 0) {
this.rowOffset = 0;
}
},
onResize: function() {
var that = this.listView;
that.resize();
},
resize: function() {
if (!this.elTableBody || !this.elTableScroll) {
debugPrint("ListView.resize - no valid table body or table scroll element");
return;
}
let prevScrollTop = this.elTableScroll.scrollTop;
// take up available window space
this.elTableScroll.style.height = (window.innerHeight - this.WINDOW_NONVARIABLE_HEIGHT) + "px";
let viewableHeight = parseInt(this.elTableScroll.style.height) + 1;
// remove all existing row elements and clear row list
for (let i = 0; i < this.getNumRows(); i++) {
let elRow = this.elRows[i];
this.elTableBody.removeChild(elRow);
}
this.elRows = [];
// create new row elements inserted between the top and bottom buffers up until the max viewable scroll area
let usedHeight = 0;
while (usedHeight < viewableHeight) {
let newRow = this.createRowFunction();
this.elTableBody.insertBefore(newRow, this.elBottomBuffer);
this.rowHeight = newRow.offsetHeight;
this.elRows.push(newRow);
usedHeight += this.rowHeight;
if (this.rowHeight === 0) {
// guard against an unmeasurable (hidden/unstyled) row, which would otherwise loop forever
debugPrint("ListView.resize - measured row height of 0, stopping row creation");
break;
}
}
// add SCROLL_ROWS extras rows for scrolling buffer purposes
for (let i = 0; i < SCROLL_ROWS; i++) {
let scrollRow = this.createRowFunction();
this.elTableBody.insertBefore(scrollRow, this.elBottomBuffer);
this.elRows.push(scrollRow);
}
let ths = this.elTableHeaderRow;
let tds = this.getNumRows() > 0 ? this.elRows[0].childNodes : [];
if (!ths) {
debugPrint("ListView.resize - no valid table header row");
} else if (tds.length !== ths.length) {
debugPrint("ListView.resize - td list size " + tds.length + " does not match th list size " + ths.length);
} else {
// update the widths of the header cells to match the body cells (using the first body row)
for (let i = 0; i < ths.length; i++) {
ths[i].width = tds[i].offsetWidth;
}
}
// restore the scroll point to the same scroll point from before above changes
this.elTableScroll.scrollTop = prevScrollTop;
this.refresh();
},
initialize: function() {
if (!this.elTableBody || !this.elTableScroll) {
debugPrint("ListView.initialize - no valid table body or table scroll element");
return;
}
// delete initial blank row
this.elTableBody.deleteRow(0);
this.elTopBuffer = document.createElement("tr");
this.elTableBody.appendChild(this.elTopBuffer);
this.elTopBuffer.setAttribute("height", 0);
this.elBottomBuffer = document.createElement("tr");
this.elTableBody.appendChild(this.elBottomBuffer);
this.elBottomBuffer.setAttribute("height", 0);
this.elTableScroll.listView = this;
this.elTableScroll.onscroll = this.onScroll;
window.listView = this;
window.onresize = this.onResize;
// initialize all row elements
this.resize();
}
};
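// Usage sketch (illustrative; the actual wiring lives in entityList.html, whose diff is
// suppressed below, so the element names and the 207px constant here are assumptions):
//     var WINDOW_NONVARIABLE_HEIGHT = 207; // header, footer, and other fixed chrome
//     var entityList = new ListView(elEntityTableBody, elEntityTableScroll, elEntityTableHeaderRow,
//         createRow, updateRow, clearRow, WINDOW_NONVARIABLE_HEIGHT);
//     entityList.itemData = visibleEntities; // share the filtered/sorted item array
//     entityList.refresh();                  // fill the visible rows from itemData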

File diff suppressed because it is too large