mirror of
https://github.com/overte-org/overte.git
synced 2025-04-09 07:12:45 +02:00
Merge branch 'master' of https://github.com/highfidelity/hifi into punk
This commit is contained in:
commit
60765902fb
131 changed files with 5004 additions and 2473 deletions
17
BUILD.md
17
BUILD.md
|
@ -1,3 +1,10 @@
|
|||
### OS Specific Build Guides
|
||||
|
||||
* [BUILD_WIN.md](BUILD_WIN.md) - complete instructions for Windows.
|
||||
* [BUILD_OSX.md](BUILD_OSX.md) - additional instructions for OS X.
|
||||
* [BUILD_LINUX.md](BUILD_LINUX.md) - additional instructions for Linux.
|
||||
* [BUILD_ANDROID.md](BUILD_ANDROID.md) - additional instructions for Android
|
||||
|
||||
### Dependencies
|
||||
|
||||
- [cmake](https://cmake.org/download/): 3.9
|
||||
|
@ -27,14 +34,7 @@ These are not placed in your normal build tree when doing an out of source build
|
|||
|
||||
If you would like to use a specific install of a dependency instead of the version that would be grabbed as a CMake ExternalProject, you can pass -DUSE\_LOCAL\_$NAME=0 (where $NAME is the name of the subfolder in [cmake/externals](cmake/externals)) when you run CMake to tell it not to get that dependency as an external project.
|
||||
|
||||
### OS Specific Build Guides
|
||||
|
||||
* [BUILD_OSX.md](BUILD_OSX.md) - additional instructions for OS X.
|
||||
* [BUILD_LINUX.md](BUILD_LINUX.md) - additional instructions for Linux.
|
||||
* [BUILD_WIN.md](BUILD_WIN.md) - additional instructions for Windows.
|
||||
* [BUILD_ANDROID.md](BUILD_ANDROID.md) - additional instructions for Android
|
||||
|
||||
### CMake
|
||||
#### CMake
|
||||
|
||||
Hifi uses CMake to generate build files and project files for your platform.
|
||||
|
||||
|
@ -80,6 +80,7 @@ In the examples below the variable $NAME would be replaced by the name of the de
|
|||
* $NAME_ROOT_DIR - set this variable in your ENV
|
||||
* HIFI_LIB_DIR - set this variable in your ENV to your High Fidelity lib folder, should contain a folder '$name'
|
||||
|
||||
|
||||
### Optional Components
|
||||
|
||||
#### Devices
|
||||
|
|
|
@ -145,5 +145,7 @@ dependencies {
|
|||
compile 'com.squareup.retrofit2:converter-gson:2.4.0'
|
||||
implementation 'com.squareup.picasso:picasso:2.71828'
|
||||
|
||||
compile 'com.sothree.slidinguppanel:library:3.4.0'
|
||||
|
||||
implementation fileTree(include: ['*.jar'], dir: 'libs')
|
||||
}
|
||||
|
|
|
@ -209,6 +209,11 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeGotoUr
|
|||
DependencyManager::get<AddressManager>()->loadSettings(jniUrl.toString());
|
||||
}
|
||||
|
||||
JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeGoToUser(JNIEnv* env, jobject obj, jstring username) {
|
||||
QAndroidJniObject jniUsername("java/lang/String", "(Ljava/lang/String;)V", username);
|
||||
DependencyManager::get<AddressManager>()->goToUser(jniUsername.toString(), false);
|
||||
}
|
||||
|
||||
JNIEXPORT void Java_io_highfidelity_hifiinterface_InterfaceActivity_nativeOnPause(JNIEnv* env, jobject obj) {
|
||||
}
|
||||
|
||||
|
@ -285,6 +290,18 @@ Java_io_highfidelity_hifiinterface_fragment_LoginFragment_nativeLogin(JNIEnv *en
|
|||
Q_ARG(const QString&, username), Q_ARG(const QString&, password));
|
||||
}
|
||||
|
||||
JNIEXPORT jboolean JNICALL
|
||||
Java_io_highfidelity_hifiinterface_fragment_FriendsFragment_nativeIsLoggedIn(JNIEnv *env, jobject instance) {
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
return accountManager->isLoggedIn();
|
||||
}
|
||||
|
||||
JNIEXPORT jstring JNICALL
|
||||
Java_io_highfidelity_hifiinterface_fragment_FriendsFragment_nativeGetAccessToken(JNIEnv *env, jobject instance) {
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
return env->NewStringUTF(accountManager->getAccountInfo().getAccessToken().token.toLatin1().data());
|
||||
}
|
||||
|
||||
JNIEXPORT void JNICALL
|
||||
Java_io_highfidelity_hifiinterface_SplashActivity_registerLoadCompleteListener(JNIEnv *env,
|
||||
jobject instance) {
|
||||
|
|
|
@ -48,6 +48,7 @@ import com.google.vr.ndk.base.GvrApi;*/
|
|||
public class InterfaceActivity extends QtActivity implements WebViewFragment.OnWebViewInteractionListener {
|
||||
|
||||
public static final String DOMAIN_URL = "url";
|
||||
public static final String EXTRA_GOTO_USERNAME = "gotousername";
|
||||
private static final String TAG = "Interface";
|
||||
private static final int WEB_DRAWER_RIGHT_MARGIN = 262;
|
||||
private static final int WEB_DRAWER_BOTTOM_MARGIN = 150;
|
||||
|
@ -59,6 +60,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
private native long nativeOnCreate(InterfaceActivity instance, AssetManager assetManager);
|
||||
private native void nativeOnDestroy();
|
||||
private native void nativeGotoUrl(String url);
|
||||
private native void nativeGoToUser(String username);
|
||||
private native void nativeBeforeEnterBackground();
|
||||
private native void nativeEnterBackground();
|
||||
private native void nativeEnterForeground();
|
||||
|
@ -280,6 +282,9 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
|
|||
if (intent.hasExtra(DOMAIN_URL)) {
|
||||
webSlidingDrawer.setVisibility(View.GONE);
|
||||
nativeGotoUrl(intent.getStringExtra(DOMAIN_URL));
|
||||
} else if (intent.hasExtra(EXTRA_GOTO_USERNAME)) {
|
||||
webSlidingDrawer.setVisibility(View.GONE);
|
||||
nativeGoToUser(intent.getStringExtra(EXTRA_GOTO_USERNAME));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ import android.widget.TextView;
|
|||
import com.squareup.picasso.Callback;
|
||||
import com.squareup.picasso.Picasso;
|
||||
|
||||
import io.highfidelity.hifiinterface.fragment.FriendsFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.HomeFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.LoginFragment;
|
||||
import io.highfidelity.hifiinterface.fragment.PolicyFragment;
|
||||
|
@ -36,7 +37,8 @@ import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
|
|||
|
||||
public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener,
|
||||
LoginFragment.OnLoginInteractionListener,
|
||||
HomeFragment.OnHomeInteractionListener {
|
||||
HomeFragment.OnHomeInteractionListener,
|
||||
FriendsFragment.OnHomeInteractionListener {
|
||||
|
||||
private static final int PROFILE_PICTURE_PLACEHOLDER = R.drawable.default_profile_avatar;
|
||||
public static final String DEFAULT_FRAGMENT = "Home";
|
||||
|
@ -56,6 +58,7 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
private View mLoginPanel;
|
||||
private View mProfilePanel;
|
||||
private TextView mLogoutOption;
|
||||
private MenuItem mPeopleMenuItem;
|
||||
|
||||
private boolean backToScene;
|
||||
|
||||
|
@ -75,6 +78,8 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
mDisplayName = mNavigationView.getHeaderView(0).findViewById(R.id.displayName);
|
||||
mProfilePicture = mNavigationView.getHeaderView(0).findViewById(R.id.profilePicture);
|
||||
|
||||
mPeopleMenuItem = mNavigationView.getMenu().findItem(R.id.action_people);
|
||||
|
||||
Toolbar toolbar = findViewById(R.id.toolbar);
|
||||
toolbar.setTitleTextAppearance(this, R.style.HomeActionBarTitleStyle);
|
||||
setSupportActionBar(toolbar);
|
||||
|
@ -109,40 +114,69 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
loadLoginFragment();
|
||||
break;
|
||||
case "Home":
|
||||
loadHomeFragment();
|
||||
loadHomeFragment(true);
|
||||
break;
|
||||
case "Privacy Policy":
|
||||
loadPrivacyPolicyFragment();
|
||||
break;
|
||||
case "People":
|
||||
loadPeopleFragment();
|
||||
break;
|
||||
default:
|
||||
Log.e(TAG, "Unknown fragment " + fragment);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void loadHomeFragment() {
|
||||
private void loadHomeFragment(boolean addToBackStack) {
|
||||
Fragment fragment = HomeFragment.newInstance();
|
||||
loadFragment(fragment, getString(R.string.home), false);
|
||||
loadFragment(fragment, getString(R.string.home), getString(R.string.tagFragmentHome), addToBackStack);
|
||||
}
|
||||
|
||||
private void loadLoginFragment() {
|
||||
Fragment fragment = LoginFragment.newInstance();
|
||||
|
||||
loadFragment(fragment, getString(R.string.login), true);
|
||||
loadFragment(fragment, getString(R.string.login), getString(R.string.tagFragmentLogin), true);
|
||||
}
|
||||
|
||||
private void loadPrivacyPolicyFragment() {
|
||||
Fragment fragment = PolicyFragment.newInstance();
|
||||
|
||||
loadFragment(fragment, getString(R.string.privacyPolicy), true);
|
||||
loadFragment(fragment, getString(R.string.privacyPolicy), getString(R.string.tagFragmentPolicy), true);
|
||||
}
|
||||
|
||||
private void loadFragment(Fragment fragment, String title, boolean addToBackStack) {
|
||||
private void loadPeopleFragment() {
|
||||
Fragment fragment = FriendsFragment.newInstance();
|
||||
|
||||
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true);
|
||||
}
|
||||
|
||||
private void loadFragment(Fragment fragment, String title, String tag, boolean addToBackStack) {
|
||||
FragmentManager fragmentManager = getFragmentManager();
|
||||
|
||||
// check if it's the same fragment
|
||||
String currentFragmentName = fragmentManager.getBackStackEntryCount() > 0
|
||||
? fragmentManager.getBackStackEntryAt(fragmentManager.getBackStackEntryCount() - 1).getName()
|
||||
: "";
|
||||
if (currentFragmentName.equals(title)) {
|
||||
mDrawerLayout.closeDrawer(mNavigationView);
|
||||
return; // cancel as we are already in that fragment
|
||||
}
|
||||
|
||||
// go back until first transaction
|
||||
int backStackEntryCount = fragmentManager.getBackStackEntryCount();
|
||||
for (int i = 0; i < backStackEntryCount - 1; i++) {
|
||||
fragmentManager.popBackStackImmediate();
|
||||
}
|
||||
|
||||
// this case is when we wanted to go home.. rollback already did that!
|
||||
// But asking for a new Home fragment makes it easier to have an updated list so we let it to continue
|
||||
|
||||
FragmentTransaction ft = fragmentManager.beginTransaction();
|
||||
ft.replace(R.id.content_frame, fragment);
|
||||
ft.replace(R.id.content_frame, fragment, tag);
|
||||
|
||||
if (addToBackStack) {
|
||||
ft.addToBackStack(null);
|
||||
ft.addToBackStack(title);
|
||||
}
|
||||
ft.commit();
|
||||
setTitle(title);
|
||||
|
@ -155,11 +189,13 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
mLoginPanel.setVisibility(View.GONE);
|
||||
mProfilePanel.setVisibility(View.VISIBLE);
|
||||
mLogoutOption.setVisibility(View.VISIBLE);
|
||||
mPeopleMenuItem.setVisible(true);
|
||||
updateProfileHeader();
|
||||
} else {
|
||||
mLoginPanel.setVisibility(View.VISIBLE);
|
||||
mProfilePanel.setVisibility(View.GONE);
|
||||
mLogoutOption.setVisibility(View.GONE);
|
||||
mPeopleMenuItem.setVisible(false);
|
||||
mDisplayName.setText("");
|
||||
}
|
||||
}
|
||||
|
@ -200,7 +236,10 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
public boolean onNavigationItemSelected(@NonNull MenuItem item) {
|
||||
switch(item.getItemId()) {
|
||||
case R.id.action_home:
|
||||
loadHomeFragment();
|
||||
loadHomeFragment(false);
|
||||
return true;
|
||||
case R.id.action_people:
|
||||
loadPeopleFragment();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
@ -219,6 +258,19 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
public void onLogoutClicked(View view) {
|
||||
nativeLogout();
|
||||
updateLoginMenu();
|
||||
exitLoggedInFragment();
|
||||
|
||||
}
|
||||
|
||||
private void exitLoggedInFragment() {
|
||||
// If we are in a "logged in" fragment (like People), go back to home. This could be expanded to multiple fragments
|
||||
FragmentManager fragmentManager = getFragmentManager();
|
||||
String currentFragmentName = fragmentManager.getBackStackEntryCount() > 0
|
||||
? fragmentManager.getBackStackEntryAt(fragmentManager.getBackStackEntryCount() - 1).getName()
|
||||
: "";
|
||||
if (currentFragmentName.equals(getString(R.string.people))) {
|
||||
loadHomeFragment(false);
|
||||
}
|
||||
}
|
||||
|
||||
public void onSelectedDomain(String domainUrl) {
|
||||
|
@ -237,9 +289,17 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
startActivity(intent);
|
||||
}
|
||||
|
||||
private void goToUser(String username) {
|
||||
Intent intent = new Intent(this, InterfaceActivity.class);
|
||||
intent.putExtra(InterfaceActivity.EXTRA_GOTO_USERNAME, username);
|
||||
finish();
|
||||
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
|
||||
startActivity(intent);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLoginCompleted() {
|
||||
loadHomeFragment();
|
||||
loadHomeFragment(false);
|
||||
updateLoginMenu();
|
||||
if (backToScene) {
|
||||
backToScene = false;
|
||||
|
@ -266,6 +326,11 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
loadPrivacyPolicyFragment();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onVisitUserSelected(String username) {
|
||||
goToUser(username);
|
||||
}
|
||||
|
||||
private class RoundProfilePictureCallback implements Callback {
|
||||
@Override
|
||||
public void onSuccess() {
|
||||
|
@ -284,15 +349,30 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
|
|||
|
||||
@Override
|
||||
public void onBackPressed() {
|
||||
int index = getFragmentManager().getBackStackEntryCount() - 1;
|
||||
if (index > -1) {
|
||||
// if a fragment needs to internally manage back presses..
|
||||
FragmentManager fm = getFragmentManager();
|
||||
Log.d("[BACK]", "getBackStackEntryCount " + fm.getBackStackEntryCount());
|
||||
Fragment friendsFragment = fm.findFragmentByTag(getString(R.string.tagFragmentPeople));
|
||||
if (friendsFragment != null && friendsFragment instanceof FriendsFragment) {
|
||||
if (((FriendsFragment) friendsFragment).onBackPressed()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
int index = fm.getBackStackEntryCount() - 1;
|
||||
|
||||
if (index > 0) {
|
||||
super.onBackPressed();
|
||||
index--;
|
||||
if (index > -1) {
|
||||
setTitle(fm.getBackStackEntryAt(index).getName());
|
||||
}
|
||||
if (backToScene) {
|
||||
backToScene = false;
|
||||
goToLastLocation();
|
||||
}
|
||||
} else {
|
||||
finishAffinity();
|
||||
finishAffinity();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,193 @@
|
|||
package io.highfidelity.hifiinterface.fragment;
|
||||
|
||||
|
||||
import android.app.Fragment;
|
||||
import android.content.Context;
|
||||
import android.content.DialogInterface;
|
||||
import android.os.Bundle;
|
||||
import android.support.v4.widget.SwipeRefreshLayout;
|
||||
import android.support.v7.app.AlertDialog;
|
||||
import android.support.v7.widget.GridLayoutManager;
|
||||
import android.support.v7.widget.RecyclerView;
|
||||
import android.util.Log;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
|
||||
import com.sothree.slidinguppanel.SlidingUpPanelLayout;
|
||||
|
||||
import io.highfidelity.hifiinterface.R;
|
||||
import io.highfidelity.hifiinterface.provider.EndpointUsersProvider;
|
||||
import io.highfidelity.hifiinterface.provider.UsersProvider;
|
||||
import io.highfidelity.hifiinterface.view.UserListAdapter;
|
||||
|
||||
public class FriendsFragment extends Fragment {
|
||||
|
||||
public native boolean nativeIsLoggedIn();
|
||||
|
||||
public native String nativeGetAccessToken();
|
||||
|
||||
private RecyclerView mUsersView;
|
||||
private View mUserActions;
|
||||
private UserListAdapter mUsersAdapter;
|
||||
private SlidingUpPanelLayout mSlidingUpPanelLayout;
|
||||
private EndpointUsersProvider mUsersProvider;
|
||||
private String mSelectedUsername;
|
||||
|
||||
private OnHomeInteractionListener mListener;
|
||||
private SwipeRefreshLayout mSwipeRefreshLayout;
|
||||
|
||||
public FriendsFragment() {
|
||||
// Required empty public constructor
|
||||
}
|
||||
|
||||
public static FriendsFragment newInstance() {
|
||||
FriendsFragment fragment = new FriendsFragment();
|
||||
return fragment;
|
||||
}
|
||||
|
||||
@Override
|
||||
public View onCreateView(LayoutInflater inflater, ViewGroup container,
|
||||
Bundle savedInstanceState) {
|
||||
View rootView = inflater.inflate(R.layout.fragment_friends, container, false);
|
||||
|
||||
String accessToken = nativeGetAccessToken();
|
||||
mUsersProvider = new EndpointUsersProvider(accessToken);
|
||||
|
||||
Log.d("[USERS]", "token : [" + accessToken + "]");
|
||||
|
||||
mSwipeRefreshLayout = rootView.findViewById(R.id.swipeRefreshLayout);
|
||||
|
||||
mUsersView = rootView.findViewById(R.id.rvUsers);
|
||||
int numberOfColumns = 1;
|
||||
GridLayoutManager gridLayoutMgr = new GridLayoutManager(getContext(), numberOfColumns);
|
||||
mUsersView.setLayoutManager(gridLayoutMgr);
|
||||
|
||||
mUsersAdapter = new UserListAdapter(getContext(), mUsersProvider);
|
||||
mSwipeRefreshLayout.setRefreshing(true);
|
||||
|
||||
mUserActions = rootView.findViewById(R.id.userActionsLayout);
|
||||
|
||||
mSlidingUpPanelLayout = rootView.findViewById(R.id.sliding_layout);
|
||||
mSlidingUpPanelLayout.setPanelHeight(0);
|
||||
|
||||
rootView.findViewById(R.id.userActionDelete).setOnClickListener(view -> onRemoveConnectionClick());
|
||||
|
||||
rootView.findViewById(R.id.userActionVisit).setOnClickListener(new View.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(View view) {
|
||||
if (mListener != null && mSelectedUsername != null) {
|
||||
mListener.onVisitUserSelected(mSelectedUsername);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
mUsersAdapter.setClickListener(new UserListAdapter.ItemClickListener() {
|
||||
@Override
|
||||
public void onItemClick(View view, int position, UserListAdapter.User user) {
|
||||
// 1. 'select' user
|
||||
mSelectedUsername = user.name;
|
||||
// ..
|
||||
// 2. adapt options
|
||||
// ..
|
||||
rootView.findViewById(R.id.userActionVisit).setVisibility(user.online ? View.VISIBLE : View.GONE);
|
||||
// 3. show
|
||||
mSlidingUpPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.EXPANDED);
|
||||
}
|
||||
});
|
||||
|
||||
mUsersAdapter.setListener(new UserListAdapter.AdapterListener() {
|
||||
@Override
|
||||
public void onEmptyAdapter() {
|
||||
mSwipeRefreshLayout.setRefreshing(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNonEmptyAdapter() {
|
||||
mSwipeRefreshLayout.setRefreshing(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(Exception e, String message) {
|
||||
mSwipeRefreshLayout.setRefreshing(false);
|
||||
}
|
||||
});
|
||||
|
||||
mUsersView.setAdapter(mUsersAdapter);
|
||||
|
||||
mSlidingUpPanelLayout.setFadeOnClickListener(new View.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(View view) {
|
||||
mSlidingUpPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
|
||||
mSelectedUsername = null;
|
||||
}
|
||||
});
|
||||
|
||||
mSwipeRefreshLayout.setOnRefreshListener(() -> mUsersAdapter.loadUsers());
|
||||
|
||||
return rootView;
|
||||
}
|
||||
|
||||
private void onRemoveConnectionClick() {
|
||||
if (mSelectedUsername == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
|
||||
builder.setMessage("Remove '" + mSelectedUsername + "' from People?");
|
||||
builder.setPositiveButton("Remove", new DialogInterface.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(DialogInterface dialogInterface, int i) {
|
||||
mUsersProvider.removeConnection(mSelectedUsername, new UsersProvider.UserActionCallback() {
|
||||
@Override
|
||||
public void requestOk() {
|
||||
mSlidingUpPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
|
||||
mSelectedUsername = null;
|
||||
mUsersAdapter.loadUsers();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void requestError(Exception e, String message) {
|
||||
// CLD: Show error message?
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(DialogInterface dialogInterface, int i) {
|
||||
// Cancelled, nothing to do
|
||||
}
|
||||
});
|
||||
builder.show();
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes the back pressed event and returns true if it was managed by this Fragment
|
||||
* @return
|
||||
*/
|
||||
public boolean onBackPressed() {
|
||||
if (mSlidingUpPanelLayout.getPanelState().equals(SlidingUpPanelLayout.PanelState.EXPANDED)) {
|
||||
mSlidingUpPanelLayout.setPanelState(SlidingUpPanelLayout.PanelState.COLLAPSED);
|
||||
mSelectedUsername = null;
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onAttach(Context context) {
|
||||
super.onAttach(context);
|
||||
if (context instanceof OnHomeInteractionListener) {
|
||||
mListener = (OnHomeInteractionListener) context;
|
||||
} else {
|
||||
throw new RuntimeException(context.toString()
|
||||
+ " must implement OnHomeInteractionListener");
|
||||
}
|
||||
}
|
||||
|
||||
public interface OnHomeInteractionListener {
|
||||
void onVisitUserSelected(String username);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,225 @@
|
|||
package io.highfidelity.hifiinterface.provider;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import io.highfidelity.hifiinterface.view.UserListAdapter;
|
||||
import okhttp3.Interceptor;
|
||||
import okhttp3.OkHttpClient;
|
||||
import okhttp3.Request;
|
||||
import okhttp3.Response;
|
||||
import retrofit2.Call;
|
||||
import retrofit2.Callback;
|
||||
import retrofit2.Retrofit;
|
||||
import retrofit2.converter.gson.GsonConverterFactory;
|
||||
import retrofit2.http.Body;
|
||||
import retrofit2.http.DELETE;
|
||||
import retrofit2.http.GET;
|
||||
import retrofit2.http.POST;
|
||||
import retrofit2.http.Path;
|
||||
import retrofit2.http.Query;
|
||||
|
||||
/**
|
||||
* Created by cduarte on 6/13/18.
|
||||
*/
|
||||
|
||||
public class EndpointUsersProvider implements UsersProvider {
|
||||
|
||||
public static final String BASE_URL = "https://metaverse.highfidelity.com/";
|
||||
private final Retrofit mRetrofit;
|
||||
private final EndpointUsersProviderService mEndpointUsersProviderService;
|
||||
|
||||
public EndpointUsersProvider(String accessToken) {
|
||||
mRetrofit = createAuthorizedRetrofit(accessToken);
|
||||
mEndpointUsersProviderService = mRetrofit.create(EndpointUsersProviderService.class);
|
||||
}
|
||||
|
||||
private Retrofit createAuthorizedRetrofit(String accessToken) {
|
||||
Retrofit mRetrofit;
|
||||
OkHttpClient.Builder httpClient = new OkHttpClient.Builder();
|
||||
httpClient.addInterceptor(new Interceptor() {
|
||||
@Override
|
||||
public Response intercept(Chain chain) throws IOException {
|
||||
Request original = chain.request();
|
||||
|
||||
Request request = original.newBuilder()
|
||||
.header("Authorization", "Bearer " + accessToken)
|
||||
.method(original.method(), original.body())
|
||||
.build();
|
||||
|
||||
return chain.proceed(request);
|
||||
}
|
||||
});
|
||||
|
||||
OkHttpClient client = httpClient.build();
|
||||
|
||||
mRetrofit = new Retrofit.Builder()
|
||||
.baseUrl(BASE_URL)
|
||||
.addConverterFactory(GsonConverterFactory.create())
|
||||
.client(client)
|
||||
.build();
|
||||
return mRetrofit;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void retrieve(UsersCallback usersCallback) {
|
||||
Call<UsersResponse> friendsCall = mEndpointUsersProviderService.getUsers(
|
||||
CONNECTION_FILTER_CONNECTIONS,
|
||||
400,
|
||||
null);
|
||||
friendsCall.enqueue(new Callback<UsersResponse>() {
|
||||
@Override
|
||||
public void onResponse(Call<UsersResponse> call, retrofit2.Response<UsersResponse> response) {
|
||||
if (!response.isSuccessful()) {
|
||||
usersCallback.retrieveError(new Exception("Error calling Users API"), "Error calling Users API");
|
||||
return;
|
||||
}
|
||||
UsersResponse usersResponse = response.body();
|
||||
List<UserListAdapter.User> adapterUsers = new ArrayList<>(usersResponse.total_entries);
|
||||
for (User user : usersResponse.data.users) {
|
||||
UserListAdapter.User adapterUser = new UserListAdapter.User();
|
||||
adapterUser.connection = user.connection;
|
||||
adapterUser.imageUrl = user.images.thumbnail;
|
||||
adapterUser.name = user.username;
|
||||
adapterUser.online = user.online;
|
||||
adapterUser.locationName = (user.location != null ?
|
||||
(user.location.root != null ? user.location.root.name :
|
||||
(user.location.domain != null ? user.location.domain.name : ""))
|
||||
: "");
|
||||
adapterUsers.add(adapterUser);
|
||||
}
|
||||
usersCallback.retrieveOk(adapterUsers);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Call<UsersResponse> call, Throwable t) {
|
||||
usersCallback.retrieveError(new Exception(t), "Error calling Users API");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public class UserActionRetrofitCallback implements Callback<UsersResponse> {
|
||||
|
||||
UserActionCallback callback;
|
||||
|
||||
public UserActionRetrofitCallback(UserActionCallback callback) {
|
||||
this.callback = callback;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onResponse(Call<UsersResponse> call, retrofit2.Response<UsersResponse> response) {
|
||||
if (!response.isSuccessful()) {
|
||||
callback.requestError(new Exception("Error with "
|
||||
+ call.request().url().toString() + " "
|
||||
+ call.request().method() + " call " + response.message()),
|
||||
response.message());
|
||||
return;
|
||||
}
|
||||
|
||||
if (response.body() == null || !"success".equals(response.body().status)) {
|
||||
callback.requestError(new Exception("Error with "
|
||||
+ call.request().url().toString() + " "
|
||||
+ call.request().method() + " call " + response.message()),
|
||||
response.message());
|
||||
return;
|
||||
}
|
||||
callback.requestOk();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Call<UsersResponse> call, Throwable t) {
|
||||
callback.requestError(new Exception(t), t.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addFriend(String friendUserName, UserActionCallback callback) {
|
||||
Call<UsersResponse> friendCall = mEndpointUsersProviderService.addFriend(new BodyAddFriend(friendUserName));
|
||||
friendCall.enqueue(new UserActionRetrofitCallback(callback));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeFriend(String friendUserName, UserActionCallback callback) {
|
||||
Call<UsersResponse> friendCall = mEndpointUsersProviderService.removeFriend(friendUserName);
|
||||
friendCall.enqueue(new UserActionRetrofitCallback(callback));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeConnection(String connectionUserName, UserActionCallback callback) {
|
||||
Call<UsersResponse> connectionCall = mEndpointUsersProviderService.removeConnection(connectionUserName);
|
||||
connectionCall.enqueue(new UserActionRetrofitCallback(callback));
|
||||
}
|
||||
|
||||
public interface EndpointUsersProviderService {
|
||||
@GET("api/v1/users")
|
||||
Call<UsersResponse> getUsers(@Query("filter") String filter,
|
||||
@Query("per_page") int perPage,
|
||||
@Query("online") Boolean online);
|
||||
|
||||
@DELETE("api/v1/user/connections/{connectionUserName}")
|
||||
Call<UsersResponse> removeConnection(@Path("connectionUserName") String connectionUserName);
|
||||
|
||||
@DELETE("api/v1/user/friends/{friendUserName}")
|
||||
Call<UsersResponse> removeFriend(@Path("friendUserName") String friendUserName);
|
||||
|
||||
@POST("api/v1/user/friends")
|
||||
Call<UsersResponse> addFriend(@Body BodyAddFriend friendUserName);
|
||||
|
||||
/* response
|
||||
{
|
||||
"status": "success"
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
class BodyAddFriend {
|
||||
String username;
|
||||
public BodyAddFriend(String username) {
|
||||
this.username = username;
|
||||
}
|
||||
}
|
||||
|
||||
class UsersResponse {
|
||||
public UsersResponse() {}
|
||||
String status;
|
||||
int current_page;
|
||||
int total_pages;
|
||||
int per_page;
|
||||
int total_entries;
|
||||
Data data;
|
||||
}
|
||||
|
||||
class Data {
|
||||
public Data() {}
|
||||
List<User> users;
|
||||
}
|
||||
|
||||
class User {
|
||||
public User() {}
|
||||
String username;
|
||||
boolean online;
|
||||
String connection;
|
||||
Images images;
|
||||
LocationData location;
|
||||
}
|
||||
|
||||
class Images {
|
||||
public Images() {}
|
||||
String hero;
|
||||
String thumbnail;
|
||||
String tiny;
|
||||
}
|
||||
|
||||
class LocationData {
|
||||
public LocationData() {}
|
||||
NameContainer root;
|
||||
NameContainer domain;
|
||||
}
|
||||
class NameContainer {
|
||||
String name;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
package io.highfidelity.hifiinterface.provider;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import io.highfidelity.hifiinterface.view.UserListAdapter;
|
||||
|
||||
/**
|
||||
* Created by cduarte on 6/13/18.
|
||||
*/
|
||||
|
||||
public interface UsersProvider {
|
||||
|
||||
public static String CONNECTION_TYPE_FRIEND = "friend";
|
||||
public static String CONNECTION_FILTER_CONNECTIONS = "connections";
|
||||
|
||||
void retrieve(UsersProvider.UsersCallback usersCallback);
|
||||
|
||||
interface UsersCallback {
|
||||
void retrieveOk(List<UserListAdapter.User> users);
|
||||
void retrieveError(Exception e, String message);
|
||||
}
|
||||
|
||||
|
||||
void addFriend(String friendUserName, UserActionCallback callback);
|
||||
|
||||
void removeFriend(String friendUserName, UserActionCallback callback);
|
||||
|
||||
void removeConnection(String connectionUserName, UserActionCallback callback);
|
||||
|
||||
interface UserActionCallback {
|
||||
void requestOk();
|
||||
void requestError(Exception e, String message);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,247 @@
|
|||
package io.highfidelity.hifiinterface.view;
|
||||
|
||||
import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.drawable.BitmapDrawable;
|
||||
import android.net.Uri;
|
||||
import android.support.v4.content.ContextCompat;
|
||||
import android.support.v4.graphics.drawable.RoundedBitmapDrawable;
|
||||
import android.support.v4.graphics.drawable.RoundedBitmapDrawableFactory;
|
||||
import android.support.v7.widget.RecyclerView;
|
||||
import android.util.Log;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
import android.widget.ImageView;
|
||||
import android.widget.TextView;
|
||||
|
||||
import com.squareup.picasso.Callback;
|
||||
import com.squareup.picasso.Picasso;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import io.highfidelity.hifiinterface.R;
|
||||
import io.highfidelity.hifiinterface.provider.UsersProvider;
|
||||
|
||||
/**
|
||||
* Created by cduarte on 6/13/18.
|
||||
*/
|
||||
|
||||
public class UserListAdapter extends RecyclerView.Adapter<UserListAdapter.ViewHolder> {
|
||||
|
||||
private UsersProvider mProvider;
|
||||
private LayoutInflater mInflater;
|
||||
private Context mContext;
|
||||
private List<User> mUsers = new ArrayList<>();
|
||||
private ItemClickListener mClickListener;
|
||||
private AdapterListener mAdapterListener;
|
||||
|
||||
public UserListAdapter(Context c, UsersProvider usersProvider) {
|
||||
mContext = c;
|
||||
mInflater = LayoutInflater.from(mContext);
|
||||
mProvider = usersProvider;
|
||||
loadUsers();
|
||||
}
|
||||
|
||||
public void setListener(AdapterListener adapterListener) {
|
||||
mAdapterListener = adapterListener;
|
||||
}
|
||||
|
||||
public void loadUsers() {
|
||||
mProvider.retrieve(new UsersProvider.UsersCallback() {
|
||||
@Override
|
||||
public void retrieveOk(List<User> users) {
|
||||
mUsers = new ArrayList<>(users);
|
||||
notifyDataSetChanged();
|
||||
if (mAdapterListener != null) {
|
||||
if (mUsers.isEmpty()) {
|
||||
mAdapterListener.onEmptyAdapter();
|
||||
} else {
|
||||
mAdapterListener.onNonEmptyAdapter();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void retrieveError(Exception e, String message) {
|
||||
Log.e("[USERS]", message, e);
|
||||
if (mAdapterListener != null) {
|
||||
mAdapterListener.onError(e, message);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public UserListAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
|
||||
View view = mInflater.inflate(R.layout.user_item, parent, false);
|
||||
return new UserListAdapter.ViewHolder(view);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBindViewHolder(UserListAdapter.ViewHolder holder, int position) {
|
||||
User aUser = mUsers.get(position);
|
||||
holder.mUsername.setText(aUser.name);
|
||||
|
||||
holder.mOnlineInfo.setVisibility(aUser.online? View.VISIBLE : View.GONE);
|
||||
holder.mLocation.setText("- " + aUser.locationName); // Bring info from the API and use it here
|
||||
|
||||
holder.mFriendStar.onBindSet(aUser.name, aUser.connection.equals(UsersProvider.CONNECTION_TYPE_FRIEND));
|
||||
Uri uri = Uri.parse(aUser.imageUrl);
|
||||
Picasso.get().load(uri).into(holder.mImage, new RoundProfilePictureCallback(holder.mImage));
|
||||
}
|
||||
|
||||
private class RoundProfilePictureCallback implements Callback {
|
||||
private ImageView mProfilePicture;
|
||||
public RoundProfilePictureCallback(ImageView imageView) {
|
||||
mProfilePicture = imageView;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onSuccess() {
|
||||
Bitmap imageBitmap = ((BitmapDrawable) mProfilePicture.getDrawable()).getBitmap();
|
||||
RoundedBitmapDrawable imageDrawable = RoundedBitmapDrawableFactory.create(mProfilePicture.getContext().getResources(), imageBitmap);
|
||||
imageDrawable.setCircular(true);
|
||||
imageDrawable.setCornerRadius(Math.max(imageBitmap.getWidth(), imageBitmap.getHeight()) / 2.0f);
|
||||
mProfilePicture.setImageDrawable(imageDrawable);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(Exception e) {
|
||||
mProfilePicture.setImageResource(R.drawable.default_profile_avatar);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getItemCount() {
|
||||
return mUsers.size();
|
||||
}
|
||||
|
||||
public class ToggleWrapper {
|
||||
|
||||
private ViewGroup mFrame;
|
||||
private ImageView mImage;
|
||||
private boolean mChecked = false;
|
||||
private String mUsername;
|
||||
private boolean waitingChangeConfirm = false;
|
||||
|
||||
public ToggleWrapper(ViewGroup toggleFrame) {
|
||||
mFrame = toggleFrame;
|
||||
mImage = toggleFrame.findViewById(R.id.userFavImage);
|
||||
mFrame.setOnClickListener(view -> toggle());
|
||||
}
|
||||
|
||||
private void refreshUI() {
|
||||
mImage.setColorFilter(ContextCompat.getColor(mImage.getContext(),
|
||||
mChecked ? R.color.starSelectedTint : R.color.starUnselectedTint));
|
||||
}
|
||||
|
||||
class RollbackUICallback implements UsersProvider.UserActionCallback {
|
||||
|
||||
boolean previousStatus;
|
||||
|
||||
RollbackUICallback(boolean previousStatus) {
|
||||
this.previousStatus = previousStatus;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void requestOk() {
|
||||
if (!waitingChangeConfirm) {
|
||||
return;
|
||||
}
|
||||
mFrame.setClickable(true);
|
||||
// nothing to do, new status was set
|
||||
}
|
||||
|
||||
@Override
|
||||
public void requestError(Exception e, String message) {
|
||||
if (!waitingChangeConfirm) {
|
||||
return;
|
||||
}
|
||||
// new status was not set, rolling back
|
||||
mChecked = previousStatus;
|
||||
mFrame.setClickable(true);
|
||||
refreshUI();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected void toggle() {
|
||||
// TODO API CALL TO CHANGE
|
||||
final boolean previousStatus = mChecked;
|
||||
mChecked = !mChecked;
|
||||
mFrame.setClickable(false);
|
||||
refreshUI();
|
||||
waitingChangeConfirm = true;
|
||||
if (mChecked) {
|
||||
mProvider.addFriend(mUsername, new RollbackUICallback(previousStatus));
|
||||
} else {
|
||||
mProvider.removeFriend(mUsername, new RollbackUICallback(previousStatus));
|
||||
}
|
||||
}
|
||||
|
||||
protected void onBindSet(String username, boolean checked) {
|
||||
mChecked = checked;
|
||||
mUsername = username;
|
||||
waitingChangeConfirm = false;
|
||||
mFrame.setClickable(true);
|
||||
refreshUI();
|
||||
}
|
||||
}
|
||||
|
||||
public class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
|
||||
|
||||
TextView mUsername;
|
||||
TextView mOnline;
|
||||
View mOnlineInfo;
|
||||
TextView mLocation;
|
||||
ImageView mImage;
|
||||
ToggleWrapper mFriendStar;
|
||||
|
||||
public ViewHolder(View itemView) {
|
||||
super(itemView);
|
||||
mUsername = itemView.findViewById(R.id.userName);
|
||||
mOnline = itemView.findViewById(R.id.userOnline);
|
||||
mImage = itemView.findViewById(R.id.userImage);
|
||||
mOnlineInfo = itemView.findViewById(R.id.userOnlineInfo);
|
||||
mLocation = itemView.findViewById(R.id.userLocation);
|
||||
mFriendStar = new ToggleWrapper(itemView.findViewById(R.id.userFav));
|
||||
itemView.setOnClickListener(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onClick(View view) {
|
||||
int position = getAdapterPosition();
|
||||
if (mClickListener != null) {
|
||||
mClickListener.onItemClick(view, position, mUsers.get(position));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// allows clicks events to be caught
|
||||
public void setClickListener(ItemClickListener itemClickListener) {
|
||||
this.mClickListener = itemClickListener;
|
||||
}
|
||||
|
||||
public interface ItemClickListener {
|
||||
void onItemClick(View view, int position, User user);
|
||||
}
|
||||
|
||||
public static class User {
|
||||
public String name;
|
||||
public String imageUrl;
|
||||
public String connection;
|
||||
public boolean online;
|
||||
|
||||
public String locationName;
|
||||
|
||||
public User() {}
|
||||
}
|
||||
|
||||
public interface AdapterListener {
|
||||
void onEmptyAdapter();
|
||||
void onNonEmptyAdapter();
|
||||
void onError(Exception e, String message);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:width="24dp"
|
||||
android:height="24dp"
|
||||
android:viewportWidth="24.0"
|
||||
android:viewportHeight="24.0">
|
||||
<path
|
||||
android:fillColor="#FF000000"
|
||||
android:pathData="M6,19c0,1.1 0.9,2 2,2h8c1.1,0 2,-0.9 2,-2V7H6v12zM19,4h-3.5l-1,-1h-5l-1,1H5v2h14V4z"/>
|
||||
</vector>
|
4
android/app/src/main/res/drawable/ic_star.xml
Normal file
4
android/app/src/main/res/drawable/ic_star.xml
Normal file
|
@ -0,0 +1,4 @@
|
|||
<vector android:height="31dp" android:viewportHeight="25.0"
|
||||
android:viewportWidth="27.0" android:width="31dp" xmlns:android="http://schemas.android.com/apk/res/android">
|
||||
<path android:fillColor="#FBD92A" android:pathData="M12.549,0.927C12.848,0.006 14.152,0.006 14.451,0.927L16.756,8.019C16.889,8.431 17.273,8.71 17.706,8.71H25.164C26.132,8.71 26.535,9.95 25.751,10.519L19.719,14.903C19.368,15.157 19.221,15.608 19.355,16.021L21.66,23.113C21.959,24.034 20.904,24.8 20.121,24.231L14.088,19.847C13.737,19.593 13.263,19.593 12.912,19.847L6.879,24.231C6.096,24.8 5.041,24.034 5.34,23.113L7.645,16.021C7.779,15.608 7.632,15.157 7.282,14.903L1.249,10.519C0.465,9.95 0.868,8.71 1.836,8.71H9.293C9.727,8.71 10.111,8.431 10.245,8.019L12.549,0.927Z"/>
|
||||
</vector>
|
31
android/app/src/main/res/drawable/ic_teleporticon.xml
Normal file
31
android/app/src/main/res/drawable/ic_teleporticon.xml
Normal file
|
@ -0,0 +1,31 @@
|
|||
<vector xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:width="24dp"
|
||||
android:height="24dp"
|
||||
android:viewportWidth="24.0"
|
||||
android:viewportHeight="24.0">
|
||||
<path
|
||||
android:fillColor="#FF000000"
|
||||
android:pathData="M0.6,3h22.8v18.7h-22.8z"
|
||||
android:fillAlpha="0"/>
|
||||
<path
|
||||
android:fillColor="#FF000000"
|
||||
android:pathData="M0.6,3h16.3v18.7h-16.3z"
|
||||
android:fillAlpha="0"/>
|
||||
<path
|
||||
android:fillColor="#FF000000"
|
||||
android:pathData="M0.6,3h16.3v18.7h-16.3z"
|
||||
android:fillAlpha="0"/>
|
||||
<path
|
||||
android:fillColor="#FF000000"
|
||||
android:pathData="M13.8,9.9h9.6v7.8h-9.6z"
|
||||
android:fillAlpha="0"/>
|
||||
<path
|
||||
android:pathData="M11.9,16.9c-0.2,-0.9 -0.3,-2.3 -0.4,-3.4c-0.1,-0.7 -0.1,-1.3 -0.2,-1.7c0,-0.1 -0.1,-0.3 0.3,-0.4c0.1,0 0.1,0 0.2,-0.1l4.4,-1.7c0.3,-0.1 0.5,-0.4 0.6,-0.7c0.1,-0.3 0.1,-0.7 -0.2,-0.9L16.6,8c-0.2,-0.2 -0.5,-0.3 -0.8,-0.3c-0.1,0 -4.8,0.7 -6.8,0.7c-0.1,0 -0.1,0 -0.1,0c-2,0 -6.9,-0.8 -7,-0.8c-0.4,-0.1 -0.8,0.1 -1,0.4L0.7,8.3C0.6,8.5 0.6,8.8 0.6,9.1c0.1,0.3 0.3,0.5 0.5,0.6C2,10 5,11.2 5.9,11.3c0.2,0 0.4,0.1 0.5,0.6c0.1,0.6 -0.2,3.6 -0.6,5c-0.4,1.4 -1,3.2 -1,3.2c-0.2,0.5 0.1,1 0.6,1.2l0.6,0.2c0.2,0.1 0.5,0.1 0.7,-0.1c0.2,-0.1 0.4,-0.3 0.5,-0.6l1.7,-5l1.6,5.1c0.1,0.3 0.3,0.5 0.5,0.6c0.1,0.1 0.3,0.1 0.4,0.1c0.1,0 0.2,0 0.3,-0.1l0.5,-0.2c0.4,-0.2 0.7,-0.6 0.6,-1.1C12.8,20.3 12.3,18.5 11.9,16.9z"
|
||||
android:fillColor="#FFFFFF"/>
|
||||
<path
|
||||
android:pathData="M8.9,7.5c1.3,0 2.3,-1 2.3,-2.3S10.2,3 8.9,3S6.6,4 6.6,5.3S7.7,7.5 8.9,7.5z"
|
||||
android:fillColor="#FFFFFF"/>
|
||||
<path
|
||||
android:pathData="M23,13.4L22.6,13c0,0 0,0 0,0l-2.9,-2.8c-0.2,-0.2 -0.5,-0.2 -0.7,0l-0.7,0.7c-0.2,0.2 -0.2,0.5 0,0.7l1.2,1.2h-5.2c-0.3,0 -0.5,0.2 -0.5,0.5v0.9c0,0.3 0.2,0.5 0.5,0.5h5.1l-1.2,1.1c-0.2,0.2 -0.2,0.5 0,0.7l0.7,0.7c0.2,0.2 0.5,0.2 0.7,0l3.3,-3.2C23.2,13.9 23.2,13.6 23,13.4z"
|
||||
android:fillColor="#FFFFFF"/>
|
||||
</vector>
|
88
android/app/src/main/res/layout/fragment_friends.xml
Normal file
88
android/app/src/main/res/layout/fragment_friends.xml
Normal file
|
@ -0,0 +1,88 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<com.sothree.slidinguppanel.SlidingUpPanelLayout
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
android:id="@+id/sliding_layout"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent"
|
||||
android:gravity="bottom"
|
||||
app:umanoFadeColor="@color/slidingUpPanelFadeColor"
|
||||
app:umanoShadowHeight="4dp"
|
||||
android:background="@color/backgroundLight">
|
||||
|
||||
<android.support.v4.widget.SwipeRefreshLayout
|
||||
android:id="@+id/swipeRefreshLayout"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent">
|
||||
<android.support.v7.widget.RecyclerView
|
||||
android:id="@+id/rvUsers"
|
||||
android:paddingTop="@dimen/list_vertical_padding"
|
||||
android:paddingBottom="@dimen/list_vertical_padding"
|
||||
android:clipToPadding="false"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent" />
|
||||
</android.support.v4.widget.SwipeRefreshLayout>
|
||||
|
||||
<LinearLayout
|
||||
android:id="@+id/userActionsLayout"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="270dp"
|
||||
android:orientation="vertical"
|
||||
android:background="@color/backgroundDark">
|
||||
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/userActionVisit"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="56dp"
|
||||
android:clickable="true"
|
||||
android:focusable="true"
|
||||
android:background="?attr/selectableItemBackground">
|
||||
<ImageView android:id="@+id/userActionVisitIcon"
|
||||
android:layout_width="16dp"
|
||||
android:layout_height="16dp"
|
||||
app:layout_constraintStart_toStartOf="parent"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:layout_marginStart="@dimen/activity_horizontal_margin"
|
||||
android:src="@drawable/ic_teleporticon"
|
||||
android:tint="@color/white_opaque" />
|
||||
<TextView android:id="@+id/userActionVisitText"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="Visit In-World"
|
||||
android:fontFamily="@font/raleway"
|
||||
android:textColor="@color/white_opaque"
|
||||
app:layout_constraintStart_toEndOf="@id/userActionVisitIcon"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:layout_marginStart="32dp" />
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
<android.support.constraint.ConstraintLayout
|
||||
android:id="@+id/userActionDelete"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="56dp"
|
||||
android:clickable="true"
|
||||
android:focusable="true"
|
||||
android:background="?attr/selectableItemBackground">
|
||||
<ImageView android:id="@+id/userActionDeleteIcon"
|
||||
android:layout_width="16dp"
|
||||
android:layout_height="16dp"
|
||||
app:layout_constraintStart_toStartOf="parent"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:layout_marginStart="@dimen/activity_horizontal_margin"
|
||||
android:src="@drawable/ic_delete_black_24dp"
|
||||
android:tint="@color/white_opaque" />
|
||||
<TextView android:id="@+id/userActionDeleteText"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="Remove from People"
|
||||
android:fontFamily="@font/raleway"
|
||||
android:textColor="@color/white_opaque"
|
||||
app:layout_constraintStart_toEndOf="@id/userActionDeleteIcon"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:layout_marginStart="32dp" />
|
||||
</android.support.constraint.ConstraintLayout>
|
||||
</LinearLayout>
|
||||
</com.sothree.slidinguppanel.SlidingUpPanelLayout>
|
69
android/app/src/main/res/layout/user_item.xml
Normal file
69
android/app/src/main/res/layout/user_item.xml
Normal file
|
@ -0,0 +1,69 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="56dp"
|
||||
android:background="?attr/selectableItemBackground"
|
||||
android:clickable="true">
|
||||
|
||||
<ImageView
|
||||
android:id="@+id/userImage"
|
||||
android:layout_width="40dp"
|
||||
android:layout_height="40dp"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:layout_marginStart="@dimen/activity_horizontal_margin"
|
||||
app:layout_constraintStart_toStartOf="parent"/>
|
||||
<LinearLayout
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:layout_marginStart="@dimen/activity_horizontal_margin"
|
||||
app:layout_constraintStart_toEndOf="@id/userImage"
|
||||
android:orientation="vertical">
|
||||
<TextView
|
||||
android:id="@+id/userName"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="@font/raleway"
|
||||
android:textColor="@color/menuOption"/>
|
||||
<LinearLayout android:id="@+id/userOnlineInfo"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content">
|
||||
<TextView
|
||||
android:id="@+id/userOnline"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:text="@string/online"
|
||||
android:fontFamily="@font/raleway"
|
||||
android:textColor="@color/hifiAquamarine" />
|
||||
<TextView
|
||||
android:id="@+id/userLocation"
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:layout_marginStart="3dp"
|
||||
android:fontFamily="@font/raleway_italic"
|
||||
android:textColor="@color/menuOption"/>
|
||||
</LinearLayout>
|
||||
</LinearLayout>
|
||||
<RelativeLayout android:id="@+id/userFav"
|
||||
android:layout_width="48dp"
|
||||
android:layout_height="48dp"
|
||||
android:clickable="true"
|
||||
android:focusable="true"
|
||||
app:layout_constraintEnd_toEndOf="parent"
|
||||
app:layout_constraintTop_toTopOf="parent"
|
||||
app:layout_constraintBottom_toBottomOf="parent"
|
||||
android:layout_marginEnd="5.5dp">
|
||||
<ImageView android:id="@+id/userFavImage"
|
||||
android:layout_width="27dp"
|
||||
android:layout_height="27dp"
|
||||
android:src="@drawable/ic_star"
|
||||
android:tint="@color/starUnselectedTint"
|
||||
android:background="?attr/selectableItemBackgroundBorderless"
|
||||
android:layout_centerInParent="true"
|
||||
android:layout_marginEnd="0dp" />
|
||||
</RelativeLayout>
|
||||
|
||||
</android.support.constraint.ConstraintLayout>
|
|
@ -5,4 +5,8 @@
|
|||
android:id="@+id/action_home"
|
||||
android:title="@string/home"
|
||||
/>
|
||||
<item
|
||||
android:id="@+id/action_people"
|
||||
android:title="@string/people"
|
||||
/>
|
||||
</menu>
|
||||
|
|
|
@ -18,4 +18,8 @@
|
|||
<color name="black_060">#99000000</color>
|
||||
<color name="statusbar_color">#292929</color>
|
||||
<color name="hifiLogoColor">#23B2E7</color>
|
||||
<color name="hifiAquamarine">#62D5C6</color>
|
||||
<color name="starSelectedTint">#FBD92A</color>
|
||||
<color name="starUnselectedTint">#8A8A8A</color>
|
||||
<color name="slidingUpPanelFadeColor">#40000000</color>
|
||||
</resources>
|
||||
|
|
|
@ -37,4 +37,6 @@
|
|||
<dimen name="header_hifi_height">101dp</dimen>
|
||||
<dimen name="header_hifi_width">425dp</dimen>
|
||||
|
||||
<dimen name="list_vertical_padding">8dp</dimen>
|
||||
|
||||
</resources>
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
<resources>
|
||||
<string name="app_name" translatable="false">Interface</string>
|
||||
<string name="home">Home</string>
|
||||
<string name="people">People</string>
|
||||
<string name="web_view_action_open_in_browser" translatable="false">Open in browser</string>
|
||||
<string name="web_view_action_share" translatable="false">Share link</string>
|
||||
<string name="web_view_action_share_subject" translatable="false">Shared a link</string>
|
||||
|
@ -21,5 +22,11 @@
|
|||
<string name="search_no_results">No places exist with that name</string>
|
||||
<string name="privacyPolicy">Privacy Policy</string>
|
||||
<string name="your_last_location">Your Last Location</string>
|
||||
<string name="online">Online</string>
|
||||
|
||||
<!-- tags -->
|
||||
<string name="tagFragmentHome">tagFragmentHome</string>
|
||||
<string name="tagFragmentLogin">tagFragmentLogin</string>
|
||||
<string name="tagFragmentPolicy">tagFragmentPolicy</string>
|
||||
<string name="tagFragmentPeople">tagFragmentPeople</string>
|
||||
</resources>
|
||||
|
|
|
@ -28,6 +28,7 @@ allprojects {
|
|||
repositories {
|
||||
jcenter()
|
||||
google()
|
||||
mavenCentral()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -82,8 +82,6 @@ Agent::Agent(ReceivedMessage& message) :
|
|||
DependencyManager::set<recording::ClipCache>();
|
||||
|
||||
DependencyManager::set<ScriptCache>();
|
||||
DependencyManager::set<ScriptEngines>(ScriptEngine::AGENT_SCRIPT);
|
||||
|
||||
DependencyManager::set<RecordingScriptingInterface>();
|
||||
DependencyManager::set<UsersScriptingInterface>();
|
||||
|
||||
|
@ -162,6 +160,8 @@ void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
|
|||
static const QString AGENT_LOGGING_NAME = "agent";
|
||||
|
||||
void Agent::run() {
|
||||
// Create ScriptEngines on threaded-assignment thread then move to main thread.
|
||||
DependencyManager::set<ScriptEngines>(ScriptEngine::AGENT_SCRIPT)->moveToThread(qApp->thread());
|
||||
|
||||
// make sure we request our script once the agent connects to the domain
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
@ -497,7 +497,6 @@ void Agent::executeScript() {
|
|||
Frame::clearFrameHandler(AVATAR_FRAME_TYPE);
|
||||
|
||||
DependencyManager::destroy<RecordingScriptingInterface>();
|
||||
|
||||
setFinished(true);
|
||||
}
|
||||
|
||||
|
@ -516,7 +515,7 @@ void Agent::setIsListeningToAudioStream(bool isListeningToAudioStream) {
|
|||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->eachMatchingNode(
|
||||
[&](const SharedNodePointer& node)->bool {
|
||||
[](const SharedNodePointer& node)->bool {
|
||||
return (node->getType() == NodeType::AudioMixer) && node->getActiveSocket();
|
||||
},
|
||||
[&](const SharedNodePointer& node) {
|
||||
|
@ -847,7 +846,7 @@ void Agent::aboutToFinish() {
|
|||
DependencyManager::destroy<recording::Deck>();
|
||||
DependencyManager::destroy<recording::Recorder>();
|
||||
DependencyManager::destroy<recording::ClipCache>();
|
||||
|
||||
DependencyManager::destroy<ScriptEngine>();
|
||||
QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
|
||||
|
||||
// cleanup codec & encoder
|
||||
|
|
|
@ -945,22 +945,14 @@ void AssetServer::sendStatsPacket() {
|
|||
upstreamStats["2. Sent Packets"] = stat.second.sentPackets;
|
||||
upstreamStats["3. Recvd ACK"] = events[Events::ReceivedACK];
|
||||
upstreamStats["4. Procd ACK"] = events[Events::ProcessedACK];
|
||||
upstreamStats["5. Recvd LACK"] = events[Events::ReceivedLightACK];
|
||||
upstreamStats["6. Recvd NAK"] = events[Events::ReceivedNAK];
|
||||
upstreamStats["7. Recvd TNAK"] = events[Events::ReceivedTimeoutNAK];
|
||||
upstreamStats["8. Sent ACK2"] = events[Events::SentACK2];
|
||||
upstreamStats["9. Retransmitted"] = events[Events::Retransmission];
|
||||
upstreamStats["5. Retransmitted"] = events[Events::Retransmission];
|
||||
nodeStats["Upstream Stats"] = upstreamStats;
|
||||
|
||||
QJsonObject downstreamStats;
|
||||
downstreamStats["1. Recvd (P/s)"] = stat.second.receiveRate;
|
||||
downstreamStats["2. Recvd Packets"] = stat.second.receivedPackets;
|
||||
downstreamStats["3. Sent ACK"] = events[Events::SentACK];
|
||||
downstreamStats["4. Sent LACK"] = events[Events::SentLightACK];
|
||||
downstreamStats["5. Sent NAK"] = events[Events::SentNAK];
|
||||
downstreamStats["6. Sent TNAK"] = events[Events::SentTimeoutNAK];
|
||||
downstreamStats["7. Recvd ACK2"] = events[Events::ReceivedACK2];
|
||||
downstreamStats["8. Duplicates"] = events[Events::Duplicate];
|
||||
downstreamStats["4. Duplicates"] = events[Events::Duplicate];
|
||||
nodeStats["Downstream Stats"] = downstreamStats;
|
||||
|
||||
QString uuid;
|
||||
|
|
|
@ -447,18 +447,21 @@ void AvatarMixer::handleAvatarKilled(SharedNodePointer avatarNode) {
|
|||
// send a kill packet for it to our other nodes
|
||||
nodeList->eachMatchingNode([&](const SharedNodePointer& node) {
|
||||
// we relay avatar kill packets to agents that are not upstream
|
||||
// and downstream avatar mixers, if the node that was just killed was being replicated
|
||||
return (node->getType() == NodeType::Agent && !node->isUpstream()) ||
|
||||
(avatarNode->isReplicated() && shouldReplicateTo(*avatarNode, *node));
|
||||
// and downstream avatar mixers, if the node that was just killed was being replicatedConnectedAgent
|
||||
return node->getActiveSocket() &&
|
||||
((node->getType() == NodeType::Agent && !node->isUpstream()) ||
|
||||
(avatarNode->isReplicated() && shouldReplicateTo(*avatarNode, *node)));
|
||||
}, [&](const SharedNodePointer& node) {
|
||||
if (node->getType() == NodeType::Agent) {
|
||||
if (!killPacket) {
|
||||
killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason));
|
||||
killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
|
||||
killPacket->write(avatarNode->getUUID().toRfc4122());
|
||||
killPacket->writePrimitive(KillAvatarReason::AvatarDisconnected);
|
||||
}
|
||||
|
||||
nodeList->sendUnreliablePacket(*killPacket, *node);
|
||||
auto killPacketCopy = NLPacket::createCopy(*killPacket);
|
||||
|
||||
nodeList->sendPacket(std::move(killPacketCopy), *node);
|
||||
} else {
|
||||
// send a replicated kill packet to the downstream avatar mixer
|
||||
if (!replicatedKillPacket) {
|
||||
|
|
|
@@ -108,7 +108,7 @@ uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& node
void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointer other) {
if (!isRadiusIgnoring(other->getUUID())) {
addToRadiusIgnoringSet(other->getUUID());
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason));
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);
killPacket->write(other->getUUID().toRfc4122());
if (self->isIgnoreRadiusEnabled()) {
killPacket->writePrimitive(KillAvatarReason::TheirAvatarEnteredYourBubble);

@@ -116,7 +116,7 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
}
setLastBroadcastTime(other->getUUID(), 0);
DependencyManager::get<NodeList>()->sendUnreliablePacket(*killPacket, *self);
DependencyManager::get<NodeList>()->sendPacket(std::move(killPacket), *self);
}
}
@@ -264,7 +264,8 @@ void DomainGatekeeper::updateNodePermissions() {
QList<SharedNodePointer> nodesToKill;

auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
limitedNodeList->eachNode([this, limitedNodeList, &nodesToKill](const SharedNodePointer& node){
QWeakPointer<LimitedNodeList> limitedNodeListWeak = limitedNodeList;
limitedNodeList->eachNode([this, limitedNodeListWeak, &nodesToKill](const SharedNodePointer& node){
// the id and the username in NodePermissions will often be the same, but id is set before
// authentication and verifiedUsername is only set once they user's key has been confirmed.
QString verifiedUsername = node->getPermissions().getVerifiedUserName();

@@ -296,7 +297,8 @@ void DomainGatekeeper::updateNodePermissions() {
machineFingerprint = nodeData->getMachineFingerprint();

auto sendingAddress = nodeData->getSendingSockAddr().getAddress();
isLocalUser = (sendingAddress == limitedNodeList->getLocalSockAddr().getAddress() ||
auto nodeList = limitedNodeListWeak.lock();
isLocalUser = ((nodeList && sendingAddress == nodeList->getLocalSockAddr().getAddress()) ||
sendingAddress == QHostAddress::LocalHost);
}

@@ -458,7 +460,7 @@ SharedNodePointer DomainGatekeeper::processAgentConnectRequest(const NodeConnect

// in case this is a node that's failing to connect
// double check we don't have the same node whose sockets match exactly already in the list
limitedNodeList->eachNodeBreakable([&](const SharedNodePointer& node){
limitedNodeList->eachNodeBreakable([nodeConnection, username, &existingNodeID](const SharedNodePointer& node){

if (node->getPublicSocket() == nodeConnection.publicSockAddr && node->getLocalSocket() == nodeConnection.localSockAddr) {
// we have a node that already has these exact sockets - this can occur if a node

@@ -1009,7 +1011,7 @@ void DomainGatekeeper::refreshGroupsCache() {
getDomainOwnerFriendsList();

auto nodeList = DependencyManager::get<LimitedNodeList>();
nodeList->eachNode([&](const SharedNodePointer& node) {
nodeList->eachNode([this](const SharedNodePointer& node) {
if (!node->getPermissions().isAssignment) {
// this node is an agent
const QString& verifiedUserName = node->getPermissions().getVerifiedUserName();
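A recurring change in these gatekeeper and domain-server hunks is replacing blanket [&] or shared-pointer lambda captures with explicit capture lists, often holding a QWeakPointer<LimitedNodeList> and locking it inside the callback so the lambda neither dangles nor pins the node list alive. A stand-alone sketch of that weak-capture pattern with invented types; only the QSharedPointer/QWeakPointer usage mirrors the diff:

```cpp
// Stand-alone illustration of the weak-capture pattern adopted above;
// the Registry type is invented for this sketch.
#include <QSharedPointer>
#include <QWeakPointer>
#include <QDebug>
#include <functional>

struct Registry {
    void forEach(const std::function<void(int)>& visit) const {
        for (int id : {1, 2, 3}) { visit(id); }
    }
};

int main() {
    QSharedPointer<Registry> registry(new Registry());

    // Capture a weak pointer instead of the shared pointer (or [&]):
    // the lambda cannot extend the registry's lifetime or dangle.
    QWeakPointer<Registry> registryWeak = registry;
    auto visitor = [registryWeak](int id) {
        if (auto locked = registryWeak.lock()) {   // promote only while needed
            qDebug() << "visiting node" << id;
        }
    };

    registry->forEach(visitor);
    return 0;
}
```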
@@ -1078,7 +1078,7 @@ bool DomainServer::isInInterestSet(const SharedNodePointer& nodeA, const SharedN
unsigned int DomainServer::countConnectedUsers() {
unsigned int result = 0;
auto nodeList = DependencyManager::get<LimitedNodeList>();
nodeList->eachNode([&](const SharedNodePointer& node){
nodeList->eachNode([&result](const SharedNodePointer& node){
// only count unassigned agents (i.e., users)
if (node->getType() == NodeType::Agent) {
auto nodeData = static_cast<DomainServerNodeData*>(node->getLinkedData());

@@ -1181,7 +1181,7 @@ void DomainServer::sendDomainListToNode(const SharedNodePointer& node, const Hif
// DTLSServerSession* dtlsSession = _isUsingDTLS ? _dtlsSessions[senderSockAddr] : NULL;
if (nodeData->isAuthenticated()) {
// if this authenticated node has any interest types, send back those nodes as well
limitedNodeList->eachNode([&](const SharedNodePointer& otherNode) {
limitedNodeList->eachNode([this, node, &domainListPackets, &domainListStream](const SharedNodePointer& otherNode) {
if (otherNode->getUUID() != node->getUUID() && isInInterestSet(node, otherNode)) {
// since we're about to add a node to the packet we start a segment
domainListPackets->startSegment();

@@ -1230,6 +1230,7 @@ QUuid DomainServer::connectionSecretForNodes(const SharedNodePointer& nodeA, con
void DomainServer::broadcastNewNode(const SharedNodePointer& addedNode) {

auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
QWeakPointer<LimitedNodeList> limitedNodeListWeak = limitedNodeList;

auto addNodePacket = NLPacket::create(PacketType::DomainServerAddedNode);

@@ -1241,7 +1242,7 @@ void DomainServer::broadcastNewNode(const SharedNodePointer& addedNode) {
int connectionSecretIndex = addNodePacket->pos();

limitedNodeList->eachMatchingNode(
[&](const SharedNodePointer& node)->bool {
[this, addedNode](const SharedNodePointer& node)->bool {
if (node->getLinkedData() && node->getActiveSocket() && node != addedNode) {
// is the added Node in this node's interest list?
return isInInterestSet(node, addedNode);

@@ -1249,16 +1250,19 @@ void DomainServer::broadcastNewNode(const SharedNodePointer& addedNode) {
return false;
}
},
[&](const SharedNodePointer& node) {
addNodePacket->seek(connectionSecretIndex);

QByteArray rfcConnectionSecret = connectionSecretForNodes(node, addedNode).toRfc4122();

// replace the bytes at the end of the packet for the connection secret between these nodes
addNodePacket->write(rfcConnectionSecret);

[this, &addNodePacket, connectionSecretIndex, addedNode, limitedNodeListWeak](const SharedNodePointer& node) {
// send off this packet to the node
limitedNodeList->sendUnreliablePacket(*addNodePacket, *node);
auto limitedNodeList = limitedNodeListWeak.lock();
if (limitedNodeList) {
addNodePacket->seek(connectionSecretIndex);

QByteArray rfcConnectionSecret = connectionSecretForNodes(node, addedNode).toRfc4122();

// replace the bytes at the end of the packet for the connection secret between these nodes
addNodePacket->write(rfcConnectionSecret);

limitedNodeList->sendUnreliablePacket(*addNodePacket, *node);
}
}
);
}
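In broadcastNewNode a single DomainServerAddedNode packet is reused for every recipient: the position of the connection secret is remembered once, and for each node the code seeks back to that offset and overwrites the trailing bytes before sending. A stand-alone sketch of that seek-and-overwrite idea using QBuffer in place of NLPacket (an assumption; NLPacket's pos/seek/write are treated here as QIODevice-style calls):

```cpp
// Stand-alone sketch of patching a per-recipient secret into a reused packet.
#include <QBuffer>
#include <QByteArray>
#include <QUuid>
#include <QDebug>

int main() {
    QByteArray packet;
    QBuffer buffer(&packet);
    buffer.open(QIODevice::ReadWrite);

    buffer.write(QByteArrayLiteral("ADDED_NODE_HEADER"));   // fixed part, written once
    qint64 secretIndex = buffer.pos();                       // remember where the secret goes
    buffer.write(QUuid().toRfc4122());                       // placeholder secret bytes

    for (int recipient = 0; recipient < 3; ++recipient) {
        buffer.seek(secretIndex);                            // rewind to the secret slot
        buffer.write(QUuid::createUuid().toRfc4122());       // per-recipient secret, same length
        qDebug() << "send to" << recipient << packet.toHex();
    }
    return 0;
}
```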
@@ -2864,7 +2868,7 @@ void DomainServer::updateReplicationNodes(ReplicationServerDirection direction)
auto serversSettings = replicationSettings.value(serversKey).toList();

std::vector<HifiSockAddr> knownReplicationNodes;
nodeList->eachNode([&](const SharedNodePointer& otherNode) {
nodeList->eachNode([direction, &knownReplicationNodes](const SharedNodePointer& otherNode) {
if ((direction == Upstream && NodeType::isUpstream(otherNode->getType()))
|| (direction == Downstream && NodeType::isDownstream(otherNode->getType()))) {
knownReplicationNodes.push_back(otherNode->getPublicSocket());

@@ -2902,7 +2906,7 @@ void DomainServer::updateReplicationNodes(ReplicationServerDirection direction)
// collect them in a vector to separately remove them with handleKillNode (since eachNode has a read lock and
// we cannot recursively take the write lock required by handleKillNode)
std::vector<SharedNodePointer> nodesToKill;
nodeList->eachNode([&](const SharedNodePointer& otherNode) {
nodeList->eachNode([this, direction, replicationNodesInSettings, replicationDirection, &nodesToKill](const SharedNodePointer& otherNode) {
if ((direction == Upstream && NodeType::isUpstream(otherNode->getType()))
|| (direction == Downstream && NodeType::isDownstream(otherNode->getType()))) {
bool nodeInSettings = find(replicationNodesInSettings.cbegin(), replicationNodesInSettings.cend(),
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -380,15 +380,21 @@
|
|||
{
|
||||
"properties": {
|
||||
"acceleration": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"actionData": "",
|
||||
"age": 14.011327743530273,
|
||||
"ageAsText": "0 hours 0 minutes 14 seconds",
|
||||
"age": 321.8835144042969,
|
||||
"ageAsText": "0 hours 5 minutes 21 seconds",
|
||||
"angularDamping": 0.39346998929977417,
|
||||
"angularVelocity": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
|
@ -406,24 +412,36 @@
|
|||
},
|
||||
"boundingBox": {
|
||||
"brn": {
|
||||
"x": -0.20154684782028198,
|
||||
"y": 0.03644842654466629,
|
||||
"z": -0.2641940414905548
|
||||
"blue": -0.03950843587517738,
|
||||
"green": 0.20785385370254517,
|
||||
"red": -0.04381325840950012,
|
||||
"x": -0.04381325840950012,
|
||||
"y": 0.20785385370254517,
|
||||
"z": -0.03950843587517738
|
||||
},
|
||||
"center": {
|
||||
"x": -0.030000001192092896,
|
||||
"y": 0.12999820709228516,
|
||||
"z": -0.07000023126602173
|
||||
"blue": 0,
|
||||
"green": 0.23000000417232513,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0.23000000417232513,
|
||||
"z": 0
|
||||
},
|
||||
"dimensions": {
|
||||
"x": 0.3430936932563782,
|
||||
"y": 0.18709957599639893,
|
||||
"z": 0.38838762044906616
|
||||
"blue": 0.07901687175035477,
|
||||
"green": 0.044292300939559937,
|
||||
"red": 0.08762651681900024,
|
||||
"x": 0.08762651681900024,
|
||||
"y": 0.044292300939559937,
|
||||
"z": 0.07901687175035477
|
||||
},
|
||||
"tfl": {
|
||||
"x": 0.1415468454360962,
|
||||
"y": 0.22354799509048462,
|
||||
"z": 0.12419357895851135
|
||||
"blue": 0.03950843587517738,
|
||||
"green": 0.2521461546421051,
|
||||
"red": 0.04381325840950012,
|
||||
"x": 0.04381325840950012,
|
||||
"y": 0.2521461546421051,
|
||||
"z": 0.03950843587517738
|
||||
}
|
||||
},
|
||||
"canCastShadow": true,
|
||||
|
@ -441,189 +459,14 @@
|
|||
"collisionless": false,
|
||||
"collisionsWillMove": false,
|
||||
"compoundShapeURL": "",
|
||||
"created": "2018-06-06T17:25:42Z",
|
||||
"damping": 0.39346998929977417,
|
||||
"density": 1000,
|
||||
"description": "",
|
||||
"dimensions": {
|
||||
"x": 0.33466479182243347,
|
||||
"y": 0.16981728374958038,
|
||||
"z": 0.38838762044906616
|
||||
},
|
||||
"dynamic": false,
|
||||
"editionNumber": 19,
|
||||
"entityInstanceNumber": 0,
|
||||
"friction": 0.5,
|
||||
"gravity": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"href": "",
|
||||
"id": "{6b0a2b08-e8e3-4d43-95cc-dfc4f7a4b0c9}",
|
||||
"ignoreForCollisions": false,
|
||||
"itemArtist": "jyoum",
|
||||
"itemCategories": "Wearables",
|
||||
"itemDescription": "A stylish and classic piece of headwear for your avatar.",
|
||||
"itemLicense": "",
|
||||
"itemName": "Fedora",
|
||||
"jointRotations": [
|
||||
],
|
||||
"jointRotationsSet": [
|
||||
],
|
||||
"jointTranslations": [
|
||||
],
|
||||
"jointTranslationsSet": [
|
||||
],
|
||||
"lastEdited": 1528306032827319,
|
||||
"lastEditedBy": "{4c770def-4abb-40c6-91a1-88da5247b2db}",
|
||||
"lifetime": -1,
|
||||
"limitedRun": 4294967295,
|
||||
"localPosition": {
|
||||
"x": -0.030000008642673492,
|
||||
"y": 0.12999820709228516,
|
||||
"z": -0.07000023126602173
|
||||
},
|
||||
"localRotation": {
|
||||
"w": 0.9996573328971863,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0.026176949962973595
|
||||
},
|
||||
"locked": false,
|
||||
"marketplaceID": "11c4208d-15d7-4449-9758-a08da6dbd3dc",
|
||||
"modelURL": "http://mpassets.highfidelity.com/11c4208d-15d7-4449-9758-a08da6dbd3dc-v1/Fedora.fbx",
|
||||
"name": "",
|
||||
"naturalDimensions": {
|
||||
"x": 0.2765824794769287,
|
||||
"y": 0.14034485816955566,
|
||||
"z": 0.320981502532959
|
||||
},
|
||||
"naturalPosition": {
|
||||
"x": 0.000143393874168396,
|
||||
"y": 1.7460365295410156,
|
||||
"z": 0.022502630949020386
|
||||
},
|
||||
"originalTextures": "{\n \"file5\": \"http://mpassets.highfidelity.com/11c4208d-15d7-4449-9758-a08da6dbd3dc-v1/Fedora.fbx/Texture/Fedora_Hat1_Base_Color.png\",\n \"file7\": \"http://mpassets.highfidelity.com/11c4208d-15d7-4449-9758-a08da6dbd3dc-v1/Fedora.fbx/Texture/Fedora_Hat1_Roughness.png\"\n}\n",
|
||||
"owningAvatarID": "{4c770def-4abb-40c6-91a1-88da5247b2db}",
|
||||
"parentID": "{4c770def-4abb-40c6-91a1-88da5247b2db}",
|
||||
"parentJointIndex": 64,
|
||||
"position": {
|
||||
"x": -0.030000008642673492,
|
||||
"y": 0.12999820709228516,
|
||||
"z": -0.07000023126602173
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 1.6202316284179688,
|
||||
"x": -0.5601736903190613,
|
||||
"y": -10.668098449707031,
|
||||
"z": -0.8933582305908203
|
||||
},
|
||||
"registrationPoint": {
|
||||
"x": 0.5,
|
||||
"y": 0.5,
|
||||
"z": 0.5
|
||||
},
|
||||
"relayParentJoints": false,
|
||||
"renderInfo": {
|
||||
"drawCalls": 1,
|
||||
"hasTransparent": false,
|
||||
"texturesCount": 2,
|
||||
"texturesSize": 327680,
|
||||
"verticesCount": 719
|
||||
},
|
||||
"restitution": 0.5,
|
||||
"rotation": {
|
||||
"w": 0.9996573328971863,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0.026176949962973595
|
||||
},
|
||||
"script": "",
|
||||
"scriptTimestamp": 0,
|
||||
"serverScripts": "",
|
||||
"shapeType": "box",
|
||||
"staticCertificateVersion": 0,
|
||||
"textures": "",
|
||||
"type": "Model",
|
||||
"userData": "{\"Attachment\":{\"action\":\"attach\",\"joint\":\"HeadTop_End\",\"attached\":false,\"options\":{\"translation\":{\"x\":0,\"y\":0,\"z\":0},\"scale\":1}},\"grabbableKey\":{\"cloneable\":false,\"grabbable\":true}}",
|
||||
"velocity": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"visible": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"acceleration": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"actionData": "",
|
||||
"age": 14.011027336120605,
|
||||
"ageAsText": "0 hours 0 minutes 14 seconds",
|
||||
"angularDamping": 0.39346998929977417,
|
||||
"angularVelocity": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"animation": {
|
||||
"allowTranslation": true,
|
||||
"currentFrame": 0,
|
||||
"firstFrame": 0,
|
||||
"fps": 30,
|
||||
"hold": false,
|
||||
"lastFrame": 100000,
|
||||
"loop": true,
|
||||
"running": false,
|
||||
"url": ""
|
||||
},
|
||||
"boundingBox": {
|
||||
"brn": {
|
||||
"x": -0.04381517320871353,
|
||||
"y": 0.20789726078510284,
|
||||
"z": -0.0394962802529335
|
||||
},
|
||||
"center": {
|
||||
"x": -1.9073486328125e-06,
|
||||
"y": 0.2300434112548828,
|
||||
"z": 1.2159347534179688e-05
|
||||
},
|
||||
"dimensions": {
|
||||
"x": 0.08762653172016144,
|
||||
"y": 0.04429228603839874,
|
||||
"z": 0.07901687920093536
|
||||
},
|
||||
"tfl": {
|
||||
"x": 0.043811358511447906,
|
||||
"y": 0.2521895468235016,
|
||||
"z": 0.03952059894800186
|
||||
}
|
||||
},
|
||||
"canCastShadow": true,
|
||||
"certificateID": "",
|
||||
"clientOnly": true,
|
||||
"cloneAvatarEntity": false,
|
||||
"cloneDynamic": false,
|
||||
"cloneLifetime": 300,
|
||||
"cloneLimit": 0,
|
||||
"cloneOriginID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"cloneable": false,
|
||||
"collidesWith": "",
|
||||
"collisionMask": 0,
|
||||
"collisionSoundURL": "",
|
||||
"collisionless": false,
|
||||
"collisionsWillMove": false,
|
||||
"compoundShapeURL": "",
|
||||
"created": "2018-06-06T17:25:42Z",
|
||||
"created": "2018-07-26T23:56:46Z",
|
||||
"damping": 0.39346998929977417,
|
||||
"density": 1000,
|
||||
"description": "",
|
||||
"dimensions": {
|
||||
"blue": 0.07229919731616974,
|
||||
"green": 0.06644226610660553,
|
||||
"red": 0.03022606298327446,
|
||||
"x": 0.03022606298327446,
|
||||
"y": 0.06644226610660553,
|
||||
"z": 0.07229919731616974
|
||||
|
@ -633,12 +476,15 @@
|
|||
"entityInstanceNumber": 0,
|
||||
"friction": 0.5,
|
||||
"gravity": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"href": "",
|
||||
"id": "{d018c6ea-b2f4-441e-85e1-d17373ae6f34}",
|
||||
"id": "{03053239-bb37-4c51-a013-a1772baaeed5}",
|
||||
"ignoreForCollisions": false,
|
||||
"itemArtist": "jyoum",
|
||||
"itemCategories": "Wearables",
|
||||
|
@ -653,51 +499,66 @@
|
|||
],
|
||||
"jointTranslationsSet": [
|
||||
],
|
||||
"lastEdited": 1528306032505220,
|
||||
"lastEditedBy": "{b46f9c9e-4cd3-4964-96d6-cf3954abb908}",
|
||||
"lastEdited": 1532649569894305,
|
||||
"lastEditedBy": "{042ac463-7879-40f0-8126-e2e56c4345ca}",
|
||||
"lifetime": -1,
|
||||
"limitedRun": 4294967295,
|
||||
"localPosition": {
|
||||
"x": -1.9073486328125e-06,
|
||||
"y": 0.2300434112548828,
|
||||
"z": 1.2159347534179688e-05
|
||||
"blue": 0,
|
||||
"green": 0.23000000417232513,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0.23000000417232513,
|
||||
"z": 0
|
||||
},
|
||||
"localRotation": {
|
||||
"w": 0.5910987257957458,
|
||||
"x": -0.48726412653923035,
|
||||
"y": -0.4088631868362427,
|
||||
"z": 0.49599069356918335
|
||||
"w": 0.5910986065864563,
|
||||
"x": -0.48726415634155273,
|
||||
"y": -0.4088630974292755,
|
||||
"z": 0.49599072337150574
|
||||
},
|
||||
"locked": false,
|
||||
"marketplaceID": "0685794d-fddb-4bad-a608-6d7789ceda90",
|
||||
"modelURL": "http://mpassets.highfidelity.com/0685794d-fddb-4bad-a608-6d7789ceda90-v1/ScifiWatch.fbx",
|
||||
"name": "Scifi Watch by Jimi",
|
||||
"naturalDimensions": {
|
||||
"blue": 0.055614765733480453,
|
||||
"green": 0.0511094331741333,
|
||||
"red": 0.023250818252563477,
|
||||
"x": 0.023250818252563477,
|
||||
"y": 0.0511094331741333,
|
||||
"z": 0.055614765733480453
|
||||
},
|
||||
"naturalPosition": {
|
||||
"blue": -0.06031447649002075,
|
||||
"green": 1.4500460624694824,
|
||||
"red": 0.6493338942527771,
|
||||
"x": 0.6493338942527771,
|
||||
"y": 1.4500460624694824,
|
||||
"z": -0.06031447649002075
|
||||
},
|
||||
"originalTextures": "{\n \"file4\": \"http://mpassets.highfidelity.com/0685794d-fddb-4bad-a608-6d7789ceda90-v1/ScifiWatch.fbx/ScifiWatch/texture/lambert1_Base_Color.png\",\n \"file5\": \"http://mpassets.highfidelity.com/0685794d-fddb-4bad-a608-6d7789ceda90-v1/ScifiWatch.fbx/ScifiWatch/texture/lambert1_Normal_OpenGL.png\",\n \"file6\": \"http://mpassets.highfidelity.com/0685794d-fddb-4bad-a608-6d7789ceda90-v1/ScifiWatch.fbx/ScifiWatch/texture/lambert1_Metallic.png\",\n \"file7\": \"http://mpassets.highfidelity.com/0685794d-fddb-4bad-a608-6d7789ceda90-v1/ScifiWatch.fbx/ScifiWatch/texture/lambert1_Roughness.png\",\n \"file8\": \"http://mpassets.highfidelity.com/0685794d-fddb-4bad-a608-6d7789ceda90-v1/ScifiWatch.fbx/ScifiWatch/texture/lambert1_Emissive.png\"\n}\n",
|
||||
"owningAvatarID": "{4c770def-4abb-40c6-91a1-88da5247b2db}",
|
||||
"parentID": "{4c770def-4abb-40c6-91a1-88da5247b2db}",
|
||||
"owningAvatarID": "{042ac463-7879-40f0-8126-e2e56c4345ca}",
|
||||
"parentID": "{042ac463-7879-40f0-8126-e2e56c4345ca}",
|
||||
"parentJointIndex": 16,
|
||||
"position": {
|
||||
"x": -1.9073486328125e-06,
|
||||
"y": 0.2300434112548828,
|
||||
"z": 1.2159347534179688e-05
|
||||
"blue": 0,
|
||||
"green": 0.23000000417232513,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0.23000000417232513,
|
||||
"z": 0
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 0.3082179129123688,
|
||||
"x": -0.19203892350196838,
|
||||
"y": -10.429610252380371,
|
||||
"z": -0.4076632857322693
|
||||
"x": 495.7716979980469,
|
||||
"y": 498.345703125,
|
||||
"z": 498.52044677734375
|
||||
},
|
||||
"registrationPoint": {
|
||||
"blue": 0.5,
|
||||
"green": 0.5,
|
||||
"red": 0.5,
|
||||
"x": 0.5,
|
||||
"y": 0.5,
|
||||
"z": 0.5
|
||||
|
@ -712,10 +573,10 @@
|
|||
},
|
||||
"restitution": 0.5,
|
||||
"rotation": {
|
||||
"w": 0.5910987257957458,
|
||||
"x": -0.48726412653923035,
|
||||
"y": -0.4088631868362427,
|
||||
"z": 0.49599069356918335
|
||||
"w": 0.5910986065864563,
|
||||
"x": -0.48726415634155273,
|
||||
"y": -0.4088630974292755,
|
||||
"z": 0.49599072337150574
|
||||
},
|
||||
"script": "",
|
||||
"scriptTimestamp": 0,
|
||||
|
@ -726,6 +587,229 @@
|
|||
"type": "Model",
|
||||
"userData": "{\"Attachment\":{\"action\":\"attach\",\"joint\":\"[LR]ForeArm\",\"attached\":false,\"options\":{\"translation\":{\"x\":0,\"y\":0,\"z\":0},\"scale\":1}},\"grabbableKey\":{\"cloneable\":false,\"grabbable\":true}}",
|
||||
"velocity": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"visible": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"properties": {
|
||||
"acceleration": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"actionData": "",
|
||||
"age": 308.8044128417969,
|
||||
"ageAsText": "0 hours 5 minutes 8 seconds",
|
||||
"angularDamping": 0.39346998929977417,
|
||||
"angularVelocity": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"animation": {
|
||||
"allowTranslation": true,
|
||||
"currentFrame": 0,
|
||||
"firstFrame": 0,
|
||||
"fps": 30,
|
||||
"hold": false,
|
||||
"lastFrame": 100000,
|
||||
"loop": true,
|
||||
"running": false,
|
||||
"url": ""
|
||||
},
|
||||
"boundingBox": {
|
||||
"brn": {
|
||||
"blue": -0.2340194433927536,
|
||||
"green": -0.07067721337080002,
|
||||
"red": -0.17002610862255096,
|
||||
"x": -0.17002610862255096,
|
||||
"y": -0.07067721337080002,
|
||||
"z": -0.2340194433927536
|
||||
},
|
||||
"center": {
|
||||
"blue": -0.039825439453125,
|
||||
"green": 0.02001953125,
|
||||
"red": 0.0001678466796875,
|
||||
"x": 0.0001678466796875,
|
||||
"y": 0.02001953125,
|
||||
"z": -0.039825439453125
|
||||
},
|
||||
"dimensions": {
|
||||
"blue": 0.3883880078792572,
|
||||
"green": 0.18139348924160004,
|
||||
"red": 0.34038791060447693,
|
||||
"x": 0.34038791060447693,
|
||||
"y": 0.18139348924160004,
|
||||
"z": 0.3883880078792572
|
||||
},
|
||||
"tfl": {
|
||||
"blue": 0.1543685644865036,
|
||||
"green": 0.11071627587080002,
|
||||
"red": 0.17036180198192596,
|
||||
"x": 0.17036180198192596,
|
||||
"y": 0.11071627587080002,
|
||||
"z": 0.1543685644865036
|
||||
}
|
||||
},
|
||||
"canCastShadow": true,
|
||||
"certificateID": "",
|
||||
"clientOnly": true,
|
||||
"cloneAvatarEntity": false,
|
||||
"cloneDynamic": false,
|
||||
"cloneLifetime": 300,
|
||||
"cloneLimit": 0,
|
||||
"cloneOriginID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"cloneable": false,
|
||||
"collidesWith": "",
|
||||
"collisionMask": 0,
|
||||
"collisionSoundURL": "",
|
||||
"collisionless": false,
|
||||
"collisionsWillMove": false,
|
||||
"compoundShapeURL": "",
|
||||
"created": "2018-07-26T23:56:46Z",
|
||||
"damping": 0.39346998929977417,
|
||||
"density": 1000,
|
||||
"description": "",
|
||||
"dimensions": {
|
||||
"blue": 0.38838762044906616,
|
||||
"green": 0.16981728374958038,
|
||||
"red": 0.33466479182243347,
|
||||
"x": 0.33466479182243347,
|
||||
"y": 0.16981728374958038,
|
||||
"z": 0.38838762044906616
|
||||
},
|
||||
"dynamic": false,
|
||||
"editionNumber": 18,
|
||||
"entityInstanceNumber": 0,
|
||||
"friction": 0.5,
|
||||
"gravity": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"href": "",
|
||||
"id": "{1bf231ce-3913-4c53-be3c-b1f4094dac51}",
|
||||
"ignoreForCollisions": false,
|
||||
"itemArtist": "jyoum",
|
||||
"itemCategories": "Wearables",
|
||||
"itemDescription": "A stylish and classic piece of headwear for your avatar.",
|
||||
"itemLicense": "",
|
||||
"itemName": "Fedora",
|
||||
"jointRotations": [
|
||||
],
|
||||
"jointRotationsSet": [
|
||||
],
|
||||
"jointTranslations": [
|
||||
],
|
||||
"jointTranslationsSet": [
|
||||
],
|
||||
"lastEdited": 1532649698129709,
|
||||
"lastEditedBy": "{042ac463-7879-40f0-8126-e2e56c4345ca}",
|
||||
"lifetime": -1,
|
||||
"limitedRun": 4294967295,
|
||||
"localPosition": {
|
||||
"blue": -0.039825439453125,
|
||||
"green": 0.02001953125,
|
||||
"red": 0.0001678466796875,
|
||||
"x": 0.0001678466796875,
|
||||
"y": 0.02001953125,
|
||||
"z": -0.039825439453125
|
||||
},
|
||||
"localRotation": {
|
||||
"w": 0.9998477101325989,
|
||||
"x": -9.898545982878204e-09,
|
||||
"y": 5.670873406415922e-07,
|
||||
"z": 0.017452405765652657
|
||||
},
|
||||
"locked": false,
|
||||
"marketplaceID": "11c4208d-15d7-4449-9758-a08da6dbd3dc",
|
||||
"modelURL": "http://mpassets.highfidelity.com/11c4208d-15d7-4449-9758-a08da6dbd3dc-v1/Fedora.fbx",
|
||||
"name": "",
|
||||
"naturalDimensions": {
|
||||
"blue": 0.320981502532959,
|
||||
"green": 0.14034485816955566,
|
||||
"red": 0.2765824794769287,
|
||||
"x": 0.2765824794769287,
|
||||
"y": 0.14034485816955566,
|
||||
"z": 0.320981502532959
|
||||
},
|
||||
"naturalPosition": {
|
||||
"blue": 0.022502630949020386,
|
||||
"green": 1.7460365295410156,
|
||||
"red": 0.000143393874168396,
|
||||
"x": 0.000143393874168396,
|
||||
"y": 1.7460365295410156,
|
||||
"z": 0.022502630949020386
|
||||
},
|
||||
"originalTextures": "{\n \"file5\": \"http://mpassets.highfidelity.com/11c4208d-15d7-4449-9758-a08da6dbd3dc-v1/Fedora.fbx/Texture/Fedora_Hat1_Base_Color.png\",\n \"file7\": \"http://mpassets.highfidelity.com/11c4208d-15d7-4449-9758-a08da6dbd3dc-v1/Fedora.fbx/Texture/Fedora_Hat1_Roughness.png\"\n}\n",
|
||||
"owningAvatarID": "{042ac463-7879-40f0-8126-e2e56c4345ca}",
|
||||
"parentID": "{042ac463-7879-40f0-8126-e2e56c4345ca}",
|
||||
"parentJointIndex": 66,
|
||||
"position": {
|
||||
"blue": -0.039825439453125,
|
||||
"green": 0.02001953125,
|
||||
"red": 0.0001678466796875,
|
||||
"x": 0.0001678466796875,
|
||||
"y": 0.02001953125,
|
||||
"z": -0.039825439453125
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 1.6202316284179688,
|
||||
"x": 495.21051025390625,
|
||||
"y": 498.5577697753906,
|
||||
"z": 497.6370849609375
|
||||
},
|
||||
"registrationPoint": {
|
||||
"blue": 0.5,
|
||||
"green": 0.5,
|
||||
"red": 0.5,
|
||||
"x": 0.5,
|
||||
"y": 0.5,
|
||||
"z": 0.5
|
||||
},
|
||||
"relayParentJoints": false,
|
||||
"renderInfo": {
|
||||
"drawCalls": 1,
|
||||
"hasTransparent": false,
|
||||
"texturesCount": 2,
|
||||
"texturesSize": 327680,
|
||||
"verticesCount": 719
|
||||
},
|
||||
"restitution": 0.5,
|
||||
"rotation": {
|
||||
"w": 0.9998477101325989,
|
||||
"x": -9.898545982878204e-09,
|
||||
"y": 5.670873406415922e-07,
|
||||
"z": 0.017452405765652657
|
||||
},
|
||||
"script": "",
|
||||
"scriptTimestamp": 0,
|
||||
"serverScripts": "",
|
||||
"shapeType": "box",
|
||||
"staticCertificateVersion": 0,
|
||||
"textures": "",
|
||||
"type": "Model",
|
||||
"userData": "{\"Attachment\":{\"action\":\"attach\",\"joint\":\"HeadTop_End\",\"attached\":false,\"options\":{\"translation\":{\"x\":0,\"y\":0,\"z\":0},\"scale\":1}},\"grabbableKey\":{\"cloneable\":false,\"grabbable\":true}}",
|
||||
"velocity": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
|
|
|
@ -1,21 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 22.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 100 100.8" style="enable-background:new 0 0 100 100.8;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<path class="st0" d="M26.7,83.9c7.3,1.2,14.8,1.8,22.1,1.8c0.4,0,0.8,0,1.2,0c7.8-0.1,15.6-0.8,23.4-2.2l0,0
|
||||
c5.7-1.1,11.3-6.6,12.5-12.3C87.3,64.2,88,57,88,50s-0.7-14.2-2.1-21.2c-1.2-5.6-6.8-11.1-12.5-12.2c-7.7-1.4-15.6-2.2-23.4-2.2
|
||||
c-7.7-0.1-15.6,0.5-23.4,1.8c-5.7,1-11.4,6.5-12.6,12.3c-1.4,7.2-2.1,14.4-2.1,21.6s0.7,14.4,2.1,21.7
|
||||
C15.3,77.4,20.9,82.9,26.7,83.9z M20.9,29.8c0.6-2.9,4-6.3,6.9-6.8c7-1.1,14-1.7,21-1.7c0.4,0,0.8,0,1.2,0
|
||||
c7.4,0.1,14.8,0.8,22.1,2.1c2.9,0.6,6.4,3.9,6.9,6.7c1.3,6.6,1.9,13.3,1.9,19.9c0,6.6-0.6,13.3-1.9,19.8c-0.6,2.8-4,6.2-6.9,6.8
|
||||
c-7.3,1.3-14.8,2.1-22.1,2.1c-7.4,0.1-14.8-0.5-22.1-1.7c-2.9-0.5-6.3-3.9-6.9-6.7c-1.3-6.7-2-13.5-2-20.3
|
||||
C19,43.3,19.6,36.4,20.9,29.8z"/>
|
||||
<path class="st0" d="M32.3,61.4c-0.5,1.3-0.1,2.8,0.9,3.8c0.3,0.3,7.2,6.6,15.9,6.6c0.8,0,1.7-0.1,2.6-0.2
|
||||
c9.8-1.5,15.5-11.1,15.8-11.5c0.7-1.2,0.6-2.8-0.2-3.9c-0.9-1.1-2.3-1.6-3.7-1.3c-9.2,2.5-18.6,3.9-28.1,4.2
|
||||
C34,59.1,32.8,60,32.3,61.4z"/>
|
||||
<circle class="st0" cx="36.5" cy="42.8" r="9"/>
|
||||
<path class="st0" d="M61.4,44.1h6.1c1.9,0,3.3-1.5,3.3-3.3c0-1.9-1.5-3.3-3.3-3.3h-6.1c-1.9,0-3.3,1.5-3.3,3.3
|
||||
C58.1,42.7,59.6,44.1,61.4,44.1z"/>
|
||||
</svg>
|
(removed image, 1.5 KiB)
interface/resources/icons/tablet-icons/emote-a.svg (new file, 30 lines)
|
@ -0,0 +1,30 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 22.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
|
||||
<g>
|
||||
<path d="M39.8,10.7c0.8,0.1,1.5,0.7,1.8,1.4s0.2,1.6-0.2,2.2c-0.3,0.4-0.2,1,0.3,1.4c0.2,0.1,0.4,0.1,0.5,0.1
|
||||
c0.3,0,0.6-0.1,0.6-0.2l0.1-0.1l0.1-0.1c0.8-1.2,1-2.8,0.4-4.1C42.8,10,41.6,9,40.1,8.8c-0.3-0.1-0.5,0-0.7,0.1
|
||||
C39.2,9,39,9.3,39,9.5C38.8,10.1,39.2,10.6,39.8,10.7z"/>
|
||||
<path d="M42.5,8.1c1.2,0.2,2.2,1,2.7,2.1c0.4,1.1,0.3,2.4-0.3,3.3c-0.2,0.2-0.2,0.5-0.2,0.7s0.2,0.5,0.5,0.7
|
||||
c0.2,0.1,0.4,0.1,0.5,0.1c0.4,0,0.7-0.2,0.8-0.5c1-1.5,1.2-3.4,0.5-5.1s-2.3-3-4.2-3.3c-0.5-0.1-1,0.2-1.1,0.8
|
||||
C41.6,7.5,41.9,8,42.5,8.1z M42.1,7L42.1,7L42.1,7L42.1,7z"/>
|
||||
<path d="M6.7,14.3c-0.4-0.6-0.5-1.5-0.2-2.2c0.3-0.8,1-1.3,1.9-1.4c0.6-0.1,0.9-0.6,0.8-1.1C9.1,9.3,9,9,8.7,8.9
|
||||
C8.5,8.8,8.3,8.7,8,8.8C6.5,9,5.3,10,4.7,11.4s-0.4,2.9,0.4,4.1l0.1,0.1l0.1,0.1c0.1,0,0.4,0.2,0.6,0.2c0.1,0,0.3,0,0.6-0.2
|
||||
C6.9,15.4,7,14.8,6.7,14.3z"/>
|
||||
<path d="M5.6,8.1C5.9,8,6.1,7.9,6.3,7.7C6.5,7.5,6.5,7.2,6.5,7C6.4,6.7,6.2,6.4,6,6.3C5.8,6.1,5.6,6.1,5.3,6.1
|
||||
C3.5,6.5,1.9,7.7,1.1,9.5c-0.7,1.7-0.5,3.6,0.5,5.1c0.1,0.3,0.4,0.5,0.8,0.5c0.1,0,0.3,0,0.6-0.2c0.2-0.2,0.4-0.4,0.4-0.6
|
||||
c0-0.3,0-0.5-0.2-0.7c-0.6-1-0.8-2.2-0.3-3.3C3.4,9.1,4.4,8.3,5.6,8.1z"/>
|
||||
<path d="M48.8,25.1c-0.7-0.7-1.8-0.7-2.6-0.2c-0.4,0.3-0.7,0.5-1,0.8L44.9,26c-0.3,0.3-0.6,0.5-0.9,0.8c-0.6,0.6-1.2,1.1-1.9,1.6
|
||||
c-1.2,0.8-2.7,1-4.1,0.5c-1.3-0.5-2.3-1.6-2.6-3c-0.1-0.6-0.2-1.3-0.2-2c-0.2-2.8-0.3-5.6-0.5-8.5l-0.3-5.2c0-0.7-0.1-1.4-0.2-2.2
|
||||
c-0.1-0.8-0.5-1.5-1.1-1.8s-1.2-0.3-1.8,0c-1,0.5-1.1,1.6-1.1,2.1c-0.1,1.4-0.2,2.8-0.2,4.2c0,0.9-0.1,1.8-0.1,2.7
|
||||
c-0.1,2.4-0.3,4.8-0.4,7.2v0.2c0,0.8-0.1,0.9-0.5,0.9s-0.4-0.1-0.6-0.9L27,16.4c-0.8-3.3-1.5-6.7-2.3-10c-0.3-1.3-1.2-1.9-2.4-1.7
|
||||
c-1.1,0.2-1.7,1.2-1.6,2.4C20.7,8,20.9,9,21,9.9c0.1,0.6,0.2,1.3,0.2,1.9c0.5,3.8,0.9,7.6,1.4,11.3l0.1,1.1
|
||||
c0.1,0.8-0.1,0.9-0.4,0.9c-0.3,0.1-0.4,0.1-0.7-0.6L20,20.7c-1.2-2.9-2.5-5.7-3.7-8.6c-0.8-1.9-2-1.9-2.8-1.5
|
||||
c-0.6,0.2-1.5,0.9-0.9,2.9c0.3,1,0.6,1.9,0.9,2.9c0.3,0.9,0.5,1.8,0.8,2.7c0.5,1.8,1,3.5,1.6,5.3l1.1,3.7c0.2,0.6,0,0.7-0.2,0.8
|
||||
c-0.1,0.1-0.3,0.1-0.7-0.4c-0.1-0.1-0.2-0.2-0.2-0.3l-3.8-5.7c-0.5-0.7-0.9-1.4-1.4-2.1c-0.6-0.8-1.4-1-2.3-0.6s-1.3,1.1-1.2,2
|
||||
c0.1,0.5,0.3,0.9,0.4,1.2c0.8,1.6,1.6,3.2,2.4,4.8c2.1,4.2,4.2,8.5,6.4,12.8c2.3,4.5,6.2,7,11.5,7.3l0,0l0,0
|
||||
c4.7-0.2,8.2-1.9,10.7-5.2c2.1-2.8,4.2-5.7,6.2-8.5c0.9-1.3,1.8-2.5,2.7-3.7c0.2-0.3,0.4-0.5,0.6-0.8c0.4-0.6,0.8-1.1,1.2-1.7
|
||||
C49.5,26.7,49.5,25.8,48.8,25.1z"/>
|
||||
</g>
|
||||
</svg>
|
(new image, 2.7 KiB)
interface/resources/icons/tablet-icons/emote-i.svg (new file, 33 lines)
|
@ -0,0 +1,33 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 22.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<g>
|
||||
<path class="st0" d="M39.8,10.7c0.8,0.1,1.5,0.7,1.8,1.4s0.2,1.6-0.2,2.2c-0.3,0.4-0.2,1,0.3,1.4c0.2,0.1,0.4,0.1,0.5,0.1
|
||||
c0.3,0,0.6-0.1,0.6-0.2l0.1-0.1l0.1-0.1c0.8-1.2,1-2.8,0.4-4.1C42.8,10,41.6,9,40.1,8.8c-0.3-0.1-0.5,0-0.7,0.1
|
||||
C39.2,9,39,9.3,39,9.5C38.8,10.1,39.2,10.6,39.8,10.7z"/>
|
||||
<path class="st0" d="M42.5,8.1c1.2,0.2,2.2,1,2.7,2.1c0.4,1.1,0.3,2.4-0.3,3.3c-0.2,0.2-0.2,0.5-0.2,0.7s0.2,0.5,0.5,0.7
|
||||
c0.2,0.1,0.4,0.1,0.5,0.1c0.4,0,0.7-0.2,0.8-0.5c1-1.5,1.2-3.4,0.5-5.1s-2.3-3-4.2-3.3c-0.5-0.1-1,0.2-1.1,0.8
|
||||
C41.6,7.5,41.9,8,42.5,8.1z M42.1,7L42.1,7L42.1,7L42.1,7z"/>
|
||||
<path class="st0" d="M6.7,14.3c-0.4-0.6-0.5-1.5-0.2-2.2c0.3-0.8,1-1.3,1.9-1.4c0.6-0.1,0.9-0.6,0.8-1.1C9.1,9.3,9,9,8.7,8.9
|
||||
C8.5,8.8,8.3,8.7,8,8.8C6.5,9,5.3,10,4.7,11.4s-0.4,2.9,0.4,4.1l0.1,0.1l0.1,0.1c0.1,0,0.4,0.2,0.6,0.2c0.1,0,0.3,0,0.6-0.2
|
||||
C6.9,15.4,7,14.8,6.7,14.3z"/>
|
||||
<path class="st0" d="M5.6,8.1C5.9,8,6.1,7.9,6.3,7.7C6.5,7.5,6.5,7.2,6.5,7C6.4,6.7,6.2,6.4,6,6.3C5.8,6.1,5.6,6.1,5.3,6.1
|
||||
C3.5,6.5,1.9,7.7,1.1,9.5c-0.7,1.7-0.5,3.6,0.5,5.1c0.1,0.3,0.4,0.5,0.8,0.5c0.1,0,0.3,0,0.6-0.2c0.2-0.2,0.4-0.4,0.4-0.6
|
||||
c0-0.3,0-0.5-0.2-0.7c-0.6-1-0.8-2.2-0.3-3.3C3.4,9.1,4.4,8.3,5.6,8.1z"/>
|
||||
<path class="st0" d="M48.8,25.1c-0.7-0.7-1.8-0.7-2.6-0.2c-0.4,0.3-0.7,0.5-1,0.8L44.9,26c-0.3,0.3-0.6,0.5-0.9,0.8
|
||||
c-0.6,0.6-1.2,1.1-1.9,1.6c-1.2,0.8-2.7,1-4.1,0.5c-1.3-0.5-2.3-1.6-2.6-3c-0.1-0.6-0.2-1.3-0.2-2c-0.2-2.8-0.3-5.6-0.5-8.5
|
||||
l-0.3-5.2c0-0.7-0.1-1.4-0.2-2.2c-0.1-0.8-0.5-1.5-1.1-1.8s-1.2-0.3-1.8,0c-1,0.5-1.1,1.6-1.1,2.1c-0.1,1.4-0.2,2.8-0.2,4.2
|
||||
c0,0.9-0.1,1.8-0.1,2.7c-0.1,2.4-0.3,4.8-0.4,7.2v0.2c0,0.8-0.1,0.9-0.5,0.9s-0.4-0.1-0.6-0.9L27,16.4c-0.8-3.3-1.5-6.7-2.3-10
|
||||
c-0.3-1.3-1.2-1.9-2.4-1.7c-1.1,0.2-1.7,1.2-1.6,2.4C20.7,8,20.9,9,21,9.9c0.1,0.6,0.2,1.3,0.2,1.9c0.5,3.8,0.9,7.6,1.4,11.3
|
||||
l0.1,1.1c0.1,0.8-0.1,0.9-0.4,0.9c-0.3,0.1-0.4,0.1-0.7-0.6L20,20.7c-1.2-2.9-2.5-5.7-3.7-8.6c-0.8-1.9-2-1.9-2.8-1.5
|
||||
c-0.6,0.2-1.5,0.9-0.9,2.9c0.3,1,0.6,1.9,0.9,2.9c0.3,0.9,0.5,1.8,0.8,2.7c0.5,1.8,1,3.5,1.6,5.3l1.1,3.7c0.2,0.6,0,0.7-0.2,0.8
|
||||
c-0.1,0.1-0.3,0.1-0.7-0.4c-0.1-0.1-0.2-0.2-0.2-0.3l-3.8-5.7c-0.5-0.7-0.9-1.4-1.4-2.1c-0.6-0.8-1.4-1-2.3-0.6s-1.3,1.1-1.2,2
|
||||
c0.1,0.5,0.3,0.9,0.4,1.2c0.8,1.6,1.6,3.2,2.4,4.8c2.1,4.2,4.2,8.5,6.4,12.8c2.3,4.5,6.2,7,11.5,7.3l0,0l0,0
|
||||
c4.7-0.2,8.2-1.9,10.7-5.2c2.1-2.8,4.2-5.7,6.2-8.5c0.9-1.3,1.8-2.5,2.7-3.7c0.2-0.3,0.4-0.5,0.6-0.8c0.4-0.6,0.8-1.1,1.2-1.7
|
||||
C49.5,26.7,49.5,25.8,48.8,25.1z"/>
|
||||
</g>
|
||||
</svg>
|
(new image, 2.8 KiB)
|
@ -471,7 +471,6 @@ TabletModalWindow {
|
|||
bottomMargin: hifi.dimensions.contentSpacing.y + currentSelection.controlHeight - currentSelection.height
|
||||
}
|
||||
headerVisible: !selectDirectory
|
||||
onClicked: navigateToRow(row);
|
||||
onDoubleClicked: navigateToRow(row);
|
||||
focus: true
|
||||
Keys.onReturnPressed: navigateToCurrentRow();
|
||||
|
|
|
@ -186,6 +186,8 @@ Windows.ScrollingWindow {
|
|||
return;
|
||||
}
|
||||
|
||||
var grabbable = MenuInterface.isOptionChecked("Create Entities As Grabbable (except Zones, Particles, and Lights)");
|
||||
|
||||
if (defaultURL.endsWith(".jpg") || defaultURL.endsWith(".png")) {
|
||||
var name = assetProxyModel.data(treeView.selection.currentIndex);
|
||||
var modelURL = "https://hifi-content.s3.amazonaws.com/DomainContent/production/default-image-model.fbx";
|
||||
|
@ -195,7 +197,7 @@ Windows.ScrollingWindow {
|
|||
var collisionless = true;
|
||||
var position = Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getForward(MyAvatar.orientation)));
|
||||
var gravity = Vec3.multiply(Vec3.fromPolar(Math.PI / 2, 0), 0);
|
||||
Entities.addModelEntity(name, modelURL, textures, shapeType, dynamic, collisionless, position, gravity);
|
||||
Entities.addModelEntity(name, modelURL, textures, shapeType, dynamic, collisionless, grabbable, position, gravity);
|
||||
} else {
|
||||
var SHAPE_TYPE_NONE = 0;
|
||||
var SHAPE_TYPE_SIMPLE_HULL = 1;
|
||||
|
@ -281,7 +283,7 @@ Windows.ScrollingWindow {
|
|||
print("Asset browser - adding asset " + url + " (" + name + ") to world.");
|
||||
|
||||
// Entities.addEntity doesn't work from QML, so we use this.
|
||||
Entities.addModelEntity(name, url, "", shapeType, dynamic, collisionless, addPosition, gravity);
|
||||
Entities.addModelEntity(name, url, "", shapeType, dynamic, collisionless, grabbable, addPosition, gravity);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -41,6 +41,8 @@ Column {
|
|||
property var goFunction: null;
|
||||
property var http: null;
|
||||
|
||||
property bool autoScrollTimerEnabled: false;
|
||||
|
||||
HifiConstants { id: hifi }
|
||||
Component.onCompleted: suggestions.getFirstPage();
|
||||
HifiModels.PSFListModel {
|
||||
|
@ -88,7 +90,9 @@ Column {
|
|||
online_users: data.details.connections || data.details.concurrency || 0,
|
||||
// Server currently doesn't give isStacked (undefined). Could give bool.
|
||||
drillDownToPlace: data.is_stacked || (data.action === 'concurrency'),
|
||||
isStacked: !!data.is_stacked
|
||||
isStacked: !!data.is_stacked,
|
||||
|
||||
time_before_autoscroll_ms: data.hold_time || 3000
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -102,9 +106,12 @@ Column {
|
|||
id: scroll;
|
||||
model: suggestions;
|
||||
orientation: ListView.Horizontal;
|
||||
highlightFollowsCurrentItem: false
|
||||
highlightMoveDuration: -1;
|
||||
highlightMoveVelocity: -1;
|
||||
highlightFollowsCurrentItem: true;
|
||||
preferredHighlightBegin: 0;
|
||||
preferredHighlightEnd: cardWidth;
|
||||
highlightRangeMode: ListView.StrictlyEnforceRange;
|
||||
highlightMoveDuration: 800;
|
||||
highlightMoveVelocity: 1;
|
||||
currentIndex: -1;
|
||||
|
||||
spacing: 12;
|
||||
|
@ -134,8 +141,49 @@ Column {
|
|||
textSizeSmall: root.textSizeSmall;
|
||||
stackShadowNarrowing: root.stackShadowNarrowing;
|
||||
shadowHeight: root.stackedCardShadowHeight;
|
||||
hoverThunk: function () { hovered = true }
|
||||
unhoverThunk: function () { hovered = false }
|
||||
hoverThunk: function () {
|
||||
hovered = true;
|
||||
if(root.autoScrollTimerEnabled) {
|
||||
autoScrollTimer.stop();
|
||||
}
|
||||
}
|
||||
unhoverThunk: function () {
|
||||
hovered = false;
|
||||
if(root.autoScrollTimerEnabled) {
|
||||
autoScrollTimer.start();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
onCountChanged: {
|
||||
if (scroll.currentIndex === -1 && scroll.count > 0 && root.autoScrollTimerEnabled) {
|
||||
scroll.currentIndex = 0;
|
||||
autoScrollTimer.interval = suggestions.get(scroll.currentIndex).time_before_autoscroll_ms;
|
||||
autoScrollTimer.start();
|
||||
}
|
||||
}
|
||||
|
||||
onCurrentIndexChanged: {
|
||||
if (root.autoScrollTimerEnabled) {
|
||||
autoScrollTimer.interval = suggestions.get(scroll.currentIndex).time_before_autoscroll_ms;
|
||||
autoScrollTimer.start();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Timer {
|
||||
id: autoScrollTimer;
|
||||
interval: 3000;
|
||||
running: false;
|
||||
repeat: false;
|
||||
onTriggered: {
|
||||
if (scroll.currentIndex !== -1) {
|
||||
if (scroll.currentIndex === scroll.count - 1) {
|
||||
scroll.currentIndex = 0;
|
||||
} else {
|
||||
scroll.currentIndex++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
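The QML Feed changes above drive the featured-card carousel with a one-shot Timer whose interval comes from the current card's time_before_autoscroll_ms (falling back to 3000 ms), pausing while a card is hovered and restarting when the index advances. A plain Qt/C++ sketch of the same idea; the AutoScroller class and its card list are invented for this example:

```cpp
// Plain Qt sketch of the auto-advance behaviour added to the QML Feed above.
#include <QCoreApplication>
#include <QTimer>
#include <QVector>
#include <QDebug>
#include <utility>

class AutoScroller : public QObject {
public:
    explicit AutoScroller(QVector<int> holdTimesMs, QObject* parent = nullptr)
        : QObject(parent), _holdTimesMs(std::move(holdTimesMs)) {
        _timer.setSingleShot(true);                       // one shot per card, like the QML Timer
        connect(&_timer, &QTimer::timeout, this, &AutoScroller::advance);
        scheduleCurrent();
    }

    void setHovered(bool hovered) {                       // hovering pauses the carousel
        if (hovered) { _timer.stop(); } else { scheduleCurrent(); }
    }

private:
    void scheduleCurrent() {
        _timer.start(_holdTimesMs.value(_index, 3000));   // per-card hold time, 3 s fallback
    }
    void advance() {
        _index = (_index + 1) % _holdTimesMs.size();      // wrap back to the first card
        qDebug() << "showing card" << _index;
        scheduleCurrent();
    }

    QTimer _timer;
    QVector<int> _holdTimesMs;
    int _index = 0;
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    AutoScroller scroller({ 3000, 5000, 3000 });          // hypothetical per-card hold times
    return app.exec();
}
```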
|
|
|
@ -1046,12 +1046,13 @@ Rectangle {
|
|||
enabled: myData.userName !== "Unknown user" && !userInfoViewer.visible;
|
||||
hoverEnabled: true;
|
||||
onClicked: {
|
||||
// TODO: Change language from "Happening Now" to something else (or remove entirely)
|
||||
popupComboDialog("Set your availability:",
|
||||
availabilityComboBox.availabilityStrings,
|
||||
["Your username will be visible in everyone's 'Nearby' list. Anyone will be able to jump to your location from within the 'Nearby' list.",
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are connected or friends. They'll be able to jump to your location if the domain allows.",
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are friends. They'll be able to jump to your location if the domain allows. You will only receive 'Happening Now' notifications in 'Go To' from friends.",
|
||||
"You will appear offline in the 'Connections' list, and you will not receive 'Happening Now' notifications in 'Go To'."],
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are connected or friends. They'll be able to jump to your location if the domain allows, and you will see 'Snaps' Blasts from them in 'Go To'.",
|
||||
"Your location will be visible in the 'Connections' list only for those with whom you are friends. They'll be able to jump to your location if the domain allows, and you will see 'Snaps' Blasts from them in 'Go To'",
|
||||
"You will appear offline in the 'Connections' list, and you will not receive Snaps Blasts from connections or friends in 'Go To'."],
|
||||
["all", "connections", "friends", "none"]);
|
||||
}
|
||||
onEntered: availabilityComboBox.color = hifi.colors.lightGrayText;
|
||||
|
|
|
@ -41,8 +41,8 @@ MessageBox {
|
|||
popup.button1text = 'CANCEL'
|
||||
popup.titleText = 'Get Wearables'
|
||||
popup.bodyText = 'Buy wearables from <b><a href="app://marketplace">Marketplace.</a></b>' + '<br/>' +
|
||||
'Wear wearables from <b><a href="app://purchases">My Purchases.</a></b>' + '<br/>' + '<br/>' +
|
||||
'Visit “AvatarIsland” to get wearables'
|
||||
'Use wearables in <b><a href="app://purchases">My Purchases.</a></b>' + '<br/>' + '<br/>' +
|
||||
'Visit “AvatarIsland” to get wearables.'
|
||||
|
||||
popup.imageSource = getWearablesUrl;
|
||||
popup.onButton2Clicked = function() {
|
||||
|
@ -102,7 +102,7 @@ MessageBox {
|
|||
popup.titleText = 'Get Avatars'
|
||||
|
||||
popup.bodyText = 'Buy avatars from <b><a href="app://marketplace">Marketplace.</a></b>' + '<br/>' +
|
||||
'Wear avatars from <b><a href="app://purchases">My Purchases.</a></b>' + '<br/>' + '<br/>' +
|
||||
'Wear avatars in <b><a href="app://purchases">My Purchases.</a></b>' + '<br/>' + '<br/>' +
|
||||
'Visit “BodyMart” to get free avatars.'
|
||||
|
||||
popup.imageSource = getAvatarsUrl;
|
||||
|
|
|
@ -186,6 +186,8 @@ Rectangle {
|
|||
return;
|
||||
}
|
||||
|
||||
var grabbable = MenuInterface.isOptionChecked("Create Entities As Grabbable (except Zones, Particles, and Lights)");
|
||||
|
||||
if (defaultURL.endsWith(".jpg") || defaultURL.endsWith(".png")) {
|
||||
var name = assetProxyModel.data(treeView.selection.currentIndex);
|
||||
var modelURL = "https://hifi-content.s3.amazonaws.com/DomainContent/production/default-image-model.fbx";
|
||||
|
@ -195,7 +197,7 @@ Rectangle {
|
|||
var collisionless = true;
|
||||
var position = Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getForward(MyAvatar.orientation)));
|
||||
var gravity = Vec3.multiply(Vec3.fromPolar(Math.PI / 2, 0), 0);
|
||||
Entities.addModelEntity(name, modelURL, textures, shapeType, dynamic, collisionless, position, gravity);
|
||||
Entities.addModelEntity(name, modelURL, textures, shapeType, dynamic, collisionless, grabbable, position, gravity);
|
||||
} else {
|
||||
var SHAPE_TYPE_NONE = 0;
|
||||
var SHAPE_TYPE_SIMPLE_HULL = 1;
|
||||
|
@ -281,7 +283,7 @@ Rectangle {
|
|||
print("Asset browser - adding asset " + url + " (" + name + ") to world.");
|
||||
|
||||
// Entities.addEntity doesn't work from QML, so we use this.
|
||||
Entities.addModelEntity(name, url, "", shapeType, dynamic, collisionless, addPosition, gravity);
|
||||
Entities.addModelEntity(name, url, "", shapeType, dynamic, collisionless, grabbable, addPosition, gravity);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -320,11 +320,12 @@ StackView {
|
|||
width: parent.width;
|
||||
cardWidth: 312 + (2 * 4);
|
||||
cardHeight: 163 + (2 * 4);
|
||||
labelText: 'HAPPENING NOW';
|
||||
labelText: 'FEATURED';
|
||||
actions: 'announcement';
|
||||
filter: addressLine.text;
|
||||
goFunction: goCard;
|
||||
http: http;
|
||||
autoScrollTimerEnabled: true;
|
||||
}
|
||||
Feed {
|
||||
id: places;
|
||||
|
|
|
@ -45,9 +45,7 @@ QString AboutUtil::getQtVersion() const {
|
|||
}
|
||||
|
||||
void AboutUtil::openUrl(const QString& url) const {
|
||||
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system");
|
||||
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
|
||||
|
|
|
@ -1139,8 +1139,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
|
||||
// setup a timer for domain-server check ins
|
||||
QTimer* domainCheckInTimer = new QTimer(this);
|
||||
connect(domainCheckInTimer, &QTimer::timeout, [this, nodeList] {
|
||||
if (!isServerlessMode()) {
|
||||
QWeakPointer<NodeList> nodeListWeak = nodeList;
|
||||
connect(domainCheckInTimer, &QTimer::timeout, [this, nodeListWeak] {
|
||||
auto nodeList = nodeListWeak.lock();
|
||||
if (!isServerlessMode() && nodeList) {
|
||||
nodeList->sendDomainServerCheckIn();
|
||||
}
|
||||
});
|
||||
|
@ -1150,33 +1152,34 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
domainCheckInTimer->deleteLater();
|
||||
});
|
||||
|
||||
{
|
||||
auto audioIO = DependencyManager::get<AudioClient>().data();
|
||||
audioIO->setPositionGetter([] {
|
||||
auto avatarManager = DependencyManager::get<AvatarManager>();
|
||||
auto myAvatar = avatarManager ? avatarManager->getMyAvatar() : nullptr;
|
||||
|
||||
auto audioIO = DependencyManager::get<AudioClient>();
|
||||
audioIO->setPositionGetter([]{
|
||||
auto avatarManager = DependencyManager::get<AvatarManager>();
|
||||
auto myAvatar = avatarManager ? avatarManager->getMyAvatar() : nullptr;
|
||||
return myAvatar ? myAvatar->getPositionForAudio() : Vectors::ZERO;
|
||||
});
|
||||
audioIO->setOrientationGetter([] {
|
||||
auto avatarManager = DependencyManager::get<AvatarManager>();
|
||||
auto myAvatar = avatarManager ? avatarManager->getMyAvatar() : nullptr;
|
||||
|
||||
return myAvatar ? myAvatar->getPositionForAudio() : Vectors::ZERO;
|
||||
});
|
||||
audioIO->setOrientationGetter([]{
|
||||
auto avatarManager = DependencyManager::get<AvatarManager>();
|
||||
auto myAvatar = avatarManager ? avatarManager->getMyAvatar() : nullptr;
|
||||
return myAvatar ? myAvatar->getOrientationForAudio() : Quaternions::IDENTITY;
|
||||
});
|
||||
|
||||
return myAvatar ? myAvatar->getOrientationForAudio() : Quaternions::IDENTITY;
|
||||
});
|
||||
recording::Frame::registerFrameHandler(AudioConstants::getAudioFrameName(), [&audioIO](recording::Frame::ConstPointer frame) {
|
||||
audioIO->handleRecordedAudioInput(frame->data);
|
||||
});
|
||||
|
||||
recording::Frame::registerFrameHandler(AudioConstants::getAudioFrameName(), [=](recording::Frame::ConstPointer frame) {
|
||||
audioIO->handleRecordedAudioInput(frame->data);
|
||||
});
|
||||
|
||||
connect(audioIO.data(), &AudioClient::inputReceived, [](const QByteArray& audio){
|
||||
static auto recorder = DependencyManager::get<recording::Recorder>();
|
||||
if (recorder->isRecording()) {
|
||||
static const recording::FrameType AUDIO_FRAME_TYPE = recording::Frame::registerFrameType(AudioConstants::getAudioFrameName());
|
||||
recorder->recordFrame(AUDIO_FRAME_TYPE, audio);
|
||||
}
|
||||
});
|
||||
audioIO->startThread();
|
||||
connect(audioIO, &AudioClient::inputReceived, [](const QByteArray& audio) {
|
||||
static auto recorder = DependencyManager::get<recording::Recorder>();
|
||||
if (recorder->isRecording()) {
|
||||
static const recording::FrameType AUDIO_FRAME_TYPE = recording::Frame::registerFrameType(AudioConstants::getAudioFrameName());
|
||||
recorder->recordFrame(AUDIO_FRAME_TYPE, audio);
|
||||
}
|
||||
});
|
||||
audioIO->startThread();
|
||||
}
|
||||
|
||||
// Make sure we don't time out during slow operations at startup
|
||||
updateHeartbeat();
|
||||
|
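Several of the Application constructor hunks wrap subsystem wiring in bare { ... } blocks and fetch the DependencyManager::get<AudioClient>() handle inside them, so the shared handle (or its raw .data() pointer) is dropped as soon as the wiring is done instead of living for the rest of the constructor. A minimal illustration of that scoping idea with invented stand-in types:

```cpp
// Minimal illustration of the setup-block scoping used in the hunks above;
// the AudioClientStub type and getAudioClient() accessor are invented here.
#include <memory>
#include <iostream>

struct AudioClientStub {
    void startThread() { std::cout << "audio thread started\n"; }
};

std::shared_ptr<AudioClientStub> getAudioClient() {
    static auto instance = std::make_shared<AudioClientStub>();
    return instance;
}

int main() {
    {
        // The handle lives only for the duration of the setup block, so the
        // rest of main() does not pin an extra reference to the subsystem.
        auto audioIO = getAudioClient();
        audioIO->startThread();
    }   // audioIO released here

    // ... remainder of startup continues without holding the handle ...
    return 0;
}
```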
@ -1275,27 +1278,29 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
// Inititalize sample before registering
|
||||
_sampleSound = DependencyManager::get<SoundCache>()->getSound(PathUtils::resourcesUrl("sounds/sample.wav"));
|
||||
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>().data();
|
||||
scriptEngines->registerScriptInitializer([this](ScriptEnginePointer engine){
|
||||
registerScriptEngineWithApplicationServices(engine);
|
||||
});
|
||||
{
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>().data();
|
||||
scriptEngines->registerScriptInitializer([this](ScriptEnginePointer engine) {
|
||||
registerScriptEngineWithApplicationServices(engine);
|
||||
});
|
||||
|
||||
connect(scriptEngines, &ScriptEngines::scriptCountChanged, scriptEngines, [this] {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
if (scriptEngines->getRunningScripts().isEmpty()) {
|
||||
getMyAvatar()->clearScriptableSettings();
|
||||
}
|
||||
}, Qt::QueuedConnection);
|
||||
connect(scriptEngines, &ScriptEngines::scriptCountChanged, this, [this] {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
if (scriptEngines->getRunningScripts().isEmpty()) {
|
||||
getMyAvatar()->clearScriptableSettings();
|
||||
}
|
||||
}, Qt::QueuedConnection);
|
||||
|
||||
connect(scriptEngines, &ScriptEngines::scriptsReloading, scriptEngines, [this] {
|
||||
getEntities()->reloadEntityScripts();
|
||||
loadAvatarScripts(getMyAvatar()->getScriptUrls());
|
||||
}, Qt::QueuedConnection);
|
||||
connect(scriptEngines, &ScriptEngines::scriptsReloading, this, [this] {
|
||||
getEntities()->reloadEntityScripts();
|
||||
loadAvatarScripts(getMyAvatar()->getScriptUrls());
|
||||
}, Qt::QueuedConnection);
|
||||
|
||||
connect(scriptEngines, &ScriptEngines::scriptLoadError,
|
||||
scriptEngines, [](const QString& filename, const QString& error){
|
||||
OffscreenUi::asyncWarning(nullptr, "Error Loading Script", filename + " failed to load.");
|
||||
}, Qt::QueuedConnection);
|
||||
connect(scriptEngines, &ScriptEngines::scriptLoadError,
|
||||
this, [](const QString& filename, const QString& error) {
|
||||
OffscreenUi::asyncWarning(nullptr, "Error Loading Script", filename + " failed to load.");
|
||||
}, Qt::QueuedConnection);
|
||||
}
|
||||
|
||||
#ifdef _WIN32
|
||||
WSADATA WsaData;
|
||||
|
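The connect calls above also gain an explicit receiver context (this instead of the scriptEngines sender), which ties the connection's lifetime to the receiver and, with Qt::QueuedConnection, runs the lambda on the receiver's thread. A small stand-alone sketch of connect-with-context; the emitter and receiver here are generic stand-ins:

```cpp
// Sketch of connect() with a context object and a queued lambda.
#include <QCoreApplication>
#include <QObject>
#include <QTimer>
#include <QDebug>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QObject receiver;            // stands in for the object that should own the connection
    QTimer emitter;              // stands in for the signal source

    // With a context object, the connection is dropped automatically if the
    // receiver is destroyed, and the queued lambda runs in the receiver's thread.
    QObject::connect(&emitter, &QTimer::timeout, &receiver, [] {
        qDebug() << "signal handled on the receiver's thread";
    }, Qt::QueuedConnection);

    emitter.start(100);
    QTimer::singleShot(300, &app, &QCoreApplication::quit);
    return app.exec();
}
```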
@ -1365,10 +1370,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
// so we defer the setup of the `scripting::Audio` class until this point
|
||||
{
|
||||
auto audioScriptingInterface = DependencyManager::set<AudioScriptingInterface, scripting::Audio>();
|
||||
connect(audioIO.data(), &AudioClient::mutedByMixer, audioScriptingInterface.data(), &AudioScriptingInterface::mutedByMixer);
|
||||
connect(audioIO.data(), &AudioClient::receivedFirstPacket, audioScriptingInterface.data(), &AudioScriptingInterface::receivedFirstPacket);
|
||||
connect(audioIO.data(), &AudioClient::disconnected, audioScriptingInterface.data(), &AudioScriptingInterface::disconnected);
|
||||
connect(audioIO.data(), &AudioClient::muteEnvironmentRequested, [](glm::vec3 position, float radius) {
|
||||
auto audioIO = DependencyManager::get<AudioClient>().data();
|
||||
connect(audioIO, &AudioClient::mutedByMixer, audioScriptingInterface.data(), &AudioScriptingInterface::mutedByMixer);
|
||||
connect(audioIO, &AudioClient::receivedFirstPacket, audioScriptingInterface.data(), &AudioScriptingInterface::receivedFirstPacket);
|
||||
connect(audioIO, &AudioClient::disconnected, audioScriptingInterface.data(), &AudioScriptingInterface::disconnected);
|
||||
connect(audioIO, &AudioClient::muteEnvironmentRequested, [](glm::vec3 position, float radius) {
|
||||
auto audioClient = DependencyManager::get<AudioClient>();
|
||||
auto audioScriptingInterface = DependencyManager::get<AudioScriptingInterface>();
|
||||
auto myAvatarPosition = DependencyManager::get<AvatarManager>()->getMyAvatar()->getWorldPosition();
|
||||
|
@ -1697,23 +1703,26 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
userInputMapper->registerDevice(_touchscreenVirtualPadDevice->getInputDevice());
|
||||
}
|
||||
|
||||
// this will force the model the look at the correct directory (weird order of operations issue)
|
||||
scriptEngines->reloadLocalFiles();
|
||||
{
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>().data();
|
||||
// this will force the model the look at the correct directory (weird order of operations issue)
|
||||
scriptEngines->reloadLocalFiles();
|
||||
|
||||
// do this as late as possible so that all required subsystems are initialized
|
||||
// If we've overridden the default scripts location, just load default scripts
|
||||
// otherwise, load 'em all
|
||||
// do this as late as possible so that all required subsystems are initialized
|
||||
// If we've overridden the default scripts location, just load default scripts
|
||||
// otherwise, load 'em all
|
||||
|
||||
// we just want to see if --scripts was set, we've already parsed it and done
|
||||
// the change in PathUtils. Rather than pass that in the constructor, lets just
|
||||
// look (this could be debated)
|
||||
QString scriptsSwitch = QString("--").append(SCRIPTS_SWITCH);
|
||||
QDir defaultScriptsLocation(getCmdOption(argc, constArgv, scriptsSwitch.toStdString().c_str()));
|
||||
if (!defaultScriptsLocation.exists()) {
|
||||
scriptEngines->loadDefaultScripts();
|
||||
scriptEngines->defaultScriptsLocationOverridden(true);
|
||||
} else {
|
||||
scriptEngines->loadScripts();
|
||||
// we just want to see if --scripts was set, we've already parsed it and done
|
||||
// the change in PathUtils. Rather than pass that in the constructor, lets just
|
||||
// look (this could be debated)
|
||||
QString scriptsSwitch = QString("--").append(SCRIPTS_SWITCH);
|
||||
QDir defaultScriptsLocation(getCmdOption(argc, constArgv, scriptsSwitch.toStdString().c_str()));
|
||||
if (!defaultScriptsLocation.exists()) {
|
||||
scriptEngines->loadDefaultScripts();
|
||||
scriptEngines->defaultScriptsLocationOverridden(true);
|
||||
} else {
|
||||
scriptEngines->loadScripts();
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure we don't time out during slow operations at startup
|
||||
|
@ -1763,13 +1772,16 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
cameraMenuChanged();
|
||||
}
|
||||
|
||||
// set the local loopback interface for local sounds
|
||||
AudioInjector::setLocalAudioInterface(audioIO.data());
|
||||
auto audioScriptingInterface = DependencyManager::get<AudioScriptingInterface>();
|
||||
audioScriptingInterface->setLocalAudioInterface(audioIO.data());
|
||||
connect(audioIO.data(), &AudioClient::noiseGateOpened, audioScriptingInterface.data(), &AudioScriptingInterface::noiseGateOpened);
|
||||
connect(audioIO.data(), &AudioClient::noiseGateClosed, audioScriptingInterface.data(), &AudioScriptingInterface::noiseGateClosed);
|
||||
connect(audioIO.data(), &AudioClient::inputReceived, audioScriptingInterface.data(), &AudioScriptingInterface::inputReceived);
|
||||
{
|
||||
auto audioIO = DependencyManager::get<AudioClient>().data();
|
||||
// set the local loopback interface for local sounds
|
||||
AudioInjector::setLocalAudioInterface(audioIO);
|
||||
auto audioScriptingInterface = DependencyManager::get<AudioScriptingInterface>();
|
||||
audioScriptingInterface->setLocalAudioInterface(audioIO);
|
||||
connect(audioIO, &AudioClient::noiseGateOpened, audioScriptingInterface.data(), &AudioScriptingInterface::noiseGateOpened);
|
||||
connect(audioIO, &AudioClient::noiseGateClosed, audioScriptingInterface.data(), &AudioScriptingInterface::noiseGateClosed);
|
||||
connect(audioIO, &AudioClient::inputReceived, audioScriptingInterface.data(), &AudioScriptingInterface::inputReceived);
|
||||
}
|
||||
|
||||
this->installEventFilter(this);
|
||||
|
||||
|
@ -1826,13 +1838,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
}
|
||||
});
|
||||
|
||||
connect(entityScriptingInterface.data(), &EntityScriptingInterface::deletingEntity, [=](const EntityItemID& entityItemID) {
|
||||
connect(entityScriptingInterface.data(), &EntityScriptingInterface::deletingEntity, [this](const EntityItemID& entityItemID) {
|
||||
if (entityItemID == _keyboardFocusedEntity.get()) {
|
||||
setKeyboardFocusEntity(UNKNOWN_ENTITY_ID);
|
||||
}
|
||||
});
|
||||
|
||||
connect(getEntities()->getTree().get(), &EntityTree::deletingEntity, [=](const EntityItemID& entityItemID) {
|
||||
connect(getEntities()->getTree().get(), &EntityTree::deletingEntity, [](const EntityItemID& entityItemID) {
|
||||
auto avatarManager = DependencyManager::get<AvatarManager>();
|
||||
auto myAvatar = avatarManager ? avatarManager->getMyAvatar() : nullptr;
|
||||
if (myAvatar) {
|
||||
|
@ -1840,7 +1852,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
}
|
||||
});
|
||||
|
||||
EntityTree::setAddMaterialToEntityOperator([&](const QUuid& entityID, graphics::MaterialLayer material, const std::string& parentMaterialName) {
|
||||
EntityTree::setAddMaterialToEntityOperator([this](const QUuid& entityID, graphics::MaterialLayer material, const std::string& parentMaterialName) {
|
||||
// try to find the renderable
|
||||
auto renderable = getEntities()->renderableForEntityId(entityID);
|
||||
if (renderable) {
|
||||
|
@ -1855,7 +1867,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
}
|
||||
return false;
|
||||
});
|
||||
EntityTree::setRemoveMaterialFromEntityOperator([&](const QUuid& entityID, graphics::MaterialPointer material, const std::string& parentMaterialName) {
|
||||
EntityTree::setRemoveMaterialFromEntityOperator([this](const QUuid& entityID, graphics::MaterialPointer material, const std::string& parentMaterialName) {
|
||||
// try to find the renderable
|
||||
auto renderable = getEntities()->renderableForEntityId(entityID);
|
||||
if (renderable) {
|
||||
|
@ -1890,7 +1902,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
return false;
|
||||
});
|
||||
|
||||
EntityTree::setAddMaterialToOverlayOperator([&](const QUuid& overlayID, graphics::MaterialLayer material, const std::string& parentMaterialName) {
|
||||
EntityTree::setAddMaterialToOverlayOperator([this](const QUuid& overlayID, graphics::MaterialLayer material, const std::string& parentMaterialName) {
|
||||
auto overlay = _overlays.getOverlay(overlayID);
|
||||
if (overlay) {
|
||||
overlay->addMaterial(material, parentMaterialName);
|
||||
|
@ -1898,7 +1910,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
}
|
||||
return false;
|
||||
});
|
||||
EntityTree::setRemoveMaterialFromOverlayOperator([&](const QUuid& overlayID, graphics::MaterialPointer material, const std::string& parentMaterialName) {
|
||||
EntityTree::setRemoveMaterialFromOverlayOperator([this](const QUuid& overlayID, graphics::MaterialPointer material, const std::string& parentMaterialName) {
|
||||
auto overlay = _overlays.getOverlay(overlayID);
|
||||
if (overlay) {
|
||||
overlay->removeMaterial(material, parentMaterialName);
|
||||
|
@ -1909,13 +1921,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
|
||||
// Keyboard focus handling for Web overlays.
|
||||
auto overlays = &(qApp->getOverlays());
|
||||
connect(overlays, &Overlays::overlayDeleted, [=](const OverlayID& overlayID) {
|
||||
connect(overlays, &Overlays::overlayDeleted, [this](const OverlayID& overlayID) {
|
||||
if (overlayID == _keyboardFocusedOverlay.get()) {
|
||||
setKeyboardFocusOverlay(UNKNOWN_OVERLAY_ID);
|
||||
}
|
||||
});
|
||||
|
||||
connect(this, &Application::aboutToQuit, [=]() {
|
||||
connect(this, &Application::aboutToQuit, [this]() {
|
||||
setKeyboardFocusOverlay(UNKNOWN_OVERLAY_ID);
|
||||
setKeyboardFocusEntity(UNKNOWN_ENTITY_ID);
|
||||
});
|
||||
|
@ -2184,23 +2196,22 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
QVariant testProperty = property(hifi::properties::TEST);
|
||||
qDebug() << testProperty;
|
||||
if (testProperty.isValid()) {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
const auto testScript = property(hifi::properties::TEST).toUrl();
|
||||
|
||||
// Set last parameter to exit interface when the test script finishes, if so requested
|
||||
scriptEngines->loadScript(testScript, false, false, false, false, quitWhenFinished);
|
||||
DependencyManager::get<ScriptEngines>()->loadScript(testScript, false, false, false, false, quitWhenFinished);
|
||||
|
||||
// This is done so we don't get a "connection time-out" message when we haven't passed in a URL.
|
||||
if (arguments().contains("--url")) {
|
||||
auto reply = SandboxUtils::getStatus();
|
||||
connect(reply, &QNetworkReply::finished, this, [=] {
|
||||
connect(reply, &QNetworkReply::finished, this, [this, reply] {
|
||||
handleSandboxStatus(reply);
|
||||
});
|
||||
}
|
||||
} else {
|
||||
PROFILE_RANGE(render, "GetSandboxStatus");
|
||||
auto reply = SandboxUtils::getStatus();
|
||||
connect(reply, &QNetworkReply::finished, this, [=] {
|
||||
connect(reply, &QNetworkReply::finished, this, [this, reply] {
|
||||
handleSandboxStatus(reply);
|
||||
});
|
||||
}
|
||||
|
@ -2227,8 +2238,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
|
||||
connect(&_myCamera, &Camera::modeUpdated, this, &Application::cameraModeChanged);
|
||||
|
||||
DependencyManager::get<PickManager>()->setShouldPickHUDOperator([&]() { return DependencyManager::get<HMDScriptingInterface>()->isHMDMode(); });
|
||||
DependencyManager::get<PickManager>()->setCalculatePos2DFromHUDOperator([&](const glm::vec3& intersection) {
|
||||
DependencyManager::get<PickManager>()->setShouldPickHUDOperator([]() { return DependencyManager::get<HMDScriptingInterface>()->isHMDMode(); });
|
||||
DependencyManager::get<PickManager>()->setCalculatePos2DFromHUDOperator([this](const glm::vec3& intersection) {
|
||||
const glm::vec2 MARGIN(25.0f);
|
||||
glm::vec2 maxPos = _controllerScriptingInterface->getViewportDimensions() - MARGIN;
|
||||
glm::vec2 pos2D = DependencyManager::get<HMDScriptingInterface>()->overlayFromWorldPoint(intersection);
|
||||
|
@ -2238,7 +2249,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
// Setup the mouse ray pick and related operators
|
||||
DependencyManager::get<EntityTreeRenderer>()->setMouseRayPickID(DependencyManager::get<PickManager>()->addPick(PickQuery::Ray, std::make_shared<MouseRayPick>(
|
||||
PickFilter(PickScriptingInterface::PICK_ENTITIES() | PickScriptingInterface::PICK_INCLUDE_NONCOLLIDABLE()), 0.0f, true)));
|
||||
DependencyManager::get<EntityTreeRenderer>()->setMouseRayPickResultOperator([&](unsigned int rayPickID) {
|
||||
DependencyManager::get<EntityTreeRenderer>()->setMouseRayPickResultOperator([](unsigned int rayPickID) {
|
||||
RayToEntityIntersectionResult entityResult;
|
||||
entityResult.intersects = false;
|
||||
auto pickResult = DependencyManager::get<PickManager>()->getPrevPickResultTyped<RayPickResult>(rayPickID);
|
||||
|
@ -2254,7 +2265,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
}
|
||||
return entityResult;
|
||||
});
|
||||
DependencyManager::get<EntityTreeRenderer>()->setSetPrecisionPickingOperator([&](unsigned int rayPickID, bool value) {
|
||||
DependencyManager::get<EntityTreeRenderer>()->setSetPrecisionPickingOperator([](unsigned int rayPickID, bool value) {
|
||||
DependencyManager::get<PickManager>()->setPrecisionPicking(rayPickID, value);
|
||||
});
|
||||
|
||||
|
@ -2453,21 +2464,35 @@ void Application::cleanupBeforeQuit() {
|
|||
_keyboardFocusHighlight = nullptr;
|
||||
}
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
{
auto nodeList = DependencyManager::get<NodeList>();

// send the domain a disconnect packet, force stoppage of domain-server check-ins
nodeList->getDomainHandler().disconnect();
nodeList->setIsShuttingDown(true);
// send the domain a disconnect packet, force stoppage of domain-server check-ins
nodeList->getDomainHandler().disconnect();
nodeList->setIsShuttingDown(true);

// tell the packet receiver we're shutting down, so it can drop packets
nodeList->getPacketReceiver().setShouldDropPackets(true);
// tell the packet receiver we're shutting down, so it can drop packets
nodeList->getPacketReceiver().setShouldDropPackets(true);
}
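
This hunk narrows the NodeList handle to a brace scope, so the strong reference returned by DependencyManager::get<NodeList>() is dropped as soon as the disconnect/shutdown calls finish instead of living to the end of cleanupBeforeQuit(). A dependency-free sketch of that scoping idea with std::shared_ptr (illustrative names only, not the project's API):

    #include <cstdio>
    #include <memory>

    struct Service {
        void shutdown() { std::puts("service shutting down"); }
        ~Service() { std::puts("service destroyed"); }
    };

    int main() {
        auto owner = std::make_shared<Service>();
        {
            // Hold a strong reference only for the duration of these calls;
            // the brace scope releases it immediately afterwards.
            std::shared_ptr<Service> service = owner;
            service->shutdown();
        }   // 'service' released here, so 'owner' is again the sole reference
        owner.reset();  // destruction is not delayed by a lingering local copy
        return 0;
    }
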
|
||||
|
||||
getEntities()->shutdown(); // tell the entities system we're shutting down, so it will stop running scripts
|
||||
|
||||
// Clear any queued processing (I/O, FBX/OBJ/Texture parsing)
|
||||
QThreadPool::globalInstance()->clear();
|
||||
|
||||
DependencyManager::destroy<RecordingScriptingInterface>();
|
||||
|
||||
// FIXME: Something is still holding on to the ScriptEnginePointers contained in ScriptEngines, and they hold backpointers to ScriptEngines,
|
||||
// so this doesn't shut down properly
|
||||
DependencyManager::get<ScriptEngines>()->shutdownScripting(); // stop all currently running global scripts
|
||||
// These classes hold ScriptEnginePointers, so they must be destroyed before ScriptEngines
|
||||
// Must be done after shutdownScripting in case any scripts try to access these things
|
||||
{
|
||||
DependencyManager::destroy<StandAloneJSConsole>();
|
||||
EntityTreePointer tree = getEntities()->getTree();
|
||||
tree->setSimulation(nullptr);
|
||||
DependencyManager::destroy<EntityTreeRenderer>();
|
||||
}
|
||||
DependencyManager::destroy<ScriptEngines>();
|
||||
|
||||
_displayPlugin.reset();
|
||||
|
@ -2504,6 +2529,8 @@ void Application::cleanupBeforeQuit() {
|
|||
DependencyManager::destroy<EyeTracker>();
|
||||
#endif
|
||||
|
||||
DependencyManager::destroy<ContextOverlayInterface>(); // Must be destroyed before TabletScriptingInterface
|
||||
|
||||
// stop QML
|
||||
DependencyManager::destroy<TabletScriptingInterface>();
|
||||
DependencyManager::destroy<ToolbarScriptingInterface>();
|
||||
|
@ -2515,10 +2542,6 @@ void Application::cleanupBeforeQuit() {
|
|||
_snapshotSoundInjector->stop();
|
||||
}
|
||||
|
||||
// FIXME: something else is holding a reference to AudioClient,
|
||||
// so it must be explicitly synchronously stopped here
|
||||
DependencyManager::get<AudioClient>()->cleanupBeforeQuit();
|
||||
|
||||
// destroy Audio so it and its threads have a chance to go down safely
|
||||
// this must happen after QML, as there are unexplained audio crashes originating in qtwebengine
|
||||
DependencyManager::destroy<AudioClient>();
|
||||
|
@ -2558,9 +2581,6 @@ Application::~Application() {
|
|||
_entityClipboard->eraseAllOctreeElements();
|
||||
_entityClipboard.reset();
|
||||
|
||||
EntityTreePointer tree = getEntities()->getTree();
|
||||
tree->setSimulation(nullptr);
|
||||
|
||||
_octreeProcessor.terminate();
|
||||
_entityEditSender.terminate();
|
||||
|
||||
|
@ -2924,6 +2944,15 @@ void Application::initializeUi() {
|
|||
|
||||
// Pre-create a couple of Web3D overlays to speed up tablet UI
|
||||
auto offscreenSurfaceCache = DependencyManager::get<OffscreenQmlSurfaceCache>();
|
||||
offscreenSurfaceCache->setOnRootContextCreated([&](const QString& rootObject, QQmlContext* surfaceContext) {
|
||||
if (rootObject == TabletScriptingInterface::QML) {
|
||||
// in Qt 5.10.0 there is already an "Audio" object in the QML context
|
||||
// though I failed to find it (from QtMultimedia??). So.. let it be "AudioScriptingInterface"
|
||||
surfaceContext->setContextProperty("AudioScriptingInterface", DependencyManager::get<AudioScriptingInterface>().data());
|
||||
surfaceContext->setContextProperty("Account", AccountServicesScriptingInterface::getInstance()); // DEPRECATED - TO BE REMOVED
|
||||
}
|
||||
});
|
||||
|
||||
offscreenSurfaceCache->reserve(TabletScriptingInterface::QML, 1);
|
||||
offscreenSurfaceCache->reserve(Web3DOverlay::QML, 2);
|
||||
|
||||
|
@ -3265,8 +3294,7 @@ void Application::showHelp() {
|
|||
QUrlQuery queryString;
|
||||
queryString.addQueryItem("handControllerName", handControllerName);
|
||||
queryString.addQueryItem("defaultTab", defaultTab);
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
TabletProxy* tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
TabletProxy* tablet = dynamic_cast<TabletProxy*>(DependencyManager::get<TabletScriptingInterface>()->getTablet(SYSTEM_TABLET));
|
||||
tablet->gotoWebScreen(PathUtils::resourcesUrl() + INFO_HELP_PATH + "?" + queryString.toString());
|
||||
DependencyManager::get<HMDScriptingInterface>()->openTablet();
|
||||
//InfoView::show(INFO_HELP_PATH, false, queryString.toString());
|
||||
|
@ -3579,6 +3607,10 @@ static void dumpEventQueue(QThread* thread) {
|
|||
|
||||
bool Application::event(QEvent* event) {
|
||||
|
||||
if (_aboutToQuit) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!Menu::getInstance()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -3696,7 +3728,10 @@ static bool _altPressed{ false };
|
|||
|
||||
void Application::keyPressEvent(QKeyEvent* event) {
|
||||
_altPressed = event->key() == Qt::Key_Alt;
|
||||
_keysPressed.insert(event->key());
|
||||
|
||||
if (!event->isAutoRepeat()) {
|
||||
_keysPressed.insert(event->key(), *event);
|
||||
}
|
||||
|
||||
_controllerScriptingInterface->emitKeyPressEvent(event); // send events to any registered scripts
|
||||
// if one of our scripts have asked to capture this event, then stop processing it
|
||||
|
@ -3907,7 +3942,9 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
}
|
||||
|
||||
void Application::keyReleaseEvent(QKeyEvent* event) {
|
||||
_keysPressed.remove(event->key());
|
||||
if (!event->isAutoRepeat()) {
|
||||
_keysPressed.remove(event->key());
|
||||
}
|
||||
|
||||
#if defined(Q_OS_ANDROID)
|
||||
if (event->key() == Qt::Key_Back) {
|
||||
|
@ -3943,11 +3980,14 @@ void Application::focusOutEvent(QFocusEvent* event) {
|
|||
#endif
|
||||
|
||||
// synthesize events for keys currently pressed, since we may not get their release events
foreach (int key, _keysPressed) {
QKeyEvent keyEvent(QEvent::KeyRelease, key, Qt::NoModifier);
keyReleaseEvent(&keyEvent);
// Because our key event handlers may manipulate _keysPressed, lets swap the keys pressed into a local copy,
// clearing the existing list.
QHash<int, QKeyEvent> keysPressed;
std::swap(keysPressed, _keysPressed);
for (auto& ev : keysPressed) {
QKeyEvent synthesizedEvent { QKeyEvent::KeyRelease, ev.key(), Qt::NoModifier, ev.text() };
keyReleaseEvent(&synthesizedEvent);
}
_keysPressed.clear();
}
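
The new loop above exists because keyReleaseEvent() itself removes entries from _keysPressed, so iterating the member hash directly could be invalidated mid-loop; swapping it into a local copy first keeps the iteration stable no matter what the handlers do. A minimal, self-contained sketch of that swap-and-iterate pattern with made-up names (this is not the Application class):

    #include <functional>
    #include <string>
    #include <unordered_map>
    #include <utility>

    // The point is the pattern: move the pending items into a local container
    // before running callbacks, so a callback that mutates the member cannot
    // break the loop.
    class PendingKeys {
    public:
        void press(int key, std::string text) { _pressed[key] = std::move(text); }

        void releaseAll(const std::function<void(int, const std::string&)>& release) {
            std::unordered_map<int, std::string> pressed;
            std::swap(pressed, _pressed);               // member is now empty
            for (const auto& [key, text] : pressed) {   // iterate the stable local copy
                release(key, text);                     // may call press()/releaseAll() safely
            }
        }

    private:
        std::unordered_map<int, std::string> _pressed;
    };

Draining into a local copy also means anything a re-entrant handler adds lands in the (now empty) member container for a later pass rather than being processed in the same loop.
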
|
||||
|
||||
void Application::maybeToggleMenuVisible(QMouseEvent* event) const {
|
||||
|
@ -3975,10 +4015,6 @@ void Application::maybeToggleMenuVisible(QMouseEvent* event) const {
|
|||
void Application::mouseMoveEvent(QMouseEvent* event) {
|
||||
PROFILE_RANGE(app_input_mouse, __FUNCTION__);
|
||||
|
||||
if (_aboutToQuit) {
|
||||
return;
|
||||
}
|
||||
|
||||
maybeToggleMenuVisible(event);
|
||||
|
||||
auto& compositor = getApplicationCompositor();
|
||||
|
@ -4043,11 +4079,9 @@ void Application::mousePressEvent(QMouseEvent* event) {
|
|||
event->screenPos(), event->button(),
|
||||
event->buttons(), event->modifiers());
|
||||
|
||||
if (!_aboutToQuit) {
|
||||
getOverlays().mousePressEvent(&mappedEvent);
|
||||
if (!_controllerScriptingInterface->areEntityClicksCaptured()) {
|
||||
getEntities()->mousePressEvent(&mappedEvent);
|
||||
}
|
||||
getOverlays().mousePressEvent(&mappedEvent);
|
||||
if (!_controllerScriptingInterface->areEntityClicksCaptured()) {
|
||||
getEntities()->mousePressEvent(&mappedEvent);
|
||||
}
|
||||
|
||||
_controllerScriptingInterface->emitMousePressEvent(&mappedEvent); // send events to any registered scripts
|
||||
|
@ -4084,14 +4118,11 @@ void Application::mouseDoublePressEvent(QMouseEvent* event) {
|
|||
event->screenPos(), event->button(),
|
||||
event->buttons(), event->modifiers());
|
||||
|
||||
if (!_aboutToQuit) {
|
||||
getOverlays().mouseDoublePressEvent(&mappedEvent);
|
||||
if (!_controllerScriptingInterface->areEntityClicksCaptured()) {
|
||||
getEntities()->mouseDoublePressEvent(&mappedEvent);
|
||||
}
|
||||
getOverlays().mouseDoublePressEvent(&mappedEvent);
|
||||
if (!_controllerScriptingInterface->areEntityClicksCaptured()) {
|
||||
getEntities()->mouseDoublePressEvent(&mappedEvent);
|
||||
}
|
||||
|
||||
|
||||
// if one of our scripts have asked to capture this event, then stop processing it
|
||||
if (_controllerScriptingInterface->isMouseCaptured()) {
|
||||
return;
|
||||
|
@ -4110,10 +4141,8 @@ void Application::mouseReleaseEvent(QMouseEvent* event) {
|
|||
event->screenPos(), event->button(),
|
||||
event->buttons(), event->modifiers());
|
||||
|
||||
if (!_aboutToQuit) {
|
||||
getOverlays().mouseReleaseEvent(&mappedEvent);
|
||||
getEntities()->mouseReleaseEvent(&mappedEvent);
|
||||
}
|
||||
getOverlays().mouseReleaseEvent(&mappedEvent);
|
||||
getEntities()->mouseReleaseEvent(&mappedEvent);
|
||||
|
||||
_controllerScriptingInterface->emitMouseReleaseEvent(&mappedEvent); // send events to any registered scripts
|
||||
|
||||
|
@ -4264,7 +4293,6 @@ bool Application::shouldPaint() const {
|
|||
return false;
|
||||
}
|
||||
|
||||
|
||||
auto displayPlugin = getActiveDisplayPlugin();
|
||||
|
||||
#ifdef DEBUG_PAINT_DELAY
|
||||
|
@ -4744,7 +4772,7 @@ bool Application::exportEntities(const QString& filename,
|
|||
exportTree->createRootElement();
|
||||
glm::vec3 root(TREE_SCALE, TREE_SCALE, TREE_SCALE);
|
||||
bool success = true;
|
||||
entityTree->withReadLock([&] {
|
||||
entityTree->withReadLock([entityIDs, entityTree, givenOffset, myAvatarID, &root, &entities, &success, &exportTree] {
|
||||
for (auto entityID : entityIDs) { // Gather entities and properties.
|
||||
auto entityItem = entityTree->findEntityByEntityItemID(entityID);
|
||||
if (!entityItem) {
|
||||
|
@ -5493,6 +5521,10 @@ static bool domainLoadingInProgress = false;
|
|||
void Application::update(float deltaTime) {
|
||||
PROFILE_RANGE_EX(app, __FUNCTION__, 0xffff0000, (uint64_t)_renderFrameCount + 1);
|
||||
|
||||
if (_aboutToQuit) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!_physicsEnabled) {
|
||||
if (!domainLoadingInProgress) {
|
||||
PROFILE_ASYNC_BEGIN(app, "Scene Loading", "");
|
||||
|
@ -5761,15 +5793,13 @@ void Application::update(float deltaTime) {
|
|||
_entitySimulation->handleDeactivatedMotionStates(deactivations);
|
||||
});
|
||||
|
||||
if (!_aboutToQuit) {
|
||||
// handleCollisionEvents() AFTER handleChangedMotionStates()
|
||||
{
|
||||
PROFILE_RANGE(simulation_physics, "CollisionEvents");
|
||||
avatarManager->handleCollisionEvents(collisionEvents);
|
||||
// Collision events (and their scripts) must not be handled when we're locked, above. (That would risk
|
||||
// deadlock.)
|
||||
_entitySimulation->handleCollisionEvents(collisionEvents);
|
||||
}
|
||||
// handleCollisionEvents() AFTER handleChangedMotionStates()
|
||||
{
|
||||
PROFILE_RANGE(simulation_physics, "CollisionEvents");
|
||||
avatarManager->handleCollisionEvents(collisionEvents);
|
||||
// Collision events (and their scripts) must not be handled when we're locked, above. (That would risk
|
||||
// deadlock.)
|
||||
_entitySimulation->handleCollisionEvents(collisionEvents);
|
||||
}
|
||||
|
||||
{
|
||||
|
@ -5787,11 +5817,9 @@ void Application::update(float deltaTime) {
|
|||
}
|
||||
auto t4 = std::chrono::high_resolution_clock::now();
|
||||
|
||||
if (!_aboutToQuit) {
|
||||
// NOTE: the getEntities()->update() call below will wait for lock
|
||||
// and will provide non-physical entity motion
|
||||
getEntities()->update(true); // update the models...
|
||||
}
|
||||
// NOTE: the getEntities()->update() call below will wait for lock
|
||||
// and will provide non-physical entity motion
|
||||
getEntities()->update(true); // update the models...
|
||||
|
||||
auto t5 = std::chrono::high_resolution_clock::now();
|
||||
|
||||
|
@ -6357,7 +6385,6 @@ void Application::domainURLChanged(QUrl domainURL) {
|
|||
void Application::resettingDomain() {
|
||||
_notifiedPacketVersionMismatchThisDomain = false;
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
clearDomainOctreeDetails();
|
||||
}
|
||||
|
||||
|
@ -6708,19 +6735,16 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEnginePointe
|
|||
|
||||
registerInteractiveWindowMetaType(scriptEngine.data());
|
||||
|
||||
DependencyManager::get<PickScriptingInterface>()->registerMetaTypes(scriptEngine.data());
|
||||
auto pickScriptingInterface = DependencyManager::get<PickScriptingInterface>();
|
||||
pickScriptingInterface->registerMetaTypes(scriptEngine.data());
|
||||
|
||||
// connect this script engines printedMessage signal to the global ScriptEngines these various messages
|
||||
connect(scriptEngine.data(), &ScriptEngine::printedMessage,
|
||||
DependencyManager::get<ScriptEngines>().data(), &ScriptEngines::onPrintedMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::errorMessage,
|
||||
DependencyManager::get<ScriptEngines>().data(), &ScriptEngines::onErrorMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::warningMessage,
|
||||
DependencyManager::get<ScriptEngines>().data(), &ScriptEngines::onWarningMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::infoMessage,
|
||||
DependencyManager::get<ScriptEngines>().data(), &ScriptEngines::onInfoMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::clearDebugWindow,
|
||||
DependencyManager::get<ScriptEngines>().data(), &ScriptEngines::onClearDebugWindow);
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>().data();
|
||||
connect(scriptEngine.data(), &ScriptEngine::printedMessage, scriptEngines, &ScriptEngines::onPrintedMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::errorMessage, scriptEngines, &ScriptEngines::onErrorMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::warningMessage, scriptEngines, &ScriptEngines::onWarningMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::infoMessage, scriptEngines, &ScriptEngines::onInfoMessage);
|
||||
connect(scriptEngine.data(), &ScriptEngine::clearDebugWindow, scriptEngines, &ScriptEngines::onClearDebugWindow);
|
||||
|
||||
}
|
||||
|
||||
|
@ -7028,10 +7052,9 @@ void Application::showDialog(const QUrl& widgetUrl, const QUrl& tabletUrl, const
|
|||
}
|
||||
|
||||
void Application::showScriptLogs() {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
QUrl defaultScriptsLoc = PathUtils::defaultScriptsLocation();
|
||||
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/debugging/debugWindow.js");
|
||||
scriptEngines->loadScript(defaultScriptsLoc.toString());
|
||||
DependencyManager::get<ScriptEngines>()->loadScript(defaultScriptsLoc.toString());
|
||||
}
|
||||
|
||||
void Application::showAssetServerWidget(QString filePath) {
|
||||
|
@ -7284,6 +7307,8 @@ void Application::addAssetToWorldAddEntity(QString filePath, QString mapping) {
|
|||
}
|
||||
properties.setCollisionless(true); // Temporarily set so that doesn't collide with avatar.
|
||||
properties.setVisible(false); // Temporarily set so that don't see at large unresized dimensions.
|
||||
bool grabbable = (Menu::getInstance()->isOptionChecked(MenuOption::CreateEntitiesGrabbable));
|
||||
properties.setUserData(grabbable ? GRABBABLE_USER_DATA : NOT_GRABBABLE_USER_DATA);
|
||||
glm::vec3 positionOffset = getMyAvatar()->getWorldOrientation() * (getMyAvatar()->getSensorToWorldScale() * glm::vec3(0.0f, 0.0f, -2.0f));
|
||||
properties.setPosition(getMyAvatar()->getWorldPosition() + positionOffset);
|
||||
properties.setRotation(getMyAvatar()->getWorldOrientation());
|
||||
|
@ -7326,7 +7351,6 @@ void Application::addAssetToWorldCheckModelSize() {
|
|||
auto name = properties.getName();
|
||||
auto dimensions = properties.getDimensions();
|
||||
|
||||
const QString GRABBABLE_USER_DATA = "{\"grabbableKey\":{\"grabbable\":true}}";
|
||||
bool doResize = false;
|
||||
|
||||
const glm::vec3 DEFAULT_DIMENSIONS = glm::vec3(0.1f, 0.1f, 0.1f);
|
||||
|
@ -7370,7 +7394,8 @@ void Application::addAssetToWorldCheckModelSize() {
|
|||
if (!name.toLower().endsWith(PNG_EXTENSION) && !name.toLower().endsWith(JPG_EXTENSION)) {
|
||||
properties.setCollisionless(false);
|
||||
}
|
||||
properties.setUserData(GRABBABLE_USER_DATA);
|
||||
bool grabbable = (Menu::getInstance()->isOptionChecked(MenuOption::CreateEntitiesGrabbable));
|
||||
properties.setUserData(grabbable ? GRABBABLE_USER_DATA : NOT_GRABBABLE_USER_DATA);
|
||||
properties.setLastEdited(usecTimestampNow());
|
||||
entityScriptingInterface->editEntity(entityID, properties);
|
||||
}
|
||||
|
@ -7595,7 +7620,6 @@ void Application::openUrl(const QUrl& url) const {
|
|||
}
|
||||
|
||||
void Application::loadDialog() {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
ModalDialogListener* dlg = OffscreenUi::getOpenFileNameAsync(_glWidget, tr("Open Script"),
|
||||
getPreviousScriptLocation(),
|
||||
tr("JavaScript Files (*.js)"));
|
||||
|
|
|
@ -621,7 +621,7 @@ private:
|
|||
float _mirrorYawOffset;
|
||||
float _raiseMirror;
|
||||
|
||||
QSet<int> _keysPressed;
|
||||
QHash<int, QKeyEvent> _keysPressed;
|
||||
|
||||
bool _enableProcessOctreeThread;
|
||||
|
||||
|
|
|
@ -64,7 +64,7 @@ void addAvatarEntities(const QVariantList& avatarEntities) {
|
|||
|
||||
EntityItemID id = EntityItemID(QUuid::createUuid());
|
||||
bool success = true;
|
||||
entityTree->withWriteLock([&] {
|
||||
entityTree->withWriteLock([&entityTree, id, &entityProperties, &success] {
|
||||
EntityItemPointer entity = entityTree->addEntity(id, entityProperties);
|
||||
if (entity) {
|
||||
if (entityProperties.queryAACubeRelatedPropertyChanged()) {
|
||||
|
@ -171,6 +171,13 @@ void AvatarBookmarks::loadBookmark(const QString& bookmarkName) {
|
|||
|
||||
if (bookmarkEntry != _bookmarks.end()) {
QVariantMap bookmark = bookmarkEntry.value().toMap();
if (bookmark.empty()) { // compatibility with bookmarks like this: "Wooden Doll": "http://mpassets.highfidelity.com/7fe80a1e-f445-4800-9e89-40e677b03bee-v1/mannequin.fst?noDownload=false",
auto avatarUrl = bookmarkEntry.value().toString();
if (!avatarUrl.isEmpty()) {
bookmark.insert(ENTRY_AVATAR_URL, avatarUrl);
}
}
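
The empty-map branch above is a compatibility shim: older bookmark files stored just the avatar URL as a bare string, so the code wraps such entries into the map shape the rest of loadBookmark() expects. A hedged sketch of the same normalization in isolation; the key string below is an assumption standing in for ENTRY_AVATAR_URL:

    #include <QString>
    #include <QVariant>
    #include <QVariantMap>

    // Normalize a bookmark entry that may be either a full map or, in the old
    // format, just the avatar URL as a bare string.
    QVariantMap normalizeBookmark(const QVariant& entry) {
        static const QString AVATAR_URL_KEY = QStringLiteral("avatarUrl");  // assumed key name
        QVariantMap bookmark = entry.toMap();
        if (bookmark.isEmpty()) {
            // Legacy format: the whole entry was a URL string, so wrap it in a map.
            const QString avatarUrl = entry.toString();
            if (!avatarUrl.isEmpty()) {
                bookmark.insert(AVATAR_URL_KEY, avatarUrl);
            }
        }
        return bookmark;
    }
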
|
||||
|
||||
if (!bookmark.empty()) {
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
|
||||
|
|
|
@ -147,9 +147,11 @@ Menu::Menu() {
|
|||
auto assetServerAction = addActionToQMenuAndActionHash(editMenu, MenuOption::AssetServer,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_A,
|
||||
qApp, SLOT(showAssetServerWidget()));
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
QObject::connect(nodeList.data(), &NodeList::canWriteAssetsChanged, assetServerAction, &QAction::setEnabled);
|
||||
assetServerAction->setEnabled(nodeList->getThisNodeCanWriteAssets());
|
||||
{
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
QObject::connect(nodeList.data(), &NodeList::canWriteAssetsChanged, assetServerAction, &QAction::setEnabled);
|
||||
assetServerAction->setEnabled(nodeList->getThisNodeCanWriteAssets());
|
||||
}
|
||||
|
||||
// Edit > Package Model as .fst...
|
||||
addActionToQMenuAndActionHash(editMenu, MenuOption::PackageModel, 0,
|
||||
|
@ -620,8 +622,11 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(networkMenu, MenuOption::SendWrongProtocolVersion, 0, false,
|
||||
qApp, SLOT(sendWrongProtocolVersionsSignature(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(networkMenu, MenuOption::SendWrongDSConnectVersion, 0, false,
|
||||
nodeList.data(), SLOT(toggleSendNewerDSConnectVersion(bool)));
|
||||
{
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
addCheckableActionToQMenuAndActionHash(networkMenu, MenuOption::SendWrongDSConnectVersion, 0, false,
|
||||
nodeList.data(), SLOT(toggleSendNewerDSConnectVersion(bool)));
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
|
@ -655,10 +660,9 @@ Menu::Menu() {
|
|||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, "Stats...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
QUrl defaultScriptsLoc = PathUtils::defaultScriptsLocation();
|
||||
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/audio/stats.js");
|
||||
scriptEngines->loadScript(defaultScriptsLoc.toString());
|
||||
DependencyManager::get<ScriptEngines>()->loadScript(defaultScriptsLoc.toString());
|
||||
});
|
||||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, "Buffers...");
|
||||
|
@ -667,16 +671,14 @@ Menu::Menu() {
|
|||
QString("hifi/tablet/TabletAudioBuffers.qml"), "AudioBuffersDialog");
|
||||
});
|
||||
|
||||
auto audioIO = DependencyManager::get<AudioClient>();
|
||||
addActionToQMenuAndActionHash(audioDebugMenu, MenuOption::MuteEnvironment, 0,
|
||||
audioIO.data(), SLOT(sendMuteEnvironmentPacket()));
|
||||
DependencyManager::get<AudioClient>().data(), SLOT(sendMuteEnvironmentPacket()));
|
||||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScope);
|
||||
connect(action, &QAction::triggered, [] {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
QUrl defaultScriptsLoc = PathUtils::defaultScriptsLocation();
|
||||
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/audio/audioScope.js");
|
||||
scriptEngines->loadScript(defaultScriptsLoc.toString());
|
||||
DependencyManager::get<ScriptEngines>()->loadScript(defaultScriptsLoc.toString());
|
||||
});
|
||||
|
||||
// Developer > Physics >>>
|
||||
|
@ -756,10 +758,9 @@ Menu::Menu() {
|
|||
// Developer > API Debugger
|
||||
action = addActionToQMenuAndActionHash(developerMenu, "API Debugger");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
QUrl defaultScriptsLoc = PathUtils::defaultScriptsLocation();
|
||||
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/tools/currentAPI.js");
|
||||
scriptEngines->loadScript(defaultScriptsLoc.toString());
|
||||
DependencyManager::get<ScriptEngines>()->loadScript(defaultScriptsLoc.toString());
|
||||
});
|
||||
|
||||
// Developer > Log...
|
||||
|
@ -767,11 +768,14 @@ Menu::Menu() {
|
|||
qApp, SLOT(toggleLogDialog()));
|
||||
auto essLogAction = addActionToQMenuAndActionHash(developerMenu, MenuOption::EntityScriptServerLog, 0,
|
||||
qApp, SLOT(toggleEntityScriptServerLogDialog()));
|
||||
QObject::connect(nodeList.data(), &NodeList::canRezChanged, essLogAction, [essLogAction] {
|
||||
{
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
QObject::connect(nodeList.data(), &NodeList::canRezChanged, essLogAction, [essLogAction] {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
|
||||
});
|
||||
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
|
||||
});
|
||||
essLogAction->setEnabled(nodeList->getThisNodeCanRez());
|
||||
}
|
||||
|
||||
addActionToQMenuAndActionHash(developerMenu, "Script Log (HMD friendly)...", Qt::NoButton,
|
||||
qApp, SLOT(showScriptLogs()));
|
||||
|
|
|
@ -76,6 +76,7 @@ namespace MenuOption {
|
|||
const QString CrashOutOfBoundsVectorAccessThreaded = "Out of Bounds Vector Access (threaded)";
|
||||
const QString CrashNewFault = "New Fault";
|
||||
const QString CrashNewFaultThreaded = "New Fault (threaded)";
|
||||
const QString CreateEntitiesGrabbable = "Create Entities As Grabbable (except Zones, Particles, and Lights)";
|
||||
const QString DeadlockInterface = "Deadlock Interface";
|
||||
const QString UnresponsiveInterface = "Unresponsive Interface";
|
||||
const QString DecreaseAvatarSize = "Decrease Avatar Size";
|
||||
|
|
|
@ -79,7 +79,7 @@ AvatarManager::AvatarManager(QObject* parent) :
|
|||
|
||||
// when we hear that the user has ignored an avatar by session UUID
|
||||
// immediately remove that avatar instead of waiting for the absence of packets from avatar mixer
|
||||
connect(nodeList.data(), &NodeList::ignoredNode, this, [=](const QUuid& nodeID, bool enabled) {
|
||||
connect(nodeList.data(), &NodeList::ignoredNode, this, [this](const QUuid& nodeID, bool enabled) {
|
||||
if (enabled) {
|
||||
removeAvatar(nodeID, KillAvatarReason::AvatarIgnored);
|
||||
}
|
||||
|
@ -277,6 +277,9 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
|
|||
sortedAvatars.pop();
|
||||
}
|
||||
|
||||
if (_shouldRender) {
|
||||
qApp->getMain3DScene()->enqueueTransaction(transaction);
|
||||
}
|
||||
_numAvatarsUpdated = numAvatarsUpdated;
|
||||
_numAvatarsNotUpdated = numAVatarsNotUpdated;
|
||||
|
||||
|
@ -318,16 +321,21 @@ void AvatarManager::postUpdate(float deltaTime, const render::ScenePointer& scen
|
|||
|
||||
void AvatarManager::sendIdentityRequest(const QUuid& avatarID) const {
auto nodeList = DependencyManager::get<NodeList>();
QWeakPointer<NodeList> nodeListWeak = nodeList;
nodeList->eachMatchingNode(
[&](const SharedNodePointer& node)->bool {
return node->getType() == NodeType::AvatarMixer && node->getActiveSocket();
},
[&](const SharedNodePointer& node) {
auto packet = NLPacket::create(PacketType::AvatarIdentityRequest, NUM_BYTES_RFC4122_UUID, true);
packet->write(avatarID.toRfc4122());
nodeList->sendPacket(std::move(packet), *node);
++_identityRequestsSent;
});
[](const SharedNodePointer& node)->bool {
return node->getType() == NodeType::AvatarMixer && node->getActiveSocket();
},
[this, avatarID, nodeListWeak](const SharedNodePointer& node) {
auto nodeList = nodeListWeak.lock();
if (nodeList) {
auto packet = NLPacket::create(PacketType::AvatarIdentityRequest, NUM_BYTES_RFC4122_UUID, true);
packet->write(avatarID.toRfc4122());
nodeList->sendPacket(std::move(packet), *node);
++_identityRequestsSent;
}
}
);
}
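
The rewritten callback captures nodeListWeak (a QWeakPointer) rather than the strong nodeList handle, and promotes it with lock() only while it actually runs, so the callback neither extends NodeList's lifetime nor touches it after teardown. A small sketch of that capture-weak, lock-inside pattern (Sender and makeCallback are illustrative, not the AvatarManager API):

    #include <QDebug>
    #include <QSharedPointer>
    #include <QWeakPointer>
    #include <functional>

    struct Sender {
        void send(int payload) { qDebug() << "sent" << payload; }
    };

    // Build a deferred callback that does NOT keep the sender alive: it captures
    // a weak reference and promotes it to a strong one only while it runs.
    std::function<void()> makeCallback(const QSharedPointer<Sender>& sender, int payload) {
        QWeakPointer<Sender> weakSender = sender;
        return [weakSender, payload]() {
            if (auto strongSender = weakSender.lock()) {
                strongSender->send(payload);
            }
            // else: the sender is gone; skip instead of dereferencing a dangling pointer
        };
    }

    int main() {
        auto sender = QSharedPointer<Sender>::create();
        auto callback = makeCallback(sender, 42);
        callback();         // sender still alive: sends
        sender.reset();     // destroy the sender
        callback();         // safely does nothing
        return 0;
    }
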
|
||||
|
||||
void AvatarManager::simulateAvatarFades(float deltaTime) {
|
||||
|
|
|
@ -139,6 +139,12 @@ MyAvatar::MyAvatar(QThread* thread) :
|
|||
auto geometry = getSkeletonModel()->getFBXGeometry();
|
||||
qApp->loadAvatarScripts(geometry.scripts);
|
||||
_shouldLoadScripts = false;
|
||||
}
|
||||
// Load and convert old attachments to avatar entities
|
||||
if (_oldAttachmentData.size() > 0) {
|
||||
setAttachmentData(_oldAttachmentData);
|
||||
_oldAttachmentData.clear();
|
||||
_attachmentData.clear();
|
||||
}
|
||||
});
|
||||
connect(_skeletonModel.get(), &Model::rigReady, this, &Avatar::rigReady);
|
||||
|
@ -492,14 +498,14 @@ void MyAvatar::update(float deltaTime) {
|
|||
// Get audio loudness data from audio input device
|
||||
// Also get the AudioClient so we can update the avatar bounding box data
|
||||
// on the AudioClient side.
|
||||
auto audio = DependencyManager::get<AudioClient>();
|
||||
auto audio = DependencyManager::get<AudioClient>().data();
|
||||
setAudioLoudness(audio->getLastInputLoudness());
|
||||
setAudioAverageLoudness(audio->getAudioAverageInputLoudness());
|
||||
|
||||
glm::vec3 halfBoundingBoxDimensions(_characterController.getCapsuleRadius(), _characterController.getCapsuleHalfHeight(), _characterController.getCapsuleRadius());
|
||||
// This might not be right! Isn't the capsule local offset in avatar space? -HRS 5/26/17
|
||||
halfBoundingBoxDimensions += _characterController.getCapsuleLocalOffset();
|
||||
QMetaObject::invokeMethod(audio.data(), "setAvatarBoundingBoxParameters",
|
||||
QMetaObject::invokeMethod(audio, "setAvatarBoundingBoxParameters",
|
||||
Q_ARG(glm::vec3, (getWorldPosition() - halfBoundingBoxDimensions)),
|
||||
Q_ARG(glm::vec3, (halfBoundingBoxDimensions*2.0f)));
|
||||
|
||||
|
@ -1249,7 +1255,6 @@ void MyAvatar::loadData() {
|
|||
|
||||
useFullAvatarURL(_fullAvatarURLFromPreferences, _fullAvatarModelName);
|
||||
|
||||
QVector<AttachmentData> attachmentData;
|
||||
int attachmentCount = settings.beginReadArray("attachmentData");
|
||||
for (int i = 0; i < attachmentCount; i++) {
|
||||
settings.setArrayIndex(i);
|
||||
|
@ -1266,10 +1271,10 @@ void MyAvatar::loadData() {
|
|||
attachment.rotation = glm::quat(eulers);
|
||||
attachment.scale = loadSetting(settings, "scale", 1.0f);
|
||||
attachment.isSoft = settings.value("isSoft").toBool();
|
||||
attachmentData.append(attachment);
|
||||
// old attachments are stored and loaded/converted later when rig is ready
|
||||
_oldAttachmentData.append(attachment);
|
||||
}
|
||||
settings.endArray();
|
||||
setAttachmentData(attachmentData);
|
||||
|
||||
int avatarEntityCount = settings.beginReadArray("avatarEntityData");
|
||||
for (int i = 0; i < avatarEntityCount; i++) {
|
||||
|
@ -1494,50 +1499,126 @@ void MyAvatar::setJointRotations(const QVector<glm::quat>& jointRotations) {
|
|||
}
|
||||
|
||||
void MyAvatar::setJointData(int index, const glm::quat& rotation, const glm::vec3& translation) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setJointData", Q_ARG(int, index), Q_ARG(const glm::quat&, rotation),
|
||||
Q_ARG(const glm::vec3&, translation));
|
||||
return;
|
||||
switch (index) {
|
||||
case FARGRAB_RIGHTHAND_INDEX: {
|
||||
_farGrabRightMatrixCache.set(createMatFromQuatAndPos(rotation, translation));
|
||||
break;
|
||||
}
|
||||
case FARGRAB_LEFTHAND_INDEX: {
|
||||
_farGrabLeftMatrixCache.set(createMatFromQuatAndPos(rotation, translation));
|
||||
break;
|
||||
}
|
||||
case FARGRAB_MOUSE_INDEX: {
|
||||
_farGrabMouseMatrixCache.set(createMatFromQuatAndPos(rotation, translation));
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setJointData", Q_ARG(int, index), Q_ARG(const glm::quat&, rotation),
|
||||
Q_ARG(const glm::vec3&, translation));
|
||||
return;
|
||||
}
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointState(index, true, rotation, translation, SCRIPT_PRIORITY);
|
||||
}
|
||||
}
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointState(index, true, rotation, translation, SCRIPT_PRIORITY);
|
||||
}
|
||||
|
||||
void MyAvatar::setJointRotation(int index, const glm::quat& rotation) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setJointRotation", Q_ARG(int, index), Q_ARG(const glm::quat&, rotation));
|
||||
return;
|
||||
switch (index) {
|
||||
case FARGRAB_RIGHTHAND_INDEX: {
|
||||
glm::mat4 prevMat = _farGrabRightMatrixCache.get();
|
||||
glm::vec3 previousTranslation = extractTranslation(prevMat);
|
||||
_farGrabRightMatrixCache.set(createMatFromQuatAndPos(rotation, previousTranslation));
|
||||
break;
|
||||
}
|
||||
case FARGRAB_LEFTHAND_INDEX: {
|
||||
glm::mat4 prevMat = _farGrabLeftMatrixCache.get();
|
||||
glm::vec3 previousTranslation = extractTranslation(prevMat);
|
||||
_farGrabLeftMatrixCache.set(createMatFromQuatAndPos(rotation, previousTranslation));
|
||||
break;
|
||||
}
|
||||
case FARGRAB_MOUSE_INDEX: {
|
||||
glm::mat4 prevMat = _farGrabMouseMatrixCache.get();
|
||||
glm::vec3 previousTranslation = extractTranslation(prevMat);
|
||||
_farGrabMouseMatrixCache.set(createMatFromQuatAndPos(rotation, previousTranslation));
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setJointRotation", Q_ARG(int, index), Q_ARG(const glm::quat&, rotation));
|
||||
return;
|
||||
}
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointRotation(index, true, rotation, SCRIPT_PRIORITY);
|
||||
}
|
||||
}
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointRotation(index, true, rotation, SCRIPT_PRIORITY);
|
||||
}
|
||||
|
||||
void MyAvatar::setJointTranslation(int index, const glm::vec3& translation) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setJointTranslation", Q_ARG(int, index), Q_ARG(const glm::vec3&, translation));
|
||||
return;
|
||||
switch (index) {
|
||||
case FARGRAB_RIGHTHAND_INDEX: {
|
||||
glm::mat4 prevMat = _farGrabRightMatrixCache.get();
|
||||
glm::quat previousRotation = extractRotation(prevMat);
|
||||
_farGrabRightMatrixCache.set(createMatFromQuatAndPos(previousRotation, translation));
|
||||
break;
|
||||
}
|
||||
case FARGRAB_LEFTHAND_INDEX: {
|
||||
glm::mat4 prevMat = _farGrabLeftMatrixCache.get();
|
||||
glm::quat previousRotation = extractRotation(prevMat);
|
||||
_farGrabLeftMatrixCache.set(createMatFromQuatAndPos(previousRotation, translation));
|
||||
break;
|
||||
}
|
||||
case FARGRAB_MOUSE_INDEX: {
|
||||
glm::mat4 prevMat = _farGrabMouseMatrixCache.get();
|
||||
glm::quat previousRotation = extractRotation(prevMat);
|
||||
_farGrabMouseMatrixCache.set(createMatFromQuatAndPos(previousRotation, translation));
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setJointTranslation",
|
||||
Q_ARG(int, index), Q_ARG(const glm::vec3&, translation));
|
||||
return;
|
||||
}
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointTranslation(index, true, translation, SCRIPT_PRIORITY);
|
||||
}
|
||||
}
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointTranslation(index, true, translation, SCRIPT_PRIORITY);
|
||||
}
|
||||
|
||||
void MyAvatar::clearJointData(int index) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "clearJointData", Q_ARG(int, index));
|
||||
return;
|
||||
switch (index) {
|
||||
case FARGRAB_RIGHTHAND_INDEX: {
|
||||
_farGrabRightMatrixCache.invalidate();
|
||||
break;
|
||||
}
|
||||
case FARGRAB_LEFTHAND_INDEX: {
|
||||
_farGrabLeftMatrixCache.invalidate();
|
||||
break;
|
||||
}
|
||||
case FARGRAB_MOUSE_INDEX: {
|
||||
_farGrabMouseMatrixCache.invalidate();
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "clearJointData", Q_ARG(int, index));
|
||||
return;
|
||||
}
|
||||
_skeletonModel->getRig().clearJointAnimationPriority(index);
|
||||
}
|
||||
}
|
||||
_skeletonModel->getRig().clearJointAnimationPriority(index);
|
||||
}
|
||||
|
||||
void MyAvatar::setJointData(const QString& name, const glm::quat& rotation, const glm::vec3& translation) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "setJointData", Q_ARG(QString, name), Q_ARG(const glm::quat&, rotation),
|
||||
Q_ARG(const glm::vec3&, translation));
|
||||
Q_ARG(const glm::vec3&, translation));
|
||||
return;
|
||||
}
|
||||
writeLockWithNamedJointIndex(name, [&](int index) {
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointState(index, true, rotation, translation, SCRIPT_PRIORITY);
|
||||
setJointData(index, rotation, translation);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1547,8 +1628,7 @@ void MyAvatar::setJointRotation(const QString& name, const glm::quat& rotation)
|
|||
return;
|
||||
}
|
||||
writeLockWithNamedJointIndex(name, [&](int index) {
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointRotation(index, true, rotation, SCRIPT_PRIORITY);
|
||||
setJointRotation(index, rotation);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1558,8 +1638,7 @@ void MyAvatar::setJointTranslation(const QString& name, const glm::vec3& transla
|
|||
return;
|
||||
}
|
||||
writeLockWithNamedJointIndex(name, [&](int index) {
|
||||
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
|
||||
_skeletonModel->getRig().setJointTranslation(index, true, translation, SCRIPT_PRIORITY);
|
||||
setJointTranslation(index, translation);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1569,7 +1648,7 @@ void MyAvatar::clearJointData(const QString& name) {
|
|||
return;
|
||||
}
|
||||
writeLockWithNamedJointIndex(name, [&](int index) {
|
||||
_skeletonModel->getRig().clearJointAnimationPriority(index);
|
||||
clearJointData(index);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -1578,6 +1657,9 @@ void MyAvatar::clearJointsData() {
|
|||
QMetaObject::invokeMethod(this, "clearJointsData");
|
||||
return;
|
||||
}
|
||||
_farGrabRightMatrixCache.invalidate();
|
||||
_farGrabLeftMatrixCache.invalidate();
|
||||
_farGrabMouseMatrixCache.invalidate();
|
||||
_skeletonModel->getRig().clearJointStates();
|
||||
}
|
||||
|
||||
|
@ -1688,16 +1770,6 @@ void MyAvatar::useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelN
|
|||
markIdentityDataChanged();
|
||||
}
|
||||
|
||||
void MyAvatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
BLOCKING_INVOKE_METHOD(this, "setAttachmentData",
|
||||
Q_ARG(const QVector<AttachmentData>, attachmentData));
|
||||
return;
|
||||
}
|
||||
Avatar::setAttachmentData(attachmentData);
|
||||
emit attachmentsChanged();
|
||||
}
|
||||
|
||||
glm::vec3 MyAvatar::getSkeletonPosition() const {
|
||||
CameraMode mode = qApp->getCamera().getMode();
|
||||
if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_INDEPENDENT) {
|
||||
|
@ -1968,20 +2040,164 @@ void MyAvatar::attach(const QString& modelURL, const QString& jointName,
|
|||
float scale, bool isSoft,
|
||||
bool allowDuplicates, bool useSaved) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
Avatar::attach(modelURL, jointName, translation, rotation, scale, isSoft, allowDuplicates, useSaved);
|
||||
BLOCKING_INVOKE_METHOD(this, "attach",
|
||||
Q_ARG(const QString&, modelURL),
|
||||
Q_ARG(const QString&, jointName),
|
||||
Q_ARG(const glm::vec3&, translation),
|
||||
Q_ARG(const glm::quat&, rotation),
|
||||
Q_ARG(float, scale),
|
||||
Q_ARG(bool, isSoft),
|
||||
Q_ARG(bool, allowDuplicates),
|
||||
Q_ARG(bool, useSaved)
|
||||
);
|
||||
return;
|
||||
}
|
||||
if (useSaved) {
|
||||
AttachmentData attachment = loadAttachmentData(modelURL, jointName);
|
||||
if (attachment.isValid()) {
|
||||
Avatar::attach(modelURL, attachment.jointName,
|
||||
attachment.translation, attachment.rotation,
|
||||
attachment.scale, attachment.isSoft,
|
||||
allowDuplicates, useSaved);
|
||||
return;
|
||||
AttachmentData data;
|
||||
data.modelURL = modelURL;
|
||||
data.jointName = jointName;
|
||||
data.translation = translation;
|
||||
data.rotation = rotation;
|
||||
data.scale = scale;
|
||||
data.isSoft = isSoft;
|
||||
EntityItemProperties properties;
|
||||
attachmentDataToEntityProperties(data, properties);
|
||||
DependencyManager::get<EntityScriptingInterface>()->addEntity(properties, true);
|
||||
emit attachmentsChanged();
|
||||
}
|
||||
|
||||
void MyAvatar::detachOne(const QString& modelURL, const QString& jointName) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
BLOCKING_INVOKE_METHOD(this, "detachOne",
|
||||
Q_ARG(const QString&, modelURL),
|
||||
Q_ARG(const QString&, jointName)
|
||||
);
|
||||
return;
|
||||
}
|
||||
QUuid entityID;
|
||||
if (findAvatarEntity(modelURL, jointName, entityID)) {
|
||||
DependencyManager::get<EntityScriptingInterface>()->deleteEntity(entityID);
|
||||
}
|
||||
emit attachmentsChanged();
|
||||
}
|
||||
|
||||
void MyAvatar::detachAll(const QString& modelURL, const QString& jointName) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
BLOCKING_INVOKE_METHOD(this, "detachAll",
|
||||
Q_ARG(const QString&, modelURL),
|
||||
Q_ARG(const QString&, jointName)
|
||||
);
|
||||
return;
|
||||
}
|
||||
QUuid entityID;
|
||||
while (findAvatarEntity(modelURL, jointName, entityID)) {
|
||||
DependencyManager::get<EntityScriptingInterface>()->deleteEntity(entityID);
|
||||
}
|
||||
emit attachmentsChanged();
|
||||
}
|
||||
|
||||
void MyAvatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
BLOCKING_INVOKE_METHOD(this, "setAttachmentData",
|
||||
Q_ARG(const QVector<AttachmentData>&, attachmentData));
|
||||
return;
|
||||
}
|
||||
std::vector<EntityItemProperties> newEntitiesProperties;
|
||||
for (auto& data : attachmentData) {
|
||||
QUuid entityID;
|
||||
EntityItemProperties properties;
|
||||
if (findAvatarEntity(data.modelURL.toString(), data.jointName, entityID)) {
|
||||
properties = DependencyManager::get<EntityScriptingInterface>()->getEntityProperties(entityID);
|
||||
}
|
||||
attachmentDataToEntityProperties(data, properties);
|
||||
newEntitiesProperties.push_back(properties);
|
||||
}
|
||||
removeAvatarEntities();
|
||||
for (auto& properties : newEntitiesProperties) {
|
||||
DependencyManager::get<EntityScriptingInterface>()->addEntity(properties, true);
|
||||
}
|
||||
emit attachmentsChanged();
|
||||
}
|
||||
|
||||
QVector<AttachmentData> MyAvatar::getAttachmentData() const {
|
||||
QVector<AttachmentData> avatarData;
|
||||
auto avatarEntities = getAvatarEntityData();
|
||||
AvatarEntityMap::const_iterator dataItr = avatarEntities.begin();
|
||||
while (dataItr != avatarEntities.end()) {
|
||||
QUuid entityID = dataItr.key();
|
||||
auto properties = DependencyManager::get<EntityScriptingInterface>()->getEntityProperties(entityID);
|
||||
AttachmentData data = entityPropertiesToAttachmentData(properties);
|
||||
avatarData.append(data);
|
||||
dataItr++;
|
||||
}
|
||||
return avatarData;
|
||||
}
|
||||
|
||||
QVariantList MyAvatar::getAttachmentsVariant() const {
|
||||
QVariantList result;
|
||||
for (const auto& attachment : getAttachmentData()) {
|
||||
result.append(attachment.toVariant());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
void MyAvatar::setAttachmentsVariant(const QVariantList& variant) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
BLOCKING_INVOKE_METHOD(this, "setAttachmentsVariant",
|
||||
Q_ARG(const QVariantList&, variant));
|
||||
return;
|
||||
}
|
||||
QVector<AttachmentData> newAttachments;
|
||||
newAttachments.reserve(variant.size());
|
||||
for (const auto& attachmentVar : variant) {
|
||||
AttachmentData attachment;
|
||||
if (attachment.fromVariant(attachmentVar)) {
|
||||
newAttachments.append(attachment);
|
||||
}
|
||||
}
|
||||
Avatar::attach(modelURL, jointName, translation, rotation, scale, isSoft, allowDuplicates, useSaved);
|
||||
setAttachmentData(newAttachments);
|
||||
}
|
||||
|
||||
bool MyAvatar::findAvatarEntity(const QString& modelURL, const QString& jointName, QUuid& entityID) {
|
||||
auto avatarEntities = getAvatarEntityData();
|
||||
AvatarEntityMap::const_iterator dataItr = avatarEntities.begin();
|
||||
while (dataItr != avatarEntities.end()) {
|
||||
entityID = dataItr.key();
|
||||
auto props = DependencyManager::get<EntityScriptingInterface>()->getEntityProperties(entityID);
|
||||
if (props.getModelURL() == modelURL &&
|
||||
(jointName.isEmpty() || props.getParentJointIndex() == getJointIndex(jointName))) {
|
||||
return true;
|
||||
}
|
||||
dataItr++;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
AttachmentData MyAvatar::entityPropertiesToAttachmentData(const EntityItemProperties& properties) const {
|
||||
AttachmentData data;
|
||||
data.modelURL = properties.getModelURL();
|
||||
data.translation = properties.getLocalPosition();
|
||||
data.rotation = properties.getLocalRotation();
|
||||
data.isSoft = properties.getRelayParentJoints();
|
||||
int jointIndex = (int)properties.getParentJointIndex();
|
||||
if (jointIndex > -1 && jointIndex < getJointNames().size()) {
|
||||
data.jointName = getJointNames()[jointIndex];
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
void MyAvatar::attachmentDataToEntityProperties(const AttachmentData& data, EntityItemProperties& properties) {
QString url = data.modelURL.toString();
properties.setName(QFileInfo(url).baseName());
properties.setType(EntityTypes::Model);
properties.setParentID(AVATAR_SELF_ID);
properties.setLocalPosition(data.translation);
properties.setLocalRotation(data.rotation);
if (!data.isSoft) {
properties.setParentJointIndex(getJointIndex(data.jointName));
} else {
properties.setRelayParentJoints(true);
}
properties.setModelURL(url);
}
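
attachmentDataToEntityProperties() is the core of the migration in this change: each legacy attachment becomes a Model entity parented to the wearer, either pinned to a single joint (rigid) or relaying parent joints (soft). A plain-struct sketch of that mapping, with stand-in types rather than the real AttachmentData/EntityItemProperties API:

    #include <optional>
    #include <string>

    // Stand-ins for the real attachment and entity-property types.
    struct LegacyAttachment {
        std::string modelUrl;
        std::string jointName;
        bool isSoft = false;
    };

    struct AvatarEntitySketch {
        std::string modelUrl;
        std::string parentId = "AVATAR_SELF_ID";   // always parented to the wearer
        std::optional<int> parentJointIndex;       // set only for rigid attachments
        bool relayParentJoints = false;            // set only for soft (skinned) attachments
    };

    AvatarEntitySketch toAvatarEntity(const LegacyAttachment& attachment, int resolvedJointIndex) {
        AvatarEntitySketch entity;
        entity.modelUrl = attachment.modelUrl;
        if (attachment.isSoft) {
            // Soft attachments deform with the whole skeleton instead of following one joint.
            entity.relayParentJoints = true;
        } else {
            entity.parentJointIndex = resolvedJointIndex;
        }
        return entity;
    }

Representing attachments as avatar entities lets the existing entity pipeline handle them, which is why the old AttachmentData read from settings is only converted once the rig is ready (see the rigReady handler earlier in this diff).
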
|
||||
|
||||
void MyAvatar::initHeadBones() {
|
||||
|
|
|
@ -869,8 +869,6 @@ public:
|
|||
|
||||
void resetFullAvatarURL();
|
||||
|
||||
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData) override;
|
||||
|
||||
MyCharacterController* getCharacterController() { return &_characterController; }
|
||||
const MyCharacterController* getCharacterController() const { return &_characterController; }
|
||||
|
||||
|
@ -1090,6 +1088,12 @@ public:
|
|||
float computeStandingHeightMode(const controller::Pose& head);
|
||||
glm::quat computeAverageHeadRotation(const controller::Pose& head);
|
||||
|
||||
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData) override;
|
||||
virtual QVector<AttachmentData> getAttachmentData() const override;
|
||||
|
||||
virtual QVariantList getAttachmentsVariant() const override;
|
||||
virtual void setAttachmentsVariant(const QVariantList& variant) override;
|
||||
|
||||
public slots:
|
||||
|
||||
/**jsdoc
|
||||
|
@ -1528,11 +1532,21 @@ private:
|
|||
void setScriptedMotorTimescale(float timescale);
|
||||
void setScriptedMotorFrame(QString frame);
|
||||
void setScriptedMotorMode(QString mode);
|
||||
|
||||
// Attachments
|
||||
virtual void attach(const QString& modelURL, const QString& jointName = QString(),
|
||||
const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(),
|
||||
float scale = 1.0f, bool isSoft = false,
|
||||
bool allowDuplicates = false, bool useSaved = true) override;
|
||||
|
||||
virtual void detachOne(const QString& modelURL, const QString& jointName = QString()) override;
|
||||
virtual void detachAll(const QString& modelURL, const QString& jointName = QString()) override;
|
||||
|
||||
// Attachments/Avatar Entity
|
||||
void attachmentDataToEntityProperties(const AttachmentData& data, EntityItemProperties& properties);
|
||||
AttachmentData entityPropertiesToAttachmentData(const EntityItemProperties& properties) const;
|
||||
bool findAvatarEntity(const QString& modelURL, const QString& jointName, QUuid& entityID);
|
||||
|
||||
bool cameraInsideHead(const glm::vec3& cameraPosition) const;
|
||||
|
||||
void updateEyeContactTarget(float deltaTime);
|
||||
|
|
|
@ -352,8 +352,7 @@ bool QmlCommerce::openApp(const QString& itemHref) {
|
|||
QJsonObject appFileJsonObject = appFileJsonDocument.object();
|
||||
QString homeUrl = appFileJsonObject["homeURL"].toString();
|
||||
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto tablet = dynamic_cast<TabletProxy*>(DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
if (homeUrl.contains(".qml", Qt::CaseInsensitive)) {
|
||||
tablet->loadQMLSource(homeUrl);
|
||||
} else if (homeUrl.contains(".html", Qt::CaseInsensitive)) {
|
||||
|
|
|
@ -328,7 +328,7 @@ Wallet::Wallet() {
|
|||
packetReceiver.registerListener(PacketType::ChallengeOwnership, this, "handleChallengeOwnershipPacket");
|
||||
packetReceiver.registerListener(PacketType::ChallengeOwnershipRequest, this, "handleChallengeOwnershipPacket");
|
||||
|
||||
connect(ledger.data(), &Ledger::accountResult, this, [&](QJsonObject result) {
|
||||
connect(ledger.data(), &Ledger::accountResult, this, [](QJsonObject result) {
|
||||
auto wallet = DependencyManager::get<Wallet>();
|
||||
auto walletScriptingInterface = DependencyManager::get<WalletScriptingInterface>();
|
||||
uint status;
|
||||
|
|
353
interface/src/raypick/CollisionPick.cpp
Normal file
353
interface/src/raypick/CollisionPick.cpp
Normal file
|
@ -0,0 +1,353 @@
|
|||
//
|
||||
// Created by Sabrina Shanman 7/16/2018
|
||||
// Copyright 2018 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "CollisionPick.h"
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
|
||||
#include <glm/gtx/transform.hpp>
|
||||
|
||||
#include "ScriptEngineLogging.h"
|
||||
#include "UUIDHasher.h"
|
||||
|
||||
void buildObjectIntersectionsMap(IntersectionType intersectionType, const std::vector<ContactTestResult>& objectIntersections, std::unordered_map<QUuid, QVariantMap>& intersections, std::unordered_map<QUuid, QVariantList>& collisionPointPairs) {
|
||||
for (auto& objectIntersection : objectIntersections) {
|
||||
auto at = intersections.find(objectIntersection.foundID);
|
||||
if (at == intersections.end()) {
|
||||
QVariantMap intersectingObject;
|
||||
intersectingObject["id"] = objectIntersection.foundID;
|
||||
intersectingObject["type"] = intersectionType;
|
||||
intersections[objectIntersection.foundID] = intersectingObject;
|
||||
|
||||
collisionPointPairs[objectIntersection.foundID] = QVariantList();
|
||||
}
|
||||
|
||||
QVariantMap collisionPointPair;
|
||||
collisionPointPair["pointOnPick"] = vec3toVariant(objectIntersection.testCollisionPoint);
|
||||
collisionPointPair["pointOnObject"] = vec3toVariant(objectIntersection.foundCollisionPoint);
|
||||
|
||||
collisionPointPairs[objectIntersection.foundID].append(collisionPointPair);
|
||||
}
|
||||
}
|
||||
|
||||
QVariantMap CollisionPickResult::toVariantMap() const {
|
||||
QVariantMap variantMap;
|
||||
|
||||
variantMap["intersects"] = intersects;
|
||||
|
||||
std::unordered_map<QUuid, QVariantMap> intersections;
|
||||
std::unordered_map<QUuid, QVariantList> collisionPointPairs;
|
||||
|
||||
buildObjectIntersectionsMap(ENTITY, entityIntersections, intersections, collisionPointPairs);
|
||||
buildObjectIntersectionsMap(AVATAR, avatarIntersections, intersections, collisionPointPairs);
|
||||
|
||||
QVariantList qIntersectingObjects;
|
||||
for (auto& intersectionKeyVal : intersections) {
|
||||
const QUuid& id = intersectionKeyVal.first;
|
||||
QVariantMap& intersection = intersectionKeyVal.second;
|
||||
|
||||
intersection["collisionContacts"] = collisionPointPairs[id];
|
||||
qIntersectingObjects.append(intersection);
|
||||
}
|
||||
|
||||
variantMap["intersectingObjects"] = qIntersectingObjects;
|
||||
variantMap["loaded"] = (loadState == LOAD_STATE_LOADED);
|
||||
variantMap["collisionRegion"] = pickVariant;
|
||||
|
||||
return variantMap;
|
||||
}
|
||||
|
||||
bool CollisionPick::isShapeInfoReady() {
|
||||
if (_mathPick.shouldComputeShapeInfo()) {
|
||||
if (_cachedResource && _cachedResource->isLoaded()) {
|
||||
computeShapeInfo(_mathPick, *_mathPick.shapeInfo, _cachedResource);
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void CollisionPick::computeShapeInfo(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource) {
|
||||
// This code was copied and modified from RenderableModelEntityItem::computeShapeInfo
|
||||
// TODO: Move to some shared code area (in entities-renderer? model-networking?)
|
||||
// after we verify this is working and do a diff comparison with RenderableModelEntityItem::computeShapeInfo
|
||||
// to consolidate the code.
|
||||
// We may also want to make computeShapeInfo always abstract away from the gpu model mesh, like it does here.
|
||||
const uint32_t TRIANGLE_STRIDE = 3;
|
||||
const uint32_t QUAD_STRIDE = 4;
|
||||
|
||||
ShapeType type = shapeInfo.getType();
|
||||
glm::vec3 dimensions = pick.transform.getScale();
|
||||
if (type == SHAPE_TYPE_COMPOUND) {
|
||||
// should never fall in here when collision model not fully loaded
|
||||
// TODO: assert that all geometries exist and are loaded
|
||||
//assert(_model && _model->isLoaded() && _compoundShapeResource && _compoundShapeResource->isLoaded());
|
||||
const FBXGeometry& collisionGeometry = resource->getFBXGeometry();
|
||||
|
||||
ShapeInfo::PointCollection& pointCollection = shapeInfo.getPointCollection();
|
||||
pointCollection.clear();
|
||||
uint32_t i = 0;
|
||||
|
||||
// the way OBJ files get read, each section under a "g" line is its own meshPart. We only expect
|
||||
// to find one actual "mesh" (with one or more meshParts in it), but we loop over the meshes, just in case.
|
||||
foreach (const FBXMesh& mesh, collisionGeometry.meshes) {
|
||||
// each meshPart is a convex hull
|
||||
foreach (const FBXMeshPart &meshPart, mesh.parts) {
|
||||
pointCollection.push_back(QVector<glm::vec3>());
|
||||
ShapeInfo::PointList& pointsInPart = pointCollection[i];
|
||||
|
||||
// run through all the triangles and (uniquely) add each point to the hull
|
||||
uint32_t numIndices = (uint32_t)meshPart.triangleIndices.size();
|
||||
// TODO: assert rather than workaround after we start sanitizing FBXMesh higher up
|
||||
//assert(numIndices % TRIANGLE_STRIDE == 0);
|
||||
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
|
||||
|
||||
for (uint32_t j = 0; j < numIndices; j += TRIANGLE_STRIDE) {
|
||||
glm::vec3 p0 = mesh.vertices[meshPart.triangleIndices[j]];
|
||||
glm::vec3 p1 = mesh.vertices[meshPart.triangleIndices[j + 1]];
|
||||
glm::vec3 p2 = mesh.vertices[meshPart.triangleIndices[j + 2]];
|
||||
if (!pointsInPart.contains(p0)) {
|
||||
pointsInPart << p0;
|
||||
}
|
||||
if (!pointsInPart.contains(p1)) {
|
||||
pointsInPart << p1;
|
||||
}
|
||||
if (!pointsInPart.contains(p2)) {
|
||||
pointsInPart << p2;
|
||||
}
|
||||
}
|
||||
|
||||
// run through all the quads and (uniquely) add each point to the hull
|
||||
numIndices = (uint32_t)meshPart.quadIndices.size();
|
||||
// TODO: assert rather than workaround after we start sanitizing FBXMesh higher up
|
||||
//assert(numIndices % QUAD_STRIDE == 0);
|
||||
numIndices -= numIndices % QUAD_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
|
||||
|
||||
for (uint32_t j = 0; j < numIndices; j += QUAD_STRIDE) {
|
||||
glm::vec3 p0 = mesh.vertices[meshPart.quadIndices[j]];
|
||||
glm::vec3 p1 = mesh.vertices[meshPart.quadIndices[j + 1]];
|
||||
glm::vec3 p2 = mesh.vertices[meshPart.quadIndices[j + 2]];
|
||||
glm::vec3 p3 = mesh.vertices[meshPart.quadIndices[j + 3]];
|
||||
if (!pointsInPart.contains(p0)) {
|
||||
pointsInPart << p0;
|
||||
}
|
||||
if (!pointsInPart.contains(p1)) {
|
||||
pointsInPart << p1;
|
||||
}
|
||||
if (!pointsInPart.contains(p2)) {
|
||||
pointsInPart << p2;
|
||||
}
|
||||
if (!pointsInPart.contains(p3)) {
|
||||
pointsInPart << p3;
|
||||
}
|
||||
}
|
||||
|
||||
if (pointsInPart.size() == 0) {
|
||||
qCDebug(scriptengine) << "Warning -- meshPart has no faces";
|
||||
pointCollection.pop_back();
|
||||
continue;
|
||||
}
|
||||
++i;
|
||||
}
|
||||
}
|
||||
|
||||
// We expect that the collision model will have the same units and will be displaced
|
||||
// from its origin in the same way the visual model is. The visual model has
|
||||
// been centered and probably scaled. We take the scaling and offset which were applied
|
||||
// to the visual model and apply them to the collision model (without regard for the
|
||||
// collision model's extents).
|
||||
|
||||
glm::vec3 scaleToFit = dimensions / resource->getFBXGeometry().getUnscaledMeshExtents().size();
|
||||
// multiply each point by scale
|
||||
for (int32_t i = 0; i < pointCollection.size(); i++) {
|
||||
for (int32_t j = 0; j < pointCollection[i].size(); j++) {
|
||||
// back compensate for registration so we can apply that offset to the shapeInfo later
|
||||
pointCollection[i][j] = scaleToFit * pointCollection[i][j];
|
||||
}
|
||||
}
|
||||
shapeInfo.setParams(type, dimensions, resource->getURL().toString());
|
||||
} else if (type >= SHAPE_TYPE_SIMPLE_HULL && type <= SHAPE_TYPE_STATIC_MESH) {
|
||||
const FBXGeometry& fbxGeometry = resource->getFBXGeometry();
|
||||
int numFbxMeshes = fbxGeometry.meshes.size();
|
||||
int totalNumVertices = 0;
|
||||
for (int i = 0; i < numFbxMeshes; i++) {
|
||||
const FBXMesh& mesh = fbxGeometry.meshes.at(i);
|
||||
totalNumVertices += mesh.vertices.size();
|
||||
}
|
||||
const int32_t MAX_VERTICES_PER_STATIC_MESH = 1e6;
|
||||
if (totalNumVertices > MAX_VERTICES_PER_STATIC_MESH) {
|
||||
qWarning() << "model" << resource->getURL() << "has too many vertices" << totalNumVertices << "and will collide as a box.";
|
||||
shapeInfo.setParams(SHAPE_TYPE_BOX, 0.5f * dimensions);
|
||||
return;
|
||||
}
|
||||
|
||||
auto& meshes = resource->getFBXGeometry().meshes;
|
||||
int32_t numMeshes = (int32_t)(meshes.size());
|
||||
|
||||
const int MAX_ALLOWED_MESH_COUNT = 1000;
|
||||
if (numMeshes > MAX_ALLOWED_MESH_COUNT) {
|
||||
// too many will cause the deadlock timer to throw...
|
||||
shapeInfo.setParams(SHAPE_TYPE_BOX, 0.5f * dimensions);
|
||||
return;
|
||||
}
|
||||
|
||||
ShapeInfo::PointCollection& pointCollection = shapeInfo.getPointCollection();
|
||||
pointCollection.clear();
|
||||
if (type == SHAPE_TYPE_SIMPLE_COMPOUND) {
|
||||
pointCollection.resize(numMeshes);
|
||||
} else {
|
||||
pointCollection.resize(1);
|
||||
}
|
||||
|
||||
ShapeInfo::TriangleIndices& triangleIndices = shapeInfo.getTriangleIndices();
|
||||
triangleIndices.clear();
|
||||
|
||||
Extents extents;
|
||||
int32_t meshCount = 0;
|
||||
int32_t pointListIndex = 0;
|
||||
for (auto& mesh : meshes) {
|
||||
if (!mesh.vertices.size()) {
|
||||
continue;
|
||||
}
|
||||
QVector<glm::vec3> vertices = mesh.vertices;
|
||||
|
||||
ShapeInfo::PointList& points = pointCollection[pointListIndex];
|
||||
|
||||
// reserve room
|
||||
int32_t sizeToReserve = (int32_t)(vertices.count());
|
||||
if (type == SHAPE_TYPE_SIMPLE_COMPOUND) {
|
||||
// a list of points for each mesh
|
||||
pointListIndex++;
|
||||
} else {
|
||||
// only one list of points
|
||||
sizeToReserve += (int32_t)points.size();
|
||||
}
|
||||
points.reserve(sizeToReserve);
|
||||
|
||||
// copy points
|
||||
const glm::vec3* vertexItr = vertices.cbegin();
|
||||
while (vertexItr != vertices.cend()) {
|
||||
glm::vec3 point = *vertexItr;
|
||||
points.push_back(point);
|
||||
extents.addPoint(point);
|
||||
++vertexItr;
|
||||
}
|
||||
|
||||
if (type == SHAPE_TYPE_STATIC_MESH) {
|
||||
// copy into triangleIndices
|
||||
size_t triangleIndicesCount = 0;
|
||||
for (const FBXMeshPart& meshPart : mesh.parts) {
|
||||
triangleIndicesCount += meshPart.triangleIndices.count();
|
||||
}
|
||||
triangleIndices.reserve((int)triangleIndicesCount);
|
||||
|
||||
for (const FBXMeshPart& meshPart : mesh.parts) {
|
||||
const int* indexItr = meshPart.triangleIndices.cbegin();
|
||||
while (indexItr != meshPart.triangleIndices.cend()) {
|
||||
triangleIndices.push_back(*indexItr);
|
||||
++indexItr;
|
||||
}
|
||||
}
|
||||
} else if (type == SHAPE_TYPE_SIMPLE_COMPOUND) {
|
||||
// for each mesh copy unique part indices, separated by special bogus (flag) index values
|
||||
for (const FBXMeshPart& meshPart : mesh.parts) {
|
||||
// collect unique list of indices for this part
|
||||
std::set<int32_t> uniqueIndices;
|
||||
auto numIndices = meshPart.triangleIndices.count();
|
||||
// TODO: assert rather than workaround after we start sanitizing FBXMesh higher up
|
||||
//assert(numIndices% TRIANGLE_STRIDE == 0);
|
||||
numIndices -= numIndices % TRIANGLE_STRIDE; // WORKAROUND lack of sanity checking in FBXReader
|
||||
|
||||
auto indexItr = meshPart.triangleIndices.cbegin();
|
||||
while (indexItr != meshPart.triangleIndices.cend()) {
|
||||
uniqueIndices.insert(*indexItr);
|
||||
++indexItr;
|
||||
}
|
||||
|
||||
// store uniqueIndices in triangleIndices
|
||||
triangleIndices.reserve(triangleIndices.size() + (int32_t)uniqueIndices.size());
|
||||
for (auto index : uniqueIndices) {
|
||||
triangleIndices.push_back(index);
|
||||
}
|
||||
// flag end of part
|
||||
triangleIndices.push_back(END_OF_MESH_PART);
|
||||
}
|
||||
// flag end of mesh
|
||||
triangleIndices.push_back(END_OF_MESH);
|
||||
}
|
||||
++meshCount;
|
||||
}
|
||||
|
||||
// scale and shift
|
||||
glm::vec3 extentsSize = extents.size();
|
||||
glm::vec3 scaleToFit = dimensions / extentsSize;
|
||||
for (int32_t i = 0; i < 3; ++i) {
|
||||
if (extentsSize[i] < 1.0e-6f) {
|
||||
scaleToFit[i] = 1.0f;
|
||||
}
|
||||
}
|
||||
for (auto points : pointCollection) {
|
||||
for (int32_t i = 0; i < points.size(); ++i) {
|
||||
points[i] = (points[i] * scaleToFit);
|
||||
}
|
||||
}
|
||||
|
||||
shapeInfo.setParams(type, 0.5f * dimensions, resource->getURL().toString());
|
||||
}
|
||||
}
|
||||
|
||||
CollisionRegion CollisionPick::getMathematicalPick() const {
|
||||
return _mathPick;
|
||||
}
|
||||
|
||||
const std::vector<ContactTestResult> CollisionPick::filterIntersections(const std::vector<ContactTestResult>& intersections) const {
|
||||
std::vector<ContactTestResult> filteredIntersections;
|
||||
|
||||
const QVector<QUuid>& ignoreItems = getIgnoreItems();
|
||||
const QVector<QUuid>& includeItems = getIncludeItems();
|
||||
bool isWhitelist = includeItems.size();
|
||||
for (const auto& intersection : intersections) {
|
||||
const QUuid& id = intersection.foundID;
|
||||
if (!ignoreItems.contains(id) && (!isWhitelist || includeItems.contains(id))) {
|
||||
filteredIntersections.push_back(intersection);
|
||||
}
|
||||
}
|
||||
|
||||
return filteredIntersections;
|
||||
}
|
||||
|
||||
PickResultPointer CollisionPick::getEntityIntersection(const CollisionRegion& pick) {
|
||||
if (!isShapeInfoReady()) {
|
||||
// Cannot compute result
|
||||
return std::make_shared<CollisionPickResult>(pick.toVariantMap(), CollisionPickResult::LOAD_STATE_NOT_LOADED, std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
|
||||
}
|
||||
|
||||
const auto& entityIntersections = filterIntersections(_physicsEngine->getCollidingInRegion(MOTIONSTATE_TYPE_ENTITY, *pick.shapeInfo, pick.transform));
|
||||
return std::make_shared<CollisionPickResult>(pick, CollisionPickResult::LOAD_STATE_LOADED, entityIntersections, std::vector<ContactTestResult>());
|
||||
}
|
||||
|
||||
PickResultPointer CollisionPick::getOverlayIntersection(const CollisionRegion& pick) {
|
||||
return std::make_shared<CollisionPickResult>(pick.toVariantMap(), isShapeInfoReady() ? CollisionPickResult::LOAD_STATE_LOADED : CollisionPickResult::LOAD_STATE_NOT_LOADED, std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
|
||||
}
|
||||
|
||||
PickResultPointer CollisionPick::getAvatarIntersection(const CollisionRegion& pick) {
|
||||
if (!isShapeInfoReady()) {
|
||||
// Cannot compute result
|
||||
return std::make_shared<CollisionPickResult>(pick.toVariantMap(), CollisionPickResult::LOAD_STATE_NOT_LOADED, std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
|
||||
}
|
||||
|
||||
const auto& avatarIntersections = filterIntersections(_physicsEngine->getCollidingInRegion(MOTIONSTATE_TYPE_AVATAR, *pick.shapeInfo, pick.transform));
|
||||
return std::make_shared<CollisionPickResult>(pick, CollisionPickResult::LOAD_STATE_LOADED, std::vector<ContactTestResult>(), avatarIntersections);
|
||||
}
|
||||
|
||||
PickResultPointer CollisionPick::getHUDIntersection(const CollisionRegion& pick) {
|
||||
return std::make_shared<CollisionPickResult>(pick.toVariantMap(), isShapeInfoReady() ? CollisionPickResult::LOAD_STATE_LOADED : CollisionPickResult::LOAD_STATE_NOT_LOADED, std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
|
||||
}
|
102
interface/src/raypick/CollisionPick.h
Normal file
|
@ -0,0 +1,102 @@
|
|||
//
|
||||
// Created by Sabrina Shanman 7/11/2018
|
||||
// Copyright 2018 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#ifndef hifi_CollisionPick_h
|
||||
#define hifi_CollisionPick_h
|
||||
|
||||
#include <PhysicsEngine.h>
|
||||
#include <model-networking/ModelCache.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
#include <Pick.h>
|
||||
|
||||
class CollisionPickResult : public PickResult {
|
||||
public:
|
||||
enum LoadState {
|
||||
LOAD_STATE_UNKNOWN,
|
||||
LOAD_STATE_NOT_LOADED,
|
||||
LOAD_STATE_LOADED
|
||||
};
|
||||
|
||||
CollisionPickResult() {}
|
||||
CollisionPickResult(const QVariantMap& pickVariant) : PickResult(pickVariant) {}
|
||||
|
||||
CollisionPickResult(const CollisionRegion& searchRegion, LoadState loadState, const std::vector<ContactTestResult>& entityIntersections, const std::vector<ContactTestResult>& avatarIntersections) :
|
||||
PickResult(searchRegion.toVariantMap()),
|
||||
loadState(loadState),
|
||||
intersects(entityIntersections.size() || avatarIntersections.size()),
|
||||
entityIntersections(entityIntersections),
|
||||
avatarIntersections(avatarIntersections) {
|
||||
}
|
||||
|
||||
CollisionPickResult(const CollisionPickResult& collisionPickResult) : PickResult(collisionPickResult.pickVariant) {
|
||||
avatarIntersections = collisionPickResult.avatarIntersections;
|
||||
entityIntersections = collisionPickResult.entityIntersections;
|
||||
intersects = collisionPickResult.intersects;
|
||||
loadState = collisionPickResult.loadState;
|
||||
}
|
||||
|
||||
LoadState loadState { LOAD_STATE_UNKNOWN };
|
||||
bool intersects { false };
|
||||
std::vector<ContactTestResult> entityIntersections;
|
||||
std::vector<ContactTestResult> avatarIntersections;
|
||||
|
||||
QVariantMap toVariantMap() const override;
|
||||
|
||||
bool doesIntersect() const override { return intersects; }
|
||||
bool checkOrFilterAgainstMaxDistance(float maxDistance) override { return true; }
|
||||
|
||||
PickResultPointer compareAndProcessNewResult(const PickResultPointer& newRes) override {
|
||||
const std::shared_ptr<CollisionPickResult> newCollisionResult = std::static_pointer_cast<CollisionPickResult>(newRes);
|
||||
|
||||
for (ContactTestResult& entityIntersection : newCollisionResult->entityIntersections) {
|
||||
entityIntersections.push_back(entityIntersection);
|
||||
}
|
||||
for (ContactTestResult& avatarIntersection : newCollisionResult->avatarIntersections) {
|
||||
avatarIntersections.push_back(avatarIntersection);
|
||||
}
|
||||
|
||||
intersects = entityIntersections.size() || avatarIntersections.size();
|
||||
if (newCollisionResult->loadState == LOAD_STATE_NOT_LOADED || loadState == LOAD_STATE_UNKNOWN) {
|
||||
loadState = (LoadState)newCollisionResult->loadState;
|
||||
}
|
||||
|
||||
return std::make_shared<CollisionPickResult>(*this);
|
||||
}
|
||||
};
|
||||
|
||||
class CollisionPick : public Pick<CollisionRegion> {
|
||||
public:
|
||||
CollisionPick(const PickFilter& filter, float maxDistance, bool enabled, CollisionRegion collisionRegion, PhysicsEnginePointer physicsEngine) :
|
||||
Pick(filter, maxDistance, enabled),
|
||||
_mathPick(collisionRegion),
|
||||
_physicsEngine(physicsEngine) {
|
||||
if (collisionRegion.shouldComputeShapeInfo()) {
|
||||
_cachedResource = DependencyManager::get<ModelCache>()->getCollisionGeometryResource(collisionRegion.modelURL);
|
||||
}
|
||||
}
|
||||
|
||||
CollisionRegion getMathematicalPick() const override;
|
||||
PickResultPointer getDefaultResult(const QVariantMap& pickVariant) const override {
|
||||
return std::make_shared<CollisionPickResult>(pickVariant, CollisionPickResult::LOAD_STATE_UNKNOWN, std::vector<ContactTestResult>(), std::vector<ContactTestResult>());
|
||||
}
|
||||
PickResultPointer getEntityIntersection(const CollisionRegion& pick) override;
|
||||
PickResultPointer getOverlayIntersection(const CollisionRegion& pick) override;
|
||||
PickResultPointer getAvatarIntersection(const CollisionRegion& pick) override;
|
||||
PickResultPointer getHUDIntersection(const CollisionRegion& pick) override;
|
||||
|
||||
protected:
|
||||
// Returns true if pick.shapeInfo is valid. Otherwise, attempts to get the shapeInfo ready for use.
|
||||
bool isShapeInfoReady();
|
||||
void computeShapeInfo(CollisionRegion& pick, ShapeInfo& shapeInfo, QSharedPointer<GeometryResource> resource);
|
||||
const std::vector<ContactTestResult> filterIntersections(const std::vector<ContactTestResult>& intersections) const;
|
||||
|
||||
CollisionRegion _mathPick;
|
||||
PhysicsEnginePointer _physicsEngine;
|
||||
QSharedPointer<GeometryResource> _cachedResource;
|
||||
};
|
||||
|
||||
#endif // hifi_CollisionPick_h
|
|
@ -11,6 +11,7 @@
|
|||
#include <QVariant>
|
||||
#include "GLMHelpers.h"
|
||||
|
||||
#include "Application.h"
|
||||
#include <PickManager.h>
|
||||
|
||||
#include "StaticRayPick.h"
|
||||
|
@ -20,6 +21,7 @@
|
|||
#include "StaticParabolaPick.h"
|
||||
#include "JointParabolaPick.h"
|
||||
#include "MouseParabolaPick.h"
|
||||
#include "CollisionPick.h"
|
||||
|
||||
#include <ScriptEngine.h>
|
||||
|
||||
|
@ -31,6 +33,8 @@ unsigned int PickScriptingInterface::createPick(const PickQuery::PickType type,
|
|||
return createStylusPick(properties);
|
||||
case PickQuery::PickType::Parabola:
|
||||
return createParabolaPick(properties);
|
||||
case PickQuery::PickType::Collision:
|
||||
return createCollisionPick(properties);
|
||||
default:
|
||||
return PickManager::INVALID_PICK_ID;
|
||||
}
|
||||
|
@ -234,6 +238,48 @@ unsigned int PickScriptingInterface::createParabolaPick(const QVariant& properti
|
|||
return PickManager::INVALID_PICK_ID;
|
||||
}
|
||||
|
||||
/**jsdoc
|
||||
* A Shape defines a physical volume.
|
||||
*
|
||||
* @typedef {object} Shape
|
||||
* @property {string} shapeType - The type of shape to use. Can be one of the following: "box", "sphere", "capsule-x", "capsule-y", "capsule-z", "cylinder-x", "cylinder-y", "cylinder-z".
|
||||
* @property {Vec3} dimensions - The size to scale the shape to.
|
||||
*/
|
||||
|
||||
// TODO: Add this property to the Shape jsdoc above once model picks work properly
|
||||
// * @property {string} modelURL - If shapeType is one of: "compound", "simple-hull", "simple-compound", or "static-mesh", this defines the model to load to generate the collision volume.
|
||||
|
||||
/**jsdoc
|
||||
* A set of properties that can be passed to {@link Picks.createPick} to create a new Collision Pick.
|
||||
|
||||
* @typedef {object} Picks.CollisionPickProperties
|
||||
* @property {Shape} shape - The information about the collision region's size and shape.
|
||||
* @property {Vec3} position - The position of the collision region.
|
||||
* @property {Quat} orientation - The orientation of the collision region.
|
||||
*/
|
||||
unsigned int PickScriptingInterface::createCollisionPick(const QVariant& properties) {
|
||||
QVariantMap propMap = properties.toMap();
|
||||
|
||||
bool enabled = false;
|
||||
if (propMap["enabled"].isValid()) {
|
||||
enabled = propMap["enabled"].toBool();
|
||||
}
|
||||
|
||||
PickFilter filter = PickFilter();
|
||||
if (propMap["filter"].isValid()) {
|
||||
filter = PickFilter(propMap["filter"].toUInt());
|
||||
}
|
||||
|
||||
float maxDistance = 0.0f;
|
||||
if (propMap["maxDistance"].isValid()) {
|
||||
maxDistance = propMap["maxDistance"].toFloat();
|
||||
}
|
||||
|
||||
CollisionRegion collisionRegion(propMap);
|
||||
|
||||
return DependencyManager::get<PickManager>()->addPick(PickQuery::Collision, std::make_shared<CollisionPick>(filter, maxDistance, enabled, collisionRegion, qApp->getPhysicsEngine()));
|
||||
}
|
||||
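As a rough script-side illustration of the collision pick properties documented above, the snippet below creates and enables a collision pick. It is only a sketch: `PickType.Collision`, `Picks.PICK_ENTITIES`, `MyAvatar.position`, and `Quat.IDENTITY` are assumed scripting names, not confirmed by this diff.

```javascript
// Hypothetical usage sketch of the new Collision Pick (names outside the jsdoc above are assumptions).
var collisionPick = Picks.createPick(PickType.Collision, {
    enabled: true,
    filter: Picks.PICK_ENTITIES,              // assumed filter flag for entity intersections
    shape: {
        shapeType: "sphere",                  // one of the shapeType strings listed in the Shape typedef
        dimensions: { x: 0.5, y: 0.5, z: 0.5 }
    },
    position: MyAvatar.position,              // assumed helper; any world-space position works
    orientation: Quat.IDENTITY                // assumed helper for an identity rotation
});
```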
|
||||
void PickScriptingInterface::enablePick(unsigned int uid) {
|
||||
DependencyManager::get<PickManager>()->enablePick(uid);
|
||||
}
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
|
||||
#include <RegisteredMetaTypes.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <PhysicsEngine.h>
|
||||
#include <Pick.h>
|
||||
|
||||
/**jsdoc
|
||||
|
@ -62,6 +63,7 @@ class PickScriptingInterface : public QObject, public Dependency {
|
|||
public:
|
||||
unsigned int createRayPick(const QVariant& properties);
|
||||
unsigned int createStylusPick(const QVariant& properties);
|
||||
unsigned int createCollisionPick(const QVariant& properties);
|
||||
unsigned int createParabolaPick(const QVariant& properties);
|
||||
|
||||
void registerMetaTypes(QScriptEngine* engine);
|
||||
|
@ -72,7 +74,7 @@ public:
|
|||
* with PickType.Ray, depending on which optional parameters you pass, you could create a Static Ray Pick, a Mouse Ray Pick, or a Joint Ray Pick.
|
||||
* @function Picks.createPick
|
||||
* @param {PickType} type A PickType that specifies the method of picking to use
|
||||
* @param {Picks.RayPickProperties|Picks.StylusPickProperties|Picks.ParabolaPickProperties} properties A PickProperties object, containing all the properties for initializing this Pick
|
||||
* @param {Picks.RayPickProperties|Picks.StylusPickProperties|Picks.ParabolaPickProperties|Picks.CollisionPickProperties} properties A PickProperties object, containing all the properties for initializing this Pick
|
||||
* @returns {number} The ID of the created Pick. Used for managing the Pick. 0 if invalid.
|
||||
*/
|
||||
Q_INVOKABLE unsigned int createPick(const PickQuery::PickType type, const QVariant& properties);
|
||||
|
@ -141,11 +143,40 @@ public:
|
|||
* @property {PickParabola} parabola The PickParabola that was used. Valid even if there was no intersection.
|
||||
*/
|
||||
|
||||
/**jsdoc
|
||||
* An intersection result for a Collision Pick.
|
||||
*
|
||||
* @typedef {object} CollisionPickResult
|
||||
* @property {boolean} intersects - Whether there was at least one valid intersection (intersectingObjects.length > 0).
|
||||
* @property {IntersectingObject[]} intersectingObjects - The collision information for each object that intersects with the CollisionRegion.
|
||||
* @property {CollisionRegion} collisionRegion - The CollisionRegion that was used. Valid even if there was no intersection.
|
||||
*/
|
||||
|
||||
// TODO: Add this to the CollisionPickResult jsdoc once model collision picks are working
|
||||
//* @property {boolean} loaded If the CollisionRegion was successfully loaded (may be false if a model was used)
|
||||
|
||||
/**jsdoc
|
||||
* Information about the Collision Pick's intersection with an object.
|
||||
*
|
||||
* @typedef {object} IntersectingObject
|
||||
* @property {QUuid} id - The ID of the object.
|
||||
* @property {number} type - The type of the object, either Picks.INTERSECTED_ENTITY() or Picks.INTERSECTED_AVATAR().
|
||||
* @property {CollisionContact[]} collisionContacts - Pairs of points representing penetration information between the pick and the object.
|
||||
*/
|
||||
|
||||
/**jsdoc
|
||||
* A pair of points that represents part of an overlap between a Collision Pick and an object in the physics engine. Points that are further apart represent deeper overlap.
|
||||
*
|
||||
* @typedef {object} CollisionContact
|
||||
* @property {Vec3} pointOnPick - A point representing a penetration of the object's surface into the volume of the pick, in world space.
|
||||
* @property {Vec3} pointOnObject - A point representing a penetration of the pick's surface into the volume of the found object, in world space.
|
||||
*/
|
||||
|
||||
/**jsdoc
|
||||
* Get the most recent pick result from this Pick. This will be updated as long as the Pick is enabled.
|
||||
* @function Picks.getPrevPickResult
|
||||
* @param {number} uid The ID of the Pick, as returned by {@link Picks.createPick}.
|
||||
* @returns {RayPickResult|StylusPickResult} The most recent intersection result. This will be different for different PickTypes.
|
||||
* @returns {RayPickResult|StylusPickResult|ParabolaPickResult|CollisionPickResult} The most recent intersection result. This will be different for different PickTypes.
|
||||
*/
|
||||
Q_INVOKABLE QVariantMap getPrevPickResult(unsigned int uid);
|
||||
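A matching sketch of consuming the result shape described by the CollisionPickResult, IntersectingObject, and CollisionContact typedefs above; `Script.setInterval`, `Vec3.distance`, and the 100 ms polling interval are illustrative assumptions, and `collisionPick` is the pick ID from the earlier sketch.

```javascript
// Poll the most recent collision pick result and report overlap depth per contact (sketch only).
Script.setInterval(function () {
    var result = Picks.getPrevPickResult(collisionPick);
    if (!result.intersects) {
        return;
    }
    result.intersectingObjects.forEach(function (object) {
        var kind = (object.type === Picks.INTERSECTED_ENTITY()) ? "entity" : "avatar";
        object.collisionContacts.forEach(function (contact) {
            // Points that are further apart indicate deeper penetration.
            var depth = Vec3.distance(contact.pointOnPick, contact.pointOnObject);
            print(kind + " " + object.id + " overlap depth: " + depth.toFixed(3) + " m");
        });
    });
}, 100);
```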
|
||||
|
|
|
@ -60,23 +60,20 @@ Audio::Audio() : _devices(_contextIsHMD) {
|
|||
}
|
||||
|
||||
bool Audio::startRecording(const QString& filepath) {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
return resultWithWriteLock<bool>([&] {
|
||||
return client->startRecording(filepath);
|
||||
return DependencyManager::get<AudioClient>()->startRecording(filepath);
|
||||
});
|
||||
}
|
||||
|
||||
bool Audio::getRecording() {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
return resultWithReadLock<bool>([&] {
|
||||
return client->getRecording();
|
||||
return DependencyManager::get<AudioClient>()->getRecording();
|
||||
});
|
||||
}
|
||||
|
||||
void Audio::stopRecording() {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
withWriteLock([&] {
|
||||
client->stopRecording();
|
||||
DependencyManager::get<AudioClient>()->stopRecording();
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -360,11 +360,11 @@ void AudioInputDeviceList::onPeakValueListChanged(const QList<float>& peakValueL
|
|||
}
|
||||
|
||||
AudioDevices::AudioDevices(bool& contextIsHMD) : _contextIsHMD(contextIsHMD) {
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
|
||||
connect(client.data(), &AudioClient::deviceChanged, this, &AudioDevices::onDeviceChanged, Qt::QueuedConnection);
|
||||
connect(client.data(), &AudioClient::devicesChanged, this, &AudioDevices::onDevicesChanged, Qt::QueuedConnection);
|
||||
connect(client.data(), &AudioClient::peakValueListChanged, &_inputs, &AudioInputDeviceList::onPeakValueListChanged, Qt::QueuedConnection);
|
||||
connect(client, &AudioClient::deviceChanged, this, &AudioDevices::onDeviceChanged, Qt::QueuedConnection);
|
||||
connect(client, &AudioClient::devicesChanged, this, &AudioDevices::onDevicesChanged, Qt::QueuedConnection);
|
||||
connect(client, &AudioClient::peakValueListChanged, &_inputs, &AudioInputDeviceList::onPeakValueListChanged, Qt::QueuedConnection);
|
||||
|
||||
_inputs.onDeviceChanged(client->getActiveAudioDevice(QAudio::AudioInput), contextIsHMD);
|
||||
_outputs.onDeviceChanged(client->getActiveAudioDevice(QAudio::AudioOutput), contextIsHMD);
|
||||
|
@ -446,7 +446,7 @@ void AudioDevices::onDevicesChanged(QAudio::Mode mode, const QList<QAudioDeviceI
|
|||
static std::once_flag once;
|
||||
std::call_once(once, [&] {
|
||||
//readout settings
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
|
||||
_inputs._hmdSavedDeviceName = getTargetDevice(true, QAudio::AudioInput);
|
||||
_inputs._desktopSavedDeviceName = getTargetDevice(false, QAudio::AudioInput);
|
||||
|
@ -494,9 +494,9 @@ void AudioDevices::onDevicesChanged(QAudio::Mode mode, const QList<QAudioDeviceI
|
|||
void AudioDevices::chooseInputDevice(const QAudioDeviceInfo& device, bool isHMD) {
|
||||
//check if current context equals device to change
|
||||
if (_contextIsHMD == isHMD) {
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
_requestedInputDevice = device;
|
||||
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
|
||||
QMetaObject::invokeMethod(client, "switchAudioDevice",
|
||||
Q_ARG(QAudio::Mode, QAudio::AudioInput),
|
||||
Q_ARG(const QAudioDeviceInfo&, device));
|
||||
} else {
|
||||
|
@ -511,9 +511,9 @@ void AudioDevices::chooseInputDevice(const QAudioDeviceInfo& device, bool isHMD)
|
|||
void AudioDevices::chooseOutputDevice(const QAudioDeviceInfo& device, bool isHMD) {
|
||||
//check if current context equals device to change
|
||||
if (_contextIsHMD == isHMD) {
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
_requestedOutputDevice = device;
|
||||
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
|
||||
QMetaObject::invokeMethod(client, "switchAudioDevice",
|
||||
Q_ARG(QAudio::Mode, QAudio::AudioOutput),
|
||||
Q_ARG(const QAudioDeviceInfo&, device));
|
||||
} else {
|
||||
|
|
|
@ -312,7 +312,7 @@ JSConsole::~JSConsole() {
|
|||
delete _ui;
|
||||
}
|
||||
|
||||
void JSConsole::setScriptEngine(const ScriptEnginePointer& scriptEngine) {
|
||||
void JSConsole::setScriptEngine(const ScriptEnginePointer& scriptEngine) {
|
||||
if (_scriptEngine == scriptEngine && scriptEngine != nullptr) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -162,9 +162,7 @@ void LoginDialog::createAccountFromStream(QString username) {
|
|||
}
|
||||
|
||||
void LoginDialog::openUrl(const QString& url) const {
|
||||
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto tablet = dynamic_cast<TabletProxy*>(DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
|
||||
|
|
|
@ -27,28 +27,27 @@
|
|||
|
||||
void setupPreferences() {
|
||||
auto preferences = DependencyManager::get<Preferences>();
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
static const QString AVATAR_BASICS { "Avatar Basics" };
|
||||
{
|
||||
auto getter = [=]()->QString { return myAvatar->getDisplayName(); };
|
||||
auto setter = [=](const QString& value) { myAvatar->setDisplayName(value); };
|
||||
auto getter = [myAvatar]()->QString { return myAvatar->getDisplayName(); };
|
||||
auto setter = [myAvatar](const QString& value) { myAvatar->setDisplayName(value); };
|
||||
auto preference = new EditPreference(AVATAR_BASICS, "Avatar display name (optional)", getter, setter);
|
||||
preference->setPlaceholderText("Not showing a name");
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
||||
{
|
||||
auto getter = [=]()->QString { return myAvatar->getCollisionSoundURL(); };
|
||||
auto setter = [=](const QString& value) { myAvatar->setCollisionSoundURL(value); };
|
||||
auto getter = [myAvatar]()->QString { return myAvatar->getCollisionSoundURL(); };
|
||||
auto setter = [myAvatar](const QString& value) { myAvatar->setCollisionSoundURL(value); };
|
||||
auto preference = new EditPreference(AVATAR_BASICS, "Avatar collision sound URL (optional)", getter, setter);
|
||||
preference->setPlaceholderText("Enter the URL of a sound to play when you bump into something");
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
||||
{
|
||||
auto getter = [=]()->QString { return myAvatar->getFullAvatarURLFromPreferences().toString(); };
|
||||
auto setter = [=](const QString& value) { myAvatar->useFullAvatarURL(value, ""); qApp->clearAvatarOverrideUrl(); };
|
||||
auto getter = [myAvatar]()->QString { return myAvatar->getFullAvatarURLFromPreferences().toString(); };
|
||||
auto setter = [myAvatar](const QString& value) { myAvatar->useFullAvatarURL(value, ""); qApp->clearAvatarOverrideUrl(); };
|
||||
auto preference = new AvatarPreference(AVATAR_BASICS, "Appearance", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
@ -163,8 +162,8 @@ void setupPreferences() {
|
|||
|
||||
static const QString VIEW_CATEGORY{ "View" };
|
||||
{
|
||||
auto getter = [=]()->float { return myAvatar->getRealWorldFieldOfView(); };
|
||||
auto setter = [=](float value) { myAvatar->setRealWorldFieldOfView(value); };
|
||||
auto getter = [myAvatar]()->float { return myAvatar->getRealWorldFieldOfView(); };
|
||||
auto setter = [myAvatar](float value) { myAvatar->setRealWorldFieldOfView(value); };
|
||||
auto preference = new SpinnerPreference(VIEW_CATEGORY, "Real world vertical field of view (angular size of monitor)", getter, setter);
|
||||
preference->setMin(1);
|
||||
preference->setMax(180);
|
||||
|
@ -219,13 +218,13 @@ void setupPreferences() {
|
|||
|
||||
static const QString AVATAR_TUNING { "Avatar Tuning" };
|
||||
{
|
||||
auto getter = [=]()->QString { return myAvatar->getDominantHand(); };
|
||||
auto setter = [=](const QString& value) { myAvatar->setDominantHand(value); };
|
||||
auto getter = [myAvatar]()->QString { return myAvatar->getDominantHand(); };
|
||||
auto setter = [myAvatar](const QString& value) { myAvatar->setDominantHand(value); };
|
||||
preferences->addPreference(new PrimaryHandPreference(AVATAR_TUNING, "Dominant Hand", getter, setter));
|
||||
}
|
||||
{
|
||||
auto getter = [=]()->float { return myAvatar->getTargetScale(); };
|
||||
auto setter = [=](float value) { myAvatar->setTargetScale(value); };
|
||||
auto getter = [myAvatar]()->float { return myAvatar->getTargetScale(); };
|
||||
auto setter = [myAvatar](float value) { myAvatar->setTargetScale(value); };
|
||||
auto preference = new SpinnerSliderPreference(AVATAR_TUNING, "Avatar Scale", getter, setter);
|
||||
preference->setMin(0.25);
|
||||
preference->setMax(4);
|
||||
|
@ -240,16 +239,16 @@ void setupPreferences() {
|
|||
}
|
||||
|
||||
{
|
||||
auto getter = [=]()->QString { return myAvatar->getAnimGraphOverrideUrl().toString(); };
|
||||
auto setter = [=](const QString& value) { myAvatar->setAnimGraphOverrideUrl(QUrl(value)); };
|
||||
auto getter = [myAvatar]()->QString { return myAvatar->getAnimGraphOverrideUrl().toString(); };
|
||||
auto setter = [myAvatar](const QString& value) { myAvatar->setAnimGraphOverrideUrl(QUrl(value)); };
|
||||
auto preference = new EditPreference(AVATAR_TUNING, "Avatar animation JSON", getter, setter);
|
||||
preference->setPlaceholderText("default");
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
||||
{
|
||||
auto getter = [=]()->bool { return myAvatar->getCollisionsEnabled(); };
|
||||
auto setter = [=](bool value) { myAvatar->setCollisionsEnabled(value); };
|
||||
auto getter = [myAvatar]()->bool { return myAvatar->getCollisionsEnabled(); };
|
||||
auto setter = [myAvatar](bool value) { myAvatar->setCollisionsEnabled(value); };
|
||||
auto preference = new CheckPreference(AVATAR_TUNING, "Enable Avatar collisions", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
@ -270,20 +269,20 @@ void setupPreferences() {
|
|||
{
|
||||
|
||||
static const QString movementsControlChannel = QStringLiteral("Hifi-Advanced-Movement-Disabler");
|
||||
auto getter = [=]()->bool { return myAvatar->useAdvancedMovementControls(); };
|
||||
auto setter = [=](bool value) { myAvatar->setUseAdvancedMovementControls(value); };
|
||||
auto getter = [myAvatar]()->bool { return myAvatar->useAdvancedMovementControls(); };
|
||||
auto setter = [myAvatar](bool value) { myAvatar->setUseAdvancedMovementControls(value); };
|
||||
preferences->addPreference(new CheckPreference(VR_MOVEMENT,
|
||||
QStringLiteral("Advanced movement for hand controllers"),
|
||||
getter, setter));
|
||||
}
|
||||
{
|
||||
auto getter = [=]()->bool { return myAvatar->getFlyingHMDPref(); };
|
||||
auto setter = [=](bool value) { myAvatar->setFlyingHMDPref(value); };
|
||||
auto getter = [myAvatar]()->bool { return myAvatar->getFlyingHMDPref(); };
|
||||
auto setter = [myAvatar](bool value) { myAvatar->setFlyingHMDPref(value); };
|
||||
preferences->addPreference(new CheckPreference(VR_MOVEMENT, "Flying & jumping (HMD)", getter, setter));
|
||||
}
|
||||
{
|
||||
auto getter = [=]()->int { return myAvatar->getSnapTurn() ? 0 : 1; };
|
||||
auto setter = [=](int value) { myAvatar->setSnapTurn(value == 0); };
|
||||
auto getter = [myAvatar]()->int { return myAvatar->getSnapTurn() ? 0 : 1; };
|
||||
auto setter = [myAvatar](int value) { myAvatar->setSnapTurn(value == 0); };
|
||||
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Snap turn / Smooth turn", getter, setter);
|
||||
QStringList items;
|
||||
items << "Snap turn" << "Smooth turn";
|
||||
|
@ -309,8 +308,8 @@ void setupPreferences() {
|
|||
|
||||
static const QString AVATAR_CAMERA{ "Mouse Sensitivity" };
|
||||
{
|
||||
auto getter = [=]()->float { return myAvatar->getPitchSpeed(); };
|
||||
auto setter = [=](float value) { myAvatar->setPitchSpeed(value); };
|
||||
auto getter = [myAvatar]()->float { return myAvatar->getPitchSpeed(); };
|
||||
auto setter = [myAvatar](float value) { myAvatar->setPitchSpeed(value); };
|
||||
auto preference = new SpinnerSliderPreference(AVATAR_CAMERA, "Y input:", getter, setter);
|
||||
preference->setMin(1.0f);
|
||||
preference->setMax(360.0f);
|
||||
|
@ -319,8 +318,8 @@ void setupPreferences() {
|
|||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = [=]()->float { return myAvatar->getYawSpeed(); };
|
||||
auto setter = [=](float value) { myAvatar->setYawSpeed(value); };
|
||||
auto getter = [myAvatar]()->float { return myAvatar->getYawSpeed(); };
|
||||
auto setter = [myAvatar](float value) { myAvatar->setYawSpeed(value); };
|
||||
auto preference = new SpinnerSliderPreference(AVATAR_CAMERA, "X input:", getter, setter);
|
||||
preference->setMin(1.0f);
|
||||
preference->setMax(360.0f);
|
||||
|
@ -381,12 +380,24 @@ void setupPreferences() {
|
|||
{
|
||||
static const QString NETWORKING("Networking");
|
||||
|
||||
auto nodelist = DependencyManager::get<NodeList>();
|
||||
QWeakPointer<NodeList> nodeListWeak = DependencyManager::get<NodeList>();
|
||||
{
|
||||
static const int MIN_PORT_NUMBER { 0 };
|
||||
static const int MAX_PORT_NUMBER { 65535 };
|
||||
auto getter = [nodelist] { return static_cast<int>(nodelist->getSocketLocalPort()); };
|
||||
auto setter = [nodelist](int preset) { nodelist->setSocketLocalPort(static_cast<quint16>(preset)); };
|
||||
auto getter = [nodeListWeak] {
|
||||
auto nodeList = nodeListWeak.lock();
|
||||
if (nodeList) {
|
||||
return static_cast<int>(nodeList->getSocketLocalPort());
|
||||
} else {
|
||||
return -1;
|
||||
}
|
||||
};
|
||||
auto setter = [nodeListWeak](int preset) {
|
||||
auto nodeList = nodeListWeak.lock();
|
||||
if (nodeList) {
|
||||
nodeList->setSocketLocalPort(static_cast<quint16>(preset));
|
||||
}
|
||||
};
|
||||
auto preference = new IntSpinnerPreference(NETWORKING, "Listening Port", getter, setter);
|
||||
preference->setMin(MIN_PORT_NUMBER);
|
||||
preference->setMax(MAX_PORT_NUMBER);
|
||||
|
|
|
@ -174,7 +174,7 @@ void Stats::updateStats(bool force) {
|
|||
int octreeServerCount = 0;
|
||||
int pingOctreeMax = 0;
|
||||
int totalEntityKbps = 0;
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
nodeList->eachNode([&totalPingOctree, &totalEntityKbps, &octreeServerCount, &pingOctreeMax](const SharedNodePointer& node) {
|
||||
// TODO: this should also support entities
|
||||
if (node->getType() == NodeType::EntityServer) {
|
||||
totalPingOctree += node->getPingMs();
|
||||
|
@ -219,7 +219,7 @@ void Stats::updateStats(bool force) {
|
|||
STAT_UPDATE_FLOAT(myAvatarSendRate, avatarManager->getMyAvatarSendRate(), 0.1f);
|
||||
|
||||
SharedNodePointer audioMixerNode = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
||||
auto audioClient = DependencyManager::get<AudioClient>();
|
||||
auto audioClient = DependencyManager::get<AudioClient>().data();
|
||||
if (audioMixerNode || force) {
|
||||
STAT_UPDATE(audioMixerKbps, (int)roundf(
|
||||
bandwidthRecorder->getAverageInputKilobitsPerSecond(NodeType::AudioMixer) +
|
||||
|
|
|
@ -23,8 +23,7 @@ TestingDialog::TestingDialog(QWidget* parent) :
|
|||
|
||||
_console->setFixedHeight(TESTING_CONSOLE_HEIGHT);
|
||||
|
||||
auto _engines = DependencyManager::get<ScriptEngines>();
|
||||
_engine = _engines->loadScript(qApp->applicationDirPath() + testRunnerRelativePath);
|
||||
_engine = DependencyManager::get<ScriptEngines>()->loadScript(qApp->applicationDirPath() + testRunnerRelativePath);
|
||||
_console->setScriptEngine(_engine);
|
||||
connect(_engine.data(), &ScriptEngine::finished, this, &TestingDialog::onTestingFinished);
|
||||
}
|
||||
|
|
|
@ -184,9 +184,11 @@ void Web3DOverlay::buildWebSurface() {
|
|||
_webSurface->getRootItem()->setProperty("scriptURL", _scriptURL);
|
||||
} else {
|
||||
_webSurface = QSharedPointer<OffscreenQmlSurface>(new OffscreenQmlSurface(), qmlSurfaceDeleter);
|
||||
connect(_webSurface.data(), &hifi::qml::OffscreenSurface::rootContextCreated, [this](QQmlContext* surfaceContext) {
|
||||
setupQmlSurface(_url == TabletScriptingInterface::QML);
|
||||
});
|
||||
_webSurface->load(_url);
|
||||
_cachedWebSurface = false;
|
||||
setupQmlSurface();
|
||||
}
|
||||
_webSurface->getSurfaceContext()->setContextProperty("globalPosition", vec3toVariant(getWorldPosition()));
|
||||
onResizeWebSurface();
|
||||
|
@ -214,7 +216,7 @@ bool Web3DOverlay::isWebContent() const {
|
|||
return false;
|
||||
}
|
||||
|
||||
void Web3DOverlay::setupQmlSurface() {
|
||||
void Web3DOverlay::setupQmlSurface(bool isTablet) {
|
||||
_webSurface->getSurfaceContext()->setContextProperty("Users", DependencyManager::get<UsersScriptingInterface>().data());
|
||||
_webSurface->getSurfaceContext()->setContextProperty("HMD", DependencyManager::get<HMDScriptingInterface>().data());
|
||||
_webSurface->getSurfaceContext()->setContextProperty("UserActivityLogger", DependencyManager::get<UserActivityLoggerScriptingInterface>().data());
|
||||
|
@ -225,7 +227,7 @@ void Web3DOverlay::setupQmlSurface() {
|
|||
_webSurface->getSurfaceContext()->setContextProperty("Entities", DependencyManager::get<EntityScriptingInterface>().data());
|
||||
_webSurface->getSurfaceContext()->setContextProperty("Snapshot", DependencyManager::get<Snapshot>().data());
|
||||
|
||||
if (_webSurface->getRootItem() && _webSurface->getRootItem()->objectName() == "tabletRoot") {
|
||||
if (isTablet) {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto flags = tabletScriptingInterface->getFlags();
|
||||
|
||||
|
|
|
@ -79,7 +79,7 @@ protected:
|
|||
Transform evalRenderTransform() override;
|
||||
|
||||
private:
|
||||
void setupQmlSurface();
|
||||
void setupQmlSurface(bool isTablet);
|
||||
void rebuildWebSurface();
|
||||
bool isWebContent() const;
|
||||
|
||||
|
|
|
@ -53,6 +53,7 @@
|
|||
#include "AudioHelpers.h"
|
||||
|
||||
#if defined(Q_OS_ANDROID)
|
||||
#define VOICE_RECOGNITION "voicerecognition"
|
||||
#include <QtAndroidExtras/QAndroidJniObject>
|
||||
#endif
|
||||
|
||||
|
@ -273,31 +274,14 @@ AudioClient::~AudioClient() {
|
|||
}
|
||||
|
||||
void AudioClient::customDeleter() {
|
||||
deleteLater();
|
||||
}
|
||||
|
||||
void AudioClient::cleanupBeforeQuit() {
|
||||
// FIXME: this should be put in customDeleter, but there is still a reference to this when it is called,
|
||||
// so this must be explicitly, synchronously stopped
|
||||
static ConditionalGuard guard;
|
||||
if (QThread::currentThread() != thread()) {
|
||||
// This will likely be called from the main thread, but we don't want to do blocking queued calls
|
||||
// from the main thread, so we use a normal auto-connection invoke, and then use a conditional to wait
|
||||
// for completion
|
||||
// The effect is the same, yes, but we actually want to avoid the use of Qt::BlockingQueuedConnection
|
||||
// in the code
|
||||
QMetaObject::invokeMethod(this, "cleanupBeforeQuit");
|
||||
guard.wait();
|
||||
return;
|
||||
}
|
||||
|
||||
#if defined(Q_OS_ANDROID)
|
||||
_shouldRestartInputSetup = false;
|
||||
#endif
|
||||
stop();
|
||||
_checkDevicesTimer->stop();
|
||||
_checkPeakValuesTimer->stop();
|
||||
guard.trigger();
|
||||
|
||||
deleteLater();
|
||||
}
|
||||
|
||||
void AudioClient::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
|
||||
|
@ -465,7 +449,16 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
|
|||
return getNamedAudioDeviceForMode(mode, deviceName);
|
||||
#endif
|
||||
|
||||
|
||||
#if defined (Q_OS_ANDROID)
|
||||
if (mode == QAudio::AudioInput) {
|
||||
auto inputDevices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
|
||||
for (auto inputDevice : inputDevices) {
|
||||
if (inputDevice.deviceName() == VOICE_RECOGNITION) {
|
||||
return inputDevice;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
// fallback for failed lookup is the default device
|
||||
return (mode == QAudio::AudioInput) ? QAudioDeviceInfo::defaultInputDevice() : QAudioDeviceInfo::defaultOutputDevice();
|
||||
}
|
||||
|
@ -486,15 +479,6 @@ bool nativeFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
|
|||
audioFormat.setSampleType(QAudioFormat::SignedInt);
|
||||
audioFormat.setByteOrder(QAudioFormat::LittleEndian);
|
||||
|
||||
#if defined(Q_OS_ANDROID)
|
||||
// Using the HW sample rate (AUDIO_INPUT_FLAG_FAST) in some samsung phones causes a low volume at input stream
|
||||
// Changing the sample rate forces a resampling that (in samsung) amplifies +18 dB
|
||||
QAndroidJniObject brand = QAndroidJniObject::getStaticObjectField<jstring>("android/os/Build", "BRAND");
|
||||
if (audioDevice == QAudioDeviceInfo::defaultInputDevice() && brand.toString().contains("samsung", Qt::CaseInsensitive)) {
|
||||
audioFormat.setSampleRate(24000);
|
||||
}
|
||||
#endif
|
||||
|
||||
if (!audioDevice.isFormatSupported(audioFormat)) {
|
||||
qCWarning(audioclient) << "The native format is" << audioFormat << "but isFormatSupported() failed.";
|
||||
return false;
|
||||
|
@ -1848,7 +1832,9 @@ const float AudioClient::CALLBACK_ACCELERATOR_RATIO = IsWindows8OrGreater() ? 1.
|
|||
const float AudioClient::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
||||
#endif
|
||||
|
||||
#ifdef Q_OS_LINUX
|
||||
#ifdef Q_OS_ANDROID
|
||||
const float AudioClient::CALLBACK_ACCELERATOR_RATIO = 0.5f;
|
||||
#elif defined(Q_OS_LINUX)
|
||||
const float AudioClient::CALLBACK_ACCELERATOR_RATIO = 2.0f;
|
||||
#endif
|
||||
|
||||
|
|
|
@ -171,7 +171,6 @@ public:
|
|||
public slots:
|
||||
void start();
|
||||
void stop();
|
||||
void cleanupBeforeQuit();
|
||||
|
||||
void handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message);
|
||||
void handleAudioDataPacket(QSharedPointer<ReceivedMessage> message);
|
||||
|
|
|
@ -95,8 +95,6 @@ void AudioIOStats::processStreamStatsPacket(QSharedPointer<ReceivedMessage> mess
|
|||
}
|
||||
|
||||
void AudioIOStats::publish() {
|
||||
auto audioIO = DependencyManager::get<AudioClient>();
|
||||
|
||||
// call _receivedAudioStream's per-second callback
|
||||
_receivedAudioStream->perSecondCallbackForUpdatingStats();
|
||||
|
||||
|
|
|
@ -1184,6 +1184,15 @@ glm::quat Avatar::getAbsoluteJointRotationInObjectFrame(int index) const {
|
|||
}
|
||||
return Quaternions::Y_180 * rotation * Quaternions::Y_180;
|
||||
}
|
||||
case FARGRAB_RIGHTHAND_INDEX: {
|
||||
return extractRotation(_farGrabRightMatrixCache.get());
|
||||
}
|
||||
case FARGRAB_LEFTHAND_INDEX: {
|
||||
return extractRotation(_farGrabLeftMatrixCache.get());
|
||||
}
|
||||
case FARGRAB_MOUSE_INDEX: {
|
||||
return extractRotation(_farGrabMouseMatrixCache.get());
|
||||
}
|
||||
default: {
|
||||
glm::quat rotation;
|
||||
_skeletonModel->getAbsoluteJointRotationInRigFrame(index, rotation);
|
||||
|
@ -1224,6 +1233,15 @@ glm::vec3 Avatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
|
|||
}
|
||||
return Quaternions::Y_180 * translation * Quaternions::Y_180;
|
||||
}
|
||||
case FARGRAB_RIGHTHAND_INDEX: {
|
||||
return extractTranslation(_farGrabRightMatrixCache.get());
|
||||
}
|
||||
case FARGRAB_LEFTHAND_INDEX: {
|
||||
return extractTranslation(_farGrabLeftMatrixCache.get());
|
||||
}
|
||||
case FARGRAB_MOUSE_INDEX: {
|
||||
return extractTranslation(_farGrabMouseMatrixCache.get());
|
||||
}
|
||||
default: {
|
||||
glm::vec3 translation;
|
||||
_skeletonModel->getAbsoluteJointTranslationInRigFrame(index, translation);
|
||||
|
|
|
@ -62,7 +62,7 @@ size_t AvatarDataPacket::maxFaceTrackerInfoSize(size_t numBlendshapeCoefficients
|
|||
return FACE_TRACKER_INFO_SIZE + numBlendshapeCoefficients * sizeof(float);
|
||||
}
|
||||
|
||||
size_t AvatarDataPacket::maxJointDataSize(size_t numJoints) {
|
||||
size_t AvatarDataPacket::maxJointDataSize(size_t numJoints, bool hasGrabJoints) {
|
||||
const size_t validityBitsSize = (size_t)std::ceil(numJoints / (float)BITS_IN_BYTE);
|
||||
|
||||
size_t totalSize = sizeof(uint8_t); // numJoints
|
||||
|
@ -73,7 +73,8 @@ size_t AvatarDataPacket::maxJointDataSize(size_t numJoints) {
|
|||
totalSize += numJoints * sizeof(SixByteTrans); // Translations
|
||||
|
||||
size_t NUM_FAUX_JOINT = 2;
|
||||
totalSize += NUM_FAUX_JOINT * (sizeof(SixByteQuat) + sizeof(SixByteTrans)); // faux joints
|
||||
size_t num_grab_joints = (hasGrabJoints ? 2 : 0);
|
||||
totalSize += (NUM_FAUX_JOINT + num_grab_joints) * (sizeof(SixByteQuat) + sizeof(SixByteTrans)); // faux joints
|
||||
|
||||
return totalSize;
|
||||
}
|
||||
|
@ -227,7 +228,8 @@ QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail, bool dro
|
|||
&_outboundDataRate);
|
||||
}
|
||||
|
||||
QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime, const QVector<JointData>& lastSentJointData,
|
||||
QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime,
|
||||
const QVector<JointData>& lastSentJointData,
|
||||
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust,
|
||||
glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, AvatarDataRate* outboundDataRateOut) const {
|
||||
|
||||
|
@ -284,6 +286,11 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
bool hasFaceTrackerInfo = false;
|
||||
bool hasJointData = false;
|
||||
    bool hasJointDefaultPoseFlags = false;
    bool hasGrabJoints = false;

    glm::mat4 leftFarGrabMatrix;
    glm::mat4 rightFarGrabMatrix;
    glm::mat4 mouseFarGrabMatrix;

    if (sendPALMinimum) {
        hasAudioLoudness = true;

@@ -304,12 +311,30 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
            (sendAll || faceTrackerInfoChangedSince(lastSentTime));
        hasJointData = sendAll || !sendMinimum;
        hasJointDefaultPoseFlags = hasJointData;
        if (hasJointData) {
            bool leftValid;
            leftFarGrabMatrix = _farGrabLeftMatrixCache.get(leftValid);
            if (!leftValid) {
                leftFarGrabMatrix = glm::mat4();
            }
            bool rightValid;
            rightFarGrabMatrix = _farGrabRightMatrixCache.get(rightValid);
            if (!rightValid) {
                rightFarGrabMatrix = glm::mat4();
            }
            bool mouseValid;
            mouseFarGrabMatrix = _farGrabMouseMatrixCache.get(mouseValid);
            if (!mouseValid) {
                mouseFarGrabMatrix = glm::mat4();
            }
            hasGrabJoints = (leftValid || rightValid || mouseValid);
        }
    }

    const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE +
        (hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getBlendshapeCoefficients().size()) : 0) +
        (hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size()) : 0) +
        (hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size(), hasGrabJoints) : 0) +
        (hasJointDefaultPoseFlags ? AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size()) : 0);

    QByteArray avatarDataByteArray((int)byteArraySize, 0);

@@ -330,7 +355,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        | (hasAvatarLocalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_LOCAL_POSITION : 0)
        | (hasFaceTrackerInfo ? AvatarDataPacket::PACKET_HAS_FACE_TRACKER_INFO : 0)
        | (hasJointData ? AvatarDataPacket::PACKET_HAS_JOINT_DATA : 0)
        | (hasJointDefaultPoseFlags ? AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS : 0);
        | (hasJointDefaultPoseFlags ? AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS : 0)
        | (hasGrabJoints ? AvatarDataPacket::PACKET_HAS_GRAB_JOINTS : 0);

    memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
    destinationBuffer += sizeof(packetStateFlags);

@@ -668,6 +694,53 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerRightHandTransform.getTranslation(),
            TRANSLATION_COMPRESSION_RADIX);

        if (hasGrabJoints) {
            // the far-grab joints may range further than 3 meters, so we can't use packFloatVec3ToSignedTwoByteFixed etc
            auto startSection = destinationBuffer;
            auto data = reinterpret_cast<AvatarDataPacket::FarGrabJoints*>(destinationBuffer);
            glm::vec3 leftFarGrabPosition = extractTranslation(leftFarGrabMatrix);
            glm::quat leftFarGrabRotation = extractRotation(leftFarGrabMatrix);
            glm::vec3 rightFarGrabPosition = extractTranslation(rightFarGrabMatrix);
            glm::quat rightFarGrabRotation = extractRotation(rightFarGrabMatrix);
            glm::vec3 mouseFarGrabPosition = extractTranslation(mouseFarGrabMatrix);
            glm::quat mouseFarGrabRotation = extractRotation(mouseFarGrabMatrix);

            data->leftFarGrabPosition[0] = leftFarGrabPosition.x;
            data->leftFarGrabPosition[1] = leftFarGrabPosition.y;
            data->leftFarGrabPosition[2] = leftFarGrabPosition.z;

            data->leftFarGrabRotation[0] = leftFarGrabRotation.w;
            data->leftFarGrabRotation[1] = leftFarGrabRotation.x;
            data->leftFarGrabRotation[2] = leftFarGrabRotation.y;
            data->leftFarGrabRotation[3] = leftFarGrabRotation.z;

            data->rightFarGrabPosition[0] = rightFarGrabPosition.x;
            data->rightFarGrabPosition[1] = rightFarGrabPosition.y;
            data->rightFarGrabPosition[2] = rightFarGrabPosition.z;

            data->rightFarGrabRotation[0] = rightFarGrabRotation.w;
            data->rightFarGrabRotation[1] = rightFarGrabRotation.x;
            data->rightFarGrabRotation[2] = rightFarGrabRotation.y;
            data->rightFarGrabRotation[3] = rightFarGrabRotation.z;

            data->mouseFarGrabPosition[0] = mouseFarGrabPosition.x;
            data->mouseFarGrabPosition[1] = mouseFarGrabPosition.y;
            data->mouseFarGrabPosition[2] = mouseFarGrabPosition.z;

            data->mouseFarGrabRotation[0] = mouseFarGrabRotation.w;
            data->mouseFarGrabRotation[1] = mouseFarGrabRotation.x;
            data->mouseFarGrabRotation[2] = mouseFarGrabRotation.y;
            data->mouseFarGrabRotation[3] = mouseFarGrabRotation.z;

            destinationBuffer += sizeof(AvatarDataPacket::FarGrabJoints);

            int numBytes = destinationBuffer - startSection;

            if (outboundDataRateOut) {
                outboundDataRateOut->farGrabJointRate.increment(numBytes);
            }
        }

#ifdef WANT_DEBUG
        if (sendAll) {
            qCDebug(avatars) << "AvatarData::toByteArray" << cullSmallChanges << sendAll
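The grab-joint block above deliberately writes raw 32-bit floats instead of reusing the two-byte fixed-point helpers used for the hand-controller transforms: a signed 16-bit fixed-point value only covers a few meters before it saturates, while a far-grab point can be much further from the avatar. A minimal sketch of that limitation (the radix value and helper name here are illustrative assumptions, not the engine's actual constants):

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>

// Illustrative only: pack a float into a signed 16-bit fixed-point value with
// `radix` fractional bits. The representable range is roughly +/- 2^(15 - radix),
// so with a fine radix the encoding saturates a few units from the origin --
// hence the raw floats in AvatarDataPacket::FarGrabJoints.
int16_t packSignedFixedPoint(float value, int radix) {
    float scaled = value * static_cast<float>(1 << radix);
    scaled = std::max(-32768.0f, std::min(32767.0f, scaled)); // saturate instead of wrapping
    return static_cast<int16_t>(std::lround(scaled));
}
```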
@@ -834,6 +907,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    bool hasFaceTrackerInfo = HAS_FLAG(packetStateFlags, AvatarDataPacket::PACKET_HAS_FACE_TRACKER_INFO);
    bool hasJointData = HAS_FLAG(packetStateFlags, AvatarDataPacket::PACKET_HAS_JOINT_DATA);
    bool hasJointDefaultPoseFlags = HAS_FLAG(packetStateFlags, AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS);
    bool hasGrabJoints = HAS_FLAG(packetStateFlags, AvatarDataPacket::PACKET_HAS_GRAB_JOINTS);

    quint64 now = usecTimestampNow();

@@ -1195,6 +1269,34 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
        int numBytesRead = sourceBuffer - startSection;
        _jointDataRate.increment(numBytesRead);
        _jointDataUpdateRate.increment();

        if (hasGrabJoints) {
            auto startSection = sourceBuffer;

            PACKET_READ_CHECK(FarGrabJoints, sizeof(AvatarDataPacket::FarGrabJoints));
            auto data = reinterpret_cast<const AvatarDataPacket::FarGrabJoints*>(sourceBuffer);
            glm::vec3 leftFarGrabPosition = glm::vec3(data->leftFarGrabPosition[0], data->leftFarGrabPosition[1],
                data->leftFarGrabPosition[2]);
            glm::quat leftFarGrabRotation = glm::quat(data->leftFarGrabRotation[0], data->leftFarGrabRotation[1],
                data->leftFarGrabRotation[2], data->leftFarGrabRotation[3]);
            glm::vec3 rightFarGrabPosition = glm::vec3(data->rightFarGrabPosition[0], data->rightFarGrabPosition[1],
                data->rightFarGrabPosition[2]);
            glm::quat rightFarGrabRotation = glm::quat(data->rightFarGrabRotation[0], data->rightFarGrabRotation[1],
                data->rightFarGrabRotation[2], data->rightFarGrabRotation[3]);
            glm::vec3 mouseFarGrabPosition = glm::vec3(data->mouseFarGrabPosition[0], data->mouseFarGrabPosition[1],
                data->mouseFarGrabPosition[2]);
            glm::quat mouseFarGrabRotation = glm::quat(data->mouseFarGrabRotation[0], data->mouseFarGrabRotation[1],
                data->mouseFarGrabRotation[2], data->mouseFarGrabRotation[3]);

            _farGrabLeftMatrixCache.set(createMatFromQuatAndPos(leftFarGrabRotation, leftFarGrabPosition));
            _farGrabRightMatrixCache.set(createMatFromQuatAndPos(rightFarGrabRotation, rightFarGrabPosition));
            _farGrabMouseMatrixCache.set(createMatFromQuatAndPos(mouseFarGrabRotation, mouseFarGrabPosition));

            sourceBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);
            int numBytesRead = sourceBuffer - startSection;
            _farGrabJointRate.increment(numBytesRead);
            _farGrabJointUpdateRate.increment();
        }
    }

    if (hasJointDefaultPoseFlags) {

@@ -1261,6 +1363,8 @@ float AvatarData::getDataRate(const QString& rateName) const {
        return _jointDataRate.rate() / BYTES_PER_KILOBIT;
    } else if (rateName == "jointDefaultPoseFlagsRate") {
        return _jointDefaultPoseFlagsRate.rate() / BYTES_PER_KILOBIT;
    } else if (rateName == "farGrabJointRate") {
        return _farGrabJointRate.rate() / BYTES_PER_KILOBIT;
    } else if (rateName == "globalPositionOutbound") {
        return _outboundDataRate.globalPositionRate.rate() / BYTES_PER_KILOBIT;
    } else if (rateName == "localPositionOutbound") {

@@ -1318,6 +1422,8 @@ float AvatarData::getUpdateRate(const QString& rateName) const {
        return _faceTrackerUpdateRate.rate();
    } else if (rateName == "jointData") {
        return _jointDataUpdateRate.rate();
    } else if (rateName == "farGrabJointData") {
        return _farGrabJointUpdateRate.rate();
    }
    return 0.0f;
}
@@ -1344,7 +1450,7 @@ void AvatarData::setRawJointData(QVector<JointData> data) {
}

void AvatarData::setJointData(int index, const glm::quat& rotation, const glm::vec3& translation) {
    if (index == -1) {
    if (index < 0 || index >= LOWEST_PSEUDO_JOINT_INDEX) {
        return;
    }
    QWriteLocker writeLock(&_jointDataLock);

@@ -1359,7 +1465,7 @@ void AvatarData::setJointData(int index, const glm::quat& rotation, const glm::v
}

void AvatarData::clearJointData(int index) {
    if (index == -1) {
    if (index < 0 || index >= LOWEST_PSEUDO_JOINT_INDEX) {
        return;
    }
    QWriteLocker writeLock(&_jointDataLock);

@@ -1371,27 +1477,72 @@ void AvatarData::clearJointData(int index) {
}

bool AvatarData::isJointDataValid(int index) const {
    if (index == -1) {
        return false;
    switch (index) {
        case FARGRAB_RIGHTHAND_INDEX: {
            bool valid;
            _farGrabRightMatrixCache.get(valid);
            return valid;
        }
        case FARGRAB_LEFTHAND_INDEX: {
            bool valid;
            _farGrabLeftMatrixCache.get(valid);
            return valid;
        }
        case FARGRAB_MOUSE_INDEX: {
            bool valid;
            _farGrabMouseMatrixCache.get(valid);
            return valid;
        }
        default: {
            if (index < 0 || index >= LOWEST_PSEUDO_JOINT_INDEX) {
                return false;
            }
            QReadLocker readLock(&_jointDataLock);
            return index < _jointData.size();
        }
    }
    QReadLocker readLock(&_jointDataLock);
    return index < _jointData.size();
}

glm::quat AvatarData::getJointRotation(int index) const {
    if (index == -1) {
        return glm::quat();
    switch (index) {
        case FARGRAB_RIGHTHAND_INDEX: {
            return extractRotation(_farGrabRightMatrixCache.get());
        }
        case FARGRAB_LEFTHAND_INDEX: {
            return extractRotation(_farGrabLeftMatrixCache.get());
        }
        case FARGRAB_MOUSE_INDEX: {
            return extractRotation(_farGrabMouseMatrixCache.get());
        }
        default: {
            if (index < 0 || index >= LOWEST_PSEUDO_JOINT_INDEX) {
                return glm::quat();
            }
            QReadLocker readLock(&_jointDataLock);
            return index < _jointData.size() ? _jointData.at(index).rotation : glm::quat();
        }
    }
    QReadLocker readLock(&_jointDataLock);
    return index < _jointData.size() ? _jointData.at(index).rotation : glm::quat();
}

glm::vec3 AvatarData::getJointTranslation(int index) const {
    if (index == -1) {
        return glm::vec3();
    switch (index) {
        case FARGRAB_RIGHTHAND_INDEX: {
            return extractTranslation(_farGrabRightMatrixCache.get());
        }
        case FARGRAB_LEFTHAND_INDEX: {
            return extractTranslation(_farGrabLeftMatrixCache.get());
        }
        case FARGRAB_MOUSE_INDEX: {
            return extractTranslation(_farGrabMouseMatrixCache.get());
        }
        default: {
            if (index < 0 || index >= LOWEST_PSEUDO_JOINT_INDEX) {
                return glm::vec3();
            }
            QReadLocker readLock(&_jointDataLock);
            return index < _jointData.size() ? _jointData.at(index).translation : glm::vec3();
        }
    }
    QReadLocker readLock(&_jointDataLock);
    return index < _jointData.size() ? _jointData.at(index).translation : glm::vec3();
}
glm::vec3 AvatarData::getJointTranslation(const QString& name) const {

@@ -1400,6 +1551,7 @@ glm::vec3 AvatarData::getJointTranslation(const QString& name) const {
    // return getJointTranslation(getJointIndex(name));
    return readLockWithNamedJointIndex<glm::vec3>(name, [this](int index) {
        return _jointData.at(index).translation;
        return getJointTranslation(index);
    });
}


@@ -1437,7 +1589,7 @@ void AvatarData::setJointTranslation(const QString& name, const glm::vec3& trans
}

void AvatarData::setJointRotation(int index, const glm::quat& rotation) {
    if (index == -1) {
    if (index < 0 || index >= LOWEST_PSEUDO_JOINT_INDEX) {
        return;
    }
    QWriteLocker writeLock(&_jointDataLock);

@@ -1450,7 +1602,7 @@ void AvatarData::setJointRotation(int index, const glm::quat& rotation) {
}

void AvatarData::setJointTranslation(int index, const glm::vec3& translation) {
    if (index == -1) {
    if (index < 0 || index >= LOWEST_PSEUDO_JOINT_INDEX) {
        return;
    }
    QWriteLocker writeLock(&_jointDataLock);

@@ -1567,6 +1719,15 @@ int AvatarData::getFauxJointIndex(const QString& name) const {
    if (name == "_CAMERA_MATRIX") {
        return CAMERA_MATRIX_INDEX;
    }
    if (name == "_FARGRAB_RIGHTHAND") {
        return FARGRAB_RIGHTHAND_INDEX;
    }
    if (name == "_FARGRAB_LEFTHAND") {
        return FARGRAB_LEFTHAND_INDEX;
    }
    if (name == "_FARGRAB_MOUSE") {
        return FARGRAB_MOUSE_INDEX;
    }
    return -1;
}


@@ -1908,7 +2069,7 @@ void AvatarData::sendIdentityPacket() {
    auto packetList = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);
    packetList->write(identityData);
    nodeList->eachMatchingNode(
        [&](const SharedNodePointer& node)->bool {
        [](const SharedNodePointer& node)->bool {
            return node->getType() == NodeType::AvatarMixer && node->getActiveSocket();
        },
        [&](const SharedNodePointer& node) {

@@ -138,6 +138,7 @@ namespace AvatarDataPacket {
    const HasFlags PACKET_HAS_FACE_TRACKER_INFO = 1U << 10;
    const HasFlags PACKET_HAS_JOINT_DATA = 1U << 11;
    const HasFlags PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS = 1U << 12;
    const HasFlags PACKET_HAS_GRAB_JOINTS = 1U << 13;
    const size_t AVATAR_HAS_FLAGS_SIZE = 2;

    using SixByteQuat = uint8_t[6];

@@ -273,7 +274,7 @@ namespace AvatarDataPacket {
        SixByteTrans rightHandControllerTranslation;
    };
    */
    size_t maxJointDataSize(size_t numJoints);
    size_t maxJointDataSize(size_t numJoints, bool hasGrabJoints);

    /*
    struct JointDefaultPoseFlags {

@@ -283,6 +284,17 @@ namespace AvatarDataPacket {
    };
    */
    size_t maxJointDefaultPoseFlagsSize(size_t numJoints);

    PACKED_BEGIN struct FarGrabJoints {
        float leftFarGrabPosition[3]; // left controller far-grab joint position
        float leftFarGrabRotation[4]; // left controller far-grab joint rotation
        float rightFarGrabPosition[3]; // right controller far-grab joint position
        float rightFarGrabRotation[4]; // right controller far-grab joint rotation
        float mouseFarGrabPosition[3]; // mouse far-grab joint position
        float mouseFarGrabRotation[4]; // mouse far-grab joint rotation
    } PACKED_END;
    const size_t FAR_GRAB_JOINTS_SIZE = 84;
    static_assert(sizeof(FarGrabJoints) == FAR_GRAB_JOINTS_SIZE, "AvatarDataPacket::FarGrabJoints size doesn't match.");
}

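The 84-byte constant checked by the static_assert above follows directly from the packed layout: three joints (left hand, right hand, mouse), each carrying a 3-float position and a 4-float rotation. A quick sketch of the same arithmetic:

```cpp
#include <cstddef>

// 3 joints x (3 position floats + 4 rotation floats) x 4 bytes per float = 84 bytes.
constexpr std::size_t FLOATS_PER_JOINT = 3 + 4;
constexpr std::size_t NUM_FAR_GRAB_JOINTS = 3; // left, right, mouse
constexpr std::size_t EXPECTED_SIZE = NUM_FAR_GRAB_JOINTS * FLOATS_PER_JOINT * sizeof(float);
static_assert(EXPECTED_SIZE == 84, "FarGrabJoints should pack to 84 bytes");
```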
const float MAX_AUDIO_LOUDNESS = 1000.0f; // close enough for mouth animation

@@ -347,6 +359,7 @@ public:
    RateCounter<> faceTrackerRate;
    RateCounter<> jointDataRate;
    RateCounter<> jointDefaultPoseFlagsRate;
    RateCounter<> farGrabJointRate;
};

class AvatarPriority {

@@ -895,14 +908,14 @@ public:
     * @returns {object}
     */
    // FIXME: Can this name be improved? Can it be deprecated?
    Q_INVOKABLE QVariantList getAttachmentsVariant() const;
    Q_INVOKABLE virtual QVariantList getAttachmentsVariant() const;

    /**jsdoc
     * @function MyAvatar.setAttachmentsVariant
     * @param {object} variant
     */
    // FIXME: Can this name be improved? Can it be deprecated?
    Q_INVOKABLE void setAttachmentsVariant(const QVariantList& variant);
    Q_INVOKABLE virtual void setAttachmentsVariant(const QVariantList& variant);


    /**jsdoc

@@ -969,7 +982,7 @@ public:
     * print (attachments[i].modelURL);
     * }
     */
    Q_INVOKABLE QVector<AttachmentData> getAttachmentData() const;
    Q_INVOKABLE virtual QVector<AttachmentData> getAttachmentData() const;

    /**jsdoc
     * Set all models currently attached to your avatar. For example, if you retrieve attachment data using

@@ -1040,7 +1053,7 @@ public:
     * @param {string} [jointName=""] - The name of the joint to detach the model from. If <code>""</code>, then the most
     * recently attached model is removed from which ever joint it was attached to.
     */
    Q_INVOKABLE void detachOne(const QString& modelURL, const QString& jointName = QString());
    Q_INVOKABLE virtual void detachOne(const QString& modelURL, const QString& jointName = QString());

    /**jsdoc
     * Detach all instances of a particular model from either a specific joint or all joints.

@@ -1049,7 +1062,7 @@ public:
     * @param {string} [jointName=""] - The name of the joint to detach the model from. If <code>""</code>, then the model is
     * detached from all joints.
     */
    Q_INVOKABLE void detachAll(const QString& modelURL, const QString& jointName = QString());
    Q_INVOKABLE virtual void detachAll(const QString& modelURL, const QString& jointName = QString());

    QString getSkeletonModelURLFromScript() const { return _skeletonModelURL.toString(); }
    void setSkeletonModelURLFromScript(const QString& skeletonModelString) { setSkeletonModelURL(QUrl(skeletonModelString)); }

@@ -1317,6 +1330,7 @@ protected:
    bool _firstSkeletonCheck { true };
    QUrl _skeletonFBXURL;
    QVector<AttachmentData> _attachmentData;
    QVector<AttachmentData> _oldAttachmentData;
    QString _displayName;
    QString _sessionDisplayName { };
    bool _lookAtSnappingEnabled { true };

@@ -1369,6 +1383,7 @@ protected:
    RateCounter<> _faceTrackerRate;
    RateCounter<> _jointDataRate;
    RateCounter<> _jointDefaultPoseFlagsRate;
    RateCounter<> _farGrabJointRate;

    // Some rate data for incoming data updates
    RateCounter<> _parseBufferUpdateRate;

@@ -1385,6 +1400,7 @@ protected:
    RateCounter<> _faceTrackerUpdateRate;
    RateCounter<> _jointDataUpdateRate;
    RateCounter<> _jointDefaultPoseFlagsUpdateRate;
    RateCounter<> _farGrabJointUpdateRate;

    // Some rate data for outgoing data
    AvatarDataRate _outboundDataRate;

@@ -1403,6 +1419,10 @@ protected:
    ThreadSafeValueCache<glm::mat4> _controllerLeftHandMatrixCache { glm::mat4() };
    ThreadSafeValueCache<glm::mat4> _controllerRightHandMatrixCache { glm::mat4() };

    ThreadSafeValueCache<glm::mat4> _farGrabRightMatrixCache { glm::mat4() };
    ThreadSafeValueCache<glm::mat4> _farGrabLeftMatrixCache { glm::mat4() };
    ThreadSafeValueCache<glm::mat4> _farGrabMouseMatrixCache { glm::mat4() };

    int getFauxJointIndex(const QString& name) const;

    float _audioLoudness { 0.0f };

@@ -1560,5 +1580,11 @@ const int CONTROLLER_LEFTHAND_INDEX = 65532; // -4
const int CAMERA_RELATIVE_CONTROLLER_RIGHTHAND_INDEX = 65531; // -5
const int CAMERA_RELATIVE_CONTROLLER_LEFTHAND_INDEX = 65530; // -6
const int CAMERA_MATRIX_INDEX = 65529; // -7
const int FARGRAB_RIGHTHAND_INDEX = 65528; // -8
const int FARGRAB_LEFTHAND_INDEX = 65527; // -9
const int FARGRAB_MOUSE_INDEX = 65526; // -10

const int LOWEST_PSEUDO_JOINT_INDEX = 65526;


#endif // hifi_AvatarData_h

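The pseudo-joint constants above sit at the top of the uint16 range (65526 through 65529 here, i.e. -10 through -7 when read as signed), and getFauxJointIndex resolves the scripted names to them. A hedged sketch of that mapping, using only the names and values listed in the header (the lookup-table shape is illustrative, not the engine's implementation):

```cpp
#include <map>
#include <string>

// Pseudo-joint indices from AvatarData.h; scripts address these joints by name.
const std::map<std::string, int> FAUX_JOINTS {
    { "_CAMERA_MATRIX",     65529 },  // -7
    { "_FARGRAB_RIGHTHAND", 65528 },  // -8
    { "_FARGRAB_LEFTHAND",  65527 },  // -9
    { "_FARGRAB_MOUSE",     65526 },  // -10 == LOWEST_PSEUDO_JOINT_INDEX
};

// Anything below LOWEST_PSEUDO_JOINT_INDEX is a real skeleton joint index, which
// is why the joint accessors above now test `index >= LOWEST_PSEUDO_JOINT_INDEX`
// instead of only checking for -1.
```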
@@ -138,7 +138,7 @@ void TextureBaker::processTexture() {
        // IMPORTANT: _originalTexture is empty past this point
        _originalTexture.clear();
        _outputFiles.push_back(originalCopyFilePath);
        meta.original = _metaTexturePathPrefix +_textureURL.fileName();
        meta.original = _metaTexturePathPrefix + _textureURL.fileName();
    }

    auto buffer = std::static_pointer_cast<QIODevice>(std::make_shared<QFile>(originalCopyFilePath));

@@ -149,49 +149,56 @@ void TextureBaker::processTexture() {

    // Compressed KTX
    if (_compressionEnabled) {
        auto processedTexture = image::processImage(buffer, _textureURL.toString().toStdString(),
            ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, true, _abortProcessing);
        if (!processedTexture) {
            handleError("Could not process texture " + _textureURL.toString());
            return;
        }
        processedTexture->setSourceHash(hash);
        constexpr std::array<gpu::BackendTarget, 2> BACKEND_TARGETS {{
            gpu::BackendTarget::GL45,
            gpu::BackendTarget::GLES32
        }};
        for (auto target : BACKEND_TARGETS) {
            auto processedTexture = image::processImage(buffer, _textureURL.toString().toStdString(),
                ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, true,
                target, _abortProcessing);
            if (!processedTexture) {
                handleError("Could not process texture " + _textureURL.toString());
                return;
            }
            processedTexture->setSourceHash(hash);

        if (shouldStop()) {
            return;
        }
            if (shouldStop()) {
                return;
            }

        auto memKTX = gpu::Texture::serialize(*processedTexture);
        if (!memKTX) {
            handleError("Could not serialize " + _textureURL.toString() + " to KTX");
            return;
        }
            auto memKTX = gpu::Texture::serialize(*processedTexture);
            if (!memKTX) {
                handleError("Could not serialize " + _textureURL.toString() + " to KTX");
                return;
            }

        const char* name = khronos::gl::texture::toString(memKTX->_header.getGLInternaFormat());
        if (name == nullptr) {
            handleError("Could not determine internal format for compressed KTX: " + _textureURL.toString());
            return;
        }
            const char* name = khronos::gl::texture::toString(memKTX->_header.getGLInternaFormat());
            if (name == nullptr) {
                handleError("Could not determine internal format for compressed KTX: " + _textureURL.toString());
                return;
            }

        const char* data = reinterpret_cast<const char*>(memKTX->_storage->data());
        const size_t length = memKTX->_storage->size();
            const char* data = reinterpret_cast<const char*>(memKTX->_storage->data());
            const size_t length = memKTX->_storage->size();

        auto fileName = _baseFilename + "_" + name + ".ktx";
        auto filePath = _outputDirectory.absoluteFilePath(fileName);
        QFile bakedTextureFile { filePath };
        if (!bakedTextureFile.open(QIODevice::WriteOnly) || bakedTextureFile.write(data, length) == -1) {
            handleError("Could not write baked texture for " + _textureURL.toString());
            return;
            auto fileName = _baseFilename + "_" + name + ".ktx";
            auto filePath = _outputDirectory.absoluteFilePath(fileName);
            QFile bakedTextureFile { filePath };
            if (!bakedTextureFile.open(QIODevice::WriteOnly) || bakedTextureFile.write(data, length) == -1) {
                handleError("Could not write baked texture for " + _textureURL.toString());
                return;
            }
            _outputFiles.push_back(filePath);
            meta.availableTextureTypes[memKTX->_header.getGLInternaFormat()] = _metaTexturePathPrefix + fileName;
        }
        _outputFiles.push_back(filePath);
        meta.availableTextureTypes[memKTX->_header.getGLInternaFormat()] = _metaTexturePathPrefix + fileName;
    }

    // Uncompressed KTX
    if (_textureType == image::TextureUsage::Type::CUBE_TEXTURE) {
        buffer->reset();
        auto processedTexture = image::processImage(std::move(buffer), _textureURL.toString().toStdString(),
            ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, false, _abortProcessing);
            ABSOLUTE_MAX_TEXTURE_NUM_PIXELS, _textureType, false, gpu::BackendTarget::GL45, _abortProcessing);
        if (!processedTexture) {
            handleError("Could not process texture " + _textureURL.toString());
            return;

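With the loop above, the baker emits one compressed KTX per backend target instead of a single build-time flavor, distinguishing the outputs by the compressed internal-format name embedded in the file name. A small sketch of the resulting naming scheme (the format names shown are examples, not an exhaustive or guaranteed list):

```cpp
#include <QString>

// Assumed illustration: each target compresses to a different format family, so a
// texture "wall" might bake to files such as
//   wall_COMPRESSED_SRGB_S3TC_DXT1_EXT.ktx   (desktop GL45 path)
//   wall_COMPRESSED_SRGB8_ETC2.ktx           (GLES32 path)
QString bakedKtxName(const QString& baseFilename, const char* internalFormatName) {
    // Mirrors `_baseFilename + "_" + name + ".ktx"` from the baker above.
    return baseFilename + "_" + internalFormatName + ".ktx";
}
```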
@@ -38,6 +38,8 @@
#include <EntityScriptClient.h>
#include <Profile.h>

const QString GRABBABLE_USER_DATA = "{\"grabbableKey\":{\"grabbable\":true}}";
const QString NOT_GRABBABLE_USER_DATA = "{\"grabbableKey\":{\"grabbable\":false}}";

EntityScriptingInterface::EntityScriptingInterface(bool bidOnSimulationOwnership) :
    _entityTree(NULL),

@@ -303,7 +305,7 @@ bool EntityScriptingInterface::addLocalEntityCopy(EntityItemProperties& properti
}

QUuid EntityScriptingInterface::addModelEntity(const QString& name, const QString& modelUrl, const QString& textures,
                                               const QString& shapeType, bool dynamic, bool collisionless,
                                               const QString& shapeType, bool dynamic, bool collisionless, bool grabbable,
                                               const glm::vec3& position, const glm::vec3& gravity) {
    _activityTracking.addedEntityCount++;


@@ -314,6 +316,7 @@ QUuid EntityScriptingInterface::addModelEntity(const QString& name, const QStrin
    properties.setShapeTypeFromString(shapeType);
    properties.setDynamic(dynamic);
    properties.setCollisionless(collisionless);
    properties.setUserData(grabbable ? GRABBABLE_USER_DATA : NOT_GRABBABLE_USER_DATA);
    properties.setPosition(position);
    properties.setGravity(gravity);
    if (!textures.isEmpty()) {

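The new grabbable parameter simply selects between the two userData constants declared at the top of the file, so the created entity carries the standard grabbableKey JSON that grab scripts look for. A minimal sketch of what that userData parses to (an illustrative check, not engine code):

```cpp
#include <QJsonDocument>
#include <QJsonObject>
#include <QString>

// GRABBABLE_USER_DATA from above: {"grabbableKey":{"grabbable":true}}
bool isMarkedGrabbable(const QString& userData) {
    const QJsonObject root = QJsonDocument::fromJson(userData.toUtf8()).object();
    return root.value("grabbableKey").toObject().value("grabbable").toBool(false);
}
```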
@@ -1249,7 +1252,7 @@ bool EntityScriptingInterface::actionWorker(const QUuid& entityID,

    EntityItemPointer entity;
    bool doTransmit = false;
    _entityTree->withWriteLock([&] {
    _entityTree->withWriteLock([this, &entity, entityID, myNodeID, &doTransmit, actor, &properties] {
        EntitySimulationPointer simulation = _entityTree->getSimulation();
        entity = _entityTree->findEntityByEntityItemID(entityID);
        if (!entity) {

@@ -39,6 +39,9 @@
class EntityTree;
class MeshProxy;

extern const QString GRABBABLE_USER_DATA;
extern const QString NOT_GRABBABLE_USER_DATA;

// helper factory to compose standardized, async metadata queries for "magic" Entity properties
// like .script and .serverScripts. This is used for automated testing of core scripting features
// as well as to provide early adopters a self-discoverable, consistent way to diagnose common

@@ -237,7 +240,7 @@ public slots:
    /// temporary method until addEntity can be used from QJSEngine
    /// Deliberately not adding jsdoc, only used internally.
    Q_INVOKABLE QUuid addModelEntity(const QString& name, const QString& modelUrl, const QString& textures, const QString& shapeType, bool dynamic,
                                     bool collisionless, const glm::vec3& position, const glm::vec3& gravity);
                                     bool collisionless, bool grabbable, const glm::vec3& position, const glm::vec3& gravity);

    /**jsdoc
     * Create a clone of an entity. A clone can be created by a client that doesn't have rez permissions in the current domain.

@@ -1435,7 +1435,7 @@ void EntityTree::validatePop(const QString& certID, const EntityItemID& entityIt

    QNetworkReply* networkReply = networkAccessManager.put(networkRequest, QJsonDocument(request).toJson());

    connect(networkReply, &QNetworkReply::finished, [=]() {
    connect(networkReply, &QNetworkReply::finished, [this, networkReply, entityItemID, certID, senderNode]() {
        QJsonObject jsonObject = QJsonDocument::fromJson(networkReply->readAll()).object();
        jsonObject = jsonObject["data"].toObject();

@@ -371,7 +371,7 @@ void ModelEntityItem::setAnimationFPS(float value) {

// virtual
bool ModelEntityItem::shouldBePhysical() const {
    return !isDead() && getShapeType() != SHAPE_TYPE_NONE;
    return !isDead() && getShapeType() != SHAPE_TYPE_NONE && QUrl(_modelURL).isValid();
}

void ModelEntityItem::resizeJointArrays(int newSize) {

@@ -65,20 +65,24 @@ GLTexture* GLESBackend::syncGPUObject(const TexturePointer& texturePointer) {
            object = new GLESAttachmentTexture(shared_from_this(), texture);
            break;

        case TextureUsageType::RESOURCE:
            // FIXME disabling variable allocation textures for now, while debugging android rendering
            // and crashes
#if 0
            qCDebug(gpugllogging) << "variable / Strict texture " << texture.source().c_str();
            object = new GLESResourceTexture(shared_from_this(), texture);
            GLVariableAllocationSupport::addMemoryManagedTexture(texturePointer);
            break;
#endif
        case TextureUsageType::STRICT_RESOURCE:
            qCDebug(gpugllogging) << "Strict texture " << texture.source().c_str();
            object = new GLESStrictResourceTexture(shared_from_this(), texture);
            break;

        case TextureUsageType::RESOURCE: {
            auto &transferEngine = _textureManagement._transferEngine;
            if (transferEngine->allowCreate()) {
                object = new GLESResourceTexture(shared_from_this(), texture);
                transferEngine->addMemoryManagedTexture(texturePointer);
            } else {
                auto fallback = texturePointer->getFallbackTexture();
                if (fallback) {
                    object = static_cast<GLESTexture *>(syncGPUObject(fallback));
                }
            }
            break;
        }
        default:
            Q_UNREACHABLE();
    }

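The reworked RESOURCE case defers texture creation to the transfer engine's budget: when allowCreate() says no, the backend temporarily reuses the texture's fallback instead of allocating a new GPU object. A hedged, self-contained sketch of that create-or-fallback shape (the types here are stand-ins, not the real backend classes):

```cpp
#include <memory>

struct GpuTexture {};                      // stand-in for the backend GL texture object
struct Texture {                           // stand-in for gpu::Texture
    std::shared_ptr<Texture> fallback;
    std::shared_ptr<Texture> getFallbackTexture() const { return fallback; }
};
struct TransferEngine {                    // stand-in for the texture transfer engine
    bool budgetAvailable { true };
    bool allowCreate() const { return budgetAvailable; }
    void addMemoryManagedTexture(const std::shared_ptr<Texture>&) { /* track for later mip streaming */ }
};

GpuTexture* syncResource(TransferEngine& engine, const std::shared_ptr<Texture>& texture) {
    static GpuTexture resourceObject, fallbackObject;
    if (engine.allowCreate()) {
        engine.addMemoryManagedTexture(texture); // stream remaining mips later
        return &resourceObject;                  // stands in for `new GLESResourceTexture(...)`
    }
    if (auto fallback = texture->getFallbackTexture()) {
        return &fallbackObject;                  // reuse the fallback's already-synced GPU object
    }
    return nullptr;                              // nothing created this frame; retry on a later sync
}
```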
@ -195,7 +199,6 @@ Size GLESTexture::copyMipFaceLinesFromTexture(uint16_t mip, uint8_t face, const
|
|||
glTexSubImage2D(target, mip, 0, yOffset, size.x, size.y, format, type, sourcePointer);
|
||||
}
|
||||
} else {
|
||||
// TODO: implement for android
|
||||
assert(false);
|
||||
amountCopied = 0;
|
||||
}
|
||||
|
@ -385,7 +388,6 @@ void GLESVariableAllocationTexture::allocateStorage(uint16 allocatedMip) {
|
|||
const auto totalMips = _gpuObject.getNumMips();
|
||||
const auto mips = totalMips - _allocatedMip;
|
||||
withPreservedTexture([&] {
|
||||
// FIXME technically GL 4.2, but OSX includes the ARB_texture_storage extension
|
||||
glTexStorage2D(_target, mips, texelFormat.internalFormat, dimensions.x, dimensions.y); CHECK_GL_ERROR();
|
||||
});
|
||||
auto mipLevels = _gpuObject.getNumMips();
|
||||
|
@ -426,139 +428,26 @@ void GLESVariableAllocationTexture::syncSampler() const {
|
|||
});
|
||||
}
|
||||
|
||||
|
||||
void copyUncompressedTexGPUMem(const gpu::Texture& texture, GLenum texTarget, GLuint srcId, GLuint destId, uint16_t numMips, uint16_t srcMipOffset, uint16_t destMipOffset, uint16_t populatedMips) {
|
||||
// DestID must be bound to the GLESBackend::RESOURCE_TRANSFER_TEX_UNIT
|
||||
|
||||
GLuint fbo { 0 };
|
||||
glGenFramebuffers(1, &fbo);
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, fbo);
|
||||
|
||||
uint16_t mips = numMips;
|
||||
// copy pre-existing mips
|
||||
for (uint16_t mip = populatedMips; mip < mips; ++mip) {
|
||||
void copyTexGPUMem(const gpu::Texture& texture, GLenum texTarget, GLuint srcId, GLuint destId, uint16_t numMips, uint16_t srcMipOffset, uint16_t destMipOffset, uint16_t populatedMips) {
|
||||
for (uint16_t mip = populatedMips; mip < numMips; ++mip) {
|
||||
auto mipDimensions = texture.evalMipDimensions(mip);
|
||||
uint16_t targetMip = mip - destMipOffset;
|
||||
uint16_t sourceMip = mip - srcMipOffset;
|
||||
for (GLenum target : GLTexture::getFaceTargets(texTarget)) {
|
||||
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, target, srcId, sourceMip);
|
||||
(void)CHECK_GL_ERROR();
|
||||
glCopyTexSubImage2D(target, targetMip, 0, 0, 0, 0, mipDimensions.x, mipDimensions.y);
|
||||
auto faces = GLTexture::getFaceCount(texTarget);
|
||||
for (uint8_t face = 0; face < faces; ++face) {
|
||||
glCopyImageSubData(
|
||||
srcId, texTarget, sourceMip, 0, 0, face,
|
||||
destId, texTarget, targetMip, 0, 0, face,
|
||||
mipDimensions.x, mipDimensions.y, 1
|
||||
);
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
}
|
||||
|
||||
// destroy the transfer framebuffer
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
|
||||
glDeleteFramebuffers(1, &fbo);
|
||||
}
|
||||
|
||||
void copyCompressedTexGPUMem(const gpu::Texture& texture, GLenum texTarget, GLuint srcId, GLuint destId, uint16_t numMips, uint16_t srcMipOffset, uint16_t destMipOffset, uint16_t populatedMips) {
|
||||
// DestID must be bound to the GLESBackend::RESOURCE_TRANSFER_TEX_UNIT
|
||||
|
||||
struct MipDesc {
|
||||
GLint _faceSize;
|
||||
GLint _size;
|
||||
GLint _offset;
|
||||
GLint _width;
|
||||
GLint _height;
|
||||
};
|
||||
std::vector<MipDesc> sourceMips(numMips);
|
||||
|
||||
std::vector<GLubyte> bytes;
|
||||
|
||||
glActiveTexture(GL_TEXTURE0 + GLESBackend::RESOURCE_TRANSFER_EXTRA_TEX_UNIT);
|
||||
glBindTexture(texTarget, srcId);
|
||||
const auto& faceTargets = GLTexture::getFaceTargets(texTarget);
|
||||
GLint internalFormat { 0 };
|
||||
|
||||
// Collect the mip description from the source texture
|
||||
GLint bufferOffset { 0 };
|
||||
for (uint16_t mip = populatedMips; mip < numMips; ++mip) {
|
||||
auto& sourceMip = sourceMips[mip];
|
||||
|
||||
uint16_t sourceLevel = mip - srcMipOffset;
|
||||
|
||||
// Grab internal format once
|
||||
if (internalFormat == 0) {
|
||||
glGetTexLevelParameteriv(faceTargets[0], sourceLevel, GL_TEXTURE_INTERNAL_FORMAT, &internalFormat);
|
||||
}
|
||||
|
||||
// Collect the size of the first face, and then compute the total size offset needed for this mip level
|
||||
auto mipDimensions = texture.evalMipDimensions(mip);
|
||||
sourceMip._width = mipDimensions.x;
|
||||
sourceMip._height = mipDimensions.y;
|
||||
#ifdef DEBUG_COPY
|
||||
glGetTexLevelParameteriv(faceTargets.front(), sourceLevel, GL_TEXTURE_WIDTH, &sourceMip._width);
|
||||
glGetTexLevelParameteriv(faceTargets.front(), sourceLevel, GL_TEXTURE_HEIGHT, &sourceMip._height);
|
||||
#endif
|
||||
// TODO: retrieve the size of a compressed image
|
||||
assert(false);
|
||||
//glGetTexLevelParameteriv(faceTargets.front(), sourceLevel, GL_TEXTURE_COMPRESSED_IMAGE_SIZE, &sourceMip._faceSize);
|
||||
sourceMip._size = (GLint)faceTargets.size() * sourceMip._faceSize;
|
||||
sourceMip._offset = bufferOffset;
|
||||
bufferOffset += sourceMip._size;
|
||||
}
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
// Allocate the PBO to accomodate for all the mips to copy
|
||||
GLuint pbo { 0 };
|
||||
glGenBuffers(1, &pbo);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, bufferOffset, nullptr, GL_STATIC_COPY);
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
// Transfer from source texture to pbo
|
||||
for (uint16_t mip = populatedMips; mip < numMips; ++mip) {
|
||||
auto& sourceMip = sourceMips[mip];
|
||||
|
||||
uint16_t sourceLevel = mip - srcMipOffset;
|
||||
|
||||
for (GLint f = 0; f < (GLint)faceTargets.size(); f++) {
|
||||
// TODO: implement for android
|
||||
//glGetCompressedTexImage(faceTargets[f], sourceLevel, BUFFER_OFFSET(sourceMip._offset + f * sourceMip._faceSize));
|
||||
}
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
// Now populate the new texture from the pbo
|
||||
glBindTexture(texTarget, 0);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, pbo);
|
||||
|
||||
glActiveTexture(GL_TEXTURE0 + GLESBackend::RESOURCE_TRANSFER_TEX_UNIT);
|
||||
|
||||
// Transfer from pbo to new texture
|
||||
for (uint16_t mip = populatedMips; mip < numMips; ++mip) {
|
||||
auto& sourceMip = sourceMips[mip];
|
||||
|
||||
uint16_t destLevel = mip - destMipOffset;
|
||||
|
||||
for (GLint f = 0; f < (GLint)faceTargets.size(); f++) {
|
||||
#ifdef DEBUG_COPY
|
||||
GLint destWidth, destHeight, destSize;
|
||||
glGetTexLevelParameteriv(faceTargets.front(), destLevel, GL_TEXTURE_WIDTH, &destWidth);
|
||||
glGetTexLevelParameteriv(faceTargets.front(), destLevel, GL_TEXTURE_HEIGHT, &destHeight);
|
||||
glGetTexLevelParameteriv(faceTargets.front(), destLevel, GL_TEXTURE_COMPRESSED_IMAGE_SIZE, &destSize);
|
||||
#endif
|
||||
glCompressedTexSubImage2D(faceTargets[f], destLevel, 0, 0, sourceMip._width, sourceMip._height, internalFormat,
|
||||
sourceMip._faceSize, BUFFER_OFFSET(sourceMip._offset + f * sourceMip._faceSize));
|
||||
}
|
||||
}
|
||||
|
||||
glBindBuffer(GL_PIXEL_UNPACK_BUFFER, 0);
|
||||
glDeleteBuffers(1, &pbo);
|
||||
}
|
||||
|
||||
void GLESVariableAllocationTexture::copyTextureMipsInGPUMem(GLuint srcId, GLuint destId, uint16_t srcMipOffset, uint16_t destMipOffset, uint16_t populatedMips) {
|
||||
uint16_t numMips = _gpuObject.getNumMips();
|
||||
withPreservedTexture([&] {
|
||||
if (_texelFormat.isCompressed()) {
|
||||
copyCompressedTexGPUMem(_gpuObject, _target, srcId, destId, numMips, srcMipOffset, destMipOffset, populatedMips);
|
||||
} else {
|
||||
copyUncompressedTexGPUMem(_gpuObject, _target, srcId, destId, numMips, srcMipOffset, destMipOffset, populatedMips);
|
||||
}
|
||||
});
|
||||
copyTexGPUMem(_gpuObject, _target, srcId, destId, numMips, srcMipOffset, destMipOffset, populatedMips);
|
||||
}
|
||||
|
||||
size_t GLESVariableAllocationTexture::promote() {
|
||||
|
|
|
@ -502,7 +502,7 @@ void Texture::setSampler(const Sampler& sampler) {
|
|||
}
|
||||
|
||||
|
||||
bool Texture::generateIrradiance() {
|
||||
bool Texture::generateIrradiance(gpu::BackendTarget target) {
|
||||
if (getType() != TEX_CUBE) {
|
||||
return false;
|
||||
}
|
||||
|
@ -513,7 +513,7 @@ bool Texture::generateIrradiance() {
|
|||
_irradiance = std::make_shared<SphericalHarmonics>();
|
||||
}
|
||||
|
||||
_irradiance->evalFromTexture(*this);
|
||||
_irradiance->evalFromTexture(*this, target);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -676,7 +676,7 @@ void sphericalHarmonicsEvaluateDirection(float * result, int order, const glm::
|
|||
result[8] = P_2_2 * ((double)dir.x * (double)dir.x - (double)dir.y * (double)dir.y);
|
||||
}
|
||||
|
||||
bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<glm::vec3> & output, const uint order) {
|
||||
bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<glm::vec3> & output, const uint order, gpu::BackendTarget target) {
|
||||
int width = cubeTexture.getWidth();
|
||||
if(width != cubeTexture.getHeight()) {
|
||||
return false;
|
||||
|
@ -684,22 +684,6 @@ bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<
|
|||
|
||||
PROFILE_RANGE(render_gpu, "sphericalHarmonicsFromTexture");
|
||||
|
||||
#ifndef USE_GLES
|
||||
auto mipFormat = cubeTexture.getStoredMipFormat();
|
||||
std::function<glm::vec3(uint32)> unpackFunc;
|
||||
switch (mipFormat.getSemantic()) {
|
||||
case gpu::R11G11B10:
|
||||
unpackFunc = glm::unpackF2x11_1x10;
|
||||
break;
|
||||
case gpu::RGB9E5:
|
||||
unpackFunc = glm::unpackF3x9_E1x5;
|
||||
break;
|
||||
default:
|
||||
assert(false);
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
|
||||
const uint sqOrder = order*order;
|
||||
|
||||
// allocate memory for calculations
|
||||
|
@ -733,11 +717,7 @@ bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<
|
|||
for(int face=0; face < gpu::Texture::NUM_CUBE_FACES; face++) {
|
||||
PROFILE_RANGE(render_gpu, "ProcessFace");
|
||||
|
||||
#ifndef USE_GLES
|
||||
auto data = reinterpret_cast<const uint32*>( cubeTexture.accessStoredMipFace(0, face)->readData() );
|
||||
#else
|
||||
auto data = cubeTexture.accessStoredMipFace(0, face)->readData();
|
||||
#endif
|
||||
if (data == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
@ -819,20 +799,40 @@ bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<
|
|||
|
||||
// get color from texture
|
||||
glm::vec3 color{ 0.0f, 0.0f, 0.0f };
|
||||
for (int i = 0; i < stride; ++i) {
|
||||
for (int j = 0; j < stride; ++j) {
|
||||
#ifndef USE_GLES
|
||||
int k = (int)(x + i - halfStride + (y + j - halfStride) * width);
|
||||
color += unpackFunc(data[k]);
|
||||
#else
|
||||
const int NUM_COMPONENTS_PER_PIXEL = 4;
|
||||
int k = NUM_COMPONENTS_PER_PIXEL * (int)(x + i - halfStride + (y + j - halfStride) * width);
|
||||
// BGRA -> RGBA
|
||||
color += glm::pow(glm::vec3(data[k + 2], data[k + 1], data[k]) / 255.0f, glm::vec3(2.2f));
|
||||
#endif
|
||||
|
||||
if (target != gpu::BackendTarget::GLES32) {
|
||||
auto mipFormat = cubeTexture.getStoredMipFormat();
|
||||
std::function<glm::vec3(uint32)> unpackFunc;
|
||||
switch (mipFormat.getSemantic()) {
|
||||
case gpu::R11G11B10:
|
||||
unpackFunc = glm::unpackF2x11_1x10;
|
||||
break;
|
||||
case gpu::RGB9E5:
|
||||
unpackFunc = glm::unpackF3x9_E1x5;
|
||||
break;
|
||||
default:
|
||||
assert(false);
|
||||
break;
|
||||
}
|
||||
auto data32 = reinterpret_cast<const uint32*>(data);
|
||||
for (int i = 0; i < stride; ++i) {
|
||||
for (int j = 0; j < stride; ++j) {
|
||||
int k = (int)(x + i - halfStride + (y + j - halfStride) * width);
|
||||
color += unpackFunc(data32[k]);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// BGRA -> RGBA
|
||||
const int NUM_COMPONENTS_PER_PIXEL = 4;
|
||||
for (int i = 0; i < stride; ++i) {
|
||||
for (int j = 0; j < stride; ++j) {
|
||||
int k = NUM_COMPONENTS_PER_PIXEL * (int)(x + i - halfStride + (y + j - halfStride) * width);
|
||||
color += glm::pow(glm::vec3(data[k + 2], data[k + 1], data[k]) / 255.0f, glm::vec3(2.2f));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// scale color and add to previously accumulated coefficients
|
||||
// red
|
||||
sphericalHarmonicsScale(shBuffB.data(), order, shBuff.data(), color.r * fDiffSolid);
|
||||
|
@ -861,10 +861,10 @@ bool sphericalHarmonicsFromTexture(const gpu::Texture& cubeTexture, std::vector<
|
|||
return true;
|
||||
}
|
||||
|
||||
void SphericalHarmonics::evalFromTexture(const Texture& texture) {
|
||||
void SphericalHarmonics::evalFromTexture(const Texture& texture, gpu::BackendTarget target) {
|
||||
if (texture.isDefined()) {
|
||||
std::vector< glm::vec3 > coefs;
|
||||
sphericalHarmonicsFromTexture(texture, coefs, 3);
|
||||
sphericalHarmonicsFromTexture(texture, coefs, 3, target);
|
||||
|
||||
L00 = coefs[0];
|
||||
L1m1 = coefs[1];
|
||||
|
|
|
@@ -43,6 +43,11 @@ namespace khronos { namespace gl { namespace texture {

namespace gpu {

enum class BackendTarget {
    GL41,
    GL45,
    GLES32
};

const std::string SOURCE_HASH_KEY { "hifi.sourceHash" };


@@ -82,7 +87,7 @@ public:

    void assignPreset(int p);

    void evalFromTexture(const Texture& texture);
    void evalFromTexture(const Texture& texture, gpu::BackendTarget target);
};
typedef std::shared_ptr< SphericalHarmonics > SHPointer;


@@ -541,7 +546,7 @@ public:
    Usage getUsage() const { return _usage; }

    // For Cube Texture, it's possible to generate the irradiance spherical harmonics and make them availalbe with the texture
    bool generateIrradiance();
    bool generateIrradiance(gpu::BackendTarget target);
    const SHPointer& getIrradiance(uint16 slice = 0) const { return _irradiance; }
    void overrideIrradiance(SHPointer irradiance) { _irradiance = irradiance; }
    bool isIrradianceValid() const { return _isIrradianceValid; }

@@ -3,3 +3,9 @@ setup_hifi_library()
link_hifi_libraries(shared gpu)
target_nvtt()
target_etc2comp()

if (UNIX AND NOT APPLE)
    set(THREADS_PREFER_PTHREAD_FLAG ON)
    find_package(Threads REQUIRED)
    target_link_libraries(image Threads::Threads)
endif()

@@ -31,17 +31,13 @@ using namespace gpu;
#define CPU_MIPMAPS 1
#include <nvtt/nvtt.h>

#ifdef USE_GLES
#undef _CRT_SECURE_NO_WARNINGS
#include <Etc.h>
#include <EtcFilter.h>
#endif

static const glm::uvec2 SPARSE_PAGE_SIZE(128);
#ifdef Q_OS_ANDROID
static const glm::uvec2 MAX_TEXTURE_SIZE(2048);
#else
static const glm::uvec2 MAX_TEXTURE_SIZE(4096);
#endif
static const glm::uvec2 MAX_TEXTURE_SIZE_GLES(2048);
static const glm::uvec2 MAX_TEXTURE_SIZE_GL(4096);
bool DEV_DECIMATE_TEXTURES = false;
std::atomic<size_t> DECIMATED_TEXTURE_COUNT{ 0 };
std::atomic<size_t> RECTIFIED_TEXTURE_COUNT{ 0 };

@@ -83,11 +79,12 @@ const QStringList getSupportedFormats() {


// On GLES, we don't use HDR skyboxes
#ifndef USE_GLES
QImage::Format QIMAGE_HDR_FORMAT = QImage::Format_RGB30;
#else
QImage::Format QIMAGE_HDR_FORMAT = QImage::Format_RGB32;
#endif
QImage::Format hdrFormatForTarget(BackendTarget target) {
    if (target == BackendTarget::GLES32) {
        return QImage::Format_RGB32;
    }
    return QImage::Format_RGB30;
}

TextureUsage::TextureLoader TextureUsage::getTextureLoaderForType(Type type, const QVariantMap& options) {
    switch (type) {

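Replacing the USE_GLES and Q_OS_ANDROID preprocessor switches with BackendTarget-keyed helpers means a single build of the image library can produce output for both desktop GL and GLES. A small sketch of the same pattern applied to the texture-size cap shown above (the helper name is illustrative; the enum matches the one added in gpu/Texture.h):

```cpp
#include <glm/glm.hpp>

// BackendTarget as introduced in the gpu namespace (see the enum above).
enum class BackendTarget { GL41, GL45, GLES32 };

// Mirrors MAX_TEXTURE_SIZE_GL / MAX_TEXTURE_SIZE_GLES: the size cap becomes a
// runtime decision per target instead of a compile-time #ifdef.
glm::uvec2 maxTextureSizeForTarget(BackendTarget target) {
    return (target == BackendTarget::GLES32) ? glm::uvec2(2048) : glm::uvec2(4096);
}
```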
@ -123,63 +120,63 @@ TextureUsage::TextureLoader TextureUsage::getTextureLoaderForType(Type type, con
|
|||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createStrict2DTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, target, true, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::create2DTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createAlbedoTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createEmissiveTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createLightmapTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureColorFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createNormalTextureFromNormalImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createNormalTextureFromBumpImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureNormalMapFromImage(std::move(srcImage), srcImageName, compress, target, true, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createRoughnessTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createRoughnessTextureFromGlossImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, target, true, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createMetallicTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return process2DTextureGrayscaleFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createCubeTextureFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, true, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, target, true, abortProcessing);
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::createCubeTextureFromImageWithoutIrradiance(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, false, abortProcessing);
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
return processCubeTextureColorFromImage(std::move(srcImage), srcImageName, compress, target, false, abortProcessing);
|
||||
}
|
||||
|
||||
static float denormalize(float value, const float minValue) {
|
||||
|
@ -228,7 +225,7 @@ QImage processRawImageData(QIODevice& content, const std::string& filename) {
|
|||
|
||||
gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& filename,
|
||||
int maxNumPixels, TextureUsage::Type textureType,
|
||||
bool compress, const std::atomic<bool>& abortProcessing) {
|
||||
bool compress, BackendTarget target, const std::atomic<bool>& abortProcessing) {
|
||||
|
||||
QImage image = processRawImageData(*content.get(), filename);
|
||||
// Texture content can take up a lot of memory. Here we release our ownership of that content
|
||||
|
@ -259,12 +256,12 @@ gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::
|
|||
}
|
||||
|
||||
auto loader = TextureUsage::getTextureLoaderForType(textureType);
|
||||
auto texture = loader(std::move(image), filename, compress, abortProcessing);
|
||||
auto texture = loader(std::move(image), filename, compress, target, abortProcessing);
|
||||
|
||||
return texture;
|
||||
}
|
||||
|
||||
QImage processSourceImage(QImage&& srcImage, bool cubemap) {
|
||||
QImage processSourceImage(QImage&& srcImage, bool cubemap, BackendTarget target) {
|
||||
PROFILE_RANGE(resource_parse, "processSourceImage");
|
||||
|
||||
// Take a local copy to force move construction
|
||||
|
@ -274,7 +271,8 @@ QImage processSourceImage(QImage&& srcImage, bool cubemap) {
|
|||
const glm::uvec2 srcImageSize = toGlm(localCopy.size());
|
||||
glm::uvec2 targetSize = srcImageSize;
|
||||
|
||||
while (glm::any(glm::greaterThan(targetSize, MAX_TEXTURE_SIZE))) {
|
||||
const auto maxTextureSize = target == BackendTarget::GLES32 ? MAX_TEXTURE_SIZE_GLES : MAX_TEXTURE_SIZE_GL;
|
||||
while (glm::any(glm::greaterThan(targetSize, maxTextureSize))) {
|
||||
targetSize /= 2;
|
||||
}
|
||||
if (targetSize != srcImageSize) {
|
||||
|
@ -406,12 +404,12 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
void generateHDRMips(gpu::Texture* texture, QImage&& image, const std::atomic<bool>& abortProcessing, int face) {
|
||||
void generateHDRMips(gpu::Texture* texture, QImage&& image, BackendTarget target, const std::atomic<bool>& abortProcessing, int face) {
|
||||
// Take a local copy to force move construction
|
||||
// https://github.com/isocpp/CppCoreGuidelines/blob/master/CppCoreGuidelines.md#f18-for-consume-parameters-pass-by-x-and-stdmove-the-parameter
|
||||
QImage localCopy = std::move(image);
|
||||
|
||||
assert(localCopy.format() == QIMAGE_HDR_FORMAT);
|
||||
assert(localCopy.format() == hdrFormatForTarget(target));
|
||||
|
||||
const int width = localCopy.width(), height = localCopy.height();
|
||||
std::vector<glm::vec4> data;
|
||||
|
@ -503,220 +501,219 @@ void generateHDRMips(gpu::Texture* texture, QImage&& image, const std::atomic<bo
|
|||
}
|
||||
}
|
||||
|
||||
void generateLDRMips(gpu::Texture* texture, QImage&& image, const std::atomic<bool>& abortProcessing, int face) {
|
||||
void generateLDRMips(gpu::Texture* texture, QImage&& image, BackendTarget target, const std::atomic<bool>& abortProcessing, int face) {
|
||||
// Take a local copy to force move construction
|
||||
// https://github.com/isocpp/CppCoreGuidelines/blob/master/CppCoreGuidelines.md#f18-for-consume-parameters-pass-by-x-and-stdmove-the-parameter
|
||||
QImage localCopy = std::move(image);
|
||||
|
||||
if (localCopy.format() != QImage::Format_ARGB32 && localCopy.format() != QIMAGE_HDR_FORMAT) {
|
||||
if (localCopy.format() != QImage::Format_ARGB32 && localCopy.format() != hdrFormatForTarget(target)) {
|
||||
localCopy = localCopy.convertToFormat(QImage::Format_ARGB32);
|
||||
}
|
||||
|
||||
const int width = localCopy.width(), height = localCopy.height();
|
||||
auto mipFormat = texture->getStoredMipFormat();
|
||||
|
||||
#ifndef USE_GLES
|
||||
const void* data = static_cast<const void*>(localCopy.constBits());
|
||||
nvtt::TextureType textureType = nvtt::TextureType_2D;
|
||||
nvtt::InputFormat inputFormat = nvtt::InputFormat_BGRA_8UB;
|
||||
nvtt::WrapMode wrapMode = nvtt::WrapMode_Mirror;
|
||||
nvtt::RoundMode roundMode = nvtt::RoundMode_None;
|
||||
nvtt::AlphaMode alphaMode = nvtt::AlphaMode_None;
|
||||
if (target != BackendTarget::GLES32) {
|
||||
const void* data = static_cast<const void*>(localCopy.constBits());
|
||||
nvtt::TextureType textureType = nvtt::TextureType_2D;
|
||||
nvtt::InputFormat inputFormat = nvtt::InputFormat_BGRA_8UB;
|
||||
nvtt::WrapMode wrapMode = nvtt::WrapMode_Mirror;
|
||||
nvtt::RoundMode roundMode = nvtt::RoundMode_None;
|
||||
nvtt::AlphaMode alphaMode = nvtt::AlphaMode_None;
|
||||
|
||||
float inputGamma = 2.2f;
|
||||
float outputGamma = 2.2f;
|
||||
float inputGamma = 2.2f;
|
||||
float outputGamma = 2.2f;
|
||||
|
||||
nvtt::InputOptions inputOptions;
|
||||
inputOptions.setTextureLayout(textureType, width, height);
|
||||
nvtt::InputOptions inputOptions;
|
||||
inputOptions.setTextureLayout(textureType, width, height);
|
||||
|
||||
inputOptions.setMipmapData(data, width, height);
|
||||
// setMipmapData copies the memory, so free up the memory afterward to avoid bloating the heap
|
||||
data = nullptr;
|
||||
localCopy = QImage(); // QImage doesn't have a clear function, so override it with an empty one.
|
||||
inputOptions.setMipmapData(data, width, height);
|
||||
// setMipmapData copies the memory, so free up the memory afterward to avoid bloating the heap
|
||||
data = nullptr;
|
||||
localCopy = QImage(); // QImage doesn't have a clear function, so override it with an empty one.
|
||||
|
||||
inputOptions.setFormat(inputFormat);
|
||||
inputOptions.setGamma(inputGamma, outputGamma);
|
||||
inputOptions.setAlphaMode(alphaMode);
|
||||
inputOptions.setWrapMode(wrapMode);
|
||||
inputOptions.setRoundMode(roundMode);
|
||||
inputOptions.setFormat(inputFormat);
|
||||
inputOptions.setGamma(inputGamma, outputGamma);
|
||||
inputOptions.setAlphaMode(alphaMode);
|
||||
inputOptions.setWrapMode(wrapMode);
|
||||
inputOptions.setRoundMode(roundMode);
|
||||
|
||||
inputOptions.setMipmapGeneration(true);
|
||||
inputOptions.setMipmapFilter(nvtt::MipmapFilter_Box);
|
||||
inputOptions.setMipmapGeneration(true);
|
||||
inputOptions.setMipmapFilter(nvtt::MipmapFilter_Box);
|
||||
|
||||
nvtt::CompressionOptions compressionOptions;
|
||||
compressionOptions.setQuality(nvtt::Quality_Production);
|
||||
nvtt::CompressionOptions compressionOptions;
|
||||
compressionOptions.setQuality(nvtt::Quality_Production);
|
||||
|
||||
if (mipFormat == gpu::Element::COLOR_COMPRESSED_BCX_SRGB) {
|
||||
compressionOptions.setFormat(nvtt::Format_BC1);
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_BCX_SRGBA_MASK) {
|
||||
alphaMode = nvtt::AlphaMode_Transparency;
|
||||
compressionOptions.setFormat(nvtt::Format_BC1a);
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_BCX_SRGBA) {
|
||||
alphaMode = nvtt::AlphaMode_Transparency;
|
||||
compressionOptions.setFormat(nvtt::Format_BC3);
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_BCX_RED) {
|
||||
compressionOptions.setFormat(nvtt::Format_BC4);
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_BCX_XY) {
|
||||
compressionOptions.setFormat(nvtt::Format_BC5);
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_BCX_SRGBA_HIGH) {
|
||||
alphaMode = nvtt::AlphaMode_Transparency;
|
||||
compressionOptions.setFormat(nvtt::Format_BC7);
|
||||
} else if (mipFormat == gpu::Element::COLOR_RGBA_32) {
|
||||
compressionOptions.setFormat(nvtt::Format_RGBA);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPitchAlignment(4);
|
||||
compressionOptions.setPixelFormat(32,
|
||||
0x000000FF,
|
||||
0x0000FF00,
|
||||
0x00FF0000,
|
||||
0xFF000000);
|
||||
inputGamma = 1.0f;
|
||||
outputGamma = 1.0f;
|
||||
} else if (mipFormat == gpu::Element::COLOR_BGRA_32) {
|
||||
compressionOptions.setFormat(nvtt::Format_RGBA);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPitchAlignment(4);
|
||||
compressionOptions.setPixelFormat(32,
|
||||
0x00FF0000,
|
||||
0x0000FF00,
|
||||
0x000000FF,
|
||||
0xFF000000);
|
||||
inputGamma = 1.0f;
|
||||
outputGamma = 1.0f;
|
||||
} else if (mipFormat == gpu::Element::COLOR_SRGBA_32) {
|
||||
compressionOptions.setFormat(nvtt::Format_RGBA);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPitchAlignment(4);
|
||||
compressionOptions.setPixelFormat(32,
|
||||
0x000000FF,
|
||||
0x0000FF00,
|
||||
0x00FF0000,
|
||||
0xFF000000);
|
||||
} else if (mipFormat == gpu::Element::COLOR_SBGRA_32) {
|
||||
compressionOptions.setFormat(nvtt::Format_RGBA);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPitchAlignment(4);
|
||||
compressionOptions.setPixelFormat(32,
|
||||
0x00FF0000,
|
||||
0x0000FF00,
|
||||
0x000000FF,
|
||||
0xFF000000);
|
||||
} else if (mipFormat == gpu::Element::COLOR_R_8) {
|
||||
compressionOptions.setFormat(nvtt::Format_RGB);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPitchAlignment(4);
|
||||
compressionOptions.setPixelFormat(8, 0, 0, 0);
|
||||
} else if (mipFormat == gpu::Element::VEC2NU8_XY) {
|
||||
inputOptions.setNormalMap(true);
|
||||
compressionOptions.setFormat(nvtt::Format_RGBA);
|
||||
compressionOptions.setPixelType(nvtt::PixelType_UnsignedNorm);
|
||||
compressionOptions.setPitchAlignment(4);
|
||||
compressionOptions.setPixelFormat(8, 8, 0, 0);
|
||||
} else {
|
||||
qCWarning(imagelogging) << "Unknown mip format";
|
||||
Q_UNREACHABLE();
|
||||
return;
|
||||
}
|
||||
|
||||
nvtt::OutputOptions outputOptions;
|
||||
outputOptions.setOutputHeader(false);
|
||||
OutputHandler outputHandler(texture, face);
|
||||
outputOptions.setOutputHandler(&outputHandler);
|
||||
MyErrorHandler errorHandler;
|
||||
outputOptions.setErrorHandler(&errorHandler);
|
||||
|
||||
SequentialTaskDispatcher dispatcher(abortProcessing);
|
||||
nvtt::Compressor compressor;
|
||||
compressor.setTaskDispatcher(&dispatcher);
|
||||
compressor.process(inputOptions, compressionOptions, outputOptions);
|
||||
|
||||
#else
|
||||
int numMips = 1 + (int)log2(std::max(width, height));
|
||||
Etc::RawImage *mipMaps = new Etc::RawImage[numMips];
|
||||
Etc::Image::Format etcFormat = Etc::Image::Format::DEFAULT;
|
||||
|
||||
if (mipFormat == gpu::Element::COLOR_COMPRESSED_ETC2_RGB) {
|
||||
etcFormat = Etc::Image::Format::RGB8;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_ETC2_SRGB) {
|
||||
etcFormat = Etc::Image::Format::SRGB8;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_ETC2_RGB_PUNCHTHROUGH_ALPHA) {
|
||||
etcFormat = Etc::Image::Format::RGB8A1;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_ETC2_SRGB_PUNCHTHROUGH_ALPHA) {
|
||||
etcFormat = Etc::Image::Format::SRGB8A1;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_ETC2_RGBA) {
|
||||
etcFormat = Etc::Image::Format::RGBA8;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_ETC2_SRGBA) {
|
||||
etcFormat = Etc::Image::Format::SRGBA8;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_EAC_RED) {
|
||||
etcFormat = Etc::Image::Format::R11;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_EAC_RED_SIGNED) {
|
||||
etcFormat = Etc::Image::Format::SIGNED_R11;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_EAC_XY) {
|
||||
etcFormat = Etc::Image::Format::RG11;
|
||||
} else if (mipFormat == gpu::Element::COLOR_COMPRESSED_EAC_XY_SIGNED) {
|
||||
etcFormat = Etc::Image::Format::SIGNED_RG11;
|
||||
} else {
|
||||
qCWarning(imagelogging) << "Unknown mip format";
|
||||
Q_UNREACHABLE();
|
||||
return;
|
||||
}
|
||||
|
||||
const Etc::ErrorMetric errorMetric = Etc::ErrorMetric::RGBA;
|
||||
const float effort = 1.0f;
|
||||
const int numEncodeThreads = 4;
|
||||
int encodingTime;
|
||||
const float MAX_COLOR = 255.0f;
|
||||
|
||||
std::vector<vec4> floatData;
|
||||
floatData.resize(width * height);
|
||||
for (int y = 0; y < height; y++) {
|
||||
QRgb *line = (QRgb *) localCopy.scanLine(y);
|
||||
for (int x = 0; x < width; x++) {
|
||||
QRgb &pixel = line[x];
|
||||
floatData[x + y * width] = vec4(qRed(pixel), qGreen(pixel), qBlue(pixel), qAlpha(pixel)) / MAX_COLOR;
|
||||
}
|
||||
}
|
||||
|
||||
// free up the memory afterward to avoid bloating the heap
|
||||
localCopy = QImage(); // QImage doesn't have a clear function, so override it with an empty one.
|
||||
|
||||
Etc::EncodeMipmaps(
|
||||
(float *)floatData.data(), width, height,
|
||||
etcFormat, errorMetric, effort,
|
||||
numEncodeThreads, numEncodeThreads,
|
||||
numMips, Etc::FILTER_WRAP_NONE,
|
||||
mipMaps, &encodingTime
|
||||
);
|
||||
for (int i = 0; i < numMips; i++) {
|
||||
if (mipMaps[i].paucEncodingBits.get()) {
|
||||
if (face >= 0) {
|
||||
texture->assignStoredMipFace(i, face, mipMaps[i].uiEncodingBitsBytes, static_cast<const gpu::Byte*>(mipMaps[i].paucEncodingBits.get()));
|
||||
} else {
|
||||
texture->assignStoredMip(i, mipMaps[i].uiEncodingBitsBytes, static_cast<const gpu::Byte*>(mipMaps[i].paucEncodingBits.get()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
delete[] mipMaps;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
void generateMips(gpu::Texture* texture, QImage&& image, const std::atomic<bool>& abortProcessing = false, int face = -1) {
|
||||
void generateMips(gpu::Texture* texture, QImage&& image, BackendTarget target, const std::atomic<bool>& abortProcessing = false, int face = -1) {
|
||||
#if CPU_MIPMAPS
|
||||
PROFILE_RANGE(resource_parse, "generateMips");
|
||||
|
||||
#ifndef USE_GLES
|
||||
if (image.format() == QIMAGE_HDR_FORMAT) {
|
||||
generateHDRMips(texture, std::move(image), abortProcessing, face);
|
||||
} else {
|
||||
generateLDRMips(texture, std::move(image), abortProcessing, face);
|
||||
if (target == BackendTarget::GLES32) {
|
||||
generateLDRMips(texture, std::move(image), target, abortProcessing, face);
|
||||
} else {
|
||||
if (image.format() == hdrFormatForTarget(target)) {
|
||||
generateHDRMips(texture, std::move(image), target, abortProcessing, face);
|
||||
} else {
|
||||
generateLDRMips(texture, std::move(image), target, abortProcessing, face);
|
||||
}
|
||||
}
|
||||
#else
|
||||
generateLDRMips(texture, std::move(image), abortProcessing, face);
|
||||
#endif
|
||||
#else
|
||||
texture->setAutoGenerateMips(true);
|
||||
#endif
|
||||
|
@ -750,9 +747,9 @@ void processTextureAlpha(const QImage& srcImage, bool& validAlpha, bool& alphaAs
|
|||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
|
||||
bool isStrict, const std::atomic<bool>& abortProcessing) {
|
||||
BackendTarget target, bool isStrict, const std::atomic<bool>& abortProcessing) {
|
||||
PROFILE_RANGE(resource_parse, "process2DTextureColorFromImage");
|
||||
QImage image = processSourceImage(std::move(srcImage), false);
|
||||
QImage image = processSourceImage(std::move(srcImage), false, target);
|
||||
|
||||
bool validAlpha = image.hasAlphaChannel();
|
||||
bool alphaAsMask = false;
|
||||
|
@ -771,23 +768,26 @@ gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcIma
|
|||
gpu::Element formatMip;
|
||||
gpu::Element formatGPU;
|
||||
if (compress) {
|
||||
if (validAlpha) {
|
||||
// NOTE: This disables BC1a compression because it was producing odd artifacts on text textures
|
||||
// for the tutorial. Instead we use BC3 (which is larger but doesn't produce the same artifacts).
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_SRGBA;
|
||||
if (target == BackendTarget::GLES32) {
|
||||
// GLES does not support GL_BGRA
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_ETC2_SRGBA;
|
||||
formatMip = formatGPU;
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_SRGB;
|
||||
if (validAlpha) {
|
||||
// NOTE: This disables BC1a compression because it was producing odd artifacts on text textures
|
||||
// for the tutorial. Instead we use BC3 (which is larger but doesn't produce the same artifacts).
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_SRGBA;
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_SRGB;
|
||||
}
|
||||
formatMip = formatGPU;
|
||||
}
|
||||
formatMip = formatGPU;
|
||||
} else {
|
||||
#ifdef USE_GLES
|
||||
// GLES does not support GL_BGRA
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_ETC2_SRGBA;
|
||||
formatMip = formatGPU;
|
||||
#else
|
||||
formatGPU = gpu::Element::COLOR_SRGBA_32;
|
||||
formatMip = gpu::Element::COLOR_SBGRA_32;
|
||||
#endif
|
||||
if (target == BackendTarget::GLES32) {
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_SRGBA_32;
|
||||
formatMip = gpu::Element::COLOR_SBGRA_32;
|
||||
}
|
||||
}
|
||||
|
||||
if (isStrict) {
|
||||
|
@ -806,7 +806,7 @@ gpu::TexturePointer TextureUsage::process2DTextureColorFromImage(QImage&& srcIma
|
|||
theTexture->setUsage(usage.build());
|
||||
theTexture->setStoredMipFormat(formatMip);
|
||||
theTexture->assignStoredMip(0, image.byteCount(), image.constBits());
|
||||
generateMips(theTexture.get(), std::move(image), abortProcessing);
|
||||
generateMips(theTexture.get(), std::move(image), target, abortProcessing);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
|
@ -887,10 +887,10 @@ QImage processBumpMap(QImage&& image) {
|
|||
return result;
|
||||
}
|
||||
gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, bool isBumpMap,
|
||||
bool compress, BackendTarget target, bool isBumpMap,
|
||||
const std::atomic<bool>& abortProcessing) {
|
||||
PROFILE_RANGE(resource_parse, "process2DTextureNormalMapFromImage");
|
||||
QImage image = processSourceImage(std::move(srcImage), false);
|
||||
QImage image = processSourceImage(std::move(srcImage), false, target);
|
||||
|
||||
if (isBumpMap) {
|
||||
image = processBumpMap(std::move(image));
|
||||
|
@ -906,13 +906,13 @@ gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& sr
|
|||
gpu::Element formatMip;
|
||||
gpu::Element formatGPU;
|
||||
if (compress) {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_XY;
|
||||
if (target == BackendTarget::GLES32) {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_EAC_XY;
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_XY;
|
||||
}
|
||||
} else {
|
||||
#ifdef USE_GLES
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_EAC_XY;
|
||||
#else
|
||||
formatGPU = gpu::Element::VEC2NU8_XY;
|
||||
#endif
|
||||
}
|
||||
formatMip = formatGPU;
|
||||
|
||||
|
@ -920,17 +920,17 @@ gpu::TexturePointer TextureUsage::process2DTextureNormalMapFromImage(QImage&& sr
|
|||
theTexture->setSource(srcImageName);
|
||||
theTexture->setStoredMipFormat(formatMip);
|
||||
theTexture->assignStoredMip(0, image.byteCount(), image.constBits());
|
||||
generateMips(theTexture.get(), std::move(image), abortProcessing);
|
||||
generateMips(theTexture.get(), std::move(image), target, abortProcessing);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, bool isInvertedPixels,
|
||||
bool compress, BackendTarget target, bool isInvertedPixels,
|
||||
const std::atomic<bool>& abortProcessing) {
|
||||
PROFILE_RANGE(resource_parse, "process2DTextureGrayscaleFromImage");
|
||||
QImage image = processSourceImage(std::move(srcImage), false);
|
||||
QImage image = processSourceImage(std::move(srcImage), false, target);
|
||||
|
||||
if (image.format() != QImage::Format_ARGB32) {
|
||||
image = image.convertToFormat(QImage::Format_ARGB32);
|
||||
|
@ -946,13 +946,13 @@ gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(QImage&& sr
|
|||
gpu::Element formatMip;
|
||||
gpu::Element formatGPU;
|
||||
if (compress) {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_RED;
|
||||
if (target == BackendTarget::GLES32) {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_EAC_RED;
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_RED;
|
||||
}
|
||||
} else {
|
||||
#ifdef USE_GLES
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_EAC_RED;
|
||||
#else
|
||||
formatGPU = gpu::Element::COLOR_R_8;
|
||||
#endif
|
||||
}
|
||||
formatMip = formatGPU;
|
||||
|
||||
|
@ -960,7 +960,7 @@ gpu::TexturePointer TextureUsage::process2DTextureGrayscaleFromImage(QImage&& sr
|
|||
theTexture->setSource(srcImageName);
|
||||
theTexture->setStoredMipFormat(formatMip);
|
||||
theTexture->assignStoredMip(0, image.byteCount(), image.constBits());
|
||||
generateMips(theTexture.get(), std::move(image), abortProcessing);
|
||||
generateMips(theTexture.get(), std::move(image), target, abortProcessing);
|
||||
}
|
||||
|
||||
return theTexture;
|
||||
|
@ -1233,12 +1233,12 @@ const int CubeLayout::NUM_CUBEMAP_LAYOUTS = sizeof(CubeLayout::CUBEMAP_LAYOUTS)
|
|||
|
||||
//#define DEBUG_COLOR_PACKING
|
||||
|
||||
QImage convertToHDRFormat(QImage&& srcImage, gpu::Element format) {
|
||||
QImage convertToHDRFormat(QImage&& srcImage, gpu::Element format, BackendTarget target) {
|
||||
// Take a local copy to force move construction
|
||||
// https://github.com/isocpp/CppCoreGuidelines/blob/master/CppCoreGuidelines.md#f18-for-consume-parameters-pass-by-x-and-stdmove-the-parameter
|
||||
QImage localCopy = std::move(srcImage);
|
||||
|
||||
QImage hdrImage(localCopy.width(), localCopy.height(), (QImage::Format)QIMAGE_HDR_FORMAT);
|
||||
QImage hdrImage(localCopy.width(), localCopy.height(), hdrFormatForTarget(target));
|
||||
std::function<uint32(const glm::vec3&)> packFunc;
|
||||
#ifdef DEBUG_COLOR_PACKING
|
||||
std::function<glm::vec3(uint32)> unpackFunc;
|
||||
|
@ -1292,7 +1292,7 @@ QImage convertToHDRFormat(QImage&& srcImage, gpu::Element format) {
|
|||
}
|
||||
|
||||
gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName,
|
||||
bool compress, bool generateIrradiance,
|
||||
bool compress, BackendTarget target, bool generateIrradiance,
|
||||
const std::atomic<bool>& abortProcessing) {
|
||||
PROFILE_RANGE(resource_parse, "processCubeTextureColorFromImage");
|
||||
|
||||
|
@ -1308,27 +1308,28 @@ gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcI
|
|||
|
||||
gpu::TexturePointer theTexture = nullptr;
|
||||
|
||||
QImage image = processSourceImage(std::move(localCopy), true);
|
||||
QImage image = processSourceImage(std::move(localCopy), true, target);
|
||||
|
||||
if (image.format() != QIMAGE_HDR_FORMAT) {
|
||||
#ifndef USE_GLES
|
||||
image = convertToHDRFormat(std::move(image), HDR_FORMAT);
|
||||
#else
|
||||
image = image.convertToFormat(QImage::Format_RGB32);
|
||||
#endif
|
||||
if (image.format() != hdrFormatForTarget(target)) {
|
||||
if (target == BackendTarget::GLES32) {
|
||||
image = image.convertToFormat(QImage::Format_RGB32);
|
||||
} else {
|
||||
image = convertToHDRFormat(std::move(image), HDR_FORMAT, target);
|
||||
}
|
||||
}
|
||||
|
||||
gpu::Element formatMip;
|
||||
gpu::Element formatGPU;
|
||||
if (compress) {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_HDR_RGB;
|
||||
if (target == BackendTarget::GLES32) {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_ETC2_SRGB;
|
||||
} else {
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_BCX_HDR_RGB;
|
||||
}
|
||||
} else {
|
||||
#ifdef USE_GLES
|
||||
formatGPU = gpu::Element::COLOR_COMPRESSED_ETC2_SRGB;
|
||||
#else
|
||||
formatGPU = HDR_FORMAT;
|
||||
#endif
|
||||
}
|
||||
|
||||
formatMip = formatGPU;
|
||||
|
||||
// Find the layout of the cubemap in the 2D image
|
||||
|
@ -1378,11 +1379,12 @@ gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcI
|
|||
PROFILE_RANGE(resource_parse, "generateIrradiance");
|
||||
gpu::Element irradianceFormat;
|
||||
// TODO: we could locally compress the irradiance texture on Android, but we don't need to
|
||||
#ifndef USE_GLES
|
||||
irradianceFormat = HDR_FORMAT;
|
||||
#else
|
||||
irradianceFormat = gpu::Element::COLOR_SRGBA_32;
|
||||
#endif
|
||||
if (target == BackendTarget::GLES32) {
|
||||
irradianceFormat = gpu::Element::COLOR_SRGBA_32;
|
||||
} else {
|
||||
irradianceFormat = HDR_FORMAT;
|
||||
}
|
||||
|
||||
auto irradianceTexture = gpu::Texture::createCube(irradianceFormat, faces[0].width(), gpu::Texture::MAX_NUM_MIPS, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR, gpu::Sampler::WRAP_CLAMP));
|
||||
irradianceTexture->setSource(srcImageName);
|
||||
irradianceTexture->setStoredMipFormat(irradianceFormat);
|
||||
|
@ -1390,14 +1392,14 @@ gpu::TexturePointer TextureUsage::processCubeTextureColorFromImage(QImage&& srcI
|
|||
irradianceTexture->assignStoredMipFace(0, face, faces[face].byteCount(), faces[face].constBits());
|
||||
}
|
||||
|
||||
irradianceTexture->generateIrradiance();
|
||||
irradianceTexture->generateIrradiance(target);
|
||||
|
||||
auto irradiance = irradianceTexture->getIrradiance();
|
||||
theTexture->overrideIrradiance(irradiance);
|
||||
}
|
||||
|
||||
for (uint8 face = 0; face < faces.size(); ++face) {
|
||||
generateMips(theTexture.get(), std::move(faces[face]), abortProcessing, face);
|
||||
generateMips(theTexture.get(), std::move(faces[face]), target, abortProcessing, face);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -41,42 +41,41 @@ enum Type {
|
|||
UNUSED_TEXTURE
|
||||
};
|
||||
|
||||
using TextureLoader = std::function<gpu::TexturePointer(QImage&&, const std::string&, bool, const std::atomic<bool>&)>;
|
||||
using TextureLoader = std::function<gpu::TexturePointer(QImage&&, const std::string&, bool, gpu::BackendTarget, const std::atomic<bool>&)>;
|
||||
TextureLoader getTextureLoaderForType(Type type, const QVariantMap& options = QVariantMap());
|
||||
|
||||
gpu::TexturePointer create2DTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createStrict2DTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createAlbedoTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createEmissiveTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createNormalTextureFromNormalImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createNormalTextureFromBumpImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createRoughnessTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createRoughnessTextureFromGlossImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createMetallicTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createCubeTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createCubeTextureFromImageWithoutIrradiance(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer createLightmapTextureFromImage(QImage&& image, const std::string& srcImageName,
|
||||
bool compress, const std::atomic<bool>& abortProcessing);
|
||||
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer process2DTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
|
||||
bool isStrict, const std::atomic<bool>& abortProcessing);
|
||||
gpu::BackendTarget target, bool isStrict, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer process2DTextureNormalMapFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
|
||||
bool isBumpMap, const std::atomic<bool>& abortProcessing);
|
||||
gpu::BackendTarget target, bool isBumpMap, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer process2DTextureGrayscaleFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
|
||||
bool isInvertedPixels, const std::atomic<bool>& abortProcessing);
|
||||
gpu::BackendTarget target, bool isInvertedPixels, const std::atomic<bool>& abortProcessing);
|
||||
gpu::TexturePointer processCubeTextureColorFromImage(QImage&& srcImage, const std::string& srcImageName, bool compress,
|
||||
bool generateIrradiance, const std::atomic<bool>& abortProcessing);
|
||||
gpu::BackendTarget target, bool generateIrradiance, const std::atomic<bool>& abortProcessing);
|
||||
|
||||
} // namespace TextureUsage
|
||||
|
||||
|
@ -84,7 +83,7 @@ const QStringList getSupportedFormats();
|
|||
|
||||
gpu::TexturePointer processImage(std::shared_ptr<QIODevice> content, const std::string& url,
|
||||
int maxNumPixels, TextureUsage::Type textureType,
|
||||
bool compress = false, const std::atomic<bool>& abortProcessing = false);
|
||||
bool compress, gpu::BackendTarget target, const std::atomic<bool>& abortProcessing = false);
|
||||
|
||||
} // namespace image
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
set(TARGET_NAME model-networking)
|
||||
setup_hifi_library()
|
||||
link_hifi_libraries(shared networking graphics fbx ktx image)
|
||||
link_hifi_libraries(shared networking graphics fbx ktx image gl)
|
||||
include_hifi_library_headers(gpu)
|
||||
|
|
|
@ -31,6 +31,7 @@
|
|||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/random.hpp>
|
||||
|
||||
#include <gl/GLHelpers.h>
|
||||
#include <gpu/Batch.h>
|
||||
|
||||
#include <image/Image.h>
|
||||
|
@ -271,6 +272,20 @@ gpu::TexturePointer getFallbackTextureForType(image::TextureUsage::Type type) {
|
|||
return result;
|
||||
}
|
||||
|
||||
gpu::BackendTarget getBackendTarget() {
|
||||
#if defined(USE_GLES)
|
||||
gpu::BackendTarget target = gpu::BackendTarget::GLES32;
|
||||
#elif defined(Q_OS_MAC)
|
||||
gpu::BackendTarget target = gpu::BackendTarget::GL41;
|
||||
#else
|
||||
gpu::BackendTarget target = gpu::BackendTarget::GL45;
|
||||
if (gl::disableGl45()) {
|
||||
target = gpu::BackendTarget::GL41;
|
||||
}
|
||||
#endif
|
||||
return target;
|
||||
}
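// As the later hunks in this change show, the target returned here is passed straight through to
// image::processImage and the TextureUsage loaders, so the GLES-versus-desktop decision that was
// previously made at compile time with #ifdef USE_GLES becomes a single runtime lookup shared by
// every texture load.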
|
||||
|
||||
/// Returns a texture version of an image file
|
||||
gpu::TexturePointer TextureCache::getImageTexture(const QString& path, image::TextureUsage::Type type, QVariantMap options) {
|
||||
QImage image = QImage(path);
|
||||
|
@ -279,7 +294,15 @@ gpu::TexturePointer TextureCache::getImageTexture(const QString& path, image::Te
|
|||
return nullptr;
|
||||
}
|
||||
auto loader = image::TextureUsage::getTextureLoaderForType(type, options);
|
||||
return gpu::TexturePointer(loader(std::move(image), path.toStdString(), false, false));
|
||||
|
||||
#ifdef USE_GLES
|
||||
constexpr bool shouldCompress = true;
|
||||
#else
|
||||
constexpr bool shouldCompress = false;
|
||||
#endif
|
||||
auto target = getBackendTarget();
|
||||
|
||||
return gpu::TexturePointer(loader(std::move(image), path.toStdString(), shouldCompress, target, false));
|
||||
}
|
||||
|
||||
QSharedPointer<Resource> TextureCache::createResource(const QUrl& url, const QSharedPointer<Resource>& fallback,
|
||||
|
@ -1160,7 +1183,14 @@ void ImageReader::read() {
|
|||
|
||||
// IMPORTANT: _content is empty past this point
|
||||
auto buffer = std::shared_ptr<QIODevice>((QIODevice*)new OwningBuffer(std::move(_content)));
|
||||
texture = image::processImage(std::move(buffer), _url.toString().toStdString(), _maxNumPixels, networkTexture->getTextureType());
|
||||
|
||||
#ifdef USE_GLES
|
||||
constexpr bool shouldCompress = true;
|
||||
#else
|
||||
constexpr bool shouldCompress = false;
|
||||
#endif
|
||||
auto target = getBackendTarget();
|
||||
texture = image::processImage(std::move(buffer), _url.toString().toStdString(), _maxNumPixels, networkTexture->getTextureType(), shouldCompress, target);
|
||||
|
||||
if (!texture) {
|
||||
qCWarning(modelnetworking) << "Could not process:" << _url;
|
||||
|
|
|
@ -39,191 +39,3 @@ void CongestionControl::setPacketSendPeriod(double newSendPeriod) {
|
|||
_packetSendPeriod = newSendPeriod;
|
||||
}
|
||||
}
|
||||
|
||||
DefaultCC::DefaultCC() :
|
||||
_lastDecreaseMaxSeq(SequenceNumber {SequenceNumber::MAX })
|
||||
{
|
||||
_mss = udt::MAX_PACKET_SIZE_WITH_UDP_HEADER;
|
||||
|
||||
_congestionWindowSize = 16;
|
||||
setPacketSendPeriod(1.0);
|
||||
}
|
||||
|
||||
bool DefaultCC::onACK(SequenceNumber ackNum, p_high_resolution_clock::time_point receiveTime) {
|
||||
double increase = 0;
|
||||
|
||||
// Note from UDT original code:
|
||||
// The minimum increase parameter is increased from "1.0 / _mss" to 0.01
|
||||
// because the original was too small and caused the sending rate to stay at a low level
// for a long time.
|
||||
const double minimumIncrease = 0.01;
|
||||
|
||||
// we will only adjust once per sync interval so check that it has been at least that long now
|
||||
auto now = p_high_resolution_clock::now();
|
||||
if (duration_cast<microseconds>(now - _lastRCTime).count() < synInterval()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// our last rate increase time is now
|
||||
_lastRCTime = now;
|
||||
|
||||
if (_slowStart) {
|
||||
// we are in slow start phase - increase the congestion window size by the number of packets just ACKed
|
||||
_congestionWindowSize += seqlen(_lastACK, ackNum);
|
||||
|
||||
// update the last ACK
|
||||
_lastACK = ackNum;
|
||||
|
||||
// check if we can get out of slow start (is our new congestion window size bigger than the max)
|
||||
if (_congestionWindowSize > _maxCongestionWindowSize) {
|
||||
_slowStart = false;
|
||||
|
||||
if (_receiveRate > 0) {
|
||||
// if we have a valid receive rate we set the send period to whatever the receive rate dictates
|
||||
setPacketSendPeriod(USECS_PER_SECOND / _receiveRate);
|
||||
} else {
|
||||
// no valid receive rate, packet send period is dictated by estimated RTT and current congestion window size
|
||||
setPacketSendPeriod((_rtt + synInterval()) / _congestionWindowSize);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// not in slow start - window size should be arrival rate * (RTT + SYN) + 16
|
||||
_congestionWindowSize = _receiveRate / USECS_PER_SECOND * (_rtt + synInterval()) + 16;
|
||||
}
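// Illustrative arithmetic (assumed numbers, not taken from this change): the intent of the formula
// above is window ~= arrival rate * (RTT + SYN) + 16. With a reported receive rate of 10,000
// packets/s, an RTT of 30,000 us and a SYN interval of 10,000 us, that gives
// 10,000 / 1,000,000 * (30,000 + 10,000) + 16 = 416 packets, roughly the bandwidth-delay product
// plus a small fixed cushion. (Exact integer/floating-point behaviour depends on how
// USECS_PER_SECOND is defined, which is outside this diff.)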
|
||||
|
||||
// during slow start we perform no rate increases
|
||||
if (_slowStart) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// if loss has happened since the last rate increase we do not perform another increase
|
||||
if (_loss) {
|
||||
_loss = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
double capacitySpeedDelta = (_bandwidth - USECS_PER_SECOND / _packetSendPeriod);
|
||||
|
||||
// UDT uses what they call DAIMD - additive increase multiplicative decrease with decreasing increases
|
||||
// This factor is a protocol parameter that is part of the DAIMD algorithim
|
||||
static const int AIMD_DECREASING_INCREASE_FACTOR = 9;
|
||||
|
||||
if ((_packetSendPeriod > _lastDecreasePeriod) && ((_bandwidth / AIMD_DECREASING_INCREASE_FACTOR) < capacitySpeedDelta)) {
|
||||
capacitySpeedDelta = _bandwidth / AIMD_DECREASING_INCREASE_FACTOR;
|
||||
}
|
||||
|
||||
if (capacitySpeedDelta <= 0) {
|
||||
increase = minimumIncrease;
|
||||
} else {
|
||||
// use UDT's DAIMD algorithm to figure out what the send period increase factor should be
|
||||
|
||||
// inc = max(10 ^ ceil(log10(B * MSS * 8)) * Beta / MSS, minimumIncrease)
|
||||
// B = estimated link capacity
|
||||
// Beta = 1.5 * 10^(-6)
|
||||
|
||||
static const double BETA = 0.0000015;
|
||||
static const double BITS_PER_BYTE = 8.0;
|
||||
|
||||
increase = pow(10.0, ceil(log10(capacitySpeedDelta * _mss * BITS_PER_BYTE))) * BETA / _mss;
|
||||
|
||||
if (increase < minimumIncrease) {
|
||||
increase = minimumIncrease;
|
||||
}
|
||||
}
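// Illustrative arithmetic (assumed numbers, not taken from this change): with a spare capacity of
// capacitySpeedDelta = 1,000 packets/s and _mss = 1,500 bytes, B * MSS * 8 = 12,000,000, so
// log10 ~= 7.08 and ceil(...) = 8, giving increase = 10^8 * 0.0000015 / 1500 = 0.1 packets per SYN
// interval. As the send rate approaches the estimated bandwidth the exponent drops, so the
// increments shrink, which is the "decreasing increases" part of DAIMD.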
|
||||
|
||||
setPacketSendPeriod((_packetSendPeriod * synInterval()) / (_packetSendPeriod * increase + synInterval()));
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void DefaultCC::onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {
|
||||
// stop the slow start if we haven't yet
|
||||
if (_slowStart) {
|
||||
stopSlowStart();
|
||||
|
||||
// if the change to send rate was driven by a known receive rate, then we don't continue with the decrease
|
||||
if (_receiveRate > 0) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
_loss = true;
|
||||
++_nakCount;
|
||||
|
||||
static const double INTER_PACKET_ARRIVAL_INCREASE = 1.125;
|
||||
static const int MAX_DECREASES_PER_CONGESTION_EPOCH = 5;
|
||||
|
||||
// check if this NAK starts a new congestion period - this will be the case if the
|
||||
// NAK received occurred for a packet sent after the last decrease
|
||||
if (rangeStart > _lastDecreaseMaxSeq) {
|
||||
_delayedDecrease = (rangeStart == rangeEnd);
|
||||
|
||||
_lastDecreasePeriod = _packetSendPeriod;
|
||||
|
||||
if (!_delayedDecrease) {
|
||||
setPacketSendPeriod(ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE));
|
||||
} else {
|
||||
_loss = false;
|
||||
}
|
||||
|
||||
// use EWMA to update the average number of NAKs per congestion
|
||||
static const double NAK_EWMA_ALPHA = 0.125;
|
||||
_avgNAKNum = (int)ceil(_avgNAKNum * (1 - NAK_EWMA_ALPHA) + _nakCount * NAK_EWMA_ALPHA);
|
||||
|
||||
// update the count of NAKs and count of decreases in this interval
|
||||
_nakCount = 1;
|
||||
_decreaseCount = 1;
|
||||
|
||||
_lastDecreaseMaxSeq = _sendCurrSeqNum;
|
||||
|
||||
if (_avgNAKNum < 1) {
|
||||
_randomDecreaseThreshold = 1;
|
||||
} else {
|
||||
// avoid synchronous rate decrease across connections using randomization
|
||||
std::random_device rd;
|
||||
std::mt19937 generator(rd());
|
||||
std::uniform_int_distribution<> distribution(1, std::max(1, _avgNAKNum));
|
||||
|
||||
_randomDecreaseThreshold = distribution(generator);
|
||||
}
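// Clarifying note (an inference about intent, consistent with the comment above): if _avgNAKNum is 4,
// the threshold is drawn uniformly from 1..4, so connections sharing one congested link are unlikely
// to cut their send rate on exactly the same loss event, which breaks up synchronized rate
// oscillations.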
|
||||
} else if (_delayedDecrease && _nakCount == 2) {
|
||||
setPacketSendPeriod(ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE));
|
||||
} else if ((_decreaseCount++ < MAX_DECREASES_PER_CONGESTION_EPOCH) && ((_nakCount % _randomDecreaseThreshold) == 0)) {
|
||||
// there have been fewer than MAX_DECREASES_PER_CONGESTION_EPOCH AND this NAK matches the random count at which we
|
||||
// decided we would decrease the packet send period
|
||||
|
||||
setPacketSendPeriod(ceil(_packetSendPeriod * INTER_PACKET_ARRIVAL_INCREASE));
|
||||
_lastDecreaseMaxSeq = _sendCurrSeqNum;
|
||||
}
|
||||
}
|
||||
|
||||
void DefaultCC::onTimeout() {
|
||||
if (_slowStart) {
|
||||
stopSlowStart();
|
||||
} else {
|
||||
// UDT used to do the following on timeout if not in slow start - we should check if it could be helpful
|
||||
_lastDecreasePeriod = _packetSendPeriod;
|
||||
_packetSendPeriod = ceil(_packetSendPeriod * 2);
|
||||
|
||||
// this seems odd - the last ack they were setting _lastDecreaseMaxSeq to only applies to slow start
|
||||
_lastDecreaseMaxSeq = _lastACK;
|
||||
}
|
||||
}
|
||||
|
||||
void DefaultCC::stopSlowStart() {
|
||||
_slowStart = false;
|
||||
|
||||
if (_receiveRate > 0) {
|
||||
// Set the sending rate to the receiving rate.
|
||||
setPacketSendPeriod(USECS_PER_SECOND / _receiveRate);
|
||||
} else {
|
||||
// If no receiving rate is observed, we have to compute the sending
|
||||
// rate according to the current window size, and decrease it
|
||||
// using the method below.
|
||||
setPacketSendPeriod(double(_congestionWindowSize) / (_rtt + synInterval()));
|
||||
}
|
||||
}
|
||||
|
||||
void DefaultCC::setInitialSendSequenceNumber(udt::SequenceNumber seqNum) {
|
||||
_lastACK = _lastDecreaseMaxSeq = seqNum - 1;
|
||||
}
|
||||
|
|
|
@ -32,11 +32,9 @@ class CongestionControl {
|
|||
friend class Connection;
|
||||
public:
|
||||
|
||||
CongestionControl() {};
|
||||
CongestionControl(int synInterval) : _synInterval(synInterval) {}
|
||||
virtual ~CongestionControl() {}
|
||||
|
||||
int synInterval() const { return _synInterval; }
|
||||
CongestionControl() = default;
|
||||
virtual ~CongestionControl() = default;
|
||||
|
||||
void setMaxBandwidth(int maxBandwidth);
|
||||
|
||||
virtual void init() {}
|
||||
|
@ -44,50 +42,28 @@ public:
|
|||
// return value specifies if connection should perform a fast re-transmit of ACK + 1 (used in TCP style congestion control)
|
||||
virtual bool onACK(SequenceNumber ackNum, p_high_resolution_clock::time_point receiveTime) { return false; }
|
||||
|
||||
virtual void onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) {}
|
||||
virtual void onTimeout() {}
|
||||
|
||||
virtual bool shouldNAK() { return true; }
|
||||
virtual bool shouldACK2() { return true; }
|
||||
virtual bool shouldProbe() { return true; }
|
||||
|
||||
virtual void onPacketSent(int wireSize, SequenceNumber seqNum, p_high_resolution_clock::time_point timePoint) {}
|
||||
|
||||
virtual int estimatedTimeout() const = 0;
|
||||
protected:
|
||||
void setAckInterval(int ackInterval) { _ackInterval = ackInterval; }
|
||||
void setRTO(int rto) { _userDefinedRTO = true; _rto = rto; }
|
||||
|
||||
void setMSS(int mss) { _mss = mss; }
|
||||
void setMaxCongestionWindowSize(int window) { _maxCongestionWindowSize = window; }
|
||||
void setBandwidth(int bandwidth) { _bandwidth = bandwidth; }
|
||||
virtual void setInitialSendSequenceNumber(SequenceNumber seqNum) = 0;
|
||||
void setSendCurrentSequenceNumber(SequenceNumber seqNum) { _sendCurrSeqNum = seqNum; }
|
||||
void setReceiveRate(int rate) { _receiveRate = rate; }
|
||||
void setRTT(int rtt) { _rtt = rtt; }
|
||||
void setPacketSendPeriod(double newSendPeriod); // call this internally to ensure send period doesn't go past max bandwidth
|
||||
|
||||
double _packetSendPeriod { 1.0 }; // Packet sending period, in microseconds
|
||||
int _congestionWindowSize { 16 }; // Congestion window size, in packets
|
||||
|
||||
int _bandwidth { 0 }; // estimated bandwidth, packets per second
|
||||
|
||||
std::atomic<int> _maxBandwidth { -1 }; // Maximum desired bandwidth, bits per second
|
||||
int _maxCongestionWindowSize { 0 }; // maximum cwnd size, in packets
|
||||
|
||||
int _mss { 0 }; // Maximum Packet Size, including all packet headers
|
||||
SequenceNumber _sendCurrSeqNum; // current maximum seq num sent out
|
||||
int _receiveRate { 0 }; // packet arrive rate at receiver side, packets per second
|
||||
int _rtt { 0 }; // current estimated RTT, microsecond
|
||||
|
||||
private:
|
||||
CongestionControl(const CongestionControl& other) = delete;
|
||||
CongestionControl& operator=(const CongestionControl& other) = delete;
|
||||
|
||||
int _ackInterval { 0 }; // How many packets to send one ACK, in packets
|
||||
int _lightACKInterval { 64 }; // How many packets to send one light ACK, in packets
|
||||
|
||||
int _synInterval { DEFAULT_SYN_INTERVAL };
|
||||
|
||||
bool _userDefinedRTO { false }; // if the RTO value is defined by users
|
||||
int _rto { -1 }; // RTO value, microseconds
|
||||
};
|
||||
|
||||
|
||||
|
@ -95,8 +71,6 @@ class CongestionControlVirtualFactory {
|
|||
public:
|
||||
virtual ~CongestionControlVirtualFactory() {}
|
||||
|
||||
static int synInterval() { return DEFAULT_SYN_INTERVAL; }
|
||||
|
||||
virtual std::unique_ptr<CongestionControl> create() = 0;
|
||||
};
|
||||
|
||||
|
@ -105,35 +79,6 @@ public:
|
|||
virtual ~CongestionControlFactory() {}
|
||||
virtual std::unique_ptr<CongestionControl> create() override { return std::unique_ptr<T>(new T()); }
|
||||
};
|
||||
|
||||
class DefaultCC: public CongestionControl {
|
||||
public:
|
||||
DefaultCC();
|
||||
|
||||
public:
|
||||
virtual bool onACK(SequenceNumber ackNum, p_high_resolution_clock::time_point receiveTime) override;
|
||||
virtual void onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) override;
|
||||
virtual void onTimeout() override;
|
||||
|
||||
protected:
|
||||
virtual void setInitialSendSequenceNumber(SequenceNumber seqNum) override;
|
||||
|
||||
private:
|
||||
void stopSlowStart(); // stops the slow start on loss or timeout
|
||||
|
||||
p_high_resolution_clock::time_point _lastRCTime = p_high_resolution_clock::now(); // last rate increase time
|
||||
|
||||
bool _slowStart { true }; // if in slow start phase
|
||||
SequenceNumber _lastACK; // last ACKed sequence number from previous
|
||||
bool _loss { false }; // if loss happened since last rate increase
|
||||
SequenceNumber _lastDecreaseMaxSeq; // max pkt seq num sent out when last decrease happened
|
||||
double _lastDecreasePeriod { 1 }; // value of _packetSendPeriod when last decrease happened
|
||||
int _nakCount { 0 }; // number of NAKs in congestion epoch
|
||||
int _randomDecreaseThreshold { 1 }; // random threshold on decrease by number of loss events
|
||||
int _avgNAKNum { 0 }; // average number of NAKs per congestion
|
||||
int _decreaseCount { 0 }; // number of decreases in a congestion epoch
|
||||
bool _delayedDecrease { false };
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -39,28 +39,12 @@ Connection::Connection(Socket* parentSocket, HifiSockAddr destination, std::uniq
|
|||
|
||||
Q_ASSERT_X(_congestionControl, "Connection::Connection", "Must be called with a valid CongestionControl object");
|
||||
_congestionControl->init();
|
||||
|
||||
// setup default SYN, RTT and RTT Variance based on the SYN interval in CongestionControl object
|
||||
_synInterval = _congestionControl->synInterval();
|
||||
|
||||
resetRTT();
|
||||
|
||||
// set the initial RTT and flow window size on congestion control object
|
||||
_congestionControl->setRTT(_rtt);
|
||||
_congestionControl->setMaxCongestionWindowSize(_flowWindowSize);
|
||||
|
||||
// Setup packets
|
||||
static const int ACK_PACKET_PAYLOAD_BYTES = sizeof(_lastSentACK) + sizeof(_currentACKSubSequenceNumber)
    + sizeof(_rtt) + sizeof(int32_t) + sizeof(int32_t) + sizeof(int32_t);
|
||||
static const int LIGHT_ACK_PACKET_PAYLOAD_BYTES = sizeof(SequenceNumber);
|
||||
static const int ACK2_PAYLOAD_BYTES = sizeof(SequenceNumber);
|
||||
static const int NAK_PACKET_PAYLOAD_BYTES = 2 * sizeof(SequenceNumber);
|
||||
static const int ACK_PACKET_PAYLOAD_BYTES = sizeof(SequenceNumber);
|
||||
static const int HANDSHAKE_ACK_PAYLOAD_BYTES = sizeof(SequenceNumber);
|
||||
|
||||
_ackPacket = ControlPacket::create(ControlPacket::ACK, ACK_PACKET_PAYLOAD_BYTES);
|
||||
_lightACKPacket = ControlPacket::create(ControlPacket::LightACK, LIGHT_ACK_PACKET_PAYLOAD_BYTES);
|
||||
_ack2Packet = ControlPacket::create(ControlPacket::ACK2, ACK2_PAYLOAD_BYTES);
|
||||
_lossReport = ControlPacket::create(ControlPacket::NAK, NAK_PACKET_PAYLOAD_BYTES);
|
||||
_handshakeACK = ControlPacket::create(ControlPacket::HandshakeACK, HANDSHAKE_ACK_PAYLOAD_BYTES);
|
||||
|
||||
|
||||
|
@ -101,11 +85,6 @@ void Connection::stopSendQueue() {
|
|||
}
|
||||
}
|
||||
|
||||
void Connection::resetRTT() {
|
||||
_rtt = _synInterval * 10;
|
||||
_rttVariance = _rtt / 2;
|
||||
}
|
||||
|
||||
void Connection::setMaxBandwidth(int maxBandwidth) {
|
||||
_congestionControl->setMaxBandwidth(maxBandwidth);
|
||||
}
|
||||
|
@ -135,15 +114,12 @@ SendQueue& Connection::getSendQueue() {
|
|||
QObject::connect(_sendQueue.get(), &SendQueue::packetRetransmitted, this, &Connection::recordRetransmission);
|
||||
QObject::connect(_sendQueue.get(), &SendQueue::queueInactive, this, &Connection::queueInactive);
|
||||
QObject::connect(_sendQueue.get(), &SendQueue::timeout, this, &Connection::queueTimeout);
|
||||
QObject::connect(_sendQueue.get(), &SendQueue::shortCircuitLoss, this, &Connection::queueShortCircuitLoss);
|
||||
|
||||
|
||||
// set defaults on the send queue from our congestion control object and estimatedTimeout()
|
||||
_sendQueue->setPacketSendPeriod(_congestionControl->_packetSendPeriod);
|
||||
_sendQueue->setSyncInterval(_synInterval);
|
||||
_sendQueue->setEstimatedTimeout(estimatedTimeout());
|
||||
_sendQueue->setFlowWindowSize(std::min(_flowWindowSize, (int) _congestionControl->_congestionWindowSize));
|
||||
_sendQueue->setProbePacketEnabled(_congestionControl->shouldProbe());
|
||||
_sendQueue->setEstimatedTimeout(_congestionControl->estimatedTimeout());
|
||||
_sendQueue->setFlowWindowSize(_congestionControl->_congestionWindowSize);
|
||||
|
||||
// give the randomized sequence number to the congestion control object
|
||||
_congestionControl->setInitialSendSequenceNumber(_sendQueue->getCurrentSequenceNumber());
|
||||
|
@ -167,12 +143,6 @@ void Connection::queueTimeout() {
|
|||
});
|
||||
}
|
||||
|
||||
void Connection::queueShortCircuitLoss(quint32 sequenceNumber) {
|
||||
updateCongestionControlAndSendQueue([this, sequenceNumber] {
|
||||
_congestionControl->onLoss(SequenceNumber { sequenceNumber }, SequenceNumber { sequenceNumber });
|
||||
});
|
||||
}
|
||||
|
||||
void Connection::sendReliablePacket(std::unique_ptr<Packet> packet) {
|
||||
Q_ASSERT_X(packet->isReliable(), "Connection::send", "Trying to send an unreliable packet reliably.");
|
||||
getSendQueue().queuePacket(std::move(packet));
|
||||
|
@ -213,43 +183,6 @@ void Connection::queueReceivedMessagePacket(std::unique_ptr<Packet> packet) {
|
|||
}
|
||||
|
||||
void Connection::sync() {
|
||||
if (_isReceivingData) {
|
||||
|
||||
// check if we should expire the receive portion of this connection
|
||||
// this occurs if it has been 16 timeouts since the last data received and at least 5 seconds
|
||||
static const int NUM_TIMEOUTS_BEFORE_EXPIRY = 16;
|
||||
static const int MIN_SECONDS_BEFORE_EXPIRY = 5;
|
||||
|
||||
auto now = p_high_resolution_clock::now();
|
||||
|
||||
auto sincePacketReceive = now - _lastReceiveTime;
|
||||
|
||||
if (duration_cast<microseconds>(sincePacketReceive).count() >= NUM_TIMEOUTS_BEFORE_EXPIRY * estimatedTimeout()
    && duration_cast<seconds>(sincePacketReceive).count() >= MIN_SECONDS_BEFORE_EXPIRY) {
|
||||
// the receive side of this connection is expired
|
||||
_isReceivingData = false;
|
||||
}
|
||||
|
||||
// reset the number of light ACKs or non SYN ACKs during this sync interval
|
||||
_lightACKsDuringSYN = 1;
|
||||
_acksDuringSYN = 1;
|
||||
|
||||
if (_congestionControl->_ackInterval > 1) {
|
||||
// we send out a periodic ACK every rate control interval
|
||||
sendACK();
|
||||
}
|
||||
|
||||
if (_congestionControl->shouldNAK() && _lossList.getLength() > 0) {
|
||||
// check if we need to re-transmit a loss list
|
||||
// we do this if it has been longer than the current nakInterval since we last sent
|
||||
auto now = p_high_resolution_clock::now();
|
||||
|
||||
if (duration_cast<microseconds>(now - _lastNAKTime).count() >= _nakInterval) {
|
||||
// Send a timeout NAK packet
|
||||
sendTimeoutNAK();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Connection::recordSentPackets(int wireSize, int payloadSize,
|
||||
|
@ -265,159 +198,23 @@ void Connection::recordRetransmission(int wireSize, SequenceNumber seqNum, p_hig
|
|||
_congestionControl->onPacketSent(wireSize, seqNum, timePoint);
|
||||
}
|
||||
|
||||
void Connection::sendACK(bool wasCausedBySyncTimeout) {
|
||||
static p_high_resolution_clock::time_point lastACKSendTime;
|
||||
auto currentTime = p_high_resolution_clock::now();
|
||||
|
||||
void Connection::sendACK() {
|
||||
SequenceNumber nextACKNumber = nextACK();
|
||||
Q_ASSERT_X(nextACKNumber >= _lastSentACK, "Connection::sendACK", "Sending lower ACK, something is wrong");
|
||||
|
||||
// if our congestion control doesn't want to send an ACK for every packet received
|
||||
// check if we already sent this ACK
|
||||
if (_congestionControl->_ackInterval > 1 && nextACKNumber == _lastSentACK) {
|
||||
|
||||
// if we use ACK2s, check if the receiving side already confirmed receipt of this ACK
|
||||
if (_congestionControl->shouldACK2() && nextACKNumber < _lastReceivedAcknowledgedACK) {
|
||||
// we already got an ACK2 for this ACK we would be sending, don't bother
|
||||
return;
|
||||
}
|
||||
|
||||
// We will re-send if it has been more than the estimated timeout since the last ACK
|
||||
microseconds sinceLastACK = duration_cast<microseconds>(currentTime - lastACKSendTime);
|
||||
|
||||
if (sinceLastACK.count() < estimatedTimeout()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
// we have received new packets since the last sent ACK
|
||||
// or our congestion control dictates that we always send ACKs
|
||||
|
||||
// update the last sent ACK
|
||||
_lastSentACK = nextACKNumber;
|
||||
|
||||
_ackPacket->reset(); // We need to reset it every time.
|
||||
|
||||
// pack in the ACK sub-sequence number
|
||||
_ackPacket->writePrimitive(++_currentACKSubSequenceNumber);
|
||||
|
||||
|
||||
// pack in the ACK number
|
||||
_ackPacket->writePrimitive(nextACKNumber);
|
||||
|
||||
// pack in the RTT and variance
|
||||
_ackPacket->writePrimitive(_rtt);
|
||||
|
||||
// pack the available buffer size, in packets
|
||||
// in our implementation we have no hard limit on receive buffer size, send the default value
|
||||
_ackPacket->writePrimitive((int32_t) udt::MAX_PACKETS_IN_FLIGHT);
|
||||
|
||||
if (wasCausedBySyncTimeout) {
|
||||
// grab the up to date packet receive speed and estimated bandwidth
|
||||
int32_t packetReceiveSpeed = _receiveWindow.getPacketReceiveSpeed();
|
||||
int32_t estimatedBandwidth = _receiveWindow.getEstimatedBandwidth();
|
||||
|
||||
// update those values in our connection stats
|
||||
_stats.recordReceiveRate(packetReceiveSpeed);
|
||||
_stats.recordEstimatedBandwidth(estimatedBandwidth);
|
||||
|
||||
// pack in the receive speed and estimatedBandwidth
|
||||
_ackPacket->writePrimitive(packetReceiveSpeed);
|
||||
_ackPacket->writePrimitive(estimatedBandwidth);
|
||||
}
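// Payload recap for the ACK written above: ACK sub-sequence number, ACK'd sequence number, current
// RTT estimate, advertised receive buffer size (MAX_PACKETS_IN_FLIGHT), and, only when the ACK was
// triggered by the sync timer, the measured packet receive speed and estimated bandwidth.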
|
||||
|
||||
// record this as the last ACK send time
|
||||
lastACKSendTime = p_high_resolution_clock::now();
|
||||
|
||||
// have the socket send off our packet
|
||||
_parentSocket->writeBasePacket(*_ackPacket, _destination);
|
||||
|
||||
Q_ASSERT_X(_sentACKs.empty() || _sentACKs.back().first + 1 == _currentACKSubSequenceNumber,
    "Connection::sendACK", "Adding an invalid ACK to _sentACKs");
|
||||
|
||||
// write this ACK to the map of sent ACKs
|
||||
_sentACKs.push_back({ _currentACKSubSequenceNumber, { nextACKNumber, p_high_resolution_clock::now() }});
|
||||
|
||||
// reset the number of data packets received since last ACK
|
||||
_packetsSinceACK = 0;
|
||||
|
||||
_stats.record(ConnectionStats::Stats::SentACK);
|
||||
}
|
||||
|
||||
void Connection::sendLightACK() {
|
||||
SequenceNumber nextACKNumber = nextACK();
|
||||
|
||||
if (nextACKNumber == _lastReceivedAcknowledgedACK) {
|
||||
// we already got an ACK2 for this ACK we would be sending, don't bother
|
||||
return;
|
||||
}
|
||||
|
||||
// reset the lightACKPacket before we go to write the ACK to it
|
||||
_lightACKPacket->reset();
|
||||
|
||||
// pack in the ACK
|
||||
_lightACKPacket->writePrimitive(nextACKNumber);
|
||||
|
||||
// have the socket send off our packet immediately
|
||||
_parentSocket->writeBasePacket(*_lightACKPacket, _destination);
|
||||
|
||||
_stats.record(ConnectionStats::Stats::SentLightACK);
|
||||
}
|
||||
|
||||
void Connection::sendACK2(SequenceNumber currentACKSubSequenceNumber) {
|
||||
// reset the ACK2 Packet before writing the sub-sequence number to it
|
||||
_ack2Packet->reset();
|
||||
|
||||
// write the sub sequence number for this ACK2
|
||||
_ack2Packet->writePrimitive(currentACKSubSequenceNumber);
|
||||
|
||||
// send the ACK2 packet
|
||||
_parentSocket->writeBasePacket(*_ack2Packet, _destination);
|
||||
|
||||
// update the last sent ACK2 and the last ACK2 send time
|
||||
_lastSentACK2 = currentACKSubSequenceNumber;
|
||||
|
||||
_stats.record(ConnectionStats::Stats::SentACK2);
|
||||
}
|
||||
|
||||
void Connection::sendNAK(SequenceNumber sequenceNumberRecieved) {
|
||||
_lossReport->reset(); // We need to reset it every time.
|
||||
|
||||
// pack in the loss report
|
||||
_lossReport->writePrimitive(_lastReceivedSequenceNumber + 1);
|
||||
if (_lastReceivedSequenceNumber + 1 != sequenceNumberRecieved - 1) {
|
||||
_lossReport->writePrimitive(sequenceNumberRecieved - 1);
|
||||
}
|
||||
|
||||
// have the parent socket send off our packet immediately
|
||||
_parentSocket->writeBasePacket(*_lossReport, _destination);
|
||||
|
||||
// record our last NAK time
|
||||
_lastNAKTime = p_high_resolution_clock::now();
|
||||
|
||||
_stats.record(ConnectionStats::Stats::SentNAK);
|
||||
}
|
||||
|
||||
void Connection::sendTimeoutNAK() {
|
||||
if (_lossList.getLength() > 0) {
|
||||
|
||||
int timeoutPayloadSize = std::min((int) (_lossList.getLength() * 2 * sizeof(SequenceNumber)),
|
||||
ControlPacket::maxPayloadSize());
|
||||
|
||||
// construct a NAK packet that will hold all of the lost sequence numbers
|
||||
auto lossListPacket = ControlPacket::create(ControlPacket::TimeoutNAK, timeoutPayloadSize);
|
||||
|
||||
// Pack in the lost sequence numbers
|
||||
_lossList.write(*lossListPacket, timeoutPayloadSize / (2 * sizeof(SequenceNumber)));
|
||||
|
||||
// have our parent socket send off this control packet
|
||||
_parentSocket->writeBasePacket(*lossListPacket, _destination);
|
||||
|
||||
// record this as the last NAK time
|
||||
_lastNAKTime = p_high_resolution_clock::now();
|
||||
|
||||
_stats.record(ConnectionStats::Stats::SentTimeoutNAK);
|
||||
}
|
||||
}
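For reference, the timeout NAK above packs each loss range as a (start, end) pair of sequence numbers and caps the payload at ControlPacket::maxPayloadSize(); a single lost packet travels as (n, n), as the matching reader SendQueue::overrideNAKListFromPacket later in this diff shows. A minimal standalone sketch of that sizing arithmetic — the 4-byte sequence number and the 1400-byte cap are assumptions for the example, not values taken from this code:

```cpp
#include <algorithm>
#include <cstddef>

// Hypothetical sizing helper mirroring sendTimeoutNAK(): each loss range costs two
// sequence numbers on the wire, and the payload is clamped to the maximum payload size.
constexpr std::size_t kSequenceNumberSize = 4;   // assumption for the example
constexpr std::size_t kMaxPayloadSize = 1400;    // assumption for the example

std::size_t timeoutNakPayloadBytes(std::size_t lossRangeCount) {
    return std::min(lossRangeCount * 2 * kSequenceNumberSize, kMaxPayloadSize);
}

// e.g. 3 ranges -> 24 bytes; 500 ranges -> clamped to 1400 bytes,
// which leaves room for 1400 / 8 = 175 ranges in this hypothetical sizing.
```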
SequenceNumber Connection::nextACK() const {
    if (_lossList.getLength() > 0) {
        return _lossList.getFirstSequenceNumber() - 1;

@@ -447,27 +244,8 @@ bool Connection::processReceivedSequenceNumber(SequenceNumber sequenceNumber, in
        return false;
    }

    _isReceivingData = true;

    // mark our last receive time as now (to push the potential expiry farther)
    _lastReceiveTime = p_high_resolution_clock::now();

    if (_congestionControl->shouldProbe()) {
        // check if this is a packet pair we should estimate bandwidth from, or just a regular packet
        if (((uint32_t) sequenceNumber & 0xF) == 0) {
            _receiveWindow.onProbePair1Arrival();
        } else if (((uint32_t) sequenceNumber & 0xF) == 1) {
            // only use this packet for bandwidth estimation if we didn't just receive a control packet in its place
            if (!_receivedControlProbeTail) {
                _receiveWindow.onProbePair2Arrival();
            } else {
                // reset our control probe tail marker so the next probe that comes with data can be used
                _receivedControlProbeTail = false;
            }
        }
    }

    _receiveWindow.onPacketArrival();

    // If this is not the next sequence number, report loss
    if (sequenceNumber > _lastReceivedSequenceNumber + 1) {

@@ -476,24 +254,6 @@ bool Connection::processReceivedSequenceNumber(SequenceNumber sequenceNumber, in
        } else {
            _lossList.append(_lastReceivedSequenceNumber + 1, sequenceNumber - 1);
        }

        if (_congestionControl->shouldNAK()) {
            // Send a NAK packet
            sendNAK(sequenceNumber);

            // figure out when we should send the next loss report, if we haven't heard anything back
            _nakInterval = estimatedTimeout();

            int receivedPacketsPerSecond = _receiveWindow.getPacketReceiveSpeed();
            if (receivedPacketsPerSecond > 0) {
                // the NAK interval is at least the _minNAKInterval
                // but might be the time required for all lost packets to be retransmitted
                _nakInterval += (int) (_lossList.getLength() * (USECS_PER_SECOND / receivedPacketsPerSecond));
            }

            // the NAK interval is at least the _minNAKInterval but might be the value calculated above, if that is larger
            _nakInterval = std::max(_nakInterval, _minNAKInterval);
        }
    }

    bool wasDuplicate = false;

@@ -505,22 +265,9 @@ bool Connection::processReceivedSequenceNumber(SequenceNumber sequenceNumber, in
        // Otherwise, it could be a resend, try and remove it from the loss list
        wasDuplicate = !_lossList.remove(sequenceNumber);
    }

    // increment the counters for data packets received
    ++_packetsSinceACK;

    // check if we need to send an ACK, according to CC params
    if (_congestionControl->_ackInterval == 1) {
        // using a congestion control that ACKs every packet (like TCP Vegas)
        sendACK(true);
    } else if (_congestionControl->_ackInterval > 0 && _packetsSinceACK >= _congestionControl->_ackInterval * _acksDuringSYN) {
        _acksDuringSYN++;
        sendACK(false);
    } else if (_congestionControl->_lightACKInterval > 0
               && _packetsSinceACK >= _congestionControl->_lightACKInterval * _lightACKsDuringSYN) {
        sendLightACK();
        ++_lightACKsDuringSYN;
    }

    // using a congestion control that ACKs every packet (like TCP Vegas)
    sendACK();

    if (wasDuplicate) {
        _stats.record(ConnectionStats::Stats::Duplicate);

@@ -544,37 +291,12 @@ void Connection::processControl(ControlPacketPointer controlPacket) {
            processACK(move(controlPacket));
        }
        break;
    case ControlPacket::LightACK:
        if (_hasReceivedHandshakeACK) {
            processLightACK(move(controlPacket));
        }
        break;
    case ControlPacket::ACK2:
        if (_hasReceivedHandshake) {
            processACK2(move(controlPacket));
        }
        break;
    case ControlPacket::NAK:
        if (_hasReceivedHandshakeACK) {
            processNAK(move(controlPacket));
        }
        break;
    case ControlPacket::TimeoutNAK:
        if (_hasReceivedHandshakeACK) {
            processTimeoutNAK(move(controlPacket));
        }
        break;
    case ControlPacket::Handshake:
        processHandshake(move(controlPacket));
        break;
    case ControlPacket::HandshakeACK:
        processHandshakeACK(move(controlPacket));
        break;
    case ControlPacket::ProbeTail:
        if (_isReceivingData) {
            processProbeTail(move(controlPacket));
        }
        break;
    case ControlPacket::HandshakeRequest:
        if (_hasReceivedHandshakeACK) {
            // We're already in a state where we've received a handshake ack, so we are likely in a state

@@ -591,27 +313,6 @@ void Connection::processControl(ControlPacketPointer controlPacket) {
}

void Connection::processACK(ControlPacketPointer controlPacket) {
    // read the ACK sub-sequence number
    SequenceNumber currentACKSubSequenceNumber;
    controlPacket->readPrimitive(&currentACKSubSequenceNumber);

    // Check if we need to send an ACK2 for this ACK
    // This will be the case if it has been longer than the sync interval OR
    // it looks like they haven't received our ACK2 for this ACK
    auto currentTime = p_high_resolution_clock::now();
    static p_high_resolution_clock::time_point lastACK2SendTime =
        p_high_resolution_clock::now() - std::chrono::microseconds(_synInterval);

    microseconds sinceLastACK2 = duration_cast<microseconds>(currentTime - lastACK2SendTime);

    if (_congestionControl->shouldACK2()
        && (sinceLastACK2.count() >= _synInterval || currentACKSubSequenceNumber == _lastSentACK2)) {
        // Send ACK2 packet
        sendACK2(currentACKSubSequenceNumber);

        lastACK2SendTime = p_high_resolution_clock::now();
    }

    // read the ACKed sequence number
    SequenceNumber ack;
    controlPacket->readPrimitive(&ack);

@@ -626,22 +327,9 @@ void Connection::processACK(ControlPacketPointer controlPacket) {
        return;
    }

    // read the RTT
    int32_t rtt;
    controlPacket->readPrimitive(&rtt);

    if (ack < _lastReceivedACK) {
    if (ack <= _lastReceivedACK) {
        // this is an out of order ACK, bail
        return;
    }

    // this is a valid ACKed sequence number - update the flow window size and the last received ACK
    int32_t packedFlowWindow;
    controlPacket->readPrimitive(&packedFlowWindow);

    _flowWindowSize = packedFlowWindow;

    if (ack == _lastReceivedACK) {
        // or
        // processing an already received ACK, bail
        return;
    }

@@ -650,39 +338,7 @@ void Connection::processACK(ControlPacketPointer controlPacket) {

    // ACK the send queue so it knows what was received
    getSendQueue().ack(ack);

    // update the RTT
    updateRTT(rtt);

    // write this RTT to stats
    _stats.recordRTT(rtt);

    // set the RTT for congestion control
    _congestionControl->setRTT(_rtt);

    if (controlPacket->bytesLeftToRead() > 0) {
        int32_t receiveRate, bandwidth;

        Q_ASSERT_X(controlPacket->bytesLeftToRead() == sizeof(receiveRate) + sizeof(bandwidth),
                   "Connection::processACK", "sync interval ACK packet does not contain expected data");

        controlPacket->readPrimitive(&receiveRate);
        controlPacket->readPrimitive(&bandwidth);

        // set the delivery rate and bandwidth for congestion control
        // these are calculated using an EWMA
        static const int EMWA_ALPHA_NUMERATOR = 8;

        // record these samples in connection stats
        _stats.recordSendRate(receiveRate);
        _stats.recordEstimatedBandwidth(bandwidth);

        _deliveryRate = (_deliveryRate * (EMWA_ALPHA_NUMERATOR - 1) + receiveRate) / EMWA_ALPHA_NUMERATOR;
        _bandwidth = (_bandwidth * (EMWA_ALPHA_NUMERATOR - 1) + bandwidth) / EMWA_ALPHA_NUMERATOR;

        _congestionControl->setReceiveRate(_deliveryRate);
        _congestionControl->setBandwidth(_bandwidth);
    }

    // give this ACK to the congestion control and update the send queue parameters
    updateCongestionControlAndSendQueue([this, ack, &controlPacket] {

@@ -695,92 +351,6 @@ void Connection::processACK(ControlPacketPointer controlPacket) {
    _stats.record(ConnectionStats::Stats::ProcessedACK);
}
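The delivery-rate and bandwidth samples above are folded in with an exponentially weighted moving average whose weight is 1/EMWA_ALPHA_NUMERATOR = 1/8. A standalone sketch of that update, using the same integer arithmetic as the code above:

```cpp
#include <cstdint>

// EWMA update as used above: new = (old * (N - 1) + sample) / N, with N = 8,
// so each new sample contributes one eighth of the smoothed value.
int32_t ewmaUpdate(int32_t smoothed, int32_t sample, int32_t alphaNumerator = 8) {
    return (smoothed * (alphaNumerator - 1) + sample) / alphaNumerator;
}

// Example: a smoothed delivery rate of 16 packets/s and a new sample of 80 packets/s
// gives (16 * 7 + 80) / 8 = 24 packets/s.
```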
void Connection::processLightACK(ControlPacketPointer controlPacket) {
    // read the ACKed sequence number
    SequenceNumber ack;
    controlPacket->readPrimitive(&ack);

    // must be larger than the last received ACK to be processed
    if (ack > _lastReceivedACK) {
        // NOTE: the following makes sense in UDT where there is a dynamic receive buffer.
        // Since we have a receive buffer that is always of a default size, we don't use this light ACK to
        // drop the flow window size.

        // decrease the flow window size by the offset between the last received ACK and this ACK
        // _flowWindowSize -= seqoff(_lastReceivedACK, ack);

        // update the last received ACK to this one
        _lastReceivedACK = ack;

        // send light ACK to the send queue
        getSendQueue().ack(ack);
    }

    _stats.record(ConnectionStats::Stats::ReceivedLightACK);
}

void Connection::processACK2(ControlPacketPointer controlPacket) {
    // pull the sub sequence number from the packet
    SequenceNumber subSequenceNumber;
    controlPacket->readPrimitive(&subSequenceNumber);

    // check if we had that subsequence number in our map
    auto it = std::find_if_not(_sentACKs.begin(), _sentACKs.end(), [&subSequenceNumber](const ACKListPair& pair){
        return pair.first < subSequenceNumber;
    });

    if (it != _sentACKs.end()) {
        if (it->first == subSequenceNumber) {
            // update the RTT using the ACK window

            // calculate the RTT (time now - time ACK sent)
            auto now = p_high_resolution_clock::now();
            int rtt = duration_cast<microseconds>(now - it->second.second).count();

            updateRTT(rtt);
            // write this RTT to stats
            _stats.recordRTT(rtt);

            // set the RTT for congestion control
            _congestionControl->setRTT(_rtt);

            // update the last ACKed ACK
            if (it->second.first > _lastReceivedAcknowledgedACK) {
                _lastReceivedAcknowledgedACK = it->second.first;
            }
        } else if (it->first < subSequenceNumber) {
            Q_UNREACHABLE();
        }
    }

    // erase this sub-sequence number and anything below it now that we've gotten our timing information
    _sentACKs.erase(_sentACKs.begin(), it);

    _stats.record(ConnectionStats::Stats::ReceivedACK2);
}

void Connection::processNAK(ControlPacketPointer controlPacket) {
    // read the loss report
    SequenceNumber start, end;
    controlPacket->readPrimitive(&start);

    end = start;

    if (controlPacket->bytesLeftToRead() >= (qint64)sizeof(SequenceNumber)) {
        controlPacket->readPrimitive(&end);
    }

    // send that off to the send queue so it knows there was loss
    getSendQueue().nak(start, end);

    // give the loss to the congestion control object and update the send queue parameters
    updateCongestionControlAndSendQueue([this, start, end] {
        _congestionControl->onLoss(start, end);
    });

    _stats.record(ConnectionStats::Stats::ReceivedNAK);
}

void Connection::processHandshake(ControlPacketPointer controlPacket) {
    SequenceNumber initialSequenceNumber;
    controlPacket->readPrimitive(&initialSequenceNumber);

@@ -797,7 +367,6 @@ void Connection::processHandshake(ControlPacketPointer controlPacket) {
        resetReceiveState();
        _initialReceiveSequenceNumber = initialSequenceNumber;
        _lastReceivedSequenceNumber = initialSequenceNumber - 1;
        _lastSentACK = initialSequenceNumber - 1;
    }

    _handshakeACK->reset();

@@ -829,33 +398,6 @@ void Connection::processHandshakeACK(ControlPacketPointer controlPacket) {
    }
}

void Connection::processTimeoutNAK(ControlPacketPointer controlPacket) {
    // Override SendQueue's LossList with the timeout NAK list
    getSendQueue().overrideNAKListFromPacket(*controlPacket);

    // we don't tell the congestion control object there was loss here - this matches UDT's implementation
    // a possible improvement would be to tell it which new loss this timeout packet told us about

    _stats.record(ConnectionStats::Stats::ReceivedTimeoutNAK);
}

void Connection::processProbeTail(ControlPacketPointer controlPacket) {
    if (((uint32_t) _lastReceivedSequenceNumber & 0xF) == 0) {
        // this is the second packet in a probe set so we can estimate bandwidth
        // the sender sent this to us in lieu of sending new data (because they didn't have any)

#ifdef UDT_CONNECTION_DEBUG
        qCDebug(networking) << "Processing second packet of probe from control packet instead of data packet";
#endif

        _receiveWindow.onProbePair2Arrival();

        // mark that we processed a control packet for the second in the pair and we should not mark
        // the next data packet received
        _receivedControlProbeTail = true;
    }
}

void Connection::resetReceiveState() {

    // reset all SequenceNumber member variables back to default

@@ -863,35 +405,12 @@ void Connection::resetReceiveState() {

    _lastReceivedSequenceNumber = defaultSequenceNumber;

    _lastReceivedAcknowledgedACK = defaultSequenceNumber;
    _currentACKSubSequenceNumber = defaultSequenceNumber;

    _lastSentACK = defaultSequenceNumber;

    // clear the sent ACKs
    _sentACKs.clear();

    // clear the loss list and _lastNAKTime
    // clear the loss list
    _lossList.clear();
    _lastNAKTime = p_high_resolution_clock::now();

    // the _nakInterval need not be reset, that will happen on loss

    // clear sync variables
    _isReceivingData = false;
    _connectionStart = p_high_resolution_clock::now();

    _acksDuringSYN = 1;
    _lightACKsDuringSYN = 1;
    _packetsSinceACK = 0;

    // reset RTT to initial value
    resetRTT();

    // clear the intervals in the receive window
    _receiveWindow.reset();
    _receivedControlProbeTail = false;

    // clear any pending received messages
    for (auto& pendingMessage : _pendingReceivedMessages) {
        _parentSocket->messageFailed(this, pendingMessage.first);

@@ -899,30 +418,6 @@ void Connection::resetReceiveState() {
    _pendingReceivedMessages.clear();
}

void Connection::updateRTT(int rtt) {
    // This updates the RTT using exponential weighted moving average
    // This is Jacobson's formula for RTT estimation
    // http://www.mathcs.emory.edu/~cheung/Courses/455/Syllabus/7-transport/Jacobson-88.pdf

    // Estimated RTT = (1 - x)(estimatedRTT) + (x)(sampleRTT)
    // (where x = 0.125 via Jacobson)

    // Deviation = (1 - x)(deviation) + x |sampleRTT - estimatedRTT|
    // (where x = 0.25 via Jacobson)

    static const int RTT_ESTIMATION_ALPHA_NUMERATOR = 8;
    static const int RTT_ESTIMATION_VARIANCE_ALPHA_NUMERATOR = 4;

    _rtt = (_rtt * (RTT_ESTIMATION_ALPHA_NUMERATOR - 1) + rtt) / RTT_ESTIMATION_ALPHA_NUMERATOR;

    _rttVariance = (_rttVariance * (RTT_ESTIMATION_VARIANCE_ALPHA_NUMERATOR - 1)
                    + abs(rtt - _rtt)) / RTT_ESTIMATION_VARIANCE_ALPHA_NUMERATOR;
}

int Connection::estimatedTimeout() const {
    return _congestionControl->_userDefinedRTO ? _congestionControl->_rto : _rtt + _rttVariance * 4;
}
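In integer form the two updates above use a weight of 1/8 for the RTT mean and 1/4 for the deviation, and the retransmission timeout follows the usual Jacobson/Karels shape. Written out, as a restatement of the comments above rather than new behaviour:

```latex
\mathrm{RTT} \leftarrow \tfrac{7}{8}\,\mathrm{RTT} + \tfrac{1}{8}\,rtt_{\text{sample}}, \qquad
\mathrm{RTTVar} \leftarrow \tfrac{3}{4}\,\mathrm{RTTVar} + \tfrac{1}{4}\,\bigl|\,rtt_{\text{sample}} - \mathrm{RTT}\,\bigr|, \qquad
\mathrm{RTO} = \mathrm{RTT} + 4\,\mathrm{RTTVar}
```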
void Connection::updateCongestionControlAndSendQueue(std::function<void ()> congestionCallback) {
    // update the last sent sequence number in congestion control
    _congestionControl->setSendCurrentSequenceNumber(getSendQueue().getCurrentSequenceNumber());

@@ -934,8 +429,8 @@ void Connection::updateCongestionControlAndSendQueue(std::function<void ()> cong

    // now that we've updated the congestion control, update the packet send period and flow window size
    sendQueue.setPacketSendPeriod(_congestionControl->_packetSendPeriod);
    sendQueue.setEstimatedTimeout(estimatedTimeout());
    sendQueue.setFlowWindowSize(std::min(_flowWindowSize, (int) _congestionControl->_congestionWindowSize));
    sendQueue.setEstimatedTimeout(_congestionControl->estimatedTimeout());
    sendQueue.setFlowWindowSize(_congestionControl->_congestionWindowSize);

    // record connection stats
    _stats.recordPacketSendPeriod(_congestionControl->_packetSendPeriod);

@@ -22,7 +22,6 @@
#include "ConnectionStats.h"
#include "Constants.h"
#include "LossList.h"
#include "PacketTimeWindow.h"
#include "SendQueue.h"
#include "../HifiSockAddr.h"

@@ -51,9 +50,6 @@ private:
class Connection : public QObject {
    Q_OBJECT
public:
    using SequenceNumberTimePair = std::pair<SequenceNumber, p_high_resolution_clock::time_point>;
    using ACKListPair = std::pair<SequenceNumber, SequenceNumberTimePair>;
    using SentACKList = std::list<ACKListPair>;
    using ControlPacketPointer = std::unique_ptr<ControlPacket>;

    Connection(Socket* parentSocket, HifiSockAddr destination, std::unique_ptr<CongestionControl> congestionControl);

@@ -87,51 +83,29 @@ private slots:
    void recordRetransmission(int wireSize, SequenceNumber sequenceNumber, p_high_resolution_clock::time_point timePoint);
    void queueInactive();
    void queueTimeout();
    void queueShortCircuitLoss(quint32 sequenceNumber);

private:
    void sendACK(bool wasCausedBySyncTimeout = true);
    void sendLightACK();
    void sendACK2(SequenceNumber currentACKSubSequenceNumber);
    void sendNAK(SequenceNumber sequenceNumberRecieved);
    void sendTimeoutNAK();
    void sendACK();

    void processACK(ControlPacketPointer controlPacket);
    void processLightACK(ControlPacketPointer controlPacket);
    void processACK2(ControlPacketPointer controlPacket);
    void processNAK(ControlPacketPointer controlPacket);
    void processTimeoutNAK(ControlPacketPointer controlPacket);
    void processHandshake(ControlPacketPointer controlPacket);
    void processHandshakeACK(ControlPacketPointer controlPacket);
    void processProbeTail(ControlPacketPointer controlPacket);

    void resetReceiveState();
    void resetRTT();

    SendQueue& getSendQueue();
    SequenceNumber nextACK() const;
    void updateRTT(int rtt);

    int estimatedTimeout() const;

    void updateCongestionControlAndSendQueue(std::function<void()> congestionCallback);

    void stopSendQueue();

    int _synInterval; // Periodical Rate Control Interval, in microseconds

    int _nakInterval { -1 }; // NAK timeout interval, in microseconds, set on loss
    int _minNAKInterval { 100000 }; // NAK timeout interval lower bound, default of 100ms
    p_high_resolution_clock::time_point _lastNAKTime = p_high_resolution_clock::now();

    bool _hasReceivedHandshake { false }; // flag for receipt of handshake from server
    bool _hasReceivedHandshakeACK { false }; // flag for receipt of handshake ACK from client
    bool _didRequestHandshake { false }; // flag for request of handshake from server

    p_high_resolution_clock::time_point _connectionStart = p_high_resolution_clock::now(); // holds the time_point for creation of this connection
    p_high_resolution_clock::time_point _lastReceiveTime; // holds the last time we received anything from sender

    bool _isReceivingData { false }; // flag used for expiry of receipt portion of connection

    SequenceNumber _initialSequenceNumber; // Randomized on Connection creation, identifies connection during re-connect requests
    SequenceNumber _initialReceiveSequenceNumber; // Randomized by peer Connection on creation, identifies connection during re-connect requests

@@ -141,43 +115,18 @@ private:
    LossList _lossList; // List of all missing packets
    SequenceNumber _lastReceivedSequenceNumber; // The largest sequence number received from the peer
    SequenceNumber _lastReceivedACK; // The last ACK received
    SequenceNumber _lastReceivedAcknowledgedACK; // The last sent ACK that has been acknowledged via an ACK2 from the peer
    SequenceNumber _currentACKSubSequenceNumber; // The current ACK sub-sequence number (used for Acknowledgment of ACKs)

    SequenceNumber _lastSentACK; // The last sent ACK
    SequenceNumber _lastSentACK2; // The last sent ACK sub-sequence number in an ACK2

    int _acksDuringSYN { 1 }; // The number of non-SYN ACKs sent during SYN
    int _lightACKsDuringSYN { 1 }; // The number of lite ACKs sent during SYN interval

    int32_t _rtt; // RTT, in microseconds
    int32_t _rttVariance; // RTT variance
    int _flowWindowSize { udt::MAX_PACKETS_IN_FLIGHT }; // Flow control window size

    int _bandwidth { 1 }; // Exponential moving average for estimated bandwidth, in packets per second
    int _deliveryRate { 16 }; // Exponential moving average for receiver's receive rate, in packets per second

    SentACKList _sentACKs; // Map of ACK sub-sequence numbers to ACKed sequence number and sent time

    Socket* _parentSocket { nullptr };
    HifiSockAddr _destination;

    PacketTimeWindow _receiveWindow { 16, 64 }; // Window of interval between packets (16) and probes (64) for timing
    bool _receivedControlProbeTail { false }; // Marker for receipt of control packet probe tail (in lieu of probe with data)

    std::unique_ptr<CongestionControl> _congestionControl;

    std::unique_ptr<SendQueue> _sendQueue;

    std::map<MessageNumber, PendingReceivedMessage> _pendingReceivedMessages;

    int _packetsSinceACK { 0 }; // The number of packets that have been received during the current ACK interval

    // Re-used control packets
    ControlPacketPointer _ackPacket;
    ControlPacketPointer _lightACKPacket;
    ControlPacketPointer _ack2Packet;
    ControlPacketPointer _lossReport;
    ControlPacketPointer _handshakeACK;

    ConnectionStats _stats;
@@ -95,11 +95,6 @@ void ConnectionStats::recordReceiveRate(int sample) {
    _total.receiveRate = (int)((_total.receiveRate * EWMA_PREVIOUS_SAMPLES_WEIGHT) + (sample * EWMA_CURRENT_SAMPLE_WEIGHT));
}

void ConnectionStats::recordEstimatedBandwidth(int sample) {
    _currentSample.estimatedBandwith = sample;
    _total.estimatedBandwith = (int)((_total.estimatedBandwith * EWMA_PREVIOUS_SAMPLES_WEIGHT) + (sample * EWMA_CURRENT_SAMPLE_WEIGHT));
}

void ConnectionStats::recordRTT(int sample) {
    _currentSample.rtt = sample;
    _total.rtt = (int)((_total.rtt * EWMA_PREVIOUS_SAMPLES_WEIGHT) + (sample * EWMA_CURRENT_SAMPLE_WEIGHT));

@@ -122,14 +117,6 @@ QDebug& operator<<(QDebug&& debug, const udt::ConnectionStats::Stats& stats) {
    HIFI_LOG_EVENT(SentACK)
    HIFI_LOG_EVENT(ReceivedACK)
    HIFI_LOG_EVENT(ProcessedACK)
    HIFI_LOG_EVENT(SentLightACK)
    HIFI_LOG_EVENT(ReceivedLightACK)
    HIFI_LOG_EVENT(SentACK2)
    HIFI_LOG_EVENT(ReceivedACK2)
    HIFI_LOG_EVENT(SentNAK)
    HIFI_LOG_EVENT(ReceivedNAK)
    HIFI_LOG_EVENT(SentTimeoutNAK)
    HIFI_LOG_EVENT(ReceivedTimeoutNAK)
    HIFI_LOG_EVENT(Retransmission)
    HIFI_LOG_EVENT(Duplicate)
    ;

@@ -24,14 +24,6 @@ public:
        SentACK,
        ReceivedACK,
        ProcessedACK,
        SentLightACK,
        ReceivedLightACK,
        SentACK2,
        ReceivedACK2,
        SentNAK,
        ReceivedNAK,
        SentTimeoutNAK,
        ReceivedTimeoutNAK,
        Retransmission,
        Duplicate,

@@ -89,7 +81,6 @@ public:

    void recordSendRate(int sample);
    void recordReceiveRate(int sample);
    void recordEstimatedBandwidth(int sample);
    void recordRTT(int sample);
    void recordCongestionWindowSize(int sample);
    void recordPacketSendPeriod(int sample);

@@ -28,13 +28,8 @@ public:

    enum Type : uint16_t {
        ACK,
        ACK2,
        LightACK,
        NAK,
        TimeoutNAK,
        Handshake,
        HandshakeACK,
        ProbeTail,
        HandshakeRequest
    };

@@ -40,7 +40,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
    case PacketType::AvatarData:
    case PacketType::BulkAvatarData:
    case PacketType::KillAvatar:
        return static_cast<PacketVersion>(AvatarMixerPacketVersion::ProceduralFaceMovementFlagsAndBlendshapes);
        return static_cast<PacketVersion>(AvatarMixerPacketVersion::FarGrabJoints);
    case PacketType::MessagesData:
        return static_cast<PacketVersion>(MessageDataVersion::TextOrBinaryData);
    // ICE packets

@@ -95,7 +95,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
    case PacketType::AvatarIdentityRequest:
        return 22;
    default:
        return 21;
        return 22;
    }
}

@@ -288,7 +288,8 @@ enum class AvatarMixerPacketVersion : PacketVersion {
    AvatarJointDefaultPoseFlags,
    FBXReaderNodeReparenting,
    FixMannequinDefaultAvatarFeet,
    ProceduralFaceMovementFlagsAndBlendshapes
    ProceduralFaceMovementFlagsAndBlendshapes,
    FarGrabJoints
};

enum class DomainConnectRequestVersion : PacketVersion {

@@ -1,125 +0,0 @@
//
//  PacketTimeWindow.cpp
//  libraries/networking/src/udt
//
//  Created by Stephen Birarda on 2015-07-28.
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "PacketTimeWindow.h"

#include <numeric>
#include <cmath>

#include <NumericalConstants.h>

using namespace udt;
using namespace std::chrono;

static const int DEFAULT_PACKET_INTERVAL_MICROSECONDS = 1000000; // 1s
static const int DEFAULT_PROBE_INTERVAL_MICROSECONDS = 1000; // 1ms

PacketTimeWindow::PacketTimeWindow(int numPacketIntervals, int numProbeIntervals) :
    _numPacketIntervals(numPacketIntervals),
    _numProbeIntervals(numProbeIntervals),
    _packetIntervals(_numPacketIntervals, DEFAULT_PACKET_INTERVAL_MICROSECONDS),
    _probeIntervals(_numProbeIntervals, DEFAULT_PROBE_INTERVAL_MICROSECONDS)
{

}

void PacketTimeWindow::reset() {
    _packetIntervals.assign(_numPacketIntervals, DEFAULT_PACKET_INTERVAL_MICROSECONDS);
    _probeIntervals.assign(_numProbeIntervals, DEFAULT_PROBE_INTERVAL_MICROSECONDS);
}

template <typename Iterator>
int median(Iterator begin, Iterator end) {
    // use std::nth_element to grab the middle - for an even number of elements this is the upper middle
    Iterator middle = begin + (end - begin) / 2;
    std::nth_element(begin, middle, end);

    if ((end - begin) % 2 != 0) {
        // odd number of elements, just return the middle
        return *middle;
    } else {
        // even number of elements, return the mean of the upper middle and the lower middle
        Iterator lowerMiddle = std::max_element(begin, middle);
        return (*middle + *lowerMiddle) / 2;
    }
}

int32_t meanOfMedianFilteredValues(std::vector<int> intervals, int numValues, int valuesRequired = 0) {
    // grab the median value of the intervals vector
    int intervalsMedian = median(intervals.begin(), intervals.end());

    // figure out our bounds for median filtering
    static const int MEDIAN_FILTERING_BOUND_MULTIPLIER = 8;
    int upperBound = intervalsMedian * MEDIAN_FILTERING_BOUND_MULTIPLIER;
    int lowerBound = intervalsMedian / MEDIAN_FILTERING_BOUND_MULTIPLIER;

    int sum = 0;
    int count = 0;

    // sum the values that are inside the median filtered bounds
    for (auto& interval : intervals) {
        if ((interval < upperBound) && (interval > lowerBound)) {
            ++count;
            sum += interval;
        }
    }

    // make sure we hit our threshold of values required
    if (count >= valuesRequired) {
        // return the frequency (per second) for the mean interval
        static const double USECS_PER_SEC = 1000000.0;
        return (int32_t) ceil(USECS_PER_SEC / (((double) sum) / ((double) count)));
    } else {
        return 0;
    }
}
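The receive-speed and bandwidth getters below both reduce a ring of arrival intervals to a rate this way: take the median interval, discard samples more than 8x above or below it, average what survives, and convert that mean interval into packets per second. A rough standalone illustration of the same idea (the interval values are made up for the example):

```cpp
#include <algorithm>
#include <vector>

// Median-filtered mean, written as a plain function over microsecond intervals.
// With intervals {1000, 1100, 900, 950, 40000} the 40000 us outlier is dropped
// (it exceeds 8x the median of 1000 us), and the mean of the rest (~987 us)
// maps to roughly 1013 packets per second.
double intervalsToRate(const std::vector<int>& intervals) {
    std::vector<int> sorted = intervals;
    std::nth_element(sorted.begin(), sorted.begin() + sorted.size() / 2, sorted.end());
    int median = sorted[sorted.size() / 2];

    double sum = 0.0;
    int count = 0;
    for (int interval : intervals) {
        if (interval > median / 8 && interval < median * 8) {
            sum += interval;
            ++count;
        }
    }
    return count > 0 ? 1000000.0 / (sum / count) : 0.0;
}
```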
int32_t PacketTimeWindow::getPacketReceiveSpeed() const {
    // return the mean value of median filtered values (per second) - or zero if there are too few filtered values
    return meanOfMedianFilteredValues(_packetIntervals, _numPacketIntervals, _numPacketIntervals / 2);
}

int32_t PacketTimeWindow::getEstimatedBandwidth() const {
    // return mean value of median filtered values (per second)
    return meanOfMedianFilteredValues(_probeIntervals, _numProbeIntervals);
}

void PacketTimeWindow::onPacketArrival() {

    // take the current time
    auto now = p_high_resolution_clock::now();

    if (_packetIntervals.size() > 0) {
        // record the interval between this packet and the last one
        _packetIntervals[_currentPacketInterval++] = duration_cast<microseconds>(now - _lastPacketTime).count();

        // reset the currentPacketInterval index when it wraps
        _currentPacketInterval %= _numPacketIntervals;
    }

    // remember this as the last packet arrival time
    _lastPacketTime = now;
}

void PacketTimeWindow::onProbePair1Arrival() {
    // take the current time as the first probe time
    _firstProbeTime = p_high_resolution_clock::now();
}

void PacketTimeWindow::onProbePair2Arrival() {
    // store the interval between the two probes
    auto now = p_high_resolution_clock::now();

    _probeIntervals[_currentProbeInterval++] = duration_cast<microseconds>(now - _firstProbeTime).count();

    // reset the currentProbeInterval index when it wraps
    _currentProbeInterval %= _numProbeIntervals;
}
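Connection::processReceivedSequenceNumber earlier in this diff feeds these two hooks: every sixteenth data packet (sequence number with the low four bits equal to zero) and its immediate successor are sent back to back as a probe pair, so the receiver's spacing between them approximates the bottleneck's per-packet service time. In packets per second that is simply

```latex
\hat{B} \approx \frac{1}{\Delta t_{\text{pair}}}, \qquad \Delta t_{\text{pair}} = t_{\text{probe 2}} - t_{\text{probe 1}}
```

with the window keeping the most recent probe gaps (64 of them, per the PacketTimeWindow constructed in Connection.h earlier in this diff) and median-filtering them as shown above.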
@@ -1,51 +0,0 @@
//
//  PacketTimeWindow.h
//  libraries/networking/src/udt
//
//  Created by Stephen Birarda on 2015-07-28.
//  Copyright 2015 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#pragma once

#ifndef hifi_PacketTimeWindow_h
#define hifi_PacketTimeWindow_h

#include <vector>

#include <PortableHighResolutionClock.h>

namespace udt {

class PacketTimeWindow {
public:
    PacketTimeWindow(int numPacketIntervals = 16, int numProbeIntervals = 16);

    void onPacketArrival();
    void onProbePair1Arrival();
    void onProbePair2Arrival();

    int32_t getPacketReceiveSpeed() const;
    int32_t getEstimatedBandwidth() const;

    void reset();
private:
    int _numPacketIntervals { 0 }; // the number of packet intervals to store
    int _numProbeIntervals { 0 }; // the number of probe intervals to store

    int _currentPacketInterval { 0 }; // index for the current packet interval
    int _currentProbeInterval { 0 }; // index for the current probe interval

    std::vector<int> _packetIntervals; // vector of microsecond intervals between packet arrivals
    std::vector<int> _probeIntervals; // vector of microsecond intervals between probe pair arrivals

    p_high_resolution_clock::time_point _lastPacketTime = p_high_resolution_clock::now(); // the time_point when last packet arrived
    p_high_resolution_clock::time_point _firstProbeTime = p_high_resolution_clock::now(); // the time_point when first probe in pair arrived
};

}

#endif // hifi_PacketTimeWindow_h

@@ -164,16 +164,6 @@ void SendQueue::ack(SequenceNumber ack) {
    _emptyCondition.notify_one();
}

void SendQueue::nak(SequenceNumber start, SequenceNumber end) {
    {
        std::lock_guard<std::mutex> nakLocker(_naksLock);
        _naks.insert(start, end);
    }

    // call notify_one on the condition_variable_any in case the send thread is sleeping waiting for losses to re-send
    _emptyCondition.notify_one();
}

void SendQueue::fastRetransmit(udt::SequenceNumber ack) {
    {
        std::lock_guard<std::mutex> nakLocker(_naksLock);

@@ -184,28 +174,6 @@ void SendQueue::fastRetransmit(udt::SequenceNumber ack) {
    _emptyCondition.notify_one();
}

void SendQueue::overrideNAKListFromPacket(ControlPacket& packet) {
    {
        std::lock_guard<std::mutex> nakLocker(_naksLock);
        _naks.clear();

        SequenceNumber first, second;
        while (packet.bytesLeftToRead() >= (qint64)(2 * sizeof(SequenceNumber))) {
            packet.readPrimitive(&first);
            packet.readPrimitive(&second);

            if (first == second) {
                _naks.append(first);
            } else {
                _naks.append(first, second);
            }
        }
    }

    // call notify_one on the condition_variable_any in case the send thread is sleeping waiting for losses to re-send
    _emptyCondition.notify_one();
}

void SendQueue::sendHandshake() {
    std::unique_lock<std::mutex> handshakeLock { _handshakeMutex };
    if (!_hasReceivedHandshakeACK) {

@@ -268,8 +236,6 @@ bool SendQueue::sendNewPacketAndAddToSentList(std::unique_ptr<Packet> newPacket,
            _naks.append(sequenceNumber);
        }

        emit shortCircuitLoss(quint32(sequenceNumber));

        return false;
    } else {
        return true;

@@ -385,10 +351,6 @@ void SendQueue::run() {
    }
}

void SendQueue::setProbePacketEnabled(bool enabled) {
    _shouldSendProbes = enabled;
}

int SendQueue::maybeSendNewPacket() {
    if (!isFlowWindowFull()) {
        // we didn't re-send a packet, so time to send a new one

@@ -397,40 +359,15 @@ int SendQueue::maybeSendNewPacket() {
        SequenceNumber nextNumber = getNextSequenceNumber();

        // grab the first packet we will send
        std::unique_ptr<Packet> firstPacket = _packets.takePacket();
        Q_ASSERT(firstPacket);
        std::unique_ptr<Packet> packet = _packets.takePacket();
        Q_ASSERT(packet);

        // attempt to send the first packet
        if (sendNewPacketAndAddToSentList(move(firstPacket), nextNumber)) {
            std::unique_ptr<Packet> secondPacket;
            bool shouldSendPairTail = false;
        // attempt to send the packet
        sendNewPacketAndAddToSentList(move(packet), nextNumber);

            if (_shouldSendProbes && ((uint32_t) nextNumber & 0xF) == 0) {
                // the first packet is the first in a probe pair - every 16 packets (rightmost 4 bits = 0)
                // pull off a second packet if we can before we unlock
                shouldSendPairTail = true;

                secondPacket = _packets.takePacket();
            }

            // do we have a second in a pair to send as well?
            if (secondPacket) {
                sendNewPacketAndAddToSentList(move(secondPacket), getNextSequenceNumber());
            } else if (shouldSendPairTail) {
                // we didn't get a second packet to send in the probe pair
                // send a control packet of type ProbePairTail so the receiver can still do
                // proper bandwidth estimation
                static auto pairTailPacket = ControlPacket::create(ControlPacket::ProbeTail);
                _socket->writeBasePacket(*pairTailPacket, _destination);
            }

            // return the number of attempted packet sends
            return shouldSendPairTail ? 2 : 1;
        } else {
            // we attempted to send a single packet, return 1
            return 1;
        }
        // we attempted to send a packet, return 1
        return 1;
    }
}
@@ -69,16 +69,12 @@ public:

    void setEstimatedTimeout(int estimatedTimeout) { _estimatedTimeout = estimatedTimeout; }
    void setSyncInterval(int syncInterval) { _syncInterval = syncInterval; }

    void setProbePacketEnabled(bool enabled);

public slots:
    void stop();

    void ack(SequenceNumber ack);
    void nak(SequenceNumber start, SequenceNumber end);
    void fastRetransmit(SequenceNumber ack);
    void overrideNAKListFromPacket(ControlPacket& packet);
    void handshakeACK();

signals:

@@ -87,7 +83,6 @@ signals:

    void queueInactive();

    void shortCircuitLoss(quint32 sequenceNumber);
    void timeout();

private slots:

@@ -145,9 +140,6 @@ private:
    std::condition_variable _handshakeACKCondition;

    std::condition_variable_any _emptyCondition;

    std::atomic<bool> _shouldSendProbes { true };
};

}

@@ -33,18 +33,11 @@ using namespace udt;

Socket::Socket(QObject* parent, bool shouldChangeSocketOptions) :
    QObject(parent),
    _synTimer(new QTimer(this)),
    _readyReadBackupTimer(new QTimer(this)),
    _shouldChangeSocketOptions(shouldChangeSocketOptions)
{
    connect(&_udpSocket, &QUdpSocket::readyRead, this, &Socket::readPendingDatagrams);

    // make sure our synchronization method is called every SYN interval
    connect(_synTimer, &QTimer::timeout, this, &Socket::rateControlSync);

    // start our timer for the synchronization time interval
    _synTimer->start(_synInterval);

    // make sure we hear about errors and state changes from the underlying socket
    connect(&_udpSocket, SIGNAL(error(QAbstractSocket::SocketError)),
            this, SLOT(handleSocketError(QAbstractSocket::SocketError)));

@@ -427,49 +420,9 @@ void Socket::connectToSendSignal(const HifiSockAddr& destinationAddr, QObject* r
    }
}

void Socket::rateControlSync() {

    // enumerate our list of connections and ask each of them to send off periodic ACK packet for rate control

    // the way we do this is a little funny looking - we need to avoid the case where we call sync and
    // (because of our Qt direct connection to the Connection's signal that it has been deactivated)
    // an iterator on _connectionsHash would be invalidated by our own call to cleanupConnection

    // collect the sockets for all connections in a vector

    std::vector<HifiSockAddr> sockAddrVector;
    sockAddrVector.reserve(_connectionsHash.size());

    for (auto& connection : _connectionsHash) {
        sockAddrVector.emplace_back(connection.first);
    }

    // enumerate that vector of HifiSockAddr objects
    for (auto& sockAddr : sockAddrVector) {
        // pull out the respective connection via a quick find on the unordered_map
        auto it = _connectionsHash.find(sockAddr);

        if (it != _connectionsHash.end()) {
            // if the connection is erased while calling sync since we are re-using the iterator that was invalidated
            // we're good to go
            auto& connection = _connectionsHash[sockAddr];
            connection->sync();
        }
    }

    if (_synTimer->interval() != _synInterval) {
        // if the _synTimer interval doesn't match the current _synInterval (changes when the CC factory is changed)
        // then restart it now with the right interval
        _synTimer->start(_synInterval);
    }
}

void Socket::setCongestionControlFactory(std::unique_ptr<CongestionControlVirtualFactory> ccFactory) {
    // swap the current unique_ptr for the new factory
    _ccFactory.swap(ccFactory);

    // update the _synInterval to the value from the factory
    _synInterval = _ccFactory->synInterval();
}

@@ -102,7 +102,6 @@ public slots:
private slots:
    void readPendingDatagrams();
    void checkForReadyReadBackup();
    void rateControlSync();

    void handleSocketError(QAbstractSocket::SocketError socketError);
    void handleStateChanged(QAbstractSocket::SocketState socketState);

@@ -133,9 +132,6 @@ private:
    std::unordered_map<HifiSockAddr, BasePacketHandler> _unfilteredHandlers;
    std::unordered_map<HifiSockAddr, SequenceNumber> _unreliableSequenceNumbers;
    std::unordered_map<HifiSockAddr, std::unique_ptr<Connection>> _connectionsHash;

    int _synInterval { 10 }; // 10ms
    QTimer* _synTimer { nullptr };

    QTimer* _readyReadBackupTimer { nullptr };

@@ -21,8 +21,6 @@ TCPVegasCC::TCPVegasCC() {
    _packetSendPeriod = 0.0;
    _congestionWindowSize = 2;

    setAckInterval(1); // TCP sends an ACK for every packet received

    // set our minimum RTT variables to the maximum possible value
    // we can't do this as a member initializer until our VS has support for constexpr
    _currentMinRTT = std::numeric_limits<int>::max();

@@ -103,12 +101,11 @@ bool TCPVegasCC::onACK(SequenceNumber ack, p_high_resolution_clock::time_point r

    auto it = _sentPacketTimes.find(ack + 1);
    if (it != _sentPacketTimes.end()) {
        auto estimatedTimeout = _ewmaRTT + _rttVariance * 4;

        auto now = p_high_resolution_clock::now();
        auto sinceSend = duration_cast<microseconds>(now - it->second).count();

        if (sinceSend >= estimatedTimeout) {
        if (sinceSend >= estimatedTimeout()) {
            // break out of slow start, we've decided this is loss
            _slowStart = false;

@@ -215,6 +212,11 @@ void TCPVegasCC::performCongestionAvoidance(udt::SequenceNumber ack) {
    _numACKs = 0;
}


int TCPVegasCC::estimatedTimeout() const {
    return _ewmaRTT == -1 ? DEFAULT_SYN_INTERVAL : _ewmaRTT + _rttVariance * 4;
}

bool TCPVegasCC::isCongestionWindowLimited() {
    if (_slowStart) {
        return true;

@@ -27,14 +27,11 @@ public:
    TCPVegasCC();

    virtual bool onACK(SequenceNumber ackNum, p_high_resolution_clock::time_point receiveTime) override;
    virtual void onLoss(SequenceNumber rangeStart, SequenceNumber rangeEnd) override {};
    virtual void onTimeout() override {};

    virtual bool shouldNAK() override { return false; }
    virtual bool shouldACK2() override { return false; }
    virtual bool shouldProbe() override { return false; }

    virtual void onPacketSent(int wireSize, SequenceNumber seqNum, p_high_resolution_clock::time_point timePoint) override;

    virtual int estimatedTimeout() const override;

protected:
    virtual void performCongestionAvoidance(SequenceNumber ack);

@@ -65,7 +62,6 @@ private:
    int _duplicateACKCount { 0 }; // Counter for duplicate ACKs received

    int _slowStartOddAdjust { 0 }; // Marker for every window adjustment every other RTT in slow-start

};

}
Some files were not shown because too many files have changed in this diff.