Expose the drawcall user info (just a uint16 for now) and use it for skinning and blendshapes

sam gateau 2018-09-24 17:34:42 -07:00
commit 7cf7eaa7c8
36 changed files with 448 additions and 66 deletions
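
Taken together, the gpu/render-utils hunks below thread a 16-bit per-drawcall value from the CPU mesh payload (ModelMeshPartPayload::render) through gpu::Batch and DrawCallInfo into the vertex shaders, which read it back as _drawcallInfo.y to gate skinning and blendshape deformation per draw call. The self-contained sketch below restates that bit packing and the shader-side checks in plain C++ for clarity; the helper names (packDrawcallUserInfo, doBlendshape, doSkinning) and main() are illustrative only, and on the GPU side the real checks are the GLSL functions declared in MeshDeformer.slh.

    #include <cstdint>
    #include <cassert>

    // Bit layout used by the commit (mirrors MESH_DEFORMER_BLENDSHAPE_BIT and
    // MESH_DEFORMER_SKINNING_BIT in MeshDeformer.slh).
    constexpr uint16_t BLENDSHAPE_BIT = 1 << 0;
    constexpr uint16_t SKINNING_BIT   = 1 << 1;

    // CPU side (ModelMeshPartPayload::render): pack the per-part deformer state
    // into the spare uint16 that Batch::setDrawcallInfo() stores in DrawCallInfo.
    uint16_t packDrawcallUserInfo(bool isBlendShaped, bool isSkinned) {
        return static_cast<uint16_t>((isBlendShaped << 0) | (isSkinned << 1));
    }

    // GPU side (MeshDeformer.slh): the vertex shaders read the same value back
    // from _drawcallInfo.y and only run skinning / blendshape when the bit is set.
    bool doBlendshape(uint16_t meshKey) { return (meshKey & BLENDSHAPE_BIT) != 0; }
    bool doSkinning(uint16_t meshKey)   { return (meshKey & SKINNING_BIT)   != 0; }

    int main() {
        uint16_t info = packDrawcallUserInfo(/*isBlendShaped=*/true, /*isSkinned=*/true);
        assert(doBlendshape(info) && doSkinning(info));
        return 0;
    }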

View file

@ -9,6 +9,7 @@
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-feature android:name="android.hardware.sensor.accelerometer" android:required="true"/>
<uses-feature android:name="android.hardware.sensor.gyroscope" android:required="true"/>
@ -75,6 +76,15 @@
android:enabled="true"
android:exported="false"
android:process=":breakpad_uploader"/>
<receiver
android:name=".receiver.HeadsetStateReceiver"
android:enabled="true"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.HEADSET_PLUG" />
</intent-filter>
</receiver>
</application>
<uses-feature android:name="android.software.vr.mode" android:required="true"/>

View file

@ -355,5 +355,51 @@ JNIEXPORT void Java_io_highfidelity_hifiinterface_WebViewActivity_nativeProcessU
AndroidHelper::instance().processURL(QString::fromUtf8(nativeString));
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_fragment_SettingsFragment_updateHifiSetting(JNIEnv *env,
jobject instance,
jstring group_,
jstring key_,
jboolean value_) {
const char *c_group = env->GetStringUTFChars(group_, 0);
const char *c_key = env->GetStringUTFChars(key_, 0);
const QString group = QString::fromUtf8(c_group);
const QString key = QString::fromUtf8(c_key);
env->ReleaseStringUTFChars(group_, c_group);
env->ReleaseStringUTFChars(key_, c_key);
bool value = value_;
Setting::Handle<bool> setting { QStringList() << group << key , !value };
setting.set(value);
}
JNIEXPORT jboolean JNICALL
Java_io_highfidelity_hifiinterface_fragment_SettingsFragment_getHifiSettingBoolean(JNIEnv *env,
jobject instance,
jstring group_,
jstring key_,
jboolean defaultValue) {
const char *c_group = env->GetStringUTFChars(group_, 0);
const char *c_key = env->GetStringUTFChars(key_, 0);
const QString group = QString::fromUtf8(c_group);
const QString key = QString::fromUtf8(c_key);
env->ReleaseStringUTFChars(group_, c_group);
env->ReleaseStringUTFChars(key_, c_key);
Setting::Handle<bool> setting { QStringList() << group << key , defaultValue};
return setting.get();
}
JNIEXPORT void JNICALL
Java_io_highfidelity_hifiinterface_receiver_HeadsetStateReceiver_notifyHeadsetOn(JNIEnv *env,
jobject instance,
jboolean pluggedIn) {
AndroidHelper::instance().notifyHeadsetOn(pluggedIn);
}
}

View file

@ -13,6 +13,7 @@ package io.highfidelity.hifiinterface;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
@ -40,6 +41,7 @@ import java.util.HashMap;
import java.util.List;
import io.highfidelity.hifiinterface.fragment.WebViewFragment;
import io.highfidelity.hifiinterface.receiver.HeadsetStateReceiver;
/*import com.google.vr.cardboard.DisplaySynchronizer;
import com.google.vr.cardboard.DisplayUtils;
@ -55,6 +57,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
private static final int NORMAL_DPI = 160;
private Vibrator mVibrator;
private HeadsetStateReceiver headsetStateReceiver;
//public static native void handleHifiURL(String hifiURLString);
private native long nativeOnCreate(InterfaceActivity instance, AssetManager assetManager);
@ -151,6 +154,8 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
layoutParams.resolveLayoutDirection(View.LAYOUT_DIRECTION_RTL);
qtLayout.addView(webSlidingDrawer, layoutParams);
webSlidingDrawer.setVisibility(View.GONE);
headsetStateReceiver = new HeadsetStateReceiver();
}
@Override
@ -161,6 +166,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
} else {
nativeEnterBackground();
}
unregisterReceiver(headsetStateReceiver);
//gvrApi.pauseTracking();
}
@ -183,6 +189,7 @@ public class InterfaceActivity extends QtActivity implements WebViewFragment.OnW
nativeEnterForeground();
surfacesWorkaround();
keepInterfaceRunning = false;
registerReceiver(headsetStateReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
//gvrApi.resumeTracking();
}

View file

@ -33,6 +33,7 @@ import io.highfidelity.hifiinterface.fragment.FriendsFragment;
import io.highfidelity.hifiinterface.fragment.HomeFragment;
import io.highfidelity.hifiinterface.fragment.LoginFragment;
import io.highfidelity.hifiinterface.fragment.PolicyFragment;
import io.highfidelity.hifiinterface.fragment.SettingsFragment;
import io.highfidelity.hifiinterface.task.DownloadProfileImageTask;
public class MainActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener,
@ -80,6 +81,8 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
mPeopleMenuItem = mNavigationView.getMenu().findItem(R.id.action_people);
updateDebugMenu(mNavigationView.getMenu());
Toolbar toolbar = findViewById(R.id.toolbar);
toolbar.setTitleTextAppearance(this, R.style.HomeActionBarTitleStyle);
setSupportActionBar(toolbar);
@ -108,6 +111,16 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
}
}
private void updateDebugMenu(Menu menu) {
if (BuildConfig.DEBUG) {
for (int i=0; i < menu.size(); i++) {
if (menu.getItem(i).getItemId() == R.id.action_debug_settings) {
menu.getItem(i).setVisible(true);
}
}
}
}
private void loadFragment(String fragment) {
switch (fragment) {
case "Login":
@ -151,6 +164,13 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
loadFragment(fragment, getString(R.string.people), getString(R.string.tagFragmentPeople), true);
}
private void loadSettingsFragment() {
SettingsFragment fragment = SettingsFragment.newInstance();
loadFragment(fragment, getString(R.string.settings), getString(R.string.tagSettings), true);
}
private void loadFragment(Fragment fragment, String title, String tag, boolean addToBackStack) {
FragmentManager fragmentManager = getFragmentManager();
@ -241,6 +261,9 @@ public class MainActivity extends AppCompatActivity implements NavigationView.On
case R.id.action_people:
loadPeopleFragment();
return true;
case R.id.action_debug_settings:
loadSettingsFragment();
return true;
}
return false;
}

View file

@ -0,0 +1,63 @@
package io.highfidelity.hifiinterface.fragment;
import android.content.SharedPreferences;
import android.media.audiofx.AcousticEchoCanceler;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceFragment;
import android.support.annotation.Nullable;
import io.highfidelity.hifiinterface.R;
public class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener {
public native void updateHifiSetting(String group, String key, boolean value);
public native boolean getHifiSettingBoolean(String group, String key, boolean defaultValue);
private final String HIFI_SETTINGS_ANDROID_GROUP = "Android";
private final String HIFI_SETTINGS_AEC_KEY = "aec";
private final String PREFERENCE_KEY_AEC = "aec";
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
addPreferencesFromResource(R.xml.settings);
if (!AcousticEchoCanceler.isAvailable()) {
getPreferenceScreen().getPreferenceManager().findPreference("aec").setEnabled(false);
}
getPreferenceScreen().getSharedPreferences().edit().putBoolean(PREFERENCE_KEY_AEC,
getHifiSettingBoolean(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, false));
}
public static SettingsFragment newInstance() {
SettingsFragment fragment = new SettingsFragment();
return fragment;
}
@Override
public void onResume() {
super.onResume();
getPreferenceScreen().getSharedPreferences().registerOnSharedPreferenceChangeListener(this);
}
@Override
public void onPause() {
super.onPause();
getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this);
}
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
Preference pref = findPreference(key);
switch (key) {
case "aec":
updateHifiSetting(HIFI_SETTINGS_ANDROID_GROUP, HIFI_SETTINGS_AEC_KEY, sharedPreferences.getBoolean(key, false));
break;
default:
break;
}
}
}

View file

@ -0,0 +1,18 @@
package io.highfidelity.hifiinterface.receiver;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.util.Log;
public class HeadsetStateReceiver extends BroadcastReceiver {
private native void notifyHeadsetOn(boolean pluggedIn);
@Override
public void onReceive(Context context, Intent intent) {
AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
notifyHeadsetOn(audioManager.isWiredHeadsetOn());
}
}

View file

@ -9,4 +9,9 @@
android:id="@+id/action_people"
android:title="@string/people"
/>
<item
android:id="@+id/action_debug_settings"
android:title="@string/settings"
android:visible="false"
/>
</menu>

View file

@ -29,4 +29,9 @@
<string name="tagFragmentLogin">tagFragmentLogin</string>
<string name="tagFragmentPolicy">tagFragmentPolicy</string>
<string name="tagFragmentPeople">tagFragmentPeople</string>
<string name="tagSettings">tagSettings</string>
<string name="settings">Settings</string>
<string name="AEC">AEC</string>
<string name="acoustic_echo_cancellation">Acoustic Echo Cancellation</string>
<string name="settings_developer">Developer</string>
</resources>

View file

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<PreferenceScreen xmlns:android="http://schemas.android.com/apk/res/android">
<PreferenceCategory
android:title="@string/settings_developer"
android:key="pref_key_developer">
<SwitchPreference
android:key="aec"
android:title="@string/AEC"
android:summary="@string/acoustic_echo_cancellation" />
</PreferenceCategory>
</PreferenceScreen>

View file

@ -72,17 +72,17 @@ def jniFolder = new File(appDir, 'src/main/jniLibs/arm64-v8a')
def baseUrl = 'https://hifi-public.s3.amazonaws.com/dependencies/android/'
def breakpadDumpSymsDir = new File("${appDir}/build/tmp/breakpadDumpSyms")
def qtFile='qt-5.11.1_linux_armv8-libcpp_openssl.tgz'
def qtChecksum='f312c47cd8b8dbca824c32af4eec5e66'
def qtVersionId='nyCGcb91S4QbYeJhUkawO5x1lrLdSNB_'
def qtFile='qt-5.11.1_linux_armv8-libcpp_openssl_patched.tgz'
def qtChecksum='aa449d4bfa963f3bc9a9dfe558ba29df'
def qtVersionId='3S97HBM5G5Xw9EfE52sikmgdN3t6C2MN'
if (Os.isFamily(Os.FAMILY_MAC)) {
qtFile = 'qt-5.11.1_osx_armv8-libcpp_openssl.tgz'
qtChecksum='a0c8b394aec5b0fcd46714ca3a53278a'
qtVersionId='QNa.lwNJaPc0eGuIL.xZ8ebeTuLL7rh8'
qtFile = 'qt-5.11.1_osx_armv8-libcpp_openssl_patched.tgz'
qtChecksum='c83cc477c08a892e00c71764dca051a0'
qtVersionId='OxBD7iKINv1HbyOXmAmDrBb8AF3N.Kup'
} else if (Os.isFamily(Os.FAMILY_WINDOWS)) {
qtFile = 'qt-5.11.1_win_armv8-libcpp_openssl.tgz'
qtChecksum='d80aed4233ce9e222aae8376e7a94bf9'
qtVersionId='iDVXu0i3WEXRFIxQCtzcJ2XuKrE8RIqB'
qtFile = 'qt-5.11.1_win_armv8-libcpp_openssl_patched.tgz'
qtChecksum='0582191cc55431aa4f660848a542883e'
qtVersionId='JfWM0P_Mz5Qp0LwpzhrsRwN3fqlLSFeT'
}
def packages = [

View file

@ -48,35 +48,11 @@ Item {
spacing: 4; x: 4; y: 4;
StatText {
text: "State Machines:---------------------------------------------------------------------------"
text: root.positionText
}
ListView {
width: firstCol.width
height: root.animStateMachines.length * 15
visible: root.animStateMachines.length > 0;
model: root.animStateMachines
delegate: StatText {
text: {
return modelData;
}
}
}
}
}
Rectangle {
width: secondCol.width + 8
height: secondCol.height + 8
color: root.bgColor;
Column {
id: secondCol
spacing: 4; x: 4; y: 4;
StatText {
text: "Anim Vars:--------------------------------------------------------------------------------"
}
ListView {
width: secondCol.width
height: root.animVars.length * 15
@ -104,6 +80,36 @@ Item {
}
}
Rectangle {
width: secondCol.width + 8
height: secondCol.height + 8
color: root.bgColor;
Column {
id: secondCol
spacing: 4; x: 4; y: 4;
StatText {
text: root.rotationText
}
StatText {
text: "State Machines:---------------------------------------------------------------------------"
}
ListView {
width: firstCol.width
height: root.animStateMachines.length * 15
visible: root.animStateMachines.length > 0;
model: root.animStateMachines
delegate: StatText {
text: {
return modelData;
}
}
}
}
}
Rectangle {
width: thirdCol.width + 8
height: thirdCol.height + 8
@ -113,10 +119,12 @@ Item {
id: thirdCol
spacing: 4; x: 4; y: 4;
StatText {
text: root.velocityText
}
StatText {
text: "Alpha Values:--------------------------------------------------------------------------"
}
ListView {
width: thirdCol.width
height: root.animAlphaValues.length * 15

View file

@ -10,6 +10,7 @@
//
#include "AndroidHelper.h"
#include <QDebug>
#include <AudioClient.h>
#include "Application.h"
#if defined(qApp)
@ -18,6 +19,7 @@
#define qApp (static_cast<Application*>(QCoreApplication::instance()))
AndroidHelper::AndroidHelper() {
qRegisterMetaType<QAudio::Mode>("QAudio::Mode");
}
AndroidHelper::~AndroidHelper() {
@ -56,3 +58,12 @@ void AndroidHelper::processURL(const QString &url) {
qApp->acceptURL(url);
}
}
void AndroidHelper::notifyHeadsetOn(bool pluggedIn) {
#if defined (Q_OS_ANDROID)
auto audioClient = DependencyManager::get<AudioClient>();
if (audioClient) {
QMetaObject::invokeMethod(audioClient.data(), "setHeadsetPluggedIn", Q_ARG(bool, pluggedIn));
}
#endif
}

View file

@ -29,6 +29,7 @@ public:
void performHapticFeedback(int duration);
void processURL(const QString &url);
void notifyHeadsetOn(bool pluggedIn);
AndroidHelper(AndroidHelper const&) = delete;
void operator=(AndroidHelper const&) = delete;

View file

@ -42,6 +42,29 @@ void AnimStats::updateStats(bool force) {
auto myAvatar = avatarManager->getMyAvatar();
auto debugAlphaMap = myAvatar->getSkeletonModel()->getRig().getDebugAlphaMap();
glm::vec3 position = myAvatar->getWorldPosition();
glm::quat rotation = myAvatar->getWorldOrientation();
glm::vec3 velocity = myAvatar->getWorldVelocity();
_positionText = QString("Position: (%1, %2, %3)").
arg(QString::number(position.x, 'f', 2)).
arg(QString::number(position.y, 'f', 2)).
arg(QString::number(position.z, 'f', 2));
emit positionTextChanged();
glm::vec3 eulerRotation = safeEulerAngles(rotation);
_rotationText = QString("Heading: %1").
arg(QString::number(glm::degrees(eulerRotation.y), 'f', 2));
emit rotationTextChanged();
// transform velocity into rig coordinate frame. z forward.
glm::vec3 localVelocity = Quaternions::Y_180 * glm::inverse(rotation) * velocity;
_velocityText = QString("Local Vel: (%1, %2, %3)").
arg(QString::number(localVelocity.x, 'f', 2)).
arg(QString::number(localVelocity.y, 'f', 2)).
arg(QString::number(localVelocity.z, 'f', 2));
emit velocityTextChanged();
// update animation debug alpha values
QStringList newAnimAlphaValues;
qint64 now = usecTimestampNow();

View file

@ -19,6 +19,9 @@ class AnimStats : public QQuickItem {
Q_PROPERTY(QStringList animAlphaValues READ animAlphaValues NOTIFY animAlphaValuesChanged)
Q_PROPERTY(QStringList animVars READ animVars NOTIFY animVarsChanged)
Q_PROPERTY(QStringList animStateMachines READ animStateMachines NOTIFY animStateMachinesChanged)
Q_PROPERTY(QString positionText READ positionText NOTIFY positionTextChanged)
Q_PROPERTY(QString rotationText READ rotationText NOTIFY rotationTextChanged)
Q_PROPERTY(QString velocityText READ velocityText NOTIFY velocityTextChanged)
public:
static AnimStats* getInstance();
@ -27,9 +30,13 @@ public:
void updateStats(bool force = false);
QStringList animAlphaValues() { return _animAlphaValues; }
QStringList animVars() { return _animVarsList; }
QStringList animStateMachines() { return _animStateMachines; }
QStringList animAlphaValues() const { return _animAlphaValues; }
QStringList animVars() const { return _animVarsList; }
QStringList animStateMachines() const { return _animStateMachines; }
QString positionText() const { return _positionText; }
QString rotationText() const { return _rotationText; }
QString velocityText() const { return _velocityText; }
public slots:
void forceUpdateStats() { updateStats(true); }
@ -39,6 +46,9 @@ signals:
void animAlphaValuesChanged();
void animVarsChanged();
void animStateMachinesChanged();
void positionTextChanged();
void rotationTextChanged();
void velocityTextChanged();
private:
QStringList _animAlphaValues;
@ -50,6 +60,10 @@ private:
std::map<QString, qint64> _animVarChangedTimers; // last time animVar value has changed.
QStringList _animStateMachines;
QString _positionText;
QString _rotationText;
QString _velocityText;
};
#endif // hifi_AnimStats_h

View file

@ -88,6 +88,10 @@ const AnimPoseVec& AnimStateMachine::evaluate(const AnimVariantMap& animVars, co
processOutputJoints(triggersOut);
context.addStateMachineInfo(_id, _currentState->getID(), _previousState->getID(), _duringInterp, _alpha);
if (_duringInterp) {
// hack: add the previous state to the debug alpha map, with parens around its name.
context.setDebugAlpha(QString("(%1)").arg(_previousState->getID()), 1.0f - _alpha, AnimNodeType::Clip);
}
return _poses;
}

View file

@ -140,14 +140,19 @@ std::map<QString, QString> AnimVariantMap::toDebugMap() const {
result[pair.first] = QString::number(pair.second.getFloat(), 'f', 3);
break;
case AnimVariant::Type::Vec3: {
// To prevent filling up debug stats, don't show vec3 values
/*
glm::vec3 value = pair.second.getVec3();
result[pair.first] = QString("(%1, %2, %3)").
arg(QString::number(value.x, 'f', 3)).
arg(QString::number(value.y, 'f', 3)).
arg(QString::number(value.z, 'f', 3));
*/
break;
}
case AnimVariant::Type::Quat: {
// To prevent filling up the anim stats, don't show quat values
/*
glm::quat value = pair.second.getQuat();
result[pair.first] = QString("(%1, %2, %3, %4)").
arg(QString::number(value.x, 'f', 3)).
@ -155,10 +160,14 @@ std::map<QString, QString> AnimVariantMap::toDebugMap() const {
arg(QString::number(value.z, 'f', 3)).
arg(QString::number(value.w, 'f', 3));
break;
*/
}
case AnimVariant::Type::String:
// To prevent filling up anim stats, don't show string values
/*
result[pair.first] = pair.second.getString();
break;
*/
default:
assert(("invalid AnimVariant::Type", false));
}

View file

@ -53,7 +53,6 @@
#include "AudioHelpers.h"
#if defined(Q_OS_ANDROID)
#define VOICE_RECOGNITION "voicerecognition"
#include <QtAndroidExtras/QAndroidJniObject>
#endif
@ -210,6 +209,7 @@ AudioClient::AudioClient() :
_positionGetter(DEFAULT_POSITION_GETTER),
#if defined(Q_OS_ANDROID)
_checkInputTimer(this),
_isHeadsetPluggedIn(false),
#endif
_orientationGetter(DEFAULT_ORIENTATION_GETTER) {
// avoid putting a lock in the device callback
@ -461,9 +461,14 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
#if defined (Q_OS_ANDROID)
if (mode == QAudio::AudioInput) {
Setting::Handle<bool> enableAEC(SETTING_AEC_KEY, false);
bool aecEnabled = enableAEC.get();
auto audioClient = DependencyManager::get<AudioClient>();
bool headsetOn = audioClient? audioClient->isHeadsetPluggedIn() : false;
auto inputDevices = QAudioDeviceInfo::availableDevices(QAudio::AudioInput);
for (auto inputDevice : inputDevices) {
if (inputDevice.deviceName() == VOICE_RECOGNITION) {
if (((headsetOn || !aecEnabled) && inputDevice.deviceName() == VOICE_RECOGNITION) ||
((!headsetOn && aecEnabled) && inputDevice.deviceName() == VOICE_COMMUNICATION)) {
return inputDevice;
}
}
@ -1640,6 +1645,29 @@ void AudioClient::checkInputTimeout() {
#endif
}
void AudioClient::setHeadsetPluggedIn(bool pluggedIn) {
#if defined(Q_OS_ANDROID)
if (pluggedIn == !_isHeadsetPluggedIn && !_inputDeviceInfo.isNull()) {
QAndroidJniObject brand = QAndroidJniObject::getStaticObjectField<jstring>("android/os/Build", "BRAND");
// some Samsung phones need more time to shut down the previous input device
if (brand.toString().contains("samsung", Qt::CaseInsensitive)) {
switchInputToAudioDevice(QAudioDeviceInfo(), true);
QThread::msleep(200);
}
Setting::Handle<bool> enableAEC(SETTING_AEC_KEY, false);
bool aecEnabled = enableAEC.get();
if ((pluggedIn || !aecEnabled) && _inputDeviceInfo.deviceName() != VOICE_RECOGNITION) {
switchAudioDevice(QAudio::AudioInput, VOICE_RECOGNITION);
} else if (!pluggedIn && aecEnabled && _inputDeviceInfo.deviceName() != VOICE_COMMUNICATION) {
switchAudioDevice(QAudio::AudioInput, VOICE_COMMUNICATION);
}
}
_isHeadsetPluggedIn = pluggedIn;
#endif
}
void AudioClient::outputNotify() {
int recentUnfulfilled = _audioOutputIODevice.getRecentUnfulfilledReads();
if (recentUnfulfilled > 0) {

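The AudioClient hunks above change how the Android audio input device is chosen: the echo-cancelling "voicecommunication" device is used only when AEC is enabled in settings and no wired headset is plugged in; in every other case the plain "voicerecognition" device is kept, and setHeadsetPluggedIn re-applies the same rule when the headset state changes. A minimal standalone restatement of that rule, with pickAndroidInputDevice as a hypothetical helper rather than the actual AudioClient API:

    #include <string>
    #include <cassert>

    // Restates the conditions from defaultAudioDeviceForMode / setHeadsetPluggedIn:
    // AEC routing ("voicecommunication") only applies with AEC on and no headset.
    std::string pickAndroidInputDevice(bool headsetPluggedIn, bool aecEnabled) {
        if (!headsetPluggedIn && aecEnabled) {
            return "voicecommunication";   // VOICE_COMMUNICATION
        }
        return "voicerecognition";         // VOICE_RECOGNITION
    }

    int main() {
        assert(pickAndroidInputDevice(true,  true)  == "voicerecognition");
        assert(pickAndroidInputDevice(false, false) == "voicerecognition");
        assert(pickAndroidInputDevice(false, true)  == "voicecommunication");
        return 0;
    }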
View file

@ -64,6 +64,13 @@
#pragma warning( pop )
#endif
#if defined (Q_OS_ANDROID)
#define VOICE_RECOGNITION "voicerecognition"
#define VOICE_COMMUNICATION "voicecommunication"
#define SETTING_AEC_KEY "Android/aec"
#endif
class QAudioInput;
class QAudioOutput;
class QIODevice;
@ -169,6 +176,10 @@ public:
static QString getWinDeviceName(wchar_t* guid);
#endif
#if defined(Q_OS_ANDROID)
bool isHeadsetPluggedIn() { return _isHeadsetPluggedIn; }
#endif
public slots:
void start();
void stop();
@ -217,6 +228,9 @@ public slots:
bool switchAudioDevice(QAudio::Mode mode, const QAudioDeviceInfo& deviceInfo = QAudioDeviceInfo());
bool switchAudioDevice(QAudio::Mode mode, const QString& deviceName);
// Qt opensles plugin is not able to detect when the headset is plugged in
void setHeadsetPluggedIn(bool pluggedIn);
float getInputVolume() const { return (_audioInput) ? (float)_audioInput->volume() : 0.0f; }
void setInputVolume(float volume, bool emitSignal = true);
void setReverb(bool reverb);
@ -278,6 +292,7 @@ private:
#ifdef Q_OS_ANDROID
QTimer _checkInputTimer;
long _inputReadsSinceLastCheck = 0l;
bool _isHeadsetPluggedIn;
#endif
class Gate {

View file

@ -98,6 +98,7 @@ void Batch::clear() {
_name = nullptr;
_invalidModel = true;
_currentModel = Transform();
_drawcallUserInfo = 0;
_projectionJitter = glm::vec2(0.0f);
_enableStereo = true;
_enableSkybox = false;
@ -112,6 +113,10 @@ size_t Batch::cacheData(size_t size, const void* data) {
return offset;
}
void Batch::setDrawcallInfo(uint16_t user) {
_drawcallUserInfo = user;
}
void Batch::draw(Primitive primitiveType, uint32 numVertices, uint32 startVertex) {
ADD_COMMAND(draw);
@ -545,7 +550,8 @@ void Batch::captureDrawCallInfoImpl() {
}
auto& drawCallInfos = getDrawCallInfoBuffer();
drawCallInfos.emplace_back((uint16)_objects.size() - 1);
drawCallInfos.emplace_back((uint16)_objects.size() - 1, _drawcallUserInfo);
_drawcallUserInfo = 0;
}
void Batch::captureDrawCallInfo() {

View file

@ -48,6 +48,7 @@ public:
using Index = uint16_t;
DrawCallInfo(Index idx) : index(idx) {}
DrawCallInfo(Index idx, Index user) : index(idx), unused(user) {}
Index index { 0 };
uint16_t unused { 0 }; // Reserved space for later
@ -111,6 +112,9 @@ public:
void enableSkybox(bool enable = true);
bool isSkyboxEnabled() const;
// Push user Drawcall info
void setDrawcallInfo(uint16 user);
// Drawcalls
void draw(Primitive primitiveType, uint32 numVertices, uint32 startVertex = 0);
void drawIndexed(Primitive primitiveType, uint32 numIndices, uint32 startIndex = 0);
@ -499,6 +503,8 @@ public:
NamedBatchDataMap _namedData;
uint16_t _drawcallUserInfo{ 0 };
glm::vec2 _projectionJitter{ 0.0f, 0.0f };
bool _enableStereo{ true };
bool _enableSkybox { false };

View file

@ -10,11 +10,8 @@
<@def MESH_DEFORMER_SLH@>
// MeshDeformer.slh
<@func declareMeshDeformer(USE_NORMAL, USE_TANGENT, USE_SKINNING, USE_DUAL_QUATERNION, USE_BLENDSHAPE)@>
<@include LightingModel.slh@>
<@if USE_SKINNING@>
<@include Skinning.slh@>
<$declareUseDualQuaternionSkinning($USE_DUAL_QUATERNION$) $>
@ -33,10 +30,10 @@ void evalMeshDeformer(vec4 inPosition, out vec4 outPosition
, vec3 inTangent, out vec3 outTangent
<@endif@>
<@if USE_SKINNING@>
,ivec4 skinClusterIndex, vec4 skinClusterWeight
, bool isSkinningEnabled, ivec4 skinClusterIndex, vec4 skinClusterWeight
<@endif@>
<@if USE_BLENDSHAPE@>
, int vertexIndex
, bool isBlendshapeEnabled, int vertexIndex
<@endif@>
) {
@ -49,7 +46,7 @@ void evalMeshDeformer(vec4 inPosition, out vec4 outPosition
<@endif@>
<@if USE_BLENDSHAPE@>
if (bool(isBlendshapeEnabled())) {
if (isBlendshapeEnabled) {
<@if USE_TANGENT@>
applyBlendshapeOffset(vertexIndex, inPosition, _deformedPosition, inNormal, _deformedNormal, inTangent, _deformedTangent);
@ -64,7 +61,7 @@ void evalMeshDeformer(vec4 inPosition, out vec4 outPosition
<@endif@>
<@if USE_SKINNING@>
if (bool(isSkinningEnabled())) {
if (isSkinningEnabled) {
<@if USE_TANGENT@>
skinPositionNormalTangent(inSkinClusterIndex, inSkinClusterWeight, _deformedPosition, _deformedNormal, _deformedTangent, _deformedPosition, _deformedNormal, _deformedTangent);
<@else@>
@ -88,4 +85,36 @@ void evalMeshDeformer(vec4 inPosition, out vec4 outPosition
<@endfunc@>
<@func declareMeshDeformerActivation(USE_SKINNING, USE_BLENDSHAPE, USE_LIGHTING_MODEL)@>
const BITFIELD MESH_DEFORMER_BLENDSHAPE_BIT = 0x00000001;
const BITFIELD MESH_DEFORMER_SKINNING_BIT = 0x00000002;
<@if USE_LIGHTING_MODEL@>
<@include LightingModel.slh@>
<@endif@>
<@if USE_BLENDSHAPE@>
bool meshDeformer_doBlendshape(int meshKey) {
<@if USE_LIGHTING_MODEL@>
return ((meshKey & MESH_DEFORMER_BLENDSHAPE_BIT) != 0) & bool(isBlendshapeEnabled());
<@else@>
return ((meshKey & MESH_DEFORMER_BLENDSHAPE_BIT) != 0);
<@endif@>
}
<@endif@>
<@if USE_SKINNING@>
bool meshDeformer_doSkinning(int meshKey) {
<@if USE_LIGHTING_MODEL@>
return ((meshKey & MESH_DEFORMER_SKINNING_BIT) != 0) & bool(isSkinningEnabled());
<@else@>
return ((meshKey & MESH_DEFORMER_SKINNING_BIT) != 0);
<@endif@>
}
<@endif@>
<@endfunc@>
<@endif@> // if not MESH_DEFORMER_SLH

View file

@ -424,6 +424,11 @@ void ModelMeshPartPayload::render(RenderArgs* args) {
//Bind the index buffer and vertex buffer and Blend shapes if needed
bindMesh(batch);
auto drawcallInfo = (uint16_t) ((_isBlendShaped << 0) | (_isSkinned << 1));
if (drawcallInfo) {
batch.setDrawcallInfo(drawcallInfo);
}
// apply material properties
if (args->_renderMode != render::Args::RenderMode::SHADOW_RENDER_MODE) {
RenderPipelines::bindMaterial(!_drawMaterials.empty() ? _drawMaterials.top().material : DEFAULT_MATERIAL, batch, args->_enableTexturing);

View file

@ -26,6 +26,12 @@ class Model;
class MeshPartPayload {
public:
struct DrawcallInfo {
uint32_t _geometryKey { 0 };
uint32_t _spare[3];
};
MeshPartPayload() {}
MeshPartPayload(const std::shared_ptr<const graphics::Mesh>& mesh, int partIndex, graphics::MaterialPointer material);

View file

@ -1533,12 +1533,11 @@ void Model::setBlendedVertices(int blendNumber, const QVector<BlendshapeOffset>&
for (int i = 0; i < fbxGeometry.meshes.size(); i++) {
const FBXMesh& mesh = fbxGeometry.meshes.at(i);
auto meshBlendshapeOffsets = _blendshapeOffsets.find(i);
if (mesh.blendshapes.isEmpty() || meshBlendshapeOffsets == _blendshapeOffsets.end()) {
if (mesh.blendshapes.isEmpty() || meshBlendshapeOffsets == _blendshapeOffsets.end() || meshBlendshapeOffsets == _blendshapeOffsets.end()) {
continue;
}
const auto& buffer = _blendshapeBuffers.find(i);
assert(buffer != _blendshapeBuffers.end());
const auto blendshapeOffsetSize = meshBlendshapeOffsets->second.size() * sizeof(BlendshapeOffset);
buffer->second->setData(blendshapeOffsetSize, (gpu::Byte*) blendshapeOffsets.constData() + index * sizeof(BlendshapeOffset));

View file

@ -20,6 +20,10 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(1, _SCRIBE_NULL, 1, 0, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include LightingModel.slh@>
<@include render-utils/ShaderConstants.h@>
@ -32,7 +36,9 @@ layout(location=RENDER_UTILS_ATTR_COLOR) out vec4 _color;
void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
vec3 deformedNormal = vec3(0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// pass along the color
_color.rgb = color_sRGBToLinear(inColor.rgb);

View file

@ -20,6 +20,7 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(1, _SCRIBE_NULL, 1, 1, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include render-utils/ShaderConstants.h@>
@ -33,7 +34,9 @@ void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
vec3 deformedNormal = vec3(0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// pass along the color
_color.rgb = color_sRGBToLinear(inColor.rgb);

View file

@ -20,6 +20,7 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(1, 1, 1, _SCRIBE_NULL, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include render-utils/ShaderConstants.h@>
@ -34,7 +35,9 @@ void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
vec3 deformedNormal = vec3(0.0, 0.0, 0.0);
vec3 deformedTangent = vec3(0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal, inTangent.xyz, deformedTangent, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal, inTangent.xyz, deformedTangent,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// pass along the color
_color.rgb = color_sRGBToLinear(inColor.rgb);

View file

@ -21,6 +21,7 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(1, 1, 1, 1, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include render-utils/ShaderConstants.h@>
@ -35,7 +36,9 @@ void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
vec3 deformedNormal = vec3(0.0, 0.0, 0.0);
vec3 deformedTangent = vec3(0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal, inTangent.xyz, deformedTangent, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition, inNormal.xyz, deformedNormal, inTangent.xyz, deformedTangent,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// pass along the color
_color.rgb = color_sRGBToLinear(inColor.rgb);

View file

@ -16,12 +16,15 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(_SCRIBE_NULL, _SCRIBE_NULL, 1, _SCRIBE_NULL, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include render-utils/ShaderConstants.h@>
void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// standard transform
TransformCamera cam = getTransformCamera();

View file

@ -16,12 +16,15 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(_SCRIBE_NULL, _SCRIBE_NULL, 1, 1, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include render-utils/ShaderConstants.h@>
void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// standard transform
TransformCamera cam = getTransformCamera();

View file

@ -19,6 +19,7 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(_SCRIBE_NULL, _SCRIBE_NULL, 1, _SCRIBE_NULL, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include render-utils/ShaderConstants.h@>
@ -26,7 +27,9 @@ layout(location=RENDER_UTILS_ATTR_POSITION_WS) out vec4 _positionWS;
void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// standard transform
TransformCamera cam = getTransformCamera();

View file

@ -19,6 +19,7 @@
<@include MeshDeformer.slh@>
<$declareMeshDeformer(_SCRIBE_NULL, _SCRIBE_NULL, 1, 1, 1)$>
<$declareMeshDeformerActivation(1, 1, 1)$>
<@include render-utils/ShaderConstants.h@>
@ -26,7 +27,9 @@ layout(location=RENDER_UTILS_ATTR_POSITION_WS) out vec4 _positionWS;
void main(void) {
vec4 deformedPosition = vec4(0.0, 0.0, 0.0, 0.0);
evalMeshDeformer(inPosition, deformedPosition, inSkinClusterIndex, inSkinClusterWeight, gl_VertexID);
evalMeshDeformer(inPosition, deformedPosition,
meshDeformer_doSkinning(_drawcallInfo.y), inSkinClusterIndex, inSkinClusterWeight,
meshDeformer_doBlendshape(_drawcallInfo.y), gl_VertexID);
// standard transform
TransformCamera cam = getTransformCamera();

View file

@ -2134,9 +2134,7 @@ var PropertiesTool = function (opts) {
var onWebEventReceived = function(data) {
try {
data = JSON.parse(data);
}
catch(e) {
print('Edit.js received web event that was not valid json.');
} catch(e) {
return;
}
var i, properties, dY, diff, newPosition;

View file

@ -164,7 +164,10 @@ function loaded() {
selectedEntities.forEach(function(entityID) {
if (selection.indexOf(entityID) === -1) {
entitiesByID[entityID].el.className = '';
let entity = entitiesByID[entityID];
if (entity !== undefined) {
entity.el.className = '';
}
}
});
@ -388,15 +391,18 @@ function loaded() {
let notFound = false;
selectedEntities.forEach(function(id) {
entitiesByID[id].el.className = '';
let entity = entitiesByID[id];
if (entity !== undefined) {
entity.el.className = '';
}
});
selectedEntities = [];
for (let i = 0; i < selectedIDs.length; i++) {
let id = selectedIDs[i];
selectedEntities.push(id);
if (id in entitiesByID) {
let entity = entitiesByID[id];
let entity = entitiesByID[id];
if (entity !== undefined) {
entity.el.className = 'selected';
} else {
notFound = true;

View file

@ -267,7 +267,6 @@ GridTool = function(opts) {
try {
data = JSON.parse(data);
} catch (e) {
print("gridTool.js: Error parsing JSON: " + e.name + " data " + data);
return;
}