Mirror of https://github.com/overte-org/overte.git (synced 2025-04-17 08:56:36 +02:00)

Commit a792bcc3d3: Merge branch 'master' of github.com:highfidelity/hifi into commerce_upgrades_1

30 changed files with 1358 additions and 867 deletions
Changed paths:

assignment-client/src/entities
interface
    resources/qml
    src
libraries
    animation/src
    audio/src
    entities-renderer/src
    entities/src
    gpu-gles/src/gpu/gl
        GLBackend.cpp, GLBackend.h, GLBackendOutput.cpp, GLBackendPipeline.cpp, GLBackendTransform.cpp, GLTexelFormat.cpp
    graphics/src/graphics
    model-networking/src/model-networking
    qml/src/qml
    render-utils/src
    shared/src
scripts/system
@@ -442,12 +442,16 @@ bool EntityTreeSendThread::traverseTreeAndBuildNextPacketPayload(EncodeBitstream
PrioritizedEntity queuedItem = _sendQueue.top();
EntityItemPointer entity = queuedItem.getEntity();
if (entity) {
// Only send entities that match the jsonFilters, but keep track of everything we've tried to send so we don't try to send it again
const QUuid& entityID = entity->getID();
// Only send entities that match the jsonFilters, but keep track of everything we've tried to send so we don't try to send it again;
// also send if we previously matched since this represents change to a matched item.
bool entityMatchesFilters = entity->matchesJSONFilters(jsonFilters);
if (entityMatchesFilters || entityNodeData->isEntityFlaggedAsExtra(entity->getID())) {
bool entityPreviouslyMatchedFilter = entityNodeData->sentFilteredEntity(entityID);

if (entityMatchesFilters || entityNodeData->isEntityFlaggedAsExtra(entityID) || entityPreviouslyMatchedFilter) {
if (!jsonFilters.isEmpty() && entityMatchesFilters) {
// Record explicitly filtered-in entity so that extra entities can be flagged.
entityNodeData->insertSentFilteredEntity(entity->getID());
entityNodeData->insertSentFilteredEntity(entityID);
}
OctreeElement::AppendState appendEntityState = entity->appendEntityData(&_packetData, params, _extraEncodeData);

@@ -458,6 +462,10 @@ bool EntityTreeSendThread::traverseTreeAndBuildNextPacketPayload(EncodeBitstream
params.stopReason = EncodeBitstreamParams::DIDNT_FIT;
break;
}

if (entityPreviouslyMatchedFilter && !entityMatchesFilters) {
entityNodeData->removeSentFilteredEntity(entityID);
}
++_numEntities;
}
if (queuedItem.shouldForceRemove()) {

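For reference, the two hunks above make the send loop remember, per client, which entities were sent because they matched the JSON filters: a previously matched entity that stops matching still gets one more update and is then dropped from the per-client set. A minimal, self-contained sketch of that bookkeeping (stand-in type and method names; only QSet/QUuid are real, and the real code also handles the flagged-as-extra and packet-didn't-fit paths shown in the diff):

#include <QSet>
#include <QUuid>

// Stand-in for the per-client bookkeeping kept by EntityNodeData in the real code.
struct SentFilteredTracker {
    QSet<QUuid> sentFilteredEntities;

    // Decide whether an entity should be encoded this pass and keep the set in sync.
    bool shouldSend(const QUuid& entityID, bool matchesFilters) {
        bool previouslyMatched = sentFilteredEntities.contains(entityID);
        if (matchesFilters) {
            sentFilteredEntities.insert(entityID);   // remember the match for later passes
            return true;
        }
        if (previouslyMatched) {
            // Send one final update so the client sees the change, then forget the entity.
            sentFilteredEntities.remove(entityID);
            return true;
        }
        return false;
    }
};
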
@@ -14,9 +14,9 @@ import Qt.labs.settings 1.0

import "./hifi/audio" as HifiAudio

Hifi.AvatarInputs {
Item {
id: root;
objectName: "AvatarInputs"
objectName: "AvatarInputsBar"
property int modality: Qt.NonModal
width: audio.width;
height: audio.height;

@@ -26,7 +26,7 @@ Hifi.AvatarInputs {

HifiAudio.MicBar {
id: audio;
visible: root.showAudioTools;
visible: AvatarInputs.showAudioTools;
standalone: true;
dragTarget: parent;
}

@@ -2717,10 +2717,12 @@ void Application::onDesktopRootContextCreated(QQmlContext* surfaceContext) {

void Application::onDesktopRootItemCreated(QQuickItem* rootItem) {
Stats::show();
AvatarInputs::show();
auto surfaceContext = DependencyManager::get<OffscreenUi>()->getSurfaceContext();
surfaceContext->setContextProperty("Stats", Stats::getInstance());
surfaceContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance());

auto offscreenUi = DependencyManager::get<OffscreenUi>();
auto qml = PathUtils::qmlUrl("AvatarInputsBar.qml");
offscreenUi->show(qml, "AvatarInputsBar");
}

void Application::updateCamera(RenderArgs& renderArgs, float deltaTime) {

@@ -16,19 +16,19 @@
#include "Application.h"
#include "Menu.h"

HIFI_QML_DEF(AvatarInputs)

static AvatarInputs* INSTANCE{ nullptr };

Setting::Handle<bool> showAudioToolsSetting { QStringList { "AvatarInputs", "showAudioTools" }, false };

AvatarInputs* AvatarInputs::getInstance() {
Q_ASSERT(INSTANCE);
if (!INSTANCE) {
INSTANCE = new AvatarInputs();
Q_ASSERT(INSTANCE);
}
return INSTANCE;
}

AvatarInputs::AvatarInputs(QQuickItem* parent) : QQuickItem(parent) {
INSTANCE = this;
AvatarInputs::AvatarInputs(QObject* parent) : QObject(parent) {
_showAudioTools = showAudioToolsSetting.get();
}

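In the hunk above, AvatarInputs stops being a QQuickItem that registers itself from its constructor and becomes a QObject whose getInstance() lazily creates the instance on first use (not thread-safe, which matches creation on the UI thread). The bare shape of that pattern, with a stand-in class and the Qt meta-object macros and settings handle left out so it stands alone:

class LazyInstance {
public:
    static LazyInstance* getInstance() {
        if (!s_instance) {
            s_instance = new LazyInstance();   // created on first use instead of by a QQuickItem constructor
        }
        return s_instance;
    }
private:
    LazyInstance() = default;
    static LazyInstance* s_instance;
};

LazyInstance* LazyInstance::s_instance = nullptr;
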
@@ -19,7 +19,7 @@ public: \
private: \
type _##name{ initialValue };

class AvatarInputs : public QQuickItem {
class AvatarInputs : public QObject {
Q_OBJECT
HIFI_QML_DECL

@@ -32,7 +32,7 @@ class AvatarInputs : public QQuickItem {
public:
static AvatarInputs* getInstance();
Q_INVOKABLE float loudnessToAudioLevel(float loudness);
AvatarInputs(QQuickItem* parent = nullptr);
AvatarInputs(QObject* parent = nullptr);
void update();
bool showAudioTools() const { return _showAudioTools; }

@@ -37,7 +37,8 @@ Base3DOverlay::Base3DOverlay(const Base3DOverlay* base3DOverlay) :
_ignoreRayIntersection(base3DOverlay->_ignoreRayIntersection),
_drawInFront(base3DOverlay->_drawInFront),
_drawHUDLayer(base3DOverlay->_drawHUDLayer),
_isGrabbable(base3DOverlay->_isGrabbable)
_isGrabbable(base3DOverlay->_isGrabbable),
_isVisibleInSecondaryCamera(base3DOverlay->_isVisibleInSecondaryCamera)
{
setTransform(base3DOverlay->getTransform());
}

@@ -142,6 +143,13 @@ void Base3DOverlay::setProperties(const QVariantMap& originalProperties) {
setIsGrabbable(isGrabbable.toBool());
}

auto isVisibleInSecondaryCamera = properties["isVisibleInSecondaryCamera"];
if (isVisibleInSecondaryCamera.isValid()) {
bool value = isVisibleInSecondaryCamera.toBool();
setIsVisibleInSecondaryCamera(value);
needRenderItemUpdate = true;
}

if (properties["position"].isValid()) {
setLocalPosition(vec3FromVariant(properties["position"]));
needRenderItemUpdate = true;

@@ -221,6 +229,8 @@ void Base3DOverlay::setProperties(const QVariantMap& originalProperties) {
* @property {boolean} drawInFront=false - If <code>true</code>, the overlay is rendered in front of other overlays that don't
* have <code>drawInFront</code> set to <code>true</code>, and in front of entities.
* @property {boolean} grabbable=false - Signal to grabbing scripts whether or not this overlay can be grabbed.
* @property {boolean} isVisibleInSecondaryCamera=false - If <code>true</code>, the overlay is rendered in secondary
* camera views.
* @property {Uuid} parentID=null - The avatar, entity, or overlay that the overlay is parented to.
* @property {number} parentJointIndex=65535 - Integer value specifying the skeleton joint that the overlay is attached to if
* <code>parentID</code> is an avatar skeleton. A value of <code>65535</code> means "no joint".

@@ -259,6 +269,9 @@ QVariant Base3DOverlay::getProperty(const QString& property) {
if (property == "grabbable") {
return _isGrabbable;
}
if (property == "isVisibleInSecondaryCamera") {
return _isVisibleInSecondaryCamera;
}
if (property == "parentID") {
return getParentID();
}

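The Base3DOverlay hunks above add an isVisibleInSecondaryCamera flag that is copied by the copy constructor, read out of the script-supplied QVariantMap in setProperties(), and reported back from getProperty(). The read/write pattern, reduced to a stand-alone stand-in type (the real code also marks the render item for update, as the diff shows):

#include <QString>
#include <QVariant>
#include <QVariantMap>

struct OverlayFlags {
    bool isVisibleInSecondaryCamera{ false };

    void setProperties(const QVariantMap& properties) {
        auto value = properties["isVisibleInSecondaryCamera"];
        if (value.isValid()) {                      // only touch state when the key was supplied
            isVisibleInSecondaryCamera = value.toBool();
        }
    }

    QVariant getProperty(const QString& property) const {
        if (property == "isVisibleInSecondaryCamera") {
            return isVisibleInSecondaryCamera;
        }
        return QVariant();                          // unknown property
    }
};
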
@@ -48,6 +48,7 @@ public:
bool getDrawInFront() const { return _drawInFront; }
bool getDrawHUDLayer() const { return _drawHUDLayer; }
bool getIsGrabbable() const { return _isGrabbable; }
virtual bool getIsVisibleInSecondaryCamera() const override { return _isVisibleInSecondaryCamera; }

void setIsSolid(bool isSolid) { _isSolid = isSolid; }
void setIsDashedLine(bool isDashedLine) { _isDashedLine = isDashedLine; }

@@ -55,6 +56,7 @@ public:
virtual void setDrawInFront(bool value) { _drawInFront = value; }
virtual void setDrawHUDLayer(bool value) { _drawHUDLayer = value; }
void setIsGrabbable(bool value) { _isGrabbable = value; }
virtual void setIsVisibleInSecondaryCamera(bool value) { _isVisibleInSecondaryCamera = value; }

virtual AABox getBounds() const override = 0;

@@ -92,6 +94,7 @@ protected:
bool _drawInFront;
bool _drawHUDLayer;
bool _isGrabbable { false };
bool _isVisibleInSecondaryCamera { false };
mutable bool _renderVariableDirty { true };

QString _name;

@@ -89,8 +89,11 @@ void ModelOverlay::update(float deltatime) {
}
if (_visibleDirty) {
_visibleDirty = false;
// don't show overlays in mirrors
_model->setVisibleInScene(getVisible(), scene, render::ItemKey::TAG_BITS_0, false);
// don't show overlays in mirrors or spectator-cam unless _isVisibleInSecondaryCamera is true
_model->setVisibleInScene(getVisible(), scene,
render::ItemKey::TAG_BITS_0 |
(_isVisibleInSecondaryCamera ? render::ItemKey::TAG_BITS_1 : render::ItemKey::TAG_BITS_NONE),
false);
}
if (_drawInFrontDirty) {
_drawInFrontDirty = false;

@@ -36,6 +36,11 @@ public:
void clearSubRenderItemIDs();
void setSubRenderItemIDs(const render::ItemIDs& ids);

virtual void setIsVisibleInSecondaryCamera(bool value) override {
Base3DOverlay::setIsVisibleInSecondaryCamera(value);
_visibleDirty = true;
}

void setProperties(const QVariantMap& properties) override;
QVariant getProperty(const QString& property) override;
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance,

@@ -56,6 +56,8 @@ public:
bool isLoaded() { return _isLoaded; }
bool getVisible() const { return _visible; }
virtual bool isTransparent() { return getAlphaPulse() != 0.0f || getAlpha() != 1.0f; };
virtual bool getIsVisibleInSecondaryCamera() const { return false; }

xColor getColor();
float getAlpha();

@@ -49,7 +49,11 @@ namespace render {
builder.withInvisible();
}

builder.withTagBits(render::ItemKey::TAG_BITS_0); // Only draw overlays in main view
// always visible in primary view. if isVisibleInSecondaryCamera, also draw in secondary view
uint32_t viewTaskBits = render::ItemKey::TAG_BITS_0 |
(overlay->getIsVisibleInSecondaryCamera() ? render::ItemKey::TAG_BITS_1 : render::ItemKey::TAG_BITS_NONE);

builder.withTagBits(viewTaskBits);

return builder.build();
}

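In the payload hunk above, an overlay's render ItemKey always carries TAG_BITS_0 and additionally TAG_BITS_1 when it should also appear in the secondary camera; a view then draws an item when their tag masks intersect. A minimal illustration of that masking idea with stand-in constants (the real values live behind render::ItemKey and its builder's withTagBits()):

#include <cassert>
#include <cstdint>

constexpr uint32_t TAG_BITS_NONE = 0;        // no tagged view
constexpr uint32_t TAG_BITS_0    = 1 << 0;   // main view
constexpr uint32_t TAG_BITS_1    = 1 << 1;   // secondary camera (mirror / spectator-cam)

// An item is rendered by a view when their tag masks overlap.
constexpr bool drawnInView(uint32_t itemTagBits, uint32_t viewTagBits) {
    return (itemTagBits & viewTagBits) != 0;
}

int main() {
    bool visibleInSecondaryCamera = false;
    uint32_t itemBits = TAG_BITS_0 | (visibleInSecondaryCamera ? TAG_BITS_1 : TAG_BITS_NONE);

    assert(drawnInView(itemBits, TAG_BITS_0));    // always drawn in the main view
    assert(!drawnInView(itemBits, TAG_BITS_1));   // hidden from the secondary camera by default
    return 0;
}
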
@@ -66,16 +66,12 @@ bool ElbowConstraint::apply(glm::quat& rotation) const {
bool twistWasClamped = (twistAngle != clampedTwistAngle);

// update rotation
const float MIN_SWING_REAL_PART = 0.99999f;
if (twistWasClamped || fabsf(swingRotation.w) < MIN_SWING_REAL_PART) {
if (twistWasClamped) {
twistRotation = glm::angleAxis(clampedTwistAngle, _axis);
}
// we discard all swing and only keep twist
rotation = twistRotation * _referenceRotation;
return true;
if (twistWasClamped) {
twistRotation = glm::angleAxis(clampedTwistAngle, _axis);
}
return false;
// we discard all swing and only keep twist
rotation = twistRotation * _referenceRotation;
return true;
}

glm::quat ElbowConstraint::computeCenterRotation() const {

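The ElbowConstraint change above drops the MIN_SWING_REAL_PART threshold: the constraint now always rebuilds the rotation from the (possibly clamped) twist about the elbow axis and discards the swing, so apply() always reports that it modified the rotation. The underlying swing-twist split, as a stand-alone glm sketch (generic formulation; the project's own helper, clamping, and reference frame are omitted, and the degenerate 180-degree swing case is ignored):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Split q into a twist about the unit-length `axis` and the remaining swing, with q = swing * twist.
void swingTwistDecomposition(const glm::quat& q, const glm::vec3& axis, glm::quat& swing, glm::quat& twist) {
    glm::vec3 r(q.x, q.y, q.z);                   // vector part of q
    glm::vec3 proj = glm::dot(r, axis) * axis;    // projection of the vector part onto the twist axis
    twist = glm::normalize(glm::quat(q.w, proj.x, proj.y, proj.z));
    swing = q * glm::inverse(twist);
}

// What the new apply() effectively keeps: only the twist component.
glm::quat twistOnly(const glm::quat& rotation, const glm::vec3& axis) {
    glm::quat swing, twist;
    swingTwistDecomposition(rotation, axis, swing, twist);
    return twist;                                 // swing is discarded
}
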
(File diff suppressed because it is too large.)

@@ -46,6 +46,19 @@ static int YOUTUBE_MAX_FPS = 30;

static QTouchDevice _touchDevice;

WebEntityRenderer::ContentType WebEntityRenderer::getContentType(const QString& urlString) {
if (urlString.isEmpty()) {
return ContentType::NoContent;
}

const QUrl url(urlString);
if (url.scheme() == "http" || url.scheme() == "https" ||
urlString.toLower().endsWith(".htm") || urlString.toLower().endsWith(".html")) {
return ContentType::HtmlContent;
}
return ContentType::QmlContent;
}

WebEntityRenderer::WebEntityRenderer(const EntityItemPointer& entity) : Parent(entity) {
static std::once_flag once;
std::call_once(once, [&]{

@@ -123,13 +136,45 @@ void WebEntityRenderer::onTimeout() {
}

void WebEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
withWriteLock([&] {
// This work must be done on the main thread
if (!hasWebSurface()) {
// If we couldn't create a new web surface, exit
if (!buildWebSurface(entity)) {
return;
// If the content type has changed, or the old content type was QML, we need to
// destroy the existing surface (because surfaces don't support changing the root
// object, so subsequent loads of content just overlap the existing content
bool urlChanged = false;
{
auto newSourceUrl = entity->getSourceUrl();
auto newContentType = getContentType(newSourceUrl);
auto currentContentType = ContentType::NoContent;
withReadLock([&] {
urlChanged = _lastSourceUrl != newSourceUrl;
currentContentType = _contentType;
});

if (urlChanged) {
if (newContentType != ContentType::HtmlContent || currentContentType != ContentType::HtmlContent) {
destroyWebSurface();
}

withWriteLock([&] {
_lastSourceUrl = newSourceUrl;
_contentType = newContentType;
});
}
}

withWriteLock([&] {
if (_contentType == ContentType::NoContent) {
return;
}

// This work must be done on the main thread
// If we couldn't create a new web surface, exit
if (!hasWebSurface() && !buildWebSurface(entity)) {
return;
}

if (urlChanged) {
_webSurface->getRootItem()->setProperty("url", _lastSourceUrl);
}

if (_contextPosition != entity->getWorldPosition()) {

@@ -138,11 +183,6 @@ void WebEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene
_webSurface->getSurfaceContext()->setContextProperty("globalPosition", vec3toVariant(_contextPosition));
}

if (_lastSourceUrl != entity->getSourceUrl()) {
_lastSourceUrl = entity->getSourceUrl();
loadSourceURL();
}

_lastDPI = entity->getDPI();
_lastLocked = entity->getLocked();

@@ -232,9 +272,6 @@ bool WebEntityRenderer::buildWebSurface(const TypedEntityPointer& entity) {
// Let us interact with the keyboard
surfaceContext->setContextProperty("tabletInterface", DependencyManager::get<TabletScriptingInterface>().data());
});
_fadeStartTime = usecTimestampNow();
loadSourceURL();
_webSurface->resume();

// forward web events to EntityScriptingInterface
auto entities = DependencyManager::get<EntityScriptingInterface>();

@@ -243,6 +280,29 @@ bool WebEntityRenderer::buildWebSurface(const TypedEntityPointer& entity) {
emit entities->webEventReceived(entityItemID, message);
});

if (_contentType == ContentType::HtmlContent) {
// We special case YouTube URLs since we know they are videos that we should play with at least 30 FPS.
// FIXME this doesn't handle redirects or shortened URLs, consider using a signaling method from the
// web entity
if (QUrl(_lastSourceUrl).host().endsWith("youtube.com", Qt::CaseInsensitive)) {
_webSurface->setMaxFps(YOUTUBE_MAX_FPS);
} else {
_webSurface->setMaxFps(DEFAULT_MAX_FPS);
}
_webSurface->load("controls/WebEntityView.qml", [this](QQmlContext* context, QObject* item) {
item->setProperty("url", _lastSourceUrl);
});
} else if (_contentType == ContentType::QmlContent) {
_webSurface->load(_lastSourceUrl, [this](QQmlContext* context, QObject* item) {
if (item && item->objectName() == "tabletRoot") {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
tabletScriptingInterface->setQmlTabletRoot("com.highfidelity.interface.tablet.system", _webSurface.data());
}
});
}
_fadeStartTime = usecTimestampNow();
_webSurface->resume();

return true;
}

@@ -289,32 +349,6 @@ glm::vec2 WebEntityRenderer::getWindowSize(const TypedEntityPointer& entity) con
return dims;
}

void WebEntityRenderer::loadSourceURL() {
const QUrl sourceUrl(_lastSourceUrl);
if (sourceUrl.scheme() == "http" || sourceUrl.scheme() == "https" ||
_lastSourceUrl.toLower().endsWith(".htm") || _lastSourceUrl.toLower().endsWith(".html")) {
_contentType = htmlContent;

// We special case YouTube URLs since we know they are videos that we should play with at least 30 FPS.
if (sourceUrl.host().endsWith("youtube.com", Qt::CaseInsensitive)) {
_webSurface->setMaxFps(YOUTUBE_MAX_FPS);
} else {
_webSurface->setMaxFps(DEFAULT_MAX_FPS);
}

_webSurface->load("controls/WebEntityView.qml", [this](QQmlContext* context, QObject* item) {
item->setProperty("url", _lastSourceUrl);
});
} else {
_contentType = qmlContent;
_webSurface->load(_lastSourceUrl);
if (_webSurface->getRootItem() && _webSurface->getRootItem()->objectName() == "tabletRoot") {
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
tabletScriptingInterface->setQmlTabletRoot("com.highfidelity.interface.tablet.system", _webSurface.data());
}
}
}

void WebEntityRenderer::hoverEnterEntity(const PointerEvent& event) {
if (!_lastLocked && _webSurface) {
PointerEvent webEvent = event;

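The new WebEntityRenderer::getContentType shown above replaces loadSourceURL's inline scheme check: empty strings carry no content, http/https URLs and .htm/.html paths are treated as HTML, and everything else is loaded as QML. The same rule as a free-standing function with example results (stand-in names; the real one is a static member returning WebEntityRenderer::ContentType):

#include <QString>
#include <QUrl>

enum class ContentType { NoContent, HtmlContent, QmlContent };

ContentType classify(const QString& urlString) {
    if (urlString.isEmpty()) {
        return ContentType::NoContent;
    }
    const QUrl url(urlString);
    if (url.scheme() == "http" || url.scheme() == "https" ||
        urlString.toLower().endsWith(".htm") || urlString.toLower().endsWith(".html")) {
        return ContentType::HtmlContent;
    }
    return ContentType::QmlContent;          // anything else is treated as a QML source
}

// classify("https://example.com/page")  -> HtmlContent
// classify("qrc:///qml/SomeWidget.qml") -> QmlContent
// classify("")                          -> NoContent
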
@@ -47,15 +47,19 @@ private:
bool buildWebSurface(const TypedEntityPointer& entity);
void destroyWebSurface();
bool hasWebSurface();
void loadSourceURL();
glm::vec2 getWindowSize(const TypedEntityPointer& entity) const;

int _geometryId{ 0 };
enum contentType {
htmlContent,
qmlContent
enum class ContentType {
NoContent,
HtmlContent,
QmlContent
};
contentType _contentType;

static ContentType getContentType(const QString& urlString);

ContentType _contentType{ ContentType::NoContent };
QSharedPointer<OffscreenQmlSurface> _webSurface;
glm::vec3 _contextPosition;
gpu::TexturePointer _texture;

|
|||
// these can only be called from the OctreeSendThread for the given Node
|
||||
void insertSentFilteredEntity(const QUuid& entityID) { _sentFilteredEntities.insert(entityID); }
|
||||
void removeSentFilteredEntity(const QUuid& entityID) { _sentFilteredEntities.remove(entityID); }
|
||||
bool sentFilteredEntity(const QUuid& entityID) { return _sentFilteredEntities.contains(entityID); }
|
||||
bool sentFilteredEntity(const QUuid& entityID) const { return _sentFilteredEntities.contains(entityID); }
|
||||
QSet<QUuid> getSentFilteredEntities() { return _sentFilteredEntities; }
|
||||
|
||||
// the following flagged extra entity methods can only be called from the OctreeSendThread for the given Node
|
||||
|
|
|
@ -93,12 +93,16 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
|
|||
(&::gpu::gl::GLBackend::do_setUniformBuffer),
|
||||
(&::gpu::gl::GLBackend::do_setResourceBuffer),
|
||||
(&::gpu::gl::GLBackend::do_setResourceTexture),
|
||||
(&::gpu::gl::GLBackend::do_setResourceFramebufferSwapChainTexture),
|
||||
|
||||
(&::gpu::gl::GLBackend::do_setFramebuffer),
|
||||
(&::gpu::gl::GLBackend::do_setFramebufferSwapChain),
|
||||
(&::gpu::gl::GLBackend::do_clearFramebuffer),
|
||||
(&::gpu::gl::GLBackend::do_blit),
|
||||
(&::gpu::gl::GLBackend::do_generateTextureMips),
|
||||
|
||||
(&::gpu::gl::GLBackend::do_advance),
|
||||
|
||||
(&::gpu::gl::GLBackend::do_beginQuery),
|
||||
(&::gpu::gl::GLBackend::do_endQuery),
|
||||
(&::gpu::gl::GLBackend::do_getQuery),
|
||||
|
|
|
@ -123,15 +123,19 @@ public:
|
|||
// Resource Stage
|
||||
virtual void do_setResourceBuffer(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_setResourceTexture(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_setResourceFramebufferSwapChainTexture(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
// Pipeline Stage
|
||||
virtual void do_setPipeline(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
// Output stage
|
||||
virtual void do_setFramebuffer(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_setFramebufferSwapChain(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_clearFramebuffer(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_blit(const Batch& batch, size_t paramOffset) = 0;
|
||||
|
||||
virtual void do_advance(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
// Query section
|
||||
virtual void do_beginQuery(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_endQuery(const Batch& batch, size_t paramOffset) final;
|
||||
|
@ -242,6 +246,8 @@ protected:
|
|||
void setupStereoSide(int side);
|
||||
#endif
|
||||
|
||||
virtual void setResourceTexture(unsigned int slot, const TexturePointer& resourceTexture);
|
||||
virtual void setFramebuffer(const FramebufferPointer& framebuffer);
|
||||
virtual void initInput() final;
|
||||
virtual void killInput() final;
|
||||
virtual void syncInputStateCache() final;
|
||||
|
|
|
@ -37,6 +37,19 @@ void GLBackend::resetOutputStage() {
|
|||
|
||||
void GLBackend::do_setFramebuffer(const Batch& batch, size_t paramOffset) {
|
||||
auto framebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
|
||||
setFramebuffer(framebuffer);
|
||||
}
|
||||
|
||||
void GLBackend::do_setFramebufferSwapChain(const Batch& batch, size_t paramOffset) {
|
||||
auto swapChain = batch._swapChains.get(batch._params[paramOffset]._uint);
|
||||
if (swapChain) {
|
||||
auto index = batch._params[paramOffset + 1]._uint;
|
||||
FramebufferPointer framebuffer = static_cast<const FramebufferSwapChain*>(swapChain.get())->get(index);
|
||||
setFramebuffer(framebuffer);
|
||||
}
|
||||
}
|
||||
|
||||
void GLBackend::setFramebuffer(const FramebufferPointer& framebuffer) {
|
||||
if (_output._framebuffer != framebuffer) {
|
||||
auto newFBO = getFramebufferID(framebuffer);
|
||||
if (_output._drawFBO != newFBO) {
|
||||
|
@ -47,6 +60,13 @@ void GLBackend::do_setFramebuffer(const Batch& batch, size_t paramOffset) {
|
|||
}
|
||||
}
|
||||
|
||||
void GLBackend::do_advance(const Batch& batch, size_t paramOffset) {
|
||||
auto ringbuffer = batch._swapChains.get(batch._params[paramOffset]._uint);
|
||||
if (ringbuffer) {
|
||||
ringbuffer->advance();
|
||||
}
|
||||
}
|
||||
|
||||
void GLBackend::do_clearFramebuffer(const Batch& batch, size_t paramOffset) {
|
||||
if (_stereo.isStereo() && !_pipeline._stateCache.scissorEnable) {
|
||||
qWarning("Clear without scissor in stereo mode");
|
||||
|
|
|
@ -251,6 +251,31 @@ void GLBackend::do_setResourceTexture(const Batch& batch, size_t paramOffset) {
|
|||
releaseResourceTexture(slot);
|
||||
return;
|
||||
}
|
||||
setResourceTexture(slot, resourceTexture);
|
||||
}
|
||||
|
||||
void GLBackend::do_setResourceFramebufferSwapChainTexture(const Batch& batch, size_t paramOffset) {
|
||||
GLuint slot = batch._params[paramOffset + 1]._uint;
|
||||
if (slot >= (GLuint)MAX_NUM_RESOURCE_TEXTURES) {
|
||||
qCDebug(gpugllogging) << "GLBackend::do_setResourceFramebufferSwapChainTexture: Trying to set a resource Texture at slot #" << slot << " which doesn't exist. MaxNumResourceTextures = " << getMaxNumResourceTextures();
|
||||
return;
|
||||
}
|
||||
|
||||
SwapChainPointer swapChain = batch._swapChains.get(batch._params[paramOffset + 0]._uint);
|
||||
|
||||
if (!swapChain) {
|
||||
releaseResourceTexture(slot);
|
||||
return;
|
||||
}
|
||||
auto index = batch._params[paramOffset + 2]._uint;
|
||||
auto renderBufferSlot = batch._params[paramOffset + 3]._uint;
|
||||
FramebufferPointer resourceFramebuffer = static_cast<const FramebufferSwapChain*>(swapChain.get())->get(index);
|
||||
TexturePointer resourceTexture = resourceFramebuffer->getRenderBuffer(renderBufferSlot);
|
||||
|
||||
setResourceTexture(slot, resourceTexture);
|
||||
}
|
||||
|
||||
void GLBackend::setResourceTexture(unsigned int slot, const TexturePointer& resourceTexture) {
|
||||
// check cache before thinking
|
||||
if (_resource._textures[slot] == resourceTexture) {
|
||||
return;
|
||||
|
@ -267,11 +292,11 @@ void GLBackend::do_setResourceTexture(const Batch& batch, size_t paramOffset) {
|
|||
glActiveTexture(GL_TEXTURE0 + slot);
|
||||
glBindTexture(target, to);
|
||||
|
||||
(void) CHECK_GL_ERROR();
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_resource._textures[slot] = resourceTexture;
|
||||
|
||||
_stats._RSAmountTextureMemoryBounded += (int) object->size();
|
||||
_stats._RSAmountTextureMemoryBounded += (int)object->size();
|
||||
|
||||
} else {
|
||||
releaseResourceTexture(slot);
|
||||
|
|
|
@ -105,7 +105,7 @@ void GLBackend::TransformStageState::preUpdate(size_t commandIndex, const Stereo
|
|||
if (_viewIsCamera && (_viewCorrectionEnabled && _correction.correction != glm::mat4())) {
|
||||
// FIXME should I switch to using the camera correction buffer in Transform.slf and leave this out?
|
||||
Transform result;
|
||||
_view.mult(result, _view, _correction.correction);
|
||||
_view.mult(result, _view, _correction.correctionInverse);
|
||||
if (_skybox) {
|
||||
result.setTranslation(vec3());
|
||||
}
|
||||
|
|
|
@ -173,7 +173,47 @@ GLenum GLTexelFormat::evalGLTexelFormatInternal(const gpu::Element& dstFormat) {
|
|||
case gpu::RGB:
|
||||
case gpu::RGBA:
|
||||
case gpu::XY:
|
||||
result = GL_RG8;
|
||||
switch (dstFormat.getType()) {
|
||||
case gpu::UINT32:
|
||||
result = GL_RG32UI;
|
||||
break;
|
||||
case gpu::INT32:
|
||||
result = GL_RG32I;
|
||||
break;
|
||||
case gpu::FLOAT:
|
||||
result = GL_RG32F;
|
||||
break;
|
||||
case gpu::UINT16:
|
||||
result = GL_RG16UI;
|
||||
break;
|
||||
case gpu::INT16:
|
||||
result = GL_RG16I;
|
||||
break;
|
||||
case gpu::HALF:
|
||||
result = GL_RG16F;
|
||||
break;
|
||||
case gpu::UINT8:
|
||||
result = GL_RG8UI;
|
||||
break;
|
||||
case gpu::INT8:
|
||||
result = GL_RG8I;
|
||||
break;
|
||||
case gpu::NUINT8:
|
||||
result = GL_RG8;
|
||||
break;
|
||||
case gpu::NINT8:
|
||||
result = GL_RG8_SNORM;
|
||||
break;
|
||||
case gpu::NUINT32:
|
||||
case gpu::NINT32:
|
||||
case gpu::NUINT2:
|
||||
case gpu::NINT2_10_10_10:
|
||||
case gpu::COMPRESSED:
|
||||
case gpu::NUINT16:
|
||||
case gpu::NINT16:
|
||||
case gpu::NUM_TYPES: // quiet compiler
|
||||
Q_UNREACHABLE();
|
||||
}
|
||||
break;
|
||||
default:
|
||||
qCWarning(gpugllogging) << "Unknown combination of texel format";
|
||||
|
@ -312,7 +352,48 @@ GLTexelFormat GLTexelFormat::evalGLTexelFormat(const Element& dstFormat, const E
|
|||
case gpu::RGB:
|
||||
case gpu::RGBA:
|
||||
case gpu::XY:
|
||||
texel.internalFormat = GL_RG8;
|
||||
switch (dstFormat.getType()) {
|
||||
case gpu::UINT32:
|
||||
texel.internalFormat = GL_RG32UI;
|
||||
break;
|
||||
case gpu::INT32:
|
||||
texel.internalFormat = GL_RG32I;
|
||||
break;
|
||||
case gpu::FLOAT:
|
||||
texel.internalFormat = GL_RG32F;
|
||||
break;
|
||||
case gpu::UINT16:
|
||||
texel.internalFormat = GL_RG16UI;
|
||||
break;
|
||||
case gpu::INT16:
|
||||
texel.internalFormat = GL_RG16I;
|
||||
break;
|
||||
case gpu::HALF:
|
||||
texel.type = GL_FLOAT;
|
||||
texel.internalFormat = GL_RG16F;
|
||||
break;
|
||||
case gpu::UINT8:
|
||||
texel.internalFormat = GL_RG8UI;
|
||||
break;
|
||||
case gpu::INT8:
|
||||
texel.internalFormat = GL_RG8I;
|
||||
break;
|
||||
case gpu::NUINT8:
|
||||
texel.internalFormat = GL_RG8;
|
||||
break;
|
||||
case gpu::NINT8:
|
||||
texel.internalFormat = GL_RG8_SNORM;
|
||||
break;
|
||||
case gpu::NUINT32:
|
||||
case gpu::NINT32:
|
||||
case gpu::NUINT2:
|
||||
case gpu::NINT2_10_10_10:
|
||||
case gpu::COMPRESSED:
|
||||
case gpu::NUINT16:
|
||||
case gpu::NINT16:
|
||||
case gpu::NUM_TYPES: // quiet compiler
|
||||
Q_UNREACHABLE();
|
||||
}
|
||||
break;
|
||||
default:
|
||||
qCWarning(gpugllogging) << "Unknown combination of texel format";
|
||||
|
|
|
@ -74,7 +74,7 @@ public:
|
|||
size_t getNumIndices() const { return _indexBuffer.getNumElements(); }
|
||||
|
||||
// Access vertex position value
|
||||
const Vec3& getPos3(Index index) const { return _vertexBuffer.get<Vec3>(index); }
|
||||
const Vec3& getPos(Index index) const { return _vertexBuffer.get<Vec3>(index); }
|
||||
|
||||
enum Topology {
|
||||
POINTS = 0,
|
||||
|
|
|
@ -21,7 +21,7 @@ int SimpleMeshProxy::getNumVertices() const {
|
|||
return (int)_mesh->getNumVertices();
|
||||
}
|
||||
|
||||
glm::vec3 SimpleMeshProxy::getPos3(int index) const {
|
||||
return _mesh->getPos3(index);
|
||||
glm::vec3 SimpleMeshProxy::getPos(int index) const {
|
||||
return _mesh->getPos(index);
|
||||
}
|
||||
|
||||
|
|
|
@ -26,8 +26,8 @@ public:
|
|||
|
||||
int getNumVertices() const override;
|
||||
|
||||
glm::vec3 getPos3(int index) const override;
|
||||
|
||||
glm::vec3 getPos(int index) const override;
|
||||
glm::vec3 getPos3(int index) const override { return getPos(index); } // deprecated
|
||||
|
||||
protected:
|
||||
const MeshPointer _mesh;
|
||||
|
|
|
@ -188,11 +188,13 @@ bool OffscreenSurface::eventFilter(QObject* originalDestination, QEvent* event)
|
|||
event->ignore();
|
||||
if (QCoreApplication::sendEvent(window->activeFocusItem(), event)) {
|
||||
bool eventAccepted = event->isAccepted();
|
||||
QInputMethodQueryEvent* imqEvent = static_cast<QInputMethodQueryEvent*>(event);
|
||||
// this block disables the selection cursor in android which appears in
|
||||
// the top-left corner of the screen
|
||||
if (imqEvent->queries() & Qt::ImEnabled) {
|
||||
imqEvent->setValue(Qt::ImEnabled, QVariant(false));
|
||||
if (event->type() == QEvent::InputMethodQuery) {
|
||||
QInputMethodQueryEvent *imqEvent = static_cast<QInputMethodQueryEvent *>(event);
|
||||
// this block disables the selection cursor in android which appears in
|
||||
// the top-left corner of the screen
|
||||
if (imqEvent->queries() & Qt::ImEnabled) {
|
||||
imqEvent->setValue(Qt::ImEnabled, QVariant(false));
|
||||
}
|
||||
}
|
||||
return eventAccepted;
|
||||
}
|
||||
|
|
|
@ -88,7 +88,7 @@ class AntialiasingConfig : public render::Job::Config {
|
|||
Q_PROPERTY(float blend MEMBER blend NOTIFY dirty)
|
||||
Q_PROPERTY(float sharpen MEMBER sharpen NOTIFY dirty)
|
||||
Q_PROPERTY(float covarianceGamma MEMBER covarianceGamma NOTIFY dirty)
|
||||
|
||||
|
||||
Q_PROPERTY(bool constrainColor MEMBER constrainColor NOTIFY dirty)
|
||||
Q_PROPERTY(bool feedbackColor MEMBER feedbackColor NOTIFY dirty)
|
||||
|
||||
|
|
|
@ -363,12 +363,13 @@ public:
|
|||
|
||||
/**jsdoc
|
||||
* Get the position of a vertex in the mesh.
|
||||
* @function MeshProxy#getPos3
|
||||
* @function MeshProxy#getPos
|
||||
* @param {number} index - Integer index of the mesh vertex.
|
||||
* @returns {Vec3} Local position of the vertex relative to the mesh.
|
||||
* @deprecated Use the {@link Graphics} API instead.
|
||||
*/
|
||||
Q_INVOKABLE virtual glm::vec3 getPos3(int index) const = 0;
|
||||
Q_INVOKABLE virtual glm::vec3 getPos(int index) const = 0;
|
||||
Q_INVOKABLE virtual glm::vec3 getPos3(int index) const { return getPos(index); } // deprecated
|
||||
};
|
||||
|
||||
Q_DECLARE_METATYPE(MeshProxy*);
|
||||
|
|
|
@ -17,7 +17,7 @@ var currentSelectedBtn;
|
|||
|
||||
var SETTING_CURRENT_MODE_KEY = 'Android/Mode';
|
||||
var MODE_VR = "VR", MODE_RADAR = "RADAR", MODE_MY_VIEW = "MY VIEW";
|
||||
var DEFAULT_MODE = MODE_RADAR;
|
||||
var DEFAULT_MODE = MODE_MY_VIEW;
|
||||
var logEnabled = true;
|
||||
|
||||
var radar = Script.require('./radar.js');
|
||||
|
|
|
@ -9,7 +9,8 @@
|
|||
//
|
||||
|
||||
/* global Tablet, Script, HMD, UserActivityLogger, Entities, Account, Wallet, ContextOverlay, Settings, Camera, Vec3,
|
||||
Quat, MyAvatar, Clipboard, Menu, Grid, Uuid, GlobalServices, openLoginWindow */
|
||||
Quat, MyAvatar, Clipboard, Menu, Grid, Uuid, GlobalServices, openLoginWindow, Overlays, SoundCache,
|
||||
DesktopPreviewProvider */
|
||||
/* eslint indent: ["error", 4, { "outerIIFEBody": 0 }] */
|
||||
|
||||
var selectionDisplay = null; // for gridTool.js to ignore
|
||||
|
@ -127,6 +128,24 @@ var selectionDisplay = null; // for gridTool.js to ignore
|
|||
|
||||
var onWalletScreen = false;
|
||||
var onCommerceScreen = false;
|
||||
var tabletShouldBeVisibleInSecondaryCamera = false;
|
||||
|
||||
function setTabletVisibleInSecondaryCamera(visibleInSecondaryCam) {
|
||||
if (visibleInSecondaryCam) {
|
||||
// if we're potentially showing the tablet, only do so if it was visible before
|
||||
if (!tabletShouldBeVisibleInSecondaryCamera) {
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
// if we're hiding the tablet, check to see if it was visible in the first place
|
||||
tabletShouldBeVisibleInSecondaryCamera = Overlays.getProperty(HMD.tabletID, "isVisibleInSecondaryCamera");
|
||||
}
|
||||
|
||||
Overlays.editOverlay(HMD.tabletID, { isVisibleInSecondaryCamera : visibleInSecondaryCam });
|
||||
Overlays.editOverlay(HMD.homeButtonID, { isVisibleInSecondaryCamera : visibleInSecondaryCam });
|
||||
Overlays.editOverlay(HMD.homeButtonHighlightIDtabletID, { isVisibleInSecondaryCamera : visibleInSecondaryCam });
|
||||
Overlays.editOverlay(HMD.tabletScreenID, { isVisibleInSecondaryCamera : visibleInSecondaryCam });
|
||||
}
|
||||
|
||||
function onScreenChanged(type, url) {
|
||||
onMarketplaceScreen = type === "Web" && url.indexOf(MARKETPLACE_URL) !== -1;
|
||||
|
@ -138,6 +157,7 @@ var selectionDisplay = null; // for gridTool.js to ignore
|
|||
if (isHmdPreviewDisabledBySecurity) {
|
||||
DesktopPreviewProvider.setPreviewDisabledReason("USER");
|
||||
Menu.setIsOptionChecked("Disable Preview", false);
|
||||
setTabletVisibleInSecondaryCamera(true);
|
||||
isHmdPreviewDisabledBySecurity = false;
|
||||
}
|
||||
}
|
||||
|
@ -258,7 +278,7 @@ var selectionDisplay = null; // for gridTool.js to ignore
|
|||
var wearableDimensions = null;
|
||||
|
||||
if (itemType === "contentSet") {
|
||||
console.log("Item is a content set; codepath shouldn't go here.")
|
||||
console.log("Item is a content set; codepath shouldn't go here.");
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -592,6 +612,7 @@ var selectionDisplay = null; // for gridTool.js to ignore
|
|||
if (!isHmdPreviewDisabled) {
|
||||
DesktopPreviewProvider.setPreviewDisabledReason("SECURE_SCREEN");
|
||||
Menu.setIsOptionChecked("Disable Preview", true);
|
||||
setTabletVisibleInSecondaryCamera(false);
|
||||
isHmdPreviewDisabledBySecurity = true;
|
||||
}
|
||||
break;
|
||||
|
@ -599,6 +620,7 @@ var selectionDisplay = null; // for gridTool.js to ignore
|
|||
if (isHmdPreviewDisabledBySecurity) {
|
||||
DesktopPreviewProvider.setPreviewDisabledReason("USER");
|
||||
Menu.setIsOptionChecked("Disable Preview", false);
|
||||
setTabletVisibleInSecondaryCamera(true);
|
||||
isHmdPreviewDisabledBySecurity = false;
|
||||
}
|
||||
break;
|
||||
|
|