Merge pull request #1 from wayne-chen/addNewAudioControls

Fixing the audio screen to match master
Ken Cooke 2019-03-26 11:30:20 -07:00 committed by GitHub
commit b1d4753464
21 changed files with 195 additions and 127 deletions

View file

@@ -345,14 +345,6 @@ Rectangle {
color: hifi.colors.white;
text: qsTr("Choose input device");
}
AudioControls.LoopbackAudio {
x: margins.paddings
visible: (bar.currentIndex === 1 && isVR) ||
(bar.currentIndex === 0 && !isVR);
anchors { right: parent.right }
}
}
ListView {
@@ -446,14 +438,6 @@ Rectangle {
color: hifi.colors.white;
text: qsTr("Choose output device");
}
AudioControls.PlaySampleSound {
x: margins.paddings
visible: (bar.currentIndex === 1 && isVR) ||
(bar.currentIndex === 0 && !isVR);
anchors { right: parent.right }
}
}
ListView {
@@ -556,6 +540,8 @@ Rectangle {
x: margins.paddings;
width: parent.width - margins.paddings*2
height: injectorGainSliderTextMetrics.height
anchors.top: avatarGainContainer.bottom;
anchors.topMargin: 10;
HifiControlsUit.Slider {
id: injectorGainSlider
@@ -615,6 +601,8 @@ Rectangle {
x: margins.paddings;
width: parent.width - margins.paddings*2
height: systemInjectorGainSliderTextMetrics.height
anchors.top: injectorGainContainer.bottom;
anchors.topMargin: 10;
HifiControlsUit.Slider {
id: systemInjectorGainSlider
@@ -667,12 +655,13 @@ Rectangle {
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignTop;
}
}
}
AudioControls.PlaySampleSound {
id: playSampleSound
x: margins.paddings
anchors.top: systemInjectorGainContainer.bottom;
anchors.topMargin: 10;
id: playSampleSound
x: margins.paddings
anchors.top: systemInjectorGainContainer.bottom;
anchors.topMargin: 10;
}
}
}

View file

@@ -44,8 +44,11 @@ RowLayout {
}
HifiControlsUit.Button {
text: audioLoopedBack ? qsTr("STOP TESTING") : qsTr("TEST YOUR VOICE");
text: audioLoopedBack ? qsTr("STOP TESTING VOICE") : qsTr("TEST YOUR VOICE");
color: audioLoopedBack ? hifi.buttons.red : hifi.buttons.blue;
fontSize: 15;
width: 200;
height: 32;
onClicked: {
if (audioLoopedBack) {
loopbackTimer.stop();

View file

@@ -59,6 +59,9 @@ RowLayout {
text: isPlaying ? qsTr("STOP TESTING") : qsTr("TEST YOUR SOUND");
color: isPlaying ? hifi.buttons.red : hifi.buttons.blue;
onClicked: isPlaying ? stopSound() : playSound();
fontSize: 15;
width: 200;
height: 32;
}
// RalewayRegular {

View file

@@ -133,7 +133,7 @@ Item {
states: [
State {
name: AvatarPackagerState.main
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerHeader; title: qsTr("Avatar Packager"); docsEnabled: true; videoEnabled: true; backButtonVisible: false }
PropertyChanges { target: avatarPackagerMain; visible: true }
PropertyChanges { target: avatarPackagerFooter; content: avatarPackagerMain.footer }
},
@@ -229,7 +229,11 @@ Item {
}
function openDocs() {
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/create-avatars#how-to-package-your-avatar");
Qt.openUrlExternally("https://docs.highfidelity.com/create/avatars/package-avatar.html");
}
function openVideo() {
Qt.openUrlExternally("https://youtu.be/zrkEowu_yps");
}
AvatarPackagerHeader {
@@ -243,6 +247,9 @@ Item {
onDocsButtonClicked: {
avatarPackager.openDocs();
}
onVideoButtonClicked: {
avatarPackager.openVideo();
}
}
Item {

View file

@@ -13,6 +13,7 @@ ShadowRectangle {
property string title: qsTr("Avatar Packager")
property alias docsEnabled: docs.visible
property alias videoEnabled: video.visible
property bool backButtonVisible: true // If false, is not visible and does not take up space
property bool backButtonEnabled: true // If false, is not visible but does not affect space
property bool canRename: false
@@ -24,6 +25,7 @@ ShadowRectangle {
signal backButtonClicked
signal docsButtonClicked
signal videoButtonClicked
RalewayButton {
id: back
@@ -126,6 +128,20 @@ ShadowRectangle {
}
}
RalewayButton {
id: video
visible: false
size: 28
anchors.top: parent.top
anchors.bottom: parent.bottom
anchors.right: docs.left
anchors.rightMargin: 16
text: qsTr("Video")
onClicked: videoButtonClicked()
}
RalewayButton {
id: docs
visible: false
@@ -137,8 +153,6 @@ ShadowRectangle {
text: qsTr("Docs")
onClicked: {
docsButtonClicked();
}
onClicked: docsButtonClicked()
}
}

View file

@@ -339,8 +339,8 @@ Item {
visible: AvatarPackagerCore.currentAvatarProject && AvatarPackagerCore.currentAvatarProject.hasErrors
anchors {
top: notForSaleMessage.bottom
topMargin: 16
top: notForSaleMessage.visible ? notForSaleMessage.bottom : infoMessage.bottom
bottom: showFilesText.top
horizontalCenter: parent.horizontalCenter
}

View file

@@ -2248,6 +2248,7 @@ Item {
if (sendAssetStep.selectedRecipientUserName === "") {
console.log("SendAsset: Script didn't specify a recipient username!");
sendAssetHome.visible = false;
root.nextActiveView = 'paymentFailure';
return;
}

View file

@@ -55,7 +55,7 @@ static QStringList HAND_MAPPING_SUFFIXES = {
"HandThumb1",
};
const QUrl DEFAULT_DOCS_URL = QUrl("https://docs.highfidelity.com/create/avatars/create-avatars.html#create-your-own-avatar");
const QUrl PACKAGE_AVATAR_DOCS_BASE_URL = QUrl("https://docs.highfidelity.com/create/avatars/package-avatar.html");
AvatarDoctor::AvatarDoctor(const QUrl& avatarFSTFileUrl) :
_avatarFSTFileUrl(avatarFSTFileUrl) {
@@ -85,7 +85,7 @@ void AvatarDoctor::startDiagnosing() {
const auto resourceLoaded = [this, resource](bool success) {
// MODEL
if (!success) {
_errors.push_back({ "Model file cannot be opened.", DEFAULT_DOCS_URL });
addError("Model file cannot be opened.", "missing-file");
emit complete(getErrors());
return;
}
@@ -93,45 +93,45 @@ void AvatarDoctor::startDiagnosing() {
const auto model = resource.data();
const auto avatarModel = resource.data()->getHFMModel();
if (!avatarModel.originalURL.endsWith(".fbx")) {
_errors.push_back({ "Unsupported avatar model format.", DEFAULT_DOCS_URL });
addError("Unsupported avatar model format.", "unsupported-format");
emit complete(getErrors());
return;
}
// RIG
if (avatarModel.joints.isEmpty()) {
_errors.push_back({ "Avatar has no rig.", DEFAULT_DOCS_URL });
addError("Avatar has no rig.", "no-rig");
} else {
auto jointNames = avatarModel.getJointNames();
if (avatarModel.joints.length() > NETWORKED_JOINTS_LIMIT) {
_errors.push_back({tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), DEFAULT_DOCS_URL });
addError(tr( "Avatar has over %n bones.", "", NETWORKED_JOINTS_LIMIT), "maximum-bone-limit");
}
// Avatar does not have Hips bone mapped
if (!jointNames.contains("Hips")) {
_errors.push_back({ "Hips are not mapped.", DEFAULT_DOCS_URL });
addError("Hips are not mapped.", "hips-not-mapped");
}
if (!jointNames.contains("Spine")) {
_errors.push_back({ "Spine is not mapped.", DEFAULT_DOCS_URL });
addError("Spine is not mapped.", "spine-not-mapped");
}
if (!jointNames.contains("Spine1")) {
_errors.push_back({ "Chest (Spine1) is not mapped.", DEFAULT_DOCS_URL });
addError("Chest (Spine1) is not mapped.", "chest-not-mapped");
}
if (!jointNames.contains("Neck")) {
_errors.push_back({ "Neck is not mapped.", DEFAULT_DOCS_URL });
addError("Neck is not mapped.", "neck-not-mapped");
}
if (!jointNames.contains("Head")) {
_errors.push_back({ "Head is not mapped.", DEFAULT_DOCS_URL });
addError("Head is not mapped.", "head-not-mapped");
}
if (!jointNames.contains("LeftEye")) {
if (jointNames.contains("RightEye")) {
_errors.push_back({ "LeftEye is not mapped.", DEFAULT_DOCS_URL });
addError("LeftEye is not mapped.", "eye-not-mapped");
} else {
_errors.push_back({ "Eyes are not mapped.", DEFAULT_DOCS_URL });
addError("Eyes are not mapped.", "eye-not-mapped");
}
} else if (!jointNames.contains("RightEye")) {
_errors.push_back({ "RightEye is not mapped.", DEFAULT_DOCS_URL });
addError("RightEye is not mapped.", "eye-not-mapped");
}
const auto checkJointAsymmetry = [jointNames] (const QStringList& jointMappingSuffixes) {
@@ -159,13 +159,13 @@ void AvatarDoctor::startDiagnosing() {
};
if (checkJointAsymmetry(ARM_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical arm bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical arm bones.", "asymmetrical-bones");
}
if (checkJointAsymmetry(HAND_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical hand bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical hand bones.", "asymmetrical-bones");
}
if (checkJointAsymmetry(LEG_MAPPING_SUFFIXES)) {
_errors.push_back({ "Asymmetrical leg bones.", DEFAULT_DOCS_URL });
addError("Asymmetrical leg bones.", "asymmetrical-bones");
}
// Multiple skeleton root joints checkup
@@ -177,7 +177,7 @@ void AvatarDoctor::startDiagnosing() {
}
if (skeletonRootJoints > 1) {
_errors.push_back({ "Multiple top-level joints found.", DEFAULT_DOCS_URL });
addError("Multiple top-level joints found.", "multiple-top-level-joints");
}
Rig rig;
@@ -191,9 +191,9 @@ void AvatarDoctor::startDiagnosing() {
const float RECOMMENDED_MAX_HEIGHT = DEFAULT_AVATAR_HEIGHT * 1.5f;
if (avatarHeight < RECOMMENDED_MIN_HEIGHT) {
_errors.push_back({ "Avatar is possibly too short.", DEFAULT_DOCS_URL });
addError("Avatar is possibly too short.", "short-avatar");
} else if (avatarHeight > RECOMMENDED_MAX_HEIGHT) {
_errors.push_back({ "Avatar is possibly too tall.", DEFAULT_DOCS_URL });
addError("Avatar is possibly too tall.", "tall-avatar");
}
// HipsNotOnGround
@@ -204,7 +204,7 @@ void AvatarDoctor::startDiagnosing() {
const auto hipJoint = avatarModel.joints.at(avatarModel.getJointIndex("Hips"));
if (hipsPosition.y < HIPS_GROUND_MIN_Y) {
_errors.push_back({ "Hips are on ground.", DEFAULT_DOCS_URL });
addError("Hips are on ground.", "hips-on-ground");
}
}
}
@@ -223,7 +223,7 @@ void AvatarDoctor::startDiagnosing() {
const auto hipsToSpine = glm::length(hipsPosition - spinePosition);
const auto spineToChest = glm::length(spinePosition - chestPosition);
if (hipsToSpine < HIPS_SPINE_CHEST_MIN_SEPARATION && spineToChest < HIPS_SPINE_CHEST_MIN_SEPARATION) {
_errors.push_back({ "Hips/Spine/Chest overlap.", DEFAULT_DOCS_URL });
addError("Hips/Spine/Chest overlap.", "overlap-error");
}
}
}
@@ -240,21 +240,21 @@ void AvatarDoctor::startDiagnosing() {
const auto& uniqueJointValues = jointValues.toSet();
for (const auto& jointName: uniqueJointValues) {
if (jointValues.count(jointName) > 1) {
_errors.push_back({ tr("%1 is mapped multiple times.").arg(jointName), DEFAULT_DOCS_URL });
addError(tr("%1 is mapped multiple times.").arg(jointName), "mapped-multiple-times");
}
}
}
if (!isDescendantOfJointWhenJointsExist("Spine", "Hips")) {
_errors.push_back({ "Spine is not a child of Hips.", DEFAULT_DOCS_URL });
addError("Spine is not a child of Hips.", "spine-not-child");
}
if (!isDescendantOfJointWhenJointsExist("Spine1", "Spine")) {
_errors.push_back({ "Spine1 is not a child of Spine.", DEFAULT_DOCS_URL });
addError("Spine1 is not a child of Spine.", "spine1-not-child");
}
if (!isDescendantOfJointWhenJointsExist("Head", "Spine1")) {
_errors.push_back({ "Head is not a child of Spine1.", DEFAULT_DOCS_URL });
addError("Head is not a child of Spine1.", "head-not-child");
}
}
@@ -300,7 +300,7 @@ void AvatarDoctor::startDiagnosing() {
connect(resource.data(), &GeometryResource::finished, this, resourceLoaded);
}
} else {
_errors.push_back({ "Model file cannot be opened", DEFAULT_DOCS_URL });
addError("Model file cannot be opened", "missing-file");
emit complete(getErrors());
}
}
@@ -345,7 +345,7 @@ void AvatarDoctor::diagnoseTextures() {
QUrl(avatarModel.originalURL)).resolved(QUrl("textures"));
if (texturesFound == 0) {
_errors.push_back({ tr("No textures assigned."), DEFAULT_DOCS_URL });
addError(tr("No textures assigned."), "no-textures-assigned");
}
if (!externalTextures.empty()) {
@@ -356,11 +356,10 @@ void AvatarDoctor::diagnoseTextures() {
auto checkTextureLoadingComplete = [this]() mutable {
if (_checkedTextureCount == _externalTextureCount) {
if (_missingTextureCount > 0) {
_errors.push_back({ tr("Missing %n texture(s).","", _missingTextureCount), DEFAULT_DOCS_URL });
addError(tr("Missing %n texture(s).","", _missingTextureCount), "missing-textures");
}
if (_unsupportedTextureCount > 0) {
_errors.push_back({ tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount),
DEFAULT_DOCS_URL });
addError(tr("%n unsupported texture(s) found.", "", _unsupportedTextureCount), "unsupported-textures");
}
emit complete(getErrors());
@@ -411,6 +410,12 @@ void AvatarDoctor::diagnoseTextures() {
}
}
void AvatarDoctor::addError(const QString& errorMessage, const QString& docFragment) {
QUrl documentationURL = PACKAGE_AVATAR_DOCS_BASE_URL;
documentationURL.setFragment(docFragment);
_errors.push_back({ errorMessage, documentationURL });
}
QVariantList AvatarDoctor::getErrors() const {
QVariantList result;
for (const auto& error : _errors) {

View file

@@ -40,6 +40,8 @@ signals:
private:
void diagnoseTextures();
void addError(const QString& errorMessage, const QString& docFragment);
QUrl _avatarFSTFileUrl;
QVector<AvatarDiagnosticResult> _errors;

View file

@@ -169,7 +169,7 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
if (urlChanged && !usingMaterialData) {
_networkMaterial = MaterialCache::instance().getMaterial(_materialURL);
auto onMaterialRequestFinished = [&, oldParentID, oldParentMaterialName, newCurrentMaterialName](bool success) {
auto onMaterialRequestFinished = [this, oldParentID, oldParentMaterialName, newCurrentMaterialName](bool success) {
if (success) {
deleteMaterial(oldParentID, oldParentMaterialName);
_texturesLoaded = false;
@@ -186,7 +186,11 @@ void MaterialEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
if (_networkMaterial->isLoaded()) {
onMaterialRequestFinished(!_networkMaterial->isFailed());
} else {
connect(_networkMaterial.data(), &Resource::finished, this, onMaterialRequestFinished);
connect(_networkMaterial.data(), &Resource::finished, this, [this, onMaterialRequestFinished](bool success) {
withWriteLock([&] {
onMaterialRequestFinished(success);
});
});
}
}
} else if (materialDataChanged && usingMaterialData) {

View file

@@ -95,19 +95,18 @@ bool PolyLineEntityRenderer::needsRenderUpdate() const {
}
bool PolyLineEntityRenderer::needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const {
return (
entity->pointsChanged() ||
entity->widthsChanged() ||
entity->normalsChanged() ||
entity->texturesChanged() ||
entity->colorsChanged() ||
_isUVModeStretch != entity->getIsUVModeStretch() ||
_glow != entity->getGlow() ||
_faceCamera != entity->getFaceCamera()
);
if (entity->pointsChanged() || entity->widthsChanged() || entity->normalsChanged() || entity->texturesChanged() || entity->colorsChanged()) {
return true;
}
if (_isUVModeStretch != entity->getIsUVModeStretch() || _glow != entity->getGlow() || _faceCamera != entity->getFaceCamera()) {
return true;
}
return Parent::needsRenderUpdateFromTypedEntity(entity);
}
void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
void PolyLineEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
auto pointsChanged = entity->pointsChanged();
auto widthsChanged = entity->widthsChanged();
auto normalsChanged = entity->normalsChanged();
@@ -119,10 +118,6 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
entity->resetPolyLineChanged();
// Transform
updateModelTransformAndBound();
_renderTransform = getModelTransform();
// Textures
if (entity->texturesChanged()) {
entity->resetTexturesChanged();
@@ -131,7 +126,9 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
if (!textures.isEmpty()) {
entityTextures = QUrl(textures);
}
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
withWriteLock([&] {
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
});
_textureAspectRatio = 1.0f;
_textureLoaded = false;
}
@@ -145,11 +142,13 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
// Data
bool faceCameraChanged = faceCamera != _faceCamera;
if (faceCameraChanged || glow != _glow) {
_faceCamera = faceCamera;
_glow = glow;
updateData();
}
withWriteLock([&] {
if (faceCameraChanged || glow != _glow) {
_faceCamera = faceCamera;
_glow = glow;
updateData();
}
});
// Geometry
if (pointsChanged) {
@@ -165,10 +164,23 @@ void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPo
_colors = entity->getStrokeColors();
_color = toGlm(entity->getColor());
}
if (_isUVModeStretch != isUVModeStretch || pointsChanged || widthsChanged || normalsChanged || colorsChanged || textureChanged || faceCameraChanged) {
_isUVModeStretch = isUVModeStretch;
updateGeometry();
}
bool uvModeStretchChanged = _isUVModeStretch != isUVModeStretch;
_isUVModeStretch = isUVModeStretch;
bool geometryChanged = uvModeStretchChanged || pointsChanged || widthsChanged || normalsChanged || colorsChanged || textureChanged || faceCameraChanged;
void* key = (void*)this;
AbstractViewStateInterface::instance()->pushPostUpdateLambda(key, [this, geometryChanged] () {
withWriteLock([&] {
updateModelTransformAndBound();
_renderTransform = getModelTransform();
if (geometryChanged) {
updateGeometry();
}
});
});
}
void PolyLineEntityRenderer::updateGeometry() {
@@ -267,22 +279,32 @@ void PolyLineEntityRenderer::updateData() {
}
void PolyLineEntityRenderer::doRender(RenderArgs* args) {
if (_numVertices < 2) {
return;
}
PerformanceTimer perfTimer("RenderablePolyLineEntityItem::render");
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
if (!_pipeline || !_glowPipeline) {
size_t numVertices;
Transform transform;
gpu::TexturePointer texture;
withReadLock([&] {
numVertices = _numVertices;
transform = _renderTransform;
texture = _textureLoaded ? _texture->getGPUTexture() : DependencyManager::get<TextureCache>()->getWhiteTexture();
batch.setResourceBuffer(0, _polylineGeometryBuffer);
batch.setUniformBuffer(0, _polylineDataBuffer);
});
if (numVertices < 2) {
return;
}
if (!_pipeline) {
buildPipeline();
}
batch.setPipeline(_glow ? _glowPipeline : _pipeline);
batch.setModelTransform(_renderTransform);
batch.setResourceTexture(0, _textureLoaded ? _texture->getGPUTexture() : DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceBuffer(0, _polylineGeometryBuffer);
batch.setUniformBuffer(0, _polylineDataBuffer);
batch.draw(gpu::TRIANGLE_STRIP, (gpu::uint32)(2 * _numVertices), 0);
batch.setModelTransform(transform);
batch.setResourceTexture(0, texture);
batch.draw(gpu::TRIANGLE_STRIP, (gpu::uint32)(2 * numVertices), 0);
}

View file

@@ -31,7 +31,7 @@ public:
protected:
virtual bool needsRenderUpdate() const override;
virtual bool needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const override;
virtual void doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) override;
virtual void doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) override;
virtual ItemKey getKey() override;
virtual ShapeKey getShapeKey() override;

View file

@@ -249,10 +249,14 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
graphics::MultiMaterial materials;
auto geometryCache = DependencyManager::get<GeometryCache>();
GeometryCache::Shape geometryShape;
PrimitiveMode primitiveMode;
RenderLayer renderLayer;
bool proceduralRender = false;
glm::vec4 outColor;
withReadLock([&] {
geometryShape = geometryCache->getShapeForEntityShape(_shape);
primitiveMode = _primitiveMode;
renderLayer = _renderLayer;
batch.setModelTransform(_renderTransform); // use a transform with scale, rotation, registration point and translation
materials = _materials["0"];
auto& schema = materials.getSchemaBuffer().get<graphics::MultiMaterial::Schema>();
@@ -267,7 +271,7 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
});
if (proceduralRender) {
if (render::ShapeKey(args->_globalShapeKey).isWireframe()) {
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShape(batch, geometryShape, outColor);
} else {
geometryCache->renderShape(batch, geometryShape, outColor);
@@ -275,10 +279,16 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
} else if (!useMaterialPipeline(materials)) {
// FIXME, support instanced multi-shape rendering using multidraw indirect
outColor.a *= _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || _primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
render::ShapePipelinePointer pipeline;
if (renderLayer == RenderLayer::WORLD) {
pipeline = GeometryCache::getShapePipeline(false, outColor.a < 1.0f, true, false);
} else {
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, args->_shapePipeline);
pipeline = GeometryCache::getShapePipeline(false, outColor.a < 1.0f, true, false, false, true);
}
if (render::ShapeKey(args->_globalShapeKey).isWireframe() || primitiveMode == PrimitiveMode::LINES) {
geometryCache->renderWireShapeInstance(args, batch, geometryShape, outColor, pipeline);
} else {
geometryCache->renderSolidShapeInstance(args, batch, geometryShape, outColor, pipeline);
}
} else {
if (args->_renderMode != render::Args::RenderMode::SHADOW_RENDER_MODE) {

View file

@@ -162,10 +162,12 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
glm::vec4 backgroundColor;
Transform modelTransform;
glm::vec3 dimensions;
BillboardMode billboardMode;
bool layered;
withReadLock([&] {
modelTransform = _renderTransform;
dimensions = _dimensions;
billboardMode = _billboardMode;
float fadeRatio = _isFading ? Interpolate::calculateFadeRatio(_fadeStartTime) : 1.0f;
textColor = glm::vec4(_textColor, fadeRatio * _textAlpha);
@@ -190,7 +192,7 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
}
auto transformToTopLeft = modelTransform;
transformToTopLeft.setRotation(EntityItem::getBillboardRotation(transformToTopLeft.getTranslation(), transformToTopLeft.getRotation(), _billboardMode, args->getViewFrustum().getPosition()));
transformToTopLeft.setRotation(EntityItem::getBillboardRotation(transformToTopLeft.getTranslation(), transformToTopLeft.getRotation(), billboardMode, args->getViewFrustum().getPosition()));
transformToTopLeft.postTranslate(dimensions * glm::vec3(-0.5f, 0.5f, 0.0f)); // Go to the top left
transformToTopLeft.setScale(1.0f); // Use a scale of one so that the text is not deformed
@@ -210,10 +212,6 @@ void TextEntityRenderer::doRender(RenderArgs* args) {
glm::vec2 bounds = glm::vec2(dimensions.x - (_leftMargin + _rightMargin), dimensions.y - (_topMargin + _bottomMargin));
_textRenderer->draw(batch, _leftMargin / scale, -_topMargin / scale, _text, textColor, bounds / scale, layered);
}
if (layered) {
DependencyManager::get<DeferredLightingEffect>()->unsetKeyLightBatch(batch);
}
}
QSizeF TextEntityRenderer::textSize(const QString& text) const {

View file

@@ -368,6 +368,12 @@ public:
const ShapeData * getShapeData(Shape shape) const;
graphics::MeshPointer meshFromShape(Shape geometryShape, glm::vec3 color);
static render::ShapePipelinePointer getShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false, bool forward = false);
static render::ShapePipelinePointer getFadingShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false);
private:
GeometryCache();
@@ -471,11 +477,6 @@ private:
gpu::PipelinePointer _simpleOpaqueWebBrowserPipeline;
gpu::ShaderPointer _simpleTransparentWebBrowserShader;
gpu::PipelinePointer _simpleTransparentWebBrowserPipeline;
static render::ShapePipelinePointer getShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false, bool forward = false);
static render::ShapePipelinePointer getFadingShapePipeline(bool textured = false, bool transparent = false, bool culled = true,
bool unlit = false, bool depthBias = false);
};
#endif // hifi_GeometryCache_h

View file

@@ -95,7 +95,11 @@ void DrawLayered3D::run(const RenderContextPointer& renderContext, const Inputs&
// Setup lighting model for all items;
batch.setUniformBuffer(ru::Buffer::LightModel, lightingModel->getParametersBuffer());
renderShapes(renderContext, _shapePlumber, inItems, _maxDrawn);
if (_opaquePass) {
renderStateSortShapes(renderContext, _shapePlumber, inItems, _maxDrawn);
} else {
renderShapes(renderContext, _shapePlumber, inItems, _maxDrawn);
}
args->_batch = nullptr;
});
}

View file

@@ -216,8 +216,8 @@ void RenderDeferredTask::build(JobModel& task, const render::Varying& input, ren
task.addJob<DrawHaze>("DrawHazeDeferred", drawHazeInputs);
// Render transparent objects forward in LightingBuffer
const auto transparentsInputs = DrawDeferred::Inputs(transparents, hazeFrame, lightFrame, lightingModel, lightClusters, shadowFrame, jitter).asVarying();
task.addJob<DrawDeferred>("DrawTransparentDeferred", transparentsInputs, shapePlumber);
const auto transparentsInputs = RenderTransparentDeferred::Inputs(transparents, hazeFrame, lightFrame, lightingModel, lightClusters, shadowFrame, jitter).asVarying();
task.addJob<RenderTransparentDeferred>("DrawTransparentDeferred", transparentsInputs, shapePlumber);
const auto outlineRangeTimer = task.addJob<BeginGPURangeTimer>("BeginHighlightRangeTimer", "Highlight");
@@ -436,7 +436,7 @@ void RenderDeferredTaskDebug::build(JobModel& task, const render::Varying& input
}
void DrawDeferred::run(const RenderContextPointer& renderContext, const Inputs& inputs) {
void RenderTransparentDeferred::run(const RenderContextPointer& renderContext, const Inputs& inputs) {
assert(renderContext->args);
assert(renderContext->args->hasViewFrustum());
@@ -453,7 +453,7 @@ void DrawDeferred::run(const RenderContextPointer& renderContext, const Inputs&
RenderArgs* args = renderContext->args;
gpu::doInBatch("DrawDeferred::run", args->_context, [&](gpu::Batch& batch) {
gpu::doInBatch("RenderTransparentDeferred::run", args->_context, [&](gpu::Batch& batch) {
args->_batch = &batch;
// Setup camera, projection and viewport for all items

View file

@@ -19,7 +19,7 @@
#include "LightClusters.h"
#include "RenderShadowTask.h"
class DrawDeferredConfig : public render::Job::Config {
class RenderTransparentDeferredConfig : public render::Job::Config {
Q_OBJECT
Q_PROPERTY(int numDrawn READ getNumDrawn NOTIFY newStats)
Q_PROPERTY(int maxDrawn MEMBER maxDrawn NOTIFY dirty)
@@ -41,13 +41,13 @@ protected:
int _numDrawn{ 0 };
};
class DrawDeferred {
class RenderTransparentDeferred {
public:
using Inputs = render::VaryingSet7<render::ItemBounds, HazeStage::FramePointer, LightStage::FramePointer, LightingModelPointer, LightClustersPointer, LightStage::ShadowFramePointer, glm::vec2>;
using Config = DrawDeferredConfig;
using JobModel = render::Job::ModelI<DrawDeferred, Inputs, Config>;
using Config = RenderTransparentDeferredConfig;
using JobModel = render::Job::ModelI<RenderTransparentDeferred, Inputs, Config>;
DrawDeferred(render::ShapePlumberPointer shapePlumber)
RenderTransparentDeferred(render::ShapePlumberPointer shapePlumber)
: _shapePlumber{ shapePlumber } {}
void configure(const Config& config) { _maxDrawn = config.maxDrawn; }

View file

@@ -98,7 +98,7 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
// Draw opaques forward
const auto opaqueInputs = DrawForward::Inputs(opaques, lightingModel).asVarying();
task.addJob<DrawForward>("DrawOpaques", opaqueInputs, shapePlumber);
task.addJob<DrawForward>("DrawOpaques", opaqueInputs, shapePlumber, true);
// Similar to light stage, background stage has been filled by several potential render items and resolved for the frame in this job
const auto backgroundInputs = DrawBackgroundStage::Inputs(lightingModel, backgroundFrame).asVarying();
@@ -106,7 +106,7 @@ void RenderForwardTask::build(JobModel& task, const render::Varying& input, rend
// Draw transparent objects forward
const auto transparentInputs = DrawForward::Inputs(transparents, lightingModel).asVarying();
task.addJob<DrawForward>("DrawTransparents", transparentInputs, shapePlumber);
task.addJob<DrawForward>("DrawTransparents", transparentInputs, shapePlumber, false);
// Layered
const auto nullJitter = Varying(glm::vec2(0.0f, 0.0f));
@@ -261,7 +261,11 @@ void DrawForward::run(const RenderContextPointer& renderContext, const Inputs& i
args->_globalShapeKey = globalKey._flags.to_ulong();
// Render items
renderStateSortShapes(renderContext, _shapePlumber, inItems, -1, globalKey);
if (_opaquePass) {
renderStateSortShapes(renderContext, _shapePlumber, inItems, -1, globalKey);
} else {
renderShapes(renderContext, _shapePlumber, inItems, -1, globalKey);
}
args->_batch = nullptr;
args->_globalShapeKey = 0;

View file

@@ -76,12 +76,13 @@ public:
using Inputs = render::VaryingSet2<render::ItemBounds, LightingModelPointer>;
using JobModel = render::Job::ModelI<DrawForward, Inputs>;
DrawForward(const render::ShapePlumberPointer& shapePlumber) : _shapePlumber(shapePlumber) {}
DrawForward(const render::ShapePlumberPointer& shapePlumber, bool opaquePass) : _shapePlumber(shapePlumber), _opaquePass(opaquePass) {}
void run(const render::RenderContextPointer& renderContext,
const Inputs& inputs);
private:
render::ShapePlumberPointer _shapePlumber;
bool _opaquePass;
};
#endif // hifi_RenderForwardTask_h

View file

@@ -16,8 +16,8 @@
<@include gpu/Color.slh@>
<@include render-utils/ShaderConstants.h@>
<@include ForwardGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlended()$>
<@include DeferredGlobalLight.slh@>
<$declareEvalGlobalLightingAlphaBlendedWithHaze()$>
<@include gpu/Transform.slh@>
<$declareStandardCameraTransform()$>