Merge branch 'master' into propertyRangeAudit

Author: David Back, 2019-01-07 15:36:18 -08:00 (committed by GitHub)
commit 3aa245036b
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
16 changed files with 433 additions and 495 deletions

View file

@@ -19,66 +19,39 @@
#include <PerfStat.h>
#include <shaders/Shaders.h>
//#define POLYLINE_ENTITY_USE_FADE_EFFECT
#ifdef POLYLINE_ENTITY_USE_FADE_EFFECT
# include <FadeEffect.h>
#endif
#include "paintStroke_Shared.slh"
using namespace render;
using namespace render::entities;
static uint8_t CUSTOM_PIPELINE_NUMBER { 0 };
static const int32_t PAINTSTROKE_TEXTURE_SLOT { 0 };
static gpu::Stream::FormatPointer polylineFormat;
static gpu::PipelinePointer polylinePipeline;
#ifdef POLYLINE_ENTITY_USE_FADE_EFFECT
static gpu::PipelinePointer polylineFadePipeline;
#endif
gpu::PipelinePointer PolyLineEntityRenderer::_pipeline = nullptr;
static render::ShapePipelinePointer shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key, gpu::Batch& batch) {
if (!polylinePipeline) {
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::entities_renderer::program::paintStroke);
#ifdef POLYLINE_ENTITY_USE_FADE_EFFECT
auto fadeVS = gpu::Shader::createVertex(std::string(paintStroke_fade_vert));
auto fadePS = gpu::Shader::createPixel(std::string(paintStroke_fade_frag));
gpu::ShaderPointer fadeProgram = gpu::Shader::createProgram(fadeVS, fadePS);
#endif
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(true, true, gpu::LESS_EQUAL);
PrepareStencil::testMask(*state);
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
polylinePipeline = gpu::Pipeline::create(program, state);
#ifdef POLYLINE_ENTITY_USE_FADE_EFFECT
_fadePipeline = gpu::Pipeline::create(fadeProgram, state);
#endif
}
#ifdef POLYLINE_ENTITY_USE_FADE_EFFECT
if (key.isFaded()) {
auto fadeEffect = DependencyManager::get<FadeEffect>();
return std::make_shared<render::ShapePipeline>(_fadePipeline, nullptr, fadeEffect->getBatchSetter(), fadeEffect->getItemUniformSetter());
} else {
#endif
return std::make_shared<render::ShapePipeline>(polylinePipeline, nullptr, nullptr, nullptr);
#ifdef POLYLINE_ENTITY_USE_FADE_EFFECT
}
#endif
}
static const QUrl DEFAULT_POLYLINE_TEXTURE = PathUtils::resourcesUrl("images/paintStroke.png");
PolyLineEntityRenderer::PolyLineEntityRenderer(const EntityItemPointer& entity) : Parent(entity) {
static std::once_flag once;
std::call_once(once, [&] {
CUSTOM_PIPELINE_NUMBER = render::ShapePipeline::registerCustomShapePipelineFactory(shapePipelineFactory);
polylineFormat.reset(new gpu::Stream::Format());
polylineFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), offsetof(Vertex, position));
polylineFormat->setAttribute(gpu::Stream::NORMAL, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), offsetof(Vertex, normal));
polylineFormat->setAttribute(gpu::Stream::TEXCOORD, 0, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV), offsetof(Vertex, uv));
polylineFormat->setAttribute(gpu::Stream::COLOR, 0, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::RGB), offsetof(Vertex, color));
});
_texture = DependencyManager::get<TextureCache>()->getTexture(DEFAULT_POLYLINE_TEXTURE);
_verticesBuffer = std::make_shared<gpu::Buffer>();
{ // Initialize our buffers
_polylineDataBuffer = std::make_shared<gpu::Buffer>();
_polylineDataBuffer->resize(sizeof(PolylineData));
PolylineData data { glm::vec2(_faceCamera, _glow), glm::vec2(0.0f) };
_polylineDataBuffer->setSubData(0, data);
_polylineGeometryBuffer = std::make_shared<gpu::Buffer>();
}
}
void PolyLineEntityRenderer::buildPipeline() {
// FIXME: opaque pipeline
gpu::ShaderPointer program = gpu::Shader::createProgram(shader::entities_renderer::program::paintStroke);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setCullMode(gpu::State::CullMode::CULL_NONE);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
PrepareStencil::testMask(*state);
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_pipeline = gpu::Pipeline::create(program, state);
}
ItemKey PolyLineEntityRenderer::getKey() {
@@ -86,152 +59,164 @@ ItemKey PolyLineEntityRenderer::getKey() {
}
ShapeKey PolyLineEntityRenderer::getShapeKey() {
return ShapeKey::Builder().withCustom(CUSTOM_PIPELINE_NUMBER).build();
return ShapeKey::Builder().withOwnPipeline().withTranslucent().withoutCullFace();
}
bool PolyLineEntityRenderer::needsRenderUpdate() const {
bool textureLoadedChanged = resultWithReadLock<bool>([&] {
return (!_textureLoaded && _texture && _texture->isLoaded());
});
if (textureLoadedChanged) {
return true;
}
return Parent::needsRenderUpdate();
}
bool PolyLineEntityRenderer::needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const {
return (
entity->pointsChanged() ||
entity->strokeWidthsChanged() ||
entity->widthsChanged() ||
entity->normalsChanged() ||
entity->texturesChanged() ||
entity->strokeColorsChanged()
entity->colorsChanged() ||
_isUVModeStretch != entity->getIsUVModeStretch() ||
_glow != entity->getGlow() ||
_faceCamera != entity->getFaceCamera()
);
}
void PolyLineEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scene, Transaction& transaction, const TypedEntityPointer& entity) {
static const QUrl DEFAULT_POLYLINE_TEXTURE = QUrl(PathUtils::resourcesPath() + "images/paintStroke.png");
QUrl entityTextures = DEFAULT_POLYLINE_TEXTURE;
void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
auto pointsChanged = entity->pointsChanged();
auto widthsChanged = entity->widthsChanged();
auto normalsChanged = entity->normalsChanged();
auto colorsChanged = entity->colorsChanged();
bool isUVModeStretch = entity->getIsUVModeStretch();
bool glow = entity->getGlow();
bool faceCamera = entity->getFaceCamera();
entity->resetPolyLineChanged();
// Transform
updateModelTransformAndBound();
_renderTransform = getModelTransform();
// Textures
if (entity->texturesChanged()) {
entity->resetTexturesChanged();
QUrl entityTextures = DEFAULT_POLYLINE_TEXTURE;
auto textures = entity->getTextures();
if (!textures.isEmpty()) {
entityTextures = QUrl(textures);
}
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
_textureAspectRatio = 1.0f;
_textureLoaded = false;
}
if (!_texture) {
_texture = DependencyManager::get<TextureCache>()->getTexture(entityTextures);
bool textureChanged = false;
if (!_textureLoaded && _texture && _texture->isLoaded()) {
textureChanged = true;
_textureAspectRatio = (float)_texture->getOriginalHeight() / (float)_texture->getOriginalWidth();
_textureLoaded = true;
}
}
void PolyLineEntityRenderer::doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) {
auto pointsChanged = entity->pointsChanged();
auto strokeWidthsChanged = entity->strokeWidthsChanged();
auto normalsChanged = entity->normalsChanged();
auto strokeColorsChanged = entity->strokeColorsChanged();
bool isUVModeStretch = entity->getIsUVModeStretch();
entity->resetPolyLineChanged();
_polylineTransform = Transform();
_polylineTransform.setTranslation(entity->getWorldPosition());
_polylineTransform.setRotation(entity->getWorldOrientation());
// Data
if (faceCamera != _faceCamera || glow != _glow) {
_faceCamera = faceCamera;
_glow = glow;
updateData();
}
// Geometry
if (pointsChanged) {
_lastPoints = entity->getLinePoints();
_points = entity->getLinePoints();
}
if (strokeWidthsChanged) {
_lastStrokeWidths = entity->getStrokeWidths();
if (widthsChanged) {
_widths = entity->getStrokeWidths();
}
if (normalsChanged) {
_lastNormals = entity->getNormals();
_normals = entity->getNormals();
}
if (strokeColorsChanged) {
_lastStrokeColors = entity->getStrokeColors();
_lastStrokeColors = _lastNormals.size() == _lastStrokeColors.size() ? _lastStrokeColors : QVector<glm::vec3>({ toGlm(entity->getColor()) });
if (colorsChanged) {
_colors = entity->getStrokeColors();
_color = toGlm(entity->getColor());
}
if (pointsChanged || strokeWidthsChanged || normalsChanged || strokeColorsChanged) {
_empty = std::min(_lastPoints.size(), std::min(_lastNormals.size(), _lastStrokeWidths.size())) < 2;
if (!_empty) {
updateGeometry(updateVertices(_lastPoints, _lastNormals, _lastStrokeWidths, _lastStrokeColors, isUVModeStretch, _textureAspectRatio));
}
if (_isUVModeStretch != isUVModeStretch || pointsChanged || widthsChanged || normalsChanged || colorsChanged || textureChanged) {
_isUVModeStretch = isUVModeStretch;
updateGeometry();
}
}
void PolyLineEntityRenderer::updateGeometry(const std::vector<Vertex>& vertices) {
_numVertices = (uint32_t)vertices.size();
auto bufferSize = _numVertices * sizeof(Vertex);
if (bufferSize > _verticesBuffer->getSize()) {
_verticesBuffer->resize(bufferSize);
}
_verticesBuffer->setSubData(0, vertices);
}
void PolyLineEntityRenderer::updateGeometry() {
int maxNumVertices = std::min(_points.length(), _normals.length());
std::vector<PolyLineEntityRenderer::Vertex> PolyLineEntityRenderer::updateVertices(const QVector<glm::vec3>& points,
const QVector<glm::vec3>& normals,
const QVector<float>& strokeWidths,
const QVector<glm::vec3>& strokeColors,
const bool isUVModeStretch,
const float textureAspectRatio) {
// Calculate the minimum vector size out of normals, points, and stroke widths
int size = std::min(points.size(), std::min(normals.size(), strokeWidths.size()));
std::vector<Vertex> vertices;
// Guard against an empty polyline
if (size <= 0) {
return vertices;
}
float uCoordInc = 1.0f / size;
float uCoord = 0.0f;
int finalIndex = size - 1;
glm::vec3 binormal;
float accumulatedDistance = 0.0f;
float distanceToLastPoint = 0.0f;
float accumulatedStrokeWidth = 0.0f;
float strokeWidth = 0.0f;
bool doesStrokeWidthVary = false;
for (int i = 1; i < strokeWidths.size(); i++) {
if (strokeWidths[i] != strokeWidths[i - 1]) {
doesStrokeWidthVary = true;
break;
if (_widths.size() >= 0) {
for (int i = 1; i < maxNumVertices; i++) {
float width = PolyLineEntityItem::DEFAULT_LINE_WIDTH;
if (i < _widths.length()) {
width = _widths[i];
}
if (width != _widths[i - 1]) {
doesStrokeWidthVary = true;
break;
}
}
}
for (int i = 0; i <= finalIndex; i++) {
const float& width = strokeWidths.at(i);
const auto& point = points.at(i);
const auto& normal = normals.at(i);
const auto& color = strokeColors.size() == normals.size() ? strokeColors.at(i) : strokeColors.at(0);
int vertexIndex = i * 2;
float uCoordInc = 1.0f / maxNumVertices;
float uCoord = 0.0f;
float accumulatedDistance = 0.0f;
float accumulatedStrokeWidth = 0.0f;
glm::vec3 binormal;
if (!isUVModeStretch && i >= 1) {
distanceToLastPoint = glm::distance(points.at(i), points.at(i - 1));
accumulatedDistance += distanceToLastPoint;
strokeWidth = 2 * strokeWidths[i];
std::vector<PolylineVertex> vertices;
vertices.reserve(maxNumVertices);
for (int i = 0; i < maxNumVertices; i++) {
// Position
glm::vec3 point = _points[i];
if (doesStrokeWidthVary) {
//If the stroke varies along the line the texture will stretch more or less depending on the speed
//because it looks better than using the same method as below
accumulatedStrokeWidth += strokeWidth;
float increaseValue = 1;
if (accumulatedStrokeWidth != 0) {
float newUcoord = glm::ceil(((1.0f / textureAspectRatio) * accumulatedDistance) / (accumulatedStrokeWidth / i));
increaseValue = newUcoord - uCoord;
// uCoord
float width = i < _widths.size() ? _widths[i] : PolyLineEntityItem::DEFAULT_LINE_WIDTH;
if (i > 0) { // First uCoord is 0.0f
if (!_isUVModeStretch) {
accumulatedDistance += glm::distance(point, _points[i - 1]);
if (doesStrokeWidthVary) {
//If the stroke varies along the line the texture will stretch more or less depending on the speed
//because it looks better than using the same method as below
accumulatedStrokeWidth += width;
float increaseValue = 1;
if (accumulatedStrokeWidth != 0) {
float newUcoord = glm::ceil((_textureAspectRatio * accumulatedDistance) / (accumulatedStrokeWidth / i));
increaseValue = newUcoord - uCoord;
}
increaseValue = increaseValue > 0 ? increaseValue : 1;
uCoord += increaseValue;
} else {
// If the stroke width is constant then the textures should keep the aspect ratio along the line
uCoord = (_textureAspectRatio * accumulatedDistance) / width;
}
increaseValue = increaseValue > 0 ? increaseValue : 1;
uCoord += increaseValue;
} else {
//If the stroke width is constant then the textures should keep the aspect ratio along the line
uCoord = ((1.0f / textureAspectRatio) * accumulatedDistance) / strokeWidth;
uCoord += uCoordInc;
}
} else if (vertexIndex >= 2) {
uCoord += uCoordInc;
}
// Color
glm::vec3 color = i < _colors.length() ? _colors[i] : _color;
// Normal
glm::vec3 normal = _normals[i];
// Binormal
// For last point we can assume binormals are the same since it represents the last two vertices of quad
if (i < finalIndex) {
const auto tangent = points.at(i + 1) - point;
binormal = glm::normalize(glm::cross(tangent, normal)) * width;
if (i < maxNumVertices - 1) {
glm::vec3 tangent = _points[i + 1] - point;
binormal = glm::normalize(glm::cross(tangent, normal));
// Check to make sure binormal is not a NAN. If it is, don't add to vertices vector
if (binormal.x != binormal.x) {
@@ -239,54 +224,36 @@ std::vector<PolyLineEntityRenderer::Vertex> PolyLineEntityRenderer::updateVertic
}
}
const auto v1 = points.at(i) + binormal;
const auto v2 = points.at(i) - binormal;
vertices.emplace_back(v1, normal, vec2(uCoord, 0.0f), color);
vertices.emplace_back(v2, normal, vec2(uCoord, 1.0f), color);
PolylineVertex vertex = { glm::vec4(point, uCoord), glm::vec4(color, 1.0f), glm::vec4(normal, 0.0f), glm::vec4(binormal, 0.5f * width) };
vertices.push_back(vertex);
}
return vertices;
_numVertices = vertices.size();
_polylineGeometryBuffer->setData(vertices.size() * sizeof(PolylineVertex), (const gpu::Byte*) vertices.data());
}
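The uCoord bookkeeping above is the densest part of the new updateGeometry(): in repeat mode the texture coordinate advances with accumulated arc length (stepped to whole texture repeats when the stroke width varies), while stretch mode simply spreads the texture evenly across the points. Below is a minimal standalone sketch of that accumulation against plain glm types, so it can be compiled and tested in isolation; the function name and parameters are illustrative, not engine API.

#include <cmath>
#include <vector>
#include <glm/glm.hpp>

// Per-point u texture coordinates for a polyline, mirroring updateGeometry()'s logic.
std::vector<float> computeUCoords(const std::vector<glm::vec3>& points,
                                  const std::vector<float>& widths,
                                  float defaultWidth,       // e.g. PolyLineEntityItem::DEFAULT_LINE_WIDTH
                                  float textureAspectRatio, // height / width of the stroke texture
                                  bool uvModeStretch) {
    const int count = (int)points.size();
    std::vector<float> uCoords(count, 0.0f);

    // Does the stroke width vary anywhere along the line?
    bool widthVaries = false;
    for (int i = 1; i < count; i++) {
        float w = i < (int)widths.size() ? widths[i] : defaultWidth;
        float prev = (i - 1) < (int)widths.size() ? widths[i - 1] : defaultWidth;
        if (w != prev) {
            widthVaries = true;
            break;
        }
    }

    float uCoordInc = count > 0 ? 1.0f / count : 0.0f;
    float uCoord = 0.0f;
    float accumulatedDistance = 0.0f;
    float accumulatedStrokeWidth = 0.0f;

    for (int i = 1; i < count; i++) { // the first uCoord stays 0.0f
        float width = i < (int)widths.size() ? widths[i] : defaultWidth;
        if (!uvModeStretch) {
            accumulatedDistance += glm::distance(points[i], points[i - 1]);
            if (widthVaries) {
                // Varying width: jump to the next whole repeat, scaled by the mean width so far.
                accumulatedStrokeWidth += width;
                float increase = 1.0f;
                if (accumulatedStrokeWidth != 0.0f) {
                    float newUcoord = std::ceil((textureAspectRatio * accumulatedDistance) / (accumulatedStrokeWidth / i));
                    increase = newUcoord - uCoord;
                }
                uCoord += increase > 0.0f ? increase : 1.0f;
            } else {
                // Constant width: keep the texture aspect ratio along the line.
                uCoord = (textureAspectRatio * accumulatedDistance) / width;
            }
        } else {
            // Stretch mode: spread the texture evenly over all points.
            uCoord += uCoordInc;
        }
        uCoords[i] = uCoord;
    }
    return uCoords;
}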
scriptable::ScriptableModelBase PolyLineEntityRenderer::getScriptableModel() {
// TODO: adapt polyline into a triangles mesh...
return EntityRenderer::getScriptableModel();
void PolyLineEntityRenderer::updateData() {
PolylineData data { glm::vec2(_faceCamera, _glow), glm::vec2(0.0f) };
_polylineDataBuffer->setSubData(0, data);
}
void PolyLineEntityRenderer::doRender(RenderArgs* args) {
if (_empty) {
if (_numVertices < 2) {
return;
}
PerformanceTimer perfTimer("RenderablePolyLineEntityItem::render");
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(_polylineTransform);
if (_texture && _texture->isLoaded()) {
batch.setResourceTexture(PAINTSTROKE_TEXTURE_SLOT, _texture->getGPUTexture());
} else {
batch.setResourceTexture(PAINTSTROKE_TEXTURE_SLOT, DependencyManager::get<TextureCache>()->getWhiteTexture());
if (!_pipeline) {
buildPipeline();
}
float textureWidth = (float)_texture->getOriginalWidth();
float textureHeight = (float)_texture->getOriginalHeight();
if (textureWidth != 0 && textureHeight != 0) {
_textureAspectRatio = textureWidth / textureHeight;
}
batch.setInputFormat(polylineFormat);
batch.setInputBuffer(0, _verticesBuffer, 0, sizeof(Vertex));
#ifndef POLYLINE_ENTITY_USE_FADE_EFFECT
// glColor4f must be called after setInputFormat if it must be taken into account
if (_isFading) {
batch._glColor4f(1.0f, 1.0f, 1.0f, Interpolate::calculateFadeRatio(_fadeStartTime));
} else {
batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
}
#endif
batch.draw(gpu::TRIANGLE_STRIP, _numVertices, 0);
batch.setPipeline(_pipeline);
batch.setModelTransform(_renderTransform);
batch.setResourceTexture(0, _textureLoaded ? _texture->getGPUTexture() : DependencyManager::get<TextureCache>()->getWhiteTexture());
batch.setResourceBuffer(0, _polylineGeometryBuffer);
batch.setUniformBuffer(0, _polylineDataBuffer);
batch.draw(gpu::TRIANGLE_STRIP, (gpu::uint32)(2 * _numVertices), 0);
}
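The draw call above issues 2 * _numVertices strip vertices while _polylineGeometryBuffer holds only one PolylineVertex per polyline point; paintStroke.slv expands each packed entry into the two edge vertices of the triangle strip from gl_VertexID. A CPU-side sketch of that expansion (the non face-camera path), using only glm; the PolylineVertex layout follows paintStroke_Shared.slh and the other names are illustrative.

#include <vector>
#include <glm/glm.hpp>

// Mirrors the packed layout declared in paintStroke_Shared.slh.
struct PolylineVertex {
    glm::vec4 positionAndUCoord;    // xyz = model-space point, w = u texture coordinate
    glm::vec4 color;                // linear rgba
    glm::vec4 normal;               // xyz = stroke normal
    glm::vec4 binormalAndHalfWidth; // xyz = unit binormal, w = half stroke width
};

struct ExpandedVertex {
    glm::vec3 position;
    glm::vec2 uv;
};

// CPU equivalent of what paintStroke.slv does per gl_VertexID when faceCamera is off.
std::vector<ExpandedVertex> expandStrip(const std::vector<PolylineVertex>& packed) {
    std::vector<ExpandedVertex> strip;
    strip.reserve(2 * packed.size());
    for (int id = 0; id < (int)(2 * packed.size()); id++) {
        const PolylineVertex& v = packed[id / 2];
        float evenVertex = (id % 2 == 0) ? 1.0f : 0.0f;
        float distanceFromCenter = -1.0f + 2.0f * evenVertex; // +1 on one strip edge, -1 on the other
        glm::vec3 offset = distanceFromCenter * v.binormalAndHalfWidth.w * glm::vec3(v.binormalAndHalfWidth);
        strip.push_back({ glm::vec3(v.positionAndUCoord) + offset,
                          glm::vec2(v.positionAndUCoord.w, 1.0f - evenVertex) });
    }
    return strip;
}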

View file

@@ -25,52 +25,40 @@ class PolyLineEntityRenderer : public TypedEntityRenderer<PolyLineEntityItem> {
public:
PolyLineEntityRenderer(const EntityItemPointer& entity);
virtual scriptable::ScriptableModelBase getScriptableModel() override;
// FIXME: shouldn't always be transparent: take into account texture and glow
virtual bool isTransparent() const override { return true; }
protected:
virtual bool needsRenderUpdate() const override;
virtual bool needsRenderUpdateFromTypedEntity(const TypedEntityPointer& entity) const override;
virtual void doRenderUpdateSynchronousTyped(const ScenePointer& scene,
Transaction& transaction,
const TypedEntityPointer& entity) override;
virtual void doRenderUpdateAsynchronousTyped(const TypedEntityPointer& entity) override;
virtual ItemKey getKey() override;
virtual ShapeKey getShapeKey() override;
virtual void doRender(RenderArgs* args) override;
virtual bool isTransparent() const override { return true; }
void buildPipeline();
void updateGeometry();
void updateData();
struct Vertex {
Vertex() {}
Vertex(const vec3& position, const vec3& normal, const vec2& uv, const vec3& color) : position(position),
normal(normal),
uv(uv),
color(color) {}
vec3 position;
vec3 normal;
vec2 uv;
vec3 color;
};
QVector<glm::vec3> _points;
QVector<glm::vec3> _normals;
QVector<glm::vec3> _colors;
glm::vec3 _color;
QVector<float> _widths;
void updateGeometry(const std::vector<Vertex>& vertices);
static std::vector<Vertex> updateVertices(const QVector<glm::vec3>& points,
const QVector<glm::vec3>& normals,
const QVector<float>& strokeWidths,
const QVector<glm::vec3>& strokeColors,
const bool isUVModeStretch,
const float textureAspectRatio);
Transform _polylineTransform;
QVector<glm::vec3> _lastPoints;
QVector<glm::vec3> _lastNormals;
QVector<glm::vec3> _lastStrokeColors;
QVector<float> _lastStrokeWidths;
gpu::BufferPointer _verticesBuffer;
uint32_t _numVertices { 0 };
bool _empty{ true };
NetworkTexturePointer _texture;
float _textureAspectRatio { 1.0f };
bool _textureLoaded { false };
bool _isUVModeStretch;
bool _faceCamera;
bool _glow;
size_t _numVertices;
gpu::BufferPointer _polylineDataBuffer;
gpu::BufferPointer _polylineGeometryBuffer;
static gpu::PipelinePointer _pipeline;
};
} } // namespace

View file

@@ -14,21 +14,27 @@
<@include DeferredBufferWrite.slh@>
// the albedo texture
LAYOUT(binding=0) uniform sampler2D originalTexture;
<@include paintStroke.slh@>
<$declarePolyLineBuffers()$>
// the interpolated normal
layout(location=0) in vec3 interpolatedNormal;
layout(location=1) in vec2 varTexcoord;
layout(location=2) in vec4 varColor;
LAYOUT(binding=0) uniform sampler2D _texture;
layout(location=0) in vec3 _normalWS;
layout(location=1) in vec2 _texCoord;
layout(location=2) in vec4 _color;
layout(location=3) in float _distanceFromCenter;
void main(void) {
vec4 texel = texture(originalTexture, varTexcoord);
int frontCondition = 1 -int(gl_FrontFacing) * 2;
vec3 color = varColor.rgb;
vec4 texel = texture(_texture, _texCoord);
int frontCondition = 1 - 2 * int(gl_FrontFacing);
vec3 color = _color.rgb * texel.rgb;
float alpha = texel.a * _color.a;
alpha *= mix(1.0, pow(1.0 - abs(_distanceFromCenter), 10.0), _polylineData.faceCameraGlow.y);
packDeferredFragmentTranslucent(
float(frontCondition) * interpolatedNormal,
texel.a * varColor.a,
color * texel.rgb,
10.0);
float(frontCondition) * _normalWS,
alpha,
color,
DEFAULT_ROUGHNESS);
}
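The added alpha term is where the glow flag lands in the shading: when _polylineData.faceCameraGlow.y is 1.0, alpha falls off steeply toward the edges of the strip. The same falloff written as a small scalar helper, for clarity only; the function is illustrative and not part of the shader library.

#include <glm/glm.hpp>

// distanceFromCenter runs from -1.0 at one strip edge to +1.0 at the other (0.0 on the centerline).
float glowAlpha(float baseAlpha, float distanceFromCenter, bool glowEnabled) {
    float falloff = glm::pow(1.0f - glm::abs(distanceFromCenter), 10.0f);
    return baseAlpha * glm::mix(1.0f, falloff, glowEnabled ? 1.0f : 0.0f);
}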

View file

@@ -0,0 +1,48 @@
<!
// paintStroke.slh
//
// Created by Sam Gondelman on 12/13/2018
// Copyright 2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
!>
<@if not PAINTSTROKE_SLH@>
<@def PAINTSTROKE_SLH@>
<@include paintStroke_Shared.slh@>
<@include gpu/ShaderConstants.h@>
<@func declarePolyLineBuffers() @>
// Hack comment to absorb the extra '//' scribe prepends
#if !defined(GPU_SSBO_TRANSFORM_OBJECT)
LAYOUT(binding=GPU_RESOURCE_BUFFER_SLOT0_TEXTURE) uniform samplerBuffer polylineVerticesBuffer;
PolylineVertex getPolylineVertex(int i) {
int offset = 4 * i;
PolylineVertex vertex;
vertex.positionAndUCoord = texelFetch(polylineVerticesBuffer, offset);
vertex.color = texelFetch(polylineVerticesBuffer, offset + 1);
vertex.normal = texelFetch(polylineVerticesBuffer, offset + 2);
vertex.binormalAndHalfWidth = texelFetch(polylineVerticesBuffer, offset + 3);
return vertex;
}
#else
LAYOUT_STD140(binding=GPU_RESOURCE_BUFFER_SLOT0_STORAGE) buffer polylineVerticesBuffer {
PolylineVertex _vertices[];
};
PolylineVertex getPolylineVertex(int i) {
PolylineVertex vertex = _vertices[i];
return vertex;
}
#endif
LAYOUT_STD140(binding=0) uniform polylineDataBuffer {
PolylineData _polylineData;
};
<@endfunc@>
<@endif@>

View file

@@ -17,23 +17,45 @@
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
// the interpolated normal
layout(location=0) out vec3 interpolatedNormal;
<@include paintStroke.slh@>
<$declarePolyLineBuffers()$>
//the diffuse texture
layout(location=1) out vec2 varTexcoord;
layout(location=2) out vec4 varColor;
layout(location=0) out vec3 _normalWS;
layout(location=1) out vec2 _texCoord;
layout(location=2) out vec4 _color;
layout(location=3) out float _distanceFromCenter;
void main(void) {
varTexcoord = inTexCoord0.st;
PolylineVertex vertex = getPolylineVertex(gl_VertexID / 2);
float evenVertex = float(gl_VertexID % 2 == 0);
// pass along the diffuse color
varColor = color_sRGBAToLinear(inColor);
_texCoord = vec2(vertex.positionAndUCoord.w, mix(1.0, 0.0, evenVertex));
_color = color_sRGBAToLinear(vertex.color);
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, inPosition, gl_Position)$>
<$transformModelToEyeDir(cam, obj, inNormal.xyz, interpolatedNormal)$>
_distanceFromCenter = -1.0 + 2.0 * evenVertex;
vec4 position = vec4(vertex.positionAndUCoord.xyz, 1.0);
vec3 normal = vertex.normal.xyz;
vec3 binormal = vertex.binormalAndHalfWidth.xyz;
if (_polylineData.faceCameraGlow.x != 0.0) {
vec4 posEye;
vec3 normalEye;
vec3 binormalEye;
<$transformModelToEyePos(cam, obj, position, posEye)$>
<$transformModelToEyeDir(cam, obj, normal, normalEye)$>
<$transformModelToEyeDir(cam, obj, binormal, binormalEye)$>
vec3 tangentEye = cross(binormalEye, normalEye);
// new normal faces the camera
normalEye = normalize(posEye.xyz);
binormalEye = normalize(cross(normalEye, tangentEye));
posEye.xyz += _distanceFromCenter * vertex.binormalAndHalfWidth.w * binormalEye;
<$transformEyeToClipPos(cam, posEye, gl_Position)$>
<$transformEyeToWorldDir(cam, normalEye, _normalWS)$>
} else {
position.xyz += _distanceFromCenter * vertex.binormalAndHalfWidth.w * binormal;
<$transformModelToClipPos(cam, obj, position, gl_Position)$>
<$transformModelToWorldDir(cam, obj, normal, _normalWS)$>
}
}
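The faceCamera branch keeps the stroke's tangent but rebuilds the normal/binormal pair in eye space, so each segment's quad rotates toward the viewer before the half-width offset is applied. The same math as a small glm helper; names and signature are illustrative.

#include <glm/glm.hpp>

// Returns the eye-space offset to add to the point for one strip edge when faceCamera is on.
glm::vec3 faceCameraOffset(const glm::vec3& posEye,      // point position in eye space
                           const glm::vec3& normalEye,   // stroke normal in eye space
                           const glm::vec3& binormalEye, // stroke binormal in eye space
                           float halfWidth,              // binormalAndHalfWidth.w
                           float distanceFromCenter) {   // -1.0 or +1.0
    glm::vec3 tangentEye = glm::cross(binormalEye, normalEye);
    glm::vec3 newNormal = glm::normalize(posEye);                   // line of sight through the point
    glm::vec3 newBinormal = glm::normalize(glm::cross(newNormal, tangentEye));
    return distanceFromCenter * halfWidth * newBinormal;
}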

View file

@@ -0,0 +1,25 @@
// glsl / C++ compatible source as interface for FadeEffect
#ifdef __cplusplus
# define _PL_VEC4 glm::vec4
# define _PL_VEC2 glm::vec2
#else
# define _PL_VEC4 vec4
# define _PL_VEC2 vec2
#endif
struct PolylineVertex {
_PL_VEC4 positionAndUCoord;
_PL_VEC4 color;
_PL_VEC4 normal;
_PL_VEC4 binormalAndHalfWidth;
};
struct PolylineData {
_PL_VEC2 faceCameraGlow;
_PL_VEC2 spare;
};
// <@if 1@>
// Trigger Scribe include
// <@endif@> <!def that !>
//
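Because this header is compiled both as C++ (through the _PL_VEC4/_PL_VEC2 aliases) and as GLSL under std140, every member is a vec4 or vec2, which keeps the two layouts byte-compatible. Below are a couple of compile-time checks one could add on the C++ side to guard that invariant; these asserts are illustrative and not part of the commit.

#include <glm/glm.hpp>
#include "paintStroke_Shared.slh" // the same header, seen through its __cplusplus branch

// Four vec4s (64 bytes) and two vec2s (16 bytes): both structs are 16-byte multiples with no
// implicit padding, so the tightly packed C++ layout matches the std140 layout in the shaders.
static_assert(sizeof(PolylineVertex) == 4 * sizeof(glm::vec4), "PolylineVertex must stay vec4-only");
static_assert(sizeof(PolylineData) == 2 * sizeof(glm::vec2), "PolylineData must stay two vec2s");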

View file

@@ -1,52 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// paintStroke_fade.frag
// fragment shader
//
// Created by Olivier Prat on 19/07/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
<@include Fade.slh@>
<$declareFadeFragment()$>
// the albedo texture
LAYOUT(binding=0) uniform sampler2D originalTexture;
// the interpolated normal
layout(location=0) in vec3 interpolatedNormal;
layout(location=1) in vec2 varTexcoord;
layout(location=2) in vec4 varColor;
layout(location=3) in vec4 _worldPosition;
struct PolyLineUniforms {
vec3 color;
};
LAYOUT(binding=0) uniform polyLineBuffer {
PolyLineUniforms polyline;
};
void main(void) {
vec3 fadeEmissive;
FadeObjectParams fadeParams;
<$fetchFadeObjectParams(fadeParams)$>
applyFade(fadeParams, _worldPosition.xyz, fadeEmissive);
vec4 texel = texture(originalTexture, varTexcoord);
int frontCondition = 1 -int(gl_FrontFacing) * 2;
vec3 color = varColor.rgb;
packDeferredFragmentTranslucent(
interpolatedNormal * float(frontCondition),
texel.a * varColor.a,
polyline.color * texel.rgb + fadeEmissive,
10.0);
}

View file

@@ -1,43 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// paintStroke_fade.vert
// vertex shader
//
// Created by Olivier Prat on 19/07/17.
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Color.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
// the interpolated normal
layout(location=0) out vec3 interpolatedNormal;
//the diffuse texture
layout(location=1) out vec2 varTexcoord;
layout(location=2) out vec4 varColor;
layout(location=3) out vec4 _worldPosition;
void main(void) {
varTexcoord = inTexCoord0.st;
// pass along the diffuse color
varColor = color_sRGBAToLinear(inColor);
// standard transform
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, inPosition, gl_Position)$>
<$transformModelToEyeDir(cam, obj, inNormal.xyz, interpolatedNormal)$>
<$transformModelToWorldPos(obj, inPosition, _worldPosition)$>
}

View file

@@ -527,6 +527,8 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
CHECK_PROPERTY_CHANGE(PROP_STROKE_NORMALS, normals);
CHECK_PROPERTY_CHANGE(PROP_STROKE_COLORS, strokeColors);
CHECK_PROPERTY_CHANGE(PROP_IS_UV_MODE_STRETCH, isUVModeStretch);
CHECK_PROPERTY_CHANGE(PROP_LINE_GLOW, glow);
CHECK_PROPERTY_CHANGE(PROP_LINE_FACE_CAMERA, faceCamera);
// Shape
CHECK_PROPERTY_CHANGE(PROP_SHAPE, shape);
@@ -1051,6 +1053,8 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
* format.
* @property {boolean} isUVModeStretch=true - If <code>true</code>, the texture is stretched to fill the whole line, otherwise
* the texture repeats along the line.
* @property {bool} glow=false - If <code>true</code>, the alpha of the strokes will drop off farther from the center.
* @property {bool} faceCamera=false - If <code>true</code>, each line segment will rotate to face the camera.
* @example <caption>Draw a textured "V".</caption>
* var entity = Entities.addEntity({
* type: "PolyLine",
@@ -1634,6 +1638,8 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_STROKE_NORMALS, normals);
COPY_PROPERTY_TO_QSCRIPTVALUE_TYPED(PROP_STROKE_COLORS, strokeColors, qVectorVec3Color);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_IS_UV_MODE_STRETCH, isUVModeStretch);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LINE_GLOW, glow);
COPY_PROPERTY_TO_QSCRIPTVALUE(PROP_LINE_FACE_CAMERA, faceCamera);
}
// Materials
@@ -1956,6 +1962,8 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object, bool
COPY_PROPERTY_FROM_QSCRIPTVALUE(normals, qVectorVec3, setNormals);
COPY_PROPERTY_FROM_QSCRIPTVALUE(strokeColors, qVectorVec3, setStrokeColors);
COPY_PROPERTY_FROM_QSCRIPTVALUE(isUVModeStretch, bool, setIsUVModeStretch);
COPY_PROPERTY_FROM_QSCRIPTVALUE(glow, bool, setGlow);
COPY_PROPERTY_FROM_QSCRIPTVALUE(faceCamera, bool, setFaceCamera);
// Shape
COPY_PROPERTY_FROM_QSCRIPTVALUE(shape, QString, setShape);
@@ -2208,6 +2216,8 @@ void EntityItemProperties::merge(const EntityItemProperties& other) {
COPY_PROPERTY_IF_CHANGED(normals);
COPY_PROPERTY_IF_CHANGED(strokeColors);
COPY_PROPERTY_IF_CHANGED(isUVModeStretch);
COPY_PROPERTY_IF_CHANGED(glow);
COPY_PROPERTY_IF_CHANGED(faceCamera);
// Shape
COPY_PROPERTY_IF_CHANGED(shape);
@@ -2573,6 +2583,8 @@ bool EntityItemProperties::getPropertyInfo(const QString& propertyName, EntityPr
ADD_PROPERTY_TO_MAP(PROP_STROKE_NORMALS, Normals, normals, QVector<vec3>);
ADD_PROPERTY_TO_MAP(PROP_STROKE_COLORS, StrokeColors, strokeColors, QVector<vec3>);
ADD_PROPERTY_TO_MAP(PROP_IS_UV_MODE_STRETCH, IsUVModeStretch, isUVModeStretch, QVector<float>);
ADD_PROPERTY_TO_MAP(PROP_LINE_GLOW, Glow, glow, bool);
ADD_PROPERTY_TO_MAP(PROP_LINE_FACE_CAMERA, FaceCamera, faceCamera, bool);
// Shape
ADD_PROPERTY_TO_MAP(PROP_SHAPE, Shape, shape, QString);
@@ -2944,6 +2956,8 @@ OctreeElement::AppendState EntityItemProperties::encodeEntityEditPacket(PacketTy
APPEND_ENTITY_PROPERTY(PROP_STROKE_NORMALS, properties.getPackedNormals());
APPEND_ENTITY_PROPERTY(PROP_STROKE_COLORS, properties.getPackedStrokeColors());
APPEND_ENTITY_PROPERTY(PROP_IS_UV_MODE_STRETCH, properties.getIsUVModeStretch());
APPEND_ENTITY_PROPERTY(PROP_LINE_GLOW, properties.getGlow());
APPEND_ENTITY_PROPERTY(PROP_LINE_FACE_CAMERA, properties.getFaceCamera());
}
// NOTE: Spheres and Boxes are just special cases of Shape, and they need to include their PROP_SHAPE
@@ -3377,6 +3391,8 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_STROKE_NORMALS, QByteArray, setPackedNormals);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_STROKE_COLORS, QByteArray, setPackedStrokeColors);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_IS_UV_MODE_STRETCH, bool, setIsUVModeStretch);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LINE_GLOW, bool, setGlow);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LINE_FACE_CAMERA, bool, setFaceCamera);
}
// NOTE: Spheres and Boxes are just special cases of Shape, and they need to include their PROP_SHAPE
@@ -3722,6 +3738,8 @@ void EntityItemProperties::markAllChanged() {
_normalsChanged = true;
_strokeColorsChanged = true;
_isUVModeStretchChanged = true;
_glowChanged = true;
_faceCameraChanged = true;
// Shape
_shapeChanged = true;
@@ -4333,6 +4351,12 @@ QList<QString> EntityItemProperties::listChangedProperties() {
if (isUVModeStretchChanged()) {
out += "isUVModeStretch";
}
if (glowChanged()) {
out += "glow";
}
if (faceCameraChanged()) {
out += "faceCamera";
}
// Shape
if (shapeChanged()) {

View file

@@ -322,6 +322,8 @@ public:
DEFINE_PROPERTY(PROP_STROKE_NORMALS, Normals, normals, QVector<glm::vec3>, ENTITY_ITEM_DEFAULT_EMPTY_VEC3_QVEC);
DEFINE_PROPERTY(PROP_STROKE_COLORS, StrokeColors, strokeColors, QVector<glm::vec3>, ENTITY_ITEM_DEFAULT_EMPTY_VEC3_QVEC);
DEFINE_PROPERTY(PROP_IS_UV_MODE_STRETCH, IsUVModeStretch, isUVModeStretch, bool, true);
DEFINE_PROPERTY(PROP_LINE_GLOW, Glow, glow, bool, false);
DEFINE_PROPERTY(PROP_LINE_FACE_CAMERA, FaceCamera, faceCamera, bool, false);
// Shape
DEFINE_PROPERTY_REF(PROP_SHAPE, Shape, shape, QString, "Sphere");

View file

@@ -291,6 +291,8 @@ enum EntityPropertyList {
PROP_STROKE_NORMALS = PROP_DERIVED_2,
PROP_STROKE_COLORS = PROP_DERIVED_3,
PROP_IS_UV_MODE_STRETCH = PROP_DERIVED_4,
PROP_LINE_GLOW = PROP_DERIVED_5,
PROP_LINE_FACE_CAMERA = PROP_DERIVED_6,
// Shape
PROP_SHAPE = PROP_DERIVED_0,

View file

@@ -24,7 +24,6 @@
const float PolyLineEntityItem::DEFAULT_LINE_WIDTH = 0.1f;
const int PolyLineEntityItem::MAX_POINTS_PER_LINE = 60;
EntityItemPointer PolyLineEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
EntityItemPointer entity(new PolyLineEntityItem(entityID), [](EntityItem* ptr) { ptr->deleteLater(); });
entity->setProperties(properties);
@@ -37,7 +36,6 @@ PolyLineEntityItem::PolyLineEntityItem(const EntityItemID& entityItemID) : Entit
}
EntityItemProperties PolyLineEntityItem::getProperties(const EntityPropertyFlags& desiredProperties, bool allowEmptyDesiredProperties) const {
QWriteLocker lock(&_quadReadWriteLock);
EntityItemProperties properties = EntityItem::getProperties(desiredProperties, allowEmptyDesiredProperties); // get the properties from our base class
COPY_ENTITY_PROPERTY_TO_PROPERTIES(color, getColor);
@@ -48,11 +46,13 @@ EntityItemProperties PolyLineEntityItem::getProperties(const EntityPropertyFlags
COPY_ENTITY_PROPERTY_TO_PROPERTIES(normals, getNormals);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(strokeColors, getStrokeColors);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(isUVModeStretch, getIsUVModeStretch);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(glow, getGlow);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(faceCamera, getFaceCamera);
return properties;
}
bool PolyLineEntityItem::setProperties(const EntityItemProperties& properties) {
QWriteLocker lock(&_quadReadWriteLock);
bool somethingChanged = false;
somethingChanged = EntityItem::setProperties(properties); // set the properties in our base class
@@ -64,6 +64,8 @@ bool PolyLineEntityItem::setProperties(const EntityItemProperties& properties) {
SET_ENTITY_PROPERTY_FROM_PROPERTIES(normals, setNormals);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(strokeColors, setStrokeColors);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(isUVModeStretch, setIsUVModeStretch);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(glow, setGlow);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(faceCamera, setFaceCamera);
if (somethingChanged) {
bool wantDebug = false;
@@ -78,125 +80,59 @@ bool PolyLineEntityItem::setProperties(const EntityItemProperties& properties) {
return somethingChanged;
}
bool PolyLineEntityItem::appendPoint(const glm::vec3& point) {
if (_points.size() > MAX_POINTS_PER_LINE - 1) {
qCDebug(entities) << "MAX POINTS REACHED!";
return false;
}
_points << point;
_pointsChanged = true;
calculateScaleAndRegistrationPoint();
return true;
}
bool PolyLineEntityItem::setStrokeWidths(const QVector<float>& strokeWidths) {
void PolyLineEntityItem::setLinePoints(const QVector<glm::vec3>& points) {
withWriteLock([&] {
_strokeWidths = strokeWidths;
_strokeWidthsChanged = true;
_points = points;
_pointsChanged = true;
});
return true;
computeAndUpdateDimensionsAndPosition();
}
bool PolyLineEntityItem::setNormals(const QVector<glm::vec3>& normals) {
void PolyLineEntityItem::setStrokeWidths(const QVector<float>& strokeWidths) {
withWriteLock([&] {
_widths = strokeWidths;
_widthsChanged = true;
});
computeAndUpdateDimensionsAndPosition();
}
void PolyLineEntityItem::setNormals(const QVector<glm::vec3>& normals) {
withWriteLock([&] {
_normals = normals;
_normalsChanged = true;
});
return true;
}
bool PolyLineEntityItem::setStrokeColors(const QVector<glm::vec3>& strokeColors) {
void PolyLineEntityItem::setStrokeColors(const QVector<glm::vec3>& strokeColors) {
withWriteLock([&] {
_strokeColors = strokeColors;
_strokeColorsChanged = true;
_colors = strokeColors;
_colorsChanged = true;
});
return true;
}
void PolyLineEntityItem::computeAndUpdateDimensionsAndPosition() {
QVector<glm::vec3> points;
QVector<float> widths;
bool PolyLineEntityItem::setLinePoints(const QVector<glm::vec3>& points) {
if (points.size() > MAX_POINTS_PER_LINE) {
return false;
}
bool result = false;
withWriteLock([&] {
//Check to see if points actually changed. If they haven't, return before doing anything else
if (points.size() != _points.size()) {
_pointsChanged = true;
} else if (points.size() == _points.size()) {
//same number of points, so now compare every point
for (int i = 0; i < points.size(); i++) {
if (points.at(i) != _points.at(i)) {
_pointsChanged = true;
break;
}
}
}
if (!_pointsChanged) {
return;
}
_points = points;
result = true;
});
if (result) {
calculateScaleAndRegistrationPoint();
}
return result;
}
void PolyLineEntityItem::calculateScaleAndRegistrationPoint() {
glm::vec3 high(0.0f, 0.0f, 0.0f);
glm::vec3 low(0.0f, 0.0f, 0.0f);
int pointCount = 0;
glm::vec3 firstPoint;
withReadLock([&] {
pointCount = _points.size();
if (pointCount > 0) {
firstPoint = _points.at(0);
}
for (int i = 0; i < pointCount; i++) {
const glm::vec3& point = _points.at(i);
high = glm::max(point, high);
low = glm::min(point, low);
}
points = _points;
widths = _widths;
});
float magnitudeSquared = glm::length2(low - high);
vec3 newScale { 1 };
vec3 newRegistrationPoint { 0.5f };
glm::vec3 maxHalfDim(0.5f * ENTITY_ITEM_DEFAULT_WIDTH);
float maxWidth = 0.0f;
for (int i = 0; i < points.length(); i++) {
maxHalfDim = glm::max(maxHalfDim, glm::abs(points[i]));
maxWidth = glm::max(maxWidth, i < widths.length() ? widths[i] : DEFAULT_LINE_WIDTH);
}
const float EPSILON = 0.0001f;
const float EPSILON_SQUARED = EPSILON * EPSILON;
const float HALF_LINE_WIDTH = 0.075f; // sadly _strokeWidths() don't seem to correspond to reality, so just use a flat assumption of the stroke width
const vec3 QUARTER_LINE_WIDTH { HALF_LINE_WIDTH * 0.5f };
if (pointCount > 1 && magnitudeSquared > EPSILON_SQUARED) {
newScale = glm::abs(high) + glm::abs(low) + vec3(HALF_LINE_WIDTH);
// Center the poly line in the bounding box
glm::vec3 startPointInScaleSpace = firstPoint - low;
startPointInScaleSpace += QUARTER_LINE_WIDTH;
newRegistrationPoint = startPointInScaleSpace / newScale;
}
// if Polyline has only one or fewer points, use default dimension settings
setScaledDimensions(newScale);
EntityItem::setRegistrationPoint(newRegistrationPoint);
setScaledDimensions(2.0f * (maxHalfDim + maxWidth));
}
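The replacement bounds rule is much simpler than the old registration-point math: the half-dimensions become the component-wise maximum of |point| over all points, padded by the widest stroke, and the entity dimensions are twice that. A standalone sketch of the same computation with plain glm types; the function name and parameters are illustrative.

#include <vector>
#include <glm/glm.hpp>

glm::vec3 computePolylineDimensions(const std::vector<glm::vec3>& points,
                                    const std::vector<float>& widths,
                                    float defaultWidth,        // e.g. DEFAULT_LINE_WIDTH
                                    float defaultHalfExtent) { // e.g. 0.5f * ENTITY_ITEM_DEFAULT_WIDTH
    glm::vec3 maxHalfDim(defaultHalfExtent);
    float maxWidth = 0.0f;
    for (size_t i = 0; i < points.size(); i++) {
        maxHalfDim = glm::max(maxHalfDim, glm::abs(points[i]));
        maxWidth = glm::max(maxWidth, i < widths.size() ? widths[i] : defaultWidth);
    }
    return 2.0f * (maxHalfDim + maxWidth); // a box around the origin containing every padded point
}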
int PolyLineEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data, int bytesLeftToRead,
ReadBitstreamToTreeParams& args,
EntityPropertyFlags& propertyFlags, bool overwriteLocalData,
bool& somethingChanged) {
QWriteLocker lock(&_quadReadWriteLock);
int bytesRead = 0;
const unsigned char* dataAt = data;
@@ -208,6 +144,8 @@ int PolyLineEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* da
READ_ENTITY_PROPERTY(PROP_STROKE_NORMALS, QVector<glm::vec3>, setNormals);
READ_ENTITY_PROPERTY(PROP_STROKE_COLORS, QVector<glm::vec3>, setStrokeColors);
READ_ENTITY_PROPERTY(PROP_IS_UV_MODE_STRETCH, bool, setIsUVModeStretch);
READ_ENTITY_PROPERTY(PROP_LINE_GLOW, bool, setGlow);
READ_ENTITY_PROPERTY(PROP_LINE_FACE_CAMERA, bool, setFaceCamera);
return bytesRead;
}
@@ -222,6 +160,8 @@ EntityPropertyFlags PolyLineEntityItem::getEntityProperties(EncodeBitstreamParam
requestedProperties += PROP_STROKE_NORMALS;
requestedProperties += PROP_STROKE_COLORS;
requestedProperties += PROP_IS_UV_MODE_STRETCH;
requestedProperties += PROP_LINE_GLOW;
requestedProperties += PROP_LINE_FACE_CAMERA;
return requestedProperties;
}
@@ -233,7 +173,6 @@ void PolyLineEntityItem::appendSubclassData(OctreePacketData* packetData, Encode
int& propertyCount,
OctreeElement::AppendState& appendState) const {
QWriteLocker lock(&_quadReadWriteLock);
bool successPropertyFits = true;
APPEND_ENTITY_PROPERTY(PROP_COLOR, getColor());
@@ -244,6 +183,8 @@ void PolyLineEntityItem::appendSubclassData(OctreePacketData* packetData, Encode
APPEND_ENTITY_PROPERTY(PROP_STROKE_NORMALS, getNormals());
APPEND_ENTITY_PROPERTY(PROP_STROKE_COLORS, getStrokeColors());
APPEND_ENTITY_PROPERTY(PROP_IS_UV_MODE_STRETCH, getIsUVModeStretch());
APPEND_ENTITY_PROPERTY(PROP_LINE_GLOW, getGlow());
APPEND_ENTITY_PROPERTY(PROP_LINE_FACE_CAMERA, getFaceCamera());
}
void PolyLineEntityItem::debugDump() const {
@@ -255,61 +196,49 @@ void PolyLineEntityItem::debugDump() const {
qCDebug(entities) << " getLastEdited:" << debugTime(getLastEdited(), now);
}
QVector<glm::vec3> PolyLineEntityItem::getLinePoints() const {
QVector<glm::vec3> result;
withReadLock([&] {
result = _points;
return resultWithReadLock<QVector<glm::vec3>>([&] {
return _points;
});
return result;
}
QVector<glm::vec3> PolyLineEntityItem::getNormals() const {
QVector<glm::vec3> result;
withReadLock([&] {
result = _normals;
return resultWithReadLock<QVector<glm::vec3>>([&] {
return _normals;
});
return result;
}
QVector<glm::vec3> PolyLineEntityItem::getStrokeColors() const {
QVector<glm::vec3> result;
withReadLock([&] {
result = _strokeColors;
return resultWithReadLock<QVector<glm::vec3>>([&] {
return _colors;
});
return result;
}
QVector<float> PolyLineEntityItem::getStrokeWidths() const {
QVector<float> result;
withReadLock([&] {
result = _strokeWidths;
return resultWithReadLock<QVector<float>>([&] {
return _widths;
});
return result;
}
QString PolyLineEntityItem::getTextures() const {
QString result;
withReadLock([&] {
result = _textures;
return resultWithReadLock<QString>([&] {
return _textures;
});
return result;
}
void PolyLineEntityItem::setTextures(const QString& textures) {
withWriteLock([&] {
if (_textures != textures) {
_textures = textures;
_texturesChangedFlag = true;
_texturesChanged = true;
}
});
}
void PolyLineEntityItem::setColor(const glm::u8vec3& value) {
withWriteLock([&] {
_strokeColorsChanged = true;
_color = value;
_colorsChanged = true;
});
}
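The rewritten getters above lean on resultWithReadLock<T>() from the engine's ReadWriteLockable base, which runs a lambda under the read lock and returns its result. A minimal sketch of what such a helper looks like over a QReadWriteLock, purely for illustration; the engine's actual implementation is not reproduced here.

#include <QReadWriteLock>

class ReadLockableSketch {
public:
    // Callers name the result type explicitly, e.g. resultWithReadLock<QVector<glm::vec3>>([&] { ... });
    template <typename T, typename F>
    T resultWithReadLock(F&& criticalSection) const {
        QReadLocker locker(&_lock);
        return criticalSection();
    }
private:
    mutable QReadWriteLock _lock;
};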

View file

@@ -44,35 +44,40 @@ class PolyLineEntityItem : public EntityItem {
glm::u8vec3 getColor() const;
void setColor(const glm::u8vec3& value);
bool setLinePoints(const QVector<glm::vec3>& points);
bool appendPoint(const glm::vec3& point);
static const int MAX_POINTS_PER_LINE;
void setLinePoints(const QVector<glm::vec3>& points);
QVector<glm::vec3> getLinePoints() const;
bool setNormals(const QVector<glm::vec3>& normals);
static const float DEFAULT_LINE_WIDTH;
void setStrokeWidths(const QVector<float>& strokeWidths);
QVector<float> getStrokeWidths() const;
void setNormals(const QVector<glm::vec3>& normals);
QVector<glm::vec3> getNormals() const;
bool setStrokeColors(const QVector<glm::vec3>& strokeColors);
void setStrokeColors(const QVector<glm::vec3>& strokeColors);
QVector<glm::vec3> getStrokeColors() const;
bool setStrokeWidths(const QVector<float>& strokeWidths);
QVector<float> getStrokeWidths() const;
void setIsUVModeStretch(bool isUVModeStretch){ _isUVModeStretch = isUVModeStretch; }
bool getIsUVModeStretch() const{ return _isUVModeStretch; }
QString getTextures() const;
void setTextures(const QString& textures);
virtual ShapeType getShapeType() const override { return SHAPE_TYPE_NONE; }
void setGlow(bool glow) { _glow = glow; }
bool getGlow() const { return _glow; }
void setFaceCamera(bool faceCamera) { _faceCamera = faceCamera; }
bool getFaceCamera() const { return _faceCamera; }
bool pointsChanged() const { return _pointsChanged; }
bool normalsChanged() const { return _normalsChanged; }
bool strokeColorsChanged() const { return _strokeColorsChanged; }
bool strokeWidthsChanged() const { return _strokeWidthsChanged; }
bool texturesChanged() const { return _texturesChangedFlag; }
void resetTexturesChanged() { _texturesChangedFlag = false; }
void resetPolyLineChanged() { _strokeColorsChanged = _strokeWidthsChanged = _normalsChanged = _pointsChanged = false; }
bool colorsChanged() const { return _colorsChanged; }
bool widthsChanged() const { return _widthsChanged; }
bool texturesChanged() const { return _texturesChanged; }
void resetTexturesChanged() { _texturesChanged = false; }
void resetPolyLineChanged() { _colorsChanged = _widthsChanged = _normalsChanged = _pointsChanged = false; }
// never have a ray intersection pick a PolyLineEntityItem.
virtual bool supportsDetailedIntersection() const override { return true; }
@@ -85,29 +90,26 @@ class PolyLineEntityItem : public EntityItem {
BoxFace& face, glm::vec3& surfaceNormal,
QVariantMap& extraInfo, bool precisionPicking) const override { return false; }
// disable these external interfaces as PolyLineEntities caculate their own dimensions based on the points they contain
virtual void setRegistrationPoint(const glm::vec3& value) override {}; // FIXME: this is suspicious!
virtual void debugDump() const override;
static const float DEFAULT_LINE_WIDTH;
static const int MAX_POINTS_PER_LINE;
private:
void calculateScaleAndRegistrationPoint();
void computeAndUpdateDimensionsAndPosition();
protected:
glm::u8vec3 _color;
bool _pointsChanged { true };
bool _normalsChanged { true };
bool _strokeColorsChanged { true };
bool _strokeWidthsChanged { true };
QVector<glm::vec3> _points;
QVector<glm::vec3> _normals;
QVector<glm::vec3> _strokeColors;
QVector<float> _strokeWidths;
QVector<glm::vec3> _colors;
QVector<float> _widths;
QString _textures;
bool _isUVModeStretch;
bool _texturesChangedFlag { false };
mutable QReadWriteLock _quadReadWriteLock;
bool _glow;
bool _faceCamera;
bool _pointsChanged { false };
bool _normalsChanged { false };
bool _colorsChanged { false };
bool _widthsChanged { false };
bool _texturesChanged { false };
};
#endif // hifi_PolyLineEntityItem_h

View file

@@ -33,7 +33,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::EntityEdit:
case PacketType::EntityData:
case PacketType::EntityPhysics:
return static_cast<PacketVersion>(EntityVersion::MorePropertiesCleanup);
return static_cast<PacketVersion>(EntityVersion::UpdatedPolyLines);
case PacketType::EntityQuery:
return static_cast<PacketVersion>(EntityQueryPacketVersion::ConicalFrustums);
case PacketType::AvatarIdentity:

View file

@@ -252,7 +252,8 @@ enum class EntityVersion : PacketVersion {
GridEntities,
MissingTextProperties,
GrabTraits,
MorePropertiesCleanup
MorePropertiesCleanup,
UpdatedPolyLines
};
enum class EntityScriptCallMethodVersion : PacketVersion {

View file

@@ -2144,7 +2144,7 @@ function createColorProperty(property, elProperty) {
color: '000000',
submit: false, // We don't want to have a submission button
onShow: function(colpick) {
$(colorPickerID).attr('active', 'true');
console.log("Showing");
// The original color preview within the picker needs to be updated on show because
// prior to the picker being shown we don't have access to the selections' starting color.
colorPickers[colorPickerID].colpickSetColor({
@ -2152,13 +2152,18 @@ function createColorProperty(property, elProperty) {
"g": elNumberG.elInput.value,
"b": elNumberB.elInput.value
});
// Set the color picker active after setting the color, otherwise an update will be sent on open.
$(colorPickerID).attr('active', 'true');
},
onHide: function(colpick) {
$(colorPickerID).attr('active', 'false');
},
onChange: function(hsb, hex, rgb, el) {
$(el).css('background-color', '#' + hex);
emitColorPropertyUpdate(propertyName, rgb.r, rgb.g, rgb.b);
if ($(colorPickerID).attr('active') === 'true') {
emitColorPropertyUpdate(propertyName, rgb.r, rgb.g, rgb.b);
}
}
});
@@ -3351,6 +3356,18 @@ function loaded() {
property.elColorPicker.style.backgroundColor = "rgb(" + propertyValue.red + "," +
propertyValue.green + "," +
propertyValue.blue + ")";
if ($(property.elColorPicker).attr('active') === 'true') {
// Set the color picker inactive before setting the color,
// otherwise an update will be sent directly after setting it here.
$(property.elColorPicker).attr('active', 'false');
colorPickers['#' + property.elementID].colpickSetColor({
"r": propertyValue.red,
"g": propertyValue.green,
"b": propertyValue.blue
});
$(property.elColorPicker).attr('active', 'true');
}
property.elNumberR.setValue(propertyValue.red);
property.elNumberG.setValue(propertyValue.green);
property.elNumberB.setValue(propertyValue.blue);