First pass at new particle shader

This commit is contained in:
Atlante45 2015-11-18 12:20:07 -08:00
parent 7d0a5677fa
commit 230a413ec1
10 changed files with 104 additions and 86 deletions

View file

@ -25,21 +25,23 @@
#include "textured_particle_frag.h"
#include "textured_particle_alpha_discard_frag.h"
static const uint32_t VERTEX_PER_QUAD = 6;
class ParticlePayload {
public:
typedef render::Payload<ParticlePayload> Payload;
typedef Payload::DataPointer Pointer;
typedef RenderableParticleEffectEntityItem::Vertex Vertex;
using Payload = render::Payload<ParticlePayload>;
using Pointer = Payload::DataPointer;
using Vertex = RenderableParticleEffectEntityItem::Vertex;
ParticlePayload(EntityItemPointer entity) :
_entity(entity),
_vertexFormat(std::make_shared<gpu::Stream::Format>()),
_vertexBuffer(std::make_shared<gpu::Buffer>()),
_indexBuffer(std::make_shared<gpu::Buffer>()) {
_vertexBuffer(std::make_shared<gpu::Buffer>()) {
_vertexFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element::VEC3F_XYZ, offsetof(Vertex, xyz));
_vertexFormat->setAttribute(gpu::Stream::TEXCOORD, 0, gpu::Element::VEC2F_UV, offsetof(Vertex, uv));
_vertexFormat->setAttribute(gpu::Stream::COLOR, 0, gpu::Element::COLOR_RGBA_32, offsetof(Vertex, rgba));
_vertexFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element::VEC4F_XYZW,
offsetof(Vertex, xyzw), VERTEX_PER_QUAD);
_vertexFormat->setAttribute(gpu::Stream::COLOR, 0, gpu::Element::COLOR_RGBA_32,
offsetof(Vertex, rgba), VERTEX_PER_QUAD);
}
void setPipeline(gpu::PipelinePointer pipeline) { _pipeline = pipeline; }
@ -54,9 +56,6 @@ public:
gpu::BufferPointer getVertexBuffer() { return _vertexBuffer; }
const gpu::BufferPointer& getVertexBuffer() const { return _vertexBuffer; }
gpu::BufferPointer getIndexBuffer() { return _indexBuffer; }
const gpu::BufferPointer& getIndexBuffer() const { return _indexBuffer; }
void setTexture(gpu::TexturePointer texture) { _texture = texture; }
const gpu::TexturePointer& getTexture() const { return _texture; }
@ -76,10 +75,9 @@ public:
batch.setModelTransform(_modelTransform);
batch.setInputFormat(_vertexFormat);
batch.setInputBuffer(0, _vertexBuffer, 0, sizeof(Vertex));
batch.setIndexBuffer(gpu::UINT16, _indexBuffer, 0);
auto numIndices = _indexBuffer->getSize() / sizeof(uint16_t);
batch.drawIndexed(gpu::TRIANGLES, numIndices);
auto numVertices = _vertexBuffer->getSize() / sizeof(Vertex);
batch.draw(gpu::TRIANGLES, numVertices * VERTEX_PER_QUAD);
}
protected:
@ -89,7 +87,6 @@ protected:
gpu::PipelinePointer _pipeline;
gpu::Stream::FormatPointer _vertexFormat;
gpu::BufferPointer _vertexBuffer;
gpu::BufferPointer _indexBuffer;
gpu::TexturePointer _texture;
bool _visibleFlag = true;
};
@ -197,70 +194,38 @@ void RenderableParticleEffectEntityItem::updateRenderItem() {
particleDetails.emplace_back(particle.position, particle.radius, rgba);
}
// sort particles back to front
// NOTE: this view frustum might be one frame out of date.
auto direction = AbstractViewStateInterface::instance()->getCurrentViewFrustum()->getDirection();
// No need to sort if we're doing additive blending
if (!_additiveBlending) {
// sort particles back to front
// NOTE: this view frustum might be one frame out of date.
auto direction = AbstractViewStateInterface::instance()->getCurrentViewFrustum()->getDirection();
// Get direction in the entity space
direction = glm::inverse(getRotation()) * direction;
std::sort(particleDetails.begin(), particleDetails.end(),
[&](const ParticleDetails& lhs, const ParticleDetails& rhs) {
return glm::dot(lhs.position, direction) > glm::dot(rhs.position, direction);
});
}
// allocate vertices
_vertices.clear();
// build vertices from particle positions and radiuses
glm::vec3 right = glm::normalize(glm::cross(direction, Vectors::UNIT_Y));
glm::vec3 up = glm::normalize(glm::cross(right, direction));
_vertices.clear(); // clear vertices
_vertices.reserve(particleDetails.size()); // Reserve space
for (const auto& particle : particleDetails) {
glm::vec3 upOffset = up * particle.radius;
glm::vec3 rightOffset = right * particle.radius;
// generate corners of quad aligned to face the camera.
_vertices.emplace_back(particle.position + rightOffset + upOffset, glm::vec2(1.0f, 1.0f), particle.rgba);
_vertices.emplace_back(particle.position - rightOffset + upOffset, glm::vec2(0.0f, 1.0f), particle.rgba);
_vertices.emplace_back(particle.position - rightOffset - upOffset, glm::vec2(0.0f, 0.0f), particle.rgba);
_vertices.emplace_back(particle.position + rightOffset - upOffset, glm::vec2(1.0f, 0.0f), particle.rgba);
_vertices.emplace_back(glm::vec4(particle.position, particle.radius), particle.rgba);
}
render::PendingChanges pendingChanges;
pendingChanges.updateItem<ParticlePayload>(_renderItemId, [this](ParticlePayload& payload) {
// update vertex buffer
auto vertexBuffer = payload.getVertexBuffer();
auto indexBuffer = payload.getIndexBuffer();
size_t numBytes = sizeof(Vertex) * _vertices.size();
vertexBuffer->resize(numBytes);
if (numBytes == 0) {
vertexBuffer->resize(0);
indexBuffer->resize(0);
return;
}
vertexBuffer->resize(numBytes);
memcpy(vertexBuffer->editData(), _vertices.data(), numBytes);
// FIXME, don't update index buffer if num particles has not changed.
// update index buffer
const size_t NUM_VERTS_PER_PARTICLE = 4;
const size_t NUM_INDICES_PER_PARTICLE = 6;
auto numQuads = (_vertices.size() / NUM_VERTS_PER_PARTICLE);
numBytes = sizeof(uint16_t) * numQuads * NUM_INDICES_PER_PARTICLE;
indexBuffer->resize(numBytes);
gpu::Byte* data = indexBuffer->editData();
auto indexPtr = reinterpret_cast<uint16_t*>(data);
for (size_t i = 0; i < numQuads; ++i) {
indexPtr[i * NUM_INDICES_PER_PARTICLE + 0] = i * NUM_VERTS_PER_PARTICLE + 0;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 1] = i * NUM_VERTS_PER_PARTICLE + 1;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 2] = i * NUM_VERTS_PER_PARTICLE + 3;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 3] = i * NUM_VERTS_PER_PARTICLE + 1;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 4] = i * NUM_VERTS_PER_PARTICLE + 2;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 5] = i * NUM_VERTS_PER_PARTICLE + 3;
}
// update transform
glm::vec3 position = getPosition();
glm::quat rotation = getRotation();
@ -323,8 +288,7 @@ void RenderableParticleEffectEntityItem::createPipelines() {
gpu::ShaderPointer fragShader;
if (_additiveBlending) {
fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_frag)));
}
else {
} else {
// If we are sorting and have no additive blending, we want to discard pixels with low alpha to avoid inter-particle entity artifacts
fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_alpha_discard_frag)));
}

View file

@ -32,10 +32,9 @@ protected:
render::ItemID _renderItemId;
struct Vertex {
Vertex(glm::vec3 xyzIn, glm::vec2 uvIn, uint32_t rgbaIn) : xyz(xyzIn), uv(uvIn), rgba(rgbaIn) {}
glm::vec3 xyz;
glm::vec2 uv;
uint32_t rgba;
Vertex(glm::vec4 xyzwIn, uint32_t rgbaIn) : xyzw(xyzwIn), rgba(rgbaIn) {}
glm::vec4 xyzw; // Position + radius
uint32_t rgba; // Color
};
void createPipelines();

View file

@ -9,14 +9,17 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D colorMap;
in vec4 _color;
in vec2 _texCoord0;
in vec4 varColor;
in vec2 varTexcoord;
out vec4 outFragColor;
uniform sampler2D tex;
void main(void) {
vec4 color = texture(colorMap, _texCoord0);
outFragColor = color * _color;
outFragColor = varColor;//texture(tex, varTexcoord.xy) * varColor;
if (varColor == vec4(0,0,0,1)) {
discard;
}
}

View file

@ -10,21 +10,58 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
out vec4 _color;
out vec2 _texCoord0;
in vec4 inPosition;
in vec4 inColor;
out vec4 varColor;
out vec2 varTexcoord;
void main(void) {
// pass along the color & uvs to fragment shader
_color = inColor;
_texCoord0 = inTexCoord0.xy;
const int NUM_VERTICES_PER_PARTICLE = 6;
const vec4 UNIT_QUAD[NUM_VERTICES_PER_PARTICLE] = vec4[NUM_VERTICES_PER_PARTICLE](
vec4(-1.0, -1.0, 0.0, 1.0),
vec4(1.0, -1.0, 0.0, 1.0),
vec4(-1.0, 1.0, 0.0, 1.0),
vec4(-1.0, 1.0, 0.0, 1.0),
vec4(1.0, -1.0, 0.0, 1.0),
vec4(1.0, 1.0, 0.0, 1.0)
);
// anchor point in eye space
vec4 anchorPoint = vec4(inPosition.xyz, 1.0);
float radius = inPosition.w;
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, inPosition, gl_Position)$>
<$transformModelToEyePos(cam, obj, anchorPoint, anchorPoint)$>
// Which icon are we dealing with?
int particleID = gl_VertexID / NUM_VERTICES_PER_PARTICLE;
// Which quad vertex pos?
int twoTriID = gl_VertexID - particleID * NUM_VERTICES_PER_PARTICLE;
vec4 quadPos = radius * UNIT_QUAD[twoTriID];
// Pass along the texcoord; the z texcoord represents the texture icon
varTexcoord = vec2((quadPos.xy + 1.0) * 0.5);
varColor = inColor;
if (particleID == 0) {
varColor = vec4(1,0,0,1);
} else if (particleID == 5) {
varColor = vec4(0,1,0,1);
} else if (particleID == 10) {
varColor = vec4(0,0,1,1);
} else {
varColor = vec4(0,0,0,1);
}
vec4 clipPos;
vec4 eyePos = vec4(anchorPoint.xyz + quadPos.xyz, 1.0);
<$transformEyeToClip(cam, eyePos, clipPos)$>
gl_Position = clipPos;
}

View file

@ -91,7 +91,7 @@ const float ParticleEffectEntityItem::DEFAULT_RADIUS_SPREAD = 0.0f;
const float ParticleEffectEntityItem::DEFAULT_RADIUS_START = DEFAULT_PARTICLE_RADIUS;
const float ParticleEffectEntityItem::DEFAULT_RADIUS_FINISH = DEFAULT_PARTICLE_RADIUS;
const QString ParticleEffectEntityItem::DEFAULT_TEXTURES = "";
const bool ParticleEffectEntityItem::DEFAULT_ADDITIVE_BLENDING = false;
const bool ParticleEffectEntityItem::DEFAULT_ADDITIVE_BLENDING = true;
EntityItemPointer ParticleEffectEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {

View file

@ -211,7 +211,7 @@ public:
static const bool DEFAULT_ADDITIVE_BLENDING;
bool getAdditiveBlending() const { return _additiveBlending; }
void setAdditiveBlending(bool additiveBlending) {
_additiveBlending = additiveBlending;
_additiveBlending = true;
}
virtual bool supportsDetailedRayIntersection() const { return false; }

View file

@ -154,7 +154,7 @@ GLBackend::GLShader* compileShader(const Shader& shader) {
qCWarning(gpulogging) << "GLShader::compileShader - failed to compile the gl shader object:";
qCWarning(gpulogging) << temp;
/*
filestream.open("debugshader.glsl.info.txt");
if (filestream.is_open()) {

View file

@ -59,7 +59,7 @@ void Stream::Format::evaluateCache() {
}
}
bool Stream::Format::setAttribute(Slot slot, Slot channel, Element element, Offset offset, Frequency frequency) {
bool Stream::Format::setAttribute(Slot slot, Slot channel, Element element, Offset offset, uint32 frequency) {
_attributes[slot] = Attribute((InputSlot) slot, channel, element, offset, frequency);
evaluateCache();
return true;

View file

@ -49,8 +49,8 @@ public:
// Frequency describer
enum Frequency {
PER_INSTANCE = -1,
PER_VERTEX = 0,
PER_INSTANCE,
};
// The attribute description
@ -59,7 +59,7 @@ public:
public:
Attribute() {}
Attribute(Slot slot, Slot channel, Element element, Offset offset = 0, Frequency frequency = PER_VERTEX) :
Attribute(Slot slot, Slot channel, Element element, Offset offset = 0, uint32 frequency = PER_VERTEX) :
_slot(slot),
_channel(channel),
_element(element),
@ -107,7 +107,7 @@ public:
uint32 getElementTotalSize() const { return _elementTotalSize; }
bool setAttribute(Slot slot, Slot channel, Element element, Offset offset = 0, Frequency frequency = PER_VERTEX);
bool setAttribute(Slot slot, Slot channel, Element element, Offset offset = 0, uint32 frequency = PER_VERTEX);
bool setAttribute(Slot slot, Frequency frequency = PER_VERTEX);
bool setAttribute(Slot slot, Slot channel, Frequency frequency = PER_VERTEX);

View file

@ -74,6 +74,16 @@ TransformCamera getTransformCamera() {
}
<@endfunc@>
<@func $transformModelToEyePos(cameraTransform, objectTransform, modelPos, eyePos)@>
<!// Equivalent to the following but hopefully a tad more accurate
//return camera._view * object._model * pos; !>
{ // transformModelToEyePos
vec4 _worldpos = (<$objectTransform$>._model * <$modelPos$>);
<$eyePos$> = (<$cameraTransform$>._view * _worldpos);
// <$eyePos$> = (<$cameraTransform$>._projectionInverse * <$clipPos$>);
}
<@endfunc@>
<@func $transformInstancedModelToEyeAndClipPos(cameraTransform, objectTransform, modelPos, eyePos, clipPos)@>
<!// Equivalent to the following but hopefully a tad more accurate
//return camera._projection * camera._view * object._model * pos; !>
@ -86,7 +96,6 @@ TransformCamera getTransformCamera() {
}
<@endfunc@>
<@func transformModelToWorldPos(objectTransform, modelPos, worldPos)@>
{ // transformModelToWorldPos
<$worldPos$> = (<$objectTransform$>._model * <$modelPos$>);
@ -140,4 +149,10 @@ TransformCamera getTransformCamera() {
}
<@endfunc@>
<@func transformEyeToClip(cameraTransform, eyePos, clipPos)@>
{ // transformEyeToClip
<$clipPos$> = <$cameraTransform$>._projection * <$eyePos$>;
}
<@endfunc@>
<@endif@>