Post-effects significantly enhance visual rendering, enabling the interactive blending of shader effects like bloom, motion blur, and ambient occlusion into rendered scenes. This demo showcases the blend of blur, noise and pixelate effects using frame buffer objects (FBOs) and WEBGL2 shaders, applied to a scene featuring randomly placed toruses and boxes, with a sphere acting as the dynamic focal point for the blur effect, thereby creating a visually engaging experience. By employing a user-space array, these effects are sequentially applied to a source FBO layer with the applyEffects(layer, effects, uniforms, flip) function.
(fine tune effects with sliders; press 1, 2, or 3 to change their application ordering)
code
// Depth-of-field blur fragment shader (GLSL ES 3.00 / WebGL2).
// For each fragment it averages up to 20 samples on a spiral around the
// current texel, including a sample only when that sample is in front of the
// center pixel or its own blur disc is wide enough to overlap it.
// NOTE(review): the `// 0, 4, 0, 0.1` style comments next to uniforms look
// like configUniformsUI slider specs — keep the string contents byte-exact,
// they are parsed at runtime.
const blur_shader = `#version 300 es
precision highp float;
in vec2 texcoords2;
uniform sampler2D blender; // <- shared layer should be named 'blender'
uniform sampler2D depth;
uniform float focus;
uniform float blurIntensity; // 0, 4, 0, 0.1 controls blurriness
out vec4 fragColor;
const float TWO_PI = 6.28318530718;
float getBlurriness(float d) {
// Blur more the farther away we go from the focal point at depth=focus
// The blurIntensity uniform scales the blurriness
return abs(d - focus) * 40. * blurIntensity;
}
float maxBlurDistance(float blurriness) {
// The maximum distance for blurring, based on blurriness
return blurriness * 0.01;
}
void main() {
vec4 color = texture(blender, texcoords2);
float samples = 1.;
float centerDepth = texture(depth, texcoords2).r;
float blurriness = getBlurriness(centerDepth);
for (int sampleIndex = 0; sampleIndex < 20; sampleIndex++) {
// Sample nearby pixels in a spiral going out from the current pixel
// using TWO_PI to convert loop index to radians
float angle = float(sampleIndex) * TWO_PI / 20.;
float distance = float(sampleIndex) / 20. * maxBlurDistance(blurriness);
vec2 offset = vec2(cos(angle), sin(angle)) * distance;
// How close is the object at the nearby pixel?
float sampleDepth = texture(depth, texcoords2 + offset).r;
// How far should its blur reach?
float sampleBlurDistance = maxBlurDistance(getBlurriness(sampleDepth));
// If it's in front of the current pixel, or its blur overlaps
// with the current pixel, add its color to the average
if (sampleDepth >= centerDepth || sampleBlurDistance >= distance) {
color += texture(blender, texcoords2 + offset);
samples++;
}
}
color /= samples;
fragColor = color;
}`
// Noise-displacement fragment shader: offsets each texture lookup by 3D
// simplex noise (appears to be the widely used Ashima Arts / McEwan `snoise`
// implementation — NOTE(review): confirm provenance) animated by `time`.
// `frequency` scales the noise domain, `amplitude` the displacement, and
// `speed` the animation rate; the +17.0 offset decorrelates the x/y channels.
// NOTE(review): the `// 0, 10, 0, 0.1` style uniform comments look like
// configUniformsUI slider specs — keep the string contents byte-exact.
const noise_shader = `#version 300 es
precision highp float;
uniform sampler2D blender; // <- shared layer should be named 'blender'
uniform float frequency; // 0, 10, 0, 0.1
uniform float amplitude; // 0, 1, 0, 0.1
uniform float speed; // 0, 1, 0, 0.1
uniform float time;
in vec2 texcoords2;
out vec4 outColor;
vec3 mod289(vec3 x) {
return mod(x, 289.0);
}
vec4 mod289(vec4 x) {
return mod(x, 289.0);
}
vec4 permute(vec4 x) {
return mod289(((x * 34.0) + 1.0) * x);
}
vec4 taylorInvSqrt(vec4 r) {
return 1.79284291400159 - 0.85373472095314 * r;
}
float snoise(vec3 v) {
const vec2 C = vec2(1.0 / 6.0, 1.0 / 3.0);
const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
vec3 i = floor(v + dot(v, C.yyy));
vec3 x0 = v - i + dot(i, C.xxx);
vec3 g = step(x0.yzx, x0.xyz);
vec3 l = 1.0 - g;
vec3 i1 = min(g.xyz, l.zxy);
vec3 i2 = max(g.xyz, l.zxy);
vec3 x1 = x0 - i1 + C.xxx;
vec3 x2 = x0 - i2 + C.yyy;
vec3 x3 = x0 - D.yyy;
i = mod289(i);
vec4 p = permute(permute(permute(
i.z + vec4(0.0, i1.z, i2.z, 1.0))
+ i.y + vec4(0.0, i1.y, i2.y, 1.0))
+ i.x + vec4(0.0, i1.x, i2.x, 1.0));
float n_ = 0.142857142857;
vec3 ns = n_ * D.wyz - D.xzx;
vec4 j = p - 49.0 * floor(p * ns.z * ns.z);
vec4 x_ = floor(j * ns.z);
vec4 y_ = floor(j - 7.0 * x_);
vec4 x = x_ * ns.x + ns.yyyy;
vec4 y = y_ * ns.x + ns.yyyy;
vec4 h = 1.0 - abs(x) - abs(y);
vec4 b0 = vec4(x.xy, y.xy);
vec4 b1 = vec4(x.zw, y.zw);
vec4 s0 = floor(b0) * 2.0 + 1.0;
vec4 s1 = floor(b1) * 2.0 + 1.0;
vec4 sh = -step(h, vec4(0.0));
vec4 a0 = b0.xzyw + s0.xzyw * sh.xxyy;
vec4 a1 = b1.xzyw + s1.xzyw * sh.zzww;
vec3 p0 = vec3(a0.xy, h.x);
vec3 p1 = vec3(a0.zw, h.y);
vec3 p2 = vec3(a1.xy, h.z);
vec3 p3 = vec3(a1.zw, h.w);
vec4 norm = taylorInvSqrt(vec4(dot(p0, p0), dot(p1, p1),
dot(p2, p2), dot(p3, p3)));
p0 *= norm.x;
p1 *= norm.y;
p2 *= norm.z;
p3 *= norm.w;
vec4 m = max(0.6 - vec4(dot(x0, x0), dot(x1, x1),
dot(x2, x2), dot(x3, x3)),
0.0);
m = m * m;
return 42.0 * dot(m * m, vec4(dot(p0, x0), dot(p1, x1),
dot(p2, x2), dot(p3, x3)));
}
void main() {
vec2 texCoords = texcoords2.st + vec2(
amplitude * snoise(vec3(frequency * texcoords2.s,
frequency * texcoords2.t,
speed * time)),
amplitude * snoise(vec3(frequency * texcoords2.s + 17.0,
frequency * texcoords2.t,
speed * time))
);
outColor = texture(blender, texCoords);
}`
// Pixelate fragment shader: quantizes texture coordinates to a grid of
// `level` cells per axis, so every fragment in a cell samples the same texel
// and the image becomes a mosaic. Lower `level` means larger blocks.
// NOTE(review): the `// 10, 600, 600, 10` uniform comment looks like a
// configUniformsUI slider spec — keep the string contents byte-exact.
const pixelate_shader = `#version 300 es
precision highp float;
uniform sampler2D blender; // <- shared layer should be named 'blender'
uniform float level; // 10, 600, 600, 10
in vec2 texcoords2;
out vec4 fragColor;
void main() {
fragColor = texture(blender, floor(texcoords2 * level) / level);
}`
let layer // offscreen framebuffer (FBO) the 3D scene is rendered into
let models = [] // descriptors for the randomly placed ball/toruses/boxes
let effects = [] // ordered array of post-effect shaders, applied in sequence
let font // UI font, loaded in preload()
// p5 lifecycle hook: load assets before setup() runs.
function preload() {
font = loadFont('noto_sans.ttf')
}
// p5 lifecycle hook: build the scene, the effect chain, and the slider UI.
function setup() {
  createCanvas(600, 400, WEBGL)
  layer = createFramebuffer()
  // Scatter 50 models inside a cube spanning [-trange, trange] on each axis.
  // models[0] is the ball (the blur focal point), the next 24 are toruses,
  // and the remaining 25 are boxes.
  const trange = 200
  for (let i = 0; i < 50; i++) {
    const position = createVector(
      (random() * 2 - 1) * trange,
      (random() * 2 - 1) * trange,
      (random() * 2 - 1) * trange
    )
    const size = random() * 25 + 8
    const tint = color(int(random(256)), int(random(256)), int(random(256)))
    const type = i === 0 ? 'ball' : i < 25 ? 'torus' : 'box'
    models.push({ position, size, color: tint, type })
  }
  // Instantiate each shader with a key used later to route custom uniforms.
  effects.push(makeShader(blur_shader, 'blur'))
  effects.push(makeShader(noise_shader, 'noise'))
  effects.push(makeShader(pixelate_shader, 'pixelate'))
  // Lay out one slider panel per effect, side by side across the top.
  const offset = 240
  effects.forEach((effect, i) => {
    configUniformsUI(effect, { x: 10 + offset * i, y: 10, width: 80 })
    showUniformsUI(effect)
  })
  textFont(font)
  textSize(14)
}
// p5 lifecycle hook, runs once per frame. Three steps: (1) render the scene
// into the offscreen layer, (2) run the effect chain over it, (3) blit the
// result (and the current effect ordering) in screen space.
function draw() {
// render scene into layer
layer.begin()
background(0)
axes()
noStroke()
orbitControl()
ambientLight(100)
// Headlight: Tree._k is converted from eye to world space so the directional
// light follows the camera — NOTE(review): presumably the view's -Z axis,
// confirm against the p5.treegl parseDirection docs.
const direction = parseDirection(Tree._k, { from: Tree.EYE, to: Tree.WORLD })
directionalLight(255, 255, 255, direction.x, direction.y, direction.z)
ambientMaterial(255, 0, 0)
fill(255, 255, 100)
specularMaterial(255)
shininess(150)
models.forEach(model => {
push()
noStroke()
fill(model.color)
translate(model.position)
// 'ball' (models[0]) falls through to the sphere() branch below.
model.type === 'box' ? box(model.size) :
model.type === 'torus' ?
torus(model.size) : sphere(model.size)
pop()
})
// Screen-space depth of the sphere: the focal depth the blur shader compares
// each fragment's depth against.
const focus = parsePosition(models[0].position,
{ from: Tree.WORLD, to: Tree.SCREEN }).z
layer.end()
// render target by applying effects to layer
// Per-shader uniforms are routed by the key given in makeShader; sliders from
// uniformsUI are merged in by applyEffects. The last argument is flip=false —
// NOTE(review): see p5.treegl applyEffects for the flip semantics.
let uniforms = { // uniforms (besides uniformsUI)
blur: { focus, depth: layer.depth },
noise: { time: millis() / 1000 }
}
const target = applyEffects(layer, effects, uniforms, false)
// display target using screen space coords
beginHUD()
image(target, 0, 0)
text(effects[0].key + ' -> ' +
effects[1].key + ' -> ' +
effects[2].key,
20, height - 15)
endHUD()
}
// p5 lifecycle hook: keys 1/2/3 swap a pair of effect slots, reordering the
// post-effect chain, then the current ordering is logged.
function keyPressed() {
  // Each digit key maps to the pair of effect indices it swaps.
  const swaps = { 1: [0, 1], 2: [1, 2], 3: [2, 0] }
  const pair = swaps[key]
  if (pair !== undefined) {
    const [a, b] = pair;
    [effects[a], effects[b]] = [effects[b], effects[a]]
  }
  console.log(effects[0].key, ' -> ',
    effects[1].key, ' -> ',
    effects[2].key)
}
Shaders setup
For post-effects implementation it is necessary to assign the blender identifier to the uniform variable of each shader's shared FBO layer. This simple setup allows the application of multiple shader effects on the same rendered scene with applyEffects(layer, effects, uniforms, flip).
// noise_shader
uniform sampler2D blender; // Shared layer named 'blender'
uniform float time;
// Additional shader code...
// blur_shader
uniform sampler2D blender; // Shared layer named 'blender'
uniform float focus;
uniform float blurIntensity;
// Additional shader code...
// pixelate_shader
uniform sampler2D blender; // Shared layer named 'blender'
uniform float level;
// Additional shader code...
Each shader utilizes the blender uniform to access the shared FBO layer, enabling the seamless integration of the effects within the scene.
Effects setup
Shaders that are going to be applied to the scene are instantiated and added to the user-defined effects array. The createFramebuffer() function initializes the layer that these effects will be applied to, while makeShader() creates each shader and assigns it a key which will then be used to emit custom uniform variables, not present in uniformsUI, to a specific shader.
let layer
let effects = [] // user space array of shaders
function setup() {
createCanvas(600, 400, WEBGL)
layer = createFramebuffer()
// instantiate shaders with keys for later
// uniform settings and add them to effects
effects.push(makeShader(blur_shader, 'blur'))
effects.push(makeShader(noise_shader, 'noise'))
effects.push(makeShader(pixelate_shader, 'pixelate'))
}
Post-effects rendering
Rendering post-effects is a three-step process:
1. Scene rendering: The scene is rendered into the layer within layer.begin() and layer.end().
2. Effects application: Effects stored in the effects array are applied sequentially using the applyEffects function, with a uniforms object specifying settings in a per-shader manner.
3. Image display: The processed image, now augmented with effects, is displayed in screen space within beginHUD() and endHUD(), highlighting the visual enhancements on the scene.
function draw() {
layer.begin()
// render scene into layer
layer.end()
// render target by applying effects to layer
let uniforms = { // uniforms (besides uniformsUI)
blur: { focus, depth: layer.depth },
noise: { time: millis() / 1000 }
}
const target = applyEffects(layer, effects, uniforms)
// display target using screen space coords
beginHUD()
image(target, 0, 0)
// ...
endHUD()
}
Interactive effects re-ordering
By pressing the 1, 2, or 3 keys, users can dynamically change the sequence in which the post-effects are rendered, allowing for real-time experimentation with different visual outcomes. Note that the new ordering of effects is efficiently set using destructuring assignment.
function keyPressed() {
if (key === '1') {
// swap the first and second effects
[effects[0], effects[1]] = [effects[1], effects[0]]
}
if (key === '2') {
// swap the second and third effects
[effects[1], effects[2]] = [effects[2], effects[1]]
}
if (key === '3') {
// swap the first and third effects
[effects[2], effects[0]] = [effects[0], effects[2]]
}
// log the current sequence of effects
console.log(effects[0].key, ' -> ',
effects[1].key, ' -> ',
effects[2].key)
}
Observe that array map can be used to customize the order of effects based on a predefined targetOrder array, e.g.,
function reorder(originalArray, targetOrder) {
return targetOrder.map(index => originalArray[index])
}
function keyPressed () {
let targetOrder = [2, 0, 1]
effects = reorder(effects, targetOrder)
}
References
- For an in-depth analysis of shader post-effects, check Diego Bulla’s post-effects study.
- Additionally, explore Darragh Nolan's p5.FIP library, which compiles over 40 post-effects.