Ported all the post processing regl passes to WebGPU. Fixed the CORS handling in loadTexture. Renamed createRenderTargetTexture to makePassFBO, to conform with the regl project. Added make1DTexture, which stripePass uses.

This commit is contained in:
Rezmason
2021-11-06 05:03:16 -07:00
parent 230847e5b8
commit 89b9f4e363
13 changed files with 785 additions and 30 deletions

View File

@@ -1,13 +1,12 @@
TODO:
WebGPU
Blur: compute or render?
What is workgroupBarrier in compute shaders?
The other passes should be a breeze
blur pass
Make sure you got CORS right in the texture fetch
Update links in issues
Try to change post processing to compute shaders once they're easier to support
std140
Document and share it

View File

@@ -2,7 +2,7 @@ import { loadImage, loadText, makePassFBO, makePass } from "./utils.js";
// Multiplies the rendered rain and bloom by a loaded in image
const defaultBGURL = "https://upload.wikimedia.org/wikipedia/commons/0/0a/Flammarion_Colored.jpg";
const defaultBGURL = "https://upload.wikimedia.org/wikipedia/commons/thumb/0/0a/Flammarion_Colored.jpg/917px-Flammarion_Colored.jpg";
export default (regl, config, inputs) => {
const output = makePassFBO(regl, config.useHalfFloat);

94
js/webgpu/imagePass.js Normal file
View File

@@ -0,0 +1,94 @@
import std140 from "./std140.js";
import { loadTexture, loadShaderModule, makeUniformBuffer, makePassFBO, makePass } from "./utils.js";
// Multiplies the rendered rain and bloom by a loaded in image
const defaultBGURL = "https://upload.wikimedia.org/wikipedia/commons/thumb/0/0a/Flammarion_Colored.jpg/917px-Flammarion_Colored.jpg";
const numVerticesPerQuad = 2 * 3;
export default (context, getInputs) => {
	const { config, adapter, device, canvasContext } = context;

	// NOTE(review): this pass's shader (shaders/wgsl/imagePass.wgsl) binds only
	// a sampler and three textures — no config uniform — so the unused std140
	// config buffer carried over from the other passes has been removed here.

	const linearSampler = device.createSampler({
		magFilter: "linear",
		minFilter: "linear",
	});

	// Reused every frame; the color attachment's view is assigned in execute().
	const renderPassConfig = {
		colorAttachments: [
			{
				view: null,
				loadValue: { r: 0, g: 0, b: 0, a: 1 },
				storeOp: "store",
			},
		],
	};

	const presentationFormat = canvasContext.getPreferredFormat(adapter);

	let renderPipeline;
	let output;
	let backgroundTex;

	// Callers may override the background image via config.bgURL.
	const bgURL = "bgURL" in config ? config.bgURL : defaultBGURL;
	const assets = [loadTexture(device, bgURL), loadShaderModule(device, "shaders/wgsl/imagePass.wgsl")];

	// Resolves once the background texture and shader module have loaded
	// and the render pipeline has been built from them.
	const ready = (async () => {
		const [bgTex, rainShader] = await Promise.all(assets);
		backgroundTex = bgTex;
		renderPipeline = device.createRenderPipeline({
			vertex: {
				module: rainShader,
				entryPoint: "vertMain",
			},
			fragment: {
				module: rainShader,
				entryPoint: "fragMain",
				targets: [
					{
						format: presentationFormat,
					},
				],
			},
		});
	})();

	// Recreates the output framebuffer texture at the new canvas size.
	const setSize = (width, height) => {
		output?.destroy();
		output = makePassFBO(device, width, height, presentationFormat);
	};

	const getOutputs = () => ({
		primary: output,
	});

	// Binds the upstream render/bloom textures plus the background image
	// (positionally, matching the shader's group(0) bindings), then draws
	// one full-screen quad into the output FBO.
	const execute = (encoder) => {
		const inputs = getInputs();
		const tex = inputs.primary;
		const bloomTex = inputs.primary; // TODO: bloom
		const renderBindGroup = device.createBindGroup({
			layout: renderPipeline.getBindGroupLayout(0),
			entries: [linearSampler, tex.createView(), bloomTex.createView(), backgroundTex.createView()]
				.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
				.map((resource, binding) => ({
					binding,
					resource,
				})),
		});
		renderPassConfig.colorAttachments[0].view = output.createView();
		const renderPass = encoder.beginRenderPass(renderPassConfig);
		renderPass.setPipeline(renderPipeline);
		renderPass.setBindGroup(0, renderBindGroup);
		renderPass.draw(numVerticesPerQuad, 1, 0, 0);
		renderPass.endPass();
	};

	return makePass(ready, setSize, getOutputs, execute);
};

View File

@@ -1,6 +1,25 @@
import std140 from "./std140.js";
import { getCanvasSize, makeUniformBuffer, makePipeline } from "./utils.js";
import makeRain from "./rainPass.js";
// import makeBloomPass from "./bloomPass.js";
import makePalettePass from "./palettePass.js";
import makeStripePass from "./stripePass.js";
import makeImagePass from "./imagePass.js";
import makeResurrectionPass from "./resurrectionPass.js";
// Maps each config effect name (and its aliases) to the factory for the
// post-processing pass that implements it. "none" deliberately maps to
// null; the pipeline builder filters null steps out.
const effects = {
	none: null,
	plain: makePalettePass,
	customStripes: makeStripePass,
	stripes: makeStripePass,
	pride: makeStripePass,
	transPride: makeStripePass,
	trans: makeStripePass,
	image: makeImagePass,
	resurrection: makeResurrectionPass,
	resurrections: makeResurrectionPass,
};
export default async (canvas, config) => {
const adapter = await navigator.gpu.requestAdapter();
@@ -30,13 +49,18 @@ export default async (canvas, config) => {
timeBuffer,
};
const pipeline = makePipeline(context, [makeRain /*makeBloomPass, effects[effectName]*/]);
const effectName = config.effect in effects ? config.effect : "plain";
const pipeline = makePipeline(context, [makeRain, /*makeBloomPass,*/ effects[effectName]]);
await Promise.all(pipeline.map((step) => step.ready));
let frame = 0;
let start = NaN;
const renderLoop = (now) => {
if (isNaN(start)) {
start = now;
}
const canvasSize = getCanvasSize(canvas);
if (canvasSize[0] !== canvasConfig.size[0] || canvasSize[1] !== canvasConfig.size[1]) {
canvasConfig.size = canvasSize;
@@ -44,7 +68,7 @@ export default async (canvas, config) => {
pipeline.forEach((step) => step.setSize(...canvasSize));
}
device.queue.writeBuffer(timeBuffer, 0, timeLayout.build([now / 1000, frame]));
device.queue.writeBuffer(timeBuffer, 0, timeLayout.build([(now - start) / 1000, frame]));
frame++;
const encoder = device.createCommandEncoder();

163
js/webgpu/palettePass.js Normal file
View File

@@ -0,0 +1,163 @@
import std140 from "./std140.js";
import { loadShaderModule, makeUniformBuffer, makePassFBO, makePass } from "./utils.js";
// Maps the brightness of the rendered rain and bloom to colors
// in a linear gradient buffer generated from the passed-in color sequence
// This shader introduces noise into the renders, to avoid banding
// Converts an HSL triple (every component in [0, 1]) to an RGB triple,
// using the direct "alternative" HSL-to-RGB formula.
const colorToRGB = ([h, s, l]) => {
	const chroma = s * Math.min(l, 1 - l);
	const channel = (n) => {
		const k = (n + h * 12) % 12;
		const t = Math.min(k - 3, 9 - k, 1);
		return l - chroma * Math.max(-1, t);
	};
	// Channel offsets 0, 8 and 4 yield red, green and blue respectively.
	return [0, 8, 4].map(channel);
};
const numVerticesPerQuad = 2 * 3;
// Builds a uniform buffer holding a 512-entry RGB gradient interpolated
// from the given palette entries ({ hsl, at } objects, at in [0, 1]).
const makePalette = (device, entries) => {
	// Number of color slots; the shader clamps its lookup index to [0, 511].
	const PALETTE_SIZE = 512;
	const paletteColors = Array(PALETTE_SIZE);

	// Convert HSL gradient into sorted RGB gradient, capping the ends
	const sortedEntries = entries
		.slice() // copy so sorting doesn't mutate the caller's array
		.sort((e1, e2) => e1.at - e2.at)
		.map((entry) => ({
			rgb: colorToRGB(entry.hsl),
			// Map the entry's position (clamped to [0, 1]) to a slot index.
			arrayIndex: Math.floor(Math.max(Math.min(1, entry.at), 0) * (PALETTE_SIZE - 1)),
		}));
	// Duplicate the first and last colors at the extremes so the
	// interpolation below always has endpoints at slots 0 and 511.
	sortedEntries.unshift({ rgb: sortedEntries[0].rgb, arrayIndex: 0 });
	sortedEntries.push({
		rgb: sortedEntries[sortedEntries.length - 1].rgb,
		arrayIndex: PALETTE_SIZE - 1,
	});

	// Interpolate between the sorted RGB entries to generate
	// the palette texture data
	sortedEntries.forEach((entry, index) => {
		paletteColors[entry.arrayIndex] = entry.rgb.slice();
		if (index + 1 < sortedEntries.length) {
			const nextEntry = sortedEntries[index + 1];
			const diff = nextEntry.arrayIndex - entry.arrayIndex;
			// Linearly blend each channel across the gap to the next entry.
			for (let i = 0; i < diff; i++) {
				const ratio = i / diff;
				paletteColors[entry.arrayIndex + i] = [
					entry.rgb[0] * (1 - ratio) + nextEntry.rgb[0] * ratio,
					entry.rgb[1] * (1 - ratio) + nextEntry.rgb[1] * ratio,
					entry.rgb[2] * (1 - ratio) + nextEntry.rgb[2] * ratio,
				];
			}
		}
	});

	// TODO: support arrays in std140
	// Each color occupies (3 + 1) floats: vec3s are padded to 16 bytes
	// under std140-style alignment, matching array<vec3<f32>, 512> in WGSL.
	const paletteBuffer = device.createBuffer({
		size: (3 + 1) * PALETTE_SIZE * Float32Array.BYTES_PER_ELEMENT,
		usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
		mappedAtCreation: true,
	});
	const view = new Float32Array(paletteBuffer.getMappedRange());
	for (let i = 0; i < paletteColors.length; i++) {
		view.set(paletteColors[i], (3 + 1) * i);
	}
	paletteBuffer.unmap();
	return paletteBuffer;
};
// The rendered texture's values are mapped to colors in a palette texture.
// A little noise is introduced, to hide the banding that appears
// in subtle gradients. The noise is also time-driven, so its grain
// won't persist across subsequent frames. This is a safe trick
// in screen space.
export default (context, getInputs) => {
	const { config, adapter, device, canvasContext, timeBuffer } = context;

	// Strength of the per-pixel noise subtracted from the brightness.
	const ditherMagnitude = 0.05;
	// Layout mirrors the shader's Config struct: { f32 ditherMagnitude, vec3<f32> backgroundColor }.
	const configLayout = std140(["f32", "vec3<f32>"]);
	const configBuffer = makeUniformBuffer(device, configLayout, [ditherMagnitude, config.backgroundColor]);
	const paletteBuffer = makePalette(device, config.paletteEntries);

	const linearSampler = device.createSampler({
		magFilter: "linear",
		minFilter: "linear",
	});

	// Reused every frame; the color attachment's view is assigned in execute().
	const renderPassConfig = {
		colorAttachments: [
			{
				view: null,
				loadValue: { r: 0, g: 0, b: 0, a: 1 },
				storeOp: "store",
			},
		],
	};
	const presentationFormat = canvasContext.getPreferredFormat(adapter);

	let renderPipeline;
	let output;

	const assets = [loadShaderModule(device, "shaders/wgsl/palettePass.wgsl")];

	// Resolves once the shader module has loaded and the pipeline is built.
	const ready = (async () => {
		const [rainShader] = await Promise.all(assets);
		renderPipeline = device.createRenderPipeline({
			vertex: {
				module: rainShader,
				entryPoint: "vertMain",
			},
			fragment: {
				module: rainShader,
				entryPoint: "fragMain",
				targets: [
					{
						format: presentationFormat,
					},
				],
			},
		});
	})();

	// Recreates the output framebuffer texture at the new canvas size.
	const setSize = (width, height) => {
		output?.destroy();
		output = makePassFBO(device, width, height, presentationFormat);
	};

	const getOutputs = () => ({
		primary: output,
	});

	// Binds the uniforms, sampler and upstream textures positionally,
	// matching the shader's group(0) binding order, then draws one
	// full-screen quad into the output FBO.
	const execute = (encoder) => {
		const inputs = getInputs();
		const tex = inputs.primary;
		const bloomTex = inputs.primary; // TODO: bloom
		const renderBindGroup = device.createBindGroup({
			layout: renderPipeline.getBindGroupLayout(0),
			entries: [configBuffer, paletteBuffer, timeBuffer, linearSampler, tex.createView(), bloomTex.createView()]
				.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
				.map((resource, binding) => ({
					binding,
					resource,
				})),
		});
		renderPassConfig.colorAttachments[0].view = output.createView();
		const renderPass = encoder.beginRenderPass(renderPassConfig);
		renderPass.setPipeline(renderPipeline);
		renderPass.setBindGroup(0, renderBindGroup);
		renderPass.draw(numVerticesPerQuad, 1, 0, 0);
		renderPass.endPass();
	};

	return makePass(ready, setSize, getOutputs, execute);
};

View File

@@ -1,5 +1,5 @@
import std140 from "./std140.js";
import { createRenderTargetTexture, loadTexture, loadShaderModule, makeUniformBuffer, makePass } from "./utils.js";
import { makePassFBO, loadTexture, loadShaderModule, makeUniformBuffer, makePass } from "./utils.js";
const { mat4, vec3 } = glMatrix;
@@ -114,18 +114,18 @@ export default (context, getInputs) => {
const presentationFormat = canvasContext.getPreferredFormat(adapter);
let rainComputePipeline;
let rainRenderPipeline;
let computePipeline;
let renderPipeline;
let computeBindGroup;
let renderBindGroup;
let renderTargetTexture;
let output;
const ready = (async () => {
const [msdfTexture, rainShaderModule] = await Promise.all(assets);
const [msdfTexture, rainShader] = await Promise.all(assets);
rainComputePipeline = device.createComputePipeline({
computePipeline = device.createComputePipeline({
compute: {
module: rainShaderModule,
module: rainShader,
entryPoint: "computeMain",
},
});
@@ -136,13 +136,13 @@ export default (context, getInputs) => {
dstFactor: "one",
};
rainRenderPipeline = device.createRenderPipeline({
renderPipeline = device.createRenderPipeline({
vertex: {
module: rainShaderModule,
module: rainShader,
entryPoint: "vertMain",
},
fragment: {
module: rainShaderModule,
module: rainShader,
entryPoint: "fragMain",
targets: [
{
@@ -157,7 +157,7 @@ export default (context, getInputs) => {
});
computeBindGroup = device.createBindGroup({
layout: rainComputePipeline.getBindGroupLayout(0),
layout: computePipeline.getBindGroupLayout(0),
entries: [configBuffer, timeBuffer, cellsBuffer]
.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
.map((resource, binding) => ({
@@ -167,7 +167,7 @@ export default (context, getInputs) => {
});
renderBindGroup = device.createBindGroup({
layout: rainRenderPipeline.getBindGroupLayout(0),
layout: renderPipeline.getBindGroupLayout(0),
entries: [configBuffer, timeBuffer, sceneBuffer, linearSampler, msdfTexture.createView(), cellsBuffer]
.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
.map((resource, binding) => ({
@@ -185,27 +185,28 @@ export default (context, getInputs) => {
device.queue.writeBuffer(sceneBuffer, 0, sceneLayout.build([screenSize, camera, transform]));
// Update
renderTargetTexture = createRenderTargetTexture(device, width, height, presentationFormat);
output?.destroy();
output = makePassFBO(device, width, height, presentationFormat);
};
const getOutputs = () => ({
primary: output,
});
const execute = (encoder) => {
const computePass = encoder.beginComputePass();
computePass.setPipeline(rainComputePipeline);
computePass.setPipeline(computePipeline);
computePass.setBindGroup(0, computeBindGroup);
computePass.dispatch(Math.ceil(gridSize[0] / 32), gridSize[1], 1);
computePass.endPass();
renderPassConfig.colorAttachments[0].view = renderTargetTexture.createView();
renderPassConfig.colorAttachments[0].view = output.createView();
const renderPass = encoder.beginRenderPass(renderPassConfig);
renderPass.setPipeline(rainRenderPipeline);
renderPass.setPipeline(renderPipeline);
renderPass.setBindGroup(0, renderBindGroup);
renderPass.draw(numVerticesPerQuad * numQuads, 1, 0, 0);
renderPass.endPass();
};
const getOutputs = () => ({
primary: renderTargetTexture,
});
return makePass(ready, setSize, getOutputs, execute);
};

View File

@@ -0,0 +1,95 @@
import std140 from "./std140.js";
import { loadShaderModule, makeUniformBuffer, makePassFBO, makePass } from "./utils.js";
// Matrix Resurrections isn't in theaters yet,
// and this version of the effect is still a WIP.
// Criteria:
// Upward-flowing glyphs should be golden
// Downward-flowing glyphs should be tinted slightly blue on top and golden on the bottom
// Cheat a lens blur, interpolating between the texture and bloom at the edges
const numVerticesPerQuad = 2 * 3;
export default (context, getInputs) => {
	const { config, adapter, device, canvasContext, timeBuffer } = context;

	// Strength of the per-pixel noise subtracted from the brightness.
	const ditherMagnitude = 0.05;
	// Layout mirrors the shader's Config struct: { f32 ditherMagnitude, vec3<f32> backgroundColor }.
	const configLayout = std140(["f32", "vec3<f32>"]);
	const configBuffer = makeUniformBuffer(device, configLayout, [ditherMagnitude, config.backgroundColor]);

	const linearSampler = device.createSampler({
		magFilter: "linear",
		minFilter: "linear",
	});

	// Reused every frame; the color attachment's view is assigned in execute().
	const renderPassConfig = {
		colorAttachments: [
			{
				view: null,
				loadValue: { r: 0, g: 0, b: 0, a: 1 },
				storeOp: "store",
			},
		],
	};
	const presentationFormat = canvasContext.getPreferredFormat(adapter);

	let renderPipeline;
	let output;

	const assets = [loadShaderModule(device, "shaders/wgsl/resurrectionPass.wgsl")];

	// Resolves once the shader module has loaded and the pipeline is built.
	const ready = (async () => {
		const [rainShader] = await Promise.all(assets);
		renderPipeline = device.createRenderPipeline({
			vertex: {
				module: rainShader,
				entryPoint: "vertMain",
			},
			fragment: {
				module: rainShader,
				entryPoint: "fragMain",
				targets: [
					{
						format: presentationFormat,
					},
				],
			},
		});
	})();

	// Recreates the output framebuffer texture at the new canvas size.
	const setSize = (width, height) => {
		output?.destroy();
		output = makePassFBO(device, width, height, presentationFormat);
	};

	const getOutputs = () => ({
		primary: output,
	});

	// Binds the uniforms, sampler and upstream textures positionally,
	// matching the shader's group(0) binding order, then draws one
	// full-screen quad into the output FBO.
	const execute = (encoder) => {
		const inputs = getInputs();
		const tex = inputs.primary;
		const bloomTex = inputs.primary; // TODO: bloom
		const renderBindGroup = device.createBindGroup({
			layout: renderPipeline.getBindGroupLayout(0),
			entries: [configBuffer, timeBuffer, linearSampler, tex.createView(), bloomTex.createView()]
				.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
				.map((resource, binding) => ({
					binding,
					resource,
				})),
		});
		renderPassConfig.colorAttachments[0].view = output.createView();
		const renderPass = encoder.beginRenderPass(renderPassConfig);
		renderPass.setPipeline(renderPipeline);
		renderPass.setBindGroup(0, renderBindGroup);
		renderPass.draw(numVerticesPerQuad, 1, 0, 0);
		renderPass.endPass();
	};

	return makePass(ready, setSize, getOutputs, execute);
};

130
js/webgpu/stripePass.js Normal file
View File

@@ -0,0 +1,130 @@
import std140 from "./std140.js";
import { loadShaderModule, make1DTexture, makeUniformBuffer, makePassFBO, makePass } from "./utils.js";
// Multiplies the rendered rain and bloom by a 1D gradient texture
// generated from the passed-in color sequence
// This shader introduces noise into the renders, to avoid banding
const transPrideStripeColors = [
[0.3, 1.0, 1.0],
[0.3, 1.0, 1.0],
[1.0, 0.5, 0.8],
[1.0, 0.5, 0.8],
[1.0, 1.0, 1.0],
[1.0, 1.0, 1.0],
[1.0, 1.0, 1.0],
[1.0, 0.5, 0.8],
[1.0, 0.5, 0.8],
[0.3, 1.0, 1.0],
[0.3, 1.0, 1.0],
];
const prideStripeColors = [
[1, 0, 0],
[1, 0.5, 0],
[1, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0.8, 0, 1],
];
const numVerticesPerQuad = 2 * 3;
// The rendered texture's values are mapped to colors in a palette texture.
// A little noise is introduced, to hide the banding that appears
// in subtle gradients. The noise is also time-driven, so its grain
// won't persist across subsequent frames. This is a safe trick
// in screen space.
export default (context, getInputs) => {
	const { config, adapter, device, canvasContext, timeBuffer } = context;

	// Strength of the per-pixel noise subtracted from the brightness.
	const ditherMagnitude = 0.05;
	// Layout mirrors the shader's Config struct: { f32 ditherMagnitude, vec3<f32> backgroundColor }.
	const configLayout = std140(["f32", "vec3<f32>"]);
	const configBuffer = makeUniformBuffer(device, configLayout, [ditherMagnitude, config.backgroundColor]);

	// Expand and convert stripe colors into 1D texture data.
	// config.stripeColors arrives as a flat comma-separated float list
	// ("r,g,b,r,g,b,…"). It must be grouped into [r, g, b] triples here:
	// the previous code passed raw numbers on to the [...color, 1] spread
	// below, which throws because a number is not iterable.
	let stripeColors;
	if ("stripeColors" in config) {
		const flatColors = config.stripeColors.split(",").map(parseFloat);
		stripeColors = [];
		for (let i = 0; i + 3 <= flatColors.length; i += 3) {
			stripeColors.push(flatColors.slice(i, i + 3));
		}
	} else {
		stripeColors = config.effect === "pride" ? prideStripeColors : transPrideStripeColors;
	}
	const stripeTexture = make1DTexture(
		device,
		stripeColors.map((color) => [...color, 1]) // append alpha = 1
	);

	const linearSampler = device.createSampler({
		magFilter: "linear",
		minFilter: "linear",
	});

	// Reused every frame; the color attachment's view is assigned in execute().
	const renderPassConfig = {
		colorAttachments: [
			{
				view: null,
				loadValue: { r: 0, g: 0, b: 0, a: 1 },
				storeOp: "store",
			},
		],
	};
	const presentationFormat = canvasContext.getPreferredFormat(adapter);

	let renderPipeline;
	let output;

	const assets = [loadShaderModule(device, "shaders/wgsl/stripePass.wgsl")];

	// Resolves once the shader module has loaded and the pipeline is built.
	const ready = (async () => {
		const [rainShader] = await Promise.all(assets);
		renderPipeline = device.createRenderPipeline({
			vertex: {
				module: rainShader,
				entryPoint: "vertMain",
			},
			fragment: {
				module: rainShader,
				entryPoint: "fragMain",
				targets: [
					{
						format: presentationFormat,
					},
				],
			},
		});
	})();

	// Recreates the output framebuffer texture at the new canvas size.
	const setSize = (width, height) => {
		output?.destroy();
		output = makePassFBO(device, width, height, presentationFormat);
	};

	const getOutputs = () => ({
		primary: output,
	});

	// Binds the uniforms, sampler, upstream textures and the stripe
	// gradient positionally, matching the shader's group(0) binding order,
	// then draws one full-screen quad into the output FBO.
	const execute = (encoder) => {
		const inputs = getInputs();
		const tex = inputs.primary;
		const bloomTex = inputs.primary; // TODO: bloom
		const renderBindGroup = device.createBindGroup({
			layout: renderPipeline.getBindGroupLayout(0),
			entries: [configBuffer, timeBuffer, linearSampler, tex.createView(), bloomTex.createView(), stripeTexture.createView()]
				.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
				.map((resource, binding) => ({
					binding,
					resource,
				})),
		});
		renderPassConfig.colorAttachments[0].view = output.createView();
		const renderPass = encoder.beginRenderPass(renderPassConfig);
		renderPass.setPipeline(renderPipeline);
		renderPass.setBindGroup(0, renderBindGroup);
		renderPass.draw(numVerticesPerQuad, 1, 0, 0);
		renderPass.endPass();
	};

	return makePass(ready, setSize, getOutputs, execute);
};

View File

@@ -4,7 +4,7 @@ const getCanvasSize = (canvas) => {
};
const loadTexture = async (device, url) => {
const response = await fetch(url, { credentials: "include" });
const response = await fetch(url);
const data = await response.blob();
const imageBitmap = await createImageBitmap(data);
@@ -27,7 +27,7 @@ const loadTexture = async (device, url) => {
return texture;
};
const createRenderTargetTexture = (device, width, height, format = "rgba8unorm") =>
const makePassFBO = (device, width, height, format = "rgba8unorm") =>
device.createTexture({
size: [width, height, 1],
format,
@@ -54,6 +54,19 @@ const makeUniformBuffer = (device, structLayout, values = null) => {
return buffer;
};
// Builds a width-N, height-1 RGBA texture from an array of [r, g, b, a]
// values (components in [0, 1]); stripePass uses it for its gradient.
const make1DTexture = (device, rgbas) => {
	const size = [rgbas.length];
	const texture = device.createTexture({
		size,
		// dimension: "1d",
		// NOTE(review): the "1d" dimension is left disabled, so this is a 2D
		// texture one texel tall — presumably for compatibility; confirm
		// before re-enabling.
		format: "rgba8unorm",
		usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST,
	});
	// Uint8ClampedArray clamps and rounds the scaled floats into bytes.
	const data = new Uint8ClampedArray(rgbas.map((color) => color.map((f) => f * 0xff)).flat());
	device.queue.writeTexture({ texture }, data, {}, size);
	return texture;
};
const makePass = (ready, setSize, getOutputs, execute) => ({
ready: ready ?? Promise.resolve(),
setSize: setSize ?? (() => {}),
@@ -64,4 +77,4 @@ const makePass = (ready, setSize, getOutputs, execute) => ({
const makePipeline = (context, steps) =>
steps.filter((f) => f != null).reduce((pipeline, f, i) => [...pipeline, f(context, i == 0 ? null : pipeline[i - 1].getOutputs)], []);
export { getCanvasSize, createRenderTargetTexture, loadTexture, loadShaderModule, makeUniformBuffer, makePass, makePipeline };
export { getCanvasSize, makePassFBO, make1DTexture, loadTexture, loadShaderModule, makeUniformBuffer, makePass, makePipeline };

View File

@@ -0,0 +1,30 @@
// Multiplies the rendered rain (and bloom) by a background image.
// Bindings are supplied positionally by js/webgpu/imagePass.js.
[[group(0), binding(0)]] var linearSampler : sampler;
[[group(0), binding(1)]] var tex : texture_2d<f32>;
[[group(0), binding(2)]] var bloomTex : texture_2d<f32>;
[[group(0), binding(3)]] var backgroundTex : texture_2d<f32>;

struct VertOutput {
	[[builtin(position)]] Position : vec4<f32>;
	[[location(0)]] uv : vec2<f32>;
};

// Emits the six corners of a full-screen quad (two triangles),
// deriving each corner's UV from the vertex index alone.
[[stage(vertex)]] fn vertMain([[builtin(vertex_index)]] index : u32) -> VertOutput {
	var uv = vec2<f32>(f32(index % 2u), f32((index + 1u) % 6u / 3u));
	var position = vec4<f32>(uv * 2.0 - 1.0, 1.0, 1.0);
	return VertOutput(position, uv);
}

[[stage(fragment)]] fn fragMain(input : VertOutput) -> [[location(0)]] vec4<f32> {
	var uv = input.uv;
	// Flip the sampling coordinate vertically.
	uv.y = 1.0 - uv.y;
	var bgColor = textureSample( backgroundTex, linearSampler, uv ).rgb;
	// Combine the texture and bloom, then blow it out to reveal more of the image
	var brightness = min(1.0, textureSample( tex, linearSampler, uv ).r * 2.0);
	brightness = brightness + textureSample( bloomTex, linearSampler, uv ).r;
	brightness = pow(brightness, 1.5);
	return vec4<f32>(bgColor * brightness, 1.0);
}

View File

@@ -0,0 +1,61 @@
// Maps rain/bloom brightness to colors from a 512-entry palette,
// dithering with time-driven noise to hide banding.
// Struct layouts match the std140 buffers built in js/webgpu/palettePass.js.
[[block]] struct Config {
	ditherMagnitude : f32;
	backgroundColor : vec3<f32>;
};
[[block]] struct Palette {
	colors : array<vec3<f32>, 512>;
};
[[block]] struct Time {
	seconds : f32;
	frames : i32;
};

[[group(0), binding(0)]] var<uniform> config : Config;
[[group(0), binding(1)]] var<uniform> palette : Palette;
[[group(0), binding(2)]] var<uniform> time : Time;
[[group(0), binding(3)]] var linearSampler : sampler;
[[group(0), binding(4)]] var tex : texture_2d<f32>;
[[group(0), binding(5)]] var bloomTex : texture_2d<f32>;

struct VertOutput {
	[[builtin(position)]] Position : vec4<f32>;
	[[location(0)]] uv : vec2<f32>;
};

let PI : f32 = 3.14159265359;

// Classic sin-dot-product pseudo-random hash of a 2D coordinate.
fn randomFloat( uv : vec2<f32> ) -> f32 {
	let a = 12.9898;
	let b = 78.233;
	let c = 43758.5453;
	let dt = dot( uv, vec2<f32>( a, b ) );
	let sn = dt % PI;
	return fract(sin(sn) * c);
}

// Emits the six corners of a full-screen quad (two triangles),
// deriving each corner's UV from the vertex index alone.
[[stage(vertex)]] fn vertMain([[builtin(vertex_index)]] index : u32) -> VertOutput {
	var uv = vec2<f32>(f32(index % 2u), f32((index + 1u) % 6u / 3u));
	var position = vec4<f32>(uv * 2.0 - 1.0, 1.0, 1.0);
	return VertOutput(position, uv);
}

[[stage(fragment)]] fn fragMain(input : VertOutput) -> [[location(0)]] vec4<f32> {
	var uv = input.uv;
	// Flip the sampling coordinate vertically.
	uv.y = 1.0 - uv.y;
	var brightnessRGB = textureSample( tex, linearSampler, uv ) + textureSample( bloomTex, linearSampler, uv );
	// Combine the texture and bloom
	var brightness = brightnessRGB.r + brightnessRGB.g + brightnessRGB.b;
	// Dither: subtract a random value from the brightness
	brightness = brightness - randomFloat( uv + vec2<f32>(time.seconds) ) * config.ditherMagnitude;
	// Scale brightness to a palette slot, clamped to the array bounds.
	var paletteIndex = clamp(i32(brightness * 512.0), 0, 511);
	// Map the brightness to a position in the palette texture
	return vec4<f32>(palette.colors[paletteIndex] + config.backgroundColor, 1.0);
}

View File

@@ -0,0 +1,90 @@
// WIP "Resurrections" tint: golden upward glyphs, blue-to-gold downward
// glyphs, with a cheated lens blur between the texture and bloom.
// Struct layouts match the std140 buffers built in js/webgpu/resurrectionPass.js.
[[block]] struct Config {
	ditherMagnitude : f32;
	backgroundColor : vec3<f32>;
};
[[block]] struct Time {
	seconds : f32;
	frames : i32;
};

[[group(0), binding(0)]] var<uniform> config : Config;
[[group(0), binding(1)]] var<uniform> time : Time;
[[group(0), binding(2)]] var linearSampler : sampler;
[[group(0), binding(3)]] var tex : texture_2d<f32>;
[[group(0), binding(4)]] var bloomTex : texture_2d<f32>;

struct VertOutput {
	[[builtin(position)]] Position : vec4<f32>;
	[[location(0)]] uv : vec2<f32>;
};

let PI : f32 = 3.14159265359;

// Classic sin-dot-product pseudo-random hash of a 2D coordinate.
fn randomFloat( uv : vec2<f32> ) -> f32 {
	let a = 12.9898;
	let b = 78.233;
	let c = 43758.5453;
	let dt = dot( uv, vec2<f32>( a, b ) );
	let sn = dt % PI;
	return fract(sin(sn) * c);
}

// Piecewise helper for HSL-to-RGB: computes one channel from the
// intermediate p/q values and a wrapped hue offset t.
fn rgbComponent(p : f32, q : f32, t : f32) -> f32 {
	var t2 = t;
	// Wrap t into [0, 1].
	if (t2 < 0.0) { t2 = t2 + 1.0; }
	if (t2 > 1.0) { t2 = t2 - 1.0; }
	if (t2 < 1.0 / 6.0) { return p + (q - p) * 6.0 * t2; }
	if (t2 < 1.0 / 2.0) { return q; }
	if (t2 < 2.0 / 3.0) { return p + (q - p) * (2.0 / 3.0 - t2) * 6.0; }
	return p;
}

// Standard HSL-to-RGB conversion (all components in [0, 1]).
fn hslToRgb(h : f32, s : f32, l : f32) -> vec3<f32> {
	var q : f32;
	if (l < 0.5) {
		q = l * (1. + s);
	} else {
		q = l + s - l * s;
	}
	var p = 2.0 * l - q;
	return vec3<f32>(
		rgbComponent(p, q, h + 1.0 / 3.0),
		rgbComponent(p, q, h),
		rgbComponent(p, q, h - 1.0 / 3.0)
	);
}

// Emits the six corners of a full-screen quad (two triangles),
// deriving each corner's UV from the vertex index alone.
[[stage(vertex)]] fn vertMain([[builtin(vertex_index)]] index : u32) -> VertOutput {
	var uv = vec2<f32>(f32(index % 2u), f32((index + 1u) % 6u / 3u));
	var position = vec4<f32>(uv * 2.0 - 1.0, 1.0, 1.0);
	return VertOutput(position, uv);
}

[[stage(fragment)]] fn fragMain(input : VertOutput) -> [[location(0)]] vec4<f32> {
	var uv = input.uv;
	// Flip the sampling coordinate vertically.
	uv.y = 1.0 - uv.y;
	// Mix the texture and bloom based on distance from center,
	// to approximate a lens blur
	var brightness = mix(
		textureSample( tex, linearSampler, uv ).rgb,
		textureSample( bloomTex, linearSampler, uv ).rgb,
		(0.7 - length(input.uv - 0.5))
	) * 1.25;
	// Dither: subtract a random value from the brightness
	brightness = brightness - randomFloat( uv + vec2<f32>(time.seconds) ) * config.ditherMagnitude;
	// Calculate a hue based on distance from center
	var hue = 0.35 + (length(input.uv - vec2<f32>(0.5, 1.0)) * -0.4 + 0.2);
	// Convert HSL to RGB
	var rgb = hslToRgb(hue, 0.8, max(0., brightness.r)) * vec3<f32>(0.8, 1.0, 0.7);
	// Calculate a separate RGB for upward-flowing glyphs
	var resurrectionRGB = hslToRgb(0.13, 1.0, max(0., brightness.g) * 0.9);
	return vec4<f32>(rgb + resurrectionRGB + config.backgroundColor, 1.0);
}

View File

@@ -0,0 +1,55 @@
// Multiplies rain/bloom brightness by a 1D stripe gradient texture,
// dithering with time-driven noise to hide banding.
// Struct layouts match the std140 buffers built in js/webgpu/stripePass.js.
[[block]] struct Config {
	ditherMagnitude : f32;
	backgroundColor : vec3<f32>;
};
[[block]] struct Time {
	seconds : f32;
	frames : i32;
};

[[group(0), binding(0)]] var<uniform> config : Config;
[[group(0), binding(1)]] var<uniform> time : Time;
[[group(0), binding(2)]] var linearSampler : sampler;
[[group(0), binding(3)]] var tex : texture_2d<f32>;
[[group(0), binding(4)]] var bloomTex : texture_2d<f32>;
[[group(0), binding(5)]] var stripeTexture : texture_2d<f32>;

struct VertOutput {
	[[builtin(position)]] Position : vec4<f32>;
	[[location(0)]] uv : vec2<f32>;
};

let PI : f32 = 3.14159265359;

// Classic sin-dot-product pseudo-random hash of a 2D coordinate.
fn randomFloat( uv : vec2<f32> ) -> f32 {
	let a = 12.9898;
	let b = 78.233;
	let c = 43758.5453;
	let dt = dot( uv, vec2<f32>( a, b ) );
	let sn = dt % PI;
	return fract(sin(sn) * c);
}

// Emits the six corners of a full-screen quad (two triangles),
// deriving each corner's UV from the vertex index alone.
[[stage(vertex)]] fn vertMain([[builtin(vertex_index)]] index : u32) -> VertOutput {
	var uv = vec2<f32>(f32(index % 2u), f32((index + 1u) % 6u / 3u));
	var position = vec4<f32>(uv * 2.0 - 1.0, 1.0, 1.0);
	return VertOutput(position, uv);
}

[[stage(fragment)]] fn fragMain(input : VertOutput) -> [[location(0)]] vec4<f32> {
	var uv = input.uv;
	// Flip the sampling coordinate vertically.
	uv.y = 1.0 - uv.y;
	// Sample the stripe gradient by screen position.
	var color = textureSample( stripeTexture, linearSampler, uv ).rgb;
	// Combine the texture and bloom
	var brightness = min(1.0, textureSample( tex, linearSampler, uv ).r * 2.0);
	brightness = brightness + textureSample( bloomTex, linearSampler, uv ).r;
	// Dither: subtract a random value from the brightness
	brightness = brightness - randomFloat( uv + vec2<f32>(time.seconds) ) * config.ditherMagnitude;
	return vec4<f32>(color * brightness + config.backgroundColor, 1.0);
}