mirror of
https://github.com/Rezmason/matrix.git
synced 2026-04-14 12:29:30 -07:00
Replace the final copyTextureToTexture call with a tiny render pass, so that the postprocessing passes can be compute shaders working off of storage textures instead.
This commit is contained in:
3
TODO.txt
3
TODO.txt
@@ -2,10 +2,9 @@ TODO:
|
||||
|
||||
WebGPU
|
||||
Switch post processing to compute shaders
|
||||
Do one final render pass that draws the compute shader output to the canvas texture
|
||||
This is a temporary requirement until the canvas context can be configured to generate storage textures
|
||||
blur pass
|
||||
Update links in issues
|
||||
Get rid of end pass once it's possible to copy a bgra8unorm to a canvas texture
|
||||
|
||||
gpu-buffer, working title
|
||||
Try and use it for the palette color buffer
|
||||
|
||||
60
js/webgpu/endPass.js
Normal file
60
js/webgpu/endPass.js
Normal file
@@ -0,0 +1,60 @@
|
||||
import { loadShader, makeBindGroup, makePassFBO, makePass } from "./utils.js";
|
||||
|
||||
// Final blit pass: draws the upstream pass's output texture onto the canvas
// with a fullscreen quad. This exists because the canvas texture cannot yet be
// configured as a storage texture, so compute-based postprocessing needs one
// last raster step to reach the screen.

const numVerticesPerQuad = 2 * 3;

export default (context, getInputs) => {
	const { config, device, canvasFormat, canvasContext } = context;

	// Bilinear sampler used to read the upstream pass's output.
	const linearSampler = device.createSampler({ magFilter: "linear", minFilter: "linear" });

	// Reused every frame; the attachment's view is patched in just before each
	// render pass begins, since the canvas texture changes per frame.
	const colorAttachment = {
		view: null,
		loadValue: { r: 0, g: 0, b: 0, a: 1 },
		storeOp: "store",
	};
	const renderPassConfig = { colorAttachments: [colorAttachment] };

	let renderPipeline;

	const assets = [loadShader(device, "shaders/wgsl/endPass.wgsl")];

	// Resolves once the shader has loaded and the pipeline has been built.
	const ready = Promise.all(assets).then(([imageShader]) => {
		const module = imageShader.module;
		renderPipeline = device.createRenderPipeline({
			vertex: { module, entryPoint: "vertMain" },
			fragment: {
				module,
				entryPoint: "fragMain",
				targets: [{ format: canvasFormat }],
			},
		});
	});

	// Records the blit into the supplied command encoder.
	const execute = (encoder) => {
		const tex = getInputs().primary;
		const bindGroup = makeBindGroup(device, renderPipeline, 0, [linearSampler, tex.createView()]);
		colorAttachment.view = canvasContext.getCurrentTexture().createView();
		const pass = encoder.beginRenderPass(renderPassConfig);
		pass.setPipeline(renderPipeline);
		pass.setBindGroup(0, bindGroup);
		pass.draw(numVerticesPerQuad, 1, 0, 0);
		pass.endPass();
	};

	return makePass(null, ready, null, execute);
};
|
||||
@@ -7,6 +7,7 @@ import makePalettePass from "./palettePass.js";
|
||||
import makeStripePass from "./stripePass.js";
|
||||
import makeImagePass from "./imagePass.js";
|
||||
import makeResurrectionPass from "./resurrectionPass.js";
|
||||
import makeEndPass from "./endPass.js";
|
||||
|
||||
const effects = {
|
||||
none: null,
|
||||
@@ -51,7 +52,7 @@ export default async (canvas, config) => {
|
||||
};
|
||||
|
||||
const effectName = config.effect in effects ? config.effect : "plain";
|
||||
const pipeline = makePipeline(context, [makeRain, makeBloomPass, effects[effectName]]);
|
||||
const pipeline = makePipeline(context, [makeRain, makeBloomPass, effects[effectName], makeEndPass]);
|
||||
|
||||
await Promise.all(pipeline.map((step) => step.ready));
|
||||
|
||||
@@ -74,7 +75,8 @@ export default async (canvas, config) => {
|
||||
|
||||
const encoder = device.createCommandEncoder();
|
||||
pipeline.forEach((step) => step.execute(encoder));
|
||||
encoder.copyTextureToTexture({ texture: pipeline[pipeline.length - 1].getOutputs().primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
|
||||
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
|
||||
// encoder.copyTextureToTexture({ texture: pipeline[pipeline.length - 1].getOutputs().primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
|
||||
device.queue.submit([encoder.finish()]);
|
||||
requestAnimationFrame(renderLoop);
|
||||
};
|
||||
|
||||
@@ -27,7 +27,7 @@ const loadTexture = async (device, url) => {
|
||||
return texture;
|
||||
};
|
||||
|
||||
const makePassFBO = (device, width, height, format = "rgba8unorm") =>
|
||||
const makePassFBO = (device, width, height, format = "bgra8unorm") =>
|
||||
device.createTexture({
|
||||
size: [width, height, 1],
|
||||
format,
|
||||
|
||||
19
shaders/wgsl/endPass.wgsl
Normal file
19
shaders/wgsl/endPass.wgsl
Normal file
@@ -0,0 +1,19 @@
|
||||
// endPass.wgsl — draws a fullscreen quad that samples the final postprocessed
// texture and writes it to the canvas. NOTE(review): per the commit message,
// this is a temporary pass until the canvas context can produce storage
// textures directly — confirm against TODO.txt before removing.

[[group(0), binding(0)]] var linearSampler : sampler;
[[group(0), binding(1)]] var tex : texture_2d<f32>;

// Interstage payload: clip-space position plus the quad's 0..1 texture coordinate.
struct VertOutput {
	[[builtin(position)]] Position : vec4<f32>;
	[[location(0)]] uv : vec2<f32>;
};

// Generates a fullscreen quad (two triangles, six vertices) from the vertex
// index alone — no vertex buffer. The modular arithmetic maps indices 0..5 to
// the corner pattern (0,0)(1,0)(0,1)(1,1)(0,1)(1,0).
[[stage(vertex)]] fn vertMain([[builtin(vertex_index)]] index : u32) -> VertOutput {
	var uv = vec2<f32>(f32(index % 2u), f32((index + 1u) % 6u / 3u));
	// Map uv from 0..1 into clip space -1..1.
	var position = vec4<f32>(uv * 2.0 - 1.0, 1.0, 1.0);
	return VertOutput(position, uv);
}

// Samples the upstream texture at the quad's uv, flipping v because texture
// space is y-down relative to clip space.
[[stage(fragment)]] fn fragMain(input : VertOutput) -> [[location(0)]] vec4<f32> {
	var uv = input.uv;
	uv.y = 1.0 - uv.y;
	return textureSample( tex, linearSampler, uv );
}
|
||||
Reference in New Issue
Block a user