Replacing the final copyToTexture call with a tiny render pass, so that the postprocessing passes can be compute shaders working off of storage textures instead.

This commit is contained in:
Rezmason
2021-11-11 08:24:30 -08:00
parent 9813e76ffb
commit 9c861fd50b
5 changed files with 85 additions and 5 deletions

View File

@@ -2,10 +2,9 @@ TODO:
WebGPU
Switch post processing to compute shaders
Do one final render pass that draws the compute shader output to the canvas texture
This is a temporary requirement until the canvas context can be configured to generate storage textures
blur pass
Update links in issues
Get rid of end pass once it's possible to copy a bgra8unorm to a canvas texture
gpu-buffer, working title
Try and use it for the palette color buffer

60
js/webgpu/endPass.js Normal file
View File

@@ -0,0 +1,60 @@
import { loadShader, makeBindGroup, makePassFBO, makePass } from "./utils.js";
const numVerticesPerQuad = 2 * 3;
// Final "end" pass: draws the pipeline's output texture onto the canvas with a
// fullscreen quad. This exists only because the canvas context can't yet hand
// out storage textures for compute shaders to write into directly — once
// copying a bgra8unorm texture to the canvas is permitted, this pass goes away.
export default (context, getInputs) => {
	const { config, device, canvasFormat, canvasContext } = context;

	// Bilinear sampler, in case the source FBO and canvas ever differ in size.
	const sampler = device.createSampler({
		magFilter: "linear",
		minFilter: "linear",
	});

	// Reused every frame; the attachment's view is filled in per-frame with the
	// current canvas texture just before the pass begins.
	const passDescriptor = {
		colorAttachments: [
			{
				view: null,
				loadValue: { r: 0, g: 0, b: 0, a: 1 },
				storeOp: "store",
			},
		],
	};

	let pipeline;

	// Loads the blit shader and builds the render pipeline targeting the canvas format.
	const build = async () => {
		const [shader] = await Promise.all([loadShader(device, "shaders/wgsl/endPass.wgsl")]);
		pipeline = device.createRenderPipeline({
			vertex: {
				module: shader.module,
				entryPoint: "vertMain",
			},
			fragment: {
				module: shader.module,
				entryPoint: "fragMain",
				targets: [
					{
						format: canvasFormat,
					},
				],
			},
		});
	};
	const ready = build();

	// Records one fullscreen-quad draw that samples the previous pass's primary output.
	const execute = (encoder) => {
		const { primary } = getInputs();
		const bindGroup = makeBindGroup(device, pipeline, 0, [sampler, primary.createView()]);
		passDescriptor.colorAttachments[0].view = canvasContext.getCurrentTexture().createView();
		const pass = encoder.beginRenderPass(passDescriptor);
		pass.setPipeline(pipeline);
		pass.setBindGroup(0, bindGroup);
		pass.draw(numVerticesPerQuad, 1, 0, 0);
		pass.endPass();
	};

	return makePass(null, ready, null, execute);
};

View File

@@ -7,6 +7,7 @@ import makePalettePass from "./palettePass.js";
import makeStripePass from "./stripePass.js";
import makeImagePass from "./imagePass.js";
import makeResurrectionPass from "./resurrectionPass.js";
import makeEndPass from "./endPass.js";
const effects = {
none: null,
@@ -51,7 +52,7 @@ export default async (canvas, config) => {
};
const effectName = config.effect in effects ? config.effect : "plain";
const pipeline = makePipeline(context, [makeRain, makeBloomPass, effects[effectName]]); const pipeline = makePipeline(context, [makeRain, makeBloomPass, effects[effectName], makeEndPass]);
await Promise.all(pipeline.map((step) => step.ready));
@@ -74,7 +75,8 @@ export default async (canvas, config) => {
const encoder = device.createCommandEncoder();
pipeline.forEach((step) => step.execute(encoder));
encoder.copyTextureToTexture({ texture: pipeline[pipeline.length - 1].getOutputs().primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize); // Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
// encoder.copyTextureToTexture({ texture: pipeline[pipeline.length - 1].getOutputs().primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
device.queue.submit([encoder.finish()]);
requestAnimationFrame(renderLoop);
};

View File

@@ -27,7 +27,7 @@ const loadTexture = async (device, url) => {
return texture;
};
const makePassFBO = (device, width, height, format = "rgba8unorm") => const makePassFBO = (device, width, height, format = "bgra8unorm") =>
device.createTexture({
size: [width, height, 1],
format,

19
shaders/wgsl/endPass.wgsl Normal file
View File

@@ -0,0 +1,19 @@
// Fullscreen blit shader: samples the post-processing pipeline's output texture
// and writes it to the canvas. (2021-era WGSL syntax: [[...]] attributes and
// ';'-separated struct members.)
[[group(0), binding(0)]] var linearSampler : sampler;
[[group(0), binding(1)]] var tex : texture_2d<f32>;
struct VertOutput {
[[builtin(position)]] Position : vec4<f32>;
[[location(0)]] uv : vec2<f32>;
};
// Generates a two-triangle quad purely from the vertex index — no vertex buffer.
// For indices 0..5: x = index % 2 gives 0,1,0,1,0,1 and
// y = ((index + 1) % 6) / 3 (integer division) gives 0,0,1,1,1,0,
// i.e. the six corners of the unit quad.
[[stage(vertex)]] fn vertMain([[builtin(vertex_index)]] index : u32) -> VertOutput {
var uv = vec2<f32>(f32(index % 2u), f32((index + 1u) % 6u / 3u));
// Map uv in [0,1] to clip space [-1,1]; depth fixed at 1.0.
var position = vec4<f32>(uv * 2.0 - 1.0, 1.0, 1.0);
return VertOutput(position, uv);
}
[[stage(fragment)]] fn fragMain(input : VertOutput) -> [[location(0)]] vec4<f32> {
var uv = input.uv;
// Flip vertically — presumably because texture v runs opposite to clip-space y here.
uv.y = 1.0 - uv.y;
return textureSample( tex, linearSampler, uv );
}