Render target textures seem to need to be the same format as the canvas's current texture. Requires additional investigation.

Created a computeToTexture shader to experiment with writing directly to textures as a means of performing post-processing.
This commit is contained in:
Rezmason
2021-11-03 09:03:05 -07:00
parent 35afa7ca01
commit 7eab0fd654
4 changed files with 58 additions and 27 deletions

View File

@@ -1,5 +1,5 @@
import std140 from "./std140.js";
import { getCanvasSize, loadTexture, loadShaderModule, makeUniformBuffer } from "./utils.js";
import { getCanvasSize, createRTT, loadTexture, loadShaderModule, makeUniformBuffer } from "./utils.js";
const { mat4, vec3 } = glMatrix;
const rippleTypes = {
@@ -25,11 +25,9 @@ export default async (canvas, config) => {
const canvasConfig = {
device,
format: presentationFormat,
size: getCanvasSize(canvas),
size: [NaN, NaN],
};
canvasContext.configure(canvasConfig);
const assets = [
loadTexture(device, config.glyphTexURL),
loadShaderModule(device, "shaders/wgsl/rainPass.wgsl"),
@@ -124,7 +122,7 @@ export default async (canvas, config) => {
};
updateCameraBuffer();
const msdfSampler = device.createSampler({
const linearSampler = device.createSampler({
magFilter: "linear",
minFilter: "linear",
});
@@ -192,7 +190,7 @@ export default async (canvas, config) => {
const rainRenderBindGroup = device.createBindGroup({
layout: rainRenderPipeline.getBindGroupLayout(0),
entries: [configBuffer, timeBuffer, sceneBuffer, msdfSampler, msdfTexture.createView(), cellsBuffer]
entries: [configBuffer, timeBuffer, sceneBuffer, linearSampler, msdfTexture.createView(), cellsBuffer]
.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
.map((resource, binding) => ({
binding,
@@ -200,20 +198,12 @@ export default async (canvas, config) => {
})),
});
const renderToCanvasBindGroup = device.createBindGroup({
layout: renderToCanvasPipeline.getBindGroupLayout(0),
entries: [msdfSampler, msdfTexture.createView()]
.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
.map((resource, binding) => ({
binding,
resource,
})),
});
let rtt, renderToCanvasBindGroup;
const rainRenderPassConfig = {
colorAttachments: [
{
view: canvasContext.getCurrentTexture().createView(),
view: null,
loadValue: { r: 0, g: 0, b: 0, a: 1 },
storeOp: "store",
},
@@ -223,7 +213,7 @@ export default async (canvas, config) => {
const renderToCanvasPassConfig = {
colorAttachments: [
{
view: canvasContext.getCurrentTexture().createView(),
view: null,
loadValue: { r: 0, g: 0, b: 0, a: 1 },
storeOp: "store",
},
@@ -238,7 +228,18 @@ export default async (canvas, config) => {
canvasConfig.size = canvasSize;
canvasContext.configure(canvasConfig);
// TODO: destroy and recreate all screen size textures
rtt = createRTT(adapter, device, canvasContext);
rainRenderPassConfig.colorAttachments[0].view = rtt.createView();
renderToCanvasBindGroup = device.createBindGroup({
layout: renderToCanvasPipeline.getBindGroupLayout(0),
entries: [linearSampler, rtt.createView()]
.map((resource) => (resource instanceof GPUBuffer ? { buffer: resource } : resource))
.map((resource, binding) => ({
binding,
resource,
})),
});
updateCameraBuffer();
}
@@ -254,19 +255,18 @@ export default async (canvas, config) => {
rainComputePass.dispatch(Math.ceil(gridSize[0] / 32), gridSize[1], 1);
rainComputePass.endPass();
rainRenderPassConfig.colorAttachments[0].view = canvasContext.getCurrentTexture().createView();
const rainRenderPass = encoder.beginRenderPass(rainRenderPassConfig);
rainRenderPass.setPipeline(rainRenderPipeline);
rainRenderPass.setBindGroup(0, rainRenderBindGroup);
rainRenderPass.draw(numVerticesPerQuad * numQuads, 1, 0, 0);
rainRenderPass.endPass();
// renderToCanvasPassConfig.colorAttachments[0].view = canvasContext.getCurrentTexture().createView();
// const renderToCanvasPass = encoder.beginRenderPass(renderToCanvasPassConfig);
// renderToCanvasPass.setPipeline(renderToCanvasPipeline);
// renderToCanvasPass.setBindGroup(0, renderToCanvasBindGroup);
// renderToCanvasPass.draw(numVerticesPerQuad, 1, 0, 0);
// renderToCanvasPass.endPass();
renderToCanvasPassConfig.colorAttachments[0].view = canvasContext.getCurrentTexture().createView();
const renderToCanvasPass = encoder.beginRenderPass(renderToCanvasPassConfig);
renderToCanvasPass.setPipeline(renderToCanvasPipeline);
renderToCanvasPass.setBindGroup(0, renderToCanvasBindGroup);
renderToCanvasPass.draw(numVerticesPerQuad, 1, 0, 0);
renderToCanvasPass.endPass();
const commandBuffer = encoder.finish();
device.queue.submit([commandBuffer]);

View File

@@ -27,6 +27,14 @@ const loadTexture = async (device, url) => {
return texture;
};
// Creates a render-to-texture (RTT) target sized to the canvas, in the canvas's
// preferred presentation format — the render pass draws into this texture, and a
// second pass samples it to the canvas (see rainPass setup above).
// NOTE(review): getPreferredFormat is queried per-adapter; assumes the canvas is
// reconfigured with the same format — confirm against the canvasConfig caller.
const createRTT = (adapter, device, canvasContext) => {
return device.createTexture({
// Depth of 1: a single 2D layer matching the current canvas dimensions.
size: [...getCanvasSize(canvasContext.canvas), 1],
format: canvasContext.getPreferredFormat(adapter),
// Broad usage flags while experimenting; COPY_SRC/COPY_DST are likely
// unneeded once the pipeline settles. // TODO: reduce
usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_SRC | GPUTextureUsage.COPY_DST | GPUTextureUsage.RENDER_ATTACHMENT
});
};
const loadShaderModule = async (device, url) => {
const response = await fetch(url);
const code = await response.text();
@@ -46,4 +54,4 @@ const makeUniformBuffer = (device, structLayout, values = null) => {
return buffer;
};
export { getCanvasSize, loadTexture, loadShaderModule, makeUniformBuffer };
export { getCanvasSize, loadTexture, createRTT, loadShaderModule, makeUniformBuffer };