Broke ground on webgpu version.

Split regl_main.js off of main.js, which now detects navigator.gpu. If it exists, it initializes webgpu_main instead.
This commit is contained in:
Rezmason
2021-10-24 05:37:56 -07:00
parent 48a6f2cf34
commit 6b317af0bb
5 changed files with 191 additions and 66 deletions

View File

@@ -186,7 +186,7 @@ paramMapping.dropLength = paramMapping.raindropLength;
paramMapping.angle = paramMapping.slant;
paramMapping.colors = paramMapping.stripeColors;
export default (searchString, make1DTexture) => {
export default (searchString) => {
const urlParams = Object.fromEntries(
Array.from(new URLSearchParams(searchString).entries())
.filter(([key]) => key in paramMapping)

View File

@@ -1,13 +1,6 @@
import { makeFullScreenQuad, makePipeline } from "./utils.js";
import makeConfig from "./config.js"; // The settings of the effect, specified in the URL query params
import makeRain from "./rainPass.js";
import makeBloomPass from "./bloomPass.js";
import makePalettePass from "./palettePass.js";
import makeStripePass from "./stripePass.js";
import makeImagePass from "./imagePass.js";
import makeResurrectionPass from "./resurrectionPass.js";
import makeConfig from "./config.js";
import initWebGPU from "./webgpu_main.js";
import initREGL from "./regl_main.js";
const canvas = document.createElement("canvas");
document.body.appendChild(canvas);
@@ -15,59 +8,11 @@ document.addEventListener("touchmove", (e) => e.preventDefault(), {
passive: false,
});
const regl = createREGL({
canvas,
extensions: ["OES_texture_half_float", "OES_texture_half_float_linear"],
// These extensions are also needed, but Safari misreports that they are missing
optionalExtensions: ["EXT_color_buffer_half_float", "WEBGL_color_buffer_float", "OES_standard_derivatives"],
});
const effects = {
none: null,
plain: makePalettePass,
customStripes: makeStripePass,
stripes: makeStripePass,
pride: makeStripePass,
transPride: makeStripePass,
trans: makeStripePass,
image: makeImagePass,
resurrection: makeResurrectionPass,
resurrections: makeResurrectionPass,
};
const config = makeConfig(window.location.search);
const resize = () => {
canvas.width = Math.ceil(canvas.clientWidth * config.resolution);
canvas.height = Math.ceil(canvas.clientHeight * config.resolution);
};
window.onresize = resize;
resize();
const dimensions = { width: 1, height: 1 };
document.body.onload = async () => {
// All this takes place in a full screen quad.
const fullScreenQuad = makeFullScreenQuad(regl);
const effectName = config.effect in effects ? config.effect : "plain";
const pipeline = makePipeline([makeRain, makeBloomPass, effects[effectName]], (p) => p.outputs, regl, config);
const screenUniforms = { tex: pipeline[pipeline.length - 1].outputs.primary };
const drawToScreen = regl({ uniforms: screenUniforms });
await Promise.all(pipeline.map((step) => step.ready));
const tick = regl.frame(({ viewportWidth, viewportHeight }) => {
// tick.cancel();
if (dimensions.width !== viewportWidth || dimensions.height !== viewportHeight) {
dimensions.width = viewportWidth;
dimensions.height = viewportHeight;
for (const step of pipeline) {
step.resize(viewportWidth, viewportHeight);
}
}
fullScreenQuad(() => {
for (const step of pipeline) {
step.render();
}
drawToScreen();
});
});
// Once the page has loaded, pick the renderer: WebGPU where the browser
// supports it, otherwise fall back to the regl (WebGL) implementation.
document.body.onload = () => {
	const config = makeConfig(window.location.search);
	const init = navigator.gpu == null ? initREGL : initWebGPU;
	init(canvas, config);
};

63
js/regl_main.js Normal file
View File

@@ -0,0 +1,63 @@
import { makeFullScreenQuad, makePipeline } from "./utils.js";
import makeRain from "./rainPass.js";
import makeBloomPass from "./bloomPass.js";
import makePalettePass from "./palettePass.js";
import makeStripePass from "./stripePass.js";
import makeImagePass from "./imagePass.js";
import makeResurrectionPass from "./resurrectionPass.js";
// Lookup from the `effect` config param to the pass that implements it.
// Several aliases resolve to the same pass; "none" skips the effect stage.
const stripeAliases = ["customStripes", "stripes", "pride", "transPride", "trans"];
const effects = {
	none: null,
	plain: makePalettePass,
	...Object.fromEntries(stripeAliases.map((alias) => [alias, makeStripePass])),
	image: makeImagePass,
	resurrection: makeResurrectionPass,
	resurrections: makeResurrectionPass,
};

// Last viewport size seen by the frame loop, used to detect resizes.
const dimensions = { width: 1, height: 1 };
// Boot the regl (WebGL) version of the effect: size the canvas, build the
// render pipeline for the configured effect, and start the frame loop.
export default async (canvas, config) => {
	// Keep the canvas backing store in sync with its CSS size, scaled by the
	// configured resolution.
	const resize = () => {
		canvas.width = Math.ceil(canvas.clientWidth * config.resolution);
		canvas.height = Math.ceil(canvas.clientHeight * config.resolution);
	};
	window.onresize = resize;
	resize();

	const regl = createREGL({
		canvas,
		extensions: ["OES_texture_half_float", "OES_texture_half_float_linear"],
		// These extensions are also needed, but Safari misreports that they are missing
		optionalExtensions: ["EXT_color_buffer_half_float", "WEBGL_color_buffer_float", "OES_standard_derivatives"],
	});

	// All this takes place in a full screen quad.
	const fullScreenQuad = makeFullScreenQuad(regl);
	const effectName = config.effect in effects ? config.effect : "plain";
	const pipeline = makePipeline([makeRain, makeBloomPass, effects[effectName]], (p) => p.outputs, regl, config);
	// The final pass's primary output is what gets drawn to the screen.
	const screenUniforms = { tex: pipeline[pipeline.length - 1].outputs.primary };
	const drawToScreen = regl({ uniforms: screenUniforms });
	// Wait for every pass (shaders, textures, etc.) to finish loading.
	await Promise.all(pipeline.map((step) => step.ready));

	regl.frame(({ viewportWidth, viewportHeight }) => {
		// Propagate viewport changes to every pass before rendering.
		if (dimensions.width !== viewportWidth || dimensions.height !== viewportHeight) {
			dimensions.width = viewportWidth;
			dimensions.height = viewportHeight;
			for (const step of pipeline) {
				step.resize(viewportWidth, viewportHeight);
			}
		}
		fullScreenQuad(() => {
			for (const step of pipeline) {
				step.render();
			}
			drawToScreen();
		});
	});
};

78
js/webgpu_main.js Normal file
View File

@@ -0,0 +1,78 @@
// Measure the canvas's layout size in physical device pixels.
// Dimensions are rounded up to whole pixels: they are used as the WebGPU
// canvas/texture size, which must be integers, and a fractional
// devicePixelRatio (e.g. 1.25 under Windows display scaling) would
// otherwise yield non-integer sizes. Math.ceil matches the rounding
// regl_main.js applies to its canvas.
const getCanvasSize = (canvas) => {
	const devicePixelRatio = window.devicePixelRatio ?? 1;
	return [Math.ceil(canvas.clientWidth * devicePixelRatio), Math.ceil(canvas.clientHeight * devicePixelRatio)];
};
// Minimal WebGPU bootstrap: acquires a device, configures the canvas
// context, and clears the screen with a pulsing green tint every frame.
// The real rendering pipeline is still TODO.
export default async (canvas, config) => {
	console.log(config);

	// Defensive double-check; the caller already gates on navigator.gpu.
	if (navigator.gpu == null) {
		return;
	}

	const gpuAdapter = await navigator.gpu.requestAdapter();
	const device = await gpuAdapter.requestDevice();
	const queue = device.queue;
	const canvasContext = canvas.getContext("webgpu");

	const canvasConfig = {
		device,
		format: canvasContext.getPreferredFormat(gpuAdapter),
		size: getCanvasSize(canvas),
	};
	canvasContext.configure(canvasConfig);

	const renderPassConfig = {
		colorAttachments: [
			{
				view: canvasContext.getCurrentTexture().createView(),
				loadValue: { r: 0, g: 0, b: 0, a: 1 },
				storeOp: "store",
			},
		],
	};

	// TODO: create buffers, uniforms, textures, samplers
	// TODO: create pipelines, bind groups, shaders

	const frame = (now) => {
		// Reconfigure the context when the canvas's physical size has changed.
		const [width, height] = getCanvasSize(canvas);
		if (width !== canvasConfig.size[0] || height !== canvasConfig.size[1]) {
			canvasConfig.size = [width, height];
			canvasContext.configure(canvasConfig);
			// TODO: destroy and recreate all screen size textures
			// TODO: update camera matrix, screen size, write to queue
		}

		// TODO: update the uniforms that change, write to queue
		// TODO: passes and pipelines
		const colorAttachment = renderPassConfig.colorAttachments[0];
		colorAttachment.loadValue.g = Math.sin((now / 1000) * 2) / 2 + 0.5;
		colorAttachment.view = canvasContext.getCurrentTexture().createView();

		const encoder = device.createCommandEncoder();
		const renderPass = encoder.beginRenderPass(renderPassConfig);
		renderPass.endPass();
		queue.submit([encoder.finish()]);
		// TODO: Record this, so it doesn't have to be reencoded

		requestAnimationFrame(frame);
	};
	requestAnimationFrame(frame);
};
// (Removed) A leftover bootstrap handler lived here: it assigned
// document.body.onload and called `initWebGPU()`, but no `initWebGPU`
// binding exists in this module (the renderer above is an anonymous
// default export), so the handler would have thrown a ReferenceError if
// it ever fired. main.js now owns renderer selection and assigns
// document.body.onload itself.

39
webgpu_notes.txt Normal file
View File

@@ -0,0 +1,39 @@
Learning WebGPU
Request an adapter from navigator.gpu
Request a device from the adapter
Devices have features and (numeric) limits
So do adapters, but the device ones are the important ones
Devices have a destroy method
Create objects from the device
Pipelines (optionally with async) and pipeline layouts
Shader modules (programs)
Textures and samplers
You need a sampler to sample a texture in a shader
Texture has a createView() method, these views get bound to bind groups and passes
Has destroy() method
Buffers
Has destroy() method
Bind groups and bind group layouts
Bind groups are the interfaces defined between the CPU and a pipeline
Command encoders
Used to create passes, like a render pass
begin, set, set, set, end, finish, submit to device queue
It should be possible to reuse command buffers
Render bundle encoders [What are these?]
Query sets
Useful for measuring stats that come from the GPU
[Is this needed to detect errors?]
You do not need a canvas to use WebGPU.
You DO need a canvas to display what you're rendering
Get canvas's "webgpu" context
Configure context to point at the device
Adjust it when you resize
detect in the RAF when the presentation size isn't the canvas size
Create a view of the context's current texture, and reference it in the render pass's color attachments
[When does that currentTexture change?]
Textures were never resizable, you simply forgot
Screen-size textures have to be destroyed and recreated
Transfer the data over if you need to