Massive overhaul:

- The renderers are now classes that extend a shared Renderer base class.
- Replaced webpack and rollup with Vite.
- Converted bundle-contents into "core" and "full" bundle profiles.
- Renamed "inclusions" to "staticAssets", which are either "url" entries (base64-encoded images) or "raw" entries (text strings).
- Renamed the Matrix component module to use the .jsx extension.
- Built out a test scaffold at tools/test/index.html to manually test the various deploy options.

This commit is contained in:
Rezmason
2025-05-23 12:49:10 -07:00
parent 658f07c6ab
commit 3b837c6f06
29 changed files with 2338 additions and 6918 deletions
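
For orientation, here is a minimal, hypothetical sketch of what the shared Renderer base class might look like, inferred from how the webgpu subclass below uses it (the constructor signature, Renderer.libraries, this.canvas, this.cache, this.size, formulate, destroy, destroyed). It is an assumption for illustration, not the actual js/renderer.js added in this commit.

// Hypothetical sketch of the Renderer base class, inferred from subclass usage below.
// The real js/renderer.js in this commit may differ; setup and event wiring are omitted.
export default class Renderer {
	static libraries; // promise resolving to shared libraries (e.g. glMatrix), assigned elsewhere
	canvas; // attached by whatever setup code creates the renderer
	cache; // Map of staticAssets, keyed "url::<path>" and "raw::<path>"
	destroyed = false;
	#init;

	constructor(type, init) {
		this.type = type;
		this.#init = init; // backend-specific async setup, run once a canvas is attached
	}

	get size() {
		// Canvas size in device pixels, scaled by the configured resolution.
		const scale = (window.devicePixelRatio ?? 1) * (this.resolution ?? 1);
		return [Math.ceil(this.canvas.clientWidth * scale), Math.ceil(this.canvas.clientHeight * scale)];
	}

	async formulate(config) {
		this.resolution = config.resolution;
	}

	destroy() {
		this.cache?.clear();
		this.destroyed = true;
	}
}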

View File

@@ -1,217 +0,0 @@
import { structs } from "../../lib/gpu-buffer.js";
import { makeUniformBuffer, makePipeline } from "./utils.js";
import fetchLibraries from "../fetchLibraries.js";
import makeRain from "./rainPass.js";
import makeBloomPass from "./bloomPass.js";
import makePalettePass from "./palettePass.js";
import makeStripePass from "./stripePass.js";
import makeImagePass from "./imagePass.js";
import makeMirrorPass from "./mirrorPass.js";
import makeEndPass from "./endPass.js";
import { setupCamera, cameraCanvas, cameraAspectRatio, cameraSize } from "../utils/camera.js";
const effects = {
none: null,
plain: makePalettePass,
palette: makePalettePass,
customStripes: makeStripePass,
stripes: makeStripePass,
pride: makeStripePass,
transPride: makeStripePass,
trans: makeStripePass,
image: makeImagePass,
mirror: makeMirrorPass,
};
let glMatrix, inclusions;
export const init = async (canvas) => {
const libraries = await fetchLibraries();
glMatrix = libraries.glMatrix;
inclusions = libraries.inclusions;
const resize = () => {
const devicePixelRatio = window.devicePixelRatio ?? 1;
canvas.width = Math.ceil(canvas.clientWidth * devicePixelRatio * rain.resolution);
canvas.height = Math.ceil(canvas.clientHeight * devicePixelRatio * rain.resolution);
};
const doubleClick = () => {
if (!document.fullscreenEnabled && !document.webkitFullscreenEnabled) {
return;
}
if (document.fullscreenElement != null) {
document.exitFullscreen();
return;
}
if (canvas.webkitRequestFullscreen != null) {
canvas.webkitRequestFullscreen();
} else {
canvas.requestFullscreen();
}
};
const canvasContext = canvas.getContext("webgpu");
const adapter = await navigator.gpu.requestAdapter();
const device = await adapter.requestDevice();
const cache = new Map(inclusions);
const rain = {
canvas,
resize,
doubleClick,
cache,
canvasContext,
adapter,
device,
resolution: 1,
};
window.addEventListener("dblclick", doubleClick);
window.addEventListener("resize", resize);
resize();
return rain;
};
export const formulate = async (rain, config) => {
if (rain.destroyed) {
throw new Error("Cannot formulate a destroyed rain instance.");
}
const { resize, canvas, cache, canvasContext, adapter, device } = rain;
rain.resolution = config.resolution;
resize();
if (config.useCamera) {
await setupCamera();
}
const canvasFormat = navigator.gpu.getPreferredCanvasFormat();
// console.table(device.limits);
canvasContext.configure({
device,
format: canvasFormat,
alphaMode: "opaque",
usage:
// GPUTextureUsage.STORAGE_BINDING |
GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_DST,
});
const timeUniforms = structs.from(`struct Time { seconds : f32, frames : i32, };`).Time;
const timeBuffer = makeUniformBuffer(device, timeUniforms);
const cameraTex = device.createTexture({
size: cameraSize,
format: "rgba8unorm",
usage:
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
const context = {
config,
cache,
adapter,
device,
canvasContext,
timeBuffer,
canvasFormat,
cameraTex,
cameraAspectRatio,
cameraSize,
glMatrix,
};
const effectName = config.effect in effects ? config.effect : "palette";
const pipeline = await makePipeline(context, [
makeRain,
makeBloomPass,
effects[effectName],
makeEndPass,
]);
const targetFrameTimeMilliseconds = 1000 / config.fps;
let frames = 0;
let start = NaN;
let last = NaN;
let outputs;
const renderLoop = (now) => {
if (isNaN(start)) {
start = now;
}
if (isNaN(last)) {
last = start;
}
const shouldRender =
config.fps >= 60 || now - last >= targetFrameTimeMilliseconds || config.once;
if (shouldRender) {
while (now - targetFrameTimeMilliseconds > last) {
last += targetFrameTimeMilliseconds;
}
}
const devicePixelRatio = window.devicePixelRatio ?? 1;
const canvasWidth = Math.ceil(canvas.clientWidth * devicePixelRatio * config.resolution);
const canvasHeight = Math.ceil(canvas.clientHeight * devicePixelRatio * config.resolution);
const canvasSize = [canvasWidth, canvasHeight];
if (outputs == null || canvas.width !== canvasWidth || canvas.height !== canvasHeight) {
canvas.width = canvasWidth;
canvas.height = canvasHeight;
outputs = pipeline.build(canvasSize);
}
if (config.useCamera) {
device.queue.copyExternalImageToTexture(
{ source: cameraCanvas },
{ texture: cameraTex },
cameraSize,
);
}
device.queue.writeBuffer(
timeBuffer,
0,
timeUniforms.toBuffer({ seconds: (now - start) / 1000, frames }),
);
frames++;
const encoder = device.createCommandEncoder();
pipeline.run(encoder, shouldRender);
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
// encoder.copyTextureToTexture({ texture: outputs?.primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
device.queue.submit([encoder.finish()]);
if (!config.once) {
requestAnimationFrame(renderLoop);
}
};
if (rain.renderLoop != null) {
cancelAnimationFrame(rain.renderLoop);
}
renderLoop(performance.now());
rain.renderLoop = renderLoop;
};
export const destroy = (rain) => {
if (rain.destroyed) {
return;
}
const { device, resize, doubleClick, cache, canvas, renderLoop } = rain;
window.removeEventListener("resize", resize);
window.removeEventListener("dblclick", doubleClick);
cache.clear();
cancelAnimationFrame(renderLoop); // stop RAF
device.destroy(); // This also destroys any objects created with the device
rain.destroyed = true;
};
export const type = "webgpu";

js/webgpu/renderer.js · new file · 178 lines added
View File

@@ -0,0 +1,178 @@
import Renderer from "../renderer.js";
import { structs } from "../../lib/gpu-buffer.js";
import { makeUniformBuffer, makePipeline } from "./utils.js";
import makeRain from "./rainPass.js";
import makeBloomPass from "./bloomPass.js";
import makePalettePass from "./palettePass.js";
import makeStripePass from "./stripePass.js";
import makeImagePass from "./imagePass.js";
import makeMirrorPass from "./mirrorPass.js";
import makeEndPass from "./endPass.js";
import { setupCamera, cameraCanvas, cameraAspectRatio, cameraSize } from "../utils/camera.js";
const effects = {
none: null,
plain: makePalettePass,
palette: makePalettePass,
customStripes: makeStripePass,
stripes: makeStripePass,
pride: makeStripePass,
transPride: makeStripePass,
trans: makeStripePass,
image: makeImagePass,
mirror: makeMirrorPass,
};
export default class WebGPURenderer extends Renderer {
#glMatrix;
#canvasContext;
#adapter;
#device;
#renderLoop; // most recent requestAnimationFrame id for the active render loop
constructor() {
super("webgpu", async () => {
const libraries = await Renderer.libraries;
this.#glMatrix = libraries.glMatrix;
this.#canvasContext = this.canvas.getContext("webgpu");
this.#adapter = await navigator.gpu.requestAdapter();
this.#device = await this.#adapter.requestDevice();
});
}
async formulate(config) {
await super.formulate(config);
const canvas = this.canvas;
const cache = this.cache;
const canvasContext = this.#canvasContext;
const adapter = this.#adapter;
const device = this.#device;
const glMatrix = this.#glMatrix;
if (config.useCamera) {
await setupCamera();
}
const canvasFormat = navigator.gpu.getPreferredCanvasFormat();
// console.table(device.limits);
canvasContext.configure({
device,
format: canvasFormat,
alphaMode: "opaque",
usage:
// GPUTextureUsage.STORAGE_BINDING |
GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_DST,
});
const timeUniforms = structs.from(`struct Time { seconds : f32, frames : i32, };`).Time;
const timeBuffer = makeUniformBuffer(device, timeUniforms);
const cameraTex = device.createTexture({
size: cameraSize,
format: "rgba8unorm",
usage:
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
const context = {
config,
cache,
adapter,
device,
canvasContext,
timeBuffer,
canvasFormat,
cameraTex,
cameraAspectRatio,
cameraSize,
glMatrix,
};
const effectName = config.effect in effects ? config.effect : "palette";
const pipeline = await makePipeline(context, [
makeRain,
makeBloomPass,
effects[effectName],
makeEndPass,
]);
const targetFrameTimeMilliseconds = 1000 / config.fps;
let frames = 0;
let start = NaN;
let last = NaN;
let outputs;
const renderLoop = (now) => {
if (isNaN(start)) {
start = now;
}
if (isNaN(last)) {
last = start;
}
const shouldRender =
config.fps >= 60 || now - last >= targetFrameTimeMilliseconds || config.once;
if (shouldRender) {
while (now - targetFrameTimeMilliseconds > last) {
last += targetFrameTimeMilliseconds;
}
}
const size = this.size;
const [width, height] = size;
if (outputs == null || canvas.width !== width || canvas.height !== height) {
[canvas.width, canvas.height] = size;
outputs = pipeline.build(size);
}
if (config.useCamera) {
device.queue.copyExternalImageToTexture(
{ source: cameraCanvas },
{ texture: cameraTex },
cameraSize,
);
}
device.queue.writeBuffer(
timeBuffer,
0,
timeUniforms.toBuffer({ seconds: (now - start) / 1000, frames }),
);
frames++;
const encoder = device.createCommandEncoder();
pipeline.run(encoder, shouldRender);
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
// encoder.copyTextureToTexture({ texture: outputs?.primary }, { texture: canvasContext.getCurrentTexture() }, size);
device.queue.submit([encoder.finish()]);
if (!config.once) {
// Store the request id so a later formulate() or destroy() can cancel the loop.
this.#renderLoop = requestAnimationFrame(renderLoop);
}
};
if (this.#renderLoop != null) {
cancelAnimationFrame(this.#renderLoop);
}
renderLoop(performance.now());
}
destroy() {
if (this.destroyed) {
return;
}
cancelAnimationFrame(this.#renderLoop); // stop RAF
this.#device.destroy(); // This also destroys any objects created with the device
super.destroy();
}
}
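
The render loop above throttles to config.fps by advancing `last` in whole frame-time steps. A standalone sketch of that same pacing pattern, for reference only:

// Standalone illustration of the fps-throttle pattern used in the render loop above.
const fps = 30;
const frameTime = 1000 / fps;
let last = NaN;
const tick = (now) => {
	if (isNaN(last)) {
		last = now;
	}
	// Render when a full frame interval has elapsed; at fps >= 60, render every tick.
	const shouldRender = fps >= 60 || now - last >= frameTime;
	if (shouldRender) {
		// Catch `last` up in whole frame steps so pacing stays aligned to the target rate.
		while (now - frameTime > last) {
			last += frameTime;
		}
		// ...draw one frame here...
	}
	requestAnimationFrame(tick);
};
requestAnimationFrame(tick);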

View File

@@ -17,8 +17,8 @@ const loadTexture = async (device, cache, url) => {
});
} else {
let imageURL;
-if (typeof cache.get(`import::${url}`) === "function") {
-imageURL = (await cache.get(`import::${url}`)()).default;
+if (typeof cache.get(`url::${url}`) === "function") {
+imageURL = (await cache.get(`url::${url}`)()).default;
} else {
imageURL = url;
}
@@ -74,14 +74,12 @@ const loadShader = async (device, cache, url) => {
if (cache.has(key)) {
return cache.get(key);
}
-let textURL;
-if (typeof cache.get(`import::${url}`) === "function") {
-textURL = (await cache.get(`import::${url}`)()).default;
+let code;
+if (typeof cache.get(`raw::${url}`) === "function") {
+code = (await cache.get(`raw::${url}`)()).default;
} else {
-textURL = url;
+code = await (await fetch(url)).text();
}
-const response = await fetch(textURL);
-const code = await response.text();
return {
code,
module: device.createShaderModule({ code }),
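
The `url::` and `raw::` lookups above assume the staticAssets cache maps prefixed keys to lazy loaders whose resolved module's `default` is either an asset URL (often a base64 data URL for small images) or raw file text. One plausible way to build that map with Vite's import.meta.glob — an assumption for illustration, not necessarily how this commit populates it:

// Hypothetical sketch of populating the staticAssets cache; the glob paths and
// the use of import.meta.glob are assumptions, not taken from this commit.
const urlAssets = import.meta.glob("../assets/**/*.png", { query: "?url" });
const rawAssets = import.meta.glob("./**/*.wgsl", { query: "?raw" });
const staticAssets = [
	...Object.entries(urlAssets).map(([path, load]) => [`url::${path}`, load]),
	...Object.entries(rawAssets).map(([path, load]) => [`raw::${path}`, load]),
];
// Each entry is an async function; awaiting it yields a module whose `default`
// is either a URL string (possibly base64-inlined) or the raw file text.
const cache = new Map(staticAssets);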