Fixed several major bugs: the WebGPU cache now stores loaded images and text, never GPU resource handles; renamed the renderers' "formulate" method to "configure"; the WebGPU renderer's configure function now returns early after each major await, in case a newer config has superseded it; the render loops are now locally stored closures; renderers now have start and stop functions; fixed bugs in the REGL and WebGPU mirror passes; the WebGPU bloom pass now enforces that texture dimensions are greater than zero; the React component now stores the renderer type in a useRef and returns early from renderer-init awaits to prevent multiple renderers from instantiating.

This commit is contained in:
Rezmason
2025-05-25 03:30:26 -07:00
parent 1da1feb356
commit b6570de106
15 changed files with 405 additions and 351 deletions

View File

@@ -31,7 +31,7 @@ import makeConfig from "./utils/config";
* volumetric?: boolean,
* loops?: boolean,
* skipIntro?: boolean,
* renderer?: "regl" | "three" | string,
* renderer?: "regl" | "webgpu" | string,
* suppressWarnings?: boolean,
* useHalfFloat?: boolean,
* isometric?: boolean,
@@ -109,9 +109,10 @@ export const Matrix = memo((props) => {
const elProps = { style, className };
const domElement = useRef(null);
const [rRenderer, setRenderer] = useState(null);
const rendererType = useRef(null);
const [rSize, setSize] = useState([1, 1]);
const [rConfig, setConfig] = useState(makeConfig({}));
const rendererClasses = {};
const rendererModules = {};
const resizeObserver = new ResizeObserver(entries => {
for (const entry of entries) {
@@ -126,14 +127,6 @@ export const Matrix = memo((props) => {
resizeObserver.observe(domElement.current);
}, [domElement]);
useEffect(() => {
setConfig(makeConfig({
...Object.fromEntries(
Object.entries(rawConfigProps).filter(([_, value]) => value != null),
)
}));
}, [props]);
const supportsWebGPU = () => {
return (
window.GPUQueue != null &&
@@ -142,6 +135,18 @@ export const Matrix = memo((props) => {
);
};
useEffect(() => {
const config = makeConfig({
...Object.fromEntries(
Object.entries(rawConfigProps).filter(([_, value]) => value != null),
)
});
if (config.renderer === "webgpu" && !supportsWebGPU()) {
config.renderer = "regl";
}
setConfig(config);
}, [props]);
const cleanup = () => {
if (rRenderer == null) return;
rRenderer.canvas.remove();
@@ -150,37 +155,40 @@ export const Matrix = memo((props) => {
};
useEffect(() => {
const useWebGPU = supportsWebGPU() && rConfig.renderer === "webgpu";
const isWebGPU = rRenderer?.type === "webgpu";
rendererType.current = rConfig.renderer;
let rendererModule;
if (rConfig.renderer === "webgpu") {
rendererModules.webgpu ??= import("./webgpu/renderer.js");
rendererModule = rendererModules.webgpu;
} else {
rendererModules.regl ??= import("./regl/renderer.js");
rendererModule = rendererModules.regl;
}
const loadRain = async () => {
let renderer;
if (useWebGPU) {
rendererClasses.webgpu ??= (await import("./webgpu/renderer.js")).default;
renderer = new (rendererClasses.webgpu)();
} else {
rendererClasses.regl ??= (await import("./regl/renderer.js")).default;
renderer = new (rendererClasses.regl)();
}
setRenderer(renderer);
(async () => {
const rendererClass = (await rendererModule).default;
if (rendererType.current !== rConfig.renderer) return;
const renderer = new rendererClass();
await renderer.ready;
if (rendererType.current !== rConfig.renderer) {
console.warn("Destroyed a redundant renderer late.");
renderer.destroy();
return;
}
cleanup();
setRenderer(renderer);
const canvas = renderer.canvas;
canvas.style.width = "100%";
canvas.style.height = "100%";
domElement.current.appendChild(canvas);
};
if (rRenderer == null || useWebGPU !== isWebGPU) {
cleanup();
loadRain();
}
})();
return cleanup;
}, [rConfig.renderer]);
useEffect(() => {
if (rRenderer?.destroyed ?? true) return;
rRenderer.formulate(rConfig);
rRenderer.configure(rConfig);
}, [rRenderer, rConfig]);
useEffect(() => {

View File

@@ -37,7 +37,7 @@ document.body.onload = async () => {
renderer.fullscreen = !renderer.fullscreen;
});
document.body.appendChild(renderer.canvas);
await renderer.formulate(config);
await renderer.configure(config);
};
if (isRunningSwiftShader() && !config.suppressWarnings) {

View File

@@ -1,19 +1,21 @@
import { loadText, makePassFBO, makePass } from "./utils.js";
let start;
const numClicks = 5;
const clicks = Array(numClicks).fill([0, 0, -Infinity]).flat();
let aspectRatio = 1;
export default ({ regl, canvas, cache, config, cameraTex, cameraAspectRatio }, inputs) => {
let index = 0;
window.onclick = (e) => {
clicks[index * 3 + 0] = 0 + e.clientX / e.srcElement.clientWidth;
clicks[index * 3 + 1] = 1 - e.clientY / e.srcElement.clientHeight;
clicks[index * 3 + 2] = (Date.now() - start) / 1000;
index = (index + 1) % numClicks;
};
let start;
const numClicks = 5;
const clicks = Array(numClicks).fill().map(_ => ([0, 0, -Infinity]));
let aspectRatio = 1;
let index = 0;
canvas.onmousedown = (e) => {
const rect = e.srcElement.getBoundingClientRect();
clicks[index][0] = 0 + (e.clientX - rect.x) / rect.width;
clicks[index][1] = 1 - (e.clientY - rect.y) / rect.height;
clicks[index][2] = (performance.now() - start) / 1000;
index = (index + 1) % numClicks;
};
export default ({ regl, cache, config, cameraTex, cameraAspectRatio }, inputs) => {
const output = makePassFBO(regl, config.useHalfFloat);
const mirrorPassFrag = loadText(cache, "shaders/glsl/mirrorPass.frag.glsl");
const render = regl({
@@ -23,14 +25,19 @@ export default ({ regl, cache, config, cameraTex, cameraAspectRatio }, inputs) =
tex: inputs.primary,
bloomTex: inputs.bloom,
cameraTex,
clicks: () => clicks,
// REGL bug can misinterpret array uniforms
["clicks[0]"]: () => clicks[0],
["clicks[1]"]: () => clicks[1],
["clicks[2]"]: () => clicks[2],
["clicks[3]"]: () => clicks[3],
["clicks[4]"]: () => clicks[4],
aspectRatio: () => aspectRatio,
cameraAspectRatio,
},
framebuffer: output,
});
start = Date.now();
start = performance.now();
return makePass(
{

View File

@@ -24,7 +24,7 @@ const effects = {
export default class REGLRenderer extends Renderer {
#tick;
#renderFunc;
#regl;
#glMatrix;
@@ -43,26 +43,24 @@ export default class REGLRenderer extends Renderer {
});
}
async formulate(config) {
await super.formulate(config);
const canvas = this.canvas;
const cache = this.cache;
const regl = this.#regl;
const glMatrix = this.#glMatrix;
const dimensions = { width: 1, height: 1 };
async configure(config) {
await super.configure(config);
if (config.useCamera) {
await setupCamera();
}
const canvas = this.canvas;
const cache = this.cache;
const regl = this.#regl;
const glMatrix = this.#glMatrix;
const dimensions = { width: 1, height: 1 };
const cameraTex = regl.texture(cameraCanvas);
// All this takes place in a full screen quad.
const fullScreenQuad = makeFullScreenQuad(regl);
const effectName = config.effect in effects ? config.effect : "palette";
const context = { regl, cache, config, cameraTex, cameraAspectRatio, glMatrix };
const context = { regl, canvas, cache, config, cameraTex, cameraAspectRatio, glMatrix };
const pipeline = makePipeline(context, [makeRain, makeBloomPass, effects[effectName]]);
const screenUniforms = { tex: pipeline[pipeline.length - 1].outputs.primary };
@@ -75,17 +73,15 @@ export default class REGLRenderer extends Renderer {
const targetFrameTimeMilliseconds = 1000 / config.fps;
let last = NaN;
resetREGLTime: {
const reset = regl.frame((o) => {
o.time = 0;
o.tick = 0;
reset.cancel();
});
}
const reset = regl.frame((reglContext) => {
reglContext.tick = 0;
reset.cancel();
});
this.#renderFunc = (reglContext) => {
const tick = regl.frame(({ viewportWidth, viewportHeight }) => {
if (config.once) {
tick.cancel();
this.stop();
}
const now = regl.now() * 1000;
@@ -106,6 +102,7 @@ export default class REGLRenderer extends Renderer {
if (config.useCamera) {
cameraTex(cameraCanvas);
}
const {viewportWidth, viewportHeight} = reglContext;
if (dimensions.width !== viewportWidth || dimensions.height !== viewportHeight) {
dimensions.width = viewportWidth;
dimensions.height = viewportHeight;
@@ -119,20 +116,31 @@ export default class REGLRenderer extends Renderer {
}
drawToScreen();
});
};
const frame = this.#regl.frame(o => {
this.#renderFunc(o);
frame.cancel();
});
}
if (this.#tick != null) {
this.#tick.cancel();
stop() {
super.stop();
this.#renderFunc = null;
}
update(now) {
if (this.#renderFunc != null) {
const frame = this.#regl.frame(o => {
this.#renderFunc(o);
frame.cancel();
})
}
this.#tick = tick;
super.update(now);
}
destroy() {
if (this.destroyed) {
return;
}
this.#tick.cancel(); // stop RAF
if (this.destroyed) return;
this.#regl.destroy(); // releases all GPU resources & event listeners
super.destroy();
}

View File

@@ -11,6 +11,7 @@ export default class Renderer {
#fullscreen = false;
#cache = new Map();
#destroyed = false;
#running = false;
constructor(type, ready) {
this.#type = type;
@@ -18,48 +19,46 @@ export default class Renderer {
this.#ready = Renderer.libraries.then(libraries => {
this.#cache = new Map(libraries.staticAssets);
}).then(ready);
this.#ready.then(() => this.start());
}
get canvas() {
return this.#canvas;
get running() { return this.#running; }
start() {
this.#running = true;
this.update();
}
get cache() {
return this.#cache;
stop() {
this.#running = false;
}
get type () {
return this.#type;
update(now) {
if (!this.#running) return;
requestAnimationFrame(now => this.update(now));
}
get ready () {
return this.#ready;
}
get canvas() { return this.#canvas; }
get size() {
return [this.#width, this.#height];
}
get cache() { return this.#cache; }
get type () { return this.#type; }
get ready () { return this.#ready; }
get size() { return ([this.#width, this.#height]); }
set size([width, height]) {
[width, height] = [Math.ceil(width), Math.ceil(height)];
if (width === this.#width && height === this.#height) {
return;
}
if (width === this.#width && height === this.#height) return;
[this.#canvas.width, this.#canvas.height] = [this.#width, this.#height] = [width, height];
}
get fullscreen() {
return this.#fullscreen;
}
get fullscreen() { return this.#fullscreen; }
set fullscreen(value) {
if (!!value === this.#fullscreen) {
return;
}
if (!document.fullscreenEnabled && !document.webkitFullscreenEnabled) {
return;
}
if (!!value === this.#fullscreen) return;
if (!document.fullscreenEnabled && !document.webkitFullscreenEnabled) return;
this.#fullscreen = value;
if (document.fullscreenElement != null) {
@@ -74,18 +73,17 @@ export default class Renderer {
}
}
async formulate(config) {
async configure(config) {
await this.ready;
if (this.destroyed) {
throw new Error("Cannot formulate a destroyed rain instance.");
throw new Error("Cannot configure a destroyed rain instance.");
}
}
get destroyed() {
return this.#destroyed;
}
get destroyed() { return this.#destroyed; }
destroy() {
this.stop();
this.#destroyed = true;
this.#cache.clear();
}

View File

@@ -26,7 +26,7 @@ const makePyramid = (device, size, pyramidHeight) =>
.map((_, index) =>
makeComputeTarget(
device,
size.map((x) => Math.floor(x * 2 ** -index)),
size.map((x) => Math.max(1, Math.floor(x * 2 ** -index))),
),
);
@@ -111,7 +111,7 @@ export default ({ config, device, cache }) => {
const build = (screenSize, inputs) => {
// Since the bloom is blurry, we downscale everything
scaledScreenSize = screenSize.map((x) => Math.floor(x * bloomSize));
scaledScreenSize = screenSize.map((x) => Math.max(1, Math.floor(x * bloomSize)));
destroyPyramid(hBlurPyramid);
hBlurPyramid = makePyramid(device, scaledScreenSize, pyramidHeight);
@@ -169,8 +169,8 @@ export default ({ config, device, cache }) => {
computePass.setPipeline(blurPipeline);
for (let i = 0; i < pyramidHeight; i++) {
const dispatchSize = [
Math.ceil(Math.floor(scaledScreenSize[0] * 2 ** -i) / 32),
Math.floor(Math.floor(scaledScreenSize[1] * 2 ** -i)),
Math.max(1, Math.ceil(Math.floor(scaledScreenSize[0] * 2 ** -i) / 32)),
Math.max(1, Math.floor(Math.floor(scaledScreenSize[1] * 2 ** -i))),
1,
];
computePass.setBindGroup(0, hBlurBindGroups[i]);

View File

@@ -49,7 +49,7 @@ export default ({ device, cache, canvasFormat, canvasContext }) => {
nearestSampler,
inputs.primary.createView(),
]);
return null;
return {};
};
const run = (encoder, shouldRender) => {

View File

@@ -7,24 +7,7 @@ import {
makePass,
} from "./utils.js";
let start;
const numTouches = 5;
const touches = Array(numTouches)
.fill()
.map((_) => [0, 0, -Infinity, 0]);
let aspectRatio = 1;
let index = 0;
let touchesChanged = true;
window.onclick = (e) => {
touches[index][0] = 0 + e.clientX / e.srcElement.clientWidth;
touches[index][1] = 1 - e.clientY / e.srcElement.clientHeight;
touches[index][2] = (Date.now() - start) / 1000;
index = (index + 1) % numTouches;
touchesChanged = true;
};
export default ({ config, device, cache, cameraTex, cameraAspectRatio, timeBuffer }) => {
export default ({ config, device, canvas, cache, cameraTex, cameraAspectRatio, timeBuffer }) => {
const assets = [loadShader(device, cache, "shaders/wgsl/mirrorPass.wgsl")];
const linearSampler = device.createSampler({
@@ -32,6 +15,24 @@ export default ({ config, device, cache, cameraTex, cameraAspectRatio, timeBuffe
minFilter: "linear",
});
let start;
const numTouches = 5;
const touches = Array(numTouches)
.fill()
.map((_) => [0, 0, -Infinity, 0]);
let aspectRatio = 1;
let index = 0;
let touchesChanged = true;
canvas.onmousedown = (e) => {
const rect = e.srcElement.getBoundingClientRect();
touches[index][0] = 0 + (e.clientX - rect.x) / rect.width;
touches[index][1] = 1 - (e.clientY - rect.y) / rect.height;
touches[index][2] = (performance.now() - start) / 1000;
index = (index + 1) % numTouches;
touchesChanged = true;
};
let computePipeline;
let configBuffer;
let sceneUniforms;
@@ -109,7 +110,7 @@ export default ({ config, device, cache, cameraTex, cameraAspectRatio, timeBuffe
computePass.end();
};
start = Date.now();
start = performance.now();
return makePass("Mirror", loaded, build, run);
};

View File

@@ -27,152 +27,185 @@ const effects = {
export default class REGLRenderer extends Renderer {
#glMatrix;
#canvasContext;
#adapter;
#device;
#renderLoop;
#canvasContext;
#canvasFormat;
#renderFunc;
#renewingDevice;
#configureIndex = 0;
#rebuildingPipeline;
constructor() {
super("webgpu", async () => {
const libraries = await Renderer.libraries;
this.#glMatrix = libraries.glMatrix;
this.#canvasContext = this.canvas.getContext("webgpu");
this.#adapter = await navigator.gpu.requestAdapter();
this.#device = await this.#adapter.requestDevice();
});
}
async formulate(config) {
await super.formulate(config);
const canvas = this.canvas;
const cache = this.cache;
const canvasContext = this.#canvasContext;
const adapter = this.#adapter;
const device = this.#device;
const glMatrix = this.#glMatrix;
async configure(config) {
const index = ++this.#configureIndex;
await super.configure(config);
if (config.useCamera) {
await setupCamera();
}
const canvasFormat = navigator.gpu.getPreferredCanvasFormat();
// console.table(device.limits);
canvasContext.configure({
device,
format: canvasFormat,
alphaMode: "opaque",
usage:
// GPUTextureUsage.STORAGE_BINDING |
GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_DST,
});
const timeUniforms = structs.from(`struct Time { seconds : f32, frames : i32, };`).Time;
const timeBuffer = makeUniformBuffer(device, timeUniforms);
const cameraTex = device.createTexture({
size: cameraSize,
format: "rgba8unorm",
usage:
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
const context = {
config,
cache,
adapter,
device,
canvasContext,
timeBuffer,
canvasFormat,
cameraTex,
cameraAspectRatio,
cameraSize,
glMatrix,
};
const effectName = config.effect in effects ? config.effect : "palette";
const pipeline = await makePipeline(context, [
makeRain,
makeBloomPass,
effects[effectName],
makeEndPass,
]);
const targetFrameTimeMilliseconds = 1000 / config.fps;
let frames = 0;
let start = NaN;
let last = NaN;
let outputs;
const renderLoop = (now) => {
if (isNaN(start)) {
start = now;
}
if (isNaN(last)) {
last = start;
}
const shouldRender =
config.fps >= 60 || now - last >= targetFrameTimeMilliseconds || config.once;
if (shouldRender) {
while (now - targetFrameTimeMilliseconds > last) {
last += targetFrameTimeMilliseconds;
}
}
const devicePixelRatio = window.devicePixelRatio ?? 1;
const size = this.size;
const [width, height] = size;
if (outputs == null || canvas.width !== width || canvas.height !== height) {
[canvas.width, canvas.height] = size;
outputs = pipeline.build(size);
}
if (config.useCamera) {
device.queue.copyExternalImageToTexture(
{ source: cameraCanvas },
{ texture: cameraTex },
cameraSize,
);
}
device.queue.writeBuffer(
timeBuffer,
0,
timeUniforms.toBuffer({ seconds: (now - start) / 1000, frames }),
);
frames++;
const encoder = device.createCommandEncoder();
pipeline.run(encoder, shouldRender);
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
// encoder.copyTextureToTexture({ texture: outputs?.primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
device.queue.submit([encoder.finish()]);
if (!config.once) {
requestAnimationFrame(renderLoop);
}
};
if (this.#renderLoop != null) {
cancelAnimationFrame(this.#renderLoop);
if (this.#rebuildingPipeline != null) {
await this.#rebuildingPipeline;
}
renderLoop(performance.now());
this.#renderLoop = renderLoop;
const oldDevice = this.#device;
if (this.#renewingDevice == null) {
this.#renewingDevice = (async () => {
this.#canvasContext = this.canvas.getContext("webgpu");
this.#canvasFormat = navigator.gpu.getPreferredCanvasFormat();
const adapter = await navigator.gpu.requestAdapter();
this.#device = await adapter.requestDevice();
})();
}
await this.#renewingDevice;
this.#renewingDevice = null;
if (this.#configureIndex !== index || this.destroyed) {
return;
}
this.#rebuildingPipeline = (async () => {
const glMatrix = this.#glMatrix;
const canvas = this.canvas;
const cache = this.cache;
const device = this.#device;
const canvasContext = this.#canvasContext;
const canvasFormat = this.#canvasFormat;
const dimensions = { width: 1, height: 1 };
const timeUniforms = structs.from(`struct Time { seconds : f32, frames : i32, };`).Time;
const timeBuffer = makeUniformBuffer(device, timeUniforms);
const cameraTex = device.createTexture({
size: cameraSize,
format: "rgba8unorm",
usage:
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
const context = {
glMatrix,
config,
cache,
device,
canvas,
canvasContext,
canvasFormat,
timeBuffer,
cameraTex,
cameraAspectRatio,
cameraSize,
};
const effectName = config.effect in effects ? config.effect : "palette";
const pipeline = await makePipeline(context, [
makeRain,
makeBloomPass,
effects[effectName],
makeEndPass,
]);
this.#canvasContext.configure({
device: this.#device,
format: this.#canvasFormat,
alphaMode: "opaque",
usage:
// GPUTextureUsage.STORAGE_BINDING |
GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_DST,
});
dimensions.width = canvas.width;
dimensions.height = canvas.height;
const targetFrameTimeMilliseconds = 1000 / config.fps;
let frames = 0;
let start = NaN;
let last = NaN;
let outputs;
this.#renderFunc = (now) => {
if (config.once) {
this.stop();
}
if (isNaN(start)) {
start = now;
}
if (isNaN(last)) {
last = start;
}
const shouldRender =
config.fps >= 60 || now - last >= targetFrameTimeMilliseconds || config.once;
if (shouldRender) {
while (now - targetFrameTimeMilliseconds > last) {
last += targetFrameTimeMilliseconds;
}
}
const size = this.size;
const [width, height] = size;
if (outputs == null || dimensions.width !== width || dimensions.height !== height) {
[dimensions.width, dimensions.height] = size;
outputs = pipeline.build(size);
}
if (config.useCamera) {
device.queue.copyExternalImageToTexture(
{ source: cameraCanvas },
{ texture: cameraTex },
cameraSize,
);
}
device.queue.writeBuffer(
timeBuffer,
0,
timeUniforms.toBuffer({ seconds: (now - start) / 1000, frames }),
);
frames++;
const encoder = device.createCommandEncoder();
pipeline.run(encoder, shouldRender);
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
// encoder.copyTextureToTexture({ texture: outputs?.primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
device.queue.submit([encoder.finish()]);
};
})();
await this.#rebuildingPipeline;
this.#renderFunc(performance.now());
if (oldDevice != null) {
oldDevice.destroy();
}
}
stop() {
super.stop();
this.#renderFunc = null;
}
update(now) {
if (this.#renderFunc != null) {
this.#renderFunc(now);
}
super.update(now);
}
destroy() {
if (this.destroyed) {
return;
if (this.destroyed) return;
if (this.#device != null) {
this.#device.destroy(); // This also destroys any objects created with the device
this.#device = null;
}
cancelAnimationFrame(this.#renderLoop); // stop RAF
this.#device.destroy(); // This also destroys any objects created with the device
super.destroy();
}
}

View File

@@ -1,20 +1,23 @@
const loadTexture = async (device, cache, url) => {
const key = url;
if (cache.has(key)) {
return cache.get(key);
}
let texture;
const format = "rgba8unorm";
const usage =
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT;
if (url == null) {
texture = device.createTexture({
return device.createTexture({
size: [1, 1, 1],
format: "rgba8unorm",
usage:
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
format,
usage,
});
}
let source;
const key = url;
if (cache.has(key)) {
source = cache.get(key);
} else {
let imageURL;
if (typeof cache.get(`url::${url}`) === "function") {
@@ -25,23 +28,17 @@ const loadTexture = async (device, cache, url) => {
const response = await fetch(imageURL);
const data = await response.blob();
const source = await createImageBitmap(data);
const size = [source.width, source.height, 1];
texture = device.createTexture({
size,
format: "rgba8unorm",
usage:
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
device.queue.copyExternalImageToTexture({ source, flipY: true }, { texture }, size);
source = await createImageBitmap(data);
cache.set(key, source);
}
cache.set(key, texture);
const size = [source.width, source.height, 1];
const texture = device.createTexture({
size,
format,
usage,
});
device.queue.copyExternalImageToTexture({ source, flipY: true }, { texture }, size);
return texture;
};
@@ -71,14 +68,16 @@ const makeComputeTarget = (device, size, mipLevelCount = 1) =>
const loadShader = async (device, cache, url) => {
const key = url;
if (cache.has(key)) {
return cache.get(key);
}
let code;
if (typeof cache.get(`raw::${url}`) === "function") {
code = (await cache.get(`raw::${url}`)()).default;
if (cache.has(key)) {
code = cache.get(key);
} else {
code = await (await fetch(url)).text();
if (typeof cache.get(`raw::${url}`) === "function") {
code = (await cache.get(`raw::${url}`)()).default;
} else {
code = await (await fetch(url)).text();
}
cache.set(key, code);
}
return {
code,