mirror of
https://github.com/Rezmason/matrix.git
synced 2026-04-14 12:29:30 -07:00
A pipeline is now an object with a build method and a run method. The build method returns the last step's outputs object.
This commit is contained in:
@@ -63,9 +63,7 @@ export default async (canvas, config) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const effectName = config.effect in effects ? config.effect : "plain";
|
const effectName = config.effect in effects ? config.effect : "plain";
|
||||||
const pipeline = makePipeline(context, [makeRain, makeBloomPass, effects[effectName], makeEndPass]);
|
const pipeline = await makePipeline(context, [makeRain, makeBloomPass, effects[effectName], makeEndPass]);
|
||||||
|
|
||||||
await Promise.all(pipeline.map((step) => step.loaded));
|
|
||||||
|
|
||||||
let frames = 0;
|
let frames = 0;
|
||||||
let start = NaN;
|
let start = NaN;
|
||||||
@@ -78,16 +76,16 @@ export default async (canvas, config) => {
|
|||||||
if (canvasSize[0] !== canvasConfig.size[0] || canvasSize[1] !== canvasConfig.size[1]) {
|
if (canvasSize[0] !== canvasConfig.size[0] || canvasSize[1] !== canvasConfig.size[1]) {
|
||||||
canvasConfig.size = canvasSize;
|
canvasConfig.size = canvasSize;
|
||||||
canvasContext.configure(canvasConfig);
|
canvasContext.configure(canvasConfig);
|
||||||
pipeline.reduce((outputs, step) => step.build(canvasSize, outputs), null);
|
pipeline.build(canvasSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
device.queue.writeBuffer(timeBuffer, 0, timeUniforms.toBuffer({ seconds: (now - start) / 1000, frames }));
|
device.queue.writeBuffer(timeBuffer, 0, timeUniforms.toBuffer({ seconds: (now - start) / 1000, frames }));
|
||||||
frames++;
|
frames++;
|
||||||
|
|
||||||
const encoder = device.createCommandEncoder();
|
const encoder = device.createCommandEncoder();
|
||||||
pipeline.forEach((step) => step.run(encoder));
|
pipeline.run(encoder);
|
||||||
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
|
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
|
||||||
// encoder.copyTextureToTexture({ texture: pipeline[pipeline.length - 1].getOutputs().primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
|
// encoder.copyTextureToTexture({ texture: output.primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
|
||||||
device.queue.submit([encoder.finish()]);
|
device.queue.submit([encoder.finish()]);
|
||||||
requestAnimationFrame(renderLoop);
|
requestAnimationFrame(renderLoop);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -116,6 +116,14 @@ const makePass = (loaded, build, run) => ({
|
|||||||
run: run ?? (() => {}),
|
run: run ?? (() => {}),
|
||||||
});
|
});
|
||||||
|
|
||||||
// Instantiates each non-null step factory with the shared context and
// returns the resulting array of pipeline steps, preserving order.
const makePipeline = (context, steps) => {
	const instantiated = [];
	for (const factory of steps) {
		if (factory != null) {
			instantiated.push(factory(context));
		}
	}
	return instantiated;
};
|
// Builds a render pipeline from an ordered list of step factories.
// Null/undefined entries (disabled passes) are skipped; each remaining
// factory is invoked with the shared context to produce a step, and the
// returned promise resolves once every step's async resources are loaded.
//
// The resolved pipeline object exposes:
// - steps: the instantiated step objects, in order.
// - build(canvasSize): rebuilds each step in order, threading each step's
//   outputs into the next (first step receives null); returns the last
//   step's outputs object.
// - run(encoder): records every step's work into the given command encoder.
const makePipeline = async (context, steps) => {
	// Don't reassign the parameter; bind the instantiated steps to a new const.
	const builtSteps = steps.filter((f) => f != null).map((f) => f(context));

	// Wait until every step's async resources (shaders, textures) are ready.
	await Promise.all(builtSteps.map((step) => step.loaded));

	return {
		steps: builtSteps,
		build: (canvasSize) => builtSteps.reduce((outputs, step) => step.build(canvasSize, outputs), null),
		run: (encoder) => builtSteps.forEach((step) => step.run(encoder)),
	};
};
|
||||||
|
|
||||||
export { getCanvasSize, makeRenderTarget, makeComputeTarget, make1DTexture, loadTexture, loadShader, makeUniformBuffer, makePass, makePipeline, makeBindGroup };
|
// Public surface of this WebGPU helper module: canvas sizing, render/compute
// target and texture creation, shader/uniform utilities, and the pass/pipeline
// construction helpers used by the effect implementations.
export { getCanvasSize, makeRenderTarget, makeComputeTarget, make1DTexture, loadTexture, loadShader, makeUniformBuffer, makePass, makePipeline, makeBindGroup };
|
||||||
|
|||||||
Reference in New Issue
Block a user