Adding an FPS argument. The renderers now determine whether the current frame should be rendered, and the passes use that decision to decide whether to render. The rain pass, however, will still update the simulation at full speed.

This commit is contained in:
Rezmason
2022-10-03 23:45:56 -07:00
parent fb3fb89d95
commit b86b97fde9
19 changed files with 123 additions and 34 deletions

View File

@@ -135,7 +135,7 @@ Now you know link fu. Here's a list of customization options:
- `paletteHSL`, `stripeHSL`, `backgroundHSL`, `cursorHSL`, and `glintHSL` — the same as the above, except they use *H,S,L* (hue, saturation, lightness) instead of *R,G,B*.
- `url` - if you set the effect to "image", this is how you specify which image to load. Not every URL will work; I suggest grabbing them from Wikipedia: [https://rezmason.github.io/matrix/?effect=image&url=https://upload.wikimedia.org/wikipedia/commons/f/f5/EagleRock.jpg](https://rezmason.github.io/matrix/?effect=image&url=https://upload.wikimedia.org/wikipedia/commons/f/f5/EagleRock.jpg)
- `loops` - (WIP) if set to "true", this causes the effect to loop, so that it can be converted into a looping video.
- `fps` — the framerate of the effect. Can be any number between 0 and 60. Default is 60.
## Troubleshooting

View File

@@ -75,6 +75,7 @@ const defaults = {
glintColor: { space: "rgb", values: [1, 1, 1] }, // The color of the glint
volumetric: false, // A mode where the raindrops appear in perspective
animationSpeed: 1, // The global rate that all animations progress
fps: 60, // The target frame rate (frames per second) of the effect
forwardSpeed: 0.25, // The speed volumetric rain approaches the eye
bloomStrength: 0.7, // The intensity of the bloom
bloomSize: 0.4, // The amount the bloom calculation is scaled
@@ -117,6 +118,7 @@ const defaults = {
useHoloplay: false,
loops: false,
skipIntro: true,
testFix: null,
};
const versions = {
@@ -457,6 +459,7 @@ const paramMapping = {
volumetric: { key: "volumetric", parser: (s) => s.toLowerCase().includes("true") },
loops: { key: "loops", parser: (s) => s.toLowerCase().includes("true") },
fps: { key: "fps", parser: (s) => nullNaN(range(parseFloat(s), 0, 60)) },
skipIntro: { key: "skipIntro", parser: (s) => s.toLowerCase().includes("true") },
renderer: { key: "renderer", parser: (s) => s },
once: { key: "once", parser: (s) => s.toLowerCase().includes("true") },

View File

@@ -98,7 +98,11 @@ export default ({ regl, config }, inputs) => {
resizePyramid(vBlurPyramid, w, h, bloomSize);
output.resize(w, h);
},
() => {
(shouldRender) => {
if (!shouldRender) {
return;
}
for (let i = 0; i < pyramidHeight; i++) {
const highPassFBO = highPassPyramid[i];
const hBlurFBO = hBlurPyramid[i];

View File

@@ -26,6 +26,10 @@ export default ({ regl, config }, inputs) => {
},
Promise.all([background.loaded, imagePassFrag.loaded]),
(w, h) => output.resize(w, h),
() => render({ frag: imagePassFrag.text() })
(shouldRender) => {
if (shouldRender) {
render({ frag: imagePassFrag.text() });
}
}
);
};

View File

@@ -88,10 +88,29 @@ export default async (canvas, config) => {
const screenUniforms = { tex: pipeline[pipeline.length - 1].outputs.primary };
const drawToScreen = regl({ uniforms: screenUniforms });
await Promise.all(pipeline.map((step) => step.ready));
const targetFrameTimeMilliseconds = 1000 / config.fps;
let last = NaN;
const tick = regl.frame(({ viewportWidth, viewportHeight }) => {
if (config.once) {
tick.cancel();
}
const now = regl.now() * 1000;
if (isNaN(last)) {
last = now;
}
const shouldRender = config.fps >= 60 || now - last >= targetFrameTimeMilliseconds || config.once == true;
if (shouldRender) {
while (now - targetFrameTimeMilliseconds > last) {
last += targetFrameTimeMilliseconds;
}
}
if (config.useCamera) {
cameraTex(cameraCanvas);
}
@@ -104,7 +123,7 @@ export default async (canvas, config) => {
}
fullScreenQuad(() => {
for (const step of pipeline) {
step.execute();
step.execute(shouldRender);
}
drawToScreen();
});

View File

@@ -41,6 +41,10 @@ export default ({ regl, config, cameraTex, cameraAspectRatio }, inputs) => {
output.resize(w, h);
aspectRatio = w / h;
},
() => render({ frag: mirrorPassFrag.text() })
(shouldRender) => {
if (shouldRender) {
render({ frag: mirrorPassFrag.text() });
}
}
);
};

View File

@@ -83,6 +83,10 @@ export default ({ regl, config }, inputs) => {
},
palettePassFrag.loaded,
(w, h) => output.resize(w, h),
() => render({ frag: palettePassFrag.text() })
(shouldRender) => {
if (shouldRender) {
render({ frag: palettePassFrag.text() });
}
}
);
};

View File

@@ -25,6 +25,10 @@ export default ({ regl, config, lkg }, inputs) => {
},
Promise.all([quiltPassFrag.loaded]),
(w, h) => output.resize(w, h),
() => render({ frag: quiltPassFrag.text() })
(shouldRender) => {
if (shouldRender) {
render({ frag: quiltPassFrag.text() });
}
}
);
};

View File

@@ -292,19 +292,22 @@ export default ({ regl, config, lkg }) => {
}
[screenSize[0], screenSize[1]] = aspectRatio > 1 ? [1, aspectRatio] : [1 / aspectRatio, 1];
},
() => {
(shouldRender) => {
intro({ frag: rainPassIntro.text() });
raindrop({ frag: rainPassRaindrop.text() });
symbol({ frag: rainPassSymbol.text() });
effect({ frag: rainPassEffect.text() });
regl.clear({
depth: 1,
color: [0, 0, 0, 1],
framebuffer: output,
});
for (const vantagePoint of vantagePoints) {
render({ ...vantagePoint, transform, screenSize, vert: rainPassVert.text(), frag: rainPassFrag.text() });
if (shouldRender) {
regl.clear({
depth: 1,
color: [0, 0, 0, 1],
framebuffer: output,
});
for (const vantagePoint of vantagePoints) {
render({ ...vantagePoint, transform, screenSize, vert: rainPassVert.text(), frag: rainPassFrag.text() });
}
}
}
);

View File

@@ -63,6 +63,10 @@ export default ({ regl, config }, inputs) => {
},
stripePassFrag.loaded,
(w, h) => output.resize(w, h),
() => render({ frag: stripePassFrag.text() })
(shouldRender) => {
if (shouldRender) {
render({ frag: stripePassFrag.text() });
}
}
);
};

View File

@@ -135,7 +135,11 @@ export default ({ config, device }) => {
};
};
const run = (encoder) => {
const run = (encoder, shouldRender) => {
if (!shouldRender) {
return;
}
const computePass = encoder.beginComputePass();
computePass.setPipeline(blurPipeline);

View File

@@ -49,7 +49,11 @@ export default ({ device, canvasFormat, canvasContext }) => {
return null;
};
const run = (encoder) => {
const run = (encoder, shouldRender) => {
if (!shouldRender) {
return;
}
renderPassConfig.colorAttachments[0].view = canvasContext.getCurrentTexture().createView();
const renderPass = encoder.beginRenderPass(renderPassConfig);
renderPass.setPipeline(renderPipeline);

View File

@@ -53,7 +53,11 @@ export default ({ config, device }) => {
return { primary: output };
};
const run = (encoder) => {
const run = (encoder, shouldRender) => {
if (!shouldRender) {
return;
}
const computePass = encoder.beginComputePass();
computePass.setPipeline(computePipeline);
computePass.setBindGroup(0, computeBindGroup);

View File

@@ -92,8 +92,10 @@ export default async (canvas, config) => {
const effectName = config.effect in effects ? config.effect : "palette";
const pipeline = await makePipeline(context, [makeRain, makeBloomPass, effects[effectName], makeEndPass]);
const targetFrameTimeMilliseconds = 1000 / config.fps;
let frames = 0;
let start = NaN;
let last = NaN;
let outputs;
const renderLoop = (now) => {
@@ -101,6 +103,17 @@ export default async (canvas, config) => {
start = now;
}
if (isNaN(last)) {
last = start;
}
const shouldRender = config.fps >= 60 || now - last >= targetFrameTimeMilliseconds || config.once;
if (shouldRender) {
while (now - targetFrameTimeMilliseconds > last) {
last += targetFrameTimeMilliseconds;
}
}
const devicePixelRatio = window.devicePixelRatio ?? 1;
const canvasWidth = canvas.clientWidth * devicePixelRatio;
const canvasHeight = canvas.clientHeight * devicePixelRatio;
@@ -119,10 +132,11 @@ export default async (canvas, config) => {
frames++;
const encoder = device.createCommandEncoder();
pipeline.run(encoder);
pipeline.run(encoder, shouldRender);
// Eventually, when WebGPU allows it, we'll remove the endPass and just copy from our pipeline's output to the canvas texture.
// encoder.copyTextureToTexture({ texture: outputs?.primary }, { texture: canvasContext.getCurrentTexture() }, canvasSize);
device.queue.submit([encoder.finish()]);
if (!config.once) {
requestAnimationFrame(renderLoop);
}

View File

@@ -82,7 +82,11 @@ export default ({ config, device, cameraTex, cameraAspectRatio, timeBuffer }) =>
return { primary: output };
};
const run = (encoder) => {
const run = (encoder, shouldRender) => {
if (!shouldRender) {
return;
}
if (touchesChanged) {
touchesChanged = false;
device.queue.writeBuffer(touchBuffer, 0, touchUniforms.toBuffer({ touches }));

View File

@@ -123,7 +123,11 @@ export default ({ config, device, timeBuffer }) => {
return { primary: output };
};
const run = (encoder) => {
const run = (encoder, shouldRender) => {
if (!shouldRender) {
return;
}
const computePass = encoder.beginComputePass();
computePass.setPipeline(computePipeline);
computePass.setBindGroup(0, computeBindGroup);

View File

@@ -207,7 +207,7 @@ export default ({ config, device, timeBuffer }) => {
};
};
const run = (encoder) => {
const run = (encoder, shouldRender) => {
// We render the code into an Target using MSDFs: https://github.com/Chlumsky/msdfgen
const introPass = encoder.beginComputePass();
@@ -222,13 +222,15 @@ export default ({ config, device, timeBuffer }) => {
computePass.dispatchWorkgroups(Math.ceil(gridSize[0] / 32), gridSize[1], 1);
computePass.end();
renderPassConfig.colorAttachments[0].view = output.createView();
renderPassConfig.colorAttachments[1].view = highPassOutput.createView();
const renderPass = encoder.beginRenderPass(renderPassConfig);
renderPass.setPipeline(renderPipeline);
renderPass.setBindGroup(0, renderBindGroup);
renderPass.draw(numVerticesPerQuad * numQuads, 1, 0, 0);
renderPass.end();
if (shouldRender) {
renderPassConfig.colorAttachments[0].view = output.createView();
renderPassConfig.colorAttachments[1].view = highPassOutput.createView();
const renderPass = encoder.beginRenderPass(renderPassConfig);
renderPass.setPipeline(renderPipeline);
renderPass.setBindGroup(0, renderBindGroup);
renderPass.draw(numVerticesPerQuad * numQuads, 1, 0, 0);
renderPass.end();
}
};
return makePass("Rain", loaded, build, run);

View File

@@ -92,7 +92,11 @@ export default ({ config, device, timeBuffer }) => {
};
};
const run = (encoder) => {
const run = (encoder, shouldRender) => {
if (!shouldRender) {
return;
}
const computePass = encoder.beginComputePass();
computePass.setPipeline(computePipeline);
const computeBindGroup = makeBindGroup(device, computePipeline, 0, [

View File

@@ -118,9 +118,9 @@ const makeBindGroup = (device, pipeline, index, entries) =>
const makePass = (name, loaded, build, run) => ({
loaded: loaded ?? Promise.resolve(),
build: build ?? ((size, inputs) => inputs),
run: (encoder) => {
run: (encoder, shouldRender) => {
encoder.pushDebugGroup(`Pass "${name}"`);
run?.(encoder);
run?.(encoder, shouldRender);
encoder.popDebugGroup();
},
});
@@ -131,7 +131,7 @@ const makePipeline = async (context, steps) => {
return {
steps,
build: (canvasSize) => steps.reduce((outputs, step) => step.build(canvasSize, outputs), null),
run: (encoder) => steps.forEach((step) => step.run(encoder)),
run: (encoder, shouldRender) => steps.forEach((step) => step.run(encoder, shouldRender)),
};
};