mirror of
https://github.com/Rezmason/matrix.git
synced 2026-04-14 12:29:30 -07:00
Taking a crack at an audio system
This commit is contained in:
BIN
assets/raindrop_placeholder.m4a
Normal file
BIN
assets/raindrop_placeholder.m4a
Normal file
Binary file not shown.
BIN
assets/raindrop_placeholder.ogg
Normal file
BIN
assets/raindrop_placeholder.ogg
Normal file
Binary file not shown.
BIN
assets/raindrop_placeholder.wav
Normal file
BIN
assets/raindrop_placeholder.wav
Normal file
Binary file not shown.
BIN
assets/raindrop_placeholder.webm
Normal file
BIN
assets/raindrop_placeholder.webm
Normal file
Binary file not shown.
80
js/audio.js
Normal file
80
js/audio.js
Normal file
@@ -0,0 +1,80 @@
|
||||
// Scale degrees (in semitones) the raindrops are pitched to.
// pentatonic major: 0, 2, 4, 7, 9
// pentatonic minor: 0, 3, 5, 7, 10
const pentatonicMinor = [0, 3, 5, 7, 10];
// Convert each degree to a playback-rate multiplier: 2^(semitones/12),
// shifted down a further quarter octave (the -0.25 exponent offset).
const notes = pentatonicMinor.map((semitone) => 2 ** (semitone / 12 - 0.25));
|
||||
|
||||
// Candidate encodings of the raindrop sample, in order of preference.
// A matching placeholder asset exists under assets/ for each extension.
const audioCodecs = [
	["audio/webm; codecs=vorbis", "webm"],
	["audio/mp4; codecs=mp4a.40.5", "m4a"],
	["audio/wav; codecs=1", "wav"],
].map(([codec, extension]) => ({ codec, extension }));
|
||||
|
||||
// Pick the file extension of the first codec this browser reports it can play.
// canPlayType returns "", "maybe", or "probably"; any non-empty answer counts.
// Yields undefined when nothing matches.
const extension = (() => {
	const probe = new Audio();
	const playable = audioCodecs.find(({ codec }) => probe.canPlayType(codec) !== "");
	return playable?.extension;
})();
|
||||
|
||||
const context = new AudioContext();

// Borrowed from Oskar Eriksson's web audio examples.
// A short single-echo ("slapback") delay: the dry signal passes straight
// through, while a wet path loops through a delay line that feeds back on
// itself, mixed back in at reduced level.
const slapback = (() => {
	const input = context.createGain();
	const output = context.createGain();
	const echo = context.createDelay();
	const regen = context.createGain();
	const wet = context.createGain();

	echo.delayTime.value = 0.15;
	regen.gain.value = 0.25;
	wet.gain.value = 0.55;

	// Dry path.
	input.connect(output);
	// Wet path: input -> echo -> feedback -> echo (loop), echo -> wet -> output.
	input.connect(echo).connect(regen).connect(echo).connect(wet).connect(output);

	return { input, output };
})();

// Route the effect chain to the speakers.
slapback.output.connect(context.destination);
|
||||
|
||||
// Download the resource at `url` and decode it into an AudioBuffer
// via the shared AudioContext. Rejects on network or decode failure.
const fetchAudioData = async (url) => {
	const response = await fetch(url);
	return context.decodeAudioData(await response.arrayBuffer());
};
|
||||
|
||||
// Decode the raindrop sample up front (top-level await) in the best supported format.
const sample = await fetchAudioData(`assets/raindrop_placeholder.${extension}`);
|
||||
|
||||
// Promise-flavored sleep: resolves after the given number of seconds.
const delay = (seconds) =>
	new Promise((resolve) => {
		setTimeout(resolve, seconds * 1000);
	});
|
||||
|
||||
// Play one raindrop and perpetually reschedule this loop.
// Each drop is a fresh buffer source at a random pentatonic pitch,
// random quiet volume, and random stereo position, fed into the
// slapback effect.
const startDrop = async () => {
	// Queue the next drop on this loop before playing this one.
	setTimeout(() => startDrop(), 300);

	// Jitter the onset by up to a quarter second so parallel loops don't align.
	await delay(Math.random() * 0.25);

	const src = context.createBufferSource();
	src.buffer = sample;
	// src.detune.value = (Math.random() - 0.5) * 500;
	src.playbackRate.value = notes[Math.floor(Math.random() * notes.length)];

	const volume = context.createGain();
	volume.gain.value = Math.random() ** 0.5 * 0.06; // 0 to 0.06, biased toward the louder end

	const pan = context.createStereoPanner();
	pan.pan.value = Math.random() * 2 - 1; // -1 (left) to 1 (right)

	src.connect(volume).connect(pan).connect(slapback.input);
	// Detach this drop's subgraph from the effect chain once playback finishes.
	src.addEventListener("ended", () => pan.disconnect(slapback.input));
	src.start(0);
};
|
||||
|
||||
// Resolves on the first user gesture; browsers require one before audio can play.
const firstTap = new Promise((resolve) => {
	// Fix: DOM event names are all-lowercase — "touchStart" never fires,
	// so touch-only devices would never resolve this promise.
	// `once: true` removes each listener after it fires.
	document.addEventListener("touchstart", () => resolve(), { once: true });
	document.addEventListener("mousedown", () => resolve(), { once: true });
});
|
||||
|
||||
// Entry point: wait for a user gesture, then start four overlapping rain loops.
export default async () => {
	await firstTap;

	// The AudioContext was created at module load, before any user gesture;
	// browser autoplay policies leave such a context suspended. Resume it
	// now that we have a gesture, or nothing will be audible.
	await context.resume();

	// Stagger four independent drop loops for a denser rainfall texture.
	for (let i = 0; i < 4; i++) {
		setTimeout(() => startDrop(), i * 500);
	}
};
|
||||
@@ -119,6 +119,7 @@ const defaults = {
|
||||
loops: false,
|
||||
skipIntro: true,
|
||||
testFix: null,
|
||||
audio: false,
|
||||
};
|
||||
|
||||
const versions = {
|
||||
@@ -461,6 +462,7 @@ const paramMapping = {
|
||||
loops: { key: "loops", parser: (s) => s.toLowerCase().includes("true") },
|
||||
fps: { key: "fps", parser: (s) => nullNaN(range(parseFloat(s), 0, 60)) },
|
||||
skipIntro: { key: "skipIntro", parser: (s) => s.toLowerCase().includes("true") },
|
||||
audio: { key: "audio", parser: (s) => s.toLowerCase().includes("true") },
|
||||
renderer: { key: "renderer", parser: (s) => s },
|
||||
once: { key: "once", parser: (s) => s.toLowerCase().includes("true") },
|
||||
isometric: { key: "isometric", parser: (s) => s.toLowerCase().includes("true") },
|
||||
|
||||
20
js/main.js
20
js/main.js
@@ -17,11 +17,21 @@ const isRunningSwiftShader = () => {
|
||||
return renderer.toLowerCase().includes("swiftshader");
|
||||
};
|
||||
|
||||
|
||||
const initRenderer = async (config) => {
|
||||
const useWebGPU = (await supportsWebGPU()) && ["webgpu"].includes(config.renderer?.toLowerCase());
|
||||
const solution = import(`./${useWebGPU ? "webgpu" : "regl"}/main.js`);
|
||||
(await solution).default(canvas, config);
|
||||
};
|
||||
|
||||
const initAudio = async (config) => {
|
||||
if (!config.audio) return;
|
||||
(await import("./audio.js")).default();
|
||||
};
|
||||
|
||||
document.body.onload = async () => {
|
||||
const urlParams = Object.fromEntries(new URLSearchParams(window.location.search).entries());
|
||||
const config = makeConfig(urlParams);
|
||||
const useWebGPU = (await supportsWebGPU()) && ["webgpu"].includes(config.renderer?.toLowerCase());
|
||||
const solution = import(`./${useWebGPU ? "webgpu" : "regl"}/main.js`);
|
||||
|
||||
if (isRunningSwiftShader()) {
|
||||
const notice = document.createElement("notice");
|
||||
@@ -34,11 +44,13 @@ document.body.onload = async () => {
|
||||
canvas.style.display = "none";
|
||||
document.body.appendChild(notice);
|
||||
document.querySelector(".blue.pill").addEventListener("click", async () => {
|
||||
(await solution).default(canvas, config);
|
||||
initRenderer(config);
|
||||
initAudio(config);
|
||||
canvas.style.display = "unset";
|
||||
document.body.removeChild(notice);
|
||||
});
|
||||
} else {
|
||||
(await solution).default(canvas, config);
|
||||
initRenderer(config);
|
||||
initAudio(config);
|
||||
}
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user