import { useCallback, useEffect, useRef } from 'react';

/** Window augmented with the legacy WebKit-prefixed AudioContext constructor. */
type AudioWindow = Window & {
  webkitAudioContext?: typeof AudioContext;
};

/**
 * Schedules a single oscillator note on `context`, routed through its own
 * gain node into `destination`, with a linear attack and an exponential
 * release envelope.
 *
 * @param context     Audio context used to create the nodes.
 * @param destination Master gain node the note is mixed into.
 * @param frequency   Note frequency in Hz.
 * @param startTime   Absolute context time at which the note begins.
 * @param duration    Envelope length in seconds.
 * @param options     Peak gain, oscillator type, and optional attack/release
 *                    times (seconds) and detune (cents).
 */
function scheduleTone(
  context: AudioContext,
  destination: GainNode,
  frequency: number,
  startTime: number,
  duration: number,
  options: {
    gain: number;
    type: OscillatorType;
    attack?: number;
    release?: number;
    detune?: number;
  },
): void {
  // A closed context throws on node creation; bail out quietly.
  if (context.state === 'closed') {
    return;
  }

  const oscillator = context.createOscillator();
  const gainNode = context.createGain();
  const attack = options.attack ?? 0.05;
  const release = options.release ?? Math.max(0.18, duration * 0.5);
  const peakGain = options.gain;
  // Never begin the release before the attack has finished, even when the
  // requested release is longer than the remaining duration.
  const releaseStart = Math.max(startTime + attack, startTime + duration - release);

  oscillator.type = options.type;
  oscillator.frequency.setValueAtTime(frequency, startTime);
  oscillator.detune.setValueAtTime(options.detune ?? 0, startTime);

  // Start near-silent: exponential ramps cannot target or leave exactly 0.
  gainNode.gain.setValueAtTime(0.0001, startTime);
  gainNode.gain.linearRampToValueAtTime(peakGain, startTime + attack);
  gainNode.gain.setValueAtTime(peakGain, releaseStart);
  gainNode.gain.exponentialRampToValueAtTime(0.0001, startTime + duration);

  oscillator.connect(gainNode);
  gainNode.connect(destination);
  oscillator.start(startTime);
  // Stop slightly after the envelope ends so the release tail is not clipped.
  oscillator.stop(startTime + duration + 0.02);
}

/** Schedules a sustained three-voice pad chord beginning at `startTime`. */
function scheduleChord(
  context: AudioContext,
  destination: GainNode,
  notes: number[],
  startTime: number,
): void {
  notes.forEach((frequency, index) => {
    scheduleTone(context, destination, frequency, startTime, 2.45, {
      // The lowest voice is slightly louder and uses a richer waveform.
      gain: index === 0 ? 0.028 : 0.022,
      type: index === 0 ? 'triangle' : 'sine',
      attack: 0.12,
      release: 1.4,
      // Small opposite detunes on the upper voices give a gentle chorus.
      detune: index === 1 ? -4 : index === 2 ? 4 : 0,
    });
  });
}

/** Schedules a short, bright accent note (melodic sparkle over the pad). */
function scheduleAccent(
  context: AudioContext,
  destination: GainNode,
  frequency: number,
  startTime: number,
): void {
  scheduleTone(context, destination, frequency, startTime, 0.32, {
    gain: 0.032,
    type: 'triangle',
    attack: 0.01,
    release: 0.18,
  });
}

/** Schedules a soft sustained bass note underneath the chord. */
function scheduleBass(
  context: AudioContext,
  destination: GainNode,
  frequency: number,
  startTime: number,
): void {
  scheduleTone(context, destination, frequency, startTime, 1.9, {
    gain: 0.024,
    type: 'sine',
    attack: 0.02,
    release: 0.8,
  });
}

/**
 * React hook that plays a generative ambient chord loop through the Web
 * Audio API while `active` is true and `volume` is positive.
 *
 * The audio context is created lazily (never in the mute path, to avoid
 * browser autoplay warnings) and resumed on the first user gesture when
 * the browser blocks autoplay. The context is closed on unmount.
 *
 * @param active Whether music should currently be playing.
 * @param volume Linear volume in [0, 1]; scaled down internally to a
 *               background-appropriate level.
 */
export function useBackgroundMusic({
  active,
  volume,
}: {
  active: boolean;
  volume: number;
}) {
  // Explicit type parameters are required: `useRef(null)` alone infers the
  // ref type as `null` and rejects every later assignment under strict TS.
  const contextRef = useRef<AudioContext | null>(null);
  const masterGainRef = useRef<GainNode | null>(null);
  const loopTimerRef = useRef<number | null>(null);
  const stepRef = useRef(0);
  // Mirror the latest props in refs so the loop/gesture callbacks always
  // read current values without being recreated.
  const activeRef = useRef(active);
  const volumeRef = useRef(volume);
  // Guards against double-starting the loop while context.resume() is pending.
  const startingRef = useRef(false);

  /** Cancels the pending loop timer, if any. */
  const stopLoop = useCallback(() => {
    if (loopTimerRef.current !== null) {
      window.clearTimeout(loopTimerRef.current);
      loopTimerRef.current = null;
    }
  }, []);

  /**
   * Lazily creates (or reuses) the AudioContext and master gain node.
   * Returns null during SSR or when the Web Audio API is unavailable.
   */
  const ensureAudioGraph = useCallback(() => {
    if (typeof window === 'undefined') return null;
    const AudioContextCtor =
      window.AudioContext ?? (window as AudioWindow).webkitAudioContext;
    if (!AudioContextCtor) return null;
    // A closed context cannot be reopened; discard it and rebuild.
    if (contextRef.current?.state === 'closed') {
      contextRef.current = null;
      masterGainRef.current = null;
    }
    if (!contextRef.current) {
      contextRef.current = new AudioContextCtor();
    }
    if (!masterGainRef.current) {
      const masterGain = contextRef.current.createGain();
      masterGain.gain.value = 0.0001;
      masterGain.connect(contextRef.current.destination);
      masterGainRef.current = masterGain;
    }
    return {
      context: contextRef.current,
      masterGain: masterGainRef.current,
    };
  }, []);

  /**
   * Schedules one bar of the progression and re-arms itself via setTimeout.
   * Stops rescheduling as soon as playback is deactivated or muted.
   */
  const scheduleLoop = useCallback(() => {
    const graph = ensureAudioGraph();
    if (!graph || !activeRef.current || volumeRef.current <= 0) {
      stopLoop();
      return;
    }
    const { context, masterGain } = graph;
    if (context.state === 'closed') {
      stopLoop();
      return;
    }
    // Four-chord ambient progression (frequencies in Hz):
    // Am – Bdim-ish – Gm-ish – F, cycled by stepRef.
    const progression = [
      [220, 277.18, 329.63],
      [246.94, 311.13, 369.99],
      [196, 246.94, 293.66],
      [174.61, 220, 261.63],
    ];
    const chord = progression[stepRef.current % progression.length] ?? progression[0];
    if (!chord) {
      return;
    }
    const [bassNote, midNote, topNote] = chord;
    if (bassNote === undefined || midNote === undefined || topNote === undefined) {
      return;
    }
    // Small lead-in so all setValueAtTime calls land in the future.
    const startTime = context.currentTime + 0.08;
    scheduleChord(context, masterGain, chord, startTime);
    scheduleBass(context, masterGain, bassNote / 2, startTime);
    scheduleAccent(context, masterGain, topNote * 2, startTime + 0.24);
    scheduleAccent(context, masterGain, midNote * 2, startTime + 1.12);
    stepRef.current += 1;
    loopTimerRef.current = window.setTimeout(scheduleLoop, 2200);
  }, [ensureAudioGraph, stopLoop]);

  /**
   * Ramps the master gain toward the target volume (or near-silence).
   * When muting, only an already-existing graph is touched — creating an
   * AudioContext just to silence it would trigger autoplay warnings.
   */
  const updateMasterVolume = useCallback(
    (graph?: { context: AudioContext; masterGain: GainNode } | null) => {
      const shouldPlay = activeRef.current && volumeRef.current > 0;
      const audioGraph =
        graph ??
        (shouldPlay
          ? ensureAudioGraph()
          : contextRef.current && masterGainRef.current
            ? { context: contextRef.current, masterGain: masterGainRef.current }
            : null);
      if (!audioGraph) return;
      const targetGain = shouldPlay
        ? Math.max(0.0001, volumeRef.current * 0.18)
        : 0.0001;
      const now = audioGraph.context.currentTime;
      audioGraph.masterGain.gain.cancelScheduledValues(now);
      // Anchor at the current value: a ramp scheduled right after
      // cancelScheduledValues has no preceding event to start from.
      audioGraph.masterGain.gain.setValueAtTime(audioGraph.masterGain.gain.value, now);
      audioGraph.masterGain.gain.linearRampToValueAtTime(targetGain, now + 0.24);
    },
    [ensureAudioGraph],
  );

  /**
   * Creates/resumes the audio graph and starts the loop if it is not
   * already running. Re-entrancy across the `resume()` await is guarded so
   * a user gesture arriving mid-start cannot double-schedule the loop.
   */
  const startPlayback = useCallback(async () => {
    if (startingRef.current) return;
    startingRef.current = true;
    try {
      const graph = ensureAudioGraph();
      if (!graph || !activeRef.current || volumeRef.current <= 0) return;
      if (graph.context.state === 'suspended') {
        // Autoplay policies suspend fresh contexts until a user gesture.
        await graph.context.resume();
      }
      updateMasterVolume(graph);
      if (loopTimerRef.current === null) {
        scheduleLoop();
      }
    } finally {
      startingRef.current = false;
    }
  }, [ensureAudioGraph, scheduleLoop, updateMasterVolume]);

  // React to prop changes: start/stop playback and keep gesture listeners
  // installed while playing so a blocked context can be resumed.
  useEffect(() => {
    activeRef.current = active;
    volumeRef.current = volume;
    if (!active || volume <= 0) {
      updateMasterVolume();
      stopLoop();
      return;
    }
    void startPlayback();
    const handleUserGesture = () => {
      void startPlayback();
    };
    window.addEventListener('pointerdown', handleUserGesture);
    window.addEventListener('keydown', handleUserGesture);
    return () => {
      window.removeEventListener('pointerdown', handleUserGesture);
      window.removeEventListener('keydown', handleUserGesture);
    };
  }, [active, startPlayback, stopLoop, updateMasterVolume, volume]);

  // Unmount cleanup: silence the master gain, drop the refs, and close the
  // context so the browser releases its audio resources.
  useEffect(
    () => () => {
      stopLoop();
      if (masterGainRef.current && contextRef.current) {
        masterGainRef.current.gain.cancelScheduledValues(contextRef.current.currentTime);
        masterGainRef.current.gain.value = 0.0001;
      }
      const context = contextRef.current;
      contextRef.current = null;
      masterGainRef.current = null;
      if (context && context.state !== 'closed') {
        void context.close();
      }
    },
    [stopLoop],
  );
}