// Genarrative/src/games/bark-battle/infrastructure/BrowserMicrophoneInput.ts
// (86 lines, 3.3 KiB, TypeScript)
import type { MicrophoneFailureReason } from '../domain/BarkBattleTypes';
/**
 * Translates a `getUserMedia` rejection into a domain-level failure reason.
 * Unrecognized errors (or non-object values) map to 'unknown'.
 */
export function mapGetUserMediaError(error: unknown): MicrophoneFailureReason {
let errorName = '';
if (error !== null && typeof error === 'object' && 'name' in error) {
errorName = String((error as { name?: unknown }).name);
}
switch (errorName) {
case 'NotAllowedError':
case 'SecurityError':
return 'permission-denied';
case 'NotFoundError':
case 'DevicesNotFoundError':
return 'not-found';
case 'NotReadableError':
case 'TrackStartError':
return 'not-readable';
default:
return 'unknown';
}
}
/**
 * Checks whether microphone capture is possible in the given window-like object.
 * Only an explicit `isSecureContext === false` counts as insecure; an absent
 * flag is tolerated so plain test doubles still pass.
 */
export function isMicrophoneApiSupported(windowLike: { isSecureContext?: boolean; navigator?: Navigator | { mediaDevices?: { getUserMedia?: unknown } } }) {
if (windowLike.isSecureContext === false) {
return { ok: false as const, reason: 'non-secure-context' as const };
}
const hasGetUserMedia = typeof windowLike.navigator?.mediaDevices?.getUserMedia === 'function';
return hasGetUserMedia
? { ok: true as const, reason: null }
: { ok: false as const, reason: 'unsupported' as const };
}
/** Stops every track on the stream, releasing the capture device. */
export function stopMediaStreamTracks(stream: MediaStream) {
for (const track of stream.getTracks()) {
track.stop();
}
}
/** Handle for an active microphone sampling session; `stop` tears down capture. */
export type BrowserMicrophoneSampler = {
stop: () => void;
};
/**
 * Callback invoked once per animation frame with the current volume
 * (clamped to [0, 1]) and the milliseconds elapsed since sampling started.
 */
export type BrowserMicrophoneVolumeHandler = (volume: number, atMs: number) => void;
/**
 * Opens the microphone and samples its volume once per animation frame.
 *
 * Rejects with an Error carrying a `reason` property (a MicrophoneFailureReason
 * or 'non-secure-context' / 'unsupported' / 'audio-context-blocked') when the
 * API is unavailable or setup fails.
 *
 * @param onVolume Receives the RMS volume (empirical 3.5x gain, clamped to
 *                 [0, 1]) and elapsed milliseconds since sampling began.
 * @returns A sampler whose `stop()` cancels the loop, disconnects the audio
 *          graph, closes the AudioContext, and releases the media tracks.
 */
export async function startBrowserMicrophoneSampler(onVolume: BrowserMicrophoneVolumeHandler): Promise<BrowserMicrophoneSampler> {
const supported = isMicrophoneApiSupported(window);
if (!supported.ok) {
throw Object.assign(new Error(supported.reason), { reason: supported.reason });
}
// Hoisted so the catch block can release them: the original code leaked the
// live tracks (and an open AudioContext) when setup failed after getUserMedia
// succeeded — e.g. a rejected resume() — leaving the mic indicator lit.
let stream: MediaStream | null = null;
let audioContext: AudioContext | null = null;
try {
stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const AudioContextCtor = window.AudioContext || window.webkitAudioContext;
if (!AudioContextCtor) {
// Cleanup happens in the catch block below.
throw Object.assign(new Error('audio-context-blocked'), { reason: 'audio-context-blocked' });
}
audioContext = new AudioContextCtor();
if (audioContext.state === 'suspended') {
// Autoplay policies can start the context suspended; resume needs a user gesture.
await audioContext.resume();
}
const analyser = audioContext.createAnalyser();
analyser.fftSize = 512;
const source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
const data = new Uint8Array(analyser.fftSize);
const sampleStartedAtMs = window.performance.now();
let rafId = 0;
const sample = () => {
analyser.getByteTimeDomainData(data);
// Bytes are centered at 128; compute RMS of the normalized samples.
let sum = 0;
for (const value of data) {
const centered = (value - 128) / 128;
sum += centered * centered;
}
const volume = Math.min(1, Math.sqrt(sum / data.length) * 3.5);
onVolume(volume, window.performance.now() - sampleStartedAtMs);
rafId = window.requestAnimationFrame(sample);
};
sample();
// Non-null snapshots for the closure below.
const activeStream = stream;
const activeContext = audioContext;
return {
stop: () => {
window.cancelAnimationFrame(rafId);
source.disconnect();
void activeContext.close();
stopMediaStreamTracks(activeStream);
},
};
} catch (error) {
// Release anything acquired before the failure so the device is freed.
if (audioContext) {
void audioContext.close();
}
if (stream) {
stopMediaStreamTracks(stream);
}
const reason = error && typeof error === 'object' && 'reason' in error ? (error as { reason: MicrophoneFailureReason }).reason : mapGetUserMediaError(error);
throw Object.assign(new Error(reason), { reason });
}
}
// Type-only global augmentation (erased at runtime): older Safari exposes the
// Web Audio constructor under the legacy `webkit` prefix.
declare global {
interface Window {
webkitAudioContext?: typeof AudioContext;
}
}