import EventEmitter from './event-emitter.js'

type WebAudioPlayerEvents = {
  loadedmetadata: []
  canplay: []
  play: []
  pause: []
  seeking: []
  timeupdate: []
  volumechange: []
  emptied: []
  ended: []
}

/**
 * A Web Audio buffer player emulating the behavior of an HTML5 Audio element.
 *
 * Note: This class does not manage blob: URLs. If you pass a blob: URL to the `src` setter,
 * you are responsible for revoking it when done. The Player class (player.ts) handles
 * blob URL lifecycle management automatically.
 */
class WebAudioPlayer extends EventEmitter<WebAudioPlayerEvents> {
  private audioContext: AudioContext
  private gainNode: GainNode
  private bufferNode: AudioBufferSourceNode | null = null
  private playStartTime = 0
  private playbackPosition = 0
  private _muted = false
  private _playbackRate = 1
  private _duration: number | undefined = undefined
  private buffer: AudioBuffer | null = null
  public currentSrc = ''
  public paused = true
  public crossOrigin: string | null = null
  public seeking = false
  public autoplay = false

  constructor(audioContext = new AudioContext()) {
    super()
    this.audioContext = audioContext
    this.gainNode = this.audioContext.createGain()
    this.gainNode.connect(this.audioContext.destination)
  }

  /** Subscribe to an event. Returns an unsubscribe function. */
  addEventListener = this.on

  /** Unsubscribe from an event */
  removeEventListener = this.un

  async load() {
    return
  }

  get src() {
    return this.currentSrc
  }

  set src(value: string) {
    this.currentSrc = value
    this._duration = undefined

    if (!value) {
      this.buffer = null
      this.emit('emptied')
      return
    }

    fetch(value)
      .then((response) => {
        if (response.status >= 400) {
          throw new Error(`Failed to fetch ${value}: ${response.status} (${response.statusText})`)
        }
        return response.arrayBuffer()
      })
      .then((arrayBuffer) => {
        // Ignore stale responses if the src changed while the fetch was in flight
        if (this.currentSrc !== value) return null
        return this.audioContext.decodeAudioData(arrayBuffer)
      })
      .then((audioBuffer) => {
        if (this.currentSrc !== value) return
        this.buffer = audioBuffer
        this.emit('loadedmetadata')
        this.emit('canplay')
        if (this.autoplay) this.play()
      })
      .catch((err) => {
        // Log fetch/decode failures; they are swallowed rather than re-thrown
        console.error('WebAudioPlayer load error:', err)
      })
  }

  private _play() {
    if (!this.paused) return
    this.paused = false

    // Clean up the old buffer node completely before creating a new one
    if (this.bufferNode) {
      this.bufferNode.onended = null
      this.bufferNode.disconnect()
    }

    this.bufferNode = this.audioContext.createBufferSource()
    if (this.buffer) {
      this.bufferNode.buffer = this.buffer
    }
    this.bufferNode.playbackRate.value = this._playbackRate
    this.bufferNode.connect(this.gainNode)

    let currentPos = this.playbackPosition
    if (currentPos >= this.duration || currentPos < 0) {
      currentPos = 0
      this.playbackPosition = 0
    }

    this.bufferNode.start(this.audioContext.currentTime, currentPos)
    this.playStartTime = this.audioContext.currentTime

    this.bufferNode.onended = () => {
      if (this.currentTime >= this.duration) {
        this.pause()
        this.emit('ended')
      }
    }
  }

  private _pause() {
    this.paused = true
    this.bufferNode?.stop()
    this.playbackPosition += (this.audioContext.currentTime - this.playStartTime) * this._playbackRate
  }

  async play() {
    if (!this.paused) return
    this._play()
    this.emit('play')
  }

  pause() {
    if (this.paused) return
    this._pause()
    this.emit('pause')
  }

  stopAt(timeSeconds: number) {
    const delay = timeSeconds - this.currentTime
    const currentBufferNode = this.bufferNode
    currentBufferNode?.stop(this.audioContext.currentTime + delay)
    currentBufferNode?.addEventListener(
      'ended',
      () => {
        if (currentBufferNode === this.bufferNode) {
          this.bufferNode = null
          this.pause()
        }
      },
      { once: true },
    )
  }

  async setSinkId(deviceId: string) {
    const ac = this.audioContext as AudioContext & { setSinkId: (id: string) => Promise<void> }
    return ac.setSinkId(deviceId)
  }

  get playbackRate() {
    return this._playbackRate
  }

  set playbackRate(value) {
    const wasPlaying = !this.paused
    if (wasPlaying) this._pause()

    this._playbackRate = value

    if (wasPlaying) this._play()

    if (this.bufferNode) {
      this.bufferNode.playbackRate.value = value
    }
  }

  get currentTime() {
    return this.paused
      ? this.playbackPosition
      : this.playbackPosition + (this.audioContext.currentTime - this.playStartTime) * this._playbackRate
  }

  set currentTime(value) {
    const wasPlaying = !this.paused
    if (wasPlaying) this._pause()

    this.playbackPosition = value

    if (wasPlaying) this._play()

    this.emit('seeking')
    this.emit('timeupdate')
  }

  get duration() {
    return this._duration ?? (this.buffer?.duration || 0)
  }

  set duration(value: number) {
    this._duration = value
  }

  get volume() {
    return this.gainNode.gain.value
  }

  set volume(value) {
    this.gainNode.gain.value = value
    this.emit('volumechange')
  }

  get muted() {
    return this._muted
  }

  set muted(value: boolean) {
    if (this._muted === value) return
    this._muted = value

    if (this._muted) {
      this.gainNode.disconnect()
    } else {
      this.gainNode.connect(this.audioContext.destination)
    }
  }

  public canPlayType(mimeType: string) {
    return /^(audio|video)\//.test(mimeType)
  }

  /** Get the GainNode used to play the audio. Can be used to attach filters. */
  public getGainNode(): GainNode {
    return this.gainNode
  }

  /** Get decoded audio */
  public getChannelData(): Float32Array[] {
    const channels: Float32Array[] = []
    if (!this.buffer) return channels

    const numChannels = this.buffer.numberOfChannels
    for (let i = 0; i < numChannels; i++) {
      channels.push(this.buffer.getChannelData(i))
    }

    return channels
  }

  /**
   * Imitate `HTMLElement.removeAttribute` for compatibility with `Player`.
   */
  public removeAttribute(attrName: string) {
    switch (attrName) {
      case 'src':
        this.src = ''
        break
      case 'playbackRate':
        this.playbackRate = 0
        break
      case 'currentTime':
        this.currentTime = 0
        break
      case 'duration':
        this.duration = 0
        break
      case 'volume':
        this.volume = 0
        break
      case 'muted':
        this.muted = false
        break
    }
  }
}

export default WebAudioPlayer
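/*
 * Example usage (a minimal sketch, shown here as a comment so it does not run on import).
 * It assumes the events and properties above, which mirror HTMLMediaElement semantics:
 * set `src`, wait for `canplay`, then call play(). The audio URL is hypothetical.
 *
 *   const player = new WebAudioPlayer()
 *
 *   const unsubscribe = player.addEventListener('canplay', () => {
 *     player.volume = 0.8
 *     player.play()
 *   })
 *
 *   player.src = '/audio/track.mp3' // hypothetical URL; fetched and decoded asynchronously
 *
 *   // Later: detach the listener and release the decoded buffer
 *   unsubscribe()
 *   player.src = ''
 */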