;
/**
 * Pre-allocated array of normalized (0..1) waveform values, reused across
 * animation frames to avoid per-frame allocations. Resized to `this.bars`
 * by startVisualization() and deliberately kept alive by stopVisualization().
 */
private _waveformBuffer: number[] = [];
/**
 * Start visualization with an audio stream.
 *
 * Sets up (or reuses) an AudioContext + AnalyserNode for the stream and
 * runs a requestAnimationFrame loop that downsamples the frequency
 * spectrum into `waveformData` — one normalized 0..1 value per bar.
 *
 * @param stream - Live audio stream to analyse (e.g. from getUserMedia).
 */
startVisualization(stream: MediaStream) {
  this.stopVisualization();
  this._stream = stream;
  // Reuse AudioContext if it exists and is not closed
  if (!this.audioContext || this.audioContext.state === "closed") {
    this.audioContext = new AudioContext();
  }
  // A context created (or reused) outside a user gesture may be
  // "suspended" under autoplay policies and would deliver silence.
  // Resume best-effort; failure just means no data until user interaction.
  if (this.audioContext.state === "suspended") {
    void this.audioContext.resume().catch(() => {});
  }
  this.analyser = this.audioContext.createAnalyser();
  this.sourceNode = this.audioContext.createMediaStreamSource(stream);
  this.sourceNode.connect(this.analyser);
  this.analyser.fftSize = 256;
  this.analyser.smoothingTimeConstant = 0.8;
  // Pre-allocate frequency data buffer (reused every frame)
  this._frequencyData = new Uint8Array(this.analyser.frequencyBinCount);
  // Pre-allocate waveform buffer for the bar count
  const barCount = this.bars;
  if (this._waveformBuffer.length !== barCount) {
    this._waveformBuffer = new Array(barCount).fill(0);
  }
  const update = () => {
    if (!this._stream || !this.analyser || !this._frequencyData) return;
    this.analyser.getByteFrequencyData(this._frequencyData);
    // Sample data at regular intervals to match bar count.
    // Skip the first few bins (low frequencies that contain ambient noise).
    const skipBins = 2;
    const usableLength = this._frequencyData.length - skipBins;
    // Guard against step === 0 (when barCount > usableLength), which would
    // otherwise make every bar sample the same frequency bin.
    const step = Math.max(1, Math.floor(usableLength / barCount));
    // Update buffer in place instead of creating a new array each frame
    for (let i = 0; i < barCount; i++) {
      const index = Math.min(
        skipBins + i * step,
        this._frequencyData.length - 1,
      );
      this._waveformBuffer[i] = this._frequencyData[index] / 255;
    }
    // NOTE: re-assigning the SAME array reference does not trip Lit's
    // reactive-property change detection (it compares by reference), so
    // the explicit requestUpdate() below is what schedules the re-render.
    this.waveformData = this._waveformBuffer;
    this.requestUpdate();
    this.animationFrameId = requestAnimationFrame(update);
  };
  update();
}
/**
 * Stop visualization and release per-stream resources.
 *
 * Cancels the animation loop, detaches the audio graph, and clears all
 * per-stream state. The AudioContext and the pre-allocated waveform
 * buffer are intentionally kept alive so a subsequent
 * startVisualization() call can reuse them.
 */
stopVisualization() {
  const frameId = this.animationFrameId;
  if (frameId) {
    cancelAnimationFrame(frameId);
    this.animationFrameId = undefined;
  }
  // Detach the source node; the AudioContext itself stays open for reuse.
  this.sourceNode?.disconnect();
  this.sourceNode = undefined;
  this.analyser = undefined;
  this._stream = undefined;
  this._frequencyData = undefined;
  this.waveformData = [];
  // _waveformBuffer remains allocated for the next run.
}
/**
 * Lit lifecycle hook: tear everything down when the element leaves the DOM.
 */
override disconnectedCallback() {
  super.disconnectedCallback();
  this.stopVisualization();
  // Close AudioContext when the element is removed from the DOM.
  // close() on an already-closed context rejects with InvalidStateError,
  // and the returned Promise was previously ignored — guard the state and
  // swallow rejections so teardown never surfaces an unhandled rejection.
  if (this.audioContext) {
    if (this.audioContext.state !== "closed") {
      void this.audioContext.close().catch(() => {
        // Best-effort cleanup; nothing to recover here.
      });
    }
    this.audioContext = undefined;
  }
}
/**
 * Render the bar visualizer.
 *
 * NOTE(review): the html template literal below is empty in this chunk,
 * so the computed barCount/barWidth/heightPx/data values are never
 * interpolated. Either the template content was lost in extraction or
 * this is dead code — confirm against the full original file.
 */
override render() {
// Fall back to the configured bar count before any data arrives
const barCount = this.waveformData.length || this.bars;
const barWidth = 100 / barCount; // Percentage
const heightPx = this.height;
// Default to minimal bars if no data
const data = this.waveformData.length > 0
? this.waveformData
: Array(barCount).fill(0.1);
return html`
`;
}
}
// Register the component as <ct-audio-visualizer>. customElements.define()
// throws if the tag name is already registered, so this module must only
// execute once per page.
customElements.define("ct-audio-visualizer", CTAudioVisualizer);