
HTML

              
                <div id="container"><canvas id="canvas"></canvas></div>
<audio id="audio" controls crossorigin></audio>
<input id="audioFileInput" type="file" accept="audio/*">
<script id="AudioProvider" type="worklet">
class AudioProvider extends AudioWorkletProcessor {
  constructor() {
    super();
    this.dataArrays = [];
    this.bufferSize = 32768; // unlike AnalyserNode.getFloatTimeDomainData, which is capped at 32768 samples, this buffer could be sized to hold even more PCM data
    this.bufferIdx = 0;
    this.currentTimeInSamples = 0;
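    // Each message from the main thread is answered with the PCM currently held in
    // the circular buffer. A truthy payload requests only the samples captured since
    // the previous reply (bounded by bufferSize); a falsy payload returns the whole
    // buffer. currentFrame is the AudioWorkletGlobalScope frame counter.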
    this.port.onmessage = (e) => {
      const audioChunks = [],
            retrievalWindowSize = e.data ? Math.min(this.bufferSize, currentFrame - this.currentTimeInSamples) : this.bufferSize,
            timeOffset = this.bufferSize-retrievalWindowSize;
      for (let channelIdx = 0; channelIdx < this.dataArrays.length; channelIdx++) {
        audioChunks[channelIdx] = [];
        for (let i = 0; i < this.dataArrays[channelIdx].length-timeOffset; i++) {
          const data = this.dataArrays[channelIdx][((this.bufferIdx+i+timeOffset) % this.bufferSize + this.bufferSize) % this.bufferSize];
          audioChunks[channelIdx][i] = data !== undefined ? data : 0;
        }
      }
      this.port.postMessage({currentChunk: audioChunks});
      this.currentTimeInSamples = currentFrame;
    };
  }
  
  process(inputs, _, _2) {
    if (inputs[0].length <= 0)
      return true;
    this.dataArrays.length = inputs[0].length;
    for (let i = 0; i < this.dataArrays.length; i++) {
      if (this.dataArrays[i] === undefined)
        this.dataArrays[i] = new Array(this.bufferSize);
      else {
        this.dataArrays[i].length = this.bufferSize;
      }
    }
    
    for (let i = 0; i < inputs[0][0].length; i++) {
      this.bufferIdx = Math.min(this.bufferIdx, this.bufferSize-1);
      for (let channelIdx = 0; channelIdx < inputs[0].length; channelIdx++) {
        this.dataArrays[channelIdx][this.bufferIdx] = inputs[0][channelIdx][i];
      }
      this.bufferIdx = ((this.bufferIdx + 1) % this.bufferSize + this.bufferSize) % this.bufferSize;
    }
    return true;
  }
}

registerProcessor('audio-provider', AudioProvider);
</script>
<script>
class AnalogStyleAnalyzer {
  constructor(...args) {
    // initialize the filter bank coefficients
    this.calcCoeffs(...args);
    this.spectrumData = [];
  }
  
  calcCoeffs(freqBands, order = 4, timeRes = Infinity, bandwidth = 1, sampleRate = 44100, compensateBW = true, prewarpQ = false) {
    this._coeffs = freqBands.map(x => {
      // biquad bandpass filter (a cascade of identical biquad bandpasses is neither Butterworth nor Bessel; it is "critically damped", since every filter stage shares the same biquad coefficients)
      const K = Math.tan(Math.PI * x.ctr/sampleRate),
            bw = Math.abs(x.hi-x.lo) * bandwidth + 1/(timeRes/1000),
            qCompensationFactor = prewarpQ ? (Math.PI * x.ctr/sampleRate)/K : 1,
            Q = x.ctr/bw * qCompensationFactor / (compensateBW ? Math.sqrt(order) : 1),
            norm = 1 / (1 + K / Q + K * K),
            a0 = K / Q * norm,
            a1 = 0,
            a2 = -a0,
            b1 = 2 * (K * K - 1) * norm,
            b2 = (1 - K / Q + K * K) * norm,
            zs = [];
      for (let i = 0; i < order; i++) {
        zs[i] = {
          z1: 0,
          z2: 0,
          out: 0
        }
      }
      return {
        a0: a0,
        a1: a1,
        a2: a2,
        b1: b1,
        b2: b2,
        zs: zs
      };
    });
  }

  analyze(samples) {
    const newSpectrumData = new Array(this._coeffs.length).fill(0);
    for (const x of samples) {
      for (let i = 0; i < this._coeffs.length; i++) {
        for (let j = 0; j < this._coeffs[i].zs.length; j++) {
          const input = j <= 0 ? x : this._coeffs[i].zs[j-1].out;
          this._coeffs[i].zs[j].out = input * this._coeffs[i].a0 + this._coeffs[i].zs[j].z1;
          this._coeffs[i].zs[j].z1 = input * this._coeffs[i].a1 + this._coeffs[i].zs[j].z2 - this._coeffs[i].b1 * this._coeffs[i].zs[j].out;
          this._coeffs[i].zs[j].z2 = input * this._coeffs[i].a2 - this._coeffs[i].b2 * this._coeffs[i].zs[j].out;
        }
        newSpectrumData[i] = Math.max(newSpectrumData[i], Math.abs(this._coeffs[i].zs[this._coeffs[i].zs.length-1].out));
      }
    }
    this.spectrumData = newSpectrumData.map(x => x/2);
  }
}
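
// A minimal usage sketch (illustrative band values only; in this pen the bands are
// generated further down by generateOctaveBands()/generateFreqBands(), and
// pcmSamples is a placeholder name for a chunk of samples in [-1, 1]):
//   const analyzer = new AnalogStyleAnalyzer([{lo: 891, ctr: 1000, hi: 1122}]);
//   analyzer.analyze(pcmSamples);
//   console.log(analyzer.spectrumData[0]); // magnitude of the ~1 kHz band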
</script>
<script>
/**
 * Single file implementation of sliding windowed infinite Fourier transform (SWIFT)
 *
 * The frequency bands data is formatted like:
 * {lo: lowerBound,
 *  ctr: center,
 *  hi: higherBound}
 *
 * where lo and hi are used for calculating the necessary bandwidth for variable-Q transform spectrum visualizations and ctr for center frequency. This is generated using functions like generateFreqBands
 */
class SWIFT {
  constructor(...args) {
    // initialize the sDFT coefficients
    this.calcCoeffs(...args);
    this.spectrumData = [];
  }
  
  calcCoeffs(freqBands, order = 4, timeRes = 600, bandwidth = 1, sampleRate = 44100, compensateBW = true) {
    // calcCoeffs() can be called again at any time to re-initialize the sliding DFT after the frequency band distribution changes; note that x and y are used instead of real and imaginary parts, since vector rotation is equivalent to complex multiplication
    this._coeffs = [];
    freqBands.forEach((x, i) => {
      // rX and rY are precomputed here since evaluating sin and cos repeatedly is relatively slow
      this._coeffs[i] = {
        rX: Math.cos(x.ctr*Math.PI/sampleRate*2),
        rY: Math.sin(x.ctr*Math.PI/sampleRate*2),
        decay: Math.E ** ((-Math.abs(x.hi-x.lo) * Math.PI * bandwidth / sampleRate - 1/(timeRes*sampleRate/(Math.PI*1000))) * (compensateBW ? Math.sqrt(order) : 1)),
        coeffs: []
      };
      for (let j = 0; j < order; j++) {
        this._coeffs[i].coeffs[j] = {
          x: 0,
          y: 0
        };
      }
    });
  }
  
  analyze(dataArray) {
    const newSpectrumData = new Array(this._coeffs.length).fill(0);
    for (const x of dataArray) {
      for (let i = 0; i < this._coeffs.length; i++) {
        for (let j = 0; j < this._coeffs[i].coeffs.length; j++) {
          const input = j <= 0 ? {
            x: x,
            y: 0,
          } : this._coeffs[i].coeffs[j-1],
                outX = (this._coeffs[i].coeffs[j].x * this._coeffs[i].rX - this._coeffs[i].coeffs[j].y * this._coeffs[i].rY) * this._coeffs[i].decay + input.x * (1-this._coeffs[i].decay),
                outY = (this._coeffs[i].coeffs[j].x * this._coeffs[i].rY + this._coeffs[i].coeffs[j].y * this._coeffs[i].rX) * this._coeffs[i].decay + input.y * (1-this._coeffs[i].decay);
          
          this._coeffs[i].coeffs[j].x = outX;
          this._coeffs[i].coeffs[j].y = outY;
        }
        newSpectrumData[i] = Math.max(newSpectrumData[i],
                                      this._coeffs[i].coeffs[this._coeffs[i].coeffs.length-1].x ** 2 +
                                      this._coeffs[i].coeffs[this._coeffs[i].coeffs.length-1].y ** 2);
      }
    }
    this.spectrumData = newSpectrumData.map((x) => Math.sqrt(x));
  }
}
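
// Example of the band format described above (a hypothetical ~1/3 octave band
// centred at 1 kHz; the pen builds the real ones with generateOctaveBands()):
//   const band = {lo: 891, ctr: 1000, hi: 1122};
//   const swiftAnalyzer = new SWIFT([band]); // order, timeRes, etc. keep the defaults above
//   swiftAnalyzer.analyze(chunkOfSamples);   // chunkOfSamples is a placeholder name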
</script>
<script>
/**
 * Single file implementation of variable-Q sliding DFT (VQ-sDFT)
 *
 * The frequency bands data is formatted like:
 * {lo: lowerBound,
 *  ctr: center,
 *  hi: higherBound}
 *
 * where lo and hi are used for calculating the necessary bandwidth for variable-Q/constant-Q transform spectrum analysis and ctr for center frequency. This is generated using functions like generateFreqBands()
 *
 * Note: This algorithm is derived from the paper "Application of Improved Sliding DFT Algorithm for Non-Integer k" by Carl Q. Howard (https://acoustics.asn.au/conference_proceedings/AAS2021/papers/p60.pdf)
 */
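/*
 * Rough per-bin outline of what analyze() below computes (my reading of the
 * structure described in the paper above; N is the per-band period and k may
 * be non-integer, which is what allows variable-Q/constant-Q bands):
 *   comb:      c[n] = x[n]*e^(-j*2*pi*k) - x[n-N]
 *   rotate:    u[n] = c[n]*e^(j*2*pi*k/N) - c[n-1]
 *   resonator: y[n] = u[n] + 2*cos(2*pi*k/N)*y[n-1] - y[n-2]
 * The bin value is y[n]/N, with one such chain per frequency-domain window
 * term (see the gains array), and the windowed bin is their sum.
 */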
class VQsDFT {
  constructor(...args) {
    this.calcCoeffs(...args);
    this.spectrumData = [];
  }
  
  calcCoeffs(freqBands, window = [1, 0.5], timeRes = 600, bandwidth = 1, bufferSize = 44100, sampleRate = 44100, useNC = false) {
    this._coeffs = freqBands.map(x => {
      const fiddles = [],
            twiddles = [],
            resonCoeffs = [],
            coeffs1 = [],
            coeffs2 = [],
            coeffs3 = [],
            coeffs4 = [],
            coeffs5 = [],
            gains = [],
            period = Math.trunc(Math.min(bufferSize, sampleRate / (bandwidth * Math.abs(x.hi - x.lo) + 1/(timeRes / 1000)))), // N must be an integer, but K doesn't have to be
            minIdx = useNC ? 0 : -window.length + 1,
            maxIdx = useNC ? 2 : window.length;
      // the loop below builds one kernel term per frequency-domain window coefficient, since the window is applied in the frequency domain
      for (let i = minIdx; i < maxIdx; i++) {
        const amplitude = useNC ? 1 : window[Math.abs(i)] * (-(Math.abs(i) % 2) * 2 + 1),
              k = x.ctr * period / sampleRate + i - useNC/2,
              fid = -2 * Math.PI * k,
              twid = 2 * Math.PI * k / period,
              reson = 2 * Math.cos(2*Math.PI*k/period);
        fiddles.push({
          x: Math.cos(fid),
          y: Math.sin(fid)
        });
        twiddles.push({
          x: Math.cos(twid),
          y: Math.sin(twid)
        });
        resonCoeffs.push(reson);
        coeffs1.push({x: 0, y: 0});
        coeffs2.push({x: 0, y: 0});
        coeffs3.push({x: 0, y: 0});
        coeffs4.push({x: 0, y: 0});
        coeffs5.push({x: 0, y: 0});
        gains.push(amplitude);
      }
      return {
        period: period,
        twiddles: twiddles,
        fiddles: fiddles,
        resonCoeffs: resonCoeffs,
        coeffs1: coeffs1,
        coeffs2: coeffs2,
        coeffs3: coeffs3,
        coeffs4: coeffs4,
        coeffs5: coeffs5,
        gains: gains,
        nc: useNC
      };
    });
    this._buffer = new Array(bufferSize+1).fill(0);
    this._bufferIdx = this._buffer.length-1; // this is required for circular buffer
  }
  
  analyze(samples) {
    this.spectrumData = new Array(this._coeffs.length).fill(0);
    for (const sample of samples) {
      // Admittedly slow linear buffer
      /*
      this._buffer.push(sample);
      this._buffer.shift();
      */
      // Circular buffer
      this._bufferIdx = ((this._bufferIdx + 1) % this._buffer.length + this._buffer.length) % this._buffer.length;
      this._buffer[this._bufferIdx] = sample;
      for (let i = 0; i < this._coeffs.length; i++) {
        const coeff = this._coeffs[i],
              kernelLength = coeff.coeffs1.length,
              /*oldest = this._buffer.length-coeff.period-1,
              latest = this._buffer.length-1,*/
              oldest = ((this._bufferIdx - coeff.period) % this._buffer.length + this._buffer.length) % this._buffer.length,
              latest = this._bufferIdx,
              sum = {
                x: 0,
                y: 0
              };
        for (let j = 0; j < kernelLength; j++) {
          const fiddle = coeff.fiddles[j],
                twiddle = coeff.twiddles[j],
                // Comb stage
                combX = this._buffer[latest] * fiddle.x - this._buffer[oldest],
                combY = this._buffer[latest] * fiddle.y;
          
          // Second stage
          coeff.coeffs1[j].x = combX * twiddle.x - combY * twiddle.y - coeff.coeffs2[j].x;
          coeff.coeffs1[j].y = combX * twiddle.y + combY * twiddle.x - coeff.coeffs2[j].y;
          
          coeff.coeffs2[j].x = combX;
          coeff.coeffs2[j].y = combY;
          
          // Real resonator
          coeff.coeffs3[j].x = coeff.coeffs1[j].x + coeff.resonCoeffs[j] * coeff.coeffs4[j].x - coeff.coeffs5[j].x;
          coeff.coeffs3[j].y = coeff.coeffs1[j].y + coeff.resonCoeffs[j] * coeff.coeffs4[j].y - coeff.coeffs5[j].y;
          
          coeff.coeffs5[j].x = coeff.coeffs4[j].x;
          coeff.coeffs5[j].y = coeff.coeffs4[j].y;
          
          coeff.coeffs4[j].x = coeff.coeffs3[j].x;
          coeff.coeffs4[j].y = coeff.coeffs3[j].y;
          
          sum.x += coeff.coeffs3[j].x * coeff.gains[j] / coeff.period;
          sum.y += coeff.coeffs3[j].y * coeff.gains[j] / coeff.period;
        }
        const period = coeff.period;
        this.spectrumData[i] = Math.max(
          this.spectrumData[i],
          coeff.nc
            ? -(coeff.coeffs3[0].x/period * coeff.coeffs3[1].x/period) - (coeff.coeffs3[0].y/period * coeff.coeffs3[1].y/period)
            : sum.x ** 2 + sum.y ** 2);
      }
    }
    this.spectrumData = this.spectrumData.map(x => Math.sqrt(x));
  }
}
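
// Note on the window coefficient lists passed to calcCoeffs (e.g. '1, 0.5' for Hann):
// when the NC option is off, each value w[m] is applied to the bins at offsets ±m
// with alternating sign, so the windowed bin is roughly
//   X_w[k] = w[0]*X[k] - w[1]*(X[k-1] + X[k+1]) + w[2]*(X[k-2] + X[k+2]) - ...
// with each term scaled by 1/N. This is the frequency-domain equivalent of a
// cosine-sum window, up to an overall gain, which is why Hann needs only the two
// terms 1 and 0.5.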
</script>
<script>
  function map(x, min, max, targetMin, targetMax) {
    return (x - min) / (max - min) * (targetMax - targetMin) + targetMin;
  }
  
  function clamp(x, min, max) {
    return Math.min(Math.max(x, min), max);
  }
  
  function idxWrapOver(x, length) {
    return (x % length + length) % length;
  }
  // Hz and FFT bin conversion
function hertzToFFTBin(x, y = 'round', bufferSize = 4096, sampleRate = 44100) {
  const bin = x * bufferSize / sampleRate;
  let func = y;
  
  if (!['floor','ceil','trunc'].includes(func))
    func = 'round'; // fall back to rounding when an invalid or undefined mode is specified
  
  return Math[func](bin);
}

function fftBinToHertz(x, bufferSize = 4096, sampleRate = 44100) {
  return x * sampleRate / bufferSize;
}
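
// For example, with the defaults (bufferSize = 4096, sampleRate = 44100):
//   hertzToFFTBin(440) -> Math.round(440 * 4096 / 44100) = 41
//   fftBinToHertz(41)  -> 41 * 44100 / 4096 ≈ 441.43 Hz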
  

// Calculate the FFT
function calcFFT(input) {
  let fft = input.map(x => x);
  let fft2 = input.map(x => x);
  transform(fft, fft2);
  let output = new Array(Math.round(fft.length/2)).fill(0);
  for (let i = 0; i < output.length; i++) {
    output[i] = Math.hypot(fft[i], fft2[i])/(fft.length);
  }
  return output;
}

function calcComplexFFT(input) {
  let fft = input.map(x => x);
  let fft2 = input.map(x => x);
  transform(fft, fft2);
  return input.map((_, i, arr) => {
    return {
      re: fft[i]/(arr.length/2),
      im: fft2[i]/(arr.length/2),
      magnitude: Math.hypot(fft[i], fft2[i])/(arr.length/2),
      phase: Math.atan2(fft2[i], fft[i])
    };
  });
}
  
function calcComplexInputFFT(real, imag) {
  if (real.length !== imag.length)
    return [];
  const fft1 = real.map(x => x),
        fft2 = imag.map(x => x);
  transform(fft1, fft2);
  return real.map((_, i, arr) => {
    return {
      re: fft1[i]/arr.length,
      im: fft2[i]/arr.length,
      magnitude: Math.hypot(fft1[i], fft2[i])/arr.length,
      phase: Math.atan2(fft2[i], fft1[i])
    }
  });
}
  
  /**
 * FFT and convolution (JavaScript)
 * 
 * Copyright (c) 2017 Project Nayuki. (MIT License)
 * https://www.nayuki.io/page/free-small-fft-in-multiple-languages
 */

/* 
 * Computes the discrete Fourier transform (DFT) of the given complex vector, storing the result back into the vector.
 * The vector can have any length. This is a wrapper function.
 */
function transform(real, imag) {
	const n = real.length;
	if (n != imag.length)
		throw "Mismatched lengths";
	if (n <= 0)
		return;
	else if ((2 ** Math.trunc(Math.log2(n))) === n)  // Is power of 2
		transformRadix2(real, imag);
	else  // More complicated algorithm for arbitrary sizes
		transformBluestein(real, imag);
}


/* 
 * Computes the inverse discrete Fourier transform (IDFT) of the given complex vector, storing the result back into the vector.
 * The vector can have any length. This is a wrapper function. This transform does not perform scaling, so the inverse is not a true inverse.
 */
function inverseTransform(real, imag) {
	transform(imag, real);
}


/* 
 * Computes the discrete Fourier transform (DFT) of the given complex vector, storing the result back into the vector.
 * The vector's length must be a power of 2. Uses the Cooley-Tukey decimation-in-time radix-2 algorithm.
 */
function transformRadix2(real, imag) {
	// Length variables
	const n = real.length;
	if (n != imag.length)
		throw "Mismatched lengths";
	if (n <= 1)  // Trivial transform
		return;
	const logN = Math.log2(n);
	if ((2 ** Math.trunc(logN)) !== n)
		throw "Length is not a power of 2";
	
	// Trigonometric tables
	let cosTable = new Array(n / 2);
	let sinTable = new Array(n / 2);
	for (let i = 0; i < n / 2; i++) {
		cosTable[i] = Math.cos(2 * Math.PI * i / n);
		sinTable[i] = Math.sin(2 * Math.PI * i / n);
	}
	
	// Bit-reversed addressing permutation
	for (let i = 0; i < n; i++) {
		let j = reverseBits(i, logN);
		if (j > i) {
			let temp = real[i];
			real[i] = real[j];
			real[j] = temp;
			temp = imag[i];
			imag[i] = imag[j];
			imag[j] = temp;
		}
	}
	
	// Cooley-Tukey decimation-in-time radix-2 FFT
	for (let size = 2; size <= n; size *= 2) {
		let halfsize = size / 2;
		let tablestep = n / size;
		for (let i = 0; i < n; i += size) {
			for (let j = i, k = 0; j < i + halfsize; j++, k += tablestep) {
				const l = j + halfsize;
				const tpre =  real[l] * cosTable[k] + imag[l] * sinTable[k];
				const tpim = -real[l] * sinTable[k] + imag[l] * cosTable[k];
				real[l] = real[j] - tpre;
				imag[l] = imag[j] - tpim;
				real[j] += tpre;
				imag[j] += tpim;
			}
		}
	}
	
	// Returns the integer whose value is the reverse of the lowest 'bits' bits of the integer 'x'.
	function reverseBits(x, bits) {
		let y = 0;
		for (let i = 0; i < bits; i++) {
			y = (y << 1) | (x & 1);
			x >>>= 1;
		}
		return y;
	}
}


/* 
 * Computes the discrete Fourier transform (DFT) of the given complex vector, storing the result back into the vector.
 * The vector can have any length. This requires the convolution function, which in turn requires the radix-2 FFT function.
 * Uses Bluestein's chirp z-transform algorithm.
 */
function transformBluestein(real, imag) {
	// Find a power-of-2 convolution length m such that m >= n * 2 + 1
	const n = real.length;
	if (n != imag.length)
		throw "Mismatched lengths";
	const m = 2 ** Math.trunc(Math.log2(n*2)+1);
	
	// Trigonometric tables
	let cosTable = new Array(n);
	let sinTable = new Array(n);
	for (let i = 0; i < n; i++) {
		let j = i * i % (n * 2);  // This is more accurate than j = i * i
		cosTable[i] = Math.cos(Math.PI * j / n);
		sinTable[i] = Math.sin(Math.PI * j / n);
	}
	
	// Temporary vectors and preprocessing
	let areal = newArrayOfZeros(m);
	let aimag = newArrayOfZeros(m);
	for (let i = 0; i < n; i++) {
		areal[i] =  real[i] * cosTable[i] + imag[i] * sinTable[i];
		aimag[i] = -real[i] * sinTable[i] + imag[i] * cosTable[i];
	}
	let breal = newArrayOfZeros(m);
	let bimag = newArrayOfZeros(m);
	breal[0] = cosTable[0];
	bimag[0] = sinTable[0];
	for (let i = 1; i < n; i++) {
		breal[i] = breal[m - i] = cosTable[i];
		bimag[i] = bimag[m - i] = sinTable[i];
	}
	
	// Convolution
	let creal = new Array(m);
	let cimag = new Array(m);
	convolveComplex(areal, aimag, breal, bimag, creal, cimag);
	
	// Postprocessing
	for (let i = 0; i < n; i++) {
		real[i] =  creal[i] * cosTable[i] + cimag[i] * sinTable[i];
		imag[i] = -creal[i] * sinTable[i] + cimag[i] * cosTable[i];
	}
}


/* 
 * Computes the circular convolution of the given real vectors. Each vector's length must be the same.
 */
function convolveReal(x, y, out) {
	const n = x.length;
	if (n != y.length || n != out.length)
		throw "Mismatched lengths";
	convolveComplex(x, newArrayOfZeros(n), y, newArrayOfZeros(n), out, newArrayOfZeros(n));
}


/* 
 * Computes the circular convolution of the given complex vectors. Each vector's length must be the same.
 */
function convolveComplex(xreal, ximag, yreal, yimag, outreal, outimag) {
	const n = xreal.length;
	if (n != ximag.length || n != yreal.length || n != yimag.length
			|| n != outreal.length || n != outimag.length)
		throw "Mismatched lengths";
	
	xreal = xreal.slice();
	ximag = ximag.slice();
	yreal = yreal.slice();
	yimag = yimag.slice();
	transform(xreal, ximag);
	transform(yreal, yimag);
	
	for (let i = 0; i < n; i++) {
		const temp = xreal[i] * yreal[i] - ximag[i] * yimag[i];
		ximag[i] = ximag[i] * yreal[i] + xreal[i] * yimag[i];
		xreal[i] = temp;
	}
	inverseTransform(xreal, ximag);
	
	for (let i = 0; i < n; i++) {  // Scaling (because this FFT implementation omits it)
		outreal[i] = xreal[i] / n;
		outimag[i] = ximag[i] / n;
	}
}


function newArrayOfZeros(n) {
	let result = new Array(n).fill(0);
	return result;
}
</script>
              
            

CSS

              
body {
  margin: 0;
  overflow: hidden;
}

audio {
  display: inline-block;
  width: 100%;
  height: 40px;
}

canvas {
  display: block;
  width: 100%;
}

#container {
  height: calc( 100vh - 40px );
}

#audioFileInput {
  display: none;
}
              
            

JS

              
// necessary parts for audio context and audio elements respectively
const audioCtx = new AudioContext();
const audioPlayer = document.getElementById('audio');
const localAudioElement = document.getElementById('audioFileInput');
localAudioElement.addEventListener('change', loadLocalFile);
// canvas is for displaying visuals
const canvas = document.getElementById('canvas'),
      ctx = canvas.getContext('2d'),
      container = document.getElementById('container');
const audioSource = audioCtx.createMediaElementSource(audioPlayer);
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 32768; // maxes out FFT size
const dataArray = new Float32Array(analyser.fftSize);
// variables
const currentSpectrum = [],
      peaks = [],
      peakHolds = [],
      averageSpectrum = [],
      fifoBuffers = [];
let cumulativeIdx = 0, //required for infinite averaging
    fifoIdx = 0;
const delay = audioCtx.createDelay();
audioSource.connect(delay);
delay.connect(audioCtx.destination);
//audioSource.connect(audioCtx.destination);
audioSource.connect(analyser);
let audioProvider,
    currentSampleRate = audioCtx.sampleRate,
    freqBands = [];
const analogStyleAnalyser = new AnalogStyleAnalyzer([]),
      swift = new SWIFT([]),
      sdft = new VQsDFT([]);
const customDSPSource = document.getElementById('AudioProvider'),
      dspSourceBlob = new Blob([customDSPSource.innerText], {type: 'application/javascript'}),
      dspSourceUrl = URL.createObjectURL(dspSourceBlob);
const auxCanvas = new OffscreenCanvas(0,0), // OffscreenCanvas is needed for spectrogram visualization
      auxCtx = auxCanvas.getContext('2d');
auxCtx.imageSmoothingEnabled = false;
let accumulatedData = [],
    sampleCounter = 0,
    staticSpectrogramIdx = 0,
    accumulatedSpectrum = [],
    lastAccumulatedSpectrum = [];

const visualizerSettings = {
  //fftSize: 1152,
  freqDist: 'octaves',
  numBands: 50, // similar to WMP's Bars visualization when the number of bands is at the maximum
  minFreq: 20,
  maxFreq: 20000,
  fscale: 'logarithmic',
  hzLinearFactor: 0,
  minNote: 4,
  maxNote: 124,
  noteTuning: 1000, // setting this to 1 kHz should make the one-third octave band center frequencies line up with the ANSI S1.11-2004 standard
  octaves: 6, // defaults to something similar to Spectroscope visualization in WaveLab
  detune: 0,
  analysisAlgorithm: 'analog',
  bandwidth: 1,
  order: 1,
  prewarpQ: true,
  compensateBW: true,
  windowFunction: '1, 0.5',
  customWindow: '1',
  useNC: false,
  timeRes: 100,
  maxTimeRes: 1000,
  constantQ: true,
  resetCoeffs: recalcCoeffs,
  resetAverages: resetSmoothedValues,
  useAccurateSmoothing: true,
  antiFlicker: false, // relevant for analog-style analyzer and sample-by-sample smoothing calculation
  smoothingTimeConstant: 90, // default value is approximately the main bar of audio visualizer thing in Geometry Dash 2.2
  useAverageSmoothing: false,
  peakDecay: 0,
  peakHold: 30,
  useActualPeak: false,
  fadingPeaks: true, // this effect is used on peak hold part of Audio Visualizer effect on GD 2.2
  // minDecibels and maxDecibels defaults to -60...+6 to match foobar2000's built-in Spectrum visualization
  minDecibels: -60,
  maxDecibels: 6,
  useDecibels: true,
  gamma: 1,
  useAbsolute: true,
  decoupleAmplitudeFromSpectrum: true,
  // spectrogram part
  altMinDecibels: -66,
  altMaxDecibels: 0,
  altUseDecibels: true,
  altGamma: 1,
  altUseAbsolute: true,
  showLabels: true,
  showLabelsY: true,
  amplitudeLabelInterval: 10,
  labelTuning: 440,
  showDC: true,
  showNyquist: true,
  mirrorLabels: true,
  spectrogramExtendGrid: false,
  diffLabels: false,
  labelTextAlign: 'start',
  labelTextBaseline: 'alphabetic',
  labelTextBaseline2: 'alphabetic',
  labelMode : 'decade',
  freeze: false,
  pauseAverage: true,
  freezeFIFO: true,
  useGradient: true,
  alternateColor: false,
  darkMode: false,
  showMain: true,
  showPeaks: true,
  showAverage: false,
  averagingDomain: 'rms', // the Enhanced Spectrum analyzer (foo_enhanced_spectrum_analyzer) component should have used the "recommended" way of calculating the average spectrum, namely averaging in the squared (x^2) domain and taking the square root afterwards; only time will tell whether the upcoming Enhanced Spectrum analyzer 2.0.0.0 adds true RMS averaging as well as infinite averaging
  showRMS: false,
  fifoLength: 300,
  fifoDomain: 'rms',
  showCalibration: false,
  calibrationSrc: 'main',
  calibrationDomain: 'linear',
  barSpacing: 2,
  spacingMode: 'smooth',
  centerBars: true,
  peakHeight: 2,
  drawLines: false, // Draws lines as in foo_enhanced_spectrum_analyzer instead of bargraph like in foo_musical_spectrum
  lineWidth: 1,
  lineJoin: 'miter',
  miterLimit: 10,
  drawMode: 'fill',
  drawMode2: 'stroke',
  drawMode3: 'fill',
  display: 'spectrum',
  resetBoth: resetBoth,
  autoReset: true,
  useIncorrectWay: false, // when enabled, it uses the wrong way of getting samples
  fftSize: 576, // default is the buffer length of PCM data on Winamp's visualization system
  hopSize: 576, // determines the scrolling speed of the spectrogram part
  channelMode: 'mono',
  preventGainIncreaseFromChannelSum: true,
  channelIdx1: 0,
  channelIdx2: 1,
  reverseIdx1: false,
  reverseIdx2: false,
  //compensateDelay: true
},
      drawModes = {
        'Stroke': 'stroke',
        'Fill': 'fill',
        'Both': 'both'
      },
      loader = {
        url: '',
        load: function() {
          audioPlayer.src = this.url;
          audioPlayer.play();
        },
        loadLocal: function() {
          localAudioElement.click();
        },
        toggleFullscreen: _ => {
          if (document.fullscreenElement === canvas)
            document.exitFullscreen();
          else
            canvas.requestFullscreen();
        }
      };
// dat.GUI for quick customization
let gui = new dat.GUI();
gui.add(loader, 'url').name('URL');
gui.add(loader, 'load').name('Load');
gui.add(loader, 'loadLocal').name('Load from local device');
let settings = gui.addFolder('Visualization settings');
// FFT size can be a non-power of 2 because the FFT library used here supports arbitrary data lengths
//settings.add(visualizerSettings, 'fftSize', 32, 32768, 1).name('FFT size');
// The additional parameters go here
// more parameters are added at the end
const freqDistFolder = settings.addFolder('Frequency distribution');
freqDistFolder.add(visualizerSettings, 'freqDist', {
  'Frequency bands': 'freqs',
  'Octave bands': 'octaves'
}).name('Frequency band distribution').onChange(recalcCoeffs);
// up to 192kHz sample rate is supported for full-range visualization
freqDistFolder.add(visualizerSettings, 'minFreq', 0, 96000).name('Minimum frequency').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'maxFreq', 0, 96000).name('Maximum frequency').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'minNote', 0, 128).name('Minimum note').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'maxNote', 0, 128).name('Maximum note').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'noteTuning', 0, 96000).name('Octave bands tuning (nearest note = tuning frequency in Hz)').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'detune', -24, 24).name('Detune').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'numBands', 2, 1920, 1).name('Number of bands').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'octaves', 1, 192).name('Bands per octave').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'fscale', {'Bark': 'bark',
        'ERB': 'erb',
        'Cams': 'cam',
        'Mel (AIMP)': 'mel',
        'Linear': 'linear',
        'Logarithmic': 'logarithmic',
        'Hyperbolic sine': 'sinh',
        'Shifted logarithmic': 'shifted log',
        'Nth root': 'nth root',
        'Negative exponential': 'negative exponential',
        'Adjustable Bark': 'adjustable bark',
        'Period': 'period'}).name('Frequency scale').onChange(recalcCoeffs);
freqDistFolder.add(visualizerSettings, 'hzLinearFactor', 0, 100).name('Hz linear factor').onChange(recalcCoeffs);
const transformFolder = settings.addFolder('Transform algorithm');
transformFolder.add(visualizerSettings, 'analysisAlgorithm', {
  'Analog-style analyzer': 'analog',
  'Sliding windowed infinite Fourier transform': 'swift',
  'Variable-Q sliding DFT': 'sdft'
}).name('Analysis algorithm').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'bandwidth', 0, 64).name('Bandwidth').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'order', 1, 8, 1).name('Filter order').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'windowFunction', {
  'Rectangular': '1',
  'Hann': '1, 0.5',
  'Hamming': '1, 0.4259434938430786',
  'Blackman': '1, 0.595257580280304, 0.0952545627951622',
  'Nuttall': '1, 0.6850073933601379, 0.20272639393806458, 0.017719272524118423',
  'Flat top': '1, 0.966312825679779, 0.6430955529212952, 0.19387830793857574, 0.016120079904794693',
  'Custom': 'custom'
}).name('Window function').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'customWindow').name('Custom frequency-domain windowing coefficients').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'useNC').name('Use NC method (VQ-sDFT only)').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'prewarpQ').name('Use prewarped Q (analog-style analyzer only)').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'compensateBW').name('Compensate bandwidth for narrowing on higher order filters (IIR filter banks only)').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'timeRes', 0, 2000).name('Time resolution').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'constantQ').name('Use constant-Q instead of variable-Q').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'maxTimeRes', 0, 8000).name('Maximum time resolution').onChange(recalcCoeffs);
transformFolder.add(visualizerSettings, 'resetCoeffs').name('Reset coefficients');
const channelFolder = settings.addFolder('Channel configuration');
channelFolder.add(visualizerSettings, 'channelMode', {
  'Mono': 'mono',
  'Left': 'left',
  'Right': 'right',
  'Mid (sum)': 'mid',
  'Side (difference)': 'side'
}).name('Channel mode');
channelFolder.add(visualizerSettings, 'channelIdx1', 0, 32, 1).name('First channel index');
channelFolder.add(visualizerSettings, 'channelIdx2', 0, 32, 1).name('Second channel index');
channelFolder.add(visualizerSettings, 'reverseIdx1').name('Reverse first channel index');
channelFolder.add(visualizerSettings, 'reverseIdx2').name('Reverse second channel index');
channelFolder.add(visualizerSettings, 'preventGainIncreaseFromChannelSum').name('Prevent gain increase on summation of multiple audio channels');
const peakFolder = settings.addFolder('Time averaging and peak decay settings');
peakFolder.add(visualizerSettings, 'useAccurateSmoothing').name('Apply time smoothing during processing').onChange(resetFIFO);
peakFolder.add(visualizerSettings, 'antiFlicker').name('Reduce flickering on fast or no smoothing settings').onChange(resetFIFO);
peakFolder.add(visualizerSettings, 'smoothingTimeConstant', 0, 100).name('Smoothing time constant'); // you can use this Desmos graph: https://www.desmos.com/calculator/ictdd2ep8g to determine the smoothing time constant value for particular dB per second decay time (e.g. to get -20dB/sec decay time, the smoothing time constant value is 96.2351% assuming 60fps)
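// (A rough sketch of that relationship, assuming the smoothing is applied as an
// exponential per-frame multiplier k = smoothingTimeConstant/100 at 60 fps:
// decay rate ≈ 20*log10(k)*60 dB/s, so k = 0.962351 gives about -20 dB/s.)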
peakFolder.add(visualizerSettings, 'useAverageSmoothing').name('Use exponential average instead of peak decay');
peakFolder.add(visualizerSettings, 'peakHold', 0, 240).name('Peak hold time');
peakFolder.add(visualizerSettings, 'peakDecay', 0, 100).name('Peak fall rate');
peakFolder.add(visualizerSettings, 'useActualPeak').name('Use actual peak');
peakFolder.add(visualizerSettings, 'fifoLength', 0, 3000).name('FIFO averaging length (milliseconds)').onChange(resetFIFO);
peakFolder.add(visualizerSettings, 'fifoDomain', {
  'Linear': 'linear',
  'Squared (RMS)': 'rms',
  'Logarithmic': 'log'
}).name('FIFO averaging domain');
peakFolder.add(visualizerSettings, 'averagingDomain', {
  'Linear': 'linear',
  'Squared (RMS)': 'rms',
  'Logarithmic': 'log'
}).name('Averaging domain').onChange(resetBoth);
peakFolder.add(visualizerSettings, 'resetAverages').name('Reset smoothed values and peaks');
const amplitudeFolder = settings.addFolder('Amplitude');
amplitudeFolder.add(visualizerSettings, 'useDecibels').name('Use logarithmic amplitude/decibel scale');
amplitudeFolder.add(visualizerSettings, 'useAbsolute').name('Use absolute value');
amplitudeFolder.add(visualizerSettings, 'gamma', 0.5, 10).name('Gamma');
amplitudeFolder.add(visualizerSettings, 'minDecibels', -120, 6).name('Lower amplitude range');
amplitudeFolder.add(visualizerSettings, 'maxDecibels', -120, 6).name('Higher amplitude range');
amplitudeFolder.add(visualizerSettings, 'decoupleAmplitudeFromSpectrum').name('Decouple amplitude scaling of spectrogram from spectrum');
const altAmplitudeFolder = amplitudeFolder.addFolder('Spectrogram colormap scaling');
altAmplitudeFolder.add(visualizerSettings, 'altUseDecibels').name('Use logarithmic amplitude/decibel scale');
altAmplitudeFolder.add(visualizerSettings, 'altUseAbsolute').name('Use absolute value');
altAmplitudeFolder.add(visualizerSettings, 'altGamma', 0.5, 10).name('Gamma');
altAmplitudeFolder.add(visualizerSettings, 'altMinDecibels', -120, 6).name('Lower amplitude range');
altAmplitudeFolder.add(visualizerSettings, 'altMaxDecibels', -120, 6).name('Higher amplitude range');
const labelFolder = settings.addFolder('Labels and grids');
labelFolder.add(visualizerSettings, 'showLabels').name('Show horizontal-axis labels');
labelFolder.add(visualizerSettings, 'showLabelsY').name('Show vertical-axis labels');
labelFolder.add(visualizerSettings, 'amplitudeLabelInterval', 0.5, 48).name('dB label interval');
labelFolder.add(visualizerSettings, 'showDC').name('Show DC label');
labelFolder.add(visualizerSettings, 'showNyquist').name('Show Nyquist frequency label');
labelFolder.add(visualizerSettings, 'mirrorLabels').name('Mirror Y-axis labels');
labelFolder.add(visualizerSettings, 'spectrogramExtendGrid').name('Extend spectrogram gridlines into screen size');
labelFolder.add(visualizerSettings, 'labelTextAlign', {
  'Start': 'start',
  'Center': 'center',
  'End': 'end'
}).name('Frequency label text alignment');
labelFolder.add(visualizerSettings, 'labelTextBaseline', {
  'Alphabetic': 'alphabetic',
  'Middle': 'middle',
  'Hanging': 'hanging'
}).name('dB label text alignment');
labelFolder.add(visualizerSettings, 'labelTextBaseline2', {
  'Alphabetic': 'alphabetic',
  'Middle': 'middle',
  'Hanging': 'hanging'
}).name('Spectrogram frequency label text alignment');
labelFolder.add(visualizerSettings, 'diffLabels').name('Use difference coloring for labels');
labelFolder.add(visualizerSettings, 'labelMode', {
  'Decades': 'decade',
  'Decades (coarse)': 'decade 2',
  'Decades (without minor gridlines)': 'decade 3',
  'Octaves': 'octave',
  'Powers of two': 'powers of two',
  'Notes': 'note',
  'Critical bands': 'bark',
  'Linear': 'linear',
  'Automatic': 'auto'
}).name('Frequency label mode');
labelFolder.add(visualizerSettings, 'labelTuning', 0, 96000).name('Note labels tuning (nearest note = tuning frequency in Hz)');
const calibrationFolder = labelFolder.addFolder('Calibration line');
calibrationFolder.add(visualizerSettings, 'showCalibration').name('Show calibration line');
calibrationFolder.add(visualizerSettings, 'calibrationSrc', {
  'Main': 'main',
  'FIFO average': 'avg',
  'Cumulative average': 'cumulative',
  'Peaks': 'peaks'
}).name('Calibration line calculation source');
calibrationFolder.add(visualizerSettings, 'calibrationDomain', {
  'Linear': 'linear',
  'Squared (RMS)': 'rms',
  'Logarithmic': 'log'
}).name('Calibration line calculation domain');
const appearanceFolder = settings.addFolder('Appearance');
appearanceFolder.add(visualizerSettings, 'display', {
  'Spectrum': 'spectrum',
  'Spectrogram': 'spectrogram',
  'Static spectrogram': 'static',
  'Combined spectrum and spectrogram': 'both'
}).name('Display which').onChange(resizeCanvas);
appearanceFolder.add(visualizerSettings, 'hopSize', 32, 32768, 1).name('Spectrogram hop length (samples)');
appearanceFolder.add(visualizerSettings, 'showMain').name('Show main graph');
appearanceFolder.add(visualizerSettings, 'showPeaks').name('Show peaks');
appearanceFolder.add(visualizerSettings, 'fadingPeaks').name('Enable peak fading effect');
appearanceFolder.add(visualizerSettings, 'showAverage').name('Show infinite average (cumulative) spectrum');
appearanceFolder.add(visualizerSettings, 'showRMS').name('Show RMS spectrum');
appearanceFolder.add(visualizerSettings, 'useGradient').name('Use color gradient');
appearanceFolder.add(visualizerSettings, 'alternateColor').name('Use alternate color gradient');
appearanceFolder.add(visualizerSettings, 'peakHeight', 0.5, 32).name('Peak indicator height');
appearanceFolder.add(visualizerSettings, 'barSpacing', 0, 1024).name('Bar spacing');
appearanceFolder.add(visualizerSettings, 'spacingMode', ['rough', 'smooth', 'pixel perfect']).name('Bar spacing mode');
appearanceFolder.add(visualizerSettings, 'centerBars').name('Center bars');
appearanceFolder.add(visualizerSettings, 'drawLines').name('Draw lines/area graphs instead of bars');
appearanceFolder.add(visualizerSettings, 'lineWidth', 0.5, 10).name('Line width');
appearanceFolder.add(visualizerSettings, 'lineJoin', {
  'Miter': 'miter',
  'Round': 'round',
  'Bevel': 'bevel'
}).name('Line join');
appearanceFolder.add(visualizerSettings, 'miterLimit', 1, 100).name('Line miter limit');
appearanceFolder.add(visualizerSettings, 'drawMode', drawModes).name('Main graph draw mode');
appearanceFolder.add(visualizerSettings, 'drawMode2', drawModes).name('Peak draw mode');
appearanceFolder.add(visualizerSettings, 'drawMode3', drawModes).name('Average draw mode');
appearanceFolder.add(visualizerSettings, 'darkMode').name('Dark mode');
settings.add(visualizerSettings, 'autoReset').name('Enable auto-reset');
settings.add(visualizerSettings, 'useIncorrectWay').name('Use getFloatTimeDomainData instead of AudioWorklet').onChange(resetFIFO);
settings.add(visualizerSettings, 'fftSize', 32, 32768, 1).name('getFloatTimeDomainData buffer length (samples)');
settings.add(visualizerSettings, 'pauseAverage').name('Freeze infinite average spectrum');
settings.add(visualizerSettings, 'freezeFIFO').name('Freeze FIFO average spectrum');
settings.add(visualizerSettings, 'freeze').name('Freeze analyzer');
settings.add(visualizerSettings, 'resetBoth').name('Reset both coefficients and smoothing');
//settings.add(visualizerSettings, 'compensateDelay').name('Compensate for delay');
gui.add(loader, 'toggleFullscreen').name('Toggle fullscreen mode');

function resetBoth() {
  resetSmoothedValues();
  recalcCoeffs();
}
function autoReset() {
  if (visualizerSettings.autoReset && !visualizerSettings.freeze)
    resetBoth();
}
// this below makes it more faithful to how foobar2000 visualizations work
audioPlayer.addEventListener('play', autoReset);
audioPlayer.addEventListener('seeked', autoReset);

function resetSmoothedValues() {
  cumulativeIdx = 0;
  fifoIdx = 0;
  auxCtx.clearRect(0, 0, canvas.width, canvas.height);
  accumulatedData.length = 0;
  sampleCounter = 0;
  updateSpectrumVisualization([]);
  updateAccumulatedSpectrum([]);
  staticSpectrogramIdx = 0;
}

function resetFIFO() {
  fifoIdx = 0;
  fifoBuffers.length = 0;
}

function recalcCoeffs() {
  switch(visualizerSettings.freqDist) {
    case 'octaves':
      freqBands = generateOctaveBands(visualizerSettings.octaves, visualizerSettings.minNote, visualizerSettings.maxNote, visualizerSettings.detune, visualizerSettings.noteTuning);
      break;
    default:
      freqBands = generateFreqBands(visualizerSettings.numBands, visualizerSettings.minFreq, visualizerSettings.maxFreq, visualizerSettings.fscale, visualizerSettings.hzLinearFactor/100);
  }
  
  const windowingKernel = parseList(visualizerSettings.windowFunction === 'custom' ? visualizerSettings.customWindow : visualizerSettings.windowFunction),
        timeRes = visualizerSettings.constantQ ? Infinity : visualizerSettings.timeRes,
        iirArgs = [freqBands, visualizerSettings.order, timeRes, visualizerSettings.bandwidth, audioCtx.sampleRate, visualizerSettings.compensateBW, visualizerSettings.prewarpQ],
        firArgs = [freqBands, windowingKernel, timeRes, visualizerSettings.bandwidth, Math.round(audioCtx.sampleRate*visualizerSettings.maxTimeRes/1000), audioCtx.sampleRate, visualizerSettings.useNC];
  analogStyleAnalyser.calcCoeffs([]);
  swift.calcCoeffs([]);
  sdft.calcCoeffs([]);
  switch (visualizerSettings.analysisAlgorithm) {
    case 'analog':
      analogStyleAnalyser.calcCoeffs(...iirArgs);
      break;
    case 'swift':
      swift.calcCoeffs(...iirArgs);
      break;
    default:
      sdft.calcCoeffs(...firArgs);
  }
}
recalcCoeffs();

function resizeCanvas() {
  const scale = devicePixelRatio,
        isFullscreen = document.fullscreenElement === canvas;
  canvas.width = (isFullscreen ? innerWidth : container.clientWidth)*scale;
  canvas.height = (isFullscreen ? innerHeight : container.clientHeight)*scale;
  auxCanvas.width = canvas.width;
  auxCanvas.height = visualizerSettings.display === 'both' ? Math.trunc(canvas.height/2) : canvas.height;
  staticSpectrogramIdx = 0;
}

addEventListener('click', () => {
  if (audioCtx.state == 'suspended')
    audioCtx.resume();
});
addEventListener('resize', resizeCanvas);
resizeCanvas();

function loadLocalFile(event) {
  const file = event.target.files[0],
        reader = new FileReader();
  reader.onload = (e) => {
    audioPlayer.src = e.target.result;
    audioPlayer.play();
  };

  reader.readAsDataURL(file);
}

//visualize();
audioCtx.audioWorklet.addModule(dspSourceUrl).then(() => {
  //let messageCounter = 0;
  audioProvider = new AudioWorkletNode(audioCtx, 'audio-provider');
  audioSource.connect(audioProvider);
  audioProvider.port.postMessage(0);
  audioProvider.port.onmessage = (e) => {
    if (!visualizerSettings.freeze && !visualizerSettings.useIncorrectWay)
      analyzeChunk(e.data.currentChunk);
    audioProvider.port.postMessage(1);
    //if (messageCounter < 1) {
    //  console.log(e.data.currentChunk);
    //}
    //messageCounter++;
  };
  audioProvider.onprocessorerror = (e) => {
    console.log(e.message);
  }
  // optional mic input
  /*navigator.mediaDevices.getUserMedia({
    audio: {
      noiseCancellation: false,
      echoCancellation: false,
      autoGainControl: false
    },
    video: false
  }).then((stream) => {
    const audioStream = audioCtx.createMediaStreamSource(stream);
    audioStream.connect(analyser);
    audioStream.connect(audioProvider); // for use with AudioWorklet-based visualizations
  }).catch((err) => {
    console.log(err);
  });*/
  visualize();
}).catch((e) => {
  console.log(e.message);
});

let hasUpdatedSince = false;
function analyzeChunk(data) {
  const dataset = [],
        isSpectrogram = visualizerSettings.display === 'spectrogram' || visualizerSettings.display === 'static' || visualizerSettings.display === 'both';
  let retrievalLength = 0;
  for (const x of data) {
    retrievalLength = Math.max(retrievalLength, x.length);
  }
  for (let i = 0; i < retrievalLength; i++) {
    let sum = 0,
        channelDivisor = 1;
    const i1 = visualizerSettings.channelIdx1,
          idx1 = idxWrapOver(visualizerSettings.reverseIdx1 ? data.length-i1-1 : i1, data.length),
          i2 = visualizerSettings.channelIdx2,
          idx2 = idxWrapOver(visualizerSettings.reverseIdx2 ? data.length-i2-1 : i2, data.length),
          pairIndices = [isFinite(idx1) ? idx1 : 0, isFinite(idx2) ? idx2 : 0];
    switch (visualizerSettings.channelMode) {
      default:
        for (let channelIdx = 0; channelIdx < data.length; channelIdx++) {
          sum += data[channelIdx][i];
        }
        channelDivisor = data.length;
        break;
      case 'left':
      case 'right':
      case 'mid':
      case 'side':
        channelDivisor = visualizerSettings.channelMode === 'left' || visualizerSettings.channelMode === 'right' ? 1 : 2;
        const l = data[pairIndices[0]][i],
              r = data[pairIndices[1]][i];
        sum = visualizerSettings.channelMode === 'left' ? l : visualizerSettings.channelMode === 'right' ? r : l + r * (visualizerSettings.channelMode === 'side' ? -1 : 1); 
    }
    dataset[i] = sum/(visualizerSettings.preventGainIncreaseFromChannelSum ? channelDivisor : 1);
    if (visualizerSettings.useAccurateSmoothing) {
      getKindofsDFT().analyze([isFinite(dataset[i]) ? dataset[i] : 0]);
      const spectrumData = getKindofsDFT().spectrumData,
            spectrumLength = spectrumData.length;
      if (isSpectrogram) {
        accumulatedData.length = spectrumLength;
        for (let bandIdx = 0; bandIdx < spectrumLength; bandIdx++) {
          accumulatedData[bandIdx] = Math.max(isFinite(accumulatedData[bandIdx]) ? accumulatedData[bandIdx] : 0, spectrumData[bandIdx]);
        }
        sampleCounter++;
      }
      updateSpectrumVisualization(spectrumData, true);
      updateAccumulatedSpectrum(currentSpectrum);
      if (sampleCounter >= visualizerSettings.hopSize && isSpectrogram) {
        printSpectrogram(accumulatedData);
        accumulatedData = accumulatedData.map((_) => 0);
        sampleCounter = 0;
      }
    }
  }
  if (dataset.length > 0 && !visualizerSettings.useAccurateSmoothing) {
    getKindofsDFT().analyze(dataset.map(x => isFinite(x) ? x : 0));
    const spectrum = getKindofsDFT().spectrumData;
    if (isSpectrogram)
      printSpectrogram(spectrum);
    updateAccumulatedSpectrum(spectrum);
  }
  if (dataset.length > 0)
    hasUpdatedSince = true;
}

function getKindofsDFT() {
  switch(visualizerSettings.analysisAlgorithm) {
    case 'analog':
      return analogStyleAnalyser;
    case 'swift':
      return swift;
    default:
      return sdft;
  }
}

function visualize() {
  delay.delayTime.value = 0; //(visualizerSettings.fftSize / audioCtx.sampleRate) * visualizerSettings.compensateDelay;
  if (!visualizerSettings.freeze) {
    // we use getFloatTimeDomainData (which is PCM data that is gathered, just like vis_stream::get_chunk_absolute() in foobar2000 SDK)
    if (visualizerSettings.useIncorrectWay) {
      analyser.getFloatTimeDomainData(dataArray);
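      // dataArray holds the newest analyser.fftSize samples; copy just the last
      // visualizerSettings.fftSize of them so the analyzed chunk matches the
      // configured buffer length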
      const fftData = [];
      for (let i = 0; i < visualizerSettings.fftSize; i++) {
        fftData[i] = dataArray[i+analyser.fftSize-visualizerSettings.fftSize];
      }
      analyzeChunk([fftData]);
    }
    /*
    const spectrumData = getKindofsDFT().spectrumData;
    */
    if (currentSampleRate !== audioCtx.sampleRate)
      recalcCoeffs();
    /*
    currentSpectrum.length = spectrumData.length;
    for (let i = 0; i < spectrumData.length; i++) {
      currentSpectrum[i] = spectrumData[i];
    }
    */
    
    if (!hasUpdatedSince) {
      updateAccumulatedSpectrum(lastAccumulatedSpectrum);
    }
    
    if (!visualizerSettings.useAccurateSmoothing)
      updateSpectrumVisualization(visualizerSettings.antiFlicker ? accumulatedSpectrum : getKindofsDFT().spectrumData);
  }
  const fgColor = visualizerSettings.darkMode ? (visualizerSettings.useGradient || visualizerSettings.alternateColor ? '#c0c0c0' : '#fff') : '#000',
        bgColor = visualizerSettings.darkMode ? (visualizerSettings.useGradient || visualizerSettings.alternateColor ? '#202020' : '#000') : '#fff',
        isSpectrogramOnly = visualizerSettings.display === 'spectrogram' || visualizerSettings.display === 'static',
        isSpectrogram = visualizerSettings.display === 'spectrogram' || visualizerSettings.display === 'static' || visualizerSettings.display === 'both',
        isSpectrumandSpectrogram = visualizerSettings.display === 'both',
        shownAverage = visualizerSettings.showAverage || visualizerSettings.showRMS,
        shownInfOnly = visualizerSettings.showAverage && !visualizerSettings.showRMS,
        shownCalibration = visualizerSettings.showCalibration,
        calibrationSrc = visualizerSettings.calibrationSrc;
  let grad = fgColor;
  if (visualizerSettings.useGradient) {
    if (visualizerSettings.alternateColor) {
      grad = ctx.createLinearGradient(0, 0, canvas.width, 0);
      grad.addColorStop(0/4, '#f00');
      grad.addColorStop(1/4, '#ff8000');
      grad.addColorStop(2/4, '#0f0');
      grad.addColorStop(3/4, '#0ff');
      grad.addColorStop(4/4, '#00f');
    }
    else {
      grad = ctx.createLinearGradient(0, 0, 0, isSpectrumandSpectrogram ? canvas.height/2 : canvas.height);
      // color gradient derived from foobar2000
      grad.addColorStop(0, visualizerSettings.darkMode ? '#569cd6' : 'rgb(0, 102, 204)');
      if (!shownAverage)
        grad.addColorStop(1, visualizerSettings.darkMode ? '#c0c0c0' : '#000');
    }
  }
  let averageValues = [],
      rmsValues = [],
      audioSpectrum = [],
      calibrationData = [];
  if (!isSpectrogramOnly) {
    if (visualizerSettings.showMain || (calibrationSrc === 'main' && shownCalibration)) {
      audioSpectrum = visualizerSettings.antiFlicker && visualizerSettings.useAccurateSmoothing ? accumulatedSpectrum : currentSpectrum;
    }
    
    if (visualizerSettings.showAverage || (calibrationSrc === 'cumulative' && shownCalibration)) {
      averageValues = averageSpectrum.map(x => {
        switch (visualizerSettings.averagingDomain) {
          case 'rms':
            return Math.sqrt(x) * 2;
          case 'log':
            return 10 ** (x/20) * 2;
          default:
            return x * 2;
        }
      });
    }
    
    if (visualizerSettings.showRMS || (calibrationSrc === 'avg' && shownCalibration)) {
      const fifoLength = fifoBuffers.length > 0 && fifoBuffers[0] !== undefined ? fifoBuffers[0].length : 1;
      rmsValues = fifoBuffers.map(x => {
        if (x === undefined)
          return 0;
        const average = x.reduce((acc, curr) => {
          const current = isFinite(curr) ? curr : 0;
          switch (visualizerSettings.fifoDomain) {
            case 'rms':
              return acc + current ** 2;
            case 'log':
              return acc + 20*Math.log10(current);
            default:
              return acc + current;
          }
        }, 0);
        switch (visualizerSettings.fifoDomain) {
          case 'rms':
            return Math.sqrt(average / fifoLength) * 2;
          case 'log':
            return 10 ** (average/fifoLength/20) * 2;
          default:
            return average / fifoLength * 2;
        }
      });
    }
    
    switch (calibrationSrc) {
      case 'avg':
        calibrationData = rmsValues;
        break;
      case 'cumulative':
        calibrationData = averageValues;
        break;
      case 'peaks':
        calibrationData = peaks;
        break;
      default:
        calibrationData = audioSpectrum;
    }
  }
  ctx.globalCompositeOperation = 'source-over';
  ctx.fillStyle = bgColor;
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  if (visualizerSettings.showPeaks && !isSpectrogramOnly && visualizerSettings.drawMode2 !== 'stroke') {
    ctx.fillStyle = fgColor;
    ctx.strokeStyle = fgColor;
    drawGraph(peaks.map(x => x*2), false, visualizerSettings.fadingPeaks && !visualizerSettings.drawLines ? peakHolds.map(x => x/2) : 0.5);
  }
  ctx.fillStyle = grad;
  ctx.strokeStyle = grad;
  if (!isSpectrogramOnly && visualizerSettings.showMain) {
    drawGraph(audioSpectrum.map(x => x*2), visualizerSettings.drawMode === 'stroke', (shownAverage && !(visualizerSettings.alternateColor && visualizerSettings.useGradient) && visualizerSettings.drawMode !== 'stroke') || visualizerSettings.drawMode === 'both' ? 0.5 : 1);
    if (visualizerSettings.drawMode === 'both')
      drawGraph(audioSpectrum.map(x => x*2), true, 1);
  }
  /*
  for (let i = 0; i < currentSpectrum.length; i++) {
    ctx.fillRect(i*canvas.width/currentSpectrum.length+1, canvas.height, canvas.width/currentSpectrum.length-2, -map(ascale(currentSpectrum[i]*2), 0, 1, 0, canvas.height));
  }
  */
  ctx.fillStyle = fgColor;
  ctx.strokeStyle = fgColor;
  if (visualizerSettings.showPeaks && !isSpectrogramOnly && visualizerSettings.drawMode2 !== 'fill') {
    drawGraph(peaks.map(x => x*2), true, visualizerSettings.fadingPeaks ? peakHolds : 1);
    /*
    for (let i = 0; i < peaks.length; i++) {
      ctx.globalAlpha = visualizerSettings.fadingPeaks ? peakHolds[i] / (visualizerSettings.peakHold * (visualizerSettings.useAccurateSmoothing ? audioCtx.sampleRate/60 : 1)) : 1;
      ctx.fillRect(i*canvas.width/peaks.length+1, map(ascale(peaks[i]*2), 0, 1, canvas.height, 0), canvas.width/peaks.length-2, 2);
    }
    */
  }
  ctx.fillStyle = visualizerSettings.showRMS && visualizerSettings.showAverage ? (visualizerSettings.darkMode ? '#fff' : '#000') : visualizerSettings.drawMode3 === 'both' ? '#202020' : visualizerSettings.alternateColor && visualizerSettings.useGradient && visualizerSettings.drawMode3 !== 'stroke' ? '#888' : fgColor;
  ctx.strokeStyle = ctx.fillStyle;
  if (visualizerSettings.showAverage && !isSpectrogramOnly) {
    drawGraph(averageValues, !shownInfOnly || visualizerSettings.drawMode3 === 'stroke', shownInfOnly && visualizerSettings.showMain && visualizerSettings.drawMode3 !== 'stroke' ? 0.5 : 1);
    if (visualizerSettings.drawMode3 === 'both' && shownInfOnly) {
      ctx.fillStyle = visualizerSettings.darkMode ? '#fff' : '#000';
      ctx.strokeStyle = ctx.fillStyle;
      drawGraph(averageValues, true, 1);
    }
  }
  ctx.fillStyle = visualizerSettings.drawMode3 === 'both' ? '#202020' : visualizerSettings.alternateColor && visualizerSettings.useGradient && visualizerSettings.drawMode3 !== 'stroke' ? '#888' : fgColor;
  ctx.strokeStyle = ctx.fillStyle;
  if (visualizerSettings.showRMS && !isSpectrogramOnly) {
    drawGraph(rmsValues, visualizerSettings.drawMode3 === 'stroke', (visualizerSettings.showMain || visualizerSettings.showAverage || visualizerSettings.drawMode3 === 'both') && visualizerSettings.drawMode3 !== 'stroke' ? 0.5 : 1);
    if (visualizerSettings.drawMode3 === 'both') {
      ctx.fillStyle = visualizerSettings.darkMode ? '#fff' : '#000';
      ctx.strokeStyle = ctx.fillStyle;
      drawGraph(rmsValues, true, 1);
    }
  }
  /*
  for (let i = 0; i < 24; i++) {
    let sum = 0;
    for (let j = 0; j < currentSpectrum.length/24; j++) {
      sum += currentSpectrum[i+j*24] ** 2;
    }
    ctx.fillRect(i*canvas.width/24+1, canvas.height, canvas.width/24 - 2, -Math.min(Math.sqrt(sum)*canvas.height*Math.SQRT2, canvas.height/2));
  }
  */
  ctx.globalAlpha = 1;
  ctx.globalCompositeOperation = 'source-over';
  ctx.fillStyle = bgColor;
  if (isSpectrumandSpectrogram)
    ctx.fillRect(0, canvas.height/2, canvas.width, canvas.height/2);
  if (auxCanvas.width > 0 && auxCanvas.height > 0 && isSpectrogram)
    ctx.drawImage(auxCanvas, 0, canvas.height-auxCanvas.height);
  ctx.globalCompositeOperation = visualizerSettings.diffLabels ? 'difference' : 'source-over';
  ctx.fillStyle = visualizerSettings.diffLabels ? '#fff' : fgColor;
  ctx.strokeStyle = visualizerSettings.diffLabels ? '#fff' : fgColor;
  // label part
  ctx.font = `${Math.trunc(10*devicePixelRatio)}px sans-serif`;
  ctx.textAlign = visualizerSettings.labelTextAlign;
  ctx.textBaseline = isSpectrumandSpectrogram ? visualizerSettings.labelTextBaseline2 : 'alphabetic';
  // Frequency label part
  if (visualizerSettings.showLabels || visualizerSettings.showDC || visualizerSettings.showNyquist) {
    ctx.globalAlpha = 0.5;
    ctx.setLineDash([]);
    
    const freqLabels = [],
          isNote = visualizerSettings.labelMode === 'note',
          notes = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'],
          minLabelRange = freqBands.length > 0 ? freqBands[0].ctr : 0,
          maxLabelRange = freqBands.length > 0 ? freqBands[freqBands.length-1].ctr : 0,
          labelScale = visualizerSettings.freqDist === 'octaves' ? 'log' : visualizerSettings.fscale,
          hzLinearFactor = visualizerSettings.hzLinearFactor/100;

    let freqsTable;
    switch(visualizerSettings.labelMode) {
      case 'decade':
        freqsTable = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000, 20000];
        break;
      case 'decade 2':
        freqsTable = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000];
        break;
      case 'decade 3':
        freqsTable = [10, 100, 1000, 10000];
        break;
      case 'octave':
        freqsTable = [31, 63.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000];
        break;
      case 'powers of two':
        freqsTable = [32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384];
        break;
      case 'note':
        freqsTable = generateOctaveBands(12, 0, 132, 0, visualizerSettings.labelTuning).map(x => x.ctr);
        break;
      case 'bark':
        freqsTable = [50, 150, 250, 350, 450, 570, 700, 840, 1000, 1170, 1370, 1600, 1850, 2150, 2500, 2900, 3400, 4000, 4800, 5800, 7000, 8500, 10500, 13500];
        break;
      case 'linear':
        freqsTable = [1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000, 11000, 12000, 13000, 14000, 15000, 16000, 17000, 18000, 19000, 20000];
        break;
      default:
        freqsTable = freqBands.map(x => x.ctr);
    }
    if (visualizerSettings.showLabels)
      freqLabels.push(...freqsTable);
    if (visualizerSettings.showDC)
      freqLabels.push(0);
    if (visualizerSettings.showNyquist)
      freqLabels.push(audioCtx.sampleRate/2);
    
    freqLabels.map(x => {
      const note = isFinite(Math.log2(x)) ? notes[idxWrapOver(Math.round(Math.log2(x)*12), notes.length)] : 'DC',
      isSharp = note.includes('#'),
      isC = note === 'C';
      
      ctx.globalAlpha = isNote ? (isSharp ? 0.2 : isC ? 0.8 : 0.5) : 0.5;
      const label = x === audioCtx.sampleRate/2 && visualizerSettings.showNyquist ? 'Nyquist' : isNote || x === 0 ? `${note}${isC ? Math.trunc(Math.log2(x)-4) : ''}` : (x >= 1000) ? `${x / 1000}kHz` : `${x}Hz`,
            posX = map(fscale(x, labelScale, hzLinearFactor), fscale(minLabelRange, labelScale, hzLinearFactor), fscale(maxLabelRange, labelScale, hzLinearFactor), 1/freqBands.length/2, 1 - 1/freqBands.length/2),
            lineWidth = 10*devicePixelRatio*ctx.globalAlpha;
      ctx.beginPath();
      if (isSpectrogramOnly) {
        if (!visualizerSettings.spectrogramExtendGrid)
          ctx.globalAlpha = 1;
        ctx.lineTo(visualizerSettings.spectrogramExtendGrid ? 0 : visualizerSettings.mirrorLabels ? canvas.width : 0, canvas.height-posX*canvas.height);
        ctx.lineTo(visualizerSettings.spectrogramExtendGrid ? canvas.width : visualizerSettings.mirrorLabels ? canvas.width-lineWidth : lineWidth, canvas.height-posX*canvas.height);
      }
      else {
        ctx.lineTo(posX*canvas.width, isSpectrumandSpectrogram ? canvas.height/2 : canvas.height);
        ctx.lineTo(posX*canvas.width, 0);
      }
      ctx.stroke();
      ctx.globalAlpha = 1;
      if (isSpectrogramOnly) {
        ctx.textAlign = visualizerSettings.mirrorLabels ? 'end' : 'start';
        ctx.fillText(label, visualizerSettings.mirrorLabels ? canvas.width : 0, canvas.height-posX*canvas.height);
      }
      else
        ctx.fillText(label, posX*canvas.width, isSpectrumandSpectrogram ? canvas.height/2 : canvas.height);
    });
    ctx.setLineDash([]);
    ctx.globalAlpha = 1;
    ctx.textAlign = 'start';
    ctx.textBaseline = 'alphabetic';
  }
  // Amplitude/decibel label part
  if ((visualizerSettings.showLabelsY || shownCalibration) && !isSpectrogramOnly) {
    const dBLabelData = [],
          mindB = Math.min(visualizerSettings.minDecibels, visualizerSettings.maxDecibels),
          maxdB = Math.max(visualizerSettings.minDecibels, visualizerSettings.maxDecibels),
          minLabelIdx = Math.round(mindB/visualizerSettings.amplitudeLabelInterval),
          maxLabelIdx = Math.round(maxdB/visualizerSettings.amplitudeLabelInterval);
    if (visualizerSettings.showLabelsY) {
      dBLabelData.push({value: -Infinity,
                        alpha: 0.5});
      if (isFinite(minLabelIdx) && isFinite(maxLabelIdx)) {
        for (let i = maxLabelIdx; i >= minLabelIdx; i--) {
          dBLabelData.push({value: i*visualizerSettings.amplitudeLabelInterval,
                            alpha: 0.5});
        }
      }
    }
    if (shownCalibration) {
      dBLabelData.push({value: 20*Math.log10(calcCalibrationLine(calibrationData, visualizerSettings.calibrationDomain)),
                        alpha: 1});
    }
    ctx.globalAlpha = 0.5;
    ctx.setLineDash([]);
    ctx.textBaseline = visualizerSettings.labelTextBaseline;
    dBLabelData.map(v => {
      const x = v.value;
      ctx.globalAlpha = v.alpha;
      const label = `${x}dB`,
            posY = map(ascale(10 ** (x/20)), 0, 1, isSpectrumandSpectrogram ? canvas.height/2 : canvas.height, 0);
      if (ascale(10 ** (x/20)) >= 0 || !isSpectrumandSpectrogram) {
        ctx.beginPath();
        ctx.lineTo(0, posY);
        ctx.lineTo(canvas.width, posY);
        ctx.stroke();
        ctx.globalAlpha = 1;
        ctx.textAlign = visualizerSettings.mirrorLabels ? 'end' : 'start';
        ctx.fillText(label, canvas.width * visualizerSettings.mirrorLabels, posY);
      }
    });
    ctx.setLineDash([]);
    ctx.globalAlpha = 1;
    ctx.textAlign = 'start';
    ctx.textBaseline = 'alphabetic';
  }
  // reset the accumulated spectrum
  if (!visualizerSettings.freeze) {
    hasUpdatedSince = false;
    lastAccumulatedSpectrum = accumulatedSpectrum.map(x => x);
    updateAccumulatedSpectrum([]);
  }
  
  requestAnimationFrame(visualize);
  currentSampleRate = audioCtx.sampleRate;
}
// additional helper functions needed for this visualization
function applyWindow(posX, windowType = 'Hann', windowParameter = 1, truncate = true, windowSkew = 0) {
  let x = windowSkew > 0 ? ((posX/2-0.5)/(1-(posX/2-0.5)*10*(windowSkew ** 2)))/(1/(1+10*(windowSkew ** 2)))*2+1 :
                           ((posX/2+0.5)/(1+(posX/2+0.5)*10*(windowSkew ** 2)))/(1/(1+10*(windowSkew ** 2)))*2-1;
  
  if (truncate && Math.abs(x) > 1)
    return 0;
  
  switch (windowType.toLowerCase()) {
    default:
      return 1;
    case 'hanning':
    case 'cosine squared':
    case 'hann':
      return Math.cos(x*Math.PI/2) ** 2;
    case 'raised cosine':
    case 'hamming':
      return 0.54 + 0.46 * Math.cos(x*Math.PI);
    case 'power of sine':
      return Math.cos(x*Math.PI/2) ** windowParameter;
    case 'circle':
    case 'power of circle':
      return Math.sqrt(1 - (x ** 2)) ** windowParameter;
    case 'tapered cosine':
    case 'tukey':
      return Math.abs(x) <= 1-windowParameter ? 1 : 
      (x > 0 ? 
       (-Math.sin((x-1)*Math.PI/windowParameter/2)) ** 2 :
       Math.sin((x+1)*Math.PI/windowParameter/2) ** 2);
    case 'blackman':
      return 0.42 + 0.5 * Math.cos(x*Math.PI) + 0.08 * Math.cos(x*Math.PI*2);
    case 'nuttall':
      return 0.355768 + 0.487396 * Math.cos(x*Math.PI) + 0.144232 * Math.cos(2*x*Math.PI) + 0.012604 * Math.cos(3*x*Math.PI);
    case 'flat top':
    case 'flattop':
      return 0.21557895 + 0.41663158 * Math.cos(x*Math.PI) + 0.277263158 * Math.cos(2*x*Math.PI) + 0.083578947 * Math.cos(3*x*Math.PI) + 0.006947368 * Math.cos(4*x*Math.PI);
    case 'kaiser':
      return Math.cosh(Math.sqrt(1-(x ** 2))*(windowParameter ** 2))/Math.cosh(windowParameter ** 2);
    case 'gauss':
    case 'gaussian':
      return Math.exp(-(windowParameter ** 2)*(x ** 2));
    case 'cosh':
    case 'hyperbolic cosine':
      return Math.E ** (-(windowParameter ** 2)*(Math.cosh(x)-1));
    case 'bartlett':
    case 'triangle':
    case 'triangular':
      return 1 - Math.abs(x);
    case 'poisson':
    case 'exponential':
      return Math.exp(-Math.abs(x * (windowParameter ** 2)));
    case 'hyperbolic secant':
    case 'sech':
      return 1/Math.cosh(x * (windowParameter ** 2));
    case 'quadratic spline':
      return Math.abs(x) <= 0.5 ? -((x*Math.sqrt(2)) ** 2)+1 : (Math.abs(x*Math.sqrt(2))-Math.sqrt(2)) ** 2;
    case 'parzen':
      return Math.abs(x) > 0.5 ? -2 * ((-1 + Math.abs(x)) ** 3) : 1 - 24 * (Math.abs(x/2) ** 2) + 48 * (Math.abs(x/2) ** 3);
    case 'welch':
      return 1 - (x ** 2);
    case 'ogg':
    case 'vorbis':
      return Math.sin(Math.PI/2 * Math.cos(x*Math.PI/2) ** 2);
    case 'cascaded sine':
    case 'cascaded cosine':
    case 'cascaded sin':
    case 'cascaded cos':
      return 1 - Math.sin(Math.PI/2 * Math.sin(x*Math.PI/2) ** 2);
  }
}
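
// Illustrative sanity checks: with windowSkew = 0 the skew mapping above reduces to x = posX,
// so a Hann window is 1 at the center, ~0 at the edges of [-1, 1], and 0 outside that range
// when truncation is enabled.
console.assert(applyWindow(0, 'hann') === 1);
console.assert(Math.abs(applyWindow(1, 'hann')) < 1e-9);
console.assert(applyWindow(2, 'hann') === 0);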

function fscale(x, freqScale = 'logarithmic', freqSkew = 0.5) {
  switch(freqScale.toLowerCase()) {
    default:
      return x;
    case 'log':
    case 'logarithmic':
      return Math.log2(x);
    case 'mel':
      return Math.log2(1+x/700);
    case 'critical bands':
    case 'bark':
      return (26.81*x)/(1960+x)-0.53;
    case 'equivalent rectangular bandwidth':
    case 'erb':
      return Math.log2(1+0.00437*x);
    case 'cam':
    case 'cams':
      return Math.log2((x/1000+0.312)/(x/1000+14.675));
    case 'sinh':
    case 'arcsinh':
    case 'asinh':
      return Math.asinh(x/(10 ** (freqSkew*4)));
    case 'shifted log':
    case 'shifted logarithmic':
      return Math.log2((10 ** (freqSkew*4))+x);
    case 'nth root':
      return x ** (1/(11-freqSkew*10));
    case 'negative exponential':
      return -(2 ** (-x/(2 ** (7+freqSkew*8))));
    case 'adjustable bark':
      return (26.81 * x)/((10 ** (freqSkew*4)) + x);
    case 'period':
      return 1/x;
  }
}

function invFscale(x, freqScale = 'logarithmic', freqSkew = 0.5) {
  switch(freqScale.toLowerCase()) {
    default:
      return x;
    case 'log':
    case 'logarithmic':
      return 2 ** x;
    case 'mel':
      return 700 * ((2 ** x) - 1);
    case 'critical bands':
    case 'bark':
      return 1960 / (26.81/(x+0.53)-1);
    case 'equivalent rectangular bandwidth':
    case 'erb':
      return (1/0.00437) * ((2 ** x) - 1);
    case 'cam':
    case 'cams':
      return (14.675 * (2 ** x) - 0.312)/(1-(2 ** x)) * 1000;
    case 'sinh':
    case 'arcsinh':
    case 'asinh':
      return Math.sinh(x)*(10 ** (freqSkew*4));
    case 'shifted log':
    case 'shifted logarithmic':
      return (2 ** x) - (10 ** (freqSkew*4));
    case 'nth root':
      return x ** ((11-freqSkew*10));
    case 'negative exponential':
      return -Math.log2(-x)*(2 ** (7+freqSkew*8));
    case 'adjustable bark':
      return (10 ** (freqSkew*4)) / (26.81 / x - 1);
    case 'period':
      return 1/x;
  }
}
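
// Illustrative sanity checks: invFscale should undo fscale on every scale, up to
// floating-point error.
console.assert(Math.abs(invFscale(fscale(1000, 'mel'), 'mel') - 1000) < 1e-6);
console.assert(Math.abs(invFscale(fscale(440, 'logarithmic'), 'logarithmic') - 440) < 1e-6);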

function generateFreqBands(N = 128, low = 20, high = 20000, freqScale, freqSkew, bandwidth = 0.5) {
  let freqArray = [];
  for (let i = 0; i < N; i++) {
    freqArray.push({
      lo: invFscale( map(i-bandwidth, 0, N-1, fscale(low, freqScale, freqSkew), fscale(high, freqScale, freqSkew)), freqScale, freqSkew),
      ctr: invFscale( map(i, 0, N-1, fscale(low, freqScale, freqSkew), fscale(high, freqScale, freqSkew)), freqScale, freqSkew),
      hi: invFscale( map(i+bandwidth, 0, N-1, fscale(low, freqScale, freqSkew), fscale(high, freqScale, freqSkew)), freqScale, freqSkew)
    });
  }
  return freqArray;
}
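
// Usage sketch (commented out; assumes the map(value, inMin, inMax, outMin, outMax) helper
// defined earlier in this pen): 128 log-spaced bands from 20 Hz to 20 kHz, whose first and
// last band centers land on the endpoints.
// const logBands = generateFreqBands(128, 20, 20000, 'logarithmic', 0.5);
// logBands[0].ctr   ≈ 20
// logBands[127].ctr ≈ 20000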

function generateOctaveBands(bandsPerOctave = 12, lowerNote = 4, higherNote = 123, detune = 0, tuningFreq = 440, bandwidth = 0.5) {
  const tuningNote = isFinite(Math.log2(tuningFreq)) ? Math.round((Math.log2(tuningFreq)-4)*12)*2 : 0,
        root24 = 2 ** ( 1 / 24 ),
        c0 = tuningFreq * root24 ** -tuningNote, // ~16.35 Hz
        groupNotes = 24/bandsPerOctave;
  let bands = [];
  for (let i = Math.round(lowerNote*2/groupNotes); i <= Math.round(higherNote*2/groupNotes); i++) {
    bands.push({
      lo: c0 * root24 ** ((i-bandwidth)*groupNotes+detune),
      ctr: c0 * root24 ** (i*groupNotes+detune),
      hi: c0 * root24 ** ((i+bandwidth)*groupNotes+detune)
    });
  }
  return bands;
}
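
// Illustrative sanity check: 12 bands per octave starting at C0 (~16.35 Hz with A4 = 440 Hz
// tuning), so band index 57 corresponds to A4 and should sit at ~440 Hz.
console.assert(Math.abs(generateOctaveBands(12, 0, 132, 0, 440)[57].ctr - 440) < 1e-6);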

function ascale(x, alt = false) {
  const minDecibels = alt ? visualizerSettings.altMinDecibels : visualizerSettings.minDecibels,
        maxDecibels = alt ? visualizerSettings.altMaxDecibels : visualizerSettings.maxDecibels,
        useAbsolute = alt ? visualizerSettings.altUseAbsolute : visualizerSettings.useAbsolute,
        gamma = alt ? visualizerSettings.altGamma : visualizerSettings.gamma,
        useDecibels = alt ? visualizerSettings.altUseDecibels : visualizerSettings.useDecibels;
  if (useDecibels)
    return map(20*Math.log10(x), minDecibels, maxDecibels, 0, 1);
  else
    return map(x ** (1/gamma), !useAbsolute * (10 ** (minDecibels/20)) ** (1/gamma), (10 ** (maxDecibels/20)) ** (1/gamma), 0, 1);
}
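
// Illustrative mapping (commented out; the settings below are hypothetical, not the pen's
// defaults): with useDecibels = true, minDecibels = -60 and maxDecibels = 0, a full-scale
// amplitude of 1 maps to 1 and an amplitude of 10 ** (-60/20) (i.e. -60 dB) maps to 0.
// ascale(1)              -> map(  0, -60, 0, 0, 1) = 1
// ascale(10 ** (-60/20)) -> map(-60, -60, 0, 0, 1) = 0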

function parseList(string) {
  return string.split(',').map(x => isNaN(x) ? 0 : parseFloat(x));
}
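
// Illustrative sanity check: numeric entries parse as floats, anything non-numeric becomes 0.
console.assert(JSON.stringify(parseList('20, 40.5, foo')) === '[20,40.5,0]');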

function updateSpectrumVisualization(data, inAudioContext = false) {
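  // Applies one fresh spectrum frame: exponential or peak-hold smoothing into currentSpectrum,
  // peak hold/decay, the cumulative ("infinite") average spectrum, and the FIFO history buffers.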
  if (currentSpectrum.length !== data.length || averageSpectrum.length !== data.length || fifoBuffers.length !== data.length) {
    currentSpectrum.length = data.length;
    averageSpectrum.length = data.length;
    fifoBuffers.length = data.length;
  }
  if (currentSpectrum.length !== peaks.length || currentSpectrum.length !== peakHolds.length) {
    peaks.length = currentSpectrum.length;
    peakHolds.length = currentSpectrum.length;
  }
  const factor = inAudioContext ? 60/audioCtx.sampleRate : 1,
        holdFactor = inAudioContext ? audioCtx.sampleRate/60 : 1,
        smoothingTimeConstant = (visualizerSettings.smoothingTimeConstant/100) ** factor,
        peakDecayTimeConstant = (visualizerSettings.peakDecay/100) ** factor,
        fifoLength = Math.max(Math.round(visualizerSettings.fifoLength / 1000 * (inAudioContext ? audioCtx.sampleRate : 60)), 1);   // assuming 60fps
  if (!visualizerSettings.freezeFIFO) {
    for (let i = 0; i < fifoBuffers.length; i++) {
      if (fifoBuffers[i] === undefined)
        fifoBuffers[i] = new Array(fifoLength);
      else if (fifoBuffers[i].length !== fifoLength)
        fifoBuffers[i].length = fifoLength;
    }
  }
  for (let i = 0; i < data.length; i++) {
    currentSpectrum[i] = isFinite(currentSpectrum[i]) ? visualizerSettings.useAverageSmoothing ? data[i]*(1-smoothingTimeConstant) + currentSpectrum[i]*smoothingTimeConstant : Math.max(data[i], currentSpectrum[i]*smoothingTimeConstant) : data[i];
    const peakValue = visualizerSettings.useActualPeak ? data[i] : currentSpectrum[i];
    if (peakValue >= peaks[i] || !isFinite(peaks[i])) {
      peaks[i] = peakValue;
      peakHolds[i] = visualizerSettings.peakHold * holdFactor;
    }
    else if (peakHolds[i] > 0)
      peakHolds[i] = Math.min(peakHolds[i]-1, visualizerSettings.peakHold * holdFactor);
    else
      peaks[i] *= peakDecayTimeConstant;
    if (!visualizerSettings.pauseAverage) {
      // infinite (cumulative) average spectrum part
      let dataToAverage = data[i];
      switch (visualizerSettings.averagingDomain) {
        case 'rms':
          dataToAverage = dataToAverage ** 2;
          break;
        case 'log':
          dataToAverage = 20 * Math.log10(dataToAverage);
      }
      averageSpectrum[i] = (dataToAverage + cumulativeIdx*(isFinite(averageSpectrum[i]) ? averageSpectrum[i] : 0))/(cumulativeIdx+1);
    }
    if (!visualizerSettings.freezeFIFO)
      fifoBuffers[i][fifoIdx] = data[i];
  }
  if (!visualizerSettings.pauseAverage)
    cumulativeIdx++;
  if (!visualizerSettings.freezeFIFO)
    fifoIdx = idxWrapOver(fifoIdx+1, fifoLength);
}

function updateAccumulatedSpectrum(data) {
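  // Max-holds each bin so spectrum frames produced between screen redraws aren't lost.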
  accumulatedSpectrum.length = data.length;
  for (let i = 0; i < data.length; i++) {
    const prevResult = accumulatedSpectrum[i],
          x = data[i];
    accumulatedSpectrum[i] = Math.max(isFinite(prevResult) ? prevResult : 0, isFinite(x) ? x : 0);
  }
}

function calcCalibrationLine(data, domain = 'rms') {
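  // Collapses the calibration data into a single reference level by accumulating in the
  // chosen domain (rms, log/decibel, or linear) and normalizing by the number of bands.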
  let calibrationValue = 0;
  for (let i = 0; i < data.length; i++) {
    const x = data[i];
    // calculating a calibration line
    switch (domain) {
      case 'rms':
        calibrationValue += x ** 2;
        break;
      case 'log':
        calibrationValue += 20*Math.log10(x);
        break;
      default:
        calibrationValue += x;
    }
  }
  switch (domain) {
    case 'rms':
      return Math.sqrt(calibrationValue/fifoBuffers.length);
    case 'log':
      return 10 ** (calibrationValue/fifoBuffers.length/20);
    default:
      return calibrationValue / fifoBuffers.length;
  }
}

function drawGraph(data, isPeak, aux) {
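  // Draws one spectrum curve: `data` holds per-band amplitudes, `isPeak` draws thin peak caps
  // (or a stroked outline in line mode) instead of filled bars/area, and `aux` is either a
  // constant alpha or the per-band peak-hold counters used to fade the peaks.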
  const isLine = visualizerSettings.drawLines,
        height = canvas.height / (1+(visualizerSettings.display === 'both')),
        prevLineWidth = ctx.lineWidth,
        prevLineJoin = ctx.lineJoin,
        prevMiterLimit = ctx.miterLimit;
  ctx.lineWidth = visualizerSettings.lineWidth;
  ctx.lineJoin = visualizerSettings.lineJoin;
  ctx.miterLimit = visualizerSettings.miterLimit;
  if (isLine) {
    ctx.beginPath();
    if (!isPeak)
      ctx.lineTo(canvas.width / data.length / 2, canvas.height);
  }
  for (let i = 0; i < data.length; i++) {
    const amp = ascale(data[i]);
    let x = isLine ? i * canvas.width / data.length + (canvas.width / data.length / 2) : Math[visualizerSettings.spacingMode === 'smooth' ? 'max' : 'trunc'](i * canvas.width / data.length) + Math.min(visualizerSettings.barSpacing, canvas.width / data.length)/2 * visualizerSettings.centerBars,
        y,
        w = isLine ? canvas.width / data.length - 2 : Math.max(1, (visualizerSettings.spacingMode === 'pixel perfect' ? Math.trunc((i+1) * canvas.width / data.length)-Math.trunc(i * canvas.width / data.length) : Math[visualizerSettings.spacingMode === 'smooth' ? 'max' : 'trunc'](canvas.width / data.length))-visualizerSettings.barSpacing),
        h;
    if (isPeak || isLine) {
      y = height - amp * height;
      h = visualizerSettings.peakHeight;
    }
    else {
      y = canvas.height;
      h = -amp*height - canvas.height+height;
    }
    ctx.globalAlpha = Array.isArray(aux) && !isLine ? aux[i] / (visualizerSettings.peakHold * (visualizerSettings.useAccurateSmoothing ? audioCtx.sampleRate/60 : 1)) : isFinite(aux) ? aux : 1;
    if (!isLine)
      ctx.fillRect(x, y, w, h);
    else {
      ctx.lineTo(x, y);
    }
  }
  if (isLine) {
    if (isPeak) 
      ctx.stroke();
    else {
      ctx.lineTo((data.length-0.5)*canvas.width/data.length, canvas.height);
      ctx.fill();
    }
  }
  ctx.lineWidth = prevLineWidth;
  ctx.lineJoin = prevLineJoin;
  ctx.miterLimit = prevMiterLimit;
}

function printSpectrogram(data) {
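  // Renders one new spectrogram slice (a row in 'both' mode, a column otherwise) onto the
  // auxiliary canvas, then scrolls or wraps the existing image depending on the display mode.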
  const length = visualizerSettings.display === 'both' ? auxCanvas.width : auxCanvas.height;
  for (let i = 0; i < data.length; i++) {
    const start = Math.trunc(i/data.length*length),
          end = Math.trunc((i+1)/data.length*length),
          delta = end-start,
          amp = ascale(data[i]*2, visualizerSettings.decoupleAmplitudeFromSpectrum);
    auxCtx.fillStyle = `hsl(0, 0%, ${map(isFinite(amp) ? amp : 0, 0, 1, visualizerSettings.darkMode ? 0 : 100, visualizerSettings.darkMode ? 100 : 0)}%)`;
    if (visualizerSettings.display === 'both')
      auxCtx.fillRect(start, 0, delta, 1);
    else
      auxCtx.fillRect(visualizerSettings.display === 'static' ? staticSpectrogramIdx+1 : auxCanvas.width, auxCanvas.height-start, -1, -delta);
  }
  if (auxCanvas.width > 0 && auxCanvas.height > 0) 
    auxCtx.drawImage(auxCanvas,
      visualizerSettings.display === 'both' || visualizerSettings.display === 'static' ? 0 : -1,
      visualizerSettings.display === 'both' ? 1 : 0
    );
  
  if (visualizerSettings.display === 'static')
    staticSpectrogramIdx = idxWrapOver(staticSpectrogramIdx+1, auxCanvas.width);
  else
    staticSpectrogramIdx = 0;
}
              
            