HTML

              <div class="wrapper">
	<div class="loading">
		<p>Loading...</p>
	</div>

	<div class="iir-demo">
		<button class="button-play" role="switch" data-playing="false" aria-pressed="false">Play</button>
		
		<section class="filter-toggle">
			<span id="label" aria-live="assertive" aria-atomic="true">Filter</span>
			<button class="button-filter" role="switch" data-filteron="false" aria-pressed="false" aria-describedby="label" disabled></button>
		</section>
		
		<section class="filter-graph">
		</section>
		
	</div>
</div>

CSS

              :root {
	--orange: hsla(32, 100%, 50%, 1);
	--yellow: hsla(49, 99%, 50%, 1);
	--lime: hsla(82, 90%, 45%, 1);
	--green: hsla(127, 81%, 41%, 1);
	--red: hsla(342, 93%, 53%, 1);
	--pink: hsla(314, 85%, 45%, 1);
	--blue: hsla(211, 92%, 52%, 1);
	--purple: hsla(283, 92%, 44%, 1);
	--cyan: hsla(195, 98%, 55%, 1);
	--white: hsla(0, 0%, 95%, 1);
	--black: hsla(0, 0%, 10%, 1);
	
	--border: 5px solid var(--black);
	--borderRad: 2px;
	
	
}
* {box-sizing: border-box;}
body {
	background-color: var(--white);
	font-family: system-ui, sans-serif;
	color: var(--black);
}

.wrapper {
	position: relative;
	display: flex;
	flex-direction: column;
	align-items: center;
}

.loading {
	background: white;
	position: absolute;
	left: 0;
	right: 0;
	height: 100vh;
	z-index: 2;
	display: flex;
	justify-content: center;
	align-items: center;
}
.loading p {
	font-size: 200%;
	animation: loading ease-in-out 2s infinite;
}

@keyframes loading {
	0% {opacity: 0;}
	50% {opacity: 1;}
	100% {opacity: 0;}
}

.iir-demo {text-align: center;}

/* play button */
button, span {font-size: 120%;}
[class^="button"] {cursor: pointer;}
.button-play {
	background-color: var(--orange);
	display: block;
	margin: 3vmin auto; padding: 3vmin 4vmin 3vmin 12vmin;
	border: var(--border); border-radius: var(--borderRad);
}
[data-playing], [data-playing="true"]:hover {
		background: var(--red) url('data:image/svg+xml;charset=utf8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path d="M424.4 214.7L72.4 6.6C43.8-10.3 0 6.1 0 47.9V464c0 37.5 40.7 60.1 72.4 41.3l352-208c31.4-18.5 31.5-64.1 0-82.6z" fill="black" /></svg>') no-repeat left center;
		background-size: 60% 60%;
		cursor: pointer;
}
[data-playing]:hover {background-color: var(--green);}
[data-playing="true"] {
	background: var(--green) url('data:image/svg+xml;charset=utf8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path d="M144 479H48c-26.5 0-48-21.5-48-48V79c0-26.5 21.5-48 48-48h96c26.5 0 48 21.5 48 48v352c0 26.5-21.5 48-48 48zm304-48V79c0-26.5-21.5-48-48-48h-96c-26.5 0-48 21.5-48 48v352c0 26.5 21.5 48 48 48h96c26.5 0 48-21.5 48-48z" fill="black" /></svg>') no-repeat left center;
	background-size: 60% 60%;
}

/* filter button */
.filter-toggle {
	margin: 4vmin auto;
}
.button-filter {
	margin: 0 0 0 10px; padding: 0;
	width: 90px; height: 50px;
	display: inline-block;
	vertical-align: middle;
	border: var(--border); border-radius: 25px;
	position: relative;
	text-align: center;
	transition: background .15s ease-in-out;
}
.button-filter:after {
	content: "";
	position: absolute;
	height: 31px; width: 31px;
	border: var(--border); border-radius: 50%;
	background-color: var(--red);
	top: 0;
	left: 39px; /* assumed resting ("off") position so the knob has a value to transition from */
	transition: left .15s ease-in-out;
	will-change: left;
}
.button-filter[data-filteron="true"]:after {
	background-color: var(--green);
	left: 0;
}
.button-filter[disabled] {
	cursor: default;
	border-color: hsla(0, 0%, 40%, 1);
}
.button-filter[disabled]:after {
	background-color: hsla(342, 93%, 73%, 1);
	border-color: hsla(0, 0%, 40%, 1);
}
.button-filter[data-filteron="true"][disabled]:after {
	background-color: hsla(127, 81%, 61%, 1);
}

.filter-graph {
	margin-top: 20px;
	width: 60vw; height: 40vw;
	max-width: 600px; max-height: 400px;
}

JS

              console.clear();

// instantiate our audio context
const AudioContext = window.AudioContext || window.webkitAudioContext;
const audioCtx = new AudioContext();

// fetch the audio file and decode the data
async function getFile(audioContext, filepath) {
  const response = await fetch(filepath);
  const arrayBuffer = await response.arrayBuffer();
  const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
  return audioBuffer;
}
// create a buffer source, give it the decoded data, connect it up and play -> modify graph here if required
function playSourceNode(audioContext, audioBuffer) {
  const soundSource = audioContext.createBufferSource();
  soundSource.buffer = audioBuffer;
  soundSource.connect(audioContext.destination);
  soundSource.start();
	return soundSource;
}
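
// Not part of the original pen: a hedged sketch of what "modify graph here if required"
// could look like, e.g. inserting a GainNode between the source and the destination
// (the gain value of 0.5 is purely an illustrative assumption):
function playSourceNodeWithGain(audioContext, audioBuffer) {
	const soundSource = audioContext.createBufferSource();
	soundSource.buffer = audioBuffer;
	const gainNode = audioContext.createGain();
	gainNode.gain.value = 0.5; // halve the volume
	soundSource.connect(gainNode).connect(audioContext.destination);
	soundSource.start();
	return soundSource;
}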

async function setupSample() {
	const filePath = 'https://s3-us-west-2.amazonaws.com/s.cdpn.io/858/outfoxing.mp3';
	// `getFile` is async, so we `await` the promise it returns.
	// The `await` keyword is only valid inside an `async` function.
	const sample = await getFile(audioCtx, filePath);
	return sample;
}

// change this to change the filter - can be 0-3 and will reference the values in the array below
const filterNumber = 2;

let lowPassCoefs = [
	{
		frequency: 200,
		feedforward: [0.00020298, 0.0004059599, 0.00020298],
		feedback: [1.0126964558, -1.9991880801, 0.9873035442]
	},
	{
		frequency: 500,
		feedforward: [0.0012681742, 0.0025363483, 0.0012681742],
		feedback: [1.0317185917, -1.9949273033, 0.9682814083]
	},
	{
		frequency: 1000,
		feedforward: [0.0050662636, 0.0101325272, 0.0050662636],
		feedback: [1.0632762845, -1.9797349456, 0.9367237155]
	},
	{
		frequency: 5000,
		feedforward: [0.1215955842, 0.2431911684, 0.1215955842],
		feedback: [1.2912769759, -1.5136176632, 0.7087230241]
	}
]
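
// Not part of the original pen: the feedforward/feedback values above appear to be
// precomputed second-order low-pass coefficients. A minimal sketch of how similar
// coefficients could be generated with the Audio EQ Cookbook low-pass formula
// (the 44100 Hz sample rate and the Q value are assumptions, not taken from the pen):
function makeLowPassCoefs(frequency, sampleRate = 44100, Q = Math.SQRT1_2) {
	const w0 = 2 * Math.PI * frequency / sampleRate;
	const cosw0 = Math.cos(w0);
	const alpha = Math.sin(w0) / (2 * Q);
	// coefficients are left un-normalised to match the arrays above; the IIRFilterNode
	// transfer function divides by the feedback terms, so feedback[0] need not be 1
	return {
		frequency,
		feedforward: [(1 - cosw0) / 2, 1 - cosw0, (1 - cosw0) / 2],
		feedback: [1 + alpha, -2 * cosw0, 1 - alpha]
	};
}
// e.g. makeLowPassCoefs(2000) would return a 2 kHz entry in the same shape as above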

let feedForward = lowPassCoefs[filterNumber].feedforward,
	feedBack = lowPassCoefs[filterNumber].feedback;

// create a canvas element and append it to our dom
const canvasContainer = document.querySelector('.filter-graph');
const canvasEl = document.createElement('canvas');
canvasContainer.appendChild(canvasEl);
// set 2d context and set dimensions
const canvasCtx = canvasEl.getContext('2d');
const width = canvasContainer.offsetWidth;
const height = canvasContainer.offsetHeight;
canvasEl.width = width;
canvasEl.height = height;

// set fill and create axis
canvasCtx.fillStyle = 'white';
canvasCtx.fillRect(0, 0, width, height);

const spacing = width/16;
const fontSize = Math.floor(spacing/1.5);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = 'grey';

// draw our axis
canvasCtx.beginPath();
canvasCtx.moveTo(spacing, spacing);
canvasCtx.lineTo(spacing, height-spacing);
canvasCtx.lineTo(width-spacing, height-spacing);
canvasCtx.stroke();
// axis is gain by frequency
canvasCtx.font = fontSize+'px sans-serif';
canvasCtx.fillStyle = 'grey';
canvasCtx.fillText('1', spacing-fontSize, spacing+fontSize);
canvasCtx.fillText('g', spacing-fontSize, (height-spacing+fontSize)/2);
canvasCtx.fillText('0', spacing-fontSize, height-spacing+fontSize);
canvasCtx.fillText('Hz', width/2, height-spacing+fontSize);
canvasCtx.fillText('20k', width-spacing, height-spacing+fontSize);

// dom elements needed
const loadingSection = document.querySelector('.loading');
const playButton = document.querySelector('.button-play');
const filterButton = document.querySelector('.button-filter');

// arrays for our frequency response
const totalArrayItems = 30;
// Here we want to create an array of frequency values that we would like to get data about.
// We could go for a linear approach, but it's far better when working with frequencies to
// take a log approach, so let's fill our array with frequencies that get larger as the array
// index goes up (1.4^29 is roughly 17 kHz, so the range covers most of the audible spectrum).
let myFrequencyArray = new Float32Array(totalArrayItems);
myFrequencyArray = myFrequencyArray.map(function(item, index) {
    return Math.pow(1.4, index);
});
// We need to create arrays to hold the returned data; these need to be the same size as the original frequency array
let magResponseOutput = new Float32Array(totalArrayItems);
let phaseResponseOutput = new Float32Array(totalArrayItems);

// let the magic happen! When the file has loaded...
setupSample()
	.then((sample) => {
	
	// remove loading screen
	loadingSection.style.display = 'none';
	
	// create our IIR filter
	const iirfilter = audioCtx.createIIRFilter(feedForward, feedBack);
	
	let srcNode;
	// play/pause our track
	playButton.addEventListener('click', function() {
		if (this.dataset.playing === 'false') {
			
			// check if context is in suspended state (autoplay policy)
			if (audioCtx.state === 'suspended') {
				audioCtx.resume();
			}
			
			srcNode = playSourceNode(audioCtx, sample);
			this.setAttribute('data-playing', 'true');
			this.setAttribute('aria-pressed', 'true');
			this.innerText = 'Pause';
			filterButton.removeAttribute('disabled');
		} else {
			srcNode.stop();
			this.setAttribute('data-playing', 'false');
			this.setAttribute('aria-pressed', 'false');
			this.innerText = 'Play';
			filterButton.disabled = true;
			// reset the filter toggle: the next play creates a fresh source node that starts unfiltered
			filterButton.setAttribute('data-filteron', 'false');
			filterButton.setAttribute('aria-pressed', 'false');
		}

	}, false);
	
	// when the filter is toggled on, route the source through the IIR filter
	filterButton.addEventListener('click', function() {
		if (this.dataset.filteron === 'false') {
			srcNode.disconnect(audioCtx.destination);
			srcNode.connect(iirfilter).connect(audioCtx.destination);
			this.setAttribute('data-filteron', 'true');
			this.setAttribute('aria-pressed', 'true');
		} else {
			srcNode.disconnect(iirfilter);
			srcNode.connect(audioCtx.destination);
			this.setAttribute('data-filteron', 'false');
			this.setAttribute('aria-pressed', 'false');
		}
		
	}, false);
	
	// get our frequency response
	iirfilter.getFrequencyResponse(myFrequencyArray, magResponseOutput, phaseResponseOutput);
	
	// draw our graph
	canvasCtx.beginPath();
	for (let i = 0; i < magResponseOutput.length; i++) {
		// keep the curve inside the axes: x runs from `spacing` towards `width - spacing`
		const x = spacing + ((width - spacing * 2) / totalArrayItems) * i;
		const y = height - (magResponseOutput[i] * (height - spacing * 2)) - spacing;
		if (i === 0) {
			canvasCtx.moveTo(x, y);
		} else {
			canvasCtx.lineTo(x, y);
		}
	}
	canvasCtx.stroke();

});
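
// Not in the original pen: `phaseResponseOutput` is filled by getFrequencyResponse() above
// but never drawn. A minimal sketch of how it could be plotted on the same canvas; call
// this from inside the `.then()` callback, after the magnitude curve has been stroked:
function drawPhaseResponse() {
	canvasCtx.save();
	canvasCtx.strokeStyle = 'steelblue';
	canvasCtx.beginPath();
	for (let i = 0; i < phaseResponseOutput.length; i++) {
		const x = spacing + ((width - spacing * 2) / totalArrayItems) * i;
		// phase values are in radians (roughly -PI..PI); rescale them onto the 0..1 gain axis
		const normalised = (phaseResponseOutput[i] + Math.PI) / (2 * Math.PI);
		const y = height - (normalised * (height - spacing * 2)) - spacing;
		if (i === 0) {
			canvasCtx.moveTo(x, y);
		} else {
			canvasCtx.lineTo(x, y);
		}
	}
	canvasCtx.stroke();
	canvasCtx.restore();
}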

            
          