Pen Settings

HTML

CSS

CSS Base

Vendor Prefixing

Add External Stylesheets/Pens

Any URLs added here will be added as <link>s in order, and before the CSS in the editor. If you link to another Pen, it will include the CSS from that Pen. If the preprocessor matches, it will attempt to combine them before processing.

+ add another resource

JavaScript

Babel includes JSX processing.

Add External Scripts/Pens

Any URLs added here will be added as <script>s in order, and run before the JavaScript in the editor. You can use the URL of any other Pen and it will include the JavaScript from that Pen.

+ add another resource

Packages

Add Packages

Search for and use JavaScript packages from npm here. By selecting a package, an import statement will be added to the top of the JavaScript editor for this package.

Behavior

Save Automatically?

If active, Pens will autosave every 30 seconds after being saved once.

Auto-Updating Preview

If enabled, the preview panel updates automatically as you code. If disabled, use the "Run" button to update.

Format on Save

If enabled, your code will be formatted when you actively save your Pen. Note: your code becomes un-folded during formatting.

Editor Settings

Code Indentation

Want to change your Syntax Highlighting theme, Fonts and more?

Visit your global Editor Settings.

HTML

              
                <div class="audio-variable-font">
  <div class="font"><span>OUT LOUD!</span></div>
  <div id="micro" class="micro">
    <div class="micro-wrapper">
      <div class="dtc">
        <canvas id="meter"></canvas>
        <!-- Decorative mic icon: empty alt keeps it out of the accessibility tree.
             width/height match the CSS-rendered size (60x87) to reserve layout
             space and avoid a layout shift while the SVG loads. -->
        <img src="https://www.kobufoundry.com/files/themes/foundry_theme/shortcodes/05_audio_controlled_variable_font/images/mic.svg" alt="" width="60" height="87">
        <button type="button">start the mic</button>
      </div>
    </div>
  </div>
</div>
              
            
!

CSS

              
                $white: #ffffff;
$black: #000000;

:root {
	--weight: 0;
	--width: 0;
	--height: 0;
}

* {
    box-sizing: border-box;
}

body {
	padding: 50px;
	margin: 0;
	background: $black;
  font-size: 18px;
  font-family:  Helvetica, Tahoma, sans-serif;
  font-weight: 300;
  color: $white;
  line-height: 1.44;
}

.audio-variable-font {
  border: 1px solid #fff;
  margin-bottom: 30px;
  overflow: hidden;
  display: flex;
  flex-wrap: wrap;
  align-items: stretch;
  flex-direction: row-reverse;

  & > div, & > li {
    float: left;
  }

  &:after {
    clear: both;
    content: " ";
    display: block;
    height: 0;
    line-height: 0;
    visibility: hidden;
  }

  .micro {
    border-right: 1px solid #fff;
    width: 175px;
    padding: 20px;
    position: relative;
    text-align: center;

    .micro-wrapper {
      display: table;
      height: 100%;

      .dtc {
        display: table-cell;
        vertical-align: middle;
      }
    }

    img {
      display: inline-block;
      width: 60px;
      height: 87px;
      position: relative;
      z-index: 2;
    }

    button {
      transition: color .5s ease-in-out, background-color .5s ease-in-out;
      display: inline-block;
      color: #fff;
      font-size: .77rem;
      text-align: center;
      margin-top: 20px;
      border: 1px solid #fff;
      border-radius: 18px;
      padding: 7px 15px;
      position: relative;
      z-index: 2;
      background-color: transparent;
      cursor: pointer;
      
      &:focus {
        outline: none;
      }

      &:hover {
        color: #000;
        background-color: #fff;
      }
    }

    canvas {
      position: absolute;
      bottom: 0;
      left: 0;
      width: 100%;
      z-index: 1;
    }
  }

  .font {
    width: calc( 100% - 175px);
    align-self: center;
    padding: 20px 30px;
    font-size: 6rem;
    font-family: 'Rakki', sans-serif;;
    font-variation-settings: 'wdth' var(--width), 'wght' var(--weight), 'hght' var(--height);
    white-space: nowrap;
    line-height: 1;

    span {
      display: block;
      margin-top: -15px;
    }
  }
}
              
            
!

JS

              
                var audioContext = null,
    meter = null,
    analyser = null,
    buf = null,
    mediaStream = null,
    mediaStreamSource = null,
    canvas = document.getElementById('meter'),
    canvasContext = canvas.getContext('2d'),
    micro = document.getElementById('micro'),
    openAudioContext = 0,
    MIN_SAMPLES = 0;

var bar = {
        x: 0,
        y: 0,
        width: micro.clientWidth,
        height: 0,
        fill: "#282828"
    };

/*
The MIT License (MIT)
Copyright (c) 2014 Chris Wilson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

/**
 * ScriptProcessorNode audio callback: computes the RMS level of the
 * first input channel and stores it on the node as `volume`.
 * Also sets the node's `clipping` flag (with a `lastClip` timestamp)
 * whenever any sample's magnitude reaches the node's `clipLevel`.
 */
function volumeAudioProcess(event) {
    var node = event.target;
    var samples = event.inputBuffer.getChannelData(0);
    var total = 0;

    // Sum the squares while watching for clipped samples.
    for (var idx = 0, len = samples.length; idx < len; idx++) {
        var sample = samples[idx];
        if (Math.abs(sample) >= node.clipLevel) {
            node.clipping = true;
            node.lastClip = window.performance.now();
        }
        total += sample * sample;
    }

    // Root-mean-square of the buffer; assigned directly rather than
    // smoothed here — GSAP provides the "slow release" downstream.
    node.volume = Math.sqrt(total / samples.length);
}

/*
Usage:
audioNode = createAudioMeter(audioContext,clipLevel,averaging,clipLag);
audioContext: the AudioContext you're using.
clipLevel: the level (0 to 1) that you would consider "clipping".
    Defaults to 0.98.
averaging: how "smoothed" you would like the meter to be over time.
    Should be between 0 and less than 1.  Defaults to 0.95.
clipLag: how long you would like the "clipping" indicator to show
    after clipping has occurred, in milliseconds.  Defaults to 750ms.
Access the clipping through node.checkClipping(); use node.shutdown to get rid of it.
*/

/**
 * Builds a volume-meter node (based on Chris Wilson's MIT-licensed meter).
 * The returned ScriptProcessorNode exposes:
 *   volume        - latest RMS level (updated by volumeAudioProcess)
 *   checkClipping - whether the signal recently hit clipLevel
 *   shutdown      - disconnects the node and detaches the callback
 *
 * @param audioContext the AudioContext to create the node in
 * @param clipLevel    0..1 level considered "clipping" (default 0.98)
 * @param averaging    smoothing factor, 0..1 (default 0.95; unused here,
 *                     kept for API compatibility with the original meter)
 * @param clipLag      ms the clipping flag stays on (default 750)
 */
function createAudioMeter(audioContext, clipLevel, averaging, clipLag) {
    var meterNode = audioContext.createScriptProcessor(512);

    meterNode.onaudioprocess = volumeAudioProcess;
    meterNode.clipping = false;
    meterNode.lastClip = 0;
    meterNode.volume = 0;
    meterNode.clipLevel = clipLevel || 0.98;
    meterNode.averaging = averaging || 0.95;
    meterNode.clipLag = clipLag || 750;

    // Audibly a no-op (input isn't copied to the output), but keeps the
    // node processing — works around a Chrome quirk.
    meterNode.connect(audioContext.destination);

    // Reports whether the meter is currently clipping; the flag
    // auto-clears once clipLag ms have passed since the last clip.
    meterNode.checkClipping = function () {
        if (!this.clipping) {
            return false;
        }
        if (window.performance.now() > this.lastClip + this.clipLag) {
            this.clipping = false;
        }
        return this.clipping;
    };

    meterNode.shutdown = function () {
        this.disconnect();
        this.onaudioprocess = null;
    };

    return meterNode;
}

/**
 * Creates an AnalyserNode (fftSize 2048) for pitch detection and
 * (re)allocates the module-level `buf` Float32Array to the analyser's
 * frequencyBinCount; draw() reads time-domain samples into that buffer.
 * The returned node gains a `shutdown` method that disconnects it.
 */
function createAnalyser(audioContext) {
    var node = audioContext.createAnalyser();
    node.fftSize = 2048;

    // Shared time-domain sample buffer consumed by draw()/autoCorrelate().
    buf = new Float32Array(node.frequencyBinCount);

    node.shutdown = function () {
        this.disconnect();
    };

    return node;
}

/**
 * Estimates the fundamental frequency (Hz) of the signal in `buf` using
 * time-domain autocorrelation (Chris Wilson's pitch-detection approach).
 *
 * @param buf        Float32Array of time-domain audio samples.
 * @param sampleRate sample rate of the audio context, in Hz.
 * @returns the detected frequency in Hz, or -1 when the signal is too
 *          quiet or no sufficiently strong correlation is found.
 *
 * NOTE(review): reads the module-level MIN_SAMPLES (0) as the lower
 * bound of the offset search.
 */
function autoCorrelate( buf, sampleRate ) {
    var GOOD_ENOUGH_CORRELATION = 0.9; // this is the "bar" for how close a correlation needs to be
    var SIZE = buf.length;
    var MAX_SAMPLES = Math.floor(SIZE/2);
    var best_offset = -1;
    var best_correlation = 0;
    var rms = 0;
    var foundGoodCorrelation = false;
    var correlations = new Array(MAX_SAMPLES);

    // RMS gate: bail out early when there is no meaningful signal.
    for (var i=0;i<SIZE;i++) {
        var val = buf[i];
        rms += val*val;
    }
    rms = Math.sqrt(rms/SIZE);
    if (rms<0.01) { // not enough signal
        return -1;
    }

    var lastCorrelation=1;
    for (var offset = MIN_SAMPLES; offset < MAX_SAMPLES; offset++) {
        var correlation = 0;

        // Normalized similarity between the signal and itself shifted by
        // `offset` samples; 1.0 means a perfect period of `offset` samples.
        for (var j=0; j<MAX_SAMPLES; j++) {
            correlation += Math.abs((buf[j])-(buf[j+offset]));
        }
        correlation = 1 - (correlation/MAX_SAMPLES);
        correlations[offset] = correlation; // store it, for the tweaking we need to do below.
        if ((correlation>GOOD_ENOUGH_CORRELATION) && (correlation > lastCorrelation)) {
            foundGoodCorrelation = true;
            if (correlation > best_correlation) {
                best_correlation = correlation;
                best_offset = offset;
            }
        } else if (foundGoodCorrelation) {
            // short-circuit - we found a good correlation, then a bad one, so we'd just be seeing copies from here.
            // Now we need to tweak the offset - by interpolating between the values to the left and right of the
            // best offset, and shifting it a bit.  This is complex, and HACKY in this code (happy to take PRs!) -
            // we need to do a curve fit on correlations[] around best_offset in order to better determine precise
            // (anti-aliased) offset.

            // we know best_offset >=1, 
            // since foundGoodCorrelation cannot go to true until the second pass (offset=1), and 
            // we can't drop into this clause until the following pass (else if).
            var shift = (correlations[best_offset+1] - correlations[best_offset-1])/correlations[best_offset];  
            return sampleRate/(best_offset+(8*shift));
        }
        lastCorrelation = correlation;
    }
    if (best_correlation > 0.01) {
        // console.log("f = " + sampleRate/best_offset + "Hz (rms: " + rms + " confidence: " + best_correlation + ")")
        return sampleRate/best_offset;
    }
    return -1;
//	var best_frequency = sampleRate/best_offset;
}

/**
 * Per-frame render loop (driven by gsap.ticker): reads the current pitch
 * and volume from the analyser/meter, maps them onto the font's variable
 * axes (wdth / wght / hght) via CSS custom properties, and repaints the
 * volume bar on the canvas behind the mic button.
 */
function draw() {
    // Scales meter.volume into [min, axisMax] using the original
    // axisMax * 5 scale factor, clamping values outside the range.
    function scaledAxisValue(axisMax, min) {
        var value = meter.volume * (axisMax * 5);
        if (value > axisMax) {
            return axisMax;
        }
        if (value < min) {
            return min;
        }
        return value;
    }

    // Pitch estimate in Hz (-1 when the signal is too quiet to correlate)
    analyser.getFloatTimeDomainData(buf);
    var frequency = Math.round(autoCorrelate(buf, audioContext.sampleRate));

    var widthVol = 0,
        weightVol = 0,
        heightVol = 0;

    // Low pitch (< 220 Hz) drives the width axis (100..500)
    if (frequency < 220 && frequency !== -1) {
        widthVol = scaledAxisValue(500, 100);
    }

    // High pitch (> 1 kHz) drives the height axis; any other valid
    // pitch drives the weight axis instead (both 100..900).
    if (frequency > 1000) {
        heightVol = scaledAxisValue(900, 100);
    } else if (frequency !== -1) {
        weightVol = scaledAxisValue(900, 100);
    }

    // Tween the CSS custom properties so the font morphs smoothly
    gsap.to(":root", 1, {
        "--width": widthVol,
        "--height": heightVol,
        "--weight": weightVol,
        ease: Expo.easeOut
    });

    var WIDTH = micro.clientWidth,
        HEIGHT = micro.clientHeight;

    // clear the background
    canvasContext.clearRect(0, 0, WIDTH, HEIGHT);
    canvasContext.fillStyle = bar.fill;

    // Smooth the bar height ("slow release") instead of the raw volume
    gsap.to(bar, 0.8, {
        height: meter.volume * HEIGHT,
        ease: Expo.easeOut
    });

    canvasContext.fillRect(0, HEIGHT - bar.height * 2, WIDTH, bar.height * 2);
}

/**
 * getUserMedia success callback: wires the audio graph
 *   mic stream -> source -> meter     (volume)
 *   mic stream -> source -> analyser  (pitch)
 * and starts the per-frame draw loop on the GSAP ticker.
 *
 * @param stream the microphone MediaStream granted by the user.
 */
function gotStream(stream) {
    // Create an AudioNode from the stream.
    mediaStreamSource = audioContext.createMediaStreamSource(stream);

    // Create a new volume meter and connect it.
    meter = createAudioMeter(audioContext);
    mediaStreamSource.connect(meter);

    // NOTE(review): the meter is fed both directly from the source and
    // via the analyser, so it receives the signal twice — presumably
    // intentional (keeps both nodes processing); verify if levels look off.
    analyser = createAnalyser(audioContext);
    mediaStreamSource.connect(analyser);
    analyser.connect(meter);

    // kick off the visual updating
    gsap.ticker.add(draw);
}

/**
 * Tears down the audio pipeline started by initAudioContext(): stops the
 * mic tracks, disconnects the meter/analyser, removes the draw loop,
 * closes the AudioContext, then animates the canvas bar and font axes
 * back to rest and resets the toggle button. No-op when no AudioContext
 * is open.
 */
function closeAudioContext() {
    if (!audioContext) {
        return;
    }

    // Stop MediaStream tracks (turns off the browser's mic indicator).
    // Guarded: mediaStream is still null if getUserMedia never resolved
    // (e.g. the user denied mic access after the context was created).
    if (mediaStream) {
        mediaStream.getTracks().forEach(function (track) {
            track.stop();
        });
        mediaStream = null;
    }

    if (mediaStreamSource) {
        mediaStreamSource.disconnect();
    }
    if (meter) {
        meter.shutdown();
    }
    if (analyser) {
        analyser.shutdown();
    }

    gsap.ticker.remove(draw);

    // Close audioContext, then reset the visuals.
    audioContext.close().then(function () {
        var WIDTH = micro ? micro.clientWidth : 0,
            HEIGHT = micro ? micro.clientHeight : 0;

        // clear the background
        canvasContext.clearRect(0, 0, WIDTH, HEIGHT);

        // Collapse the level bar smoothly
        gsap.to(bar, 0.8, {
            height: 0,
            ease: Expo.easeOut
        });

        // Animate the css vars back to their defaults
        gsap.to(":root", 1, {
            "--weight": 0,
            "--width": 0,
            "--height": 0,
            ease: Expo.easeOut
        });

        var button = document.querySelector('.micro button');
        button.classList.remove('active');
        button.innerHTML = 'start the mic';

        audioContext = null;
        openAudioContext = 0;
    }).catch(function (err) {
        console.log(err.name + ": " + err.message);
    });
}

/**
 * Requests microphone access and, on success, builds the audio graph
 * (via gotStream) and flips the toggle button into its "stop" state.
 * Errors (e.g. the user denying mic access) are logged to the console.
 */
function initAudioContext() {
    // Older Safari only exposes the webkit-prefixed constructor.
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    audioContext = new AudioContext();

    navigator.mediaDevices
        .getUserMedia({ audio: true, video: false })
        .then(function (stream) {
            mediaStream = stream;
            gotStream(stream);

            var toggle = document.querySelector('.micro button');
            toggle.classList.add('active');
            toggle.innerHTML = 'stop the mic';

            openAudioContext = 1;
        })
        .catch(function (err) {
            console.log(err.name + ": " + err.message);
        });
}

// Keeps the meter canvas's backing store in sync with the mic panel,
// so the volume bar fills the panel at any layout size.
function resizeCanvas() {
    var panelWidth = micro.clientWidth;
    var panelHeight = micro.clientHeight;

    canvas.width = panelWidth;
    canvas.height = panelHeight;
}

// Click handler for the mic toggle button; the button's "active" class
// tracks whether the mic is currently running.
function buttonListener() {
    var micIsRunning = this.classList.contains("active");

    if (micIsRunning) {
        closeAudioContext();
    } else {
        initAudioContext();
    }
}

// Toggle the mic on button clicks.
document.querySelector('.micro button').addEventListener('click', buttonListener, false);

// Size the meter canvas now and keep it in sync on window resizes.
resizeCanvas();
window.addEventListener('resize', resizeCanvas, false);
              
            
!
999px

Console