<section id="links">
  <!-- Links open in a new tab; rel guards against reverse tabnabbing -->
  <a target="_blank" rel="noopener noreferrer" href="https://codepen.io/Rumyra/pen/bBqbBX?editors=0110">View over Https</a>
  <a target="_blank" rel="noopener noreferrer" href="https://codepen.io/Rumyra/pen/pEMGEP">Read Blog Post</a>
</section>
<!-- Visualiser container: the 8 divs are frequency bars. Their widths are
     driven by the --freq1..--freq8 custom properties the script sets, and
     the section is cloned/mirrored into four rotated quarters by the JS. -->
<div id="screen">
  <section>
    <div></div>
    <div></div>
    <div></div>
    <div></div>
    <div></div>
    <div></div>
    <div></div>
    <div></div>
  </section>
</div>
:root {
  // Base value; the script publishes --freq1..--freq9 per animation frame.
  --freq:1;
}
html {background:#efefef; font-family:"Lato", "Lucida Grande","Lucida Sans Unicode", Tahoma, Sans-Serif; letter-spacing:-0.2px;}
body {margin:0px; padding:0px; overflow:hidden;}
#links {
  display:flex; padding:5px;
  justify-content:space-around;
  a {
    display:block; padding:5px 15px;
    border: 3px solid #343436; border-radius: 3px;
    background-color:#343436;
    text-align:center; text-decoration:none; color:#fff;
    &:hover {background-color:#4d4d50;}
  }
}
#screen {
  position: relative; padding:1vh;
  width: 100vw; height: 100vh;
  box-sizing: border-box;
  background: black;
}
#screen section div {
  height:20px;
  @for $i from 1 through 8 {
    &:nth-of-type(#{$i}) {
      // Fallback of 1 keeps calc() valid (and the bar visible) before the
      // script defines the --freq custom property for this bar; without it
      // the whole declaration is invalid at computed-value time.
      width:calc(300px*var(--freq#{$i}, 1) );
      // Hue steps 30deg per bar starting at 120; border is a stronger alpha
      // of the same colour.
      background-color:hsla((30*$i)+90,60%,60%,0.7);
      border:1px solid hsla((30*$i)+90,60%,60%,0.9);
      // Staggered duration/delay per bar so the bars drift out of phase.
      animation: moveDivs #{$i*400}ms linear #{$i*200}ms alternate infinite;
    }
  }
}
@keyframes moveDivs {
  0% {transform: translateY(0vh);}
  100% {transform: translateY(50vh);}
}
// "View Compiled" — CodePen UI label captured by the export; not source code.
// Resolve the Web Audio constructor (standard, or WebKit-prefixed for older Safari).
var AudioContextClass = window.AudioContext || window.webkitAudioContext;
// Shared audio context used by the rest of the script.
var audioAPI;
if (AudioContextClass) {
  // Web Audio API is available — create the context.
  audioAPI = new AudioContextClass();
} else {
  // Web Audio API is not available. Ask the user to use a supported browser.
  alert("Oh nos! It appears your browser does not support the Web Audio API, please upgrade or use a different browser");
}
// variables
// analyserNode is assigned later by createAnalyserNode(); frequencyData holds
// one byte per frequency bin — 64 entries, matching the analyser's
// fftSize of 128 (bin count = fftSize / 2).
var analyserNode,
frequencyData = new Uint8Array(64);
// Cached DOM references for the visualiser container and its repeated sections.
// NOTE(review): totalEls is not referenced anywhere below — looks like a
// leftover; verify before removing.
const screen = document.querySelector('#screen'),
allRepeatedEls = document.querySelectorAll('#screen section'),
totalEls = allRepeatedEls.length;
// Create an analyser on the shared audio context, wire the given source node
// into it, and publish it via the module-level analyserNode variable.
// fftSize of 128 yields 64 frequency bins — the length of frequencyData.
function createAnalyserNode(audioSource) {
  var node = audioAPI.createAnalyser();
  node.fftSize = 128;
  audioSource.connect(node);
  analyserNode = node;
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// set which element to be symmetry'd
const fragmentedSection = document.querySelector('#screen section');
// TODO: add amount of quarters - rotate them around
// Builds a four-quarter kaleidoscope inside `screen`: the fragment is
// duplicated and mirrored inside one "quarter" wrapper, the original fragment
// node is removed, and the quarter is cloned three more times, each clone
// rotated a further 90deg about its bottom-right corner (the screen centre).
// fragment: element to mirror (removed from `screen` as a side effect).
// screen: container that receives the four .quarter divs.
function createQuarter(fragment, screen) {
// create a new 'quarter', set styles
let quarterEl = document.createElement('div');
// Assigning a string to classList sets the class via DOMTokenList's value
// forwarding (equivalent to className = 'quarter' in browsers).
quarterEl.classList = 'quarter';
quarterEl.style.width = '50vw';
quarterEl.style.height = '50vh';
quarterEl.style.overflow = 'hidden';
// duplicate original fragment to be mirrored in quarter
let newHtml = fragment.outerHTML+fragment.outerHTML;
quarterEl.innerHTML = newHtml;
screen.removeChild(fragment);
screen.appendChild(quarterEl);
// rotate and set styles on new fragments within quarter
// (selects the two copies just injected; fragment.localName is 'section' here)
let newFragments = document.querySelectorAll('.quarter '+fragment.localName);
for (let i = newFragments.length - 1; i >= 0; i--) {
newFragments[i].style.width = '50vw';
newFragments[i].style.height = '50vh';
newFragments[i].style.transform = 'rotateZ(-45deg)';
}
// The second copy is pulled back over the first and flipped on Y so the pair
// mirror each other along the quarter's diagonal.
newFragments[1].style.transform = 'translateY(-50vh) rotateY(180deg) rotateZ(45deg)';
// rotate quarter
// Clone the finished quarter three times (i = 2, 1, 0) for four in total.
for (var i=3-1; i>=0; i--) {
let nextQuarter = quarterEl.cloneNode(true);
screen.appendChild(nextQuarter);
}
// Rotate quarters 1..3 by 90deg increments around their bottom-right corner;
// index 0 (the original quarter) is deliberately left untransformed.
const allQuarters = document.querySelectorAll('.quarter');
for (var i=allQuarters.length-1; i>0; i--) {
allQuarters[i].style.transform = 'translateY('+i*-50+'vh) rotateZ('+i*90+'deg)';
allQuarters[i].style.transformOrigin = 'right bottom';
}
}
createQuarter(fragmentedSection, screen);
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Per-frame loop: reads the latest byte frequency data from the analyser and
// publishes it as --freq1..--freq9 custom properties on <html>, which the CSS
// uses to size the bars. Reschedules itself via requestAnimationFrame.
function animateStuff() {
  requestAnimationFrame(animateStuff);
  analyserNode.getByteFrequencyData(frequencyData);
  const rootStyle = document.documentElement.style;
  let band = 1;
  while (band < 10) {
    // Sample every other bin; 0-255 byte values scale to roughly 0-2.
    rootStyle.setProperty('--freq' + band, frequencyData[band * 2] / 125);
    band++;
  }
}
// getUserMedia success callback -> pipe audio stream into audio API
// stream: the MediaStream granted by getUserMedia. Creates a source node from
// it, wires it into the analyser, then starts the animation loop.
var gotStream = function(stream) {
// Create an audio input from the stream.
var audioSource = audioAPI.createMediaStreamSource(stream);
createAnalyserNode(audioSource);
animateStuff();
}
// pipe in analysing to getUserMedia
// The .catch prevents an unhandled promise rejection when the user denies
// microphone access or no audio input device is available.
navigator.mediaDevices.getUserMedia({ audio: true, video: false })
.then(gotStream)
.catch(function(err) {
console.error('Could not access microphone:', err);
});
// This Pen doesn't use any external CSS resources.
// This Pen doesn't use any external JavaScript resources.