JavaScript preprocessors can help make authoring JavaScript easier and more convenient. For instance, CoffeeScript can help prevent easy-to-make mistakes and offer a cleaner syntax and Babel can bring ECMAScript 6 features to browsers that only support ECMAScript 5.
Any URLs added here will be added as <script> tags in order, and run before the JavaScript in the editor. You can use the URL of any other Pen and it will include the JavaScript from that Pen.
You can apply a script from anywhere on the web to your Pen. Just put a URL to it here and we'll add it, in the order you have them, before the JavaScript in the Pen itself.
If the script you link to has the file extension of a preprocessor, we'll attempt to process it before applying.
You can also link to another Pen here, and we'll pull the JavaScript from that Pen and include it. If it's using a matching preprocessor, we'll combine the code before preprocessing, so you can use the linked Pen as a true dependency.
HTML Settings
Here you can Sed posuere consectetur est at lobortis. Donec ullamcorper nulla non metus auctor fringilla. Maecenas sed diam eget risus varius blandit sit amet non magna. Donec id elit non mi porta gravida at eget metus. Praesent commodo cursus magna, vel scelerisque nisl consectetur et.
const DURATION = 6; // Loop length in seconds (the rendered buffer repeats every 6 s)
// Size of render target; WIDTH * HEIGHT texels = samples produced per GPU pass
const WIDTH = 512;
const HEIGHT = 512;
// Shadertoy-style "sound shader": each fragment computes one stereo sample at
// time t = iBlockOffset + (pixel index / iSampleRate). Each channel is packed
// as a 16-bit value split across two 8-bit RGBA components (L -> r/g, R -> b/a)
// and decoded back to floats on the JS side after readPixels.
const fragmentShader = `
precision mediump float;
uniform float iSampleRate;
uniform float iBlockOffset;
float tri(in float freq, in float time) {
return -abs(1. - mod(freq * time * 2., 2.));
}
vec2 mainSound( float time )
{
float freq = 440.;
freq *= pow(1.06 * 1.06, floor(mod(time, 6.)));
return vec2(
tri(freq, time) * sin(time * 3.141592),
tri(freq * 1.5, time) * sin(time * 3.141592)
);
}
void main() {
float t = iBlockOffset + ((gl_FragCoord.x-0.5) + (gl_FragCoord.y-0.5)*512.0) / iSampleRate;
vec2 y = mainSound(t);
vec2 v = floor((0.5+0.5*y)*65536.0);
vec2 vl = mod(v,256.0)/255.0;
vec2 vh = floor(v/256.0)/255.0;
gl_FragColor = vec4(vl.x,vh.x,vl.y,vh.y);
}`;
// Create audio context
// NOTE(review): constructing an AudioContext before a user gesture may leave
// it suspended in some browsers — confirm where this script runs.
const ctx = new window.AudioContext();
const node = ctx.createBufferSource();
node.connect(ctx.destination);
node.loop = true; // repeat the rendered DURATION-second buffer indefinitely
const audioBuffer = ctx.createBuffer(2, ctx.sampleRate * DURATION, ctx.sampleRate); // stereo, DURATION seconds
// Create canvas
// Offscreen canvas used purely as a WebGL drawing surface for the sound
// shader; it is never attached to the DOM.
const canvas = document.createElement('canvas');
canvas.width = WIDTH;
canvas.height = HEIGHT;
const renderer = new THREE.WebGLRenderer({ canvas, alpha: true });
const wctx = renderer.getContext(); // raw WebGL context — needed for readPixels below
// Create scenes
// Uniforms consumed by the sound shader: iBlockOffset is the start time (in
// seconds) of the block being rendered; iSampleRate mirrors the context rate.
const uniforms = {
iBlockOffset: { type: 'f', value: 0.0 },
iSampleRate: { type: 'f', value: ctx.sampleRate },
};
// Full-screen 2x2 quad with an orthographic camera so the fragment shader
// runs exactly once per render-target texel.
const geometry = new THREE.PlaneGeometry(2, 2);
const material = new THREE.ShaderMaterial({ uniforms, fragmentShader });
const plane = new THREE.Mesh(geometry, material);
const scene = new THREE.Scene();
const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0.1, 10);
camera.position.set(0, 0, 1);
camera.lookAt(scene.position);
scene.add(plane);
const target = new THREE.WebGLRenderTarget(WIDTH, HEIGHT); // one block = WIDTH*HEIGHT samples
// Render
// Each GPU pass produces WIDTH*HEIGHT samples; render enough blocks to cover
// DURATION seconds of audio at the context's sample rate (numBlocks may be
// fractional — the final partial block's out-of-range writes are no-ops).
const samples = WIDTH * HEIGHT;
const numBlocks = (ctx.sampleRate * DURATION) / samples;
// Hoisted out of the loop: one reusable pixel buffer, and the channel views
// fetched once — getChannelData returns a view over the same backing store,
// so this is behavior-preserving and avoids per-block allocation.
const pixels = new Uint8Array(WIDTH * HEIGHT * 4);
const outputDataL = audioBuffer.getChannelData(0);
const outputDataR = audioBuffer.getChannelData(1);
for (let i = 0; i < numBlocks; i++) {
  // Position this block on the timeline (seconds) and render it to the target.
  uniforms.iBlockOffset.value = i * samples / ctx.sampleRate;
  // NOTE(review): the 4-argument render(scene, camera, target, forceClear)
  // signature is the pre-r102 three.js API; newer versions require
  // renderer.setRenderTarget(target) first — confirm the three.js version.
  renderer.render(scene, camera, target, true);
  // Read back the block's pixels and decode each channel's 16-bit value
  // (low byte + 256 * high byte) back into a float in [-1, 1].
  wctx.readPixels(0, 0, WIDTH, HEIGHT, wctx.RGBA, wctx.UNSIGNED_BYTE, pixels);
  for (let j = 0; j < samples; j++) {
    outputDataL[i * samples + j] = (pixels[j * 4 + 0] + 256 * pixels[j * 4 + 1]) / 65535 * 2 - 1;
    outputDataR[i * samples + j] = (pixels[j * 4 + 2] + 256 * pixels[j * 4 + 3]) / 65535 * 2 - 1;
  }
}
// Play
node.buffer = audioBuffer; // attach the fully rendered stereo buffer
node.start(0); // begin looped playback immediately
Also see: Tab Triggers