
HTML

              
<!-- Shader Session with Sol Sarratea | 2021
 *
 * Allow camera access on your device to play with this experiment. You might need to reload this Pen after enabling the camera.
 *
 * To learn more about feedback loops and shaders, we suggest reading the blog post where Sol explains her process for creating this:
 * https://solquemal.com/seeds/feedback-systems
 *
 * Find a repo with Sol's code:
 * https://github.com/solsarratea/simple-feedback-system/
 *
 * Sol: 
 * https://twitter.com/solquemal
 * https://solquemal.com
 *
 * We're curiouslyminded (eliza & ilithya):
 * https://www.curiouslyminded.xyz
 * https://www.twitch.tv/curiouslyminded
 * https://www.youtube.com/c/curiouslyminded -->

<div id="shadercollab"></div>
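<!-- The three.js <canvas> is appended into this container by setupMainScene() in the JS -->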

<script id="vertex" type="x-shader/x-vertex">
varying vec2 texCoordVarying;
	
void main() {
	texCoordVarying = uv;
	
	gl_Position = projectionMatrix*modelViewMatrix*vec4(position,1.0);
}
</script>

<script id="copy" type="x-shader/x-fragment">
varying vec2 texCoordVarying;
uniform sampler2D channel0;
	
vec2 rotate(vec2 v, float a) {
	return mat2(cos(a), -sin(a), sin(a), cos(a))*v;
}

void main() {
	vec4 inText = texture2D(channel0,texCoordVarying);
	// vec4 inText2 = texture2D(channel0,texCoordVarying * 0.9);
	
	gl_FragColor = inText;
}
</script>

<script id="diffusion" type="x-shader/x-fragment">
precision highp float;
uniform sampler2D webcam;
uniform sampler2D backbuffer;
uniform float time;

varying vec2 texCoordVarying;
uniform vec2 resolution;

const vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
	
vec3 hsv2rgb(vec3 c) {
	vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
	
	return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}

void main() {
	vec2 pixel = texCoordVarying; 
	vec3 finalColor;
	vec3 webcamCapture = texture2D(webcam,pixel).rgb;
	vec3 lastFrame = texture2D(backbuffer,pixel).rgb;

	// Objective: update pixel color, from information of previous frame
	float step = 1.;
	float dx = pixel.x/gl_FragCoord.x * step;
	float dy = pixel.y/gl_FragCoord.y * step; // size of pixel
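	// texCoordVarying / gl_FragCoord is roughly 1.0 / resolution, so dx and dy step one texel in UV space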

	// 1. Get neighbourhood from prev buffer
	vec3 N = texture2D(backbuffer, vec2(pixel.x,pixel.y+dy)).rgb;
	vec3 S = texture2D(backbuffer, vec2(pixel.x,pixel.y-dy)).rgb;
	vec3 E = texture2D(backbuffer, vec2(pixel.x+dx,pixel.y)).rgb;
	vec3 W = texture2D(backbuffer, vec2(pixel.x-dx,pixel.y)).rgb;

	vec3 NE = texture2D(backbuffer, vec2(pixel.x+dx,pixel.y + dy)).rgb;
	vec3 NW = texture2D(backbuffer, vec2(pixel.x-dx,pixel.y + dy)).rgb;
	vec3 SE = texture2D(backbuffer, vec2(pixel.x+dx,pixel.y-dy)).rgb;
	vec3 SW = texture2D(backbuffer, vec2(pixel.x-dx,pixel.y-dy)).rgb;

	vec3 new_color = lastFrame * -1.; // current color  
	
	// 2. DO something with neighbours
	// Rates of diffusion
	float diff1 = .2;
	float diff2 = .2;
	float diff3 = .3;
	float diff4 = .2;

	new_color += N * vec3(diff1, diff2, diff3);
	new_color += S * vec3(diff1, diff2, diff3);
	new_color += E * vec3(diff1, diff2, diff3);
	new_color += W * vec3(diff1, diff2, diff3);

	new_color += NE * diff4;
	new_color += SE * diff4;
	new_color += NW * diff4;
	new_color += SW * diff4;

	new_color /= 9.;

	float r = new_color.r * pixel.y;
	float b = new_color.b * pixel.x;

	new_color += 0.92 + webcamCapture * 20. * (r*r-b) - 4.0 * r;
	new_color = smoothstep(0.0, abs(sin(time*0.2)*0.3), new_color);

	new_color = hsv2rgb(
		new_color*vec3(
			sin(10.0 * lastFrame.r/new_color.g),
			0.32,
			1.0
		)
	);

	finalColor = mix(new_color, webcamCapture, .15);

	gl_FragColor = vec4(finalColor, 1.);
}
</script>
              
            

CSS

              
$c1: black;

* {
	user-select: none;
}

body {
	height: 100vh;
	background-color: $c1;
	margin: 0;
	padding: 0;
	overflow: hidden;
	position: relative;
}
              
            

JS

              
/*
 * SHADER SESSION WITH SOL SARRATEA
 * MAY 2021
 *
 * Allow camera access on your device to play with this experiment. You might need to reload this Pen after enabling the camera.
 *
 * To learn more about feedback loops and shaders, we suggest reading the blog post where Sol explains her process for creating this:
 * https://solquemal.com/seeds/feedback-systems
 *
 * Find a repo with Sol's code:
 * https://github.com/solsarratea/simple-feedback-system/
 *
 * Sol:
 * https://twitter.com/solquemal
 * https://solquemal.com
 *
 * We're curiouslyminded (eliza & ilithya):
 * https://www.curiouslyminded.xyz
 * https://www.twitch.tv/curiouslyminded
 * https://www.youtube.com/c/curiouslyminded
 *
 */

let webcamTexture, video;
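// Webcam capture: stream the user's camera into a hidden <video> element and wrap it
// in a THREE.VideoTexture so the diffusion shader can sample it as the `webcam` uniform.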
function initWebcamCapture() {
	video = document.createElement("video");
	video.autoplay = true;
	video.style = "display:none";
	video.id = "feedCam";

	if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia && video) {
		const constraints = {
			video: {
				width: 1280,
				height: 720,
				facingMode: "user"
			}
		};

		navigator.mediaDevices
			.getUserMedia(constraints)
			.then((stream) => {
				video.srcObject = stream;
				video.play();
			})
			.catch((error) => {
				console.error("Unable to access the camera/webcam.", error);
			});
	} else {
		console.error("MediaDevices interface not available.");
	}

	webcamTexture = new THREE.VideoTexture(video);
	webcamTexture.minFilter = THREE.LinearFilter;
	webcamTexture.magFilter = THREE.LinearFilter;
	webcamTexture.needsUpdate = true;
}

let camera, scene, renderer, clock;
function setupMainScene() {
	const container = document.getElementById("shadercollab");

	scene = new THREE.Scene();
	camera = new THREE.Camera();
	renderer = new THREE.WebGLRenderer({ preserveDrawingBuffer: true });

	const DPR = window.devicePixelRatio ? window.devicePixelRatio : 1;
	renderer.setPixelRatio(DPR);

	container.appendChild(renderer.domElement);

	onWindowResize();
	window.addEventListener("resize", onWindowResize);

	clock = new THREE.Clock();
}

let copyScene, diffusionScene, ping, pong, alt;
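// Feedback setup: "ping" and "pong" render targets are swapped every frame so the
// diffusion shader can read the previous frame (backbuffer) while writing the next one;
// "alt" is a spare target used by the copy pass in render_2.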
function setupBufferScenes() {
	copyScene = new THREE.Scene();
	diffusionScene = new THREE.Scene();

	const renderTargetParams = {
		minFilter: THREE.LinearFilter,
		magFilter: THREE.LinearFilter, // magFilter only supports NearestFilter or LinearFilter
		format: THREE.RGBAFormat,
		type: THREE.FloatType
	};

	ping = new THREE.WebGLRenderTarget(
		window.innerWidth,
		window.innerHeight,
		renderTargetParams
	);
	pong = new THREE.WebGLRenderTarget(
		window.innerWidth,
		window.innerHeight,
		renderTargetParams
	);
	alt = new THREE.WebGLRenderTarget(
		window.innerWidth,
		window.innerHeight,
		renderTargetParams
	);
}

let copyMaterial, diffusionMaterial;
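// Build the two full-screen ShaderMaterials from the inline <script> shader sources above.
// Texture-type uniforms take a render target's .texture property, not the target itself.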
function initBufferScenes() {
	copyMaterial = new THREE.ShaderMaterial({
		uniforms: {
			channel0: {
				type: "t",
				value: pong.texture
			}
		},
		vertexShader: document.getElementById("vertex").innerHTML,
		fragmentShader: document.getElementById("copy").innerHTML
	});

	const copyObject = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), copyMaterial);
	copyScene.add(copyObject);

	diffusionMaterial = new THREE.ShaderMaterial({
		uniforms: {
			webcam: { type: "t", value: webcamTexture },
			backbuffer: { type: "t", value: pong.texture },
			time: { type: "f", value: 0 },
			resolution: {
				type: "v2",
				value: new THREE.Vector2(window.innerWidth, window.innerHeight)
			}
		},
		vertexShader: document.getElementById("vertex").innerHTML,
		fragmentShader: document.getElementById("diffusion").innerHTML
	});

	const diffusionObject = new THREE.Mesh(
		new THREE.PlaneGeometry(2, 2),
		diffusionMaterial
	);
	diffusionScene.add(diffusionObject);
}

let quad;
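// Full-screen quad that displays whichever render target was written to last.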
function initMainScene() {
	const geom = new THREE.PlaneGeometry(2, 2);
	quad = new THREE.Mesh(geom, new THREE.MeshBasicMaterial({ map: ping.texture }));
	scene.add(quad);
}

function onWindowResize() {
	renderer.setSize(window.innerWidth, window.innerHeight);
}

/* OPTION #1 TO RENDER */
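// Single diffusion pass into ping, then swap ping/pong so the next frame's backbuffer
// reads what was just written.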
function render_1() {
	diffusionMaterial.uniforms.time.value = clock.getElapsedTime();

	if (renderer.info.render.frame % 1 == 0) {
		for (let i = 0; i < 1; i++) {
			// Apply Diffusion shader and save output in ping
			renderer.setRenderTarget(ping);
			renderer.render(diffusionScene, camera);
			renderer.setRenderTarget(null);
			renderer.clear();

			// Swap ping and pong
			const temp = pong;
			pong = ping;
			ping = temp;

			// Update channels
			diffusionMaterial.uniforms.backbuffer.value = pong.texture;
		}

		quad.material.map = ping.texture;
	}

	// Render Main Scene
	renderer.render(scene, camera);
}

/* OPTION #2 TO RENDER */
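// Two iterations per frame: the copy pass writes into alt (which feeds the diffusion
// backbuffer), and the diffusion pass writes into ping before the ping/pong swap.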
function render_2() {
	diffusionMaterial.uniforms.time.value = clock.getElapsedTime();

	if (renderer.info.render.frame % 1 == 0) {
		for (let i = 0; i < 2; i++) {
			// Apply Copy shader and save output in alt
			renderer.setRenderTarget(alt);
			renderer.render(copyScene, camera);
			renderer.setRenderTarget(null);
			renderer.clear();

			// Apply Diffusion shader and save output in ping
			renderer.setRenderTarget(ping);
			renderer.render(diffusionScene, camera);
			renderer.setRenderTarget(null);
			renderer.clear();

			// Swap ping and pong
			const temp = pong;
			pong = ping;
			ping = temp;

			// Update channels
			diffusionMaterial.uniforms.backbuffer.value = alt.texture;
			copyMaterial.uniforms.channel0.value = pong.texture;
		}

		quad.material.map = ping.texture;
	}

	// Render Main Scene
	renderer.render(scene, camera);
}

/* OPTION #3 TO RENDER */
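// Every other frame, two iterations: the copy pass writes into ping and the diffusion
// pass into pong, each feeding the other's input; the copy scene is drawn to screen.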
function render_3() {
	diffusionMaterial.uniforms.time.value = clock.getElapsedTime();

	if (renderer.info.render.frame % 2 == 0) {
		for (let i = 0; i < 2; i++) {
			renderer.setRenderTarget(ping);
			renderer.render(copyScene, camera);
			renderer.setRenderTarget(null);
			renderer.clear();

			renderer.setRenderTarget(pong);
			renderer.render(diffusionScene, camera);
			renderer.setRenderTarget(null);
			renderer.clear();

			// Update channels
			diffusionMaterial.uniforms.backbuffer.value = ping.texture;
			copyMaterial.uniforms.channel0.value = pong.texture;
		}
	}

	// Render Copy Scene
	renderer.render(copyScene, camera);
}

function animate() {
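	// Only one of the three render options should be active at a time; render_3 is enabled here.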
	// render_1();
	// render_2();
	render_3();
	requestAnimationFrame(animate);
}
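
// Boot sequence: webcam first (so webcamTexture exists before the materials are built),
// then scene, render targets, shader materials, display quad, and finally the animation loop.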

initWebcamCapture();
setupMainScene();
setupBufferScenes();
initBufferScenes();
initMainScene();
animate();
              
            