HTML

<script id="vertexShader" type="x-shader/x-vertex">
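// Pass the plane's UV coordinates through to the fragment shaders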
varying vec2 vUvs;

void main() {
    gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
    vUvs = uv;
}
</script>

<script id="fragmentShaderScreen" type="x-shader/x-fragment">
precision mediump float;

// The texture produced by the off-screen buffer pass
uniform sampler2D uTexture;
varying vec2 vUvs;

void main() {
    // Sample the simulation texture and display it as-is
    vec4 initTexture = texture2D(uTexture, vUvs);
    vec3 color = initTexture.rgb;
    gl_FragColor = vec4(color, 1.0);
}
</script>

<script id="fragmentShaderBuffer" type="x-shader/x-fragment">
precision mediump float;

// The texture holding the previous generation of cells
uniform sampler2D uTexture;
uniform vec2 uResolution;

varying vec2 vUvs;

// Count the live cells among the eight neighbours of the cell at p
float GetNeighbours(vec2 p) {
    float count = 0.0;

    for(float y = -1.0; y <= 1.0; y++) {
        for(float x = -1.0; x <= 1.0; x++) {

            // Skip the centre cell itself
            if(x == 0.0 && y == 0.0)
                continue;

            // Scale the offset down to one texel
            vec2 offset = vec2(x, y) / uResolution.xy;
            // Apply the offset and sample the texture
            vec4 lookup = texture2D(uTexture, p + offset);
            // Accumulate the result
            count += lookup.r > 0.5 ? 1.0 : 0.0;
        }
    }

    return count;
}

void main() {
    /*
        Using a temporary variable for the output value for clarity;
        it is just passed to gl_FragColor at the end of the function.
    */
    vec3 color = vec3(0.0);

    /*
        Time to count the population of the neighbourhood!
        We count the live cells in the 3x3 area centred on this cell,
        excluding the cell itself:
         _ _ _
        |_|_|_|     [-1, -1], [0, -1], [1, -1],
        |_|_|_|  =  [-1,  0],          [1,  0],
        |_|_|_|     [-1,  1], [0,  1], [1,  1],

        Since each cell should only hold a value of either 0 (dead) or
        1 (alive), the count would be an integer, but texture sampling
        returns floats, so we keep everything in floats.
    */
    float neighbors = GetNeighbours(vUvs);

    bool alive = texture2D(uTexture, vUvs).x > 0.5;

    if(alive && (neighbors == 2.0 || neighbors == 3.0)) {
        // Any live cell with two or three live neighbours
        // lives on to the next generation.
        color = vec3(1.0, 0.0, 0.0);
    } else if(!alive && (neighbors == 3.0)) {
        // Any dead cell with exactly three live neighbours
        // becomes a live cell, as if by reproduction.
        color = vec3(1.0, 0.0, 0.0);
    }
    // All other cells die or stay dead.

    gl_FragColor = vec4(color, 1.0);
}
</script>

CSS

* { margin: 0; padding: 0; }

JS

import * as THREE from "https://cdn.skypack.dev/three@0.146.0";

/**
 * Base
 */

// Scenes
const scene = new THREE.Scene();
const bufferScene = new THREE.Scene();
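// bufferScene is rendered off-screen into the ping-pong buffers;
// scene draws the latest result to the canvas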

/**
 * Sizes
 */
const sizes = {
  width: window.innerWidth,
  height: window.innerHeight
};

/**
 * Textures
 */
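// Seed the first generation: each pixel is randomly alive (white) or dead (black)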
const dataTexture = createDataTexture();

/**
 * Meshes
 */
// Geometry
const geometry = new THREE.PlaneGeometry(2, 2);

// Screen resolution, shared by both shader materials below.
// The shaders declare uResolution as vec2, so only x and y are read.
const resolution = new THREE.Vector3(
  sizes.width,
  sizes.height,
  window.devicePixelRatio
);

/**
 * Render Buffers
 */
// Create a new framebuffer we will use to render to
// the video card memory
let renderBufferA = new THREE.WebGLRenderTarget(sizes.width, sizes.height, {
  // Note on precision: rendering fractional values in [0, 1] into a default
  // RGBA/UnsignedByte render target stores only 8 bits (256 integer values)
  // per channel, which loses precision once the result is sampled again.
  // type: THREE.FloatType makes the underlying texture a float texture
  // that retains full precision across the ping-pong passes.
  minFilter: THREE.NearestFilter,
  magFilter: THREE.NearestFilter,
  format: THREE.RGBAFormat,
  type: THREE.FloatType,
  stencilBuffer: false
});

let renderBufferB = new THREE.WebGLRenderTarget(sizes.width, sizes.height, {
  minFilter: THREE.NearestFilter,
  magFilter: THREE.NearestFilter,
  format: THREE.RGBAFormat,
  type: THREE.FloatType,
  stencilBuffer: false
});
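
// The shader sources below are read from the <script type="x-shader/…"> tags in the HTML panel.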

// Buffer Material
const bufferMaterial = new THREE.ShaderMaterial({
  uniforms: {
    uTexture: { value: dataTexture },
    uResolution: { value: resolution }
  },
  vertexShader: document.getElementById("vertexShader").textContent,
  fragmentShader: document.getElementById("fragmentShaderBuffer").textContent
});

// Screen Material
const quadMaterial = new THREE.ShaderMaterial({
  uniforms: {
    // The screen quad receives its texture from the off-screen framebuffer
    uTexture: { value: null },
    uResolution: { value: resolution }
  },
  vertexShader: document.getElementById("vertexShader").textContent,
  fragmentShader: document.getElementById("fragmentShaderScreen").textContent
});

// Screen mesh
const mesh = new THREE.Mesh(geometry, quadMaterial);
scene.add(mesh);

// Buffer mesh, rendered into the off-screen framebuffer
const bufferMesh = new THREE.Mesh(geometry, bufferMaterial);
bufferScene.add(bufferMesh);

/**
 * Renderer
 */
const renderer = new THREE.WebGLRenderer();
renderer.setSize(sizes.width, sizes.height);
renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2));
document.body.appendChild(renderer.domElement);

const onWindowResize = () => {
  // Update sizes
  sizes.width = window.innerWidth
  sizes.height = window.innerHeight

  // The fixed [-1, 1] orthographic camera has no aspect to update,
  // but keep its projection matrix in sync
  camera.updateProjectionMatrix()

  // Update renderer
  renderer.setSize(sizes.width, sizes.height)
  renderer.setPixelRatio(Math.min(window.devicePixelRatio, 2))

  // Update uniforms. Both materials share the same resolution Vector3,
  // so updating it once updates uResolution in both shaders.
  quadMaterial.uniforms.uResolution.value.x = sizes.width
  quadMaterial.uniforms.uResolution.value.y = sizes.height
}

window.addEventListener('resize', onWindowResize)
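
// Note: the render targets themselves are not resized above, so the
// simulation keeps running on a grid with the original dimensions.
// A hypothetical extension of onWindowResize could resize the ping-pong
// buffers too. WebGLRenderTarget.setSize() reallocates the underlying
// textures, which wipes the current generation, so it would need to
// re-seed the simulation afterwards, e.g.:
//
//   renderBufferA.setSize(sizes.width, sizes.height)
//   renderBufferB.setSize(sizes.width, sizes.height)
//   bufferMaterial.uniforms.uTexture.value = createDataTexture()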

/**
 * Camera
 */
// Base camera
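// The [-1, 1] box of this camera exactly frames the 2x2 plane geometry,
// so the quad fills the entire viewport.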
const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);


/**
 * Animate
 */

const tick = () => {
  // Explicitly set renderBufferA as the framebuffer to render to;
  // the output of this pass is stored in renderBufferA's texture
  renderer.setRenderTarget(renderBufferA)
  // This will contain the ping-pong accumulated texture
  renderer.render(bufferScene, camera)

  mesh.material.uniforms.uTexture.value = renderBufferA.texture;
  // Set the default framebuffer (i.e. the screen) back as the output
  renderer.setRenderTarget(null)
  // Render to the screen
  renderer.render(scene, camera);

  // Ping-pong: swap renderBufferA and renderBufferB at the end of each
  // frame so the previous frame's *output* becomes the next frame's *input*
  const temp = renderBufferA
  renderBufferA = renderBufferB
  renderBufferB = temp
  bufferMaterial.uniforms.uTexture.value = renderBufferB.texture;

  // Call tick again on the next frame
  window.requestAnimationFrame(tick)
}

tick()


/**
 * CREATE RANDOM NOISY TEXTURE
 */

function createDataTexture() {
  // Create a buffer with color data: 4 bytes (RGBA) per pixel
  const size = sizes.width * sizes.height;
  const data = new Uint8Array(4 * size);

  // Seed each cell as alive (white) or dead (black) with equal probability
  for (let i = 0; i < size; i++) {
    const stride = i * 4;

    if (Math.random() < 0.5) {
      data[stride] = 255;
      data[stride + 1] = 255;
      data[stride + 2] = 255;
      data[stride + 3] = 255;
    } else {
      data[stride] = 0;
      data[stride + 1] = 0;
      data[stride + 2] = 0;
      data[stride + 3] = 255;
    }
  }

  // Use the buffer to create a DataTexture
  const texture = new THREE.DataTexture(
    data,
    sizes.width,
    sizes.height,
    THREE.RGBAFormat
  );

  // Tell Three.js to upload the new data to the GPU
  texture.needsUpdate = true;

  return texture;
}

              
            