Pen Settings

HTML

CSS

CSS Base

Vendor Prefixing

Add External Stylesheets/Pens

Any URLs added here will be added as <link>s in order, and before the CSS in the editor. If you link to another Pen, it will include the CSS from that Pen. If the preprocessor matches, it will attempt to combine them before processing.

+ add another resource

JavaScript

Babel includes JSX processing.

Add External Scripts/Pens

Any URLs added here will be added as <script>s in order, and run before the JavaScript in the editor. You can use the URL of any other Pen, and it will include the JavaScript from that Pen.

+ add another resource

Packages

Add Packages

Search for and use JavaScript packages from npm here. By selecting a package, an import statement will be added to the top of the JavaScript editor for this package.

Behavior

Save Automatically?

If active, Pens will autosave every 30 seconds after being saved once.

Auto-Updating Preview

If enabled, the preview panel updates automatically as you code. If disabled, use the "Run" button to update.

Format on Save

If enabled, your code will be formatted when you actively save your Pen. Note: your code becomes un-folded during formatting.

Editor Settings

Code Indentation

Want to change your Syntax Highlighting theme, Fonts and more?

Visit your global Editor Settings.

HTML

              
                <!-- Demo root: hint text plus the canvas WTCGL renders into.
                     NOTE(review): the width/height attributes here (500×1758) differ from
                     the JS `dimensions` [1024, 512]; presumably WTCGL resizes the canvas
                     at construction — confirm against the library. -->
                <div>
<p>Move your mouse</p>
<canvas id="webgl" width="500" height="1758"></canvas>
</div>

<script id="vertexShader" type="x-shader/x-vertex">
  // Pass-through vertex shader: the full-screen quad's position is already in
  // clip space, so it is forwarded unchanged. The model-view / projection
  // uniforms previously declared here were never read (the compiler optimizes
  // unused uniforms out and getUniformLocation returns null for them), so
  // they have been removed.
  attribute vec4 a_position;

  void main() {
    gl_Position = a_position;
  }
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
  // Three-channel reaction–diffusion ("rock-paper-scissors") effect.
  // Runs in two modes selected by u_buffer_pass: a simulation pass that
  // writes into a ping-pong framebuffer, and a display pass that maps the
  // simulation channels to colours on screen.
  #extension GL_OES_standard_derivatives : enable
  precision highp float;
  
  uniform vec2 u_resolution;     // canvas resolution in pixels
  uniform bool u_mousedown;      // true while a mouse button is held
  uniform vec2 u_mouse;          // current pointer position (centred screen space)
  uniform vec2 u_oldmouse;       // pointer position from the previous move event
  uniform float u_time;          // elapsed time — not referenced in this shader
  uniform sampler2D u_noise;     // noise texture loaded by the JS — not referenced here
  
  uniform int u_frame;           // frame counter; frame 0 seeds the buffer
  
  uniform sampler2D u_b_buffer;  // previous frame's simulation state
  uniform bool u_buffer_pass;    // true = simulation pass, false = display pass
  
  // Neighbourhood radius for the box average, and reaction coefficients.
  const int samplesize = 5; 
  const float a = 1.45;
  const float b = 1.75;
  const float g = 1.65;
  const float reactionStrength = .125;  // blend factor toward the reacted state
  
  // Palette endpoints for the display pass; c2 also seeds/injects state.
  const vec3 c1 = vec3( 0.494, 0, 1 );
  const vec3 c2 = vec3( 0.506, 1, 0 );
  
  #define PI 3.141592653589793
  
  // Convert gl_FragCoord to centred screen space, normalised by the shorter
  // canvas axis so the coordinate system is aspect-correct.
  vec2 getScreenSpace() {
    float shortSide = min(u_resolution.y, u_resolution.x);
    return (gl_FragCoord.xy - u_resolution.xy * 0.5) / shortSide;
  }
  
  // Unsigned distance from point p to the line segment a–b
  // (standard segment SDF: project p onto the segment, clamp, measure).
  float sdSegment( in vec2 p, in vec2 a, in vec2 b ) {
    vec2 toP = p - a;
    vec2 dir = b - a;
    float t = clamp( dot(toP, dir) / dot(dir, dir), 0.0, 1.0 );
    return length( toP - dir * t );
  }
  
  // One simulation step: box-average the neighbourhood, apply the
  // rock-paper-scissors reaction, blend toward it, and inject colour
  // along the mouse-movement segment.
  vec4 render_effect() {
    // Sample coordinate is offset by the mouse, which shears the whole
    // field as the pointer moves.
    vec2 sample = (gl_FragCoord.xy+u_mouse*10.) / u_resolution.xy;
    vec2 px = vec2(1.0 / u_resolution.xy); 

    // Box filter over a (2*samplesize+1)^2 neighbourhood of last frame.
    vec4 col = vec4(0);
   	float i = 0.0;

    for (int x = -samplesize; x <= samplesize; x++) {
      for (int y = -samplesize; y <= samplesize; y++) {
        col += texture2D(u_b_buffer, sample + px * vec2(x, y));
        i++; 
      }
    }
    col /= i;
    
    if(u_frame < 1) {
       // First frame: seed the whole buffer with a saturated c2.
       return vec4(c2*9., 1);
    } else {
      float r = smoothstep(0.0, 1.0, col.x * col.y * col.z); 
      // float _a = a * (1. + .5*(gl_FragCoord.x/u_resolution.x));
      // float _b = b * (1. + .5*(gl_FragCoord.x/u_resolution.x));
      // float _g = g * (1. + .5*(gl_FragCoord.x/u_resolution.x));
      float _a = a;
      float _b = b;
      float _g = g;
      // Cyclic dominance: each channel grows from one neighbour and is
      // consumed by the other (r/g/b play rock-paper-scissors).
      vec3 reaction = vec3(
        col.r + r + col.r * (_a * _g * col.g) - col.b,
        col.g - r + col.g * ((_b * col.b) - (_a * col.r)),
        col.b - r + col.b * ((_g * col.r) - (_b * col.g))
      );
  
      // Ease toward the reacted state to keep the simulation stable.
      reaction = texture2D(u_b_buffer, sample).xyz * (1.-reactionStrength) + reaction * reactionStrength;
      
      if(u_mousedown) {
        // Holding the button floods the field toward c2.
        reaction = mix(reaction, c2, .3);
      }
      
      // Stamp energy along the segment between the previous and current
      // mouse positions so pointer motion perturbs the field.
      vec2 uv = getScreenSpace();
      vec2 oldmouse = u_oldmouse.xy;
      vec2 mouse = u_mouse.xy;
      float field = sdSegment(uv, oldmouse, mouse);
      reaction.r -= smoothstep(.05,.03,field)*3.;
      reaction.g += smoothstep(.1,.06,field)*3.;
      reaction.b += smoothstep(.06,.0,field)*3.;

      return vec4(clamp(reaction, 0., 1.), 1.0); 
    }
  }
  
  // Cheap 4-tap cross blur of the simulation buffer: average the texels
  // 5px above, below, left and right of uv. (Local renamed from `sample`,
  // which is a reserved word in later GLSL versions.)
  vec4 blurBuffer(vec2 uv) {
    vec3 pixs = vec3(1./u_resolution.xy, 0.)*5.;
    
    vec4 acc = texture2D(u_b_buffer, uv + pixs.zy)
             + texture2D(u_b_buffer, uv - pixs.zy)
             + texture2D(u_b_buffer, uv + pixs.xz)
             + texture2D(u_b_buffer, uv - pixs.xz);
    
    return acc * .25;
  }
  
  // Entry point: dispatch to the simulation pass or the display pass.
  // (The original computed a blurred `tex` here — one fetch plus a 4-tap
  // blur per fragment — and never used it; that dead code is removed.)
  void main() {
    if(u_buffer_pass) {
      // Simulation pass: write the next reaction–diffusion state.
      gl_FragColor = render_effect();
    } else {
      // Display pass: map the three species channels onto the palette.
      vec4 s = texture2D(u_b_buffer, gl_FragCoord.xy/u_resolution.xy);
      // NOTE(review): mix(vec3(0), vec3(0), s.r) is identically vec3(0);
      // kept as-is in case it is a tuning knob for a third colour.
      vec3 c = mix(vec3(0), vec3(0), s.r);
      c = mix(c1, c, s.g);
      c = mix(c2, c, s.b);
      gl_FragColor = vec4(c, 1.);
    }
  }
  
</script>
              
            
!

CSS

              
                /* Full-viewport dark page; a single grid cell centres the demo. */
                body {
  background: #111122;
  margin:0;
  display: grid;
  grid-template-rows: 100vh;
  grid-template-columns: 100vw;
}

/* Wrapper centred inside the body's grid cell. */
div {
  align-self: center;
  justify-self: center;
  text-align: center;
}
/* Instruction text above the canvas. */
p {
  color: #CCCCCC;
  font-family: Helvetica;
}

/* Soften the WebGL canvas with rounded corners and a drop shadow. */
canvas {
  border-radius: 5px;
  box-shadow: 5px 5px 20px rgba(11,22,44,.5);
}
              
            
!

JS

              
                console.clear();

// Defer start-up one task so the WTCGL library and the inline shader
// <script> tags are guaranteed to have been parsed before init() runs.
setTimeout(() => { init() }, 0);

// Simulation / framebuffer size, [width, height] in pixels.
const dimensions = [1024, 512];

class RenderBuffer {
  // Ping-pong framebuffer pair that carries shader state across frames:
  // each render() reads the texture written last pass and writes the other.
  fb1 = null;
  fb2 = null;
  activeFB = null;   // framebuffer whose texture will be read this frame
  wtcgl = null;      // owning WTCGL instance
  buffername = null; // base name for the `u_b_<name>` / `<name>_pass` uniforms
  textype = WTCGL.IMAGETYPE_REGULAR;
  texdepth = WTCGL.TEXTYPE_HALF_FLOAT_OES;
  width = 1024;
  height = 1024;
  textureRegister = 5;            // texture unit dedicated to this buffer
  static textureRegisterBase = 5; // next free unit across all instances

  /**
   * @param {Object} options
   * @param {WTCGL}  options.wtcgl      Required WTCGL instance.
   * @param {string} options.buffername Required; the shader must declare the
   *   matching `u_b_<name>` sampler and `<name>_pass` bool uniforms.
   * @param {*}      [options.textype]  WTCGL image type (wrap/filter mode).
   * @param {*}      [options.texdepth] WTCGL texel depth.
   * @param {number} [options.width]    Buffer width in pixels.
   * @param {number} [options.height]   Buffer height in pixels.
   * @throws {Error} When wtcgl or buffername is missing.
   */
  constructor(options) {
    if(options.wtcgl) this.wtcgl = options.wtcgl;
    else throw new Error('You need to provide a WTCGL Instance');
    if(options.buffername) this.buffername = options.buffername;
    else throw new Error('You need to provide a Buffer name');
    if(options.textype) this.textype = options.textype;
    if(options.texdepth) this.texdepth = options.texdepth;
    if(options.width) this.width = options.width;
    if(options.height) this.height = options.height;
    // Claim a unique texture unit for this buffer.
    this.textureRegister = RenderBuffer.textureRegisterBase++;

    this.fb1 = this.wtcgl.addFrameBuffer(this.width, this.height, this.textype, this.texdepth);
    this.fb2 = this.wtcgl.addFrameBuffer(this.width, this.height, this.textype, this.texdepth);
    this.activeFB = this.fb1;
  }
  // Recreate both framebuffers at a new size.
  // NOTE(review): the old framebuffers are never deleted, so repeated
  // resizes leak GPU memory — no delete API is visible on WTCGL here;
  // confirm upstream.
  resize(width, height) {
    this.width = width;
    this.height = height;
    this.fb1 = this.wtcgl.addFrameBuffer(this.width, this.height, this.textype, this.texdepth);
    this.fb2 = this.wtcgl.addFrameBuffer(this.width, this.height, this.textype, this.texdepth);
    this.activeFB = this.fb1;
  }
  // One simulation pass: bind last pass's texture for reading, swap the
  // pair, and render into the other framebuffer with `<name>_pass` true.
  // (A leftover debug write to `window.uniform` was removed — it polluted
  // the global scope and was never read.)
  render() {
    const _ctx = this.wtcgl._ctx;

    // Point the sampler uniform at this buffer's texture unit and bind the
    // texture written on the previous pass.
    const uniform = _ctx.getUniformLocation(this.wtcgl._program, `u_b_${this.buffername}`);
    _ctx.uniform1i(uniform, this.textureRegister);
    _ctx.activeTexture(_ctx[`TEXTURE${this.textureRegister}`]);
    _ctx.bindTexture(_ctx.TEXTURE_2D, this.activeFB.frameTexture);
    // Swap so we write into the framebuffer we are not reading from.
    this.activeFB = this.activeFB === this.fb1 ? this.fb2 : this.fb1;

    this.wtcgl.addUniform(`${this.buffername}_pass`, WTCGL.TYPE_BOOL, true);
    this.wtcgl.render(this.activeFB);
    this.wtcgl.addUniform(`${this.buffername}_pass`, WTCGL.TYPE_BOOL, false);
  }
}

// Build the WTCGL instance, wire up the simulation buffer and pointer
// uniforms, then load textures and start the render loop.
const init = () => {
  const twodWebGL = new WTCGL(
    document.querySelector('canvas#webgl'),
    document.querySelector('script#vertexShader').textContent,
    document.querySelector('script#fragmentShader').textContent,
    dimensions[0],
    dimensions[1],
    2
  );
  // Randomise the clock so each page load starts at a different state.
  twodWebGL.startTime = -100 + Math.random() * 50;

  // Ping-pong buffer carrying the reaction–diffusion state between frames;
  // the shader reads it via `u_b_buffer` / `u_buffer_pass`.
  const buffer = new RenderBuffer({
    width: dimensions[0],
    height: dimensions[1],
    buffername: 'buffer',
    textype: WTCGL.IMAGETYPE_TILE,
    texdepth: WTCGL.TEXTYPE_HALF_FLOAT_OES,
    wtcgl: twodWebGL
  });

  // Per-frame hook: advance the simulation one step and bump the frame
  // counter (u_frame === 0 seeds the buffer inside the shader).
  // (Removed: unused `timeout` local and an unused `_ctx` local here.)
  let frame = 0;
  const render = function () {
    buffer.render();
    twodWebGL.addUniform('frame', WTCGL.TYPE_INT, frame);
    frame += 1;
  };
  twodWebGL.onRun = (delta) => {
    render();
  };

  // --- Pointer tracking ---------------------------------------------------
  // mouse/oldmouse are kept in the same centred, aspect-corrected space the
  // shader's getScreenSpace() produces, so the mouse segment lines up.
  const mousepos = [0, 0];
  twodWebGL.addUniform('mouse', WTCGL.TYPE_V2, mousepos);
  twodWebGL.addUniform('oldmouse', WTCGL.TYPE_V2, mousepos);
  twodWebGL.addUniform('mousedown', WTCGL.TYPE_BOOL, false);
  window.addEventListener('mousedown', (e) => {
    twodWebGL.addUniform('mousedown', WTCGL.TYPE_BOOL, true);
  });
  window.addEventListener('mouseup', (e) => {
    twodWebGL.addUniform('mousedown', WTCGL.TYPE_BOOL, false);
  });
  window.addEventListener('pointermove', (e) => {
    const ratio = dimensions[1] / dimensions[0];
    const os = document.querySelector('canvas#webgl').getBoundingClientRect();
    const x = e.pageX - os.x;
    const y = e.pageY - os.y;
    // Publish the previous position first so the shader can draw the
    // segment between old and new positions.
    twodWebGL.addUniform('oldmouse', WTCGL.TYPE_V2, [mousepos[0], mousepos[1]]);
    // Normalise by the longer axis (y is flipped to match GL convention).
    if (dimensions[1] > dimensions[0]) {
      mousepos[0] = (x - dimensions[0] / 2) / dimensions[0];
      mousepos[1] = (y - dimensions[1] / 2) / -dimensions[1] * ratio;
    } else {
      mousepos[0] = (x - dimensions[0] / 2) / dimensions[0] / ratio;
      mousepos[1] = (y - dimensions[1] / 2) / -dimensions[1];
    }
    twodWebGL.addUniform('mouse', WTCGL.TYPE_V2, mousepos);
  });

  // --- Texture loading ----------------------------------------------------
  // The instance only starts running once every texture has been fetched.
  const textures = [
    {
      name: 'noise',
      url: 'https://s3-us-west-2.amazonaws.com/s.cdpn.io/982762/noise.png',
      type: WTCGL.IMAGETYPE_TILE,
      img: null
    }
  ];
  // Load a single image; resolves with its descriptor once decoded.
  const loadImage = function (imageObject) {
    const img = document.createElement('img');
    img.crossOrigin = 'anonymous';

    return new Promise((resolve, reject) => {
      img.addEventListener('load', (e) => {
        imageObject.img = img;
        resolve(imageObject);
      });
      img.addEventListener('error', (e) => {
        reject(e);
      });
      img.src = imageObject.url;
    });
  };
  // Load textures sequentially (hard-capped at 10) and register each with
  // WTCGL; a failed load is logged but does not abort the rest.
  const loadTextures = function (textures) {
    return new Promise((resolve, reject) => {
      const loadTexture = (pointer) => {
        if (pointer >= textures.length || pointer > 10) {
          resolve(textures);
          return;
        }
        const imageObject = textures[pointer];

        loadImage(imageObject).then(
          (result) => {
            twodWebGL.addTexture(result.name, result.type, result.img);
          },
          (error) => {
            console.log('error', error);
          }).finally((e) => {
            loadTexture(pointer + 1);
          });
      };
      loadTexture(0);
    });
  };

  loadTextures(textures).then(
    (result) => {
      twodWebGL.initTextures();
      twodWebGL.running = true;
    },
    (error) => {
      console.log('error');
    }
  );
}
              
            
!
999px

Console