Pen Settings

HTML

CSS

CSS Base

Vendor Prefixing

Add External Stylesheets/Pens

Any URLs added here will be added as <link>s in order, and before the CSS in the editor. You can use the CSS from another Pen by using its URL and the proper URL extension.

+ add another resource

JavaScript

Babel includes JSX processing.

Add External Scripts/Pens

Any URLs added here will be added as <script>s in order, and run before the JavaScript in the editor. You can use the URL of any other Pen and it will include the JavaScript from that Pen.

+ add another resource

Packages

Add Packages

Search for and use JavaScript packages from npm here. By selecting a package, an import statement will be added to the top of the JavaScript editor for this package.

Behavior

Save Automatically?

If active, Pens will autosave every 30 seconds after being saved once.

Auto-Updating Preview

If enabled, the preview panel updates automatically as you code. If disabled, use the "Run" button to update.

Format on Save

If enabled, your code will be formatted when you actively save your Pen. Note: your code becomes un-folded during formatting.

Editor Settings

Code Indentation

Want to change your Syntax Highlighting theme, Fonts and more?

Visit your global Editor Settings.

HTML

              
                <script id="vertexShader" type="x-shader/x-vertex">
uniform vec3 viewVector;
uniform float c;
uniform float p;
varying float intensity;
void main() 
{
    vec3 vNormal = normalize( normalMatrix * normal ); //normalMatrix and normal are both GLSL built-in variables; the normalization is for the OBJECT
	vec3 vNormel = normalize( normalMatrix * viewVector ); //viewVector is camera position; it is normalized to OBJECT coordinates
	intensity = pow( c - dot(vNormal, vNormel), p ); //power of a constant (c) minus the current normal in relation to view times p
//the more orthogonal the view, the lower base value and therefore the intensity
//cases should be evaluated in terms of the height of the difference and the value of the power ([0,inf))
    gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}
</script>

<!-- fragment shader a.k.a. pixel shader -->
<script id="fragmentShader" type="x-shader/x-vertex"> 
uniform vec3 glowColor;
varying float intensity;
void main() 
{
	vec3 glow = glowColor * intensity;
    gl_FragColor = vec4( glow, 1.0 );
}
</script>
<div id="logwrapper_l" class="logwrapper"></div>
<div id="logwrapper_r" class="logwrapper">
<!--<div id="warning" class="log">Currently sound not working on mobile devices :(</div>-->
<div id="overlay" class="log">TOUCH SCREEN FOR SOUND</div>
</div>
<div id='webglcontainer' style="position: absolute; left:0px; top:0px"></div>


              
            
!

CSS

              
                html{
 background-color: 0x000;
}

/* E: 
References for simple overlay:
--- https://discourse.threejs.org/t/embed-a-div-into-a-scene/2338
--- https://jsfiddle.net/f2Lommf5/4528/
*/
.logwrapper{
	position: absolute;
	top: 0;
 z-index: 1;
}

#logwrapper_l{
	left: 0;
}

#logwrapper_r{
	right: 0;
}


.log {
	width: auto;
	height: auto;
 padding: 5px;
}
#warning{
	background-color: yellow; 
}
#overlay{
	background-color: #ff0000; 
 color: white;
}
              
            
!

JS

              
                /*globals*/
var container, scene, camera, renderer, sphereMaterial;
var tw, parameters;
//var audiocontext, songBuffer, sourceNode;
var analyser1;

//stats
//console.log(111);
var stats = new Stats();
stats.showPanel( 0,1,2 ); // 0: fps, 1: ms, 2: mb, 3+: custom
document.getElementById("logwrapper_l").appendChild(stats.domElement);

/**
 * Builds the whole scene: camera, audio, renderer, lit sphere with a
 * Fresnel-style glow shell, positional audio, and the TWEEN that pulses
 * the glow. Assigns the globals: scene, camera, renderer, container,
 * sphereMaterial, analyser1, parameters.
 */
function init() {
  // SCENE
  // E: background-color reference:
  // https://stackoverflow.com/questions/16177056/changing-three-js-background-to-transparent-or-other-color
  scene = new THREE.Scene();
  scene.background = new THREE.Color( 'skyblue' );

  // CAMERA
  var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight;
  var VIEW_ANGLE = 45, ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT, NEAR = 0.1, FAR = 20000;
  camera = new THREE.PerspectiveCamera( VIEW_ANGLE, ASPECT, NEAR, FAR );
  scene.add(camera);
  camera.position.set(0, 100, 400);
  camera.lookAt(scene.position);

  // SOUND
  /* E: based on a previous modification of a fork of the base three.js example
     for positional audio. Other references (keywords: "positionalaudio",
     "web audio api"):
     --- https://codepen.io/hexdodecagram/pen/wvBGKZj
     --- https://codepen.io/bbx/pen/EpLobJ?editors=0010
     --- https://tympanus.net/codrops/2019/09/06/how-to-create-a-webcam-audio-visualizer-with-three-js/
  */
  var listener = new THREE.AudioListener();
  camera.add(listener);
  var audioLoader = new THREE.AudioLoader();

  // RENDERER
  renderer = new THREE.WebGLRenderer( { antialias: true } );
  renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);

  container = document.getElementById( 'webglcontainer' );
  container.appendChild( renderer.domElement );

  // LIGHT
  var light = new THREE.PointLight(0xffffff);
  light.position.set(0, 250, 0);
  scene.add(light);

  // E: OBJECT — the opaque inner sphere.
  var sphereGeom = new THREE.SphereGeometry(100, 32, 16);
  sphereMaterial = new THREE.MeshBasicMaterial( { color: 'blue' } );
  var sphere = new THREE.Mesh(sphereGeom, sphereMaterial);
  sphere.position.set(0, 0, -150);
  scene.add(sphere);

  // Positional audio attached to the sphere below; looped machine hum.
  var sound1 = new THREE.PositionalAudio( listener );
  audioLoader.load(
    'https://raw.githubusercontent.com/d-subat/spaceshipmaze/master/public/assets/machine.mp3',
    function( buffer ) { // onLoad
      if (buffer) {
        sound1.setBuffer( buffer );
        sound1.setLoop(true);
        // NOTE(review): 100 is far above the usual 0..1 gain range —
        // presumably intentional loudness; confirm.
        sound1.setVolume(100);
        sound1.setRefDistance( 10 ); // the smaller, the less
        sound1.play();
      } else {
        document.getElementById('overlay').innerHTML = "NO MUSIC LOADED"
      }
    },
    function(xhr){console.log( (xhr.loaded / xhr.total * 100) + '% loaded' );}, // progress callback (was mislabeled "success")
    function(err){console.log( 'An error happened' ); document.getElementById('overlay').innerHTML = 'An error happened loading music'; } // error callback
  );

  /* E: WARNING:
     Chrome's autoplay policy suspends the AudioContext until the user
     interacts with the page:
     --- https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#webaudio
     jQuery makes it easy to bind the equivalent click/touch events:
     --- https://stackoverflow.com/questions/11397028/document-click-function-for-touch-device
  */
  $(document).on('click touch', function() {
    sound1.context.resume().then(() => {
      console.log('Playback resumed successfully');
      document.getElementById('overlay').remove();
    }).catch((err) => { document.getElementById('overlay').innerHTML = err; });
  });

  sphere.add(sound1);
  // analysers
  analyser1 = new THREE.AudioAnalyser( sound1, 32 ); // actually not required for this exercise...

  // Custom glow material built from the shader <script> tags in the HTML.
  var customMaterial = new THREE.ShaderMaterial(
  {
    uniforms:
    {
      "c":   { type: "f", value: 1.0 },
      "p":   { type: "f", value: 2.5 },
      glowColor: {
        type: "c",
        // BUGFIX: was `new THREE.Color('')` — an empty color string logs a
        // warning and leaves the color at its default instead of the yellow
        // the adjacent commented-out line intended. Restored 0xffff00.
        value: new THREE.Color(0xffff00)
      },
      viewVector: { type: "v3", value: camera.position }
    },
    vertexShader:   document.getElementById( 'vertexShader'   ).textContent,
    fragmentShader: document.getElementById( 'fragmentShader' ).textContent,
    side: THREE.FrontSide,
    blending: THREE.AdditiveBlending,
    transparent: true
  } );

  // Glow shell: a clone of the sphere geometry with the shader material.
  var moonGlow = new THREE.Mesh( sphereGeom.clone(), customMaterial.clone() );
  moonGlow.position.set(0, 0, -150); // clone() does not copy the mesh position, so set it again
  scene.add( moonGlow );

  resize();
  window.addEventListener('resize', resize);

  // Keeps renderer and camera in sync with the window size.
  function resize() {
    let w = window.innerWidth;
    let h = window.innerHeight;

    renderer.setSize(w, h);
    camera.aspect = w / h;
    camera.updateProjectionMatrix();
  }

  /* E: References for tween.js:
     --- https://github.com/tweenjs/tween.js/blob/master/docs/user_guide.md
     --- https://codepen.io/ecccs/pen/BedmYO
  */
  // Pulse the glow: yoyo between the base and peak values forever.
  parameters = { intensity_c: .5, intensity_p: 1., scale: 1., };
  new TWEEN.Tween(parameters)
    .to({ intensity_c: 1., intensity_p: 2.1, scale: 2.05 }, 600)
    .easing(TWEEN.Easing.Quadratic.InOut)
    .repeat(Infinity)
    .yoyo(true)
    .onUpdate(function(o) {
      // Only "c" and the shell scale are animated; "p" stays fixed.
      moonGlow.material.uniforms["c"].value = parameters.intensity_c;
      moonGlow.scale.x = parameters.scale;
      moonGlow.scale.y = parameters.scale;
      moonGlow.scale.z = parameters.scale;
    })
    .start();

  renderer.render( scene, camera );
}

/**
 * Render loop: advances the tween and redraws the scene each frame,
 * bracketed by stats.js timing so the fps panel reflects real frame cost.
 * @param {number} time - timestamp passed by requestAnimationFrame / TWEEN.
 */
function animate(time) {
  stats.begin();
  requestAnimationFrame( animate );
  TWEEN.update(time);
  renderer.render( scene, camera );
  stats.end();
}

// Boot: build the scene, then start the loop.
init();
animate();

/*
//SOUND
function createAudioEnv() {
  context = new AudioContext();

  sourceNode = context.createBufferSource();

  sourceNode.connect(context.destination);

};

function loadSong(name) {
  var request = new XMLHttpRequest();
  request.open('GET', name, true);
  request.responseType = 'arraybuffer';
  
  request.onload = function() {
    // decode the data
    context.decodeAudioData(request.response, function(buffer) {
      songBuffer = buffer;
      playSong(buffer);
    }, onError);
  };
  request.send();
}

function playSong(buffer) {
  var dur = buffer.duration;
  sourceNode.buffer = buffer;
  sourceNode.start(0);
}
*/
              
            
!
999px

Console