WebGL from Scratch: Updating Textures

Time for a change of pace.

Last time I finished up promising to look at a non-invasive way to render wireframes in WebGL, but that’s really a bit boring. Instead, I’m going to focus on updating a texture after it’s been created. See, a texture isn’t fixed once uploaded to the GPU (via texImage2D); provided that you don’t change the dimensions, it can be updated with the appropriately-named texSubImage2D.

The Difference Between texImage2D and texSubImage2D

You use texImage2D to initialise (or reinitialise) storage for a texture. Bearing in mind that createTexture just reserves an identifier, texImage2D can be thought of as allocation plus a copy of texture data. Compared to that, texSubImage2D is just the copy, making it significantly faster. As the name implies, you can choose to overwrite part of the associated texture, but you can also replace the whole image.
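
For instance, a minimal sketch, assuming gl is a WebGL context, texture has already been created and configured, and video is a fixed-size frame source:

// One-time setup: allocate GPU storage and copy the first frame.
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);

// Every frame thereafter: the dimensions haven't changed, so only the copy is needed.
gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGBA, gl.UNSIGNED_BYTE, video);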

Sourcing a Moving Image

For this demo to be convincing, I need multiple images of the same dimensions. In order to show off the impressive bandwidth between your CPU and graphics card, this image should change frequently. We could use some kind of page-stack animation in a loop, but—for suitably-equipped hardware—HTML5 provides access to a webcam via getUserMedia. Binding it to a <video> element is fairly simple, which is great, because that element can be the source parameter for texImage2D and texSubImage2D.

Internet Explorer and Safari users: sorry, but your browser of choice is lagging behind Mozilla and Google. getUserMedia was unavailable to you when I wrote this post.

Making Things a Bit More Interesting

Stitching video onto a flat surface is a good way to create a highly expensive, power-hungry mirror, so I’m going to give the vertex shader something to do: time-based mesh deformation. This will give the video a ripple effect. Hopefully one smoother and more colourful than this animated gif:

Capture w/Ripple Distortion

The version of Firefox that I’m using lets me run this straight off the disk. Chrome insists that it be provided by a web-server (python -m SimpleHTTPServer to the rescue).

The Code

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script id="vertex-shader" type="x-shader/x-vertex">
      precision mediump float;

      uniform mat4 modelMatrix, viewMatrix, projectionMatrix;
      uniform float time;

      attribute vec3 pos;
      attribute vec2 texCoords;

      varying vec2 tc;

      void main() {
        float d = -length(pos);
        float z = 0.05 * sin(3.141592653589793 * d * 5.0 + time * 3.0);
        tc = texCoords;
        gl_Position = 
          projectionMatrix * viewMatrix *
          modelMatrix * vec4(pos.xy, z, 1.0);
      }      
    </script>
    <script id="fragment-shader" type="x-shader/x-fragment">
      precision mediump float;

      uniform sampler2D image;
      varying vec2 tc;

      void main() {
        gl_FragColor = texture2D(image, tc.st);
      }
    </script>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script type="text/javascript">

    function createCanvas() {
      var canvas = document.createElement('canvas');
      document.getElementById('content').appendChild(canvas);
      return canvas;      
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(
          shader, document.getElementById(spec.container).text
        );
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
          throw gl.getShaderInfoLog(shader);
        }
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        throw gl.getProgramInfoLog(program);
      }
      return program;
    }

    var frameCount = 0, fpsTimer = null;

    function resetFpsCounter() {
      fpsTimer = setTimeout(function() {
        fpsTimer = null;
      }, 1000);
    }

    function render(gl, scene, time) {
      if ( fpsTimer == null ) {
        console.log(frameCount);
        frameCount = 0;
        resetFpsCounter();
      }
      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);

      gl.uniform1f(scene.program.timeUniform, time / 1000);
      
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, gl.FALSE,
        scene.object.modelMatrix);
      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.buffer);
      gl.bindTexture(gl.TEXTURE_2D, scene.object.texture);

      var video = scene.object.textureSourceElement;
      gl.texSubImage2D(
        gl.TEXTURE_2D, 0, 0, 0, gl.RGBA,
        gl.UNSIGNED_BYTE, video);

      gl.drawArrays(
        scene.object.primitiveType, 0,
        scene.object.vertexCount);

      gl.bindTexture(gl.TEXTURE_2D, null);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      frameCount++;
      requestAnimationFrame(function(timestamp) {
        render(gl, scene, timestamp);
      });
    }

    function createFlatMesh(gl) {
      var MAX_ROWS=64, MAX_COLS=64;
      var points = [];

      for ( var r = 0 ; r <= MAX_ROWS ; r++ ) {
        for ( var c = 0 ; c <= MAX_COLS ; c++ ) {
          points.push({
            location: [-0.75 + (1.5 / MAX_COLS) * c, 
                        0.75 - (1.5 / MAX_ROWS) * r,
                        0.0],
            texture: [1.0 / MAX_COLS * c,
                      1.0 / MAX_ROWS * r]
          });
        }
      }
      var OFFSET = function(R,C) {
        return ((R) * ((MAX_COLS)+1) + (C));
      };
      var
        vertices = [],
        rotations = [-1,-1,-1,0,1,1,1,0,-1,-1,-1,0,1,1,1,0];
      for ( var r = 1 ; r <= MAX_ROWS ; r += 2 ) {
        for ( var c = 1 ; c <= MAX_COLS ; c += 2 ) {
          for ( var i = 0 ; i < 8 ; i++ ) {
            var off1 = OFFSET(r, c);
            var off2 = OFFSET(r + rotations[i],   c + rotations[i+6]);
            var off3 = OFFSET(r + rotations[i+1], c + rotations[i+7]);
            Array.prototype.push.apply(
              vertices, points[off1].location);
            Array.prototype.push.apply(
              vertices, points[off1].texture);
            Array.prototype.push.apply(
              vertices, points[off2].location);
            Array.prototype.push.apply(
              vertices, points[off2].texture);
            Array.prototype.push.apply(
              vertices, points[off3].location);
            Array.prototype.push.apply(
              vertices, points[off3].texture);
          }
        }
      }

      var buffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
      gl.bufferData(
        gl.ARRAY_BUFFER, new Float32Array(vertices),
        gl.STATIC_DRAW);
      gl.bindBuffer(gl.ARRAY_BUFFER, null);

      return {
        buffer: buffer,
        primitiveType: gl.TRIANGLES,
        vertexCount: vertices.length / 5
      }
    }

    function loadTexture(gl, mesh, andThenFn) {
      var texture = gl.createTexture();
      navigator.getUserMedia = navigator.getUserMedia // W3C
        || navigator.mozGetUserMedia // Mozilla
        || navigator.webkitGetUserMedia; // Chrome
      navigator.getUserMedia(
        {video: true, audio:false},
        function(stream) {
          var video = document.getElementById('video');
          video.src = URL.createObjectURL(stream);
          video.onplaying = function() {
            gl.bindTexture(gl.TEXTURE_2D, texture);
            gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
            gl.texImage2D(
              gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
            gl.texParameteri(
              gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
            gl.texParameteri(
              gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
            gl.texParameteri(
              gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
            gl.texParameteri(
              gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
            gl.bindTexture(gl.TEXTURE_2D, null);
            mesh.texture = texture;
            mesh.textureSourceElement = video;
            andThenFn();
          };
          video.play();
      }, function(e) {
        alert(e);
      });
    }

    function init() {
      var canvas = createCanvas();
      var gl = canvas.getContext('experimental-webgl');
      var resize = function() {
        canvas.width = window.innerWidth;
        canvas.height = window.innerHeight;
        gl.viewport(0,0,canvas.width,canvas.height);
      };
      window.addEventListener('resize', resize);

      resize();

      gl.enable(gl.DEPTH_TEST);
      gl.clearColor(0.0, 0.0, 0.0, 0.0);

      var mesh = createFlatMesh(gl);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]);

      var projectionMatrix = mat4.create();
      mat4.perspective(
        projectionMatrix, 0.75, canvas.width/canvas.height,
        0.1, 100);
      var viewMatrix = mat4.create();
      var modelMatrix = mat4.create();
      mat4.translate(modelMatrix, modelMatrix, [0,0,-2]);
      mat4.rotate(modelMatrix, modelMatrix, -1, [1,0,0]);

      mesh.modelMatrix = modelMatrix;

      gl.useProgram(program);

      program.modelMatrixUniform =
        gl.getUniformLocation(program, 'modelMatrix');
      program.viewMatrixUniform =
        gl.getUniformLocation(program, 'viewMatrix');
      program.projectionMatrixUniform =
        gl.getUniformLocation(program, 'projectionMatrix');
      program.timeUniform =
        gl.getUniformLocation(program, 'time');

      gl.uniformMatrix4fv(
        program.projectionMatrixUniform, gl.FALSE,
        projectionMatrix);
      gl.uniformMatrix4fv(
        program.viewMatrixUniform, gl.FALSE, viewMatrix);
      gl.uniform1f(
        program.timeUniform, 0.0);

      gl.bindBuffer(gl.ARRAY_BUFFER, mesh.buffer);

      program.positionAttribute =
        gl.getAttribLocation(program, 'pos');
      program.textureCoordsAttribute =
        gl.getAttribLocation(program, 'texCoords');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.enableVertexAttribArray(program.textureCoordsAttribute);
      gl.vertexAttribPointer(
        program.positionAttribute, 3, gl.FLOAT, false,
        5 * Float32Array.BYTES_PER_ELEMENT,
        0);
      gl.vertexAttribPointer(
        program.textureCoordsAttribute, 2, gl.FLOAT, false,
        5 * Float32Array.BYTES_PER_ELEMENT,
        3 * Float32Array.BYTES_PER_ELEMENT);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);

      loadTexture(gl, mesh,
        function() {
          requestAnimationFrame(function(timestamp) {
            render(gl, {
              program: program,
              object: mesh
            }, timestamp);
          })
        });
    }
    </script>
  </head>
  <body onLoad="init()">
    <video
      id="video"
      width="640" height="480"
      style="display:none">
    </video>
    <div id="content"></div>
  </body>
</html>

What’s New

The loadTexture function now refers to a <video> element, and uses the getUserMedia API to bind it to the input from a camera. The onplaying callback of the video element actually initialises the texture with the texImage2D call (allocating storage on the GPU and copying pixel data from the video surface), and then sets up the call to render. The play() method is immediately called on the element in order to kick the whole process off.

The render() method itself is much like it’s always been, with the exception of the call to texSubImage2D, which uses the video element as its pixel source. When the underlying mesh is drawn with drawArrays, the image is stitched over the surface just like any other texture.

Finally, the vertex shader does a little more than just translating between coordinate systems. It’s now generating a replacement z-coordinate for each vertex that goes in, displacing it by a function of its distance from the centre of the view and the current time. There are some not-so-magic numbers in there: 0.05 is the amplitude, 5.0 is the frequency, and 3.0 is the speed. What’s worth noting here is that the GPU is doing the deforming. As far as the CPU is concerned, a flat mesh was uploaded to graphics memory once on initialisation and never updated. The only data being sent to the GPU thereafter are the new uniform values and the updated texture for each frame.

[side-note: It’s nice that all this work is being done by the GPU, but the trade-off is that the CPU can’t just look at its copy of the mesh to find out what you’ve selected if you click on something. It also complicates lighting, as the adjustments made by the vertex shader affect what the normals should be; these would have to be recalculated in the shader, but the vertex shader doesn’t have visibility of the other vertices in its triangle. A geometry shader—which can see a whole polygon at a time—could calculate and feed appropriate values to the fragment shader, but WebGL doesn’t support those.]

Where did I get the time from? Well, the callback that you provide to requestAnimationFrame gets a parameter when it’s actually called: a timestamp in milliseconds. This timestamp doesn’t correspond to the actual time in any way; it’s just the time since the rendering cycle started for your page. In any case, it’s usable as something that varies relatively smoothly and which can be fed into the vertex shader to indicate change on a frame-by-frame basis.

Window Refresh vs. Camera Capture Rates

The browser refresh rate is going to be synchronised with your display—typically about 60fps—but unless you’re working with a good camera, the video data itself is only going to be updated at 30fps or less. This means that the code above pushes twice as much texture data to the GPU as it has to. If you’ve been tracking performance, you might suspect that this is why the CPU still looks quite busy. I thought it was worth trying out by passing a step counter along to my render function: if step % 2 == 0, upload the texture (i.e. only update the texture on every second frame). Any impact on performance wasn’t noticeable. Trying mod-5 didn’t make a difference, either, so it doesn’t appear to be the OpenGL layer that’s chewing up time. Performance figures were similar between Firefox 40 and Chrome 43 on a 2011 MacBook Pro. I wonder what’s hogging the time?
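
The experiment looked roughly like this (a sketch, with step threaded through render as an extra parameter):

function render(gl, scene, time, step) {
  // ...uniform updates and buffer/texture binding as before...
  if ( step % 2 == 0 ) {
    // Only push fresh pixels across the bus on every second frame.
    gl.texSubImage2D(
      gl.TEXTURE_2D, 0, 0, 0, gl.RGBA,
      gl.UNSIGNED_BYTE, scene.object.textureSourceElement);
  }
  // ...drawArrays and cleanup as before...
  requestAnimationFrame(function(timestamp) {
    render(gl, scene, timestamp, step + 1);
  });
}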

Follow-Up Practice

If you don’t have access to a webcam, try replacing the getUserMedia code in loadTexture with something that plays a video in a loop.
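
One way to approach that, assuming a same-origin video file called movie.webm (the name is arbitrary):

function loadTexture(gl, mesh, andThenFn) {
  var texture = gl.createTexture();
  var video = document.getElementById('video');
  video.src = 'movie.webm';
  video.loop = true;
  video.muted = true; // avoids autoplay restrictions in newer browsers
  video.onplaying = function() {
    // ...texImage2D and texParameteri setup exactly as before...
    mesh.texture = texture;
    mesh.textureSourceElement = video;
    andThenFn();
  };
  video.play();
}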

Stereoscopic animated GIFs—a web search will yield many—give an impressive sense of 3D using just two images. Using an appropriate graphics package (I recommend GIMP), save their constituent images and replicate the effect by flipping between the two on the fly. [if you do this, it’s probably better not to update a single texture, but rather to initialise and upload two textures, alternating which one you bind on each render call].
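
A sketch of the render-time flip, assuming both textures were initialised with texImage2D up front and stored in scene.object.textures:

function render(gl, scene, time) {
  // Swap which texture is bound every ~100ms; tune to taste.
  var which = Math.floor(time / 100) % 2;
  gl.bindTexture(gl.TEXTURE_2D, scene.object.textures[which]);
  // ...drawArrays and cleanup as before...
  requestAnimationFrame(function(timestamp) {
    render(gl, scene, timestamp);
  });
}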

Create some HTML controls (e.g. <input type="range"> elements) for each of the amplitude, frequency, and speed. Add some uniforms to the vertex shader that allow these parameters to be manipulated on the fly.
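
A sketch of the wiring for one of them, assuming a matching uniform float amplitude; has been declared in the vertex shader (the element id and scaling are my own):

<input type="range" id="amplitude" min="0" max="100" value="5">

// In init(), after linking the program:
program.amplitudeUniform = gl.getUniformLocation(program, 'amplitude');

// In render(), while the program is active:
var amplitude = document.getElementById('amplitude').value / 1000;
gl.uniform1f(program.amplitudeUniform, amplitude);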

The continuous ripple effect is pretty, but there’s fun in playing with it. How about a ripple that diminishes with the distance from the origin? What about a flag effect instead?

What’s Next

This series of blog posts was called ‘WebGL from Scratch’, and we’ve gone from drawing a red background through 2D shapes, 3D shapes, lighting, mesh loading, animation, and textures. That’s about as much as I want to accomplish in an introductory tutorial.

So is that all there is? Absolutely not. Graphics is an endlessly deep subject. Just scratching the surface, I haven’t talked about shadows, environment mapping, bump mapping, multi-texturing, or object picking. I haven’t played with any of the more difficult visual effects to model, such as fire, smoke, or glare. Lighting has been deliberately simple, and is usually extended with some kind of material parameters for specularity and reflection. Descriptions of multiple approaches to handle any one of these subjects are a web search away, in almost all cases trading off accuracy and efficiency.

The thread through these posts has been very much nuts n’ bolts. I don’t apologise for that. Higher-level access to 3D functionality is available via something like three.js, which masks complexity and gets results faster. My goal here, though, has been to get as close to the GPU as JavaScript and web browsers permit.

I hope you’ve enjoyed it.

If you would like something explained in more depth, attach a note and I’ll put together a post that builds on this material. Or posts, if need be.

WebGL from Scratch: Textures, part 2

Last time, I showed how to texture a mesh, but you might not have been convinced that there was really a mesh under there: it just looked like a square with some perspective applied.

In this post, I’m going to demonstrate a simple hack that partially makes up for WebGL’s lack of a functional equivalent to OpenGL’s glPolygonMode by adding barycentric coordinates to each triangle. The basic premise is simple: when the vertex shader sees each vertex, it will have a barycentric coordinate of [1,0,0], [0,1,0], or [0,0,1]. Each triangle will always have all three, although the order doesn’t matter. The fragment shader will see the interpolated values of these coordinates, meaning that points that are too far away from an edge can be identified and discarded.

Unfortunately, this implementation is invasive: each vertex carries an additional three floats defining its barycentric coordinate. In the next post, I’ll show how to do the same thing in a non-invasive way (although admittedly in a way that requires a regular repeating pattern to be effective; more on that later).

As usual, this code relies upon gl-matrix, and also the bricks.png image I used last time. You’ll likely need to deliver the page with a local web server, whether it’s industrial strength or just python -m SimpleHTTPServer.

The Code

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script id="vertex-shader" type="x-shader/x-vertex">
      precision mediump float;

      uniform mat4 modelMatrix, viewMatrix, projectionMatrix;

      attribute vec3 pos;
      attribute vec2 texCoords;
      attribute vec3 barycentric;

      varying vec2 tc;
      varying vec3 bary;

      void main() {
        tc = texCoords;
        bary = barycentric;
        gl_Position = 
          projectionMatrix * viewMatrix *
          modelMatrix * vec4(pos, 1.0);
      }      
    </script>
    <script id="fragment-shader" type="x-shader/x-fragment">
      precision mediump float;

      uniform bool wireframe;
      uniform sampler2D image;
      uniform float wireframeThickness;

      varying vec2 tc;
      varying vec3 bary;

      void main() {
        if ( wireframe ) {
          if ( bary[0] > wireframeThickness &&
               bary[1] > wireframeThickness &&
               bary[2] > wireframeThickness ) {
            discard;
          }
        } 
        gl_FragColor = texture2D(image, tc.st);
      }
    </script>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script type="text/javascript">

    function createCanvas() {
      var canvas = document.createElement('canvas');
      document.getElementById('content').appendChild(canvas);
      return canvas;      
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(
          shader, document.getElementById(spec.container).text
        );
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
          throw gl.getShaderInfoLog(shader);
        }
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        throw gl.getProgramInfoLog(program);
      }
      return program;
    }

    function render(gl, scene) {
      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, gl.FALSE,
        scene.object.modelMatrix);
      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.buffer);
      gl.bindTexture(gl.TEXTURE_2D, scene.object.texture);

      gl.drawArrays(
        scene.object.primitiveType, 0,
        scene.object.vertexCount);

      gl.bindTexture(gl.TEXTURE_2D, null);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function() {
        render(gl, scene);
      });
    }

    function createFlatMesh(gl) {
      var MAX_ROWS=32, MAX_COLS=32;
      var points = [];

      for ( var r = 0 ; r <= MAX_ROWS ; r++ ) {
        for ( var c = 0 ; c <= MAX_COLS ; c++ ) {
          points.push({
            location: [-0.75 + (1.5 / MAX_COLS) * c, 
                        0.75 - (1.5 / MAX_ROWS) * r,
                        0.0],
            texture: [1.0 / MAX_COLS * c,
                      1.0 / MAX_ROWS * r]
          });
        }
      }
      var OFFSET = function(R,C) {
        return ((R) * ((MAX_COLS)+1) + (C));
      };
      var
        vertices = [],
        rotations = [-1,-1,-1,0,1,1,1,0,-1,-1,-1,0,1,1,1,0];
      for ( var r = 1 ; r <= MAX_ROWS ; r += 2 ) {
        for ( var c = 1 ; c <= MAX_COLS ; c += 2 ) {
          for ( var i = 0 ; i < 8 ; i++ ) {
            var off1 = OFFSET(r, c);
            var off2 = OFFSET(r + rotations[i],   c + rotations[i+6]);
            var off3 = OFFSET(r + rotations[i+1], c + rotations[i+7]);
            Array.prototype.push.apply(
              vertices, points[off1].location);
            Array.prototype.push.apply(
              vertices, points[off1].texture);
            vertices.push(1,0,0);
            Array.prototype.push.apply(
              vertices, points[off2].location);
            Array.prototype.push.apply(
              vertices, points[off2].texture);
            vertices.push(0,1,0);
            Array.prototype.push.apply(
              vertices, points[off3].location);
            Array.prototype.push.apply(
              vertices, points[off3].texture);
            vertices.push(0,0,1);
          }
        }
      }

      var buffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
      gl.bufferData(
        gl.ARRAY_BUFFER, new Float32Array(vertices),
        gl.STATIC_DRAW);
      gl.bindBuffer(gl.ARRAY_BUFFER, null);

      return {
        buffer: buffer,
        primitiveType: gl.TRIANGLES,
        vertexCount: vertices.length / 8
      }
    }

    function loadTexture(name, gl, mesh, andThenFn) {
      var texture = gl.createTexture();
      var image = new Image();
      image.onload = function() {
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
        gl.texImage2D(
          gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.bindTexture(gl.TEXTURE_2D, null);
        mesh.texture = texture;
        andThenFn();
      }
      image.src = name;
    }

    function init() {
      var canvas = createCanvas();
      var gl = canvas.getContext('experimental-webgl');
      var resize = function() {
        canvas.width = window.innerWidth;
        canvas.height = window.innerHeight;
        gl.viewport(0,0,canvas.width,canvas.height);
      };
      window.addEventListener('resize', resize);

      resize();

      gl.enable(gl.DEPTH_TEST);
      gl.clearColor(0.0, 0.0, 0.0, 0.0);

      var mesh = createFlatMesh(gl);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]);

      canvas.addEventListener('click', function() {
        gl.useProgram(program);
        var existingValue = gl.getUniform(
          program,
          program.wireframeUniform);
        gl.uniform1i(program.wireframeUniform, !existingValue);
        gl.useProgram(null);
      });

      var projectionMatrix = mat4.create();
      mat4.perspective(
        projectionMatrix, 0.75, canvas.width/canvas.height,
        0.1, 100);
      var viewMatrix = mat4.create();
      var modelMatrix = mat4.create();
      mat4.translate(modelMatrix, modelMatrix, [0,0,-2]);
      mat4.rotate(modelMatrix, modelMatrix, -1, [1,0,0]);

      mesh.modelMatrix = modelMatrix;

      gl.useProgram(program);

      program.modelMatrixUniform =
        gl.getUniformLocation(program, 'modelMatrix');
      program.viewMatrixUniform =
        gl.getUniformLocation(program, 'viewMatrix');
      program.projectionMatrixUniform =
        gl.getUniformLocation(program, 'projectionMatrix');
      program.wireframeUniform =
        gl.getUniformLocation(program, 'wireframe');
      program.wireframeThicknessUniform =
        gl.getUniformLocation(program, 'wireframeThickness');

      gl.uniform1i(program.wireframeUniform, 1);
      gl.uniform1f(program.wireframeThicknessUniform, 0.1);
      
      gl.uniformMatrix4fv(
        program.projectionMatrixUniform, gl.FALSE,
        projectionMatrix);
      gl.uniformMatrix4fv(
        program.viewMatrixUniform, gl.FALSE, viewMatrix);

      gl.bindBuffer(gl.ARRAY_BUFFER, mesh.buffer);

      program.positionAttribute =
        gl.getAttribLocation(program, 'pos');
      program.textureCoordsAttribute =
        gl.getAttribLocation(program, 'texCoords');
      program.barycentricAttribute =
        gl.getAttribLocation(program, 'barycentric');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.enableVertexAttribArray(program.textureCoordsAttribute);
      gl.enableVertexAttribArray(program.barycentricAttribute);
      gl.vertexAttribPointer(
        program.positionAttribute, 3, gl.FLOAT, false,
        8 * Float32Array.BYTES_PER_ELEMENT,
        0);
      gl.vertexAttribPointer(
        program.textureCoordsAttribute, 2, gl.FLOAT, false,
        8 * Float32Array.BYTES_PER_ELEMENT,
        3 * Float32Array.BYTES_PER_ELEMENT);
      gl.vertexAttribPointer(
        program.barycentricAttribute, 3, gl.FLOAT, false,
        8 * Float32Array.BYTES_PER_ELEMENT,
        5 * Float32Array.BYTES_PER_ELEMENT);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);

      loadTexture('bricks.png', gl, mesh,
        function() {
          requestAnimationFrame(function() {
            render(gl, {
              program: program,
              object: mesh
            });
          })
        });
    }
    </script>
  </head>
  <body onLoad="init()">
    <div id="content">
    </div>
  </body>
</html>

What’s New

The vertex shader has a barycentric vec3 attribute, which it assigns to a bary varying for the fragment shader. The fragment shader checks to see if a given fragment is too far from an edge location, potentially discarding it.

The barycentric attribute values are spliced into the vertex data array in createFlatMesh, and mapped with getAttribLocation/enableVertexAttribArray/vertexAttribPointer as usual.

Finally, a click event handler is attached to the <canvas>, which flips the value of the wireframe uniform in the fragment shader: this controls whether to display the whole image or the wireframe. Note two things here: 1) the current value of a uniform can be read back from GPU memory by calling getUniform; and 2) the program had to be made active with useProgram before the new value could be set with uniform1i.

Things to Try

  • Add a control to adjust the wireframe thickness
  • Draw another mesh at a different angle so that it partially overlays the existing one. Are the holes in the mesh transparent or drawn with the background colour?

What’s Next

Time-based distortion of the mesh—animating a simple ripple effect.

WebGL from Scratch: Textures, part I

My previous posts on model loading might have gotten a little carried away. See, I’m learning as I go here, so breaking free of specifying vertices in literal arrays and pulling them in from models exported from a class-A piece of software like Blender was quite exciting. So, in my haste, I’ve skipped over some fundamental topics, and texturing’s one of them.

In the traditional sense, textures are images mapped onto geometry to enhance realism. A column made out of only a few rectangles can look awesome when wrapped in a cracked marble texture, which offers detail above and beyond what the geometry itself describes. As with lighting, describing geometry with triangles is an approximation, and textures help fool the brain into seeing detail that really isn’t there.

I’m going to start fresh with the code: aside from the borrowed createProgram function (which is going to be essentially the same in every WebGL program that I write), everything else has been hand-written from a blank text file. Well, that and the body of createFlatMesh, which I lifted from a C++ OpenGL program that I wrote a while ago.

Rather than trying to puzzle that code out, it helps if you understand that I’m drawing the mesh a patch at a time, where each patch defines 4 squares using 2 triangles each, drawn counter-clockwise around a central point. After spinning around that point, the centre point is moved 2 units along to be the centre of a new cluster of squares. When a row is complete, the loop bumps down 2 units to the centre of the next row and starts again. This means that a single cluster is created like this:

mesh-segment

This might seem like an awkward way to do this, but it generates a mesh that I think looks nicer when deformed (hint, hint).

mesh

Note that this code still requires gl-matrix—our ability to do without that library vanished when we moved into the third dimension. I’m no longer using jQuery here, as it doesn’t add anything useful. While you can use any image, the code below uses bricks.png.

Chrome Users: as with the OBJ data models used in the last few posts, Chrome considers disk-sourced images to be cross-domain and won’t allow them to be loaded into a WebGL texture. You’re going to need a web server, even if it’s just python -m SimpleHTTPServer.

Mapping Textures to Surfaces

The texture that I’m going to be working with is the simplest, most commonly used form: a plain, two-dimensional image. I’ll associate texture coordinates with each vertex, the interpolated value of which will be used in the fragment shader to look up the sample value in the texture. The shape I’ll be drawing—a square rotated around the X-axis, with perspective—is built with a regularly-spaced mesh, so the texture coordinates are easy to calculate. In effect, we’re stitching an image onto a mesh:

texture-mapping

One thing to note is that texture coordinates range from 0.0, 0.0 (bottom left) to 1.0, 1.0 (top right). Even though your shape might be defined to straddle the origin, with negative coordinates on any axis, the same is not true of texture coordinates.

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script id="vertex-shader" type="x-shader/x-vertex">
      precision mediump float;

      uniform mat4 modelMatrix, viewMatrix, projectionMatrix;

      attribute vec3 pos;
      attribute vec2 texCoords;

      varying vec2 tc;

      void main() {
        tc = texCoords;
        gl_Position = 
          projectionMatrix * viewMatrix *
          modelMatrix * vec4(pos, 1.0);
      }      
    </script>
    <script id="fragment-shader" type="x-shader/x-fragment">
      precision mediump float;

      uniform sampler2D image;

      varying vec2 tc;

      void main() {
        gl_FragColor = texture2D(image, tc.st);
      }
    </script>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script type="text/javascript">

    function createCanvas() {
      var canvas = document.createElement('canvas');
      document.getElementById('content').appendChild(canvas);
      return canvas;      
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(
          shader, document.getElementById(spec.container).text
        );
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
          throw gl.getShaderInfoLog(shader);
        }
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        throw gl.getProgramInfoLog(program);
      }
      return program;
    }

    function render(gl, scene) {
      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, gl.FALSE,
        scene.object.modelMatrix);
      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.buffer);
      gl.bindTexture(gl.TEXTURE_2D, scene.object.texture);

      gl.drawArrays(
        scene.object.primitiveType, 0,
        scene.object.vertexCount);

      gl.bindTexture(gl.TEXTURE_2D, null);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function() {
        render(gl, scene);
      });
    }

    function createFlatMesh(gl) {
      var MAX_ROWS=32, MAX_COLS=32;
      var points = [];

      for ( var r = 0 ; r <= MAX_ROWS ; r++ ) {
        for ( var c = 0 ; c <= MAX_COLS ; c++ ) {
          points.push({
            location: [-0.75 + (1.5 / MAX_COLS) * c, 
                        0.75 - (1.5 / MAX_ROWS) * r,
                        0.0],
            texture: [1.0 / MAX_COLS * c,
                      1.0 / MAX_ROWS * r]
          });
        }
      }
      var OFFSET = function(R,C) {
        return ((R) * ((MAX_COLS)+1) + (C));
      };
      var
        vertices = [],
        rotations = [-1,-1,-1,0,1,1,1,0,-1,-1,-1,0,1,1,1,0];
      for ( var r = 1 ; r <= MAX_ROWS ; r += 2 ) {
        for ( var c = 1 ; c <= MAX_COLS ; c += 2 ) {
          for ( var i = 0 ; i < 8 ; i++ ) {
            var off1 = OFFSET(r, c);
            var off2 = OFFSET(r + rotations[i],   c + rotations[i+6]);
            var off3 = OFFSET(r + rotations[i+1], c + rotations[i+7]);
            Array.prototype.push.apply(
              vertices, points[off1].location);
            Array.prototype.push.apply(
              vertices, points[off1].texture);
            Array.prototype.push.apply(
              vertices, points[off2].location);
            Array.prototype.push.apply(
              vertices, points[off2].texture);
            Array.prototype.push.apply(
              vertices, points[off3].location);
            Array.prototype.push.apply(
              vertices, points[off3].texture);
          }
        }
      }

      var buffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
      gl.bufferData(
        gl.ARRAY_BUFFER, new Float32Array(vertices),
        gl.STATIC_DRAW);
      gl.bindBuffer(gl.ARRAY_BUFFER, null);

      return {
        buffer: buffer,
        primitiveType: gl.TRIANGLES,
        vertexCount: vertices.length / 5
      }
    }

    function loadTexture(name, gl, mesh, andThenFn) {
      var texture = gl.createTexture();
      var image = new Image();
      image.onload = function() {
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
        gl.texImage2D(
          gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.texParameteri(
          gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.bindTexture(gl.TEXTURE_2D, null);
        mesh.texture = texture;
        andThenFn();
      }
      image.src = name;
    }

    function init() {
      var canvas = createCanvas();
      var gl = canvas.getContext('experimental-webgl');
      var resize = function() {
        canvas.width = window.innerWidth;
        canvas.height = window.innerHeight;
        gl.viewport(0,0,canvas.width,canvas.height);
      };
      window.addEventListener('resize', resize);
      resize();

      gl.enable(gl.DEPTH_TEST);
      gl.clearColor(0.0, 0.0, 0.0, 0.0);

      var mesh = createFlatMesh(gl);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]);


      var projectionMatrix = mat4.create();
      mat4.perspective(
        projectionMatrix, 0.75, canvas.width/canvas.height,
        0.1, 100);
      var viewMatrix = mat4.create();
      var modelMatrix = mat4.create();
      mat4.translate(modelMatrix, modelMatrix, [0,0,-2]);
      mat4.rotate(modelMatrix, modelMatrix, -1, [1,0,0]);

      mesh.modelMatrix = modelMatrix;

      gl.useProgram(program);

      program.modelMatrixUniform =
        gl.getUniformLocation(program, 'modelMatrix');
      program.viewMatrixUniform =
        gl.getUniformLocation(program, 'viewMatrix');
      program.projectionMatrixUniform =
        gl.getUniformLocation(program, 'projectionMatrix');
      
      gl.uniformMatrix4fv(
        program.projectionMatrixUniform, gl.FALSE,
        projectionMatrix);
      gl.uniformMatrix4fv(
        program.viewMatrixUniform, gl.FALSE, viewMatrix);

      gl.bindBuffer(gl.ARRAY_BUFFER, mesh.buffer);

      program.positionAttribute =
        gl.getAttribLocation(program, 'pos');
      program.textureCoordsAttribute =
        gl.getAttribLocation(program, 'texCoords');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.enableVertexAttribArray(program.textureCoordsAttribute);
      gl.vertexAttribPointer(
        program.positionAttribute, 3, gl.FLOAT, false,
        5 * Float32Array.BYTES_PER_ELEMENT,
        0);
      gl.vertexAttribPointer(
        program.textureCoordsAttribute, 2, gl.FLOAT, false,
        5 * Float32Array.BYTES_PER_ELEMENT,
        3 * Float32Array.BYTES_PER_ELEMENT);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);

      loadTexture('bricks.png', gl, mesh,
        function() {
          requestAnimationFrame(function() {
            render(gl, {
              program: program,
              object: mesh
            });
          })
        });
    }
    </script>
  </head>
  <body onLoad="init()">
    <div id="content">
    </div>
  </body>
</html>

What’s New

  1. The vertex shader now accepts texture coordinates, a plain vec2 (i.e. an X/Y into a 2D image)
  2. The fragment shader is finally getting some action, and has a sampler2D that provides access to the texture data
  3. The fragment shader also calls a function, texture2D, that looks up the sampler and extracts the sample value
  4. On the JavaScript/HTML front, I’m creating the <canvas> element programmatically. I’m also attaching an event handler to the window resize event, which resizes the canvas and adjusts the WebGL viewport.

Finally—arguably most importantly—I have a new loadTexture function. This is responsible for requesting the load of the source image and, once it has loaded, ensuring that a WebGL texture is created and attached to the to-be-rendered object. Since this is asynchronous, we can’t just march on to requestAnimationFrame after calling it, so we parcel that action up in a post-image-load function that loadTexture can invoke. This kicks off the animation loop, but only once the texture data has been successfully loaded.

Like buffers, texture handles are created with a ‘create’ function, createTexture, which returns an opaque identifier that your code can subsequently use to refer to it. Also like buffers, just creating a texture does not allocate storage for it–it just reserves the identifier. It’s not until you call texImage2D that texture data is actually uploaded to GPU memory. This create/bind/manipulate/unbind workflow is a very common pattern in WebGL.
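
createFlatMesh, for example, follows exactly the same shape with a vertex buffer:

var buffer = gl.createBuffer();          // create: reserve an identifier
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);  // bind: make it the active buffer
gl.bufferData(                           // manipulate: allocate and copy
  gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);    // unbind: leave a clean state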

Depending upon your image it may be loaded upside-down, in which case a call to pixelStorei with UNPACK_FLIP_Y_WEBGL set to true will set things right.

So, what’s with all the texParameteri calls? The TEXTURE_{MAG,MIN}_FILTER parameters say what to do if either multiple or no texture pixels are directly located at the sample point at render time. A value of LINEAR causes an interpolation between the neighbouring/contributing values, and generally offers good visual quality. A value of NEAREST, as the name implies, grabs the nearest concrete value. It’s worth experimenting with both to see what they mean for the visual quality of your scene.

TEXTURE_WRAP_{T,S} says what to do when a texture coordinate falls outside the 0.0–1.0 range. Should the image repeat, tiling itself across the surface? Should the edge pixels simply be extended? The latter is what CLAMP_TO_EDGE does.
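
If you want to experiment, these are the swaps to make (note that REPEAT requires power-of-two texture dimensions in WebGL):

// Grab the nearest texel instead of interpolating: a pixelated look.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);

// Tile the image for coordinates outside 0.0-1.0 instead of clamping.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);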

That wraps up simple texturing: upload a texture, specify how it maps to a given surface, and bind it during the draw call.

Next Up: If you run that and see a perspective-projected brick square, then all is well. You might, however, be skeptical about it being a fairly complex mesh, rather than just a pair of triangles. In OpenGL, you’d just flip glPolygonMode to GL_LINE and you’d see the wireframe, but WebGL doesn’t offer this (switching the primitive type to LINES doesn’t count: you have to change your model data to account for line drawing, and I don’t want to do that).

So the next post is going to expose the wireframe with a quick hack involving some extra vertex data and the introduction of a feature of the fragment shader: the discard keyword.

WebGL from Scratch: Converting a Complex Mesh

In the last post, I generated a model directly within Blender and exported it as an OBJ file.

In this post, I’m going to take a model posted on the web and turn it into something that the code from last time can render without modification. After a brief search, I found a nice skeleton, which is an excellent model because you can see between the ribs as it rotates.

First, get a hold of the .3ds version of the model, and import it into Blender (File -> Import -> 3D Studio (.3ds)). Now, unless something’s changed, this didn’t go quite to plan for me: the rib cage was fine, but the figure itself was on its back! To fix this, click on the $$$DUMMY.Skelet mesh on the right-hand side of the window, then click on the Object editor and change the X-rotation to 90°:

skeleton-in-blender

There’s still a small problem here, as far as we’re concerned: the model is straddling 0,0 on the X-axis, which for our renderings so far has been the centre of our canvas. There are two solutions here:

  1. Translate the skeleton body and rib-cage by -1.25 in the Z direction (not the Y direction. Using the Blender defaults, .3ds and .obj disagree about which axes point where)
  2. Adjust our view matrix (in JavaScript) to move the world by the same amount in the Y direction (Remember: the view matrix moves the world around the camera)

NB. These are not really equivalent, as you’ll see if you rotate around anything but the Y-axis. Play around with that and see why.
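
Option 2 is a one-line change on the JavaScript side (a sketch; the offset assumes the -1.25 figure from option 1):

var viewMatrix = mat4.create();
// Slide the whole world down so the model sits in the middle of the view.
mat4.translate(viewMatrix, viewMatrix, [0, -1.25, 0]);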

With your translations/adjustments done, export the model as before:

  1. [File] -> [Export] -> [Wavefront (.obj)]
  2. Check ‘Write Normals’, ‘Triangulate Faces’, and ‘Objects as OBJ Objects’, and clear everything else
  3. Pick a filename and click ‘Export OBJ’

Then, back in the JavaScript code, change the loadMesh call to use your model, and reload the page. After a small pause (a second or two on my late-2011 MacBook Pro), you’ll see something like this:

skeleton

This is being lit by the very same vertex and fragment shaders as for the monkey face in the last post. The primary difference here is that the vertex count—available from a console.log in the JavaScript console—will list just short of a million vertices. Lit in real time, at 60fps. And, unless you’re doing this on a machine with no GPU, the CPU usage won’t be any different than it was when rendering a 36-vertex cube.

Pretty neat, huh?

In the next post, I’m going to take a step back to a programmatically generated square mesh, and use it to play with texturing.

WebGL from Scratch: Loading a Mesh

In this post, I’m going to create a mesh using the Blender 3D modelling tool and load it into my scene. The cube that I’ve been playing with for the past few posts is going to disappear—as is all of the code that defines its vertices and calculates vertex normal vectors—and be replaced by a simple OBJ loader. What I’ll have by the end of this post is going to look like this:

monkey

However, one of the many things that WebGL doesn’t know about is loading meshes. It’s your job to load data from whatever resource you have and turn it into the vertex data that WebGL needs. The difficulty of that task is determined by whoever designed the file format in question, so I’m going to start with an easy one: Wavefront OBJ.

Creating an OBJ File

The easiest way to create an OBJ file is to export one from Blender. It’s quite an intimidating tool at first (and second) glance, but the simplest way to create a reasonably complex mesh is with the following steps:

  1. Start Blender
  2. A default cube shape will be highlighted. Press ‘x’ on your keyboard, and Enter to confirm deletion.
  3. From the ‘Add’ menu (toward the bottom-left of the main window), choose ‘Mesh’ and then ‘Monkey’.
  4. Just to the right of the ‘Add’ menu, switch from ‘Object Mode’ to ‘Edit Mode’, and then click ‘Subdivide’ on the left pane twice (this isn’t really required: it just generates more polygons for a better effect).
  5. Switch back to ‘Object Mode’, and choose ‘Smooth’ from the ‘Shading’ part of the left pane (again, this isn’t required, but it results in a model that looks nicer with our lighting model).
  6. From the ‘File’ menu (top-left), select ‘Export’ and ‘Wavefront (.obj)’
  7. From the checkbox options on the lower-left side, select only ‘Write Normals’, ‘Triangulate Faces’ and ‘Objects as OBJ Objects’. Un-check anything else.
  8. Choose a directory and filename (I use ‘monkey.obj’ in the code below), and click ‘Export OBJ’
  9. Quit Blender.

Alternatively, you can just download the model from here, and rename the file as monkey.obj.

Parsing the Model

An OBJ file is plain text, and can be inspected in your text editor of choice. It’s line-based: a single line will define a single property (e.g. a position, texture coordinate, etc.)

The good news is that you don’t have to write a bulletproof, cover-all-cases OBJ parser to get at the model data. All you have to do is note that vertex coordinates are in lines starting ‘v’, vertex normals are in lines starting ‘vn’, and that faces are defined in lines starting ‘f’. All the ‘v’s appear in a single block before the first ‘vn’, and all of those appear in a single block before the first ‘f’. Every other line can be ignored for the purposes of this demonstration.

When I walked through the above instructions for exporting an OBJ file, the first position was defined as:

v 0.437500 0.164062 0.765625

This is just an XYZ position. Many lines starting ‘v’ follow it, until the first ‘vn’, which for my instance is:

vn 0.666000 -0.204900 0.717200

This is a plain vector of 3 floats, and is followed by a block of additional vertex normals.

The first face line is:

f 1//1 3//1 45//1

This is the first line that warrants some explanation. It states that the face has three vertices, with each vertex component defined by indices into the corresponding data arrays. There are three indices per vertex: the first indexes the positions array, the second (not used here, so blank) indexes an array of texture coordinates, and the third indexes the normals array.

Note that indices start at 1, not 0. This is a common gotcha.
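
In code terms, each chunk is split on '/' and the indices are converted to 0-based before use:

var f = '1//1'.split('/');                    // ["1", "", "1"]
var position = positions[parseInt(f[0]) - 1]; // position index (1-based)
var normal   = normals[parseInt(f[2]) - 1];   // normal index; middle slot unused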

So, imagine that you’ve loaded the file as a single string, possibly via AJAX. This function will return an object containing a Float32Array of interleaved position/normal data, ready to hand to gl.bufferData.

function loadMeshData(string) {
  var lines = string.split("\n");
  var positions = [];
  var normals = [];
  var vertices = [];

  for ( var i = 0 ; i < lines.length ; i++ ) {
    var parts = lines[i].trimRight().split(' ');
    if ( parts.length > 0 ) {
      switch(parts[0]) {
        case 'v':  positions.push(
          vec3.fromValues(
            parseFloat(parts[1]),
            parseFloat(parts[2]),
            parseFloat(parts[3])
          ));
          break;
        case 'vn':
          normals.push(
            vec3.fromValues(
              parseFloat(parts[1]),
              parseFloat(parts[2]),
              parseFloat(parts[3])
          ));
          break;
        case 'f': {
          var f1 = parts[1].split('/');
          var f2 = parts[2].split('/');
          var f3 = parts[3].split('/');
          Array.prototype.push.apply(
            vertices, positions[parseInt(f1[0]) - 1]
          );
          Array.prototype.push.apply(
            vertices, normals[parseInt(f1[2]) - 1]
          );
          Array.prototype.push.apply(
            vertices, positions[parseInt(f2[0]) - 1]
          );
          Array.prototype.push.apply(
            vertices, normals[parseInt(f2[2]) - 1]
          );
          Array.prototype.push.apply(
            vertices, positions[parseInt(f3[0]) - 1]
          );
          Array.prototype.push.apply(
            vertices, normals[parseInt(f3[2]) - 1]
          );
          break;
        }
      }
    }
  }
  var vertexCount = vertices.length / 6;
  console.log("Loaded mesh with " + vertexCount + " vertices");
  return {
    primitiveType: 'TRIANGLES',
    vertices: new Float32Array(vertices),
    vertexCount: vertexCount
  };
}

This takes advantage of the ordering within the file, knowing that an ‘f’ line won’t be encountered for a given mesh until after all the ‘v’s and ‘vn’s have been seen. Those ‘v’s and ‘vn’s are held only long enough to create the interleaved array, and are eligible for garbage collection when this function returns: we don’t need to hold onto them.

If the incomplete nature of this scanner offends you, feel free to read up on the OBJ spec and write a full parser, or find one online that you can plug in.

The New Code

I’m going to grab the OBJ file using an AJAX call, and for that I’m going to pull in jQuery: not because I couldn’t do without it, but because it has a nice syntax for making the call and processing the result.
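
The fetch itself boils down to something like this (a sketch; the handler wiring is mine):

$(document).ready(function() {
  $.ajax({
    url: 'monkey.obj',
    dataType: 'text'
  }).done(function(data) {
    init(loadMeshData(data));   // parse the OBJ text, then start the scene
  }).fail(function() {
    alert('Failed to load monkey.obj');
  });
});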

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="x-shader/x-vertex" id="vertex-shader">
    precision mediump float;

    attribute vec3 pos;
    attribute vec3 normal;

    varying vec3 col;

    uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
    uniform mat3 normalMatrix;

    uniform vec3 ambientLightColour, directionalLight, materialSpecular;
    uniform float materialAmbient, materialDiffuse, shininess;

    /* A function to determine the colour of a vertex, accounting
       for ambient and directional light */
    vec3 ads( vec4 position, vec3 norm )
    {
      vec3 s = normalize(vec3(vec4(directionalLight,1.0) - position));
      vec3 v = normalize(vec3(-position));
      vec3 r = reflect(-s, norm);
      return ambientLightColour +
        materialDiffuse * max(dot(s,norm), 0.0) +
        materialSpecular * pow(max(dot(r,v), 0.0), shininess);
    }

    void main() {
      vec3 eyeNormal = normalize(normalMatrix * normal);
      vec4 eyePosition =  viewMatrix * modelMatrix * vec4(pos, 1.0);
      col = min(vec3(0.0) + ads(eyePosition, eyeNormal), 1.0);
      gl_Position = projectionMatrix * viewMatrix * modelMatrix *
        vec4(pos, 1.0); 
    }
    </script>
    <script type="x-shader/x-fragment" id="fragment-shader">
    precision mediump float;

    varying vec3 col;

    void main() {
      gl_FragColor = vec4(col, 1.0);
    }
    </script>
    <script type="text/javascript" src="jquery-2.1.1.js"></script>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script type="text/javascript">

    function render(gl,scene,timestamp,previousTimestamp) {

      gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
      gl.useProgram(scene.program);

      var delta = (0.125 * Math.PI) / (timestamp - previousTimestamp);

      var light = vec3.fromValues(
        ($('#light-x').val() - 50.0) / 10.0,
        ($('#light-y').val() - 50.0) / 10.0,
        ($('#light-z').val() - 50.0) / 10.0);

      gl.uniform3fv(scene.program.directionalLightUniform, light);

      var rotateX = ($('#rotate-x').val() - 5) / 10;
      var rotateY = ($('#rotate-y').val() - 5) / 10;
      var rotateZ = ($('#rotate-z').val() - 5) / 10;

      mat4.rotate(
        scene.object.modelMatrix, scene.object.modelMatrix, delta,
        [rotateX, rotateY, rotateZ]);
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, gl.FALSE,
        scene.object.modelMatrix);

      var normalMatrix = mat3.create();
      mat3.normalFromMat4(
        normalMatrix,
        mat4.multiply(
          mat4.create(),
          scene.object.modelMatrix,
          scene.viewMatrix));
      gl.uniformMatrix3fv(
        scene.program.normalMatrixUniform, gl.FALSE, normalMatrix);

      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.vertexBuffer);
      gl.drawArrays(gl.TRIANGLES, 0, scene.object.vertexCount);
      gl.bindBuffer(gl.ARRAY_BUFFER, null);

      gl.useProgram(null);
      requestAnimationFrame(function(time) {
        render(gl,scene,time,timestamp);
      });
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(
          shader, document.getElementById(spec.container).text
        );
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
          throw gl.getShaderInfoLog(shader);
        }
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        throw gl.getProgramInfoLog(program);
      }
      return program;
    }
    
    function init(object) {

      var surface = document.getElementById('rendering-surface');
      var gl = surface.getContext('experimental-webgl');
      gl.viewport(0,0,surface.width,surface.height);
      gl.enable(gl.DEPTH_TEST);
      gl.enable(gl.CULL_FACE);
      gl.cullFace(gl.BACK);
      gl.clearColor(0.0, 0.0, 0.0, 0.0);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]);

      gl.useProgram(program);

      program.positionAttribute = gl.getAttribLocation(program, 'pos');
      gl.enableVertexAttribArray(program.positionAttribute);
      program.normalAttribute = gl.getAttribLocation(program, 'normal');
      gl.enableVertexAttribArray(program.normalAttribute);

      var vertexBuffer = gl.createBuffer();

      gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
      gl.bufferData(gl.ARRAY_BUFFER, object.vertices, gl.STATIC_DRAW);
      gl.vertexAttribPointer(
        program.positionAttribute, 3, gl.FLOAT, gl.FALSE, 
        Float32Array.BYTES_PER_ELEMENT * 6, 0);
      gl.vertexAttribPointer(
        program.normalAttribute, 3, gl.FLOAT, gl.FALSE,
        Float32Array.BYTES_PER_ELEMENT * 6,
        Float32Array.BYTES_PER_ELEMENT * 3);

      var projectionMatrix = mat4.create();
      mat4.perspective(
        projectionMatrix, 0.75, surface.width/surface.height,
        0.1, 100);
      program.projectionMatrixUniform = gl.getUniformLocation(
        program, 'projectionMatrix');
      gl.uniformMatrix4fv(
        program.projectionMatrixUniform, gl.FALSE, 
        projectionMatrix);

      var viewMatrix = mat4.create();
      program.viewMatrixUniform = gl.getUniformLocation(
        program, 'viewMatrix');
      gl.uniformMatrix4fv(
        program.viewMatrixUniform, gl.FALSE, viewMatrix);

      var modelMatrix = mat4.create();
      mat4.identity(modelMatrix);
      mat4.translate(modelMatrix, modelMatrix, [0, 0, -4]);
      program.modelMatrixUniform = gl.getUniformLocation(
        program, 'modelMatrix');
      gl.uniformMatrix4fv(
        program.modelMatrixUniform, gl.FALSE, modelMatrix);

      var normalMatrix = mat3.create();
      mat3.normalFromMat4(
        normalMatrix, mat4.multiply(
          mat4.create(), modelMatrix, viewMatrix));
      program.normalMatrixUniform = gl.getUniformLocation(
        program, 'normalMatrix');
      gl.uniformMatrix3fv(
        program.normalMatrixUniform, gl.FALSE, normalMatrix);

      program.ambientLightColourUniform = gl.getUniformLocation(
        program, 'ambientLightColour');
      program.directionalLightUniform = gl.getUniformLocation(
        program, 'directionalLight');
      program.materialSpecularUniform = gl.getUniformLocation(
        program, 'materialSpecular');
      object.materialAmbientUniform = gl.getUniformLocation(
        program, 'materialAmbient');
      object.materialDiffuseUniform = gl.getUniformLocation(
        program, 'materialDiffuse');
      object.shininessUniform = gl.getUniformLocation(
        program, 'shininess');

      var ambientLightColour = vec3.fromValues(0.2, 0.2, 0.2);
      gl.uniform3fv(
        program.ambientLightColourUniform, ambientLightColour);
      var directionalLight = vec3.fromValues(-0.5,0.5,0.5);
      gl.uniform3fv(
        program.directionalLightUniform, directionalLight);
      var materialSpecular = vec3.fromValues(0.5, 0.5, 0.5);
      gl.uniform3fv(
        program.materialSpecularUniform, materialSpecular);
      gl.uniform1f(
        object.shininessUniform, object.material.shininess);

      gl.uniform1f(
        object.materialAmbientUniform, object.material.ambient);
      gl.uniform1f(
        object.materialDiffuseUniform, object.material.diffuse);

      object.modelMatrix = modelMatrix;
      object.vertexBuffer = vertexBuffer;

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);

      var scene = {
        program: program,
        object: object,
        start: Date.now(),
        projectionMatrix: projectionMatrix,
        viewMatrix: viewMatrix
      };

      requestAnimationFrame(function(timestamp) {
        render(gl, scene, timestamp, 0);
      });
    }

    function loadMeshData(string) {
      var lines = string.split("\n");
      var positions = [];
      var normals = [];
      var vertices = [];

      for ( var i = 0 ; i < lines.length ; i++ ) {
        var parts = lines[i].trimRight().split(' ');
        if ( parts.length > 0 ) {
          switch(parts[0]) {
            case 'v':  positions.push(
              vec3.fromValues(
                parseFloat(parts[1]),
                parseFloat(parts[2]),
                parseFloat(parts[3])
              ));
              break;
            case 'vn':
              normals.push(
                vec3.fromValues(
                  parseFloat(parts[1]),
                  parseFloat(parts[2]),
                  parseFloat(parts[3])));
              break;
            case 'f': {
              var f1 = parts[1].split('/');
              var f2 = parts[2].split('/');
              var f3 = parts[3].split('/');
              Array.prototype.push.apply(
                vertices, positions[parseInt(f1[0]) - 1]);
              Array.prototype.push.apply(
                vertices, normals[parseInt(f1[2]) - 1]);
              Array.prototype.push.apply(
                vertices, positions[parseInt(f2[0]) - 1]);
              Array.prototype.push.apply(
                vertices, normals[parseInt(f2[2]) - 1]);
              Array.prototype.push.apply(
                vertices, positions[parseInt(f3[0]) - 1]);
              Array.prototype.push.apply(
                vertices, normals[parseInt(f3[2]) - 1]);
              break;
            }
          }
        }
      }
      console.log(
        "Loaded mesh with " + (vertices.length / 6) + " vertices");
      return {
        primitiveType: 'TRIANGLES', // informational; render() calls gl.drawArrays with gl.TRIANGLES directly
        vertices: new Float32Array(vertices),
        vertexCount: vertices.length / 6,
        material: {ambient: 0.2, diffuse: 0.5, shininess: 10.0}
      };
    }

    function loadMesh(filename) {
      $.ajax({
        url: filename,
        dataType: 'text'
      }).done(function(data) {
        init(loadMeshData(data));
      }).fail(function() {
        alert('Failed to retrieve [' + filename + ']');
      });
    }

    $(document).ready(function() {
      loadMesh('monkey.obj');
    });

    </script>
  </head>
  <body>
    <canvas id="rendering-surface" height="500" width="500"></canvas>
    <form>
      <div>
        <label for="light-x">Light X<input type="range" name="light-x" id="light-x" min="0" max="100"></label>
        <label for="light-y">Light Y<input type="range" name="light-y" id="light-y" min="0" max="100"></label>
        <label for="light-z">Light Z<input type="range" name="light-z" id="light-z" min="0" max="100"></label>
      </div>
      <div>
        <label for="rotate-x">Rotate X<input type="range" name="rotate-x" id="rotate-x" min="0" max="10" value="5"></label>
        <label for="rotate-y">Rotate Y<input type="range" name="rotate-y" id="rotate-y" min="0" max="10" value="5"></label>
        <label for="rotate-z">Rotate Z<input type="range" name="rotate-z" id="rotate-z" min="0" max="10" value="5"></label>
      </div>
    </form>
  </body>
</html>

What’s New

Aside from the slightly more complex initialisation process, and the move from an HTML onLoad to jQuery’s $(document).ready, what you might notice is that there’s much less code than last time. Since the mesh data is imported, nothing is generated or calculated in code. Note how little impact this has had on the rest of the program: in particular, the shaders are unchanged (I am, however, including the specular lighting component mentioned last post, because it makes the model look much better). The init function is no longer responsible for generating the object mesh, so the mesh is now explicitly passed into it.

The Really Good News

Now that there’s code to load the model from an OBJ file, you can load anything that you can convert into that format from Blender. You can download models from sites on the web, as long as Blender can import it, and put it into a format that can be loaded into the browser. Hand-crafted cubes and pyramids are a thing of the past.

A Note on Inconsistent Browser Security

Different browsers have different ideas about when it’s acceptable to load resources from the filesystem. Firefox and Safari, for example, have no problems pulling the OBJ resource directly from the disk via AJAX if the HTML file itself was loaded from the filesystem. This means that your OBJ file can be in the same directory as your HTML file, and everything will work fine.

Chrome has other ideas, and you’ll get the ‘Failed to retrieve [monkey.obj]’ message in the .fail handler to the AJAX call. The only way around this is to actually serve the content from a web server. If you have Python available, a call to python -m SimpleHTTPServer in the hosting directory will give you something usable.

WebGL from Scratch: Directional Lighting

In this post, we’re going to go from the spinning cube defined in the last one to one where the faces are lit from a specific direction. This means that geometry facing that light will be rendered more brightly, and geometry facing away will be darker, something like this:

lit-cube

But there’s a catch:

WebGL knows nothing about lighting.

What it does know about is what you tell it: where a point is in space and where a light source is. It also provides the functions that you need to do the necessary calculations to figure out—according to your lighting model of choice—what the colour of a given pixel should be.

That’s horribly vague, isn’t it? Briefly, accurately modelling light is hideously expensive. You’d have to account for the path of each individual photon from source to your eye, accounting for any materials that it bounces off or refracts through. So WebGL puts the tools in your hands with which to approximate lighting according to whichever model you want. Having that amount of choice can be paralysing, though. Where should you start? Well, a common model is to account for ambient, diffuse and specular effects, and to multiply those with ambient, diffuse and specular coefficients for the materials that you decide your objects are to be made of. The specular calculation also accounts for the ‘shininess’ of a material. I’ll ignore the specular component for just now: it’s possible to get something visible using just the ambient and diffuse components. The specular contribution can always be calculated later.

But first, the ambient component. Ambient light is the approximating term for light that has no definitive source. In reality, all light has a source, but in some cases it’s scattered so much that trying to model it accurately is, for all intents and purposes, impossible. Yes, that means that ambient light is a hack, but it’s good enough. It’s just a colour value that’s added to whatever colour you specified for a given vertex. Given that it’s global to a scene, it can be effectively modelled as a uniform vec3, and can be set once and forgotten.
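
In code, that amounts to a single upload during initialisation; something like this, using the uniform name that appears in the shaders below:

    var ambientLightColour = vec3.fromValues(0.0, 0.0, 0.2);
    gl.uniform3fv(program.ambientLightColourUniform, ambientLightColour);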

Second, the diffuse component. This is light that’s scattered equally in all directions by a given surface, but which comes from a definite direction. The sun is a good example.

Since directional light has, by definition, a direction, its contribution to a given fragment depends upon the angle of that fragment against that direction, combined with the direction that you’re viewing the fragment from. This involves some maths. We can place the code for this in either the vertex or the fragment shader. If we do it in the vertex shader, the value calculated at each vertex will be interpolated (not recalculated) for each fragment. This makes it a) cheap, and b) less accurate. Conversely, doing it in the fragment shader will increase both the computational cost and the accuracy of the effect. Which one you pick is up to you, based on the complexity of your scene, the capabilities of the hardware that will be rendering it, whether you’re optimising for performance or power, and so on. To start with, I’ll implement the lighting model in the vertex shader.

Remember that, as with previous posts, you’re going to need the gl-matrix library.

Code first, explanation later.

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="x-shader/x-vertex" id="vertex-shader">
    precision mediump float;

    attribute vec3 pos;
    attribute vec3 normal;

    varying vec3 col;

    uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
    uniform mat3 normalMatrix;
    uniform float time;
    uniform vec3 directionalLight;
    uniform vec3 ambientLightColour;
    uniform float materialAmbient;
    uniform float materialDiffuse;

    /* 
     * A function to determine the colour of a vertex, accounting
     * for ambient and directional light
     */
    vec3 ad( vec4 position, vec3 norm )
    {
      vec3 s = normalize(vec3(vec4(directionalLight,1.0) - position));
      return ambientLightColour + materialDiffuse * max(dot(s,norm), 0.0);
    }

    void main() {
      mat4 mvMatrix = viewMatrix * modelMatrix;
      vec3 eyeNormal = normalize(normalMatrix * normal);
      vec4 eyePosition = mvMatrix * vec4(pos, 1.0);
      col = min(ad(eyePosition, eyeNormal), 1.0);
      gl_Position = projectionMatrix * mvMatrix * vec4(pos, 1.0);       
    }
    </script>
    <script type="x-shader/x-fragment" id="fragment-shader">
    precision mediump float;

    uniform vec3 ambientLightColour;

    varying vec3 col;
    void main() {
      gl_FragColor = vec4(col, 1.0);
    }
    </script>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script type="text/javascript">

    function render(gl,scene,timestamp,previousTimestamp) {

      gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
      gl.useProgram(scene.program);

      var light = vec3.fromValues(
        (document.getElementById('light-x').value - 50.0) / 10.0,
        (document.getElementById('light-y').value - 50.0) / 10.0, 
        (document.getElementById('light-z').value - 50.0) / 10.0);

      gl.uniform3fv(scene.program.directionalLightUniform, light);

      gl.uniform1f(scene.program.timeUniform, timestamp);

      var delta = (0.125 * Math.PI) * ((timestamp - previousTimestamp) / 1000);

      var rotateX = (document.getElementById('rotate-x').value - 5) / 10;
      var rotateY = (document.getElementById('rotate-y').value - 5) / 10;
      var rotateZ = (document.getElementById('rotate-z').value - 5) / 10;

      mat4.rotate(
        scene.object.modelMatrix, scene.object.modelMatrix,
        delta, [rotateX, rotateY, rotateZ]);
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, gl.FALSE, scene.object.modelMatrix
      );

      var normalMatrix = mat3.create();
      mat3.normalFromMat4(
        normalMatrix, mat4.multiply(
          mat4.create(), scene.object.modelMatrix, scene.viewMatrix
      ));
      gl.uniformMatrix3fv(
        scene.program.normalMatrixUniform, gl.FALSE, normalMatrix);

      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.vertexBuffer);
      gl.drawArrays(
        scene.object.primitiveType, 0, scene.object.vertexCount
      );

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function(time) {
        render(gl,scene,time,timestamp);
      });
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(
          shader, document.getElementById(spec.container).text
        );
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
          throw gl.getShaderInfoLog(shader);
        }
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        throw gl.getProgramInfoLog(program);
      }
      return program;
    }
    
    function init() {
      var surface = document.getElementById('rendering-surface');
      var gl = surface.getContext('experimental-webgl');
      gl.viewport(0,0,surface.width,surface.height);
      gl.enable(gl.DEPTH_TEST);
      gl.enable(gl.CULL_FACE);
      gl.cullFace(gl.BACK);
      gl.clearColor(0.0, 0.0, 0.0, 0.0);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]
      );

      var squareVertices = [
        /* front face */
          /*  position */     /* normal */
        +0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-right */
        -0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-left  */
        +0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-right */
        -0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-left */
        +0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-right */
        -0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-left */

        /* right face */
        +0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-right */
        +0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-right */
        +0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-right */
        +0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-right */
        +0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-right */
        +0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-right */

        /* back face */
        -0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-left */
        +0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-right */
        -0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-left */
        +0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-right */
        -0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-left */
        +0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-right */

        /* left face */
        -0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-left */
        -0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-left */
        -0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-left */
        -0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-left */
        -0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-left */
        -0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-left */

        /* top face */
        +0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-right */
        -0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-left */
        +0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-right */
        -0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-left */
        +0.75, +0.75, +0.75, 0.0, 0.0, 0.0, /* front-top-right */
        -0.75, +0.75, -0.75, 0.0, 0.0, 0.0, /* rear-top-left */

        /* bottom face */
        +0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-right */
        -0.75, -0.75, +0.75, 0.0, 0.0, 0.0, /* front-bottom-left */
        +0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-right */        
        -0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-left */
        +0.75, -0.75, -0.75, 0.0, 0.0, 0.0, /* rear-bottom-right */
        -0.75, -0.75, +0.75, 0.0, 0.0, 0.0  /* front-bottom-left */
      ];

      var fpv = 6; // 6 floats per vertex: 3 position + 3 normal
      for ( var i = 0 ; i < 6 ; i++ ) {  // tackle each of 6 faces
        var offset = i * fpv * 6;        // offset to a face 'block'
        var normal = vec3.create();      // temp vertex normal
        var cross = vec3.create();       // temp cross product
        var right = vec3.create();       // temp right-side vector
        var left  = vec3.create();       // temp left-side vector

        // Despite the intimidating looking code, calculating
        // the normal is pretty simple:
        //   1. find the vector from one vertex to its neighbour by
        //      subtracting the vertex from that neighbour
        //   2. repeat for a second vertex.
        //   3. get the normal vector by taking the cross-product of
        //      those vectors, ensuring that you're always choosing
        //      neighbours in the same order---left/right or
        //      right/left---at each vertex.  If you mix them up, the
        //      normal vector will be flipped
        //   4. normalise the normal vector (unfortunate terminology:
        //      they have nothing to do with each other)

        vec3.normalize(
          normal,
          vec3.cross(
            cross,
            vec3.subtract(
              right,
              vec3.fromValues(
                squareVertices[offset+fpv*1+0],
                squareVertices[offset+fpv*1+1],
                squareVertices[offset+fpv*1+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*0+0],
                squareVertices[offset+fpv*0+1],
                squareVertices[offset+fpv*0+2])),
            vec3.subtract(
              left,
              vec3.fromValues(
                squareVertices[offset+fpv*2+0],
                squareVertices[offset+fpv*2+1],
                squareVertices[offset+fpv*2+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*0+0],
                squareVertices[offset+fpv*0+1],
                squareVertices[offset+fpv*0+2]))));

        // Write the calculated normal vector into its
        // reserved place in the vertex data array
        squareVertices[offset + fpv * 0 + 3] = normal[0];
        squareVertices[offset + fpv * 0 + 4] = normal[1];
        squareVertices[offset + fpv * 0 + 5] = normal[2];

        vec3.normalize(
          normal,
          vec3.cross(
            cross,
            vec3.subtract(
              right,
              vec3.fromValues(
                squareVertices[offset+fpv*2+0],
                squareVertices[offset+fpv*2+1],
                squareVertices[offset+fpv*2+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*1+0],
                squareVertices[offset+fpv*1+1],
                squareVertices[offset+fpv*1+2])),
            vec3.subtract(
              left,
              vec3.fromValues(
                squareVertices[offset+fpv*0+0],
                squareVertices[offset+fpv*0+1],
                squareVertices[offset+fpv*0+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*1+0],
                squareVertices[offset+fpv*1+1],
                squareVertices[offset+fpv*1+2]))));

        squareVertices[offset + fpv * 1 + 3] = normal[0];
        squareVertices[offset + fpv * 1 + 4] = normal[1];
        squareVertices[offset + fpv * 1 + 5] = normal[2];

        vec3.normalize(
          normal,
          vec3.cross(
            cross,
            vec3.subtract(
              right,
              vec3.fromValues(
                squareVertices[offset+fpv*0+0],
                squareVertices[offset+fpv*0+1],
                squareVertices[offset+fpv*0+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*2+0],
                squareVertices[offset+fpv*2+1],
                squareVertices[offset+fpv*2+2])),
            vec3.subtract(
              left,
              vec3.fromValues(
                squareVertices[offset+fpv*1+0],
                squareVertices[offset+fpv*1+1],
                squareVertices[offset+fpv*1+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*2+0],
                squareVertices[offset+fpv*2+1],
                squareVertices[offset+fpv*2+2]))));

        squareVertices[offset + fpv * 2 + 3] = normal[0];
        squareVertices[offset + fpv * 2 + 4] = normal[1];
        squareVertices[offset + fpv * 2 + 5] = normal[2];

        vec3.normalize(
          normal,
          vec3.cross(
            cross,
            vec3.subtract(
              right,
              vec3.fromValues(
                squareVertices[offset+fpv*4+0],
                squareVertices[offset+fpv*4+1],
                squareVertices[offset+fpv*4+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*3+0],
                squareVertices[offset+fpv*3+1],
                squareVertices[offset+fpv*3+2])),
            vec3.subtract(
              left,
              vec3.fromValues(
                squareVertices[offset+fpv*5+0],
                squareVertices[offset+fpv*5+1],
                squareVertices[offset+fpv*5+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*3+0],
                squareVertices[offset+fpv*3+1],
                squareVertices[offset+fpv*3+2]))));

        squareVertices[offset + fpv * 3 + 3] = normal[0];
        squareVertices[offset + fpv * 3 + 4] = normal[1];
        squareVertices[offset + fpv * 3 + 5] = normal[2];

        vec3.normalize(
          normal,
          vec3.cross(
            cross,
            vec3.subtract(
              right,
              vec3.fromValues(
                squareVertices[offset+fpv*5+0],
                squareVertices[offset+fpv*5+1],
                squareVertices[offset+fpv*5+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*4+0],
                squareVertices[offset+fpv*4+1],
                squareVertices[offset+fpv*4+2])),
            vec3.subtract(
              left,
              vec3.fromValues(
                squareVertices[offset+fpv*3+0],
                squareVertices[offset+fpv*3+1],
                squareVertices[offset+fpv*3+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*4+0],
                squareVertices[offset+fpv*4+1],
                squareVertices[offset+fpv*4+2]))));

        squareVertices[offset + fpv * 4 + 3] = normal[0];
        squareVertices[offset + fpv * 4 + 4] = normal[1];
        squareVertices[offset + fpv * 4 + 5] = normal[2];

        vec3.normalize(
          normal,
          vec3.cross(
            cross,
            vec3.subtract(
              right,
              vec3.fromValues(
                squareVertices[offset+fpv*3+0],
                squareVertices[offset+fpv*3+1],
                squareVertices[offset+fpv*3+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*5+0],
                squareVertices[offset+fpv*5+1],
                squareVertices[offset+fpv*5+2])),
            vec3.subtract(
              left,
              vec3.fromValues(
                squareVertices[offset+fpv*4+0],
                squareVertices[offset+fpv*4+1],
                squareVertices[offset+fpv*4+2]),
              vec3.fromValues(
                squareVertices[offset+fpv*5+0],
                squareVertices[offset+fpv*5+1],
                squareVertices[offset+fpv*5+2]))));

        squareVertices[offset + fpv * 5 + 3] = normal[0];
        squareVertices[offset + fpv * 5 + 4] = normal[1];
        squareVertices[offset + fpv * 5 + 5] = normal[2];
      }

      gl.useProgram(program);

      var square = {
        vertexCount: squareVertices.length / fpv,
        primitiveType: gl.TRIANGLES,
        vertices: squareVertices,
        material: { ambient: 0.1, diffuse: 0.3 }
      };

      var vertexBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);

      program.positionAttribute = gl.getAttribLocation(program, 'pos');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.vertexAttribPointer(
        program.positionAttribute, 3, gl.FLOAT, false,
        Float32Array.BYTES_PER_ELEMENT * 6, 0);
      program.normalAttribute = gl.getAttribLocation(program, 'normal');
      gl.enableVertexAttribArray(program.normalAttribute);
      gl.vertexAttribPointer(
        program.normalAttribute, 3, gl.FLOAT, false,
        Float32Array.BYTES_PER_ELEMENT * 6,
        Float32Array.BYTES_PER_ELEMENT * 3);

      gl.bufferData(
        gl.ARRAY_BUFFER, new Float32Array(squareVertices), gl.STATIC_DRAW
      );

      square.vertexBuffer = vertexBuffer;

      program.timeUniform = gl.getUniformLocation(program, 'time');

      // New properties: the ambient and directional light values,
      // as well as values to determine how the surface material reacts
      // to these lights.
      program.ambientLightColourUniform =
        gl.getUniformLocation(program, 'ambientLightColour');
      program.directionalLightUniform =
        gl.getUniformLocation(program, 'directionalLight');
      square.materialAmbientUniform =
        gl.getUniformLocation(program, 'materialAmbient');
      square.materialDiffuseUniform =
        gl.getUniformLocation(program, 'materialDiffuse');

      // Default values for the lights. The directional light is
      // overwritten each frame from the sliders in the HTML below.
      var ambientLightColour = vec3.fromValues(0.0, 0.0, 0.2);
      gl.uniform3fv(
        program.ambientLightColourUniform, ambientLightColour
      );
      var directionalLight = vec3.fromValues(-0.5,0.5,0.5);
      gl.uniform3fv(
        program.directionalLightUniform, directionalLight
      );

      gl.uniform1f(
        square.materialAmbientUniform, square.material.ambient
      );
      gl.uniform1f(
        square.materialDiffuseUniform, square.material.diffuse
      );

      var projectionMatrix = mat4.create();
      mat4.perspective(
        projectionMatrix, 0.75, surface.width/surface.height, 0.1, 100
      );
      program.projectionMatrixUniform = 
        gl.getUniformLocation(program, 'projectionMatrix');
      gl.uniformMatrix4fv(
        program.projectionMatrixUniform, gl.FALSE, projectionMatrix
      );

      var viewMatrix = mat4.create();
      program.viewMatrixUniform =
        gl.getUniformLocation(program, 'viewMatrix');
      gl.uniformMatrix4fv(
        program.viewMatrixUniform, gl.FALSE, viewMatrix
      );

      var modelMatrix = mat4.create();
      mat4.identity(modelMatrix);
      mat4.translate(
        modelMatrix, modelMatrix, [0, 0, -4]
      );
      program.modelMatrixUniform =
        gl.getUniformLocation(program, 'modelMatrix');
      gl.uniformMatrix4fv(
        program.modelMatrixUniform, gl.FALSE, modelMatrix
      );

      program.normalMatrixUniform =
        gl.getUniformLocation(program, 'normalMatrix');

      square.modelMatrix = modelMatrix;

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);

      var scene = {
        program: program,
        object: square,
        start: Date.now(),
        projectionMatrix: projectionMatrix,
        viewMatrix: viewMatrix
      };

      requestAnimationFrame(function(timestamp) {
        render(gl, scene, timestamp, 0);
      });
    }

    </script>
  </head>
  <body onLoad="init()">
    <canvas id="rendering-surface" height="500" width="500"></canvas>
    <form>
      <!-- Some new controls to feed values into the program uniforms -->
      <div>
        <label for="light-x">Light X
          <input type="range" name="light-x" id="light-x" min="0" max="100"/>
        </label>
        <label for="light-y">Light Y
          <input type="range" name="light-y" id="light-y" min="0" max="100"/>
        </label>
        <label for="light-z">Light Z
          <input type="range" name="light-z" id="light-z" min="0" max="100"/>
        </label>
      </div>
      <div>
        <label for="rotate-x">Rotate X
          <input type="range" name="rotate-x" id="rotate-x" min="0" max="10" value="5"/>
        </label>
        <label for="rotate-y">Rotate Y
          <input type="range" name="rotate-y" id="rotate-y" min="0" max="10" value="5"/>
        </label>
        <label for="rotate-z">Rotate Z
          <input type="range" name="rotate-z" id="rotate-z" min="0" max="10" value="5"/>
        </label>
      </div>
    </form>
  </body>
</html>

What’s New

The explicit colour values are gone from the vertices, as is their presence in the vertex shader and the binding code in the init function.

Instead, the vertices have floats that define the vertex normal vector. The normal vector is perpendicular to the vectors between the vertex position and the positions of the other two vertices of the triangle that it is part of. This is a handy thing to have when it comes to calculating how much a light source affects the value of a pixel: a light behind a face won’t affect it at all, whereas one in front will, to an extent dependent upon the angle at which it strikes.

Notice that the normals in the array are all zeroes. This is because they’re just placeholders: their values are calculated in the subsequent partially-unrolled loop. Note that there are two possible directions that the normal vector can point, and which of the two you get depends upon whether you calculated your normal by taking the cross product of the right-hand vector with the left-hand one, or vice versa. With the counter-clockwise winding scheme that we’re using, cross the right one with the left. To figure out which is right and which is left, pretend that you’re the normal vector you want, standing on the vertex. For example, in this scenario:

cross-product

the normal vector (in blue) is calculated by taking the cross product of v1 minus v0 and v2 minus v0.
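
If the unrolled code above is heavy going, here is the same calculation for a single corner, sketched with gl-matrix (v0, v1 and v2 standing in for the triangle’s three position vectors):

    var e1 = vec3.subtract(vec3.create(), v1, v0); // to the right-hand neighbour
    var e2 = vec3.subtract(vec3.create(), v2, v0); // to the left-hand neighbour
    var normal = vec3.create();
    vec3.normalize(normal, vec3.cross(normal, e1, e2));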

Since the normal for a vertex is going to be passed over to the shader as an attribute, the mapping into the buffer is performed with the usual getAttribLocation/enableVertexAttribArray/vertexAttribPointer triplet.

There are additional uniforms: normalMatrix, a mat3, as well as vec3s defining the colour of the ambient light and the direction of the directional light, and floats defining how the object material interacts with those lights.

The render() function now calculates the normal matrix—it needs to be recalculated after each change to either the model or view matrices that it’s derived from, and we’re adjusting the model matrix here—and uploads it to the GPU with a call to uniformMatrix3fv.
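
For the record, mat3.normalFromMat4 is computing the classic inverse-transpose; a rough equivalent, spelled out with separate gl-matrix calls, would be:

    var normalMatrix = mat3.create();
    mat3.fromMat4(normalMatrix, modelViewMatrix); // take the upper-left 3x3
    mat3.invert(normalMatrix, normalMatrix);
    mat3.transpose(normalMatrix, normalMatrix);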

The vertex shader is significantly different, and now includes the function ad to calculate ambient and directional contributions to the intensity of a vertex. This is used from main to determine the final colour for a vertex.

Finally, there are some changes to the HTML to include range controls for moving the light and specifying the angle of rotation.

Wait, There’s More…

People who’ve been around the WebGL/OpenGL block before (you guys are still reading this?) might be surprised by my ad function: it’s called ads in almost every other demonstration, where the ‘s’ stands for ‘specular’. Well, I avoided accounting for the specular term because I personally think that it adds unhelpful complexity at the start. Whether or not your directional lighting model works can be quite easily determined just by having ambient and diffuse properties.

But now that they’re done, it’s not inappropriate to look at the specular component, and give our vertex shader function its full ads name.

The specular component captures a material’s ‘shininess’ or glossiness. You might have noticed that, with only ambient and diffuse light, the surface looked a bit matte: you could easily see which faces of the cube were facing the light, but even looking at it head-on there was no real reflection of a light source. While this is fine for some surfaces (e.g. a brick wall), it doesn’t work for others (e.g. a polished wooden tabletop). So we add a specular term to the light source, and a shininess term to the object, that together allow highlights to be calculated on the surface. This means two new uniform float variables, materialSpecular and shininess, and an ads function that looks like this:

    vec3 ads( vec4 position, vec3 norm )
    {
      vec3 s = normalize(vec3(vec4(directionalLight,1.0) - position));
      vec3 v = normalize(vec3(-position));
      vec3 r = reflect(-s, norm);
      return ambientLightColour +
        materialDiffuse * max(dot(s,norm), 0.0) +
        materialSpecular * pow(max(dot(r,v), 0.0), shininess);
    }

If you use this with the current model, you might be a little disappointed: no specular highlights appear! This is because I’m calculating the effects of lighting at the vertices: the results are interpolated across the face, and any focussed highlights are smoothed out as a result. There are two options here: 1) move the lighting calculations into the fragment shader, at which point you’ll get per-pixel evaluation of the lighting model; or 2) subdivide the face a few more times so that some vertices fall on or near the highlights.
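
For the curious, option 1 is mostly a matter of moving data around: the vertex shader hands the eye-space position and normal over as varyings, and the ads function (along with its uniforms) moves into the fragment shader, where it runs per pixel. A minimal sketch, untested here:

    /* vertex shader */
    varying vec3 eyeNormal;
    varying vec4 eyePosition;

    void main() {
      eyeNormal = normalize(normalMatrix * normal);
      eyePosition = viewMatrix * modelMatrix * vec4(pos, 1.0);
      gl_Position = projectionMatrix * eyePosition;
    }

    /* fragment shader: ads and its uniforms now live here */
    void main() {
      gl_FragColor = vec4(min(ads(eyePosition, normalize(eyeNormal)), 1.0), 1.0);
    }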

This post has rambled on for quite long enough, though, so I’ll leave these approaches for another time.

What’s Next

Looking at this code, you’d be forgiven for thinking that complex models are going to be a pain to specify. All those floats entered by hand into an array, and all that calculation of normal vectors. After all, a spinning cube with some lighting took everything above to get going!

Fear not. This is not how complex models are built. Instead, specialised programs are used to interactively build models and export them to files in one of a wide array of formats. These models can then be imported, with varying degrees of difficulty, into our programs.

Since that’s more exciting, I’m going to look at generating a 3D mesh using the free Blender 3D modelling tool in the next post. I’ll load the exported .obj file with an AJAX call, and use it to build the buffer that WebGL will render. The model will interact with our ambient and directional lights, but there will be one important difference: we’ll stop playing with the 36 vertices that define our square and jump to a few tens of thousands.

If anyone’s wondering about where textures have gone, I’ll get to those after I’ve done some fun stuff.

WebGL from Scratch: Going 3D, Part 2

That last post definitely gave us a sense of depth, but the object itself remained two-dimensional. Let’s fix that. If you’ve skipped ahead, remember that we’re now using the gl-matrix.js library introduced in the last post. We’ll end up with something like this:

webgl.3d.fixed

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="x-shader/x-vertex" id="vertex-shader">
    precision mediump float;

    attribute vec3 pos;
    attribute vec3 colour;
    varying vec3 col;

    uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
    uniform float time;

    void main() {
      col = colour;
      float y = pos.y + sin(time * 3.141592653589793 / 1800.0) / 4.0;
      gl_Position = projectionMatrix * viewMatrix * modelMatrix * 
        vec4(pos.x, y, pos.z, 1.0);
    }
    </script>
    <script type="x-shader/x-fragment" id="fragment-shader">
    precision mediump float;
    varying vec3 col;
    void main() {
      gl_FragColor = vec4(col, 1.0);
    }
    </script>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script type="text/javascript">

    function render(gl,scene,timestamp,previousTimestamp) {

      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);

      gl.uniform1f(scene.program.timeUniform, timestamp);

      var delta = (0.5 * Math.PI) * ((timestamp - previousTimestamp) / 1000);
      mat4.rotate(
        scene.object.modelMatrix, scene.object.modelMatrix,
        delta, [0,1,0]
      );
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, gl.FALSE,
        scene.object.modelMatrix
      );

      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.vertexBuffer);
      gl.drawArrays(
        scene.object.primitiveType, 0, scene.object.vertexCount
      );

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function(time) {
        render(gl,scene,time,timestamp);
      });
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(
          shader,
          document.getElementById(spec.container).text
        );
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
          throw gl.getShaderInfoLog(shader);
        }
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        throw gl.getProgramInfoLog(program);
      }
      return program;
    }
    
    function init() {
      var surface = document.getElementById('rendering-surface');
      var gl = surface.getContext('experimental-webgl');
      gl.viewport(0,0,surface.width,surface.height);
      gl.clearColor(1.0, 0.0, 0.0, 1.0);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]
      );

      var squareVertices = [
        -0.75, +0.75, +0.75,    +0.0, +1.0, +0.0, /* front-top-left */
        -0.75, -0.75, +0.75,    +1.0, +0.0, +1.0, /* front-bottom-left */
        +0.75, +0.75, +0.75,    +0.75,+0.25,+0.5, /* front-top-right */
        +0.75, -0.75, +0.75,    +0.5, +0.25,+0.0, /* front-bottom-right */
        +0.75, +0.75, -0.75,    +0.25,+0.75,+1.0, /* rear-top-right */
        +0.75, -0.75, -0.75,    +1.0, +1.0, +0.0, /* rear-bottom-right */
        -0.75, +0.75, -0.75,    +0.0, +0.0, +1.0, /* rear-top-left */
        -0.75, -0.75, -0.75,    +0.0, +1.0, +0.0, /* rear-bottom-left */
        -0.75, +0.75, +0.75,    +0.0, +1.0, +0.0, /* front-top-left */
        -0.75, -0.75, +0.75,    +1.0, +0.0, +1.0  /* front-bottom-left */
      ];
             
      gl.useProgram(program);

      var square = {
        vertexCount: 10,
        primitiveType: gl.TRIANGLE_STRIP,
        vertices: squareVertices
      };

      var vertexBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);

      program.positionAttribute = gl.getAttribLocation(program, 'pos');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.vertexAttribPointer(
        program.positionAttribute, 3, gl.FLOAT, false,
        Float32Array.BYTES_PER_ELEMENT * 6,
        0
      );
      program.colourAttribute = gl.getAttribLocation(program, 'colour');
      gl.enableVertexAttribArray(program.colourAttribute);
      gl.vertexAttribPointer(
        program.colourAttribute, 3, gl.FLOAT, false,
        Float32Array.BYTES_PER_ELEMENT * 6,
        Float32Array.BYTES_PER_ELEMENT * 3
      );

      gl.bufferData(
        gl.ARRAY_BUFFER, new Float32Array(squareVertices),
        gl.STATIC_DRAW
      );

      square.vertexBuffer = vertexBuffer;

      program.timeUniform = gl.getUniformLocation(program, 'time');

      var projectionMatrix = mat4.create();
      mat4.perspective(
        projectionMatrix, 0.75, surface.width/surface.height, 0.1, 100
      );
      program.projectionMatrixUniform = gl.getUniformLocation(
        program, 'projectionMatrix'
      );
      gl.uniformMatrix4fv(
        program.projectionMatrixUniform, gl.FALSE, projectionMatrix
      );

      var viewMatrix = mat4.create();
      program.viewMatrixUniform = gl.getUniformLocation(
        program, 'viewMatrix'
      );
      gl.uniformMatrix4fv(
        program.viewMatrixUniform, gl.FALSE, viewMatrix
      );

      var modelMatrix = mat4.create();
      mat4.identity(modelMatrix);
      mat4.translate(modelMatrix, modelMatrix, [0, 0, -5]);
      program.modelMatrixUniform = gl.getUniformLocation(
        program, 'modelMatrix'
      );
      gl.uniformMatrix4fv(
        program.modelMatrixUniform, gl.FALSE, modelMatrix
      );

      square.modelMatrix = modelMatrix;

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);

      var scene = {
        program: program,
        object: square,
        start: Date.now(),
        projectionMatrix: projectionMatrix,
        viewMatrix: viewMatrix
      };

      requestAnimationFrame(function(timestamp) {
        render(gl, scene, timestamp, 0);
      });
    }
    </script>
  </head>
  <body onLoad="init()">
    <canvas id="rendering-surface" height="500" width="500"></canvas>
  </body>
</html>

What’s New

There are a lot more vertices. The triangle strip that previously defined a square is now defining four squares in 3-dimensional space. That third coordinate is now being explicitly defined, rather than left for the vertex shader to fill in with a default. That means the pos attribute in the vertex shader has changed from a vec2 to a vec3, and the z-coordinate is included in setting gl_Position.

Since 3 floats are now being passed to the pos attribute, the call to vertexAttribPointer has changed, as has the stride for both pos and colour: everything is still being packed into a single array, so squeezing something new in has ripple effects on other attribute mappings.
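
Laid out in memory, each vertex now occupies six floats, which is exactly where those stride and offset values come from:

    // | x  y  z  r  g  b | x  y  z  r  g  b | ...
    // stride = 6 floats = 24 bytes per vertex
    // pos    starts at byte offset 0
    // colour starts at byte offset 12 (i.e. 3 floats in)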

But… That Cube Doesn’t Look Right

If you’ve run this, you might be wondering what’s going on. Something that’s cube-like is definitely spinning, but it’s probably not quite what you expected, and that’s because, by default, WebGL doesn’t know about the concept of depth. Things appear on screen in the order that they’re drawn, not the z-order that you’d intuitively expect. Tracking depth values costs something, so WebGL has it disabled by default. To enable it, pass the DEPTH_TEST flag to the enable function of the context. The init function is a good place for that just now:

	gl.enable(gl.DEPTH_TEST);

With this flag enabled, the depth buffer keeps track of the depth of a fragment written during a draw call. If it is asked to overwrite a fragment, it only does so if the new fragment is closer to the viewpoint than the old one. As always, it can get a little more complicated than this—especially when dealing with translucency—but that’s a subject for a future post.
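
The comparison itself is configurable via depthFunc; the default behaviour is equivalent to calling:

    gl.depthFunc(gl.LESS); // keep the incoming fragment only if its depth is smaller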

Although not required for this example, it’s good practice to clear the depth buffer as well as the colour buffer when starting to draw a frame. Thankfully, the various buffer specifiers can be bitwise-OR’d together and passed along in one call, so the current gl.clear(gl.COLOR_BUFFER_BIT) call can be replaced with gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT). At best, the underlying hardware will be smart enough to clear them both at the same time. At worst, it’ll do them sequentially, but your code will be the same regardless.

You Cheated!

Yes, I did. That’s not really a cube: the top and bottom faces are missing. Exercise for the reader?

An Aside on WebGL’s Investment/Reward Curve

If you’ve been tracking these posts from the start, you might have initially been horrified by the amount of code that it took to go from a simple background clear to having a 2-dimensional square within it. If you were using the 2D canvas API, you could have had that square up with a fraction of the effort that WebGL required.

But things have gotten easier since then. You might have noticed that adding colour didn’t require significant additional effort. Neither did simple animation, or making the world 3-dimensional, or making an object within the world 3-dimensional. Can you imagine writing the code required to do all that by hand? Now it’s not only easy, it’s performant, making your normally-idle GPU stand up and do what it was designed for.

That’s the good news: new concepts map to new code much more simply. The bad news—and it’s only bad if you’re trying to ‘master’ WebGL—is that there are a ton of additional concepts: textures, lighting, shadows and environment mapping are just a few. On top of that, each of those can be tackled in a myriad of different ways, each carrying complexity/performance trade-offs.

Graphics programming is a rabbit hole with no bottom, and for a certain class of person, that’s fun.

Next Up: Adding a Directional Light

WebGL from Scratch: Going 3D, Part 1

In this post, I’m going go briefly touch on the third dimension. Our square will remain two dimensional, but it will spin around the Y axis and be rendered in such a way that there will be a definite sense of depth. But, as much as I might try, I can’t avoid it anymore. I have to: 1) bring in some matrices; 2) use an external JavaScript library to hide the details.

What Does High-School Mathematics Have To Do With Graphics?

Working with a 3D scene involves thinking about it as though it were in three dimensions right up until the point where it can’t be denied anymore: when you’re in shaders, projecting 3D points onto the 2D surface that is your screen.

There are three matrices that graphics programmers regularly deal with, commonly called model, view and projection matrices. These terms are horribly non-descriptive. What does it mean to be the ‘model’ matrix? I find it easiest to remember it this way:

1. The model matrix orients the object/model in the 3D world. It rotates it, moves it, and scales it.
2. The view matrix orients the world around the camera (the point from which your virtual presence in the 3D world is viewing the scene). It does not orient the camera in the world. In the domain of 3D graphics, the world really does revolve around you (yes, even in a AAA FPS game)
3. The projection matrix takes what up until now has been our 3D world and splats it onto the 2D screen.
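
Chained together, these three take a model-space position all the way to the screen. In a vertex shader the composition reads right to left, with the model matrix applied first and the projection matrix last:

    gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(pos, 1.0);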

I will not go into details about how this works: there are innumerable texts out there describing just that. All you need to know is which matrix to manipulate when you want the desired effect, and that the functions defined in the superb gl-matrix library do the required lifting. That’s the library that I mentioned earlier: you’re going to need to download gl-matrix.js and place it somewhere accessible to your page. I’ll be placing it alongside the HTML file for just now; for what’s happened so far, and what’s coming up soon, we still don’t need an actual server.

As a starting point, we’re going to take the coloured square that we’ve been working with so far, and rotate it around the Y axis while it’s bouncing up and down. Here’s the code:

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="x-shader/x-vertex" id="vertex-shader">
    precision mediump float;

    attribute vec2 pos;
    attribute vec3 colour;
    varying vec3 col;

    uniform mat4 projectionMatrix, viewMatrix, modelMatrix;
    uniform float time;

    void main() {
      col = colour;
      float y = pos.y + sin(time * 3.141592653589793 / 1800.0) / 4.0;
      gl_Position = projectionMatrix * viewMatrix * modelMatrix *
        vec4(pos.x, y, 0.0, 1.0);
    }
    </script>
    <script type="x-shader/x-fragment" id="fragment-shader">
    precision mediump float;
    varying vec3 col;
    void main() {
      gl_FragColor = vec4(col, 1.0);
    }
    </script>
    <script type="text/javascript" src="gl-matrix.js"></script>
    <script type="text/javascript">

    function render(gl,scene,timestamp,previousTimestamp) {

      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);

      gl.uniform1f(scene.program.timeUniform, timestamp);

      var delta = (0.5 * Math.PI) * ((timestamp - previousTimestamp) / 1000);
      mat4.rotate(
        scene.object.modelMatrix,
        scene.object.modelMatrix,
        delta, [0,1,0]);
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, gl.FALSE,
        scene.object.modelMatrix);

      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.vertexBuffer);
      gl.drawArrays(
        scene.object.primitiveType, 0,
        scene.object.vertexCount);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function(time) {
        render(gl,scene,time,timestamp);
      });
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(shader, document.getElementById(spec.container).text);
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
          throw gl.getShaderInfoLog(shader);
        }
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
        throw gl.getProgramInfoLog(program);
      }
      return program;
    }
    
    function init() {
      var surface = document.getElementById('rendering-surface');
      var gl = surface.getContext('experimental-webgl');
      gl.viewport(0,0,surface.width,surface.height);
      gl.clearColor(1.0, 0.0, 0.0, 1.0);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]
      );

      var squareVertices = [
        +0.75, +0.75, 0.0, +1.0, +0.0,
        -0.75, +0.75, 0.0, +1.0, +1.0,
        +0.75, -0.75, 0.0, +0.0, +1.0,
        -0.75, -0.75, 0.0, +0.5, +0.5
      ];
             
      gl.useProgram(program);

      var square = {
        vertexCount: 4,
        primitiveType: gl.TRIANGLE_STRIP,
        vertices: squareVertices
      };

      var vertexBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);

      program.positionAttribute = gl.getAttribLocation(program, 'pos');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.vertexAttribPointer(
        program.positionAttribute, 2, gl.FLOAT, false,
        Float32Array.BYTES_PER_ELEMENT * 5, 0);
      program.colourAttribute = gl.getAttribLocation(program, 'colour');
      gl.enableVertexAttribArray(program.colourAttribute);
      gl.vertexAttribPointer(
        program.colourAttribute, 3, gl.FLOAT, false,
        Float32Array.BYTES_PER_ELEMENT * 5,
        Float32Array.BYTES_PER_ELEMENT * 2);

      gl.bufferData(
        gl.ARRAY_BUFFER,
        new Float32Array(squareVertices),
        gl.STATIC_DRAW);

      square.vertexBuffer = vertexBuffer;

      program.timeUniform = gl.getUniformLocation(program, 'time');

      var projectionMatrix = mat4.create();
      mat4.perspective(
        projectionMatrix, 0.75, surface.width/surface.height, 0.1, 100
      );
      program.projectionMatrixUniform = gl.getUniformLocation(
        program, 'projectionMatrix'
      );
      gl.uniformMatrix4fv(
        program.projectionMatrixUniform, gl.FALSE, projectionMatrix
      );

      var viewMatrix = mat4.create();
      program.viewMatrixUniform = gl.getUniformLocation(program, 'viewMatrix');
      gl.uniformMatrix4fv(program.viewMatrixUniform, gl.FALSE, viewMatrix);

      var modelMatrix = mat4.create();
      mat4.identity(modelMatrix);
      mat4.translate(modelMatrix, modelMatrix, [0, 0, -5]);
      program.modelMatrixUniform = gl.getUniformLocation(
        program, 'modelMatrix'
      );
      gl.uniformMatrix4fv(
        program.modelMatrixUniform, gl.FALSE, modelMatrix
      );

      square.modelMatrix = modelMatrix;

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);

      var scene = {
        program: program,
        object: square,
        start: Date.now(),
        projectionMatrix: projectionMatrix,
        viewMatrix: viewMatrix
      };

      requestAnimationFrame(function(timestamp) {
        render(gl, scene, timestamp, 0);
      });
    }
    </script>
  </head>
  <body onLoad="init()">
    <canvas id="rendering-surface" height="500" width="500"/>
  </body>
</html>

What’s New

The <script> tag that pulls in gl-matrix.js.

The vertex shader now has three new uniforms of type mat4 to hold the model, view and projection matrices. Those matrices are created in the init function. Since the view (the movement of the world around the camera) and the projection (the mapping of the 3D space onto the 2D screen) are pretty much static in this setup, I set them on the GLSL program immediately with a call to uniformMatrix4fv, and keep them as properties of the scene, just in case I ever need them later. Since I never update them, they’re never uploaded again.

The projection matrix is initialised with a call to mat4.perspective. The first argument is the matrix itself, which is where the values will be written. The second argument, 0.75, is the vertical field of view, in radians. The third, surface.width/surface.height is the aspect ratio. The last two values define the near and far bounds in the viewing frustum, which I’ll talk about later. Documentation for all of the functions gl-matrix.js provides can be found here.
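
For reference, here’s that call again with each parameter annotated:

    mat4.perspective(
      projectionMatrix,               // output: values are written here
      0.75,                           // vertical field of view, in radians
      surface.width/surface.height,   // aspect ratio
      0.1,                            // near bound of the frustum
      100                             // far bound of the frustum
    );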

The model matrix (manipulating the object’s place in the world) is going to change each frame, with a rotation. I assign it as a property of the object, not the scene, as it’s entirely possible (indeed, likely) that multiple objects in a scene would be subject to different model transforms.

The render function now rotates the model matrix slightly with a call to mat4.rotate, and uploads the modified matrix with another call to uniformMatrix4fv.

A Note on Time

This gets us a spinning square, but the hard-coded rotation angle in the render function might have you frowning. What if it’s running on hardware that can only give us 30fps? 15? The rotation speed will be halved, then quartered. Yuck. What we want is to rotate by an amount calculated from the time taken since the last frame was drawn. This is where a useful feature of requestAnimationFrame comes in: the callback receives an argument that we’ve not been using so far, a timestamp (in milliseconds) since the current browser tab was created.

Armed with that, the requestAnimationFrame call at the end of the init function becomes:

      requestAnimationFrame(function(timestamp) {
        render(gl, scene, timestamp, 0);
      });

while the render function is now:

    function render(gl,scene,timestamp,previousTimestamp) {

      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);

      gl.uniform1f(scene.program.timeUniform, timestamp);

      var delta = (0.5 * Math.PI) * ((timestamp - previousTimestamp) / 1000);
      mat4.rotate(
        scene.object.modelMatrix,
        scene.object.modelMatrix,
        delta, [0,1,0]);
      gl.uniformMatrix4fv(
        scene.program.modelMatrixUniform, false,
        scene.object.modelMatrix);

      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.vertexBuffer);
      gl.drawArrays(
        scene.object.primitiveType, 0,
        scene.object.vertexCount);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function(time) {
        render(gl,scene,time,timestamp);
      });
    }

Note that delta is now the product of a fixed angular velocity (a quarter-turn per second) and the gap between timestamp and previousTimestamp, so the on-screen speed stays constant regardless of frame rate: at 60fps the gap is roughly 16.7ms and each frame rotates by about 0.026 radians; at 30fps the gap doubles, and so does the per-frame rotation. The requestAnimationFrame call at the end rolls the stamps forward into the next call.

Very handy.

Next Up: The Spinning Cube

WebGL from Scratch: Touching on Animation

This is normally when we'd move into the third dimension, but I'm going to have a quick post about animation instead. Why? Because it's the last thing I'm going to be able to do without pulling in an external JavaScript file.

When we're given the chance to draw during the browser's rendering loop (via requestAnimationFrame), we also get the opportunity to alter the model that we're about to draw. But how? A simple approach would be to retain the vertex data for your meshes, bump the values with each call, and upload the altered structure to the GPU for every frame. Let's give this a whirl.

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="x-shader/x-vertex" id="vertex-shader">
    precision mediump float;
    attribute vec2 pos;
    attribute vec3 colour;
    varying vec3 col;
    void main() {
      col = colour;
      gl_Position = vec4(pos.xy, 0.0, 1.0);
    }
    </script>
    <script type="x-shader/x-fragment" id="fragment-shader">
    precision mediump float;
    varying vec3 col;
    void main() {
      gl_FragColor = vec4(col, 1.0);
    }
    </script>
    <script type="text/javascript">

    function render(gl,scene) {
      scene.time = Date.now() - scene.start;

      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);

      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.vertexBuffer);
      gl.drawArrays(scene.object.primitiveType, 0, scene.object.vertexCount);

      update(gl, scene);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function() {
        render(gl,scene);
      });
    }

    function update(gl, scene) {
      var object = scene.object;
      // copy the original vertex data, leaving the reference copy untouched
      var vertices = new Float32Array(object.vertices);
      // every fifth element, starting at index 1, is a Y-coordinate
      for ( var i = 1 ; i < vertices.length ; i += 5 ) {
          vertices[i] = object.vertices[i] + (Math.sin(scene.time * Math.PI / 1800.0) / 4.0);
      }
      // DYNAMIC_DRAW hints to the driver that this buffer is rewritten every frame
      gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.DYNAMIC_DRAW);
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(shader, document.getElementById(spec.container).text);
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) throw gl.getShaderInfoLog(shader);
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) throw gl.getProgramInfoLog(program);
      return program;
    }
    
    function init() {
      var surface = document.getElementById('rendering-surface');
      var gl = surface.getContext('webgl') || surface.getContext('experimental-webgl');
      gl.viewport(0,0,surface.width,surface.height);
      gl.clearColor(1.0, 0.0, 0.0, 1.0);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]
      );

      var squareVertices = [
        // x, y, r, g, b
        +0.75, +0.75, 0.0, +1.0, +0.0,
        -0.75, +0.75, 0.0, +1.0, +1.0,
        +0.75, -0.75, 0.0, +0.0, +1.0,
        -0.75, -0.75, 0.0, +0.5, +0.5
      ];
             
      gl.useProgram(program);

      var square = {
        vertexCount: 4,
        primitiveType: gl.TRIANGLE_STRIP,
        vertices: squareVertices
      };

      var vertexBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);

      program.positionAttribute = gl.getAttribLocation(program, 'pos');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.vertexAttribPointer(program.positionAttribute, 2, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 5, 0);
      program.colourAttribute = gl.getAttribLocation(program, 'colour');
      gl.enableVertexAttribArray(program.colourAttribute);
      gl.vertexAttribPointer(program.colourAttribute, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 5, Float32Array.BYTES_PER_ELEMENT * 2);

      // DYNAMIC_DRAW: this buffer is re-uploaded every frame by update()
      gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(squareVertices), gl.DYNAMIC_DRAW);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      
      square.vertexBuffer = vertexBuffer;

      var scene = {
        program: program,
        object: square,
        start: Date.now()
      };

      requestAnimationFrame(function() {
        render(gl, scene);
      });
    }
    </script>
  </head>
  <body onLoad="init()">
    <canvas id="rendering-surface" height="500" width="500"></canvas>
  </body>
</html>

With this, you’ll see the square from the previous example move up and down, tracing a sine wave.

The only changes here are the addition of the update function—which treats the initial vertex array as a read-only reference—and the retention of the raw vertex data as a property of the object. The update function perturbs each Y-coordinate based on the time stored in the scene and uploads the new mesh to the GPU.

This works, but if there's something niggling you about efficiency and the uploading of essentially the same vertex data sixty times per second, you're on the right track. What if it were possible to upload the vertex data once, have the GPU remember it, and then just upload the one thing that's actually changing frame by frame: the time? Couldn't the GPU do the vertex manipulation on its own?

Well, yes.

<!doctype html>
<html>
  <head>
    <title>Hacking WebGL</title>
    <script type="x-shader/x-vertex" id="vertex-shader">
    precision mediump float;
    attribute vec2 pos;
    attribute vec3 colour;
    varying vec3 col;
    uniform float time;
    void main() {
      col = colour;
      gl_Position = vec4(pos.x, pos.y + (sin(time * 3.14159 / 1800.0) / 4.0), 0.0, 1.0);
    }
    </script>
    <script type="x-shader/x-fragment" id="fragment-shader">
    precision mediump float;
    varying vec3 col;
    void main() {
      gl_FragColor = vec4(col, 1.0);
    }
    </script>
    <script type="text/javascript">

    function render(gl,scene) {
      scene.time = Date.now() - scene.start;
      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.useProgram(scene.program);
      gl.uniform1f(scene.program.timeUniform, scene.time);
      gl.bindBuffer(gl.ARRAY_BUFFER, scene.object.vertexBuffer);
      gl.drawArrays(scene.object.primitiveType, 0, scene.object.vertexCount);
      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      requestAnimationFrame(function() {
        render(gl,scene);
      });
    }

    function createProgram(gl, shaderSpecs) {
      var program = gl.createProgram();
      for ( var i = 0 ; i < shaderSpecs.length ; i++ ) {
        var spec = shaderSpecs[i];
        var shader = gl.createShader(spec.type);
        gl.shaderSource(shader, document.getElementById(spec.container).text);
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) throw gl.getShaderInfoLog(shader);
        gl.attachShader(program, shader);
        gl.deleteShader(shader);
      }
      gl.linkProgram(program);
      if (!gl.getProgramParameter(program, gl.LINK_STATUS)) throw gl.getProgramInfoLog(program);
      return program;
    }
    
    function init() {
      var surface = document.getElementById('rendering-surface');
      var gl = surface.getContext('webgl') || surface.getContext('experimental-webgl');
      gl.viewport(0,0,surface.width,surface.height);
      gl.clearColor(1.0, 0.0, 0.0, 1.0);

      var program = createProgram(
        gl,
        [{container: 'vertex-shader', type: gl.VERTEX_SHADER},
         {container: 'fragment-shader', type: gl.FRAGMENT_SHADER}]
      );

      var squareVertices = [
        +0.75, +0.75, 0.0, +1.0, +0.0,
        -0.75, +0.75, 0.0, +1.0, +1.0,
        +0.75, -0.75, 0.0, +0.0, +1.0,
        -0.75, -0.75, 0.0, +0.5, +0.5
      ];
             
      gl.useProgram(program);

      var square = {
        vertexCount: 4,
        primitiveType: gl.TRIANGLE_STRIP
      };

      var vertexBuffer = gl.createBuffer();
      gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);

      program.positionAttribute = gl.getAttribLocation(program, 'pos');
      gl.enableVertexAttribArray(program.positionAttribute);
      gl.vertexAttribPointer(program.positionAttribute, 2, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 5, 0);
      program.colourAttribute = gl.getAttribLocation(program, 'colour');
      gl.enableVertexAttribArray(program.colourAttribute);
      gl.vertexAttribPointer(program.colourAttribute, 3, gl.FLOAT, false, Float32Array.BYTES_PER_ELEMENT * 5, Float32Array.BYTES_PER_ELEMENT * 2);

      program.timeUniform = gl.getUniformLocation(program, 'time');

      gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(squareVertices), gl.STATIC_DRAW);

      gl.bindBuffer(gl.ARRAY_BUFFER, null);
      gl.useProgram(null);
      
      square.vertexBuffer = vertexBuffer;

      var scene = {
        program: program,
        object: square,
        start: Date.now()
      };

      requestAnimationFrame(function() {
        render(gl, scene);
      });
    }
    </script>
  </head>
  <body onLoad="init()">
    <canvas id="rendering-surface" height="500" width="500"></canvas>
  </body>
</html>

Here, I introduce a shader variable called a 'uniform'. Uniforms are like attributes in that their values are supplied by the application, but, as their name implies, their values do not change over the course of a single draw call. In contrast, attributes are effectively mapped onto slices of the vertex data array uploaded with the bufferData call. Uniforms are set in a more straightforward manner with functions like uniform1f, uniform3fv, and so on. The suffix describes the arguments: uniform1f expects a single float, uniform3f expects three floats, and uniform3fv expects a 3-element vector of floats (i.e. a Float32Array). It's better to check the docs for the full list of variants than for me to list them here.
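To make that concrete, here's a quick sketch; the 'time' uniform matches the shader above, while the vec3 'tint' uniform is invented purely for illustration:

      // look each location up once, after linking...
      var timeLocation = gl.getUniformLocation(program, 'time');
      var tintLocation = gl.getUniformLocation(program, 'tint'); // hypothetical
      // ...then set values whenever they need to change:
      gl.uniform1f(timeLocation, 16.7);
      gl.uniform3f(tintLocation, 1.0, 0.5, 0.25);
      gl.uniform3fv(tintLocation, new Float32Array([1.0, 0.5, 0.25])); // equivalent to the line above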

Back to the vertex shader, where I'm using the time variable in a calculation to adjust the final Y-coordinate of the point. That's a point worth reiterating: the vertex shader can adjust, replace or ignore any or all components of an input vertex; it's the output of the vertex shader (via gl_Position) that determines where a point appears on-screen. In this case, a sine function with some fudging gives a smooth up/down motion. This is where you can really go to town: GLSL provides a whole range of functions that you can use to mess around with the coordinates that the vertex shader generates. For example, try creating a bounce effect by wrapping the adjusted Y-coordinate in an abs() call: you'll see the shape bounce off an invisible hard floor.
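That bounce amounts to a one-line change in the vertex shader's main function:

    void main() {
      col = colour;
      // abs() folds the downward half of the motion back up,
      // so the shape appears to rebound off a hard floor
      gl_Position = vec4(pos.x, abs(pos.y + (sin(time * 3.14159 / 1800.0) / 4.0)), 0.0, 1.0);
    }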

As with attribute values, uniform values come from the application. To produce a smooth animation, the uniform is updated every frame from the render() call, with a value derived from the current time.

Again as with attributes, setting a uniform requires knowing its location, which can be obtained by getUniformLocation(program,uniformName).

A Brief Note on Performance

Since the data is uploaded to the GPU once only and never actually modified, and the only thing that changes is the time uniform, I've deleted the update function: its functionality has been taken over, more elegantly, by the vertex shader. Note that this means the displacement calculation now happens on the GPU, not in JavaScript. This is a negligible saving for a simple shape, but becomes significant if you upload a mesh with thousands of triangles. JavaScript performance is advancing by leaps and bounds, but for bulk processing of vertex and pixel data it will never touch massively parallel GLSL code running on the GPU.

A side effect of this is that we're moving something on the GPU in a way that the CPU is completely unaware of. All that's going to the card every frame is a few binding requests and an updated uniform: the GPU has the mesh, the underlying vertex data, and the mapping from that data into shader attributes. In other words, all those pixels are being updated and moved around for as close to no CPU work as is feasible. With any kind of hardware acceleration, that work's being done effectively for free.

Next Up: The Third Dimension

WebGL from Scratch: An Aside on Primitive Types and Winding

Primitive Types

In the post on drawing a 2D shape, I specified TRIANGLE_STRIP as the primitive type. In a strip, the first three vertices define a triangle, and each additional vertex defines a new triangle constructed from it and the previous two. See the Wikipedia article for some pictures. My triangle strip was tiny, but there's no real limit (outside of available memory) on how long a single strip can be.
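To make the vertex-sharing explicit, a six-vertex strip breaks down like this:

      // A TRIANGLE_STRIP over vertices v0..v5 yields four triangles,
      // each one reusing the previous two vertices:
      //   (v0,v1,v2), (v1,v2,v3), (v2,v3,v4), (v3,v4,v5)
      // (WebGL flips the winding of every second triangle internally,
      //  so that all of them end up facing the same way)
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 6);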

I could have chosen to specify the square with the TRIANGLES primitive instead, at which point I would have had to supply six vertices, with each set of three forming a complete triangle.
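A sketch of that alternative, with both triangles wound counter-clockwise (more on why below):

      var squareVertices = [
        // first triangle
        +0.75, +0.75,
        -0.75, +0.75,
        +0.75, -0.75,
        // second triangle
        -0.75, +0.75,
        -0.75, -0.75,
        +0.75, -0.75
      ];
      // ...drawn with gl.drawArrays(gl.TRIANGLES, 0, 6);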

The other primitive types available in WebGL are POINTS, LINES, LINE_STRIP, LINE_LOOP and TRIANGLE_FAN, but I won’t replicate what you can find with a quick search, as I only need TRIANGLE_STRIP and TRIANGLES just now.

Winding

Also in that post, it may have seemed as though I specified the vertices in a somewhat arbitrary order. As a refresher, this is how they looked:

      var squareVertices = [
        +0.75, +0.75,
        -0.75, +0.75,
        +0.75, -0.75,
        -0.75, -0.75
      ];

In WebGL’s coordinate system, this means that I ordered the vertices like this:

quad-from-triangles

i.e. a Z flipped around the Y axis. The reason for that is something called winding, which defines the order—clockwise or counter-clockwise—in which a triangle's vertices are specified. This becomes important when drawing a solid mesh made of many triangles, at which point large performance savings can be had by simply not drawing anything that has its back turned to the camera. That facing is determined by taking the cross-product of vectors formed from the triangle's vertices, and doing that requires knowing which way the vertices are wound. I'll get onto that later.
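None of these posts enables it yet, but for reference this is how back-face culling is switched on; the values shown are WebGL's defaults:

      gl.enable(gl.CULL_FACE); // skip drawing one side of each triangle
      gl.cullFace(gl.BACK);    // discard back-facing triangles
      gl.frontFace(gl.CCW);    // counter-clockwise winding means front-facing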

For now, all you need to do is make sure that, when defining your own shapes, the winding order for each triangle is counter-clockwise. It doesn't matter just yet—by default, WebGL draws both sides of every triangle—but it's a good habit to get into early.