Here is code which creates a 3D video cube using WebGL and HTML5 video.
The spinning cube is generated by the following functions: main(), render(), drawScene(), requestAnimationFrame(), initShaderProgram(), loadShader(), initTexture(), updateTexture() and setupVideo().
First up, main() grabs the canvas element with the ID “glcanvas”, sets up a WebGL context on it, and displays an error message if WebGL is not supported.
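The code assumes the page already contains a canvas with that ID. If yours doesn't, a small sketch like the one below (an assumption on my part, not part of the original sample, with an arbitrary 640×480 size) can create it from script before main() runs:

// Assumed setup: create the canvas the sample expects if the page doesn't already have one.
if (!document.querySelector("#glcanvas")) {
  const canvas = document.createElement("canvas");
  canvas.id = "glcanvas";
  canvas.width = 640;  // any size works; drawScene() uses the canvas aspect ratio
  canvas.height = 480;
  document.body.appendChild(canvas);
}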
The first few lines of main() set the clear colour for the canvas and clear the colour buffer.
The next section defines the vertex shader and the fragment shader. The vertex shader applies simple lighting to the vertices of the 3D cube. The fragment shader applies the texture and colour to the surface of the cube.
The initShaderProgram() function compiles and links the two shaders into a shader program, so WebGL knows how to draw our data. The programInfo object collects the attribute and uniform locations the rest of the code needs in order to use that program.
loadShader() – creates a shader of the given type, uploads the source and compiles it.
initTexture() – creates the texture object; in this case the texture will hold video frames, so it starts out as a single blue pixel until the video is ready to be copied.
updateTexture() – copies the current video frame into the texture each time the scene is drawn.
setupVideo() – creates a hidden, muted video element, starts playback and loops the video indefinitely; once it is playing and producing frames, copyVideo is set to true.
You can download the full source code here.
let cubeRotation = 0.0;
let deltaTime = 0;

// will be set to true when the video can be copied to the texture
let copyVideo = false;

main();

// start here
function main() {
  const canvas = document.querySelector("#glcanvas");

  // Initialize the GL context
  const gl = canvas.getContext("webgl");

  // Only continue if WebGL is available and working
  if (gl === null) {
    alert(
      "Unable to initialize WebGL. Your browser or machine may not support it."
    );
    return;
  }

  // Set clear color to black, fully opaque
  gl.clearColor(0.0, 0.0, 0.0, 1.0);
  // Clear the color buffer with specified clear color
  gl.clear(gl.COLOR_BUFFER_BIT);

  // Vertex shader program
  const vsSource = `
    attribute vec4 aVertexPosition;
    attribute vec3 aVertexNormal;
    attribute vec2 aTextureCoord;

    uniform mat4 uNormalMatrix;
    uniform mat4 uModelViewMatrix;
    uniform mat4 uProjectionMatrix;

    varying highp vec2 vTextureCoord;
    varying highp vec3 vLighting;

    void main(void) {
      gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;
      vTextureCoord = aTextureCoord;

      // Apply lighting effect
      highp vec3 ambientLight = vec3(0.3, 0.3, 0.3);
      highp vec3 directionalLightColor = vec3(1, 1, 1);
      highp vec3 directionalVector = normalize(vec3(0.85, 0.8, 0.75));

      highp vec4 transformedNormal = uNormalMatrix * vec4(aVertexNormal, 1.0);

      highp float directional = max(dot(transformedNormal.xyz, directionalVector), 0.0);
      vLighting = ambientLight + (directionalLightColor * directional);
    }
  `;

  // Fragment shader program
  const fsSource = `
    varying highp vec2 vTextureCoord;
    varying highp vec3 vLighting;

    uniform sampler2D uSampler;

    void main(void) {
      highp vec4 texelColor = texture2D(uSampler, vTextureCoord);

      gl_FragColor = vec4(texelColor.rgb * vLighting, texelColor.a);
    }
  `;

  // Initialize a shader program; this is where all the lighting
  // for the vertices and so forth is established.
  const shaderProgram = initShaderProgram(gl, vsSource, fsSource);

  // Collect all the info needed to use the shader program.
  // Look up which attributes our shader program is using
  // for aVertexPosition, aVertexNormal and aTextureCoord,
  // and also look up uniform locations.
  const programInfo = {
    program: shaderProgram,
    attribLocations: {
      vertexPosition: gl.getAttribLocation(shaderProgram, "aVertexPosition"),
      vertexNormal: gl.getAttribLocation(shaderProgram, "aVertexNormal"),
      textureCoord: gl.getAttribLocation(shaderProgram, "aTextureCoord"),
    },
    uniformLocations: {
      projectionMatrix: gl.getUniformLocation(
        shaderProgram,
        "uProjectionMatrix"
      ),
      modelViewMatrix: gl.getUniformLocation(shaderProgram, "uModelViewMatrix"),
      normalMatrix: gl.getUniformLocation(shaderProgram, "uNormalMatrix"),
      uSampler: gl.getUniformLocation(shaderProgram, "uSampler"),
    },
  };

  // Here's where we call the routine that builds all the
  // objects we'll be drawing.
  const buffers = initBuffers(gl);

  const texture = initTexture(gl);

  const video = setupVideo("abstract_-_93849 (720p).mp4");

  // Flip image pixels into the bottom-to-top order that WebGL expects.
  gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

  let then = 0;

  // Draw the scene repeatedly
  function render(now) {
    now *= 0.001; // convert to seconds
    deltaTime = now - then;
    then = now;

    if (copyVideo) {
      updateTexture(gl, texture, video);
    }

    drawScene(gl, programInfo, buffers, texture, cubeRotation);
    cubeRotation += deltaTime;

    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);
}

//
// Initialize a shader program, so WebGL knows how to draw our data
//
function initShaderProgram(gl, vsSource, fsSource) {
  const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
  const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);

  // Create the shader program
  const shaderProgram = gl.createProgram();
  gl.attachShader(shaderProgram, vertexShader);
  gl.attachShader(shaderProgram, fragmentShader);
  gl.linkProgram(shaderProgram);

  // If creating the shader program failed, alert
  if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
    alert(
      `Unable to initialize the shader program: ${gl.getProgramInfoLog(
        shaderProgram
      )}`
    );
    return null;
  }

  return shaderProgram;
}

//
// creates a shader of the given type, uploads the source and
// compiles it.
//
function loadShader(gl, type, source) {
  const shader = gl.createShader(type);

  // Send the source to the shader object
  gl.shaderSource(shader, source);

  // Compile the shader program
  gl.compileShader(shader);

  // See if it compiled successfully
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    alert(
      `An error occurred compiling the shaders: ${gl.getShaderInfoLog(shader)}`
    );
    gl.deleteShader(shader);
    return null;
  }

  return shader;
}

function initTexture(gl) {
  const texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);

  // Because the video has to be downloaded over the internet,
  // it might take a moment until it's ready, so
  // put a single pixel in the texture so we can
  // use it immediately.
  const level = 0;
  const internalFormat = gl.RGBA;
  const width = 1;
  const height = 1;
  const border = 0;
  const srcFormat = gl.RGBA;
  const srcType = gl.UNSIGNED_BYTE;
  const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
  gl.texImage2D(
    gl.TEXTURE_2D,
    level,
    internalFormat,
    width,
    height,
    border,
    srcFormat,
    srcType,
    pixel
  );

  // Turn off mips and set wrapping to clamp to edge so it
  // will work regardless of the dimensions of the video.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

  return texture;
}

function updateTexture(gl, texture, video) {
  const level = 0;
  const internalFormat = gl.RGBA;
  const srcFormat = gl.RGBA;
  const srcType = gl.UNSIGNED_BYTE;
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texImage2D(
    gl.TEXTURE_2D,
    level,
    internalFormat,
    srcFormat,
    srcType,
    video
  );
}

function setupVideo(url) {
  const video = document.createElement("video");

  let playing = false;
  let timeupdate = false;

  video.playsInline = true;
  video.muted = true;
  video.loop = true;

  // Waiting for these 2 events ensures
  // there is data in the video
  video.addEventListener(
    "playing",
    () => {
      playing = true;
      checkReady();
    },
    true
  );

  video.addEventListener(
    "timeupdate",
    () => {
      timeupdate = true;
      checkReady();
    },
    true
  );

  video.src = url;
  video.play();

  function checkReady() {
    if (playing && timeupdate) {
      copyVideo = true;
    }
  }

  return video;
}
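The listing above also calls initBuffers() and drawScene(), which are not reproduced in this post but come with the downloadable source. For reference, here is a minimal sketch of what initBuffers() looks like in the standard MDN cube sample this code is based on; treat it as an illustration rather than the exact code from the download. It builds position, normal, texture-coordinate and index buffers for a unit cube:

function initBuffers(gl) {
  // 24 vertices: 4 per face, faces ordered front, back, top, bottom, right, left.
  const positions = [
    -1, -1,  1,   1, -1,  1,   1,  1,  1,  -1,  1,  1, // front
    -1, -1, -1,  -1,  1, -1,   1,  1, -1,   1, -1, -1, // back
    -1,  1, -1,  -1,  1,  1,   1,  1,  1,   1,  1, -1, // top
    -1, -1, -1,   1, -1, -1,   1, -1,  1,  -1, -1,  1, // bottom
     1, -1, -1,   1,  1, -1,   1,  1,  1,   1, -1,  1, // right
    -1, -1, -1,  -1, -1,  1,  -1,  1,  1,  -1,  1, -1, // left
  ];

  // One normal per face, repeated for each of the face's 4 vertices.
  const faceNormals = [
    [0, 0, 1], [0, 0, -1], [0, 1, 0], [0, -1, 0], [1, 0, 0], [-1, 0, 0],
  ];
  const vertexNormals = faceNormals.flatMap((n) => [...n, ...n, ...n, ...n]);

  // Each face maps the full texture.
  const textureCoordinates = Array.from({ length: 6 }, () => [0, 0, 1, 0, 1, 1, 0, 1]).flat();

  // Two triangles per face.
  const indices = [];
  for (let f = 0; f < 6; f++) {
    const o = f * 4;
    indices.push(o, o + 1, o + 2, o, o + 2, o + 3);
  }

  const positionBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);

  const normalBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, normalBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexNormals), gl.STATIC_DRAW);

  const textureCoordBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, textureCoordBuffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoordinates), gl.STATIC_DRAW);

  const indexBuffer = gl.createBuffer();
  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
  gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(indices), gl.STATIC_DRAW);

  return {
    position: positionBuffer,
    normal: normalBuffer,
    textureCoord: textureCoordBuffer,
    indices: indexBuffer,
  };
}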
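And a minimal sketch of drawScene(), again following the standard MDN sample rather than the exact downloadable code. It assumes the gl-matrix library's mat4 is available as a global (the original sample loads it separately) and consumes the buffers object returned by initBuffers() above:

function drawScene(gl, programInfo, buffers, texture, cubeRotation) {
  gl.clearColor(0.0, 0.0, 0.0, 1.0);
  gl.clearDepth(1.0);
  gl.enable(gl.DEPTH_TEST);
  gl.depthFunc(gl.LEQUAL);
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

  // Perspective projection: 45 degree field of view, canvas aspect ratio.
  const fieldOfView = (45 * Math.PI) / 180;
  const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
  const projectionMatrix = mat4.create();
  mat4.perspective(projectionMatrix, fieldOfView, aspect, 0.1, 100.0);

  // Move the cube back from the camera and spin it on three axes.
  const modelViewMatrix = mat4.create();
  mat4.translate(modelViewMatrix, modelViewMatrix, [0.0, 0.0, -6.0]);
  mat4.rotate(modelViewMatrix, modelViewMatrix, cubeRotation, [0, 0, 1]);
  mat4.rotate(modelViewMatrix, modelViewMatrix, cubeRotation * 0.7, [0, 1, 0]);
  mat4.rotate(modelViewMatrix, modelViewMatrix, cubeRotation * 0.3, [1, 0, 0]);

  // Normal matrix for the lighting calculation in the vertex shader.
  const normalMatrix = mat4.create();
  mat4.invert(normalMatrix, modelViewMatrix);
  mat4.transpose(normalMatrix, normalMatrix);

  // Wire up the vertex attributes.
  gl.bindBuffer(gl.ARRAY_BUFFER, buffers.position);
  gl.vertexAttribPointer(programInfo.attribLocations.vertexPosition, 3, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(programInfo.attribLocations.vertexPosition);

  gl.bindBuffer(gl.ARRAY_BUFFER, buffers.normal);
  gl.vertexAttribPointer(programInfo.attribLocations.vertexNormal, 3, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(programInfo.attribLocations.vertexNormal);

  gl.bindBuffer(gl.ARRAY_BUFFER, buffers.textureCoord);
  gl.vertexAttribPointer(programInfo.attribLocations.textureCoord, 2, gl.FLOAT, false, 0, 0);
  gl.enableVertexAttribArray(programInfo.attribLocations.textureCoord);

  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffers.indices);

  gl.useProgram(programInfo.program);

  // Set the shader uniforms.
  gl.uniformMatrix4fv(programInfo.uniformLocations.projectionMatrix, false, projectionMatrix);
  gl.uniformMatrix4fv(programInfo.uniformLocations.modelViewMatrix, false, modelViewMatrix);
  gl.uniformMatrix4fv(programInfo.uniformLocations.normalMatrix, false, normalMatrix);

  // Bind the video texture to texture unit 0 and point the sampler at it.
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.uniform1i(programInfo.uniformLocations.uSampler, 0);

  // 36 indices = 12 triangles = 6 faces.
  gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);
}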
The code is licensed under a Creative Commons licence via Mozilla; the video is under the Pixabay Licence.