I'm using Unity to build my app to WebGL, and I've found myself needing to grab video from an HTML player and draw it on a plane in the 3D space.
I know you can call drawImage() on a CanvasRenderingContext2D and pass in a reference to the video element, and the current frame will be drawn to the canvas when the function runs.
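For reference, this is the 2D version I mean (roughly, assuming a <video> and a <canvas> are already on the page):
var video = document.querySelector("video");
var canvas = document.querySelector("canvas");
var ctx = canvas.getContext("2d");
ctx.drawImage(video, 0, 0, canvas.width, canvas.height); // copies the current video frame onto the canvas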
The closest 3D equivalent I've been able to find is WebGL2RenderingContext.texImage3D(). However, I don't entirely understand how it works, and when I tried testing it, I got the following exception: Uncaught DOMException: The operation is insecure.
I used my own local video file, so it can't be CORS, but I don't know what the cause is.
You can see the test project in this GitHub repo.
I found a similar question here, but unfortunately the answers show how to draw what I assume are pre-loaded textures. I don't know how to grab such a texture from the video player and pass it in, or whether doing that every frame would be fast enough.
To give some context, I'm trying to show an HLS live stream inside my Unity/WebGL app. I could download the .ts (MPEG-2 Transport Stream) video segments and queue them up into a coherent video stream, but Unity's built-in video player doesn't support this format.
As a solution, I thought I might play the video in an HTML5 player (using hls.js if necessary) and inject the texture into the WebGL app, via JavaScript, every frame.
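On the HTML side, that part would look roughly like this (the stream URL is just a placeholder):
var video = document.querySelector("video");
var streamUrl = "https://example.com/live/stream.m3u8"; // placeholder URL
if (Hls.isSupported()) {
  var hls = new Hls();
  hls.loadSource(streamUrl);
  hls.attachMedia(video); // hls.js feeds the .ts segments into the <video> element
} else if (video.canPlayType("application/vnd.apple.mpegurl")) {
  video.src = streamUrl; // Safari can play HLS natively
}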
Unity lets you run JavaScript code from inside its C# scripts, so timing probably won't be an issue, nor will getting the world scale/location of the target plane. I just need to write the JavaScript function that actually draws the texture.
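For the Unity side, the rough shape I'm imagining is a .jslib plugin like this (UploadVideoFrame is a name I made up; GLctx and GL.textures are guesses about what Unity's Emscripten layer exposes, and I haven't verified any of it; the C# script would call it via [DllImport("__Internal")] and pass in Texture2D.GetNativeTexturePtr()):
// VideoFrame.jslib -- hypothetical Unity WebGL plugin
mergeInto(LibraryManager.library, {
  UploadVideoFrame: function (texId) {
    var video = document.querySelector("video");
    if (!video || video.readyState < 3) return;        // no frame available yet
    var gl = GLctx;                                     // assumption: the WebGL context exposed by Unity's Emscripten runtime
    gl.bindTexture(gl.TEXTURE_2D, GL.textures[texId]);  // assumption: the native texture pointer indexes GL.textures
    gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
    gl.bindTexture(gl.TEXTURE_2D, null);
  }
});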
Here is my current code:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebGL</title>
<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.10.1/dist/ffmpeg.min.js"></script>
<style>
body {
background-color: aquamarine;
}
</style>
</head>
<body>
<video muted autoplay width="480" height="270">
<source src="./test.mp4" type="video/mp4" />
</video>
<br>
<canvas width="1080" height="720"></canvas>
<button onclick="takeScreenshot()">Capture</button>
<script>
function takeScreenshot() {
var video = document.querySelector("video");
var canvas = document.querySelector("canvas");
var gl = canvas.getContext("webgl2");
gl.texImage3D(
gl.TEXTURE_3D, // target (enum)
0, // level of detail
gl.RGBA, // internalFormat
1920, // width of texture
1080, // height of texture
1, // depth
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
video, // source
);
}
</script>
</body>
</html>
Here is example code to set up a WebGL object (a plane) that can receive your video's pixels.
Basically:
Create a box/rectangle shape using two triangles...
Then project the video pixels onto that rectangle (as a texture map).
0-------1
|       |
3-------2
//# two triangles, each listed as three corner indices
var vertexIndices = [ 0, 1, 2,   0, 2, 3, ];
The example code below also creates the required GPU shaders and program.
Experiment with it.
If you just want to do GPU pixel effects, write your effect code in the fragment shader (see the part marked //# example of basic colour effect).
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebGL</title>
<!--
<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.10.1/dist/ffmpeg.min.js"></script>
-->
<style> body {background-color: aquamarine; } </style>
</head>
<body>
<video id="myVideo" controls muted autoplay width="480" height="270">
<source src="video.mp4" type="video/mp4" />
</video>
<br>
<button id="capture" onclick="takeScreenshot()"> Capture </button>
<br><br>
<!--
<canvas id="myCanvas" width="1080" height="720"></canvas>
-->
<canvas id="myCanvas" width="480" height="270"></canvas>
<!-- ########## Shader code ###### -->
<!-- ### Shader code here -->
<!-- Fragment shader program -->
<script id="shader-fs" type="x-shader/x-fragment">
//# code for pixel effects goes here if needed
//# these two vars give access to the interpolated quad position and the video texture
varying mediump vec2 vDirection;
uniform sampler2D uSampler;
void main(void)
{
//# get current video pixel's color (no FOR-loops needed like in JS Canvas)
gl_FragColor = texture2D(uSampler, vec2(vDirection.x * 0.5 + 0.5, vDirection.y * 0.5 + 0.5));
/*
//# example of basic colour effect
gl_FragColor.r = ( gl_FragColor.r * 1.15 );
gl_FragColor.g = ( gl_FragColor.g * 0.8 );
gl_FragColor.b = ( gl_FragColor.b * 0.45 );
*/
}
</script>
<!-- Vertex shader program -->
<script id="shader-vs" type="x-shader/x-vertex">
attribute mediump vec2 aVertexPosition;
varying mediump vec2 vDirection;
void main( void )
{
//# the * 2.0 cancels out in the perspective divide (w becomes 2.0), so the unit quad still fills clip space
gl_Position = vec4(aVertexPosition, 1.0, 1.0) * 2.0;
vDirection = aVertexPosition;
}
</script>
<!-- ### END Shader code... -->
<script>
//# WebGL setup
var video = document.getElementById('myVideo');
const glcanvas = document.getElementById('myCanvas');
const gl = ( ( glcanvas.getContext("webgl") ) || ( glcanvas.getContext("experimental-webgl") ) );
//# check if WebGL is available..
if (gl && gl instanceof WebGLRenderingContext) { console.log( "WebGL is available"); }
else { console.log( "WebGL is NOT available" ); } //# use regular JS canvas functions if this happens...
//# create and attach the shader program to the webGL context
var attributes, uniforms, program;
function attachShader( params )
{
var fragmentShader = getShaderByName(params.fragmentShaderName);
var vertexShader = getShaderByName(params.vertexShaderName);
program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS))
{ alert("Unable to initialize the shader program: " + gl.getProgramInfoLog(program)); }
gl.useProgram(program);
// get the location of attributes and uniforms
attributes = {};
for (var i = 0; i < params.attributes.length; i++)
{
var attributeName = params.attributes[i];
attributes[attributeName] = gl.getAttribLocation(program, attributeName);
gl.enableVertexAttribArray(attributes[attributeName]);
}
uniforms = {};
for (i = 0; i < params.uniforms.length; i++)
{
var uniformName = params.uniforms[i];
uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
}
}
function getShaderByName( id )
{
var shaderScript = document.getElementById(id);
var theSource = "";
var currentChild = shaderScript.firstChild;
while(currentChild)
{
if (currentChild.nodeType === 3) { theSource += currentChild.textContent; }
currentChild = currentChild.nextSibling;
}
var result;
if (shaderScript.type === "x-shader/x-fragment")
{ result = gl.createShader(gl.FRAGMENT_SHADER); }
else { result = gl.createShader(gl.VERTEX_SHADER); }
gl.shaderSource(result, theSource);
gl.compileShader(result);
if (!gl.getShaderParameter(result, gl.COMPILE_STATUS))
{
alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(result));
return null;
}
return result;
}
//# attach shader
attachShader({
fragmentShaderName: 'shader-fs',
vertexShaderName: 'shader-vs',
attributes: ['aVertexPosition'],
uniforms: ['someVal', 'uSampler'],
});
// some webGL initialization
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clearDepth(1.0);
gl.disable(gl.DEPTH_TEST);
var positionsBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
var positions = [
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
var verticesIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);
var vertexIndices = [ 0, 1, 2, 0, 2, 3, ];
gl.bufferData(
gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(vertexIndices), gl.STATIC_DRAW
);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
//# LINEAR gives smooth scaling; with a non-power-of-two video texture the MIN filter must not use mipmaps (LINEAR or NEAREST) and wrapping must be CLAMP_TO_EDGE
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
// update the texture from the video
var updateTexture = function()
{
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
//# the next line throws a security error (e.g. "The operation is insecure") if the video is not served from the same origin as this page, or lacks CORS headers plus a crossorigin attribute on the <video> tag
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB,
gl.UNSIGNED_BYTE, video);
gl.bindTexture(gl.TEXTURE_2D, null);
};
</script>
<script>
//# Vars for video frame grabbing when system/browser provides a new frame
var requestAnimationFrame = (window.requestAnimationFrame || window.mozRequestAnimationFrame ||
window.webkitRequestAnimationFrame || window.msRequestAnimationFrame);
var cancelAnimationFrame = (window.cancelAnimationFrame || window.mozCancelAnimationFrame);
///////////////////////////////////////////////
function takeScreenshot( )
{
//# video is ready (can display pixels)
if( video.readyState >= 3 )
{
updateTexture(); //# update pixels with current video frame's pixels...
gl.useProgram(program); //# apply our program
gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
gl.vertexAttribPointer(attributes['aVertexPosition'], 2, gl.FLOAT, false, 0, 0);
//# Specify the texture to map onto the faces.
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(uniforms['uSampler'], 0);
//# Draw GPU
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
}
//# re-capture the next frame... the function re-schedules itself
//# to stop the loop (e.g. when the video pauses), store the id returned by requestAnimationFrame and pass it to cancelAnimationFrame(id)
requestAnimationFrame( takeScreenshot );
}
//////////////////////////////////////
//# kept for reference: the original (non-working) texImage3D attempt from the question
function takeScreenshot_old()
{
var gl = glcanvas.getContext("webgl2"); //# returns null here, because the canvas already has a "webgl" context
gl.texImage3D(
gl.TEXTURE_3D, // target (enum)
0, // level of detail
gl.RGBA, // internalFormat
1920, // width of texture
1080, // height of texture
1, // depth
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
video, // source
);
}
</script>
</body>
</html>
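Note: takeScreenshot() re-schedules itself with requestAnimationFrame, so clicking Capture once starts a loop that keeps copying the latest video frame onto the plane. If you would rather start the loop automatically, a minimal sketch using the standard "playing" media event would be:
video.addEventListener("playing", function () {
  requestAnimationFrame(takeScreenshot); // begin copying frames as soon as playback starts
});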