I have the following function that is able to generate a thumbnail from a video:
/**
 * Generate a PNG thumbnail (as a data: URL) from a video URL by seeking
 * to 25% of its duration and drawing that frame onto an off-screen canvas.
 *
 * @param {string} videoUrl - URL (or object URL) of the video to sample.
 * @returns {Promise<string>} data: URL containing the PNG thumbnail.
 * @throws {Error} if the video fails to load or decode.
 */
async function getThumbnailForVideo(videoUrl) {
  const video = document.createElement("video");
  const canvas = document.createElement("canvas");
  video.style.display = "none";
  canvas.style.display = "none";
  // Trigger the video load and wait until the seek has completed.
  await new Promise((resolve, reject) => {
    // "canplay" rather than "loadedmetadata": drawing before the first
    // frame is decodable makes Firefox on Windows throw
    // NS_ERROR_NOT_AVAILABLE (see Edit 1 below).
    video.addEventListener(
      "canplay",
      () => {
        video.width = video.videoWidth;
        video.height = video.videoHeight;
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
        // Seek the video to 25%; the "seeked" event resolves the promise.
        video.currentTime = video.duration * 0.25;
      },
      { once: true }
    );
    video.addEventListener("seeked", () => resolve(), { once: true });
    // Surface load/decode failures instead of leaving the promise
    // pending forever (the original never called reject).
    video.addEventListener(
      "error",
      () => reject(new Error(`Failed to load video: ${videoUrl}`)),
      { once: true }
    );
    video.src = videoUrl;
  });
  // Draw the sought frame and serialize it as a PNG data URL.
  canvas
    .getContext("2d")
    .drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
  return canvas.toDataURL("image/png");
}
Paired with URL.createObjectURL
, I am able to generate a thumbnail from a user-selected video file. I have created the following test project on StackBlitz for testing: App Editor App Preview
While this seems to work fine for Chrome and Safari, it seems that Firefox does not respect the EXIF information of a video and as such draws it incorrectly.
The MDN documentation for CanvasRenderingContext2D.drawImage explicitly states that:
drawImage()
will ignore all EXIF metadata in images, including the Orientation. You should detect the Orientation yourself and use rotate() to make it right.
Modernizr hints at a solution via its exiforientation feature detection should I be able to read the rotation data from the file such that I only need to perform the extra transformations on Firefox.
I'm curious, is there a more consistent, browser-agnostic solution to drawing an image from an HTMLVideoElement
on all browsers?
So it turns out that the Modernizr exiforientation
test only checks if an img
element respects the EXIF data of an image, but not if the same image drawn onto a canvas is rendered correctly.
I set out instead to create my own test by drawing a known video on the canvas and testing it. I created the video as so using ffmpeg
:
# Build a 4x4, single-frame test video: a white background with 2x2 red,
# green and blue squares overlaid top-right, bottom-left and bottom-right.
ffmpeg -filter_complex \
"color=color=#ffffff:duration=1us:size=4x4[bg]; \
color=color=#ff0000:duration=1us:size=2x2[r]; \
color=color=#00ff00:duration=1us:size=2x2[g]; \
color=color=#0000ff:duration=1us:size=2x2[b]; \
[bg][r]overlay=x=2:y=0:format=rgb:alpha=premultiplied[bg+r]; \
[bg+r][g]overlay=x=0:y=2:format=rgb:alpha=premultiplied[bg+r+g]; \
[bg+r+g][b]overlay=x=2:y=2:format=rgb:alpha=premultiplied[bg+r+g+b]" \
-map "[bg+r+g+b]" \
-y wrgb-0.mp4
# Copy the stream bits untouched (-c copy) but tag the video stream with
# rotate=180 display metadata, producing the rotated test fixture.
ffmpeg -i wrgb-0.mp4 -c copy -metadata:s:v:0 rotate=180 -y wrgb-180.mp4
Using the same demo, I can see that Chrome and Firefox generate different video previews on the canvas.
Next, I just needed a function such that given an array of RGBA values from the canvas it would spit out the pattern on the canvas:
/**
 * Reduce a flat RGBA byte array (as returned by getImageData().data)
 * to a string with one character per pixel: "w" for near-white pixels,
 * otherwise the dominant channel "r", "g" or "b" (ties resolve in
 * r, g, b order).
 *
 * @param {Uint8ClampedArray|number[]} rgbaData - RGBA bytes, 4 per pixel.
 * @returns {string} the colour pattern, e.g. "bbggbbggrrwwrrww".
 */
function getColourPattern(rgbaData) {
  const letters = [];
  for (let offset = 0; offset < rgbaData.length; offset += 4) {
    const red = rgbaData[offset] / 255;
    const green = rgbaData[offset + 1] / 255;
    const blue = rgbaData[offset + 2] / 255;
    // Alpha (offset + 3) is deliberately ignored.
    const brightness = (red + green + blue) / 3;
    if (brightness > 0.9) {
      letters.push("w");
    } else {
      const strongest = Math.max(red, green, blue);
      if (strongest === red) {
        letters.push("r");
      } else if (strongest === green) {
        letters.push("g");
      } else {
        letters.push("b");
      }
    }
  }
  return letters.join("");
}
This returns bbggbbggrrwwrrww
on Chrome & Safari, and wwrrwwrrggbbggbb
on Firefox (with canvas fingerprinting turned off)
I then used basenc --base64 wrgb-180.mp4 -w 0
to get a base64 representation of the video so that I could embed it into a single test function:
/**
 * Feature-detect whether this browser's canvas drawImage() honours a
 * video's rotation (EXIF-style) metadata.
 *
 * Draws a known 4x4 test video, tagged with rotate=180 metadata, onto a
 * canvas and compares the resulting pixel pattern against the rotated
 * rendering that Chrome/Safari produce.
 *
 * @returns {Promise<boolean>} true if the rotation metadata was applied.
 */
export async function canvasUsesEXIF() {
  // wrgb-180.mp4 (the ffmpeg-built fixture above) inlined as a base64
  // data URL so the test is self-contained.
  const videoUrl = `data:video/mp4;base64,AAAAIGZ0eXBpc29tAAACAGlzb21pc28yYXZjMW1wNDEAAAAIZnJlZQAAAvxtZGF0AAACrgYF//+q3EXpvebZSLeWLNgg2SPu73gyNjQgLSBjb3JlIDE1OSByMjk5OSAyOTY0OTRhIC0gSC4yNjQvTVBFRy00IEFWQyBjb2RlYyAtIENvcHlsZWZ0IDIwMDMtMjAyMCAtIGh0dHA6Ly93d3cudmlkZW9sYW4ub3JnL3gyNjQuaHRtbCAtIG9wdGlvbnM6IGNhYmFjPTEgcmVmPTMgZGVibG9jaz0xOjA6MCBhbmFseXNlPTB4MzoweDExMyBtZT1oZXggc3VibWU9NyBwc3k9MSBwc3lfcmQ9MS4wMDowLjAwIG1peGVkX3JlZj0xIG1lX3JhbmdlPTE2IGNocm9tYV9tZT0xIHRyZWxsaXM9MSA4eDhkY3Q9MSBjcW09MCBkZWFkem9uZT0yMSwxMSBmYXN0X3Bza2lwPTEgY2hyb21hX3FwX29mZnNldD0tMiB0aHJlYWRzPTEgbG9va2FoZWFkX3RocmVhZHM9MSBzbGljZWRfdGhyZWFkcz0wIG5yPTAgZGVjaW1hdGU9MSBpbnRlcmxhY2VkPTAgYmx1cmF5X2NvbXBhdD0wIGNvbnN0cmFpbmVkX2ludHJhPTAgYmZyYW1lcz0zIGJfcHlyYW1pZD0yIGJfYWRhcHQ9MSBiX2JpYXM9MCBkaXJlY3Q9MSB3ZWlnaHRiPTEgb3Blbl9nb3A9MCB3ZWlnaHRwPTIga2V5aW50PTI1MCBrZXlpbnRfbWluPTI1IHNjZW5lY3V0PTQwIGludHJhX3JlZnJlc2g9MCByY19sb29rYWhlYWQ9NDAgcmM9Y3JmIG1idHJlZT0xIGNyZj0yMy4wIHFjb21wPTAuNjAgcXBtaW49MCBxcG1heD02OSBxcHN0ZXA9NCBpcF9yYXRpbz0xLjQwIGFxPTE6MS4wMACAAAAAPmWIhAAt/9pbuD7Z/gvI3kF2QzYeJnVbANgW8XnGVlnoDJNW7zJawMem6POfQ3cvmVl9l7mrZDdjuR26xB2/AAADAm1vb3YAAABsbXZoZAAAAAAAAAAAAAAAAAAAA+gAAAAoAAEAAAEAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAIsdHJhawAAAFx0a2hkAAAAAwAAAAAAAAAAAAAAAQAAAAAAAAAoAAAAAAAAAAAAAAAAAAAAAP//AAAAAAAAAAAAAAAAAAD//wAAAAAAAAAAAAAAAAAAQAAAAAAEAAAABAAAAAAAJGVkdHMAAAAcZWxzdAAAAAAAAAABAAAAKAAAAAAAAQAAAAABpG1kaWEAAAAgbWRoZAAAAAAAAAAAAAAAAAAAMgAAAAIAVcQAAAAAAC1oZGxyAAAAAAAAAAB2aWRlAAAAAAAAAAAAAAAAVmlkZW9IYW5kbGVyAAAAAU9taW5mAAAAFHZtaGQAAAABAAAAAAAAAAAAAAAkZGluZgAAABxkcmVmAAAAAAAAAAEAAAAMdXJsIAAAAAEAAAEPc3RibAAAAKtzdHNkAAAAAAAAAAEAAACbYXZjMQAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAEAAQASAAAAEgAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABj//wAAADVhdmNDAWQACv/hABhnZAAKrNlfnnwEQAAAAwBAAAAMg8SJZYABAAZo6+PLIsD9+PgAAAAAEHBhc3AAAAABAAAAAQAAABhzdHRzAAAAAAAAAAEAAAABAAACAAAAABxzdHNjAAAAAAAAAAEAAAABAAAAAQAAAAEAAAAUc3RzegAAAAAAAAL0AAAAAQAAABRzdGNvAAAAAAAA
AAEAAAAwAAAAYnVkdGEAAABabWV0YQAAAAAAAAAhaGRscgAAAAAAAAAAbWRpcmFwcGwAAAAAAAAAAAAAAAAtaWxzdAAAACWpdG9vAAAAHWRhdGEAAAABAAAAAExhdmY1OC40NS4xMDA=`;
  const video = document.createElement("video");
  const canvas = document.createElement("canvas");
  video.style.display = "none";
  canvas.style.display = "none";
  await new Promise((resolve, reject) => {
    // "canplay" (not "loadedmetadata") avoids NS_ERROR_NOT_AVAILABLE on
    // Firefox/Windows — see Edit 1 below.
    video.addEventListener("canplay", () => {
      video.width = video.videoWidth;
      video.height = video.videoHeight;
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      // Seek to the start; the "seeked" event resolves the promise.
      video.currentTime = 0;
    });
    video.addEventListener("seeked", () => resolve());
    // NOTE(review): reject is never wired to an "error" listener — a
    // failed decode leaves this promise pending forever.
    video.src = videoUrl;
  });
  const context = canvas.getContext("2d");
  context.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
  // Read back the 4x4 frame and classify each pixel.
  const { data } = context.getImageData(0, 0, 4, 4);
  // "bbggbbggrrwwrrww" is the 180°-rotated pattern that Chrome/Safari
  // produce, i.e. the rotate metadata was honoured.
  return getColourPattern(data) === "bbggbbggrrwwrrww";
}
Now assuming you have the rotation metadata of the video, you should be able to test if you need to rotate it on the canvas manually 🤓
Edit 1:
This should fix Firefox on Windows from throwing a NS_ERROR_NOT_AVAILABLE
error.
9c9
< video.addEventListener("loadedmetadata", () => {
---
> video.addEventListener("canplay", () => {