Tags: node.js, ffmpeg, crop, aspect-ratio, fluent-ffmpeg

fluent-ffmpeg aspect ratio with crop


I am having trouble changing the aspect ratio between 16:9, 1:1 and 9:16 using fluent-ffmpeg. When I try to change from 16:9 to 9:16, I get a squeezed video, but what I actually want is for the extra part to be cropped off.

I tried this with many combinations:

    FFmpeg()
      .input(video)
      .size("608x?")
      .aspect("9:16")
      .output(tempFile)
      .run();

My input: 16:9 video, 1920x1080

My expected result: 9:16 video, 608x1080


Solution

  • The best solution I came up with:
    Since fluent-ffmpeg does not provide a built-in method to crop and scale a video, we need to implement it ourselves.

    Step 1:
    We need to calculate an intermediate crop resolution that matches the target aspect ratio, either by cropping the extra area (landscape to portrait) or by adding black bars (portrait to landscape), so that the video is not squeezed or stretched. For example, going from 1920x1080 to 9:16, the crop width is 1080 × 9/16 ≈ 608, so the intermediate resolution is 608x1080.

    Note:
    In the portrait-to-landscape case we can also fill the bars with a blurred copy of the video instead of black, but that requires an extra step (an ffmpeg blur filter); see the sketch after the main code below.

    Step 2:
    We will simply scale up or down to our target resolution.

    The code may look lengthy, but believe me, it is quite easy and split into small methods. Start from the bottom to understand it.
    Ignore the types if you want a JavaScript version.

    Just run this code and it will crop/scale a single video for you.

    import * as FFmpeg from "fluent-ffmpeg";
    
    function resizingFFmpeg(
      video: string,
      width: number,
      height: number,
      tempFile: string,
      autoPad?: boolean,
      padColor?: string
    ): Promise<string> {
      return new Promise((res, rej) => {
        let ff = FFmpeg().input(video).size(`${width}x${height}`);
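        // size() alone stretches to the exact WxH; autoPad() pads with bars instead when the ratios differ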
        if (autoPad) ff = ff.autoPad(autoPad, padColor);
        ff.output(tempFile)
          .on("start", function (commandLine) {
            console.log("Spawned FFmpeg with command: " + commandLine);
            console.log("Start resizingFFmpeg:", video);
          })
          // .on("progress", function(progress) {
          //   console.log(progress);
          // })
          .on("error", function (err) {
            console.log("Problem performing ffmpeg function");
            rej(err);
          })
          .on("end", function () {
            console.log("End resizingFFmpeg:", tempFile);
            res(tempFile);
          })
          .run();
      });
    }
    
    function videoCropCenterFFmpeg(
      video: string,
      w: number,
      h: number,
      tempFile: string
    ): Promise<string> {
      return new Promise((res, rej) => {
        FFmpeg()
          .input(video)
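          // the crop filter defaults to the center of the frame when x/y are omitted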
          .videoFilters([
            {
              filter: "crop",
              options: {
                w,
                h,
              },
            },
          ])
          .output(tempFile)
          .on("start", function (commandLine) {
            console.log("Spawned FFmpeg with command: " + commandLine);
            console.log("Start videoCropCenterFFmpeg:", video);
          })
          // .on("progress", function(progress) {
          //   console.log(progress);
          // })
          .on("error", function (err) {
            console.log("Problem performing ffmpeg function");
            rej(err);
          })
          .on("end", function () {
            console.log("End videoCropCenterFFmpeg:", tempFile);
            res(tempFile);
          })
          .run();
      });
    }
    
    function getDimentions(media: string) {
      console.log("Getting Dimentions from:", media);
      return new Promise<{ width: number; height: number }>((res, rej) => {
        FFmpeg.ffprobe(media, async function (err, metadata) {
          if (err) {
            console.log("Error occured while getting dimensions of:", media);
            rej(err);
          }
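          // assumes the first probed stream is the video stream (it may be audio for some files)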
          res({
            width: metadata.streams[0].width,
            height: metadata.streams[0].height,
          });
        });
      });
    }
    
    async function videoScale(video: string, newWidth: number, newHeight: number) {
      const output = "scaledOutput.mp4";
      const { width, height } = await getDimentions(video);
      if (Number((width / height).toFixed(2)) > Number((newWidth / newHeight).toFixed(2))) {
        // y=0 case
        // landscape to portrait case
        // round so the crop width passed to ffmpeg is a whole number of pixels
        const x = width - Math.round((newWidth / newHeight) * height);
        console.log(`New interim res: ${width - x}x${height}`);
        const cropping = "tempCropped-" + output;
        let cropped = await videoCropCenterFFmpeg(
          video,
          width - x,
          height,
          cropping
        );
        let resized = await resizingFFmpeg(cropped, newWidth, newHeight, output);
        // unlink temp cropping file
        // fs.unlink(cropping, (err) => {
        //   if (err) console.log(err);
        //   console.log(`Temp file ${cropping} deleted successfully...`);
        // });
        return resized;
      } else if (Number((width / height).toFixed(2)) < Number((newWidth / newHeight).toFixed(2))) {
        // x=0 case
        // portrait to landscape case
        // calculate crop or resize with padding or blur sides
        // or just return with black bars on the side
        return await resizingFFmpeg(video, newWidth, newHeight, output, true);
      } else {
        console.log("Same Aspect Ratio forward for resizing");
        return await resizingFFmpeg(video, newWidth, newHeight, output);
      }
    }
    
    videoScale("./path-to-some-video.mp4", 270, 480);