I have a raw yuv420p file at 1860x1920. I fill it into a cv::Mat and then use libyuv::I420Scale to shrink it to a quarter of its original size, but I get weird output, for example there are some green stripes like this:
Using ffplay -f rawvideo -i yuv_image.yuv -video_size 1860x1920, the original image looks like this:
The scaled image is not normal; it looks like the YUV data is not aligned.
What am I doing wrong? Code below:
#define ALIGN32(n) (((n) >> 5) << 5)
// yuv420_image is yuv420 cv::Mat
int src_width = yuv420_image.cols, src_height = yuv420_image.rows / 3 * 2;
const uint8_t* src = yuv420_image.data;
const uint8_t* src_y = src;
const uint8_t* src_u = src_y + src_width * src_width;
const uint8_t* src_v = src_u + src_width * src_width / 4;
int dst_width = ALIGN32(src_width / kScaleRatio), dst_height = ALIGN32(src_height / kScaleRatio);
cv::Mat yuv420_scaled(dst_height * 3 / 2, dst_width, CV_8UC1);
uint8_t* dst = yuv420_scaled.data;
uint8_t* dst_y = dst;
uint8_t* dst_u = dst_y + dst_width * dst_height;
uint8_t* dst_v = dst_u + dst_width * dst_height / 4;
int result = libyuv::I420Scale(src_y, src_width,
src_u, src_width / 2,
src_v, src_width / 2,
src_width, src_width,
dst_y, dst_width,
dst_u, dst_width / 2,
dst_v, dst_width / 2,
dst_width, dst_height,
libyuv::FilterModeEnum::kFilterNone);
The issue is mixing up width and height in 3 places.
In the plane offsets, src_width is used where src_height belongs:
const uint8_t* src_u = src_y + src_width * src_width;
const uint8_t* src_v = src_u + src_width * src_width / 4;
should be:
const uint8_t* src_u = src_y + src_width * src_height;
const uint8_t* src_v = src_u + src_width * src_height / 4;
and the same mix-up appears in the I420Scale call:
int result = libyuv::I420Scale(src_y, src_width,
                               src_u, src_width / 2,
                               src_v, src_width / 2,
                               src_width, src_width,
                               ...
should be:
int result = libyuv::I420Scale(src_y, src_width,
                               src_u, src_width / 2,
                               src_v, src_width / 2,
                               src_width, src_height,
                               ...
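To see why those offsets matter, here is a minimal sketch of the I420 plane layout (the GetI420Planes helper is only for illustration and is not part of the original code); the U and V offsets depend on both dimensions, so with a non-square frame like 1860x1920, substituting width for height moves the chroma pointers to the wrong place:
#include <cstdint>
struct I420Planes { const uint8_t *y, *u, *v; };
// Illustrative helper: compute plane pointers for a packed I420 buffer
// with stride == width.
static I420Planes GetI420Planes(const uint8_t* data, int width, int height)
{
    I420Planes p;
    p.y = data;                             // Y plane: width * height bytes
    p.u = p.y + width * height;             // U plane: (width / 2) * (height / 2) bytes
    p.v = p.u + (width / 2) * (height / 2); // V plane: (width / 2) * (height / 2) bytes
    return p;
}
For 1860x1920 the U plane starts at 1860*1920 = 3,571,200 bytes and the V plane at 3,571,200 + 930*960 = 4,464,000 bytes; src_width * src_width (3,459,600) still points inside the Y plane, and src_width * src_width / 4 (864,900) is smaller than a full chroma plane (892,800), so the scaler reads chroma from the wrong locations, which shows up as the green stripes.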
For testing I created a synthetic video frame in YUV420p (I420) pixel format using FFmpeg:
ffmpeg -f lavfi -i testsrc=1860x1920:rate=1:duration=1 -vf scale=out_color_matrix=bt709:out_range=full -pix_fmt yuv420p yuv_image.yuv
The following code sample downscales the image to 448x480.
The code uses OpenCV for converting the result to BGR (for testing).
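As a quick check of the 448x480 figure: ALIGN32 rounds down to a multiple of 32, so with kScaleRatio = 4 the destination dimensions are
dst_width  = ALIGN32(1860 / 4) = ALIGN32(465) = 448
dst_height = ALIGN32(1920 / 4) = ALIGN32(480) = 480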
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include "opencv2/opencv.hpp"
#include "libyuv.h"
#define ALIGN32(n) (((n) >> 5) << 5)
//Building a sample input image:
//ffmpeg -f lavfi -i testsrc=1860x1920:rate=1:duration=1 -vf scale=out_color_matrix=bt709:out_range=full -pix_fmt yuv420p yuv_image.yuv
int main()
{
const int kScaleRatio = 4; //Set to 4 for example
const int width = 1860;
const int height = 1920;
const int stride = width; //Assume rows are continuous
uint8_t *frameData = new uint8_t[stride*height*3/2]; //Buffer for storing raw I420 input image.
//Read image from file.
FILE* f = fopen("yuv_image.yuv", "rb");
if (f == nullptr)
{
return 1; //Input file could not be opened.
}
fread(frameData, 1, stride*height*3/2, f); //Assume rows are continuous (assume stride = width)
fclose(f);
//Make OpenCV Mat wrapper
cv::Mat yuv420_image(height*3/2, width, CV_8UC1, (void*)frameData, stride);
// yuv420_image is yuv420 cv::Mat
int src_width = yuv420_image.cols, src_height = yuv420_image.rows / 3 * 2;
const uint8_t* src = yuv420_image.data;
const uint8_t* src_y = src;
const uint8_t* src_u = src_y + src_width * src_height; //const uint8_t* src_u = src_y + src_width * src_width; <-- Supposed to be src_width * src_height
const uint8_t* src_v = src_u + src_width * src_height / 4; //const uint8_t* src_v = src_u + src_width * src_width / 4;
int dst_width = ALIGN32(src_width / kScaleRatio), dst_height = ALIGN32(src_height / kScaleRatio);
cv::Mat yuv420_scaled(dst_height * 3 / 2, dst_width, CV_8UC1);
uint8_t* dst = yuv420_scaled.data;
uint8_t* dst_y = dst;
uint8_t* dst_u = dst_y + dst_width * dst_height;
uint8_t* dst_v = dst_u + dst_width * dst_height / 4;
//int result = libyuv::I420Scale(src_y, src_width,
// src_u, src_width / 2,
// src_v, src_width / 2,
// src_width, src_width, //<-- Supposed to be src_width, src_height
// dst_y, dst_width,
// dst_u, dst_width / 2,
// dst_v, dst_width / 2,
// dst_width, dst_height,
// libyuv::FilterModeEnum::kFilterNone);
int result = I420Scale(src_y, //const uint8_t * src_y,
src_width, //int src_stride_y,
src_u, //const uint8_t * src_u,
src_width / 2, //int src_stride_u,
src_v, //const uint8_t * src_v,
src_width / 2, //int src_stride_v,
src_width, //int src_width,
src_height, //int src_height,
dst_y, //uint8_t * dst_y,
dst_width, //int dst_stride_y,
dst_u, //uint8_t * dst_u,
dst_width / 2, //int dst_stride_u,
dst_v, //uint8_t * dst_v,
dst_width / 2, //int dst_stride_v,
dst_width, //int dst_width,
dst_height, //int dst_height,
libyuv::FilterModeEnum::kFilterNone);//enum FilterMode filtering);
if (result != 0)
{
return result;
}
cv::Mat bgr_scaled;
//Convert YUV420p to BGR using OpenCV (note: the conversion may not produce accurate colors).
//It looks like there is a bug in OpenCV (use COLOR_YUV420p2RGB instead of COLOR_YUV420p2BGR).
cv::cvtColor(yuv420_scaled, bgr_scaled, cv::COLOR_YUV420p2RGB);
//Show bgr_scaled for testing.
cv::imshow("bgr_scaled", bgr_scaled);
cv::waitKey();
cv::destroyAllWindows();
delete[] frameData;
cv::imwrite("bgr_scaled.png", bgr_scaled);
return 0;
}
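If you want to inspect the scaled I420 planes directly rather than the BGR conversion, one option (a sketch under the same stride == width assumption, reusing the includes from the listing above; DumpI420 and yuv_scaled.yuv are just illustrative names) is to write yuv420_scaled to a raw file and view it the same way as the input:
//Illustrative helper: write a contiguous CV_8UC1 I420 Mat (height*3/2 rows) to a raw .yuv file.
static bool DumpI420(const cv::Mat& i420, const char* path)
{
    if (!i420.isContinuous())
    {
        return false; //Rows must be packed (stride == width).
    }
    FILE* f = fopen(path, "wb");
    if (f == nullptr)
    {
        return false;
    }
    const size_t bytes = i420.total() * i420.elemSize(); //width * height * 3 / 2 bytes for CV_8UC1
    const bool ok = (fwrite(i420.data, 1, bytes, f) == bytes);
    fclose(f);
    return ok;
}
//Example call (placed inside main after I420Scale succeeds):
//  DumpI420(yuv420_scaled, "yuv_scaled.yuv");
//View the result with:
//  ffplay -f rawvideo -video_size 448x480 yuv_scaled.yuv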