I am making a C++ application in which I have a bunch of frames (in unsigned char* format) and I need to encode them as a video using the gstreamer H265 encoder, running on the GPU. Most of the gstreamer samples work with a camera directly, but in my case there is no camera.
Using some samples I made the video encoder, but the frames don't get pushed to the video file and the output video is empty.
Here is the code I implemented:
// Question code (for reference): builds appsrc -> videoconvert -> x264enc ->
// mp4mux -> filesink and pushes 10 dummy BGR frames into an MP4 file.
// NOTE(review): despite the stated goal of GPU H.265 encoding, this snippet
// uses x264enc, a software H.264 encoder.
GstElement *pipeline, *appsrc, *videoconvert, *x264enc, *mp4mux, *filesink, *autovideosink; // autovideosink is declared but never created or used
GstCaps *caps;
GstBuffer *buf;
GstMapInfo map;
gst_init(nullptr, nullptr);
pipeline = gst_pipeline_new("mypipeline");
// Create elements
appsrc = gst_element_factory_make("appsrc", "mysource");
videoconvert = gst_element_factory_make("videoconvert", "myconvert");
x264enc = gst_element_factory_make("x264enc", "myencoder");
mp4mux = gst_element_factory_make("mp4mux", "mymux");
filesink = gst_element_factory_make("filesink", "myfileoutput");
if (!pipeline || !appsrc || !videoconvert || !x264enc || !mp4mux || !filesink) {
g_printerr("Not all elements could be created.\n");
// return -1;
// BUG: the error is only printed; execution continues with possibly-null elements.
}
// Set the properties for filesink
g_object_set(filesink, "location", "output.mp4", NULL);
// Build the pipeline
gst_bin_add(GST_BIN(pipeline), appsrc);
gst_bin_add(GST_BIN(pipeline), videoconvert);
gst_bin_add(GST_BIN(pipeline), x264enc);
gst_bin_add(GST_BIN(pipeline), mp4mux);
gst_bin_add(GST_BIN(pipeline), filesink);
// Link the elements
// NOTE(review): the gst_element_link return values are ignored, so a failed
// link would go unnoticed here.
gst_element_link(appsrc, videoconvert);
gst_element_link(videoconvert, x264enc);
gst_element_link(x264enc, mp4mux);
gst_element_link(mp4mux, filesink);
// BUG: these caps are built but never assigned to appsrc's "caps" property
// (and never unreffed), so the pipeline never learns the raw frame format.
caps = gst_caps_from_string("video/x-raw, format=(string)BGR, width=(int)800, height=(int)600, framerate=(fraction)30/1");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
for (int i = 0; i < 10; i++) {
buf = gst_buffer_new_and_alloc(800 * 600 * 3); // Assuming BGR format
gst_buffer_map(buf, &map, GST_MAP_WRITE);
memset(map.data, i, 800 * 600 * 3); // Filling with dummy data
gst_buffer_unmap(buf, &map);
// BUG: the buffers carry no pts/dts/duration and appsrc's "format" property
// is not set to GST_FORMAT_TIME, so the pushed frames cannot be timed.
gst_app_src_push_buffer(GST_APP_SRC(appsrc), buf);
}
gst_app_src_end_of_stream(GST_APP_SRC(appsrc));
GstBus *bus = gst_element_get_bus(pipeline);
// NOTE(review): this pops the FIRST bus message of any type, not specifically
// EOS or ERROR, so it may return before the muxer has finalised the file.
GstMessage *msg = gst_bus_timed_pop(bus, GST_CLOCK_TIME_NONE);
if (msg != NULL)
gst_message_unref(msg);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
It seems that the call to gst_app_src_push_buffer
is doing nothing, and I have no clue why. Is there any mistake here?
You have a couple of issues in your code:

1. The pushed buffers carry no timestamps. You could set appsrc's do-timestamp
property to true to get auto-timestamping, but that will not work here because
you push the buffers without waiting for the pipeline to reach the PLAYING
state first. Alternatively, you could additionally set appsrc's is-live
property to true so that appsrc pushes the buffers downstream once the PLAYING
state is reached; however, the auto-timestamped buffers will then play at a
much higher framerate than the configured 30 FPS. If you want roughly 30 FPS,
something like the following sets the buffer timestamps explicitly: buf->pts = GST_MSECOND * 30 * i;
buf->dts = buf->pts;
buf->duration = GST_MSECOND * 33;
gst_app_src_push_buffer(GST_APP_SRC(appsrc), buf);
2. appsrc's format property should be set to the GST_FORMAT_TIME value.

3. The caps are created but never set on the appsrc element.

The following fixed version works fine:
#include <gst/app/gstappsrc.h>
#include <gst/gst.h>
#include <cassert>
int main() {
GstElement *pipeline, *appsrc, *videoconvert, *x264enc, *mp4mux, *filesink,
*autovideosink;
GstCaps *caps;
GstBuffer *buf;
GstMapInfo map;
gst_init(nullptr, nullptr);
pipeline = gst_pipeline_new("mypipeline");
// Create elements
appsrc = gst_element_factory_make("appsrc", "mysource");
videoconvert = gst_element_factory_make("videoconvert", "myconvert");
x264enc = gst_element_factory_make("x264enc", "myencoder");
mp4mux = gst_element_factory_make("mp4mux", "mymux");
filesink = gst_element_factory_make("filesink", "myfileoutput");
if (!pipeline || !appsrc || !videoconvert || !x264enc || !mp4mux ||
!filesink) {
g_printerr("Not all elements could be created.\n");
// return -1;
}
// Set the properties for filesink
g_object_set(filesink, "location", "output.mp4", NULL);
// Build the pipeline
gst_bin_add(GST_BIN(pipeline), appsrc);
gst_bin_add(GST_BIN(pipeline), videoconvert);
gst_bin_add(GST_BIN(pipeline), x264enc);
gst_bin_add(GST_BIN(pipeline), mp4mux);
gst_bin_add(GST_BIN(pipeline), filesink);
// Link the elements
gst_element_link(appsrc, videoconvert);
gst_element_link(videoconvert, x264enc);
gst_element_link(x264enc, mp4mux);
gst_element_link(mp4mux, filesink);
caps =
gst_caps_from_string("video/x-raw, format=(string)BGR, width=(int)800, "
"height=(int)600, framerate=(fraction)30/1");
g_object_set(appsrc, "caps", caps, nullptr);
gst_caps_unref(caps);
g_object_set(appsrc, "format", GST_FORMAT_TIME, nullptr);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
for (int i = 0; i < 10; i++) {
buf = gst_buffer_new_allocate(nullptr, 800 * 600 * 3, nullptr); // Assuming BGR format
gst_buffer_map(buf, &map, GST_MAP_WRITE);
assert(map.size == 800*600*3);
memset(map.data, i%255, 800 * 600 * 3); // Filling with dummy data
gst_buffer_unmap(buf, &map);
buf->pts = GST_MSECOND * 30 * i;
buf->dts = buf->pts;
buf->duration = GST_MSECOND * 33;
gst_app_src_push_buffer(GST_APP_SRC(appsrc), buf);
}
gst_app_src_end_of_stream(GST_APP_SRC(appsrc));
GstBus *bus = gst_element_get_bus(pipeline);
GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS ));
if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
g_error ("An error occurred! Re-run with the GST_DEBUG=*:WARN environment "
"variable set for more details.");
}
if (msg != NULL)
gst_message_unref(msg);
gst_object_unref(bus);
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
}
Compiling, running and testing (on macos):
➜ ~ clang++ main.cpp $(pkg-config --libs --cflags gstreamer-base-1.0 gstreamer-app-1.0)
➜ ~ ffprobe output.mp4
ffprobe version 6.0 Copyright (c) 2007-2023 the FFmpeg developers
built with Apple clang version 14.0.3 (clang-1403.0.22.14.1)
configuration: --prefix=/opt/homebrew/Cellar/ffmpeg/6.0_1 --enable-shared --enable-pthreads --enable-version3 --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-gnutls --enable-gpl --enable-libaom --enable-libaribb24 --enable-libbluray --enable-libdav1d --enable-libjxl --enable-libmp3lame --enable-libopus --enable-librav1e --enable-librist --enable-librubberband --enable-libsnappy --enable-libsrt --enable-libsvtav1 --enable-libtesseract --enable-libtheora --enable-libvidstab --enable-libvmaf --enable-libvorbis --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libxvid --enable-lzma --enable-libfontconfig --enable-libfreetype --enable-frei0r --enable-libass --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libspeex --enable-libsoxr --enable-libzmq --enable-libzimg --disable-libjack --disable-indev=jack --enable-videotoolbox --enable-audiotoolbox --enable-neon
libavutil 58. 2.100 / 58. 2.100
libavcodec 60. 3.100 / 60. 3.100
libavformat 60. 3.100 / 60. 3.100
libavdevice 60. 1.100 / 60. 1.100
libavfilter 9. 3.100 / 9. 3.100
libswscale 7. 1.100 / 7. 1.100
libswresample 4. 10.100 / 4. 10.100
libpostproc 57. 1.100 / 57. 1.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'output.mp4':
Metadata:
major_brand : mp42
minor_version : 0
compatible_brands: mp42mp41isomiso2
creation_time : 2023-11-03T23:15:15.000000Z
encoder : x264
Duration: 00:01:16.50, start: 0.000000, bitrate: 14 kb/s
Stream #0:0[0x1](und): Video: h264 (High 4:4:4 Predictive) (avc1 / 0x31637661), yuv444p(tv, bt709, progressive), 800x600 [SAR 1:1 DAR 4:3], 11 kb/s, 33.33 fps, 33.33 tbr, 3k tbn (default)
Metadata:
creation_time : 2023-11-03T23:15:15.000000Z
handler_name : VideoHandler
vendor_id : [0][0][0][0]
➜ ~
I see that you wrote that code in C++. Please also consider using the RAII technique to avoid managing resources manually. A simple RAII wrapper can help, simplify the code, and make it less error-prone. For instance:
// Stateless function object that carries the free function Fn in its type
// (requires C++17 `auto` non-type template parameters). When used as a
// unique_ptr deleter, the call d(ptr) resolves through integral_constant's
// implicit conversion to the stored function pointer (surrogate call), so no
// per-pointer deleter storage is needed.
template <auto Fn>
using FunctionObj = std::integral_constant<decltype(Fn), Fn>;
// unique_ptr that releases its resource by calling Fun in its destructor;
// same size as a raw pointer because the deleter is an empty type.
template <typename T, auto Fun>
using ResourceReleasedByFunction = std::unique_ptr<T, FunctionObj<Fun>>;
// Scoped aliases for the GLib/GStreamer resources used in the code above.
using ScopedGChar = ResourceReleasedByFunction<gchar, g_free>;
using ScopedGstElement = ResourceReleasedByFunction<GstElement, gst_object_unref>;
using ScopedGstCaps = ResourceReleasedByFunction<GstCaps, gst_caps_unref>;
using ScopedGstSample = ResourceReleasedByFunction<GstSample, gst_sample_unref>;
using ScopedGstBuffer = ResourceReleasedByFunction<GstBuffer, gst_buffer_unref>;
Example usage:
ScopedGstElement appsrc{gst_element_factory_make("appsrc", "mysource")};
...
// no need to call gst_object_unref at the end.