c++ qt raspberry-pi gstreamer qtgstreamer

Using QtGStreamer with Qt and Raspberry Pi


I am having a problem decoding a video stream sent from my Raspberry Pi to a laptop running a Qt GUI.

My pipeline on the Pi (using the Adafruit Raspberry Pi camera) is:

raspivid -t 999999 -h 480 -w 640 -fps 25 -hf -b 2000000 -o - | gst-launch-1.0 -v fdsrc ! h264parse !  rtph264pay config-interval=1 pt=96 ! gdppay ! tcpserversink host=10.0.0.128 port=5000 

Just using a viewer on the laptop with the pipeline:

gst-launch-1.0 -v tcpclientsrc host=10.0.0.128 port=5000  ! gdpdepay !  rtph264depay ! avdec_h264 ! videoconvert ! autovideosink sync=false 

gives very nice color video at a decent rate, although I didn't measure the frame rate.

When I use qtgstreamer in my GUI application (with a 640×480 source, which I assumed would give an 8-bit RGB image), the buffer size reported in the code below is 460800, while I expect 921600 (640 × 480 × 3 bytes for RGB888). If I use QImage::Format_RGB888, the program crashes because the image buffer is too small. If I use QImage::Format_Index8 it runs fine and shows video in my GUI, but in black and white. Does anyone have any ideas? Here is my relevant code:

bool CameraStreamer::initStreamer()
{
    gst_init (NULL, NULL);
    //gst-launch-1.0 -v tcpclientsrc host=10.0.0.128 port=5000  ! gdpdepay !  rtph264depay ! avdec_h264 ! videoconvert ! autovideosink sync=false
    pipeline = gst_pipeline_new("Camera");
    source       = gst_element_factory_make ("tcpclientsrc", "cam-source");
    depay        = gst_element_factory_make ("gdpdepay",     "depay");
    rtpdepay     = gst_element_factory_make ("rtph264depay", "rtp-depay");
    decoder      = gst_element_factory_make ("avdec_h264",   "videodecoder");
    videoconvert = gst_element_factory_make ("videoconvert", "video-convert");
    sink         = gst_element_factory_make ("appsink",      "video-output");
    if (!pipeline || !source || !depay || !rtpdepay || !decoder || !videoconvert || !sink) {
      qDebug() << "One element could not be created. Exiting.";
      return false;
    }
    callbacks.eos = NULL;
    callbacks.new_sample = newBufferCallback;
    callbacks.new_preroll = NULL;
    gst_app_sink_set_callbacks((GstAppSink *) sink, &callbacks, this, NULL);
    g_object_set (G_OBJECT(source), "port", 5001, NULL);
    g_object_set (G_OBJECT(source),"host","10.0.0.128",NULL);
    gst_bin_add_many (GST_BIN (pipeline),
                      source, depay, rtpdepay, decoder, videoconvert, sink, NULL);
    if (!gst_element_link_many (source, depay, rtpdepay, decoder, videoconvert, sink, NULL))
        g_warning ("Main pipeline link failed...");
    GstStateChangeReturn ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_printerr ("Unable to set the pipeline to the playing state.");
        gst_object_unref (pipeline);
        return false;
    }
    return true;
}

GstFlowReturn CameraStreamer::newBufferCallback(GstAppSink *app_sink, void *obj)
{
    if(app_sink == NULL)
    {
        qDebug() << "app_sink is NULL";
        return GST_FLOW_ERROR;
    }
    GstSample* sample = gst_app_sink_pull_sample(app_sink);
    if(!sample)
    {
        qDebug() << "Error retreiving buffer...";
        return GST_FLOW_ERROR;
    }
    GstCaps* caps = gst_sample_get_caps (sample);
    if (!caps) {
        qDebug() << "could not get snapshot format\n";
        exit (-1);
    }
    gint width, height;
    GstStructure* s = gst_caps_get_structure (caps, 0);
    int res = gst_structure_get_int (s, "width", &width)
           && gst_structure_get_int (s, "height", &height); // need both dimensions, hence && rather than |
    if (!res) {
        qDebug() << "could not get snapshot dimensions";
        exit (-1);
    }
    GstMapInfo map;

    GstBuffer *buffer = gst_sample_get_buffer (sample);
    qDebug() << "size: " << gst_buffer_get_size(buffer);
    gst_buffer_map (buffer, &map, GST_MAP_READ);
    QImage img(map.data,width,height, QImage::Format_RGB888);
    img = img.copy();
    ((CameraStreamer*)obj)->emitNewImage(img);
    gst_buffer_unmap (buffer, &map);
    gst_sample_unref (sample);
    return GST_FLOW_OK;
}
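
Edit: to check what format the sink is actually negotiating, the "format" string can be read from the same caps structure s above (a small diagnostic sketch using the standard accessor; this is not part of my original code):

const gchar *format = gst_structure_get_string (s, "format");
qDebug() << "negotiated format:" << (format ? format : "unknown");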

Solution

  • So after a ridiculous amount of time and Googling, I found the answer. The decoded buffers are not RGB at all but I420 (YUV 4:2:0), which is exactly 640 × 480 × 3/2 = 460800 bytes, matching the size I was seeing. I ended up using OpenCV to do the actual color conversion. Here is my method (continuing from above):

    GstBuffer *buffer = gst_sample_get_buffer (sample);
    gst_buffer_map (buffer, &map, GST_MAP_READ);
    // I420 layout: a full-size Y plane followed by quarter-size U and V
    // planes, i.e. height * 3/2 rows of width bytes.
    cv::Mat temp_mat = cv::Mat(cv::Size(width, height + height/2), CV_8UC1, (char*)map.data);
    cv::Mat result(height, width, CV_8UC3);  // 3-channel, 8-bit RGB output
    cv::cvtColor(temp_mat, result, CV_YUV2RGB_I420, 3);
    QImage rgb(result.size().width, result.size().height, QImage::Format_RGB888);
    // A single memcpy is safe here because 640 * 3 bytes per row is already
    // 32-bit aligned, so the QImage has no per-scanline padding.
    memcpy(rgb.scanLine(0), (unsigned char*)result.data, rgb.width() * rgb.height() * result.channels());
    ((CameraStreamer*)obj)->emitNewImage(rgb);
    gst_buffer_unmap (buffer, &map);
    gst_sample_unref (sample);
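
    An alternative I did not end up using (a sketch only, assuming the stock GStreamer 1.x appsink API) would be to pin the appsink's caps to RGB in initStreamer() before linking, so videoconvert has to deliver RGB888-sized buffers and the OpenCV step becomes unnecessary:

    // Sketch: force videoconvert to negotiate RGB with the appsink, so each
    // buffer really is width * height * 3 bytes and Format_RGB888 is valid.
    GstCaps *rgb_caps = gst_caps_new_simple ("video/x-raw",
                                             "format", G_TYPE_STRING, "RGB",
                                             NULL);
    gst_app_sink_set_caps (GST_APP_SINK (sink), rgb_caps);
    gst_caps_unref (rgb_caps);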
    

    I will post more information in my application's git repo, but I thought this might help other people.

    Here's the link: camera streamer example
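
    For completeness, this is roughly how the frames get from the callback into the GUI. A minimal sketch; apart from emitNewImage and newBufferCallback, the names here are hypothetical rather than copied from my repo:

    // CameraStreamer hands each decoded frame to Qt as a signal. The appsink
    // callback runs on a GStreamer streaming thread, so the connection to a
    // GUI object must be queued (Qt's default for cross-thread connections).
    class CameraStreamer : public QObject
    {
        Q_OBJECT
    public:
        bool initStreamer();
        void emitNewImage(const QImage &img) { emit newImage(img); }
        static GstFlowReturn newBufferCallback(GstAppSink *app_sink, void *obj);
    signals:
        void newImage(const QImage &img);
    };

    // In the GUI, e.g. painting each frame onto a QLabel:
    // connect(streamer, &CameraStreamer::newImage, label,
    //         [label](const QImage &img) { label->setPixmap(QPixmap::fromImage(img)); });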