python, opencv, gstreamer, rtsp, python-gstreamer

Write OpenCV frames into a GStreamer RTSP server pipeline


I'm trying to push OpenCV images into a GStreamer RTSP server in Python. I'm having trouble writing into the media factory; I'm new to gst-rtsp-server and there's little documentation, so I don't know whether I'm using the right approach. I start the MainLoop in a separate thread and use the main thread to create buffers and push them into the appsrc element of the media factory pipeline. Is this the right approach to achieve my objective? Can anyone help me? My code is below:

from threading import Thread
from time import clock

import cv2
import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject


class SensorFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self, **properties):
        super(SensorFactory, self).__init__(**properties)
        self.launch_string = 'appsrc ! video/x-raw,width=320,height=240,framerate=30/1 ' \
                             '! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency ' \
                             '! rtph264pay config-interval=1 name=pay0 pt=96'
        self.pipeline = Gst.parse_launch(self.launch_string)
        self.appsrc = self.pipeline.get_child_by_index(4)

    def do_create_element(self, url):
        return self.pipeline


class GstServer(GstRtspServer.RTSPServer):
    def __init__(self, **properties):
        super(GstServer, self).__init__(**properties)
        self.factory = SensorFactory()
        self.factory.set_shared(True)
        self.get_mount_points().add_factory("/test", self.factory)
        self.attach(None)


GObject.threads_init()
Gst.init(None)

server = GstServer()

loop = GObject.MainLoop()
th = Thread(target=loop.run)
th.start()

print('Thread started')

cap = cv2.VideoCapture(0)

print(cap.isOpened())

frame_number = 0

fps = 30
duration = 1 / fps

timestamp = clock()

while cap.isOpened():
    ret, frame = cap.read()
    if ret:

        print('Writing buffer')

        data = frame.tostring()

        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        buf.duration = fps
        timestamp = clock() - timestamp
        buf.pts = buf.dts = int(timestamp)
        buf.offset = frame_number
        frame_number += 1
        retval = server.factory.appsrc.emit('push-buffer', buf)
        print(retval)

        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

cap.release()

By the way, I tried to copy the buffer creation from the OpenCV source code, but I'm not sure I correctly translated the C++ code into Python.
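
In isolation, the frame-to-buffer conversion I'm attempting looks like the sketch below. I'm assuming Gst.Buffer.new_allocate plus fill() is the Python counterpart of the C++ gst_buffer_new_allocate/gst_buffer_fill pair, and that Gst.Buffer.new_wrapped could wrap the raw bytes directly instead, but I haven't verified either assumption:

import numpy as np
import gi

gi.require_version('Gst', '1.0')
from gi.repository import Gst

Gst.init(None)

# placeholder for a 320x240 BGR frame as returned by cap.read()
frame = np.zeros((240, 320, 3), dtype=np.uint8)

data = frame.tobytes()  # raw BGR bytes of the frame
buf = Gst.Buffer.new_allocate(None, len(data), None)
buf.fill(0, data)       # copy the bytes into the GstBuffer

# possibly simpler: wrap the bytes directly, without a separate allocate/fill
buf_wrapped = Gst.Buffer.new_wrapped(frame.tobytes())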


Solution

  • I found the solution; lots of stuff was missing.

    Here's the code for anyone who's facing the same problem or has a somewhat similar one.

    #!/usr/bin/env python3
    
    import cv2
    import gi
    
    gi.require_version('Gst', '1.0')
    gi.require_version('GstRtspServer', '1.0')
    from gi.repository import Gst, GstRtspServer, GObject
    
    
    class SensorFactory(GstRtspServer.RTSPMediaFactory):
        def __init__(self, **properties):
            super(SensorFactory, self).__init__(**properties)
            self.cap = cv2.VideoCapture(0)
            self.number_frames = 0
            self.fps = 30
            self.duration = 1 / self.fps * Gst.SECOND  # duration of a frame in nanoseconds
            self.launch_string = 'appsrc name=source is-live=true block=true format=GST_FORMAT_TIME ' \
                                 'caps=video/x-raw,format=BGR,width=640,height=480,framerate={}/1 ' \
                                 '! videoconvert ! video/x-raw,format=I420 ' \
                                 '! x264enc speed-preset=ultrafast tune=zerolatency ' \
                                 '! rtph264pay config-interval=1 name=pay0 pt=96'.format(self.fps)
    
        def on_need_data(self, src, length):
            if self.cap.isOpened():
                ret, frame = self.cap.read()
                if ret:
                    data = frame.tobytes()  # raw BGR bytes of the frame
                    buf = Gst.Buffer.new_allocate(None, len(data), None)
                    buf.fill(0, data)
                    buf.duration = self.duration
                    timestamp = self.number_frames * self.duration
                    buf.pts = buf.dts = int(timestamp)
                    buf.offset = timestamp
                    self.number_frames += 1
                    retval = src.emit('push-buffer', buf)
                    print('pushed buffer, frame {}, duration {} ns, duration {} s'.format(self.number_frames,
                                                                                          self.duration,
                                                                                          self.duration / Gst.SECOND))
                    if retval != Gst.FlowReturn.OK:
                        print(retval)
    
        def do_create_element(self, url):
            return Gst.parse_launch(self.launch_string)
    
        def do_configure(self, rtsp_media):
            self.number_frames = 0
            appsrc = rtsp_media.get_element().get_child_by_name('source')
            appsrc.connect('need-data', self.on_need_data)
    
    
    class GstServer(GstRtspServer.RTSPServer):
        def __init__(self, **properties):
            super(GstServer, self).__init__(**properties)
            self.factory = SensorFactory()
            self.factory.set_shared(True)
            self.get_mount_points().add_factory("/test", self.factory)
            self.attach(None)
    
    
    GObject.threads_init()
    Gst.init(None)
    
    server = GstServer()
    
    loop = GObject.MainLoop()
    loop.run()
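    
    To check that the stream is actually being served, I read it back from a second process with OpenCV. This is just a quick test sketch: it assumes the server is reachable on the default gst-rtsp-server port 8554 with the /test mount point used above, and that the local OpenCV build can open RTSP URLs (FFMPEG or GStreamer support); a gst-launch-1.0 or VLC client would work just as well.
    
    #!/usr/bin/env python3
    
    import cv2
    
    # 8554 is the default port of GstRtspServer.RTSPServer,
    # /test is the mount point added in GstServer above
    cap = cv2.VideoCapture('rtsp://127.0.0.1:8554/test')
    
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            break
        cv2.imshow('rtsp test client', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    
    cap.release()
    cv2.destroyAllWindows()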