node.js, gstreamer, http-live-streaming

How to use GStreamer to directly stream to a web browser?


There are many examples online that use a GStreamer pipeline with "tcpclientsink" or "udpsink" together with a NodeJS server to deliver the pipeline output to a web browser.

But I could not find any example or documentation that clearly explains how to use the webrtcbin element with a NodeJS server to send the stream to a web browser. (An alternative to webrtcbin would be fine, too.)

I have the following GStreamer pipeline:

gst-launch-1.0 videotestsrc  \
! queue ! vp8enc ! rtpvp8pay \
! application/x-rtp,media=video,encoding-name=VP8,payload=96 \
! webrtcbin name=sendrecv

Can someone help me consume this pipeline with a NodeJS-based server so the stream can be displayed in a web browser?

Here is a similar example, but it uses tcpclientsink: https://tewarid.github.io/2011/04/26/stream-live-webm-video-to-browser-using-node.js-and-gstreamer.html


Solution

  • Update: In the end, I was able to get the GStreamer stream to the browser using NodeJS, following the tutorial mentioned in the question. Here is proof-of-concept code that anyone can use if needed (or in case the tutorial link disappears from the internet):

    var express = require('express')
    var http = require('http')
    var net = require('net');
    var child = require('child_process');
    require('log-timestamp');   //adds timestamp in console.log()
    
    var app = express();
    app.use(express.static(__dirname + '/'));
    
    var httpServer = http.createServer(app);
    const port = 9001;  //change port number if required
    
    var gstMuxer;       //handle to the spawned GStreamer process, shared by /stream and /stop
    
    //send the html page which holds the video tag
    app.get('/', function (req, res) {
        res.sendFile(__dirname + '/index.html');    //send the file itself, not the literal string 'index.html'
    });
    
    //stop the connection
    app.post('/stop', function (req, res) {
        console.log('Connection closed using /stop endpoint.');
    
        if (gstMuxer != undefined) {
            gstMuxer.kill();    //killing GStreamer Pipeline
            console.log(`After gstkill in connection`);
        }
        gstMuxer = undefined;
        res.end();
    });
    
    //send the video stream
    app.get('/stream', function (req, res) {
    
        res.writeHead(200, {
            'Content-Type': 'video/mp4',    //must match the container produced by mp4mux in the pipeline below
        });
    
        var tcpServer = net.createServer(function (socket) {
            socket.on('data', function (data) {
                res.write(data);
            });
            socket.on('close', function (had_error) {
                console.log('Socket closed.');
                res.end();
            });
        });
    
        tcpServer.maxConnections = 1;
    
        tcpServer.listen(function () {
            console.log("Connection started.");
            if (gstMuxer == undefined) {
                console.log("inside gstMuxer == undefined");
                var cmd = 'gst-launch-1.0';
                var args = getGstPipelineArguments(this);
                gstMuxer = child.spawn(cmd, args);  //assign the shared variable (no 'var') so /stop can kill the pipeline
    
                gstMuxer.stderr.on('data', onSpawnError);
                gstMuxer.on('exit', onSpawnExit);
    
            }
            else {
                console.log("New GST pipeline rejected because gstMuxer != undefined.");
            }
        });
    });
    
    httpServer.listen(port);
    console.log(`Camera Stream App listening at http://localhost:${port}`)
    
    process.on('uncaughtException', function (err) {
        console.log(err);
    });
    
    //functions
    function onSpawnError(data) {
        console.log(data.toString());
    }
    
    function onSpawnExit(code) {
        if (code != null) {
            console.log('GStreamer error, exit code ' + code);
        }
    }
    
    function getGstPipelineArguments(tcpServer) {
        //Replace 'videotestsrc', 'pattern=ball' with your camera source in the GStreamer pipeline arguments below.
        //Note: every pipeline element and property must be passed as a separate single-quoted string, as done below.
        //Note: 'vpuenc_h264' is the i.MX hardware H.264 encoder; a software alternative is sketched after this code block.
        var args =
            ['videotestsrc', 'pattern=ball',
                '!', 'video/x-raw,width=320,height=240,framerate=100/1',
                '!', 'vpuenc_h264', 'bitrate=2000',
                '!', 'mp4mux', 'fragment-duration=10',
                '!', 'tcpclientsink', 'host=localhost',
                'port=' + tcpServer.address().port];
        return args;
    }
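
    A note on the encoder: 'vpuenc_h264' is the hardware H.264 encoder provided by the i.MX GStreamer plugins, so the arguments above only work on boards that ship that element. On other machines a software encoder can be substituted; the sketch below is my addition (it assumes the x264 plugin is installed) and keeps the same fragmented-MP4 muxing and tcpclientsink output, so it can be passed to child.spawn in place of getGstPipelineArguments:

    //Sketch: software-encoder variant of getGstPipelineArguments() for machines without 'vpuenc_h264'.
    //'x264enc tune=zerolatency' trades some compression efficiency for a low startup delay.
    function getGstPipelineArgumentsSoftware(tcpServer) {
        var args =
            ['videotestsrc', 'pattern=ball',
                '!', 'video/x-raw,width=320,height=240,framerate=30/1',
                '!', 'x264enc', 'tune=zerolatency', 'bitrate=2000',
                '!', 'h264parse',
                '!', 'mp4mux', 'fragment-duration=10',
                '!', 'tcpclientsink', 'host=localhost',
                'port=' + tcpServer.address().port];
        return args;
    }

    Whether the resulting fragmented MP4 plays back smoothly in a plain video tag still depends on the browser; this mirrors the progressive-download approach of the tutorial rather than proper adaptive streaming.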
    

    And also sharing the HTML code:

    <!DOCTYPE html>
    
    <head>
        <title>GStreamer with NodeJS Demo</title>
        <meta name="viewport" content="width=device-width, initial-scale=0.9">
    
        <style>
            html,
            body {
                overflow: hidden;
            }
        </style>
        
        <script>
            function buffer() {
                //Start playback as soon as possible to minimize latency at startup
                var dStream = document.getElementById('vidStream');

                //play() returns a promise; rejections (e.g. autoplay blocked) are not caught by try/catch
                dStream.play().catch(function (error) {
                    console.log("Error in buffer() method.");
                    console.log(error);
                });
            }
        </script>
    </head>
    
    <body onload="buffer();">
        <video id="vidStream" width="640" height="480" muted>
            <source src="/stream" type="video/mp4" />
            <source src="/stream" type="video/webm" />
            <source src="/stream" type="video/ogg" />
            <!-- fallback -->
            Your browser does not support the <code>video</code> element.
        </video>
    </body>
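
    To run the demo, install the express and log-timestamp packages, save the server code next to this page (served as index.html), start it with node, and open http://localhost:9001; the video element then requests /stream, which spawns the GStreamer pipeline on the first TCP connection.

    The server also exposes a POST /stop endpoint that kills the pipeline, but the page above never calls it. Here is a minimal client-side sketch (my addition; the stopStream function and any button wired to it are hypothetical) that could live in the page's script block:

    // Hypothetical helper: pause the video element and ask the server to kill the
    // GStreamer pipeline via the POST /stop endpoint defined in the NodeJS code above.
    function stopStream() {
        var vid = document.getElementById('vidStream');
        vid.pause();
        fetch('/stop', { method: 'POST' })
            .then(function () { console.log('Pipeline stop requested.'); })
            .catch(function (err) { console.log(err); });
    }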