dart · webrtc · flutter-web · dart-html · flutter-webrtc

How can I get the native MediaStreamTrack from a WebRTC MediaStreamTrackWeb object?


I want to mix MediaStreamTrack objects in Dart using the package:universal_html/js.dart library.

      JsAudioContext audioContext = JsAudioContext();
      audioContext.initialize();
      var senders = await call!.peerConnection!.getSenders();
      for (var sender in senders) {
        for (var track in senderTracks) {
          if (sender.track!.id != track.id) {
            audioContext.connect(track);
          }
        }
      }

But flutter_webrtc hides the native jsTrack object inside the MediaStreamTrackWeb object. How can I access this object? Does anyone have an idea?


Solution

  • I found a solution using the js_bindings library. However, that library's MediaStream.getTracks() method throws a type error when called directly, so I worked around it by calling the method through js_util interop instead.

    JsAudioContext.dart:

    import 'dart:convert';
    import 'package:flutter_webrtc/flutter_webrtc.dart' as webrtc;
    import 'package:dart_webrtc/src/media_stream_track_impl.dart' as track_impl;
    import 'package:js_bindings/js_bindings.dart' as js_bindings;
    import 'package:universal_html/html.dart' as html;
    import 'dart:js_util' as js_util;
    
    /// Mixes multiple WebRTC audio tracks into a single output track using
    /// the Web Audio API.
    ///
    /// Usage: call [createMediaStreamDestination] once per mix, [connect]
    /// each source track, then [getMixedTrack] to obtain the combined track.
    class JsAudioContext {
      js_bindings.AudioContext? audioContext;
      js_bindings.MediaStreamAudioDestinationNode? destinationNode;

      JsAudioContext() {
        audioContext = js_bindings.AudioContext();
      }

      /// Creates the destination node that collects every connected source.
      ///
      /// Must be called before [connect] or [getMixedTrack].
      void createMediaStreamDestination() {
        destinationNode = audioContext?.createMediaStreamDestination();
      }

      /// Routes [trackWeb] into the current mix.
      ///
      /// Throws [ArgumentError] if [trackWeb] is null and [StateError] if
      /// [createMediaStreamDestination] has not been called yet, instead of
      /// failing with an opaque cast / null-assertion error.
      void connect(webrtc.MediaStreamTrack? trackWeb) {
        if (trackWeb == null) {
          throw ArgumentError.notNull('trackWeb');
        }
        final destination = destinationNode;
        if (destination == null) {
          throw StateError(
              'createMediaStreamDestination() must be called before connect().');
        }
        // Unwrap the native browser track hidden inside the web implementation.
        final mediaStreamTrackWeb = trackWeb as track_impl.MediaStreamTrackWeb;
        final html.MediaStreamTrack htmlTrack = mediaStreamTrackWeb.jsTrack;
        // A source node takes a MediaStream, so wrap the single track in one.
        final sourceStream = audioContext?.createMediaStreamSource(
            js_bindings.MediaStream([htmlTrack as js_bindings.MediaStreamTrack]));
        sourceStream?.connect(destination);
      }

      /// Returns the mixed output of all connected sources as a WebRTC track.
      ///
      /// Calls getTracks() via js_util interop because js_bindings'
      /// MediaStream.getTracks() throws a type error when invoked directly.
      ///
      /// Throws [StateError] if [createMediaStreamDestination] was not called.
      webrtc.MediaStreamTrack getMixedTrack() {
        final destination = destinationNode;
        if (destination == null) {
          throw StateError(
              'createMediaStreamDestination() must be called before getMixedTrack().');
        }
        final List<dynamic> outputTracks =
            js_util.callMethod(destination.stream, 'getTracks', []);

        // The destination stream holds exactly the tracks we connected;
        // outputTracks is already a List, so take the first element directly.
        return track_impl.MediaStreamTrackWeb(
            outputTracks.first as html.MediaStreamTrack);
      }
    }
    

    sip_call_event_service.dart:

    @override
      Future startConference(List<SipCallData> activeCallList) async {
        List<webrtc.MediaStreamTrack> receivedTracks = <webrtc.MediaStreamTrack>[];
    
        for (var item in activeCallList) {
          Call? call = sipuaHelper!.findCall(item.id!);
          var receives = await call!.peerConnection!.getReceivers();
          for (var element in receives) {
            receivedTracks.add(element.track!);
          }
        }
    
        JsAudioContext jsAudioContext = JsAudioContext();
    
        for (var item in activeCallList) {
          Call? call = sipuaHelper!.findCall(item.id!);
          jsAudioContext.createMediaStreamDestination();
    
          var receivers = await call!.peerConnection!.getReceivers();
          for (var receiver in receivers) {
            for (var track in receivedTracks) {
              if (receiver.track!.id != track.id) {
                jsAudioContext.connect(track);
              }
            }
          }
    
          var senders = await call.peerConnection!.getSenders();
          for (var sender in senders) {
            jsAudioContext.connect(sender.track);
          }
    
          await senders.first.replaceTrack(jsAudioContext.getMixedTrack());
        }
      }