webrtcmultipeer-connectivitypeerjsopenwebrtc

Microphone not working while sharing the screen with system audio using PeerJS WebRTC


Screenshot 2023-07-19 182435

We have tried to share screen audio. When sharing the screen, the microphone and the screen-share audio do not work together: the mic stops working when system audio is enabled, and system audio stops working when the mic is enabled. Please explain what is causing this issue.

Here is code:

/**
 * Starts screen sharing by capturing the display and swapping the outgoing
 * video track on the existing peer connection.
 *
 * NOTE: this only replaces the VIDEO sender. Any system audio captured by
 * getDisplayMedia is never forwarded to the peer, and the microphone sender
 * is left untouched — which is why mic and system audio did not play
 * together (see the merge-based solution below).
 */
function startScreenShare() {
    // Restart cleanly if a share is already in progress.
    if (screenSharing) {
        stopScreenSharing();
    }
    // `mediaSource: "screen"` is a legacy Firefox-only constraint and is not
    // valid inside getDisplayMedia's video constraints; getDisplayMedia
    // already implies screen capture, so `video: true` suffices.
    navigator.mediaDevices.getDisplayMedia({ video: true, audio: true })
        .then((stream) => {
            setScreenSharingStream(stream);

            screenStream = stream;
            const videoTrack = screenStream.getVideoTracks()[0];
            // Fires when the user stops sharing via the browser's own UI.
            videoTrack.onended = () => {
                stopScreenSharing();
            };
            if (peer) {
                // Guard `s.track` — a sender's track can be null after a
                // prior replaceTrack(null).
                const sender = currentPeer.peerConnection
                    .getSenders()
                    .find((s) => s.track && s.track.kind === videoTrack.kind);
                if (sender) {
                    sender.replaceTrack(videoTrack);
                    screenSharing = true;
                }
            }
        })
        .catch((err) => {
            // getDisplayMedia rejects (e.g. NotAllowedError) if the user
            // cancels the screen picker — don't leave it unhandled.
            console.error('Screen share failed:', err);
        });
}

We have tried to share screen audio. When sharing the screen, the microphone and the screen-share audio do not work together: the mic stops working when system audio is enabled, and system audio stops working when the mic is enabled.

I want the microphone and the system audio to play together along with the shared screen.


Solution

  • Resolved:

    // Capture the screen (with system audio) FIRST, then the microphone,
    // then attach the mic tracks. The original snippet called
    // `screenStream1.addTrack(...)` BEFORE `const screenStream1` was
    // declared — a temporal-dead-zone ReferenceError at runtime.
    const screenStream1 = await navigator.mediaDevices.getDisplayMedia({
        video: {
            cursor: "always"
        },
        audio: {
            echoCancellation: true,
            noiseSuppression: true,
            sampleRate: 44100
        }
    });

    const audioStream = await navigator.mediaDevices.getUserMedia({ audio: true });

    // Attach the mic audio tracks to the screen stream so a local preview
    // or recorder sees both; the WebRTC audio sender gets the single merged
    // track produced by mergeTracks() below.
    audioStream.getAudioTracks().forEach((track) => {
        screenStream1.addTrack(track);
    });
    
    
    /**
     * Mixes the microphone stream and the screen's system-audio track into a
     * single MediaStream via the Web Audio API, so ONE outgoing audio sender
     * can carry both (a peer connection audio sender transmits one track).
     *
     * @param {MediaStream} audioStream   - microphone stream (getUserMedia)
     * @param {MediaStream} screenStream1 - display stream (getDisplayMedia)
     * @returns {Promise<MediaStream>} stream containing the mixed audio track
     */
    async function mergeTracks(audioStream, screenStream1) {
        const audioContext = new AudioContext();
        const dest = audioContext.createMediaStreamDestination();

        // Mic path: source -> gain(0.8) -> destination.
        // `const` declarations — the original leaked baseSource/extraSource
        // as implicit globals.
        const baseGain = audioContext.createGain();
        baseGain.gain.value = 0.8;
        const baseSource = audioContext.createMediaStreamSource(audioStream);
        baseSource.connect(baseGain).connect(dest);

        // The browser only supplies system audio if the user ticked
        // "share audio" in the picker — guard against a missing track, which
        // would otherwise make addTrack(undefined) throw.
        const displayAudioTrack = screenStream1.getAudioTracks()[0];
        if (displayAudioTrack) {
            const audioStreamTwo = new MediaStream();
            audioStreamTwo.addTrack(displayAudioTrack);
            const extraGain = audioContext.createGain();
            extraGain.gain.value = 0.8;
            const extraSource = audioContext.createMediaStreamSource(audioStreamTwo);
            extraSource.connect(extraGain).connect(dest);
        }

        // NOTE: the original ALSO connected each source straight to `dest`
        // a second time, bypassing the gain nodes and doubling the signal
        // level; that duplicate routing is removed here.
        return dest.stream;
    }
    
    
      var screenStream = screenStream1;
      Stream = await mergeTracks(audioStream, screenStream); //FUNCTION
    
    console.log('currentPeer---------', currentPeer)
    if (peer && currentPeer) {
        const videoTrack = screenStream.getVideoTracks()[0];
    
        const sender = currentPeer.peerConnection.getSenders().find((s) => s.track.kind === videoTrack.kind);
        sender.replaceTrack(videoTrack);
    
        const audioTrack = Stream.getAudioTracks()[0];
        const audioSender = currentPeer.peerConnection.getSenders().find((s) => s.track.kind === audioTrack.kind);
        audioSender.replaceTrack(audioTrack);
    
        screenSharing = true;
    }