Tags: unity-game-engine, webrtc, video-streaming, streaming, rtcpeerconnection

Setting resolution for WebRTC using Unity Render Streaming to stream video file in a specific resolution


I have a 360 video file on my computer that I am streaming to Unity and displaying on a skybox. The video's original resolution is 1920x960, but when I view it inside Unity the resolution is clearly lower. Specifically, I am using the WebApp: I modified the bidirectional sample so that it only sends the video, and I am using the Receiver Sample from the Unity Render Streaming plugin to visualize the stream. I can tell that the resolution is being adjusted based on my current bandwidth, but that's not what I want. What I want is to force the stream to keep a resolution of 1920x960, and I have not been able to.

I tried using applyConstraints, changing the codec to H.264, setting the default resolution to 1920x960, and adding that resolution to the array of possible resolutions. But it appears that my attempts are overridden and the resolution is forced down based on the current bandwidth.

ReceiverSample.cs

using System;
using UnityEngine;
using UnityEngine.UI;

namespace Unity.RenderStreaming
{
    /// <summary>
    /// Receives a WebRTC video stream through Unity Render Streaming and projects
    /// the incoming texture onto the scene skybox for 360-degree playback.
    /// </summary>
    public class ReceiverSample : MonoBehaviour
    {
#pragma warning disable 0649
        [SerializeField] private Button startButton;
        [SerializeField] private Button stopButton;
        [SerializeField] private ReceiveVideoViewer receiveVideoViewer;
        [SerializeField] private SingleConnection connection;
        [SerializeField] private Material skyboxMaterial;
        [SerializeField] private Camera mainCamera;
        [SerializeField] private Canvas canvas;
#pragma warning restore 0649

        // Fixed id shared with the sending web app; both peers must use the same id.
        private string connectionId = "68052";

        void Awake()
        {
            // Placeholder image shown on the skybox until the first video frame arrives.
            Texture2D fallbackTexture = Resources.Load<Texture2D>("exemplo");
            if (fallbackTexture == null)
            {
                Debug.LogError("Fallback TestImage not found in Resources folder!");
            }

            // Skybox material setup: fall back to the Resources copy when the
            // material was not assigned in the inspector.
            if (skyboxMaterial == null)
            {
                skyboxMaterial = Resources.Load<Material>("360Material");
                if (skyboxMaterial == null)
                {
                    Debug.LogError("360Material not found in Resources folder!");
                }
                else
                {
                    Debug.Log("360Material successfully assigned.");
                    skyboxMaterial.SetTexture("_MainTex", fallbackTexture);
                    RenderSettings.skybox = skyboxMaterial;
                    DynamicGI.UpdateEnvironment();
                }
            }

            if (mainCamera == null)
            {
                mainCamera = Camera.main;
                if (mainCamera == null)
                {
                    Debug.LogError("Main camera not found!");
                }
                else
                {
                    Debug.Log("Main camera successfully assigned.");
                }
            }

            // BUGFIX: the buttons were never wired to their handlers, so OnStop
            // was unreachable even though the stop button was shown as active.
            startButton?.onClick.AddListener(OnStart);
            stopButton?.onClick.AddListener(OnStop);

            receiveVideoViewer.OnUpdateReceiveTexture += texture =>
            {
                if (texture == null)
                {
                    Debug.LogError("No texture received!");
                    return;
                }

                Debug.Log($"Received texture with dimensions: {texture.width}x{texture.height}");

                // Guard against the material having failed to load in Awake;
                // without this the callback throws a NullReferenceException.
                if (skyboxMaterial == null)
                {
                    Debug.LogError("Skybox material is missing; cannot display the video texture.");
                    return;
                }

                skyboxMaterial.SetTexture("_MainTex", texture);
                RenderSettings.skybox = skyboxMaterial;
                DynamicGI.UpdateEnvironment();
                Debug.Log("Skybox material updated with video texture.");

                if (canvas != null)
                {
                    Debug.Log("Switching Canvas to World Space mode.");
                    canvas.renderMode = RenderMode.WorldSpace;
                    canvas.transform.position = new Vector3(0, 1000, 0);
                    canvas.transform.localScale = new Vector3(0.1f, 0.1f, 0.1f);
                    Invoke(nameof(DeactivateCanvas), 2f);
                }
            };
        }

        void Start()
        {
            // Guard: Awake only logs when no camera is found, so dereferencing
            // without a check here would throw.
            if (mainCamera != null)
            {
                mainCamera.transform.position = new Vector3(476f, 238f, -596f);  // Set camera position to X: 476, Y: 238, Z: -596
            }

            // Auto-start the connection when the scene loads.
            OnStart();
        }

        private void OnStart()
        {
            connection.CreateConnection(connectionId, true);

            startButton?.gameObject.SetActive(false);
            stopButton?.gameObject.SetActive(true);
        }

        private void OnStop()
        {
            connection.DeleteConnection(connectionId);
            // BUGFIX: the id used to be cleared here (connectionId = String.Empty),
            // which made any subsequent OnStart create a connection with an empty
            // id. The fixed id is kept so start/stop can be toggled repeatedly.
            startButton?.gameObject.SetActive(true);
            stopButton?.gameObject.SetActive(false);
        }

        // Invoked (delayed) from the texture callback to hide the UI canvas.
        private void DeactivateCanvas()
        {
            if (canvas != null)
            {
                canvas.gameObject.SetActive(false);
            }
        }
    }
}

main.js

import { SendVideo } from "./sendvideo.js";
import { getServerConfig, getRTCConfiguration } from "../../js/config.js";
import { createDisplayStringArray } from "../../js/stats.js";
import { RenderStreaming } from "../../module/renderstreaming.js";
import { Signaling, WebSocketSignaling } from "../../module/signaling.js";

// Target resolution this sample tries to force for the outgoing stream.
const defaultStreamWidth = 1920; // Set to custom resolution width
const defaultStreamHeight = 960; // Set to custom resolution height
// Preset resolutions offered in the resolution <select> (landscape first,
// then portrait); 1920x960 was appended for the 360 video source.
const streamSizeList = [
  { width: 640, height: 360 },
  { width: 1280, height: 720 },
  { width: 1920, height: 1080 },
  { width: 2560, height: 1440 },
  { width: 3840, height: 2160 },
  { width: 360, height: 640 },
  { width: 720, height: 1280 },
  { width: 1080, height: 1920 },
  { width: 1440, height: 2560 },
  { width: 2160, height: 3840 },
  { width: 1920, height: 960 } // Add custom resolution to the list
];

// Cached DOM references used throughout this page.
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');
const localVideoStatsDiv = document.getElementById('localVideoStats');
const remoteVideoStatsDiv = document.getElementById('remoteVideoStats');
const textForConnectionId = document.getElementById('textForConnectionId');
textForConnectionId.value = getRandom();
const videoSelect = document.querySelector('select#videoSource');
const audioSelect = document.querySelector('select#audioSource');
const videoResolutionSelect = document.querySelector('select#videoResolution');
const cameraWidthInput = document.querySelector('input#cameraWidth');
const cameraHeightInput = document.querySelector('input#cameraHeight');

const codecPreferences = document.getElementById('codecPreferences');
// Feature-detect RTCRtpTransceiver.setCodecPreferences (not available in all browsers).
const supportsSetCodecPreferences = window.RTCRtpTransceiver &&
  'setCodecPreferences' in window.RTCRtpTransceiver.prototype;
const messageDiv = document.getElementById('message');
messageDiv.style.display = 'none';

// When true, the width/height <input>s drive the requested capture resolution.
let useCustomResolution = true; // Ensure custom resolution is used

setUpInputSelect();
showCodecSelect();

/** @type {SendVideo} */
let sendVideo = new SendVideo(localVideo, remoteVideo);
/** @type {RenderStreaming} */
let renderstreaming;
let useWebSocket;
let connectionId;

// Button wiring: start local video -> set up the peer -> hang up.
const startButton = document.getElementById('startVideoButton');
startButton.addEventListener('click', startVideo);
const setupButton = document.getElementById('setUpButton');
setupButton.addEventListener('click', setUp);
const hangUpButton = document.getElementById('hangUpButton');
hangUpButton.addEventListener('click', hangUp);

// Best-effort session teardown when the tab is closed or navigated away.
window.addEventListener('beforeunload', async () => {
  if (!renderstreaming)
    return;
  await renderstreaming.stop();
}, true);

setupConfig();

/**
 * Fetches the server configuration and remembers which signaling transport
 * (WebSocket vs. HTTP polling) to use; warns when the server runs in a
 * mode this sample does not support.
 */
async function setupConfig() {
  const serverConfig = await getServerConfig();
  useWebSocket = serverConfig.useWebSocket;
  showWarningIfNeeded(serverConfig.startupMode);
}

/**
 * Shows a warning banner when the signaling server runs in Public Mode,
 * which this sample does not support.
 * @param {string} startupMode - startup mode reported by the config endpoint
 */
function showWarningIfNeeded(startupMode) {
  const warningDiv = document.getElementById("warning");
  // Strict comparison: startupMode is a string, so `===` avoids the
  // loose-equality coercion pitfalls of `==`.
  if (startupMode === "public") {
    warningDiv.innerHTML = "<h4>Warning</h4> This sample is not working on Public Mode.";
    warningDiv.hidden = false;
  }
}

/**
 * Starts playback/capture of the local video file, pre-fills the custom
 * resolution inputs with the 1920x960 target, and unlocks the setup step.
 */
async function startVideo() {
  // Prevent a second click while the local stream is being prepared.
  startButton.disabled = true;

  await sendVideo.startLocalVideo();

  // Pre-fill and unlock the custom resolution inputs.
  cameraWidthInput.value = defaultStreamWidth;
  cameraHeightInput.value = defaultStreamHeight;
  for (const input of [cameraWidthInput, cameraHeightInput]) {
    input.disabled = false;
  }
  useCustomResolution = true;

  // The peer can be set up once the local video is running.
  setupButton.disabled = false;
}

// Creates the signaling channel and RenderStreaming session, then attaches
// every local track as a send-only transceiver once the peer connects.
async function setUp() {
  setupButton.disabled = true;
  hangUpButton.disabled = false;
  connectionId = textForConnectionId.value;
  codecPreferences.disabled = true;

  // Pick the signaling transport reported by the server config.
  const signaling = useWebSocket ? new WebSocketSignaling() : new Signaling();
  const config = getRTCConfiguration();
  renderstreaming = new RenderStreaming(signaling, config);
  // The callbacks must be assigned before start() so no events are missed.
  renderstreaming.onConnect = () => {
    const tracks = sendVideo.getLocalTracks();
    for (const track of tracks) {
      // 'sendonly': this page only publishes; the Unity side receives.
      renderstreaming.addTransceiver(track, { direction: 'sendonly' });
    }
    // Codec preferences can only be applied once the transceivers exist.
    setCodecPreferences();
    showStatsMessage();
  };
  renderstreaming.onDisconnect = () => {
    hangUp();
  };

  await renderstreaming.start();
  await renderstreaming.createConnection(connectionId);
}

/**
 * Applies the codec selected in the codec <select> to every video
 * transceiver of the current session. No-op when the browser lacks
 * setCodecPreferences support or no specific codec is chosen.
 */
function setCodecPreferences() {
  /** @type {RTCRtpCodecCapability[] | null} */
  let selectedCodecs = null;
  if (supportsSetCodecPreferences) {
    const preferredCodec = codecPreferences.options[codecPreferences.selectedIndex];
    if (preferredCodec.value !== '') {
      // Option values are "<mimeType> <sdpFmtpLine>" (see showCodecSelect).
      const [mimeType, sdpFmtpLine] = preferredCodec.value.split(' ');
      const { codecs } = RTCRtpSender.getCapabilities('video');
      const selectedCodecIndex = codecs.findIndex(c => c.mimeType === mimeType && c.sdpFmtpLine === sdpFmtpLine);
      // BUGFIX: findIndex returns -1 when the codec is no longer reported;
      // the original code then built [undefined], which makes
      // setCodecPreferences throw. Only use a codec that was actually found.
      if (selectedCodecIndex >= 0) {
        selectedCodecs = [codecs[selectedCodecIndex]];
      }
    }
  }

  if (selectedCodecs == null) {
    return;
  }
  const transceivers = renderstreaming.getTransceivers().filter(t => t.receiver.track.kind == "video");
  if (transceivers && transceivers.length > 0) {
    transceivers.forEach(t => t.setCodecPreferences(selectedCodecs));
  }
}

// Tears down the current session: stops the stats polling, deletes the
// connection on the signaling server, and resets the UI for a new session.
async function hangUp() {
  clearStatsMessage();
  messageDiv.style.display = 'block';
  messageDiv.innerText = `Disconnect peer on ${connectionId}.`;

  hangUpButton.disabled = true;
  setupButton.disabled = false;
  // Delete the connection before stopping so the remote peer is notified.
  await renderstreaming.deleteConnection();
  await renderstreaming.stop();
  renderstreaming = null;
  remoteVideo.srcObject = null;

  // Prepare a fresh random connection id for the next session.
  textForConnectionId.value = getRandom();
  connectionId = null;
  if (supportsSetCodecPreferences) {
    codecPreferences.disabled = false;
  }
}

/**
 * Generates a random, zero-padded 5-character numeric connection id,
 * e.g. 42 -> "00042".
 * @returns {string} a string of exactly `length` decimal digits
 */
function getRandom() {
  const max = 99999;
  const length = String(max).length;
  const number = Math.floor(Math.random() * max);
  // padStart is the idiomatic replacement for the old
  // `Array(length).join('0') + n` / slice(-length) padding hack.
  return String(number).padStart(length, '0');
}

// Populates the camera/microphone device selectors and the resolution
// selector. Note: enumerateDevices() returns empty labels until the user
// has granted media permissions, hence the "camera N"/"mic N" fallbacks.
async function setUpInputSelect() {
  const deviceInfos = await navigator.mediaDevices.enumerateDevices();

  for (let i = 0; i !== deviceInfos.length; ++i) {
    const deviceInfo = deviceInfos[i];
    if (deviceInfo.kind === 'videoinput') {
      const option = document.createElement('option');
      option.value = deviceInfo.deviceId;
      option.text = deviceInfo.label || `camera ${videoSelect.length + 1}`;
      videoSelect.appendChild(option);
    } else if (deviceInfo.kind === 'audioinput') {
      const option = document.createElement('option');
      option.value = deviceInfo.deviceId;
      option.text = deviceInfo.label || `mic ${audioSelect.length + 1}`;
      audioSelect.appendChild(option);
    }
  }

  // One option per preset resolution; the option's value is the list index.
  for (let i = 0; i < streamSizeList.length; i++) {
    const streamSize = streamSizeList[i];
    const option = document.createElement('option');
    option.value = i;
    option.text = `${streamSize.width} x ${streamSize.height}`;
    videoResolutionSelect.appendChild(option);
  }

  // The 'Custom' option sits one past the end of the preset list.
  const option = document.createElement('option');
  option.value = streamSizeList.length;
  option.text = 'Custom';
  videoResolutionSelect.appendChild(option);
  // NOTE(review): length - 1 selects the LAST PRESET (1920x960), not the
  // 'Custom' option (which is at index length). If 'Custom' was intended,
  // this should be streamSizeList.length — confirm which default is wanted.
  videoResolutionSelect.value = streamSizeList.length - 1; // Default to custom resolution

  videoResolutionSelect.addEventListener('change', (event) => {
    // Values >= streamSizeList.length correspond to the 'Custom' option
    // (string >= number comparison coerces the option value).
    const isCustom = event.target.value >= streamSizeList.length;
    cameraWidthInput.disabled = !isCustom;
    cameraHeightInput.disabled = !isCustom;
    useCustomResolution = isCustom;
  });
}

/**
 * Fills the codec <select> with the browser's sendable video codecs, or
 * shows an explanatory message when codec preferences are unsupported.
 */
function showCodecSelect() {
  if (!supportsSetCodecPreferences) {
    messageDiv.style.display = 'block';
    messageDiv.innerHTML = `Current Browser does not support <a href="https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver/setCodecPreferences">RTCRtpTransceiver.setCodecPreferences</a>.`;
    return;
  }

  // RTP helper formats that are not real video codecs are skipped.
  const helperMimeTypes = ['video/red', 'video/ulpfec', 'video/rtx'];
  for (const capability of RTCRtpSender.getCapabilities('video').codecs) {
    if (helperMimeTypes.includes(capability.mimeType)) {
      continue;
    }
    const entry = document.createElement('option');
    entry.value = (capability.mimeType + ' ' + (capability.sdpFmtpLine || '')).trim();
    entry.innerText = entry.value;
    codecPreferences.appendChild(entry);
  }
  codecPreferences.disabled = false;
}

// Previous stats report (used for delta display) and the polling timer
// handle; both are managed by showStatsMessage/clearStatsMessage.
let lastStats;
let intervalId;

/**
 * Starts a 1 Hz polling loop that displays the local/remote video
 * resolutions and the WebRTC stats report. Stopped by clearStatsMessage().
 */
function showStatsMessage() {
  intervalId = setInterval(async () => {
    // Resolution labels only make sense once the <video> has dimensions.
    if (localVideo.videoWidth) {
      localVideoStatsDiv.innerHTML = `<strong>Sending resolution:</strong> ${localVideo.videoWidth} x ${localVideo.videoHeight} px`;
    }
    if (remoteVideo.videoWidth) {
      remoteVideoStatsDiv.innerHTML = `<strong>Receiving resolution:</strong> ${remoteVideo.videoWidth} x ${remoteVideo.videoHeight} px`;
    }

    // Without an active session there are no stats to fetch.
    if (renderstreaming == null || connectionId == null) {
      return;
    }

    const statsReport = await renderstreaming.getStats();
    if (statsReport == null) {
      return;
    }

    const displayLines = createDisplayStringArray(statsReport, lastStats);
    if (displayLines.length) {
      messageDiv.style.display = 'block';
      messageDiv.innerHTML = displayLines.join('<br>');
    }
    lastStats = statsReport;
  }, 1000);
}

/**
 * Stops the stats polling loop and wipes every on-screen stats element.
 */
function clearStatsMessage() {
  // Stop the timer if it is running.
  if (intervalId) {
    clearInterval(intervalId);
  }
  intervalId = null;
  lastStats = null;

  // Clear all stats output and hide the message area.
  for (const statsDiv of [localVideoStatsDiv, remoteVideoStatsDiv]) {
    statsDiv.innerHTML = '';
  }
  messageDiv.style.display = 'none';
  messageDiv.innerHTML = '';
}

sendvideo.js

import * as Logger from "../../module/logger.js";

/**
 * Captures a local video file, publishes it over WebRTC, and mirrors any
 * remote tracks into the second <video> element.
 */
export class SendVideo {
  /**
   * @param {HTMLVideoElement} localVideoElement - preview of the outgoing stream
   * @param {HTMLVideoElement} remoteVideoElement - sink for remote tracks
   */
  constructor(localVideoElement, remoteVideoElement) {
    this.localVideo = localVideoElement;
    this.remoteVideo = remoteVideoElement;
    this.peerConnection = null;
  }

  /**
   * Plays the source file in a hidden <video>, captures it as a MediaStream,
   * and tries to pin the track to 1920x960 before wiring up the connection.
   */
  async startLocalVideo() {
    try {
      const videoElement = document.createElement('video');
      videoElement.src = '/videos/video.mp4'; // Path to your video file
      videoElement.muted = true;
      await videoElement.play();

      const stream = videoElement.captureStream();
      this.localVideo.srcObject = stream;

      // BUGFIX: applyConstraints() takes a MediaTrackConstraints object
      // directly. The original code wrapped it in { video: ... } (the
      // getUserMedia shape), so every constraint was silently ignored.
      const videoTrack = stream.getVideoTracks()[0];
      await videoTrack.applyConstraints({
        width: { exact: 1920 },   // Force width to 1920
        height: { exact: 960 },   // Force height to 960
        frameRate: { ideal: 30 }, // Target 30 FPS
      });
      // NOTE(review): a track from captureStream() mirrors the file's native
      // resolution, so if the file really is 1920x960 these constraints are a
      // no-op; any downscaling observed remotely happens in the encoder
      // (bandwidth adaptation), not in the capture — confirm with getStats().

      await this.localVideo.play();

      // Initialize WebRTC connection and data channel
      this.initializePeerConnection();

    } catch (err) {
      Logger.error(`Error starting local video: ${err}`);
    }
  }

  /**
   * Sets up the RTCPeerConnection, an optional DataChannel, H.264 codec
   * preference, and creates the initial (munged) SDP offer.
   */
  initializePeerConnection() {
    this.peerConnection = new RTCPeerConnection();

    // Create a DataChannel (optional)
    const dataChannel = this.peerConnection.createDataChannel("myDataChannel");
    dataChannel.onopen = () => {
      console.log("DataChannel open: Connection established with Unity client");
    };
    dataChannel.onclose = () => {
      console.log("DataChannel closed");
    };

    // Set up connection state change event
    this.peerConnection.onconnectionstatechange = () => {
      if (this.peerConnection.connectionState === 'connected') {
        console.log("WebRTC connection established with Unity");
      }
    };

    // BUGFIX: pass the stream to addTrack so the remote side receives the
    // track associated with a usable MediaStream (addTrack(track) alone
    // yields a streamless track in the remote ontrack event).
    const localStream = this.localVideo.srcObject;
    const videoTrack = localStream.getVideoTracks()[0];
    this.peerConnection.addTrack(videoTrack, localStream);

    // Prefer H.264 on every video transceiver.
    const transceivers = this.peerConnection.getTransceivers();
    transceivers.forEach(transceiver => {
      if (transceiver.sender.track && transceiver.sender.track.kind === 'video') {
        const capabilities = RTCRtpSender.getCapabilities('video');
        const h264Codec = capabilities.codecs.find(codec => codec.mimeType === 'video/H264');
        if (h264Codec) {
          transceiver.setCodecPreferences([h264Codec]);
          console.log("H.264 codec has been set for the video stream.");
        } else {
          console.log("H.264 codec is not supported.");
        }
      }
    });

    // Create an SDP offer and munge it to drop VP8/VP9 and cap bandwidth.
    this.peerConnection.createOffer().then(offer => {
      let sdp = offer.sdp;

      // BUGFIX: SDP lines end with CRLF ("\r\n"); the original "\n"-only
      // patterns never matched, so all of this munging was a no-op.
      sdp = sdp.replace(/a=rtpmap:\d+ VP8\/90000\r?\n/g, '');
      sdp = sdp.replace(/a=rtpmap:\d+ VP9\/90000\r?\n/g, '');

      // Bandwidth cap: 5000 kbps is a reasonable budget for 1920x960@30;
      // the original 500 kbps (commented "for 224x224") forced the encoder
      // to downscale, defeating the stated 1920x960 goal.
      // NOTE(review): browsers usually emit numeric mids ("a=mid:0"), so
      // this pattern may still not match — verify against the actual offer.
      sdp = sdp.replace(/a=mid:video\r?\n/g, 'a=mid:video\r\nb=AS:5000\r\n');

      // Apply the modified SDP offer
      offer.sdp = sdp;
      return this.peerConnection.setLocalDescription(offer);
    }).then(() => {
      console.log("Offer created with H.264 codec and resolution constraints");
    }).catch(err => console.error("Error creating offer:", err));
  }

  /** @returns {MediaStreamTrack[]} all tracks of the captured local stream */
  getLocalTracks() {
    return this.localVideo.srcObject.getTracks();
  }

  /**
   * Attaches an incoming remote track to the remote <video>, creating its
   * MediaStream lazily on first use.
   * @param {MediaStreamTrack} track
   */
  addRemoteTrack(track) {
    if (this.remoteVideo.srcObject == null) {
      this.remoteVideo.srcObject = new MediaStream();
    }
    this.remoteVideo.srcObject.addTrack(track);
  }
}

I would like to know how to force the resolution and/or other streaming parameters to what I want or where exactly do I need to adjust those parameters.


Solution

  • I was using the default codec for streaming the video; for a 1080p video, I needed to use VP9 or H.265 (HEVC) instead.