I'm working on a 1:1 video call using Azure Communication Services (ACS) with a .NET MAUI app. I've managed to get video working both ways, but the audio only works one way: the callee can hear the caller, but the caller can't hear the callee. I've tried using RemoteAudioStream but still no luck. The permissions are set up correctly, at least on the .NET MAUI side — I have checked the app permissions multiple times, both in the manifest and in the app's details/permissions settings.
Basically from the output log I don't see any errors:
CallAgent initialized.
main.js:61 Device permissions granted.
main.js:65 Available microphones: Array(3)
main.js:71 Selected microphone: Headset earpiece
main.js:80 Incoming call received.
main.js:85 Notifying user of incoming call.
main.js:127 Call accepted.
main.js:273 Local participant is muted: false
main.js:312 Remote participant added: 8:acs:7fd9cad8-493b-4fd3-9e67-48bd84547a61_00000022-e7e6-7d84-8ed3-....
main.js:315 Remote participant is muted: false
main.js:276 Call state changed to: EarlyMedia
main.js:276 Call state changed to: Connected
main.js:280 Connected label shown.
main.js:336 Remote participant state changed to: Connected
main.js:330 Remote video stream removed
This worked and works fine when I use regular chrome browser or through the desktop, it even worked fine when I was calling from desktop to android, just not mobile to mobile.
I am providing the javascript I am using below
// Import necessary ACS modules
const { CallClient, VideoStreamRenderer, LocalVideoStream } = require('@azure/communication-calling');
const { AzureCommunicationTokenCredential } = require('@azure/communication-common');
const { AzureLogger, setLogLevel } = require("@azure/logger");
// Set the log level and output for debugging purposes
setLogLevel('error'); // Change to 'info' or 'verbose' for more detailed logs during development
// Route every SDK log line through console.error so it shows up in the
// WebView console alongside application errors.
AzureLogger.log = (...args) => {
console.error(...args);
};
// Variables to store ACS objects and call state
let callAgent; // ACS CallAgent, created in initializeCallAgent()
let deviceManager; // ACS DeviceManager for camera/microphone access
let call; // The active Call object (outgoing or accepted incoming)
let currentCall; // Incoming call before accept; mirrors `call` once connected
let localVideoStream; // LocalVideoStream built from the selected camera
let localVideoRenderer; // Renderer for the local self-view
let remoteVideoRenderer; // Renderer for the remote participant's video
let isVideoStarted = false; // Track video state
// UI elements
let remoteVideosGallery = document.getElementById('remoteVideosGallery');
let localVideoContainer = document.getElementById('localVideoContainer');
let connectedLabel = document.getElementById('connectedLabel'); // Ensure this element exists in HTML
let localAudioIndicator = document.getElementById('localAudioIndicator'); // Optional: Add this in your HTML
// Extract query parameters
// NOTE(review): these appear to be supplied by the hosting MAUI WebView via
// the page URL — confirm the host always passes all four.
const urlParams = new URLSearchParams(window.location.search);
const token = urlParams.get('token');
const userId = urlParams.get('userId');
const targetId = urlParams.get('targetId');
const role = urlParams.get('role');
// Function to select the best microphone.
// Preference order: a device whose name contains 'headset', then the
// platform default ('microphone:default'), then the first device with a
// non-empty name, then whatever is first. Returns undefined for an empty list.
// Fix: device names can be missing or empty (the original fallback on the
// last line already anticipates empty names), so calling
// m.name.toLowerCase() directly could throw a TypeError; guard it.
function selectBestMicrophone(mics) {
  // Prefer 'Headset earpiece' if available
  const headset = mics.find(m => (m.name || '').toLowerCase().includes('headset'));
  if (headset) return headset;
  // Fallback to 'microphone:default' if available
  const defaultMic = mics.find(m => m.id === 'microphone:default');
  if (defaultMic) return defaultMic;
  // Otherwise, return the first available microphone with a valid name
  return mics.find(m => m.name && m.name.trim() !== '') || mics[0];
}
// Function to initialize the Call Agent.
// Creates the CallClient/CallAgent, requests device permissions, picks a
// microphone, and registers the incoming-call handler.
// Fix: dispose any existing CallAgent before creating a new one, so a
// re-run of initialization (page reload in the WebView, token refresh)
// does not leave two agents registered for the same identity.
async function initializeCallAgent(token, userId, targetId, role) {
  try {
    if (callAgent) {
      await callAgent.dispose();
      console.log('Existing CallAgent disposed.');
    }
    const credential = new AzureCommunicationTokenCredential(token);
    const callClient = new CallClient();
    callAgent = await callClient.createCallAgent(credential);
    console.log('CallAgent initialized.');
    // Initialize the device manager to access camera and microphone
    deviceManager = await callClient.getDeviceManager();
    await deviceManager.askDevicePermission({ video: true, audio: true });
    console.log('Device permissions granted.');
    // Select the best microphone
    const microphones = await deviceManager.getMicrophones();
    console.log('Available microphones:', microphones);
    const selectedMicrophone = selectBestMicrophone(microphones);
    if (!selectedMicrophone) {
      console.error('No suitable microphone device found.');
      alert('No suitable microphone detected. Please connect a microphone and refresh the page.');
      return;
    }
    await deviceManager.selectMicrophone(selectedMicrophone);
    console.log(`Selected microphone: ${selectedMicrophone.name || 'Default Microphone'}`);
    // Handle incoming calls
    callAgent.on('incomingCall', async (args) => {
      console.log('Incoming call received.');
      currentCall = args.incomingCall;
      // Notify the user of the incoming call
      setTimeout(async () => {
        console.log('Notifying user of incoming call.');
        alert('Incoming call received. Press OK to accept.');
        await acceptCall(); // Automatically accept the call for testing
      }, 1000);
    });
  } catch (error) {
    console.error('Error initializing CallAgent:', error);
    alert(`Failed to initialize call. Error: ${error.message}`);
  }
}
// Place an outgoing 1:1 call to the user named by the `targetId` query
// parameter. Requires initializeCallAgent() to have completed first;
// audio starts unmuted, video is added later via startVideo().
async function startCall() {
  if (!callAgent) {
    console.error('CallAgent is not initialized.');
    alert('CallAgent is not initialized.');
    return;
  }
  try {
    const callee = { communicationUserId: targetId };
    const options = {
      audioOptions: { muted: false },
    };
    call = callAgent.startCall([callee], options);
    console.log('Call initiated.');
    setupCall(call);
  } catch (error) {
    console.error('Error starting call:', error);
    alert('Failed to start call. Please check the console for more details.');
  }
}
// Accept the pending incoming call stored in `currentCall`.
// Audio starts unmuted; video can be started afterwards via startVideo().
async function acceptCall() {
  if (!currentCall) {
    console.error('No incoming call to accept.');
    return;
  }
  try {
    const options = {
      audioOptions: { muted: false },
    };
    call = await currentCall.accept(options);
    console.log('Call accepted.');
    setupCall(call);
  } catch (error) {
    console.error('Error accepting call:', error);
    alert('Failed to accept the call.');
  }
}
// Reject the pending incoming call, if any, and clear the pending state.
async function declineCall() {
  if (!currentCall) return;
  try {
    await currentCall.reject();
    console.log('Call declined.');
    currentCall = null;
  } catch (error) {
    console.error('Error declining call:', error);
    alert('Failed to decline the call.');
  }
}
// End the active call, reset call/video state, and tear down the video UI.
async function hangUpCall() {
  if (!call) return;
  try {
    await call.hangUp();
    console.log('Call hung up.');
    call = null;
    currentCall = null;
    isVideoStarted = false; // video must be restarted on the next call
    // Clear video containers when the call ends
    clearVideoContainers();
  } catch (error) {
    console.error('Error hanging up call:', error);
    alert('Failed to hang up the call.');
  }
}
// Tear down both video renderers and empty their containers.
// Safe to call repeatedly: each renderer is disposed at most once.
function clearVideoContainers() {
  if (remoteVideoRenderer) {
    remoteVideoRenderer.dispose();
    remoteVideoRenderer = null;
  }
  remoteVideosGallery.innerHTML = '';
  if (localVideoRenderer) {
    localVideoRenderer.dispose();
    localVideoRenderer = null;
    localVideoContainer.hidden = true;
  }
}
// Start sending local camera video on the active call.
// Fix: createLocalVideoStream() returns null when no camera exists, and the
// null stream was still passed to call.startVideo(); bail out early instead
// (createLocalVideoStream already alerted the user in that case).
async function startVideo() {
  if (!call) {
    console.error('No active call to start video.');
    alert('No active call to start video.');
    return;
  }
  try {
    if (!localVideoStream) {
      localVideoStream = await createLocalVideoStream();
    }
    if (!localVideoStream) {
      // No camera available — user was already notified.
      return;
    }
    await call.startVideo(localVideoStream);
    console.log('Video started.');
    await displayLocalVideoStream();
    isVideoStarted = true;
  } catch (error) {
    console.error('Error starting video:', error);
    alert('Failed to start video.');
  }
}
// Stop sending local video on the active call and black out the self-view.
// The cached localVideoStream is kept so startVideo() can reuse it.
async function stopVideo() {
  const hasActiveVideo = call && isVideoStarted;
  if (!hasActiveVideo) {
    console.error('No active video to stop.');
    alert('No active video to stop.');
    return;
  }
  try {
    await call.stopVideo(localVideoStream);
    console.log('Video stopped.');
    isVideoStarted = false;
    // Drop the local preview and show a black placeholder instead.
    if (localVideoRenderer) {
      localVideoRenderer.dispose();
      localVideoRenderer = null;
      localVideoContainer.style.backgroundColor = 'black';
    }
  } catch (error) {
    console.error('Error stopping video:', error);
    alert('Failed to stop video.');
  }
}
// Build a LocalVideoStream from the first available camera.
// Returns null (after alerting the user) when no camera is present.
async function createLocalVideoStream() {
  const cameras = await deviceManager.getCameras();
  const [camera] = cameras;
  if (!camera) {
    console.error('No camera device found.');
    alert('No camera device detected. Please connect a camera and refresh the page.');
    return null;
  }
  console.log(`Using camera: ${camera.name || 'Default Camera'}`);
  localVideoStream = new LocalVideoStream(camera);
  return localVideoStream;
}
// Render the local video stream into the self-view container.
// Any previous renderer is disposed first so views never stack up.
async function displayLocalVideoStream() {
  try {
    if (localVideoRenderer) {
      localVideoRenderer.dispose();
    }
    if (!localVideoStream) return;
    localVideoRenderer = new VideoStreamRenderer(localVideoStream);
    const view = await localVideoRenderer.createView();
    localVideoContainer.innerHTML = '';
    localVideoContainer.appendChild(view.target);
    localVideoContainer.hidden = false;
    console.log('Local video stream rendered.');
  } catch (error) {
    console.error('Error displaying local video stream:', error);
    alert('Failed to display local video stream.');
  }
}
// Setup call event listeners (state, local mute, remote participants).
// Fix: the parameter used to be named `call`, shadowing the module-level
// `call` variable — the Disconnected handler's `call = null` only cleared
// the local parameter, so the dead call lingered in module state after the
// remote side hung up. The parameter is renamed and both module-level
// references are now reset on disconnect.
function setupCall(activeCall) {
  currentCall = activeCall;
  console.log(`Local participant is muted: ${activeCall.isMuted}`);
  activeCall.on('stateChanged', () => {
    console.log(`Call state changed to: ${activeCall.state}`);
    if (activeCall.state === 'Connected') {
      if (connectedLabel) {
        connectedLabel.hidden = false;
        console.log('Connected label shown.');
      }
    } else if (activeCall.state === 'Disconnected') {
      if (connectedLabel) {
        connectedLabel.hidden = true;
        console.log('Connected label hidden.');
      }
      // Reset shared call state so later UI actions (startVideo,
      // toggleMute, ...) don't target a dead call.
      call = null;
      currentCall = null;
      clearVideoContainers(); // Ensure media is cleared on disconnect
    }
  });
  activeCall.on('isMutedChanged', () => {
    console.log(`Local participant mute state changed: ${activeCall.isMuted}`);
    if (localAudioIndicator) {
      localAudioIndicator.textContent = activeCall.isMuted ? 'Local Audio: Muted' : 'Local Audio: Unmuted';
    }
  });
  // Subscribe to participants already on the call, then to later arrivals.
  activeCall.remoteParticipants.forEach(remoteParticipant => {
    subscribeToRemoteParticipant(remoteParticipant);
  });
  activeCall.on('remoteParticipantsUpdated', e => {
    e.added.forEach(remoteParticipant => subscribeToRemoteParticipant(remoteParticipant));
    e.removed.forEach(() => console.log('Remote participant removed'));
  });
}
// Subscribe to a remote participant: log identity and mute/state changes,
// and render any current or future video streams.
function subscribeToRemoteParticipant(remoteParticipant) {
  const { identifier } = remoteParticipant;
  const remoteId = identifier.communicationUserId || identifier.id;
  console.log(`Remote participant added: ${remoteId}`);
  // Log if remote participant is muted
  console.log(`Remote participant is muted: ${remoteParticipant.isMuted}`);
  remoteParticipant.on('isMutedChanged', () => {
    console.log(`Remote participant mute state changed: ${remoteParticipant.isMuted}`);
  });
  remoteParticipant.on('stateChanged', () => {
    console.log(`Remote participant state changed to: ${remoteParticipant.state}`);
  });
  // Render streams that already exist, then watch for added/removed ones.
  for (const stream of remoteParticipant.videoStreams) {
    subscribeToRemoteVideoStream(stream);
  }
  remoteParticipant.on('videoStreamsUpdated', e => {
    e.added.forEach(stream => subscribeToRemoteVideoStream(stream));
    e.removed.forEach(() => {
      console.log('Remote video stream removed');
    });
  });
}
// Render a remote video stream now (if available) and whenever its
// availability changes later.
async function subscribeToRemoteVideoStream(remoteVideoStream) {
  if (remoteVideoStream.isAvailable) {
    await displayRemoteVideoStream(remoteVideoStream);
  }
  remoteVideoStream.on('isAvailableChanged', async () => {
    if (!remoteVideoStream.isAvailable) {
      console.log('Remote video stream is no longer available.');
      return;
    }
    await displayRemoteVideoStream(remoteVideoStream);
  });
}
// Render the remote participant's video into the gallery.
// This 1:1 UI keeps at most one remote renderer alive at a time.
async function displayRemoteVideoStream(remoteVideoStream) {
  try {
    // Dispose of the previous renderer to prevent duplication
    if (remoteVideoRenderer) {
      remoteVideoRenderer.dispose();
      remoteVideoRenderer = null;
    }
    const renderer = new VideoStreamRenderer(remoteVideoStream);
    remoteVideoRenderer = renderer;
    const view = await renderer.createView();
    remoteVideosGallery.innerHTML = '';
    remoteVideosGallery.appendChild(view.target);
    console.log('Remote video stream rendered.');
  } catch (error) {
    console.error('Error rendering remote video stream:', error);
    alert('Failed to render remote video stream.');
  }
}
// Initialize Call Agent with extracted parameters
// Runs as soon as the script loads; calls themselves are started/accepted
// separately through the exposed window functions below.
initializeCallAgent(token, userId, targetId, role);
// Expose functions to be callable from MAUI WebView
window.startCall = startCall;
window.hangUpCall = hangUpCall;
window.startVideo = startVideo;
window.stopVideo = stopVideo;
window.acceptCall = acceptCall;
window.declineCall = declineCall;
// toggleMute is declared later in the file; function declarations are
// hoisted, so this reference is valid here.
window.toggleMute = toggleMute;
window.isReady = true; // Flag to notify MAUI WebView that the script is ready
// For testing in a browser environment
console.log('Script is ready.');
// Toggle Mute Function
// Last known local mute state, kept only for the UI indicator.
let isMuted = false;
// Toggle the local microphone on the active call.
// Fix: the standalone `isMuted` flag could drift out of sync with the real
// SDK state (call.isMuted can change through other paths — see the
// isMutedChanged handler). call.isMuted is now the source of truth and the
// flag is merely kept in sync afterwards.
async function toggleMute() {
  if (!call) {
    console.error('No active call to toggle mute.');
    alert('No active call to toggle mute.');
    return;
  }
  try {
    if (call.isMuted) {
      await call.unmute();
      console.log('Unmuted.');
    } else {
      await call.mute();
      console.log('Muted.');
    }
    isMuted = call.isMuted;
    // Optionally, update local audio indicator
    if (localAudioIndicator) {
      localAudioIndicator.textContent = isMuted ? 'Local Audio: Muted' : 'Local Audio: Unmuted';
    }
  } catch (error) {
    console.error('Error toggling mute:', error);
    alert('Failed to toggle mute.');
  }
}
Also, it did once work bidirectionally and the call was perfect, but without changing anything — just by re-running it — it went back to being a one-way call!
I appreciate any heads-up, help or critique, I am new on this .NET MAUI and I like it so far! Thank you for your time.
I attempted a 1:1 video call using Azure Communication Services. I implemented remoteAudioStream.getMediaStream() for audio and subscribed to remote participants, but audio is one-way; callee hears the caller, but the caller cannot hear the callee. Tried selecting microphones and speakers with deviceManager, checked mute states, and handled audio streams with audioStreamsUpdated events. Expected two-way audio.
Other things I have already checked while troubleshooting:
I am also running it on HTTPS using legitimate SSL
After a lot of back and forth with this code, there are several things to verify to make sure audio actually plays in both directions:
FIXES (for each one of the above):
1. Introduce user gesture buttons. Since you are starting and accepting a call, each user should have the corresponding buttons to ensure a user gesture has been given, avoiding autoplay policy issues. Example:
startCallButton.addEventListener('click', async () => {
// Enter code here, such as initiating a call
call = callAgent.startCall([targetUser], callOptions);
});
Or for the acceptCallButton:
acceptCallButton.addEventListener('click', async () => {
// Unlock the audio context
try {
const audioContext = new (window.AudioContext || window.webkitAudioContext)();
const source = audioContext.createBufferSource();
source.buffer = audioContext.createBuffer(1, 1, 22050);
source.connect(audioContext.destination);
source.start(0);
if (audioContext.state === 'suspended') {
await audioContext.resume();
}
console.log('Audio context unlocked.');
} catch (e) {
console.error('Error unlocking audio context:', e);
}
// Proceed to accept the call
await acceptCall();
notifyMAUICallAccepted();
acceptCallButton.style.display = 'none'; // Hide button after accepting
});
2. Single Instance WebView.
To manage multiple WebView instances:
if (Instance != null) {
Console.WriteLine("Attempted to create a second instance of CallPage.");
return; // Optionally handle as needed
}
Instance = this;
Or, if navigating back to the main page:
public async void NavigateToMainPage() {
await MainThread.InvokeOnMainThreadAsync(async () => {
Application.Current.MainPage = new NavigationPage(new LoginPage());
});
}
3. Token Disposal
Ensure the token is used once by disposing of the CallAgent
before reinitializing:
if (callAgent) {
await callAgent.dispose();
console.log('Existing CallAgent disposed.');
}
4. Device Permissions in .NET MAUI
For iOS:
if (CallWebView.Handler != null) {
var iosWebView = (WKWebView)((IWebViewHandler)CallWebView.Handler).PlatformView;
iosWebView.UIDelegate = new CustomWKUIDelegate();
iosWebView.Configuration.AllowsInlineMediaPlayback = true;
iosWebView.Configuration.MediaTypesRequiringUserActionForPlayback = WKAudiovisualMediaTypes.None;
}
For Android:
if (CallWebView.Handler != null) {
var androidWebView = (Android.Webkit.WebView)((IWebViewHandler)CallWebView.Handler).PlatformView;
androidWebView.Settings.JavaScriptEnabled = true;
androidWebView.Settings.MediaPlaybackRequiresUserGesture = false;
androidWebView.Settings.DomStorageEnabled = true;
androidWebView.Settings.DatabaseEnabled = true;
}
5. Variable Resetting.
Create a function to reset all call-related variables after each call.
6. Unmute by Default
Ensure users are unmuted when initiating a call:
const callOptions = {
audioOptions: { muted: false },
};
call = callAgent.startCall([targetUser], callOptions);
if (call.isMuted) {
await call.unmute();
console.log('Call unmuted after accept.');
}
7. Subscribe to Remote Participant
Subscribe to remote participants and handle their video streams:
function subscribeToRemoteParticipant(remoteParticipant) {
console.log(`Remote participant added: ${remoteParticipant.identifier.id}`);
remoteParticipant.on('isMutedChanged', () => {
console.log(`Remote participant mute state changed: ${remoteParticipant.isMuted}`);
});
remoteParticipant.videoStreams.forEach(remoteVideoStream => {
subscribeToRemoteVideoStream(remoteVideoStream);
});
remoteParticipant.on('stateChanged', () => {
console.log(`Remote participant state changed to: ${remoteParticipant.state}`);
});
}
Hopefully, this helps! Thank you for your time.