// utility.js
import axios from 'axios';
// Assuming the npm package; if you load the Speech SDK via a script tag, use window.SpeechSDK instead.
import * as SpeechSDK from 'microsoft-cognitiveservices-speech-sdk';
// NOTE: cogSvcSubKey, cogSvcRegion, voiceName, avatarCharacter, avatarStyle and
// avatarBackgroundColor are assumed to be defined in this module (e.g. from your avatar app config).
export const fetchIceCredentials = async () => {
try {
const response = await axios.get('https://westus2.tts.speech.microsoft.com/cognitiveservices/avatar/relay/token/v1', {
headers: {
'Ocp-Apim-Subscription-Key': cogSvcSubKey
}
});
const { Username: iceUsername, Password: iceCredential } = response.data;
if (!iceUsername || !iceCredential) {
throw new Error('ICE credentials are missing');
}
if (iceUsername && iceCredential) {
// Check base64 encoding
if (!isValidBase64(iceCredential)) {
console.error('ICE credential is not a valid base64 string:', iceCredential);
console.error('ICE UserName is not a valid base64 string:', iceUsername);
throw new Error('Invalid base64 encoding');
} else {
console.log('iceCredential is valid');
return { iceUsername, iceCredential };
}
} else {
throw new Error('Invalid credentials format');
}
} catch (error) {
console.error('Error fetching ICE credentials:', error);
throw error;
}
};
export function isValidBase64(str) {
const base64Regex = /^([A-Za-z0-9+/=]+)$/;
if (!base64Regex.test(str)) {
return false;
}
try {
const decodedStr = atob(str);
return btoa(decodedStr) === str;
} catch (e) {
return false;
}
}
export const createWebRTCConnection = (iceServerUrl, iceServerUsername, iceServerCredential) => {
try {
const peerConnection = new RTCPeerConnection({
iceServers: [{
urls: [iceServerUrl],
username: iceServerUsername,
credential: iceServerCredential
}]
});
return peerConnection;
} catch (error) {
console.error("Error creating WebRTC connection: ", error);
throw error;
}
};
export const createAvatarSynthesizer = () => {
try {
console.log("Initializing Speech SDK with:", cogSvcSubKey, cogSvcRegion, voiceName);
const speechSynthesisConfig = SpeechSDK.SpeechConfig.fromSubscription(cogSvcSubKey, cogSvcRegion);
speechSynthesisConfig.speechSynthesisVoiceName = voiceName;
const videoFormat = new SpeechSDK.AvatarVideoFormat();
const videoCropTopLeftX = 600;
const videoCropBottomRightX = 1320;
videoFormat.setCropRange(new SpeechSDK.Coordinate(videoCropTopLeftX, 50), new SpeechSDK.Coordinate(videoCropBottomRightX, 1080));
console.log("Avatar configuration:", avatarCharacter, avatarStyle, avatarBackgroundColor);
const avatarConfig = new SpeechSDK.AvatarConfig(avatarCharacter, avatarStyle, videoFormat);
avatarConfig.backgroundColor = avatarBackgroundColor;
const avatarSynthesizer = new SpeechSDK.AvatarSynthesizer(speechSynthesisConfig, avatarConfig);
avatarSynthesizer.avatarEventReceived = (s, e) => {
const offsetMessage = e.offset === 0 ? "" : `, offset from session start: ${e.offset / 10000}ms.`;
console.log(`[${new Date().toISOString()}] Event received: ${e.description}${offsetMessage}`);
};
return avatarSynthesizer;
} catch (error) {
console.error("Error creating Avatar Synthesizer: ", error);
throw error;
}
};
// App.jsx (part of the React code)
// Fetch ICE credentials
const { iceUsername, iceCredential } = await fetchIceCredentials();
const iceUrl = avatarAppConfig.iceUrl;
let peerConnection = createWebRTCConnection(iceUrl, iceUsername, iceCredential);
peerConnection.ontrack = handleOnTrack;
peerConnection.addTransceiver('video', { direction: 'sendrecv' });
peerConnection.addTransceiver('audio', { direction: 'sendrecv' });
console.log("WebRTC connection created.");
let synthesizer = createAvatarSynthesizer();
setAvatarSynthesizer(synthesizer);
peerConnection.oniceconnectionstatechange = e => {
console.log("WebRTC status: " + peerConnection.iceConnectionState);
if (peerConnection.iceConnectionState === 'connected') {
console.log("Connected to Azure Avatar service");
} else if (peerConnection.iceConnectionState === 'disconnected' || peerConnection.iceConnectionState === 'failed') {
console.log("Azure Avatar service Disconnected");
}
};
synthesizer.startAvatarAsync(peerConnection).then((r) => {
console.log("[" + (new Date()).toISOString() + "] Avatar started.");
}).catch(async (error) => {
console.error("[" + (new Date()).toISOString() + "] Avatar failed to start. Error: " + error);
});
};
I am getting this error:
Avatar failed to start. Error: InvalidCharacterError: Failed to execute 'atob' on 'Window': The string to be decoded is not correctly encoded.
When I load the page I consistently hit this error, and refreshing does not resolve it. Occasionally, if I wait 30-40 seconds before refreshing, the page loads correctly and the avatar appears, but that workaround is unreliable; the error occurs more often than not. I cannot determine the underlying cause and need help diagnosing and resolving it.
Avatar failed to start. Error: InvalidCharacterError: Failed to execute 'atob' on 'Window': The string to be decoded is not correctly encoded.
The credentials you receive from the relay token API are indeed valid base64 strings, so first log the raw response to see exactly what you are getting. Also note that your current check only validates iceCredential yet logs an error for iceUsername as well, which obscures which value actually failed; validate each one separately.
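For example, a minimal sketch right after the axios call (the field names come from your existing destructuring, so adjust them if your payload differs):
// Inspect the raw relay token payload before destructuring it.
console.log('Relay token response:', JSON.stringify(response.data, null, 2));
const { Username: iceUsername, Password: iceCredential } = response.data;
Then replace the base64 check with: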
// Check base64 encoding
if (!isValidBase64(iceCredential)) {
console.error('ICE credential is not a valid base64 string:', iceCredential);
throw new Error('Invalid base64 encoding');
} else if (!isValidBase64(iceUsername)) {
console.error('ICE Username is not a valid base64 string:', iceUsername);
throw new Error('Invalid base64 encoding');
} else {
console.log('ICE credentials are valid');
return { iceUsername, iceCredential };
}
} catch (error) {
console.error('Error fetching ICE credentials:', error);
throw error;
}
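For reference, atob only throws this InvalidCharacterError when its input contains characters outside the base64 alphabet or has an invalid length/padding, so the separate checks above will now point at the exact value that would fail to decode. A quick console illustration (not part of the fix):
atob('aGVsbG8=');    // decodes to "hello"
atob('not base64!'); // throws InvalidCharacterError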
App.jsx:
import { useEffect, useState } from 'react';
import { fetchIceCredentials, createWebRTCConnection, createAvatarSynthesizer } from './utility';
const App = () => {
const [avatarSynthesizer, setAvatarSynthesizer] = useState(null);
useEffect(() => {
const initialize = async () => {
try {
const { iceUsername, iceCredential } = await fetchIceCredentials();
const iceUrl = avatarAppConfig.iceUrl;
let peerConnection = createWebRTCConnection(iceUrl, iceUsername, iceCredential);
peerConnection.ontrack = handleOnTrack;
peerConnection.addTransceiver('video', { direction: 'sendrecv' });
peerConnection.addTransceiver('audio', { direction: 'sendrecv' });
console.log("WebRTC connection created.");
let synthesizer = createAvatarSynthesizer();
setAvatarSynthesizer(synthesizer);
peerConnection.oniceconnectionstatechange = e => {
console.log("WebRTC status: " + peerConnection.iceConnectionState);
if (peerConnection.iceConnectionState === 'connected') {
console.log("Connected to Azure Avatar service");
} else if (peerConnection.iceConnectionState === 'disconnected' || peerConnection.iceConnectionState === 'failed') {
console.log("Azure Avatar service Disconnected");
}
};
synthesizer.startAvatarAsync(peerConnection).then((r) => {
console.log("[" + (new Date()).toISOString() + "] Avatar started.");
}).catch((error) => {
console.error("[" + (new Date()).toISOString() + "] Avatar failed to start. Error: " + error);
});
} catch (error) {
console.error("Error during initialization:", error);
}
};
initialize();
}, []);
return (
<div>
{/* Your UI code here */}
</div>
);
};
export default App;
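Note that avatarAppConfig and handleOnTrack are assumed to be defined elsewhere in your project. If you do not have an ontrack handler yet, a minimal sketch that attaches the incoming tracks to media elements could look like this (the element ids are placeholders, so adapt them to your UI):
// Hypothetical track handler: attach incoming audio/video streams to media elements.
const handleOnTrack = (event) => {
    const stream = event.streams[0];
    if (!stream) return;
    if (event.track.kind === 'video') {
        const videoElement = document.getElementById('avatarVideo'); // assumed element id
        if (videoElement) {
            videoElement.srcObject = stream;
            videoElement.autoplay = true;
            videoElement.playsInline = true;
        }
    } else if (event.track.kind === 'audio') {
        const audioElement = document.getElementById('avatarAudio'); // assumed element id
        if (audioElement) {
            audioElement.srcObject = stream;
            audioElement.autoplay = true;
        }
    }
};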
Result:
[INFO] Initializing Speech SDK with: YOUR_COG_SVC_SUB_KEY YOUR_COG_SVC_REGION YOUR_VOICE_NAME
[INFO] Avatar configuration: YOUR_AVATAR_CHARACTER YOUR_AVATAR_STYLE YOUR_AVATAR_BACKGROUND_COLOR
[INFO] Fetching ICE credentials...
[INFO] ICE credentials are valid
[INFO] WebRTC connection created.
[INFO] WebRTC status: new
[INFO] WebRTC status: checking
[INFO] WebRTC status: connected
[INFO] Connected to Azure Avatar service
[INFO] [2024-08-01T12:34:56.789Z] Avatar started.