I am trying to create a noise-cancellation filter for WebRTC using my C library compiled to wasm and called from JavaScript.
I am able to capture the PCM audio using the Web Audio API and process each frame with wasm.
Here is my code :
// Capture mic audio, run each 2048-sample frame through the wasm denoiser,
// and log the channel data before/after to confirm the buffer was modified.
navigator.mediaDevices.getUserMedia(constraints).then(function success(stream) {
  // Use one consistent camelCase name per variable. The original declared
  // `audiocontext`/`audiopreprocessnode` but then referenced `audioCtx`
  // (never defined -> ReferenceError) and assigned `audioPreprocessNode`
  // (implicit global).
  const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
  const audioSource = audioCtx.createMediaStreamSource(stream);
  // 2048-sample buffer, 1 input channel, 1 output channel.
  const audioPreprocessNode = audioCtx.createScriptProcessor(2048, 1, 1);
  audioSource.connect(audioPreprocessNode);
  audioPreprocessNode.connect(audioCtx.destination);
  audioPreprocessNode.onaudioprocess = function (e) {
    console.log(e.inputBuffer.getChannelData(0));
    // Audio captured from the mic is denoised using wasm.
    const denoisedArray = Module["_denoise"](e.inputBuffer.getChannelData(0));
    // NOTE(review): writing back into inputBuffer shows up in the log below,
    // but per the Web Audio spec such modifications are not propagated
    // downstream — this is exactly the problem described in the question.
    e.inputBuffer.getChannelData(0).set(denoisedArray);
    console.log(e.inputBuffer.getChannelData(0));
  };
}); // the original never closed the `.then(` call — `);` was missing
I used onaudioprocess to capture the input frame and to write the processed (denoised) frame back into the input buffer. I used the two logs to check whether the values are altered after processing, and they are. However, the processed values are not sent to the WebRTC stream — only the unaltered original frame is sent.
After studying the Web Audio API docs, I found: "Any script modifications to this AudioBuffer outside of this scope will not produce any audible effects."
https://www.w3.org/2014/annotation/experiment/webaudio.html#AudioProcessingEvent-section
Is there any way to send this processed (denoised) frame to the WebRTC stream? Any help would be appreciated.
If you want the ScriptProcessorNode to output the modified signal, you need to write to the `outputBuffer` of the AudioProcessingEvent.
// ...
var denoised_array = Module["_denoise"](e.inputBuffer.getChannelData(0));
// The property is `outputBuffer` (camelCase). The original `e.outputbuffer`
// is undefined, so `.getChannelData(0)` would throw a TypeError — writes to
// the event's outputBuffer are what the node actually emits downstream.
e.outputBuffer.getChannelData(0).set(denoised_array);
// ...