Tags: google-chrome, ipad, safari, mediarecorder, mediarecorder-api

Failed to load resource: The operation couldn't be completed. (WebKitBlobResource error 3)


I am using the MediaRecorder web API to record audio. It works perfectly fine in Windows and macOS browsers, but when I try it in the iPad browser, it sometimes loads the data into a blob and creates the URL, and sometimes I get the error: Failed to load resource: The operation couldn't be completed. (WebKitBlobResource error 3)

There is one more problem I'm facing: sometimes it abruptly stops while recording. I'm attaching a screenshot for reference.

Here is part of my Angular code:

  @ViewChild('audioPlayer') audioPlayer: ElementRef;
  mediaRecorder: MediaRecorder;
  chunks: Blob[] = [];

  startRecording() {
    navigator.mediaDevices.getUserMedia({ audio: true })
      .then(stream => {
        this.mediaRecorder = new MediaRecorder(stream);
        console.log('MediaRecorder.mimeType: ', "audio/mp3");
        this.mediaRecorder.ondataavailable = (e: any) => {
          this.chunks.push(e.data);
          console.log("chunk size: ", this.chunks.length);
          console.log("e.data: ", e.data);
          console.log("e", e);

          const blob = new Blob(this.chunks, { type: "audio/mp3" });
          const audioURL = window.URL.createObjectURL(blob);
          this.audioPlayer.nativeElement.src = audioURL;
        };

        this.mediaRecorder.onstop = () => {
          console.log('Stopped recording');
          this.chunks = [];
        };

        this.mediaRecorder.start();
      })
      .catch(error => {
        console.error('Error accessing the microphone: ', error);
      });
  }

  pauseRecording() {
    if (this.mediaRecorder.state === 'recording') {
      this.mediaRecorder.requestData();
      this.mediaRecorder.pause();
    }
  }

  resumeRecording() {
    if (this.mediaRecorder.state === 'paused') {
      this.mediaRecorder.resume();
    }
  }

  stopRecording() {
    if (this.mediaRecorder.state !== 'inactive') {
      this.mediaRecorder.stop();
    }
  }

I tried adjusting the order of the mediaRecorder.pause() and mediaRecorder.requestData() calls. I also tried adding a setTimeout of 4 seconds in the ondataavailable event handler to reduce the blob-processing pressure (something I read somewhere), but it didn't work. I also tried attaching an onerror event handler to mediaRecorder and printing from it, but it never reports an error.
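
For reference, the error handler I attached looks roughly like this (it never logs anything on the iPad):

this.mediaRecorder.onerror = (event: any) => {
  console.error('MediaRecorder error: ', event);
};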

If you want to check the whole code, I have added it to my GitHub. Feel free to fork, clone, raise an issue, or send a pull request: https://github.com/Jayantkhandebharad/angular-audio-toolkit

Thank you in advance!

Note: if you feel anything is wrong in this question, feel free to comment or edit it.


Solution

  • To make the MediaRecorder API work in Safari, the following steps are required:

    From what you provided:

    1. Change your record type to audio/mp4
    2. Check that the event carries a valid blob before using it, e.g. if (!e.data?.size) return; (a sketch applying both changes follows this list)
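
    Applied inside the .then(stream => { ... }) callback of your startRecording(), the two changes could look roughly like this (a minimal sketch; mediaRecorder, chunks, and audioPlayer are the names from your component, and the isTypeSupported guard is an addition, not part of the gist):

    // Sketch: prefer audio/mp4 (what Safari's MediaRecorder supports) and skip empty chunks.
    const mimeType = MediaRecorder.isTypeSupported('audio/mp4') ? 'audio/mp4' : '';
    this.mediaRecorder = new MediaRecorder(stream, mimeType ? { mimeType } : undefined);

    this.mediaRecorder.ondataavailable = (e: BlobEvent) => {
      // Safari can fire dataavailable with an empty blob; ignore those events.
      if (!e.data?.size) {
        return;
      }
      this.chunks.push(e.data);
      // Use the recorder's actual mime type for the blob instead of hard-coding audio/mp3.
      const blob = new Blob(this.chunks, { type: this.mediaRecorder.mimeType });
      const audioURL = window.URL.createObjectURL(blob);
      this.audioPlayer.nativeElement.src = audioURL;
    };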

    https://gist.github.com/hikariNTU/19d744b8072ab056291781109bcd17ed

    export class VoiceRecorder {
      recorder: MediaRecorder | undefined;
      options: {
        onStart?: () => void;
        onStop?: () => void;
      };
    
      constructor(options: VoiceRecorder['options'] = {}) {
        console.log('!MediaRecorder Init!');
        this.options = options;
      }
    
      async start(deviceId = '') {
        if (this.recorder) {
          this.stop();
        }
    
        const recordType = VoiceRecorder.getSupportedTypes()[0];
        if (!recordType) {
          throw Error('Browser MediaRecorder cannot support desired format!');
        }
        const stream = await navigator.mediaDevices.getUserMedia({
          audio: {
            deviceId,
            echoCancellation: false,
            autoGainControl: false,
            noiseSuppression: false,
          },
        });
    
        this.recorder = new MediaRecorder(stream, {
          mimeType: recordType,
          audioBitsPerSecond: 320000,
        });
        this.recorder.start();
        const started = new Promise<void>((res, rej) => {
          // Reject if the 'start' event never arrives, e.g. on a slow device like a remote iPhone.
          const timeId = setTimeout(() => rej('Record Start Responding Timeout!'), 5000);
          this.recorder?.addEventListener('start', () => {
            clearTimeout(timeId);
            console.log('[Mic] Recording...', recordType);
            this.options.onStart?.();
            res();
          });
        });
        this.recorder.addEventListener('stop', () => {
          console.log('[Mic] Stop!');
        });
        this.recorder.addEventListener('error', (e) => {
          console.log('[Mic] Error?', e);
        });
        return started;
      }
    
      pauseOrResume() {
        if (this.recorder?.state === 'recording') {
          this.recorder.pause();
        } else if (this.recorder?.state === 'paused') {
          this.recorder.resume();
        }
      }
    
      stop() {
        this.options.onStop?.();
        if (!this.recorder) {
          return;
        }
        const data = new Promise<{
          type: string;
          blob: Blob;
        }>((res, rej) => {
          const timeId = setTimeout(() => {
            rej('Record End Responding Timeout!');
          }, 3000);
    
          if (this.recorder) {
            const mimeType = this.recorder.mimeType;
            const handler = (event: BlobEvent) => {
              // Safari tends to send out an empty blob just before the real one arrives here.
              if (!event.data.size) {
                return;
              }
              res({
                type: mimeType,
                blob: event.data,
              });
              clearTimeout(timeId);
              this.recorder?.removeEventListener('dataavailable', handler);
            };
            this.recorder.addEventListener('dataavailable', handler);
            this.recorder.stream.getTracks().forEach((track) => track.stop());
            this.recorder.stop();
            this.recorder = undefined;
          }
        });
    
        return data;
      }
    
      static getSupportedTypes() {
        const containers = [
          'wav',
          'aac',
          'm4a',
          'mp4',
          'webm',
          'ogg',
          'mpeg',
          'flac',
        ];
        const codecs = ['avc1', 'aac', 'mp4a', 'av1', 'opus', 'mpeg', 'pcm'];
        const supportedAudios = containers
          .map((format) => `audio/${format}`)
          .filter((mimeType) => MediaRecorder.isTypeSupported(mimeType));
        const supportedAudioCodecs = supportedAudios
          .flatMap((audio) => codecs.map((codec) => `${audio};codecs=${codec}`))
          .filter((mimeType) => MediaRecorder.isTypeSupported(mimeType));
        return supportedAudioCodecs;
      }
    
      static async getInputList() {
        // trigger permission first to get full list
        await navigator.mediaDevices
          .getUserMedia({
            audio: true,
          })
          .then((stream) => {
            stream.getTracks().forEach((t) => t.stop());
          });
        const list = await navigator.mediaDevices.enumerateDevices();
        // filter out hidden deviceId, when user first visited the site and didn't provide permission.
        return list.filter((v) => v.kind === 'audioinput' && v.deviceId !== '');
      }
    }
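
    A possible way to use this class from the component in the question (a rough sketch; the button handlers and the audio element reference are assumptions, not part of the gist):

    // Hypothetical wiring for the VoiceRecorder class above.
    const recorder = new VoiceRecorder({
      onStart: () => console.log('recording started'),
      onStop: () => console.log('recording stopped'),
    });

    async function onRecordClick() {
      await recorder.start(); // resolves once the 'start' event fires
    }

    async function onStopClick(audioEl: HTMLAudioElement) {
      const result = await recorder.stop(); // resolves with { type, blob } for the final recording
      if (result) {
        audioEl.src = URL.createObjectURL(result.blob); // typically an audio/mp4 blob on Safari / iPad
      }
    }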