I'm trying to understand code developed by another person. Briefly, I need to record an audio stream from the microphone and send it to the Google Speech Recognition API.
We are using RecordRTC.
I have a button that starts recording when pressed and stops the recording when pressed again.
The problem is in the stop recording function. In reader.onloadend, when I try to read the audio, I always get the previous recording instead of the new one, as if I'm one step behind: on the i-th call I read the (i-1)-th data.
Here is the code of stopRecording:
stopRecording() {
  if (this.isRecording) {
    this.audioRecordingService.stopRecording();
    this.isRecording = false;
  }
  var reader = new FileReader();
  reader.readAsDataURL(this.ccc.blob);
  reader.onloadend = function() {
    console.log("onloadend")
    var base64data = reader.result;
    localStorage.setItem("audioData", base64data.toString());
  }
  var data = localStorage.getItem("audioData");
  var clean_data = data.substr(data.indexOf(',')+1);
  var post_obj = JSON.stringify({
    "name":"audio",
    "base64": clean_data
  })
  this.commandList.postBlob(post_obj).subscribe((res) => {
    console.log("Google API translation: "+ res["res"]);
  });
}
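To show what I mean: the localStorage read seems to run before "onloadend" is printed to the console, so it still returns what was stored on the previous call. A stripped-down sketch of the ordering I think I'm seeing (someBlob is just a placeholder here, not a name from the app):

  var reader = new FileReader();
  reader.onloadend = function() {
    // this prints second, after the blob has been encoded
    console.log("onloadend");
    localStorage.setItem("audioData", reader.result.toString());
  };
  reader.readAsDataURL(someBlob); // returns immediately
  // this runs first, so it still sees the value from the previous recording
  var data = localStorage.getItem("audioData");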
Sorry if something is missing. Thanks.
To be totally clear, I also added the functions from the audio-recording-service below. Hope this helps. I also checked and I can confirm that he used RecordRTC.
getRecordedBlob(): Observable<RecordedAudioOutput> {
  return this._recorded.asObservable();
}

getRecordedTime(): Observable<string> {
  return this._recordingTime.asObservable();
}

recordingFailed(): Observable<string> {
  return this._recordingFailed.asObservable();
}

startRecording() {
  if (this.recorder) {
    return;
  }
  this._recordingTime.next('00:00');
  navigator.mediaDevices.getUserMedia({ audio: true }).then(s => {
    this.stream = s;
    this.record();
  }).catch(error => {
    this._recordingFailed.next();
  });
}

abortRecording() {
  this.stopMedia();
}

private record() {
  this.recorder = new RecordRTC.StereoAudioRecorder(this.stream, {
    type: 'audio',
    numberOfAudioChannels: 1, // or leftChannel:true
    mimeType: 'audio/webm'
  });
  this.recorder.record();
  this.startTime = moment();
  this.interval = setInterval(
    () => {
      const currentTime = moment();
      const diffTime = moment.duration(currentTime.diff(this.startTime));
      const time = this.toString(diffTime.minutes()) + ':' + this.toString(diffTime.seconds());
      this._recordingTime.next(time);
    },
    1000
  );
}

private toString(value) {
  let val = value;
  if (!value) {
    val = '00';
  }
  if (value < 10) {
    val = '0' + value;
  }
  return val;
}

stopRecording() {
  if (this.recorder) {
    this.recorder.stop((blob) => {
      if (this.startTime) {
        const mp3Name = encodeURIComponent('audio_' + new Date().getTime() + '.mp3');
        this.stopMedia();
        this._recorded.next({ blob: blob, title: mp3Name });
      }
    }, () => {
      this.stopMedia();
      this._recordingFailed.next();
    });
  }
}

private stopMedia() {
  if (this.recorder) {
    this.recorder = null;
    clearInterval(this.interval);
    this.startTime = null;
    if (this.stream) {
      this.stream.getAudioTracks().forEach(track => track.stop());
      this.stream = null;
    }
  }
}
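For completeness, this is roughly how the component seems to pick up the blob that the service emits with _recorded.next(...). Apart from getRecordedBlob() and ccc, which appear in the code above, the names here are my reconstruction:

  ngOnInit() {
    this.audioRecordingService.getRecordedBlob().subscribe(output => {
      // output is the RecordedAudioOutput emitted by the service
      this.ccc = { blob: output.blob, title: output.title };
    });
  }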
Please try this:
stopRecording() {
  if (this.isRecording) {
    var that = this;
    this.audioRecordingService.stopRecording(function() {
      // once the service reports that the new blob is ready, store it
      // and run the upload part of this method again
      that.ccc.blob = that.audioRecordingService.getBlob();
      that.stopRecording();
    });
    this.isRecording = false;
    return;
  }
  var reader = new FileReader();
  reader.readAsDataURL(this.ccc.blob);
  reader.onloadend = function() {
    console.log("onloadend")
    var base64data = reader.result;
    localStorage.setItem("audioData", base64data.toString());
  }
  var data = localStorage.getItem("audioData");
  var clean_data = data.substr(data.indexOf(',') + 1);
  var post_obj = JSON.stringify({
    "name": "audio",
    "base64": clean_data
  })
  this.commandList.postBlob(post_obj).subscribe((res) => {
    console.log("Google API translation: " + res["res"]);
  });
}
i.e. the change from your code is that the service's stopRecording now takes a callback and only hands back the blob once recording has actually finished:

audioRecordingService.stopRecording(function() {
  var blob = audioRecordingService.getBlob();
});

(Your service's stopRecording would need to accept that callback and invoke it after the recorder has produced the blob; getBlob() is assumed to return the last recorded blob.)
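One more thing to watch out for: even with the fresh blob, the POST in your code still runs before onloadend fires, because FileReader is asynchronous. A safer shape is to do the upload inside onloadend. Just a sketch, reusing your names (sendBlob is a helper name I made up):

private sendBlob(blob: Blob) {
  var reader = new FileReader();
  reader.onloadend = () => {
    // reader.result is a data URL like "data:audio/webm;base64,...."
    var base64data = reader.result.toString();
    var clean_data = base64data.substr(base64data.indexOf(',') + 1);
    var post_obj = JSON.stringify({
      "name": "audio",
      "base64": clean_data
    });
    this.commandList.postBlob(post_obj).subscribe((res) => {
      console.log("Google API translation: " + res["res"]);
    });
  };
  reader.readAsDataURL(blob); // onloadend fires once the blob is encoded
}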