I am working on a project where I need the user to be able to record the screen, system audio and microphone. At the moment I could only make it recognize the screen and the system audio.
First I capture the screen along with its audio and save the stream to a variable, and then I pass that variable to a video component to display it.
// Requests a screen-capture stream (video + system audio) and forwards the
// resulting MediaStream to `success`, or any rejection to `error`.
//
// NOTE: finer-grained constraints such as
//   { video: { displaySurface: 'monitor', logicalSurface: true, cursor: 'always' } }
// are NOT supported by browsers yet, so we only request plain video + audio.
// (The original code built that object and then immediately overwrote it —
// dead code removed here.)
invokeGetDisplayMedia(success, error) {
  const displaymediastreamconstraints = {
    video: true,
    audio: true // system/tab audio, where the browser supports capturing it
  };
  if (navigator.mediaDevices.getDisplayMedia) {
    navigator.mediaDevices.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
  }
  else {
    // Legacy fallback for old builds that exposed getDisplayMedia on navigator.
    navigator.getDisplayMedia(displaymediastreamconstraints).then(success).catch(error);
  }
},
captureScreen(callback) {
this.invokeGetDisplayMedia((screen) => {
this.addStreamStopListener(screen, () => {
//
});
callback(screen);
}, function (error) {
console.error(error);
alert('Unable to capture your screen. Please check console logs.\n' + error);
});
},
startRecording() {
this.captureScreen(screen=>{
this.audioStream = audio
console.log(audio)
this.video=this.$refs.videoScreen
this.video.srcObject = screen;
this.recorder = RecordRTC(screen, {
type: 'video'
});
this.recorder.startRecording();
// release screen on stopRecording
this.recorder.screen = screen;
this.videoStart = true;
});
},
I fixed it by adding a function that captures the audio from the microphone:
// Captures a microphone-only stream via getUserMedia and forwards the
// MediaStream to `success`, or any rejection to `error`.
captureAudio(success, error) {
  // Microphone-only constraints.
  const micConstraints = { audio: true };
  if (navigator.mediaDevices.getUserMedia) {
    navigator.mediaDevices.getUserMedia(micConstraints).then(success).catch(error);
  }
  else {
    // Legacy fallback for very old browsers.
    navigator.getUserMedia(micConstraints).then(success).catch(error);
  }
},
And updating the startRecording method to use it:
startRecording() {
this.captureAudio((audio) => {
this.captureScreen(screen=>{
this.video=this.$refs.videoScreen
this.audioStream=audio
this.video.srcObject = screen;
this.recorder = RecordRTC(screen, {
type: 'video'
});
this.recorder.startRecording();
// release screen on stopRecording
this.recorder.screen = screen;
this.videoStart = true;
});
})
},
And updating the stopRecordingCallback method accordingly:
stopRecordingCallback() {
this.video.src = this.video.srcObject = null;
this.video=this.$refs.videoScreen
this.video.src = URL.createObjectURL(this.recorder.getBlob());
this.recorder.screen.stop();
this.audioStream.stop();
this.recorder.destroy();
this.recorder = null;
},