When sending a big file, such as one over twenty megabytes, from the client to the server with a fetch request, the HTML page containing the file input freezes for about 10 seconds and the console shows this error:
"Uncaught out of memory"
This is my JavaScript code, which I think is causing the issue:
for (let i = 0; i < files.length; i++) {
    const file = files[i];
    const reader = new FileReader();
    reader.onload = function (event) {
        const fileData = new Uint8Array(event.target.result);
        const postFileData = {
            file: {
                name: file.name,
                type: file.type,
                data: Array.from(fileData)
            }
        };
        fetch("/file", {
            method: "POST",
            headers: {
                "Content-Type": "application/json"
            },
            body: JSON.stringify(postFileData)
        });
    };
    reader.readAsArrayBuffer(file);
}
I tried to make the code split the file's post data and send it piece by piece, but I am not sure how to do it properly.
You need to split the file into smaller chunks and upload them one at a time. Your current code runs out of memory because Array.from(fileData) copies the whole file into a JavaScript number array, and JSON.stringify then builds a text representation several times larger than the file itself. Slicing the File (which is a Blob) and sending each piece through FormData avoids this entirely: the browser transmits the binary data directly without ever converting it to a string.
async function uploadFiles(files) {
    const chunkSize = 2 * 1024 * 1024; // 2 MB per chunk

    for (const file of files) {
        const totalChunks = Math.ceil(file.size / chunkSize);

        for (let j = 0; j < totalChunks; j++) {
            const start = j * chunkSize;
            const end = Math.min(start + chunkSize, file.size);
            // slice() only creates a reference into the underlying file;
            // no bytes are copied into memory here.
            const chunk = file.slice(start, end);

            const formData = new FormData();
            formData.append('chunk', chunk, file.name);
            formData.append('chunkIndex', j);
            formData.append('totalChunks', totalChunks);

            // Await each request so the chunks arrive in order and the
            // response to the last chunk really is the final one.
            const response = await fetch('/upload-chunk', {
                method: 'POST',
                body: formData
            });
            const data = await response.json();

            if (data.success && j === totalChunks - 1) {
                console.log('File uploaded successfully');
            }
        }
    }
}
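For completeness, here is a minimal sketch of what the receiving end could look like. This assumes a Node.js server with Express and multer, which your question does not specify, so treat the route, the uploads directory, and the response shape as placeholders. It simply appends each chunk to a file on disk, which works because the client above uploads the chunks in order:

// Minimal server-side sketch (assumed stack: Express + multer).
const express = require('express');
const multer = require('multer');
const fs = require('fs/promises');
const path = require('path');

const app = express();
// Each 2 MB chunk is held in memory only long enough to append it.
const upload = multer({ storage: multer.memoryStorage() });

app.post('/upload-chunk', upload.single('chunk'), async (req, res) => {
    const { chunkIndex, totalChunks } = req.body;
    // In real code, sanitize the file name before using it in a path.
    const target = path.join(__dirname, 'uploads', req.file.originalname);

    await fs.mkdir(path.dirname(target), { recursive: true });
    // The client awaits each chunk, so appending in arrival order
    // reassembles the file correctly.
    await fs.appendFile(target, req.file.buffer);

    res.json({
        success: true,
        done: Number(chunkIndex) === Number(totalChunks) - 1
    });
});

app.listen(3000);

If you later switch the client to parallel uploads, appending in arrival order will corrupt the file; in that case, write each chunk at its offset (chunkIndex * chunkSize) or store the chunks separately and join them once all totalChunks have arrived.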