In NodeJs I can use fs
and readline
to make an interface and read the file line by line. This is useful for large text files (in my case, 100 GB+). How can I achieve a similar thing using Bun?
I've tried to read the file like this:
const input = Bun.file("input.txt");
but it's the same as fs.readFile
which is not what I want in this case.
Nodejs example
const readline = require("readline");
const fs = require("fs");
// createReadStream reads the file incrementally instead of loading it all into memory
const input = fs.createReadStream("file.txt", {
encoding: "utf16le",
});
// readline splits the stream into lines and emits one "line" event per line
const rl = readline.createInterface({ input });
rl.on("line", (line) => {
// process line
})
As @GrafiCode mentioned in the comments
const foo = Bun.file("foo.txt");
does not read the file from disk; it creates a BunFile,
which you can consume in several ways — one of them is as a stream:
await foo.stream(); // contents as ReadableStream
you can read more here
/**
 * Streams a file from disk and invokes a callback for every line,
 * without ever holding the whole file in memory.
 *
 * @param {string} filePath - Path of the file to read.
 * @param {(line: string) => void} onLineRead - Called once per line (line terminator stripped).
 * @param {() => any} onFinish - Called after the last line; its return value is returned.
 * @returns {Promise<any>} Resolves with the result of onFinish().
 */
async function readFileLineByLine(filePath, onLineRead, onFinish) {
  const file = Bun.file(filePath);
  const stream = file.stream(); // contents as a ReadableStream, read chunk by chunk
  const decoder = new TextDecoder();
  let remainingData = "";
  for await (const chunk of stream) {
    // { stream: true } buffers multi-byte sequences that are split across
    // chunk boundaries instead of emitting replacement characters.
    remainingData += decoder.decode(chunk, { stream: true });
    // Split on \n or \r\n; the last element is an incomplete line
    // (or "" when the chunk ended exactly on a newline).
    const lines = remainingData.split(/\r?\n/);
    remainingData = lines.pop();
    for (const line of lines) {
      onLineRead(line);
    }
  }
  // Flush any bytes still buffered inside the decoder.
  remainingData += decoder.decode();
  // Emit the final line when the file does not end with a newline.
  if (remainingData.length > 0) {
    onLineRead(remainingData);
  }
  return onFinish();
}
/** Per-line callback: echoes each line to the console. */
function onLineRead(line) {
  console.log(`Line read: ${line}`);
}
/** Completion callback: reports that the whole file has been consumed. */
function onFinish() {
  const doneMessage = "File read successfully";
  console.log(doneMessage);
}
readFileLineByLine("2.txt", onLineRead, onFinish);