I am trying to read a .csv file and assign its contents to an array with this code:
const fs = require("fs");
const csv = require("@fast-csv/parse");

let data = [];

csv
  .parseFile("./downloads/aggiornamento.csv", { skipRows: 2 })
  .on("error", (error) => console.error(error))
  .on("data", (row) => {
    let obj = {};
    obj["item_id"] = row[2];
    obj["qty"] = row[20];
    data.push(obj);
    console.log(obj);
  })
  .on("end", function () {
    return data;
  });
However, fast-csv reads the file as a stream, so I can only process it row by row. I need the full array later to call an API service, so I would like to read the file synchronously, appending every row and returning the array at the end. That way I could wrap the code above in a function and obtain the array like this:

const data = readCSVSynchronously();

How can I do that with fast-csv?
You can't turn an asynchronous function into a synchronous one. Note also that returning data from the "end" handler achieves nothing: the event emitter discards the handler's return value, and the surrounding code has already moved on by the time the event fires. You can, however, wrap the event-based reading code in a promise, which you can then use in a promise/async context, like so:
const csv = require("@fast-csv/parse");

// Wrap the event-based parser in a promise that resolves with the
// collected rows on "end" and rejects on "error".
function readCsv(path, options, rowProcessor) {
  return new Promise((resolve, reject) => {
    const data = [];
    csv
      .parseFile(path, options)
      .on("error", reject)
      .on("data", (row) => {
        const obj = rowProcessor(row);
        if (obj) data.push(obj);
      })
      .on("end", () => {
        resolve(data);
      });
  });
}

async function doThings() {
  const data = await readCsv(
    "./downloads/aggiornamento.csv",
    { skipRows: 2 },
    (row) => ({ item_id: row[2], qty: row[20] })
  );
  // use data in API...
}
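
Since doThings is itself async, you call it and handle any rejection at the call site, e.g. doThings().catch(console.error);. As a side note, here is a minimal sketch of an alternative, assuming the stream returned by parseFile behaves like a standard Node.js Readable (Readable streams are async-iterable since Node 10), so the rows can be collected with for await...of instead of explicit event handlers; readCsvIterated is a hypothetical name:

const csv = require("@fast-csv/parse");

// Sketch: consume the parse stream as an async iterable.
// Each iteration yields one parsed row; a stream error rejects
// the promise returned by this async function.
async function readCsvIterated(path, options, rowProcessor) {
  const data = [];
  for await (const row of csv.parseFile(path, options)) {
    const obj = rowProcessor(row);
    if (obj) data.push(obj);
  }
  return data;
}

Either way, the result is only ever available through a promise; there is no way to obtain it synchronously.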