I am trying to write files to an SFTP server in Node using the ssh2 module. In my use case, the file source is Azure Blob Storage and the files are relatively large (more than 5 GB), so the idea is to read the data from Blob Storage in chunks and write each chunk to the server. I don't want to download the whole file first and then perform the write, because the files are large and I could run into a disk space issue at runtime.
I have a working implementation that uses the downloadToBuffer() and write() functions and increments the offset until all the bytes are written, as seen in the code snippet below:
sftp.open('remoteFilePath', 'w', async (openError, handle) => {
  if (openError) throw openError;
  // `file.size` (total blob size) and `length` (chunk size) are defined elsewhere
  var blobOffset = 0;
  try {
    while (blobOffset < file.size) {
      // Download the next chunk: `length` bytes, or whatever remains of the blob
      const buffer = await client.downloadToBuffer(
        blobOffset,
        blobOffset + length > file.size ? file.size - blobOffset : length
      );
      // Write the chunk into the remote file at the current offset
      sftp.write(
        handle,
        buffer,
        0,
        blobOffset + length > file.size ? buffer.length : length,
        blobOffset,
        (writeError) => { if (writeError) throw writeError; }
      );
      blobOffset += length;
    }
  } catch (e) {
    console.log(e);
  }
});
This solution works, but it does not feel very efficient for large files. Is there a better way to implement this, perhaps using streams so I don't need the loop?
Regarding the issue, you can pipe the blob's download stream directly into an SFTP write stream instead of looping over chunks yourself. Please refer to the following code:
var Client = require("ssh2").Client;
var {
  BlobServiceClient,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

var accountName = "andyprivate";
var accountKey = "";
var creds = new StorageSharedKeyCredential(accountName, accountKey);
var blobServiceClient = new BlobServiceClient(
  `https://${accountName}.blob.core.windows.net`,
  creds
);
var containerClient = blobServiceClient.getContainerClient("output");
let blob = containerClient.getBlockBlobClient("5m Sales Records.csv");

var conn = new Client();
conn.connect({
  host: "",
  port: 22,
  username: "",
  password: "!",
});

conn.on("ready", async () => {
  // Start the blob download once the SSH connection is ready
  // (the await needs an async context, so it lives inside this handler)
  let read = (await blob.download(0)).readableStreamBody;
  conn.sftp((err, sftp) => {
    if (err) throw err;
    // Pipe the blob's readable stream straight into the remote file
    var write = sftp.createWriteStream("/home/testqw/test.csv");
    read.pipe(write);
    write
      .on("error", function (error) {
        throw error;
      })
      .on("finish", () => {
        console.log("All writes are now complete.");
        sftp.end();
      });
  });
});

conn.on("end", () => {
  console.log("close the connection");
});
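One possible refinement, shown as a minimal sketch below: read.pipe(write) does not propagate errors from the source stream to the destination, so you could use stream.pipeline() instead, which rejects if either side fails and cleans up both streams. This assumes Node 15+ (for the stream/promises module) and reuses the same conn and blob objects and the same placeholder remote path as above.

const { pipeline } = require("stream/promises");

conn.on("ready", async () => {
  conn.sftp(async (err, sftp) => {
    if (err) throw err;
    try {
      const read = (await blob.download(0)).readableStreamBody;
      const write = sftp.createWriteStream("/home/testqw/test.csv");
      // Resolves when the write side finishes; rejects if either the
      // blob download stream or the SFTP write stream errors out
      await pipeline(read, write);
      console.log("All writes are now complete.");
    } catch (error) {
      console.error(error);
    } finally {
      sftp.end();
    }
  });
});

If you ever need to resume part-way through a file, blob.download() also accepts a starting byte offset, so the same pipeline approach still applies.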