Tags: json, node.js, stream, transform-stream

Wrapping a Node.js stream in a JSON object


I have a readable stream, something like this:

const crypto = require('crypto');
const algorithm = 'aes-256-ctr';

const stream = file.stream
    .pipe(crypto.createCipher(algorithm, encryptionKey))
    .pipe(outStream);

Encryption works as expected on the entire file. I need to wrap the result of crypto into some sort of JSON, so that the output stream receives something like this:

{
    "content": "/* MY STREAM CONTENT */"
}

How do I do that?

Additionally, I need to read the file stored on disk and unwrap it from the JSON if the encryption key matches.

Thanks in advance


Solution

  • As of Node v13, you can use async generators within pipeline and build your object as a string:

    // const { pipeline } = require('stream/promises'); // <- node >= 16
    const Stream = require('stream');
    const Util = require('util');
    const pipeline = Util.promisify(Stream.pipeline);
    
    const algorithm = 'aes-256-ctr';
    const Crypto = require('crypto');
    
    async function run() {
      await pipeline(
        file.stream, // <- your file read stream
        Crypto.createCipher(algorithm, encryptionKey), // deprecated since Node 10; prefer createCipheriv
        chunksToJson,
        outStream
      );
    }
    
    // Async generator transform: emits the JSON envelope around the streamed content.
    async function* chunksToJson(chunksAsync) {
      yield '{"content": "';
      for await (const chunk of chunksAsync) {
        // Cipher output is raw binary; base64-encode it so the JSON stays valid
        // (toString('utf8') on ciphertext would produce malformed, unescaped JSON).
        yield Buffer.isBuffer(chunk) ? chunk.toString('base64') : JSON.stringify(chunk);
      }
      yield '"}';
    }
    

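    A note on the base64 encoding above: cipher output is binary, so it will usually contain bytes that are not valid UTF-8, or characters that would need escaping inside a JSON string. Encoding each chunk (base64 here, hex works too) keeps the envelope parseable; just remember to decode with the same encoding when reading the file back.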
    In a more complex case where a large amount of data is being streamed (which is usually the point of using streams), you may be tempted to do something like the following. This is not good practice, since all of the content builds up in memory before it is yielded, defeating the purpose of streaming.

    // Anti-pattern: buffers the entire stream in memory and yields only once at the end.
    async function* chunksToJson(chunksAsync) {
      const json = { content: [] };
      for await (const chunk of chunksAsync) {
        json.content.push(Buffer.isBuffer(chunk) ? chunk.toString('utf8') : JSON.stringify(chunk));
      }
      yield JSON.stringify(json);
    }
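
    The question also asks how to read the stored file back and unwrap it, which the above does not cover. A minimal sketch of the reverse path, assuming the base64 encoding used above and a wrapped file small enough to parse in memory (the unwrap function and its parameters are illustrative, not part of the original answer):

    const Fs = require('fs').promises;
    const Crypto = require('crypto');
    
    async function unwrap(path, encryptionKey) {
      // Parse the JSON envelope, then base64-decode and decrypt the content.
      const { content } = JSON.parse(await Fs.readFile(path, 'utf8'));
      const decipher = Crypto.createDecipher('aes-256-ctr', encryptionKey); // deprecated; prefer createDecipheriv
      return Buffer.concat([
        decipher.update(Buffer.from(content, 'base64')),
        decipher.final(),
      ]);
    }

    Be aware that AES-CTR is not authenticated: decrypting with a wrong key does not raise an error, it just produces garbage. If you genuinely need to check that "the encryption key matches", an authenticated mode such as aes-256-gcm is a better fit, since its final() throws when the auth tag does not verify.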