Tags: node.js, formidable, nodejs-stream

How to assemble a stream pipeline with node-formidable's fileWriteStreamHandler?


I'm trying to upload a file to S3 using node-formidable's fileWriteStreamHandler. Before the upload to S3, I want to create a hash of the file. This means assembling a stream pipeline that first passes the data through a hash, then passes that data on to the S3 upload.

When trying to implement the pipeline I kept running into issues. So below is a simplified function that more or less represents what I want to do.

formHandler.js

import formidable from 'formidable';

const form = formidable({
  encoding: 'utf-8',
  keepExtensions: true,
  allowEmptyFiles: false,
  maxFiles, maxFileSize, maxTotalFileSize,
  maxFields, maxFieldsSize, minFileSize,
  multiples: true,
  fileWriteStreamHandler: streamUploadImage,
});

streamUploadImage.js

import { PassThrough, Transform } from 'node:stream';
import { createHash } from 'node:crypto';

function streamUploadImage() {
  const firstStream = new PassThrough();
  const lastStream = new PassThrough();

  const hash = createHash('sha256');
  hash.setEncoding('hex');
  const transform = new Transform({
    transform(chunk, encoding, cb) {
      hash.write(chunk);
      cb();
    },
    flush(cb) {
      hash.end();
      console.log('all done', hash.read());
      cb();
    }
  });

  firstStream.on('data', () => console.log('first'));
  lastStream.on('data', () => console.log('last'));

  return firstStream.pipe(transform).pipe(lastStream);
}

When using the above streamUploadImage, only lastStream's 'data' handler fires. firstStream and the transform are never called.

Why is that? Is the pipeline not assembled correctly? Does formidable's fileWriteStreamHandler not work with pipes?

using formidable@3.2.1

UPDATE: see below for a quick reproduction of my issue:

import http from 'node:http';
import formidable from 'formidable';
import { PassThrough, Transform } from 'node:stream';
import { createHash } from 'node:crypto';

const server = http.createServer(function (req, res) {
  if (req.url === '/') {
      res.writeHead(200, { 'Content-Type': 'text/html' });
      res.end(`
        <form action="/upload" enctype="multipart/form-data" method="post">
          <label>file name<input type="text" name="file_name" autofocus /></label><br />
          <label>single file<input type="file" name="file_single" /></label><br />
          <label>multiple files<input type="file" name="filearray_with_multiple[]" multiple /></label><br />
          <br />
          <button>Upload</button>
        </form>
      `);
  } else if (req.url === '/upload') {
    const form = formidable({
      encoding: 'utf-8',
      keepExtensions: true,
      allowEmptyFiles: false,
      multiples: true,
      fileWriteStreamHandler: streamUploadImage,
    });

    form.parse(req, (err, fields, files) => {
      if (err) throw err;

      console.log('parsed file upload');
      console.log({ fields, files });
      res.writeHead(201, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ err, fields, files}, null, 2))
    })
  }
});

function streamUploadImage() {
  const firstStream = new PassThrough();
  const lastStream = new PassThrough();

  const hash = createHash('sha256');
  hash.setEncoding('hex');
  const transform = new Transform({
    transform(chunk, encoding, cb) {
      hash.write(chunk);
      cb();
    },
    flush(cb) {
      hash.end();
      console.log('all done', hash.read());
      cb();
    }
  });

  firstStream.on('data', () => console.log('first'));
  lastStream.on('data', () => console.log('last'));

  return firstStream.pipe(transform).pipe(lastStream);
}

server.listen(5000);
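
To exercise the reproduction: start the server and submit a file through the form at http://localhost:5000, or post one directly (the file path below is just a placeholder):

curl -F "file_single=@./example.jpg" http://localhost:5000/upload

Either way, only 'last' is ever logged.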

Solution

  • stream.pipe() returns the destination stream to allow for chaining, so firstStream.pipe(transform).pipe(lastStream) evaluates to lastStream.

    formidable writes the uploaded bytes into whatever stream streamUploadImage() returns. Since you return the tail of the pipeline, the data goes straight into lastStream and never flows through firstStream or the transform. Return the head of the pipeline (firstStream in your example) instead.

    function streamUploadImage() {
      const firstStream = new PassThrough();
      const lastStream = new PassThrough();
    
      // *snip*
    
      // Wire up the pipeline
      firstStream.pipe(transform).pipe(lastStream);
      // Return the head of the pipeline
      return firstStream;
    }
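
    For completeness, here is one way the original goal (hash plus S3 upload) could be wired up with that fix. This is a sketch, not tested code: it assumes AWS SDK v3 (@aws-sdk/client-s3 and @aws-sdk/lib-storage) with placeholder region, bucket, and key values, and it swaps the chained .pipe() calls for stream.pipeline() so an error in any stage tears down the whole chain. Note that the transform in the question never forwards its chunks (cb() is called without data), so this version passes each chunk through with cb(null, chunk):

    import { PassThrough, Transform } from 'node:stream';
    import { pipeline } from 'node:stream/promises';
    import { createHash } from 'node:crypto';
    import { S3Client } from '@aws-sdk/client-s3';
    import { Upload } from '@aws-sdk/lib-storage';

    const s3 = new S3Client({ region: 'us-east-1' }); // placeholder region

    function streamUploadImage() {
      const firstStream = new PassThrough(); // head: formidable writes into this
      const lastStream = new PassThrough();  // tail: becomes the S3 upload body

      const hash = createHash('sha256');
      const transform = new Transform({
        transform(chunk, encoding, cb) {
          hash.update(chunk); // hash the bytes as they stream past
          cb(null, chunk);    // forward them unchanged to the next stage
        },
      });

      // Wire head -> hash -> tail; pipeline() propagates errors and
      // destroys every stream in the chain if any stage fails.
      pipeline(firstStream, transform, lastStream)
        .then(() => console.log('sha256:', hash.digest('hex')))
        .catch((err) => console.error('pipeline failed:', err));

      // Upload the tail of the pipeline to S3 (placeholder bucket/key).
      const upload = new Upload({
        client: s3,
        params: { Bucket: 'my-bucket', Key: 'my-key', Body: lastStream },
      });
      upload.done().catch((err) => console.error('upload failed:', err));

      // Return the HEAD so formidable's writes flow through the whole chain.
      return firstStream;
    }

    formidable ends the returned stream once the file is fully received; the end then propagates down the pipeline, flushing the hash and letting the S3 upload complete.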