Tags: javascript, node.js, node-archiver

API returning a zip file - Cannot set headers after they are sent to the client?


I have an API that creates a zip file using the archiver module, and I would like to send the zip back as the response so it can be downloaded on the client side. However, when I try to send the zip file back, I get the error Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client.

What is causing this issue?

This is what my API that creates the zip looks like:

reports.get('/xxx/:fileName', async (req, res) => {

  var s3 = new AWS.S3();
  var archiver = require('archiver');

  var filenames = "xxx,xxx"
  var str_array = filenames.split(','); 

  for (var i = 0; i < str_array.length; i++) {

    var filename = str_array[i].trim();
    localFileName = './temp/' + filename.substring(filename.indexOf("/") + 1);

    file = fs.createWriteStream(localFileName, {flags: 'a', encoding: 'utf-8',mode: 0666});
    file.on('error', function(e) { console.error(e); });

    s3.getObject({
          Bucket: config.xxx,
          Key: filename
      })
      .on('error', function (err) {
          console.log(err);
      })
      .on('httpData', function (chunk) {
          file.on('open', function(){

            file.write(chunk);
          });
      })
      .on('httpDone', function () {
          file.end();
      })
      .send();
  }
    res.end("Files have been downloaded successfully")

    // create a file to stream archive data to.
    var output = fs.createWriteStream('example.zip');
    var archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });

    // listen for all archive data to be written
    // 'close' event is fired only when a file descriptor is involved
    output.on('close', function() {
      console.log(archive.pointer() + ' total bytes');
      console.log('archiver has been finalized and the output file descriptor has closed.');
    });

    // This event is fired when the data source is drained no matter what was the data source.
    // It is not part of this library but rather from the NodeJS Stream API.
    // @see: https://nodejs.org/api/stream.html#stream_event_end
    output.on('end', function() {
      console.log('Data has been drained');
    });

    // good practice to catch warnings (ie stat failures and other non-blocking errors)
    archive.on('warning', function(err) {
      if (err.code === 'ENOENT') {
        // log warning
      } else {
        // throw error
        throw err;
      }
    });

    // good practice to catch this error explicitly
    archive.on('error', function(err) {
      throw err;
    });

    // pipe archive data to the file
    archive.pipe(output);

    // append files from a sub-directory, putting its contents at the root of archive
    archive.directory('./temp', false);

    // finalize the archive (ie we are done appending files but streams have to finish yet)
    // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
    archive.finalize();

    const filePath =  "./example.zip" // or any file format

    res.writeHead(200, {"Content-Type": "text/html"});

    fs.createReadStream(path.resolve(__dirname, 'example.zip')).pipe(res);
    return;

});

Solution

  • You are calling res.end("Files have been downloaded successfully") before starting to write the stream response. res.end() finishes the response and sends the headers, so the later call to res.writeHead(200, ...) fails with ERR_HTTP_HEADERS_SENT.
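
    As a minimal sketch of one way to restructure the tail of the handler: drop the early res.end() call and send the zip only once the archive's output stream has closed. This assumes the files have already been written to ./temp (the S3 download loop from the question is omitted for brevity), and the application/zip Content-Type and Content-Disposition headers are suggestions rather than something from the original code.

    const fs = require('fs');
    const path = require('path');
    const archiver = require('archiver');

    reports.get('/xxx/:fileName', (req, res) => {
      const zipPath = path.resolve(__dirname, 'example.zip');
      const output = fs.createWriteStream(zipPath);
      const archive = archiver('zip', {
        zlib: { level: 9 } // compression level, as in the question
      });

      // Respond only after the zip has been fully written to disk.
      output.on('close', () => {
        res.writeHead(200, {
          'Content-Type': 'application/zip',
          'Content-Disposition': 'attachment; filename="example.zip"'
        });
        fs.createReadStream(zipPath).pipe(res);
      });

      archive.on('error', (err) => {
        console.error(err);
        res.status(500).end('Failed to create archive');
      });

      // Pipe archive data to the file, add the already-downloaded files,
      // then finalize; 'close' on output fires when writing is done.
      archive.pipe(output);
      archive.directory('./temp', false);
      archive.finalize();

      // Note: no res.end(...) here. The response is written in the
      // 'close' handler above, after archiving has finished.
    });

    With the early res.end() removed, the headers are written exactly once, so the ERR_HTTP_HEADERS_SENT error no longer occurs.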