I followed this tutorial from Amazon: https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html to build a thumbnail generator for every image that I upload to my S3 bucket. It works fine on small images (around 500KB) and correctly puts the thumbnails in the destination bucket, but when I uploaded a 300MB image file to my S3 bucket, my Lambda function stopped working correctly.
Based on other forum posts, I tried adjusting the timeout and memory size settings in AWS Lambda because I thought the function might need more memory, but that wasn't the case and I don't know what else to try.
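For reference, I ended up with 1024 MB of memory (visible in the REPORT line below). I made the changes in the console, but the aws-sdk equivalent would be something like this (the function name and timeout value here are just placeholders):
var AWS = require('aws-sdk');
var lambda = new AWS.Lambda();
// 'thumbnail-generator' is a placeholder for my actual function name
lambda.updateFunctionConfiguration({
    FunctionName: 'thumbnail-generator',
    MemorySize: 1024, // MB; matches the REPORT line below
    Timeout: 300      // seconds; placeholder value
}, function(err) {
    if (err) console.error(err);
});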
Here is the Lambda function, copied straight from the link above. The error occurs at line 57, where MAX_WIDTH and MAX_HEIGHT are used to compute the scaling factor: for large files, size is always undefined.
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');

// constants
var MAX_WIDTH = 100;
var MAX_HEIGHT = 100;

// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context, callback) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
    var srcBucket = event.Records[0].s3.bucket.name;
    // Object key may have spaces or unicode non-ASCII characters.
    var srcKey =
        decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    var dstBucket = srcBucket + "resized";
    var dstKey = "resized-" + srcKey;

    // Sanity check: validate that source and destination are different buckets.
    if (srcBucket == dstBucket) {
        callback("Source and destination buckets are the same.");
        return;
    }

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        callback("Could not determine the image type.");
        return;
    }
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        callback(`Unsupported image type: ${imageType}`);
        return;
    }
    // Download the image from S3, transform, and upload to a different S3 bucket.
    async.waterfall([
        function download(next) {
            // Download the image from S3 into a buffer.
            s3.getObject({
                Bucket: srcBucket,
                Key: srcKey
            }, next);
        },
        function transform(response, next) {
            gm(response.Body).size(function(err, size) {
                // Infer the scaling factor to avoid stretching the image unnaturally.
                var scalingFactor = Math.min(
                    MAX_WIDTH / size.width,
                    MAX_HEIGHT / size.height
                );
                var width = scalingFactor * size.width;
                var height = scalingFactor * size.height;

                // Transform the image buffer in memory.
                this.resize(width, height)
                    .toBuffer(imageType, function(err, buffer) {
                        if (err) {
                            next(err);
                        } else {
                            next(null, response.ContentType, buffer);
                        }
                    });
            });
        },
        function upload(contentType, data, next) {
            // Stream the transformed image to a different S3 bucket.
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, next);
        }
    ], function(err) {
        if (err) {
            console.error(
                'Unable to resize ' + srcBucket + '/' + srcKey +
                ' and upload to ' + dstBucket + '/' + dstKey +
                ' due to an error: ' + err
            );
        } else {
            console.log(
                'Successfully resized ' + srcBucket + '/' + srcKey +
                ' and uploaded to ' + dstBucket + '/' + dstKey
            );
        }
        callback(null, "message");
    });
};
Here is the error message, directly from the AWS CloudWatch logs:
2019-05-14T22:31:28.731Z b5fccf54-e55f-49f2-9206-462fa5769149 TypeError: Cannot read property 'width' of undefined
at gm.<anonymous> (/var/task/index.js:57:38)
at emitMany (events.js:147:13)
at gm.emit (events.js:224:7)
at gm.<anonymous> (/var/task/node_modules/gm/lib/getters.js:70:16)
at cb (/var/task/node_modules/gm/lib/command.js:322:16)
at gm._spawn (/var/task/node_modules/gm/lib/command.js:226:14)
at gm._exec (/var/task/node_modules/gm/lib/command.js:190:17)
at gm.proto.(anonymous function) [as size] (/var/task/node_modules/gm/lib/getters.js:68:12)
at transform (/var/task/index.js:54:31)
at nextTask (/var/task/node_modules/async/dist/async.js:5324:14)
EDIT: I also get this second error; the function alternates between the two:
2019-05-14T22:55:25.923Z 7a7f1ec2-cd78-4fa5-a296-fee58033aea6 Unable to resize MYBUCKET/image.png and upload to MYBUCKETRESIZE/resize-image.png due to an error: Error: Stream yields empty buffer
EDIT: Added the report line:
REPORT RequestId: a67e1e79-ebec-4b17-9832-4049ff31bd89 Duration: 7164.64 ms Billed Duration: 7200 ms Memory Size: 1024 MB Max Memory Used: 810 MB
Your two errors have slightly different causes.
Stream yields empty buffer
almost certainly means you're running out of memory.
The other error is likely a specific ImageMagick error that you aren't handling, though it too is probably a symptom of the memory problem. Because gm reports failures through its callback, you need to check the err argument before touching size:
function transform(response, next) {
    gm(response.Body).size(function(err, size) {
        // Pass the ImageMagick failure down the waterfall instead of
        // reading properties off an undefined `size`.
        if (err) return next(err);
        ...
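With that check in place, the underlying ImageMagick error will reach the waterfall's final callback and show up in your CloudWatch logs via console.error, instead of surfacing as the misleading Cannot read property 'width' of undefined.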
The imagemagick module might also be writing temporary files to /tmp. Here is a snippet for clearing it, from a project I worked on:
const fs = require('fs')
const folder = '/tmp/'

// Delete any files left in /tmp by a previous invocation
// (Lambda reuses the same container, so we would eventually run out of space)
const files = fs.readdirSync(folder)
for (const file of files) {
    console.log('Deleting file from previous invocation: ' + folder + file)
    fs.unlinkSync(folder + file)
}
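Run that at the top of the handler, not at module load, so every invocation starts with a clean /tmp (module-level code only runs on a cold start).

If memory really is the root cause, another option is to stream the object through gm instead of buffering the whole 300MB body. This is only a rough sketch, untested against your setup: it keeps your bucket/key naming, hard-codes PNG output for brevity, and relies on the fact that ImageMagick's -resize WxH already preserves aspect ratio, so the size()/scalingFactor step can be dropped entirely.
var AWS = require('aws-sdk')
var gm = require('gm').subClass({ imageMagick: true })
var s3 = new AWS.S3()

exports.handler = function(event, context, callback) {
    var srcBucket = event.Records[0].s3.bucket.name
    var srcKey = decodeURIComponent(
        event.Records[0].s3.object.key.replace(/\+/g, ' '))

    // createReadStream() avoids loading the full object into memory at once
    var readStream = s3.getObject({ Bucket: srcBucket, Key: srcKey })
        .createReadStream()

    gm(readStream)
        .resize(100, 100) // -resize 100x100 fits within the box, keeping aspect ratio
        .stream('png', function(err, stdout) {
            if (err) return callback(err)
            // s3.upload() accepts a stream as Body; putObject() does not
            s3.upload({
                Bucket: srcBucket + 'resized',
                Key: 'resized-' + srcKey,
                Body: stdout,
                ContentType: 'image/png'
            }, function(err) {
                callback(err)
            })
        })
}
Even then, ImageMagick itself needs real memory to decode a 300MB image, so keep the memory setting high and compare Max Memory Used against Memory Size in the REPORT line.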