jeudi 26 février 2015

Resizing image with nodeJs and AWS

I am attempting to get an image from an AWS S3 bucket using Node.js, resize it into 4 different sizes, and then save it back to the same bucket, but into a folder which in turn contains 4 folders, one for each of the new sizes.


When I run the function, I get the following error:



Unable to resize devimageresize/diavelBlack.jpg and upload to / due to an error: Error: Stream yields empty buffer


I am relatively new to Node.js and am not sure I have written the code correctly. What is causing this error?


This is my code:



// dependencies
var async = require('async');
var AWS = require('aws-sdk');
// AWS Lambda ships with ImageMagick, not GraphicsMagick. Without the
// imageMagick subclass, gm shells out to the missing `gm` binary and the
// resize fails with "Error: Stream yields empty buffer".
var gm = require('gm').subClass({ imageMagick: true });
var util = require('util');


// get reference to S3 client (created once, reused across invocations)
var s3 = new AWS.S3();

exports.handler = function(event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstBucket = event.Records[0].s3.dst;

var _800px = {
width: 800,
dstKey: 800 + srcKey,
dstBucket: dstBucket.large
};

var _500px = {
width: 500,
dstKey: 500 + srcKey,
dstBucket: dstBucket.medium
};

var _200px = {
width: 200,
dstKey: 200 + srcKey,
dstBucket: dstBucket.small
};

var _45px = {
width: 45,
dstKey: 45 + srcKey,
dstBucket: dstBucket.thumbnail
};

var _sizesArray = [_800px, _500px, _200px, _45px];

var len = _sizesArray.length;

// Sanity check: validate that source and destination are same buckets.
if (srcBucket == dstBucket) {
console.error("Destination bucket must match source bucket.");
}

// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return;
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return;
}

// Download the image from S3, transform, and upload to same S3 bucket but different folders.
async.waterfall([
function download(next) {
// Download the image from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},

function transform(response, next) {


for (var i = 0; i<len; i++) {

// Transform the image buffer in memory.
gm(response.Body).resize(_sizesArray[i].width)
.toBuffer(imageType, function(err, buffer) {
if (err) {
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
}
},

function upload(contentType, data, next) {

for (var i = 0; i<len; i++) {

// Stream the transformed image to a different S3 bucket.
s3.putObject({
Bucket: _sizesArray[i].dstBucket,
Key: _sizesArray[i].dstKey,
Body: data,
ContentType: contentType
},
next);
}
}

], function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket + '/' + srcKey +
' and upload to ' + dstBucket + '/' +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + srcKey +
' and uploaded to ' + dstBucket
);
}

context.done();
}
);
};

Aucun commentaire:

Enregistrer un commentaire