使用nodeJs和AWS调整图像大小

我正尝试使用nodejs从AWS S3存储桶中获取图像,将其大小调整为4种不同的大小,然后将其保存回同一个存储桶中,但保存到一个文件夹中,每个文件夹又包含4个文件夹,每个文件夹用于新的大小。

当我运行该函数时,我得到以下错误:

 Unable to resize devimageresize/diavelBlack.jpg and upload to / due to an error: Error: Stream yields empty buffer 

我对nodejs还比较陌生，不太清楚自己的代码哪里写错了。是什么导致了这个错误？

这是我的代码:

 // dependencies var async = require('async'); var AWS = require('aws-sdk'); var gm = require('gm'); var util = require('util'); // get reference to S3 client var s3 = new AWS.S3(); exports.handler = function(event, context) { // Read options from the event. console.log("Reading options from event:\n", util.inspect(event, {depth: 5})); var srcBucket = event.Records[0].s3.bucket.name; var srcKey = event.Records[0].s3.object.key; var dstBucket = event.Records[0].s3.dst; var _800px = { width: 800, dstKey: 800 + srcKey, dstBucket: dstBucket.large }; var _500px = { width: 500, dstKey: 500 + srcKey, dstBucket: dstBucket.medium }; var _200px = { width: 200, dstKey: 200 + srcKey, dstBucket: dstBucket.small }; var _45px = { width: 45, dstKey: 45 + srcKey, dstBucket: dstBucket.thumbnail }; var _sizesArray = [_800px, _500px, _200px, _45px]; var len = _sizesArray.length; // Sanity check: validate that source and destination are same buckets. if (srcBucket == dstBucket) { console.error("Destination bucket must match source bucket."); } // Infer the image type. var typeMatch = srcKey.match(/\.([^.]*)$/); if (!typeMatch) { console.error('unable to infer image type for key ' + srcKey); return; } var imageType = typeMatch[1]; if (imageType != "jpg" && imageType != "png") { console.log('skipping non-image ' + srcKey); return; } // Download the image from S3, transform, and upload to same S3 bucket but different folders. async.waterfall([ function download(next) { // Download the image from S3 into a buffer. s3.getObject({ Bucket: srcBucket, Key: srcKey }, next); }, function transform(response, next) { for (var i = 0; i<len; i++) { // Transform the image buffer in memory. gm(response.Body).resize(_sizesArray[i].width) .toBuffer(imageType, function(err, buffer) { if (err) { next(err); } else { next(null, response.ContentType, buffer); } }); } }, function upload(contentType, data, next) { for (var i = 0; i<len; i++) { // Stream the transformed image to a different S3 bucket. 
s3.putObject({ Bucket: _sizesArray[i].dstBucket, Key: _sizesArray[i].dstKey, Body: data, ContentType: contentType }, next); } } ], function (err) { if (err) { console.error( 'Unable to resize ' + srcBucket + '/' + srcKey + ' and upload to ' + dstBucket + '/' + ' due to an error: ' + err ); } else { console.log( 'Successfully resized ' + srcBucket + '/' + srcKey + ' and uploaded to ' + dstBucket ); } context.done(); } ); }; 

问题已经解决。关键在于需要给 gm(response.Body, srcKey) 传入额外的 srcKey 参数（并使用 imageMagick 子类）。

完整代码:

 // dependencies var async = require('async'); var AWS = require('aws-sdk'); var gm = require('gm').subClass({ imageMagick: true }); var util = require('util'); // get reference to S3 client var s3 = new AWS.S3(); exports.handler = function(event, context) { // Read options from the event. console.log("Reading options from event:\n", util.inspect(event, {depth: 5})); var srcBucket = event.Records[0].s3.bucket.name; var srcKey = event.Records[0].s3.object.key; var _800px = { width: 800, dstnKey: srcKey, destinationPath: "large" }; var _500px = { width: 500, dstnKey: srcKey, destinationPath: "medium" }; var _200px = { width: 200, dstnKey: srcKey, destinationPath: "small" }; var _45px = { width: 45, dstnKey: srcKey, destinationPath: "thumbnail" }; var _sizesArray = [_800px, _500px, _200px, _45px]; var len = _sizesArray.length; console.log(len); console.log(srcBucket); console.log(srcKey); // Infer the image type. var typeMatch = srcKey.match(/\.([^.]*)$/); if (!typeMatch) { console.error('unable to infer image type for key ' + srcKey); return; } var imageType = typeMatch[1]; if (imageType != "jpg" && imageType != "png") { console.log('skipping non-image ' + srcKey); return; } // Download the image from S3, transform, and upload to same S3 bucket but different folders. async.waterfall([ function download(next) { // Download the image from S3 into a buffer. s3.getObject({ Bucket: srcBucket, Key: srcKey }, next); }, function transform(response, next) { for (var i = 0; i<len; i++) { // Transform the image buffer in memory. gm(response.Body, srcKey) .resize(_sizesArray[i].width) .toBuffer(imageType, function(err, buffer) { if (err) { next(err); } else { next(null, response.ContentType, buffer); } }); } }, function upload(contentType, data, next) { for (var i = 0; i<len; i++) { // Stream the transformed image to a different folder. 
s3.putObject({ Bucket: srcBucket, Key: "dst/" + _sizesArray[i].destinationPath + "/" + _sizesArray[i].dstnKey, Body: data, ContentType: contentType }, next); } } ], function (err) { if (err) { console.error( '---->Unable to resize ' + srcBucket + '/' + srcKey + ' and upload to ' + srcBucket + '/dst' + ' due to an error: ' + err ); } else { console.log( '---->Successfully resized ' + srcBucket + ' and uploaded to' + srcBucket + "/dst" ); } context.done(); } ); };