Why doesn't my S3 upload work correctly?

I'm uploading an image file with the following code:

    var fs = require('fs');
    var zlib = require('zlib');
    var AWS = require('aws-sdk');

    var body = fs.createReadStream(tempPath).pipe(zlib.createGzip());
    var s3obj = new AWS.S3({params: {Bucket: myBucket, Key: myKey}});
    var params = {
      Body: body,
      ACL: 'public-read',
      ContentType: 'image/png'
    };
    s3obj.upload(params, function (err, data) {
      if (err) {
        console.log("An error occurred with S3 fig upload: ", err);
      } else {
        console.log("Uploaded the image file at: ", data.Location);
      }
    });

The image uploads to my S3 bucket successfully (no error message, and I can see it in the S3 console), but when I try to display it on my website I get a broken image icon. When I download the image with the S3 console's file downloader, I can't open it and get a "file is damaged or corrupted" error.

If I upload the file manually using the S3 console, I can display it on my website correctly, so I'm fairly sure something is wrong with my upload.

What's going wrong?

I eventually found the answer to my own question. I needed to pass one more parameter, because the file is gzipped (by the zlib.createGzip() stream used to build body). Adding ContentEncoding solved my problem:

    var params = {
      Body: body,
      ACL: 'public-read',
      ContentType: 'image/png',
      ContentEncoding: 'gzip'
    };
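In context, here's a minimal sketch of the fixed upload, reusing the requires and the tempPath, myBucket, and myKey variables from the question's code:

    var fs = require('fs');
    var zlib = require('zlib');
    var AWS = require('aws-sdk');

    // Same stream and client as in the question; only the params change.
    var body = fs.createReadStream(tempPath).pipe(zlib.createGzip());
    var s3obj = new AWS.S3({params: {Bucket: myBucket, Key: myKey}});

    s3obj.upload({
      Body: body,
      ACL: 'public-read',
      ContentType: 'image/png',
      // Without this, S3 serves the raw gzip bytes with no
      // Content-Encoding header, so browsers try to render the
      // compressed data as a PNG and fail.
      ContentEncoding: 'gzip'
    }, function (err, data) {
      if (err) {
        console.log("An error occurred with S3 fig upload: ", err);
      } else {
        console.log("Uploaded the image file at: ", data.Location);
      }
    });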

There's also a very good Node module, s3-upload-stream, for uploading (and first gzipping) streams to S3. Here is their example code, which is well documented:

    var AWS = require('aws-sdk'),
        zlib = require('zlib'),
        fs = require('fs');

    var s3Stream = require('s3-upload-stream')(new AWS.S3());

    // Set the client to be used for the upload.
    AWS.config.loadFromPath('./config.json');
    // or do
    // AWS.config.update({accessKeyId: 'akid', secretAccessKey: 'secret'});

    // Create the streams
    var read = fs.createReadStream('/path/to/a/file');
    var compress = zlib.createGzip();
    var upload = s3Stream.upload({
      "Bucket": "bucket-name",
      "Key": "key-name"
    });

    // Optional configuration
    upload.maxPartSize(20971520); // 20 MB
    upload.concurrentParts(5);

    // Handle errors.
    upload.on('error', function (error) {
      console.log(error);
    });

    /* Handle progress. Example details object:
       { ETag: '"f9ef956c83756a80ad62f54ae5e7d34b"',
         PartNumber: 5,
         receivedSize: 29671068,
         uploadedSize: 29671068 }
    */
    upload.on('part', function (details) {
      console.log(details);
    });

    /* Handle upload completion. Example details object:
       { Location: 'https://bucketName.s3.amazonaws.com/filename.ext',
         Bucket: 'bucketName',
         Key: 'filename.ext',
         ETag: '"bf2acbedf84207d696c8da7dbb205b9f-5"' }
    */
    upload.on('uploaded', function (details) {
      console.log(details);
    });

    // Pipe the incoming filestream through compression, and up to S3.
    read.pipe(compress).pipe(upload);
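One note if you use this module with gzip, tying back to the fix above: as I understand from the module's docs, the object passed to s3Stream.upload() is forwarded to S3 as the multipart-upload parameters, so the same ContentEncoding fix should apply here too (a sketch, based on that assumption):

    // Adaptation of the example above: the destination object is passed
    // through to S3's createMultipartUpload (assumption based on the
    // module's docs), so set ContentEncoding for gzipped content here.
    var upload = s3Stream.upload({
      "Bucket": "bucket-name",
      "Key": "key-name",
      "ContentType": "image/png",
      "ContentEncoding": "gzip"
    });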