Upload an entire directory tree to S3 using the AWS SDK in Node.js

I currently upload single objects to S3 like this:

    var options = {
        Bucket: bucket,
        Key: s3Path,
        Body: body,
        ACL: s3FilePermissions
    };

    S3.putObject(options, function (err, data) {
        // console.log(data);
    });

However, when I have a large resources folder, for example, I reach for the AWS CLI tool instead.
I was wondering: is there a native way to do the same thing with the AWS SDK, i.e. upload an entire folder to S3?
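For reference, the CLI invocation I mean is the sync command, roughly like this (bucket name and local path are just placeholders):

    aws s3 sync ./resources s3://my-bucket/resources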

I was thinking about this the other day and imagined something like this:

    ...
    var async = require('async'),
        fs = require('fs'),
        path = require("path");

    var directoryName = './test',
        directoryPath = path.resolve(directoryName);

    var files = fs.readdirSync(directoryPath);

    async.map(files, function (f, cb) {
        var filePath = path.join(directoryPath, f);
        var options = {
            Bucket: bucket,
            Key: path.join(s3Path, f), // one key per file under the target prefix
            Body: fs.readFileSync(filePath),
            ACL: s3FilePermissions
        };
        S3.putObject(options, cb);
    }, function (err, results) {
        if (err) console.error(err);
        console.log(results);
    });

An old-school recursive way I whipped up in a hurry. It uses only core Node modules and the standard AWS SDK. Just replace the values of s3Path and bucketName with your own paths.

    var AWS = require('aws-sdk');
    var path = require("path");
    var fs = require('fs');

    var s3 = new AWS.S3();

    const s3Path = path.join(__dirname, "./yourS3Dir");
    var bucketName = 'yourBucketName';

    // Walks the local directory recursively and uploads each file,
    // using its path relative to s3Path as the object key.
    var uploadS3 = function (keyPath) {
        let fullPath = path.join(s3Path, keyPath);
        fs.readdirSync(fullPath).forEach(file => {
            let stats = fs.lstatSync(path.join(fullPath, file));
            if (stats.isDirectory()) {
                // Recurse into the subdirectory, passing the extended key down
                // instead of mutating a shared variable, so sibling entries
                // keep the correct prefix.
                uploadS3(path.join(keyPath, file));
            } else if (stats.isFile()) {
                let params = {
                    Bucket: bucketName,
                    Key: path.join(keyPath, file),
                    Body: fs.readFileSync(path.join(fullPath, file))
                };
                s3.putObject(params, function (err, data) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log('Successfully uploaded ' + params.Key + ' to bucket');
                    }
                });
            }
        });
    };

    uploadS3("");

You can try node-s3-client.

UPDATE: It is available on npm.
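If I remember correctly it is published under the package name s3 (the npm name of node-s3-client; double-check on the registry), so installing it is roughly:

    npm install s3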

From the docs, to sync a directory to S3:

UPDATE: Added the client initialization code.

    var s3 = require('s3'); // the node-s3-client package

    var client = s3.createClient({
        maxAsyncS3: 20,                     // this is the default
        s3RetryCount: 3,                    // this is the default
        s3RetryDelay: 1000,                 // this is the default
        multipartUploadThreshold: 20971520, // this is the default (20 MB)
        multipartUploadSize: 15728640,      // this is the default (15 MB)
        s3Options: {
            accessKeyId: "YOUR ACCESS KEY",
            secretAccessKey: "YOUR SECRET ACCESS KEY"
        }
    });

    var params = {
        localDir: "some/local/dir",
        deleteRemoved: true, // default false, whether to remove s3 objects
                             // that have no corresponding local file.
        s3Params: {
            Bucket: "s3 bucket name",
            Prefix: "some/remote/dir/",
            // other options supported by putObject, except Body and ContentLength.
            // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
        },
    };

    var uploader = client.uploadDir(params);

    uploader.on('error', function (err) {
        console.error("unable to sync:", err.stack);
    });
    uploader.on('progress', function () {
        console.log("progress", uploader.progressAmount, uploader.progressTotal);
    });
    uploader.on('end', function () {
        console.log("done uploading");
    });