使用NodeJS将多个文件上传到AWS S3

我正尝试使用NodeJS将我的目录中的所有文件上传到S3存储桶。如果我在Key:字段中明确地写出文件路径+字面字符串,我能够一次上传一个文件。

以下是我正在使用的脚本:

 var AWS = require('aws-sdk'), fs = require('fs'); // For dev purposes only AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' }); // reg ex to match var re = /\.txt$/; // ensure that this file is in the directory of the files you want to run the cronjob on // ensure that this file is in the directory of the files you want to run the cronjob on fs.readdir(".", function(err, files) { if (err) { console.log( "Could not list the directory.", err) process.exit( 1 ) } var matches = files.filter( function(text) { return re.test(text) } ) console.log("These are the files you have", matches) var numFiles = matches.length if ( numFiles ) { // Read in the file, convert it to base64, store to S3 for( i = 0; i < numFiles; i++ ) { var fileName = matches[i] fs.readFile(fileName, function (err, data) { if (err) { throw err } // Buffer Pattern; how to handle buffers; straw, intake/outtake analogy var base64data = new Buffer(data, 'binary'); var s3 = new AWS.S3() s3.putObject({ 'Bucket': 'noonebetterhaventakenthisbucketnname', 'Key': fileName, 'Body': base64data, 'ACL': 'public-read' }, function (resp) { console.log(arguments); console.log('Successfully uploaded, ', fileName) }) }) } } }) 

对每个试图上传到S3的文件,它产生如下输出:

 These are the files you have [ 'test.txt', 'test2.txt' ] { '0': null, '1': { ETag: '"2cad20c19a8eb9bb11a9f76527aec9bc"' } } Successfully uploaded, test2.txt { '0': null, '1': { ETag: '"2cad20c19a8eb9bb11a9f76527aec9bc"' } } Successfully uploaded, test2.txt 

编辑:已更新为使用变量名 fileName 作为读取的键,而不是直接使用 matches[i]。

为什么只上传了test2.txt?我怎样才能上传matches变量中的每个文件?

参考"在nodejs中异步读取和缓存多个文件"这个问题,得到了一个解决方案。

tl;dr 这是作用域问题——需要用闭包封装变量;可以通过为 fs.readFile 和 s3.putObject 创建一个函数,并在for循环中调用它来实现。

 var AWS = require('aws-sdk'), fs = require('fs'); // For dev purposes only AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' }); var s3 = new AWS.S3() function read(file) { fs.readFile(file, function (err, data) { if (err) { throw err } // Buffer Pattern; how to handle buffers; straw, intake/outtake analogy var base64data = new Buffer(data, 'binary'); s3.putObject({ 'Bucket': 'noonebetterhaventakenthisbucketnname', 'Key': file, 'Body': base64data, 'ACL': 'public-read' }, function (resp) { console.log(arguments); console.log('Successfully uploaded, ', file) }) }) } // reg ex to match var re = /\.txt$/; // ensure that this file is in the directory of the files you want to run the cronjob on fs.readdir(".", function(err, files) { if (err) { console.log( "Could not list the directory.", err) process.exit( 1 ) } var matches = files.filter( function(text) { return re.test(text) } ) console.log("These are the files you have", matches) var numFiles = matches.length if ( numFiles ) { // Read in the file, convert it to base64, store to S3 for( i = 0; i < numFiles; i++ ) { read(matches[i]) } } })