Ah, the problem is:
If timeout is greater than 0, then it
will kill the child process if it runs
longer than timeout milliseconds. The
child process is killed with
killSignal (default: 'SIGTERM').
maxBuffer specifies the largest amount
of data allowed on stdout or stderr -
if this value is exceeded then the
child process is killed.
Source: http://nodejs.org/docs/v0.4.8/api/child_processes.html#child_process.exec
So if your image is over the default buffer size of 200*1024 bytes, it is going to be corrupted as you mentioned. I was able to get it to work with the following code:
var fs = require('fs');
var spawn = require('child_process').spawn;
var util = require('util');

// Binary write stream for the converted image
var output_file = fs.createWriteStream('test2.jpg', {encoding: 'binary'});
// `convert test.jpg -` writes the converted image to stdout
var convert = spawn('convert', ['test.jpg', '-']);

// pipe() streams stdout into the file, handles backpressure, and ends
// the file stream when stdout ends. Calling output_file.end() from the
// child's 'exit' event (as before) can truncate the output: 'exit' may
// fire while stdout data is still buffered and not yet delivered.
convert.stdout.pipe(output_file);
Here I used spawn to get a streamable stdout, then I used a Writable Stream to write the data in binary format. Just tested it and was able to open the resulting test2.jpg
image.
EDIT: Yes you can use this to send the result over HTTP. Here's an example of me downsizing an image with convert, then posting the result to the glowfoto API:
var fs = require('fs');
var http = require('http');
var util = require('util');
var spawn = require('child_process').spawn;
var url = require('url');
// Technically the only reason I'm using this
// is to get the XML parsed from the first call
// you probably don't need this, but just in case:
//
// npm install xml2js
var xml = require('xml2js');
// Filled in by GetServerURL() before PreparePost()/MakePost() run
var post_url;
var input_filename = 'giant_image.jpg';
var output_filename = 'giant_image2.jpg';
// The general format of a multipart/form-data part looks something like:
// --[boundary]\r\n
// Content-Disposition: form-data; name="fieldname"\r\n
// \r\n
// field value
// Builds one multipart/form-data part for a plain text field:
//   --[boundary]\r\n
//   Content-Disposition: form-data; name="[name]"\r\n
//   \r\n
//   [value]\r\n
function EncodeFieldPart(boundary,name,value) {
  var lines = [
    "--" + boundary,
    "Content-Disposition: form-data; name=\"" + name + "\"",
    "",
    value,
    ""
  ];
  return lines.join("\r\n");
}
// Same as EncodeFieldPart except that it adds a filename and sets the
// content type (mime) for the part. The file's bytes are appended by
// the caller immediately after this header; hence the trailing \r\n\r\n.
function EncodeFilePart(boundary,type,name,filename) {
  var disposition =
    "Content-Disposition: form-data; name=\"" + name + "\"" +
    "; filename=\"" + filename + "\"";
  return [
    "--" + boundary,
    disposition,
    "Content-Type: " + type,
    "",
    ""
  ].join("\r\n");
}
// We could use Transfer-Encoding: Chunked in the headers
// but not every server supports this. Instead we're going
// to build our post data, then create a buffer from it to
// pass to our MakePost() function. This means you'll have
// 2 copies of the post data sitting around
function PreparePost() {
  // Just a random string I copied from a packet sniff of a mozilla post
  // This can be anything you want really
  var boundary = "---------------------------168072824752491622650073";

  // Text fields first, then the file part header; the file bytes
  // themselves are appended in the readFile callback below
  var post_data = '';
  post_data += EncodeFieldPart(boundary, 'type', 'file');
  post_data += EncodeFieldPart(boundary, 'thumbnail', '400');
  post_data += EncodeFilePart(boundary, 'image/jpeg', 'image', output_filename);

  fs.readFile(output_filename, 'binary', function(err,data){
    // Bug fix: the original ignored `err`, so a failed read appended
    // "undefined" to the body and uploaded a corrupt post
    if (err) {
      console.error('Failed to read ' + output_filename + ': ' + err);
      return;
    }
    post_data += data;
    // This terminates our multi-part data
    post_data += "\r\n--" + boundary + "--";
    // We need to have our network transfer in binary
    // Buffer is a global object
    MakePost(new Buffer(post_data, 'binary'));
  });
}
// POSTs the prepared multipart body (a Buffer) to the URL stored in
// post_url, printing the API's response to the console.
function MakePost(post_data) {
  var parsed_url = url.parse(post_url);
  var post_options = {
    host: parsed_url.hostname,
    port: '80',
    path: parsed_url.pathname,
    method: 'POST',
    headers : {
      // NOTE: this boundary must stay in sync with the one used in
      // PreparePost() to build the body
      'Content-Type' : 'multipart/form-data; boundary=---------------------------168072824752491622650073',
      // post_data is a Buffer, so .length is the byte length
      'Content-Length' : post_data.length
    }
  };
  var post_request = http.request(post_options, function(response){
    response.setEncoding('utf8');
    // Dump the server's (XML) reply as it arrives
    response.on('data', function(chunk){
      console.log(chunk);
    });
  });
  // Bug fix: without an 'error' listener a network failure throws an
  // uncaught exception and kills the process
  post_request.on('error', function(err){
    console.error('Upload failed: ' + err);
  });
  post_request.write(post_data);
  post_request.end();
}
// Glowfoto first makes you get the url of the server
// to upload; once we have it we kick off PreparePost()
function GetServerURL() {
  var response = '';
  var post_options = {
    host: 'www.glowfoto.com',
    port: '80',
    path: '/getserverxml.php'
  };
  var post_req = http.request(post_options, function(res) {
    res.setEncoding('utf8');
    // Here we buildup the xml
    res.on('data', function (chunk) {
      response += chunk;
    });
    // When we're done, we parse the xml
    // Could probably just do string manipulation instead,
    // but just to be safe
    res.on('end', function(){
      var parser = new xml.Parser();
      parser.addListener('end', function(result){
        // Grab the uploadform element value and prepare our post
        post_url = result.uploadform;
        PreparePost();
      });
      // This parses an XML string into a JS object; the result is
      // delivered via the 'end' listener above, so the return value
      // is not needed (the original stored it in an unused local)
      parser.parseString(response);
    });
  });
  // Bug fix: without an 'error' listener a network failure throws an
  // uncaught exception and kills the process
  post_req.on('error', function(err){
    console.error('Could not fetch server XML: ' + err);
  });
  post_req.end();
}
// We use spawn here to get a streaming stdout
// This will use imagemagick to downsize the full image to 30%
var convert = spawn('convert', ['-resize', '30%', input_filename, '-']);
// Create a binary write stream for the resulting file
var output_file = fs.createWriteStream(output_filename, {encoding: 'binary'});
// pipe() streams stdout into the file, handles backpressure, and ends
// the file stream when stdout ends. Calling output_file.end() from the
// child's 'exit' event (as before) can truncate the file: 'exit' may
// fire while stdout data is still buffered.
convert.stdout.pipe(output_file);
// Only start the upload once the file stream has actually been closed,
// so the full image is on disk before PreparePost() reads it back
output_file.on('close', function(){
  GetServerURL();
});
Sample result:
$ node test.js
<?xml version="1.0" encoding="utf-8"?>
<upload>
<thumburl>http://img4.glowfoto.com/images/2011/05/29-0939312591T.jpg</thumburl>
<imageurl>http://www.glowfoto.com/static_image/29-093931L/2591/jpg/05/2011/img4/glowfoto</imageurl>
<codes>http://www.glowfoto.com/getcode.php?srv=img4&img=29-093931L&t=jpg&rand=2591&m=05&y=2011</codes>
</upload>