I am trying to understand how curl multi channel works. Having looked through several locations, including stackoverflow, I now have the following piece of code:
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, MY_STREAM_URL);
curl_setopt($ch, CURLOPT_ENCODING, "gzip");
// CURLOPT_FOLLOWLOCATION expects a bool; the original string "true" only
// worked through implicit type coercion.
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
curl_setopt($ch, CURLOPT_HTTPAUTH, CURLAUTH_BASIC);
curl_setopt($ch, CURLOPT_USERPWD, MY_LOGIN.":".MY_PWD);
// Each received chunk is handed to get_stream(); it must return the number
// of bytes it consumed or cURL aborts the transfer.
curl_setopt($ch, CURLOPT_WRITEFUNCTION, 'get_stream');
curl_setopt($ch, CURLOPT_BUFFERSIZE, 2000);
// Abort if throughput stays below 1 B/s for 120 s (stalled stream).
curl_setopt($ch, CURLOPT_LOW_SPEED_LIMIT, 1);
curl_setopt($ch, CURLOPT_LOW_SPEED_TIME, 120);
// NOTE(review): disabling peer verification allows man-in-the-middle
// attacks; prefer pointing CURLOPT_CAINFO at a CA bundle instead.
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);

$mh = curl_multi_init();
curl_multi_add_handle($mh, $ch);

$running = null;
do {
    // Drive the transfer first so $running is populated before the loop
    // condition is tested (the original selected before the first exec).
    curl_multi_exec($mh, $running);
    // Wait up to 1 s for socket activity. A return of -1 means select()
    // failed (possible on some platforms); sleep briefly so the loop does
    // not spin at 100% CPU.
    if (curl_multi_select($mh, 1.0) === -1) {
        usleep(100000);
    }
} while ($running > 0);

curl_multi_remove_handle($mh, $ch);
curl_multi_close($mh);
/**
 * cURL write callback: reports each chunk's size and arrival time.
 *
 * @param resource $ch   The cURL handle the chunk arrived on (unused).
 * @param string   $data Raw bytes received from the stream.
 *
 * @return int Number of bytes consumed; any other value makes cURL
 *             abort the transfer.
 */
function get_stream($ch, $data) {
    $bytes = strlen($data);
    $stamp = date('Y-M-d H:i:s T');
    echo 'Received '.$bytes.'B on '.$stamp;
    return $bytes;
}
The issue I'm having is that while this works, I am not really sure how to make good use of it (the curl section of the PHP manual did not go into the detail I'm looking for).
What I need to do is grab the data, and write it out to a file when the cumulative data reaches 1MB, then start again with a new file. I'm sure once I figure it out, I'm going to say "duh", but in the meanwhile, I'm a bit confused.
Any guidance would be appreciated.
Here's what I implemented:
/**
 * cURL write callback: logs the chunk, appends it to a global buffer,
 * and flushes the buffer to disk once roughly 1 MB has accumulated.
 *
 * Shares $cum_data_size / $data_buffer globals with write_out_data_file(),
 * which performs the flush and resets both.
 *
 * @param resource $ch   The cURL handle the chunk arrived on (unused).
 * @param string   $data Raw bytes received from the stream.
 *
 * @return int Number of bytes consumed; any other value makes cURL
 *             abort the transfer.
 */
function get_stream($ch, $data) {
    global $cum_data_size;
    global $data_buffer;

    $length = strlen($data);
    echo 'Received '.$length.'B at '.date('Y-M-d H:i:s T');

    $cum_data_size += $length;
    $data_buffer   .= $data;

    // Flush once the accumulated payload reaches the ~1 MB threshold.
    if ($cum_data_size >= 1000000) {
        write_out_data_file();
    }

    return $length;
}
/**
 * Flushes the global download buffer to a timestamped file under
 * DATA_FLDR and resets the accumulation state.
 *
 * The file is named "<unix-time>_.gzip"; two flushes within the same
 * second would reuse the name — presumably single-threaded use, TODO
 * confirm.
 */
function write_out_data_file ()
{
    global $cum_data_size;
    global $data_buffer;

    $path = DATA_FLDR.time().'_.gzip';
    // 'w' truncates/creates for writing; the '+' (read access) in the
    // original was never used. 'b' keeps the gzip payload binary-safe
    // on Windows.
    $handle = fopen($path, 'wb');
    if ($handle === false) {
        // fopen() returns false on failure; the original passed that
        // straight to fwrite() (a fatal TypeError in PHP 8) and would
        // have discarded the buffer. Keep the buffer so the next chunk
        // retries the flush.
        trigger_error('Unable to open '.$path.' for writing', E_USER_WARNING);
        return;
    }
    fwrite($handle, $data_buffer);
    fclose($handle);

    $data_buffer = '';
    $cum_data_size = 0;
}