Remove sleeps from curl handling; select is responsible for waiting
The current sleeps mean that large files download slowly: select returns quickly as soon as data has arrived and needs to be processed, but the sleep then keeps waiting even while the buffers are full. On the flip side, we need to ensure that code which would keep the CPU busy if run too often is not run on every select return.
parent a20ee1a448 · commit c043fe841b
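To make the intent concrete, here is a minimal, self-contained sketch of the pattern the change moves towards. This is not Composer's code: the URL, the 1-second select timeout and the 0.1-second throttle interval are illustrative only. curl_multi_select() is what blocks until there is data to process, and a timestamp gate keeps the more expensive curl_getinfo() bookkeeping from running on every wakeup.

<?php
// Sketch only (not CurlDownloader): select-driven waiting plus throttled bookkeeping.
$multiHandle = curl_multi_init();
$curlHandle = curl_init('https://example.org/some-large-file'); // placeholder URL
curl_setopt($curlHandle, CURLOPT_RETURNTRANSFER, true);
curl_multi_add_handle($multiHandle, $curlHandle);

$lastInfoUpdate = 0;
$active = 0;
do {
    $status = curl_multi_exec($multiHandle, $active);

    // select is responsible for waiting: block here until sockets are ready
    // (or the timeout elapses) instead of usleep()ing on every iteration.
    curl_multi_select($multiHandle, 1.0);

    // Time-gate the expensive part so it does not run on every select return.
    if (microtime(true) - $lastInfoUpdate > 0.1) {
        $lastInfoUpdate = microtime(true);
        $info = curl_getinfo($curlHandle);
        // e.g. report progress or enforce a size limit here using
        // $info['size_download'] and $info['download_content_length']
    }
} while ($active > 0 && $status === CURLM_OK);

curl_multi_remove_handle($multiHandle, $curlHandle);
curl_multi_close($multiHandle);

The hunks below apply those two ideas to CurlDownloader, HttpDownloader and Loop.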
@@ -49,6 +49,7 @@ class CurlDownloader
         CURLM_OUT_OF_MEMORY => array('CURLM_OUT_OF_MEMORY', 'You are doomed.'),
         CURLM_INTERNAL_ERROR => array('CURLM_INTERNAL_ERROR', 'This can only be returned if libcurl bugs. Please report it to us!'),
     );
+    private $lastInfoUpdate = 0;
 
     private static $options = array(
         'http' => array(

@@ -240,6 +241,7 @@ class CurlDownloader
         }
 
         $this->checkCurlResult(curl_multi_add_handle($this->multiHandle, $curlHandle));
+        $this->lastInfoUpdate = 0;
         // TODO progress
 
         return (int) $curlHandle;

@@ -390,30 +392,35 @@ class CurlDownloader
             }
         }
 
-        foreach ($this->jobs as $i => $curlHandle) {
-            if (!isset($this->jobs[$i])) {
-                continue;
-            }
-            $curlHandle = $this->jobs[$i]['curlHandle'];
-            $progress = array_diff_key(curl_getinfo($curlHandle), self::$timeInfo);
-
-            if ($this->jobs[$i]['progress'] !== $progress) {
-                $this->jobs[$i]['progress'] = $progress;
-
-                if (isset($this->jobs[$i]['options']['max_file_size'])) {
-                    // Compare max_file_size with the content-length header this value will be -1 until the header is parsed
-                    if ($this->jobs[$i]['options']['max_file_size'] < $progress['download_content_length']) {
-                        throw new MaxFileSizeExceededException('Maximum allowed download size reached. Content-length header indicates ' . $progress['download_content_length'] . ' bytes. Allowed ' . $this->jobs[$i]['options']['max_file_size'] . ' bytes');
-                    }
-
-                    // Compare max_file_size with the download size in bytes
-                    if ($this->jobs[$i]['options']['max_file_size'] < $progress['size_download']) {
-                        throw new MaxFileSizeExceededException('Maximum allowed download size reached. Downloaded ' . $progress['size_download'] . ' of allowed ' . $this->jobs[$i]['options']['max_file_size'] . ' bytes');
-                    }
-                }
-
-                // TODO progress
-            }
-        }
+        if (microtime(true) - $this->lastInfoUpdate > 0.1) {
+            $this->lastInfoUpdate = microtime(true);
+
+            foreach ($this->jobs as $i => $curlHandle) {
+                if (!isset($this->jobs[$i])) {
+                    continue;
+                }
+                $curlHandle = $this->jobs[$i]['curlHandle'];
+                $progress = array_diff_key(curl_getinfo($curlHandle), self::$timeInfo);
+
+                if ($this->jobs[$i]['progress'] !== $progress) {
+                    $this->jobs[$i]['progress'] = $progress;
+
+                    if (isset($this->jobs[$i]['options']['max_file_size'])) {
+                        // Compare max_file_size with the content-length header this value will be -1 until the header is parsed
+                        if ($this->jobs[$i]['options']['max_file_size'] < $progress['download_content_length']) {
+                            throw new MaxFileSizeExceededException('Maximum allowed download size reached. Content-length header indicates ' . $progress['download_content_length'] . ' bytes. Allowed ' . $this->jobs[$i]['options']['max_file_size'] . ' bytes');
+                        }
+
+                        // Compare max_file_size with the download size in bytes
+                        if ($this->jobs[$i]['options']['max_file_size'] < $progress['size_download']) {
+                            throw new MaxFileSizeExceededException('Maximum allowed download size reached. Downloaded ' . $progress['size_download'] . ' of allowed ' . $this->jobs[$i]['options']['max_file_size'] . ' bytes');
+                        }
+                    }
+
+                    // TODO progress
+                }
+            }
+        }
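A note on the max_file_size checks in the hunk above: both values being compared come from curl_getinfo(), and only one of them is known before the response headers arrive. A rough illustration ($curlHandle and $maxFileSize are placeholders, not Composer variables):

$info = curl_getinfo($curlHandle);
$expected = $info['download_content_length']; // -1 until a Content-Length header has been parsed
$received = $info['size_download'];           // bytes actually received so far

// While $expected is still -1 the first comparison cannot trigger, which is presumably
// why the running size_download check exists as well (it also covers responses that
// never send a Content-Length header).
if ($maxFileSize < $expected || $maxFileSize < $received) {
    // abort the transfer, as the code above does by throwing MaxFileSizeExceededException
}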
@@ -340,8 +340,6 @@ class HttpDownloader
             if (!$this->countActiveJobs($index)) {
                 return;
             }
-
-            usleep(1000);
         }
     }
 
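The hunk above drops the usleep(1000) from the wait loop in HttpDownloader; per the description at the top, the select call in the curl layer is what should block until there is work, so an extra sleep only adds latency once data is ready. As a purely hypothetical back-of-the-envelope (it assumes 64 KiB of data handled per loop pass, which is not how curl actually batches reads; 1000 and 5000 are the two sleep values removed in this change), a fixed per-iteration sleep puts a hard ceiling on throughput:

<?php
// Hypothetical throughput ceiling from sleeping a fixed amount per loop iteration,
// assuming 64 KiB handled per pass (an assumption for illustration, not a measurement).
$bufferBytes = 64 * 1024;
foreach (array(1000, 5000) as $sleepMicroseconds) {
    $iterationsPerSecond = 1e6 / $sleepMicroseconds;      // bounded by the sleep alone
    $ceilingBytes = $bufferBytes * $iterationsPerSecond;  // best case, even with instant I/O
    printf(
        "usleep(%d) caps the loop at roughly %.1f MiB/s\n",
        $sleepMicroseconds,
        $ceilingBytes / (1024 * 1024)
    );
}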
@@ -85,6 +85,7 @@ class Loop
             $progress->start($totalJobs);
         }
 
+        $lastUpdate = 0;
         while (true) {
             $activeJobs = 0;
 

@@ -95,15 +96,20 @@ class Loop
                 $activeJobs += $this->processExecutor->countActiveJobs();
             }
 
-            if ($progress) {
+            if ($progress && microtime(true) - $lastUpdate > 0.1) {
+                $lastUpdate = microtime(true);
+                echo "setting progress\n";
                 $progress->setProgress($progress->getMaxSteps() - $activeJobs);
             }
 
             if (!$activeJobs) {
                 break;
             }
-
-            usleep(5000);
         }
 
+        // as we skip progress updates if they are too quick, make sure we do one last one here at 100%
+        if ($progress) {
+            $progress->setProgress($progress->getMaxSteps());
+        }
+
         unset($this->currentPromises[$waitIndex]);