Bugfix: Range checking with ParseUrl caused problems - we now use the standard
parent 1441abd15a
commit 1506886947
2 changed files with 9 additions and 45 deletions
@@ -65,7 +65,7 @@ function fetch_url($url,$binary = false, &$redirects = 0, $timeout = 0, $accept_
  * string 'body' => fetched content
  */
 function z_fetch_url($url, $binary = false, &$redirects = 0, $opts = array()) {
-	$ret = array('return_code' => 0, 'success' => false, 'header' => '', 'body' => '');
+	$ret = array('return_code' => 0, 'success' => false, 'header' => '', 'info' => '', 'body' => '');

 	$stamp1 = microtime(true);
@@ -173,6 +173,7 @@ function z_fetch_url($url, $binary = false, &$redirects = 0, $opts = array()) {

 	$base = $s;
 	$curl_info = @curl_getinfo($ch);
+	$ret['info'] = $curl_info;

 	$http_code = $curl_info['http_code'];
 	logger('fetch_url ' . $url . ': ' . $http_code . " " . $s, LOGGER_DATA);
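With the new 'info' entry, callers of z_fetch_url() get the curl metadata back in the result array instead of having to call curl_getinfo() themselves. A minimal usage sketch (the variable names are illustrative, not part of this commit):

	$data = z_fetch_url($url);
	if ($data['success']) {
		$length = $data['info']['download_content_length'];
		$type = $data['info']['content_type'];
		logger('fetched ' . $url . ': ' . $type . ', ' . $length . ' bytes', LOGGER_DEBUG);
	}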
@@ -139,53 +139,24 @@ class ParseUrl {
 	$siteinfo["url"] = $url;
 	$siteinfo["type"] = "link";

-	$check_cert = Config::get("system", "verifyssl");
-
-	$stamp1 = microtime(true);
-
-	$ch = curl_init();
-	curl_setopt($ch, CURLOPT_URL, $url);
-	curl_setopt($ch, CURLOPT_HEADER, 1);
-	curl_setopt($ch, CURLOPT_TIMEOUT, 10);
-	curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
-	curl_setopt($ch, CURLOPT_USERAGENT, $a->get_useragent());
-	curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, (($check_cert) ? true : false));
-	if ($check_cert) {
-		@curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 2);
-	}
-
-	$range = intval(Config::get('system', 'curl_range_bytes', 0));
-
-	if ($range > 0) {
-		curl_setopt($ch, CURLOPT_RANGE, '0-' . $range);
-	}
-
-	$header = curl_exec($ch);
-	$curl_info = @curl_getinfo($ch);
-	curl_close($ch);
-
-	$a->save_timestamp($stamp1, "network");
-
-	if ((($curl_info["http_code"] == "301") || ($curl_info["http_code"] == "302") || ($curl_info["http_code"] == "303") || ($curl_info["http_code"] == "307"))
-		&& (($curl_info["redirect_url"] != "") || ($curl_info["location"] != ""))) {
-		if ($curl_info["redirect_url"] != "") {
-			$siteinfo = self::getSiteinfo($curl_info["redirect_url"], $no_guessing, $do_oembed, ++$count);
-		} else {
-			$siteinfo = self::getSiteinfo($curl_info["location"], $no_guessing, $do_oembed, ++$count);
-		}
-	}
+	$data = z_fetch_url($url);
+	if (!$data['success']) {
+		return($siteinfo);
+	}

 	// If the file is too large then exit
-	if ($curl_info["download_content_length"] > 1000000) {
+	if ($data["info"]["download_content_length"] > 1000000) {
 		return($siteinfo);
 	}

 	// If it isn't a HTML file then exit
-	if (($curl_info["content_type"] != "") && !strstr(strtolower($curl_info["content_type"]), "html")) {
+	if (($data["info"]["content_type"] != "") && !strstr(strtolower($data["info"]["content_type"]), "html")) {
 		return($siteinfo);
 	}

+	$header = $data["header"];
+	$body = $data["body"];

 	if ($do_oembed) {

 		$oembed_data = oembed_fetch_url($url);
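Redirect handling also moves into the shared helper: z_fetch_url() takes &$redirects in its signature, so the manual 301/302/303/307 recursion above could be dropped. A hypothetical caller that wants the hop count could pass its own counter (sketch only, assuming the by-reference parameter is updated by the helper):

	$redirects = 0;
	$data = z_fetch_url($url, false, $redirects);
	if ($data['success']) {
		logger('z_fetch_url followed ' . $redirects . ' redirect(s) for ' . $url, LOGGER_DEBUG);
	}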
@@ -217,14 +188,6 @@ class ParseUrl {
 		$charset = "utf-8";
 	}

-	$pos = strpos($header, "\r\n\r\n");
-
-	if ($pos) {
-		$body = trim(substr($header, $pos));
-	} else {
-		$body = $header;
-	}
-
 	if (($charset != "") && (strtoupper($charset) != "UTF-8")) {
 		logger("parseurl_getsiteinfo: detected charset ".$charset, LOGGER_DEBUG);
 		//$body = mb_convert_encoding($body, "UTF-8", $charset);
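The block removed here only existed because the old code fetched with CURLOPT_HEADER enabled and received headers and body as one string. z_fetch_url() already returns them separately ('header' and 'body' in its result array), so the split is no longer needed; for reference, the removed logic boils down to:

	// body starts after the first blank line of the raw response
	$pos = strpos($header, "\r\n\r\n");
	$body = ($pos ? trim(substr($header, $pos)) : $header);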