/**
 * Builds the file listing for the repository based on the configured URL.
 *
 * A HEAD request is issued first. HTML responses are parsed for embedded
 * resources; any other content type is offered as a single downloadable file.
 *
 * @param mixed $path
 * @param string $page
 * @return array listing structure, or array('e' => message) on HTTP failure
 */
public function get_listing($path = '', $page = '') {
    global $CFG, $OUTPUT;
    $ret = array();
    $curl = new curl();
    $headmsg = $curl->head($this->file_url);
    $info = $curl->get_info();
    if ($info['http_code'] != 200) {
        $ret['e'] = $headmsg;
        return $ret;
    }
    $ret['list'] = array();
    $ret['nosearch'] = true;
    $ret['nologin'] = true;
    $filename = $this->guess_filename($info['url'], $info['content_type']);
    if (strstr($info['content_type'], 'text/html') || empty($info['content_type'])) {
        // Treat the target as a web page and harvest the files linked from it.
        $content = $curl->get($info['url']);
        $this->analyse_page($info['url'], $content, $ret);
    } else {
        // A single non-HTML resource: expose it as one downloadable entry.
        $ret['list'][] = array(
            'title' => $filename,
            'source' => $this->file_url,
            'thumbnail' => $OUTPUT->old_icon_url(file_extension_icon($filename, 32)),
        );
    }
    return $ret;
}
/**
 * Calls the pluginfo.php service and returns the raw response
 *
 * @param string $component
 * @param string $version
 * @return string raw body of the service response
 * @throws tool_installaddon_pluginfo_exception on transport, HTTP or SSL verification errors
 */
protected function call_service($component, $version) {
    global $CFG;
    require_once($CFG->libdir . '/filelib.php');

    $curl = new curl(array('proxy' => true));
    $response = $curl->get(
        $this->service_request_url(),
        $this->service_request_params($component, $version),
        $this->service_request_options()
    );
    $curlerrno = $curl->get_errno();
    $curlinfo = $curl->get_info();

    // Transport-level failure (DNS, connection, timeout, ...).
    if (!empty($curlerrno)) {
        throw new tool_installaddon_pluginfo_exception('err_curl_exec',
            array('url' => $curlinfo['url'], 'errno' => $curlerrno, 'error' => $curl->error));
    }
    // The service must reply with HTTP 200.
    if ($curlinfo['http_code'] != 200) {
        throw new tool_installaddon_pluginfo_exception('err_curl_http_code',
            array('url' => $curlinfo['url'], 'http_code' => $curlinfo['http_code']));
    }
    // Refuse responses whose SSL peer verification failed.
    if (isset($curlinfo['ssl_verify_result']) and $curlinfo['ssl_verify_result'] != 0) {
        throw new tool_installaddon_pluginfo_exception('err_curl_ssl_verify',
            array('url' => $curlinfo['url'], 'ssl_verify_result' => $curlinfo['ssl_verify_result']));
    }
    return $response;
}
/**
 * Fetches content of file from Internet (using proxy if defined). Uses cURL extension if present.
 * Due to security concerns only downloads from http(s) sources are supported.
 *
 * @category files
 * @param string $url file url starting with http(s)://
 * @param array $headers http headers, null if none. If set, should be an
 *   associative array of header name => value pairs.
 * @param array $postdata array means use POST request with given parameters
 * @param bool $fullresponse return headers, responses, etc in a similar way snoopy does
 *   (if false, just returns content)
 * @param int $timeout timeout for complete download process including all file transfer
 *   (default 5 minutes)
 * @param int $connecttimeout timeout for connection to server; this is the timeout that
 *   usually happens if the remote server is completely down (default 20 seconds);
 *   may not work when using proxy
 * @param bool $skipcertverify If true, the peer's SSL certificate will not be checked.
 *   Only use this when already in a trusted location.
 * @param string $tofile store the downloaded content to file instead of returning it.
 * @param bool $calctimeout false by default, true enables an extra head request to try and determine
 *   filesize and appropriately larger timeout based on $CFG->curltimeoutkbitrate
 * @return stdClass|string|bool stdClass object if $fullresponse is true, false if request failed, true
 *   if file downloaded into $tofile successfully or the file content as a string.
 */
function download_file_content($url, $headers = null, $postdata = null, $fullresponse = false,
        $timeout = 300, $connecttimeout = 20, $skipcertverify = false, $tofile = NULL, $calctimeout = false) {
    global $CFG;

    // Only http and https links supported.
    if (!preg_match('|^https?://|i', $url)) {
        if ($fullresponse) {
            $response = new stdClass();
            $response->status = 0;
            $response->headers = array();
            $response->response_code = 'Invalid protocol specified in url';
            $response->results = '';
            $response->error = 'Invalid protocol specified in url';
            return $response;
        }
        return false;
    }

    $options = array();

    // Normalise headers into the "Name: value" strings that cURL expects.
    $formattedheaders = array();
    if (is_array($headers)) {
        foreach ($headers as $key => $value) {
            if (is_numeric($key)) {
                $formattedheaders[] = $value;
            } else {
                $formattedheaders[] = "{$key}: {$value}";
            }
        }
    }

    $options['CURLOPT_SSL_VERIFYPEER'] = $skipcertverify ? false : true;
    $options['CURLOPT_CONNECTTIMEOUT'] = $connecttimeout;
    $options['CURLOPT_FOLLOWLOCATION'] = 1;
    $options['CURLOPT_MAXREDIRS'] = 5;

    // Use POST if requested.
    if (is_array($postdata)) {
        $postdata = format_postdata_for_curlcall($postdata);
    } else if (empty($postdata)) {
        $postdata = null;
    }

    // Optionally attempt to get more correct timeout by fetching the file size.
    if (!isset($CFG->curltimeoutkbitrate)) {
        // Use very slow rate of 56kbps as a timeout speed when not set.
        $bitrate = 56;
    } else {
        $bitrate = $CFG->curltimeoutkbitrate;
    }
    if ($calctimeout and !isset($postdata)) {
        $curl = new curl();
        $curl->setHeader($formattedheaders);
        $curl->head($url, $postdata, $options);
        $info = $curl->get_info();
        $errno = $curl->get_errno();
        if (!$errno && $info['download_content_length'] > 0) {
            // No curl errors - adjust for large files only - take max timeout.
            $timeout = max($timeout, ceil($info['download_content_length'] * 8 / ($bitrate * 1024)));
        }
    }

    $curl = new curl();
    $curl->setHeader($formattedheaders);
    $options['CURLOPT_RETURNTRANSFER'] = true;
    $options['CURLOPT_NOBODY'] = false;
    $options['CURLOPT_TIMEOUT'] = $timeout;

    if ($tofile) {
        $fh = fopen($tofile, 'w');
        if (!$fh) {
            if ($fullresponse) {
                $response = new stdClass();
                $response->status = 0;
                $response->headers = array();
                $response->response_code = 'Can not write to file';
                $response->results = false;
                $response->error = 'Can not write to file';
                return $response;
            }
            return false;
        }
        $options['CURLOPT_FILE'] = $fh;
    }

    if (isset($postdata)) {
        $content = $curl->post($url, $postdata, $options);
    } else {
        $content = $curl->get($url, null, $options);
    }

    if ($tofile) {
        fclose($fh);
        @chmod($tofile, $CFG->filepermissions);
    }

    $info = $curl->get_info();
    $errno = $curl->get_errno();
    $rawheaders = $curl->get_raw_response();

    if ($errno) {
        $error = $content;
        if (!$fullresponse) {
            debugging("cURL request for \"{$url}\" failed with: {$error} ({$errno})", DEBUG_ALL);
            return false;
        }
        $response = new stdClass();
        if ($errno == 28) {
            $response->status = '-100'; // Mimic snoopy.
        } else {
            $response->status = '0';
        }
        $response->headers = array();
        $response->response_code = $error;
        $response->results = false;
        $response->error = $error;
        return $response;
    }

    if ($tofile) {
        $content = true;
    }

    if (empty($info['http_code'])) {
        // For security reasons we support only true http connections (Location: file:// exploit prevention).
        $response = new stdClass();
        $response->status = '0';
        $response->headers = array();
        $response->response_code = 'Unknown cURL error';
        $response->results = false; // do NOT change this, we really want to ignore the result!
        $response->error = 'Unknown cURL error';
    } else {
        $response = new stdClass();
        $response->status = (string) $info['http_code'];
        $response->headers = $rawheaders;
        $response->results = $content;
        $response->error = '';
        // There might be multiple headers on redirect, find the status of the last one.
        $firstline = true;
        foreach ($rawheaders as $line) {
            if ($firstline) {
                $response->response_code = $line;
                $firstline = false;
            }
            if (trim($line, "\r\n") === '') {
                $firstline = true;
            }
        }
    }

    if ($fullresponse) {
        return $response;
    }

    if ($info['http_code'] != 200) {
        debugging("cURL request for \"{$url}\" failed, HTTP response code: " . $response->response_code, DEBUG_ALL);
        return false;
    }
    return $response->results;
}
/**
 * Migrate the references to local files.
 *
 * As the APIv1 is reaching its end of life on the 14th of Dec 2013, and we cannot
 * convert the existing references to new references, we need to convert them
 * to real files.
 *
 * @todo Deprecate/remove this function after the 14th of December 2013.
 * @return void
 */
function repository_boxnet_migrate_references_from_apiv1() {
    global $DB;

    // A string that the old (APIv1) references contain.
    $apiv1signature = '/api/1.0/download/';

    // Downloading the files could take a very long time!
    @set_time_limit(0);

    // Create directory to download temporary files.
    $dir = make_temp_directory('download/repository_boxnet/');

    // Create a dummy file used as replacement content for the broken files.
    $fs = get_file_storage();
    list($dummyhash, $dummysize, $unused) = $fs->add_string_to_pool('Lost reference from Box.net');

    // Get the Box.net instances. There should be only one.
    // Note: get_fieldset_sql() collects the first selected column, i.e. the instance ids (i.id).
    $sql = "SELECT i.id, i.typeid, r.id, r.type FROM {repository} r, {repository_instances} i WHERE i.typeid = r.id AND r.type = :type";
    $ids = $DB->get_fieldset_sql($sql, array('type' => 'boxnet'));
    if (empty($ids)) {
        // We did not find any instance of Box.net. Let's just ignore this migration.
        mtrace('Could not find any instance of the repository, aborting migration...');
        return;
    }

    // The next bit is copied from the function file_storage::instance_sql_fields()
    // because it is private and there is nothing in file_storage that suits our needs here.
    $filefields = array('contenthash', 'pathnamehash', 'contextid', 'component', 'filearea',
        'itemid', 'filepath', 'filename', 'userid', 'filesize', 'mimetype', 'status', 'source',
        'author', 'license', 'timecreated', 'timemodified', 'sortorder', 'referencefileid');
    $referencefields = array('repositoryid' => 'repositoryid', 'reference' => 'reference',
        'lifetime' => 'referencelifetime', 'lastsync' => 'referencelastsync');
    $fields = array();
    $fields[] = 'f.id AS id';
    foreach ($filefields as $field) {
        $fields[] = "f.{$field}";
    }
    foreach ($referencefields as $field => $alias) {
        $fields[] = "r.{$field} AS {$alias}";
    }
    $fields = implode(', ', $fields);

    // We are not using repository::convert_references_to_local() or file_storage::get_external_files()
    // because they would select too many records and load everything in memory as it is not using a recordset.
    // Also, we filter the results not to get the draft area which should not be converted.
    list($sqlfragment, $fragmentparams) = $DB->get_in_or_equal($ids, SQL_PARAMS_NAMED);
    $sql = "SELECT " . $fields . " FROM {files_reference} r LEFT JOIN {files} f ON f.referencefileid = r.id WHERE r.repositoryid $sqlfragment AND f.referencefileid IS NOT NULL AND NOT (f.component = :component AND f.filearea = :filearea)";

    // For each reference we download the file. Then we add it to the file pool and update the references.
    // The reason why we are re-inventing the wheel here is because the current API ends up calling
    // repository::get_file() which includes a download timeout. As we are trying our best to copy
    // the files here, we want to ignore any timeout.
    $filerecords = $DB->get_recordset_sql($sql, array_merge($fragmentparams, array('component' => 'user', 'filearea' => 'draft')));
    $referenceids = array();
    foreach ($filerecords as $filerecord) {
        $file = $fs->get_file_instance($filerecord);
        $reference = unserialize(repository_boxnet::convert_to_valid_reference($file->get_reference()));
        if (empty($reference->downloadurl)) {
            // Something is wrong, the reference has no download URL...
            mtrace('Skipping malformed reference (id: ' . $file->get_referencefileid() . ')');
            continue;
        } else if (strpos($reference->downloadurl, $apiv1signature) === false) {
            // This is not an old reference, we are not supposed to work on those.
            mtrace('Skipping non APIv1 reference (id: ' . $file->get_referencefileid() . ')');
            continue;
        } else if (isset($referenceids[$file->get_referencefileid()])) {
            // We have already worked on that reference, we skip any other file related to it.
            // We cannot work on them here because they have been updated in the database but our
            // recordset does not have those new values. They will be taken care of after this foreach.
            continue;
        }
        mtrace('Starting migration of file reference ' . $file->get_referencefileid());

        // Manually import the file to the file pool to prevent timeout limitations of the repository method get_file().
        // We ignore the fact that the content of the file could exist locally because we want to synchronize the file
        // now to prevent the repository to try to download the file as well.
        $saveas = $dir . uniqid('', true) . '_' . time() . '.tmp';
        $c = new curl();
        $result = $c->download_one($reference->downloadurl, null, array('filepath' => $saveas, 'followlocation' => true));
        $info = $c->get_info();
        if ($result !== true || !isset($info['http_code']) || $info['http_code'] != 200) {
            // There was a problem while trying to download the reference...
            if ($fs->content_exists($file->get_contenthash()) && $file->get_contenthash() != sha1('')) {
                // Fortunately we already had a local version of this reference, so we keep it. We have to
                // set it synchronized or there is a risk that repository::sync_reference() will try to download
                // the file again. We cannot use $file->get_contenthash() and $file->get_filesize() because they
                // cause repository::sync_reference() to be called.
                $file->set_synchronized($filerecord->contenthash, $filerecord->filesize, 0, DAYSECS);
                mtrace('Could not download reference, using last synced file. (id: ' . $file->get_referencefileid() . ')');
            } else {
                // We don't know what the file was, but what can we do? In order to prevent a re-attempt to fetch the
                // file in the next bit of this script (import_external_file()), we set a dummy content to the reference.
                $file->set_synchronized($dummyhash, $dummysize, 0, DAYSECS);
                mtrace('Could not download reference, dummy file used. (id: ' . $file->get_referencefileid() . ')');
            }
        } else {
            try {
                // The file has been downloaded, we add it to the file pool and synchronize
                // all the files using this reference.
                list($contenthash, $filesize, $unused) = $fs->add_file_to_pool($saveas);
                $file->set_synchronized($contenthash, $filesize, 0, DAYSECS);
            } catch (moodle_exception $e) {
                // Something wrong happened while adding the file to the pool...
                mtrace('Something went wrong during sync (id: ' . $file->get_referencefileid() . ')');
            }
        }

        // Log the reference IDs so we do not process the same reference twice.
        $referenceids[$file->get_referencefileid()] = $file->get_referencefileid();

        // Now that the file is downloaded, we can loop over all the files using this reference
        // to convert them to local copies. We have chosen to do that in this loop so that if the
        // execution fails in the middle, we would not have to redownload the files again and again.
        // By the way, we cannot use the records fetched in $filerecords because they will not be updated.
        $sql = "SELECT " . $fields . " FROM {files} f LEFT JOIN {files_reference} r ON f.referencefileid = r.id WHERE f.referencefileid = :refid AND NOT (f.component = :component AND f.filearea = :filearea)";
        $reffilerecords = $DB->get_recordset_sql($sql, array('component' => 'user', 'filearea' => 'draft', 'refid' => $file->get_referencefileid()));
        foreach ($reffilerecords as $reffilerecord) {
            $reffile = $fs->get_file_instance($reffilerecord);
            try {
                // Updating source to remove trace of APIv1 URL.
                $reffile->set_source('Box APIv1 reference');
            } catch (moodle_exception $e) {
                // Do not fail for this lame reason...
            }
            try {
                $fs->import_external_file($reffile);
                mtrace('File using reference converted to local file (id: ' . $reffile->get_id() . ')');
            } catch (moodle_exception $e) {
                // Oh well... we tried what we could! Drop the reference instead.
                $reffile->delete_reference();
                mtrace('Failed to convert file from reference to local file, sorry! (id: ' . $reffile->get_id() . ')');
            }
        }
    }
    mtrace('Migration finished.');
}
/**
 * Synchronize the references.
 *
 * Images are fully downloaded (so previews can be regenerated), other files
 * are checked with a header-only request to obtain their size.
 *
 * @param stored_file $file Stored file.
 * @return boolean
 */
public function sync_reference(stored_file $file) {
    // Bugfix: $CFG->repositorysyncimagetimeout is read below, but $CFG was
    // never imported into the function scope (it would have been null here).
    global $CFG;
    if ($file->get_referencelastsync() + DAYSECS > time()) {
        // Synchronise not more often than once a day.
        return false;
    }
    $c = new curl();
    $reference = unserialize(self::convert_to_valid_reference($file->get_reference()));
    $url = $reference->downloadurl;
    if (file_extension_in_typegroup($file->get_filename(), 'web_image')) {
        // Download images fully so thumbnails can be generated from the pool copy.
        $path = $this->prepare_file('');
        $result = $c->download_one($url, null, array('filepath' => $path, 'timeout' => $CFG->repositorysyncimagetimeout));
        $info = $c->get_info();
        if ($result === true && isset($info['http_code']) && $info['http_code'] == 200) {
            $fs = get_file_storage();
            list($contenthash, $filesize, $newfile) = $fs->add_file_to_pool($path);
            $file->set_synchronized($contenthash, $filesize);
            return true;
        }
    }
    // For other files (or failed image downloads) a header-only request is
    // enough: we just need the content length to record the file size.
    $c->get($url, null, array('timeout' => $CFG->repositorysyncimagetimeout, 'followlocation' => true, 'nobody' => true));
    $info = $c->get_info();
    if (isset($info['http_code']) && $info['http_code'] == 200
            && array_key_exists('download_content_length', $info)
            && $info['download_content_length'] >= 0) {
        $filesize = (int) $info['download_content_length'];
        $file->set_synchronized(null, $filesize);
        return true;
    }
    $file->set_missingsource();
    return true;
}
/**
 * Makes cURL request to get data from the remote site
 *
 * @return string raw request result
 * @throws available_update_checker_exception when the server does not reply with HTTP 200
 */
protected function get_response() {
    $client = new curl(array('proxy' => true));
    $rawresponse = $client->post($this->prepare_request_url(), $this->prepare_request_params());
    $info = $client->get_info();
    // Anything but a plain 200 means the remote site could not serve the data.
    if ($info['http_code'] != 200) {
        throw new available_update_checker_exception('err_response_http_code', $info['http_code']);
    }
    return $rawresponse;
}
// ///////////////////////////////////////////////////////////
// EQUELLA link-checker script: iterates over every recorded EQUELLA resource,
// issues an authenticated HEAD request for each and reports broken links,
// emailing the configured users about any failures.
// NOTE(review): this chunk is truncated below; $password and $notify are
// defined outside the visible portion of the file - confirm against the full source.
if (empty($password)) {
    echo 'EQUELLA link checking has not been configured. Please see the source code for this page.';
    exit;
}
$password_param = required_param('password', PARAM_RAW);
if ($password_param != $password) {
    echo 'Password doesn\'t match.';
    exit;
}
$http = new curl(array('cookie' => true));
echo '<style>.ok {color: green;} .bad {color: red;}</style><ul>';
foreach ($DB->get_records('equella') as $resource) {
    // Append an SSO token so the HEAD request is authenticated against EQUELLA.
    $url = equella_appendtoken($resource->url, equella_getssotoken_api());
    $http->head($url);
    $info = $http->get_info();
    $statuscode = $info['http_code'];
    echo '<li>Checking <a href="' . $resource->url . '">' . $resource->url . '</a><br>';
    if ((int) $statuscode == 200) {
        echo '<span class="ok">OK</span>';
    } else {
        echo '<span class="bad">Could not find in EQUELLA</span><br>';
        // tell someone - get users with course edit perms for the course in question
        $recipients = $DB->get_records_list('user', 'username', $notify);
        if ($recipients) {
            $from = get_admin();
            $subject = get_string('checker.subject', 'equella');
            $course = $DB->get_record('course', array('id' => $resource->course));
            $courseurl = new moodle_url('/course/view.php', array('id' => $course->id));
            $message = get_string('checker.message', 'equella', array('name' => $resource->name, 'url' => $resource->url, 'coursename' => $course->shortname, 'courseurl' => $courseurl));
            echo 'Emailing the following users:<ul>';
/**
 * Check for the availability of a resource by URL.
 *
 * Check is performed using an HTTP HEAD call.
 *
 * @param $url string A valid URL
 * @return bool|string True if no issue is found. The error string message, otherwise
 */
function scorm_check_url($url) {
    $curl = new curl();
    // Restrictive PHP settings forbid CURLOPT_FOLLOWLOCATION, so redirects are
    // only enabled when neither open_basedir nor safe_mode is active.
    if (!ini_get('open_basedir') and !ini_get('safe_mode')) {
        // Same options as in {@link download_file_content()}, used in {@link scorm_parse_scorm()}.
        $curl->setopt(array('CURLOPT_FOLLOWLOCATION' => true, 'CURLOPT_MAXREDIRS' => 5));
    }
    $headmessage = $curl->head($url);
    $info = $curl->get_info();
    if (empty($info['http_code']) || $info['http_code'] != 200) {
        return get_string('invalidurlhttpcheck', 'scorm', array('cmsg' => $headmessage));
    }
    return true;
}
/**
 * Check if the remote site is valid (not localhost and available by the hub)
 * Note: it doesn't matter if the site returns a 404 error.
 * The point here is to check if the site exists. It does not matter if the hub can not call the site,
 * as by security design, a hub should never call a site.
 * However an admin user registering his site should be able to access the site,
 * as people searching on the hub.
 * So we want:
 * a) to check that the url is not a local address
 * b) to check that the site return some not empty headers
 * (it exists, at least the domain name is registered)
 * @param string $url the site url
 * @return boolean true if the site is valid
 */
public function is_remote_site_valid($url) {
    global $CFG;
    require_once($CFG->libdir . '/filelib.php');

    // Reject obviously local addresses straight away.
    // NOTE(review): only the http:// spellings are caught here; e.g.
    // https://localhost would pass this check - confirm whether that matters.
    if (strpos($url, 'http://localhost') !== false or strpos($url, 'http://127.0.0.1') !== false) {
        return false;
    }

    $curl = new curl();
    $curl->setopt(array('CURLOPT_FOLLOWLOCATION' => true, 'CURLOPT_MAXREDIRS' => 3));
    $curl->head($url);
    $info = $curl->get_info();
    // Return true if return code is OK (200) or redirection (302).
    // Redirection occurs for many reasons including redirection to another site that handles single sign-on.
    if ($info['http_code'] === 200 || $info['http_code'] === 302) {
        return true;
    }

    // Some sites respond to head() with a 503. As a fallback try get().
    // We don't just always do get() as it is much slower than head().
    $curl->get($url);
    $info = $curl->get_info();
    if ($info['http_code'] === 200 || $info['http_code'] === 302) {
        return true;
    }

    return false;
}
/**
 * Parses one file (either html or css)
 *
 * Recursively follows linked/imported CSS and records every image found into $list.
 *
 * @param string $baseurl (optional) URL of the file where link to this file was found
 * @param string $relativeurl relative or absolute link to the file
 * @param array $list accumulator for found images / errors
 * @param bool $mainfile true only for main HTML file and false for all embedded/linked files
 */
protected function parse_file($baseurl, $relativeurl, &$list, $mainfile = false) {
    // Strip surrounding quotes if the URL was extracted verbatim from markup.
    if (preg_match('/([\'"])(.*)\\1/', $relativeurl, $matches)) {
        $relativeurl = $matches[2];
    }
    if (empty($baseurl)) {
        $url = $relativeurl;
    } else {
        $url = htmlspecialchars_decode(url_to_absolute($baseurl, $relativeurl));
    }
    if (in_array($url, $this->processedfiles)) {
        // Avoid endless recursion.
        return;
    }
    $this->processedfiles[] = $url;

    $curl = new curl();
    $curl->setopt(array('CURLOPT_FOLLOWLOCATION' => true, 'CURLOPT_MAXREDIRS' => 3));
    $headmsg = $curl->head($url);
    $info = $curl->get_info();
    if ($info['http_code'] != 200) {
        // Only a failure of the main file is worth reporting to the caller.
        if ($mainfile) {
            $list['error'] = $headmsg;
        }
        return;
    }

    $csstoanalyze = '';
    if ($mainfile && (strstr($info['content_type'], 'text/html') || empty($info['content_type']))) {
        // Parse as html.
        $htmlcontent = $curl->get($info['url']);
        $dom = new DOMDocument();
        @$dom->loadHTML($htmlcontent);
        // Extract <img> sources.
        foreach ($dom->getElementsByTagName('img') as $tag) {
            $this->add_image_to_list($info['url'], $tag->getAttribute('src'), $list);
        }
        // Analyse embedded css (<style>).
        foreach ($dom->getElementsByTagName('style') as $tag) {
            if ($tag->getAttribute('type') == 'text/css') {
                $csstoanalyze .= $tag->textContent . "\n";
            }
        }
        // Analyse links to css (<link type='text/css' href='...'>).
        foreach ($dom->getElementsByTagName('link') as $tag) {
            if ($tag->getAttribute('type') == 'text/css' && strlen($tag->getAttribute('href'))) {
                $this->parse_file($info['url'], $tag->getAttribute('href'), $list);
            }
        }
    } else if (strstr($info['content_type'], 'css')) {
        // Parse as css.
        $csstoanalyze .= $curl->get($info['url']) . "\n";
    } else if (strstr($info['content_type'], 'image/')) {
        // Download this file.
        $this->add_image_to_list($info['url'], $info['url'], $list);
    } else {
        $list['error'] = get_string('validfiletype', 'repository_url');
    }

    // Parse all found css styles.
    if (strlen($csstoanalyze)) {
        $urls = extract_css_urls($csstoanalyze);
        if (!empty($urls['property'])) {
            foreach ($urls['property'] as $propertyurl) {
                $this->add_image_to_list($info['url'], $propertyurl, $list);
            }
        }
        if (!empty($urls['import'])) {
            foreach ($urls['import'] as $cssurl) {
                $this->parse_file($info['url'], $cssurl, $list);
            }
        }
    }
}
/**
 * Download the given file into the given destination.
 *
 * This is basically a simplified version of {@link download_file_content()} from
 * Moodle itself, tuned for fetching files from moodle.org servers. Same code is used
 * in mdeploy.php for fetching available updates.
 *
 * @param string $source file url starting with http(s)://
 * @param string $target store the downloaded content to this file (full path)
 * @throws tool_installaddon_installer_exception on write, transport, HTTP or SSL errors
 */
public function download_file($source, $target) {
    global $CFG;
    require_once($CFG->libdir . '/filelib.php');

    $targetfile = fopen($target, 'w');
    if (!$targetfile) {
        throw new tool_installaddon_installer_exception('err_download_write_file', $target);
    }

    $options = array(
        'file' => $targetfile,
        'timeout' => 300,
        'followlocation' => true,
        'maxredirs' => 3,
        'ssl_verifypeer' => true,
        'ssl_verifyhost' => 2,
    );

    $curl = new curl(array('proxy' => true));
    $result = $curl->download_one($source, null, $options);
    $curlinfo = $curl->get_info();
    fclose($targetfile);

    // Transport-level failure (DNS, connection, timeout, ...).
    if ($result !== true) {
        throw new tool_installaddon_installer_exception('err_curl_exec',
            array('url' => $source, 'errorno' => $curl->get_errno(), 'error' => $result));
    }
    // The download server must reply with HTTP 200.
    if (empty($curlinfo['http_code']) or $curlinfo['http_code'] != 200) {
        throw new tool_installaddon_installer_exception('err_curl_http_code',
            array('url' => $source, 'http_code' => $curlinfo['http_code']));
    }
    // Refuse responses whose SSL peer verification failed.
    if (isset($curlinfo['ssl_verify_result']) and $curlinfo['ssl_verify_result'] != 0) {
        throw new tool_installaddon_installer_exception('err_curl_ssl_verify',
            array('url' => $source, 'ssl_verify_result' => $curlinfo['ssl_verify_result']));
    }
}
/**
 * Synchronises an EQUELLA file reference with the remote server.
 *
 * Images are fully downloaded (so previews can be generated), other files are
 * checked with a HEAD request to learn their size only.
 *
 * @param stored_file $file the stored file whose reference should be synced
 * @return bool false when the sync is skipped, true otherwise
 */
public function sync_reference(stored_file $file) {
    // Bugfix: $CFG->repositorysyncimagetimeout and $CFG->repositorysyncfiletimeout
    // are read below, but only $USER was imported into the function scope.
    global $USER, $CFG;
    if ($file->get_referencelastsync() + DAYSECS > time() || !$this->connection_result()) {
        // Synchronise not more often than once a day.
        // If we had several unsuccessful attempts to connect to server - do not try any more.
        return false;
    }
    $ref = @unserialize(base64_decode($file->get_reference()));
    if (!isset($ref->url) || !($url = $this->appendtoken($ref->url))) {
        // Occurs when the user isn't known..
        $file->set_missingsource();
        return true;
    }
    // Use a per-request cookie jar so sessions do not leak between users.
    $cookiepathname = $this->prepare_file($USER->id . '_' . uniqid('', true) . '.cookie');
    $c = new curl(array('cookie' => $cookiepathname));
    if (file_extension_in_typegroup($ref->filename, 'web_image')) {
        $path = $this->prepare_file('');
        $result = $c->download_one($url, null, array('filepath' => $path, 'followlocation' => true,
            'timeout' => $CFG->repositorysyncimagetimeout));
        if ($result === true) {
            // NOTE(review): on this early-return path the cookie jar is not deleted
            // and connection_result() is not updated - preserved original behaviour.
            $fs = get_file_storage();
            list($contenthash, $filesize, $newfile) = $fs->add_file_to_pool($path);
            $file->set_synchronized($contenthash, $filesize);
            return true;
        }
    } else {
        $result = $c->head($url, array('followlocation' => true, 'timeout' => $CFG->repositorysyncfiletimeout));
    }
    // Delete cookie jar.
    if (file_exists($cookiepathname)) {
        unlink($cookiepathname);
    }
    $this->connection_result($c->get_errno());
    $curlinfo = $c->get_info();
    if (isset($curlinfo['http_code']) && $curlinfo['http_code'] == 200
            && array_key_exists('download_content_length', $curlinfo)
            && $curlinfo['download_content_length'] >= 0) {
        // We received a correct header and at least can tell the file size.
        $file->set_synchronized(null, $curlinfo['download_content_length']);
        return true;
    }
    $file->set_missingsource();
    return true;
}
/**
 * Returns information about file in this repository by reference
 * {@link repository::get_file_reference()}
 * {@link repository::get_file()}
 *
 * Returns null if file not found or is not readable
 *
 * @param stdClass $reference file reference db record
 * @return null|stdClass with attribute 'filesize'
 */
public function get_file_by_reference($reference) {
    $decoded = unserialize(self::convert_to_valid_reference($reference->reference));
    $url = $decoded->downloadurl;
    // A header-only request is enough: we just need the content length.
    $c = new curl();
    $c->get($url, null, array('timeout' => self::SYNCIMAGE_TIMEOUT, 'followlocation' => true, 'nobody' => true));
    $info = $c->get_info();
    if (isset($info['http_code']) && $info['http_code'] == 200
            && array_key_exists('download_content_length', $info)
            && $info['download_content_length'] >= 0) {
        return (object) array('filesize' => (int) $info['download_content_length']);
    }
    return null;
}
/**
 * Makes cURL request to get data from the remote site
 *
 * @return string raw request result
 * @throws available_update_checker_exception on cURL transport errors or non-200 replies
 */
protected function get_response() {
    global $CFG;
    require_once($CFG->libdir . '/filelib.php');

    $client = new curl(array('proxy' => true));
    $rawresponse = $client->post($this->prepare_request_url(), $this->prepare_request_params(), $this->prepare_request_options());

    // Transport-level failure (DNS, connection, timeout, ...).
    $errno = $client->get_errno();
    if (!empty($errno)) {
        throw new available_update_checker_exception('err_response_curl', 'cURL error ' . $errno . ': ' . $client->error);
    }
    // Anything but a plain 200 means the remote site could not serve the data.
    $info = $client->get_info();
    if ($info['http_code'] != 200) {
        throw new available_update_checker_exception('err_response_http_code', $info['http_code']);
    }
    return $rawresponse;
}
/**
 * Returns information about file in this repository by reference
 *
 * If the file is an image we download the contents and save it in our filesystem
 * so we can generate thumbnails. Otherwise we just request the file size.
 * Returns null if file not found or can not be accessed
 *
 * @param stdClass $reference file reference db record
 * @return stdClass|null contains one of the following:
 *   - 'filesize' (for non-image files or files we failed to retrieve fully because of timeout)
 *   - 'filepath' (for image files that we retrieved and saved)
 */
public function get_file_by_reference($reference) {
    global $USER;
    $ref = @unserialize(base64_decode($reference->reference));
    if (!isset($ref->url) || !($url = $this->appendtoken($ref->url))) {
        // Occurs when the user isn't known..
        return null;
    }
    $return = null;
    // Use a per-request cookie jar so sessions do not leak between users.
    $cookiepathname = $this->prepare_file($USER->id . '_' . uniqid('', true) . '.cookie');
    $c = new curl(array('cookie' => $cookiepathname));
    if (file_extension_in_typegroup($ref->filename, 'web_image')) {
        $path = $this->prepare_file('');
        $result = $c->download_one($url, null, array('filepath' => $path, 'followlocation' => true, 'timeout' => self::SYNCIMAGE_TIMEOUT));
        if ($result === true) {
            $return = (object) array('filepath' => $path);
        }
    } else {
        $result = $c->head($url, array('followlocation' => true, 'timeout' => self::SYNCFILE_TIMEOUT));
    }
    // Delete cookie jar.
    if (file_exists($cookiepathname)) {
        unlink($cookiepathname);
    }
    $this->connection_result($c->get_errno());
    $curlinfo = $c->get_info();
    if ($return === null && isset($curlinfo['http_code']) && $curlinfo['http_code'] == 200
            && array_key_exists('download_content_length', $curlinfo)
            && $curlinfo['download_content_length'] >= 0) {
        // We received a correct header and at least can tell the file size.
        $return = (object) array('filesize' => $curlinfo['download_content_length']);
    }
    return $return;
}
/**
 * Builds XHTML to display the control.
 * The main purpose of this overloading is to display a warning when https
 * is not supported by the server
 *
 * @param string $data Unused
 * @param string $query
 * @return string XHTML
 */
public function output_html($data, $query = '') {
    global $CFG, $OUTPUT;
    $html = parent::output_html($data, $query);
    if ((string) $data === $this->yes) {
        require_once($CFG->dirroot . "/lib/filelib.php");
        // Force an https url and probe the login page to see whether SSL is usable.
        $httpswwwroot = str_replace('http:', 'https:', $CFG->wwwroot);
        $curl = new curl();
        $curl->head($httpswwwroot . "/login/index.php");
        $info = $curl->get_info();
        if (empty($info['http_code']) or $info['http_code'] >= 400) {
            $html .= $OUTPUT->notification(get_string('nohttpsformobilewarning', 'admin'));
        }
    }
    return $html;
}
/**
 * Returns information about file in this repository by reference
 * {@link repository::get_file_reference()}
 * {@link repository::get_file()}
 *
 * Returns null if file not found or is not readable
 *
 * @param stdClass $reference file reference db record
 * @return null|stdClass with attribute 'filepath' (saved image) or 'filesize'
 */
public function get_file_by_reference($reference) {
    global $USER;
    $ref = unserialize($reference->reference);
    if (!isset($ref->url)) {
        // This is an old-style reference in DB. We need to fix it.
        $ref = unserialize($this->fix_old_style_reference($reference->reference));
    }
    if (!isset($ref->url)) {
        return null;
    }
    $c = new curl;
    $url = $this->get_file_download_link($ref->url);
    if (file_extension_in_typegroup($ref->path, 'web_image')) {
        // Download images fully so thumbnails can be generated.
        $saveas = $this->prepare_file('');
        try {
            $result = $c->download_one($url, array(), array('filepath' => $saveas, 'timeout' => self::SYNCIMAGE_TIMEOUT, 'followlocation' => true));
            $info = $c->get_info();
            if ($result === true && isset($info['http_code']) && $info['http_code'] == 200) {
                return (object) array('filepath' => $saveas);
            }
        } catch (Exception $e) {
            // Fall through to the header-only size check below.
        }
    }
    $c->get($url, null, array('timeout' => self::SYNCIMAGE_TIMEOUT, 'followlocation' => true, 'nobody' => true));
    $info = $c->get_info();
    if (isset($info['http_code']) && $info['http_code'] == 200
            && array_key_exists('download_content_length', $info)
            && $info['download_content_length'] >= 0) {
        return (object) array('filesize' => (int) $info['download_content_length']);
    }
    return null;
}
/**
 * Synchronises an external reference with its source, at most once per day.
 *
 * Web images are downloaded into the filepool and fully synchronised
 * (contenthash + size); other files only get their size refreshed via a
 * body-less request. If neither succeeds the source is marked missing.
 *
 * @param stored_file $file the alias to synchronise
 * @return bool true if synchronisation was attempted, false if skipped
 */
public function sync_reference(stored_file $file) {
    global $CFG;

    if ($file->get_referencelastsync() + DAYSECS > time()) {
        // Already synchronised within the last day; skip.
        return false;
    }

    $ref = unserialize($file->get_reference());
    if (!isset($ref->url)) {
        // Legacy reference format stored in the DB; convert it before use.
        $ref = unserialize($this->fix_old_style_reference($file->get_reference()));
    }
    if (!isset($ref->url)) {
        // Still unusable after conversion.
        return false;
    }

    $curl = new curl();
    $downloadurl = $this->get_file_download_link($ref->url);

    if (file_extension_in_typegroup($ref->path, 'web_image')) {
        // Images are pulled into the filepool so previews can be generated.
        $localpath = $this->prepare_file('');
        try {
            $downloaded = $curl->download_one($downloadurl, array(),
                array('filepath' => $localpath,
                    'timeout' => $CFG->repositorysyncimagetimeout,
                    'followlocation' => true));
            $curlinfo = $curl->get_info();
            if ($downloaded === true && isset($curlinfo['http_code']) && $curlinfo['http_code'] == 200) {
                $fs = get_file_storage();
                list($contenthash, $filesize, $newfile) = $fs->add_file_to_pool($localpath);
                $file->set_synchronized($contenthash, $filesize);
                return true;
            }
        } catch (Exception $e) {
            // Best effort only: fall through to the size probe below.
        }
    }

    // Body-less request: we only want headers to learn the content length.
    $curl->get($downloadurl, null,
        array('timeout' => $CFG->repositorysyncimagetimeout, 'followlocation' => true, 'nobody' => true));
    $curlinfo = $curl->get_info();
    if (isset($curlinfo['http_code']) && $curlinfo['http_code'] == 200
            && array_key_exists('download_content_length', $curlinfo)
            && $curlinfo['download_content_length'] >= 0) {
        $filesize = (int)$curlinfo['download_content_length'];
        $file->set_synchronized(null, $filesize);
        return true;
    }

    $file->set_missingsource();
    return true;
}
/**
 * Performs synchronisation of an external file if the previous one has expired.
 *
 * This function must be implemented for external repositories supporting
 * FILE_REFERENCE; it is called for existing aliases when their filesize,
 * contenthash or timemodified are requested. It is not called for internal
 * repositories (see {@link repository::has_moodle_files()}), references to
 * internal files are updated immediately when source is modified.
 *
 * Referenced files may optionally keep their content in the Moodle filepool
 * (for thumbnail generation or to serve a cached copy); in that case both
 * contenthash and filesize are synchronised. Otherwise only the filesize in
 * bytes is refreshed.
 *
 * This may run for EACH file needing synchronisation on a page, so external
 * requests use a small timeout — a slow sync is worse than a failed one.
 *
 * Note: if you call $file->get_filesize(), $file->get_contenthash() or
 * $file->get_timemodified() here, make sure recursion does not happen.
 *
 * Called from {@link stored_file::sync_external_file()}
 *
 * @inheritDocs
 */
public function sync_reference(stored_file $file) {
    global $CFG;

    if ($file->get_referencelastsync() + DAYSECS > time()) {
        // Only synchronise once per day.
        return false;
    }

    $ref = $this->unpack_reference($file->get_reference());
    if (!isset($ref->url)) {
        // The URL to sync with is missing.
        return false;
    }

    $curl = new curl();
    $downloadurl = $this->get_file_download_link($ref->url);

    if (file_extension_in_typegroup($ref->path, 'web_image')) {
        // Images are pulled into the filepool so previews can be generated.
        $localpath = $this->prepare_file('');
        try {
            $downloaded = $curl->download_one($downloadurl, [],
                ['filepath' => $localpath,
                    'timeout' => $CFG->repositorysyncimagetimeout,
                    'followlocation' => true]);
            $curlinfo = $curl->get_info();
            if ($downloaded === true && isset($curlinfo['http_code']) && $curlinfo['http_code'] == 200) {
                $fs = get_file_storage();
                list($contenthash, $filesize, ) = $fs->add_file_to_pool($localpath);
                $file->set_synchronized($contenthash, $filesize);
                return true;
            }
        } catch (Exception $e) {
            // If download_one fails we will attempt the size probe with
            // get() below anyway.
        }
    }

    // Body-less request: we only want headers to learn the content length.
    $curl->get($downloadurl, null,
        array('timeout' => $CFG->repositorysyncimagetimeout, 'followlocation' => true, 'nobody' => true));
    $curlinfo = $curl->get_info();
    if (isset($curlinfo['http_code']) && $curlinfo['http_code'] == 200
            && array_key_exists('download_content_length', $curlinfo)
            && $curlinfo['download_content_length'] >= 0) {
        $filesize = (int)$curlinfo['download_content_length'];
        $file->set_synchronized(null, $filesize);
        return true;
    }

    $file->set_missingsource();
    return true;
}