/**
 * Get a file listing for the configured URL.
 *
 * Sends an HTTP HEAD request first: if the URL points to an HTML page (or the
 * content type is unknown) the page is analysed for embedded files, otherwise
 * the URL itself is offered as a single downloadable file.
 *
 * @param string $path unused, present for the repository API signature
 * @param string $page unused, present for the repository API signature
 * @return array listing array; contains key 'e' with the error message on failure
 */
public function get_listing($path = '', $page = '') {
    global $CFG, $OUTPUT;
    $ret = array();
    $curl = new curl();
    $msg = $curl->head($this->file_url);
    $info = $curl->get_info();
    if ($info['http_code'] != 200) {
        // The HEAD request failed - report the error message to the caller.
        $ret['e'] = $msg;
    } else {
        $ret['list'] = array();
        $ret['nosearch'] = true;
        $ret['nologin'] = true;
        $filename = $this->guess_filename($info['url'], $info['content_type']);
        if (strstr($info['content_type'], 'text/html') || empty($info['content_type'])) {
            // Analyse this web page for a general file list.
            // (The redundant re-initialisation of $ret['list'] was removed -
            // it is already set above.)
            $content = $curl->get($info['url']);
            $this->analyse_page($info['url'], $content, $ret);
        } else {
            // Not a web page: offer the URL itself as a single file download.
            $ret['list'][] = array('title' => $filename,
                'source' => $this->file_url,
                'thumbnail' => $OUTPUT->old_icon_url(file_extension_icon($filename, 32)));
        }
    }
    return $ret;
}
/**
 * Send the request via our curl object.
 *
 * @param curl $curl prepared curl object.
 * @param Google_HttpRequest $request The request.
 * @return string result of the request.
 * @throws coding_exception if the request method is not POST, GET, HEAD or PUT.
 */
private function do_request($curl, $request) {
    $url = $request->getUrl();
    $method = $request->getRequestMethod();
    switch (strtoupper($method)) {
        case 'POST':
            $ret = $curl->post($url, $request->getPostBody());
            break;
        case 'GET':
            $ret = $curl->get($url);
            break;
        case 'HEAD':
            $ret = $curl->head($url);
            break;
        case 'PUT':
            $ret = $curl->put($url);
            break;
        default:
            // No break required here: throw never falls through
            // (the original unreachable break was removed).
            throw new coding_exception('Unknown request type: ' . $method);
    }
    return $ret;
}
/**
 * Check for the availability of a resource by URL.
 *
 * The check is performed using an HTTP HEAD call.
 *
 * @param $url string A valid URL
 * @return bool|string True if no issue is found. The error string message, otherwise
 */
function scorm_check_url($url) {
    $httpclient = new curl();
    // Only follow redirects when PHP restrictions allow it.
    // Same options as in {@link download_file_content()}, used in {@link scorm_parse_scorm()}.
    $redirectsallowed = !ini_get('open_basedir') and !ini_get('safe_mode');
    if ($redirectsallowed) {
        $httpclient->setopt(array('CURLOPT_FOLLOWLOCATION' => true, 'CURLOPT_MAXREDIRS' => 5));
    }
    $headresult = $httpclient->head($url);
    $requestinfo = $httpclient->get_info();
    if (!empty($requestinfo['http_code']) && $requestinfo['http_code'] == 200) {
        return true;
    }
    // Anything other than a clean 200 is reported as a localised error message.
    return get_string('invalidurlhttpcheck', 'scorm', array('cmsg' => $headresult));
}
/**
 * Builds XHTML to display the control.
 *
 * The main purpose of this overloading is to display a warning when https
 * is not supported by the server.
 *
 * @param string $data Unused
 * @param string $query
 * @return string XHTML
 */
public function output_html($data, $query = '') {
    global $CFG, $OUTPUT;
    $html = parent::output_html($data, $query);
    // Only probe the https endpoint when the setting is enabled.
    if ((string) $data !== $this->yes) {
        return $html;
    }
    require_once($CFG->dirroot . "/lib/filelib.php");
    // Force an https url and try to reach the login page over it.
    $httpswwwroot = str_replace('http:', 'https:', $CFG->wwwroot);
    $probe = new curl();
    $probe->head($httpswwwroot . "/login/index.php");
    $probeinfo = $probe->get_info();
    if (empty($probeinfo['http_code']) || $probeinfo['http_code'] >= 400) {
        // The https login page is unreachable - warn the admin.
        $html .= $OUTPUT->notification(get_string('nohttpsformobilewarning', 'admin'));
    }
    return $html;
}
/**
 * Checks if the mdeploy.php will be able to fetch the ZIP from the given URL
 *
 * This is mainly supposed to check if the transmission over HTTPS would
 * work. That is, if the CA certificates are present at the server.
 *
 * @param string $downloadurl the URL of the ZIP package to download
 * @return bool true when the HEAD request completed without a curl-level error
 */
protected function update_downloadable($downloadurl) {
    global $CFG;
    // Verify the peer certificate so missing/broken CA bundles are detected.
    $headoptions = array(
        'CURLOPT_SSL_VERIFYHOST' => 2,
        'CURLOPT_SSL_VERIFYPEER' => true,
    );
    $client = new curl(array('proxy' => true));
    $client->head($downloadurl, $headoptions);
    // Success is simply the absence of a curl error code.
    $errorcode = $client->get_errno();
    return empty($errorcode);
}
/**
 * Fetches content of file from Internet (using proxy if defined). Uses cURL extension if present.
 * Due to security concerns only downloads from http(s) sources are supported.
 *
 * @category files
 * @param string $url file url starting with http(s)://
 * @param array $headers http headers, null if none. If set, should be an
 *   associative array of header name => value pairs.
 * @param array $postdata array means use POST request with given parameters
 * @param bool $fullresponse return headers, responses, etc in a similar way snoopy does
 *   (if false, just returns content)
 * @param int $timeout timeout for complete download process including all file transfer
 *   (default 5 minutes)
 * @param int $connecttimeout timeout for connection to server; this is the timeout that
 *   usually happens if the remote server is completely down (default 20 seconds);
 *   may not work when using proxy
 * @param bool $skipcertverify If true, the peer's SSL certificate will not be checked.
 *   Only use this when already in a trusted location.
 * @param string $tofile store the downloaded content to file instead of returning it.
 * @param bool $calctimeout false by default, true enables an extra head request to try and determine
 *   filesize and appropriately larger timeout based on $CFG->curltimeoutkbitrate
 * @return stdClass|string|bool stdClass object if $fullresponse is true, false if request failed, true
 *   if file downloaded into $tofile successfully or the file content as a string.
 */
function download_file_content($url, $headers = null, $postdata = null, $fullresponse = false, $timeout = 300, $connecttimeout = 20, $skipcertverify = false, $tofile = NULL, $calctimeout = false) {
    global $CFG;
    // Only http and https links supported.
    if (!preg_match('|^https?://|i', $url)) {
        if ($fullresponse) {
            // Mimic the full snoopy-style response object even for the early failure.
            $response = new stdClass();
            $response->status = 0;
            $response->headers = array();
            $response->response_code = 'Invalid protocol specified in url';
            $response->results = '';
            $response->error = 'Invalid protocol specified in url';
            return $response;
        } else {
            return false;
        }
    }
    $options = array();
    // Normalise headers: numeric keys are taken as already-formatted header
    // lines, string keys are turned into "Name: value" lines.
    $headers2 = array();
    if (is_array($headers)) {
        foreach ($headers as $key => $value) {
            if (is_numeric($key)) {
                $headers2[] = $value;
            } else {
                $headers2[] = "{$key}: {$value}";
            }
        }
    }
    if ($skipcertverify) {
        $options['CURLOPT_SSL_VERIFYPEER'] = false;
    } else {
        $options['CURLOPT_SSL_VERIFYPEER'] = true;
    }
    $options['CURLOPT_CONNECTTIMEOUT'] = $connecttimeout;
    $options['CURLOPT_FOLLOWLOCATION'] = 1;
    $options['CURLOPT_MAXREDIRS'] = 5;
    // Use POST if requested.
    if (is_array($postdata)) {
        $postdata = format_postdata_for_curlcall($postdata);
    } else {
        if (empty($postdata)) {
            $postdata = null;
        }
    }
    // Optionally attempt to get more correct timeout by fetching the file size.
    if (!isset($CFG->curltimeoutkbitrate)) {
        // Use very slow rate of 56kbps as a timeout speed when not set.
        $bitrate = 56;
    } else {
        $bitrate = $CFG->curltimeoutkbitrate;
    }
    if ($calctimeout and !isset($postdata)) {
        // Extra HEAD request just to learn the content length before the real
        // download (only done for GET-style requests).
        $curl = new curl();
        $curl->setHeader($headers2);
        $curl->head($url, $postdata, $options);
        $info = $curl->get_info();
        $error_no = $curl->get_errno();
        if (!$error_no && $info['download_content_length'] > 0) {
            // No curl errors - adjust for large files only - take max timeout.
            $timeout = max($timeout, ceil($info['download_content_length'] * 8 / ($bitrate * 1024)));
        }
    }
    // The actual transfer uses a fresh curl instance.
    $curl = new curl();
    $curl->setHeader($headers2);
    $options['CURLOPT_RETURNTRANSFER'] = true;
    $options['CURLOPT_NOBODY'] = false;
    $options['CURLOPT_TIMEOUT'] = $timeout;
    if ($tofile) {
        // Stream the response body directly into the target file.
        $fh = fopen($tofile, 'w');
        if (!$fh) {
            if ($fullresponse) {
                $response = new stdClass();
                $response->status = 0;
                $response->headers = array();
                $response->response_code = 'Can not write to file';
                $response->results = false;
                $response->error = 'Can not write to file';
                return $response;
            } else {
                return false;
            }
        }
        $options['CURLOPT_FILE'] = $fh;
    }
    if (isset($postdata)) {
        $content = $curl->post($url, $postdata, $options);
    } else {
        $content = $curl->get($url, null, $options);
    }
    if ($tofile) {
        fclose($fh);
        @chmod($tofile, $CFG->filepermissions);
    }
/*
    // Try to detect encoding problems.
    if ((curl_errno($ch) == 23 or curl_errno($ch) == 61) and defined('CURLOPT_ENCODING')) {
        curl_setopt($ch, CURLOPT_ENCODING, 'none');
        $result = curl_exec($ch);
    }
*/
    $info = $curl->get_info();
    $error_no = $curl->get_errno();
    $rawheaders = $curl->get_raw_response();
    if ($error_no) {
        // Curl-level failure (timeout, DNS, SSL, ...): $content holds the error text.
        $error = $content;
        if (!$fullresponse) {
            debugging("cURL request for \"{$url}\" failed with: {$error} ({$error_no})", DEBUG_ALL);
            return false;
        }
        $response = new stdClass();
        if ($error_no == 28) {
            // Error 28 is a curl timeout.
            $response->status = '-100'; // Mimic snoopy.
        } else {
            $response->status = '0';
        }
        $response->headers = array();
        $response->response_code = $error;
        $response->results = false;
        $response->error = $error;
        return $response;
    }
    if ($tofile) {
        // When writing to a file the "content" result is just a success flag.
        $content = true;
    }
    if (empty($info['http_code'])) {
        // For security reasons we support only true http connections (Location: file:// exploit prevention).
        $response = new stdClass();
        $response->status = '0';
        $response->headers = array();
        $response->response_code = 'Unknown cURL error';
        $response->results = false; // do NOT change this, we really want to ignore the result!
        $response->error = 'Unknown cURL error';
    } else {
        $response = new stdClass();
        $response->status = (string) $info['http_code'];
        $response->headers = $rawheaders;
        $response->results = $content;
        $response->error = '';
        // There might be multiple headers on redirect, find the status of the last one.
        $firstline = true;
        foreach ($rawheaders as $line) {
            if ($firstline) {
                $response->response_code = $line;
                $firstline = false;
            }
            if (trim($line, "\r\n") === '') {
                // A blank line separates header sets; the next line starts a new status.
                $firstline = true;
            }
        }
    }
    if ($fullresponse) {
        return $response;
    }
    if ($info['http_code'] != 200) {
        debugging("cURL request for \"{$url}\" failed, HTTP response code: " . $response->response_code, DEBUG_ALL);
        return false;
    }
    return $response->results;
}
/**
 * Checks if the mdeploy.php will be able to fetch the ZIP from the given URL
 *
 * This is mainly supposed to check if the transmission over HTTPS would
 * work. That is, if the CA certificates are present at the server.
 *
 * @param string $downloadurl the URL of the ZIP package to download
 * @return bool true when the HEAD request completed without a curl-level error
 */
protected function update_downloadable($downloadurl) {
    global $CFG;
    $headoptions = array(
        'CURLOPT_SSL_VERIFYHOST' => 2,
        'CURLOPT_SSL_VERIFYPEER' => true,
    );
    $cacertfile = $CFG->dataroot . '/moodleorgca.crt';
    if (is_readable($cacertfile)) {
        // Do not use CA certs provided by the operating system. Instead,
        // use this CA cert to verify the updates provider.
        $headoptions['CURLOPT_CAINFO'] = $cacertfile;
    }
    $client = new curl(array('proxy' => true));
    $client->head($downloadurl, $headoptions);
    // Success is simply the absence of a curl error code.
    $errorcode = $client->get_errno();
    return empty($errorcode);
}
/**
 * Returns information about file in this repository by reference
 *
 * If the file is an image we download the contents and save it in our filesystem
 * so we can generate thumbnails. Otherwise we just request the file size.
 * Returns null if file not found or can not be accessed
 *
 * @param stdClass $reference file reference db record
 * @return stdClass|null contains one of the following:
 *   - 'filesize' (for non-image files or files we failed to retrieve fully because of timeout)
 *   - 'filepath' (for image files that we retrieved and saved)
 */
public function get_file_by_reference($reference) {
    global $USER;
    $ref = @unserialize(base64_decode($reference->reference));
    if (!isset($ref->url) || !($url = $this->appendtoken($ref->url))) {
        // Occurs when the user isn't known..
        return null;
    }
    $fileinfo = null;
    // Use a throw-away cookie jar for this single request.
    $cookiepathname = $this->prepare_file($USER->id. '_'. uniqid('', true). '.cookie');
    $client = new curl(array('cookie' => $cookiepathname));
    if (file_extension_in_typegroup($ref->filename, 'web_image')) {
        // Images are downloaded fully so thumbnails can be generated later.
        $localpath = $this->prepare_file('');
        $downloaded = $client->download_one($url, null,
            array('filepath' => $localpath, 'followlocation' => true, 'timeout' => self::SYNCIMAGE_TIMEOUT));
        if ($downloaded === true) {
            $fileinfo = (object)array('filepath' => $localpath);
        }
    } else {
        // For any other file type a HEAD request is enough to learn the size.
        $client->head($url, array('followlocation' => true, 'timeout' => self::SYNCFILE_TIMEOUT));
    }
    // Delete cookie jar.
    if (file_exists($cookiepathname)) {
        unlink($cookiepathname);
    }
    $this->connection_result($client->get_errno());
    $headerinfo = $client->get_info();
    if ($fileinfo === null
            && isset($headerinfo['http_code']) && $headerinfo['http_code'] == 200
            && array_key_exists('download_content_length', $headerinfo)
            && $headerinfo['download_content_length'] >= 0) {
        // we received a correct header and at least can tell the file size
        $fileinfo = (object)array('filesize' => $headerinfo['download_content_length']);
    }
    return $fileinfo;
}
/**
 * Check if the remote site is valid (not localhost and available by the hub)
 * Note: it doesn't matter if the site returns a 404 error.
 * The point here is to check if the site exists. It does not matter if the hub can not call the site,
 * as by security design, a hub should never call a site.
 * However an admin user registering his site should be able to access the site,
 * as people searching on the hub.
 * So we want:
 * a) to check that the url is not a local address
 * b) to check that the site return some not empty headers
 * (it exists, at least the domain name is registered)
 * @param string $url the site url
 * @return boolean true if the site is valid
 */
public function is_remote_site_valid($url) {
    global $CFG;
    require_once($CFG->libdir . '/filelib.php');
    // Reject local addresses straight away.
    if (strpos($url, 'http://localhost') !== false || strpos($url, 'http://127.0.0.1') !== false) {
        return false;
    }
    $client = new curl();
    $client->setopt(array('CURLOPT_FOLLOWLOCATION' => true, 'CURLOPT_MAXREDIRS' => 3));
    $client->head($url);
    $headinfo = $client->get_info();
    // Return true if return code is OK (200) or redirection (302).
    // Redirection occurs for many reasons including redirection to another site that handles single sign-on.
    if ($headinfo['http_code'] === 200 || $headinfo['http_code'] === 302) {
        return true;
    }
    // Some sites respond to head() with a 503.
    // As a fallback try get().
    // We don't just always do get() as it is much slower than head().
    $client->get($url);
    $getinfo = $client->get_info();
    return $getinfo['http_code'] === 200 || $getinfo['http_code'] === 302;
}
/**
 * Parses one file (either html or css) looking for images and further CSS to crawl.
 *
 * HTML pages are scanned for <img>, inline <style> blocks and linked
 * stylesheets; CSS files are scanned for url() properties and @import rules.
 * Linked/imported files are processed recursively (with recursion guarded by
 * $this->processedfiles).
 *
 * @param string $baseurl (optional) URL of the file where link to this file was found
 * @param string $relativeurl relative or absolute link to the file
 * @param array $list accumulated listing; images are appended, 'error' may be set
 * @param bool $mainfile true only for the main HTML file, false for all embedded/linked files
 */
protected function parse_file($baseurl, $relativeurl, &$list, $mainfile = false) {
    // Strip surrounding quotes (the url may come from a CSS url("...") token).
    if (preg_match('/([\'"])(.*)\\1/', $relativeurl, $matches)) {
        $relativeurl = $matches[2];
    }
    if (empty($baseurl)) {
        $url = $relativeurl;
    } else {
        // Resolve the link against the page it was found on.
        $url = htmlspecialchars_decode(url_to_absolute($baseurl, $relativeurl));
    }
    if (in_array($url, $this->processedfiles)) {
        // avoid endless recursion
        return;
    }
    $this->processedfiles[] = $url;
    $curl = new curl();
    $curl->setopt(array('CURLOPT_FOLLOWLOCATION' => true, 'CURLOPT_MAXREDIRS' => 3));
    // HEAD first to learn the content type before downloading anything.
    $msg = $curl->head($url);
    $info = $curl->get_info();
    if ($info['http_code'] != 200) {
        // Only report errors for the main file; broken embedded links are ignored.
        if ($mainfile) {
            $list['error'] = $msg;
        }
    } else {
        $csstoanalyze = '';
        if ($mainfile && (strstr($info['content_type'], 'text/html') || empty($info['content_type']))) {
            // parse as html
            $htmlcontent = $curl->get($info['url']);
            $ddoc = new DOMDocument();
            // Suppress libxml warnings on malformed real-world HTML.
            @$ddoc->loadHTML($htmlcontent);
            // extract <img>
            $tags = $ddoc->getElementsByTagName('img');
            foreach ($tags as $tag) {
                $url = $tag->getAttribute('src');
                $this->add_image_to_list($info['url'], $url, $list);
            }
            // analyse embedded css (<style>)
            $tags = $ddoc->getElementsByTagName('style');
            foreach ($tags as $tag) {
                if ($tag->getAttribute('type') == 'text/css') {
                    $csstoanalyze .= $tag->textContent . "\n";
                }
            }
            // analyse links to css (<link type='text/css' href='...'>)
            $tags = $ddoc->getElementsByTagName('link');
            foreach ($tags as $tag) {
                if ($tag->getAttribute('type') == 'text/css' && strlen($tag->getAttribute('href'))) {
                    // Recurse into the linked stylesheet.
                    $this->parse_file($info['url'], $tag->getAttribute('href'), $list);
                }
            }
        } else {
            if (strstr($info['content_type'], 'css')) {
                // parse as css
                $csscontent = $curl->get($info['url']);
                $csstoanalyze .= $csscontent . "\n";
            } else {
                if (strstr($info['content_type'], 'image/')) {
                    // download this file
                    $this->add_image_to_list($info['url'], $info['url'], $list);
                } else {
                    // Unsupported content type for an embedded/linked file.
                    $list['error'] = get_string('validfiletype', 'repository_url');
                }
            }
        }
        // parse all found css styles
        if (strlen($csstoanalyze)) {
            $urls = extract_css_urls($csstoanalyze);
            if (!empty($urls['property'])) {
                // url(...) properties usually reference images.
                foreach ($urls['property'] as $url) {
                    $this->add_image_to_list($info['url'], $url, $list);
                }
            }
            if (!empty($urls['import'])) {
                // @import rules pull in further stylesheets - recurse.
                foreach ($urls['import'] as $cssurl) {
                    $this->parse_file($info['url'], $cssurl, $list);
                }
            }
        }
    }
}
/**
 * Synchronise a referenced file with its remote source.
 *
 * Images are downloaded fully (so thumbnails can be generated); for all other
 * files a HEAD request is used to fetch the current file size.
 *
 * @param stored_file $file the referenced file to synchronise
 * @return bool false if the sync was skipped (too recent / connection known bad),
 *              true if a synchronisation attempt was made
 */
public function sync_reference(stored_file $file) {
    // Bug fix: $CFG is read below ($CFG->repositorysyncimagetimeout and
    // $CFG->repositorysyncfiletimeout) but was never imported into scope.
    global $USER, $CFG;
    if ($file->get_referencelastsync() + DAYSECS > time() || !$this->connection_result()) {
        // Synchronise not more often than once a day.
        // if we had several unsuccessfull attempts to connect to server - do not try any more.
        return false;
    }
    $ref = @unserialize(base64_decode($file->get_reference()));
    if (!isset($ref->url) || !($url = $this->appendtoken($ref->url))) {
        // Occurs when the user isn't known..
        $file->set_missingsource();
        return true;
    }
    // Use a throw-away cookie jar for this request.
    $cookiepathname = $this->prepare_file($USER->id . '_' . uniqid('', true) . '.cookie');
    $c = new curl(array('cookie' => $cookiepathname));
    if (file_extension_in_typegroup($ref->filename, 'web_image')) {
        $path = $this->prepare_file('');
        $result = $c->download_one($url, null, array('filepath' => $path, 'followlocation' => true, 'timeout' => $CFG->repositorysyncimagetimeout));
        if ($result === true) {
            $fs = get_file_storage();
            list($contenthash, $filesize, $newfile) = $fs->add_file_to_pool($path);
            $file->set_synchronized($contenthash, $filesize);
            // Bug fix: delete the cookie jar on this early-return path too,
            // otherwise the temporary cookie file was leaked.
            if (file_exists($cookiepathname)) {
                unlink($cookiepathname);
            }
            return true;
        }
    } else {
        $result = $c->head($url, array('followlocation' => true, 'timeout' => $CFG->repositorysyncfiletimeout));
    }
    // Delete cookie jar.
    if (file_exists($cookiepathname)) {
        unlink($cookiepathname);
    }
    $this->connection_result($c->get_errno());
    $curlinfo = $c->get_info();
    if (isset($curlinfo['http_code']) && $curlinfo['http_code'] == 200
            && array_key_exists('download_content_length', $curlinfo)
            && $curlinfo['download_content_length'] >= 0) {
        // we received a correct header and at least can tell the file size
        $file->set_synchronized(null, $curlinfo['download_content_length']);
        return true;
    }
    $file->set_missingsource();
    return true;
}
$notify = array('some.user'); // /////////////////////////////////////////////////////////// if (empty($password)) { echo 'EQUELLA link checking has not been configured. Please see the source code for this page.'; exit; } $password_param = required_param('password', PARAM_RAW); if ($password_param != $password) { echo 'Password doesn\'t match.'; exit; } $http = new curl(array('cookie' => true)); echo '<style>.ok {color: green;} .bad {color: red;}</style><ul>'; foreach ($DB->get_records('equella') as $resource) { $url = equella_appendtoken($resource->url, equella_getssotoken_api()); $http->head($url); $info = $http->get_info(); $statuscode = $info['http_code']; echo '<li>Checking <a href="' . $resource->url . '">' . $resource->url . '</a><br>'; if ((int) $statuscode == 200) { echo '<span class="ok">OK</span>'; } else { echo '<span class="bad">Could not find in EQUELLA</span><br>'; // tell someone - get users with course edit perms for the course in question $recipients = $DB->get_records_list('user', 'username', $notify); if ($recipients) { $from = get_admin(); $subject = get_string('checker.subject', 'equella'); $course = $DB->get_record('course', array('id' => $resource->course)); $courseurl = new moodle_url('/course/view.php', array('id' => $course->id)); $message = get_string('checker.message', 'equella', array('name' => $resource->name, 'url' => $resource->url, 'coursename' => $course->shortname, 'courseurl' => $courseurl));
/**
 * Checks if the mdeploy.php will be able to fetch the ZIP from the given URL
 *
 * This is mainly supposed to check if the transmission over HTTPS would
 * work. That is, if the CA certificates are present at the server.
 *
 * @param string $downloadurl the URL of the ZIP package to download
 * @return bool true when the HEAD request completed without a curl-level error
 */
protected function update_downloadable($downloadurl) {
    global $CFG;
    $headoptions = array(
        'CURLOPT_SSL_VERIFYHOST' => 2, // this is the default in {@link curl} class but just in case
        'CURLOPT_SSL_VERIFYPEER' => true,
    );
    $client = new curl(array('proxy' => true));
    $client->head($downloadurl, $headoptions);
    // Success is simply the absence of a curl error code.
    $errorcode = $client->get_errno();
    return empty($errorcode);
}