Example #1
 public static function call(OkapiRequest $request)
 {
     $issue_id = $request->get_parameter('issue_id');
     if (!$issue_id) {
         throw new ParamMissing('issue_id');
     }
     if (!preg_match("/^[0-9]+\$/", $issue_id) || strlen($issue_id) > 6) {
         throw new InvalidParam('issue_id');
     }
     $cache_key = "apiref/issue#" . $issue_id;
     $result = Cache::get($cache_key);
     if ($result == null) {
         # Download the number of comments from GitHub Issue Tracker.
         try {
             $extra_headers = array();
             $extra_headers[] = "Accept: application/vnd.github.v3.html+json";
             $extra_headers[] = "User-Agent: https://github.com/opencaching/okapi/";
             if (Settings::get('GITHUB_ACCESS_TOKEN')) {
                 $extra_headers[] = "Authorization: token " . Settings::get('GITHUB_ACCESS_TOKEN');
             }
             $opts = array('http' => array('method' => "GET", 'timeout' => 2.0, 'header' => implode("\r\n", $extra_headers)));
             $context = stream_context_create($opts);
             $json = file_get_contents("https://api.github.com/repos/opencaching/okapi/issues/{$issue_id}", false, $context);
         } catch (ErrorException $e) {
             throw new BadRequest("Sorry, we could not retrieve issue stats from the GitHub site. " . "This is probably due to a temporary connection problem. Try again later or contact " . "us if this seems permanent.");
         }
         $doc = json_decode($json, true);
         $result = array('id' => $issue_id + 0, 'last_updated' => $doc['updated_at'], 'title' => $doc['title'], 'url' => $doc['html_url'], 'comment_count' => $doc['comments']);
         # On one hand, we want newly added comments to show up quickly.
         # On the other, we don't want OKAPI to spam GitHub with queries.
         # So it's difficult to choose the best timeout for this.
         Cache::set($cache_key, $result, 3600);
     }
     return Okapi::formatted_response($request, $result);
 }
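A minimal sketch of how this method could be invoked internally, following the OkapiServiceRunner pattern used in the other examples below; the method path 'services/apiref/issue' is an assumption inferred from the cache key and may differ in the actual repository:

 # Hypothetical internal call; the method path is an assumption.
 $issue = OkapiServiceRunner::call(
     'services/apiref/issue',
     new OkapiInternalRequest(new OkapiInternalConsumer(), null, array('issue_id' => '123'))
 );
 # Expected keys, per the code above: id, last_updated, title, url, comment_count.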
Example #2
 public function __construct()
 {
     $cache_key = 'changelog';
     $cache_backup_key = 'changelog-backup';
     $changes_xml = Cache::get($cache_key);
     $changelog = null;
     if (!$changes_xml) {
         # Download the current changelog.
         try {
             $opts = array('http' => array('method' => "GET", 'timeout' => 5.0));
             $context = stream_context_create($opts);
             $changes_xml = file_get_contents('https://raw.githubusercontent.com/opencaching/okapi/master/etc/changes.xml', false, $context);
             $changelog = simplexml_load_string($changes_xml);
             if (!$changelog) {
                 throw new ErrorException();
             }
             Cache::set($cache_key, $changes_xml, 3600);
             Cache::set($cache_backup_key, $changes_xml, 3600 * 24 * 30);
         } catch (Exception $e) {
             # GitHub failed on us. Use the backup list, if available.
             $changes_xml = Cache::get($cache_backup_key);
             if ($changes_xml) {
                 Cache::set($cache_key, $changes_xml, 3600 * 12);
             }
         }
     }
     if (!$changelog && $changes_xml) {
         $changelog = simplexml_load_string($changes_xml);
     }
     # TODO: verify XML scheme
     $this->unavailable_changes = array();
     $this->available_changes = array();
     if (!$changelog) {
         # We could not retrieve the changelog from GitHub, and there was
         # no backup key or it expired. Probably we are on a developer
         # machine. The template will output some error message.
     } else {
         $commits = array();
         $versions = array();
         foreach ($changelog->changes->change as $change) {
             $change = array('commit' => (string) $change['commit'], 'version' => (string) $change['version'], 'time' => (string) $change['time'], 'type' => (string) $change['type'], 'comment' => trim(self::get_inner_xml($change)));
             if (strlen($change['commit']) != 8 || $change['version'] == 0 || $change['time'] == '' || isset($commits[$change['commit']]) || isset($versions[$change['version']])) {
                 # All of these problems would have been detected or prevented
                 # by update_changes.
                 throw new Exception("Someone forgot to run update_changes.php (or ignored error messages).");
             } else {
                 if ($change['version'] > Okapi::$version_number) {
                     $this->unavailable_changes[] = $change;
                 } else {
                     $this->available_changes[] = $change;
                 }
                 $commits[$change['commit']] = true;
                 $versions[$change['version']] = true;
             }
         }
     }
 }
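The constructor above pairs a short-lived primary cache entry with a long-lived backup entry, and falls back to the backup when GitHub is unreachable. A minimal, generic sketch of that pattern; fetch_changes_from_github() is a hypothetical placeholder:

 $changes_xml = Cache::get('changelog');
 if (!$changes_xml) {
     try {
         $changes_xml = fetch_changes_from_github();                     # hypothetical helper
         Cache::set('changelog', $changes_xml, 3600);                    # fresh copy, 1 hour
         Cache::set('changelog-backup', $changes_xml, 3600 * 24 * 30);   # backup, 30 days
     } catch (Exception $e) {
         # Upstream failed. Reuse the backup, if any, and postpone the next retry.
         $changes_xml = Cache::get('changelog-backup');
         if ($changes_xml) {
             Cache::set('changelog', $changes_xml, 3600 * 12);
         }
     }
 }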
Example #3
 public static function call(OkapiRequest $request)
 {
     $cachekey = "apisrv/stats";
     $result = Cache::get($cachekey);
     if (!$result) {
         $result = array('cache_count' => 0 + Db::select_value("\n                    select count(*) from caches where status in (1,2,3)\n                "), 'user_count' => 0 + Db::select_value("\n                    select count(*) from (\n                        select distinct user_id\n                        from cache_logs\n                        where\n                            type in (1,2,7)\n                            and " . (Settings::get('OC_BRANCH') == 'oc.pl' ? "deleted = 0" : "true") . "\n                        UNION DISTINCT\n                        select distinct user_id\n                        from caches\n                    ) as t;\n                "), 'apps_count' => 0 + Db::select_value("select count(*) from okapi_consumers;"), 'apps_active' => 0 + Db::select_value("\n                    select count(distinct s.consumer_key)\n                    from\n                        okapi_stats_hourly s,\n                        okapi_consumers c\n                    where\n                        s.consumer_key = c.`key`\n                        and s.period_start > date_add(now(), interval -30 day)\n                "));
         Cache::set($cachekey, $result, 86400);
         # cache it for one day
     }
     return Okapi::formatted_response($request, $result);
 }
Example #4
 public static function call(OkapiRequest $request)
 {
     $methodnames = OkapiServiceRunner::$all_names;
     sort($methodnames);
     $cache_key = "api_ref/method_index#" . md5(implode("#", $methodnames));
     $results = Cache::get($cache_key);
     if ($results == null) {
         $results = array();
         foreach ($methodnames as $methodname) {
             $info = OkapiServiceRunner::call('services/apiref/method', new OkapiInternalRequest(new OkapiInternalConsumer(), null, array('name' => $methodname)));
             $results[] = array('name' => $info['name'], 'brief_description' => $info['brief_description']);
         }
         Cache::set($cache_key, $results, 3600);
     }
     return Okapi::formatted_response($request, $results);
 }
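Note how the cache key above embeds an md5 of the sorted method names, so the cached index is implicitly invalidated whenever the set of installed methods changes. A small, hedged sketch of the same keying technique applied to an arbitrary input list:

 # Hypothetical example of an input-derived cache key.
 $items = array('services/apiref/method_index', 'services/apiref/method');
 sort($items);
 $cache_key = "my_index#" . md5(implode("#", $items));
 $value = Cache::get($cache_key);
 if ($value === null) {
     $value = count($items);              # any expensive computation would go here
     Cache::set($cache_key, $value, 3600);
 }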
Example #5
 public static function call(OkapiRequest $request)
 {
     $cache_codes = $request->get_parameter('cache_codes');
     if ($cache_codes === null) {
         throw new ParamMissing('cache_codes');
     }
     # Issue 106 requires us to allow empty list of cache codes to be passed into this method.
     # All of the queries below have to be ready for $cache_codes to be empty!
     $langpref = $request->get_parameter('langpref');
     if (!$langpref) {
         $langpref = "en|" . Settings::get('SITELANG');
     }
     $images = $request->get_parameter('images');
     if (!$images) {
         $images = "all";
     }
     if (!in_array($images, array("none", "all", "spoilers", "nonspoilers"))) {
         throw new InvalidParam('images');
     }
     $format = $request->get_parameter('caches_format');
     if (!$format) {
         $format = "gpx";
     }
     if (!in_array($format, array("gpx", "ggz"))) {
         throw new InvalidParam('caches_format');
     }
     $location_source = $request->get_parameter('location_source');
     $location_change_prefix = $request->get_parameter('location_change_prefix');
     # Start creating ZIP archive.
     $response = new OkapiZIPHttpResponse();
     # Include a GPX/GGZ file compatible with Garmin devices. It should include all
     # Geocaching.com (groundspeak:) and Opencaching.com (ox:) extensions. It will
     # also include image references (actual images will be added as separate files later)
     # and personal data (if the method was invoked using Level 3 Authentication).
     switch ($format) {
         case 'gpx':
             $data_filename = "Garmin/GPX/opencaching" . time() . rand(100000, 999999) . ".gpx";
             $data_method = 'services/caches/formatters/gpx';
             $data_use_compression = true;
             break;
         case 'ggz':
             $data_filename = "Garmin/GGZ/opencaching" . time() . rand(100000, 999999) . ".ggz";
             $data_method = 'services/caches/formatters/ggz';
             $data_use_compression = false;
             break;
     }
     $response->zip->FileAdd($data_filename, OkapiServiceRunner::call($data_method, new OkapiInternalRequest($request->consumer, $request->token, array('cache_codes' => $cache_codes, 'langpref' => $langpref, 'ns_ground' => 'true', 'ns_ox' => 'true', 'images' => 'ox:all', 'attrs' => 'ox:tags', 'trackables' => 'desc:count', 'alt_wpts' => 'true', 'recommendations' => 'desc:count', 'latest_logs' => 'true', 'lpc' => 'all', 'my_notes' => $request->token != null ? "desc:text" : "none", 'location_source' => $location_source, 'location_change_prefix' => $location_change_prefix)))->get_body(), clsTbsZip::TBSZIP_STRING, $data_use_compression);
     # Then, include all the images.
     $caches = OkapiServiceRunner::call('services/caches/geocaches', new OkapiInternalRequest($request->consumer, $request->token, array('cache_codes' => $cache_codes, 'langpref' => $langpref, 'fields' => "images")));
     if (count($caches) > 50) {
         throw new InvalidParam('cache_codes', "The maximum number of caches allowed to be downloaded with this method is 50.");
     }
     if ($images != 'none') {
         $supported_extensions = array('jpg', 'jpeg', 'gif', 'png', 'bmp');
         foreach ($caches as $cache_code => $dict) {
             $imgs = $dict['images'];
             if (count($imgs) == 0) {
                 continue;
             }
             $dir = "Garmin/GeocachePhotos/" . $cache_code[strlen($cache_code) - 1];
             $dir .= "/" . $cache_code[strlen($cache_code) - 2];
             $dir .= "/" . $cache_code;
             foreach ($imgs as $no => $img) {
                 if ($images == 'spoilers' && !$img['is_spoiler']) {
                     continue;
                 }
                 if ($images == 'nonspoilers' && $img['is_spoiler']) {
                     continue;
                 }
                 # Check whether the image URL ends with one of the supported extensions.
                 $extension_ok = false;
                 foreach ($supported_extensions as $ext) {
                     if (strtolower(substr($img['url'], strlen($img['url']) - strlen($ext) - 1)) == "." . $ext) {
                         $extension_ok = true;
                         break;
                     }
                 }
                 if (!$extension_ok) {
                     # unsupported file extension
                     continue;
                 }
                 if ($img['is_spoiler']) {
                     $zippath = $dir . "/Spoilers/" . $img['unique_caption'] . ".jpg";
                 } else {
                     $zippath = $dir . "/" . $img['unique_caption'] . ".jpg";
                 }
                 # The safest way would be to use the URL, but that would be painfully slow!
                 # That's why we're trying to access files directly (and fail silently on error).
                 # This was tested on OCPL server only.
                 # Note: Oliver Dietz (oc.de) replied that images with 'local' set to 0 could not
                 # be accessed locally. But all the files have 'local' set to 1 anyway.
                 $syspath = Settings::get('IMAGES_DIR') . "/" . $img['uuid'] . ".jpg";
                 if (file_exists($syspath)) {
                     $response->zip->FileAdd($zippath, $syspath, clsTbsZip::TBSZIP_FILE, false);
                 } else {
                     # If the file exists but does not end with ".jpg", we will create
                     # a JPEG version of it and store it in the cache.
                     $cache_key = "jpg#" . $img['uuid'];
                     $jpeg_contents = Cache::get($cache_key);
                     if ($jpeg_contents === null) {
                         foreach ($supported_extensions as $ext) {
                             $syspath_other = Settings::get('IMAGES_DIR') . "/" . $img['uuid'] . "." . $ext;
                             if (file_exists($syspath_other)) {
                                 try {
                                     $image = imagecreatefromstring(file_get_contents($syspath_other));
                                     ob_start();
                                     imagejpeg($image);
                                     $jpeg_contents = ob_get_clean();
                                     imagedestroy($image);
                                 } catch (Exception $e) {
                                     # GD couldn't parse the file. We will skip it, and cache
                                     # the "false" value as the contents. This way, we won't
                                     # attempt to parse it during the next 24 hours.
                                     $jpeg_contents = false;
                                 }
                                 Cache::set($cache_key, $jpeg_contents, 86400);
                                 break;
                             }
                         }
                     }
                     if ($jpeg_contents) {
                         # This can be "null" *or* "false"!
                         $response->zip->FileAdd($zippath, $jpeg_contents, clsTbsZip::TBSZIP_STRING, false);
                     }
                 }
             }
         }
     }
     # The result could be big, but it's created and streamed right
     # to the browser, so it shouldn't hit our memory limit. We also
     # should set a higher time limit, because downloading this response
     # may take some time over slow network connections (and I'm not sure
     # how PHP handles such a scenario by default).
     set_time_limit(600);
     $response->content_type = "application/zip";
     $response->content_disposition = 'attachment; filename="results.zip"';
     return $response;
 }
Example #6
File: core.php Project: Slini11/okapi
 /** Send an email message to local OKAPI administrators. */
 public static function mail_admins($subject, $message)
 {
     # Make sure we're not sending HUGE emails.
     if (strlen($message) > 10000) {
         $message = substr($message, 0, 10000) . "\n\n...(message clipped at 10k chars)\n";
     }
     # Make sure we're not spamming.
     $cache_key = 'mail_admins_counter/' . floor(time() / 3600) * 3600 . '/' . md5($subject);
     try {
         $counter = Cache::get($cache_key);
     } catch (DbException $e) {
         # This exception can occur during OKAPI update (#156), or when
         # the cache table is broken (#340). I am not sure which option is
         # better: 1. notify the admins about the error and risk spamming
         # them, 2. don't notify and don't risk spamming them. Currently,
         # I choose option 2.
         return;
     }
     if ($counter === null) {
         $counter = 0;
     }
     $counter++;
     try {
         Cache::set($cache_key, $counter, 3600);
     } catch (DbException $e) {
         # If `get` succeeded and `set` did not, then probably we're having
         # issue #156 scenario. We can ignore it here.
     }
     if ($counter <= 5) {
         # We're not spamming yet.
         self::mail_from_okapi(get_admin_emails(), $subject, $message);
     } else {
         # We are spamming. Prevent sending more emails.
         $content_cache_key_prefix = 'mail_admins_spam/' . floor(time() / 3600) * 3600 . '/';
         $timeout = 86400;
         if ($counter == 6) {
             self::mail_from_okapi(get_admin_emails(), "Anti-spam mode activated for '{$subject}'", "OKAPI has activated an \"anti-spam\" mode for the following subject:\n\n" . "\"{$subject}\"\n\n" . "Anti-spam mode is activated when more than 5 messages with\n" . "the same subject are sent within one hour.\n\n" . "Additional debug information:\n" . "- counter cache key: {$cache_key}\n" . "- content prefix: {$content_cache_key_prefix}<n>\n" . "- content timeout: {$timeout}\n");
         }
         $content_cache_key = $content_cache_key_prefix . $counter;
         Cache::set($content_cache_key, $message, $timeout);
     }
 }
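The anti-spam logic above keys its counter on the current hour bucket plus an md5 of the subject, so at most five identical notifications are mailed per hour; later ones are only written to the cache. A minimal, stand-alone sketch of the same hourly-bucket counter (the subject string is illustrative):

 $bucket = floor(time() / 3600) * 3600;                       # start of the current hour
 $counter_key = 'mail_admins_counter/' . $bucket . '/' . md5('Some subject');
 $counter = Cache::get($counter_key);
 if ($counter === null) {
     $counter = 0;
 }
 $counter++;
 Cache::set($counter_key, $counter, 3600);
 $may_send = ($counter <= 5);                                 # sixth and later calls are suppressed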
Example #7
 public function execute()
 {
     require_once $GLOBALS['rootpath'] . "okapi/services/replicate/replicate_common.inc.php";
     $max_revision = ReplicateCommon::get_revision();
     $cache_key = 'clog_revisions_daily';
     $data = Cache::get($cache_key);
     if ($data == null) {
         $data = array();
     }
     $data[time()] = $max_revision;
     $new_min_revision = 1;
     $new_data = array();
     foreach ($data as $time => $r) {
         if ($time < time() - 10 * 86400) {
             $new_min_revision = max($new_min_revision, $r);
         } else {
             $new_data[$time] = $r;
         }
     }
     Db::execute("\n            delete from okapi_clog\n            where id < '" . mysql_real_escape_string($new_min_revision) . "'\n        ");
     Cache::set($cache_key, $new_data, 10 * 86400);
     Db::query("optimize table okapi_clog");
 }
Example #8
 /**
  * Get the mapping: A-codes => attribute name. The language for the name
  * is selected based on the $langpref parameter. The result is cached!
  */
 public static function get_acode_to_name_mapping($langpref)
 {
     static $mapping = null;
     if ($mapping !== null) {
         return $mapping;
     }
     $cache_key = md5(serialize(array("attrhelper/acode2name", $langpref, Okapi::$revision, self::cache_key_suffix())));
     $mapping = Cache::get($cache_key);
     if (!$mapping) {
         self::init_from_cache();
         $mapping = array();
         foreach (self::$attr_dict as $acode => &$attr_ref) {
             $mapping[$acode] = Okapi::pick_best_language($attr_ref['names'], $langpref);
         }
         Cache::set($cache_key, $mapping, self::ttl());
     }
     return $mapping;
 }
Example #9
 private function loadSearchData($searchData)
 {
     \okapi\OkapiErrorHandler::reenable();
     // We need to transform OC's "searchdata" into OKAPI's "search set".
     // First, we need to determine if we ALREADY did that.
     // Note that this is not exactly thread-efficient. Multiple threads may
     // do this transformation at the same time. However, this is done only once
     // for each searchdata, so we will ignore it.
     $cache_key = "OC_searchdata_" . $searchData;
     $set_id = \okapi\Cache::get($cache_key);
     if ($set_id === null) {
         // Read the searchdata file into a temporary table.
         $filepath = \okapi\Settings::get('VAR_DIR') . "/searchdata/" . $searchData;
         \okapi\Db::execute("\n            create temporary table temp_" . $searchData . " (\n                cache_id integer primary key\n            ) engine=memory\n        ");
         if (file_exists($filepath)) {
             \okapi\Db::execute("\n                        load data local infile '{$filepath}'\n                        into table temp_" . $searchData . "\n                fields terminated by ' '\n                lines terminated by '\\n'\n                (cache_id)\n            ");
         }
         // Tell OKAPI to import the table into its own internal structures.
         // Cache it for two hours.
         $set_info = \okapi\Facade::import_search_set("temp_" . $searchData, 7200, 7200);
         $set_id = $set_info['set_id'];
         \okapi\Cache::set($cache_key, $set_id, 7200);
     }
     $this->search_params['set_and'] = $set_id;
     $this->search_params['status'] = "Available|Temporarily unavailable|Archived";
     \okapi\OkapiErrorHandler::disable();
     return true;
 }
Example #10
 /**
  * Generate a new fulldump file and put it into the OKAPI cache table.
  * Return the cache key.
  */
 public static function generate_fulldump()
 {
     # First we will create temporary files, then compress them in the end.
     $revision = self::get_revision();
     $generated_at = date('c', time());
     $dir = Okapi::get_var_dir() . "/okapi-db-dump";
     $i = 1;
     $json_files = array();
     # Cleanup (from a previous, possibly unsuccessful, execution)
     shell_exec("rm -f {$dir}/*");
     shell_exec("rmdir {$dir}");
     shell_exec("mkdir {$dir}");
     shell_exec("chmod 777 {$dir}");
     # Geocaches
     $cache_codes = Db::select_column("select wp_oc from caches");
     $cache_code_groups = Okapi::make_groups($cache_codes, self::$chunk_size);
     unset($cache_codes);
     foreach ($cache_code_groups as $cache_codes) {
         $basename = "part" . str_pad($i, 5, "0", STR_PAD_LEFT);
         $json_files[] = $basename . ".json";
         $entries = self::generate_changelog_entries('services/caches/geocaches', 'geocache', 'cache_codes', 'code', $cache_codes, self::$logged_cache_fields, true, false);
         $filtered = array();
         foreach ($entries as $entry) {
             if ($entry['change_type'] == 'replace') {
                 $filtered[] = $entry;
             }
         }
         unset($entries);
         file_put_contents("{$dir}/{$basename}.json", json_encode($filtered));
         unset($filtered);
         $i++;
     }
     unset($cache_code_groups);
     # Log entries. We cannot load all the uuids at one time, as this would take
     # too much memory. Hence the offset/limit loop.
     $offset = 0;
     while (true) {
         $log_uuids = Db::select_column("\n                select uuid\n                from cache_logs\n                where " . (Settings::get('OC_BRANCH') == 'oc.pl' ? "deleted = 0" : "true") . "\n                order by uuid\n                limit {$offset}, 10000\n            ");
         if (count($log_uuids) == 0) {
             break;
         }
         $offset += 10000;
         $log_uuid_groups = Okapi::make_groups($log_uuids, 500);
         unset($log_uuids);
         foreach ($log_uuid_groups as $log_uuids) {
             $basename = "part" . str_pad($i, 5, "0", STR_PAD_LEFT);
             $json_files[] = $basename . ".json";
             $entries = self::generate_changelog_entries('services/logs/entries', 'log', 'log_uuids', 'uuid', $log_uuids, self::$logged_log_entry_fields, true, false);
             $filtered = array();
             foreach ($entries as $entry) {
                 if ($entry['change_type'] == 'replace') {
                     $filtered[] = $entry;
                 }
             }
             unset($entries);
             file_put_contents("{$dir}/{$basename}.json", json_encode($filtered));
             unset($filtered);
             $i++;
         }
     }
     # Package data.
     $metadata = array('revision' => $revision, 'data_files' => $json_files, 'meta' => array('site_name' => Okapi::get_normalized_site_name(), 'okapi_version_number' => Okapi::$version_number, 'okapi_revision' => Okapi::$version_number, 'okapi_git_revision' => Okapi::$git_revision, 'generated_at' => $generated_at));
     file_put_contents("{$dir}/index.json", json_encode($metadata));
     # Compute uncompressed size.
     $size = filesize("{$dir}/index.json");
     foreach ($json_files as $filename) {
         $size += filesize("{$dir}/{$filename}");
     }
     # Create JSON archive. We use tar options: -j for bzip2, -z for gzip
     # (bzip2 is MUCH slower).
     $use_bzip2 = true;
     $dumpfilename = "okapi-dump.tar." . ($use_bzip2 ? "bz2" : "gz");
     shell_exec("tar --directory {$dir} -c" . ($use_bzip2 ? "j" : "z") . "f {$dir}/{$dumpfilename} index.json " . implode(" ", $json_files) . " 2>&1");
     # Delete temporary files.
     shell_exec("rm -f {$dir}/*.json");
     # Move the archive one directory upwards, replacing the previous one.
     # Remove the temporary directory.
     shell_exec("mv -f {$dir}/{$dumpfilename} " . Okapi::get_var_dir());
     shell_exec("rmdir {$dir}");
     # Update the database info.
     $metadata['meta']['filepath'] = Okapi::get_var_dir() . '/' . $dumpfilename;
     $metadata['meta']['content_type'] = $use_bzip2 ? "application/octet-stream" : "application/x-gzip";
     $metadata['meta']['public_filename'] = 'okapi-dump-r' . $metadata['revision'] . '.tar.' . ($use_bzip2 ? "bz2" : "gz");
     $metadata['meta']['uncompressed_size'] = $size;
     $metadata['meta']['compressed_size'] = filesize($metadata['meta']['filepath']);
     Cache::set("last_fulldump", $metadata, 10 * 86400);
 }
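generate_fulldump() leaves its metadata under the 'last_fulldump' cache key. A hedged sketch of how a download endpoint could read that record back and stream the archive; the header()/readfile() delivery is an assumption, not code taken from the repository:

 $metadata = Cache::get("last_fulldump");
 if ($metadata === null) {
     throw new Exception("No fulldump has been generated yet.");
 }
 header("Content-Type: " . $metadata['meta']['content_type']);
 header('Content-Disposition: attachment; filename="' . $metadata['meta']['public_filename'] . '"');
 header("Content-Length: " . $metadata['meta']['compressed_size']);
 readfile($metadata['meta']['filepath']);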
Example #11
 public static function call(OkapiRequest $request)
 {
     $checkpointA_started = microtime(true);
     # Make sure the request is internal.
     if (in_array($request->consumer->key, array('internal', 'facade'))) {
         /* Okay, these two consumers can always access it. */
     } elseif ($request->consumer->hasFlag(OkapiConsumer::FLAG_MAPTILE_ACCESS)) {
         /* If the Consumer is aware that it is not backward-compatible, then
          * he may be granted permission to access it. */
     } else {
         throw new BadRequest("Your Consumer Key has not been allowed to access this method.");
     }
     # zoom, x, y - required tile-specific parameters.
     $zoom = self::require_uint($request, 'z');
     if ($zoom > 21) {
         throw new InvalidParam('z', "Maximum value for this parameter is 21.");
     }
     $x = self::require_uint($request, 'x');
     $y = self::require_uint($request, 'y');
     if ($x >= 1 << $zoom) {
         throw new InvalidParam('x', "Should be in 0.." . ((1 << $zoom) - 1) . ".");
     }
     if ($y >= 1 << $zoom) {
         throw new InvalidParam('y', "Should be in 0.." . ((1 << $zoom) - 1) . ".");
     }
     # Now, we will create a search set (or use one previously created).
     # Instead of creating a new OkapiInternalRequest object, we will pass
     # the current request directly. We can do that, because we inherit all
     # of the "save" method's parameters.
     $search_set = OkapiServiceRunner::call('services/caches/search/save', new OkapiInternalRequest($request->consumer, $request->token, $request->get_all_parameters_including_unknown()));
     $set_id = $search_set['set_id'];
     # Get caches which are present in the result set AND within the tile
     # (+ those around the borders).
     $rs = TileTree::query_fast($zoom, $x, $y, $set_id);
     $rows = array();
     if ($rs !== null) {
         while ($row = Db::fetch_row($rs)) {
             $rows[] = $row;
         }
         unset($row);
     }
     OkapiServiceRunner::save_stats_extra("caches/map/tile/checkpointA", null, microtime(true) - $checkpointA_started);
     $checkpointB_started = microtime(true);
     # Add dynamic, user-related flags.
     if (count($rows) > 0) {
         # Load user-related cache ids.
         $cache_key = "tileuser/" . $request->token->user_id;
         $user = self::$USE_OTHER_CACHE ? Cache::get($cache_key) : null;
         if ($user === null) {
             $user = array();
             # Ignored caches.
             $rs = Db::query("\n                    select cache_id\n                    from cache_ignore\n                    where user_id = '" . Db::escape_string($request->token->user_id) . "'\n                ");
             $user['ignored'] = array();
             while (list($cache_id) = Db::fetch_row($rs)) {
                 $user['ignored'][$cache_id] = true;
             }
             # Found caches.
             $rs = Db::query("\n                    select distinct cache_id\n                    from cache_logs\n                    where\n                        user_id = '" . Db::escape_string($request->token->user_id) . "'\n                        and type = 1\n                        and " . (Settings::get('OC_BRANCH') == 'oc.pl' ? "deleted = 0" : "true") . "\n                ");
             $user['found'] = array();
             while (list($cache_id) = Db::fetch_row($rs)) {
                 $user['found'][$cache_id] = true;
             }
             # Own caches.
             $rs = Db::query("\n                    select distinct cache_id\n                    from caches\n                    where user_id = '" . Db::escape_string($request->token->user_id) . "'\n                ");
             $user['own'] = array();
             while (list($cache_id) = Db::fetch_row($rs)) {
                 $user['own'][$cache_id] = true;
             }
             Cache::set($cache_key, $user, 30);
         }
         # Add extra flags to geocaches.
         foreach ($rows as &$row_ref) {
             # Add the "found" flag (to indicate that this cache needs
             # to be drawn as found) and the "own" flag (to indicate that
             # the current user is the owner).
             if (isset($user['found'][$row_ref[0]])) {
                 $row_ref[6] |= TileTree::$FLAG_FOUND;
             }
             # $row[6] is "flags"
             if (isset($user['own'][$row_ref[0]])) {
                 $row_ref[6] |= TileTree::$FLAG_OWN;
             }
             # $row[6] is "flags"
         }
     }
     # Compute the image hash/fingerprint. This will be used both for ETags
     # and internal cache ($cache_key).
     $tile = new TileRenderer($zoom, $rows);
     $image_fingerprint = $tile->get_unique_hash();
     # Start creating response.
     $response = new OkapiHttpResponse();
     $response->content_type = $tile->get_content_type();
     $response->cache_control = "Cache-Control: private, max-age=600";
     $response->etag = 'W/"' . $image_fingerprint . '"';
     $response->allow_gzip = false;
     // images are usually compressed, prevent compression at Apache level
     # Check if the request didn't include the same ETag.
     OkapiServiceRunner::save_stats_extra("caches/map/tile/checkpointB", null, microtime(true) - $checkpointB_started);
     $checkpointC_started = microtime(true);
     if (self::$USE_ETAGS_CACHE && $request->etag == $response->etag) {
         # Hit. Report the content was unmodified.
         $response->etag = null;
         $response->status = "304 Not Modified";
         return $response;
     }
     # Check if the image was recently rendered and is kept in image cache.
     $cache_key = "tile/" . $image_fingerprint;
     $response->body = self::$USE_IMAGE_CACHE ? Cache::get($cache_key) : null;
     OkapiServiceRunner::save_stats_extra("caches/map/tile/checkpointC", null, microtime(true) - $checkpointC_started);
     $checkpointD_started = microtime(true);
     if ($response->body !== null) {
         # Hit. We will use the cached version of the image.
         return $response;
     }
     # Miss. Render the image. Cache the result.
     $response->body = $tile->render();
     Cache::set_scored($cache_key, $response->body);
     OkapiServiceRunner::save_stats_extra("caches/map/tile/checkpointD", null, microtime(true) - $checkpointD_started);
     return $response;
 }
Example #12
File: facade.php Project: 4Vs/oc-server3
 /**
  * Store the object $value in OKAPI's cache, under the name of $key.
  *
  * Parameters:
  *
  * $key -- must be a string of max 57 characters in length (you can use
  *     md5(...) to shorten your keys). Use the same $key to retrieve your
  *     value later.
  *
  * $value -- can be any serializable PHP object. Currently there's no
  *     strict size limit, but try to keep it below 1 MB (for future
  *     compatibility with memcached).
  *
  * $timeout -- *maximum* time allowed to store the value, given in seconds
  *     (however, the value *can* be removed sooner than that, see the note
  *     below). Timeout can be also set to null, but you should avoid this,
  *     because such objects may clutter the cache unnecessarily. (You must
  *     remember to remove them yourself!)
  *
  * Please note, that this cache is not guaranteed to be persistent.
  * Usually it is, but it can be emptied in case of emergency (low disk
  * space), or if we decide to switch the underlying cache engine in the
  * future (e.g. to memcached). Most of your values should be safe though.
  */
 public static function cache_set($key, $value, $timeout)
 {
     Cache::set("facade#" . $key, $value, $timeout);
 }
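A brief usage sketch for the facade method above; Facade::cache_get() is assumed to exist as the symmetric read counterpart, and compute_expensive_value() is a hypothetical placeholder:

 $key = md5("my-plugin/expensive-value");                 # md5 keeps the key under 57 characters
 $value = \okapi\Facade::cache_get($key);                 # assumed counterpart of cache_set()
 if ($value === null) {
     $value = compute_expensive_value();                  # hypothetical helper
     \okapi\Facade::cache_set($key, $value, 86400);       # keep for at most one day
 }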
Example #13
File: core.php Project: 4Vs/oc-server3
 /** Send an email message to local OKAPI administrators. */
 public static function mail_admins($subject, $message)
 {
     # Make sure we're not sending HUGE emails.
     if (strlen($message) > 10000) {
         $message = substr($message, 0, 10000) . "\n\n...(message clipped at 10k chars)\n";
     }
     # Make sure we're not spamming.
     $cache_key = 'mail_admins_counter/' . floor(time() / 3600) * 3600 . '/' . md5($subject);
     try {
         $counter = Cache::get($cache_key);
     } catch (DbException $e) {
         # Why catching exceptions here? See bug#156.
         $counter = null;
     }
     if ($counter === null) {
         $counter = 0;
     }
     $counter++;
     try {
         Cache::set($cache_key, $counter, 3600);
     } catch (DbException $e) {
         # Why catching exceptions here? See bug#156.
     }
     if ($counter <= 5) {
         # We're not spamming yet.
         self::mail_from_okapi(get_admin_emails(), $subject, $message);
     } else {
         # We are spamming. Prevent sending more emails.
         $content_cache_key_prefix = 'mail_admins_spam/' . floor(time() / 3600) * 3600 . '/';
         $timeout = 86400;
         if ($counter == 6) {
             self::mail_from_okapi(get_admin_emails(), "Anti-spam mode activated for '{$subject}'", "OKAPI has activated an \"anti-spam\" mode for the following subject:\n\n" . "\"{$subject}\"\n\n" . "Anti-spam mode is activated when more than 5 messages with\n" . "the same subject are sent within one hour.\n\n" . "Additional debug information:\n" . "- counter cache key: {$cache_key}\n" . "- content prefix: {$content_cache_key_prefix}<n>\n" . "- content timeout: {$timeout}\n");
         }
         $content_cache_key = $content_cache_key_prefix . $counter;
         Cache::set($content_cache_key, $message, $timeout);
     }
 }
Example #14
    # do this transformation at the same time. However, this is done only once
    # for each searchdata, so we will ignore it.
    $cache_key = "OC_searchdata_" . $searchdata;
    $set_id = \okapi\Cache::get($cache_key);
    if ($set_id === null) {
        # Read the searchdata file into a temporary table.
        $filepath = \okapi\Settings::get('VAR_DIR') . "/searchdata/" . $searchdata;
        \okapi\Db::execute("\n            create temporary table temp_" . $searchdata . " (\n                cache_id integer primary key\n            ) engine=memory\n        ");
        if (file_exists($filepath)) {
            \okapi\Db::execute("\n                load data local infile '{$filepath}'\n                into table temp_" . $searchdata . "\n                fields terminated by ' '\n                lines terminated by '\\n'\n                (cache_id)\n            ");
        }
        # Tell OKAPI to import the table into its own internal structures.
        # Cache it for two hours.
        $set_info = \okapi\Facade::import_search_set("temp_" . $searchdata, 7200, 7200);
        $set_id = $set_info['set_id'];
        \okapi\Cache::set($cache_key, $set_id, 7200);
    }
    $params['set_and'] = $set_id;
    $params['status'] = "Available|Temporarily unavailable|Archived";
    \okapi\OkapiErrorHandler::disable();
} else {
    # Mode 1 - without "searchdata".
    # h_ignored - convert to OKAPI's "exclude_ignored".
    if ($_GET['h_ignored'] == "true") {
        $params['exclude_ignored'] = "true";
    }
    # h_avail, h_temp_unavail, h_arch ("hide available" etc.) - convert to
    # OKAPI's "status" filter.
    $tmp = array();
    if ($_GET['h_avail'] != "true") {
        $tmp[] = "Available";
Example #15
 public static function call(OkapiRequest $request)
 {
     # The list of installations is periodically refreshed by contacting the OKAPI
     # repository. This method usually displays the cached version of it.
     $cachekey = 'apisrv/installations';
     $backupkey = 'apisrv/installations-backup';
     $results = Cache::get($cachekey);
     if (!$results) {
         # Download the current list of OKAPI servers.
         try {
             $opts = array('http' => array('method' => "GET", 'timeout' => 5.0));
             $context = stream_context_create($opts);
             $xml = file_get_contents("http://opencaching-api.googlecode.com/svn/trunk/etc/installations.xml", false, $context);
             $doc = simplexml_load_string($xml);
             if (!$doc) {
                 throw new ErrorException();
                 # just to get to the catch block
             }
         } catch (ErrorException $e) {
             # Google failed on us. Try to respond with a backup list.
             $results = Cache::get($backupkey);
             if ($results) {
                 Cache::set($cachekey, $results, 12 * 3600);
                 # so to retry no earlier than after 12 hours
                 return Okapi::formatted_response($request, $results);
             }
             # Backup has expired (or has never been cached). If we're on a development
             # server then probably it's okay. In production this SHOULD NOT happen.
             $results = array(array('site_url' => Settings::get('SITE_URL'), 'site_name' => "Unable to retrieve!", 'okapi_base_url' => Settings::get('SITE_URL') . "okapi/"));
             Cache::set($cachekey, $results, 12 * 3600);
             # so to retry no earlier than after 12 hours
             return Okapi::formatted_response($request, $results);
         }
         $results = array();
         $i_was_included = false;
         foreach ($doc->installation as $inst) {
             $site_url = (string) $inst[0]['site_url'];
             if ($inst[0]['okapi_base_url']) {
                 $okapi_base_url = (string) $inst[0]['okapi_base_url'];
             } else {
                 $okapi_base_url = $site_url . "okapi/";
             }
             if ($inst[0]['site_name']) {
                 $site_name = (string) $inst[0]['site_name'];
             } else {
                 $site_name = Okapi::get_normalized_site_name($site_url);
             }
             $results[] = array('site_url' => $site_url, 'site_name' => $site_name, 'okapi_base_url' => $okapi_base_url);
             if ($site_url == Settings::get('SITE_URL')) {
                 $i_was_included = true;
             }
         }
         # If running on a local development installation, then include the local
         # installation URL.
         if (!$i_was_included) {
             $results[] = array('site_url' => Settings::get('SITE_URL'), 'site_name' => "DEVELSITE", 'okapi_base_url' => Settings::get('SITE_URL') . "okapi/");
             # Contact OKAPI developers in order to get added to the official sites list!
         }
         # Cache it for one day. Also, save a backup (valid for 30 days).
         Cache::set($cachekey, $results, 86400);
         Cache::set($backupkey, $results, 86400 * 30);
     }
     return Okapi::formatted_response($request, $results);
 }