<?php
// error_reporting(E_ALL);
define('CACHE_PATH', 'cache');
$maxSize = 10000000;        // [bytes], max size of file
$maxTime = 60;              // [s], max download time
$cacheExpireSeconds = 60;

// echo "Fetching " . $_REQUEST['url'];
// NOTE: $_REQUEST['url'] is used unvalidated; any URL reachable from this host can be fetched.
retrieveFile($_REQUEST['url']);
cleanUpExpired($cacheExpireSeconds);

function retrieveFile($url) {
    $contents = file_get_contents($url);
    // Unique temp name: timestamp plus a random suffix avoids collisions between concurrent requests.
    $f = CACHE_PATH . "/tmp-" . time() . "-" . rand(0, 1024 * 1024) . "-" . basename($url);
    file_put_contents("./" . $f, $contents);
    $data = array(
        'filename' => basename($url),
        'file'     => CACHE_PATH . "/" . basename($f),
        'url'      => $url,
    );
    echo json_encode($data);
}

function cleanUpExpired($cacheExpireSeconds) {
    // Remove cached files that expired. The expiry interval is passed in as a
    // parameter; the original read $cacheExpireSeconds from function scope,
    // where it is undefined.
    $files = glob(CACHE_PATH . "/*");
    foreach ($files as $file) {
        if (time() - filemtime($file) > $cacheExpireSeconds) {
            @unlink($file);
        }
    }
}
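The script declares $maxSize and $maxTime but never applies them. A minimal sketch of how they could be enforced, assuming the default http stream wrapper; the helper name fetchWithLimits is my own, not part of the original:

function fetchWithLimits($url, $maxSize, $maxTime) {
    // The http wrapper's 'timeout' option is a read timeout in seconds, which
    // approximates a cap on total download time.
    $context = stream_context_create(array(
        'http' => array('timeout' => $maxTime),
    ));
    // The fifth argument to file_get_contents() caps how many bytes are read,
    // enforcing the size limit. Returns false on failure or timeout.
    return file_get_contents($url, false, $context, 0, $maxSize);
}

Inside retrieveFile(), the plain file_get_contents($url) call could then become $contents = fetchWithLimits($url, $maxSize, $maxTime), with the two limits passed in rather than read from global scope.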
if (fileValid($bucket, $file, $interval)) {
    if (retrieveFile($bucket, $file)) {
        return;
    }
}

// File expired; create a token and see whether a new search can be started.
$token = $file . ".token";
$expiryInterval = 60 * 60; // someone tried to search 1 hr ago; 1 hr is when FB updates quotas

if (fileValid($bucket, $token, $expiryInterval)) {
    // Someone else is searching, so retrieve the existing file.
    // XXX: If I forcefully remove this events file while the token says someone
    // else is searching, the console will log a 404 (file not found). That error
    // reaches the client, which (as printed in the jsonStore function) reports a
    // problem and asks the user to try later. This should be OK; just forcefully
    // remove the token file to start a new search.
    retrieveFile($bucket, $file); // if this fails, do nothing
} else {
    // Either the token is not there, or it's old and carried over from a previous
    // search, or someone created the token, started a search, and aborted it.
    // Renew the token.
    $content = "foo";
    if (storeGCS($content, $bucket, $token) != 0) {
        $msg = 'zouk calendar: failed to store token ' . $file;
        syslog(LOG_EMERG, $msg);
        sendMail($msg);
    }
    // File not valid: send a JSON object as the error.
    echo "{ \"error\": \"zouk calendar: file not found\" }";
}
} else {
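The fragment leans on a fileValid($bucket, $file, $interval) helper that the excerpt never defines. A plausible sketch, assuming a gs:// stream wrapper is registered (e.g. via StorageClient::registerStreamWrapper() from the google/cloud-storage library); the implementation below is my assumption, not the author's code:

// Hypothetical helper: returns true when the object exists in the bucket and is
// younger than $interval seconds. Standard filesystem calls operate on GCS
// objects once the gs:// wrapper is registered.
function fileValid($bucket, $file, $interval) {
    $path = "gs://" . $bucket . "/" . $file;
    if (!file_exists($path)) {
        return false;
    }
    clearstatcache(); // stat results are cached per request; refresh before filemtime()
    return (time() - filemtime($path)) <= $interval;
}

With this reading, the token file acts as a coarse one-hour lock: a fresh token means another worker is already searching, a missing or stale token means this worker may write a new token and start the search itself.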