/**
 * Removes objects from pool and cache.
 *
 * @param string[] $keys of the objects to be removed
 *
 * @return void
 */
public function deleteMulti(array $keys)
{
    $this->cache->deleteMulti($keys);
    foreach ($keys as $key) {
        if (isset($this->keyToObj[$key])) {
            unset($this->keyToObj[$key]);
        }
    }
}
/**
 * Removes objects from pool and from cache, if the latter is being used.
 *
 * @param int[] $ids of the objects to be removed
 *
 * @return void
 */
public function deleteFromCache(&$ids)
{
    $cacheInUse = $this->cache instanceof ICache;
    foreach ($ids as $id) {
        if (isset($this->idToObj[$id])) {
            unset($this->idToObj[$id]);
        }
        if ($cacheInUse) {
            $this->cache->deleteItem($this->classNick . '_' . $id);
        }
    }
}
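/*
 * Usage sketch (illustrative, not part of the class): assuming $pool is an instance exposing the two removal
 * methods above, invalidation could look like the following. The keys, IDs and variable names are hypothetical.
 *
 *     $pool->deleteMulti(['objKey1', 'objKey2']); //drop named objects from pool and cache
 *     $ids = [34, 35, 36];
 *     $pool->deleteFromCache($ids);               //drop objects by ID; cache entries are removed too if a cache is configured
 */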
/**
 * Gathers multipage endpoint responses and joins them into one array, using the passed callback functions to
 * traverse and index the data. The result of this (potentially expensive) operation can be cached.
 *
 * @param string $endpointHref the URL to the first page of the endpoint
 * @param callable $indexFunc function to be used to extract the ID from/for an individual Response object
 * @param callable $elementFunc function to be used to extract the desired data from an individual Response object
 * @param string $accept the representation to request from CREST
 * @param bool $cache whether the gathered data should be cached or not
 * @param int $ttl the time to live to be used for caching of the gathered data
 * @param string $subCommandKey to avoid cache namespace collisions when different gather requests access the same
 * endpoint URL, an additional subcommand key can be specified
 *
 * @return array
 */
public function gather($endpointHref, callable $indexFunc = null, callable $elementFunc = null, $accept = null,
    $cache = true, $ttl = 3600, $subCommandKey = null
) {
    $dataKey = 'gathered:' . $endpointHref . (isset($subCommandKey) ? ',' . $subCommandKey : '');

    //we introduce another caching layer here because gathering and reindexing multipage data is expensive, even
    //when the individual CREST responses are already cached.
    try {
        $dataObj = $this->cache->getItem($dataKey);
    } catch (KeyNotFoundInCacheException $e) {
        //setup a cacheable array object
        $dataClass = Config::getIveeClassName('CacheableArray');
        $dataObj = new $dataClass($dataKey, time() + $ttl);

        //gather all the pages into one compact array
        $dataObj->data = $this->gather2($endpointHref, $indexFunc, $elementFunc, $accept, false);

        if ($cache) {
            $this->cache->setItem($dataObj);
        }
    }
    return $dataObj->data;
}
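/*
 * Usage sketch (illustrative, assumptions marked): gathering a multipage endpoint into one indexed array.
 * The href, the accessor properties and the callback bodies below are hypothetical; per the docblock, the
 * callables receive an individual Response object each and return the index respectively the element data.
 *
 *     $gathered = $client->gather(
 *         'https://crest.example/itemTypes/',                       //hypothetical first-page href
 *         function ($response) { return (int) $response->content->id; }, //hypothetical: derive an index
 *         function ($response) { return $response->content; },          //hypothetical: pick the payload
 *         null,   //default Accept header
 *         true,   //cache the gathered (re-indexed) array
 *         3600    //cache TTL in seconds
 *     );
 */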
/**
 * Performs parallel asynchronous GET requests using callbacks to process incoming responses.
 *
 * @param array $hrefs the hrefs to request
 * @param array $header the header to be passed in all requests
 * @param callable $getAuthHeader a function that returns an appropriate bearer authentication header, for instance
 * Client::getBearerAuthHeader(). We do this on-the-fly as during large multi-GET batches the access token might
 * expire.
 * @param callable $callback a function expecting one iveeCrest\Response object as argument, called for every
 * successful response
 * @param callable $errCallback a function expecting one iveeCrest\Response object as argument, called for every
 * non-successful response
 * @param bool $cache whether the individual Responses should be cached
 *
 * @return void
 * @throws \iveeCrest\Exceptions\IveeCrestException on general CURL error
 */
public function asyncMultiGet(array $hrefs, array $header, callable $getAuthHeader, callable $callback,
    callable $errCallback = null, $cache = true
) {
    //This method is fairly complex, in part due to the tricky and ugly interface of multi-curl and the moving
    //window logic. Ideas or patches on how to make it nicer are welcome!

    //separate hrefs that are already cached from those that need to be requested
    $hrefsToQuery = [];
    $keysToQuery = [];
    foreach ($hrefs as $href) {
        $responseKey = md5($this->refreshToken . '_get:' . $href);
        try {
            $callback($this->cache->getItem($responseKey));
        } catch (KeyNotFoundInCacheException $e) {
            $hrefsToQuery[] = $href;
            $keysToQuery[] = $responseKey;
        }
    }

    //make sure the rolling window isn't greater than the number of hrefs
    $rollingWindow = count($hrefsToQuery) > 10 ? 10 : count($hrefsToQuery);

    //CURL options for all requests
    $stdOptions = [
        CURLOPT_RETURNTRANSFER  => true,
        CURLOPT_USERAGENT       => $this->userAgent,
        CURLOPT_SSL_VERIFYPEER  => true,
        CURLOPT_SSL_CIPHER_LIST => 'TLSv1',
        CURLOPT_HTTPHEADER      => $header
    ];

    $responses = [];
    $master = curl_multi_init();

    //setup the first batch of requests
    for ($i = 0; $i < $rollingWindow; $i++) {
        $href = $hrefsToQuery[$i];
        $responses[$href] = $this->addHandleToMulti(
            $master,
            $href,
            $keysToQuery[$i],
            $stdOptions,
            $getAuthHeader,
            $header
        );
    }

    $crestExceptionClass = Config::getIveeClassName('IveeCrestException');
    $running = false;
    do {
        //execute whichever handles need to be started
        do {
            $execrun = curl_multi_exec($master, $running);
        } while ($execrun == CURLM_CALL_MULTI_PERFORM);
        if ($execrun != CURLM_OK) {
            throw new $crestExceptionClass("CURL Multi-GET error", $execrun);
        }

        //block until we have anything on at least one of the handles
        curl_multi_select($master);

        //a request returned, process it
        while ($done = curl_multi_info_read($master)) {
            $info = curl_getinfo($done['handle']);

            //find the Response object matching the URL
            $res = $responses[$info['url']];

            //set info and content to Response object
            $res->setContentAndInfo(curl_multi_getcontent($done['handle']), $info);

            //execute the callbacks passing the response as argument
            if ($info['http_code'] == 200) {
                //cache it if configured
                if ($cache) {
                    $this->cache->setItem($res);
                }
                $callback($res);
            } elseif (isset($errCallback)) {
                $errCallback($res);
            } else {
                throw new $crestExceptionClass('CREST http error ' . $info['http_code']);
            }

            //remove the reference to the response to conserve memory on large batches
            $responses[$info['url']] = null;

            //start a new request (it's important to do this before removing the old one)
            if ($i < count($hrefsToQuery)) {
                $href = $hrefsToQuery[$i];
                $responses[$href] = $this->addHandleToMulti(
                    $master,
                    $href,
                    $keysToQuery[$i],
                    $stdOptions,
                    $getAuthHeader,
                    $header
                );
                $i++;
            }

            //remove the curl handle that just completed
            curl_multi_remove_handle($master, $done['handle']);
        }

        //don't waste too many CPU cycles on looping
        usleep(1000);
        //TODO: implement proper rate limiting
    } while ($running > 0);

    curl_multi_close($master);
}
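/*
 * Usage sketch (illustrative, assumptions marked): requesting a batch of hrefs in parallel and collecting the
 * successful responses. $client, $hrefs and the header layout below are assumptions; the auth-header callable is
 * passed so an expiring access token can be re-read mid-batch, as described in the docblock, and
 * Client::getBearerAuthHeader() is the example the docblock itself names.
 *
 *     $results = [];
 *     $client->asyncMultiGet(
 *         $hrefs,                                                                //array of hrefs to fetch
 *         ['Accept: application/json'],                                          //hypothetical common request header
 *         function () use ($client) { return $client->getBearerAuthHeader(); },  //refresh the auth header on demand
 *         function ($response) use (&$results) { $results[] = $response; },      //collect successful responses
 *         function ($response) { error_log('CREST error for a batched GET'); },  //log failures instead of throwing
 *         true                                                                   //cache each individual Response
 *     );
 */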