protected function invalidateTitle( \Title $title ) {
	global $wgParsoidCacheServers, $wgContentNamespaces;

	if ( !in_array( $title->getNamespace(), $wgContentNamespaces ) ) {
		return false;
	}

	# First request the new version
	$parsoidInfo = array();
	$parsoidInfo['cacheID'] = $title->getPreviousRevisionID( $title->getLatestRevID() );
	$parsoidInfo['changedTitle'] = $this->title->getPrefixedDBkey();

	$requests = array();
	foreach ( $wgParsoidCacheServers as $server ) {
		$singleUrl = $this->getParsoidURL( $title );
		$requests[] = array(
			'url' => $singleUrl,
			'headers' => array(
				'X-Parsoid: ' . json_encode( $parsoidInfo ),
				// ask the cache for a fresh render rather than a cached copy
				'Cache-control: no-cache'
			)
		);
		$this->wikiaLog( array(
			"action" => "invalidateTitle",
			"get_url" => $singleUrl
		) );
	}
	$this->checkCurlResults( \CurlMultiClient::request( $requests ) );

	# And now purge the previous revision so that we make efficient use
	# of the Varnish cache space without relying on LRU. Since the URL
	# differs we can't use implicit refresh.
	$requests = array();
	foreach ( $wgParsoidCacheServers as $server ) {
		// @TODO: this triggers a getPreviousRevisionID() query per server
		$singleUrl = $this->getParsoidURL( $title, true );
		$requests[] = array( 'url' => $singleUrl );
		$this->wikiaLog( array(
			"action" => "invalidateTitle",
			"purge_url" => $singleUrl
		) );
	}
	$options = \CurlMultiClient::getDefaultOptions();
	$options[CURLOPT_CUSTOMREQUEST] = "PURGE";

	return $this->checkCurlResults( \CurlMultiClient::request( $requests, $options ) );
}
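The method above relies on a getParsoidURL() helper that is not part of the excerpt. As a rough sketch of what it might do, assuming the revision ID is embedded in the cache URL (which is why the purge pass must target a different URL than the refresh pass): with the second argument set, it would resolve the previous revision instead of the latest one, which also matches the @TODO note about the extra getPreviousRevisionID() query per server. The URL layout and server selection below are illustrative assumptions, not the actual Wikia implementation.

// Hypothetical sketch of getParsoidURL(); the real helper is not shown in
// the excerpt. The URL layout is an assumption, and since the calling loop
// never passes $server in, how the per-server URL is derived is unclear;
// reset() merely stands in here.
protected function getParsoidURL( \Title $title, $prev = false ) {
	global $wgParsoidCacheServers;

	$latest = $title->getLatestRevID();
	// The purge pass addresses the previous revision; because the revision
	// ID is part of the URL, that is a different cache object.
	$oldid = $prev ? $title->getPreviousRevisionID( $latest ) : $latest;

	// Assumed layout: http://<cache server>/<dbkey>?oldid=<revision>
	return 'http://' . reset( $wgParsoidCacheServers ) . '/' .
		wfUrlencode( $title->getPrefixedDBkey() ) . '?oldid=' . $oldid;
}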
/**
 * Perform several cURL requests in parallel, and return the combined
 * results.
 *
 * @static
 * @param $requests array requests, each with a 'url' and an optional
 * 'headers' member:
 *   array(
 *     'url' => 'http://server.com/foo',
 *     'headers' => array( 'X-Foo: Bar' )
 *   )
 * @param $options array curl options used for each request, default
 * {CurlMultiClient::getDefaultOptions}.
 * @return array An array of arrays containing 'error' and 'data'
 * members. If there are errors, 'data' will be null. If there are no
 * errors, the 'error' member will be null and 'data' will contain the
 * response body as a string.
 */
public static function request( $requests, array $options = null ) {
	if ( !count( $requests ) ) {
		return array();
	}

	$handles = array();

	if ( $options === null ) {
		// fall back to the default options
		$options = CurlMultiClient::getDefaultOptions();
	}

	// create one curl handle per request and apply the options
	foreach ( $requests as $k => $row ) {
		$handle = curl_init();
		$reqOptions = array(
			CURLOPT_URL => $row['url'],
			CURLOPT_FORBID_REUSE => true
		) + $options;
		wfDebug( "adding url: " . $row['url'] );
		if ( isset( $row['headers'] ) ) {
			$reqOptions[CURLOPT_HTTPHEADER] = $row['headers'];
		}
		curl_setopt_array( $handle, $reqOptions );
		$handles[$k] = $handle;
	}

	$mh = curl_multi_init();
	foreach ( $handles as $handle ) {
		curl_multi_add_handle( $mh, $handle );
	}
	$active = null; // number of handles still being processed

	// execute the handles
	do {
		do {
			// perform work as long as there is any
			$status_cme = curl_multi_exec( $mh, $active );
		} while ( $status_cme == CURLM_CALL_MULTI_PERFORM );

		if ( $active > 0 && $status_cme === CURLM_OK ) {
			// wait for more work to become available
			if ( curl_multi_select( $mh, 10 ) ) {
				// Wait for 5 ms, somewhat similar to the suggestion at
				// http://curl.haxx.se/libcurl/c/curl_multi_fdset.html
				// We pick a smaller value as we are typically hitting
				// fast internal services, so status changes are more
				// likely.
				usleep( 5000 );
			}
		}
	} while ( $active && $status_cme == CURLM_OK );

	// collect per-request results, keyed like the input array
	$res = array();
	foreach ( $requests as $k => $row ) {
		$res[$k] = array();
		$res[$k]['error'] = curl_error( $handles[$k] );
		if ( strlen( $res[$k]['error'] ) ) {
			$res[$k]['data'] = null;
		} else {
			$res[$k]['error'] = null;
			// get the response body
			$res[$k]['data'] = curl_multi_getcontent( $handles[$k] );
		}
		// detach the handle from the multi handle and free it
		curl_multi_remove_handle( $mh, $handles[$k] );
		curl_close( $handles[$k] );
	}
	curl_multi_close( $mh );

	return $res;
}
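CurlMultiClient::request() keys its result array by the same keys as the input array, so callers can correlate each response with its request. A brief usage sketch (the host names and header are placeholders) covering both the plain GET form and the PURGE form that invalidateTitle() uses for the previous revision:

$results = CurlMultiClient::request( array(
	array( 'url' => 'http://parsoid1.internal/Main_Page' ),
	array(
		'url' => 'http://parsoid2.internal/Main_Page',
		'headers' => array( 'Cache-control: no-cache' )
	)
) );
foreach ( $results as $k => $result ) {
	if ( $result['error'] !== null ) {
		wfDebug( "request $k failed: " . $result['error'] );
	} else {
		wfDebug( "request $k returned " . strlen( $result['data'] ) . " bytes" );
	}
}

// To send PURGE instead of GET, override the request method via the
// options array, reusing a $requests array built as above:
$options = CurlMultiClient::getDefaultOptions();
$options[CURLOPT_CUSTOMREQUEST] = 'PURGE';
$purgeResults = CurlMultiClient::request( $requests, $options );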