/**
 * This method is called via hook at the end of the request handling
 *
 * Make the list of unique URLs and send them to Fastly via Scribe queue
 *
 * @author macbre
 *
 * @return bool true - it's a hook
 */
static function onRestInPeace() {
    // don't process an empty queue
    if (empty(self::$urls)) {
        return true;
    }

    wfProfileIn(__METHOD__);

    $scribe = WScribeClient::singleton(self::SCRIBE_KEY);

    try {
        wfDebug(sprintf(
            "%s: sending %d unique URLs to the purger (%d items were queued in total)\n",
            __METHOD__,
            count(self::$urls),
            self::$urlsCount
        ));

        foreach (self::$urls as $url => $data) {
            wfDebug(sprintf("%s: %s\n", __METHOD__, $url));

            // send to Scribe queue
            $scribe->send(json_encode($data));

            // debugging data to be sent to both sFlow (for monitoring) and Kibana (for debugging)
            $context = [
                'url' => $data['url'],
                'method' => $data['method'],
            ];

            // log purges using SFlow (BAC-1258)
            SFlow::operation('varnish.purge', $context);

            // log purges using Kibana (BAC-1317)
            WikiaLogger::instance()->info('varnish.purge', $context);
        }
    } catch (TException $e) {
        Wikia::log(__METHOD__, 'scribeClient exception', $e->getMessage());
    }

    wfProfileOut(__METHOD__);

    return true;
}
function run() {
    $this->startTime = $this->endTime = time();

    if ($this->exception) {
        $this->results[] = $this->exception;
        return;
    }

    foreach ($this->callOrder as $callData) {
        list($classIndex, $callIndex) = $callData;

        /** @var \Wikia\Tasks\Tasks\BaseTask $task */
        $task = $this->taskList[$classIndex];
        list($method, $args) = $task->getCall($callIndex);

        foreach ($args as $i => $arg) {
            if (is_array($arg) || is_object($arg)) {
                continue;
            }

            // arguments of the form "#N" refer to the result of the N-th call made so far
            if (preg_match('/^#([0-9]+)$/', trim($arg), $match)) {
                if (!isset($this->results[$match[1]])) {
                    throw new InvalidArgumentException();
                }

                $args[$i] = $this->results[$match[1]];
            }
        }

        WikiaLogger::instance()->pushContext(['task_call' => get_class($task) . "::{$method}"]);
        $result = $task->execute($method, $args);
        WikiaLogger::instance()->popContext();

        $this->results[] = $result;

        if ($result instanceof Exception) {
            break;
        }
    }

    $this->endTime = time();
}
private function getGlobalFooterLinks() {
    global $wgCityId, $wgContLang, $wgLang, $wgMemc;
    wfProfileIn(__METHOD__);

    $verticalId = WikiFactoryHub::getInstance()->getVerticalId($wgCityId);

    $memcKey = wfSharedMemcKey(
        self::MEMC_KEY_GLOBAL_FOOTER_LINKS,
        $wgContLang->getCode(),
        $wgLang->getCode(),
        $verticalId,
        self::MEMC_KEY_GLOBAL_FOOTER_VERSION
    );

    $globalFooterLinks = $wgMemc->get($memcKey);
    if (!empty($globalFooterLinks)) {
        wfProfileOut(__METHOD__);
        return $globalFooterLinks;
    }

    if (is_null($globalFooterLinks = getMessageAsArray(self::MESSAGE_KEY_GLOBAL_FOOTER_LINKS . '-' . $verticalId))) {
        if (is_null($globalFooterLinks = getMessageAsArray(self::MESSAGE_KEY_GLOBAL_FOOTER_LINKS))) {
            wfProfileOut(__METHOD__);
            WikiaLogger::instance()->error(
                "Global Footer's links not found in messages",
                ['exception' => new Exception()]
            );
            return [];
        }
    }

    $parsedLinks = [];

    foreach ($globalFooterLinks as $link) {
        $link = trim($link);

        if (strpos($link, '*') === 0) {
            $parsedLink = parseItem($link);

            if (strpos($parsedLink['text'], 'LICENSE') !== false || $parsedLink['text'] == 'GFDL') {
                $parsedLink['isLicense'] = true;
            } else {
                $parsedLink['isLicense'] = false;
            }

            $parsedLinks[] = $parsedLink;
        }
    }

    $wgMemc->set($memcKey, $parsedLinks, self::MEMC_EXPIRY);
    wfProfileOut(__METHOD__);

    return $parsedLinks;
}
/**
 * Initialize the experiment and set all required tracking things
 *
 * @param string $experimentName
 * @param array $experimentConfig
 */
private static function startExperiment($experimentName, array $experimentConfig) {
    wfDebug(sprintf(
        "%s[%s] using %s class with %s params\n",
        __METHOD__,
        $experimentName,
        $experimentConfig['handler'],
        json_encode($experimentConfig['params'])
    ));

    new $experimentConfig['handler']($experimentConfig['params'] ?: []);

    // mark a transaction with an experiment name
    \Transaction::getInstance()->set(\Transaction::PARAM_AB_PERFORMANCE_TEST, $experimentName);

    // set a global JS variable with an experiment name
    global $wgHooks;
    $wgHooks['WikiaSkinTopScripts'][] = function (array &$vars, &$scripts) use ($experimentName) {
        $vars['wgABPerformanceTest'] = $experimentName;
        return true;
    };

    /*
     * Start the session to bypass CDN cache
     *
     * We don't want to pollute the CDN cache with the A/B performance testing tracking data.
     * As the tests are run for only a small subset of the traffic, start the session for clients
     * that are in the test groups to bypass the CDN cache.
     */
    if (session_id() == '') {
        wfSetupSession();
        wfDebug(__METHOD__ . " - session started\n");

        // log started sessions
        global $wgUser;
        WikiaLogger::instance()->info(__METHOD__, [
            'experiment' => $experimentName,
            'session_id' => session_id(),
            'is_anon' => $wgUser->isAnon(),
        ]);
    }
}
/**
 * This method is called from Facebook's side whenever a user deletes the Wikia app from their account. Most
 * of the functionality is based on the example given on Facebook:
 *
 * https://developers.facebook.com/docs/facebook-login/using-login-with-games/#parsingsr
 *
 * Additional general information on the callback here:
 *
 * https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow/v2.1#deauth-callback
 */
public function deauthorizeCallback() {
    global $fbAppSecret;
    $log = WikiaLogger::instance();

    $signedRequest = $this->getVal('signed_request', '');
    list($encodedSig, $payload) = explode('.', $signedRequest, 2);

    // decode the data
    $sig = $this->base64UrlDecode($encodedSig);
    $data = json_decode($this->base64UrlDecode($payload), true);

    // confirm the signature
    $expectedSig = hash_hmac('sha256', $payload, $fbAppSecret, $raw = true);
    if ($sig !== $expectedSig) {
        $log->info('Deauthorization callback received with invalid signature', ['method' => __METHOD__]);
        return;
    }

    if (empty($data['user_id'])) {
        $log->warning('Deauthorization callback received with missing user ID', ['method' => __METHOD__]);
        return;
    }

    $facebookUserId = $data['user_id'];
    $map = FacebookMapModel::lookupFromFacebookID($facebookUserId);
    if (empty($map)) {
        $log->info('Deauthorization callback received with no matching Wikia ID mapping found', [
            'method' => __METHOD__,
            'facebookId' => $facebookUserId,
        ]);
        return;
    }

    // Send this to the normal disconnect action
    $res = $this->sendSelfRequest('disconnectFromFB', ['user' => $map->getWikiaUserId()]);
    $status = $res->getVal('status', '');

    $logResultParams = [
        'method' => __METHOD__,
        'facebookId' => $facebookUserId,
        'wikiaUserId' => $map->getWikiaUserId(),
    ];

    if ($status == 'ok') {
        $log->info('Deauthorization callback received and completed successfully', $logResultParams);
    } else {
        $log->error('Deauthorization callback received and did not complete', $logResultParams);
    }
}
/**
 * Check if an infobox (a div element or table which contains the 'infobox' string in its class attribute)
 * exists in the first article section, and extract it from that section
 *
 * @param Parser $parser Parser instance
 * @param integer $section number of the section in the article text
 * @param string $content reference to the section content
 * @param boolean $showEditLinks whether to add an edit link
 * @return bool
 */
public static function onParserSectionCreate($parser, $section, &$content, $showEditLinks) {
    // skip if we're not parsing for venus
    if (!F::app()->checkSkin('venus')) {
        return true;
    }

    try {
        if (self::isInfoboxInFirstSection($parser, $section, $content)) {
            $infoboxExtractor = new InfoboxExtractor($content);

            $dom = $infoboxExtractor->getDOMDocument();
            $nodes = $infoboxExtractor->getInfoboxNodes();
            $node = $nodes->item(0);

            if ($node instanceof DOMElement) {
                $body = $dom->documentElement->firstChild;

                // replace extracted infobox with a dummy element to prevent newlines from creating empty paragraphs (CON-2166)
                // <table infobox-placeholder="1"></table>
                $placeholder = $dom->createElement('table');
                $placeholder->setAttribute('infobox-placeholder', 'true');
                $node->parentNode->insertBefore($placeholder, $node);

                // perform a magic around infobox wrapper
                $node = $infoboxExtractor->clearInfoboxStyles($node);
                $infoboxWrapper = $infoboxExtractor->wrapInfobox($node, 'infoboxWrapper', 'infobox-wrapper');
                $infoboxContainer = $infoboxExtractor->wrapInfobox($infoboxWrapper, 'infoboxContainer', 'infobox-container');

                // move infobox to the beginning of article content
                $infoboxExtractor->insertNode($body, $infoboxContainer, true);

                $content = $dom->saveHTML();

                $parser->getOutput()->addModules('ext.wikia.venus.article.infobox');
            }
        }
    } catch (DOMException $e) {
        // log exceptions
        WikiaLogger::instance()->error(__METHOD__, ['exception' => $e]);
    }

    return true;
}
/**
 * Get data for each gallery item
 * @param array $item Data about the media item
 * @param int $index Where the item shows up in the gallery
 * @return array|null
 */
protected function getMediaData(array $item, $index) {
    $file = wfFindFile($item['title']);
    if (!$file instanceof File) {
        WikiaLogger::instance()->error(
            'File with title: ' . $item['title'] . ' doesn\'t exist',
            ['class' => __CLASS__]
        );
        return null;
    }

    $dimension = MediaGalleryHelper::getImageWidth($this->itemCount, $index);
    $thumbUrl = WikiaFileHelper::getSquaredThumbnailUrl($file, $dimension);

    $dimensions = [
        'width' => $dimension,
        'height' => $dimension,
    ];
    $thumb = $file->transform($dimensions);
    if (!$thumb instanceof ThumbnailImage) {
        WikiaLogger::instance()->error(
            'ThumbnailImage from title: ' . $item['title'] . ' couldn\'t be created.',
            ['thumbClass' => get_class($thumb)]
        );
        return null;
    }
    $thumb->setUrl($thumbUrl);

    $thumbnail = $this->app->renderView('ThumbnailController', 'gallery', ['thumb' => $thumb]);

    $caption = '';
    if (!empty($item['caption'])) {
        // parse any wikitext in caption. Logic borrowed from WikiaMobileMediaService::renderMediaGroup.
        $parser = $this->getParser();
        $caption = $parser->internalParse($item['caption']);
        $parser->replaceLinkHolders($caption);
        $caption = $parser->killMarkers($caption);
    }

    $title = $file->getTitle();

    return [
        'thumbUrl' => $thumbUrl,
        'thumbHtml' => $thumbnail,
        'caption' => $caption,
        'linkHref' => $file->getTitle()->getLinkURL(),
        'title' => $title->getText(),
        'dbKey' => $title->getDBKey(),
    ];
}
public static function onArticleViewAfterParser(Article $article, ParserOutput $parserOutput) {
    global $wgCityId, $wgDBname;

    // we collect production data from Oasis only
    /*
    $app = F::app();
    if ( !$app->checkSkin( 'oasis', $app->wg->Skin ) || $app->wg->DevelEnvironment || $app->wg->StagingEnvironment ) {
        return true;
    }
    */

    if (class_exists('WScribeClient')) {
        try {
            $title = $article->getTitle();

            $fields = array(
                'wikiId' => intval($wgCityId),
                'databaseName' => $wgDBname,
                'articleId' => $title->getArticleID(),
                'namespaceId' => $title->getNamespace(),
                'articleTitle' => $title->getText(),
                'parserTime' => $parserOutput->getPerformanceStats('time'),
                'wikitextSize' => $parserOutput->getPerformanceStats('wikitextSize'),
                'htmlSize' => $parserOutput->getPerformanceStats('htmlSize'),
                'expFuncCount' => $parserOutput->getPerformanceStats('expFuncCount'),
                'nodeCount' => $parserOutput->getPerformanceStats('nodeCount'),
                'postExpandSize' => $parserOutput->getPerformanceStats('postExpandSize'),
                'tempArgSize' => $parserOutput->getPerformanceStats('tempArgSize'),
            );

            $data = json_encode($fields);
            WScribeClient::singleton(self::SCRIBE_KEY)->send($data);
        } catch (TException $e) {
            Wikia::log(__METHOD__, 'scribeClient exception', $e->getMessage());
        }
    }

    // Logging parser activity for monitoring
    // wiki and article info are sent to logstash anyways so no need to repeat them here
    WikiaLogger::instance()->info("Parser execution", [
        'parser-time' => round($parserOutput->getPerformanceStats('time') * 1000),
        'node-count' => (int) $parserOutput->getPerformanceStats('nodeCount'),
        'wikitext-size' => (int) $parserOutput->getPerformanceStats('wikitextSize'),
        'skin-name' => RequestContext::getMain()->getSkin()->getSkinName(),
    ]);

    return true;
}
/**
 * The category list template. Used by article pages on view and edit save.
 */
public function categories() {
    wfProfileIn(__METHOD__);

    $categories = $this->request->getVal('categories', array());
    $data = array();

    // Because $out->getCategoryLinks doesn't maintain the order of the stored category data,
    // we have to build this information ourselves manually. This is essentially the same
    // code from $out->addCategoryLinks() but it results in a different data format.
    foreach ($categories as $category) {
        // Support category name or category data object
        $name = is_array($category) ? $category['name'] : $category;
        $originalName = $name;

        $title = Title::makeTitleSafe(NS_CATEGORY, $name);
        if ($title != null) {
            $this->wg->ContLang->findVariantLink($name, $title, true);

            if ($name != $originalName && array_key_exists($name, $data)) {
                continue;
            }

            $text = $this->wg->ContLang->convertHtml($title->getText());

            $data[$name] = array(
                'link' => Linker::link($title, $text),
                'name' => $text,
                'type' => CategoryHelper::getCategoryType($originalName),
            );
        } else {
            \Wikia\Logger\WikiaLogger::instance()->warning("Unsafe category provided", ['name' => $name]);
        }
    }

    $this->response->setVal('categories', $data);

    wfProfileOut(__METHOD__);
}
static function request($method, $url, $timeout = 'default') {
    global $wgHTTPTimeout, $wgVersion, $wgTitle, $wgDevelEnvironment;

    wfProfileIn(__METHOD__);
    wfDebug(__METHOD__ . ": {$method} {$url}\n");

    # Use curl if available
    if (function_exists('curl_init')) {
        $c = curl_init($url);

        /*
        if ( self::isLocalURL( $url ) ) {
            curl_setopt( $c, CURLOPT_PROXY, 'localhost:80' );
        } else if ($wgHTTPProxy) {
            curl_setopt($c, CURLOPT_PROXY, $wgHTTPProxy);
        }
        */
        if (empty($wgDevelEnvironment)) {
            curl_setopt($c, CURLOPT_PROXY, 'localhost:80');
        }

        if ($timeout == 'default') {
            $timeout = $wgHTTPTimeout;
        }
        curl_setopt($c, CURLOPT_TIMEOUT, $timeout);
        curl_setopt($c, CURLOPT_HEADER, true);
        curl_setopt($c, CURLOPT_FOLLOWLOCATION, false);
        curl_setopt($c, CURLOPT_USERAGENT, "MediaWiki/{$wgVersion}");

        if ($method == 'POST') {
            curl_setopt($c, CURLOPT_POST, true);
        } else {
            curl_setopt($c, CURLOPT_CUSTOMREQUEST, $method);
        }

        # Set the referer to $wgTitle, even in command-line mode
        # This is useful for interwiki transclusion, where the foreign
        # server wants to know what the referring page is.
        # $_SERVER['REQUEST_URI'] gives a less reliable indication of the
        # referring page.
        if (is_object($wgTitle)) {
            curl_setopt($c, CURLOPT_REFERER, $wgTitle->getFullURL());
        }

        $requestTime = microtime(true);

        ob_start();
        curl_exec($c);
        $text = ob_get_contents();
        ob_end_clean();

        // log HTTP requests
        $requestTime = (int) ((microtime(true) - $requestTime) * 1000.0);
        $params = [
            'statusCode' => curl_getinfo($c, CURLINFO_HTTP_CODE),
            'reqMethod' => $method,
            'reqUrl' => $url,
            'caller' => __CLASS__,
            'requestTimeMS' => $requestTime,
        ];
        \Wikia\Logger\WikiaLogger::instance()->debug('Http request', $params);

        # Don't return the text of error messages, return false on error
        if (curl_getinfo($c, CURLINFO_HTTP_CODE) != 200 && curl_getinfo($c, CURLINFO_HTTP_CODE) != 301) {
            $text = false;
        }
        # Don't return truncated output
        if (curl_errno($c) != CURLE_OK) {
            $text = false;
        }
    }

    wfProfileOut(__METHOD__);
    return array($text, $c);
}
/**
 * add video
 * @param string $url
 * @return string error message or array( $videoTitle, $videoPageId, $videoProvider )
 */
public function addVideo($url) {
    global $wgIsGhostVideo;
    wfProfileIn(__METHOD__);

    if (!$this->wg->User->isAllowed('videoupload')) {
        wfProfileOut(__METHOD__);
        return wfMessage('videos-error-admin-only')->plain();
    }

    if (empty($url)) {
        wfProfileOut(__METHOD__);
        return wfMessage('videos-error-no-video-url')->text();
    }

    $vHelper = new VideoHandlerHelper();

    # @TODO Commenting out to fix MAIN-4436 -- Should be fixed correctly when content team is back
    # if ( !$vHelper->isVideoProviderSupported( $url ) ) {
    #     wfProfileOut( __METHOD__ );
    #     return wfMessage( 'videos-error-provider-not-supported' )->parse();
    # }

    try {
        // is it a WikiLink?
        $title = Title::newFromText($url, NS_FILE);
        if (!$title || !WikiaFileHelper::isFileTypeVideo($title)) {
            $title = Title::newFromText(str_replace(array('[[', ']]'), array('', ''), $url), NS_FILE);
        }
        if (!$title || !WikiaFileHelper::isFileTypeVideo($title)) {
            $file = $this->getVideoFileByUrl($url);
            if ($file) {
                $title = $file->getTitle();
            }
        }

        if ($title && WikiaFileHelper::isFileTypeVideo($title)) {
            $videoTitle = $title;
            $videoPageId = $title->getArticleId();
            $videoProvider = '';
            wfRunHooks('AddPremiumVideo', array($title));
        } else {
            if (empty($this->wg->allowNonPremiumVideos)) {
                wfProfileOut(__METHOD__);
                return wfMessage('videohandler-non-premium')->parse();
            }

            list($videoTitle, $videoPageId, $videoProvider) = $this->addVideoVideoHandlers($url);
            $file = RepoGroup::singleton()->findFile($videoTitle);
        }

        if (!$file instanceof File) {
            WikiaLogger::instance()->error(
                '\\VideoHandlerHelper->addDefaultVideoDescription() - File is empty',
                [
                    'exception' => new Exception(),
                    'url' => $url,
                    'title' => $title,
                    'videoTitle' => $videoTitle,
                    'videoPageId' => $videoPageId,
                    'videoProvider' => $videoProvider,
                    'wgIsGhostVideo' => $wgIsGhostVideo,
                ]
            );
            wfProfileOut(__METHOD__);
            return wfMessage('videos-something-went-wrong')->parse();
        } else {
            // Add a default description if available and one doesn't already exist
            $vHelper->addDefaultVideoDescription($file);
        }
    } catch (Exception $e) {
        wfProfileOut(__METHOD__);
        return $e->getMessage();
    }

    wfProfileOut(__METHOD__);
    return array($videoTitle, $videoPageId, $videoProvider);
}
/**
 * Construct a database error
 * @param $db DatabaseBase object which threw the error
 * @param $error String A simple error message to be used for debugging
 */
function __construct(DatabaseBase &$db, $error) {
    global $wgDBcluster;

    $this->db = $db;
    parent::__construct($error);

    $isMaster = !is_null($db->getLBInfo('master'));

    // Wikia change - @author macbre - MAIN-2304
    \Wikia\Logger\WikiaLogger::instance()->error('DBError', [
        'name' => $db->getDBname(),
        'cluster' => $wgDBcluster,
        'server' => $db->getServer(),
        'server_role' => $isMaster ? 'master' : 'slave',
        'errno' => $db->lastErrno(),
        'err' => $db->lastError(),
        'exception' => $this,
    ]);
}
/**
 * @param mixed $check
 * @param string|null $message
 * @return bool true if the check passes
 * @throws AssertionException if the check fails
 */
public static function true($check, $message = 'Assert::true failed') {
    if (!$check) {
        $exception = new AssertionException($message);
        WikiaLogger::instance()->error($message, ['exception' => $exception]);
        throw $exception;
    }
    return true;
}
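/*
 * Usage sketch (hypothetical call site, not from the original codebase): Assert::true()
 * either returns true or logs the failure via WikiaLogger and throws an AssertionException,
 * so callers typically only need a try/catch when they want to recover from the failure.
 */
try {
    Assert::true($title instanceof Title, 'Expected a Title object');
} catch (AssertionException $e) {
    // the failure has already been logged above; handle or rethrow as needed
}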
public function popInfoboxKeyValue(JsonFormatInfoboxKeyValueNode $keyValueNode) {
    if ($this->jsonStack[sizeof($this->jsonStack) - 1] === $keyValueNode->getValue()) {
        array_pop($this->jsonStack);
        $this->currentContainer = $this->jsonStack[sizeof($this->jsonStack) - 1];
    } else {
        # log invalid instead of failing
        \Wikia\Logger\WikiaLogger::instance()->debug('Invalid infobox');
    }
}
function efImageReviewDisplayStatus(ImagePage $imagePage, &$html) {
    global $wgCityId, $wgExternalDatawareDB, $wgUser;

    if (!$wgUser->isAllowed('imagereviewstats')) {
        return true;
    }

    if (!$imagePage->getTitle()->exists()) {
        return true;
    }

    $html .= Xml::element('h2', array(), wfMsg('imagereview-imagepage-header'));

    $reviews = array();
    $headers = array(
        wfMessage('imagereview-imagepage-table-header-reviewer')->text(),
        wfMessage('imagereview-imagepage-table-header-state')->text(),
        wfMessage('imagereview-imagepage-table-header-time')->text(),
    );

    $where = array(
        'wiki_id' => $wgCityId,
        'page_id' => $imagePage->getId(),
    );

    $dbr = wfGetDB(DB_SLAVE, array(), $wgExternalDatawareDB);
    $res = $dbr->select('image_review_stats', '*', $where);

    if ($dbr->numRows($res) == 0) {
        // check if image is in the queue at all!
        $imgCurState = $dbr->selectField('image_review', 'state', $where);

        if (false === $imgCurState) {
            /**
             * If the file is a local one and is older than 1 hour - send it to ImageReview
             * since it's probably been restored, and is not just a fresh file.
             */
            $lastTouched = new DateTime($imagePage->getRevisionFetched()->getTimestamp());
            $now = new DateTime();
            $file = $imagePage->getDisplayedFile();

            if ($file instanceof WikiaLocalFile && $lastTouched < $now->modify('-1 hour')) {
                $scribeEventProducer = new ScribeEventProducer('edit');
                $user = User::newFromName($file->getUser());

                if ($scribeEventProducer->buildEditPackage($imagePage, $user, null, null, $file)) {
                    $logParams = [
                        'cityId' => $wgCityId,
                        'pageId' => $imagePage->getID(),
                        'pageTitle' => $imagePage->getTitle()->getText(),
                        'uploadUser' => $user->getName(),
                    ];
                    \Wikia\Logger\WikiaLogger::instance()->info('ImageReviewLog', [
                        'message' => 'Image moved back to queue',
                        'params' => $logParams,
                    ]);
                    $scribeEventProducer->sendLog();
                }
            }

            // oh oh, image is not in queue at all
            $html .= wfMsg('imagereview-imagepage-not-in-queue');
        } else {
            // image is in the queue but not reviewed yet
            $html .= wfMsg('imagereview-state-0');
        }
    } else {
        // go through the list and display review states
        while ($row = $dbr->fetchObject($res)) {
            $data = array();
            $data[] = User::newFromId($row->reviewer_id)->getName();
            $data[] = wfMsg('imagereview-state-' . $row->review_state);
            $data[] = $row->review_end . ' (UTC)';
            $reviews[] = $data;
        }

        $html .= Xml::buildTable(
            $reviews,
            array('class' => 'wikitable filehistory sortable', 'style' => 'width: 60%'),
            $headers
        );
    }

    return true;
}
private function getC7Value() {
    global $wgCityId;

    $verticalName = HubService::getVerticalNameForComscore($wgCityId);
    if (!$verticalName) {
        \Wikia\Logger\WikiaLogger::instance()->error('Vertical not set for comscore', [
            'cityId' => $wgCityId,
            'exception' => new Exception(),
        ]);
        return false;
    } else {
        return 'wikiacsid_' . $verticalName;
    }
}
/**
 * Logs the results of all add-to-watchlist actions
 * @param \User $oUser A user's object
 * @param array $aWatchSuccess An array of successfully watched articles' titles
 * @param array $aWatchFail An array of articles' titles that failed to be watched
 */
private function logResults(\User $oUser, array $aWatchSuccess, array $aWatchFail) {
    global $wgSitename;

    $iFailures = count($aWatchFail);
    $sUserName = $oUser->getName();

    $aLogParams = [
        'failures' => $iFailures,
        'user_id' => $oUser->getId(),
        'user_name' => $sUserName,
        'user_lang' => $oUser->getGlobalPreference('language'),
        'watched' => $aWatchSuccess,
        'watched_failed' => $aWatchFail,
    ];

    if ($iFailures === 0) {
        WikiaLogger::instance()->info("AutoFollow log: User {$sUserName} added to watchlist at {$wgSitename}.", $aLogParams);
    } else {
        WikiaLogger::instance()->error("AutoFollow log: {$iFailures} failures occurred when adding {$sUserName} to watchlist at {$wgSitename}.", $aLogParams);
    }
}
private function sendNotification() {
    global $wgFlowerUrl;

    $subject = "ImageReview deletion failed #{$this->taskId}";
    $body = "{$wgFlowerUrl}/task/{$this->taskId}";

    $recipients = [
        new \MailAddress('*****@*****.**'),
        new \MailAddress('*****@*****.**'),
        new \MailAddress('*****@*****.**'),
    ];
    $from = $recipients[0];

    foreach ($recipients as $recipient) {
        \UserMailer::send($recipient, $from, $subject, $body);
    }

    WikiaLogger::instance()->error("ImageReviewLog", [
        'method' => __METHOD__,
        'message' => "Task #{$this->taskId} deleting images did not succeed. Please check.",
        'taskId' => $this->taskId,
        'taskUrl' => $body,
    ]);
}
/**
 * Produces a JSON response based on calls to the provided pages' WikiaSearchIndexer::getPageDefaultValues method.
 */
public function get() {
    $this->getResponse()->setFormat('json');

    $serviceName = 'Wikia\\Search\\IndexService\\' . $this->getVal('service', 'DefaultContent');
    $ids = explode('|', $this->getVal('ids', ''));

    if (class_exists($serviceName)) {
        $service = new $serviceName($ids);

        $ids = $this->getVal('ids');
        if (!empty($ids)) {
            $this->response->setData($service->getResponseForPageIds());
        }
    } else {
        \Wikia\Logger\WikiaLogger::instance()->error(
            'WikiaSearchIndexer invoked with bad service param.',
            ['serviceName' => $serviceName]
        );
    }
}
protected function requestParsoid($method, $title, $params) {
    global $wgVisualEditorParsoidURL, $wgVisualEditorParsoidTimeout, $wgVisualEditorParsoidForwardCookies;

    $url = $wgVisualEditorParsoidURL . '/' . urlencode($this->getApiSource()) . '/' . urlencode($title->getPrefixedDBkey());

    $data = array_merge($this->getProxyConf(), array(
        'method' => $method,
        'timeout' => $wgVisualEditorParsoidTimeout,
    ));

    if ($method === 'POST') {
        $data['postData'] = $params;
    } else {
        $url = wfAppendQuery($url, $params);
    }

    $req = MWHttpRequest::factory($url, $data);

    // Forward cookies, but only if configured to do so and if there are read restrictions
    if ($wgVisualEditorParsoidForwardCookies && !User::isEveryoneAllowed('read')) {
        $req->setHeader('Cookie', $this->getRequest()->getHeader('Cookie'));
    }

    $status = $req->execute();

    if ($status->isOK()) {
        // Pass thru performance data from Parsoid to the client, unless the response was
        // served directly from Varnish, in which case discard the value of the XPP header
        // and use it to declare the cache hit instead.
        $xCache = $req->getResponseHeader('X-Cache');
        if (is_string($xCache) && strpos(strtolower($xCache), 'hit') !== false) {
            $xpp = 'cached-response=true';
            $hit = true;
        } else {
            $xpp = $req->getResponseHeader('X-Parsoid-Performance');
            $hit = false;
        }

        WikiaLogger::instance()->debug('ApiVisualEditor', array(
            'hit' => $hit,
            'method' => $method,
            'url' => $url,
        ));

        if ($xpp !== null) {
            $resp = $this->getRequest()->response();
            $resp->header('X-Parsoid-Performance: ' . $xpp);
        }
    } elseif ($status->isGood()) {
        $this->dieUsage($req->getContent(), 'parsoidserver-http-' . $req->getStatus());
    } elseif ($errors = $status->getErrorsByType('error')) {
        $error = $errors[0];
        $code = $error['message'];
        if (count($error['params'])) {
            $message = $error['params'][0];
        } else {
            $message = 'MWHttpRequest error';
        }
        $this->dieUsage($message, 'parsoidserver-' . $code);
    }

    // TODO pass through X-Parsoid-Performance header, merge with getHTML above
    return $req->getContent();
}
public static function send($eventName, $pageId, $params = []) {
    global $wgCityId;

    $msg = new stdClass();
    $msg->cityId = $wgCityId;
    $msg->pageId = $pageId;
    $msg->args = new stdClass();

    foreach ($params as $param => $value) {
        $msg->args->{$param} = $value;
    }

    try {
        self::getPipeline()->publish(implode('.', [self::ARTICLE_MESSAGE_PREFIX, $eventName]), $msg);
    } catch (Exception $e) {
        \Wikia\Logger\WikiaLogger::instance()->error($e->getMessage());
    }
}
private static function parseEvent($articleId, $titleUrl, $task) {
    global $wgCityId;

    $logError = function (\Exception $e, $additionalData = []) {
        WikiaLogger::instance()->critical('NLP Processing exception', [
            'error' => $e->getMessage(),
            'additionalData' => $additionalData,
        ]);
        return null;
    };

    try {
        $taskList = new AsyncNLPTaskList();
        $taskList->taskType($task)
            ->add($articleId)
            ->wikiId($wgCityId)
            ->wikiUrl(preg_replace('/\\/wiki\\/.*$/', '', $titleUrl))
            ->setPriority(NlpPipelineQueue::NAME)
            ->queue();
    } catch (AMQPRuntimeException $e) {
        return $logError($e, ['city_id' => $wgCityId, 'article_id' => $articleId]);
    } catch (AMQPTimeoutException $e) {
        return $logError($e, ['city_id' => $wgCityId, 'article_id' => $articleId]);
    }
}
/**
 * Remove variable from WikiFactory (delete from city_variables_pool table)
 * @param array $varData
 * @return Status
 */
function removeFromVariablesPool($varData) {
    $log = WikiaLogger::instance();
    $dbw = WikiFactory::db(DB_MASTER);
    $dbw->begin();

    try {
        $dbw->delete("city_variables_pool", array("cv_id" => $varData['cv_id']), __METHOD__);
        $log->info("Remove variable from city_variables_pool table.", $varData);
        $dbw->commit();
        $status = Status::newGood();
    } catch (DBQueryError $e) {
        $log->error("Database error: Cannot remove variable from city_variables_pool table.", $varData);
        $dbw->rollback();
        $status = Status::newFatal("Database error: Cannot remove variable from city_variables_pool table (" . $e->getMessage() . ").");
    }

    return $status;
}
public function execute() {
    global $wgDevelEnvironment, $wgFlowerUrl;

    if ($wgDevelEnvironment) {
        \Wikia\Logger\WikiaLogger::instance()->setDevModeWithES();
    }

    \Wikia\Logger\WikiaLogger::instance()->pushContext(['task_id' => $this->mOptions['task_id']]);

    $runner = new TaskRunner(
        $this->mOptions['wiki_id'],
        $this->mOptions['task_id'],
        $this->mOptions['task_list'],
        $this->mOptions['call_order'],
        $this->mOptions['created_by']
    );

    ob_start();
    $runner->run();
    $result = $runner->format();

    if ($runner->runTime() > TaskRunner::TASK_NOTIFY_TIMEOUT) {
        Http::post("{$wgFlowerUrl}/api/task/status/{$this->mOptions['task_id']}", [
            'noProxy' => true,
            'postData' => json_encode([
                'kwargs' => [
                    'completed' => time(),
                    'state' => $result->status,
                    'result' => $result->status == 'success' ? $result->retval : $result->reason,
                ],
            ]),
        ]);
    }
    ob_end_clean();

    echo json_encode($result);
}
static function onRestInPeace() {
    if (empty(CeleryPurge::$buckets)) {
        return true;
    }

    // log purges using Kibana (BAC-1317)
    $context = ['urls' => CeleryPurge::$buckets];
    WikiaLogger::instance()->info('varnish.purge', $context);

    // Queue the tasks
    foreach (CeleryPurge::$buckets as $service => $urls) {
        if (empty($urls)) {
            continue;
        }

        (new AsyncCeleryTask())
            ->taskType('celery_workers.purger.purge')
            ->setArgs($urls, [], $service)
            ->setPriority(PurgeQueue::NAME)
            ->queue();
    }

    return true;
}
protected function warm() {
    global $wgHubRssFeeds;
    global $wgLanguageCode;

    foreach ($wgHubRssFeeds as $feedName) {
        echo "| Warming '{$feedName}' cache..." . PHP_EOL;

        $langExtFeedName = $feedName . $wgLanguageCode;
        $feed = BaseRssModel::newFromName($langExtFeedName);

        if ($feed instanceof BaseRssModel) {
            $time = time();
            $numRows = $feed->generateFeedData();
            echo "| Got " . $numRows . " new entries " . PHP_EOL;
            \Wikia\Logger\WikiaLogger::instance()->info(__CLASS__ . ' ' . $feedName . ' time (s): ' . (time() - $time));
        } else {
            echo "| Feed not found: " . $feedName . PHP_EOL;
        }
    }
}
/**
 * Enqueues a job based on a few simple preliminary checks.
 *
 * Called once an article has been saved.
 *
 * @param $articleObject Object The WikiPage object for the contribution.
 * @param $userObject Object The User object for the contribution.
 * @param $editContent String The contributed text.
 * @param $editSummary String The summary for the contribution.
 * @param $isMinorEdit Integer Indicates whether a contribution has been marked as a minor one.
 * @param $watchThis Null Not used as of MW 1.8
 * @param $sectionAnchor Null Not used as of MW 1.8
 * @param $editFlags Integer Bitmask flags for the edit.
 * @param $revisionObject Object The Revision object.
 * @param $statusObject Object The Status object returned by Article::doEdit().
 * @param $baseRevisionId Integer The ID of the revision the current edit is based on (or Boolean False).
 * @return Boolean True so the calling method would continue.
 * @see http://www.mediawiki.org/wiki/Manual:$wgHooks
 * @see http://www.mediawiki.org/wiki/Manual:Hooks/ArticleSaveComplete
 * @since MediaWiki 1.19.4
 * @internal
 */
public static function onArticleSaveComplete(&$articleObject, &$userObject, $editContent, $editSummary, $isMinorEdit, $watchThis, $sectionAnchor, &$editFlags, $revisionObject, $statusObject, $baseRevisionId) {
    global $wgHAWelcomeNotices, $wgCityId, $wgCommandLineMode, $wgMemc, $wgUser;

    // a null $revisionObject means we're dealing with a null edit (no content change),
    // so there is nobody to welcome
    if (is_null($revisionObject)) {
        WikiaLogger::instance()->error("error, null \$revisionObject passed to " . __METHOD__);
        return true;
    }

    // Ignore revisions created in the command-line mode. Otherwise this job could
    // invoke HAWelcome::onRevisionInsertComplete() too, which may cause an infinite loop
    // and serious performance problems.
    if ($wgCommandLineMode) {
        return true;
    }

    $dispatcher = new HAWelcomeTaskHookDispatcher();
    $dispatcher->setRevisionObject($revisionObject)
        ->setCityId($wgCityId)
        ->setMemcacheClient($wgMemc)
        ->setCurrentUser($wgUser);

    return $dispatcher->dispatch();
}
/**
 * create a UrlGenerator from a config map. $config must have the following keys: relative-path.
 * optionally, it can also have timestamp, is-archive, path-prefix, bucket, base-url, and domain-shard-count.
 * if the optional values aren't in the map, they'll be generated from the current wiki environment
 *
 * @param $config
 * @return UrlGenerator
 * @throws InvalidArgumentException
 */
public static function fromConfigMap($config) {
    $replaceThumbnail = false;
    $requiredKeys = ['relative-path'];

    $isArchive = isset($config['is-archive']) ? $config['is-archive'] : false;
    $pathPrefix = isset($config['path-prefix']) ? $config['path-prefix'] : null;
    $timestamp = isset($config['timestamp']) ? $config['timestamp'] : 0;

    if (isset($config['replace'])) {
        $replaceThumbnail = $config['replace'];
    } else {
        global $wgVignetteReplaceThumbnails;
        if ($wgVignetteReplaceThumbnails || !empty($_GET['vignetteReplaceThumbnails']) && (bool) $_GET['vignetteReplaceThumbnails']) {
            $replaceThumbnail = true;
        }
    }

    if (!isset($config['base-url'])) {
        global $wgVignetteUrl;
        $config['base-url'] = $wgVignetteUrl;
    }

    if (!isset($config['bucket'])) {
        /**
         * get the top level bucket for a given wiki. this may or may not be the same as $wgDBName. This is done via
         * regular expression because there is no variable that contains the bucket name :(
         */
        global $wgUploadPath;
        $config['bucket'] = self::parseBucket($wgUploadPath);
    }

    if (!isset($config['domain-shard-count'])) {
        global $wgImagesServers;
        $config['domain-shard-count'] = $wgImagesServers;
    }

    foreach ($requiredKeys as $key) {
        if (!isset($config[$key])) {
            \Wikia\Logger\WikiaLogger::instance()->error("missing key", array_merge($config, ['missing_key' => $key]));
            throw new InvalidArgumentException("missing key '{$key}'");
        }
    }

    $config = (new UrlConfig())
        ->setIsArchive($isArchive)
        ->setReplaceThumbnail($replaceThumbnail)
        ->setRelativePath($config['relative-path'])
        ->setPathPrefix($pathPrefix)
        ->setBucket($config['bucket'])
        ->setBaseUrl($config['base-url'])
        ->setDomainShardCount($config['domain-shard-count']);

    if ($timestamp > 0) {
        $config->setTimestamp($timestamp);
    }

    return new UrlGenerator($config);
}
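/*
 * Usage sketch (hypothetical values, not from the original codebase): only 'relative-path'
 * is required; every other key falls back to the current wiki environment as described above.
 */
$generator = UrlGenerator::fromConfigMap([
    'relative-path' => 'a/ab/Example.jpg',
    'timestamp' => 1400000000,
]);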
public static function requeueConnectionResetWarning($errno, $errstr = null, $errfile = null, $errline = null, $errcontext = null) {
    if (strpos($errstr, 'Connection reset by peer') !== false) {
        if (isset($errcontext['retryAttempt']) && $errcontext['retryAttempt'] !== false) {
            WikiaLogger::instance()->error('Retrying sending ExactTarget request failed', [
                'cause_error_message' => $errstr,
            ]);
        } else {
            $location = isset($errcontext['location']) ? $errcontext['location'] : null;
            $saction = isset($errcontext['saction']) ? $errcontext['saction'] : null;
            $version = isset($errcontext['version']) ? $errcontext['version'] : null;
            $one_way = isset($errcontext['one_way']) ? $errcontext['one_way'] : null;
            $request = isset($errcontext['request']) ? $errcontext['request'] : null;

            /* Requeue request */
            $task = new ExactTargetRedoSoapRequestTask();
            $task->call('redoSoapRequestTask', $request, $location, $saction, $version, $one_way);
            $task->queue();
        }
    }

    /* Use default WikiaLogger to store default logs as well */
    WikiaLogger::instance()->onError($errno, $errstr, $errfile, $errline, $errcontext);
}
/**
 * Logs the data sent by the Evergreens extension for Google Chrome
 *
 * In addition, the data are labelled with a hash which is then returned in the response as JSON.
 */
public function log() {
    wfProfileIn(__METHOD__);

    if ($this->request->wasPosted()) {
        $sHeaderInfo = $this->request->getVal('headerInfo', null);

        // preliminary validation of the input data
        if ($sHeaderInfo && is_string($sHeaderInfo)) {
            // generate a hash
            $sHash = sha1($sHeaderInfo);

            // decode, validate and log the data
            $oHeaderInfo = json_decode($sHeaderInfo);
            if (is_array($oHeaderInfo->response[0]->responseHeaders) && is_array($oHeaderInfo->request[0]->requestHeaders)) {
                \Wikia\Logger\WikiaLogger::instance()->info('Evergreens: stale page cache detected', [
                    'hash' => $sHash,
                    'request' => $oHeaderInfo->request[0]->requestHeaders,
                    'response' => $oHeaderInfo->response[0]->responseHeaders,
                ]);
            }

            // add the hash to the response
            $this->response->setVal('hash', $sHash);
        }
    }

    $this->response->setFormat('json');

    wfProfileOut(__METHOD__);
}