/**
 * Run the update: increment the hit counter for this page.
 *
 * With a low update frequency (or on SQLite, which locks the whole DB per
 * write) the page row is bumped directly once the current transaction is
 * idle. Otherwise a row is appended to the non-transactional `hitcounter`
 * table and periodically folded back into `page.page_counter` via collect().
 */
public function doUpdate() {
	global $wgHitcounterUpdateFreq;

	$dbw = wfGetDB(DB_MASTER);

	if ($wgHitcounterUpdateFreq <= 1 || $dbw->getType() == 'sqlite') {
		$pageId = $this->id;
		$fname = __METHOD__;
		$dbw->onTransactionIdle(function () use($dbw, $pageId, $fname) {
			try {
				$dbw->update(
					'page',
					array('page_counter = page_counter + 1'),
					array('page_id' => $pageId),
					$fname
				);
			} catch (DBError $e) {
				MWExceptionHandler::logException($e);
			}
		});
		return;
	}

	# Not important enough to warrant an error page in case of failure
	try {
		// Since `hitcounter` is non-transactional, the contention is minimal
		$dbw->insert('hitcounter', array('hc_id' => $this->id), __METHOD__);
		// Roughly every $wgHitcounterUpdateFreq/25 + 1 hits, fold the
		// accumulated rows back into the page table
		$checkfreq = intval($wgHitcounterUpdateFreq / 25 + 1);
		if (rand() % $checkfreq == 0 && $dbw->lastErrno() == 0) {
			$this->collect();
		}
	} catch (DBError $e) {
		MWExceptionHandler::logException($e);
	}
}
/**
 * Fetch the revision history of a topic, newest first, with child-reply
 * ids attached to each post row.
 *
 * @param UUID $postId
 * @param int $limit
 * @param UUID|null $offset
 * @param string $direction 'rev' or 'fwd'
 * @return FormatterRow[]
 */
public function getResults(UUID $postId, $limit = 50, UUID $offset = null, $direction = 'fwd') {
	$found = $this->storage->find(
		'TopicHistoryEntry',
		array('topic_root_id' => $postId),
		array(
			'sort' => 'rev_id',
			'order' => 'DESC',
			'limit' => $limit,
			'offset-id' => $offset,
			'offset-dir' => $direction,
			'offset-include' => false,
			'offset-elastic' => false,
		)
	);
	if (!$found) {
		return array();
	}
	$this->loadMetadataBatch($found);

	$rows = array();
	$childMap = array();
	foreach ($found as $revision) {
		try {
			if ($this->excludeFromHistory($revision)) {
				continue;
			}
			$row = new TopicRow();
			// Append before buildResult() so behavior on FlowException matches
			// the previous implementation (row stays in the result set)
			$rows[] = $row;
			$this->buildResult($revision, null, $row);
			if ($revision instanceof PostRevision) {
				$parentId = $revision->getReplyToId();
				if ($parentId) {
					// Keying by the child id (rather than appending values)
					// prevents duplicate insertion
					$childMap[$parentId->getAlphadecimal()][$revision->getPostId()->getAlphadecimal()] = true;
				}
			}
		} catch (FlowException $e) {
			\MWExceptionHandler::logException($e);
		}
	}

	// Second pass: attach the collected reply ids to each post row
	foreach ($rows as $row) {
		if ($row->revision instanceof PostRevision) {
			$key = $row->revision->getPostId()->getAlphadecimal();
			$row->replies = isset($childMap[$key]) ? array_keys($childMap[$key]) : array();
		}
	}

	return $rows;
}
/**
 * Get contents of a javascript file for inline use.
 *
 * Roughly based MediaWiki core methods:
 * - ResourceLoader::filter()
 * - ResourceLoaderFileModule::readScriptFiles()
 *
 * @param string $name Path to file relative to /modules/inline/
 * @return string Minified script
 * @throws Exception If file doesn't exist
 */
protected static function getInlineScript($name) {
	// Resolve the file under /modules/inline/
	$filePath = __DIR__ . '/../../modules/inline/' . $name;
	if (!file_exists($filePath)) {
		throw new Exception(__METHOD__ . ": file not found: \"{$filePath}\"");
	}
	$contents = file_get_contents($filePath);

	// Serve a previously minified copy when one is cached; the cache key is
	// content-addressed so a change to the file invalidates it automatically
	$key = wfMemcKey('centralauth', 'minify-js', md5($contents));
	$cache = wfGetCache(CACHE_ANYTHING);
	$cached = $cache->get($key);
	if (is_string($cached)) {
		return $cached;
	}

	// Minify now; on failure, log and fall back to an error comment
	// followed by the unminified source so the page still works
	$result = '';
	try {
		$result = JavaScriptMinifier::minify($contents) . "\n/* cache key: {$key} */";
		$cache->set($key, $result);
	} catch (Exception $e) {
		MWExceptionHandler::logException($e);
		wfDebugLog('CentralAuth', __METHOD__ . ": minification failed for {$name}: {$e}");
		$result = ResourceLoader::formatException($e) . "\n" . $contents;
	}

	return $result;
}
/**
 * All queries must be against the same index. Results are equivalent to
 * array_map, maintaining order and key relationship between input $queries
 * and $result.
 *
 * Falls back to querying primary storage directly when no feature index
 * covers the requested keys.
 *
 * @param array $queries
 * @param array $options
 * @return array|null null is query failure. empty array is no result. array is success
 */
public function findMulti(array $queries, array $options = array()) {
	if (!$queries) {
		return array();
	}
	$keys = array_keys(reset($queries));
	if (isset($options['sort']) && !is_array($options['sort'])) {
		$options['sort'] = ObjectManager::makeArray($options['sort']);
	}
	try {
		$index = $this->getIndexFor($keys, $options);
		$res = $index->findMulti($queries, $options);
	} catch (NoIndexException $e) {
		// in_array() rather than array_search(): array_search() returns the
		// matching key, which is falsy (0) when the value is the first
		// element, so the old check silently skipped logging in that case.
		if (in_array('topic_root_id', $keys, true)) {
			wfDebugLog('Flow', __METHOD__ . ': ' . json_encode($keys) . ' : ' . json_encode($options) . ' : ' . json_encode(array_map('get_class', $this->indexes)));
			\MWExceptionHandler::logException($e);
		} else {
			wfDebugLog('FlowDebug', __METHOD__ . ': ' . $e->getMessage());
		}
		// No usable index: go straight to backing storage
		$res = $this->storage->findMulti($queries, $this->convertToDbOptions($options));
	}
	if ($res === null) {
		return null;
	}
	$output = array();
	// Renamed loop key from $index to avoid shadowing the index object above
	foreach ($res as $queryKey => $queryOutput) {
		foreach ($queryOutput as $k => $v) {
			if ($v) {
				// Hydrate each raw row into a model object
				$output[$queryKey][$k] = $this->load($v);
			}
		}
	}
	return $output;
}
/**
 * Build formatter rows for a batch of topics: title + summary + watch
 * status per topic, with each post's reply ids attached.
 *
 * @param UUID[]|TopicListEntry[] $topicIdsOrEntries
 * @return FormatterRow[]
 */
public function getResults(array $topicIdsOrEntries) {
	$topicIds = $this->getTopicIds($topicIdsOrEntries);
	$allPostIds = $this->collectPostIds($topicIds);
	$topicSummary = $this->collectSummary($topicIds);
	$posts = $this->collectRevisions($allPostIds);
	$watchStatus = $this->collectWatchStatus($topicIds);

	// Substitute placeholder posts for any ids the revision lookup missed
	$missing = array_diff(array_keys($allPostIds), array_keys($posts));
	if ($missing) {
		$needed = array();
		foreach ($missing as $alpha) {
			// convert alpha back into UUID object
			$needed[] = $allPostIds[$alpha];
		}
		$posts += $this->createFakePosts($needed);
	}

	$this->loadMetadataBatch($posts);
	$results = array();
	$replies = array();
	foreach ($posts as $post) {
		try {
			if (!$this->permissions->isAllowed($post, 'view')) {
				continue;
			}
			$row = new TopicRow();
			$this->buildResult($post, null, $row);
			/** @var PostRevision $revision */
			$revision = $row->revision;
			$replyToId = $revision->getReplyToId();
			$replyToId = $replyToId ? $replyToId->getAlphadecimal() : null;
			$postId = $revision->getPostId()->getAlphadecimal();
			if ($replyToId !== null) {
				// Accumulate ALL children per parent, keyed by child id to
				// avoid duplicates. The previous scalar assignment
				// ($replies[$replyToId] = $postId) kept only the last child
				// seen and later yielded a string where other branches (and
				// the sibling history query) produce an array.
				$replies[$replyToId][$postId] = true;
			}
			if ($post->isTopicTitle()) {
				// Attach the summary
				if (isset($topicSummary[$postId])) {
					$row->summary = $this->buildResult($topicSummary[$postId], 'rev_id');
				}
				// Attach the watch status
				if (isset($watchStatus[$postId]) && $watchStatus[$postId]) {
					$row->isWatched = true;
				}
			}
			$results[] = $row;
		} catch (FlowException $e) {
			\MWExceptionHandler::logException($e);
		}
	}

	// Second pass: attach reply ids (always an array now, consistent with
	// the empty-case default below)
	foreach ($results as $result) {
		$alpha = $result->revision->getPostId()->getAlphadecimal();
		$result->replies = isset($replies[$alpha]) ? array_keys($replies[$alpha]) : array();
	}

	return $results;
}
/**
 * Publish a previously stashed upload to its final location.
 *
 * Runs in the job queue with the uploader's session imported, so stash
 * lookups and the upload itself happen as that user. Progress/outcome is
 * written back via UploadBase::setSessionStatus() so the polling client
 * can follow along ('Poll' -> 'Success' / 'Failure').
 *
 * @return bool Success; false is accompanied by setLastError()
 */
public function run() {
	/** @noinspection PhpUnusedLocalVariableInspection */
	// Re-attach the uploader's session for the duration of this job
	$scope = RequestContext::importScopedSession($this->params['session']);
	$context = RequestContext::getMain();
	$user = $context->getUser();
	try {
		if (!$user->isLoggedIn()) {
			$this->setLastError("Could not load the author user from session.");
			return false;
		}
		// Tell the polling client the 'publish' stage is in progress
		UploadBase::setSessionStatus($user, $this->params['filekey'], array('result' => 'Poll', 'stage' => 'publish', 'status' => Status::newGood()));
		$upload = new UploadFromStash($user);
		// @todo initialize() causes a GET, ideally we could frontload the antivirus
		// checks and anything else to the stash stage (which includes concatenation and
		// the local file is thus already there). That way, instead of GET+PUT, there could
		// just be a COPY operation from the stash to the public zone.
		$upload->initialize($this->params['filekey'], $this->params['filename']);
		// Check if the local file checks out (this is generally a no-op)
		$verification = $upload->verifyUpload();
		if ($verification['status'] !== UploadBase::OK) {
			$status = Status::newFatal('verification-error');
			// Attach the verification details for the API response
			$status->value = array('verification' => $verification);
			UploadBase::setSessionStatus($user, $this->params['filekey'], array('result' => 'Failure', 'stage' => 'publish', 'status' => $status));
			$this->setLastError("Could not verify upload.");
			return false;
		}
		// Upload the stashed file to a permanent location
		$status = $upload->performUpload($this->params['comment'], $this->params['text'], $this->params['watch'], $user);
		if (!$status->isGood()) {
			UploadBase::setSessionStatus($user, $this->params['filekey'], array('result' => 'Failure', 'stage' => 'publish', 'status' => $status));
			$this->setLastError($status->getWikiText());
			return false;
		}
		// Build the image info array while we have the local reference handy
		$apiMain = new ApiMain(); // dummy object (XXX)
		$imageInfo = $upload->getImageInfo($apiMain->getResult());
		// Cleanup any temporary local file
		$upload->cleanupTempFile();
		// Cache the info so the user doesn't have to wait forever to get the final info
		UploadBase::setSessionStatus($user, $this->params['filekey'], array('result' => 'Success', 'stage' => 'publish', 'filename' => $upload->getLocalFile()->getName(), 'imageinfo' => $imageInfo, 'status' => Status::newGood()));
	} catch (Exception $e) {
		// Any unexpected failure: report it to the polling client and job log
		UploadBase::setSessionStatus($user, $this->params['filekey'], array('result' => 'Failure', 'stage' => 'publish', 'status' => Status::newFatal('api-error-publishfailed')));
		$this->setLastError(get_class($e) . ": " . $e->getMessage());
		// To prevent potential database referential integrity issues.
		// See bug 32551.
		MWExceptionHandler::rollbackMasterChangesAndLog($e);
		return false;
	}
	return true;
}
/**
 * Assemble a chunked upload's pieces into a single stashed file.
 *
 * Runs in the job queue under the uploader's imported session; writes
 * progress/outcome back via UploadBase::setSessionStatus() for the
 * polling client. On success the status carries the NEW filekey of the
 * fully concatenated file.
 *
 * NOTE(review): only MWException is caught here — a non-MW Exception
 * would escape this handler. Confirm against the job runner's own
 * catch-all before relying on that.
 *
 * @return bool Success; false is accompanied by setLastError()
 */
public function run() {
	// Re-attach the uploader's session for the duration of this job
	$scope = RequestContext::importScopedSession($this->params['session']);
	$context = RequestContext::getMain();
	try {
		$user = $context->getUser();
		if (!$user->isLoggedIn()) {
			$this->setLastError("Could not load the author user from session.");
			return false;
		}
		if (count($_SESSION) === 0) {
			// Empty session probably indicates that we didn't associate
			// with the session correctly. Note that being able to load
			// the user does not necessarily mean the session was loaded.
			// Most likely cause by suhosin.session.encrypt = On.
			$this->setLastError("Error associating with user session. " . "Try setting suhosin.session.encrypt = Off");
			return false;
		}
		// Tell the polling client the 'assembling' stage is in progress
		UploadBase::setSessionStatus($this->params['filekey'], array('result' => 'Poll', 'stage' => 'assembling', 'status' => Status::newGood()));
		$upload = new UploadFromChunks($user);
		$upload->continueChunks($this->params['filename'], $this->params['filekey'], $context->getRequest());
		// Combine all of the chunks into a local file and upload that to a new stash file
		$status = $upload->concatenateChunks();
		if (!$status->isGood()) {
			UploadBase::setSessionStatus($this->params['filekey'], array('result' => 'Failure', 'stage' => 'assembling', 'status' => $status));
			$this->setLastError($status->getWikiText());
			return false;
		}
		// We have a new filekey for the fully concatenated file
		$newFileKey = $upload->getLocalFile()->getFileKey();
		// Remove the old stash file row and first chunk file
		$upload->stash->removeFileNoAuth($this->params['filekey']);
		// Build the image info array while we have the local reference handy
		$apiMain = new ApiMain(); // dummy object (XXX)
		$imageInfo = $upload->getImageInfo($apiMain->getResult());
		// Cleanup any temporary local file
		$upload->cleanupTempFile();
		// Cache the info so the user doesn't have to wait forever to get the final info
		UploadBase::setSessionStatus($this->params['filekey'], array('result' => 'Success', 'stage' => 'assembling', 'filekey' => $newFileKey, 'imageinfo' => $imageInfo, 'status' => Status::newGood()));
	} catch (MWException $e) {
		// Report the failure to the polling client and the job log
		UploadBase::setSessionStatus($this->params['filekey'], array('result' => 'Failure', 'stage' => 'assembling', 'status' => Status::newFatal('api-error-stashfailed')));
		$this->setLastError(get_class($e) . ": " . $e->getText());
		// To be extra robust.
		MWExceptionHandler::rollbackMasterChangesAndLog($e);
		return false;
	}
	return true;
}
/**
 * Creates a flow board.
 * Archives any pre-existing wikitext talk page.
 *
 * Two paths: an existing (non-LiquidThreads) page is converted via the
 * wikitext Converter; a nonexistent page gets a fresh board created with
 * the supplied header, provided board creation is allowed.
 *
 * @param array $data Form data
 * @return Status Status indicating result
 */
public function onSubmit(array $data) {
	$page = $data['page'];
	$title = Title::newFromText($page);
	if (!$title) {
		return Status::newFatal('flow-special-enableflow-invalid-title', $page);
	}
	// Canonicalize so the error or confirmation message looks nicer (no underscores).
	$page = $title->getPrefixedText();
	if ($this->occupationController->isTalkpageOccupied($title, true)) {
		// Already a Flow board; nothing to do
		return Status::newFatal('flow-special-enableflow-board-already-exists', $page);
	}
	$status = Status::newGood();
	if ($title->exists(Title::GAID_FOR_UPDATE)) {
		// LiquidThreads pages need a different migration path entirely
		if (class_exists('LqtDispatch') && \LqtDispatch::isLqtPage($title)) {
			return Status::newFatal('flow-special-enableflow-page-is-liquidthreads', $page);
		}
		$logger = Container::get('default_logger');
		// Convert the existing wikitext talk page into a Flow board,
		// archiving the old content per the conversion strategy
		$converter = new Converter(wfGetDB(DB_MASTER), Container::get('importer'), $logger, $this->occupationController->getTalkpageManager(), new EnableFlowWikitextConversionStrategy(Container::get('parser'), new NullImportSourceStore(), $logger, array(), $data['header']));
		try {
			$converter->convert($title);
		} catch (\Exception $e) {
			// Surface the failure in the form result rather than a fatal page
			\MWExceptionHandler::logException($e);
			$status->fatal('flow-error-external', $e->getMessage());
		}
	} else {
		// Page doesn't exist: create a brand-new board
		$allowCreationStatus = $this->occupationController->allowCreation($title, $this->getUser(), false);
		if (!$allowCreationStatus->isGood()) {
			return Status::newFatal('flow-special-enableflow-board-creation-not-allowed', $page);
		}
		$loader = $this->loaderFactory->createWorkflowLoader($title);
		$blocks = $loader->getBlocks();
		// Seed the new board with the user-provided header (wikitext)
		$action = 'edit-header';
		$params = array('header' => array('content' => $data['header'], 'format' => 'wikitext'));
		$blocksToCommit = $loader->handleSubmit($this->getContext(), $action, $params);
		// Collect any per-block validation errors into the returned status
		foreach ($blocks as $block) {
			if ($block->hasErrors()) {
				$errors = $block->getErrors();
				foreach ($errors as $errorKey) {
					$status->fatal($block->getErrorMessage($errorKey));
				}
			}
		}
		$loader->commit($blocksToCommit);
	}
	// Remember the raw (non-canonicalized) page name for the confirmation view
	$this->page = $data['page'];
	return $status;
}
/**
 * Assemble a chunked upload's pieces into a single stashed file
 * (newer variant: teardown-managed session scope, warning collection).
 *
 * Runs in the job queue under the uploader's imported session; writes
 * progress/outcome back via UploadBase::setSessionStatus() so the
 * polling client can follow along. On success the status carries the
 * NEW filekey of the concatenated file plus any upload warnings.
 *
 * @return bool Success; false is accompanied by setLastError()
 */
public function run() {
	// Re-attach the uploader's session; released via teardown callback
	$scope = RequestContext::importScopedSession($this->params['session']);
	$this->addTeardownCallback(function () use(&$scope) {
		ScopedCallback::consume($scope); // T126450
	});
	$context = RequestContext::getMain();
	$user = $context->getUser();
	try {
		if (!$user->isLoggedIn()) {
			$this->setLastError("Could not load the author user from session.");
			return false;
		}
		// Tell the polling client the 'assembling' stage is in progress
		UploadBase::setSessionStatus($user, $this->params['filekey'], ['result' => 'Poll', 'stage' => 'assembling', 'status' => Status::newGood()]);
		$upload = new UploadFromChunks($user);
		// 'null' upload field: the chunks are already stashed, there is no
		// file in this request
		$upload->continueChunks($this->params['filename'], $this->params['filekey'], new WebRequestUpload($context->getRequest(), 'null'));
		// Combine all of the chunks into a local file and upload that to a new stash file
		$status = $upload->concatenateChunks();
		if (!$status->isGood()) {
			UploadBase::setSessionStatus($user, $this->params['filekey'], ['result' => 'Failure', 'stage' => 'assembling', 'status' => $status]);
			$this->setLastError($status->getWikiText(false, false, 'en'));
			return false;
		}
		// We can only get warnings like 'duplicate' after concatenating the chunks
		$status = Status::newGood();
		$status->value = ['warnings' => $upload->checkWarnings()];
		// We have a new filekey for the fully concatenated file
		$newFileKey = $upload->getStashFile()->getFileKey();
		// Remove the old stash file row and first chunk file
		$upload->stash->removeFileNoAuth($this->params['filekey']);
		// Build the image info array while we have the local reference handy
		$apiMain = new ApiMain(); // dummy object (XXX)
		$imageInfo = $upload->getImageInfo($apiMain->getResult());
		// Cleanup any temporary local file
		$upload->cleanupTempFile();
		// Cache the info so the user doesn't have to wait forever to get the final info
		UploadBase::setSessionStatus($user, $this->params['filekey'], ['result' => 'Success', 'stage' => 'assembling', 'filekey' => $newFileKey, 'imageinfo' => $imageInfo, 'status' => $status]);
	} catch (Exception $e) {
		// Report the failure to the polling client and the job log
		UploadBase::setSessionStatus($user, $this->params['filekey'], ['result' => 'Failure', 'stage' => 'assembling', 'status' => Status::newFatal('api-error-stashfailed')]);
		$this->setLastError(get_class($e) . ": " . $e->getMessage());
		// To be extra robust.
		MWExceptionHandler::rollbackMasterChangesAndLog($e);
		return false;
	}
	return true;
}
/**
 * Fetch the revision history of a single post, newest first.
 *
 * @param UUID $postId
 * @param int $limit
 * @param UUID|null $offset
 * @param string $direction 'rev' or 'fwd'
 * @return FormatterRow[]
 */
public function getResults(UUID $postId, $limit = 50, UUID $offset = null, $direction = 'fwd') {
	$revisions = $this->storage->find(
		'PostRevision',
		array('rev_type_id' => $postId),
		array(
			'sort' => 'rev_id',
			'order' => 'DESC',
			'limit' => $limit,
			'offset-id' => $offset,
			'offset-dir' => $direction,
			'offset-include' => false,
			'offset-elastic' => false,
		)
	);
	if (!$revisions) {
		return array();
	}
	$this->loadMetadataBatch($revisions);

	$rows = array();
	foreach ($revisions as $revision) {
		try {
			$row = new FormatterRow();
			// Append before buildResult() so a FlowException leaves the row
			// in the result set, matching the previous behavior
			$rows[] = $row;
			$this->buildResult($revision, null, $row);
		} catch (FlowException $e) {
			\MWExceptionHandler::logException($e);
		}
	}

	return $rows;
}
/**
 * @covers MWExceptionHandler::getRedactedTrace
 */
public function testGetRedactedTrace() {
	$refvar = 'value';
	// Initialize so the post-catch guard can detect a helper that stopped
	// throwing; previously $e was used while possibly undefined, which
	// would fatal the test run instead of failing the assertion.
	$e = null;
	try {
		$array = array('a', 'b');
		$object = new stdClass();
		self::helperThrowAnException($array, $object, $refvar);
	} catch (Exception $e) {
	}
	$this->assertInstanceOf('Exception', $e, 'helperThrowAnException() must throw an exception for this test to be meaningful');
	# Make sure our stack trace contains an array and an object passed to
	# some function in the stacktrace. Else, we can not assert the trace
	# redaction achieved its job.
	$trace = $e->getTrace();
	$hasObject = false;
	$hasArray = false;
	foreach ($trace as $frame) {
		if (!isset($frame['args'])) {
			continue;
		}
		foreach ($frame['args'] as $arg) {
			$hasObject = $hasObject || is_object($arg);
			$hasArray = $hasArray || is_array($arg);
		}
		if ($hasObject && $hasArray) {
			break;
		}
	}
	$this->assertTrue($hasObject, "The stacktrace must have a function having an object has parameter");
	$this->assertTrue($hasArray, "The stacktrace must have a function having an array has parameter");
	# Now we redact the trace.. and make sure no function arguments are
	# arrays or objects.
	$redacted = MWExceptionHandler::getRedactedTrace($e);
	foreach ($redacted as $frame) {
		if (!isset($frame['args'])) {
			continue;
		}
		foreach ($frame['args'] as $arg) {
			$this->assertNotInternalType('array', $arg);
			$this->assertNotInternalType('object', $arg);
		}
	}
	$this->assertEquals('value', $refvar, 'Ensuring reference variable wasn\'t changed');
}
/**
 * Look for a fatal error as the cause of the request termination and log
 * as an exception.
 *
 * Special handling is included for missing class errors as they may
 * indicate that the user needs to install 3rd-party libraries via
 * Composer or other means.
 *
 * @since 1.25
 */
public static function handleFatalError() {
	// Release the reserved memory block so there is headroom to process
	// the error (important for OOM fatals)
	self::$reservedMemory = null;
	$lastError = error_get_last();
	if ($lastError && isset($lastError['type']) && in_array($lastError['type'], self::$fatalErrorTypes)) {
		$msg = "Fatal Error: {$lastError['message']}";
		// HHVM: Class undefined: foo
		// PHP5: Class 'foo' not found
		if (preg_match("/Class (undefined: \\w+|'\\w+' not found)/", $lastError['message'])) {
			// @codingStandardsIgnoreStart Generic.Files.LineLength.TooLong
			$msg = <<<TXT
{$msg}

MediaWiki or an installed extension requires this class but it is not embedded directly in MediaWiki's git repository and must be installed separately by the end user.

Please see <a href="https://www.mediawiki.org/wiki/Download_from_Git#Fetch_external_libraries">mediawiki.org</a> for help on installing the required components.
TXT;
			// @codingStandardsIgnoreEnd
		}
		// Wrap the error in an ErrorException so the standard exception
		// logging path applies
		$e = new ErrorException($msg, 0, $lastError['type']);
		self::logError($e, 'fatal');
	}
}
/**
 * Generate code for a response.
 *
 * Builds the concatenated script/style payload for the requested modules,
 * recording per-module 'missing'/'error' states and minifying the result
 * unless debug mode is on.
 *
 * @param ResourceLoaderContext $context Context in which to generate a response
 * @param array $modules List of module objects keyed by module name
 * @param array $missing List of requested module names that are unregistered (optional)
 * @return string Response data
 */
public function makeModuleResponse(ResourceLoaderContext $context, array $modules, array $missing = array()) {
	$out = '';
	$states = array();
	// Nothing requested at all: emit an explanatory comment instead of code
	if (!count($modules) && !count($missing)) {
		return <<<MESSAGE
/* This file is the Web entry point for MediaWiki's ResourceLoader:
   <https://www.mediawiki.org/wiki/ResourceLoader>. In this request,
   no modules were requested. Max made me put this here. */
MESSAGE;
	}
	// Image requests short-circuit the whole module pipeline
	$image = $context->getImageObj();
	if ($image) {
		$data = $image->getImageData($context);
		if ($data === false) {
			$data = '';
			$this->errors[] = 'Image generation failed';
		}
		return $data;
	}
	// Pre-fetch blobs
	if ($context->shouldIncludeMessages()) {
		try {
			$this->blobStore->get($this, $modules, $context->getLanguage());
		} catch (Exception $e) {
			// Prefetch failure is non-fatal; modules fetch individually later
			MWExceptionHandler::logException($e);
			$this->logger->warning('Prefetching MessageBlobStore failed: {exception}', array('exception' => $e));
			$this->errors[] = self::formatExceptionNoComment($e);
		}
	}
	foreach ($missing as $name) {
		$states[$name] = 'missing';
	}
	// Generate output
	$isRaw = false;
	foreach ($modules as $name => $module) {
		try {
			$content = $module->getModuleContent($context);
			// Append output
			switch ($context->getOnly()) {
				case 'scripts':
					$scripts = $content['scripts'];
					if (is_string($scripts)) {
						// Load scripts raw...
						$out .= $scripts;
					} elseif (is_array($scripts)) {
						// ...except when $scripts is an array of URLs
						$out .= self::makeLoaderImplementScript($name, $scripts, array(), array());
					}
					break;
				case 'styles':
					$styles = $content['styles'];
					// We no longer separate into media, they are all combined now with
					// custom media type groups into @media .. {} sections as part of the css string.
					// Module returns either an empty array or a numerical array with css strings.
					$out .= isset($styles['css']) ? implode('', $styles['css']) : '';
					break;
				default:
					$out .= self::makeLoaderImplementScript($name, isset($content['scripts']) ? $content['scripts'] : '', isset($content['styles']) ? $content['styles'] : array(), isset($content['messagesBlob']) ? new XmlJsCode($content['messagesBlob']) : array(), isset($content['templates']) ? $content['templates'] : array());
					break;
			}
		} catch (Exception $e) {
			MWExceptionHandler::logException($e);
			$this->logger->warning('Generating module package failed: {exception}', array('exception' => $e));
			$this->errors[] = self::formatExceptionNoComment($e);
			// Respond to client with error-state instead of module implementation
			$states[$name] = 'error';
			unset($modules[$name]);
		}
		$isRaw |= $module->isRaw();
	}
	// Update module states
	if ($context->shouldIncludeScripts() && !$context->getRaw() && !$isRaw) {
		if (count($modules) && $context->getOnly() === 'scripts') {
			// Set the state of modules loaded as only scripts to ready as
			// they don't have an mw.loader.implement wrapper that sets the state
			foreach ($modules as $name => $module) {
				$states[$name] = 'ready';
			}
		}
		// Set the state of modules we didn't respond to with mw.loader.implement
		if (count($states)) {
			$out .= self::makeLoaderStateScript($states);
		}
	} else {
		if (count($states)) {
			$this->errors[] = 'Problematic modules: ' . FormatJson::encode($states, ResourceLoader::inDebugMode());
		}
	}
	$enableFilterCache = true;
	if (count($modules) === 1 && reset($modules) instanceof ResourceLoaderUserTokensModule) {
		// If we're building the embedded user.tokens, don't cache (T84960)
		$enableFilterCache = false;
	}
	// Minify unless debugging; CSS and JS use different filters
	if (!$context->getDebug()) {
		if ($context->getOnly() === 'styles') {
			$out = $this->filter('minify-css', $out);
		} else {
			$out = $this->filter('minify-js', $out, array('cache' => $enableFilterCache));
		}
	}
	return $out;
}
/**
 * Do standard deferred updates after page view (existing or missing page)
 *
 * @param User $user The relevant user
 * @param int $oldid Revision id being viewed; if not given or 0, latest revision is assumed
 */
public function doViewUpdates(User $user, $oldid = 0) {
	// Nothing to record on a read-only wiki
	if (wfReadOnly()) {
		return;
	}

	Hooks::run('PageViewUpdates', [$this, $user]);

	// Update newtalk / watchlist notification status
	try {
		$user->clearNotification($this->mTitle, $oldid);
	} catch (DBError $dbError) {
		// Avoid outage if the master is not reachable
		MWExceptionHandler::logException($dbError);
	}
}
/**
 * Dual purpose callback used as both a set_error_handler() callback and
 * a registered shutdown function. Receive a callback from the interpreter
 * for a raised error or system shutdown, check for a fatal error, and log
 * to the 'fatal' logging channel.
 *
 * Special handling is included for missing class errors as they may
 * indicate that the user needs to install 3rd-party libraries via
 * Composer or other means.
 *
 * @since 1.25
 *
 * @param int $level Error level raised
 * @param string $message Error message
 * @param string $file File that error was raised in
 * @param int $line Line number error was raised at
 * @param array $context Active symbol table point of error
 * @param array $trace Backtrace at point of error (undocumented HHVM
 *     feature)
 * @return bool Always returns false
 */
public static function handleFatalError($level = null, $message = null, $file = null, $line = null, $context = null, $trace = null) {
	// Free reserved memory so that we have space to process OOM
	// errors
	self::$reservedMemory = null;
	if ($level === null) {
		// Called as a shutdown handler, get data from error_get_last()
		if (static::$handledFatalCallback) {
			// Already called once (probably as an error handler callback
			// under HHVM) so don't log again.
			return false;
		}
		$lastError = error_get_last();
		if ($lastError !== null) {
			$level = $lastError['type'];
			$message = $lastError['message'];
			$file = $lastError['file'];
			$line = $lastError['line'];
		} else {
			// Shutdown with no pending error: treat as non-fatal below
			$level = 0;
			$message = '';
		}
	}
	if (!in_array($level, self::$fatalErrorTypes)) {
		// Only interested in fatal errors, others should have been
		// handled by MWExceptionHandler::handleError
		return false;
	}
	$msg = "[{exception_id}] PHP Fatal Error: {$message}";
	// Look at message to see if this is a class not found failure
	// HHVM: Class undefined: foo
	// PHP5: Class 'foo' not found
	if (preg_match("/Class (undefined: \\w+|'\\w+' not found)/", $msg)) {
		// @codingStandardsIgnoreStart Generic.Files.LineLength.TooLong
		$msg = <<<TXT
{$msg}

MediaWiki or an installed extension requires this class but it is not embedded directly in MediaWiki's git repository and must be installed separately by the end user.

Please see <a href="https://www.mediawiki.org/wiki/Download_from_Git#Fetch_external_libraries">mediawiki.org</a> for help on installing the required components.
TXT;
		// @codingStandardsIgnoreEnd
	}
	// We can't just create an exception and log it as it is likely that
	// the interpreter has unwound the stack already. If that is true the
	// stacktrace we would get would be functionally empty. If however we
	// have been called as an error handler callback *and* HHVM is in use
	// we will have been provided with a useful stacktrace that we can
	// log.
	$trace = $trace ?: debug_backtrace();
	$logger = LoggerFactory::getInstance('fatal');
	$logger->error($msg, ['exception' => ['class' => 'ErrorException', 'message' => "PHP Fatal Error: {$message}", 'code' => $level, 'file' => $file, 'line' => $line, 'trace' => static::redactTrace($trace)], 'exception_id' => wfRandomString(8)]);
	// Remember call so we don't double process via HHVM's fatal
	// notifications and the shutdown hook behavior
	static::$handledFatalCallback = true;
	return false;
}
$headers = []; foreach (headers_list() as $header) { list($name, $value) = explode(':', $header, 2); $headers[strtolower(trim($name))][] = trim($value); } if (isset($headers['set-cookie'])) { $cacheControl = isset($headers['cache-control']) ? implode(', ', $headers['cache-control']) : ''; if (!preg_match('/(?:^|,)\\s*(?:private|no-cache|no-store)\\s*(?:$|,)/i', $cacheControl)) { header('Expires: Thu, 01 Jan 1970 00:00:00 GMT'); header('Cache-Control: private, max-age=0, s-maxage=0'); MediaWiki\Logger\LoggerFactory::getInstance('cache-cookies')->warning('Cookies set on {url} with Cache-Control "{cache-control}"', ['url' => WebRequest::getGlobalRequestURL(), 'cookies' => $headers['set-cookie'], 'cache-control' => $cacheControl ?: '<not set>']); } } }); } MWExceptionHandler::installHandler(); require_once "{$IP}/includes/compat/normal/UtfNormalUtil.php"; $ps_validation = Profiler::instance()->scopedProfileIn($fname . '-validation'); // T48998: Bail out early if $wgArticlePath is non-absolute foreach (['wgArticlePath', 'wgVariantArticlePath'] as $varName) { if (${$varName} && !preg_match('/^(https?:\\/\\/|\\/)/', ${$varName})) { throw new FatalError("If you use a relative URL for \${$varName}, it must start " . 'with a slash (<code>/</code>).<br><br>See ' . "<a href=\"https://www.mediawiki.org/wiki/Manual:\${$varName}\">" . "https://www.mediawiki.org/wiki/Manual:\${$varName}</a>."); } } Profiler::instance()->scopedProfileOut($ps_validation); $ps_default2 = Profiler::instance()->scopedProfileIn($fname . '-defaults2'); if ($wgCanonicalServer === false) { $wgCanonicalServer = wfExpandUrl($wgServer, PROTO_HTTP); } // Set server name $serverParts = wfParseUrl($wgCanonicalServer);
/**
 * Issue ROLLBACK only on master, only if queries were done on connection
 *
 * Each open master connection with pending writes or callbacks is rolled
 * back individually; failures are collected and reported together.
 *
 * @since 1.23
 * @throws DBExpectedError If the rollback failed on one or more servers
 */
public function rollbackMasterChanges() {
	$errored = array();
	$masterIndex = $this->getWriterIndex();
	foreach ($this->mConns as $connsByServer) {
		if (empty($connsByServer[$masterIndex])) {
			continue;
		}
		/** @var DatabaseBase $conn */
		foreach ($connsByServer[$masterIndex] as $conn) {
			// Skip connections with no open transaction and nothing pending
			if (!$conn->trxLevel() || !$conn->writesOrCallbacksPending()) {
				continue;
			}
			try {
				$conn->rollback(__METHOD__, 'flush');
			} catch (DBError $e) {
				// Keep rolling back the rest; report the failures afterwards
				MWExceptionHandler::logException($e);
				$errored[] = $conn->getServer();
			}
		}
	}
	if ($errored) {
		throw new DBExpectedError(null, "Rollback failed on server(s) " . implode(', ', array_unique($errored)));
	}
}
/**
 * Execute any due periodic queue maintenance tasks for all queues.
 *
 * A task is "due" if the time ellapsed since the last run is greater than
 * the defined run period. Concurrent calls to this function will cause tasks
 * to be attempted twice, so they may need their own methods of mutual exclusion.
 *
 * Last-run timestamps are kept in memcached under a per-wiki key and
 * merged back atomically after the run.
 *
 * @return int Number of tasks run
 */
public function executeReadyPeriodicTasks() {
	global $wgMemc;
	// Key the timestamp map to this queue group's wiki
	list($db, $prefix) = wfSplitWikiID($this->wiki);
	$key = wfForeignMemcKey($db, $prefix, 'jobqueuegroup', 'taskruns', 'v1');
	$lastRuns = $wgMemc->get($key); // (queue => task => UNIX timestamp)
	$count = 0;
	$tasksRun = array(); // (queue => task => UNIX timestamp)
	foreach ($this->getQueueTypes() as $type) {
		$queue = $this->get($type);
		foreach ($queue->getPeriodicTasks() as $task => $definition) {
			if ($definition['period'] <= 0) {
				continue; // disabled
			} elseif (!isset($lastRuns[$type][$task]) || $lastRuns[$type][$task] < time() - $definition['period']) {
				// Task is due (or has never run): attempt it now
				try {
					if (call_user_func($definition['callback']) !== null) {
						$tasksRun[$type][$task] = time();
						++$count;
					}
				} catch (JobQueueError $e) {
					// A failing task must not block the remaining ones
					MWExceptionHandler::logException($e);
				}
			}
		}
	}
	if ($count === 0) {
		return $count; // nothing to update
	}
	// Merge our run timestamps into the cached map, keeping whichever
	// timestamp is newer in case of concurrent runners
	$wgMemc->merge($key, function ($cache, $key, $lastRuns) use($tasksRun) {
		if (is_array($lastRuns)) {
			foreach ($tasksRun as $type => $tasks) {
				foreach ($tasks as $task => $timestamp) {
					if (!isset($lastRuns[$type][$task]) || $timestamp > $lastRuns[$type][$task]) {
						$lastRuns[$type][$task] = $timestamp;
					}
				}
			}
		} else {
			// No (valid) previous value: start fresh from this run
			$lastRuns = $tasksRun;
		}
		return $lastRuns;
	});
	return $count;
}
/**
 * Save this user's settings into the database.
 *
 * Uses a compare-and-set on user_touched: the UPDATE only matches if the
 * row's user_touched still equals the value we loaded, protecting against
 * concurrent saves and slave-lag races. Zero affected rows means we lost
 * the race (or had stale data) and an MWException is thrown.
 *
 * @todo Only rarely do all these fields need to be set!
 */
public function saveSettings() {
	global $wgAuth;
	if (wfReadOnly()) {
		// @TODO: caller should deal with this instead!
		// This should really just be an exception.
		MWExceptionHandler::logException(new DBExpectedError(null, "Could not update user with ID '{$this->mId}'; DB is read-only."));
		return;
	}
	$this->load();
	$this->loadPasswords();
	if (0 == $this->mId) {
		return; // anon
	}
	// Get a new user_touched that is higher than the old one.
	// This will be used for a CAS check as a last-resort safety
	// check against race conditions and slave lag.
	$oldTouched = $this->mTouched;
	$newTouched = $this->newTouchedTimestamp();
	if (!$wgAuth->allowSetLocalPassword()) {
		// External auth owns the password; blank the local one
		$this->mPassword = self::getPasswordFactory()->newFromCiphertext(null);
	}
	$dbw = wfGetDB(DB_MASTER);
	// The user_touched condition is the CAS guard: no match -> no update
	$dbw->update('user', array('user_name' => $this->mName, 'user_password' => $this->mPassword->toString(), 'user_newpassword' => $this->mNewpassword->toString(), 'user_newpass_time' => $dbw->timestampOrNull($this->mNewpassTime), 'user_real_name' => $this->mRealName, 'user_email' => $this->mEmail, 'user_email_authenticated' => $dbw->timestampOrNull($this->mEmailAuthenticated), 'user_touched' => $dbw->timestamp($newTouched), 'user_token' => strval($this->mToken), 'user_email_token' => $this->mEmailToken, 'user_email_token_expires' => $dbw->timestampOrNull($this->mEmailTokenExpires), 'user_password_expires' => $dbw->timestampOrNull($this->mPasswordExpires)), array('user_id' => $this->mId, 'user_touched' => $dbw->timestamp($oldTouched)), __METHOD__);
	if (!$dbw->affectedRows()) {
		// Maybe the problem was a missed cache update; clear it to be safe
		$this->clearSharedCache();
		// User was changed in the meantime or loaded with stale data
		$from = $this->queryFlagsUsed & self::READ_LATEST ? 'master' : 'slave';
		throw new MWException("CAS update failed on user_touched for user ID '{$this->mId}' (read from {$from});" . " the version of the user to be saved is older than the current version.");
	}
	$this->mTouched = $newTouched;
	$this->saveOptions();
	Hooks::run('UserSaveSettings', array($this));
	// Invalidate caches so subsequent reads see the new data
	$this->clearSharedCache();
	$this->getUserPage()->invalidateCache();
}
/**
 * Replace the result data with the information about an exception.
 * Returns the error code
 * @param Exception $e
 * @return string
 */
protected function substituteResultWithError($e) {
    $apiResult = $this->getResult();
    $conf = $this->getConfig();

    if ($e instanceof UsageException) {
        // User entered incorrect parameters - generate error response
        $errMessage = $e->getMessageArray();
        $docUrl = wfExpandUrl(wfScript('api'));
        ApiResult::setContentValue($errMessage, 'docref', "See {$docUrl} for API usage");
    } else {
        // Something is seriously wrong; optionally hide raw SQL details
        $hideSql = $e instanceof DBQueryError && !$conf->get('ShowSQLErrors');
        $info = $hideSql ? 'Database query error' : "Exception Caught: {$e->getMessage()}";
        $errMessage = array(
            'code' => 'internal_api_error_' . get_class($e),
            'info' => '[' . MWExceptionHandler::getLogId($e) . '] ' . $info
        );
        if ($conf->get('ShowExceptionDetails')) {
            ApiResult::setContentValue(
                $errMessage,
                'trace',
                MWExceptionHandler::getRedactedTraceAsString($e)
            );
        }
    }

    // Remember all the warnings to re-add them later
    $warnings = $apiResult->getResultData(array('warnings'));
    $apiResult->reset();

    // Re-add the id
    $requestid = $this->getParameter('requestid');
    if (!is_null($requestid)) {
        $apiResult->addValue(null, 'requestid', $requestid, ApiResult::NO_SIZE_CHECK);
    }
    if ($conf->get('ShowHostnames')) {
        // servedby is especially useful when debugging errors
        $apiResult->addValue(null, 'servedby', wfHostName(), ApiResult::NO_SIZE_CHECK);
    }
    if ($warnings !== null) {
        $apiResult->addValue(null, 'warnings', $warnings, ApiResult::NO_SIZE_CHECK);
    }
    $apiResult->addValue(null, 'error', $errMessage, ApiResult::NO_SIZE_CHECK);

    return $errMessage['code'];
}
/**
 * Potentially open a socket and sent an HTTP request back to the server
 * to run a specified number of jobs. This registers a callback to cleanup
 * the socket once it's done.
 */
public function triggerJobs() {
    $jobRunRate = $this->config->get('JobRunRate');
    if ($jobRunRate <= 0 || wfReadOnly()) {
        return;
    } elseif ($this->getTitle()->isSpecial('RunJobs')) {
        return; // recursion guard
    }
    if ($jobRunRate < 1) {
        // Fractional rate: run one job with probability $jobRunRate per request
        $max = mt_getrandmax();
        if (mt_rand(0, $max) > $max * $jobRunRate) {
            return; // the higher the job run rate, the less likely we return here
        }
        $n = 1;
    } else {
        $n = intval($jobRunRate);
    }
    $runJobsLogger = LoggerFactory::getInstance('runJobs');
    if (!$this->config->get('RunJobsAsync')) {
        // Fall back to running the job here while the user waits
        $runner = new JobRunner($runJobsLogger);
        $runner->run(array('maxJobs' => $n));
        return;
    }
    try {
        if (!JobQueueGroup::singleton()->queuesHaveJobs(JobQueueGroup::TYPE_DEFAULT)) {
            return; // do not send request if there are probably no jobs
        }
    } catch (JobQueueError $e) {
        MWExceptionHandler::logException($e);
        return; // do not make the site unavailable
    }
    // Build a signed, short-lived query so Special:RunJobs can verify the request
    $query = array(
        'title' => 'Special:RunJobs',
        'tasks' => 'jobs',
        'maxjobs' => $n,
        'sigexpiry' => time() + 5
    );
    $query['signature'] = SpecialRunJobs::getQuerySignature($query, $this->config->get('SecretKey'));
    $errno = $errstr = null;
    $info = wfParseUrl($this->config->get('Server'));
    MediaWiki\suppressWarnings();
    // Short 0.1s connect timeout: do not stall the user's request on this
    $sock = fsockopen(
        $info['host'],
        isset($info['port']) ? $info['port'] : 80,
        $errno,
        $errstr,
        0.1
    );
    MediaWiki\restoreWarnings();
    if (!$sock) {
        $runJobsLogger->error("Failed to start cron API (socket error {$errno}): {$errstr}");
        // Fall back to running the job here while the user waits
        $runner = new JobRunner($runJobsLogger);
        $runner->run(array('maxJobs' => $n));
        return;
    }
    $url = wfAppendQuery(wfScript('index'), $query);
    $req = "POST {$url} HTTP/1.1\r\n" .
        "Host: {$info['host']}\r\n" .
        "Connection: Close\r\n" .
        "Content-Length: 0\r\n\r\n";
    $runJobsLogger->info("Running {$n} job(s) via '{$url}'");
    // Send a cron API request to be performed in the background.
    // Give up if this takes too long to send (which should be rare).
    stream_set_timeout($sock, 1);
    $bytes = fwrite($sock, $req);
    if ($bytes !== strlen($req)) {
        $runJobsLogger->error("Failed to start cron API (socket write error)");
    } else {
        // Do not wait for the response (the script should handle client aborts).
        // Make sure that we don't close before that script reaches ignore_user_abort().
        $status = fgets($sock);
        if (!preg_match('#^HTTP/\\d\\.\\d 202 #', $status)) {
            $runJobsLogger->error("Failed to start cron API: received '{$status}'");
        }
    }
    fclose($sock);
}
/**
 * Output a report about the exception and takes care of formatting.
 * It will be either HTML or plain text based on isCommandLine().
 */
function report() {
    $logMessage = $this->getLogMessage();
    if ($logMessage) {
        wfDebugLog('exception', $logMessage);
        // Wikia change - @author macbre (BAC-1199)
        Wikia::log('exceptions-WIKIA', get_class($this), $logMessage, true);
    }
    if (!self::isCommandLine()) {
        $this->reportHTML();
        return;
    }
    MWExceptionHandler::printError($this->getText());
}
/**
 * Sum the per-type sibling queue sizes across all partition queues.
 *
 * @param array $types Job types to query
 * @return array|null Map of (type => total size), or null if any partition
 *  does not support the operation
 */
protected function doGetSiblingQueueSizes( array $types ) {
	$totals = array();
	foreach ( $this->partitionQueues as $partition ) {
		try {
			$partitionSizes = $partition->doGetSiblingQueueSizes( $types );
			if ( !is_array( $partitionSizes ) ) {
				return null; // not supported on all partitions; bail
			}
			foreach ( $partitionSizes as $type => $size ) {
				$totals[$type] = isset( $totals[$type] ) ? $totals[$type] + $size : $size;
			}
		} catch ( JobQueueError $e ) {
			// Skip the failed partition but keep aggregating the rest
			MWExceptionHandler::logException( $e );
		}
	}

	return $totals;
}
/**
 * Potentially open a socket and sent an HTTP request back to the server
 * to run a specified number of jobs. This registers a callback to cleanup
 * the socket once it's done.
 */
public function triggerJobs() {
    $jobRunRate = $this->config->get('JobRunRate');
    if ($this->getTitle()->isSpecial('RunJobs')) {
        return; // recursion guard
    } elseif ($jobRunRate <= 0 || wfReadOnly()) {
        return;
    }
    if ($jobRunRate < 1) {
        // Fractional rate: run one job with probability $jobRunRate per request
        $max = mt_getrandmax();
        if (mt_rand(0, $max) > $max * $jobRunRate) {
            return; // the higher the job run rate, the less likely we return here
        }
        $n = 1;
    } else {
        $n = intval($jobRunRate);
    }
    $runJobsLogger = LoggerFactory::getInstance('runJobs');
    // Fall back to running the job(s) while the user waits if needed
    if (!$this->config->get('RunJobsAsync')) {
        $runner = new JobRunner($runJobsLogger);
        $runner->run(['maxJobs' => $n]);
        return;
    }
    // Do not send request if there are probably no jobs
    try {
        $group = JobQueueGroup::singleton();
        if (!$group->queuesHaveJobs(JobQueueGroup::TYPE_DEFAULT)) {
            return;
        }
    } catch (JobQueueError $e) {
        MWExceptionHandler::logException($e);
        return; // do not make the site unavailable
    }
    // Build a signed, short-lived query so Special:RunJobs can verify the request
    $query = [
        'title' => 'Special:RunJobs',
        'tasks' => 'jobs',
        'maxjobs' => $n,
        'sigexpiry' => time() + 5
    ];
    $query['signature'] = SpecialRunJobs::getQuerySignature($query, $this->config->get('SecretKey'));
    $errno = $errstr = null;
    $info = wfParseUrl($this->config->get('CanonicalServer'));
    $host = $info ? $info['host'] : null;
    $port = 80;
    if (isset($info['scheme']) && $info['scheme'] == 'https') {
        // Use the TLS stream wrapper for HTTPS endpoints
        $host = "tls://" . $host;
        $port = 443;
    }
    if (isset($info['port'])) {
        // Explicit port in the URL overrides the scheme default
        $port = $info['port'];
    }
    MediaWiki\suppressWarnings();
    // Short 0.1s connect timeout: do not stall the user's request on this
    $sock = $host ? fsockopen($host, $port, $errno, $errstr, 0.1) : false;
    MediaWiki\restoreWarnings();
    $invokedWithSuccess = true;
    if ($sock) {
        $special = SpecialPageFactory::getPage('RunJobs');
        $url = $special->getPageTitle()->getCanonicalURL($query);
        $req = "POST {$url} HTTP/1.1\r\n" .
            "Host: {$info['host']}\r\n" .
            "Connection: Close\r\n" .
            "Content-Length: 0\r\n\r\n";
        $runJobsLogger->info("Running {$n} job(s) via '{$url}'");
        // Send a cron API request to be performed in the background.
        // Give up if this takes too long to send (which should be rare).
        stream_set_timeout($sock, 2);
        $bytes = fwrite($sock, $req);
        if ($bytes !== strlen($req)) {
            $invokedWithSuccess = false;
            $runJobsLogger->error("Failed to start cron API (socket write error)");
        } else {
            // Do not wait for the response (the script should handle client aborts).
            // Make sure that we don't close before that script reaches ignore_user_abort().
            $start = microtime(true);
            $status = fgets($sock);
            $sec = microtime(true) - $start;
            if (!preg_match('#^HTTP/\\d\\.\\d 202 #', $status)) {
                $invokedWithSuccess = false;
                $runJobsLogger->error("Failed to start cron API: received '{$status}' ({$sec})");
            }
        }
        fclose($sock);
    } else {
        $invokedWithSuccess = false;
        $runJobsLogger->error("Failed to start cron API (socket error {$errno}): {$errstr}");
    }
    // Fall back to running the job(s) while the user waits if needed
    if (!$invokedWithSuccess) {
        $runJobsLogger->warning("Jobs switched to blocking; Special:RunJobs disabled");
        $runner = new JobRunner($runJobsLogger);
        $runner->run(['maxJobs' => $n]);
    }
}
/**
 * Actually run any "on transaction pre-commit" callbacks.
 *
 * If multiple callbacks throw, earlier exceptions are logged and the
 * last one is re-thrown after all callbacks have been attempted.
 *
 * @since 1.22
 */
protected function runOnTransactionPreCommitCallbacks() {
    $lastError = null;
    $priorError = null;
    do {
        // Snapshot and clear the list first: callbacks may add callbacks :)
        $pending = $this->mTrxPreCommitCallbacks;
        $this->mTrxPreCommitCallbacks = array(); // recursion guard
        foreach ($pending as $entry) {
            try {
                list($phpCallback) = $entry;
                call_user_func($phpCallback);
            } catch (Exception $ex) {
                if ($priorError) {
                    MWExceptionHandler::logException($priorError);
                }
                $priorError = $ex;
                $lastError = $ex;
            }
        }
    } while (count($this->mTrxPreCommitCallbacks));
    if ($lastError instanceof Exception) {
        throw $lastError; // re-throw any last exception
    }
}
/**
 * Run the current MediaWiki instance
 * index.php just calls this
 */
public function run() {
    try {
        $this->checkMaxLag();
        $this->main();
        // Flush the response to the client before doing post-request work
        if (function_exists('fastcgi_finish_request')) {
            fastcgi_finish_request();
        }
        $this->triggerJobs();
        $this->restInPeace();
    } catch (Exception $ex) {
        // Route any uncaught failure through the central exception handler
        MWExceptionHandler::handle($ex);
    }
}
/**
 * Run the current MediaWiki instance
 * index.php just calls this
 */
public function run() {
    try {
        $this->checkMaxLag();
        $this->main();
        $this->restInPeace();
    } catch (Exception $ex) {
        // Route any uncaught failure through the central exception handler
        MWExceptionHandler::handle($ex);
    }
}
/**
 * Generate code for a response.
 *
 * @param ResourceLoaderContext $context Context in which to generate a response
 * @param array $modules List of module objects keyed by module name
 * @param array $missing List of requested module names that are unregistered (optional)
 * @return string Response data
 */
public function makeModuleResponse(ResourceLoaderContext $context, array $modules, array $missing = array()) {
    $out = '';
    $states = array();
    if (!count($modules) && !count($missing)) {
        return <<<MESSAGE
/* This file is the Web entry point for MediaWiki's ResourceLoader:
   <https://www.mediawiki.org/wiki/ResourceLoader>. In this request,
   no modules were requested. Max made me put this here. */
MESSAGE;
    }
    // Image requests short-circuit the whole module pipeline
    $image = $context->getImageObj();
    if ($image) {
        $data = $image->getImageData($context);
        if ($data === false) {
            $data = '';
            $this->errors[] = 'Image generation failed';
        }
        return $data;
    }
    // Pre-fetch blobs
    if ($context->shouldIncludeMessages()) {
        try {
            $blobs = $this->blobStore->get($this, $modules, $context->getLanguage());
        } catch (Exception $e) {
            // NOTE: $blobs stays unset here; the isset() lookup below falls
            // back to '{}' per module in that case
            MWExceptionHandler::logException($e);
            wfDebugLog('resourceloader', __METHOD__ . ": pre-fetching blobs from MessageBlobStore failed: {$e}");
            $this->errors[] = self::formatExceptionNoComment($e);
        }
    } else {
        $blobs = array();
    }
    foreach ($missing as $name) {
        $states[$name] = 'missing';
    }
    // Generate output
    $isRaw = false;
    foreach ($modules as $name => $module) {
        /**
         * @var $module ResourceLoaderModule
         */
        try {
            $scripts = '';
            if ($context->shouldIncludeScripts()) {
                // If we are in debug mode, we'll want to return an array of URLs if possible
                // However, we can't do this if the module doesn't support it
                // We also can't do this if there is an only= parameter, because we have to give
                // the module a way to return a load.php URL without causing an infinite loop
                if ($context->getDebug() && !$context->getOnly() && $module->supportsURLLoading()) {
                    $scripts = $module->getScriptURLsForDebug($context);
                } else {
                    $scripts = $module->getScript($context);
                    // rtrim() because there are usually a few line breaks
                    // after the last ';'. A new line at EOF, a new line
                    // added by ResourceLoaderFileModule::readScriptFiles, etc.
                    if (is_string($scripts) && strlen($scripts) && substr(rtrim($scripts), -1) !== ';') {
                        // Append semicolon to prevent weird bugs caused by files not
                        // terminating their statements right (bug 27054)
                        $scripts .= ";\n";
                    }
                }
            }
            // Styles
            $styles = array();
            if ($context->shouldIncludeStyles()) {
                // Don't create empty stylesheets like array( '' => '' ) for modules
                // that don't *have* any stylesheets (bug 38024).
                $stylePairs = $module->getStyles($context);
                if (count($stylePairs)) {
                    // If we are in debug mode without &only= set, we'll want to return an array of URLs
                    // See comment near shouldIncludeScripts() for more details
                    if ($context->getDebug() && !$context->getOnly() && $module->supportsURLLoading()) {
                        $styles = array('url' => $module->getStyleURLsForDebug($context));
                    } else {
                        // Minify CSS before embedding in mw.loader.implement call
                        // (unless in debug mode)
                        if (!$context->getDebug()) {
                            foreach ($stylePairs as $media => $style) {
                                // Can be either a string or an array of strings.
                                if (is_array($style)) {
                                    $stylePairs[$media] = array();
                                    foreach ($style as $cssText) {
                                        if (is_string($cssText)) {
                                            $stylePairs[$media][] = $this->filter('minify-css', $cssText);
                                        }
                                    }
                                } elseif (is_string($style)) {
                                    $stylePairs[$media] = $this->filter('minify-css', $style);
                                }
                            }
                        }
                        // Wrap styles into @media groups as needed and flatten into a numerical array
                        $styles = array('css' => self::makeCombinedStyles($stylePairs));
                    }
                }
            }
            // Messages
            $messagesBlob = isset($blobs[$name]) ? $blobs[$name] : '{}';
            // Append output
            switch ($context->getOnly()) {
                case 'scripts':
                    if (is_string($scripts)) {
                        // Load scripts raw...
                        $out .= $scripts;
                    } elseif (is_array($scripts)) {
                        // ...except when $scripts is an array of URLs
                        $out .= self::makeLoaderImplementScript($name, $scripts, array(), array());
                    }
                    break;
                case 'styles':
                    // We no longer seperate into media, they are all combined now with
                    // custom media type groups into @media .. {} sections as part of the css string.
                    // Module returns either an empty array or a numerical array with css strings.
                    $out .= isset($styles['css']) ? implode('', $styles['css']) : '';
                    break;
                case 'messages':
                    $out .= self::makeMessageSetScript(new XmlJsCode($messagesBlob));
                    break;
                case 'templates':
                    $out .= Xml::encodeJsCall(
                        'mw.templates.set',
                        array($name, (object) $module->getTemplates()),
                        ResourceLoader::inDebugMode()
                    );
                    break;
                default:
                    $out .= self::makeLoaderImplementScript(
                        $name,
                        $scripts,
                        $styles,
                        new XmlJsCode($messagesBlob),
                        $module->getTemplates()
                    );
                    break;
            }
        } catch (Exception $e) {
            MWExceptionHandler::logException($e);
            wfDebugLog('resourceloader', __METHOD__ . ": generating module package failed: {$e}");
            $this->errors[] = self::formatExceptionNoComment($e);
            // Respond to client with error-state instead of module implementation
            $states[$name] = 'error';
            unset($modules[$name]);
        }
        $isRaw |= $module->isRaw();
    }
    // Update module states
    if ($context->shouldIncludeScripts() && !$context->getRaw() && !$isRaw) {
        if (count($modules) && $context->getOnly() === 'scripts') {
            // Set the state of modules loaded as only scripts to ready as
            // they don't have an mw.loader.implement wrapper that sets the state
            foreach ($modules as $name => $module) {
                $states[$name] = 'ready';
            }
        }
        // Set the state of modules we didn't respond to with mw.loader.implement
        if (count($states)) {
            $out .= self::makeLoaderStateScript($states);
        }
    } else {
        if (count($states)) {
            $this->errors[] = 'Problematic modules: ' . FormatJson::encode($states, ResourceLoader::inDebugMode());
        }
    }
    if (!$context->getDebug()) {
        if ($context->getOnly() === 'styles') {
            $out = $this->filter('minify-css', $out);
        } else {
            $out = $this->filter('minify-js', $out);
        }
    }
    return $out;
}
/**
 * Run jobs of the specified number/type for the specified time
 *
 * The response map has a 'job' field that lists status of each job, including:
 *   - type : the job type
 *   - status : ok/failed
 *   - error : any error message string
 *   - time : the job run time in ms
 * The response map also has:
 *   - backoffs : the (job type => seconds) map of backoff times
 *   - elapsed : the total time spent running tasks in ms
 *   - reached : the reason the script finished, one of (none-ready, job-limit, time-limit)
 *
 * This method outputs status information only if a debug handler was set.
 * Any exceptions are caught and logged, but are not reported as output.
 *
 * @param array $options Map of parameters:
 *    - type : the job type (or false for the default types)
 *    - maxJobs : maximum number of jobs to run
 *    - maxTime : maximum time in seconds before stopping
 *    - throttle : whether to respect job backoff configuration
 * @return array Summary response that can easily be JSON serialized
 */
public function run(array $options) {
    global $wgJobClasses, $wgTrxProfilerLimits;
    $response = array('jobs' => array(), 'reached' => 'none-ready');
    $type = isset($options['type']) ? $options['type'] : false;
    $maxJobs = isset($options['maxJobs']) ? $options['maxJobs'] : false;
    $maxTime = isset($options['maxTime']) ? $options['maxTime'] : false;
    $noThrottle = isset($options['throttle']) && !$options['throttle'];
    if ($type !== false && !isset($wgJobClasses[$type])) {
        $response['reached'] = 'none-possible';
        return $response;
    }
    // Bail out if in read-only mode
    if (wfReadOnly()) {
        $response['reached'] = 'read-only';
        return $response;
    }
    // Catch huge single updates that lead to slave lag
    $trxProfiler = Profiler::instance()->getTransactionProfiler();
    $trxProfiler->setLogger(LoggerFactory::getInstance('DBPerformance'));
    $trxProfiler->setExpectations($wgTrxProfilerLimits['JobRunner'], __METHOD__);
    // Bail out if there is too much DB lag.
    // This check should not block as we want to try other wiki queues.
    $maxAllowedLag = 3;
    list(, $maxLag) = wfGetLB(wfWikiID())->getMaxLag();
    if ($maxLag >= $maxAllowedLag) {
        $response['reached'] = 'slave-lag-limit';
        return $response;
    }
    $group = JobQueueGroup::singleton();
    // Flush any pending DB writes for sanity
    wfGetLBFactory()->commitAll();
    // Some jobs types should not run until a certain timestamp
    $backoffs = array(); // map of (type => UNIX expiry)
    $backoffDeltas = array(); // map of (type => seconds)
    $wait = 'wait'; // block to read backoffs the first time
    $stats = RequestContext::getMain()->getStats();
    $jobsPopped = 0;
    $timeMsTotal = 0;
    $flags = JobQueueGroup::USE_CACHE;
    $startTime = microtime(true); // time since jobs started running
    $checkLagPeriod = 1.0; // check slave lag this many seconds
    $lastCheckTime = 1; // timestamp of last slave check
    do {
        // Sync the persistent backoffs with concurrent runners
        $backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
        $blacklist = $noThrottle ? array() : array_keys($backoffs);
        $wait = 'nowait'; // less important now
        if ($type === false) {
            $job = $group->pop(JobQueueGroup::TYPE_DEFAULT, $flags, $blacklist);
        } elseif (in_array($type, $blacklist)) {
            $job = false; // requested queue in backoff state
        } else {
            $job = $group->pop($type); // job from a single queue
        }
        if ($job) { // found a job
            $popTime = time();
            $jType = $job->getType();
            // Back off of certain jobs for a while (for throttling and for errors)
            $ttw = $this->getBackoffTimeToWait($job);
            if ($ttw > 0) {
                // Always add the delta for other runners in case the time running the
                // job negated the backoff for each individually but not collectively.
                $backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
                $backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
            }
            $msg = $job->toString() . " STARTING";
            $this->logger->debug($msg);
            $this->debugCallback($msg);
            // Run the job...
            $jobStartTime = microtime(true);
            try {
                ++$jobsPopped;
                $status = $job->run();
                $error = $job->getLastError();
                $this->commitMasterChanges($job);
                // Deferred updates may themselves write, hence the second commit
                DeferredUpdates::doUpdates();
                $this->commitMasterChanges($job);
            } catch (Exception $e) {
                MWExceptionHandler::rollbackMasterChangesAndLog($e);
                $status = false;
                $error = get_class($e) . ': ' . $e->getMessage();
                MWExceptionHandler::logException($e);
            }
            // Commit all outstanding connections that are in a transaction
            // to get a fresh repeatable read snapshot on every connection.
            // Note that jobs are still responsible for handling slave lag.
            wfGetLBFactory()->commitAll();
            // Clear out title cache data from prior snapshots
            LinkCache::singleton()->clear();
            $timeMs = intval((microtime(true) - $jobStartTime) * 1000);
            $timeMsTotal += $timeMs;
            // Record how long jobs wait before getting popped
            $readyTs = $job->getReadyTimestamp();
            if ($readyTs) {
                $pickupDelay = $popTime - $readyTs;
                $stats->timing('jobqueue.pickup_delay.all', 1000 * $pickupDelay);
                $stats->timing("jobqueue.pickup_delay.{$jType}", 1000 * $pickupDelay);
            }
            // Record root job age for jobs being run
            $root = $job->getRootJobParams();
            if ($root['rootJobTimestamp']) {
                $age = $popTime - wfTimestamp(TS_UNIX, $root['rootJobTimestamp']);
                $stats->timing("jobqueue.pickup_root_age.{$jType}", 1000 * $age);
            }
            // Track the execution time for jobs
            $stats->timing("jobqueue.run.{$jType}", $timeMs);
            // Mark the job as done on success or when the job cannot be retried
            if ($status !== false || !$job->allowRetries()) {
                $group->ack($job); // done
            }
            // Back off of certain jobs for a while (for throttling and for errors)
            if ($status === false && mt_rand(0, 49) == 0) {
                $ttw = max($ttw, 30); // too many errors
                $backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
            }
            if ($status === false) {
                $msg = $job->toString() . " t={$timeMs} error={$error}";
                $this->logger->error($msg);
                $this->debugCallback($msg);
            } else {
                $msg = $job->toString() . " t={$timeMs} good";
                $this->logger->info($msg);
                $this->debugCallback($msg);
            }
            $response['jobs'][] = array(
                'type' => $jType,
                'status' => $status === false ? 'failed' : 'ok',
                'error' => $error,
                'time' => $timeMs
            );
            // Break out if we hit the job count or wall time limits...
            if ($maxJobs && $jobsPopped >= $maxJobs) {
                $response['reached'] = 'job-limit';
                break;
            } elseif ($maxTime && microtime(true) - $startTime > $maxTime) {
                $response['reached'] = 'time-limit';
                break;
            }
            // Don't let any of the main DB slaves get backed up.
            // This only waits for so long before exiting and letting
            // other wikis in the farm (on different masters) get a chance.
            $timePassed = microtime(true) - $lastCheckTime;
            if ($timePassed >= $checkLagPeriod || $timePassed < 0) {
                if (!wfWaitForSlaves($lastCheckTime, false, '*', $maxAllowedLag)) {
                    $response['reached'] = 'slave-lag-limit';
                    break;
                }
                $lastCheckTime = microtime(true);
            }
            // Don't let any queue slaves/backups fall behind
            if ($jobsPopped > 0 && $jobsPopped % 100 == 0) {
                $group->waitForBackups();
            }
            // Bail if near-OOM instead of in a job
            if (!$this->checkMemoryOK()) {
                $response['reached'] = 'memory-limit';
                break;
            }
        }
    } while ($job); // stop when there are no jobs
    // Sync the persistent backoffs for the next runJobs.php pass
    if ($backoffDeltas) {
        $this->syncBackoffDeltas($backoffs, $backoffDeltas, 'wait');
    }
    $response['backoffs'] = $backoffs;
    $response['elapsed'] = $timeMsTotal;
    return $response;
}
/**
 * Log profiler output for the request and, if configured, flush buffered
 * stats to a statsd server. Skips the debug-log portion when profiling is
 * stubbed out or explicitly disabled.
 * @todo document
 */
function wfLogProfilingData() {
    global $wgDebugLogGroups, $wgDebugRawPage;
    $context = RequestContext::getMain();
    $request = $context->getRequest();
    $profiler = Profiler::instance();
    $profiler->setContext($context);
    $profiler->logData();
    $config = $context->getConfig();
    if ($config->get('StatsdServer')) {
        try {
            // StatsdServer is "host" or "host:port"; port defaults to 8125
            $statsdServer = explode(':', $config->get('StatsdServer'));
            $statsdHost = $statsdServer[0];
            $statsdPort = isset($statsdServer[1]) ? $statsdServer[1] : 8125;
            $statsdSender = new SocketSender($statsdHost, $statsdPort);
            $statsdClient = new SamplingStatsdClient($statsdSender, true, false);
            $statsdClient->send($context->getStats()->getBuffer());
        } catch (Exception $ex) {
            // Stats delivery is best-effort; never break the request over it
            MWExceptionHandler::logException($ex);
        }
    }
    # Profiling must actually be enabled...
    if ($profiler instanceof ProfilerStub) {
        return;
    }
    if (isset($wgDebugLogGroups['profileoutput']) && $wgDebugLogGroups['profileoutput'] === false) {
        // Explicitly disabled
        return;
    }
    if (!$wgDebugRawPage && wfIsDebugRawPage()) {
        return;
    }
    $ctx = array('elapsed' => $request->getElapsedTime());
    if (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) {
        $ctx['forwarded_for'] = $_SERVER['HTTP_X_FORWARDED_FOR'];
    }
    if (!empty($_SERVER['HTTP_CLIENT_IP'])) {
        $ctx['client_ip'] = $_SERVER['HTTP_CLIENT_IP'];
    }
    if (!empty($_SERVER['HTTP_FROM'])) {
        $ctx['from'] = $_SERVER['HTTP_FROM'];
    }
    if (isset($ctx['forwarded_for']) || isset($ctx['client_ip']) || isset($ctx['from'])) {
        // Any proxy header implies REMOTE_ADDR is the proxy, not the client
        $ctx['proxy'] = $_SERVER['REMOTE_ADDR'];
    }
    // Don't load $wgUser at this late stage just for statistics purposes
    // @todo FIXME: We can detect some anons even if it is not loaded.
    // See User::getId()
    $user = $context->getUser();
    $ctx['anon'] = $user->isItemLoaded('id') && $user->isAnon();
    // Command line script uses a FauxRequest object which does not have
    // any knowledge about an URL and throw an exception instead.
    try {
        $ctx['url'] = urldecode($request->getRequestURL());
    } catch (Exception $ignored) {
        // no-op
    }
    $ctx['output'] = $profiler->getOutput();
    $log = LoggerFactory::getInstance('profileoutput');
    $log->info("Elapsed: {elapsed}; URL: <{url}>\n{output}", $ctx);
}