Example #1
0
 /**
  * Merge the default MimeAnalyzer parameters into $params.
  *
  * @param array $params
  * @param Config $mainConfig
  * @return array
  */
 public static function applyDefaultParameters(array $params, Config $mainConfig)
 {
     $logger = LoggerFactory::getInstance('Mime');
     // Content-based guess: try DjVu detection first, then let hooks refine $mime.
     $guessCallback = function ($mimeAnalyzer, &$head, &$tail, $file, &$mime) use($logger) {
         // Also test DjVu
         $deja = new DjVuImage($file);
         if ($deja->isValid()) {
             $logger->info(__METHOD__ . ": detected {$file} as image/vnd.djvu\n");
             $mime = 'image/vnd.djvu';
             return;
         }
         // Some strings by reference for performance - assuming well-behaved hooks
         Hooks::run('MimeMagicGuessFromContent', [$mimeAnalyzer, &$head, &$tail, $file, &$mime]);
     };
     // Media handling extensions can improve the MIME detected from the extension
     $extCallback = function ($mimeAnalyzer, $ext, &$mime) {
         Hooks::run('MimeMagicImproveFromExtension', [$mimeAnalyzer, $ext, &$mime]);
     };
     // Allow media handling extensions adding MIME-types and MIME-info
     $initCallback = function ($mimeAnalyzer) {
         Hooks::run('MimeMagicInit', [$mimeAnalyzer]);
     };
     // += only fills in keys the caller did not supply
     $params += [
         'typeFile' => $mainConfig->get('MimeTypeFile'),
         'infoFile' => $mainConfig->get('MimeInfoFile'),
         'xmlTypes' => $mainConfig->get('XMLMimeTypes'),
         'guessCallback' => $guessCallback,
         'extCallback' => $extCallback,
         'initCallback' => $initCallback,
         'logger' => $logger,
     ];
     // Resolve the legacy relative defaults to the bundled data files
     if ($params['infoFile'] === 'includes/mime.info') {
         $params['infoFile'] = __DIR__ . "/libs/mime/mime.info";
     }
     if ($params['typeFile'] === 'includes/mime.types') {
         $params['typeFile'] = __DIR__ . "/libs/mime/mime.types";
     }
     $detectorCmd = $mainConfig->get('MimeDetectorCommand');
     if ($detectorCmd) {
         // Shell out to the configured external MIME detector command
         $params['detectCallback'] = function ($file) use($detectorCmd) {
             return wfShellExec("{$detectorCmd} " . wfEscapeShellArg($file));
         };
     }
     return $params;
 }
Example #2
0
 public function execute()
 {
     global $wgCommandLineMode;
     // Optionally fork into multiple job-runner processes; the parent exits.
     if ($this->hasOption('procs')) {
         $procCount = intval($this->getOption('procs'));
         if ($procCount < 1 || $procCount > 1000) {
             $this->error("Invalid argument to --procs", true);
         } elseif ($procCount != 1) {
             $forker = new ForkController($procCount);
             if ($forker->start() != 'child') {
                 exit(0);
             }
         }
     }
     $jsonOutput = $this->getOption('result') === 'json';
     // Enable DBO_TRX for atomicity; JobRunner manages transactions
     // and works well in web server mode already (@TODO: this is a hack)
     $wgCommandLineMode = false;
     $runner = new JobRunner(LoggerFactory::getInstance('runJobs'));
     if (!$jsonOutput) {
         $runner->setDebugHandler(array($this, 'debugInternal'));
     }
     $runOptions = array(
         'type' => $this->getOption('type', false),
         'maxJobs' => $this->getOption('maxjobs', false),
         'maxTime' => $this->getOption('maxtime', false),
         'throttle' => !$this->hasOption('nothrottle'),
     );
     $response = $runner->run($runOptions);
     if ($jsonOutput) {
         $this->output(FormatJson::encode($response, true));
     }
     // Restore CLI mode for any shutdown handlers
     $wgCommandLineMode = true;
 }
 /**
  * @param LoggerInterface $logger Optional; defaults to the 'runJobs' channel
  */
 public function __construct(LoggerInterface $logger = null)
 {
     // Fall back to the default 'runJobs' channel when no logger is supplied
     $this->setLogger($logger === null ? LoggerFactory::getInstance('runJobs') : $logger);
 }
 /**
  * Fetch (and cache for an hour) roughly the last 30 days of page view
  * data for the given title from the page view API.
  *
  * @param Title $title
  * @return array|bool View data array, or false on HTTP/JSON failure
  */
 protected static function getMonthViews(Title $title)
 {
     global $wgMemc;
     $key = wfMemcKey('pvi', 'month', md5($title->getPrefixedText()));
     $data = $wgMemc->get($key);
     if ($data) {
         return $data;
     }
     $today = date('Ymd');
     // 30 days back; day-level granularity, so DST shifts don't matter
     $lastMonth = date('Ymd', time() - 60 * 60 * 24 * 30);
     $url = self::buildApiUrl($title, $lastMonth, $today);
     $req = MWHttpRequest::factory($url, ['timeout' => 10], __METHOD__);
     $status = $req->execute();
     if (!$status->isOK()) {
         LoggerFactory::getInstance('PageViewInfo')->error("Failed fetching {$url}: {$status->getWikiText()}", ['url' => $url, 'title' => $title->getPrefixedText()]);
         return false;
     }
     $data = FormatJson::decode($req->getContent(), true);
     if (!is_array($data)) {
         // Bug fix: previously a malformed JSON body produced null here, the
         // array writes below failed, and garbage could be cached. Bail instead.
         LoggerFactory::getInstance('PageViewInfo')->error("Invalid JSON from {$url}", ['url' => $url, 'title' => $title->getPrefixedText()]);
         return false;
     }
     // Add our start/end periods
     $data['start'] = $lastMonth;
     $data['end'] = $today;
     // Cache for an hour
     $wgMemc->set($key, $data, 60 * 60);
     return $data;
 }
Example #5
0
 public function execute()
 {
     // Jobs cannot run on a read-only wiki; error() with a fatal code exits.
     if (wfReadOnly()) {
         // die
         $this->error("Unable to run jobs; the wiki is in read-only mode.", 1);
     }
     // Optionally fork into multiple job-runner processes; the parent exits.
     if ($this->hasOption('procs')) {
         $procCount = intval($this->getOption('procs'));
         if ($procCount < 1 || $procCount > 1000) {
             $this->error("Invalid argument to --procs", true);
         } elseif ($procCount != 1) {
             $forker = new ForkController($procCount);
             if ($forker->start() != 'child') {
                 exit(0);
             }
         }
     }
     $jsonOutput = $this->getOption('result') === 'json';
     $runner = new JobRunner(LoggerFactory::getInstance('runJobs'));
     if (!$jsonOutput) {
         $runner->setDebugHandler(array($this, 'debugInternal'));
     }
     $response = $runner->run(array(
         'type' => $this->getOption('type', false),
         'maxJobs' => $this->getOption('maxjobs', false),
         'maxTime' => $this->getOption('maxtime', false),
         'throttle' => !$this->hasOption('nothrottle'),
     ));
     if ($jsonOutput) {
         $this->output(FormatJson::encode($response, true));
     }
 }
 /**
  * @param array $params Possible keys:
  *   - redisConfig  : An array of parameters to RedisConnectionPool::__construct().
  *   - redisServers : Array of server entries, the first being the primary and the
  *                    others being fallback servers. Each entry is either a hostname/port
  *                    combination or the absolute path of a UNIX socket.
  *                    If a hostname is specified but no port, the standard port number
  *                    6379 will be used. Required.
  */
 public function __construct(array $params)
 {
     parent::__construct($params);
     if (isset($params['redisServers'])) {
         $this->servers = $params['redisServers'];
     } else {
         // b/c: accept the old singular 'redisServer' key
         $this->servers = [$params['redisServer']];
     }
     // Serialization is handled here, not by the Redis client itself
     $params['redisConfig']['serializer'] = 'none';
     $this->redisPool = RedisConnectionPool::singleton($params['redisConfig']);
     $this->logger = \MediaWiki\Logger\LoggerFactory::getInstance('redis');
 }
Example #7
0
 /**
  * Construct a factory based on a configuration array (typically from $wgLBFactoryConf)
  * @param array $conf
  */
 public function __construct(array $conf)
 {
     // Only accept a string read-only reason; anything else is ignored
     $roReason = isset($conf['readOnlyReason']) ? $conf['readOnlyReason'] : null;
     if (is_string($roReason)) {
         $this->readOnlyReason = $roReason;
     }
     $this->chronProt = $this->newChronologyProtector();
     $this->trxProfiler = Profiler::instance()->getTransactionProfiler();
     $this->logger = LoggerFactory::getInstance('DBTransaction');
 }
 /**
  * Handle a POSTed job-run request.
  *
  * Verifies the request's HMAC signature and expiry, then runs the
  * requested tasks (currently only 'jobs'). In async mode an HTTP 202 is
  * flushed first so the client can safely disconnect while jobs run.
  *
  * @param string $par Unused subpage parameter
  */
 public function execute($par = '')
 {
     $this->getOutput()->disable();
     if (wfReadOnly()) {
         // HTTP 423 Locked
         HttpStatus::header(423);
         print 'Wiki is in read-only mode';
         return;
     } elseif (!$this->getRequest()->wasPosted()) {
         HttpStatus::header(400);
         print 'Request must be POSTed';
         return;
     }
     $optional = array('maxjobs' => 0, 'maxtime' => 30, 'type' => false, 'async' => true);
     $required = array_flip(array('title', 'tasks', 'signature', 'sigexpiry'));
     $params = array_intersect_key($this->getRequest()->getValues(), $required + $optional);
     $missing = array_diff_key($required, $params);
     if (count($missing)) {
         HttpStatus::header(400);
         print 'Missing parameters: ' . implode(', ', array_keys($missing));
         return;
     }
     // The signature covers every parameter except the signature itself
     $squery = $params;
     unset($squery['signature']);
     $correctSignature = self::getQuerySignature($squery, $this->getConfig()->get('SecretKey'));
     $providedSignature = $params['signature'];
     // hash_equals() avoids timing side-channels when comparing signatures
     $verified = is_string($providedSignature) && hash_equals($correctSignature, $providedSignature);
     if (!$verified || $params['sigexpiry'] < time()) {
         HttpStatus::header(400);
         print 'Invalid or stale signature provided';
         return;
     }
     // Apply any default parameter values
     $params += $optional;
     if ($params['async']) {
         // Client will usually disconnect before checking the response,
         // but it needs to know when it is safe to disconnect. Until this
         // reaches ignore_user_abort(), it is not safe as the jobs won't run.
         ignore_user_abort(true);
         // jobs may take a bit of time
         // HTTP 202 Accepted
         HttpStatus::header(202);
         ob_flush();
         flush();
         // Once the client receives this response, it can disconnect
     }
     // Do all of the specified tasks...
     if (in_array('jobs', explode('|', $params['tasks']))) {
         $runner = new JobRunner(LoggerFactory::getInstance('runJobs'));
         $response = $runner->run(array(
             'type' => $params['type'],
             'maxJobs' => $params['maxjobs'] ? $params['maxjobs'] : 1,
             // Bug fix: 'maxTime' previously read $params['maxjobs'] by mistake
             'maxTime' => $params['maxtime'] ? $params['maxtime'] : 30
         ));
         if (!$params['async']) {
             print FormatJson::encode($response, true);
         }
     }
 }
Example #9
0
 /**
  * Fill in default load-balancer configuration from the main site config.
  *
  * Keys already present in $lbConf always win; only missing keys are added.
  *
  * @param array $lbConf Config for LBFactory::__construct()
  * @param Config $mainConfig Main config object from MediaWikiServices
  * @return array
  */
 public static function applyDefaultConfig(array $lbConf, Config $mainConfig)
 {
     global $wgCommandLineMode;
     // += only adds keys the caller has not explicitly configured
     $lbConf += ['localDomain' => new DatabaseDomain($mainConfig->get('DBname'), null, $mainConfig->get('DBprefix')), 'profiler' => Profiler::instance(), 'trxProfiler' => Profiler::instance()->getTransactionProfiler(), 'replLogger' => LoggerFactory::getInstance('DBReplication'), 'queryLogger' => LoggerFactory::getInstance('DBQuery'), 'connLogger' => LoggerFactory::getInstance('DBConnection'), 'perfLogger' => LoggerFactory::getInstance('DBPerformance'), 'errorLogger' => [MWExceptionHandler::class, 'logException'], 'cliMode' => $wgCommandLineMode, 'hostname' => wfHostname(), 'readOnlyReason' => wfConfiguredReadOnlyReason()];
     if ($lbConf['class'] === 'LBFactorySimple') {
         if (isset($lbConf['servers'])) {
             // Server array is already explicitly configured; leave alone
         } elseif (is_array($mainConfig->get('DBservers'))) {
             // Multi-server setup: normalize each DBservers entry with
             // per-type extras and global defaults
             foreach ($mainConfig->get('DBservers') as $i => $server) {
                 if ($server['type'] === 'sqlite') {
                     $server += ['dbDirectory' => $mainConfig->get('SQLiteDataDir')];
                 } elseif ($server['type'] === 'postgres') {
                     $server += ['port' => $mainConfig->get('DBport')];
                 }
                 $lbConf['servers'][$i] = $server + ['schema' => $mainConfig->get('DBmwschema'), 'tablePrefix' => $mainConfig->get('DBprefix'), 'flags' => DBO_DEFAULT, 'sqlMode' => $mainConfig->get('SQLMode'), 'utf8Mode' => $mainConfig->get('DBmysql5')];
             }
         } else {
             // Single-server setup built from the individual DB* settings
             $flags = DBO_DEFAULT;
             $flags |= $mainConfig->get('DebugDumpSql') ? DBO_DEBUG : 0;
             $flags |= $mainConfig->get('DBssl') ? DBO_SSL : 0;
             $flags |= $mainConfig->get('DBcompress') ? DBO_COMPRESS : 0;
             $server = ['host' => $mainConfig->get('DBserver'), 'user' => $mainConfig->get('DBuser'), 'password' => $mainConfig->get('DBpassword'), 'dbname' => $mainConfig->get('DBname'), 'schema' => $mainConfig->get('DBmwschema'), 'tablePrefix' => $mainConfig->get('DBprefix'), 'type' => $mainConfig->get('DBtype'), 'load' => 1, 'flags' => $flags, 'sqlMode' => $mainConfig->get('SQLMode'), 'utf8Mode' => $mainConfig->get('DBmysql5')];
             if ($server['type'] === 'sqlite') {
                 $server['dbDirectory'] = $mainConfig->get('SQLiteDataDir');
             } elseif ($server['type'] === 'postgres') {
                 $server['port'] = $mainConfig->get('DBport');
             }
             $lbConf['servers'] = [$server];
         }
         if (!isset($lbConf['externalClusters'])) {
             $lbConf['externalClusters'] = $mainConfig->get('ExternalServers');
         }
     } elseif ($lbConf['class'] === 'LBFactoryMulti') {
         // Multi-DB factory: only the server template gets the schema/SQL-mode
         // defaults; individual servers inherit from the template
         if (isset($lbConf['serverTemplate'])) {
             $lbConf['serverTemplate']['schema'] = $mainConfig->get('DBmwschema');
             $lbConf['serverTemplate']['sqlMode'] = $mainConfig->get('SQLMode');
             $lbConf['serverTemplate']['utf8Mode'] = $mainConfig->get('DBmysql5');
         }
     }
     // Use APC/memcached style caching, but avoids loops with CACHE_DB (T141804)
     // Each cache is only used if its QoS is better than SQL-emulated caching
     $sCache = MediaWikiServices::getInstance()->getLocalServerObjectCache();
     if ($sCache->getQoS($sCache::ATTR_EMULATION) > $sCache::QOS_EMULATION_SQL) {
         $lbConf['srvCache'] = $sCache;
     }
     $cCache = ObjectCache::getLocalClusterInstance();
     if ($cCache->getQoS($cCache::ATTR_EMULATION) > $cCache::QOS_EMULATION_SQL) {
         $lbConf['memCache'] = $cCache;
     }
     $wCache = MediaWikiServices::getInstance()->getMainWANObjectCache();
     if ($wCache->getQoS($wCache::ATTR_EMULATION) > $wCache::QOS_EMULATION_SQL) {
         $lbConf['wanCache'] = $wCache;
     }
     return $lbConf;
 }
Example #10
0
 /**
  * Logs a content-security-policy violation report from web browser.
  */
 public function execute()
 {
     // Report-only violations go to a separate log channel
     $channel = $this->getParameter('reportonly') ? 'csp-report-only' : 'csp';
     $this->log = LoggerFactory::getInstance($channel);
     $userAgent = $this->getRequest()->getHeader('user-agent');
     $this->verifyPostBodyOk();
     $report = $this->getReport();
     $flags = $this->getFlags($report);
     $warningText = $this->generateLogLine($flags, $report);
     $this->logReport($flags, $warningText, [
         'csp-report' => $report,
         'method' => __METHOD__,
         'user' => $this->getUser()->getName(),
         'user-agent' => $userAgent,
         'source' => $this->getParameter('source'),
     ]);
     $this->getResult()->addValue(null, $this->getModuleName(), 'success');
 }
Example #11
0
 /**
  * Write this HTTP error to the 'HttpError' log channel.
  * 4xx codes are logged as info, 5xx codes as errors.
  */
 private function doLog()
 {
     // Resolve Message objects to plain text for the log line
     $message = $this->content instanceof Message ? $this->content->text() : $this->content;
     $context = ['file' => $this->getFile(), 'line' => $this->getLine(), 'http_code' => $this->httpCode];
     $logMsg = "{$message} ({http_code}) from {file}:{line}";
     $logger = LoggerFactory::getInstance('HttpError');
     if ($this->getStatusCode() >= 500) {
         $logger->error($logMsg, $context);
     } else {
         $logger->info($logMsg, $context);
     }
 }
 /**
  * Re-resolve this job's page DB keys into pages and run them through
  * the updater; invalid keys are logged and skipped.
  *
  * @return bool True unless the updater reports a negative page count
  */
 protected function doJob()
 {
     // Reload pages from the stored DB keys to throw into the updater
     $pages = array();
     foreach ($this->params['pageDBKeys'] as $dbKey) {
         $title = Title::newFromDBKey($dbKey);
         if ($title) {
             $pages[] = WikiPage::factory($title);
         } else {
             // Skip any titles with broken keys.  We can't do anything with them.
             LoggerFactory::getInstance('CirrusSearch')->warning("Skipping invalid DBKey: {pageDBKey}", array('pageDBKey' => $dbKey));
         }
     }
     // Now invoke the updater!
     $updater = $this->createUpdater();
     return $updater->updatePages($pages, null, null, $this->params['updateFlags']) >= 0;
 }
Example #13
0
 /**
  * @param array $conditions An array of arrays describing throttling conditions.
  *     Defaults to $wgPasswordAttemptThrottle. See documentation of that variable for format.
  * @param array $params Parameters (all optional):
  *   - type: throttle type, used as a namespace for counters,
  *   - cache: a BagOStuff object where throttle counters are stored.
  *   - warningLimit: the log level will be raised to warning when rejecting an attempt after
  *     no less than this many failures.
  */
 public function __construct(array $conditions = null, array $params = [])
 {
     // Reject any unknown option keys up front
     $allowed = ['type' => true, 'cache' => true, 'warningLimit' => true];
     $unknown = array_diff_key($params, $allowed);
     if ($unknown) {
         throw new \InvalidArgumentException('unrecognized parameters: ' . implode(', ', array_keys($unknown)));
     }
     if ($conditions === null) {
         // Default to the site-wide password attempt throttle settings
         $config = \ConfigFactory::getDefaultInstance()->makeConfig('main');
         $conditions = $config->get('PasswordAttemptThrottle');
         $defaults = ['type' => 'password', 'cache' => \ObjectCache::getLocalClusterInstance(), 'warningLimit' => 50];
     } else {
         // Custom conditions: never escalate to warning level by default
         $defaults = ['type' => 'custom', 'cache' => \ObjectCache::getLocalClusterInstance(), 'warningLimit' => INF];
     }
     $params += $defaults;
     $this->type = $params['type'];
     $this->conditions = static::normalizeThrottleConditions($conditions);
     $this->cache = $params['cache'];
     $this->warningLimit = $params['warningLimit'];
     $this->setLogger(LoggerFactory::getInstance('throttler'));
 }
 /**
  * Pick the best near match if possible.
  *
  * @return Title|null title if there is a near match and null otherwise
  */
 public function pickBest()
 {
     // Nothing to pick from, or nothing to match against
     if (!$this->titles || !$this->term) {
         return null;
     }
     if (count($this->titles) === 1) {
         // Single candidate: take its title match, else any redirect match
         if (isset($this->titles[0]['titleMatch'])) {
             return $this->titles[0]['titleMatch'];
         }
         if (isset($this->titles[0]['redirectMatches'][0])) {
             return $this->titles[0]['redirectMatches'][0];
         }
         LoggerFactory::getInstance('CirrusSearch')->info('NearMatchPicker built with busted matches.  Assuming no near match');
         return null;
     }
     // Try progressively looser transformations: verbatim, lowercase, ucwords
     $transformers = array(function ($term) {
         return $term;
     }, array($this->language, 'lc'), array($this->language, 'ucwords'));
     foreach ($transformers as $transformer) {
         $transformedTerm = call_user_func($transformer, $this->term);
         $winner = null;
         $ambiguous = false;
         foreach ($this->titles as $candidate) {
             $match = $this->checkAllMatches($transformer, $transformedTerm, $candidate);
             if (!$match) {
                 continue;
             }
             if ($winner) {
                 // Found more than one result so we try another transformer
                 $ambiguous = true;
                 break;
             }
             $winner = $match;
         }
         if ($winner && !$ambiguous) {
             return $winner;
         }
     }
     // Didn't find anything
     return null;
 }
Example #15
0
 public function execute()
 {
     global $wgCommandLineMode;
     // Optionally fork into multiple job-runner processes; the parent exits.
     if ($this->hasOption('procs')) {
         $procCount = intval($this->getOption('procs'));
         if ($procCount < 1 || $procCount > 1000) {
             $this->error("Invalid argument to --procs", true);
         } elseif ($procCount != 1) {
             $forker = new ForkController($procCount);
             if ($forker->start() != 'child') {
                 exit(0);
             }
         }
     }
     $jsonOutput = $this->getOption('result') === 'json';
     $keepWaiting = $this->hasOption('wait');
     // Enable DBO_TRX for atomicity; JobRunner manages transactions
     // and works well in web server mode already (@TODO: this is a hack)
     $wgCommandLineMode = false;
     $runner = new JobRunner(LoggerFactory::getInstance('runJobs'));
     if (!$jsonOutput) {
         $runner->setDebugHandler([$this, 'debugInternal']);
     }
     $type = $this->getOption('type', false);
     $maxJobs = $this->getOption('maxjobs', false);
     $maxTime = $this->getOption('maxtime', false);
     $throttle = !$this->hasOption('nothrottle');
     while (true) {
         $response = $runner->run([
             'type' => $type,
             'maxJobs' => $maxJobs,
             'maxTime' => $maxTime,
             'throttle' => $throttle,
         ]);
         if ($jsonOutput) {
             $this->output(FormatJson::encode($response, true));
         }
         // Stop when not waiting, or when the runner hit a hard limit
         $reached = $response['reached'];
         if (!$keepWaiting || $reached === 'time-limit' || $reached === 'job-limit' || $reached === 'memory-limit') {
             break;
         }
         if ($maxJobs !== false) {
             // Shrink the remaining job budget by what was just run
             $maxJobs -= count($response['jobs']);
         }
         sleep(1);
     }
     // Restore CLI mode for any shutdown handlers
     $wgCommandLineMode = true;
 }
 /**
  * Get text to index from a ParserOutput assuming the content was wikitext.
  *
  * @param ParserOutput $parserOutput The parsed wikitext's parser output
  * @return array who's first entry is text and second is opening text, and third is an
  *  array of auxiliary text
  */
 private function formatWikitext(ParserOutput $parserOutput)
 {
     global $wgCirrusSearchBoostOpening;
     $parserOutput->setEditSectionTokens(false);
     $parserOutput->setTOCEnabled(false);
     $text = $parserOutput->getText();
     $opening = null;
     switch ($wgCirrusSearchBoostOpening) {
         case 'first_heading':
             $opening = $this->extractHeadingBeforeFirstHeading($text);
             // Explicit break: this case previously relied on falling through
             // to 'none', which would silently break if cases were reordered.
             break;
         case 'none':
             break;
         default:
             LoggerFactory::getInstance('CirrusSearch')->warning("Invalid value for \$wgCirrusSearchBoostOpening: {wgCirrusSearchBoostOpening}", array('wgCirrusSearchBoostOpening' => $wgCirrusSearchBoostOpening));
     }
     // Add extra spacing around break tags so text crammed together like<br>this doesn't make one word.
     $text = str_replace('<br', "\n<br", $text);
     $formatter = new HtmlFormatter($text);
     // Strip elements from the page that we never want in the search text.
     $formatter->remove($this->excludedElementSelectors);
     $filterResult = $formatter->filterContent();
     if ($filterResult === null) {
         // We're running against Mediawiki < 1.24wm10 which won't support auxiliary text
         // because it can't extract it using the HtmlFormatter.  We'll just set text to
         // all the text.
         $allText = trim(Sanitizer::stripAllTags($formatter->getText()));
         $auxiliary = array();
     } else {
         // Strip elements from the page that are auxiliary text.  These will still be
         // searched but matches will be ranked lower and non-auxiliary matches will be
         // preferred in highlighting.
         $formatter->remove($this->auxiliaryElementSelectors);
         $auxiliaryElements = $formatter->filterContent();
         $allText = trim(Sanitizer::stripAllTags($formatter->getText()));
         $auxiliary = array();
         foreach ($auxiliaryElements as $auxiliaryElement) {
             $auxiliary[] = trim(Sanitizer::stripAllTags($formatter->getText($auxiliaryElement)));
         }
     }
     return array($allText, $opening, $auxiliary);
 }
Example #17
0
 /**
  * Attempt to load a precomputed document tree for some given wikitext
  * from the cache.
  *
  * @param string $text
  * @param int $flags
  * @return PPNode_Hash_Tree|bool
  */
 protected function cacheGetTree($text, $flags)
 {
     $config = RequestContext::getMain()->getConfig();
     $threshold = $config->get('PreprocessorCacheThreshold');
     $length = strlen($text);
     // Only texts between the configured threshold and ~1MB are cached
     if ($threshold === false || $length < $threshold || $length > 1000000.0) {
         return false;
     }
     $prefix = defined('static::CACHE_PREFIX') ? static::CACHE_PREFIX : get_called_class();
     $key = wfMemcKey($prefix, md5($text), $flags);
     $cache = ObjectCache::getInstance($config->get('MainCacheType'));
     $value = $cache->get($key);
     if (!$value) {
         return false;
     }
     // The first 8 characters of the value encode the cache format version
     if (intval(substr($value, 0, 8)) !== static::CACHE_VERSION) {
         return false;
     }
     LoggerFactory::getInstance('Preprocessor')->info("Loaded preprocessor output from cache (key: {$key})");
     return substr($value, 8);
 }
Example #18
0
 /**
  * Perform an HTTP request
  *
  * @param string $method HTTP method. Usually GET/POST
  * @param string $url Full URL to act on. If protocol-relative, will be expanded to an http:// URL
  * @param array $options Options to pass to MWHttpRequest object.
  *	Possible keys for the array:
  *    - timeout             Timeout length in seconds
  *    - connectTimeout      Timeout for connection, in seconds (curl only)
  *    - postData            An array of key-value pairs or a url-encoded form data
  *    - proxy               The proxy to use.
  *                          Otherwise it will use $wgHTTPProxy (if set)
  *                          Otherwise it will use the environment variable "http_proxy" (if set)
  *    - noProxy             Don't use any proxy at all. Takes precedence over proxy value(s).
  *    - sslVerifyHost       Verify hostname against certificate
  *    - sslVerifyCert       Verify SSL certificate
  *    - caInfo              Provide CA information
  *    - maxRedirects        Maximum number of redirects to follow (defaults to 5)
  *    - followRedirects     Whether to follow redirects (defaults to false).
  *		                    Note: this should only be used when the target URL is trusted,
  *		                    to avoid attacks on intranet services accessible by HTTP.
  *    - userAgent           A user agent, if you want to override the default
  *                          MediaWiki/$wgVersion
  * @param string $caller The method making this request, for profiling
  * @return string|bool (bool)false on failure or a string on success
  */
 public static function request($method, $url, $options = array(), $caller = __METHOD__)
 {
     wfDebug("HTTP: {$method}: {$url}\n");
     $options['method'] = strtoupper($method);
     // Fill in 'default' sentinels so MWHttpRequest applies configured timeouts
     if (!isset($options['timeout'])) {
         $options['timeout'] = 'default';
     }
     if (!isset($options['connectTimeout'])) {
         $options['connectTimeout'] = 'default';
     }
     $req = MWHttpRequest::factory($url, $options, $caller);
     $status = $req->execute();
     if ($status->isOK()) {
         return $req->getContent();
     }
     // Log the failure; callers only receive boolean false. (The previously
     // unused $errors local from getErrorsByType() has been removed.)
     $logger = LoggerFactory::getInstance('http');
     $logger->warning($status->getWikiText(), array('caller' => $caller));
     return false;
 }
 /**
  * BeforePageDisplay hook: load the Popups modules when the feature is available.
  *
  * Requires the TextExtracts and PageImages extensions; when Popups is
  * configured as a beta feature, also requires BetaFeatures and user opt-in.
  *
  * @param OutputPage &$out
  * @param Skin &$skin
  * @return bool Always true (hook convention)
  */
 public static function onBeforePageDisplay(OutputPage &$out, Skin &$skin)
 {
     // Will only be loaded if PageImages & TextExtracts extensions are installed.
     $registry = ExtensionRegistry::getInstance();
     if (!$registry->isLoaded('TextExtracts') || !class_exists('ApiQueryPageImages')) {
         LoggerFactory::getInstance('popups')->error('Popups requires the PageImages and TextExtracts extensions.');
         return true;
     }
     // Enable only if the user has turned it on in Beta Preferences, or BetaFeatures is not installed.
     if (self::getConfig()->get('PopupsBetaFeature') === true) {
         if (!class_exists('BetaFeatures')) {
             LoggerFactory::getInstance('popups')->error('PopupsMode cannot be used as a beta feature unless ' . 'the BetaFeatures extension is present.');
             return true;
         }
         if (!BetaFeatures::isFeatureEnabled($skin->getUser(), 'popups')) {
             return true;
         }
     }
     $out->addModules(array('ext.popups', 'schema.Popups'));
     return true;
 }
Example #20
0
 public function execute()
 {
     // Optionally fork into multiple job-runner processes; the parent exits.
     if ($this->hasOption('procs')) {
         $procCount = intval($this->getOption('procs'));
         if ($procCount < 1 || $procCount > 1000) {
             $this->error("Invalid argument to --procs", true);
         } elseif ($procCount != 1) {
             $forker = new ForkController($procCount);
             if ($forker->start() != 'child') {
                 exit(0);
             }
         }
     }
     $jsonOutput = $this->getOption('result') === 'json';
     $keepWaiting = $this->hasOption('wait');
     $runner = new JobRunner(LoggerFactory::getInstance('runJobs'));
     if (!$jsonOutput) {
         $runner->setDebugHandler([$this, 'debugInternal']);
     }
     $type = $this->getOption('type', false);
     $maxJobs = $this->getOption('maxjobs', false);
     $maxTime = $this->getOption('maxtime', false);
     $throttle = !$this->hasOption('nothrottle');
     while (true) {
         $response = $runner->run([
             'type' => $type,
             'maxJobs' => $maxJobs,
             'maxTime' => $maxTime,
             'throttle' => $throttle,
         ]);
         if ($jsonOutput) {
             $this->output(FormatJson::encode($response, true));
         }
         // Stop when not waiting, or when the runner hit a hard limit
         $reached = $response['reached'];
         if (!$keepWaiting || $reached === 'time-limit' || $reached === 'job-limit' || $reached === 'memory-limit') {
             break;
         }
         if ($maxJobs !== false) {
             // Shrink the remaining job budget by what was just run
             $maxJobs -= count($response['jobs']);
         }
         sleep(1);
     }
 }
Example #21
0
 /**
  * @param array $options
  *  - config: Config to fetch configuration from. Defaults to the default 'main' config.
  *  - logger: LoggerInterface to use for logging. Defaults to the 'session' channel.
  *  - store: BagOStuff to store session data in.
  */
 public function __construct($options = array())
 {
     // Config: validate the supplied object, or fall back to 'main'
     if (isset($options['config'])) {
         if (!$options['config'] instanceof Config) {
             throw new \InvalidArgumentException('$options[\'config\'] must be an instance of Config');
         }
         $this->config = $options['config'];
     } else {
         $this->config = \ConfigFactory::getDefaultInstance()->makeConfig('main');
     }
     // Logger: validate the supplied logger, or default to the 'session' channel
     if (isset($options['logger'])) {
         if (!$options['logger'] instanceof LoggerInterface) {
             throw new \InvalidArgumentException('$options[\'logger\'] must be an instance of LoggerInterface');
         }
         $this->setLogger($options['logger']);
     } else {
         $this->setLogger(\MediaWiki\Logger\LoggerFactory::getInstance('session'));
     }
     // Store: validate the supplied store, or build one from the configured type
     if (isset($options['store'])) {
         if (!$options['store'] instanceof BagOStuff) {
             throw new \InvalidArgumentException('$options[\'store\'] must be an instance of BagOStuff');
         }
         $this->store = $options['store'];
     } else {
         $this->store = \ObjectCache::getInstance($this->config->get('SessionCacheType'));
         $this->store->setLogger($this->logger);
     }
     register_shutdown_function(array($this, 'shutdown'));
 }
Example #22
0
 /**
  * Lazily instantiate and return the ResourceLoader for this OutputPage.
  *
  * @return ResourceLoader
  */
 public function getResourceLoader()
 {
     if ($this->mResourceLoader === null) {
         // First use: build with this page's config and the RL log channel
         $this->mResourceLoader = new ResourceLoader(
             $this->getConfig(),
             LoggerFactory::getInstance('resourceloader')
         );
     }
     return $this->mResourceLoader;
 }
Example #23
0
 /**
  * Run jobs of the specified number/type for the specified time
  *
  * The response map has a 'job' field that lists status of each job, including:
  *   - type   : the job type
  *   - status : ok/failed
  *   - error  : any error message string
  *   - time   : the job run time in ms
  * The response map also has:
  *   - backoffs : the (job type => seconds) map of backoff times
  *   - elapsed  : the total time spent running tasks in ms
  *   - reached  : the reason the script finished, one of (none-ready, job-limit, time-limit)
  *
  * This method outputs status information only if a debug handler was set.
  * Any exceptions are caught and logged, but are not reported as output.
  *
  * @param array $options Map of parameters:
  *    - type     : the job type (or false for the default types)
  *    - maxJobs  : maximum number of jobs to run
  *    - maxTime  : maximum time in seconds before stopping
  *    - throttle : whether to respect job backoff configuration
  * @return array Summary response that can easily be JSON serialized
  */
 public function run(array $options)
 {
     global $wgJobClasses, $wgTrxProfilerLimits;
     $response = array('jobs' => array(), 'reached' => 'none-ready');
     $type = isset($options['type']) ? $options['type'] : false;
     $maxJobs = isset($options['maxJobs']) ? $options['maxJobs'] : false;
     $maxTime = isset($options['maxTime']) ? $options['maxTime'] : false;
     // throttle=false means ignore backoff blacklists entirely
     $noThrottle = isset($options['throttle']) && !$options['throttle'];
     // A job type that is not registered in $wgJobClasses cannot be run
     if ($type !== false && !isset($wgJobClasses[$type])) {
         $response['reached'] = 'none-possible';
         return $response;
     }
     // Bail out if in read-only mode
     if (wfReadOnly()) {
         $response['reached'] = 'read-only';
         return $response;
     }
     // Catch huge single updates that lead to slave lag
     $trxProfiler = Profiler::instance()->getTransactionProfiler();
     $trxProfiler->setLogger(LoggerFactory::getInstance('DBPerformance'));
     $trxProfiler->setExpectations($wgTrxProfilerLimits['JobRunner'], __METHOD__);
     // Bail out if there is too much DB lag.
     // This check should not block as we want to try other wiki queues.
     $maxAllowedLag = 3;
     list(, $maxLag) = wfGetLB(wfWikiID())->getMaxLag();
     if ($maxLag >= $maxAllowedLag) {
         $response['reached'] = 'slave-lag-limit';
         return $response;
     }
     $group = JobQueueGroup::singleton();
     // Flush any pending DB writes for sanity
     wfGetLBFactory()->commitAll();
     // Some jobs types should not run until a certain timestamp
     $backoffs = array();
     // map of (type => UNIX expiry)
     $backoffDeltas = array();
     // map of (type => seconds)
     $wait = 'wait';
     // block to read backoffs the first time
     $stats = RequestContext::getMain()->getStats();
     $jobsPopped = 0;
     $timeMsTotal = 0;
     $flags = JobQueueGroup::USE_CACHE;
     $startTime = microtime(true);
     // time since jobs started running
     $checkLagPeriod = 1.0;
     // check slave lag this many seconds
     $lastCheckTime = 1;
     // timestamp of last slave check; initialized to 1 (epoch) so the
     // $timePassed comparison below forces a lag check after the first job
     do {
         // Sync the persistent backoffs with concurrent runners
         $backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
         $blacklist = $noThrottle ? array() : array_keys($backoffs);
         $wait = 'nowait';
         // less important now
         if ($type === false) {
             // No specific type requested: pop from the default queues
             $job = $group->pop(JobQueueGroup::TYPE_DEFAULT, $flags, $blacklist);
         } elseif (in_array($type, $blacklist)) {
             $job = false;
             // requested queue in backoff state
         } else {
             $job = $group->pop($type);
             // job from a single queue
         }
         if ($job) {
             // found a job
             $popTime = time();
             $jType = $job->getType();
             // Back off of certain jobs for a while (for throttling and for errors)
             $ttw = $this->getBackoffTimeToWait($job);
             if ($ttw > 0) {
                 // Always add the delta for other runners in case the time running the
                 // job negated the backoff for each individually but not collectively.
                 $backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
                 $backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
             }
             $msg = $job->toString() . " STARTING";
             $this->logger->debug($msg);
             $this->debugCallback($msg);
             // Run the job...
             $jobStartTime = microtime(true);
             try {
                 ++$jobsPopped;
                 $status = $job->run();
                 $error = $job->getLastError();
                 // Commit the job's own writes, run the deferred updates it
                 // queued, then commit those writes as well
                 $this->commitMasterChanges($job);
                 DeferredUpdates::doUpdates();
                 $this->commitMasterChanges($job);
             } catch (Exception $e) {
                 MWExceptionHandler::rollbackMasterChangesAndLog($e);
                 $status = false;
                 $error = get_class($e) . ': ' . $e->getMessage();
                 MWExceptionHandler::logException($e);
             }
             // Commit all outstanding connections that are in a transaction
             // to get a fresh repeatable read snapshot on every connection.
             // Note that jobs are still responsible for handling slave lag.
             wfGetLBFactory()->commitAll();
             // Clear out title cache data from prior snapshots
             LinkCache::singleton()->clear();
             $timeMs = intval((microtime(true) - $jobStartTime) * 1000);
             $timeMsTotal += $timeMs;
             // Record how long jobs wait before getting popped
             $readyTs = $job->getReadyTimestamp();
             if ($readyTs) {
                 $pickupDelay = $popTime - $readyTs;
                 $stats->timing('jobqueue.pickup_delay.all', 1000 * $pickupDelay);
                 $stats->timing("jobqueue.pickup_delay.{$jType}", 1000 * $pickupDelay);
             }
             // Record root job age for jobs being run
             $root = $job->getRootJobParams();
             if ($root['rootJobTimestamp']) {
                 $age = $popTime - wfTimestamp(TS_UNIX, $root['rootJobTimestamp']);
                 $stats->timing("jobqueue.pickup_root_age.{$jType}", 1000 * $age);
             }
             // Track the execution time for jobs
             $stats->timing("jobqueue.run.{$jType}", $timeMs);
             // Mark the job as done on success or when the job cannot be retried
             if ($status !== false || !$job->allowRetries()) {
                 $group->ack($job);
                 // done
             }
             // Back off of certain jobs for a while (for throttling and for errors).
             // $ttw still holds the throttle value computed before the run; on
             // failure, ~1 in 50 runs bumps it to at least 30 seconds.
             if ($status === false && mt_rand(0, 49) == 0) {
                 $ttw = max($ttw, 30);
                 // too many errors
                 $backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
             }
             if ($status === false) {
                 $msg = $job->toString() . " t={$timeMs} error={$error}";
                 $this->logger->error($msg);
                 $this->debugCallback($msg);
             } else {
                 $msg = $job->toString() . " t={$timeMs} good";
                 $this->logger->info($msg);
                 $this->debugCallback($msg);
             }
             $response['jobs'][] = array('type' => $jType, 'status' => $status === false ? 'failed' : 'ok', 'error' => $error, 'time' => $timeMs);
             // Break out if we hit the job count or wall time limits...
             if ($maxJobs && $jobsPopped >= $maxJobs) {
                 $response['reached'] = 'job-limit';
                 break;
             } elseif ($maxTime && microtime(true) - $startTime > $maxTime) {
                 $response['reached'] = 'time-limit';
                 break;
             }
             // Don't let any of the main DB slaves get backed up.
             // This only waits for so long before exiting and letting
             // other wikis in the farm (on different masters) get a chance.
             $timePassed = microtime(true) - $lastCheckTime;
             if ($timePassed >= $checkLagPeriod || $timePassed < 0) {
                 if (!wfWaitForSlaves($lastCheckTime, false, '*', $maxAllowedLag)) {
                     $response['reached'] = 'slave-lag-limit';
                     break;
                 }
                 $lastCheckTime = microtime(true);
             }
             // Don't let any queue slaves/backups fall behind
             if ($jobsPopped > 0 && $jobsPopped % 100 == 0) {
                 $group->waitForBackups();
             }
             // Bail if near-OOM instead of in a job
             if (!$this->checkMemoryOK()) {
                 $response['reached'] = 'memory-limit';
                 break;
             }
         }
     } while ($job);
     // stop when there are no jobs
     // Sync the persistent backoffs for the next runJobs.php pass
     if ($backoffDeltas) {
         $this->syncBackoffDeltas($backoffs, $backoffDeltas, 'wait');
     }
     $response['backoffs'] = $backoffs;
     $response['elapsed'] = $timeMsTotal;
     return $response;
 }
Example #24
0
 /**
  * Returns an associative array, ID => param value, for all items that match
  * Removes the matched items from the input string (passed by reference)
  *
  * @param string $text Input text; matched items are stripped from it in place
  *
  * @return array Map of magic word ID => parameter value
  */
 public function matchAndRemove(&$text)
 {
     $found = array();
     $regexes = $this->getRegex();
     foreach ($regexes as $regex) {
         if ($regex === '') {
             // Empty entries are placeholders with nothing to match
             continue;
         }
         $matches = array();
         $res = preg_match_all($regex, $text, $matches, PREG_SET_ORDER);
         if ($res === false) {
             // PCRE failure (e.g. backtrack limit hit): log it and move on
             LoggerFactory::getInstance('parser')->warning('preg_match_all returned false', array('code' => preg_last_error(), 'regex' => $regex, 'text' => $text));
         } elseif ($res) {
             foreach ($matches as $m) {
                 list($name, $param) = $this->parseMatch($m);
                 $found[$name] = $param;
             }
         }
         $res = preg_replace($regex, '', $text);
         if ($res === null) {
             // PCRE failure: log it and keep $text unchanged instead of
             // clobbering the caller's string with null (previous behavior)
             LoggerFactory::getInstance('parser')->warning('preg_replace returned null', array('code' => preg_last_error(), 'regex' => $regex, 'text' => $text));
         } else {
             $text = $res;
         }
     }
     return $found;
 }
 /**
  * Get a shared pool for the given configuration.
  *
  * @param array $options
  * $options include:
  *   - connectTimeout : The timeout for new connections, in seconds.
  *                      Optional, default is 1 second.
  *   - readTimeout    : The timeout for operation reads, in seconds.
  *                      Commands like BLPOP can fail if told to wait longer than this.
  *                      Optional, default is 1 second.
  *   - persistent     : Set this to true to allow connections to persist across
  *                      multiple web requests. False by default.
  *   - password       : The authentication password, will be sent to Redis in clear text.
  *                      Optional, if it is unspecified, no AUTH command will be sent.
  *   - serializer     : Set to "php", "igbinary", or "none". Default is "php".
  * @return RedisConnectionPool
  */
 public static function singleton(array $options)
 {
     $options = self::applyDefaultConfig($options);
     // Sort keys so equivalent option sets hash to the same pool ID,
     // avoiding needless pool fragmentation
     ksort($options);
     $id = sha1(serialize($options));
     if (isset(self::$instances[$id])) {
         return self::$instances[$id];
     }
     // First request with this configuration: create and cache a new pool
     self::$instances[$id] = new self($options);
     LoggerFactory::getInstance('redis')->debug("Creating a new " . __CLASS__ . " instance with id {$id}.");
     return self::$instances[$id];
 }
 /**
  * Set up the logging channels and delegate the rest to the parent.
  *
  * @param Connection $conn
  */
 public function __construct(Connection $conn)
 {
     parent::__construct($conn, null, null);
     // Main channel plus a dedicated channel for failed change operations
     $this->log = LoggerFactory::getInstance('CirrusSearch');
     $this->failedLog = LoggerFactory::getInstance('CirrusSearchChangeFailed');
 }
 /**
  * Log an exception that wasn't thrown but made to wrap an error.
  *
  * @since 1.25
  * @param ErrorException $e
  * @param string $channel
  */
 protected static function logError(ErrorException $e, $channel)
 {
     // The set_error_handler callback runs independently of error_reporting,
     // so unwanted errors must be filtered out manually here (e.g. while
     // MediaWiki\suppressWarnings is active).
     $suppressed = (error_reporting() & $e->getSeverity()) === 0;
     if (!$suppressed) {
         LoggerFactory::getInstance($channel)->error(self::getLogMessage($e), self::getLogContext($e));
     }
     // The JSON log receives every error; suppressed ones are flagged there
     $json = self::jsonSerializeException($e, false, FormatJson::ALL_OK);
     if ($json !== false) {
         LoggerFactory::getInstance("{$channel}-json")->error($json, ['private' => true]);
     }
     Hooks::run('LogException', [$e, $suppressed]);
 }
Example #28
0
 /**
  * Do standard deferred updates after page edit.
  * Update links tables, site stats, search index and message cache.
  * Purges pages that include this page if the text was changed here.
  * Every 100th edit, prune the recent changes table.
  *
  * @param Revision $revision
  * @param User $user User object that did the revision
  * @param array $options Array of options, following indexes are used:
  * - changed: boolean, whether the revision changed the content (default true)
  * - created: boolean, whether the revision created the page (default false)
  * - moved: boolean, whether the page was moved (default false)
  * - restored: boolean, whether the page was undeleted (default false)
  * - oldrevision: Revision object for the pre-update revision (default null)
  * - oldcountable: boolean, null, or string 'no-change' (default null):
  *   - boolean: whether the page was counted as an article before that
  *     revision, only used in changed is true and created is false
  *   - null: if created is false, don't update the article count; if created
  *     is true, do update the article count
  *   - 'no-change': don't update the article count, ever
  */
 public function doEditUpdates(Revision $revision, User $user, array $options = [])
 {
     global $wgRCWatchCategoryMembership, $wgContLang;
     // Fill in defaults for any options the caller did not specify
     $options += ['changed' => true, 'created' => false, 'moved' => false, 'restored' => false, 'oldrevision' => null, 'oldcountable' => null];
     $content = $revision->getContent();
     $logger = LoggerFactory::getInstance('SaveParse');
     // See if the parser output before $revision was inserted is still valid;
     // any "vary-*" flag on the prepared output can invalidate it
     $editInfo = false;
     if (!$this->mPreparedEdit) {
         $logger->debug(__METHOD__ . ": No prepared edit...\n");
     } elseif ($this->mPreparedEdit->output->getFlag('vary-revision')) {
         $logger->info(__METHOD__ . ": Prepared edit has vary-revision...\n");
     } elseif ($this->mPreparedEdit->output->getFlag('vary-revision-id') && $this->mPreparedEdit->output->getSpeculativeRevIdUsed() !== $revision->getId()) {
         $logger->info(__METHOD__ . ": Prepared edit has vary-revision-id with wrong ID...\n");
     } elseif ($this->mPreparedEdit->output->getFlag('vary-user') && !$options['changed']) {
         $logger->info(__METHOD__ . ": Prepared edit has vary-user and is null...\n");
     } else {
         wfDebug(__METHOD__ . ": Using prepared edit...\n");
         $editInfo = $this->mPreparedEdit;
     }
     if (!$editInfo) {
         // Parse the text again if needed. Be careful not to do pre-save transform twice:
         // $text is usually already pre-save transformed once. Avoid using the edit stash
         // as any prepared content from there or in doEditContent() was already rejected.
         $editInfo = $this->prepareContentForEdit($content, $revision, $user, null, false);
     }
     // Save it to the parser cache.
     // Make sure the cache time matches page_touched to avoid double parsing.
     ParserCache::singleton()->save($editInfo->output, $this, $editInfo->popts, $revision->getTimestamp(), $editInfo->revid);
     // Update the links tables and other secondary data
     if ($content) {
         $recursive = $options['changed'];
         // bug 50785
         $updates = $content->getSecondaryDataUpdates($this->getTitle(), null, $recursive, $editInfo->output);
         foreach ($updates as $update) {
             if ($update instanceof LinksUpdate) {
                 $update->setRevision($revision);
                 $update->setTriggeringUser($user);
             }
             DeferredUpdates::addUpdate($update);
         }
         if ($wgRCWatchCategoryMembership && $this->getContentHandler()->supportsCategories() === true && ($options['changed'] || $options['created']) && !$options['restored']) {
             // Note: jobs are pushed after deferred updates, so the job should be able to see
             // the recent change entry (also done via deferred updates) and carry over any
             // bot/deletion/IP flags, ect.
             JobQueueGroup::singleton()->lazyPush(new CategoryMembershipChangeJob($this->getTitle(), ['pageId' => $this->getId(), 'revTimestamp' => $revision->getTimestamp()]));
         }
     }
     Hooks::run('ArticleEditUpdates', [&$this, &$editInfo, $options['changed']]);
     if (Hooks::run('ArticleEditUpdatesDeleteFromRecentchanges', [&$this])) {
         // Flush old entries from the `recentchanges` table
         // (probabilistic: roughly every 10th edit schedules a purge job)
         if (mt_rand(0, 9) == 0) {
             JobQueueGroup::singleton()->lazyPush(RecentChangesUpdateJob::newPurgeJob());
         }
     }
     if (!$this->exists()) {
         return;
     }
     $id = $this->getId();
     $title = $this->mTitle->getPrefixedDBkey();
     $shortTitle = $this->mTitle->getDBkey();
     // Compute the deltas passed to SiteStatsUpdate below,
     // per the 'oldcountable' rules documented above
     if ($options['oldcountable'] === 'no-change' || !$options['changed'] && !$options['moved']) {
         $good = 0;
     } elseif ($options['created']) {
         $good = (int) $this->isCountable($editInfo);
     } elseif ($options['oldcountable'] !== null) {
         $good = (int) $this->isCountable($editInfo) - (int) $options['oldcountable'];
     } else {
         $good = 0;
     }
     $edits = $options['changed'] ? 1 : 0;
     $total = $options['created'] ? 1 : 0;
     DeferredUpdates::addUpdate(new SiteStatsUpdate(0, $edits, $good, $total));
     DeferredUpdates::addUpdate(new SearchUpdate($id, $title, $content));
     // If this is another user's talk page, update newtalk.
     // Don't do this if $options['changed'] = false (null-edits) nor if
     // it's a minor edit and the user doesn't want notifications for those.
     if ($options['changed'] && $this->mTitle->getNamespace() == NS_USER_TALK && $shortTitle != $user->getTitleKey() && !($revision->isMinor() && $user->isAllowed('nominornewtalk'))) {
         $recipient = User::newFromName($shortTitle, false);
         if (!$recipient) {
             wfDebug(__METHOD__ . ": invalid username\n");
         } else {
             // Allow extensions to prevent user notification
             // when a new message is added to their talk page
             if (Hooks::run('ArticleEditUpdateNewTalk', [&$this, $recipient])) {
                 if (User::isIP($shortTitle)) {
                     // An anonymous user
                     $recipient->setNewtalk(true, $revision);
                 } elseif ($recipient->isLoggedIn()) {
                     $recipient->setNewtalk(true, $revision);
                 } else {
                     wfDebug(__METHOD__ . ": don't need to notify a nonexistent user\n");
                 }
             }
         }
     }
     if ($this->mTitle->getNamespace() == NS_MEDIAWIKI) {
         // XXX: could skip pseudo-messages like js/css here, based on content model.
         $msgtext = $content ? $content->getWikitextForTransclusion() : null;
         if ($msgtext === false || $msgtext === null) {
             $msgtext = '';
         }
         MessageCache::singleton()->replace($shortTitle, $msgtext);
         if ($wgContLang->hasVariants()) {
             $wgContLang->updateConversionTable($this->mTitle);
         }
     }
     if ($options['created']) {
         self::onArticleCreate($this->mTitle);
     } elseif ($options['changed']) {
         // bug 50785
         self::onArticleEdit($this->mTitle, $revision);
     }
     // Invalidate caches of ResourceLoader modules backed by this wiki page
     // (presumably site/user JS & CSS — confirm against ResourceLoaderWikiModule)
     ResourceLoaderWikiModule::invalidateModuleCache($this->mTitle, $options['oldrevision'], $revision, wfWikiID());
 }
Example #29
0
/**
 * Ship profiling and request-statistics data to the configured sinks.
 *
 * Flushes the current Profiler, forwards buffered stats to a statsd server
 * when 'StatsdServer' is configured, and — unless profiling is stubbed or
 * the 'profileoutput' log group is explicitly disabled — writes a request
 * summary to the 'profileoutput' log channel.
 */
function wfLogProfilingData()
{
    global $wgDebugLogGroups, $wgDebugRawPage;
    $context = RequestContext::getMain();
    $request = $context->getRequest();
    $profiler = Profiler::instance();
    $profiler->setContext($context);
    $profiler->logData();
    $config = $context->getConfig();
    if ($config->get('StatsdServer')) {
        // 'StatsdServer' is "host" or "host:port"; default statsd port is 8125
        $serverParts = explode(':', $config->get('StatsdServer'));
        $host = $serverParts[0];
        $port = isset($serverParts[1]) ? $serverParts[1] : 8125;
        $client = new StatsdClient(new SocketSender($host, $port));
        $client->send($context->getStats()->getBuffer());
    }
    # Profiling must actually be enabled...
    if ($profiler instanceof ProfilerStub) {
        return;
    }
    // Respect an explicit false for the 'profileoutput' log group
    if (isset($wgDebugLogGroups['profileoutput']) && $wgDebugLogGroups['profileoutput'] === false) {
        return;
    }
    if (!$wgDebugRawPage && wfIsDebugRawPage()) {
        return;
    }
    $ctx = array('elapsed' => $request->getElapsedTime());
    // Harvest proxy-related request headers into the log context
    $headerFields = array(
        'forwarded_for' => 'HTTP_X_FORWARDED_FOR',
        'client_ip' => 'HTTP_CLIENT_IP',
        'from' => 'HTTP_FROM',
    );
    foreach ($headerFields as $field => $serverKey) {
        if (!empty($_SERVER[$serverKey])) {
            $ctx[$field] = $_SERVER[$serverKey];
        }
    }
    if (isset($ctx['forwarded_for']) || isset($ctx['client_ip']) || isset($ctx['from'])) {
        // Any of those headers implies an intermediary; record its address
        $ctx['proxy'] = $_SERVER['REMOTE_ADDR'];
    }
    // Don't load $wgUser at this late stage just for statistics purposes
    // @todo FIXME: We can detect some anons even if it is not loaded.
    // See User::getId()
    $user = $context->getUser();
    $ctx['anon'] = $user->isItemLoaded('id') && $user->isAnon();
    // Command line script uses a FauxRequest object which does not have
    // any knowledge about an URL and throw an exception instead.
    try {
        $ctx['url'] = urldecode($request->getRequestURL());
    } catch (Exception $ignored) {
        // no-op
    }
    $ctx['output'] = $profiler->getOutput();
    LoggerFactory::getInstance('profileoutput')->info("Elapsed: {elapsed}; URL: <{url}>\n{output}", $ctx);
}
Example #30
0
            $profName = $fname . '-extensions-' . get_class($func[0]) . '::' . $func[1];
        } else {
            $profName = $fname . '-extensions-' . implode('::', $func);
        }
    } else {
        $profName = $fname . '-extensions-' . strval($func);
    }
    $ps_ext_func = Profiler::instance()->scopedProfileIn($profName);
    call_user_func($func);
    Profiler::instance()->scopedProfileOut($ps_ext_func);
}
// If the session user has a 0 id but a valid name, that means we need to
// autocreate it.
if (!defined('MW_NO_SESSION') && !$wgCommandLineMode) {
    $sessionUser = MediaWiki\Session\SessionManager::getGlobalSession()->getUser();
    if ($sessionUser->getId() === 0 && User::isValidUserName($sessionUser->getName())) {
        // Attempt to auto-create the account for this session and log the outcome
        $ps_autocreate = Profiler::instance()->scopedProfileIn($fname . '-autocreate');
        $res = MediaWiki\Auth\AuthManager::singleton()->autoCreateUser($sessionUser, MediaWiki\Auth\AuthManager::AUTOCREATE_SOURCE_SESSION, true);
        Profiler::instance()->scopedProfileOut($ps_autocreate);
        \MediaWiki\Logger\LoggerFactory::getInstance('authevents')->info('Autocreation attempt', ['event' => 'autocreate', 'status' => $res]);
        unset($res);
    }
    unset($sessionUser);
}
// Schedule a pingback unless running from the command line
if (!$wgCommandLineMode) {
    Pingback::schedulePingback();
}
wfDebug("Fully initialised\n");
$wgFullyInitialised = true;
// Close the profiling sections ($ps_extensions, $ps_setup) — presumably
// opened earlier in this setup script, outside this chunk
Profiler::instance()->scopedProfileOut($ps_extensions);
Profiler::instance()->scopedProfileOut($ps_setup);