Example #1
 /**
  * @param array $updates Array of arrays, each containing two keys, 'primaryKey'
  *  and 'changes'. 'primaryKey' must contain a map of column names to values
  *  sufficient to uniquely identify the row. 'changes' must contain a map of
  *  column names to the update values to apply to the row.
  */
 public function write(array $updates)
 {
     $lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
     $ticket = $lbFactory->getEmptyTransactionTicket(__METHOD__);
     foreach ($updates as $update) {
         $this->db->update($this->table, $update['changes'], $update['primaryKey'], __METHOD__);
     }
     $lbFactory->commitAndWaitForReplication(__METHOD__, $ticket);
 }
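The shape of $updates described in the doc block is easiest to see in a concrete call. A minimal usage sketch, assuming a hypothetical $writer instance of this class and invented column names:

 // Hypothetical usage: two row updates, each identified by its primary key.
 $writer->write([
     [
         'primaryKey' => ['some_id' => 1],
         'changes' => ['some_text' => 'first value'],
     ],
     [
         'primaryKey' => ['some_id' => 2],
         'changes' => ['some_text' => 'second value'],
     ],
 ]);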
 public static function provideSearchOptionsTests()
 {
     $defaultNS = MediaWikiServices::getInstance()->getSearchEngineConfig()->defaultNamespaces();
     $EMPTY_REQUEST = [];
     $NO_USER_PREF = null;
      return [
          [
              $EMPTY_REQUEST, $NO_USER_PREF, 'default', $defaultNS,
              'Bug 33270: No request nor user preferences should give default profile',
          ],
          [
              ['ns5' => 1], $NO_USER_PREF, 'advanced', [5],
              'Web request with specific NS should override user preference',
          ],
          [
              $EMPTY_REQUEST,
              ['searchNs2' => 1, 'searchNs14' => 1] + array_fill_keys(array_map(function ($ns) {
                  return "searchNs{$ns}";
              }, $defaultNS), 0),
              'advanced',
              [2, 14],
              'Bug 33583: search with no option should honor User search preferences'
                  . ' and have all other namespace disabled',
          ],
      ];
 }
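A provider like this is normally consumed through PHPUnit's @dataProvider annotation, with each row mapping one-to-one onto the test method's parameters. A hedged sketch; the parameter names are chosen here only for illustration:

  /**
   * @dataProvider provideSearchOptionsTests
   */
  public function testSearchOptions($requested, $userOptions, $expectedProfile, $expectedNS, $message)
  {
      // Build the search options from $requested and $userOptions, then assert that the
      // resulting profile and namespace list match $expectedProfile and $expectedNS,
      // using $message as the assertion description.
  }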
Example #3
 /**
  * Do the import.
  */
 public function execute()
 {
     $file = $this->getArg(0);
     $siteStore = \MediaWiki\MediaWikiServices::getInstance()->getSiteStore();
     $importer = new SiteImporter($siteStore);
     $importer->setExceptionCallback([$this, 'reportException']);
     $importer->importFromFile($file);
     $this->output("Done.\n");
 }
 public function testStuff()
 {
     $user = self::$users[__CLASS__]->getUser();
     $page = WikiPage::factory(Title::newFromText('UTPage'));
     $user->addWatch($page->getTitle());
     $result = $this->doApiRequestWithToken(['action' => 'setnotificationtimestamp', 'timestamp' => '20160101020202', 'pageids' => $page->getId()], null, $user);
     $this->assertEquals(['batchcomplete' => true, 'setnotificationtimestamp' => [['ns' => 0, 'title' => 'UTPage', 'notificationtimestamp' => '2016-01-01T02:02:02Z']]], $result[0]);
     $watchedItemStore = MediaWikiServices::getInstance()->getWatchedItemStore();
     $this->assertEquals($watchedItemStore->getNotificationTimestampsBatch($user, [$page->getTitle()]), [['UTPage' => '20160101020202']]);
 }
 public function __construct()
 {
     // We use a try/catch because we don't want to fail here
     // if $wgObjectCaches is not configured properly for APC setup
     try {
         $this->cache = MediaWikiServices::getInstance()->getLocalServerObjectCache();
     } catch (MWException $e) {
         $this->cache = new EmptyBagOStuff();
     }
 }
 public function testGetNotificationTimestamp_falseOnNotAllowed()
 {
     $user = $this->getUser();
     $title = Title::newFromText('WatchedItemIntegrationTestPage');
     WatchedItem::fromUserTitle($user, $title)->addWatch();
     MediaWikiServices::getInstance()->getWatchedItemStore()->resetNotificationTimestamp($user, $title);
     $this->assertEquals(null, WatchedItem::fromUserTitle($user, $title)->getNotificationTimestamp());
     $user->mRights = [];
     $this->assertFalse(WatchedItem::fromUserTitle($user, $title)->getNotificationTimestamp());
 }
 protected function tearDown()
 {
     global $wgContLang;
     // Reset namespace cache
     MWNamespace::getCanonicalNamespaces(true);
     $wgContLang->resetNamespaces();
     // And LinkCache
     MediaWikiServices::getInstance()->resetServiceForTesting('LinkCache');
     parent::tearDown();
 }
Example #8
 /**
  * Returns the global SiteStore instance. This is a relic of the first implementation
  * of SiteStore, and is kept around for compatibility.
  *
  * @note This does not return an instance of SiteSQLStore!
  *
  * @since 1.21
  * @deprecated since 1.27 Use MediaWikiServices::getSiteStore() or MediaWikiServices::getSiteLookup()
  *             instead.
  *
  * @param null $sitesTable IGNORED
  * @param null $cache IGNORED
  *
  * @return SiteStore
  */
 public static function newInstance($sitesTable = null, BagOStuff $cache = null)
 {
     if ($sitesTable !== null) {
         throw new InvalidArgumentException(__METHOD__ . ': $sitesTable parameter is unused and must be null');
     }
      // NOTE: We silently ignore $cache for now, since some existing callers
      // specify it. If we break compatibility with them, we could just as
      // well remove this class.
     return \MediaWiki\MediaWikiServices::getInstance()->getSiteStore();
 }
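As the @deprecated tag says, new code should ask the service container directly instead of going through this wrapper. A minimal before/after sketch; naming the legacy class SiteSQLStore is an assumption based on the @note above:

 // Legacy style (routes through the compatibility wrapper above; assumed class name):
 $siteStore = SiteSQLStore::newInstance();
 // Preferred since 1.27: fetch the store, or a read-only lookup, from the services.
 $siteStore = \MediaWiki\MediaWikiServices::getInstance()->getSiteStore();
 $siteLookup = \MediaWiki\MediaWikiServices::getInstance()->getSiteLookup();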
Example #9
 /**
  * @param array $lbConf Config for LBFactory::__construct()
  * @param Config $mainConfig Main config object from MediaWikiServices
  * @return array
  */
 public static function applyDefaultConfig(array $lbConf, Config $mainConfig)
 {
     global $wgCommandLineMode;
      $lbConf += [
          'localDomain' => new DatabaseDomain($mainConfig->get('DBname'), null, $mainConfig->get('DBprefix')),
          'profiler' => Profiler::instance(),
          'trxProfiler' => Profiler::instance()->getTransactionProfiler(),
          'replLogger' => LoggerFactory::getInstance('DBReplication'),
          'queryLogger' => LoggerFactory::getInstance('DBQuery'),
          'connLogger' => LoggerFactory::getInstance('DBConnection'),
          'perfLogger' => LoggerFactory::getInstance('DBPerformance'),
          'errorLogger' => [MWExceptionHandler::class, 'logException'],
          'cliMode' => $wgCommandLineMode,
          'hostname' => wfHostname(),
          'readOnlyReason' => wfConfiguredReadOnlyReason(),
      ];
     if ($lbConf['class'] === 'LBFactorySimple') {
         if (isset($lbConf['servers'])) {
             // Server array is already explicitly configured; leave alone
         } elseif (is_array($mainConfig->get('DBservers'))) {
             foreach ($mainConfig->get('DBservers') as $i => $server) {
                 if ($server['type'] === 'sqlite') {
                     $server += ['dbDirectory' => $mainConfig->get('SQLiteDataDir')];
                 } elseif ($server['type'] === 'postgres') {
                     $server += ['port' => $mainConfig->get('DBport')];
                 }
                 $lbConf['servers'][$i] = $server + ['schema' => $mainConfig->get('DBmwschema'), 'tablePrefix' => $mainConfig->get('DBprefix'), 'flags' => DBO_DEFAULT, 'sqlMode' => $mainConfig->get('SQLMode'), 'utf8Mode' => $mainConfig->get('DBmysql5')];
             }
         } else {
             $flags = DBO_DEFAULT;
             $flags |= $mainConfig->get('DebugDumpSql') ? DBO_DEBUG : 0;
             $flags |= $mainConfig->get('DBssl') ? DBO_SSL : 0;
             $flags |= $mainConfig->get('DBcompress') ? DBO_COMPRESS : 0;
             $server = ['host' => $mainConfig->get('DBserver'), 'user' => $mainConfig->get('DBuser'), 'password' => $mainConfig->get('DBpassword'), 'dbname' => $mainConfig->get('DBname'), 'schema' => $mainConfig->get('DBmwschema'), 'tablePrefix' => $mainConfig->get('DBprefix'), 'type' => $mainConfig->get('DBtype'), 'load' => 1, 'flags' => $flags, 'sqlMode' => $mainConfig->get('SQLMode'), 'utf8Mode' => $mainConfig->get('DBmysql5')];
             if ($server['type'] === 'sqlite') {
                 $server['dbDirectory'] = $mainConfig->get('SQLiteDataDir');
             } elseif ($server['type'] === 'postgres') {
                 $server['port'] = $mainConfig->get('DBport');
             }
             $lbConf['servers'] = [$server];
         }
         if (!isset($lbConf['externalClusters'])) {
             $lbConf['externalClusters'] = $mainConfig->get('ExternalServers');
         }
     } elseif ($lbConf['class'] === 'LBFactoryMulti') {
         if (isset($lbConf['serverTemplate'])) {
             $lbConf['serverTemplate']['schema'] = $mainConfig->get('DBmwschema');
             $lbConf['serverTemplate']['sqlMode'] = $mainConfig->get('SQLMode');
             $lbConf['serverTemplate']['utf8Mode'] = $mainConfig->get('DBmysql5');
         }
     }
      // Use APC/memcached-style caching, but avoid loops with CACHE_DB (T141804)
     $sCache = MediaWikiServices::getInstance()->getLocalServerObjectCache();
     if ($sCache->getQoS($sCache::ATTR_EMULATION) > $sCache::QOS_EMULATION_SQL) {
         $lbConf['srvCache'] = $sCache;
     }
     $cCache = ObjectCache::getLocalClusterInstance();
     if ($cCache->getQoS($cCache::ATTR_EMULATION) > $cCache::QOS_EMULATION_SQL) {
         $lbConf['memCache'] = $cCache;
     }
     $wCache = MediaWikiServices::getInstance()->getMainWANObjectCache();
     if ($wCache->getQoS($wCache::ATTR_EMULATION) > $wCache::QOS_EMULATION_SQL) {
         $lbConf['wanCache'] = $wCache;
     }
     return $lbConf;
 }
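A sketch of how this helper might be invoked from service wiring; the MWLBFactory class name and the 'LBFactoryConf' configuration key are assumptions about the surrounding code, not taken from the snippet itself:

 // Assumed call site: merge site configuration into the raw LBFactory configuration array.
 $mainConfig = MediaWikiServices::getInstance()->getMainConfig();
 $lbConf = MWLBFactory::applyDefaultConfig($mainConfig->get('LBFactoryConf'), $mainConfig);
 // $lbConf now carries loggers, profilers, caches and a populated 'servers' list,
 // ready to be handed to the configured LBFactory class.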
 /**
  * Driver function that handles updating assessment data in database
  * @param Title $titleObj Title object of the subject page
  * @param array $assessmentData Data for all assessments compiled
  */
 public static function doUpdates($titleObj, $assessmentData)
 {
     global $wgUpdateRowsPerQuery;
     $factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
     $ticket = $factory->getEmptyTransactionTicket(__METHOD__);
     $pageId = $titleObj->getArticleID();
     $revisionId = $titleObj->getLatestRevID();
      // Compile a list of projects so we can work out which ones should be deleted afterwards
     $projects = array();
     foreach ($assessmentData as $parserData) {
         // For each project, get the corresponding ID from page_assessments_projects table
         $projectId = self::getProjectId($parserData[0]);
         if ($projectId === false) {
             $projectId = self::insertProject($parserData[0]);
         }
         $projects[$parserData[0]] = $projectId;
     }
     $projectsInDb = self::getAllProjects($pageId, self::READ_LATEST);
     $toInsert = array_diff($projects, $projectsInDb);
     $toDelete = array_diff($projectsInDb, $projects);
     $toUpdate = array_intersect($projects, $projectsInDb);
     $i = 0;
     // Add and update records to the database
     foreach ($assessmentData as $parserData) {
         $projectId = $projects[$parserData[0]];
         if ($projectId) {
             $class = $parserData[1];
             $importance = $parserData[2];
             $values = array('pa_page_id' => $pageId, 'pa_project_id' => $projectId, 'pa_class' => $class, 'pa_importance' => $importance, 'pa_page_revision' => $revisionId);
             if (in_array($projectId, $toInsert)) {
                 self::insertRecord($values);
             } elseif (in_array($projectId, $toUpdate)) {
                 self::updateRecord($values);
             }
             // Check for database lag if there's a huge number of assessments
             if ($i > 0 && $i % $wgUpdateRowsPerQuery == 0) {
                 $factory->commitAndWaitForReplication(__METHOD__, $ticket);
             }
             $i++;
         }
     }
     // Delete records from the database
     foreach ($toDelete as $project) {
         $values = array('pa_page_id' => $pageId, 'pa_project_id' => $project);
         self::deleteRecord($values);
         // Check for database lag if there's a huge number of deleted assessments
         if ($i > 0 && $i % $wgUpdateRowsPerQuery == 0) {
             $factory->commitAndWaitForReplication(__METHOD__, $ticket);
         }
         $i++;
     }
     return;
 }
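The $assessmentData rows are read positionally ($parserData[0] = project name, [1] = class, [2] = importance), so a call could look like the sketch below. The PageAssessmentsBody class name and the sample ratings are illustrative assumptions:

 // Hypothetical call: store two assessments parsed from a page.
 $titleObj = Title::newFromText('Example article');
 $assessmentData = [
     ['Military history', 'B', 'High'],   // [project, class, importance]
     ['Biography', 'Start', 'Low'],
 ];
 PageAssessmentsBody::doUpdates($titleObj, $assessmentData);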
Example #11
 /**
  * ChangesList constructor
  *
  * @param Skin|IContextSource $obj
  */
 public function __construct($obj)
 {
     if ($obj instanceof IContextSource) {
         $this->setContext($obj);
         $this->skin = $obj->getSkin();
     } else {
         $this->setContext($obj->getContext());
         $this->skin = $obj;
     }
     $this->preCacheMessages();
     $this->watchMsgCache = new HashBagOStuff(['maxKeys' => 50]);
     $this->linkRenderer = MediaWikiServices::getInstance()->getLinkRenderer();
 }
Example #12
 /**
  * Initialize from a Title and if possible initializes a corresponding
  * Revision and File.
  *
  * @param Title $title
  */
 protected function initFromTitle($title)
 {
     $this->mTitle = $title;
     if (!is_null($this->mTitle)) {
         $id = false;
         Hooks::run('SearchResultInitFromTitle', [$title, &$id]);
         $this->mRevision = Revision::newFromTitle($this->mTitle, $id, Revision::READ_NORMAL);
         if ($this->mTitle->getNamespace() === NS_FILE) {
             $this->mImage = wfFindFile($this->mTitle);
         }
     }
     $this->searchEngine = MediaWikiServices::getInstance()->newSearchEngine();
 }
 protected function setUp()
 {
     parent::setUp();
     if (!$this->isWikitextNS(NS_MAIN)) {
         $this->markTestSkipped('Main namespace does not support wikitext.');
     }
      // Prevent special pages from extensions from interfering with the tests
     $this->setMwGlobals(['wgSpecialPages' => [], 'wgHooks' => []]);
     $this->search = MediaWikiServices::getInstance()->newSearchEngine();
     $this->search->setNamespaces([]);
     $this->originalHandlers = TestingAccessWrapper::newFromClass('Hooks')->handlers;
     TestingAccessWrapper::newFromClass('Hooks')->handlers = [];
     SpecialPageFactory::resetList();
 }
 public function run()
 {
     $lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
     $lb = $lbFactory->getMainLB();
     $dbw = $lb->getConnection(DB_MASTER);
     $this->ticket = $lbFactory->getEmptyTransactionTicket(__METHOD__);
     $page = WikiPage::newFromID($this->params['pageId'], WikiPage::READ_LATEST);
     if (!$page) {
         $this->setLastError("Could not find page #{$this->params['pageId']}");
          return false; // the page may have been deleted
     }
     // Use a named lock so that jobs for this page see each others' changes
     $lockKey = "CategoryMembershipUpdates:{$page->getId()}";
     $scopedLock = $dbw->getScopedLockAndFlush($lockKey, __METHOD__, 3);
     if (!$scopedLock) {
         $this->setLastError("Could not acquire lock '{$lockKey}'");
         return false;
     }
     $dbr = $lb->getConnection(DB_REPLICA, ['recentchanges']);
     // Wait till the replica DB is caught up so that jobs for this page see each others' changes
     if (!$lb->safeWaitForMasterPos($dbr)) {
         $this->setLastError("Timed out while waiting for replica DB to catch up");
         return false;
     }
     // Clear any stale REPEATABLE-READ snapshot
     $dbr->flushSnapshot(__METHOD__);
     $cutoffUnix = wfTimestamp(TS_UNIX, $this->params['revTimestamp']);
     // Using ENQUEUE_FUDGE_SEC handles jobs inserted out of revision order due to the delay
     // between COMMIT and actual enqueueing of the CategoryMembershipChangeJob job.
     $cutoffUnix -= self::ENQUEUE_FUDGE_SEC;
     // Get the newest revision that has a SRC_CATEGORIZE row...
      $row = $dbr->selectRow(
          ['revision', 'recentchanges'],
          ['rev_timestamp', 'rev_id'],
          [
              'rev_page' => $page->getId(),
              'rev_timestamp >= ' . $dbr->addQuotes($dbr->timestamp($cutoffUnix)),
          ],
          __METHOD__,
          ['ORDER BY' => 'rev_timestamp DESC, rev_id DESC'],
          ['recentchanges' => ['INNER JOIN', [
              'rc_this_oldid = rev_id',
              'rc_source' => RecentChange::SRC_CATEGORIZE,
              'rc_cur_id = rev_page',
              'rc_timestamp >= rev_timestamp',
          ]]]
      );
     // Only consider revisions newer than any such revision
     if ($row) {
         $cutoffUnix = wfTimestamp(TS_UNIX, $row->rev_timestamp);
         $lastRevId = (int) $row->rev_id;
     } else {
         $lastRevId = 0;
     }
     // Find revisions to this page made around and after this revision which lack category
      // notifications in recent changes. This lets jobs pick up where the last one left off.
     $encCutoff = $dbr->addQuotes($dbr->timestamp($cutoffUnix));
     $res = $dbr->select('revision', Revision::selectFields(), ['rev_page' => $page->getId(), "rev_timestamp > {$encCutoff}" . " OR (rev_timestamp = {$encCutoff} AND rev_id > {$lastRevId})"], __METHOD__, ['ORDER BY' => 'rev_timestamp ASC, rev_id ASC']);
     // Apply all category updates in revision timestamp order
     foreach ($res as $row) {
         $this->notifyUpdatesForRevision($lbFactory, $page, Revision::newFromRow($row));
     }
     return true;
 }
 /**
  * If there are any open database transactions, roll them back and log
  * the stack trace of the exception that should have been caught so the
  * transaction could be aborted properly.
  *
  * @since 1.23
  * @param Exception|Throwable $e
  */
 public static function rollbackMasterChangesAndLog($e)
 {
     $services = MediaWikiServices::getInstance();
     if ($services->isServiceDisabled('DBLoadBalancerFactory')) {
          return; // T147599
     }
     $lbFactory = $services->getDBLoadBalancerFactory();
     if ($lbFactory->hasMasterChanges()) {
         $logger = LoggerFactory::getInstance('Bug56269');
          $logger->warning('Exception thrown with an uncommitted database transaction: ' . self::getLogMessage($e), self::getLogContext($e));
     }
     $lbFactory->rollbackMasterChanges(__METHOD__);
 }
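A sketch of the typical call pattern around a block of master writes; the MWExceptionHandler class name is inferred from the error logger wiring in Example #9, and the try/catch framing is illustrative:

 $lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
 try {
     // ... perform writes on the master connection here ...
     $lbFactory->commitMasterChanges(__METHOD__);
 } catch (Exception $e) {
     // Roll back any open transaction and log the offending exception before rethrowing.
     MWExceptionHandler::rollbackMasterChangesAndLog($e);
     throw $e;
 }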
 /**
  * @param ApiPageSet $resultPageSet
  */
 private function run($resultPageSet = null)
 {
     $params = $this->extractRequestParams();
     $search = $params['search'];
     $limit = $params['limit'];
     $namespaces = $params['namespace'];
     $offset = $params['offset'];
     $searchEngine = MediaWikiServices::getInstance()->newSearchEngine();
     $searchEngine->setLimitOffset($limit + 1, $offset);
     $searchEngine->setNamespaces($namespaces);
     $titles = $searchEngine->extractTitles($searchEngine->completionSearchWithVariants($search));
     if ($resultPageSet) {
         $resultPageSet->setRedirectMergePolicy(function (array $current, array $new) {
             if (!isset($current['index']) || $new['index'] < $current['index']) {
                 $current['index'] = $new['index'];
             }
             return $current;
         });
         if (count($titles) > $limit) {
             $this->setContinueEnumParameter('offset', $offset + $params['limit']);
             array_pop($titles);
         }
         $resultPageSet->populateFromTitles($titles);
         foreach ($titles as $index => $title) {
             $resultPageSet->setGeneratorData($title, ['index' => $index + $offset + 1]);
         }
     } else {
         $result = $this->getResult();
         $count = 0;
         foreach ($titles as $title) {
             if (++$count > $limit) {
                 $this->setContinueEnumParameter('offset', $offset + $params['limit']);
                 break;
             }
             $vals = ['ns' => intval($title->getNamespace()), 'title' => $title->getPrefixedText()];
             if ($title->isSpecialPage()) {
                 $vals['special'] = true;
             } else {
                 $vals['pageid'] = intval($title->getArticleID());
             }
             $fit = $result->addValue(['query', $this->getModuleName()], null, $vals);
             if (!$fit) {
                 $this->setContinueEnumParameter('offset', $offset + $count - 1);
                 break;
             }
         }
         $result->addIndexedTagName(['query', $this->getModuleName()], $this->getModulePrefix());
     }
 }
Example #17
 /**
  * Do the actual work. All child classes will need to implement this
  */
 public function execute()
 {
     $file = $this->getArg(0);
     if ($file === 'php://output' || $file === 'php://stdout') {
         $this->mQuiet = true;
     }
     $handle = fopen($file, 'w');
     if (!$handle) {
         $this->error("Failed to open {$file} for writing.\n", 1);
     }
     $exporter = new SiteExporter($handle);
     $siteLookup = \MediaWiki\MediaWikiServices::getInstance()->getSiteLookup();
     $exporter->exportSites($siteLookup->getSites());
     fclose($handle);
     $this->output("Exported sites to " . realpath($file) . ".\n");
 }
Example #18
 function execute()
 {
     if ($this->hasOption('tpl-time')) {
         $this->templateTimestamp = wfTimestamp(TS_MW, strtotime($this->getOption('tpl-time')));
         Hooks::register('BeforeParserFetchTemplateAndtitle', [$this, 'onFetchTemplate']);
     }
     $this->clearLinkCache = $this->hasOption('reset-linkcache');
     // Set as a member variable to avoid function calls when we're timing the parse
     $this->linkCache = MediaWikiServices::getInstance()->getLinkCache();
     $title = Title::newFromText($this->getArg());
     if (!$title) {
         $this->error("Invalid title");
         exit(1);
     }
     if ($this->hasOption('page-time')) {
         $pageTimestamp = wfTimestamp(TS_MW, strtotime($this->getOption('page-time')));
         $id = $this->getRevIdForTime($title, $pageTimestamp);
         if (!$id) {
             $this->error("The page did not exist at that time");
             exit(1);
         }
         $revision = Revision::newFromId($id);
     } else {
         $revision = Revision::newFromTitle($title);
     }
     if (!$revision) {
         $this->error("Unable to load revision, incorrect title?");
         exit(1);
     }
     $warmup = $this->getOption('warmup', 1);
     for ($i = 0; $i < $warmup; $i++) {
         $this->runParser($revision);
     }
     $loops = $this->getOption('loops', 1);
     if ($loops < 1) {
         $this->error('Invalid number of loops specified', true);
     }
     $startUsage = getrusage();
     $startTime = microtime(true);
     for ($i = 0; $i < $loops; $i++) {
         $this->runParser($revision);
     }
     $endUsage = getrusage();
     $endTime = microtime(true);
     printf("CPU time = %.3f s, wall clock time = %.3f s\n", ($endUsage['ru_utime.tv_sec'] + $endUsage['ru_utime.tv_usec'] * 1.0E-6 - $startUsage['ru_utime.tv_sec'] - $startUsage['ru_utime.tv_usec'] * 1.0E-6) / $loops, ($endTime - $startTime) / $loops);
 }
Example #19
 /**
  * Creates a new instance of MediaWikiServices and sets it as the global default
  * instance. getInstance() will return a different MediaWikiServices object
  * after every call to resetGlobalInstance().
  *
  * @since 1.28
  *
  * @warning This should not be used during normal operation. It is intended for use
  * when the configuration has changed significantly since bootstrap time, e.g.
  * during the installation process or during testing.
  *
  * @warning Calling resetGlobalInstance() may leave the application in an inconsistent
  * state. Calling this is only safe under the ASSUMPTION that NO REFERENCE to
  * any of the services managed by MediaWikiServices exist. If any service objects
  * managed by the old MediaWikiServices instance remain in use, they may INTERFERE
  * with the operation of the services managed by the new MediaWikiServices.
  * Operating with a mix of services created by the old and the new
  * MediaWikiServices instance may lead to INCONSISTENCIES and even DATA LOSS!
  * Any class implementing LAZY LOADING is especially prone to this problem,
  * since instances would typically retain a reference to a storage layer service.
  *
  * @see forceGlobalInstance()
  * @see resetGlobalInstance()
  * @see resetBetweenTest()
  *
  * @param Config|null $bootstrapConfig The Config object to be registered as the
  *        'BootstrapConfig' service. This has to contain at least the information
  *        needed to set up the 'ConfigFactory' service. If not given, the bootstrap
  *        config of the old instance of MediaWikiServices will be re-used. If there
  *        was no previous instance, a new GlobalVarConfig object will be used to
  *        bootstrap the services.
  *
  * @param string $quick Set this to "quick" to allow expensive resources to be re-used.
  * See SalvageableService for details.
  *
  * @throws MWException If called after MW_SERVICE_BOOTSTRAP_COMPLETE has been defined in
  *         Setup.php (unless MW_PHPUNIT_TEST or MEDIAWIKI_INSTALL or RUN_MAINTENANCE_IF_MAIN
  *          is defined).
  */
 public static function resetGlobalInstance(Config $bootstrapConfig = null, $quick = '')
 {
     if (self::$instance === null) {
         // no global instance yet, nothing to reset
         return;
     }
     self::failIfResetNotAllowed(__METHOD__);
     if ($bootstrapConfig === null) {
         $bootstrapConfig = self::$instance->getBootstrapConfig();
     }
     $oldInstance = self::$instance;
     self::$instance = self::newInstance($bootstrapConfig, 'load');
     self::$instance->importWiring($oldInstance, ['BootstrapConfig']);
     if ($quick === 'quick') {
         self::$instance->salvage($oldInstance);
     } else {
         $oldInstance->destroy();
     }
 }
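In practice this is called from installer or test code right after the global configuration has changed, so that freshly constructed services pick up the new values. A minimal sketch:

 // Change a global configuration value...
 $GLOBALS['wgLanguageCode'] = 'de';
 // ...then rebuild the service container so newly constructed services see it.
 MediaWikiServices::resetGlobalInstance();
 // Passing 'quick' instead salvages re-usable services from the old instance:
 // MediaWikiServices::resetGlobalInstance(null, 'quick');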
Example #20
 public function testGetLinkClasses()
 {
     $wanCache = ObjectCache::getMainWANInstance();
     $titleFormatter = MediaWikiServices::getInstance()->getTitleFormatter();
     $linkCache = new LinkCache($titleFormatter, $wanCache);
     $foobarTitle = new TitleValue(NS_MAIN, 'FooBar');
     $redirectTitle = new TitleValue(NS_MAIN, 'Redirect');
     $userTitle = new TitleValue(NS_USER, 'Someuser');
     $linkCache->addGoodLinkObj(1, $foobarTitle, 10, 0);
     $linkCache->addGoodLinkObj(2, $redirectTitle, 10, 1);
     $linkCache->addGoodLinkObj(3, $userTitle, 10, 0);
     $linkRenderer = new LinkRenderer($titleFormatter, $linkCache);
     $linkRenderer->setStubThreshold(0);
     $this->assertEquals('', $linkRenderer->getLinkClasses($foobarTitle));
     $linkRenderer->setStubThreshold(20);
     $this->assertEquals('stub', $linkRenderer->getLinkClasses($foobarTitle));
     $linkRenderer->setStubThreshold(0);
     $this->assertEquals('mw-redirect', $linkRenderer->getLinkClasses($redirectTitle));
     $linkRenderer->setStubThreshold(20);
     $this->assertEquals('', $linkRenderer->getLinkClasses($userTitle));
 }
 public function execute()
 {
     $username = $this->getOption('user');
     $file = $this->getOption('file');
     if ($username === null && $file === null) {
         $this->error('Either --user or --file is required', 1);
     } elseif ($username !== null && $file !== null) {
         $this->error('Cannot use both --user and --file', 1);
     }
     if ($username !== null) {
         $usernames = [$username];
     } else {
         $usernames = is_readable($file) ? file($file, FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES) : false;
         if ($usernames === false) {
             $this->error("Could not open {$file}", 2);
         }
     }
     $i = 0;
     $lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
     $sessionManager = SessionManager::singleton();
     foreach ($usernames as $username) {
         $i++;
         $user = User::newFromName($username);
         try {
             $sessionManager->invalidateSessionsForUser($user);
             if ($user->getId()) {
                 $this->output("Invalidated sessions for user {$username}\n");
             } else {
                 # session invalidation might still work if there is a central identity provider
                 $this->output("Could not find user {$username}, tried to invalidate anyway\n");
             }
         } catch (Exception $e) {
             $this->output("Failed to invalidate sessions for user {$username} | " . str_replace(["\r", "\n"], ' ', $e->getMessage()) . "\n");
         }
          if ($i % $this->mBatchSize === 0) {
              // Wait for replicas to catch up once per batch
              $lbFactory->waitForReplication();
          }
     }
 }
Example #22
 /**
  * @param string $wikiID
  * @return WikiReference|null WikiReference object or null if the wiki was not found
  */
 private static function getWikiWikiReferenceFromSites($wikiID)
 {
     $siteLookup = \MediaWiki\MediaWikiServices::getInstance()->getSiteLookup();
     $site = $siteLookup->getSite($wikiID);
     if (!$site instanceof MediaWikiSite) {
         // Abort if not a MediaWikiSite, as this is about Wikis
         return null;
     }
     $urlParts = wfParseUrl($site->getPageUrl());
     if ($urlParts === false || !isset($urlParts['path']) || !isset($urlParts['host'])) {
         // We can't create a meaningful WikiReference without URLs
         return null;
     }
     // XXX: Check whether path contains a $1?
     $path = $urlParts['path'];
     if (isset($urlParts['query'])) {
         $path .= '?' . $urlParts['query'];
     }
     $canonicalServer = isset($urlParts['scheme']) ? $urlParts['scheme'] : 'http';
     $canonicalServer .= '://' . $urlParts['host'];
     return new WikiReference($canonicalServer, $path);
 }
Example #23
 function run()
 {
     if (is_null($this->title)) {
         $this->setLastError("deleteLinks: Invalid title");
         return false;
     }
     $pageId = $this->params['pageId'];
     // Serialize links updates by page ID so they see each others' changes
     $scopedLock = LinksUpdate::acquirePageLock(wfGetDB(DB_MASTER), $pageId, 'job');
     if (WikiPage::newFromID($pageId, WikiPage::READ_LATEST)) {
         // The page was restored somehow or something went wrong
         $this->setLastError("deleteLinks: Page #{$pageId} exists");
         return false;
     }
     $factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
     $timestamp = isset($this->params['timestamp']) ? $this->params['timestamp'] : null;
      $page = WikiPage::factory($this->title); // title as it was when the page was deleted
     $update = new LinksDeletionUpdate($page, $pageId, $timestamp);
     $update->setTransactionTicket($factory->getEmptyTransactionTicket(__METHOD__));
     $update->doUpdate();
     return true;
 }
Example #24
 /**
  * Invalidate the cache of a list of pages from a single namespace.
  * This is intended for use by subclasses.
  *
  * @param IDatabase $dbw
  * @param int $namespace Namespace number
  * @param array $dbkeys
  */
 public static function invalidatePages(IDatabase $dbw, $namespace, array $dbkeys)
 {
     if ($dbkeys === []) {
         return;
     }
     $dbw->onTransactionIdle(function () use($dbw, $namespace, $dbkeys) {
         $services = MediaWikiServices::getInstance();
         $lbFactory = $services->getDBLoadBalancerFactory();
         // Determine which pages need to be updated.
         // This is necessary to prevent the job queue from smashing the DB with
         // large numbers of concurrent invalidations of the same page.
         $now = $dbw->timestamp();
         $ids = $dbw->selectFieldValues('page', 'page_id', ['page_namespace' => $namespace, 'page_title' => $dbkeys, 'page_touched < ' . $dbw->addQuotes($now)], __METHOD__);
         if (!$ids) {
             return;
         }
         $batchSize = $services->getMainConfig()->get('UpdateRowsPerQuery');
         $ticket = $lbFactory->getEmptyTransactionTicket(__METHOD__);
         foreach (array_chunk($ids, $batchSize) as $idBatch) {
             $dbw->update('page', ['page_touched' => $now], ['page_id' => $idBatch, 'page_touched < ' . $dbw->addQuotes($now)], __METHOD__);
             $lbFactory->commitAndWaitForReplication(__METHOD__, $ticket);
         }
     }, __METHOD__);
 }
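A hedged sketch of how a subclass might use this helper; the namespace and page titles are invented for illustration:

  // Hypothetical subclass method: mark a batch of template pages as touched.
  protected function purgeTemplatePages()
  {
      $dbw = wfGetDB(DB_MASTER);
      self::invalidatePages($dbw, NS_TEMPLATE, ['Infobox_person', 'Citation_needed']);
  }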
Example #25
 /**
  * Get the namespace display name in the preferred variant.
  *
  * @param int $index Namespace id
  * @param string|null $variant Variant code or null for preferred variant
  * @return string Namespace name for display
  */
 public function convertNamespace($index, $variant = null)
 {
     if ($index === NS_MAIN) {
         return '';
     }
     if ($variant === null) {
         $variant = $this->getPreferredVariant();
     }
     $cache = MediaWikiServices::getInstance()->getLocalServerObjectCache();
     $key = $cache->makeKey('languageconverter', 'namespace-text', $index, $variant);
     $nsVariantText = $cache->get($key);
     if ($nsVariantText !== false) {
         return $nsVariantText;
     }
     // First check if a message gives a converted name in the target variant.
     $nsConvMsg = wfMessage('conversion-ns' . $index)->inLanguage($variant);
     if ($nsConvMsg->exists()) {
         $nsVariantText = $nsConvMsg->plain();
     }
     // Then check if a message gives a converted name in content language
     // which needs extra translation to the target variant.
     if ($nsVariantText === false) {
         $nsConvMsg = wfMessage('conversion-ns' . $index)->inContentLanguage();
         if ($nsConvMsg->exists()) {
             $nsVariantText = $this->translate($nsConvMsg->plain(), $variant);
         }
     }
     if ($nsVariantText === false) {
         // No message exists, retrieve it from the target variant's namespace names.
         $langObj = $this->mLangObj->factory($variant);
         $nsVariantText = $langObj->getFormattedNsText($index);
     }
     $cache->set($key, $nsVariantText, 60);
     return $nsVariantText;
 }
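A short usage sketch; the variant code is illustrative, and it assumes the content language object proxies convertNamespace() to its converter:

 // Render the User: namespace name in an explicit variant and in the preferred one.
 $nsText = $wgContLang->convertNamespace(NS_USER, 'zh-tw');
 $nsTextPreferred = $wgContLang->convertNamespace(NS_USER);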
Example #26
 /**
  * Purge caches on page update etc
  *
  * @param Title $title
  * @param Revision|null $revision Revision that was just saved, may be null
  */
 public static function onArticleEdit(Title $title, Revision $revision = null)
 {
     // Invalidate caches of articles which include this page
     DeferredUpdates::addUpdate(new HTMLCacheUpdate($title, 'templatelinks'));
     // Invalidate the caches of all pages which redirect here
     DeferredUpdates::addUpdate(new HTMLCacheUpdate($title, 'redirect'));
     MediaWikiServices::getInstance()->getLinkCache()->invalidateTitle($title);
     // Purge CDN for this page only
     $title->purgeSquid();
     // Clear file cache for this page only
     HTMLFileCache::clearFileCache($title);
     $revid = $revision ? $revision->getId() : null;
     DeferredUpdates::addCallableUpdate(function () use($title, $revid) {
         InfoAction::invalidateCache($title, $revid);
     });
 }
Example #27
$wgUser = RequestContext::getMain()->getUser();
// BackCompat
/**
 * @var Language $wgLang
 */
$wgLang = new StubUserLang();
/**
 * @var OutputPage $wgOut
 */
$wgOut = RequestContext::getMain()->getOutput();
// BackCompat
/**
 * @var Parser $wgParser
 */
$wgParser = new StubObject('wgParser', function () {
    return MediaWikiServices::getInstance()->getParser();
});
/**
 * @var Title $wgTitle
 */
$wgTitle = null;
Profiler::instance()->scopedProfileOut($ps_globals);
$ps_extensions = Profiler::instance()->scopedProfileIn($fname . '-extensions');
// Extension setup functions
// Entries should be added to this variable during the inclusion
// of the extension file. This allows the extension to perform
// any necessary initialisation in the fully initialised environment
foreach ($wgExtensionFunctions as $func) {
    // Allow closures in PHP 5.3+
    if (is_object($func) && $func instanceof Closure) {
        $profName = $fname . '-extensions-closure';
Example #28
 /**
  * Get a MediaHandler for a given MIME type from the instance cache
  *
  * @param string $type
  * @return MediaHandler|bool
  */
 static function getHandler($type)
 {
     return MediaWikiServices::getInstance()->getMediaHandlerFactory()->getHandler($type);
 }
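A quick usage sketch of the shortcut above, assuming it lives on MediaHandler as the return type hints:

 // Look up the handler registered for JPEG files; false is returned when none exists.
 $handler = MediaHandler::getHandler('image/jpeg');
 if ($handler instanceof MediaHandler) {
     // The handler can now be used for metadata extraction, thumbnailing, etc.
 }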
 /**
  * @since 2.4
  *
  * @param string $name
  */
 public function resetMediaWikiService($name)
 {
      // MW 1.27+ (but 1.27.0-rc does not yet expose "resetServiceForTesting")
     if (!class_exists('\\MediaWiki\\MediaWikiServices') || !method_exists(\MediaWiki\MediaWikiServices::getInstance(), 'resetServiceForTesting')) {
         return null;
     }
     try {
         \MediaWiki\MediaWikiServices::getInstance()->resetServiceForTesting($name);
     } catch (\Exception $e) {
          // Do nothing; just avoid a
          // MediaWiki\Services\NoSuchServiceException: No such service ...
     }
     return $this;
 }
 /**
  * Return an array of subpages beginning with $search that this special page will accept.
  *
  * @param string $search Prefix to search for
  * @param int $limit Maximum number of results to return (usually 10)
  * @param int $offset Number of results to skip (usually 0)
  * @return string[] Matching subpages
  */
 public function prefixSearchSubpages($search, $limit, $offset)
 {
     $title = Title::newFromText($search, NS_FILE);
     if (!$title || $title->getNamespace() !== NS_FILE) {
         // No prefix suggestion outside of file namespace
         return [];
     }
     $searchEngine = MediaWikiServices::getInstance()->newSearchEngine();
     $searchEngine->setLimitOffset($limit, $offset);
      // Autocomplete subpages the same way as a normal search, but only for files
     $searchEngine->setNamespaces([NS_FILE]);
     $result = $searchEngine->defaultPrefixSearch($search);
     return array_map(function (Title $t) {
         // Remove namespace in search suggestion
         return $t->getText();
     }, $result);
 }
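A hedged sketch of what a caller of this method sees; the prefix and the result titles are illustrative:

 // Hypothetical call from the special page's autocomplete path:
 $suggestions = $specialPage->prefixSearchSubpages('Example', 10, 0);
 // Yields plain page texts such as 'Example.jpg' (no 'File:' prefix),
 // because the closure above strips the namespace from each suggestion.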