/**
 * Verify that CDN purge updates queued in web (non-CLI) mode are merged:
 * after queueing two CdnCacheUpdate instances, the first queued update
 * should hold the concatenation of both URL batches.
 */
public function testPurgeMergeWeb() {
	$this->setMwGlobals( 'wgCommandLineMode', false );

	$title = Title::newMainPage();

	$firstBatch = [
		$title->getCanonicalURL( '?x=1' ),
		$title->getCanonicalURL( '?x=2' ),
		$title->getCanonicalURL( '?x=3' ),
	];
	$firstUpdate = new CdnCacheUpdate( $firstBatch );
	DeferredUpdates::addUpdate( $firstUpdate );

	$secondBatch = [
		$title->getCanonicalURL( '?x=2' ),
		$title->getCanonicalURL( '?x=3' ),
		$title->getCanonicalURL( '?x=4' ),
	];
	DeferredUpdates::addUpdate( new CdnCacheUpdate( $secondBatch ) );

	// Peek at the private $urls member of the first update: it should now
	// contain both batches appended (duplicates included).
	$wrapper = TestingAccessWrapper::newFromObject( $firstUpdate );
	$this->assertEquals( array_merge( $firstBatch, $secondBatch ), $wrapper->urls );
}
/**
 * UserLoadFromSession hook handler: authenticate the user from an SSL
 * client certificate presented to the web server, auto-creating a local
 * account from the certificate's Common Name when none exists yet.
 *
 * @param User $user User object being loaded (modified in place)
 * @param bool &$result Set to true when we authenticated the user here,
 *   which also aborts MediaWiki's default session authentication
 * @return bool Always true (other hook handlers may still run)
 */
public static function onUserLoadFromSession($user, &$result) {
	$result = false; // don't attempt default auth process
	// No client certificate presented: nothing to do, stay anonymous.
	if (!isset($_SERVER['SSL_CLIENT_S_DN'])) {
		return true;
	}
	$parsed = self::parseDistinguishedName($_SERVER['SSL_CLIENT_S_DN']);
	// The certificate must carry a Common Name to map to a username.
	if (!isset($parsed['CN'])) {
		return true;
	}
	$userName = $parsed['CN'];
	$localId = User::idFromName($userName);
	if ($localId === null) {
		// Local user doesn't exist yet: auto-create from certificate data.
		$user->loadDefaults($parsed['CN']);
		if (!User::isCreatableName($user->getName())) {
			wfDebug(__METHOD__ . ": Invalid username\n");
			return true;
		}
		$user->addToDatabase();
		// Carry the e-mail address over from the certificate if present.
		if (isset($parsed['emailAddress'])) {
			$user->setEmail($parsed['emailAddress']);
		}
		$user->saveSettings();
		$user->addNewUserLogEntryAutoCreate();
		Hooks::run('AuthPluginAutoCreate', array($user));
		// Bump the site user count by one.
		DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 0, 0, 0, 1));
	} else {
		// Existing account: load it by ID.
		$user->setID($localId);
		$user->loadFromId();
	}
	global $wgUser;
	// NOTE(review): binds the global to the hook's $user parameter by
	// reference — presumably intentional for legacy callers; confirm.
	$wgUser =& $user;
	$result = true; // this also aborts default auth process
	return true;
}
/**
 * Move the old title to the new one: moves the file (for NS_FILE pages),
 * the page row, category sort keys, protection settings, link-table
 * namespace fields and watchlist entries, writes the protection log, and
 * runs the TitleMove* hooks around each stage.
 *
 * @param User $user User performing the move
 * @param string $reason Reason recorded in logs/history
 * @param bool $createRedirect Whether to leave a redirect behind
 * @return Status Good status on success; the failed file-move status otherwise
 */
public function move(User $user, $reason, $createRedirect) {
	global $wgCategoryCollation;
	Hooks::run('TitleMove', [$this->oldTitle, $this->newTitle, $user]);
	// If it is a file, move it first.
	// It is done before all other moving stuff is done because it's hard to revert.
	$dbw = wfGetDB(DB_MASTER);
	if ($this->oldTitle->getNamespace() == NS_FILE) {
		$file = wfLocalFile($this->oldTitle);
		$file->load(File::READ_LATEST);
		if ($file->exists()) {
			$status = $file->move($this->newTitle);
			if (!$status->isOK()) {
				// File move failed: abort before touching the page tables.
				return $status;
			}
		}
		// Clear RepoGroup process cache
		RepoGroup::singleton()->clearCache($this->oldTitle);
		RepoGroup::singleton()->clearCache($this->newTitle); # clear false negative cache
	}
	$dbw->startAtomic(__METHOD__);
	Hooks::run('TitleMoveStarting', [$this->oldTitle, $this->newTitle, $user]);
	$pageid = $this->oldTitle->getArticleID(Title::GAID_FOR_UPDATE);
	$protected = $this->oldTitle->isProtected();
	// Do the actual move; if this fails, it will throw an MWException(!)
	$nullRevision = $this->moveToInternal($user, $this->newTitle, $reason, $createRedirect);
	// Refresh the sortkey for this row. Be careful to avoid resetting
	// cl_timestamp, which may disturb time-based lists on some sites.
	// @todo This block should be killed, it's duplicating code
	// from LinksUpdate::getCategoryInsertions() and friends.
	$prefixes = $dbw->select('categorylinks', ['cl_sortkey_prefix', 'cl_to'], ['cl_from' => $pageid], __METHOD__);
	// cl_type depends only on the namespace of the moved page.
	if ($this->newTitle->getNamespace() == NS_CATEGORY) {
		$type = 'subcat';
	} elseif ($this->newTitle->getNamespace() == NS_FILE) {
		$type = 'file';
	} else {
		$type = 'page';
	}
	foreach ($prefixes as $prefixRow) {
		$prefix = $prefixRow->cl_sortkey_prefix;
		$catTo = $prefixRow->cl_to;
		// 'cl_timestamp=cl_timestamp' is a raw SET fragment that preserves
		// the existing timestamp while the other fields are rewritten.
		$dbw->update('categorylinks', ['cl_sortkey' => Collation::singleton()->getSortKey($this->newTitle->getCategorySortkey($prefix)), 'cl_collation' => $wgCategoryCollation, 'cl_type' => $type, 'cl_timestamp=cl_timestamp'], ['cl_from' => $pageid, 'cl_to' => $catTo], __METHOD__);
	}
	$redirid = $this->oldTitle->getArticleID();
	if ($protected) {
		# Protect the redirect title as the title used to be...
		$res = $dbw->select('page_restrictions', '*', ['pr_page' => $pageid], __METHOD__, 'FOR UPDATE');
		$rowsInsert = [];
		foreach ($res as $row) {
			$rowsInsert[] = ['pr_page' => $redirid, 'pr_type' => $row->pr_type, 'pr_level' => $row->pr_level, 'pr_cascade' => $row->pr_cascade, 'pr_user' => $row->pr_user, 'pr_expiry' => $row->pr_expiry];
		}
		$dbw->insert('page_restrictions', $rowsInsert, __METHOD__, ['IGNORE']);
		// Build comment for log
		$comment = wfMessage('prot_1movedto2', $this->oldTitle->getPrefixedText(), $this->newTitle->getPrefixedText())->inContentLanguage()->text();
		if ($reason) {
			$comment .= wfMessage('colon-separator')->inContentLanguage()->text() . $reason;
		}
		// reread inserted pr_ids for log relation
		$insertedPrIds = $dbw->select('page_restrictions', 'pr_id', ['pr_page' => $redirid], __METHOD__);
		$logRelationsValues = [];
		foreach ($insertedPrIds as $prid) {
			$logRelationsValues[] = $prid->pr_id;
		}
		// Update the protection log
		$logEntry = new ManualLogEntry('protect', 'move_prot');
		$logEntry->setTarget($this->newTitle);
		$logEntry->setComment($comment);
		$logEntry->setPerformer($user);
		$logEntry->setParameters(['4::oldtitle' => $this->oldTitle->getPrefixedText()]);
		$logEntry->setRelations(['pr_id' => $logRelationsValues]);
		$logId = $logEntry->insert();
		$logEntry->publish($logId);
	}
	// Update *_from_namespace fields as needed
	if ($this->oldTitle->getNamespace() != $this->newTitle->getNamespace()) {
		$dbw->update('pagelinks', ['pl_from_namespace' => $this->newTitle->getNamespace()], ['pl_from' => $pageid], __METHOD__);
		$dbw->update('templatelinks', ['tl_from_namespace' => $this->newTitle->getNamespace()], ['tl_from' => $pageid], __METHOD__);
		$dbw->update('imagelinks', ['il_from_namespace' => $this->newTitle->getNamespace()], ['il_from' => $pageid], __METHOD__);
	}
	# Update watchlists
	$oldtitle = $this->oldTitle->getDBkey();
	$newtitle = $this->newTitle->getDBkey();
	$oldsnamespace = MWNamespace::getSubject($this->oldTitle->getNamespace());
	$newsnamespace = MWNamespace::getSubject($this->newTitle->getNamespace());
	if ($oldsnamespace != $newsnamespace || $oldtitle != $newtitle) {
		$store = MediaWikiServices::getInstance()->getWatchedItemStore();
		$store->duplicateAllAssociatedEntries($this->oldTitle, $this->newTitle);
	}
	Hooks::run('TitleMoveCompleting', [$this->oldTitle, $this->newTitle, $user, $pageid, $redirid, $reason, $nullRevision]);
	$dbw->endAtomic(__METHOD__);
	$params = [&$this->oldTitle, &$this->newTitle, &$user, $pageid, $redirid, $reason, $nullRevision];
	// Keep each single hook handler atomic
	DeferredUpdates::addUpdate(new AtomicSectionUpdate($dbw, __METHOD__, function () use($params) {
		Hooks::run('TitleMoveComplete', $params);
	}));
	return Status::newGood();
}
/**
 * Do some database updates after deletion.
 *
 * @param int $id page_id value of the page being deleted
 * @param Content $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 */
public function doDeleteUpdates( $id, Content $content = null ) {
	// Site statistics: one more edit, one fewer (possibly countable)
	// article, one fewer page overall.
	$countableDelta = - (int)$this->isCountable();
	DeferredUpdates::addUpdate( new SiteStatsUpdate( 0, 1, $countableDelta, -1 ) );

	// Remove secondary indexes, link tables, etc.
	DataUpdate::runUpdates( $this->getDeletionUpdates( $content ) );

	// Clear caches for the deleted page.
	WikiPage::onArticleDelete( $this->mTitle );

	// Reset this object and the associated Title object.
	$this->loadFromRow( false, self::READ_LATEST );

	// Queue a search-index update for the removed page.
	DeferredUpdates::addUpdate( new SearchUpdate( $id, $this->mTitle ) );
}
/**
 * Mostly for hook use.
 *
 * @param Title $title
 * @param ForeignTitle $foreignTitle
 * @param int $revCount
 * @param int $sRevCount
 * @param array $pageInfo
 * @return bool
 */
public function finishImportPage($title, $foreignTitle, $revCount, $sRevCount, $pageInfo) {
	// Update article count statistics (T42009)
	// The normal counting logic in WikiPage->doEditUpdates() is designed for
	// one-revision-at-a-time editing, not bulk imports. In this situation it
	// suffers from issues of slave lag. We let WikiPage handle the total page
	// and revision count, and we implement our own custom logic for the
	// article (content page) count.
	$page = WikiPage::factory($title);
	$page->loadPageData('fromdbmaster');
	$content = $page->getContent();
	if ($content === null) {
		wfDebug(__METHOD__ . ': Skipping article count adjustment for ' . $title . ' because WikiPage::getContent() returned null');
	} else {
		$prepared = $page->prepareContentForEdit($content);
		$cacheKey = 'title_' . $title->getPrefixedText();
		$isCountableNow = $page->isCountable($prepared);
		// Only adjust the count when we recorded a prior countable state for
		// this title and that state actually changed.
		$hadPriorState = array_key_exists($cacheKey, $this->countableCache);
		if ($hadPriorState && $isCountableNow != $this->countableCache[$cacheKey]) {
			$delta = (int) $isCountableNow - (int) $this->countableCache[$cacheKey];
			DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('articles' => $delta)));
		}
	}
	$args = func_get_args();
	return Hooks::run('AfterImportPage', $args);
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * @param array $timestamps Pass an empty array to restore all revisions,
 *   otherwise list the ones to undelete.
 * @param bool $unsuppress Remove all ar_deleted/fa_deleted restrictions of selected revs
 * @param string $comment
 * @throws ReadOnlyError
 * @return Status Status object containing the number of revisions restored on success
 */
private function undeleteRevisions($timestamps, $unsuppress = false, $comment = '') {
	if (wfReadOnly()) {
		throw new ReadOnlyError();
	}
	$restoreAll = empty($timestamps);
	$dbw = wfGetDB(DB_MASTER);
	# Does this page already exist? We'll have to update it...
	$article = WikiPage::factory($this->title);
	# Load latest data for the current page (bug 31179)
	$article->loadPageData('fromdbmaster');
	$oldcountable = $article->isCountable();
	$page = $dbw->selectRow('page', array('page_id', 'page_latest'), array('page_namespace' => $this->title->getNamespace(), 'page_title' => $this->title->getDBkey()), __METHOD__, array('FOR UPDATE'));
	if ($page) {
		$makepage = false;
		# Page already exists. Import the history, and if necessary
		# we'll update the latest revision field in the record.
		$previousRevId = $page->page_latest;
		# Get the time span of this page
		$previousTimestamp = $dbw->selectField('revision', 'rev_timestamp', array('rev_id' => $previousRevId), __METHOD__);
		if ($previousTimestamp === false) {
			// Inconsistent DB state: bail out with a warning status.
			wfDebug(__METHOD__ . ": existing page refers to a page_latest that does not exist\n");
			$status = Status::newGood(0);
			$status->warning('undeleterevision-missing');
			return $status;
		}
	} else {
		# Have to create a new article...
		$makepage = true;
		$previousRevId = 0;
		$previousTimestamp = 0;
	}
	// Build the WHERE clause selecting the archive rows to restore.
	$oldWhere = array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey());
	if (!$restoreAll) {
		$oldWhere['ar_timestamp'] = array_map(array(&$dbw, 'timestamp'), $timestamps);
	}
	$fields = array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id', 'ar_deleted', 'ar_page_id', 'ar_len', 'ar_sha1');
	if ($this->config->get('ContentHandlerUseDB')) {
		$fields[] = 'ar_content_format';
		$fields[] = 'ar_content_model';
	}
	/**
	 * Select each archived revision...
	 */
	$result = $dbw->select('archive', $fields, $oldWhere, __METHOD__, array('ORDER BY' => 'ar_timestamp'));
	$rev_count = $result->numRows();
	if (!$rev_count) {
		wfDebug(__METHOD__ . ": no revisions to restore\n");
		$status = Status::newGood(0);
		$status->warning("undelete-no-results");
		return $status;
	}
	$result->seek($rev_count - 1); // move to last
	$row = $result->fetchObject(); // get newest archived rev
	$oldPageId = (int) $row->ar_page_id; // pass this to ArticleUndelete hook
	$result->seek(0); // move back
	// grab the content to check consistency with global state before restoring the page.
	$revision = Revision::newFromArchiveRow($row, array('title' => $article->getTitle()));
	$user = User::newFromName($revision->getUserText(Revision::RAW), false);
	$content = $revision->getContent(Revision::RAW);
	// NOTE: article ID may not be known yet. prepareSave() should not modify the database.
	$status = $content->prepareSave($article, 0, -1, $user);
	if (!$status->isOK()) {
		return $status;
	}
	if ($makepage) {
		// Check the state of the newest to-be version...
		if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
			return Status::newFatal("undeleterevdel");
		}
		// Safe to insert now...
		$newid = $article->insertOn($dbw, $row->ar_page_id);
		if ($newid === false) {
			// The old ID is reserved; let's pick another
			$newid = $article->insertOn($dbw);
		}
		$pageId = $newid;
	} else {
		// Check if a deleted revision will become the current revision...
		if ($row->ar_timestamp > $previousTimestamp) {
			// Check the state of the newest to-be version...
			if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
				return Status::newFatal("undeleterevdel");
			}
		}
		$newid = false;
		$pageId = $article->getId();
	}
	$revision = null;
	$restored = 0;
	foreach ($result as $row) {
		// Check for key dupes due to needed archive integrity.
		if ($row->ar_rev_id) {
			$exists = $dbw->selectField('revision', '1', array('rev_id' => $row->ar_rev_id), __METHOD__);
			if ($exists) {
				continue; // don't throw DB errors
			}
		}
		// Insert one revision at a time...maintaining deletion status
		// unless we are specifically removing all restrictions...
		$revision = Revision::newFromArchiveRow($row, array('page' => $pageId, 'title' => $this->title, 'deleted' => $unsuppress ? 0 : $row->ar_deleted));
		$revision->insertOn($dbw);
		$restored++;
		Hooks::run('ArticleRevisionUndeleted', array(&$this->title, $revision, $row->ar_page_id));
	}
	# Now that it's safely stored, take it out of the archive
	$dbw->delete('archive', $oldWhere, __METHOD__);
	// Was anything restored at all?
	if ($restored == 0) {
		return Status::newGood(0);
	}
	$created = (bool) $newid;
	// Attach the latest revision to the page...
	$wasnew = $article->updateIfNewerOn($dbw, $revision, $previousRevId);
	if ($created || $wasnew) {
		// Update site stats, link tables, etc
		$article->doEditUpdates($revision, User::newFromName($revision->getUserText(Revision::RAW), false), array('created' => $created, 'oldcountable' => $oldcountable, 'restored' => true));
	}
	Hooks::run('ArticleUndelete', array(&$this->title, $created, $comment, $oldPageId));
	if ($this->title->getNamespace() == NS_FILE) {
		// File page restored: re-render pages embedding the image.
		DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->title, 'imagelinks'));
	}
	return Status::newGood($restored);
}
/**
 * Override handling of action=purge.
 *
 * Purges file caches (thumbnails etc.) whether or not the file exists, and
 * additionally queues an imagelinks HTML-cache invalidation when it does.
 *
 * @return bool
 */
public function doPurge() {
	$this->loadFile();

	if ($this->mFile->exists()) {
		wfDebug('ImagePage::doPurge purging ' . $this->mFile->getName() . "\n");
		DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->mTitle, 'imagelinks'));
	} else {
		wfDebug('ImagePage::doPurge no image for ' . $this->mFile->getName() . "; limiting purge to cache only\n");
	}
	// Even if the file supposedly doesn't exist, force any cached
	// information to be updated (in case the cached information is wrong).
	$this->mFile->purgeCache(['forThumbRefresh' => true]);

	if ($this->mRepo) {
		// Purge redirect cache
		$this->mRepo->invalidateImageRedirect($this->mTitle);
	}

	return parent::doPurge();
}
/**
 * Invalidate any necessary link lists related to page property changes.
 *
 * For each changed property that appears in $wgPagePropLinkInvalidations,
 * queue an HTML-cache invalidation for every configured backlink table.
 *
 * @param array $changed Map of property name => new value
 */
private function invalidateProperties($changed) {
	global $wgPagePropLinkInvalidations;

	foreach ($changed as $name => $value) {
		if (!isset($wgPagePropLinkInvalidations[$name])) {
			continue;
		}
		// Configuration may give a single table name or a list of them.
		$tables = $wgPagePropLinkInvalidations[$name];
		if (!is_array($tables)) {
			$tables = array($tables);
		}
		foreach ($tables as $table) {
			DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->mTitle, $table));
		}
	}
}
/**
 * Actually add a user to the database.
 * Give it a User object that has been initialised with a name.
 *
 * @param User $u
 * @param bool $autocreate True if this is an autocreation via auth plugin
 * @return Status Status object, with the User object in the value member on success
 * @private
 */
function initUser($u, $autocreate) {
	global $wgAuth;

	// Bail out with the failure status if the row could not be inserted.
	$addStatus = $u->addToDatabase();
	if (!$addStatus->isOK()) {
		return $addStatus;
	}

	if ($wgAuth->allowPasswordChange()) {
		$u->setPassword($this->mPassword);
	}

	$u->setEmail($this->mEmail);
	$u->setRealName($this->mRealName);
	$u->setToken();

	// Let the auth plugin do its own initialisation, then persist.
	$wgAuth->initUser($u, $autocreate);
	$u->saveSettings();

	// Update user count
	DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 0, 0, 0, 1));
	// Watch user's userpage and talk page
	$u->addWatch($u->getUserPage(), WatchedItem::IGNORE_USER_RIGHTS);

	return Status::newGood($u);
}
/**
 * Actually add a user to the database.
 * Give it a User object that has been initialised with a name.
 *
 * @param User $u
 * @param bool $autocreate True if this is an autocreation via auth plugin
 * @return User
 * @private
 */
function initUser($u, $autocreate) {
	global $wgAuth;

	$u->addToDatabase();

	if ($wgAuth->allowPasswordChange()) {
		$u->setPassword($this->mPassword);
	}
	$u->setEmail($this->mEmail);
	$u->setRealName($this->mRealName);
	$u->setToken();

	$wgAuth->initUser($u, $autocreate);

	if ($this->mExtUser) {
		// Link the external account, and borrow its e-mail address when
		// none was supplied on the signup form.
		$this->mExtUser->linkToLocal($u->getId());
		$externalEmail = $this->mExtUser->getPref('emailaddress');
		if ($externalEmail && !$this->mEmail) {
			$u->setEmail($externalEmail);
		}
	}

	$u->setOption('rememberpassword', $this->mRemember ? 1 : 0);
	$u->saveSettings();

	# Update user count
	DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 0, 0, 0, 1));

	return $u;
}
/**
 * Attempt to add a user to the database.
 * Does the required authentication checks and updates for auto-creation.
 *
 * On most denial paths the current wiki ID is appended to the session
 * 'auto-create-blacklist' so the expensive checks are not repeated on
 * every page view.
 *
 * @param User $user
 * @throws Exception If a hook handler renames the user mid-creation
 * @return bool Success
 */
static function attemptAddUser($user) {
	global $wgAuth, $wgCentralAuthCreateOnView;
	$userName = $user->getName();
	// Denied by configuration?
	if (!$wgAuth->autoCreate()) {
		wfDebug(__METHOD__ . ": denied by configuration\n");
		return false;
	}
	if (!$wgCentralAuthCreateOnView) {
		// Only create local accounts when we perform an active login...
		// Don't freak people out on every page view
		wfDebug(__METHOD__ . ": denied by \$wgCentralAuthCreateOnView\n");
		return false;
	}
	// Is the user blacklisted by the session?
	// This is just a cache to avoid expensive DB queries in $user->isAllowedToCreateAccount().
	// The user can log in via Special:UserLogin to bypass the blacklist and get a proper
	// error message.
	$session = CentralAuthUser::getSession();
	if (isset($session['auto-create-blacklist']) && in_array(wfWikiID(), (array) $session['auto-create-blacklist'])) {
		wfDebug(__METHOD__ . ": blacklisted by session\n");
		return false;
	}
	// Is the user blocked?
	$anon = new User();
	if (!$anon->isAllowedAny('createaccount', 'centralauth-autoaccount') || $anon->isBlockedFromCreateAccount()) {
		// Blacklist the user to avoid repeated DB queries subsequently
		// First load the session again in case it changed while the above DB query was in progress
		wfDebug(__METHOD__ . ": user is blocked from this wiki, blacklisting\n");
		$session['auto-create-blacklist'][] = wfWikiID();
		CentralAuthUser::setSession($session);
		return false;
	}
	// Check for validity of username
	if (!User::isCreatableName($userName)) {
		wfDebug(__METHOD__ . ": Invalid username\n");
		$session['auto-create-blacklist'][] = wfWikiID();
		CentralAuthUser::setSession($session);
		return false;
	}
	// Give other extensions a chance to stop auto creation.
	$user->loadDefaults($userName);
	$abortMessage = '';
	if (!Hooks::run('AbortAutoAccount', array($user, &$abortMessage))) {
		// In this case we have no way to return the message to the user,
		// but we can log it.
		wfDebug(__METHOD__ . ": denied by other extension: {$abortMessage}\n");
		$session['auto-create-blacklist'][] = wfWikiID();
		CentralAuthUser::setSession($session);
		return false;
	}
	// Make sure the name has not been changed
	if ($user->getName() !== $userName) {
		throw new Exception("AbortAutoAccount hook tried to change the user name");
	}
	// Checks passed, create the user
	$from = isset($_SERVER['REQUEST_URI']) ? $_SERVER['REQUEST_URI'] : 'CLI';
	wfDebugLog('CentralAuth-Bug39996', __METHOD__ . ": creating new user ({$userName}) - from: {$from}\n");
	try {
		$status = $user->addToDatabase();
	} catch (Exception $e) {
		// Log and re-throw so DB rollbacks can happen upstream.
		wfDebugLog('CentralAuth-Bug39996', __METHOD__ . " User::addToDatabase for \"{$userName}\" threw an exception:" . " {$e->getMessage()}");
		throw $e;
	}
	if ($status === null) {
		// MW before 1.21 -- ok, continue
	} elseif (!$status->isOK()) {
		wfDebugLog('CentralAuth-Bug39996', __METHOD__ . ": failed with message " . $status->getWikiText() . "\n");
		return false;
	}
	$wgAuth->initUser($user, true);
	# Notify hooks (e.g. Newuserlog)
	Hooks::run('AuthPluginAutoCreate', array($user));
	# Update user count
	DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 0, 0, 0, 1));
	return true;
}
/**
 * Do some database updates after deletion.
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null) {
	// Site statistics: one more edit, one fewer (possibly countable)
	// article, one fewer page overall.
	DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));

	// Remove secondary indexes, link tables, etc.
	DataUpdate::runUpdates($this->getDeletionUpdates($content));

	// Reparse any pages transcluding this page
	LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');

	// Reparse any pages including this image
	if ($this->mTitle->getNamespace() == NS_FILE) {
		LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
	}

	// Clear caches
	WikiPage::onArticleDelete($this->mTitle);

	// Reset this object and the Title object
	$this->loadFromRow(false, self::READ_LATEST);

	// Queue a search-index update for the removed page.
	DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}
/**
 * Queue a deferred search-index update for the given page.
 *
 * @param Title $title Page whose search text is being set
 * @param string $text New text to index for the page
 */
static function setPageSearchText($title, $text) {
	// NOTE(review): the second constructor argument here is the title text
	// (a string); other call sites in this file pass a Title object —
	// confirm SearchUpdate accepts both forms.
	DeferredUpdates::addUpdate(new SearchUpdate($title->getArticleID(), $title->getText(), $text));
}
/**
 * Purges the cache of a page.
 *
 * For each good title in the request's page set: purge the page (rate
 * limited via the 'purge' ping limiter), and optionally re-parse it and
 * queue secondary link-table updates (rate limited via 'linkpurge') when
 * forcelinkupdate/forcerecursivelinkupdate is set. Results are reported
 * per page in the API output.
 */
public function execute() {
	$main = $this->getMain();
	// Purging via GET is deprecated; log and warn but still proceed.
	if (!$main->isInternalMode() && !$main->getRequest()->wasPosted()) {
		$this->logFeatureUsage('purge-via-GET');
		$this->setWarning('Use of action=purge via GET is deprecated. Use POST instead.');
	}
	$params = $this->extractRequestParams();
	$continuationManager = new ApiContinuationManager($this, [], []);
	$this->setContinuationManager($continuationManager);
	$forceLinkUpdate = $params['forcelinkupdate'];
	$forceRecursiveLinkUpdate = $params['forcerecursivelinkupdate'];
	$pageSet = $this->getPageSet();
	$pageSet->execute();
	$result = $pageSet->getInvalidTitlesAndRevisions();
	$user = $this->getUser();
	foreach ($pageSet->getGoodTitles() as $title) {
		$r = [];
		ApiQueryBase::addTitleInfo($r, $title);
		$page = WikiPage::factory($title);
		if (!$user->pingLimiter('purge')) {
			$flags = WikiPage::PURGE_ALL;
			if (!$this->getRequest()->wasPosted()) {
				$flags ^= WikiPage::PURGE_GLOBAL_PCACHE; // skip DB_MASTER write
			}
			// Directly purge and skip the UI part of purge()
			$page->doPurge($flags);
			$r['purged'] = true;
		} else {
			// Rate limited: report but keep processing remaining titles.
			$error = $this->parseMsg(['actionthrottledtext']);
			$this->setWarning($error['info']);
		}
		if ($forceLinkUpdate || $forceRecursiveLinkUpdate) {
			if (!$user->pingLimiter('linkpurge')) {
				$popts = $page->makeParserOptions('canonical');
				# Parse content; note that HTML generation is only needed if we want to cache the result.
				$content = $page->getContent(Revision::RAW);
				if ($content) {
					$enableParserCache = $this->getConfig()->get('EnableParserCache');
					$p_result = $content->getParserOutput($title, $page->getLatest(), $popts, $enableParserCache);
					# Logging to better see expensive usage patterns
					if ($forceRecursiveLinkUpdate) {
						LoggerFactory::getInstance('RecursiveLinkPurge')->info("Recursive link purge enqueued for {title}", ['user' => $this->getUser()->getName(), 'title' => $title->getPrefixedText()]);
					}
					# Update the links tables
					$updates = $content->getSecondaryDataUpdates($title, null, $forceRecursiveLinkUpdate, $p_result);
					foreach ($updates as $update) {
						DeferredUpdates::addUpdate($update, DeferredUpdates::PRESEND);
					}
					$r['linkupdate'] = true;
					if ($enableParserCache) {
						$pcache = ParserCache::singleton();
						$pcache->save($p_result, $page, $popts);
					}
				}
			} else {
				$error = $this->parseMsg(['actionthrottledtext']);
				$this->setWarning($error['info']);
				// Stop attempting link updates for the remaining titles.
				$forceLinkUpdate = false;
			}
		}
		$result[] = $r;
	}
	$apiResult = $this->getResult();
	ApiResult::setIndexedTagName($result, 'page');
	$apiResult->addValue(null, $this->getModuleName(), $result);
	// Report title normalizations, conversions and redirects resolved
	// while building the page set.
	$values = $pageSet->getNormalizedTitlesAsResult($apiResult);
	if ($values) {
		$apiResult->addValue(null, 'normalized', $values);
	}
	$values = $pageSet->getConvertedTitlesAsResult($apiResult);
	if ($values) {
		$apiResult->addValue(null, 'converted', $values);
	}
	$values = $pageSet->getRedirectTitlesAsResult($apiResult);
	if ($values) {
		$apiResult->addValue(null, 'redirects', $values);
	}
	$this->setContinuationManager(null);
	$continuationManager->setContinuationIntoResult($apiResult);
}
/**
 * Auto-create an account, and log into that account.
 *
 * Denial paths blacklist the attempt in the session (and/or a backoff key
 * in the cluster cache) so the expensive checks are not repeated.
 *
 * @param User $user User to auto-create
 * @param string $source What caused the auto-creation? This must be the ID
 *   of a PrimaryAuthenticationProvider or the constant self::AUTOCREATE_SOURCE_SESSION.
 * @param bool $login Whether to also log the user in
 * @return Status Good if user was created, Ok if user already existed, otherwise Fatal
 */
public function autoCreateUser(User $user, $source, $login = true) {
	if ($source !== self::AUTOCREATE_SOURCE_SESSION && !$this->getAuthenticationProvider($source) instanceof PrimaryAuthenticationProvider) {
		throw new \InvalidArgumentException("Unknown auto-creation source: {$source}");
	}
	$username = $user->getName();
	// Try the local user from the slave DB
	$localId = User::idFromName($username);
	$flags = User::READ_NORMAL;
	// Fetch the user ID from the master, so that we don't try to create the user
	// when they already exist, due to replication lag
	// @codeCoverageIgnoreStart
	if (!$localId && wfGetLB()->getReaderIndex() != 0) {
		$localId = User::idFromName($username, User::READ_LATEST);
		$flags = User::READ_LATEST;
	}
	// @codeCoverageIgnoreEnd
	if ($localId) {
		// Already exists: load it and (optionally) log in, no creation.
		$this->logger->debug(__METHOD__ . ': {username} already exists locally', ['username' => $username]);
		$user->setId($localId);
		$user->loadFromId($flags);
		if ($login) {
			$this->setSessionDataForUser($user);
		}
		$status = Status::newGood();
		$status->warning('userexists');
		return $status;
	}
	// Wiki is read-only?
	if (wfReadOnly()) {
		$this->logger->debug(__METHOD__ . ': denied by wfReadOnly(): {reason}', ['username' => $username, 'reason' => wfReadOnlyReason()]);
		$user->setId(0);
		$user->loadFromId();
		return Status::newFatal('readonlytext', wfReadOnlyReason());
	}
	// Check the session, if we tried to create this user already there's
	// no point in retrying.
	$session = $this->request->getSession();
	if ($session->get('AuthManager::AutoCreateBlacklist')) {
		$this->logger->debug(__METHOD__ . ': blacklisted in session {sessionid}', ['username' => $username, 'sessionid' => $session->getId()]);
		$user->setId(0);
		$user->loadFromId();
		// The blacklist entry may be a plain message key or a StatusValue.
		$reason = $session->get('AuthManager::AutoCreateBlacklist');
		if ($reason instanceof StatusValue) {
			return Status::wrap($reason);
		} else {
			return Status::newFatal($reason);
		}
	}
	// Is the username creatable?
	if (!User::isCreatableName($username)) {
		$this->logger->debug(__METHOD__ . ': name "{username}" is not creatable', ['username' => $username]);
		$session->set('AuthManager::AutoCreateBlacklist', 'noname', 600);
		$user->setId(0);
		$user->loadFromId();
		return Status::newFatal('noname');
	}
	// Is the IP user able to create accounts?
	$anon = new User();
	if (!$anon->isAllowedAny('createaccount', 'autocreateaccount')) {
		$this->logger->debug(__METHOD__ . ': IP lacks the ability to create or autocreate accounts', ['username' => $username, 'ip' => $anon->getName()]);
		$session->set('AuthManager::AutoCreateBlacklist', 'authmanager-autocreate-noperm', 600);
		$session->persist();
		$user->setId(0);
		$user->loadFromId();
		return Status::newFatal('authmanager-autocreate-noperm');
	}
	// Avoid account creation races on double submissions
	$cache = \ObjectCache::getLocalClusterInstance();
	$lock = $cache->getScopedLock($cache->makeGlobalKey('account', md5($username)));
	if (!$lock) {
		$this->logger->debug(__METHOD__ . ': Could not acquire account creation lock', ['user' => $username]);
		$user->setId(0);
		$user->loadFromId();
		return Status::newFatal('usernameinprogress');
	}
	// Denied by providers?
	$providers = $this->getPreAuthenticationProviders() + $this->getPrimaryAuthenticationProviders() + $this->getSecondaryAuthenticationProviders();
	foreach ($providers as $provider) {
		$status = $provider->testUserForCreation($user, $source);
		if (!$status->isGood()) {
			$ret = Status::wrap($status);
			$this->logger->debug(__METHOD__ . ': Provider denied creation of {username}: {reason}', ['username' => $username, 'reason' => $ret->getWikiText(null, null, 'en')]);
			$session->set('AuthManager::AutoCreateBlacklist', $status, 600);
			$user->setId(0);
			$user->loadFromId();
			return $ret;
		}
	}
	// Ignore warnings about master connections/writes...hard to avoid here
	\Profiler::instance()->getTransactionProfiler()->resetExpectations();
	// Back off if a recent creation attempt for this name blew up.
	$backoffKey = wfMemcKey('AuthManager', 'autocreate-failed', md5($username));
	if ($cache->get($backoffKey)) {
		$this->logger->debug(__METHOD__ . ': {username} denied by prior creation attempt failures', ['username' => $username]);
		$user->setId(0);
		$user->loadFromId();
		return Status::newFatal('authmanager-autocreate-exception');
	}
	// Checks passed, create the user...
	$from = isset($_SERVER['REQUEST_URI']) ? $_SERVER['REQUEST_URI'] : 'CLI';
	$this->logger->info(__METHOD__ . ': creating new user ({username}) - from: {from}', ['username' => $username, 'from' => $from]);
	try {
		$status = $user->addToDatabase();
		if (!$status->isOk()) {
			// double-check for a race condition (T70012)
			$localId = User::idFromName($username, User::READ_LATEST);
			if ($localId) {
				$this->logger->info(__METHOD__ . ': {username} already exists locally (race)', ['username' => $username]);
				$user->setId($localId);
				$user->loadFromId(User::READ_LATEST);
				if ($login) {
					$this->setSessionDataForUser($user);
				}
				$status = Status::newGood();
				$status->warning('userexists');
			} else {
				$this->logger->error(__METHOD__ . ': {username} failed with message {message}', ['username' => $username, 'message' => $status->getWikiText(null, null, 'en')]);
				$user->setId(0);
				$user->loadFromId();
			}
			return $status;
		}
	} catch (\Exception $ex) {
		$this->logger->error(__METHOD__ . ': {username} failed with exception {exception}', ['username' => $username, 'exception' => $ex]);
		// Do not keep throwing errors for a while
		$cache->set($backoffKey, 1, 600);
		// Bubble up error; which should normally trigger DB rollbacks
		throw $ex;
	}
	$this->setDefaultUserOptions($user, true);
	// Inform the providers
	$this->callMethodOnProviders(6, 'autoCreatedAccount', [$user, $source]);
	\Hooks::run('AuthPluginAutoCreate', [$user], '1.27');
	\Hooks::run('LocalUserCreated', [$user, true]);
	$user->saveSettings();
	// Update user count
	\DeferredUpdates::addUpdate(new \SiteStatsUpdate(0, 0, 0, 0, 1));
	// Watch user's userpage and talk page
	$user->addWatch($user->getUserPage(), User::IGNORE_USER_RIGHTS);
	// Log the creation
	if ($this->config->get('NewUserLog')) {
		$logEntry = new \ManualLogEntry('newusers', 'autocreate');
		$logEntry->setPerformer($user);
		$logEntry->setTarget($user->getUserPage());
		$logEntry->setComment('');
		$logEntry->setParameters(['4::userid' => $user->getId()]);
		// NOTE(review): the entry is inserted but never publish()ed —
		// presumably intentional (autocreations kept out of RC); confirm.
		$logid = $logEntry->insert();
	}
	if ($login) {
		$this->setSessionDataForUser($user);
	}
	return Status::newGood();
}
/**
 * Actually add a user to the database.
 * Give it a User object that has been initialised with a name.
 *
 * @param User $u
 * @param bool $autocreate True if this is an autocreation via auth plugin
 * @return Status Status object, with the User object in the value member on success
 * @private
 */
function initUser($u, $autocreate) {
	global $wgAuth;

	// Bail out with the failure status if the row could not be inserted.
	$creationStatus = $u->addToDatabase();
	if (!$creationStatus->isOK()) {
		return $creationStatus;
	}

	if ($wgAuth->allowPasswordChange()) {
		$u->setPassword($this->mPassword);
	}
	$u->setEmail($this->mEmail);
	$u->setRealName($this->mRealName);
	$u->setToken();

	Hooks::run('LocalUserCreated', array($u, $autocreate));

	// $wgAuth->initUser() is handed the same object; warn if a legacy
	// auth plugin swapped it out from under us.
	$oldUser = $u;
	$wgAuth->initUser($u, $autocreate);
	if ($oldUser !== $u) {
		wfWarn(get_class($wgAuth) . '::initUser() replaced the user object');
	}

	$u->saveSettings();

	// Update user count
	DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 0, 0, 0, 1));
	// Watch user's userpage and talk page
	$u->addWatch($u->getUserPage(), WatchedItem::IGNORE_USER_RIGHTS);

	return Status::newGood($u);
}
/**
 * Purge caches for the file and each affected archived version after the
 * transaction has committed.
 *
 * @return Status
 */
public function doPostCommitUpdates() {
    $file = wfLocalFile($this->title);
    $file->purgeCache();
    $file->purgeDescription();

    // Purge full images from cache
    $urlsToPurge = array();
    foreach ($this->ids as $ts) {
        $oldName = $ts . '!' . $this->title->getDBkey();
        $file->purgeOldThumbnails($oldName);
        $urlsToPurge[] = $file->getArchiveUrl($oldName);
    }
    DeferredUpdates::addUpdate(new CdnCacheUpdate($urlsToPurge), DeferredUpdates::PRESEND);

    return Status::newGood();
}
/**
 * This is the meaty bit -- It restores archived revisions of the given page
 * to the revision table.
 *
 * @param array $timestamps Pass an empty array to restore all revisions,
 *   otherwise list the ones to undelete.
 * @param bool $unsuppress Remove all ar_deleted/fa_deleted restrictions of seletected revs
 * @param string $comment
 * @throws ReadOnlyError
 * @return Status Status object containing the number of revisions restored on success
 */
private function undeleteRevisions($timestamps, $unsuppress = false, $comment = '') {
    if (wfReadOnly()) {
        throw new ReadOnlyError();
    }

    $dbw = wfGetDB(DB_MASTER);
    $dbw->startAtomic(__METHOD__);

    // Empty timestamp list means "restore everything"
    $restoreAll = empty($timestamps);

    # Does this page already exist? We'll have to update it...
    $article = WikiPage::factory($this->title);
    # Load latest data for the current page (bug 31179)
    $article->loadPageData('fromdbmaster');
    $oldcountable = $article->isCountable();

    $page = $dbw->selectRow('page',
        ['page_id', 'page_latest'],
        ['page_namespace' => $this->title->getNamespace(),
            'page_title' => $this->title->getDBkey()],
        __METHOD__,
        ['FOR UPDATE'] // SQL row lock held for the rest of the atomic section
    );

    if ($page) {
        $makepage = false;
        # Page already exists. Import the history, and if necessary
        # we'll update the latest revision field in the record.

        # Get the time span of this page
        $previousTimestamp = $dbw->selectField('revision', 'rev_timestamp',
            ['rev_id' => $page->page_latest],
            __METHOD__);

        if ($previousTimestamp === false) {
            wfDebug(__METHOD__ . ": existing page refers to a page_latest that does not exist\n");

            $status = Status::newGood(0);
            $status->warning('undeleterevision-missing');
            $dbw->endAtomic(__METHOD__);

            return $status;
        }
    } else {
        # Have to create a new article...
        $makepage = true;
        $previousTimestamp = 0;
    }

    $oldWhere = [
        'ar_namespace' => $this->title->getNamespace(),
        'ar_title' => $this->title->getDBkey(),
    ];
    if (!$restoreAll) {
        $oldWhere['ar_timestamp'] = array_map([&$dbw, 'timestamp'], $timestamps);
    }

    $fields = [
        'ar_id', 'ar_rev_id', 'rev_id', 'ar_text', 'ar_comment', 'ar_user',
        'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags',
        'ar_text_id', 'ar_deleted', 'ar_page_id', 'ar_len', 'ar_sha1',
    ];

    if ($this->config->get('ContentHandlerUseDB')) {
        $fields[] = 'ar_content_format';
        $fields[] = 'ar_content_model';
    }

    /**
     * Select each archived revision...
     * LEFT JOIN against revision detects ar_rev_id values already in use.
     */
    $result = $dbw->select(
        ['archive', 'revision'],
        $fields,
        $oldWhere,
        __METHOD__,
        ['ORDER BY' => 'ar_timestamp'],
        ['revision' => ['LEFT JOIN', 'ar_rev_id=rev_id']]
    );

    $rev_count = $result->numRows();
    if (!$rev_count) {
        wfDebug(__METHOD__ . ": no revisions to restore\n");

        $status = Status::newGood(0);
        $status->warning("undelete-no-results");
        $dbw->endAtomic(__METHOD__);

        return $status;
    }

    // We use ar_id because there can be duplicate ar_rev_id even for the same
    // page. In this case, we may be able to restore the first one.
    $restoreFailedArIds = [];

    // Map rev_id to the ar_id that is allowed to use it. When checking later,
    // if it doesn't match, the current ar_id can not be restored.
    // Value can be an ar_id or -1 (-1 means no ar_id can use it, since the
    // rev_id is taken before we even start the restore).
    $allowedRevIdToArIdMap = [];

    $latestRestorableRow = null;

    foreach ($result as $row) {
        if ($row->ar_rev_id) {
            // rev_id is taken even before we start restoring.
            if ($row->ar_rev_id === $row->rev_id) {
                $restoreFailedArIds[] = $row->ar_id;
                $allowedRevIdToArIdMap[$row->ar_rev_id] = -1;
            } else {
                // rev_id is not taken yet in the DB, but it might be taken
                // by a prior revision in the same restore operation. If
                // not, we need to reserve it.
                if (isset($allowedRevIdToArIdMap[$row->ar_rev_id])) {
                    $restoreFailedArIds[] = $row->ar_id;
                } else {
                    $allowedRevIdToArIdMap[$row->ar_rev_id] = $row->ar_id;
                    $latestRestorableRow = $row;
                }
            }
        } else {
            // If ar_rev_id is null, there can't be a collision, and a
            // rev_id will be chosen automatically.
            $latestRestorableRow = $row;
        }
    }

    $result->seek(0); // move back

    $oldPageId = 0;
    if ($latestRestorableRow !== null) {
        $oldPageId = (int) $latestRestorableRow->ar_page_id; // pass this to ArticleUndelete hook

        // grab the content to check consistency with global state before restoring the page.
        $revision = Revision::newFromArchiveRow($latestRestorableRow,
            ['title' => $article->getTitle()]);
        $user = User::newFromName($revision->getUserText(Revision::RAW), false);
        $content = $revision->getContent(Revision::RAW);

        // NOTE: article ID may not be known yet. prepareSave() should not modify the database.
        $status = $content->prepareSave($article, 0, -1, $user);
        if (!$status->isOK()) {
            $dbw->endAtomic(__METHOD__);

            return $status;
        }
    }

    $newid = false; // newly created page ID
    $restored = 0; // number of revisions restored
    /** @var Revision $revision */
    $revision = null;

    // If there are no restorable revisions, we can skip most of the steps.
    if ($latestRestorableRow === null) {
        $failedRevisionCount = $rev_count;
    } else {
        if ($makepage) {
            // Check the state of the newest to-be version...
            if (!$unsuppress
                && $latestRestorableRow->ar_deleted & Revision::DELETED_TEXT
            ) {
                $dbw->endAtomic(__METHOD__);

                return Status::newFatal("undeleterevdel");
            }
            // Safe to insert now...
            $newid = $article->insertOn($dbw, $latestRestorableRow->ar_page_id);
            if ($newid === false) {
                // The old ID is reserved; let's pick another
                $newid = $article->insertOn($dbw);
            }
            $pageId = $newid;
        } else {
            // Check if a deleted revision will become the current revision...
            if ($latestRestorableRow->ar_timestamp > $previousTimestamp) {
                // Check the state of the newest to-be version...
                if (!$unsuppress
                    && $latestRestorableRow->ar_deleted & Revision::DELETED_TEXT
                ) {
                    $dbw->endAtomic(__METHOD__);

                    return Status::newFatal("undeleterevdel");
                }
            }

            $newid = false;
            $pageId = $article->getId();
        }

        foreach ($result as $row) {
            // Check for key dupes due to needed archive integrity.
            if ($row->ar_rev_id && $allowedRevIdToArIdMap[$row->ar_rev_id] !== $row->ar_id) {
                continue;
            }
            // Insert one revision at a time...maintaining deletion status
            // unless we are specifically removing all restrictions...
            $revision = Revision::newFromArchiveRow($row,
                [
                    'page' => $pageId,
                    'title' => $this->title,
                    'deleted' => $unsuppress ? 0 : $row->ar_deleted
                ]);

            $revision->insertOn($dbw);
            $restored++;

            Hooks::run('ArticleRevisionUndeleted',
                [&$this->title, $revision, $row->ar_page_id]);
        }

        // Now that it's safely stored, take it out of the archive
        // Don't delete rows that we failed to restore
        $toDeleteConds = $oldWhere;
        $failedRevisionCount = count($restoreFailedArIds);
        if ($failedRevisionCount > 0) {
            $toDeleteConds[] = 'ar_id NOT IN ( ' . $dbw->makeList($restoreFailedArIds) . ' )';
        }

        $dbw->delete('archive',
            $toDeleteConds,
            __METHOD__);
    }

    $status = Status::newGood($restored);

    if ($failedRevisionCount > 0) {
        $status->warning(
            wfMessage('undeleterevision-duplicate-revid', $failedRevisionCount));
    }

    // Was anything restored at all?
    if ($restored) {
        $created = (bool) $newid;
        // Attach the latest revision to the page...
        $wasnew = $article->updateIfNewerOn($dbw, $revision);
        if ($created || $wasnew) {
            // Update site stats, link tables, etc
            $article->doEditUpdates(
                $revision,
                User::newFromName($revision->getUserText(Revision::RAW), false),
                [
                    'created' => $created,
                    'oldcountable' => $oldcountable,
                    'restored' => true
                ]
            );
        }

        Hooks::run('ArticleUndelete', [&$this->title, $created, $comment, $oldPageId]);
        if ($this->title->getNamespace() == NS_FILE) {
            DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->title, 'imagelinks'));
        }
    }

    $dbw->endAtomic(__METHOD__);

    return $status;
}
/**
 * Do some database updates after deletion
 *
 * @param $id Int: page_id value of the page being deleted (B/C, currently unused)
 * @param $content Content: optional page content to be used when determining the required updates.
 *   This may be needed because $this->getContent() may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null) {
    // Update the site statistics counters
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));

    // Remove secondary indexes, etc.
    DataUpdate::runUpdates($this->getDeletionUpdates($content));

    // Clear caches that referenced this page
    WikiPage::onArticleDelete($this->mTitle);

    // Reset this object, and clear the cached article id so the
    // interface doesn't act like we exist
    $this->clear();
    $this->mTitle->resetArticleID(0);
}
/**
 * Purge expired blocks from the ipblocks table
 */
public static function purgeExpired() {
    if (wfReadOnly()) {
        return;
    }

    $update = new AtomicSectionUpdate(
        wfGetDB(DB_MASTER),
        __METHOD__,
        function (IDatabase $db, $method) {
            // Drop every block whose expiry timestamp is in the past
            $db->delete(
                'ipblocks',
                array('ipb_expiry < ' . $db->addQuotes($db->timestamp())),
                $method
            );
        }
    );
    DeferredUpdates::addUpdate($update);
}
/**
 * Run the transaction, except the cleanup batch.
 * The cleanup batch should be run in a separate transaction, because it locks different
 * rows and there's no need to keep the image row locked while it's acquiring those locks
 * The caller may have its own transaction open.
 * So we save the batch and let the caller call cleanup()
 * @return FileRepoStatus
 */
function execute() {
    global $wgLang;

    if (!$this->all && !$this->ids) {
        // Do nothing
        return $this->file->repo->newGood();
    }

    $exists = $this->file->lock();
    $dbw = $this->file->repo->getMasterDB();
    $status = $this->file->repo->newGood();

    // Fetch all or selected archived revisions for the file,
    // sorted from the most recent to the oldest.
    $conditions = array('fa_name' => $this->file->getName());

    if (!$this->all) {
        $conditions['fa_id'] = $this->ids;
    }

    $result = $dbw->select(
        'filearchive',
        ArchivedFile::selectFields(),
        $conditions,
        __METHOD__,
        array('ORDER BY' => 'fa_timestamp DESC')
    );

    $idsPresent = array();
    $storeBatch = array();
    $insertBatch = array();
    $insertCurrent = false;
    $deleteIds = array();
    $first = true;
    $archiveNames = array();

    foreach ($result as $row) {
        $idsPresent[] = $row->fa_id;

        if ($row->fa_name != $this->file->getName()) {
            $status->error('undelete-filename-mismatch', $wgLang->timeanddate($row->fa_timestamp));
            $status->failCount++;
            continue;
        }

        if ($row->fa_storage_key == '') {
            // Revision was missing pre-deletion
            $status->error('undelete-bad-store-key', $wgLang->timeanddate($row->fa_timestamp));
            $status->failCount++;
            continue;
        }

        $deletedRel = $this->file->repo->getDeletedHashPath($row->fa_storage_key) .
            $row->fa_storage_key;
        $deletedUrl = $this->file->repo->getVirtualUrl() . '/deleted/' . $deletedRel;

        if (isset($row->fa_sha1)) {
            $sha1 = $row->fa_sha1;
        } else {
            // old row, populate from key
            $sha1 = LocalRepo::getHashFromKey($row->fa_storage_key);
        }

        # Fix leading zero
        if (strlen($sha1) == 32 && $sha1[0] == '0') {
            $sha1 = substr($sha1, 1);
        }

        if (is_null($row->fa_major_mime) || $row->fa_major_mime == 'unknown'
            || is_null($row->fa_minor_mime) || $row->fa_minor_mime == 'unknown'
            || is_null($row->fa_media_type) || $row->fa_media_type == 'UNKNOWN'
            || is_null($row->fa_metadata)
        ) {
            // Refresh our metadata
            // Required for a new current revision; nice for older ones too. :)
            $props = RepoGroup::singleton()->getFileProps($deletedUrl);
        } else {
            $props = array(
                'minor_mime' => $row->fa_minor_mime,
                'major_mime' => $row->fa_major_mime,
                'media_type' => $row->fa_media_type,
                'metadata' => $row->fa_metadata
            );
        }

        if ($first && !$exists) {
            // This revision will be published as the new current version
            $destRel = $this->file->getRel();
            $insertCurrent = array(
                'img_name' => $row->fa_name,
                'img_size' => $row->fa_size,
                'img_width' => $row->fa_width,
                'img_height' => $row->fa_height,
                'img_metadata' => $props['metadata'],
                'img_bits' => $row->fa_bits,
                'img_media_type' => $props['media_type'],
                'img_major_mime' => $props['major_mime'],
                'img_minor_mime' => $props['minor_mime'],
                'img_description' => $row->fa_description,
                'img_user' => $row->fa_user,
                'img_user_text' => $row->fa_user_text,
                'img_timestamp' => $row->fa_timestamp,
                'img_sha1' => $sha1
            );

            // The live (current) version cannot be hidden!
            if (!$this->unsuppress && $row->fa_deleted) {
                $status->fatal('undeleterevdel');
                $this->file->unlock();

                return $status;
            }
        } else {
            $archiveName = $row->fa_archive_name;

            if ($archiveName == '') {
                // This was originally a current version; we
                // have to devise a new archive name for it.
                // Format is <timestamp of archiving>!<name>
                $timestamp = wfTimestamp(TS_UNIX, $row->fa_deleted_timestamp);

                // Bump the timestamp until the generated name is unique
                // within this batch
                do {
                    $archiveName = wfTimestamp(TS_MW, $timestamp) . '!' . $row->fa_name;
                    $timestamp++;
                } while (isset($archiveNames[$archiveName]));
            }

            $archiveNames[$archiveName] = true;
            $destRel = $this->file->getArchiveRel($archiveName);
            $insertBatch[] = array(
                'oi_name' => $row->fa_name,
                'oi_archive_name' => $archiveName,
                'oi_size' => $row->fa_size,
                'oi_width' => $row->fa_width,
                'oi_height' => $row->fa_height,
                'oi_bits' => $row->fa_bits,
                'oi_description' => $row->fa_description,
                'oi_user' => $row->fa_user,
                'oi_user_text' => $row->fa_user_text,
                'oi_timestamp' => $row->fa_timestamp,
                'oi_metadata' => $props['metadata'],
                'oi_media_type' => $props['media_type'],
                'oi_major_mime' => $props['major_mime'],
                'oi_minor_mime' => $props['minor_mime'],
                'oi_deleted' => $this->unsuppress ? 0 : $row->fa_deleted,
                'oi_sha1' => $sha1
            );
        }

        $deleteIds[] = $row->fa_id;

        if (!$this->unsuppress && $row->fa_deleted & File::DELETED_FILE) {
            // private files can stay where they are
            $status->successCount++;
        } else {
            $storeBatch[] = array($deletedUrl, 'public', $destRel);
            $this->cleanupBatch[] = $row->fa_storage_key;
        }

        $first = false;
    }

    unset($result);

    // Add a warning to the status object for missing IDs
    $missingIds = array_diff($this->ids, $idsPresent);

    foreach ($missingIds as $id) {
        $status->error('undelete-missing-filearchive', $id);
    }

    // Remove missing files from batch, so we don't get errors when undeleting them
    $storeBatch = $this->removeNonexistentFiles($storeBatch);

    // Run the store batch
    // Use the OVERWRITE_SAME flag to smooth over a common error
    $storeStatus = $this->file->repo->storeBatch($storeBatch, FileRepo::OVERWRITE_SAME);
    $status->merge($storeStatus);

    if (!$status->isGood()) {
        // Even if some files could be copied, fail entirely as that is the
        // easiest thing to do without data loss
        $this->cleanupFailedBatch($storeStatus, $storeBatch);
        $status->ok = false;
        $this->file->unlock();

        return $status;
    }

    // Run the DB updates
    // Because we have locked the image row, key conflicts should be rare.
    // If they do occur, we can roll back the transaction at this time with
    // no data loss, but leaving unregistered files scattered throughout the
    // public zone.
    // This is not ideal, which is why it's important to lock the image row.
    if ($insertCurrent) {
        $dbw->insert('image', $insertCurrent, __METHOD__);
    }

    if ($insertBatch) {
        $dbw->insert('oldimage', $insertBatch, __METHOD__);
    }

    if ($deleteIds) {
        $dbw->delete('filearchive', array('fa_id' => $deleteIds), __METHOD__);
    }

    // If store batch is empty (all files are missing), deletion is to be considered successful
    if ($status->successCount > 0 || !$storeBatch) {
        if (!$exists) {
            wfDebug(__METHOD__ . " restored {$status->successCount} items, creating a new current\n");

            DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('images' => 1)));

            $this->file->purgeEverything();
        } else {
            wfDebug(__METHOD__ . " restored {$status->successCount} as archived versions\n");
            $this->file->purgeDescription();
            $this->file->purgeHistory();
        }
    }

    $this->file->unlock();

    return $status;
}
/**
 * Invalidates/purges pages where only stable version includes this page.
 * @param Title $title
 */
public static function extraHTMLCacheUpdate(Title $title) {
    $update = new FRExtraCacheUpdate($title);
    DeferredUpdates::addUpdate($update);
}
/**
 * Purge caches on page update etc
 *
 * @param Title $title
 * @param Revision|null $revision Revision that was just saved, may be null
 */
public static function onArticleEdit(Title $title, Revision $revision = null) {
    // Invalidate caches of articles which include this page
    DeferredUpdates::addUpdate(new HTMLCacheUpdate($title, 'templatelinks'));

    // Invalidate the caches of all pages which redirect here
    DeferredUpdates::addUpdate(new HTMLCacheUpdate($title, 'redirect'));

    MediaWikiServices::getInstance()->getLinkCache()->invalidateTitle($title);

    // Purge CDN for this page only
    $title->purgeSquid();

    // Clear file cache for this page only
    HTMLFileCache::clearFileCache($title);

    $revid = $revision === null ? null : $revision->getId();
    DeferredUpdates::addCallableUpdate(function () use ($title, $revid) {
        InfoAction::invalidateCache($title, $revid);
    });
}
/**
 * Run LinksUpdate for all links on a given page_id
 * @param int $id The page_id
 * @param int|bool $ns Only fix links if it is in this namespace
 */
public static function fixLinksFromArticle($id, $ns = false) {
    $page = WikiPage::newFromID($id);

    LinkCache::singleton()->clear();

    // Bail out when the page is gone or outside the requested namespace
    if ($page === null) {
        return;
    }
    if ($ns !== false && !$page->getTitle()->inNamespace($ns)) {
        return;
    }

    $content = $page->getContent(Revision::RAW);
    if ($content === null) {
        return;
    }

    // Queue every secondary-data update derived from the page content
    foreach ($content->getSecondaryDataUpdates($page->getTitle()) as $update) {
        DeferredUpdates::addUpdate($update);
    }
}
/**
 * Purge metadata and all affected pages when the file is created,
 * deleted, or majorly updated.
 */
function purgeEverything() {
    // Delete thumbnails and refresh file metadata cache
    $this->purgeCache();
    $this->purgeDescription();

    // Purge cache of all pages using this file
    $fileTitle = $this->getTitle();
    if ($fileTitle) {
        DeferredUpdates::addUpdate(new HTMLCacheUpdate($fileTitle, 'imagelinks'));
    }
}
/**
 * Actually add a user to the database.
 * Give it a User object that has been initialised with a name.
 *
 * @param $u User object.
 * @param $autocreate boolean -- true if this is an autocreation via auth plugin
 * @return Status object, with the User object in the value member on success
 * @private
 */
function initUser($u, $autocreate) {
    global $wgAuth;

    $addStatus = $u->addToDatabase();
    if (!$addStatus->isOK()) {
        return $addStatus;
    }

    if ($wgAuth->allowPasswordChange()) {
        $u->setPassword($this->mPassword);
    }

    $u->setEmail($this->mEmail);
    $u->setRealName($this->mRealName);
    $u->setToken();

    $wgAuth->initUser($u, $autocreate);

    $u->setOption('rememberpassword', $this->mRemember ? 1 : 0);
    $u->saveSettings();

    // Bump the site-wide user count
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 0, 0, 0, 1));

    return Status::newGood($u);
}
/**
 * Update page_touched timestamps and send squid purge messages for
 * pages linking to this title. May be sent to the job queue depending
 * on the number of links. Typically called on create and delete.
 */
public function touchLinks() {
    DeferredUpdates::addUpdate(new HTMLCacheUpdate($this, 'pagelinks'));

    // Category pages also get a purge via their categorylinks backlinks
    if ($this->getNamespace() == NS_CATEGORY) {
        DeferredUpdates::addUpdate(new HTMLCacheUpdate($this, 'categorylinks'));
    }
}
/**
 * Intercept hook before page initialization to see if we are tracking a
 * recommendation click.
 *
 * @param Title $title
 * @param Article|null $article
 * @param OutputPage $output
 * @param User $user
 * @param WebRequest $request
 * @param MediaWiki $mediaWiki
 * @return bool Always true; this hook never aborts page initialization
 */
public static function onBeforeInitialize(&$title, &$article, &$output, &$user, $request, $mediaWiki) {
    // Make sure we exclude anons
    if ($user->getId() === 0) {
        return true;
    }

    // If they came with a recommendation URL we track it.
    // Strict comparison: the query-string value is user-controlled input,
    // so avoid PHP's loose == coercion rules.
    $track = $request->getVal("utm_campaign", "");
    if ($track === self::CAMPAIGN_NAME) {
        // Use deferred insert for tracking clicks
        $rlu = new RecommendationLogUpdate($title->getArticleId(), $user->getId());
        DeferredUpdates::addUpdate($rlu);
    }

    return true;
}
/**
 * Auto-create the given user, if necessary
 * @private Don't call this yourself. Let Setup.php do it for you at the right time.
 * @note This more properly belongs in AuthManager, but we need it now.
 *  When AuthManager comes, this will be deprecated and will pass-through
 *  to the corresponding AuthManager method.
 * @param User $user User to auto-create
 * @return bool Success
 */
public static function autoCreateUser(User $user) {
    global $wgAuth;

    $logger = self::singleton()->logger;

    // Much of this code is based on that in CentralAuth

    // Try the local user from the slave DB
    $localId = User::idFromName($user->getName());

    // Fetch the user ID from the master, so that we don't try to create the user
    // when they already exist, due to replication lag
    // @codeCoverageIgnoreStart
    if (!$localId && wfGetLB()->getReaderIndex() != 0) {
        $localId = User::idFromName($user->getName(), User::READ_LATEST);
    }
    // @codeCoverageIgnoreEnd

    if ($localId) {
        // User exists after all.
        $user->setId($localId);
        $user->loadFromId();

        return false;
    }

    // Denied by AuthPlugin? But ignore AuthPlugin itself.
    if (get_class($wgAuth) !== 'AuthPlugin' && !$wgAuth->autoCreate()) {
        $logger->debug(__METHOD__ . ': denied by AuthPlugin');
        $user->setId(0);
        $user->loadFromId();

        return false;
    }

    // Wiki is read-only?
    if (wfReadOnly()) {
        $logger->debug(__METHOD__ . ': denied by wfReadOnly()');
        $user->setId(0);
        $user->loadFromId();

        return false;
    }

    $userName = $user->getName();

    // Check the session, if we tried to create this user already there's
    // no point in retrying.
    $session = self::getGlobalSession();
    $reason = $session->get('MWSession::AutoCreateBlacklist');
    if ($reason) {
        $logger->debug(__METHOD__ . ": blacklisted in session ({$reason})");
        $user->setId(0);
        $user->loadFromId();

        return false;
    }

    // Is the IP user able to create accounts?
    $anon = new User();
    if (!$anon->isAllowedAny('createaccount', 'autocreateaccount')
        || $anon->isBlockedFromCreateAccount()
    ) {
        // Blacklist the user to avoid repeated DB queries subsequently
        // (blacklist entries expire after 600 seconds)
        $logger->debug(__METHOD__ . ': user is blocked from this wiki, blacklisting');
        $session->set('MWSession::AutoCreateBlacklist', 'blocked', 600);
        $session->persist();
        $user->setId(0);
        $user->loadFromId();

        return false;
    }

    // Check for validity of username
    if (!User::isCreatableName($userName)) {
        $logger->debug(__METHOD__ . ': Invalid username, blacklisting');
        $session->set('MWSession::AutoCreateBlacklist', 'invalid username', 600);
        $session->persist();
        $user->setId(0);
        $user->loadFromId();

        return false;
    }

    // Give other extensions a chance to stop auto creation.
    $user->loadDefaults($userName);
    $abortMessage = '';
    if (!\Hooks::run('AbortAutoAccount', array($user, &$abortMessage))) {
        // In this case we have no way to return the message to the user,
        // but we can log it.
        $logger->debug(__METHOD__ . ": denied by hook: {$abortMessage}");
        $session->set('MWSession::AutoCreateBlacklist', "hook aborted: {$abortMessage}", 600);
        $session->persist();
        $user->setId(0);
        $user->loadFromId();

        return false;
    }

    // Make sure the name has not been changed
    if ($user->getName() !== $userName) {
        $user->setId(0);
        $user->loadFromId();
        throw new \UnexpectedValueException('AbortAutoAccount hook tried to change the user name');
    }

    // Ignore warnings about master connections/writes...hard to avoid here
    \Profiler::instance()->getTransactionProfiler()->resetExpectations();

    // Backoff: skip creation if a recent attempt for this name failed
    $cache = \ObjectCache::getLocalClusterInstance();
    $backoffKey = wfMemcKey('MWSession', 'autocreate-failed', md5($userName));
    if ($cache->get($backoffKey)) {
        $logger->debug(__METHOD__ . ': denied by prior creation attempt failures');
        $user->setId(0);
        $user->loadFromId();

        return false;
    }

    // Checks passed, create the user...
    $from = isset($_SERVER['REQUEST_URI']) ? $_SERVER['REQUEST_URI'] : 'CLI';
    $logger->info(__METHOD__ . ": creating new user ({$userName}) - from: {$from}");
    try {
        // Insert the user into the local DB master
        $status = $user->addToDatabase();
        if (!$status->isOK()) {
            // @codeCoverageIgnoreStart
            $logger->error(__METHOD__ . ': failed with message ' . $status->getWikiText());
            $user->setId(0);
            $user->loadFromId();

            return false;
            // @codeCoverageIgnoreEnd
        }
    } catch (\Exception $ex) {
        // @codeCoverageIgnoreStart
        $logger->error(__METHOD__ . ': failed with exception ' . $ex->getMessage());
        // Do not keep throwing errors for a while
        $cache->set($backoffKey, 1, 600);
        // Bubble up error; which should normally trigger DB rollbacks
        throw $ex;
        // @codeCoverageIgnoreEnd
    }

    # Notify hooks (e.g. Newuserlog)
    \Hooks::run('AuthPluginAutoCreate', array($user));
    \Hooks::run('LocalUserCreated', array($user, true));

    # Notify AuthPlugin too
    $tmpUser = $user;
    $wgAuth->initUser($tmpUser, true);
    if ($tmpUser !== $user) {
        $logger->warning(__METHOD__ . ': ' .
            get_class($wgAuth) . '::initUser() replaced the user object');
    }

    $user->saveSettings();

    # Update user count
    \DeferredUpdates::addUpdate(new \SiteStatsUpdate(0, 0, 0, 0, 1));

    # Watch user's userpage and talk page
    $user->addWatch($user->getUserPage(), \WatchedItem::IGNORE_USER_RIGHTS);

    return true;
}
/**
 * Do some database updates after deletion
 *
 * @param $id Int: page_id value of the page being deleted
 */
public function doDeleteUpdates($id) {
    // Update site statistics counters
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));

    $dbw = wfGetDB(DB_MASTER);

    # Delete restrictions for it
    $dbw->delete('page_restrictions', array('pr_page' => $id), __METHOD__);

    # Fix category table counts
    $cats = array();
    $res = $dbw->select('categorylinks', 'cl_to', array('cl_from' => $id), __METHOD__);
    foreach ($res as $row) {
        $cats[] = $row->cl_to;
    }
    $this->updateCategoryCounts(array(), $cats);

    # If using cascading deletes, we can skip some explicit deletes
    if (!$dbw->cascadingDeletes()) {
        // Revisions first, then every outgoing-link/property row keyed on
        // this page ID (same order as the former explicit delete list)
        $keyFieldsByTable = array(
            'revision' => 'rev_page',
            'pagelinks' => 'pl_from',
            'imagelinks' => 'il_from',
            'categorylinks' => 'cl_from',
            'templatelinks' => 'tl_from',
            'externallinks' => 'el_from',
            'langlinks' => 'll_from',
            'iwlinks' => 'iwl_from',
            'redirect' => 'rd_from',
            'page_props' => 'pp_page',
        );
        foreach ($keyFieldsByTable as $table => $field) {
            $dbw->delete($table, array($field => $id), __METHOD__);
        }
    }

    # If using cleanup triggers, we can skip some manual deletes
    if (!$dbw->cleanupTriggers()) {
        # Clean up recentchanges entries...
        $dbw->delete('recentchanges',
            array('rc_type != ' . RC_LOG,
                'rc_namespace' => $this->mTitle->getNamespace(),
                'rc_title' => $this->mTitle->getDBkey()),
            __METHOD__);
        $dbw->delete('recentchanges',
            array('rc_type != ' . RC_LOG, 'rc_cur_id' => $id),
            __METHOD__);
    }

    # Clear caches
    self::onArticleDelete($this->mTitle);

    # Clear the cached article id so the interface doesn't act like we exist
    $this->mTitle->resetArticleID(0);
}