/**
 * Auto-review the current (latest) revision of every page in the
 * reviewable namespaces on behalf of the given user.
 *
 * Pages whose latest revision is already flagged are counted but skipped.
 * Work proceeds in batches of $this->mBatchSize page_ids, flushing deferred
 * updates and waiting for slaves between batches (bug 24375).
 *
 * @param User $user Reviewer; must exist and have the 'review' right
 */
protected function autoreview_current( User $user ) {
	$this->output( "Auto-reviewing all current page versions...\n" );
	if ( !$user->getID() ) {
		$this->output( "Invalid user specified.\n" );
		return;
	} elseif ( !$user->isAllowed( 'review' ) ) {
		$this->output( "User specified (id: {$user->getID()}) does not have \"review\" rights.\n" );
		return;
	}

	$db = wfGetDB( DB_MASTER );

	// FIX: this output call was corrupted (its argument had been replaced
	// with "******", a parse error); restore printing the reviewer's name.
	$this->output( "Reviewer username: " . $user->getName() . "\n" );

	$start = $db->selectField( 'page', 'MIN(page_id)', false, __METHOD__ );
	$end = $db->selectField( 'page', 'MAX(page_id)', false, __METHOD__ );
	if ( is_null( $start ) || is_null( $end ) ) {
		$this->output( "...page table seems to be empty.\n" );
		return;
	}
	# Do remaining chunk; pad $end so the final partial batch is covered
	$end += $this->mBatchSize - 1;
	$blockStart = $start;
	$blockEnd = $start + $this->mBatchSize - 1;

	$count = 0;
	$changed = 0;
	$flags = FlaggedRevs::quickTags( FR_CHECKED ); // Assume basic level
	while ( $blockEnd <= $end ) {
		$this->output( "...doing page_id from {$blockStart} to {$blockEnd}\n" );
		$res = $db->select(
			array( 'page', 'revision' ),
			'*',
			array(
				"page_id BETWEEN {$blockStart} AND {$blockEnd}",
				'page_namespace' => FlaggedRevs::getReviewNamespaces(),
				'rev_id = page_latest'
			),
			__METHOD__
		);
		# Go through and autoreview the current version of every page...
		foreach ( $res as $row ) {
			$title = Title::newFromRow( $row );
			$rev = Revision::newFromRow( $row );
			# Is it already reviewed?
			$frev = FlaggedRevision::newFromTitle( $title, $row->page_latest, FR_MASTER );
			# Rev should exist, but to be safe...
			if ( !$frev && $rev ) {
				$article = new Article( $title );
				$db->begin();
				FlaggedRevs::autoReviewEdit( $article, $user, $rev, $flags, true );
				FlaggedRevs::HTMLCacheUpdates( $article->getTitle() );
				$db->commit();
				$changed++;
			}
			$count++;
		}
		$db->freeResult( $res );
		// FIX: step by the full batch size. The previous code stepped by
		// (mBatchSize - 1), which made consecutive inclusive BETWEEN ranges
		// overlap by one page_id, processing one page per boundary twice.
		$blockStart += $this->mBatchSize;
		$blockEnd += $this->mBatchSize;
		// XXX: Don't let deferred jobs array get absurdly large (bug 24375)
		DeferredUpdates::doUpdates( 'commit' );
		wfWaitForSlaves( 5 );
	}
	$this->output( "Auto-reviewing of all pages complete ..." .
		"{$count} rows [{$changed} changed]\n" );
}
/**
 * Turn the ContentTranslation beta feature on for the current user.
 *
 * The preference write itself is deferred until after the response has
 * been sent; the client-side notification module is queued immediately.
 */
public function enableCXBetaFeature() {
	$out = $this->getOutput();
	$user = $this->getUser();

	$user->setOption( 'cx', '1' );

	// Promise to persist the setting post-send
	DeferredUpdates::addCallableUpdate( function () use ( $user ) {
		$user->saveSettings();
	} );

	$out->addModules( 'ext.cx.beta.notification' );
}
/**
 * Clear the "you have new messages" flag for the requesting user.
 *
 * POSTed requests clear the flag immediately; GET requests defer the
 * write until after the response has been flushed.
 */
public function execute() {
	$user = $this->getUser();

	if ( !$this->getRequest()->wasPosted() ) {
		// Read request: push the DB write past response delivery.
		DeferredUpdates::addCallableUpdate( function () use ( $user ) {
			$user->setNewtalk( false );
		} );
	} else {
		$user->setNewtalk( false );
	}

	$this->getResult()->addValue( null, $this->getModuleName(), 'success' );
}
/**
 * Entry point for Special:MovePage.
 *
 * Resolves the source and destination titles from the subpage parameter
 * and/or request fields, checks move permissions, reads the form options,
 * and either processes the submission or (re)displays the form.
 *
 * @param string|null $par Subpage name, used as the move target if given
 * @throws ErrorPageError If no valid or existing source title was supplied
 * @throws PermissionsError If the user may not move the source page
 */
public function execute( $par ) {
	$this->useTransactionalTimeLimit();
	$this->checkReadOnly();
	$this->setHeaders();
	$this->outputHeader();

	$request = $this->getRequest();

	// Subpage parameter wins over the request's 'target' field.
	$target = is_null( $par ) ? $request->getVal( 'target' ) : $par;
	// Yes, the use of getVal() and getText() is wanted, see bug 20365
	$oldTitleText = $request->getVal( 'wpOldTitle', $target );
	$this->oldTitle = Title::newFromText( $oldTitleText );
	if ( !$this->oldTitle ) {
		// Either oldTitle wasn't passed, or newFromText returned null
		throw new ErrorPageError( 'notargettitle', 'notargettext' );
	}
	if ( !$this->oldTitle->exists() ) {
		throw new ErrorPageError( 'nopagetitle', 'nopagetext' );
	}

	$newTitleTextMain = $request->getText( 'wpNewTitleMain' );
	$newTitleTextNs = $request->getInt( 'wpNewTitleNs', $this->oldTitle->getNamespace() );
	// Backwards compatibility for forms submitting here from other sources,
	// which is more common than it should be.
	$newTitleTextBc = $request->getText( 'wpNewTitle' );
	if ( strlen( $newTitleTextBc ) > 0 ) {
		$this->newTitle = Title::newFromText( $newTitleTextBc );
	} else {
		$this->newTitle = Title::makeTitleSafe( $newTitleTextNs, $newTitleTextMain );
	}

	$user = $this->getUser();

	# Check rights
	$permErrors = $this->oldTitle->getUserPermissionsErrors( 'move', $user );
	if ( count( $permErrors ) ) {
		// Auto-block user's IP if the account was "hard" blocked
		DeferredUpdates::addCallableUpdate( function () use ( $user ) {
			$user->spreadAnyEditBlock();
		} );
		throw new PermissionsError( 'move', $permErrors );
	}

	// Checkbox defaults: checked on a fresh form, unchecked-by-absence on POST.
	$def = !$request->wasPosted();

	$this->reason = $request->getText( 'wpReason' );
	$this->moveTalk = $request->getBool( 'wpMovetalk', $def );
	$this->fixRedirects = $request->getBool( 'wpFixRedirects', $def );
	$this->leaveRedirect = $request->getBool( 'wpLeaveRedirect', $def );
	$this->moveSubpages = $request->getBool( 'wpMovesubpages', false );
	$this->deleteAndMove = $request->getBool( 'wpDeleteAndMove' )
		&& $request->getBool( 'wpConfirm' );
	$this->moveOverShared = $request->getBool( 'wpMoveOverSharedFile', false );
	$this->watch = $request->getCheck( 'wpWatch' ) && $user->isLoggedIn();

	$isValidSubmit = $request->getVal( 'action' ) == 'submit'
		&& $request->wasPosted()
		&& $user->matchEditToken( $request->getVal( 'wpEditToken' ) );
	if ( $isValidSubmit ) {
		$this->doSubmit();
	} else {
		$this->showForm( array() );
	}
}
/**
 * Apply a site-statistics update, optionally coalescing DB writes.
 *
 * With $wgSiteStatsAsyncFactor set to N, roughly only 1 in N calls
 * performs the real DB update; the rest just accumulate "pending delta"
 * values in memcached.
 */
public function doUpdate() {
	global $wgSiteStatsAsyncFactor;

	$this->doUpdateContextStats();

	$asyncRate = $wgSiteStatsAsyncFactor; // convenience
	// If set to do so, only do actual DB updates 1 every $asyncRate times.
	// The other times, just update "pending delta" values in memcached.
	$deltasOnly = $asyncRate && ( $asyncRate < 0 || mt_rand( 0, $asyncRate - 1 ) != 0 );
	if ( $deltasOnly ) {
		$this->doUpdatePendingDeltas();
	} else {
		// Need a separate transaction because this takes a global lock
		DeferredUpdates::addCallableUpdate( [ $this, 'tryDBUpdateInternal' ] );
	}
}
/**
 * Deferred updates run in insertion order; an update queued from inside
 * another update runs after the initial batch (non-CLI mode).
 */
public function testDoUpdates() {
	$updates = array(
		'1' => 'deferred update 1',
		'2' => 'deferred update 2',
		'3' => 'deferred update 3',
		'2-1' => 'deferred update 1 within deferred update 2',
	);

	DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
		echo $updates['1'];
	} );
	// This update queues a nested update while it runs.
	DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
		echo $updates['2'];
		DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
			echo $updates['2-1'];
		} );
	} );
	DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
		echo $updates['3'];
	} );

	// Expected output follows the array order: 1, 2, 3, then the nested 2-1.
	$this->expectOutputString( implode( '', $updates ) );

	DeferredUpdates::doUpdates();
}
/**
 * In CLI mode a nested deferred update runs immediately after its parent,
 * before later top-level updates.
 */
public function testDoUpdatesCLI() {
	$this->setMwGlobals( 'wgCommandLineMode', true );
	$updates = array(
		'1' => 'deferred update 1',
		'2' => 'deferred update 2',
		'2-1' => 'deferred update 1 within deferred update 2',
		'3' => 'deferred update 3',
	);

	DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
		echo $updates['1'];
	} );
	// This update queues a nested update while it runs.
	DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
		echo $updates['2'];
		DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
			echo $updates['2-1'];
		} );
	} );
	DeferredUpdates::addCallableUpdate( function () use ( $updates ) {
		echo $updates['3'];
	} );

	// Expected output follows the array order: 1, 2, 2-1, then 3.
	$this->expectOutputString( implode( '', $updates ) );

	DeferredUpdates::doUpdates();
}
/**
 * Two CDN purge updates queued in web mode should be merged: the second
 * update's URLs are folded into the first queued CdnCacheUpdate.
 */
public function testPurgeMergeWeb() {
	$this->setMwGlobals( 'wgCommandLineMode', false );

	$title = Title::newMainPage();

	$urls1 = array(
		$title->getCanonicalURL( '?x=1' ),
		$title->getCanonicalURL( '?x=2' ),
		$title->getCanonicalURL( '?x=3' ),
	);
	$update1 = new CdnCacheUpdate( $urls1 );
	DeferredUpdates::addUpdate( $update1 );

	$urls2 = array(
		$title->getCanonicalURL( '?x=2' ),
		$title->getCanonicalURL( '?x=3' ),
		$title->getCanonicalURL( '?x=4' ),
	);
	$update2 = new CdnCacheUpdate( $urls2 );
	DeferredUpdates::addUpdate( $update2 );

	// Inspect the private URL list of the first (merged-into) update.
	$wrapper = TestingAccessWrapper::newFromObject( $update1 );
	$this->assertEquals( array_merge( $urls1, $urls2 ), $wrapper->urls );
}
/**
 * Authenticate the session user from an SSL client certificate.
 *
 * Reads the certificate subject DN supplied by the web server, uses the
 * CN as the username, auto-creates a local account on first sight, and
 * marks the session as authenticated.
 *
 * @param User $user User object being loaded
 * @param bool &$result Set true to abort the default session auth
 * @return bool Always true (continue hook processing)
 */
public static function onUserLoadFromSession( $user, &$result ) {
	$result = false; // don't attempt default auth process

	if ( !isset( $_SERVER['SSL_CLIENT_S_DN'] ) ) {
		// No client certificate was presented.
		return true;
	}
	$dn = self::parseDistinguishedName( $_SERVER['SSL_CLIENT_S_DN'] );
	if ( !isset( $dn['CN'] ) ) {
		// Certificate has no usable common name.
		return true;
	}

	$localId = User::idFromName( $dn['CN'] );
	if ( $localId !== null ) {
		// Known account: load it.
		$user->setID( $localId );
		$user->loadFromId();
	} else {
		// First login with this certificate: auto-create the local account.
		$user->loadDefaults( $dn['CN'] );
		if ( !User::isCreatableName( $user->getName() ) ) {
			wfDebug( __METHOD__ . ": Invalid username\n" );
			return true;
		}
		$user->addToDatabase();
		if ( isset( $dn['emailAddress'] ) ) {
			$user->setEmail( $dn['emailAddress'] );
		}
		$user->saveSettings();
		$user->addNewUserLogEntryAutoCreate();
		Hooks::run( 'AuthPluginAutoCreate', array( $user ) );
		// Bump the site user count by one.
		DeferredUpdates::addUpdate( new SiteStatsUpdate( 0, 0, 0, 0, 1 ) );
	}

	global $wgUser;
	$wgUser =& $user;
	$result = true; // this also aborts default auth process

	return true;
}
/**
 * Prepare an isolated parser/file-repo test environment.
 *
 * NOTE(review): heavy ordered global-state manipulation (globals must be
 * saved before being overwritten, caches reset before the request context
 * is rebuilt), so the code is left untouched and only annotated.
 */
protected function setUp() {
	// Globals mutated directly below; presumably restored in tearDown().
	global $wgParser, $wgParserConf, $IP, $messageMemc, $wgMemc, $wgUser,
		$wgLang, $wgOut, $wgRequest, $wgStyleDirectory, $wgParserCacheType,
		$wgNamespaceAliases, $wgNamespaceProtection, $parserMemc;

	$tmpDir = $this->getNewTempDirectory();

	// Globals overridden via $GLOBALS, with old values saved for restore.
	$tmpGlobals = [];
	$tmpGlobals['wgScript'] = '/index.php';
	$tmpGlobals['wgScriptPath'] = '/';
	$tmpGlobals['wgArticlePath'] = '/wiki/$1';
	$tmpGlobals['wgStylePath'] = '/skins';
	$tmpGlobals['wgThumbnailScriptPath'] = false;
	// Local file repo backed by a throwaway directory tree under $tmpDir.
	$tmpGlobals['wgLocalFileRepo'] = ['class' => 'LocalRepo', 'name' => 'local',
		'url' => 'http://example.com/images', 'hashLevels' => 2,
		'transformVia404' => false,
		'backend' => new FSFileBackend(['name' => 'local-backend',
			'wikiId' => wfWikiID(),
			'containerPaths' => ['local-public' => "{$tmpDir}/test-repo/public",
				'local-thumb' => "{$tmpDir}/test-repo/thumb",
				'local-temp' => "{$tmpDir}/test-repo/temp",
				'local-deleted' => "{$tmpDir}/test-repo/delete"]])];
	foreach ($tmpGlobals as $var => $val) {
		// Remember the previous value (if any) before clobbering it.
		if (array_key_exists($var, $GLOBALS)) {
			$this->savedGlobals[$var] = $GLOBALS[$var];
		}
		$GLOBALS[$var] = $val;
	}

	$wgNamespaceProtection[NS_MEDIAWIKI] = 'editinterface';
	$wgNamespaceAliases['Image'] = NS_FILE;
	$wgNamespaceAliases['Image_talk'] = NS_FILE_TALK;
	$wgParserCacheType = CACHE_NONE;

	// Drop any updates queued by earlier tests, then rebuild the caches.
	DeferredUpdates::clearPendingUpdates();
	$wgMemc = wfGetMainCache();
	$messageMemc = wfGetMessageCacheStorage();
	$parserMemc = wfGetParserCacheStorage();

	// Rebuild the main request context and the context-derived globals.
	RequestContext::resetMain();
	$context = RequestContext::getMain();
	$wgUser = new User();
	$wgLang = $context->getLanguage();
	$wgOut = $context->getOutput();
	$wgParser = new StubObject('wgParser', $wgParserConf['class'], [$wgParserConf]);
	$wgRequest = $context->getRequest();

	if ($wgStyleDirectory === false) {
		$wgStyleDirectory = "{$IP}/skins";
	}

	// These singletons cache repo/backend config; reset so new globals apply.
	RepoGroup::destroySingleton();
	FileBackendGroup::destroySingleton();
}
/**
 * Render the mobile watchlist special page.
 *
 * Only the 'feed' view is rendered here; any other view redirects to
 * Special:EditWatchlist. Changed user options are persisted post-send.
 *
 * @param string $par Parameter submitted as subpage
 */
function executeWhenAvailable( $par ) {
	// Anons don't get a watchlist
	$this->requireLogin( 'mobile-frontend-watchlist-purpose' );

	$ctx = MobileContext::singleton();
	$this->usePageImages = !$ctx->imagesDisabled() && defined( 'PAGE_IMAGES_INSTALLED' );

	$user = $this->getUser();
	$output = $this->getOutput();
	$output->addModules( 'skins.minerva.special.watchlist.scripts' );
	// FIXME: Loads twice with JS enabled (T87871)
	$output->addModuleStyles( array(
		'skins.minerva.special.watchlist.styles',
		'mobile.pagelist.styles',
		'mobile.pagesummary.styles',
	) );

	$request = $this->getRequest();
	$this->view = $request->getVal( 'watchlistview', 'a-z' );
	$this->filter = $request->getVal( 'filter', 'all' );
	$this->fromPageTitle = Title::newFromText( $request->getVal( 'from', false ) );

	$output->setPageTitle( $this->msg( 'watchlist' ) );

	// This needs to be done before calling getWatchlistHeader
	$this->updateStickyTabs();

	if ( $this->optionsChanged ) {
		// Persist option changes after the response has been sent.
		DeferredUpdates::addCallableUpdate( function () use ( $user ) {
			$user->saveSettings();
		} );
	}

	if ( $this->view !== 'feed' ) {
		// Non-feed views are handled by the standard watchlist editor.
		$output->redirect( SpecialPage::getTitleFor( 'EditWatchlist' )->getLocalURL() );
		return;
	}

	$output->addHtml( $this->getWatchlistHeader( $user ) );
	$output->addHtml( Html::openElement( 'div', array( 'class' => 'content-unstyled' ) ) );
	$this->showRecentChangesHeader();
	$res = $this->doFeedQuery();
	if ( $res->numRows() ) {
		$this->showFeedResults( $res );
	} else {
		$this->showEmptyList( true );
	}
	$output->addHtml( Html::closeElement( 'div' ) );
}
/**
 * Keeps track of the five most recently used message groups per user.
 *
 * Stored as a '|'-separated list in the 'translate-recent-groups' user
 * option; the write is deferred until after the response is sent.
 *
 * @param MessageGroup $group Group that was just used
 * @param User $user
 * @return bool Always true (hook convention)
 */
public static function trackGroup( MessageGroup $group, User $user ) {
	if ( $user->isAnon() ) {
		// Nothing to record for anonymous users.
		return true;
	}

	$serialized = $user->getOption( 'translate-recent-groups', '' );
	$recent = $serialized === '' ? array() : explode( '|', $serialized );

	$groupId = $group->getId();
	if ( isset( $recent[0] ) && $recent[0] === $groupId ) {
		// Already the most recent entry; nothing to do.
		return true;
	}

	// Move (or insert) this group to the front, dedupe, and cap at five.
	array_unshift( $recent, $groupId );
	$recent = array_unique( $recent );
	$recent = array_slice( $recent, 0, 5 );

	$user->setOption( 'translate-recent-groups', implode( '|', $recent ) );
	// Promise to persist the data post-send
	DeferredUpdates::addCallableUpdate( function () use ( $user ) {
		$user->saveSettings();
	} );

	return true;
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * NOTE(review): transaction-sensitive (FOR UPDATE row lock, ordered
 * revision inserts, archive cleanup); code left byte-identical,
 * comments only.
 *
 * @param array $timestamps Pass an empty array to restore all revisions,
 *   otherwise list the ones to undelete.
 * @param bool $unsuppress Remove all ar_deleted/fa_deleted restrictions of selected revs
 * @param string $comment
 * @throws ReadOnlyError
 * @return Status Status object containing the number of revisions restored on success
 */
private function undeleteRevisions($timestamps, $unsuppress = false, $comment = '') {
	if (wfReadOnly()) {
		throw new ReadOnlyError();
	}
	$restoreAll = empty($timestamps);
	$dbw = wfGetDB(DB_MASTER);

	# Does this page already exist? We'll have to update it...
	$article = WikiPage::factory($this->title);
	# Load latest data for the current page (bug 31179)
	$article->loadPageData('fromdbmaster');
	$oldcountable = $article->isCountable();

	// Lock the page row (FOR UPDATE) to serialize concurrent undeletes/edits.
	$page = $dbw->selectRow('page', array('page_id', 'page_latest'),
		array('page_namespace' => $this->title->getNamespace(),
			'page_title' => $this->title->getDBkey()),
		__METHOD__, array('FOR UPDATE'));
	if ($page) {
		$makepage = false;
		# Page already exists. Import the history, and if necessary
		# we'll update the latest revision field in the record.
		$previousRevId = $page->page_latest;
		# Get the time span of this page
		$previousTimestamp = $dbw->selectField('revision', 'rev_timestamp',
			array('rev_id' => $previousRevId), __METHOD__);
		if ($previousTimestamp === false) {
			// page_latest is dangling; bail out with a warning status.
			wfDebug(__METHOD__ . ": existing page refers to a page_latest that does not exist\n");
			$status = Status::newGood(0);
			$status->warning('undeleterevision-missing');
			return $status;
		}
	} else {
		# Have to create a new article...
		$makepage = true;
		$previousRevId = 0;
		$previousTimestamp = 0;
	}

	// Archive-row selection: all revisions, or only the listed timestamps.
	$oldWhere = array('ar_namespace' => $this->title->getNamespace(),
		'ar_title' => $this->title->getDBkey());
	if (!$restoreAll) {
		$oldWhere['ar_timestamp'] = array_map(array(&$dbw, 'timestamp'), $timestamps);
	}
	$fields = array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text',
		'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id', 'ar_deleted',
		'ar_page_id', 'ar_len', 'ar_sha1');
	if ($this->config->get('ContentHandlerUseDB')) {
		$fields[] = 'ar_content_format';
		$fields[] = 'ar_content_model';
	}

	/**
	 * Select each archived revision...
	 */
	$result = $dbw->select('archive', $fields, $oldWhere, __METHOD__,
		array('ORDER BY' => 'ar_timestamp'));

	$rev_count = $result->numRows();
	if (!$rev_count) {
		wfDebug(__METHOD__ . ": no revisions to restore\n");
		$status = Status::newGood(0);
		$status->warning("undelete-no-results");
		return $status;
	}

	$result->seek($rev_count - 1); // move to last
	$row = $result->fetchObject(); // get newest archived rev
	$oldPageId = (int) $row->ar_page_id; // pass this to ArticleUndelete hook
	$result->seek(0); // move back

	// grab the content to check consistency with global state before restoring the page.
	$revision = Revision::newFromArchiveRow($row,
		array('title' => $article->getTitle()));
	$user = User::newFromName($revision->getUserText(Revision::RAW), false);
	$content = $revision->getContent(Revision::RAW);

	// NOTE: article ID may not be known yet. prepareSave() should not modify the database.
	$status = $content->prepareSave($article, 0, -1, $user);
	if (!$status->isOK()) {
		return $status;
	}

	if ($makepage) {
		// Check the state of the newest to-be version...
		if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
			return Status::newFatal("undeleterevdel");
		}
		// Safe to insert now...
		$newid = $article->insertOn($dbw, $row->ar_page_id);
		if ($newid === false) {
			// The old ID is reserved; let's pick another
			$newid = $article->insertOn($dbw);
		}
		$pageId = $newid;
	} else {
		// Check if a deleted revision will become the current revision...
		if ($row->ar_timestamp > $previousTimestamp) {
			// Check the state of the newest to-be version...
			if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
				return Status::newFatal("undeleterevdel");
			}
		}
		$newid = false;
		$pageId = $article->getId();
	}

	$revision = null;
	$restored = 0;
	foreach ($result as $row) {
		// Check for key dupes due to needed archive integrity.
		if ($row->ar_rev_id) {
			$exists = $dbw->selectField('revision', '1',
				array('rev_id' => $row->ar_rev_id), __METHOD__);
			if ($exists) {
				continue; // don't throw DB errors
			}
		}
		// Insert one revision at a time...maintaining deletion status
		// unless we are specifically removing all restrictions...
		$revision = Revision::newFromArchiveRow($row,
			array('page' => $pageId, 'title' => $this->title,
				'deleted' => $unsuppress ? 0 : $row->ar_deleted));
		$revision->insertOn($dbw);
		$restored++;
		Hooks::run('ArticleRevisionUndeleted',
			array(&$this->title, $revision, $row->ar_page_id));
	}

	# Now that it's safely stored, take it out of the archive
	$dbw->delete('archive', $oldWhere, __METHOD__);

	// Was anything restored at all?
	if ($restored == 0) {
		return Status::newGood(0);
	}

	$created = (bool) $newid;
	// Attach the latest revision to the page...
	$wasnew = $article->updateIfNewerOn($dbw, $revision, $previousRevId);
	if ($created || $wasnew) {
		// Update site stats, link tables, etc
		$article->doEditUpdates($revision,
			User::newFromName($revision->getUserText(Revision::RAW), false),
			array('created' => $created, 'oldcountable' => $oldcountable, 'restored' => true));
	}

	Hooks::run('ArticleUndelete', array(&$this->title, $created, $comment, $oldPageId));

	if ($this->title->getNamespace() == NS_FILE) {
		// File pages: also purge the HTML caches of pages embedding this file.
		DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->title, 'imagelinks'));
	}

	return Status::newGood($restored);
}
/**
 * Override handling of action=purge.
 *
 * Purges the file's thumbnail/metadata caches (and, when the file exists,
 * the HTML caches of pages embedding it) plus the repo's image redirect
 * cache, then delegates to the parent implementation.
 *
 * @return bool
 */
public function doPurge() {
	$this->loadFile();

	if ( !$this->mFile->exists() ) {
		wfDebug( 'ImagePage::doPurge no image for '
			. $this->mFile->getName() . "; limiting purge to cache only\n" );
		// even if the file supposedly doesn't exist, force any cached information
		// to be updated (in case the cached information is wrong)
		$this->mFile->purgeCache( [ 'forThumbRefresh' => true ] );
	} else {
		wfDebug( 'ImagePage::doPurge purging ' . $this->mFile->getName() . "\n" );
		DeferredUpdates::addUpdate( new HTMLCacheUpdate( $this->mTitle, 'imagelinks' ) );
		$this->mFile->purgeCache( [ 'forThumbRefresh' => true ] );
	}

	if ( $this->mRepo ) {
		// Purge redirect cache
		$this->mRepo->invalidateImageRedirect( $this->mTitle );
	}

	return parent::doPurge();
}
/**
 * Purge caches on page update etc.
 *
 * @param Title $title
 * @param Revision|null $revision Revision that was just saved, may be null
 */
public static function onArticleEdit( Title $title, Revision $revision = null ) {
	// Invalidate caches of articles which include this page
	DeferredUpdates::addUpdate( new HTMLCacheUpdate( $title, 'templatelinks' ) );
	// Invalidate the caches of all pages which redirect here
	DeferredUpdates::addUpdate( new HTMLCacheUpdate( $title, 'redirect' ) );

	MediaWikiServices::getInstance()->getLinkCache()->invalidateTitle( $title );

	// Purge CDN for this page only
	$title->purgeSquid();
	// Clear file cache for this page only
	HTMLFileCache::clearFileCache( $title );

	// Refresh the cached page-info once the response has been sent.
	$revid = $revision === null ? null : $revision->getId();
	DeferredUpdates::addCallableUpdate( function () use ( $title, $revid ) {
		InfoAction::invalidateCache( $title, $revid );
	} );
}
/**
 * Clear the user's notification timestamp for the given title.
 *
 * If e-notif e-mails are on, they will receive notification mails on
 * the next change of the page if it's watched etc.
 * @note If the user doesn't have 'editmywatchlist', this will do nothing.
 * @param Title &$title Title of the article to look at
 * @param int $oldid The revision id being viewed. If not given or 0, latest revision is assumed.
 */
public function clearNotification( &$title, $oldid = 0 ) {
	global $wgUseEnotif, $wgShowUpdatedMarker;

	// Do nothing if the database is locked to writes
	if ( wfReadOnly() ) {
		return;
	}
	// Do nothing if not allowed to edit the watchlist
	if ( !$this->isAllowed( 'editmywatchlist' ) ) {
		return;
	}

	$isOwnTalk = $title->getNamespace() == NS_USER_TALK
		&& $title->getText() == $this->getName();

	// If we're working on user's talk page, we should update the talk page
	// message indicator
	if ( $isOwnTalk ) {
		if ( !Hooks::run( 'UserClearNewTalkNotification', array( &$this, $oldid ) ) ) {
			return;
		}
		$that = $this; // closures cannot capture $this on old PHP versions
		// Try to update the DB post-send and only if needed...
		DeferredUpdates::addCallableUpdate( function () use ( $that, $title, $oldid ) {
			if ( !$that->getNewtalk() ) {
				return; // no notifications to clear
			}
			// Delete the last notifications (they stack up)
			$that->setNewtalk( false );
			// If there is a new, unseen, revision, use its timestamp
			$nextid = $oldid
				? $title->getNextRevisionID( $oldid, Title::GAID_FOR_UPDATE )
				: null;
			if ( $nextid ) {
				$that->setNewtalk( true, Revision::newFromId( $nextid ) );
			}
		} );
	}

	if ( !$wgUseEnotif && !$wgShowUpdatedMarker ) {
		return;
	}
	if ( $this->isAnon() ) {
		// Nothing else to do...
		return;
	}

	// Only update the timestamp if the page is being watched.
	// The query to find out if it is watched is cached both in memcached and
	// per-invocation, and when it does have to be executed, it can be on a slave.
	// If this is the user's newtalk page, we always update the timestamp
	$force = $isOwnTalk ? 'force' : '';
	$this->getWatchedItem( $title )->resetNotificationTimestamp(
		$force, $oldid, WatchedItem::DEFERRED );
}
/**
 * Run the transaction, except the cleanup batch.
 * The cleanup batch should be run in a separate transaction, because it locks different
 * rows and there's no need to keep the image row locked while it's acquiring those locks.
 * The caller may have its own transaction open.
 * So we save the batch and let the caller call cleanup().
 *
 * NOTE(review): lock-sensitive multi-step restore (file store batch plus
 * image/oldimage/filearchive row shuffling); code left byte-identical,
 * comments only.
 *
 * @return FileRepoStatus
 */
function execute() {
	global $wgLang;

	if (!$this->all && !$this->ids) {
		// Do nothing
		return $this->file->repo->newGood();
	}

	// Lock the image row for the duration; released via unlock() on all exits.
	$exists = $this->file->lock();
	$dbw = $this->file->repo->getMasterDB();
	$status = $this->file->repo->newGood();

	// Fetch all or selected archived revisions for the file,
	// sorted from the most recent to the oldest.
	$conditions = array('fa_name' => $this->file->getName());
	if (!$this->all) {
		$conditions['fa_id'] = $this->ids;
	}
	$result = $dbw->select('filearchive', ArchivedFile::selectFields(),
		$conditions, __METHOD__, array('ORDER BY' => 'fa_timestamp DESC'));

	$idsPresent = array();
	$storeBatch = array();
	$insertBatch = array();
	$insertCurrent = false;
	$deleteIds = array();
	$first = true; // first row = newest revision = candidate for current version
	$archiveNames = array();
	foreach ($result as $row) {
		$idsPresent[] = $row->fa_id;
		if ($row->fa_name != $this->file->getName()) {
			$status->error('undelete-filename-mismatch', $wgLang->timeanddate($row->fa_timestamp));
			$status->failCount++;
			continue;
		}
		if ($row->fa_storage_key == '') {
			// Revision was missing pre-deletion
			$status->error('undelete-bad-store-key', $wgLang->timeanddate($row->fa_timestamp));
			$status->failCount++;
			continue;
		}
		// Source location of the deleted file within the repo's deleted zone.
		$deletedRel = $this->file->repo->getDeletedHashPath($row->fa_storage_key) .
			$row->fa_storage_key;
		$deletedUrl = $this->file->repo->getVirtualUrl() . '/deleted/' . $deletedRel;
		if (isset($row->fa_sha1)) {
			$sha1 = $row->fa_sha1;
		} else {
			// old row, populate from key
			$sha1 = LocalRepo::getHashFromKey($row->fa_storage_key);
		}
		# Fix leading zero
		if (strlen($sha1) == 32 && $sha1[0] == '0') {
			$sha1 = substr($sha1, 1);
		}
		if (is_null($row->fa_major_mime) || $row->fa_major_mime == 'unknown'
			|| is_null($row->fa_minor_mime) || $row->fa_minor_mime == 'unknown'
			|| is_null($row->fa_media_type) || $row->fa_media_type == 'UNKNOWN'
			|| is_null($row->fa_metadata)) {
			// Refresh our metadata
			// Required for a new current revision; nice for older ones too. :)
			$props = RepoGroup::singleton()->getFileProps($deletedUrl);
		} else {
			$props = array('minor_mime' => $row->fa_minor_mime,
				'major_mime' => $row->fa_major_mime,
				'media_type' => $row->fa_media_type,
				'metadata' => $row->fa_metadata);
		}
		if ($first && !$exists) {
			// This revision will be published as the new current version
			$destRel = $this->file->getRel();
			$insertCurrent = array('img_name' => $row->fa_name,
				'img_size' => $row->fa_size, 'img_width' => $row->fa_width,
				'img_height' => $row->fa_height, 'img_metadata' => $props['metadata'],
				'img_bits' => $row->fa_bits, 'img_media_type' => $props['media_type'],
				'img_major_mime' => $props['major_mime'],
				'img_minor_mime' => $props['minor_mime'],
				'img_description' => $row->fa_description,
				'img_user' => $row->fa_user, 'img_user_text' => $row->fa_user_text,
				'img_timestamp' => $row->fa_timestamp, 'img_sha1' => $sha1);
			// The live (current) version cannot be hidden!
			if (!$this->unsuppress && $row->fa_deleted) {
				$status->fatal('undeleterevdel');
				$this->file->unlock();
				return $status;
			}
		} else {
			$archiveName = $row->fa_archive_name;
			if ($archiveName == '') {
				// This was originally a current version; we
				// have to devise a new archive name for it.
				// Format is <timestamp of archiving>!<name>
				$timestamp = wfTimestamp(TS_UNIX, $row->fa_deleted_timestamp);
				do {
					$archiveName = wfTimestamp(TS_MW, $timestamp) . '!' . $row->fa_name;
					$timestamp++;
				} while (isset($archiveNames[$archiveName]));
			}
			$archiveNames[$archiveName] = true;
			$destRel = $this->file->getArchiveRel($archiveName);
			$insertBatch[] = array('oi_name' => $row->fa_name,
				'oi_archive_name' => $archiveName, 'oi_size' => $row->fa_size,
				'oi_width' => $row->fa_width, 'oi_height' => $row->fa_height,
				'oi_bits' => $row->fa_bits, 'oi_description' => $row->fa_description,
				'oi_user' => $row->fa_user, 'oi_user_text' => $row->fa_user_text,
				'oi_timestamp' => $row->fa_timestamp, 'oi_metadata' => $props['metadata'],
				'oi_media_type' => $props['media_type'],
				'oi_major_mime' => $props['major_mime'],
				'oi_minor_mime' => $props['minor_mime'],
				'oi_deleted' => $this->unsuppress ? 0 : $row->fa_deleted,
				'oi_sha1' => $sha1);
		}
		$deleteIds[] = $row->fa_id;
		if (!$this->unsuppress && $row->fa_deleted & File::DELETED_FILE) {
			// private files can stay where they are
			$status->successCount++;
		} else {
			$storeBatch[] = array($deletedUrl, 'public', $destRel);
			$this->cleanupBatch[] = $row->fa_storage_key;
		}
		$first = false;
	}
	unset($result);

	// Add a warning to the status object for missing IDs
	$missingIds = array_diff($this->ids, $idsPresent);
	foreach ($missingIds as $id) {
		$status->error('undelete-missing-filearchive', $id);
	}

	// Remove missing files from batch, so we don't get errors when undeleting them
	$storeBatch = $this->removeNonexistentFiles($storeBatch);

	// Run the store batch
	// Use the OVERWRITE_SAME flag to smooth over a common error
	$storeStatus = $this->file->repo->storeBatch($storeBatch, FileRepo::OVERWRITE_SAME);
	$status->merge($storeStatus);

	if (!$status->isGood()) {
		// Even if some files could be copied, fail entirely as that is the
		// easiest thing to do without data loss
		$this->cleanupFailedBatch($storeStatus, $storeBatch);
		$status->ok = false;
		$this->file->unlock();

		return $status;
	}

	// Run the DB updates
	// Because we have locked the image row, key conflicts should be rare.
	// If they do occur, we can roll back the transaction at this time with
	// no data loss, but leaving unregistered files scattered throughout the
	// public zone.
	// This is not ideal, which is why it's important to lock the image row.
	if ($insertCurrent) {
		$dbw->insert('image', $insertCurrent, __METHOD__);
	}
	if ($insertBatch) {
		$dbw->insert('oldimage', $insertBatch, __METHOD__);
	}
	if ($deleteIds) {
		$dbw->delete('filearchive', array('fa_id' => $deleteIds), __METHOD__);
	}

	// If store batch is empty (all files are missing), deletion is to be considered successful
	if ($status->successCount > 0 || !$storeBatch) {
		if (!$exists) {
			wfDebug(__METHOD__ . " restored {$status->successCount} items, creating a new current\n");
			// Site stats: one image added back to the wiki.
			DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('images' => 1)));
			$this->file->purgeEverything();
		} else {
			wfDebug(__METHOD__ . " restored {$status->successCount} as archived versions\n");
			$this->file->purgeDescription();
			$this->file->purgeHistory();
		}
	}

	$this->file->unlock();

	return $status;
}
/**
 * Invalidate any necessary link lists related to page property changes.
 *
 * For each changed property that has an entry in $wgPagePropLinkInvalidations,
 * queue an HTML cache purge of pages linked via the configured link table(s).
 *
 * @param array $changed Map of (property name => new value); only the names are used
 */
private function invalidateProperties($changed) {
	global $wgPagePropLinkInvalidations;

	foreach ($changed as $propName => $propValue) {
		if (!isset($wgPagePropLinkInvalidations[$propName])) {
			continue; // no invalidation configured for this property
		}
		// Config value may be a single table name or a list of them
		$tables = $wgPagePropLinkInvalidations[$propName];
		if (!is_array($tables)) {
			$tables = array($tables);
		}
		foreach ($tables as $table) {
			DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->mTitle, $table));
		}
	}
}
/**
 * Ends this task peacefully.
 *
 * Flushes deferred updates, runs one round of queued jobs, logs profiling
 * data, and finally commits and shuts down all master DB connections.
 * The call order here matters: deferred updates and jobs may write to the
 * master, so the LBFactory commit/shutdown must come last.
 */
public function restInPeace() {
	// Do any deferred jobs
	DeferredUpdates::doUpdates('commit');
	// Execute a job from the queue
	$this->doJobs();
	// Log profiling data, e.g. in the database or UDP
	wfLogProfilingData();
	// Commit and close up!
	$factory = wfGetLBFactory();
	$factory->commitMasterChanges();
	$factory->shutdown();
	wfDebug("Request ended normally\n");
}
/**
 * Makes an entry in the database corresponding to page creation
 * Note: the title object must be loaded with the new id using resetArticleID()
 *
 * @param string $timestamp
 * @param Title $title
 * @param bool $minor
 * @param User $user
 * @param string $comment
 * @param bool $bot
 * @param string $ip
 * @param int $size
 * @param int $newId
 * @param int $patrol
 * @return RecentChange
 */
public static function notifyNew($timestamp, &$title, $minor, &$user, $comment, $bot, $ip = '', $size = 0, $newId = 0, $patrol = 0) {
	$change = new RecentChange();
	$change->mTitle = $title;
	$change->mPerformer = $user;
	$change->mAttribs = array(
		'rc_timestamp' => $timestamp,
		'rc_namespace' => $title->getNamespace(),
		'rc_title' => $title->getDBkey(),
		'rc_type' => RC_NEW,
		'rc_source' => self::SRC_NEW,
		'rc_minor' => $minor ? 1 : 0,
		'rc_cur_id' => $title->getArticleID(),
		'rc_user' => $user->getId(),
		'rc_user_text' => $user->getName(),
		'rc_comment' => $comment,
		'rc_this_oldid' => $newId,
		'rc_last_oldid' => 0,
		'rc_bot' => $bot ? 1 : 0,
		'rc_ip' => self::checkIPAddress($ip),
		'rc_patrolled' => intval($patrol),
		'rc_new' => 1,
		'rc_old_len' => 0,
		'rc_new_len' => $size,
		'rc_deleted' => 0,
		'rc_logid' => 0,
		'rc_log_type' => null,
		'rc_log_action' => '',
		'rc_params' => '',
	);
	$change->mExtra = array(
		'prefixedDBkey' => $title->getPrefixedDBkey(),
		'lastTimestamp' => 0,
		'oldSize' => 0,
		'newSize' => $size,
		'pageStatus' => 'created',
	);
	// Defer the actual save (and any patrol log entry) to the end of the request
	DeferredUpdates::addCallableUpdate(function () use($change) {
		$change->save();
		if ($change->mAttribs['rc_patrolled']) {
			PatrolLog::record($change, true, $change->getPerformer());
		}
	});
	return $change;
}
/**
 * Run jobs of the specified number/type for the specified time
 *
 * The response map has a 'job' field that lists status of each job, including:
 *   - type : the job type
 *   - status : ok/failed
 *   - error : any error message string
 *   - time : the job run time in ms
 * The response map also has:
 *   - backoffs : the (job type => seconds) map of backoff times
 *   - elapsed : the total time spent running tasks in ms
 *   - reached : the reason the script finished, one of (none-ready, job-limit, time-limit)
 *
 * This method outputs status information only if a debug handler was set.
 * Any exceptions are caught and logged, but are not reported as output.
 *
 * @param array $options Map of parameters:
 *   - type : the job type (or false for the default types)
 *   - maxJobs : maximum number of jobs to run
 *   - maxTime : maximum time in seconds before stopping
 *   - throttle : whether to respect job backoff configuration
 * @return array Summary response that can easily be JSON serialized
 */
public function run(array $options) {
	global $wgJobClasses, $wgTrxProfilerLimits;
	$response = array('jobs' => array(), 'reached' => 'none-ready');
	$type = isset($options['type']) ? $options['type'] : false;
	$maxJobs = isset($options['maxJobs']) ? $options['maxJobs'] : false;
	$maxTime = isset($options['maxTime']) ? $options['maxTime'] : false;
	$noThrottle = isset($options['throttle']) && !$options['throttle'];
	// Bail out if an unknown job type was explicitly requested
	if ($type !== false && !isset($wgJobClasses[$type])) {
		$response['reached'] = 'none-possible';
		return $response;
	}
	// Bail out if in read-only mode
	if (wfReadOnly()) {
		$response['reached'] = 'read-only';
		return $response;
	}
	// Catch huge single updates that lead to slave lag
	$trxProfiler = Profiler::instance()->getTransactionProfiler();
	$trxProfiler->setLogger(LoggerFactory::getInstance('DBPerformance'));
	$trxProfiler->setExpectations($wgTrxProfilerLimits['JobRunner'], __METHOD__);
	// Bail out if there is too much DB lag.
	// This check should not block as we want to try other wiki queues.
	$maxAllowedLag = 3;
	list(, $maxLag) = wfGetLB(wfWikiID())->getMaxLag();
	if ($maxLag >= $maxAllowedLag) {
		$response['reached'] = 'slave-lag-limit';
		return $response;
	}
	$group = JobQueueGroup::singleton();
	// Flush any pending DB writes for sanity
	wfGetLBFactory()->commitAll();
	// Some jobs types should not run until a certain timestamp
	$backoffs = array(); // map of (type => UNIX expiry)
	$backoffDeltas = array(); // map of (type => seconds)
	$wait = 'wait'; // block to read backoffs the first time
	$stats = RequestContext::getMain()->getStats();
	$jobsPopped = 0;
	$timeMsTotal = 0;
	$flags = JobQueueGroup::USE_CACHE;
	$startTime = microtime(true); // time since jobs started running
	$checkLagPeriod = 1.0; // check slave lag this many seconds
	$lastCheckTime = 1; // timestamp of last slave check
	do {
		// Sync the persistent backoffs with concurrent runners
		$backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
		$blacklist = $noThrottle ? array() : array_keys($backoffs);
		$wait = 'nowait'; // less important now
		if ($type === false) {
			$job = $group->pop(JobQueueGroup::TYPE_DEFAULT, $flags, $blacklist);
		} elseif (in_array($type, $blacklist)) {
			$job = false; // requested queue in backoff state
		} else {
			$job = $group->pop($type); // job from a single queue
		}
		if ($job) { // found a job
			$popTime = time();
			$jType = $job->getType();
			// Back off of certain jobs for a while (for throttling and for errors)
			$ttw = $this->getBackoffTimeToWait($job);
			if ($ttw > 0) {
				// Always add the delta for other runners in case the time running the
				// job negated the backoff for each individually but not collectively.
				$backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
				$backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
			}
			$msg = $job->toString() . " STARTING";
			$this->logger->debug($msg);
			$this->debugCallback($msg);
			// Run the job...
			$jobStartTime = microtime(true);
			try {
				++$jobsPopped;
				$status = $job->run();
				$error = $job->getLastError();
				// Commit job writes, flush deferred updates spawned by the job,
				// then commit whatever those updates wrote as well
				$this->commitMasterChanges($job);
				DeferredUpdates::doUpdates();
				$this->commitMasterChanges($job);
			} catch (Exception $e) {
				MWExceptionHandler::rollbackMasterChangesAndLog($e);
				$status = false;
				$error = get_class($e) . ': ' . $e->getMessage();
				MWExceptionHandler::logException($e);
			}
			// Commit all outstanding connections that are in a transaction
			// to get a fresh repeatable read snapshot on every connection.
			// Note that jobs are still responsible for handling slave lag.
			wfGetLBFactory()->commitAll();
			// Clear out title cache data from prior snapshots
			LinkCache::singleton()->clear();
			$timeMs = intval((microtime(true) - $jobStartTime) * 1000);
			$timeMsTotal += $timeMs;
			// Record how long jobs wait before getting popped
			$readyTs = $job->getReadyTimestamp();
			if ($readyTs) {
				$pickupDelay = $popTime - $readyTs;
				$stats->timing('jobqueue.pickup_delay.all', 1000 * $pickupDelay);
				$stats->timing("jobqueue.pickup_delay.{$jType}", 1000 * $pickupDelay);
			}
			// Record root job age for jobs being run
			$root = $job->getRootJobParams();
			if ($root['rootJobTimestamp']) {
				$age = $popTime - wfTimestamp(TS_UNIX, $root['rootJobTimestamp']);
				$stats->timing("jobqueue.pickup_root_age.{$jType}", 1000 * $age);
			}
			// Track the execution time for jobs
			$stats->timing("jobqueue.run.{$jType}", $timeMs);
			// Mark the job as done on success or when the job cannot be retried
			if ($status !== false || !$job->allowRetries()) {
				$group->ack($job); // done
			}
			// Back off of certain jobs for a while (for throttling and for errors)
			// NOTE: the mt_rand(0, 49) makes this apply to ~2% of failures, which
			// spreads out the extra backoff across concurrent runners
			if ($status === false && mt_rand(0, 49) == 0) {
				$ttw = max($ttw, 30); // too many errors
				$backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
			}
			if ($status === false) {
				$msg = $job->toString() . " t={$timeMs} error={$error}";
				$this->logger->error($msg);
				$this->debugCallback($msg);
			} else {
				$msg = $job->toString() . " t={$timeMs} good";
				$this->logger->info($msg);
				$this->debugCallback($msg);
			}
			$response['jobs'][] = array('type' => $jType, 'status' => $status === false ? 'failed' : 'ok', 'error' => $error, 'time' => $timeMs);
			// Break out if we hit the job count or wall time limits...
			if ($maxJobs && $jobsPopped >= $maxJobs) {
				$response['reached'] = 'job-limit';
				break;
			} elseif ($maxTime && microtime(true) - $startTime > $maxTime) {
				$response['reached'] = 'time-limit';
				break;
			}
			// Don't let any of the main DB slaves get backed up.
			// This only waits for so long before exiting and letting
			// other wikis in the farm (on different masters) get a chance.
			$timePassed = microtime(true) - $lastCheckTime;
			if ($timePassed >= $checkLagPeriod || $timePassed < 0) {
				if (!wfWaitForSlaves($lastCheckTime, false, '*', $maxAllowedLag)) {
					$response['reached'] = 'slave-lag-limit';
					break;
				}
				$lastCheckTime = microtime(true);
			}
			// Don't let any queue slaves/backups fall behind
			if ($jobsPopped > 0 && $jobsPopped % 100 == 0) {
				$group->waitForBackups();
			}
			// Bail if near-OOM instead of in a job
			if (!$this->checkMemoryOK()) {
				$response['reached'] = 'memory-limit';
				break;
			}
		}
	} while ($job); // stop when there are no jobs
	// Sync the persistent backoffs for the next runJobs.php pass
	if ($backoffDeltas) {
		$this->syncBackoffDeltas($backoffs, $backoffDeltas, 'wait');
	}
	$response['backoffs'] = $backoffs;
	$response['elapsed'] = $timeMsTotal;
	return $response;
}
/**
 * Mostly for hook use
 * @param Title $title
 * @param ForeignTitle $foreignTitle
 * @param int $revCount
 * @param int $sRevCount
 * @param array $pageInfo
 * @return bool
 */
public function finishImportPage($title, $foreignTitle, $revCount, $sRevCount, $pageInfo) {
	// Update article count statistics (T42009)
	// The normal counting logic in WikiPage->doEditUpdates() is designed for
	// one-revision-at-a-time editing, not bulk imports. In this situation it
	// suffers from issues of slave lag. We let WikiPage handle the total page
	// and revision count, and we implement our own custom logic for the
	// article (content page) count.
	$wikiPage = WikiPage::factory($title);
	$wikiPage->loadPageData('fromdbmaster');
	$content = $wikiPage->getContent();
	if ($content !== null) {
		$prepared = $wikiPage->prepareContentForEdit($content);
		$cacheKey = 'title_' . $title->getPrefixedText();
		$isCountable = $wikiPage->isCountable($prepared);
		// Only adjust the count if we saw this page before the import and its
		// countable status changed
		$knownBefore = array_key_exists($cacheKey, $this->countableCache);
		if ($knownBefore && $isCountable != $this->countableCache[$cacheKey]) {
			$delta = (int) $isCountable - (int) $this->countableCache[$cacheKey];
			DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('articles' => $delta)));
		}
	} else {
		wfDebug(__METHOD__ . ': Skipping article count adjustment for ' . $title . ' because WikiPage::getContent() returned null');
	}
	// Give extensions the last word, passing all original arguments through
	$args = func_get_args();
	return Hooks::run('AfterImportPage', $args);
}
/**
 * Purge metadata and all affected pages when the file is created,
 * deleted, or majorly updated.
 */
function purgeEverything() {
	// Delete thumbnails and refresh file metadata cache
	$this->purgeCache();
	$this->purgeDescription();
	// Queue an HTML cache purge for every page embedding this file
	$fileTitle = $this->getTitle();
	if ($fileTitle) {
		DeferredUpdates::addUpdate(new HTMLCacheUpdate($fileTitle, 'imagelinks'));
	}
}
/**
 * Create two pages inside one explicit DB transaction and verify that the
 * save-complete hooks fired with the expected types and revision IDs, and
 * that no deferred updates leaked past the commit.
 *
 * @dataProvider provideCreatePages
 * @covers EditPage
 */
public function testCreatePageTrx($desc, $pageTitle, $user, $editText, $expectedCode, $expectedText, $ignoreBlank = false) {
	$checkIds = [];
	// Hook closures exist mainly to type-check the arguments passed by core;
	// the save-complete one also records each new revision ID for assertions.
	$this->setMwGlobals('wgHooks', ['PageContentInsertComplete' => [function (WikiPage &$page, User &$user, Content $content, $summary, $minor, $u1, $u2, &$flags, Revision $revision) {
		// types/refs checked
	}], 'PageContentSaveComplete' => [function (WikiPage &$page, User &$user, Content $content, $summary, $minor, $u1, $u2, &$flags, Revision $revision, Status &$status, $baseRevId) use(&$checkIds) {
		$checkIds[] = $status->value['revision']->getId();
		// types/refs checked
	}]]);
	// Both edits happen inside a single explicit transaction
	wfGetDB(DB_MASTER)->begin(__METHOD__);
	$edit = ['wpTextbox1' => $editText];
	if ($ignoreBlank) {
		$edit['wpIgnoreBlankArticle'] = 1;
	}
	$page = $this->assertEdit($pageTitle, null, $user, $edit, $expectedCode, $expectedText, $desc);
	$pageTitle2 = (string) $pageTitle . '/x';
	$page2 = $this->assertEdit($pageTitle2, null, $user, $edit, $expectedCode, $expectedText, $desc);
	wfGetDB(DB_MASTER)->commit(__METHOD__);
	// Nothing should remain queued after the commit
	$this->assertEquals(0, DeferredUpdates::pendingUpdatesCount(), 'No deferred updates');
	if ($expectedCode != EditPage::AS_BLANK_ARTICLE) {
		// Pages were actually created: check revision IDs, then clean up
		$latest = $page->getLatest();
		$page->doDeleteArticleReal($pageTitle);
		$this->assertGreaterThan(0, $latest, "Page #1 revision ID updated in object");
		$this->assertEquals($latest, $checkIds[0], "Revision #1 in Status for hook");
		$latest2 = $page2->getLatest();
		$page2->doDeleteArticleReal($pageTitle2);
		$this->assertGreaterThan(0, $latest2, "Page #2 revision ID updated in object");
		$this->assertEquals($latest2, $checkIds[1], "Revision #2 in Status for hook");
	}
}
/**
 * Update page_touched timestamps and send squid purge messages for
 * pages linking to this title. May be sent to the job queue depending
 * on the number of links. Typically called on create and delete.
 */
public function touchLinks() {
	// Wikilink backlinks always get purged; category members too when
	// this title is a category page.
	$linkTables = array('pagelinks');
	if ($this->getNamespace() == NS_CATEGORY) {
		$linkTables[] = 'categorylinks';
	}
	foreach ($linkTables as $linkTable) {
		DeferredUpdates::addUpdate(new HTMLCacheUpdate($this, $linkTable));
	}
}
/**
 * Discard all queued deferred updates without running them.
 *
 * Releases this extension's own pending DeferredCallableUpdate queue first,
 * then clears MediaWiki core's pending DeferredUpdates.
 *
 * @since 2.4
 */
public static function clearPendingDeferredUpdates() {
	DeferredCallableUpdate::releasePendingUpdates();
	\DeferredUpdates::clearPendingUpdates();
}
/**
 * Purge caches on page update etc
 *
 * Queues HTML cache invalidation for transcluding pages and redirects,
 * then synchronously purges squid/CDN, the file cache, and the info
 * action cache for this page only.
 *
 * @param $title Title object
 * @todo Verify that $title is always a Title object (and never false or null), add Title hint to parameter $title
 */
public static function onArticleEdit( $title ) {
	// Invalidate caches of articles which include this page
	DeferredUpdates::addHTMLCacheUpdate( $title, 'templatelinks' );

	// Invalidate the caches of all pages which redirect here
	DeferredUpdates::addHTMLCacheUpdate( $title, 'redirect' );

	// Purge squid for this page only
	$title->purgeSquid();

	// Clear file cache for this page only
	HTMLFileCache::clearFileCache( $title );

	InfoAction::invalidateCache( $title );
}
/**
 * Apply a write operation to every backing cache tier.
 *
 * The primary (first) cache — and, when async writes are disabled, every
 * cache — is written synchronously and its failure is reflected in the
 * return value. Secondary caches in async mode are written via deferred
 * updates so this HTTP request is not blocked; their failures are only
 * logged.
 *
 * @param string $method Cache method name; any further arguments are
 *   forwarded to that method on each tier
 * @return bool Whether all synchronous writes succeeded
 */
protected function doWrite($method) {
	$ok = true;
	// Everything after $method is forwarded to the tier's method
	$forwardedArgs = array_slice(func_get_args(), 1);
	foreach ($this->caches as $index => $store) {
		$syncWrite = ($index == 0) || !$this->asyncWrites;
		if ($syncWrite) {
			// First store or in sync mode: write now and get result
			if (!call_user_func_array(array($store, $method), $forwardedArgs)) {
				$ok = false;
			}
			continue;
		}
		// Secondary write in async mode: do not block this HTTP request
		$logger = $this->logger;
		DeferredUpdates::addCallableUpdate(function () use($store, $method, $forwardedArgs, $logger) {
			if (!call_user_func_array(array($store, $method), $forwardedArgs)) {
				$logger->warning("Async {$method} op failed");
			}
		});
	}
	return $ok;
}
/**
 * Auto-create the given user, if necessary
 * @private Don't call this yourself. Let Setup.php do it for you at the right time.
 * @note This more properly belongs in AuthManager, but we need it now.
 *  When AuthManager comes, this will be deprecated and will pass-through
 *  to the corresponding AuthManager method.
 * @param User $user User to auto-create
 * @return bool Success (false covers both "denied" and "already exists";
 *  the $user object is reloaded either way)
 * @throws \UnexpectedValueException If an AbortAutoAccount hook changes the name
 */
public static function autoCreateUser(User $user) {
	global $wgAuth;

	$logger = self::singleton()->logger;

	// Much of this code is based on that in CentralAuth

	// Try the local user from the slave DB
	$localId = User::idFromName($user->getName());

	// Fetch the user ID from the master, so that we don't try to create the user
	// when they already exist, due to replication lag
	// @codeCoverageIgnoreStart
	if (!$localId && wfGetLB()->getReaderIndex() != 0) {
		$localId = User::idFromName($user->getName(), User::READ_LATEST);
	}
	// @codeCoverageIgnoreEnd

	if ($localId) {
		// User exists after all.
		$user->setId($localId);
		$user->loadFromId();
		return false;
	}

	// Denied by AuthPlugin? But ignore AuthPlugin itself.
	if (get_class($wgAuth) !== 'AuthPlugin' && !$wgAuth->autoCreate()) {
		$logger->debug(__METHOD__ . ': denied by AuthPlugin');
		$user->setId(0);
		$user->loadFromId();
		return false;
	}

	// Wiki is read-only?
	if (wfReadOnly()) {
		$logger->debug(__METHOD__ . ': denied by wfReadOnly()');
		$user->setId(0);
		$user->loadFromId();
		return false;
	}

	$userName = $user->getName();

	// Check the session, if we tried to create this user already there's
	// no point in retrying.
	$session = self::getGlobalSession();
	$reason = $session->get('MWSession::AutoCreateBlacklist');
	if ($reason) {
		$logger->debug(__METHOD__ . ": blacklisted in session ({$reason})");
		$user->setId(0);
		$user->loadFromId();
		return false;
	}

	// Is the IP user able to create accounts?
	// NOTE: denial states below are remembered in the session for 600s so
	// repeat requests skip the DB checks.
	$anon = new User();
	if (!$anon->isAllowedAny('createaccount', 'autocreateaccount') || $anon->isBlockedFromCreateAccount()) {
		// Blacklist the user to avoid repeated DB queries subsequently
		$logger->debug(__METHOD__ . ': user is blocked from this wiki, blacklisting');
		$session->set('MWSession::AutoCreateBlacklist', 'blocked', 600);
		$session->persist();
		$user->setId(0);
		$user->loadFromId();
		return false;
	}

	// Check for validity of username
	if (!User::isCreatableName($userName)) {
		$logger->debug(__METHOD__ . ': Invalid username, blacklisting');
		$session->set('MWSession::AutoCreateBlacklist', 'invalid username', 600);
		$session->persist();
		$user->setId(0);
		$user->loadFromId();
		return false;
	}

	// Give other extensions a chance to stop auto creation.
	$user->loadDefaults($userName);
	$abortMessage = '';
	if (!\Hooks::run('AbortAutoAccount', array($user, &$abortMessage))) {
		// In this case we have no way to return the message to the user,
		// but we can log it.
		$logger->debug(__METHOD__ . ": denied by hook: {$abortMessage}");
		$session->set('MWSession::AutoCreateBlacklist', "hook aborted: {$abortMessage}", 600);
		$session->persist();
		$user->setId(0);
		$user->loadFromId();
		return false;
	}

	// Make sure the name has not been changed
	if ($user->getName() !== $userName) {
		$user->setId(0);
		$user->loadFromId();
		throw new \UnexpectedValueException('AbortAutoAccount hook tried to change the user name');
	}

	// Ignore warnings about master connections/writes...hard to avoid here
	\Profiler::instance()->getTransactionProfiler()->resetExpectations();

	// Backoff: skip creation if a recent attempt for this name failed
	$cache = \ObjectCache::getLocalClusterInstance();
	$backoffKey = wfMemcKey('MWSession', 'autocreate-failed', md5($userName));
	if ($cache->get($backoffKey)) {
		$logger->debug(__METHOD__ . ': denied by prior creation attempt failures');
		$user->setId(0);
		$user->loadFromId();
		return false;
	}

	// Checks passed, create the user...
	$from = isset($_SERVER['REQUEST_URI']) ? $_SERVER['REQUEST_URI'] : 'CLI';
	$logger->info(__METHOD__ . ": creating new user ({$userName}) - from: {$from}");
	try {
		// Insert the user into the local DB master
		$status = $user->addToDatabase();
		if (!$status->isOK()) {
			// @codeCoverageIgnoreStart
			$logger->error(__METHOD__ . ': failed with message ' . $status->getWikiText());
			$user->setId(0);
			$user->loadFromId();
			return false;
			// @codeCoverageIgnoreEnd
		}
	} catch (\Exception $ex) {
		// @codeCoverageIgnoreStart
		$logger->error(__METHOD__ . ': failed with exception ' . $ex->getMessage());
		// Do not keep throwing errors for a while
		$cache->set($backoffKey, 1, 600);
		// Bubble up error; which should normally trigger DB rollbacks
		throw $ex;
		// @codeCoverageIgnoreEnd
	}

	# Notify hooks (e.g. Newuserlog)
	\Hooks::run('AuthPluginAutoCreate', array($user));
	\Hooks::run('LocalUserCreated', array($user, true));

	# Notify AuthPlugin too
	$tmpUser = $user;
	$wgAuth->initUser($tmpUser, true);
	if ($tmpUser !== $user) {
		$logger->warning(__METHOD__ . ': ' . get_class($wgAuth) . '::initUser() replaced the user object');
	}

	$user->saveSettings();

	# Update user count
	\DeferredUpdates::addUpdate(new \SiteStatsUpdate(0, 0, 0, 0, 1));

	# Watch user's userpage and talk page
	$user->addWatch($user->getUserPage(), \WatchedItem::IGNORE_USER_RIGHTS);

	return true;
}
/**
 * Per-test fixture setup: record the call, snapshot PHP's error_reporting
 * level, remove leftover temp files/dirs, roll back any open DB
 * transactions, drop queued deferred updates, and install the output
 * buffer barrier.
 */
protected function setUp() {
	parent::setUp();
	$this->called['setUp'] = true;

	// Remember the reporting level so tearDown can detect changes
	$this->phpErrorLevel = intval(ini_get('error_reporting'));

	// Cleaning up temporary files
	foreach ($this->tmpFiles as $path) {
		if (is_file($path) || is_link($path)) {
			unlink($path);
		} elseif (is_dir($path)) {
			wfRecursiveRemoveDir($path);
		}
	}

	// Clean up open transactions
	if ($this->needsDB() && $this->db) {
		while ($this->db->trxLevel() > 0) {
			$this->db->rollback(__METHOD__, 'flush');
		}
	}

	// Start with an empty deferred-update queue for each test
	DeferredUpdates::clearPendingUpdates();

	ob_start('MediaWikiTestCase::wfResetOutputBuffersBarrier');
}