/**
 * Constructor
 *
 * @param bool $withTransaction Whether this update should be wrapped in a
 *   transaction (default: true). A transaction is only started if no
 *   transaction is already in progress, see beginTransaction() for details.
 */
public function __construct($withTransaction = true) {
    parent::__construct();

    $this->mDb = wfGetLB()->getLazyConnectionRef(DB_MASTER);

    $this->mWithTransaction = $withTransaction;
    $this->mHasTransaction = false;
}
/**
 * Purges the cache of a page
 */
public function execute() {
    $params = $this->extractRequestParams();

    $continuationManager = new ApiContinuationManager($this, array(), array());
    $this->setContinuationManager($continuationManager);

    $forceLinkUpdate = $params['forcelinkupdate'];
    $forceRecursiveLinkUpdate = $params['forcerecursivelinkupdate'];
    $pageSet = $this->getPageSet();
    $pageSet->execute();

    $result = $pageSet->getInvalidTitlesAndRevisions();

    foreach ($pageSet->getGoodTitles() as $title) {
        $r = array();
        ApiQueryBase::addTitleInfo($r, $title);
        $page = WikiPage::factory($title);
        $page->doPurge(); // Directly purge and skip the UI part of purge().
        $r['purged'] = true;

        if ($forceLinkUpdate || $forceRecursiveLinkUpdate) {
            if (!$this->getUser()->pingLimiter('linkpurge')) {
                $popts = $page->makeParserOptions('canonical');

                # Parse content; note that HTML generation is only needed if we want to cache the result.
                $content = $page->getContent(Revision::RAW);
                $enableParserCache = $this->getConfig()->get('EnableParserCache');
                $p_result = $content->getParserOutput($title, $page->getLatest(), $popts, $enableParserCache);

                # Update the links tables
                $updates = $content->getSecondaryDataUpdates($title, null, $forceRecursiveLinkUpdate, $p_result);
                DataUpdate::runUpdates($updates);

                $r['linkupdate'] = true;

                if ($enableParserCache) {
                    $pcache = ParserCache::singleton();
                    $pcache->save($p_result, $page, $popts);
                }
            } else {
                $error = $this->parseMsg(array('actionthrottledtext'));
                $this->setWarning($error['info']);
                $forceLinkUpdate = false;
            }
        }

        $result[] = $r;
    }

    $apiResult = $this->getResult();
    ApiResult::setIndexedTagName($result, 'page');
    $apiResult->addValue(null, $this->getModuleName(), $result);

    $values = $pageSet->getNormalizedTitlesAsResult($apiResult);
    if ($values) {
        $apiResult->addValue(null, 'normalized', $values);
    }
    $values = $pageSet->getConvertedTitlesAsResult($apiResult);
    if ($values) {
        $apiResult->addValue(null, 'converted', $values);
    }
    $values = $pageSet->getRedirectTitlesAsResult($apiResult);
    if ($values) {
        $apiResult->addValue(null, 'redirects', $values);
    }

    $this->setContinuationManager(null);
    $continuationManager->setContinuationIntoResult($apiResult);
}
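// A minimal internal-invocation sketch (not part of the module above): calling the
// purge API with forcelinkupdate via FauxRequest/ApiMain, which is how the module's
// execute() above typically gets exercised outside of a web request. The title
// 'Main Page' and the posted-request flag are illustrative assumptions.
$request = new FauxRequest(array(
    'action' => 'purge',
    'titles' => 'Main Page',
    'forcelinkupdate' => 1,
), true); // true = treat as POSTed
$api = new ApiMain($request, true); // true = enable write mode
$api->execute();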
public static function runForTitleInternal(Title $title, Revision $revision, $fname) {
    global $wgParser, $wgContLang;

    wfProfileIn($fname . '-parse');
    $options = ParserOptions::newFromUserAndLang(new User(), $wgContLang);
    $parserOutput = $wgParser->parse($revision->getText(), $title, $options, true, true, $revision->getId());
    wfProfileOut($fname . '-parse');

    wfProfileIn($fname . '-update');
    $updates = $parserOutput->getSecondaryDataUpdates($title, false);
    DataUpdate::runUpdates($updates);
    wfProfileOut($fname . '-update');
}
/**
 * Constructor
 *
 * @param bool $withTransaction Whether this update should be wrapped in a transaction (default: true).
 *   A transaction is only started if no transaction is already in progress,
 *   see beginTransaction() for details.
 */
public function __construct($withTransaction = true) {
    global $wgAntiLockFlags;

    parent::__construct();

    if ($wgAntiLockFlags & ALF_NO_LINK_LOCK) {
        $this->mOptions = array();
    } else {
        $this->mOptions = array('FOR UPDATE');
    }

    // @todo get connection only when it's needed? make sure that doesn't break anything, especially transactions!
    $this->mDb = wfGetDB(DB_MASTER);

    $this->mWithTransaction = $withTransaction;
    $this->mHasTransaction = false;
}
/**
 * @param WikiPage $page Page we are updating
 * @param integer|null $pageId ID of the page we are updating [optional]
 * @param string|null $timestamp TS_MW timestamp of deletion
 * @throws InvalidArgumentException
 */
function __construct(WikiPage $page, $pageId = null, $timestamp = null) {
    parent::__construct();

    $this->page = $page;
    if ($pageId) {
        $this->pageId = $pageId; // page ID at time of deletion
    } elseif ($page->exists()) {
        $this->pageId = $page->getId();
    } else {
        throw new InvalidArgumentException("Page ID not known. Page doesn't exist?");
    }

    $this->timestamp = $timestamp ?: wfTimestampNow();
}
function run() {
    if (is_null($this->title)) {
        $this->setLastError("deleteLinks: Invalid title");
        return false;
    }

    $pageId = $this->params['pageId'];

    if (WikiPage::newFromID($pageId, WikiPage::READ_LATEST)) {
        // The page was restored somehow or something went wrong
        $this->setLastError("deleteLinks: Page #{$pageId} exists");
        return false;
    }

    $page = WikiPage::factory($this->title); // title when deleted

    $update = new LinksDeletionUpdate($page, $pageId);
    DataUpdate::runUpdates([$update]);

    return true;
}
/**
 * Run LinksUpdate for all links on a given page_id
 * @param int $id The page_id
 */
public static function fixLinksFromArticle($id) {
    $page = WikiPage::newFromID($id);

    LinkCache::singleton()->clear();

    if ($page === null) {
        return;
    }

    $content = $page->getContent(Revision::RAW);
    if ($content === null) {
        return;
    }

    $dbw = wfGetDB(DB_MASTER);
    $dbw->begin(__METHOD__);

    $updates = $content->getSecondaryDataUpdates($page->getTitle());
    DataUpdate::runUpdates($updates);

    $dbw->commit(__METHOD__);
}
/**
 * @param Title $title
 * @return bool
 */
protected function runForTitle(Title $title = null) {
    $linkCache = LinkCache::singleton();
    $linkCache->clear();

    if (is_null($title)) {
        $this->setLastError("refreshLinks: Invalid title");
        return false;
    }

    // Wait for the DB of the current/next slave DB handle to catch up to the master.
    // This way, we get the correct page_latest for templates or files that just changed
    // milliseconds ago, having triggered this job to begin with.
    if (isset($this->params['masterPos']) && $this->params['masterPos'] !== false) {
        wfGetLB()->waitFor($this->params['masterPos']);
    }

    $page = WikiPage::factory($title);

    // Fetch the current revision...
    $revision = Revision::newFromTitle($title, false, Revision::READ_NORMAL);
    if (!$revision) {
        $this->setLastError("refreshLinks: Article not found {$title->getPrefixedDBkey()}");
        return false; // XXX: what if it was just deleted?
    }

    $content = $revision->getContent(Revision::RAW);
    if (!$content) {
        // If there is no content, pretend the content is empty
        $content = $revision->getContentHandler()->makeEmptyContent();
    }

    $parserOutput = false;
    $parserOptions = $page->makeParserOptions('canonical');

    // If page_touched changed after this root job (with a good slave lag skew factor),
    // then it is likely that any views of the pages already resulted in re-parses which
    // are now in cache. This can be reused to avoid expensive parsing in some cases.
    if (isset($this->params['rootJobTimestamp'])) {
        $skewedTimestamp = wfTimestamp(TS_UNIX, $this->params['rootJobTimestamp']) + 5;
        if ($page->getLinksTimestamp() > wfTimestamp(TS_MW, $skewedTimestamp)) {
            // Something already updated the backlinks since this job was made
            return true;
        }

        if ($page->getTouched() > wfTimestamp(TS_MW, $skewedTimestamp)) {
            $parserOutput = ParserCache::singleton()->getDirty($page, $parserOptions);
            if ($parserOutput && $parserOutput->getCacheTime() <= $skewedTimestamp) {
                $parserOutput = false; // too stale
            }
        }
    }

    // Fetch the current revision and parse it if necessary...
    if ($parserOutput == false) {
        $start = microtime(true);

        // Revision ID must be passed to the parser output to get revision variables correct
        $parserOutput = $content->getParserOutput($title, $revision->getId(), $parserOptions, false);

        $elapsed = microtime(true) - $start;

        // If it took a long time to render, then save this back to the cache to avoid
        // wasted CPU by other apaches or job runners. We don't want to always save to
        // cache as this can cause high cache I/O and LRU churn when a template changes.
        if ($elapsed >= self::PARSE_THRESHOLD_SEC
            && $page->isParserCacheUsed($parserOptions, $revision->getId())
            && $parserOutput->isCacheable()
        ) {
            $ctime = wfTimestamp(TS_MW, (int) $start); // cache time
            ParserCache::singleton()->save($parserOutput, $page, $parserOptions, $ctime, $revision->getId());
        }
    }

    $updates = $content->getSecondaryDataUpdates($title, null, false, $parserOutput);
    DataUpdate::runUpdates($updates);

    InfoAction::invalidateCache($title);

    return true;
}
/**
 * Do any deferred updates and clear the list
 *
 * @param string $commit Set to 'commit' to commit after every update to
 *   prevent lock contention
 * @param string $mode Use "enqueue" to use the job queue when possible [Default: run]
 */
public static function doUpdates($commit = '', $mode = 'run') {
    $updates = self::$updates;

    while (count($updates)) {
        self::clearPendingUpdates();

        /** @var DataUpdate[] $dataUpdates */
        $dataUpdates = array();
        /** @var DeferrableUpdate[] $otherUpdates */
        $otherUpdates = array();
        foreach ($updates as $update) {
            if ($update instanceof DataUpdate) {
                $dataUpdates[] = $update;
            } else {
                $otherUpdates[] = $update;
            }
        }

        // Delegate DataUpdate execution to the DataUpdate class
        DataUpdate::runUpdates($dataUpdates, $mode);

        // Execute the non-DataUpdate tasks
        foreach ($otherUpdates as $update) {
            try {
                $update->doUpdate();
                if ($commit === 'commit') {
                    wfGetLBFactory()->commitMasterChanges();
                }
            } catch (Exception $e) {
                // We don't want exceptions thrown during deferred updates to
                // be reported to the user since the output is already sent.
                // Instead we just log them.
                if (!$e instanceof ErrorPageError) {
                    MWExceptionHandler::logException($e);
                }
            }
        }

        $updates = self::$updates;
    }
}
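// A minimal usage sketch (not from the source above): queue a deferrable update
// during request processing and flush the queue once output has been sent, which is
// the usual pattern behind doUpdates(). The HTMLCacheUpdate arguments and the
// 'Sandbox' title are illustrative assumptions for this MediaWiki era.
$title = Title::newFromText('Sandbox');
DeferredUpdates::addUpdate(new HTMLCacheUpdate($title, 'templatelinks'));
// ... later, during post-send shutdown ...
DeferredUpdates::doUpdates('commit'); // commit after each update to limit lock contention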
/**
 * Do some database updates after deletion
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null) {
    // update site status
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));

    // remove secondary indexes, etc
    $updates = $this->getDeletionUpdates($content);
    DataUpdate::runUpdates($updates, 'enqueue');

    // Reparse any pages transcluding this page
    LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');

    // Reparse any pages including this image
    if ($this->mTitle->getNamespace() == NS_FILE) {
        LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
    }

    // Clear caches
    WikiPage::onArticleDelete($this->mTitle);

    // Reset this object and the Title object
    $this->loadFromRow(false, self::READ_LATEST);

    // Search engine
    DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}
public function __construct() {
    parent::__construct();

    $this->mDb = wfGetLB()->getLazyConnectionRef(DB_MASTER);
}
public static function execute(array &$queue, $mode) {
    $updates = $queue; // snapshot of queue

    // Keep doing rounds of updates until none get enqueued
    while (count($updates)) {
        $queue = array(); // clear the queue

        /** @var DataUpdate[] $dataUpdates */
        $dataUpdates = array();
        /** @var DeferrableUpdate[] $otherUpdates */
        $otherUpdates = array();
        foreach ($updates as $update) {
            if ($update instanceof DataUpdate) {
                $dataUpdates[] = $update;
            } else {
                $otherUpdates[] = $update;
            }
        }

        // Delegate DataUpdate execution to the DataUpdate class
        DataUpdate::runUpdates($dataUpdates, $mode);

        // Execute the non-DataUpdate tasks
        foreach ($otherUpdates as $update) {
            try {
                $update->doUpdate();
                wfGetLBFactory()->commitMasterChanges(__METHOD__);
            } catch (Exception $e) {
                // We don't want exceptions thrown during deferred updates to
                // be reported to the user since the output is already sent
                if (!$e instanceof ErrorPageError) {
                    MWExceptionHandler::logException($e);
                }
                // Make sure incomplete transactions are not committed and end any
                // open atomic sections so that other DB updates have a chance to run
                wfGetLBFactory()->rollbackMasterChanges(__METHOD__);
            }
        }

        $updates = $queue; // new snapshot of queue (check for new entries)
    }
}
/**
 * Do some database updates after deletion
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null) {
    // Update site status
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));

    // Delete pagelinks, update secondary indexes, etc
    $updates = $this->getDeletionUpdates($content);
    // Make sure any enqueued jobs run after commit so they see the deletion
    wfGetDB(DB_MASTER)->onTransactionIdle(function () use ($updates) {
        DataUpdate::runUpdates($updates, 'enqueue');
    });

    // Reparse any pages transcluding this page
    LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');

    // Reparse any pages including this image
    if ($this->mTitle->getNamespace() == NS_FILE) {
        LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
    }

    // Clear caches
    WikiPage::onArticleDelete($this->mTitle);

    // Reset this object and the Title object
    $this->loadFromRow(false, self::READ_LATEST);

    // Search engine
    DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}
/**
 * @param Title $title
 * @return bool
 */
protected function runForTitle(Title $title) {
    // Wait for the DB of the current/next slave DB handle to catch up to the master.
    // This way, we get the correct page_latest for templates or files that just changed
    // milliseconds ago, having triggered this job to begin with.
    if (isset($this->params['masterPos']) && $this->params['masterPos'] !== false) {
        wfGetLB()->waitFor($this->params['masterPos']);
    }

    // Fetch the current page and revision...
    $page = WikiPage::factory($title);
    $revision = Revision::newFromTitle($title, false, Revision::READ_NORMAL);
    if (!$revision) {
        $this->setLastError("refreshLinks: Article not found {$title->getPrefixedDBkey()}");
        return false; // XXX: what if it was just deleted?
    }

    $content = $revision->getContent(Revision::RAW);
    if (!$content) {
        // If there is no content, pretend the content is empty
        $content = $revision->getContentHandler()->makeEmptyContent();
    }

    $parserOutput = false;
    $parserOptions = $page->makeParserOptions('canonical');

    // If page_touched changed after this root job, then it is likely that
    // any views of the pages already resulted in re-parses which are now in
    // cache. The cache can be reused to avoid expensive parsing in some cases.
    if (isset($this->params['rootJobTimestamp'])) {
        $opportunistic = !empty($this->params['isOpportunistic']);

        $skewedTimestamp = $this->params['rootJobTimestamp'];
        if ($opportunistic) {
            // Neither clock skew nor DB snapshot/slave lag matter much for such
            // updates; focus on reusing the (often recently updated) cache
        } else {
            // For transclusion updates, the template changes must be reflected
            $skewedTimestamp = wfTimestamp(TS_MW, wfTimestamp(TS_UNIX, $skewedTimestamp) + self::CLOCK_FUDGE);
        }

        if ($page->getLinksTimestamp() > $skewedTimestamp) {
            // Something already updated the backlinks since this job was made
            return true;
        }

        if ($page->getTouched() >= $skewedTimestamp || $opportunistic) {
            // Something bumped page_touched since this job was made
            // or the cache is otherwise suspected to be up-to-date
            $parserOutput = ParserCache::singleton()->getDirty($page, $parserOptions);
            if ($parserOutput && $parserOutput->getCacheTime() < $skewedTimestamp) {
                $parserOutput = false; // too stale
            }
        }
    }

    // Fetch the current revision and parse it if necessary...
    if ($parserOutput == false) {
        $start = microtime(true);

        // Revision ID must be passed to the parser output to get revision variables correct
        $parserOutput = $content->getParserOutput($title, $revision->getId(), $parserOptions, false);

        $elapsed = microtime(true) - $start;

        // If it took a long time to render, then save this back to the cache to avoid
        // wasted CPU by other apaches or job runners. We don't want to always save to
        // cache as this can cause high cache I/O and LRU churn when a template changes.
        if ($elapsed >= self::PARSE_THRESHOLD_SEC
            && $page->shouldCheckParserCache($parserOptions, $revision->getId())
            && $parserOutput->isCacheable()
        ) {
            $ctime = wfTimestamp(TS_MW, (int) $start); // cache time
            ParserCache::singleton()->save($parserOutput, $page, $parserOptions, $ctime, $revision->getId());
        }
    }

    $updates = $content->getSecondaryDataUpdates($title, null, !empty($this->params['useRecursiveLinksUpdate']), $parserOutput);

    foreach ($updates as $key => $update) {
        if ($update instanceof LinksUpdate) {
            if (!empty($this->params['triggeredRecursive'])) {
                $update->setTriggeredRecursive();
            }
            if (!empty($this->params['triggeringUser'])) {
                $userInfo = $this->params['triggeringUser'];
                if ($userInfo['userId']) {
                    $user = User::newFromId($userInfo['userId']);
                } else {
                    // Anonymous, use the username
                    $user = User::newFromName($userInfo['userName'], false);
                }
                $update->setTriggeringUser($user);
            }
            if (!empty($this->params['triggeringRevisionId'])) {
                $revision = Revision::newFromId($this->params['triggeringRevisionId']);
                if ($revision === null) {
                    $revision = Revision::newFromId($this->params['triggeringRevisionId'], Revision::READ_LATEST);
                }
                $update->setRevision($revision);
            }
        }
    }

    DataUpdate::runUpdates($updates);

    InfoAction::invalidateCache($title);

    return true;
}
/**
 * Do some database updates after deletion
 *
 * @param $id Int: page_id value of the page being deleted (B/C, currently unused)
 * @param $content Content: optional page content to be used when determining the required updates.
 *   This may be needed because $this->getContent() may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null) {
    # update site status
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));

    # remove secondary indexes, etc
    $updates = $this->getDeletionUpdates($content);
    DataUpdate::runUpdates($updates);

    # Clear caches
    WikiPage::onArticleDelete($this->mTitle);

    # Reset this object
    $this->clear();

    # Clear the cached article id so the interface doesn't act like we exist
    $this->mTitle->resetArticleID(0);
}
/**
 * Constructor
 *
 * @param Title $title Title of the page we're updating
 * @param ParserOutput $parserOutput Output from a full parse of this page
 * @param bool $recursive Queue jobs for recursive updates?
 * @throws InvalidArgumentException
 */
function __construct(Title $title, ParserOutput $parserOutput, $recursive = true) {
    parent::__construct();

    $this->mTitle = $title;
    $this->mId = $title->getArticleID(Title::GAID_FOR_UPDATE);

    if (!$this->mId) {
        throw new InvalidArgumentException("The Title object yields no ID. Perhaps the page doesn't exist?");
    }

    $this->mParserOutput = $parserOutput;

    $this->mLinks = $parserOutput->getLinks();
    $this->mImages = $parserOutput->getImages();
    $this->mTemplates = $parserOutput->getTemplates();
    $this->mExternals = $parserOutput->getExternalLinks();
    $this->mCategories = $parserOutput->getCategories();
    $this->mProperties = $parserOutput->getProperties();
    $this->mInterwikis = $parserOutput->getInterwikiLinks();

    # Convert the format of the interlanguage links
    # I didn't want to change it in the ParserOutput, because that array is passed all
    # the way back to the skin, so either a skin API break would be required, or an
    # inefficient back-conversion.
    $ill = $parserOutput->getLanguageLinks();
    $this->mInterlangs = [];
    foreach ($ill as $link) {
        list($key, $title) = explode(':', $link, 2);
        $this->mInterlangs[$key] = $title;
    }

    foreach ($this->mCategories as &$sortkey) {
        # If the sortkey is longer than 255 bytes, it is truncated by the DB,
        # and then doesn't get matched when comparing existing vs current
        # categories, causing bug 25254.
        # Also, substr() behaves oddly when given "".
        if ($sortkey !== '') {
            $sortkey = substr($sortkey, 0, 255);
        }
    }

    $this->mRecursive = $recursive;

    Hooks::run('LinksUpdateConstructed', [&$this]);
}
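// A minimal usage sketch (not part of the constructor above), assuming a WikiPage and
// its canonical ParserOutput are available, as in the refreshLinks snippets elsewhere
// in this section: build a LinksUpdate from the parse and run it via runUpdates().
// The 'Sandbox' title is an arbitrary example.
$page = WikiPage::factory(Title::newFromText('Sandbox'));
$content = $page->getContent(Revision::RAW);
$parserOptions = $page->makeParserOptions('canonical');
$parserOutput = $content->getParserOutput($page->getTitle(), $page->getLatest(), $parserOptions, false);
DataUpdate::runUpdates([new LinksUpdate($page->getTitle(), $parserOutput, false)]); // non-recursive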
/**
 * @param Title $title
 * @return bool
 */
protected function runForTitle(Title $title) {
    $page = WikiPage::factory($title);

    if (!empty($this->params['triggeringRevisionId'])) {
        // Fetch the specified revision; lockAndGetLatest() below detects if the page
        // was edited since and aborts in order to avoid corrupting the link tables
        $revision = Revision::newFromId($this->params['triggeringRevisionId'], Revision::READ_LATEST);
    } else {
        // Fetch current revision; READ_LATEST reduces lockAndGetLatest() check failures
        $revision = Revision::newFromTitle($title, false, Revision::READ_LATEST);
    }

    if (!$revision) {
        $this->setLastError("Revision not found for {$title->getPrefixedDBkey()}");
        return false; // just deleted?
    }

    $content = $revision->getContent(Revision::RAW);
    if (!$content) {
        // If there is no content, pretend the content is empty
        $content = $revision->getContentHandler()->makeEmptyContent();
    }

    $parserOutput = false;
    $parserOptions = $page->makeParserOptions('canonical');

    // If page_touched changed after this root job, then it is likely that
    // any views of the pages already resulted in re-parses which are now in
    // cache. The cache can be reused to avoid expensive parsing in some cases.
    if (isset($this->params['rootJobTimestamp'])) {
        $opportunistic = !empty($this->params['isOpportunistic']);

        $skewedTimestamp = $this->params['rootJobTimestamp'];
        if ($opportunistic) {
            // Neither clock skew nor DB snapshot/slave lag matter much for such
            // updates; focus on reusing the (often recently updated) cache
        } else {
            // For transclusion updates, the template changes must be reflected
            $skewedTimestamp = wfTimestamp(TS_MW, wfTimestamp(TS_UNIX, $skewedTimestamp) + self::CLOCK_FUDGE);
        }

        if ($page->getLinksTimestamp() > $skewedTimestamp) {
            // Something already updated the backlinks since this job was made
            return true;
        }

        if ($page->getTouched() >= $skewedTimestamp || $opportunistic) {
            // Something bumped page_touched since this job was made
            // or the cache is otherwise suspected to be up-to-date
            $parserOutput = ParserCache::singleton()->getDirty($page, $parserOptions);
            if ($parserOutput && $parserOutput->getCacheTime() < $skewedTimestamp) {
                $parserOutput = false; // too stale
            }
        }
    }

    // Fetch the current revision and parse it if necessary...
    if ($parserOutput == false) {
        $start = microtime(true);

        // Revision ID must be passed to the parser output to get revision variables correct
        $parserOutput = $content->getParserOutput($title, $revision->getId(), $parserOptions, false);

        $elapsed = microtime(true) - $start;

        // If it took a long time to render, then save this back to the cache to avoid
        // wasted CPU by other apaches or job runners. We don't want to always save to
        // cache as this can cause high cache I/O and LRU churn when a template changes.
        if ($elapsed >= self::PARSE_THRESHOLD_SEC
            && $page->shouldCheckParserCache($parserOptions, $revision->getId())
            && $parserOutput->isCacheable()
        ) {
            $ctime = wfTimestamp(TS_MW, (int) $start); // cache time
            ParserCache::singleton()->save($parserOutput, $page, $parserOptions, $ctime, $revision->getId());
        }
    }

    $updates = $content->getSecondaryDataUpdates($title, null, !empty($this->params['useRecursiveLinksUpdate']), $parserOutput);

    foreach ($updates as $key => $update) {
        // FIXME: This code probably shouldn't be here?
        // Needed by things like Echo notifications which need
        // to know which user caused the links update
        if ($update instanceof LinksUpdate) {
            if (!empty($this->params['triggeringUser'])) {
                $userInfo = $this->params['triggeringUser'];
                if ($userInfo['userId']) {
                    $user = User::newFromId($userInfo['userId']);
                } else {
                    // Anonymous, use the username
                    $user = User::newFromName($userInfo['userName'], false);
                }
                $update->setTriggeringUser($user);
            }
        }
    }

    $latestNow = $page->lockAndGetLatest();
    if (!$latestNow || $revision->getId() != $latestNow) {
        // Do not clobber newer updates with older ones. If all jobs were FIFO and
        // serialized, it would be OK to update links based on older revisions since it
        // would eventually get to the latest. Since that is not the case (by design),
        // only update the link tables to a state matching the current revision's output.
        $this->setLastError("page_latest changed from {$revision->getId()} to {$latestNow}");

        return false;
    }

    DataUpdate::runUpdates($updates);

    InfoAction::invalidateCache($title);

    return true;
}
/**
 * Do any deferred updates and clear the list
 *
 * @param string $mode Use "enqueue" to use the job queue when possible [Default: run]
 * @param string $oldMode Unused
 */
public static function doUpdates($mode = 'run', $oldMode = '') {
    // B/C for ( $commit, $mode ) args
    $mode = $oldMode ?: $mode;
    if ($mode === 'commit') {
        $mode = 'run';
    }

    $updates = self::$updates;

    while (count($updates)) {
        self::clearPendingUpdates();

        /** @var DataUpdate[] $dataUpdates */
        $dataUpdates = array();
        /** @var DeferrableUpdate[] $otherUpdates */
        $otherUpdates = array();
        foreach ($updates as $update) {
            if ($update instanceof DataUpdate) {
                $dataUpdates[] = $update;
            } else {
                $otherUpdates[] = $update;
            }
        }

        // Delegate DataUpdate execution to the DataUpdate class
        DataUpdate::runUpdates($dataUpdates, $mode);

        // Execute the non-DataUpdate tasks
        foreach ($otherUpdates as $update) {
            try {
                $update->doUpdate();
                wfGetLBFactory()->commitMasterChanges();
            } catch (Exception $e) {
                // We don't want exceptions thrown during deferred updates to
                // be reported to the user since the output is already sent
                if (!$e instanceof ErrorPageError) {
                    MWExceptionHandler::logException($e);
                }
                // Make sure incomplete transactions are not committed and end any
                // open atomic sections so that other DB updates have a chance to run
                wfGetLBFactory()->rollbackMasterChanges();
            }
        }

        $updates = self::$updates;
    }
}
/**
 * @param Title $title
 * @param Revision $revision
 * @param string $fname
 * @return void
 */
public static function runForTitleInternal(Title $title, Revision $revision, $fname) {
    wfProfileIn($fname);

    $content = $revision->getContent(Revision::RAW);
    if (!$content) {
        // if there is no content, pretend the content is empty
        $content = $revision->getContentHandler()->makeEmptyContent();
    }

    // Revision ID must be passed to the parser output to get revision variables correct
    $parserOutput = $content->getParserOutput($title, $revision->getId(), null, false);

    $updates = $content->getSecondaryDataUpdates($title, null, false, $parserOutput);
    DataUpdate::runUpdates($updates);

    InfoAction::invalidateCache($title);

    wfProfileOut($fname);
}
/**
 * Run LinksUpdate for all links on a given page_id
 * @param int $id The page_id
 */
public static function fixLinksFromArticle($id) {
    $page = WikiPage::newFromID($id);

    LinkCache::singleton()->clear();

    if ($page === null) {
        return;
    }

    $content = $page->getContent(Revision::RAW);
    if ($content === null) {
        return;
    }

    $updates = $content->getSecondaryDataUpdates($page->getTitle());
    DataUpdate::runUpdates($updates);
}
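// A minimal usage sketch (illustrative, not from the source above): iterate over a
// range of page_ids the way a refreshLinks-style maintenance loop might and rebuild
// the link tables for each. The $start/$end bounds, the batch size of 100, and the
// assumption that this runs inside the same class (self::) are all hypothetical.
$start = 1;
$end = 1000;
for ($id = $start; $id <= $end; $id++) {
    self::fixLinksFromArticle($id);
    if ($id % 100 == 0) {
        wfWaitForSlaves(); // let replication catch up between batches
    }
}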
/**
 * Do some database updates after deletion
 *
 * @param int $id page_id value of the page being deleted
 * @param $content Content: optional page content to be used when determining the required updates.
 *   This may be needed because $this->getContent() may already return null when the page proper was deleted.
 */
public function doDeleteUpdates( $id, Content $content = null ) {
    // update site status
    DeferredUpdates::addUpdate( new SiteStatsUpdate( 0, 1, - (int)$this->isCountable(), -1 ) );

    // remove secondary indexes, etc
    $updates = $this->getDeletionUpdates( $content );
    DataUpdate::runUpdates( $updates );

    // Clear caches
    WikiPage::onArticleDelete( $this->mTitle );

    // Reset this object and the Title object
    $this->loadFromRow( false, self::READ_LATEST );

    // Search engine
    DeferredUpdates::addUpdate( new SearchUpdate( $id, $this->mTitle ) );
}
/**
 * Purges the cache of a page
 */
public function execute() {
    $user = $this->getUser();
    $params = $this->extractRequestParams();

    if (!$user->isAllowed('purge') && !$this->getMain()->isInternalMode() &&
        !$this->getRequest()->wasPosted()
    ) {
        $this->dieUsageMsg(array('mustbeposted', $this->getModuleName()));
    }

    $forceLinkUpdate = $params['forcelinkupdate'];

    $pageSet = new ApiPageSet($this);
    $pageSet->execute();

    $result = array();
    foreach ($pageSet->getInvalidTitles() as $title) {
        $r = array();
        $r['title'] = $title;
        $r['invalid'] = '';
        $result[] = $r;
    }
    foreach ($pageSet->getMissingPageIDs() as $p) {
        $page = array();
        $page['pageid'] = $p;
        $page['missing'] = '';
        $result[] = $page;
    }
    foreach ($pageSet->getMissingRevisionIDs() as $r) {
        $rev = array();
        $rev['revid'] = $r;
        $rev['missing'] = '';
        $result[] = $rev;
    }

    foreach ($pageSet->getTitles() as $title) {
        $r = array();

        ApiQueryBase::addTitleInfo($r, $title);
        if (!$title->exists()) {
            $r['missing'] = '';
            $result[] = $r;
            continue;
        }

        $page = WikiPage::factory($title);
        $page->doPurge(); // Directly purge and skip the UI part of purge().
        $r['purged'] = '';

        if ($forceLinkUpdate) {
            if (!$user->pingLimiter()) {
                global $wgParser, $wgEnableParserCache;

                $popts = $page->makeParserOptions('canonical');
                $p_result = $wgParser->parse($page->getRawText(), $title, $popts, true, true, $page->getLatest());

                # Update the links tables
                $updates = $p_result->getSecondaryDataUpdates($title);
                DataUpdate::runUpdates($updates);

                $r['linkupdate'] = '';

                if ($wgEnableParserCache) {
                    $pcache = ParserCache::singleton();
                    $pcache->save($p_result, $page, $popts);
                }
            } else {
                $error = $this->parseMsg(array('actionthrottledtext'));
                $this->setWarning($error['info']);
                $forceLinkUpdate = false;
            }
        }

        $result[] = $r;
    }

    $apiResult = $this->getResult();
    $apiResult->setIndexedTagName($result, 'page');
    $apiResult->addValue(null, $this->getModuleName(), $result);
}