/**
 * Run a refreshLinks job: reparse the page's latest revision and rebuild
 * its link tables via LinksUpdate.
 *
 * @return boolean success
 */
function run() {
	global $wgParser;
	wfProfileIn(__METHOD__);

	// Start from a clean link cache so stale cached rows don't leak into this update.
	$linkCache =& LinkCache::singleton();
	$linkCache->clear();

	if ($this->title === null) {
		$this->error = "refreshLinks: Invalid title";
		wfProfileOut(__METHOD__);
		return false;
	}

	$revision = Revision::newFromTitle($this->title);
	if (!$revision) {
		$this->error = 'refreshLinks: Article not found "' . $this->title->getPrefixedDBkey() . '"';
		wfProfileOut(__METHOD__);
		return false;
	}

	// Reparse the latest revision text...
	wfProfileIn(__METHOD__ . '-parse');
	$parserOptions = new ParserOptions();
	$output = $wgParser->parse($revision->getText(), $this->title, $parserOptions, true, true, $revision->getId());
	wfProfileOut(__METHOD__ . '-parse');

	// ...then write the extracted links into the link tables.
	wfProfileIn(__METHOD__ . '-update');
	$linksUpdate = new LinksUpdate($this->title, $output, false);
	$linksUpdate->doUpdate();
	wfProfileOut(__METHOD__ . '-update');

	wfProfileOut(__METHOD__);
	return true;
}
/**
 * Integration test: verifies that running a bare LinksUpdate with an empty
 * ParserOutput does not wipe previously stored semantic annotations.
 *
 * @depends testUpdateToSetPredefinedAnnotations
 */
public function testDoUpdateUsingUserdefinedAnnotations() {
	// Create a page carrying two user-defined property annotations.
	$this->pageCreator->createPage($this->title)->doEdit('[[HasFirstLinksUpdatetest::testDoUpdate]] [[HasSecondLinksUpdatetest::testDoUpdate]]');
	$parserData = $this->applicationFactory->newParserData($this->title, $this->pageCreator->getEditInfo()->output);

	// Reparse the page independently to obtain a second ParserData view.
	$contentParser = $this->applicationFactory->newContentParser($this->title);
	$contentParser->parse();
	$parsedParserData = $this->applicationFactory->newParserData($this->title, $contentParser->getOutput());

	// Four properties expected: the two user-defined ones plus _SKEY/_MDAT
	// (see the $expected keys asserted at the end of this test).
	$this->assertCount(4, $parserData->getSemanticData()->getProperties());
	$this->assertCount(4, $this->getStore()->getSemanticData(DIWikiPage::newFromTitle($this->title))->getProperties());

	/**
	 * Run a LinksUpdate with an intentionally empty ParserOutput.
	 * See #347 and LinksUpdateConstructed
	 */
	$linksUpdate = new \LinksUpdate($this->title, new \ParserOutput());
	$linksUpdate->doUpdate();
	$this->testEnvironment->executePendingDeferredUpdates();

	/**
	 * Asserts that before and after the update, the SemanticData container
	 * holds the same amount of properties despite the fact that the ParserOutput
	 * was invoked empty
	 */
	$semanticData = $this->getStore()->getSemanticData(DIWikiPage::newFromTitle($this->title));
	$this->assertCount(4, $semanticData->getProperties());

	$expected = array('propertyKeys' => array('_SKEY', '_MDAT', 'HasFirstLinksUpdatetest', 'HasSecondLinksUpdatetest'));
	$this->semanticDataValidator->assertThatPropertiesAreSet($expected, $semanticData);

	// The revision is handed to dependent tests.
	return $this->pageCreator->getPage()->getRevision();
}
/**
 * Call LinksUpdate on the text of this page's approved revision,
 * if there is one.
 *
 * Fixes in this revision: the original initialized $text, tested
 * is_null($approvedText) twice and redundantly re-assigned ''; it also
 * mixed $title with direct $page->mTitle access. The flow is now a single
 * null-check branch and consistently uses the already-fetched $title.
 *
 * @param Page $page the page that was edited (by reference, hook signature)
 * @param object $editInfo output parameter; replaced with the prepared edit
 *        info for the approved text
 * @param bool $changed whether the content changed (unused here)
 * @return bool always true, to continue the hook chain
 */
public static function updateLinksAfterEdit(&$page, &$editInfo, $changed) {
	$title = $page->getTitle();
	if (!ApprovedRevs::pageIsApprovable($title)) {
		return true;
	}
	// If this user's revisions get approved automatically, exit now,
	// because this will be the approved revision anyway.
	if (self::userRevsApprovedAutomatically($title)) {
		return true;
	}
	$approvedText = ApprovedRevs::getApprovedContent($title);
	if (is_null($approvedText)) {
		global $egApprovedRevsBlankIfUnapproved;
		if (!$egApprovedRevsBlankIfUnapproved) {
			// Unapproved page and no page blanking - nothing to do.
			return true;
		}
		// 'blank if unapproved' is set: index the page as empty.
		$text = '';
	} else {
		$text = $approvedText;
	}
	$editInfo = $page->prepareTextForEdit($text);
	$u = new LinksUpdate($title, $editInfo->output);
	$u->doUpdate();
	return true;
}
/**
 * Reparse the page with the given page ID and rebuild its link tables
 * inside a single transaction.
 *
 * Fix: the original called $dbw->begin() BEFORE checking whether a
 * revision exists, so the early `return` on a missing revision left an
 * open transaction dangling. The transaction is now opened only once we
 * know there is work to do.
 *
 * @param int $id page_id of the article to fix
 */
function fixLinksFromArticle( $id ) {
	global $wgTitle, $wgParser;
	$wgTitle = Title::newFromID( $id );
	if ( is_null( $wgTitle ) ) {
		return;
	}
	// Clear cached link rows so the rebuild starts from a clean slate.
	$linkCache =& LinkCache::singleton();
	$linkCache->clear();
	$revision = Revision::newFromTitle( $wgTitle );
	if ( !$revision ) {
		return;
	}
	// Open the write transaction only after all early-return checks passed.
	$dbw = wfGetDB( DB_MASTER );
	$dbw->begin();
	$options = new ParserOptions;
	$parserOutput = $wgParser->parse( $revision->getText(), $wgTitle, $options, true, true, $revision->getId() );
	$update = new LinksUpdate( $wgTitle, $parserOutput, false );
	$update->doUpdate();
	$dbw->commit();
}
/**
 * Invalidates the cache for a campaign when any of its dependents are edited.
 * The 'dependents' are tracked by entries in the templatelinks table, which
 * are inserted by using the PageContentSaveComplete hook.
 *
 * This is usually run via the Job Queue mechanism.
 *
 * @param LinksUpdate $linksupdate the completed links update
 * @return bool always true, to continue the hook chain
 */
public static function onLinksUpdateComplete(LinksUpdate &$linksupdate) {
	$title = $linksupdate->getTitle();
	// Only pages in the Campaign namespace are of interest here.
	if (!$title->inNamespace(NS_CAMPAIGN)) {
		return true;
	}
	$campaign = new UploadWizardCampaign($title);
	$campaign->invalidateCache();
	return true;
}
/**
 * Purges the cache of a page.
 *
 * Iterates the requested titles; for each one it either records an error
 * flag ('invalid' / 'missing') or purges the page, optionally forcing a
 * links-table update (rate-limited per user).
 */
public function execute() {
	global $wgUser;
	$params = $this->extractRequestParams();

	// Users without the 'purge' right must POST, unless this is an internal API call.
	$allowed = $wgUser->isAllowed('purge') || $this->getMain()->isInternalMode() || $this->getMain()->getRequest()->wasPosted();
	if (!$allowed) {
		$this->dieUsageMsg(array('mustbeposted', $this->getModuleName()));
	}

	$forceLinkUpdate = $params['forcelinkupdate'];
	$result = array();
	foreach ($params['titles'] as $titleText) {
		$pageInfo = array();

		$title = Title::newFromText($titleText);
		if (!$title instanceof Title) {
			$pageInfo['title'] = $titleText;
			$pageInfo['invalid'] = '';
			$result[] = $pageInfo;
			continue;
		}

		ApiQueryBase::addTitleInfo($pageInfo, $title);
		if (!$title->exists()) {
			$pageInfo['missing'] = '';
			$result[] = $pageInfo;
			continue;
		}

		$context = $this->createContext();
		$context->setTitle($title);
		$article = Article::newFromTitle($title, $context);
		// Directly purge and skip the UI part of purge().
		$article->doPurge();
		$pageInfo['purged'] = '';

		if ($forceLinkUpdate) {
			if ($wgUser->pingLimiter()) {
				// Rate limit hit: warn once and stop forcing link updates.
				$this->setWarning($this->parseMsg(array('actionthrottledtext')));
				$forceLinkUpdate = false;
			} else {
				global $wgParser, $wgEnableParserCache;
				$popts = new ParserOptions();
				$p_result = $wgParser->parse($article->getContent(), $title, $popts);
				# Update the links tables
				$u = new LinksUpdate($title, $p_result);
				$u->doUpdate();
				$pageInfo['linkupdate'] = '';
				if ($wgEnableParserCache) {
					// Reuse the fresh parse for the parser cache.
					ParserCache::singleton()->save($p_result, $article, $popts);
				}
			}
		}

		$result[] = $pageInfo;
	}

	$apiResult = $this->getResult();
	$apiResult->setIndexedTagName($result, 'page');
	$apiResult->addValue(null, $this->getModuleName(), $result);
}
/**
 * LinksUpdate hook handler
 * @see https://www.mediawiki.org/wiki/Manual:Hooks/LinksUpdate
 *
 * Dispatches the coordinates collected during the parse to either the
 * "smart" or the "dumb" update path.
 *
 * @param LinksUpdate $linksUpdate
 * @return bool always true, to continue the hook chain
 */
public static function onLinksUpdate(&$linksUpdate) {
	global $wgUseDumbLinkUpdate;
	$parserOutput = $linksUpdate->getParserOutput();
	// Coordinates attached to the ParserOutput during parsing, if any.
	$coords = isset($parserOutput->geoData) ? $parserOutput->geoData->getAll() : array();
	if (!$wgUseDumbLinkUpdate && count($coords)) {
		self::doSmartUpdate($coords, $linksUpdate->mId);
	} else {
		// Dumb path: forced by config, or nothing to diff against.
		self::doDumbUpdate($coords, $linksUpdate->mId);
	}
	return true;
}
/**
 * Rebuild the image index for an article after its links update completed.
 *
 * @param LinksUpdate $linksUpdate
 * @return bool return true to continue hooks flow
 */
public static function onLinksUpdateComplete($linksUpdate) {
	wfProfileIn(__METHOD__);
	$title = $linksUpdate->getTitle();
	$articleId = $title->getArticleID();
	$images = $linksUpdate->getImages();
	if (count($images) === 1) {
		// Exactly one image: index it directly without loading the article.
		self::buildIndex($articleId, array_keys($images));
		wfProfileOut(__METHOD__);
		return true;
	}
	// Otherwise fall back to the full article-based index build.
	self::buildAndGetIndex(new Article($title));
	wfProfileOut(__METHOD__);
	return true;
}
/**
 * Insert assessment records after page is saved.
 *
 * Fix: the original called getExtensionData() twice for the same key;
 * it is now fetched once and defaulted to an empty array.
 *
 * @param LinksUpdate $linksUpdate
 */
public static function onLinksUpdateComplete(&$linksUpdate) {
	$pOut = $linksUpdate->getParserOutput();
	// Even if there is no assessment data, we still need to run doUpdates
	// in case any assessment data was deleted from the page.
	$assessmentData = $pOut->getExtensionData('ext-pageassessment-assessmentdata');
	if ($assessmentData === null) {
		$assessmentData = [];
	}
	$title = $linksUpdate->getTitle();
	// In most cases $title will be a talk page, but we want to associate the
	// assessment data with the subject page.
	$subjectTitle = $title->getSubjectPage();
	PageAssessmentsBody::doUpdates($subjectTitle, $assessmentData);
}
/**
 * Clean up all secondary data for a deleted page: restrictions, category
 * counts, every link table, redirect/page_props rows, and stale
 * recentchanges entries. Deletions are chunked by $wgUpdateRowsPerQuery
 * and interleaved with replication waits when more than one batch exists.
 *
 * Fix: $scopedLock was only assigned inside `if ($this->ticket)` but
 * consumed unconditionally at the end, producing an undefined-variable
 * notice when no transaction ticket was set. It is now initialized to null.
 */
public function doUpdate() {
	$services = MediaWikiServices::getInstance();
	$config = $services->getMainConfig();
	$lbFactory = $services->getDBLoadBalancerFactory();
	$batchSize = $config->get('UpdateRowsPerQuery');

	// Page may already be deleted, so don't just getId()
	$id = $this->pageId;

	// Initialize so ScopedCallback::consume() below is safe even when no
	// transaction ticket was provided.
	$scopedLock = null;
	if ($this->ticket) {
		// Make sure all links update threads see the changes of each other.
		// This handles the case when updates have to batched into several COMMITs.
		$scopedLock = LinksUpdate::acquirePageLock($this->getDB(), $id);
	}

	$title = $this->page->getTitle();
	$dbw = $this->getDB(); // convenience

	// Delete restrictions for it
	$dbw->delete('page_restrictions', ['pr_page' => $id], __METHOD__);

	// Fix category table counts
	$cats = $dbw->selectFieldValues('categorylinks', 'cl_to', ['cl_from' => $id], __METHOD__);
	$catBatches = array_chunk($cats, $batchSize);
	foreach ($catBatches as $catBatch) {
		$this->page->updateCategoryCounts([], $catBatch, $id);
		if (count($catBatches) > 1) {
			// Keep replicas caught up between batches.
			$lbFactory->commitAndWaitForReplication(__METHOD__, $this->ticket, ['wiki' => $dbw->getWikiID()]);
		}
	}

	// Refresh the category table entry if it seems to have no pages. Check
	// master for the most up-to-date cat_pages count.
	if ($title->getNamespace() === NS_CATEGORY) {
		$row = $dbw->selectRow('category', ['cat_id', 'cat_title', 'cat_pages', 'cat_subcats', 'cat_files'], ['cat_title' => $title->getDBkey(), 'cat_pages <= 0'], __METHOD__);
		if ($row) {
			Category::newFromRow($row, $title)->refreshCounts();
		}
	}

	// Delete all outgoing link rows, chunked by primary key.
	$this->batchDeleteByPK('pagelinks', ['pl_from' => $id], ['pl_from', 'pl_namespace', 'pl_title'], $batchSize);
	$this->batchDeleteByPK('imagelinks', ['il_from' => $id], ['il_from', 'il_to'], $batchSize);
	$this->batchDeleteByPK('categorylinks', ['cl_from' => $id], ['cl_from', 'cl_to'], $batchSize);
	$this->batchDeleteByPK('templatelinks', ['tl_from' => $id], ['tl_from', 'tl_namespace', 'tl_title'], $batchSize);
	$this->batchDeleteByPK('externallinks', ['el_from' => $id], ['el_id'], $batchSize);
	$this->batchDeleteByPK('langlinks', ['ll_from' => $id], ['ll_from', 'll_lang'], $batchSize);
	$this->batchDeleteByPK('iwlinks', ['iwl_from' => $id], ['iwl_from', 'iwl_prefix', 'iwl_title'], $batchSize);

	// Delete any redirect entry or page props entries
	$dbw->delete('redirect', ['rd_from' => $id], __METHOD__);
	$dbw->delete('page_props', ['pp_page' => $id], __METHOD__);

	// Find recentchanges entries to clean up...
	$rcIdsForTitle = $dbw->selectFieldValues('recentchanges', 'rc_id', ['rc_type != ' . RC_LOG, 'rc_namespace' => $title->getNamespace(), 'rc_title' => $title->getDBkey(), 'rc_timestamp < ' . $dbw->addQuotes($dbw->timestamp($this->timestamp))], __METHOD__);
	$rcIdsForPage = $dbw->selectFieldValues('recentchanges', 'rc_id', ['rc_type != ' . RC_LOG, 'rc_cur_id' => $id], __METHOD__);

	// T98706: delete by PK to avoid lock contention with RC delete log insertions
	$rcIdBatches = array_chunk(array_merge($rcIdsForTitle, $rcIdsForPage), $batchSize);
	foreach ($rcIdBatches as $rcIdBatch) {
		$dbw->delete('recentchanges', ['rc_id' => $rcIdBatch], __METHOD__);
		if (count($rcIdBatches) > 1) {
			$lbFactory->commitAndWaitForReplication(__METHOD__, $this->ticket, ['wiki' => $dbw->getWikiID()]);
		}
	}

	// Commit and release the lock (if set)
	ScopedCallback::consume($scopedLock);
}
/**
 * LinksUpdate hook handler - saves a count of h2 elements that occur in the WikiPage
 * @see https://www.mediawiki.org/wiki/Manual:Hooks/LinksUpdate
 *
 * @param LinksUpdate $lu
 * @return bool
 */
public static function onLinksUpdate(LinksUpdate $lu) {
	// Only talk pages are counted.
	if (!$lu->getTitle()->isTalkPage()) {
		return true;
	}
	$topics = 0;
	foreach ($lu->getParserOutput()->getSections() as $section) {
		// Top-level (h2) sections have toclevel 1.
		if ($section['toclevel'] == 1) {
			$topics += 1;
		}
	}
	// Only record the property when at least one topic was found.
	if ($topics) {
		$lu->mProperties['page_top_level_section_count'] = $topics;
	}
	return true;
}
/**
 * Clean up secondary data for a deleted page: restrictions, category
 * counts, link tables (unless the DB does cascading deletes), and stale
 * recentchanges rows (unless cleanup triggers handle them).
 *
 * NOTE(review): the statement order is deliberate — the page lock is taken
 * first and released only in the onTransactionIdle callback, i.e. after
 * the final COMMIT. Do not reorder.
 */
public function doUpdate() {
	# Page may already be deleted, so don't just getId()
	$id = $this->pageId;
	// Make sure all links update threads see the changes of each other.
	// This handles the case when updates have to batched into several COMMITs.
	$scopedLock = LinksUpdate::acquirePageLock($this->mDb, $id);

	# Delete restrictions for it
	$this->mDb->delete('page_restrictions', ['pr_page' => $id], __METHOD__);

	# Fix category table counts
	$cats = $this->mDb->selectFieldValues('categorylinks', 'cl_to', ['cl_from' => $id], __METHOD__);
	$this->page->updateCategoryCounts([], $cats);

	# If using cascading deletes, we can skip some explicit deletes
	if (!$this->mDb->cascadingDeletes()) {
		# Delete outgoing links
		$this->mDb->delete('pagelinks', ['pl_from' => $id], __METHOD__);
		$this->mDb->delete('imagelinks', ['il_from' => $id], __METHOD__);
		$this->mDb->delete('categorylinks', ['cl_from' => $id], __METHOD__);
		$this->mDb->delete('templatelinks', ['tl_from' => $id], __METHOD__);
		$this->mDb->delete('externallinks', ['el_from' => $id], __METHOD__);
		$this->mDb->delete('langlinks', ['ll_from' => $id], __METHOD__);
		$this->mDb->delete('iwlinks', ['iwl_from' => $id], __METHOD__);
		$this->mDb->delete('redirect', ['rd_from' => $id], __METHOD__);
		$this->mDb->delete('page_props', ['pp_page' => $id], __METHOD__);
	}

	# If using cleanup triggers, we can skip some manual deletes
	if (!$this->mDb->cleanupTriggers()) {
		$title = $this->page->getTitle();
		# Find recentchanges entries to clean up...
		$rcIdsForTitle = $this->mDb->selectFieldValues('recentchanges', 'rc_id', ['rc_type != ' . RC_LOG, 'rc_namespace' => $title->getNamespace(), 'rc_title' => $title->getDBkey()], __METHOD__);
		$rcIdsForPage = $this->mDb->selectFieldValues('recentchanges', 'rc_id', ['rc_type != ' . RC_LOG, 'rc_cur_id' => $id], __METHOD__);
		# T98706: delete PK to avoid lock contention with RC delete log insertions
		$rcIds = array_merge($rcIdsForTitle, $rcIdsForPage);
		if ($rcIds) {
			$this->mDb->delete('recentchanges', ['rc_id' => $rcIds], __METHOD__);
		}
	}

	$this->mDb->onTransactionIdle(function () use(&$scopedLock) {
		// Release the lock *after* the final COMMIT for correctness
		ScopedCallback::consume($scopedLock);
	});
}
/**
 * Run a deleteLinks job: remove the link-table rows of a page that has
 * been deleted, serialized against concurrent links updates by a page lock.
 *
 * @return bool success
 */
function run() {
	if ($this->title === null) {
		$this->setLastError("deleteLinks: Invalid title");
		return false;
	}

	$pageId = $this->params['pageId'];

	// Serialize links updates by page ID so they see each others' changes
	$scopedLock = LinksUpdate::acquirePageLock(wfGetDB(DB_MASTER), $pageId, 'job');

	if (WikiPage::newFromID($pageId, WikiPage::READ_LATEST)) {
		// The page was restored somehow or something went wrong
		$this->setLastError("deleteLinks: Page #{$pageId} exists");
		return false;
	}

	$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
	$timestamp = isset($this->params['timestamp']) ? $this->params['timestamp'] : null;
	$page = WikiPage::factory($this->title); // title when deleted

	$deletionUpdate = new LinksDeletionUpdate($page, $pageId, $timestamp);
	$deletionUpdate->setTransactionTicket($lbFactory->getEmptyTransactionTicket(__METHOD__));
	$deletionUpdate->doUpdate();

	return true;
}
/**
 * Run a LinksUpdate for the given title/output inside an explicit
 * transaction, then assert the resulting table contents.
 *
 * @return LinksUpdate the update that was executed, for further inspection
 */
protected function assertLinksUpdate(Title $title, ParserOutput $parserOutput, $table, $fields, $condition, array $expectedRows) {
	$linksUpdate = new LinksUpdate($title, $parserOutput);

	// NOTE: make sure LinksUpdate does not generate warnings when called inside a transaction.
	$linksUpdate->beginTransaction();
	$linksUpdate->doUpdate();
	$linksUpdate->commitTransaction();

	$this->assertSelect($table, $fields, $condition, $expectedRows);
	return $linksUpdate;
}
/**
 * Record a file upload in the upload log and the image table.
 *
 * NOTE(review): the commit ordering here is deliberate (files must never be
 * lost even if later steps fail) — do not reorder the DB operations.
 *
 * @param string $oldver archive name for the previous version (may be empty)
 * @param string $comment upload comment / description
 * @param string $pageText initial description page text (new files only)
 * @param bool|array $props file properties, or false to probe them
 * @param string|bool $timestamp upload timestamp, or false for "now"
 * @param null|User $user uploader; defaults to $wgUser
 * @return bool false if the file is missing on disk, true otherwise
 */
function recordUpload2($oldver, $comment, $pageText, $props = false, $timestamp = false, $user = null) {
	wfProfileIn(__METHOD__);
	if (is_null($user)) {
		global $wgUser;
		$user = $wgUser;
	}
	$dbw = $this->repo->getMasterDB();
	$dbw->begin(__METHOD__);
	if (!$props) {
		// No properties supplied: probe the stored file itself.
		wfProfileIn(__METHOD__ . '-getProps');
		$props = $this->repo->getFileProps($this->getVirtualUrl());
		wfProfileOut(__METHOD__ . '-getProps');
	}
	if ($timestamp === false) {
		$timestamp = $dbw->timestamp();
	}
	$props['description'] = $comment;
	$props['user'] = $user->getId();
	$props['user_text'] = $user->getName();
	$props['timestamp'] = wfTimestamp(TS_MW, $timestamp); // DB -> TS_MW
	$this->setProps($props);
	# Fail now if the file isn't there
	if (!$this->fileExists) {
		wfDebug(__METHOD__ . ": File " . $this->getRel() . " went missing!\n");
		wfProfileOut(__METHOD__);
		return false;
	}
	$reupload = false;
	# Test to see if the row exists using INSERT IGNORE
	# This avoids race conditions by locking the row until the commit, and also
	# doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
	$dbw->insert('image', array('img_name' => $this->getName(), 'img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $dbw->encodeBlob($this->metadata), 'img_sha1' => $this->sha1), __METHOD__, 'IGNORE');
	if ($dbw->affectedRows() == 0) {
		# (bug 34993) Note: $oldver can be empty here, if the previous
		# version of the file was broken. Allow registration of the new
		# version to continue anyway, because that's better than having
		# an image that's not fixable by user operations.
		$reupload = true;
		# Collision, this is an update of a file
		# Insert previous contents into oldimage
		$dbw->insertSelect('oldimage', 'image', array('oi_name' => 'img_name', 'oi_archive_name' => $dbw->addQuotes($oldver), 'oi_size' => 'img_size', 'oi_width' => 'img_width', 'oi_height' => 'img_height', 'oi_bits' => 'img_bits', 'oi_timestamp' => 'img_timestamp', 'oi_description' => 'img_description', 'oi_user' => 'img_user', 'oi_user_text' => 'img_user_text', 'oi_metadata' => 'img_metadata', 'oi_media_type' => 'img_media_type', 'oi_major_mime' => 'img_major_mime', 'oi_minor_mime' => 'img_minor_mime', 'oi_sha1' => 'img_sha1'), array('img_name' => $this->getName()), __METHOD__);
		# Update the current image row
		$dbw->update('image', array('img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $dbw->encodeBlob($this->metadata), 'img_sha1' => $this->sha1), array('img_name' => $this->getName()), __METHOD__);
	} else {
		# This is a new file, so update the image count
		DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('images' => 1)));
	}
	$descTitle = $this->getTitle();
	$wikiPage = new WikiFilePage($descTitle);
	$wikiPage->setFile($this);
	# Add the log entry
	$action = $reupload ? 'overwrite' : 'upload';
	$logEntry = new ManualLogEntry('upload', $action);
	$logEntry->setPerformer($user);
	$logEntry->setComment($comment);
	$logEntry->setTarget($descTitle);
	// Allow people using the api to associate log entries with the upload.
	// Log has a timestamp, but sometimes different from upload timestamp.
	$logEntry->setParameters(array('img_sha1' => $this->sha1, 'img_timestamp' => $timestamp));
	// Note we keep $logId around since during new image
	// creation, page doesn't exist yet, so log_page = 0
	// but we want it to point to the page we're making,
	// so we later modify the log entry.
	// For a similar reason, we avoid making an RC entry
	// now and wait until the page exists.
	$logId = $logEntry->insert();
	$exists = $descTitle->exists();
	if ($exists) {
		// Page exists, do RC entry now (otherwise we wait for later).
		$logEntry->publish($logId);
	}
	wfProfileIn(__METHOD__ . '-edit');
	if ($exists) {
		# Create a null revision
		$latest = $descTitle->getLatestRevID();
		$editSummary = LogFormatter::newFromEntry($logEntry)->getPlainActionText();
		$nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleID(), $editSummary, false);
		if (!is_null($nullRevision)) {
			$nullRevision->insertOn($dbw);
			wfRunHooks('NewRevisionFromEditComplete', array($wikiPage, $nullRevision, $latest, $user));
			$wikiPage->updateRevisionOn($dbw, $nullRevision);
		}
	}
	# Commit the transaction now, in case something goes wrong later
	# The most important thing is that files don't get lost, especially archives
	# NOTE: once we have support for nested transactions, the commit may be moved
	# to after $wikiPage->doEdit has been called.
	$dbw->commit(__METHOD__);
	# Save to memcache.
	# We shall not saveToCache before the commit since otherwise
	# in case of a rollback there is an usable file from memcached
	# which in fact doesn't really exist (bug 24978)
	$this->saveToCache();
	if ($exists) {
		# Invalidate the cache for the description page
		$descTitle->invalidateCache();
		$descTitle->purgeSquid();
	} else {
		# New file; create the description page.
		# There's already a log entry, so don't make a second RC entry
		# Squid and file cache for the description page are purged by doEditContent.
		$content = ContentHandler::makeContent($pageText, $descTitle);
		$status = $wikiPage->doEditContent($content, $comment, EDIT_NEW | EDIT_SUPPRESS_RC, false, $user);
		$dbw->begin(__METHOD__); // XXX; doEdit() uses a transaction
		// Now that the page exists, make an RC entry.
		$logEntry->publish($logId);
		if (isset($status->value['revision'])) {
			// Point the earlier log entry at the page we just created.
			$dbw->update('logging', array('log_page' => $status->value['revision']->getPage()), array('log_id' => $logId), __METHOD__);
		}
		$dbw->commit(__METHOD__); // commit before anything bad can happen
	}
	wfProfileOut(__METHOD__ . '-edit');
	if ($reupload) {
		# Delete old thumbnails
		wfProfileIn(__METHOD__ . '-purge');
		$this->purgeThumbnails();
		wfProfileOut(__METHOD__ . '-purge');
		# Remove the old file from the squid cache
		SquidUpdate::purge(array($this->getURL()));
	}
	# Hooks, hooks, the magic of hooks...
	wfProfileIn(__METHOD__ . '-hooks');
	wfRunHooks('FileUpload', array($this, $reupload, $descTitle->exists()));
	wfProfileOut(__METHOD__ . '-hooks');
	# Invalidate cache for all pages using this file
	$update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
	$update->doUpdate();
	if (!$reupload) {
		// New file: reparse pages that embed it (previously a red link).
		LinksUpdate::queueRecursiveJobsForTable($this->getTitle(), 'imagelinks');
	}
	wfProfileOut(__METHOD__);
	return true;
}
/**
 * Do some database updates after deletion
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 */
public function doDeleteUpdates($id, Content $content = null) {
	// Update site status
	DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $this->isCountable(), -1));

	// Delete pagelinks, update secondary indexes, etc
	foreach ($this->getDeletionUpdates($content) as $deletionUpdate) {
		DeferredUpdates::addUpdate($deletionUpdate);
	}

	// Reparse any pages transcluding this page
	LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');

	// Reparse any pages including this image
	if ($this->mTitle->getNamespace() == NS_FILE) {
		LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
	}

	// Clear caches
	WikiPage::onArticleDelete($this->mTitle);

	// Reset this object and the Title object
	$this->loadFromRow(false, self::READ_LATEST);

	// Search engine
	DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}
/**
 * Record a file upload in the upload log and the image table.
 *
 * NOTE(review): the atomic section plus the onTransactionIdle closure is
 * order-critical — the DB registry must commit before any purges, page
 * creation, or RC publication happen. Do not reorder.
 *
 * @param string $oldver archive name for the previous version (may be empty)
 * @param string $comment upload comment / description
 * @param string $pageText initial description page text (new files only)
 * @param bool|array $props file properties, or false to probe them
 * @param string|bool $timestamp upload timestamp, or false for "now"
 * @param null|User $user uploader; defaults to $wgUser
 * @param string[] $tags change tags to apply to the log/RC entries
 * @return bool false if the file is missing on disk, true otherwise
 */
function recordUpload2($oldver, $comment, $pageText, $props = false, $timestamp = false, $user = null, $tags = array()) {
	if (is_null($user)) {
		global $wgUser;
		$user = $wgUser;
	}
	$dbw = $this->repo->getMasterDB();
	# Imports or such might force a certain timestamp; otherwise we generate
	# it and can fudge it slightly to keep (name,timestamp) unique on re-upload.
	if ($timestamp === false) {
		$timestamp = $dbw->timestamp();
		$allowTimeKludge = true;
	} else {
		$allowTimeKludge = false;
	}
	$props = $props ?: $this->repo->getFileProps($this->getVirtualUrl());
	$props['description'] = $comment;
	$props['user'] = $user->getId();
	$props['user_text'] = $user->getName();
	$props['timestamp'] = wfTimestamp(TS_MW, $timestamp); // DB -> TS_MW
	$this->setProps($props);
	# Fail now if the file isn't there
	if (!$this->fileExists) {
		wfDebug(__METHOD__ . ": File " . $this->getRel() . " went missing!\n");
		return false;
	}
	$dbw->startAtomic(__METHOD__);
	# Test to see if the row exists using INSERT IGNORE
	# This avoids race conditions by locking the row until the commit, and also
	# doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
	$dbw->insert('image', array('img_name' => $this->getName(), 'img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $dbw->encodeBlob($this->metadata), 'img_sha1' => $this->sha1), __METHOD__, 'IGNORE');
	$reupload = $dbw->affectedRows() == 0;
	if ($reupload) {
		if ($allowTimeKludge) {
			# Use LOCK IN SHARE MODE to ignore any transaction snapshotting
			$ltimestamp = $dbw->selectField('image', 'img_timestamp', array('img_name' => $this->getName()), __METHOD__, array('LOCK IN SHARE MODE'));
			$lUnixtime = $ltimestamp ? wfTimestamp(TS_UNIX, $ltimestamp) : false;
			# Avoid a timestamp that is not newer than the last version
			# TODO: the image/oldimage tables should be like page/revision with an ID field
			if ($lUnixtime && wfTimestamp(TS_UNIX, $timestamp) <= $lUnixtime) {
				sleep(1); // fast enough re-uploads would go far in the future otherwise
				$timestamp = $dbw->timestamp($lUnixtime + 1);
				$this->timestamp = wfTimestamp(TS_MW, $timestamp); // DB -> TS_MW
			}
		}
		# (bug 34993) Note: $oldver can be empty here, if the previous
		# version of the file was broken. Allow registration of the new
		# version to continue anyway, because that's better than having
		# an image that's not fixable by user operations.
		# Collision, this is an update of a file
		# Insert previous contents into oldimage
		$dbw->insertSelect('oldimage', 'image', array('oi_name' => 'img_name', 'oi_archive_name' => $dbw->addQuotes($oldver), 'oi_size' => 'img_size', 'oi_width' => 'img_width', 'oi_height' => 'img_height', 'oi_bits' => 'img_bits', 'oi_timestamp' => 'img_timestamp', 'oi_description' => 'img_description', 'oi_user' => 'img_user', 'oi_user_text' => 'img_user_text', 'oi_metadata' => 'img_metadata', 'oi_media_type' => 'img_media_type', 'oi_major_mime' => 'img_major_mime', 'oi_minor_mime' => 'img_minor_mime', 'oi_sha1' => 'img_sha1'), array('img_name' => $this->getName()), __METHOD__);
		# Update the current image row
		$dbw->update('image', array('img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $dbw->encodeBlob($this->metadata), 'img_sha1' => $this->sha1), array('img_name' => $this->getName()), __METHOD__);
	}
	$descTitle = $this->getTitle();
	$descId = $descTitle->getArticleID();
	$wikiPage = new WikiFilePage($descTitle);
	$wikiPage->setFile($this);
	// Add the log entry...
	$logEntry = new ManualLogEntry('upload', $reupload ? 'overwrite' : 'upload');
	$logEntry->setTimestamp($this->timestamp);
	$logEntry->setPerformer($user);
	$logEntry->setComment($comment);
	$logEntry->setTarget($descTitle);
	// Allow people using the api to associate log entries with the upload.
	// Log has a timestamp, but sometimes different from upload timestamp.
	$logEntry->setParameters(array('img_sha1' => $this->sha1, 'img_timestamp' => $timestamp));
	// Note we keep $logId around since during new image
	// creation, page doesn't exist yet, so log_page = 0
	// but we want it to point to the page we're making,
	// so we later modify the log entry.
	// For a similar reason, we avoid making an RC entry
	// now and wait until the page exists.
	$logId = $logEntry->insert();
	if ($descTitle->exists()) {
		// Use own context to get the action text in content language
		$formatter = LogFormatter::newFromEntry($logEntry);
		$formatter->setContext(RequestContext::newExtraneousContext($descTitle));
		$editSummary = $formatter->getPlainActionText();
		$nullRevision = Revision::newNullRevision($dbw, $descId, $editSummary, false, $user);
		if ($nullRevision) {
			$nullRevision->insertOn($dbw);
			Hooks::run('NewRevisionFromEditComplete', array($wikiPage, $nullRevision, $nullRevision->getParentId(), $user));
			$wikiPage->updateRevisionOn($dbw, $nullRevision);
			// Associate null revision id
			$logEntry->setAssociatedRevId($nullRevision->getId());
		}
		$newPageContent = null;
	} else {
		// Make the description page and RC log entry post-commit
		$newPageContent = ContentHandler::makeContent($pageText, $descTitle);
	}
	# Defer purges, page creation, and link updates in case they error out.
	# The most important thing is that files and the DB registry stay synced.
	$dbw->endAtomic(__METHOD__);
	# Do some cache purges after final commit so that:
	# a) Changes are more likely to be seen post-purge
	# b) They won't cause rollback of the log publish/update above
	$that = $this;
	$dbw->onTransactionIdle(function () use($that, $reupload, $wikiPage, $newPageContent, $comment, $user, $logEntry, $logId, $descId, $tags) {
		# Update memcache after the commit
		$that->invalidateCache();
		$updateLogPage = false;
		if ($newPageContent) {
			# New file page; create the description page.
			# There's already a log entry, so don't make a second RC entry
			# CDN and file cache for the description page are purged by doEditContent.
			$status = $wikiPage->doEditContent($newPageContent, $comment, EDIT_NEW | EDIT_SUPPRESS_RC, false, $user);
			if (isset($status->value['revision'])) {
				// Associate new page revision id
				$logEntry->setAssociatedRevId($status->value['revision']->getId());
			}
			// This relies on the resetArticleID() call in WikiPage::insertOn(),
			// which is triggered on $descTitle by doEditContent() above.
			if (isset($status->value['revision'])) {
				/** @var $rev Revision */
				$rev = $status->value['revision'];
				$updateLogPage = $rev->getPage();
			}
		} else {
			# Existing file page: invalidate description page cache
			$wikiPage->getTitle()->invalidateCache();
			$wikiPage->getTitle()->purgeSquid();
			# Allow the new file version to be patrolled from the page footer
			Article::purgePatrolFooterCache($descId);
		}
		# Update associated rev id. This should be done by $logEntry->insert() earlier,
		# but setAssociatedRevId() wasn't called at that point yet...
		$logParams = $logEntry->getParameters();
		$logParams['associated_rev_id'] = $logEntry->getAssociatedRevId();
		$update = array('log_params' => LogEntryBase::makeParamBlob($logParams));
		if ($updateLogPage) {
			# Also log page, in case where we just created it above
			$update['log_page'] = $updateLogPage;
		}
		$that->getRepo()->getMasterDB()->update('logging', $update, array('log_id' => $logId), __METHOD__);
		$that->getRepo()->getMasterDB()->insert('log_search', array('ls_field' => 'associated_rev_id', 'ls_value' => $logEntry->getAssociatedRevId(), 'ls_log_id' => $logId), __METHOD__);
		# Now that the log entry is up-to-date, make an RC entry.
		$recentChange = $logEntry->publish($logId);
		if ($tags) {
			ChangeTags::addTags($tags, $recentChange ? $recentChange->getAttribute('rc_id') : null, $logEntry->getAssociatedRevId(), $logId);
		}
		# Run hook for other updates (typically more cache purging)
		Hooks::run('FileUpload', array($that, $reupload, !$newPageContent));
		if ($reupload) {
			# Delete old thumbnails
			$that->purgeThumbnails();
			# Remove the old file from the CDN cache
			DeferredUpdates::addUpdate(new CdnCacheUpdate(array($that->getUrl())), DeferredUpdates::PRESEND);
		} else {
			# Update backlink pages pointing to this title if created
			LinksUpdate::queueRecursiveJobsForTable($that->getTitle(), 'imagelinks');
		}
	});
	if (!$reupload) {
		# This is a new file, so update the image count
		DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('images' => 1)));
	}
	# Invalidate cache for all pages using this file
	DeferredUpdates::addUpdate(new HTMLCacheUpdate($this->getTitle(), 'imagelinks'));
	return true;
}
/**
 * Do standard deferred updates after page edit.
 * Update links tables, site stats, search index and message cache.
 * Every 1000th edit, prune the recent changes table.
 *
 * @private
 * @param $text New text of the article
 * @param $summary Edit summary
 * @param $minoredit Minor edit
 * @param $timestamp_of_pagechange Timestamp associated with the page change
 * @param $newid rev_id value of the new revision
 * @param $changed Whether or not the content actually changed
 */
function editUpdates($text, $summary, $minoredit, $timestamp_of_pagechange, $newid, $changed = true)
{
    global $wgDeferredUpdateList, $wgMessageCache, $wgUser, $wgParser;
    wfProfileIn(__METHOD__);

    # Parse the text
    $options = new ParserOptions();
    $options->setTidy(true);
    $poutput = $wgParser->parse($text, $this->mTitle, $options, true, true, $newid);

    # Save it to the parser cache
    $parserCache =& ParserCache::singleton();
    $parserCache->save($poutput, $this, $wgUser);

    # Update the links tables
    $u = new LinksUpdate($this->mTitle, $poutput);
    $u->doUpdate();

    if (wfRunHooks('ArticleEditUpdatesDeleteFromRecentchanges', array(&$this))) {
        wfSeedRandom();
        # Roughly one edit in a thousand triggers the pruning pass.
        if (0 == mt_rand(0, 999)) {
            # Periodically flush old entries from the recentchanges table.
            global $wgRCMaxAge;
            $dbw =& wfGetDB(DB_MASTER);
            $cutoff = $dbw->timestamp(time() - $wgRCMaxAge);
            $recentchanges = $dbw->tableName('recentchanges');
            $sql = "DELETE FROM {$recentchanges} WHERE rc_timestamp < '{$cutoff}'";
            $dbw->query($sql);
        }
    }

    $id = $this->getID();
    $title = $this->mTitle->getPrefixedDBkey();
    $shortTitle = $this->mTitle->getDBkey();

    if (0 == $id) {
        # No page row exists for this title; none of the remaining
        # updates (stats, search, newtalk, message cache) apply.
        wfProfileOut(__METHOD__);
        return;
    }

    # Queue site statistics and search index refreshes for deferred execution.
    $u = new SiteStatsUpdate(0, 1, $this->mGoodAdjustment, $this->mTotalAdjustment);
    array_push($wgDeferredUpdateList, $u);
    $u = new SearchUpdate($id, $title, $text);
    array_push($wgDeferredUpdateList, $u);

    # If this is another user's talk page, update newtalk
    # Don't do this if $changed = false otherwise some idiot can null-edit a
    # load of user talk pages and piss people off, nor if it's a minor edit
    # by a properly-flagged bot.
    if ($this->mTitle->getNamespace() == NS_USER_TALK && $shortTitle != $wgUser->getTitleKey() && $changed && !($minoredit && $wgUser->isAllowed('nominornewtalk'))) {
        if (wfRunHooks('ArticleEditUpdateNewTalk', array(&$this))) {
            $other = User::newFromName($shortTitle);
            if (is_null($other) && User::isIP($shortTitle)) {
                // An anonymous user
                $other = new User();
                $other->setName($shortTitle);
            }
            if ($other) {
                $other->setNewtalk(true);
            }
        }
    }

    # Interface message pages feed the message cache; keep it in sync.
    if ($this->mTitle->getNamespace() == NS_MEDIAWIKI) {
        $wgMessageCache->replace($shortTitle, $text);
    }
    wfProfileOut(__METHOD__);
}
/**
 * LinksUpdate hook handler: replaces the update's ParserOutput with a
 * version decorated with flags for the affected article.
 *
 * @param \LinksUpdate $linksUpdate
 * @return bool always true, to let other hook handlers run
 */
public static function onLinksUpdate(\LinksUpdate $linksUpdate)
{
    $controller = new \FlagsController();
    $articleId = $linksUpdate->getTitle()->getArticleID();

    $linksUpdate->mParserOutput = $controller->modifyParserOutputWithFlags(
        $linksUpdate->getParserOutput(),
        $articleId
    );

    return true;
}
/**
 * Updates cascading protections
 *
 * Compares the templates recorded in the templatelinks table with the
 * templates actually used by the given parser output; when the parser
 * output references templates the table lacks, runs a links update so
 * cascading protection takes effect as soon as changes are visible.
 *
 * @param $parserOutput ParserOutput object for the current version
 */
public function doCascadeProtectionUpdates( ParserOutput $parserOutput ) {
	// Nothing to do on a read-only wiki, or when no cascading
	// restrictions apply to this page.
	if ( wfReadOnly() || !$this->mTitle->areRestrictionsCascading() ) {
		return;
	}

	// templatelinks may have drifted out of sync, especially with
	// variable-based transclusions. Out of paranoia, detect any
	// difference and repair the tables immediately.

	// Templates currently recorded in the database, keyed "ns:title".
	$pageId = $this->getId();
	$recordedTemplates = array();
	$dbr = wfGetDB( DB_SLAVE );
	$rows = $dbr->select(
		array( 'templatelinks' ),
		array( 'tl_namespace', 'tl_title' ),
		array( 'tl_from' => $pageId ),
		__METHOD__
	);
	foreach ( $rows as $linkRow ) {
		$recordedTemplates["{$linkRow->tl_namespace}:{$linkRow->tl_title}"] = true;
	}

	// Templates the parser output actually used, same key format.
	$usedTemplates = array();
	foreach ( $parserOutput->getTemplates() as $namespace => $titlesByKey ) {
		foreach ( $titlesByKey as $dbKey => $ignoredId ) {
			$usedTemplates["$namespace:$dbKey"] = true;
		}
	}

	// Anything used but not recorded means it's link-update time.
	// Note: we are only interested in links here; other DataUpdate
	// items from the parser output are not needed.
	$staleEntries = array_diff_key( $usedTemplates, $recordedTemplates );
	if ( count( $staleEntries ) > 0 ) {
		$update = new LinksUpdate( $this->mTitle, $parserOutput, false );
		$update->doUpdate();
	}
}
/**
 * Run a refreshLinks2 job: re-parse every page (in the given tl_from id
 * range) that transcludes this job's title and refresh their tracking
 * link tables.
 *
 * @return boolean success
 */
function run()
{
    global $wgParser;
    wfProfileIn(__METHOD__);

    $linkCache = LinkCache::singleton();
    $linkCache->clear();

    if (is_null($this->title)) {
        $this->error = "refreshLinks2: Invalid title";
        wfProfileOut(__METHOD__);
        return false;
    }
    if (!isset($this->params['start']) || !isset($this->params['end'])) {
        $this->error = "refreshLinks2: Invalid params";
        wfProfileOut(__METHOD__);
        return false;
    }
    $start = intval($this->params['start']);
    $end = intval($this->params['end']);

    # Find all pages in the id range that transclude this title.
    $dbr = wfGetDB(DB_SLAVE);
    $res = $dbr->select(array('templatelinks', 'page'), array('page_namespace', 'page_title'), array('page_id=tl_from', "tl_from >= '{$start}'", "tl_from <= '{$end}'", 'tl_namespace' => $this->title->getNamespace(), 'tl_title' => $this->title->getDBkey()), __METHOD__);

    # Not suitable for page load triggered job running!
    # Gracefully switch to refreshLinks jobs if this happens.
    if (php_sapi_name() != 'cli') {
        $jobs = array();
        while ($row = $dbr->fetchObject($res)) {
            $title = Title::makeTitle($row->page_namespace, $row->page_title);
            $jobs[] = new RefreshLinksJob($title, '');
        }
        Job::batchInsert($jobs);
        # Fix: close the profiling section opened above before this early
        # return — it was previously left unbalanced on this code path.
        wfProfileOut(__METHOD__);
        return true;
    }

    # Re-parse each page that transcludes this page and update their tracking links...
    while ($row = $dbr->fetchObject($res)) {
        $title = Title::makeTitle($row->page_namespace, $row->page_title);
        $revision = Revision::newFromTitle($title);
        if (!$revision) {
            $this->error = 'refreshLinks: Article not found "' . $title->getPrefixedDBkey() . '"';
            wfProfileOut(__METHOD__);
            return false;
        }
        wfProfileIn(__METHOD__ . '-parse');
        $options = new ParserOptions();
        $parserOutput = $wgParser->parse($revision->getText(), $title, $options, true, true, $revision->getId());
        wfProfileOut(__METHOD__ . '-parse');
        wfProfileIn(__METHOD__ . '-update');
        $update = new LinksUpdate($title, $parserOutput, false);
        $update->doUpdate();
        wfProfileOut(__METHOD__ . '-update');
    }
    # Fix: the outer profiling section is now closed exactly once, after
    # the loop. Previously wfProfileOut(__METHOD__) ran on every loop
    # iteration, unbalancing the profiler after the first processed row
    # (and never running when the result set was empty). This matches the
    # corrected structure of the later refreshLinks2 implementation.
    wfProfileOut(__METHOD__);
    return true;
}
/**
 * After article was edited and parsed, in case of layer page, save layers to database
 *
 * @since 3.0
 *
 * @param LinksUpdate &$linksUpdate
 *
 * @return true
 */
public static function onLinksUpdateConstructed(LinksUpdate &$linksUpdate)
{
    // Hand the freshly-parsed output and its title to the layer store.
    self::processLayersStoreCandidate(
        $linksUpdate->mParserOutput,
        $linksUpdate->getTitle()
    );

    return true;
}
/**
 * Unsets the approved revision for this page in the approved_revs DB
 * table; calls a "links update" on this page so that category
 * information can be stored correctly, as well as info for
 * extensions such as Semantic MediaWiki; and logs the action.
 */
public static function unsetApproval($title)
{
    global $egApprovedRevsBlankIfUnapproved;

    // Drop the stored approval record first.
    self::deleteRevisionApproval($title);

    // Re-parse the page (or an empty string, when configured to blank
    // unapproved pages) so the link tables reflect the new state.
    $parser = new Parser();
    $parser->setTitle($title);
    $pageText = $egApprovedRevsBlankIfUnapproved ? '' : self::getPageText($title);
    $parserOptions = new ParserOptions();
    $parser->parse($pageText, $title, $parserOptions);

    $linksUpdate = new LinksUpdate($title, $parser->getOutput());
    $linksUpdate->doUpdate();
    self::setPageSearchText($title, $pageText);

    // Record the unapproval in the log and notify listeners.
    $logPage = new LogPage('approval');
    $logPage->addEntry('unapprove', $title, '');
    wfRunHooks('ApprovedRevsRevisionUnapproved', array($parser, $title));
}
/**
 * Run a refreshLinks2 job
 * @return boolean success
 */
function run()
{
    global $wgParser, $wgContLang;
    wfProfileIn(__METHOD__);

    $linkCache = LinkCache::singleton();
    $linkCache->clear();

    if (is_null($this->title)) {
        $this->error = "refreshLinks2: Invalid title";
        wfProfileOut(__METHOD__);
        return false;
    }
    if (!isset($this->params['start']) || !isset($this->params['end'])) {
        $this->error = "refreshLinks2: Invalid params";
        wfProfileOut(__METHOD__);
        return false;
    }

    // Back compat for pre-r94435 jobs
    $table = isset($this->params['table']) ? $this->params['table'] : 'templatelinks';

    # All pages in the given id range that link to this title via $table.
    $titles = $this->title->getBacklinkCache()->getLinks($table, $this->params['start'], $this->params['end']);

    # Not suitable for page load triggered job running!
    # Gracefully switch to refreshLinks jobs if this happens.
    if (php_sapi_name() != 'cli') {
        $jobs = array();
        foreach ($titles as $title) {
            $jobs[] = new RefreshLinksJob($title, '');
        }
        Job::batchInsert($jobs);
        wfProfileOut(__METHOD__);
        return true;
    }

    # Parse as an anonymous user in the content language.
    $options = ParserOptions::newFromUserAndLang(new User(), $wgContLang);

    # Re-parse each page that transcludes this page and update their tracking links...
    foreach ($titles as $title) {
        $revision = Revision::newFromTitle($title);
        if (!$revision) {
            $this->error = 'refreshLinks: Article not found "' . $title->getPrefixedDBkey() . '"';
            wfProfileOut(__METHOD__);
            return false;
        }
        wfProfileIn(__METHOD__ . '-parse');
        $parserOutput = $wgParser->parse($revision->getText(), $title, $options, true, true, $revision->getId());
        wfProfileOut(__METHOD__ . '-parse');
        wfProfileIn(__METHOD__ . '-update');
        $update = new LinksUpdate($title, $parserOutput, false);
        $update->doUpdate();
        wfProfileOut(__METHOD__ . '-update');
        # Keep replication lag under control between pages.
        wfWaitForSlaves();
    }
    wfProfileOut(__METHOD__);
    return true;
}
/**
 * Runs a LinksUpdate for the given title/parser-output pair, then
 * asserts that the named table contains exactly the expected rows.
 */
protected function assertLinksUpdate(Title $title, ParserOutput $parserOutput, $table, $fields, $condition, array $expectedRows)
{
    $linksUpdate = new LinksUpdate($title, $parserOutput);
    $linksUpdate->doUpdate();

    $this->assertSelect($table, $fields, $condition, $expectedRows);
}
/**
 * Runs a revision-aware LinksUpdate inside an explicit transaction,
 * then asserts the categorization rows recorded in recentchanges for
 * the given category match the expectation.
 */
protected function assertRecentChangeByCategorization(Title $pageTitle, ParserOutput $parserOutput, Title $categoryTitle, $expectedRows)
{
    $linksUpdate = new LinksUpdate($pageTitle, $parserOutput);
    $linksUpdate->setRevision(Revision::newFromTitle($pageTitle));

    $linksUpdate->beginTransaction();
    $linksUpdate->doUpdate();
    $linksUpdate->commitTransaction();

    $conditions = array(
        'rc_type' => RC_CATEGORIZE,
        'rc_namespace' => NS_CATEGORY,
        'rc_title' => $categoryTitle->getDBkey(),
    );
    $this->assertSelect('recentchanges', 'rc_title, rc_comment', $conditions, $expectedRows);
}
/**
 * Do some database updates after deletion
 *
 * @param int $id The page_id value of the page being deleted
 * @param Content|null $content Optional page content to be used when determining
 *   the required updates. This may be needed because $this->getContent()
 *   may already return null when the page proper was deleted.
 * @param Revision|null $revision The latest page revision
 */
public function doDeleteUpdates($id, Content $content = null, Revision $revision = null)
{
    try {
        $countable = $this->isCountable();
    } catch (Exception $ex) {
        // fallback for deleting broken pages for which we cannot load the content for
        // some reason. Note that doDeleteArticleReal() already logged this problem.
        $countable = false;
    }

    // Update site status: one edit, minus one page, minus one article
    // if the deleted page was countable.
    DeferredUpdates::addUpdate(new SiteStatsUpdate(0, 1, -(int) $countable, -1));

    // Delete pagelinks, update secondary indexes, etc
    $updates = $this->getDeletionUpdates($content);
    foreach ($updates as $update) {
        DeferredUpdates::addUpdate($update);
    }

    // Reparse any pages transcluding this page
    LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'templatelinks');

    // Reparse any pages including this image
    if ($this->mTitle->getNamespace() == NS_FILE) {
        LinksUpdate::queueRecursiveJobsForTable($this->mTitle, 'imagelinks');
    }

    // Clear caches
    WikiPage::onArticleDelete($this->mTitle);
    ResourceLoaderWikiModule::invalidateModuleCache($this->mTitle, $revision, null, wfWikiID());

    // Reset this object and the Title object
    $this->loadFromRow(false, self::READ_LATEST);

    // Search engine: drop the deleted page from the index.
    DeferredUpdates::addUpdate(new SearchUpdate($id, $this->mTitle));
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * @param array $timestamps Pass an empty array to restore all revisions,
 *   otherwise list the ones to undelete.
 *
 * @return int|false Number of revisions restored, or false when not all
 *   requested archive rows could be found.
 */
private function undeleteRevisions($timestamps)
{
    global $wgParser, $wgDBtype;

    $fname = __CLASS__ . '::' . __FUNCTION__;
    $restoreAll = empty($timestamps);
    $dbw =& wfGetDB(DB_MASTER);
    # Pulls the prefixed table names ($page, $archive) into local scope.
    extract($dbw->tableNames('page', 'archive'));

    # Does this page already exist? We'll have to update it...
    $article = new Article($this->title);
    # Postgres does not take the FOR UPDATE row-lock clause here.
    $options = $wgDBtype == 'postgres' ? '' : 'FOR UPDATE';
    $page = $dbw->selectRow('page', array('page_id', 'page_latest'), array('page_namespace' => $this->title->getNamespace(), 'page_title' => $this->title->getDBkey()), $fname, $options);
    if ($page) {
        # Page already exists. Import the history, and if necessary
        # we'll update the latest revision field in the record.
        $newid = 0;
        $pageId = $page->page_id;
        $previousRevId = $page->page_latest;
    } else {
        # Have to create a new article...
        $newid = $article->insertOn($dbw);
        $pageId = $newid;
        $previousRevId = 0;
    }

    if ($restoreAll) {
        $oldones = '1 = 1'; # All revisions...
    } else {
        # Build an IN(...) clause from the quoted, normalized timestamps.
        $oldts = implode(',', array_map(array(&$dbw, 'addQuotes'), array_map(array(&$dbw, 'timestamp'), $timestamps)));
        $oldones = "ar_timestamp IN ( {$oldts} )";
    }

    /**
     * Restore each revision...
     */
    $result = $dbw->select('archive', array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id'), array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), $fname, array('ORDER BY' => 'ar_timestamp'));
    if ($dbw->numRows($result) < count($timestamps)) {
        # Some requested timestamps are missing from the archive; bail
        # out before touching anything else.
        wfDebug("{$fname}: couldn't find all requested rows\n");
        return false;
    }

    $revision = null;
    $newRevId = $previousRevId;
    $restored = 0;
    while ($row = $dbw->fetchObject($result)) {
        if ($row->ar_text_id) {
            // Revision was deleted in 1.5+; text is in
            // the regular text table, use the reference.
            // Specify null here so the text is
            // dereferenced for page length info if needed.
            $revText = null;
        } else {
            // Revision was deleted in 1.4 or earlier.
            // Text is squashed into the archive row, and
            // a new text table entry will be created for it.
            $revText = Revision::getRevisionText($row, 'ar_');
        }
        $revision = new Revision(array('page' => $pageId, 'id' => $row->ar_rev_id, 'text' => $revText, 'comment' => $row->ar_comment, 'user' => $row->ar_user, 'user_text' => $row->ar_user_text, 'timestamp' => $row->ar_timestamp, 'minor_edit' => $row->ar_minor_edit, 'text_id' => $row->ar_text_id));
        $newRevId = $revision->insertOn($dbw);
        $restored++;
    }

    if ($revision) {
        # FIXME: Update latest if newer as well...
        if ($newid) {
            # FIXME: update article count if changed...
            $article->updateRevisionOn($dbw, $revision, $previousRevId);

            # Finally, clean up the link tables
            $options = new ParserOptions();
            $parserOutput = $wgParser->parse($revision->getText(), $this->title, $options, true, true, $newRevId);
            $u = new LinksUpdate($this->title, $parserOutput);
            $u->doUpdate();
            #TODO: SearchUpdate, etc.
        }

        // WERELATE: watch article
        global $wgUser;
        $watchthis = false;
        if ($newid) {
            # Newly (re)created page: fire the creation hooks and honor
            # the user's watch-on-creation preferences.
            Article::onArticleCreate($this->title);
            $watchthis = $wgUser->getOption('watchcreations') || $wgUser->getOption('watchdefault');
        } else {
            Article::onArticleEdit($this->title);
            $watchthis = $wgUser->getOption('watchdefault');
        }
        if ($watchthis && !$article->getTitle()->userIsWatching()) {
            $article->doWatch();
        }
    } else {
        # Something went terribly wrong!
    }

    # Now that it's safely stored, take it out of the archive
    $dbw->delete('archive', array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), $fname);

    return $restored;
}
/**
 * Re-runs link updates when the templates used by $parserOutput differ
 * from those recorded in the templatelinks table, so cascading
 * protection applies as soon as the changes become visible.
 *
 * @param $parserOutput ParserOutput for the current revision
 */
protected function doCascadeProtectionUpdates($parserOutput)
{
    # Only relevant for the latest revision of a page with cascading
    # restrictions, and never on a read-only wiki.
    if (!$this->isCurrent() || wfReadOnly() || !$this->mTitle->areRestrictionsCascading()) {
        return;
    }

    // templatelinks table may have become out of sync,
    // especially if using variable-based transclusions.
    // For paranoia, check if things have changed and if
    // so apply updates to the database. This will ensure
    // that cascaded protections apply as soon as the changes
    // are visible.

    # Get templates from templatelinks
    $id = $this->mTitle->getArticleID();
    $tlTemplates = array();
    $dbr = wfGetDB(DB_SLAVE);
    $res = $dbr->select(array('templatelinks'), array('tl_namespace', 'tl_title'), array('tl_from' => $id), __METHOD__);
    global $wgContLang;
    # NOTE(review): $wgContLang is declared but never used in this
    # method — presumably a leftover; confirm before removing.
    foreach ($res as $row) {
        $tlTemplates["{$row->tl_namespace}:{$row->tl_title}"] = true;
    }

    # Get templates from parser output.
    $poTemplates = array();
    foreach ($parserOutput->getTemplates() as $ns => $templates) {
        foreach ($templates as $dbk => $id) {
            $poTemplates["{$ns}:{$dbk}"] = true;
        }
    }

    # Get the diff
    # Note that we simulate array_diff_key in PHP <5.0.x
    $templates_diff = array_diff_key($poTemplates, $tlTemplates);

    if (count($templates_diff) > 0) {
        # Whee, link updates time.
        $u = new LinksUpdate($this->mTitle, $parserOutput, false);
        $u->doUpdate();
    }
}
/**
 * Add the primary page-view wikitext to the output buffer
 * Saves the text into the parser cache if possible.
 * Updates templatelinks if it is out of date.
 *
 * @param $text String
 * @param $cache Boolean
 */
public function outputWikiText($text, $cache = true)
{
    global $wgParser, $wgUser, $wgOut, $wgEnableParserCache, $wgUseFileCache;

    # Parse with tidy and the limit report enabled, then restore the
    # shared OutputPage parser options to their previous state.
    $popts = $wgOut->parserOptions();
    $popts->setTidy(true);
    $popts->enableLimitReport();
    $parserOutput = $wgParser->parse($text, $this->mTitle, $popts, true, true, $this->getRevIdFetched());
    $popts->setTidy(false);
    $popts->enableLimitReport(false);

    # Save to the parser cache unless the output was marked
    # uncacheable (cache time of -1).
    # NOTE(review): the `$this &&` test is always true inside an
    # instance method — looks vestigial; confirm before removing.
    if ($wgEnableParserCache && $cache && $this && $parserOutput->getCacheTime() != -1) {
        $parserCache = ParserCache::singleton();
        $parserCache->save($parserOutput, $this, $wgUser);
    }

    // Make sure file cache is not used on uncacheable content.
    // Output that has magic words in it can still use the parser cache
    // (if enabled), though it will generally expire sooner.
    if ($parserOutput->getCacheTime() == -1 || $parserOutput->containsOldMagic()) {
        $wgUseFileCache = false;
    }

    if ($this->isCurrent() && !wfReadOnly() && $this->mTitle->areRestrictionsCascading()) {
        // templatelinks table may have become out of sync,
        // especially if using variable-based transclusions.
        // For paranoia, check if things have changed and if
        // so apply updates to the database. This will ensure
        // that cascaded protections apply as soon as the changes
        // are visible.

        # Get templates from templatelinks
        $id = $this->mTitle->getArticleID();
        $tlTemplates = array();
        $dbr = wfGetDB(DB_SLAVE);
        $res = $dbr->select(array('templatelinks'), array('tl_namespace', 'tl_title'), array('tl_from' => $id), __METHOD__);
        global $wgContLang;
        if ($res !== false) {
            foreach ($res as $row) {
                # Stored entries become "NamespaceText:title" strings.
                $tlTemplates[] = $wgContLang->getNsText($row->tl_namespace) . ':' . $row->tl_title;
            }
        }

        # Get templates from parser output.
        $poTemplates_allns = $parserOutput->getTemplates();
        $poTemplates = array();
        foreach ($poTemplates_allns as $ns_templates) {
            # Flatten the per-namespace arrays into a single array.
            $poTemplates = array_merge($poTemplates, $ns_templates);
        }

        # Get the diff
        # NOTE(review): $poTemplates values appear to be page ids (the
        # values of getTemplates(), per the key-based variants elsewhere
        # in this codebase), while $tlTemplates holds "Ns:title" strings,
        # so this value-based diff may never match and could trigger a
        # LinksUpdate on every view of a template-using page — verify.
        $templates_diff = array_diff($poTemplates, $tlTemplates);
        if (count($templates_diff) > 0) {
            # Whee, link updates time.
            $u = new LinksUpdate($this->mTitle, $parserOutput);
            $u->doUpdate();
        }
    }

    $wgOut->addParserOutput($parserOutput);
}