/**
 * Maintenance entry point: scan all pages flagged as redirects and un-flag
 * those whose current revision text is no longer a valid redirect, fixing
 * the page table, the redirect table, and the link cache.
 */
public function execute() {
	global $wgCityId;
	$db = wfGetDB(DB_MASTER);
	(new WikiaSQL())->SELECT('*')->FROM('page')->WHERE('page_is_redirect')->EQUAL_TO(1)->runLoop($db, function ($a, $row) use($db) {
		$title = Title::newFromID($row->page_id);
		// BUG FIX: Title::newFromID() returns null when the page row vanished
		// between the SELECT and now; the original code fataled on ->isDeleted().
		if ($title === null || $title->isDeleted()) {
			return;
		}
		$rev = Revision::newFromTitle($title);
		// BUG FIX: guard against a missing/unreadable revision; the original
		// code fataled on ->getText() when newFromTitle() returned null.
		if (!$rev) {
			return;
		}
		$text = $rev->getText();
		$rt = Title::newFromRedirectRecurse($text);
		if (!$rt) {
			// page is marked as redirect but $text is not valid redirect
			$this->output('Fixed ID: ' . $title->getArticleID() . ' Title: ' . $title->getText() . "\n");
			// Fix page table
			(new WikiaSQL())->UPDATE('page')->SET('page_is_redirect', 0)->WHERE('page_id')->EQUAL_TO($row->page_id)->RUN($db);
			// remove redirect from redirect table
			(new WikiaSQL())->DELETE('redirect')->WHERE('rd_from')->EQUAL_TO($row->page_id)->RUN($db);
			// clear cache: re-register the page with redirect flag 0
			LinkCache::singleton()->addGoodLinkObj($row->page_id, $title, strlen($text), 0, $rev->getId());
			if ($title->getNamespace() == NS_FILE) {
				// File redirects are cached separately by the repo
				RepoGroup::singleton()->getLocalRepo()->invalidateImageRedirect($title);
			}
		}
	});
}
/**
 * Per-test setup: reset the list of pages scheduled for deletion and flush
 * the LinkCache so cached existence/redirect state from earlier tests
 * cannot leak into this one.
 */
protected function setUp() {
	parent::setUp();
	$this->pages_to_delete = array();
	LinkCache::singleton()->clear(); # avoid cached redirect status, etc
}
/**
 * Re-parse the page with the given id and rebuild its link tables.
 *
 * Sets $wgTitle as a side effect (legacy parser code reads it).
 *
 * @param int $id Page id to refresh
 */
function fixLinksFromArticle( $id ) {
	global $wgTitle, $wgParser;
	$wgTitle = Title::newFromID( $id );
	$dbw = wfGetDB( DB_MASTER );
	// Flush cached link existence data so the parse sees fresh state.
	$linkCache =& LinkCache::singleton();
	$linkCache->clear();
	if ( is_null( $wgTitle ) ) {
		return;
	}
	$dbw->begin();
	$revision = Revision::newFromTitle( $wgTitle );
	if ( !$revision ) {
		// BUG FIX: previously this returned with the transaction still open,
		// leaving a dangling begin() on the master connection.
		$dbw->rollback();
		return;
	}
	$options = new ParserOptions;
	// Full parse of the latest revision; pass the rev id so {{REVISIONID}}
	// and friends resolve correctly.
	$parserOutput = $wgParser->parse( $revision->getText(), $wgTitle, $options, true, true, $revision->getId() );
	$update = new LinksUpdate( $wgTitle, $parserOutput, false );
	$update->doUpdate();
	$dbw->commit();
}
/**
 * Run a refreshLinks job
 *
 * Re-parses the current revision of $this->title and rewrites its link
 * tables via LinksUpdate.
 *
 * @return boolean success
 */
function run() {
	global $wgParser;
	wfProfileIn(__METHOD__);
	// Clear cached link existence data so the parse sees fresh state.
	$linkCache =& LinkCache::singleton();
	$linkCache->clear();
	if (is_null($this->title)) {
		$this->error = "refreshLinks: Invalid title";
		wfProfileOut(__METHOD__);
		return false;
	}
	$revision = Revision::newFromTitle($this->title);
	if (!$revision) {
		$this->error = 'refreshLinks: Article not found "' . $this->title->getPrefixedDBkey() . '"';
		wfProfileOut(__METHOD__);
		return false;
	}
	wfProfileIn(__METHOD__ . '-parse');
	$options = new ParserOptions();
	// Full parse; the revision id is passed so {{REVISIONID}} etc. resolve.
	$parserOutput = $wgParser->parse($revision->getText(), $this->title, $options, true, true, $revision->getId());
	wfProfileOut(__METHOD__ . '-parse');
	wfProfileIn(__METHOD__ . '-update');
	// Rewrite link tables from the fresh ParserOutput (non-recursive).
	$update = new LinksUpdate($this->title, $parserOutput, false);
	$update->doUpdate();
	wfProfileOut(__METHOD__ . '-update');
	wfProfileOut(__METHOD__);
	return true;
}
/**
 * Run job
 *
 * Refreshes the semantic data stored for $this->title: deletes stored data
 * for pages that no longer exist, otherwise re-parses the latest revision
 * and persists the resulting semantic data via SMWParseData.
 *
 * @return boolean success
 */
function run() {
	wfProfileIn('SMWUpdateJob::run (SMW)');
	global $wgParser;
	// Avoid stale cached link/redirect status during the parse.
	LinkCache::singleton()->clear();
	if (is_null($this->title)) {
		$this->error = "SMWUpdateJob: Invalid title";
		wfProfileOut('SMWUpdateJob::run (SMW)');
		return false;
	} elseif (!$this->title->exists()) {
		smwfGetStore()->deleteSubject($this->title); // be sure to clear the data
		wfProfileOut('SMWUpdateJob::run (SMW)');
		return true;
	}
	$revision = Revision::newFromTitle($this->title);
	if (!$revision) {
		$this->error = 'SMWUpdateJob: Page exists but no revision was found for "' . $this->title->getPrefixedDBkey() . '"';
		wfProfileOut('SMWUpdateJob::run (SMW)');
		return false;
	}
	wfProfileIn(__METHOD__ . '-parse');
	$options = new ParserOptions();
	// The parse gathers the page's semantic annotations into $output.
	$output = $wgParser->parse($revision->getText(), $this->title, $options, true, true, $revision->getID());
	wfProfileOut(__METHOD__ . '-parse');
	wfProfileIn(__METHOD__ . '-update');
	// Persist the semantic data gathered during the parse.
	SMWParseData::storeData($output, $this->title, false);
	wfProfileOut(__METHOD__ . '-update');
	wfProfileOut('SMWUpdateJob::run (SMW)');
	return true;
}
/**
 * Verify that the edit-user-talk flyout output matches the expected
 * pattern (including the section link fragment) for a given section title.
 *
 * @dataProvider provider_editUserTalk
 * @param string $pattern Regex the formatted event must match
 * @param mixed $sectionTitle Section title stored on the mocked event
 * @param mixed $format Output format passed to format()
 */
public function testEditUserTalkFlyoutSectionLinkFragment($pattern, $sectionTitle, $format) {
	// Required hack so parser doesnt turn the links into redlinks which contain no fragment
	global $wgUser;
	// Pre-seed the link cache so the user's talk page counts as existing.
	LinkCache::singleton()->addGoodLinkObj(42, $wgUser->getTalkPage());
	$event = $this->mockEvent('edit-user-talk', array('section-title' => $sectionTitle));
	$this->assertRegExp($pattern, $this->format($event, $format));
}
/**
 * Parse the text from a given Revision
 *
 * Triggers a parse of the revision's content (the ParserOutput itself is
 * discarded) and optionally flushes the link cache afterwards.
 *
 * @param Revision $revision
 */
function runParser(Revision $revision) {
	$revision->getContent()->getParserOutput($revision->getTitle(), $revision->getId());
	if (!$this->clearLinkCache) {
		return;
	}
	// Drop cached link existence info between parses.
	$this->linkCache->clear();
}
/**
 * Exercise LinkRenderer::getLinkClasses() against a hand-built LinkCache:
 * plain pages, redirects, stub threshold behaviour, and a NS_USER page.
 */
public function testGetLinkClasses() {
	$wanCache = ObjectCache::getMainWANInstance();
	$titleFormatter = MediaWikiServices::getInstance()->getTitleFormatter();
	$linkCache = new LinkCache($titleFormatter, $wanCache);
	$foobarTitle = new TitleValue(NS_MAIN, 'FooBar');
	$redirectTitle = new TitleValue(NS_MAIN, 'Redirect');
	$userTitle = new TitleValue(NS_USER, 'Someuser');
	// addGoodLinkObj(id, title, len, redirect): all three exist with length
	// 10; only 'Redirect' carries the redirect flag.
	$linkCache->addGoodLinkObj(1, $foobarTitle, 10, 0);
	$linkCache->addGoodLinkObj(2, $redirectTitle, 10, 1);
	$linkCache->addGoodLinkObj(3, $userTitle, 10, 0);
	$linkRenderer = new LinkRenderer($titleFormatter, $linkCache);
	$linkRenderer->setStubThreshold(0);
	$this->assertEquals('', $linkRenderer->getLinkClasses($foobarTitle));
	// A threshold above the page length (10) makes the page render as a stub.
	$linkRenderer->setStubThreshold(20);
	$this->assertEquals('stub', $linkRenderer->getLinkClasses($foobarTitle));
	$linkRenderer->setStubThreshold(0);
	$this->assertEquals('mw-redirect', $linkRenderer->getLinkClasses($redirectTitle));
	// NOTE(review): asserts no stub class for a NS_USER page even above the
	// threshold — presumably user pages are exempt from stub styling; verify
	// against LinkRenderer.
	$linkRenderer->setStubThreshold(20);
	$this->assertEquals('', $linkRenderer->getLinkClasses($userTitle));
}
/**
 * Run a refreshLinks2 job
 *
 * Re-parses every page that transcludes $this->title within the given
 * tl_from id range and rewrites their link tables. Outside CLI mode,
 * expands into individual refreshLinks jobs instead of parsing inline.
 *
 * @return boolean success
 */
function run() {
	global $wgParser;
	wfProfileIn(__METHOD__);
	$linkCache = LinkCache::singleton();
	$linkCache->clear();
	if (is_null($this->title)) {
		$this->error = "refreshLinks2: Invalid title";
		wfProfileOut(__METHOD__);
		return false;
	}
	if (!isset($this->params['start']) || !isset($this->params['end'])) {
		$this->error = "refreshLinks2: Invalid params";
		wfProfileOut(__METHOD__);
		return false;
	}
	$start = intval($this->params['start']);
	$end = intval($this->params['end']);
	$dbr = wfGetDB(DB_SLAVE);
	$res = $dbr->select(array('templatelinks', 'page'), array('page_namespace', 'page_title'), array('page_id=tl_from', "tl_from >= '{$start}'", "tl_from <= '{$end}'", 'tl_namespace' => $this->title->getNamespace(), 'tl_title' => $this->title->getDBkey()), __METHOD__);
	# Not suitable for page load triggered job running!
	# Gracefully switch to refreshLinks jobs if this happens.
	if (php_sapi_name() != 'cli') {
		$jobs = array();
		while ($row = $dbr->fetchObject($res)) {
			$title = Title::makeTitle($row->page_namespace, $row->page_title);
			$jobs[] = new RefreshLinksJob($title, '');
		}
		Job::batchInsert($jobs);
		// BUG FIX: close the profiling section before this early return so
		// the wfProfileIn/wfProfileOut calls stay balanced.
		wfProfileOut(__METHOD__);
		return true;
	}
	# Re-parse each page that transcludes this page and update their tracking links...
	while ($row = $dbr->fetchObject($res)) {
		$title = Title::makeTitle($row->page_namespace, $row->page_title);
		$revision = Revision::newFromTitle($title);
		if (!$revision) {
			$this->error = 'refreshLinks: Article not found "' . $title->getPrefixedDBkey() . '"';
			wfProfileOut(__METHOD__);
			return false;
		}
		wfProfileIn(__METHOD__ . '-parse');
		$options = new ParserOptions();
		$parserOutput = $wgParser->parse($revision->getText(), $title, $options, true, true, $revision->getId());
		wfProfileOut(__METHOD__ . '-parse');
		wfProfileIn(__METHOD__ . '-update');
		$update = new LinksUpdate($title, $parserOutput, false);
		$update->doUpdate();
		wfProfileOut(__METHOD__ . '-update');
	}
	// BUG FIX: this wfProfileOut(__METHOD__) was previously inside the loop,
	// unbalancing the profiler after the first iteration.
	wfProfileOut(__METHOD__);
	return true;
}
/**
 * Run a refreshLinks2 job
 *
 * Re-parses every page in the backlink range params['start']..params['end']
 * for the given link table and rewrites their link tables. Outside CLI
 * mode, expands into individual refreshLinks jobs instead of parsing inline.
 *
 * @return boolean success
 */
function run() {
	global $wgParser, $wgContLang;
	wfProfileIn(__METHOD__);
	$linkCache = LinkCache::singleton();
	$linkCache->clear();
	if (is_null($this->title)) {
		$this->error = "refreshLinks2: Invalid title";
		wfProfileOut(__METHOD__);
		return false;
	}
	if (!isset($this->params['start']) || !isset($this->params['end'])) {
		$this->error = "refreshLinks2: Invalid params";
		wfProfileOut(__METHOD__);
		return false;
	}
	// Back compat for pre-r94435 jobs
	$table = isset($this->params['table']) ? $this->params['table'] : 'templatelinks';
	$titles = $this->title->getBacklinkCache()->getLinks($table, $this->params['start'], $this->params['end']);
	# Not suitable for page load triggered job running!
	# Gracefully switch to refreshLinks jobs if this happens.
	if (php_sapi_name() != 'cli') {
		$jobs = array();
		foreach ($titles as $title) {
			$jobs[] = new RefreshLinksJob($title, '');
		}
		Job::batchInsert($jobs);
		wfProfileOut(__METHOD__);
		return true;
	}
	$options = ParserOptions::newFromUserAndLang(new User(), $wgContLang);
	# Re-parse each page that transcludes this page and update their tracking links...
	foreach ($titles as $title) {
		$revision = Revision::newFromTitle($title);
		if (!$revision) {
			$this->error = 'refreshLinks: Article not found "' . $title->getPrefixedDBkey() . '"';
			wfProfileOut(__METHOD__);
			return false;
		}
		wfProfileIn(__METHOD__ . '-parse');
		$parserOutput = $wgParser->parse($revision->getText(), $title, $options, true, true, $revision->getId());
		wfProfileOut(__METHOD__ . '-parse');
		wfProfileIn(__METHOD__ . '-update');
		$update = new LinksUpdate($title, $parserOutput, false);
		$update->doUpdate();
		wfProfileOut(__METHOD__ . '-update');
		// Throttle so replication can keep up with the writes.
		wfWaitForSlaves();
	}
	wfProfileOut(__METHOD__);
	return true;
}
/**
 * Run method
 *
 * After category page oldtitle was moved to newtitle, rewrites the
 * wikitext of $this->updatetitle so that all [[Category:oldtitle]] style
 * links (with or without a pipe, upper/lowercase first letters of both the
 * namespace name and the title) point at newtitle, saves the edit as a
 * bot, and re-parses the new text.
 *
 * @return boolean success
 */
function run() {
	global $wgParser, $wgContLang;
	$linkCache =& LinkCache::singleton();
	$linkCache->clear();
	smwLog("start", "RF", "category refactoring");
	$article = new Article($this->updatetitle);
	$latestrevision = Revision::newFromTitle($this->updatetitle);
	smwLog("oldtitle: " . $this->oldtitle, "RF", "category refactoring");
	smwLog("newtitle: " . $this->newtitle, "RF", "category refactoring");
	if (!$latestrevision) {
		$this->error = "SMW_UpdateCategoriesAfterMoveJob: Article not found " . $this->updatetitle->getPrefixedDBkey() . " ";
		wfDebug($this->error);
		return false;
	}
	$oldtext = $latestrevision->getRawText();
	//Category X moved to Y
	// Links changed accordingly:
	// Localized category namespace name, plus a lowercase-first variant.
	// NOTE(review): the patterns below use "(" / ")" as PCRE delimiters and
	// interpolate the titles unescaped — regex metacharacters in a title
	// would misfire (preg_quote would be safer); verify inputs.
	$cat = $wgContLang->getNsText(NS_CATEGORY);
	$catlcfirst = strtolower(substr($cat, 0, 1)) . substr($cat, 1);
	$oldtitlelcfirst = strtolower(substr($this->oldtitle, 0, 1)) . substr($this->oldtitle, 1);
	// [[[C|c]ategory:[S|s]omeCategory]] -> [[[C|c]ategory:[S|s]omeOtherCategory]]
	$search[0] = '(\\[\\[(\\s*)' . $cat . '(\\s*):(\\s*)' . $this->oldtitle . '(\\s*)\\]\\])';
	$replace[0] = '[[${1}' . $cat . '${2}:${3}' . $this->newtitle . '${4}]]';
	$search[1] = '(\\[\\[(\\s*)' . $catlcfirst . '(\\s*):(\\s*)' . $this->oldtitle . '(\\s*)\\]\\])';
	$replace[1] = '[[${1}' . $catlcfirst . '${2}:${3}' . $this->newtitle . '${4}]]';
	$search[2] = '(\\[\\[(\\s*)' . $cat . '(\\s*):(\\s*)' . $oldtitlelcfirst . '(\\s*)\\]\\])';
	$replace[2] = '[[${1}' . $cat . '${2}:${3}' . $this->newtitle . '${4}]]';
	$search[3] = '(\\[\\[(\\s*)' . $catlcfirst . '(\\s*):(\\s*)' . $oldtitlelcfirst . '(\\s*)\\]\\])';
	$replace[3] = '[[${1}' . $catlcfirst . '${2}:${3}' . $this->newtitle . '${4}]]';
	// [[[C|c]ategory:[S|s]omeCategory | m]] -> [[[C|c]ategory:[S|s]omeOtherCategory | m ]]
	$search[4] = '(\\[\\[(\\s*)' . $cat . '(\\s*):(\\s*)' . $this->oldtitle . '(\\s*)\\|([^]]*)\\]\\])';
	$replace[4] = '[[${1}' . $cat . '${2}:${3}' . $this->newtitle . '${4}|${5}]]';
	$search[5] = '(\\[\\[(\\s*)' . $catlcfirst . '(\\s*):(\\s*)' . $this->oldtitle . '(\\s*)\\|([^]]*)\\]\\])';
	$replace[5] = '[[${1}' . $catlcfirst . '${2}:${3}' . $this->newtitle . '${4}|${5}]]';
	$search[6] = '(\\[\\[(\\s*)' . $cat . '(\\s*):(\\s*)' . $oldtitlelcfirst . '(\\s*)\\|([^]]*)\\]\\])';
	$replace[6] = '[[${1}' . $cat . '${2}:${3}' . $this->newtitle . '${4}|${5}]]';
	$search[7] = '(\\[\\[(\\s*)' . $catlcfirst . '(\\s*):(\\s*)' . $oldtitlelcfirst . '(\\s*)\\|([^]]*)\\]\\])';
	$replace[7] = '[[${1}' . $catlcfirst . '${2}:${3}' . $this->newtitle . '${4}|${5}]]';
	$newtext = preg_replace($search, $replace, $oldtext);
	$summary = 'Link(s) to ' . $this->newtitle . ' updated after page move by SMW_UpdateCategoriesAfterMoveJob. ' . $this->oldtitle . ' has been moved to ' . $this->newtitle;
	$article->doEdit($newtext, $summary, EDIT_FORCE_BOT);
	smwLog("finished editing article", "RF", "category refactoring");
	$options = new ParserOptions();
	// Re-parse so semantic data reflects the edited text.
	$wgParser->parse($newtext, $this->updatetitle, $options, true, true, $latestrevision->getId());
	smwLog("finished parsing semantic data", "RF", "category refactoring");
	return true;
}
/**
 * Run a refreshLinks2 job
 *
 * Base jobs (no start/end params) either insert per-title jobs directly
 * when the backlink count is small, or insert partition jobs covering id
 * ranges; partition jobs expand into per-title jobs. A master position is
 * threaded through the params so leaf jobs can wait out slave lag.
 *
 * @return boolean success
 */
function run() {
	global $wgUpdateRowsPerJob;
	$linkCache = LinkCache::singleton();
	$linkCache->clear();
	if (is_null($this->title)) {
		$this->error = "refreshLinks2: Invalid title";
		return false;
	}
	// Back compat for pre-r94435 jobs
	$table = isset($this->params['table']) ? $this->params['table'] : 'templatelinks';
	// Avoid slave lag when fetching templates.
	// When the outermost job is run, we know that the caller that enqueued it must have
	// committed the relevant changes to the DB by now. At that point, record the master
	// position and pass it along as the job recursively breaks into smaller range jobs.
	// Hopefully, when leaf jobs are popped, the slaves will have reached that position.
	if (isset($this->params['masterPos'])) {
		$masterPos = $this->params['masterPos'];
	} elseif (wfGetLB()->getServerCount() > 1) {
		$masterPos = wfGetLB()->getMasterPos();
	} else {
		// Single-server setup: no replication position to wait for.
		$masterPos = false;
	}
	$tbc = $this->title->getBacklinkCache();
	$jobs = array(); // jobs to insert
	if (isset($this->params['start']) && isset($this->params['end'])) {
		# This is a partition job to trigger the insertion of leaf jobs...
		$jobs = array_merge($jobs, $this->getSingleTitleJobs($table, $masterPos));
	} else {
		# This is a base job to trigger the insertion of partitioned jobs...
		if ($tbc->getNumLinks($table, $wgUpdateRowsPerJob + 1) <= $wgUpdateRowsPerJob) {
			# Just directly insert the single per-title jobs
			$jobs = array_merge($jobs, $this->getSingleTitleJobs($table, $masterPos));
		} else {
			# Insert the partition jobs to make per-title jobs
			foreach ($tbc->partition($table, $wgUpdateRowsPerJob) as $batch) {
				list($start, $end) = $batch;
				$jobs[] = new RefreshLinksJob2($this->title, array('table' => $table, 'start' => $start, 'end' => $end, 'masterPos' => $masterPos) + $this->getRootJobParams());
			}
		}
	}
	if (count($jobs)) {
		JobQueueGroup::singleton()->push($jobs);
	}
	return true;
}
/**
 * Run method
 *
 * After property page oldtitle was moved to newtitle, rewrites the
 * wikitext of $this->updatetitle so that all [[oldtitle::value]] and
 * [[oldtitle:=value]] annotations (and lowercase-first variants) point at
 * newtitle, saves the edit as a bot, and re-parses the new text.
 *
 * @return boolean success
 */
function run() {
	global $wgParser;
	$linkCache =& LinkCache::singleton();
	$linkCache->clear();
	smwLog("start", "RF", "property refactoring");
	$article = new Article($this->updatetitle);
	$latestrevision = Revision::newFromTitle($this->updatetitle);
	smwLog("oldtitle: " . $this->oldtitle, "RF", "property refactoring");
	smwLog("newtitle: " . $this->newtitle, "RF", "property refactoring");
	if (!$latestrevision) {
		$this->error = "SMW_UpdatePropertiesAfterMoveJob: Article not found " . $this->updatetitle->getPrefixedDBkey() . " ";
		wfDebug($this->error);
		return false;
	}
	$oldtext = $latestrevision->getRawText();
	//Page X moved to Y
	// Links changed accordingly:
	// NOTE(review): the patterns below use "(" / ")" as PCRE delimiters and
	// interpolate the titles unescaped — regex metacharacters in a title
	// would misfire (preg_quote would be safer); verify inputs.
	// [[X::m]] -> [[Y::m]]
	$search[0] = '(\\[\\[(\\s*)' . $this->oldtitle . '(\\s*)::([^]]*)\\]\\])';
	$replace[0] = '[[${1}' . $this->newtitle . '${2}::${3}]]';
	// [[X:=m]] -> [[Y:=m]]
	$search[1] = '(\\[\\[(\\s*)' . $this->oldtitle . '(\\s*):=([^]]*)\\]\\])';
	$replace[1] = '[[${1}' . $this->newtitle . '${2}:=${3}]]';
	// TODO check if the wiki is case sensitive on the first letter
	// This is not the case for the Halo wikis
	$oldtitlelcfirst = strtolower(substr($this->oldtitle, 0, 1)) . substr($this->oldtitle, 1);
	// [[x::m]] -> [[Y::m]]
	$search[2] = '(\\[\\[(\\s*)' . $oldtitlelcfirst . '(\\s*)::([^]]*)\\]\\])';
	$replace[2] = '[[${1}' . $this->newtitle . '${2}::${3}]]';
	// [[x:=m]] -> [[Y:=m]]
	$search[3] = '(\\[\\[(\\s*)' . $oldtitlelcfirst . '(\\s*):=([^]]*)\\]\\])';
	$replace[3] = '[[${1}' . $this->newtitle . '${2}:=${3}]]';
	$newtext = preg_replace($search, $replace, $oldtext);
	$summary = 'Link(s) to ' . $this->newtitle . ' updated after page move by SMW_UpdatePropertiesAfterMoveJob. ' . $this->oldtitle . ' has been moved to ' . $this->newtitle;
	$article->doEdit($newtext, $summary, EDIT_FORCE_BOT);
	smwLog("finished editing article", "RF", "property refactoring");
	$options = new ParserOptions();
	// Re-parse so semantic data reflects the edited text.
	$wgParser->parse($newtext, $this->updatetitle, $options, true, true, $latestrevision->getId());
	smwLog("finished parsing semantic data", "RF", "property refactoring");
	return true;
}
/**
 * Static function to get a template
 * Can be overridden via ParserOptions::setTemplateCallback().
 *
 * Resolves up to one redirect, records a dependency entry for every title
 * visited, and lets extensions pick a different revision (or skip the
 * fetch entirely) via the BeforeParserFetchTemplateAndtitle hook.
 *
 * @param Title $title
 * @param Parser $parser
 *
 * @return array ('text' => string|false, 'finalTitle' => Title, 'deps' => array)
 */
static function statelessFetchTemplate($title, $parser = false) {
	$text = $skip = false;
	$finalTitle = $title;
	$deps = array();
	# Loop to fetch the article, with up to 1 redirect
	for ($i = 0; $i < 2 && is_object($title); $i++) {
		# Give extensions a chance to select the revision instead
		$id = false; # Assume current
		wfRunHooks('BeforeParserFetchTemplateAndtitle', array($parser, $title, &$skip, &$id));
		if ($skip) {
			// Hook vetoed the fetch: record the dependency and bail out.
			$text = false;
			$deps[] = array('title' => $title, 'page_id' => $title->getArticleID(), 'rev_id' => null);
			break;
		}
		# Get the revision
		$rev = $id ? Revision::newFromId($id) : Revision::newFromTitle($title, false, Revision::READ_NORMAL);
		$rev_id = $rev ? $rev->getId() : 0;
		# If there is no current revision, there is no page
		if ($id === false && !$rev) {
			// Cache the nonexistence so later lookups skip the DB.
			$linkCache = LinkCache::singleton();
			$linkCache->addBadLinkObj($title);
		}
		$deps[] = array('title' => $title, 'page_id' => $title->getArticleID(), 'rev_id' => $rev_id);
		if ($rev && !$title->equals($rev->getTitle())) {
			# We fetched a rev from a different title; register it too...
			$deps[] = array('title' => $rev->getTitle(), 'page_id' => $rev->getPage(), 'rev_id' => $rev_id);
		}
		if ($rev) {
			$content = $rev->getContent();
			$text = $content ? $content->getWikitextForTransclusion() : null;
			if ($text === false || $text === null) {
				// Content missing or not transcludable as wikitext.
				$text = false;
				break;
			}
		} elseif ($title->getNamespace() == NS_MEDIAWIKI) {
			// MediaWiki-namespace pages fall back to the message system.
			global $wgContLang;
			$message = wfMessage($wgContLang->lcfirst($title->getText()))->inContentLanguage();
			if (!$message->exists()) {
				$text = false;
				break;
			}
			$content = $message->content();
			$text = $message->plain();
		} else {
			break;
		}
		if (!$content) {
			break;
		}
		# Redirect?
		// If the content is a redirect, loop once more with its target.
		$finalTitle = $title;
		$title = $content->getRedirectTarget();
	}
	return array('text' => $text, 'finalTitle' => $finalTitle, 'deps' => $deps);
}
/**
 * Update the page record to point to a newly saved revision.
 *
 * @param IDatabase $dbw
 * @param Revision $revision For ID number, and text used to set
 *   length and redirect status fields
 * @param int $lastRevision If given, will not overwrite the page field
 *   when different from the currently set value.
 *   Giving 0 indicates the new page flag should be set on.
 * @param bool $lastRevIsRedirect If given, will optimize adding and
 *   removing rows in redirect table.
 * @return bool Success; false if the page row was missing or page_latest changed
 */
public function updateRevisionOn($dbw, $revision, $lastRevision = null, $lastRevIsRedirect = null) {
	global $wgContentHandlerUseDB;
	// Assertion to try to catch T92046
	if ((int) $revision->getId() === 0) {
		throw new InvalidArgumentException(__METHOD__ . ': Revision has ID ' . var_export($revision->getId(), 1));
	}
	$content = $revision->getContent();
	$len = $content ? $content->getSize() : 0;
	$rt = $content ? $content->getUltimateRedirectTarget() : null;
	$conditions = ['page_id' => $this->getId()];
	if (!is_null($lastRevision)) {
		// An extra check against threads stepping on each other
		$conditions['page_latest'] = $lastRevision;
	}
	$row = [
		'page_latest' => $revision->getId(),
		'page_touched' => $dbw->timestamp($revision->getTimestamp()),
		'page_is_new' => $lastRevision === 0 ? 1 : 0,
		'page_is_redirect' => $rt !== null ? 1 : 0,
		'page_len' => $len,
	];
	if ($wgContentHandlerUseDB) {
		$row['page_content_model'] = $revision->getContentModel();
	}
	$dbw->update('page', $row, $conditions, __METHOD__);
	// Zero affected rows means the page row was missing or page_latest moved.
	$result = $dbw->affectedRows() > 0;
	if ($result) {
		$this->updateRedirectOn($dbw, $rt, $lastRevIsRedirect);
		$this->setLastEdit($revision);
		$this->mLatest = $revision->getId();
		$this->mIsRedirect = (bool) $rt;
		// Update the LinkCache.
		LinkCache::singleton()->addGoodLinkObj($this->getId(), $this->mTitle, $len, $this->mIsRedirect, $this->mLatest, $revision->getContentModel());
	}
	return $result;
}
/**
 * Get an array of Title objects which are articles which use this file
 * Also adds their IDs to the link cache
 *
 * This is mostly copied from Title::getLinksTo()
 *
 * @deprecated Use HTMLCacheUpdate, this function uses too much memory
 *
 * @param string $options Extra SQL appended to the query (e.g. 'FOR UPDATE');
 *        a non-empty value also switches the read to the master DB
 * @return array of Title objects linking to this file
 */
function getLinksTo($options = '') {
	wfProfileIn(__METHOD__);
	// Note: use local DB not repo DB, we want to know local links
	if ($options) {
		$db = wfGetDB(DB_MASTER);
	} else {
		$db = wfGetDB(DB_SLAVE);
	}
	$linkCache = LinkCache::singleton();
	list($page, $imagelinks) = $db->tableNamesN('page', 'imagelinks');
	$encName = $db->addQuotes($this->getName());
	// BUG FIX: the select list previously ended with a trailing comma
	// ("...page_is_redirect,\n\t\t\tFROM ..."), which is invalid SQL and
	// would make the query fail.
	$sql = "SELECT page_namespace,page_title,page_id,page_len,page_is_redirect\n\t\t\tFROM {$page},{$imagelinks} WHERE page_id=il_from AND il_to={$encName} {$options}";
	$res = $db->query($sql, __METHOD__);
	$retVal = array();
	if ($db->numRows($res)) {
		while ($row = $db->fetchObject($res)) {
			if ($titleObj = Title::newFromRow($row)) {
				// Seed the link cache so later existence checks avoid DB hits.
				$linkCache->addGoodLinkObj($row->page_id, $titleObj, $row->page_len, $row->page_is_redirect);
				$retVal[] = $titleObj;
			}
		}
	}
	$db->freeResult($res);
	wfProfileOut(__METHOD__);
	return $retVal;
}
/**
 * Get an array of Title objects which are articles which use this file
 * Also adds their IDs to the link cache
 *
 * This is mostly copied from Title::getLinksTo()
 *
 * @deprecated Use HTMLCacheUpdate, this function uses too much memory
 *
 * @param array $options Query options (e.g. 'FOR UPDATE'); any option
 *        switches the read to the master DB
 * @return array of Title objects linking to this file
 */
function getLinksTo($options = array()) {
	wfProfileIn(__METHOD__);
	// Note: use local DB not repo DB, we want to know local links
	if (count($options) > 0) {
		$db = wfGetDB(DB_MASTER);
	} else {
		$db = wfGetDB(DB_SLAVE);
	}
	$linkCache = LinkCache::singleton();
	// BUG FIX: the conditions previously used 'page_id' => 'il_from', which
	// Database::select() quotes as the string literal 'il_from' instead of
	// producing a column join, and passed an already-addQuotes()'d name as
	// the 'il_to' value, which select() would then quote a second time.
	// Use a raw join condition and the plain name so quoting happens once.
	$res = $db->select(
		array('page', 'imagelinks'),
		array('page_namespace', 'page_title', 'page_id', 'page_len', 'page_is_redirect'),
		array('page_id = il_from', 'il_to' => $this->getName()),
		__METHOD__,
		$options
	);
	$retVal = array();
	if ($db->numRows($res)) {
		while ($row = $db->fetchObject($res)) {
			if ($titleObj = Title::newFromRow($row)) {
				// Seed the link cache so later existence checks avoid DB hits.
				$linkCache->addGoodLinkObj($row->page_id, $titleObj, $row->page_len, $row->page_is_redirect);
				$retVal[] = $titleObj;
			}
		}
	}
	$db->freeResult($res);
	wfProfileOut(__METHOD__);
	return $retVal;
}
/**
 * Update the page record to point to a newly saved revision.
 *
 * @param $dbw DatabaseBase: object
 * @param $revision Revision: For ID number, and text used to set
 *                  length and redirect status fields
 * @param $lastRevision Integer: if given, will not overwrite the page field
 *                      when different from the currently set value.
 *                      Giving 0 indicates the new page flag should be set
 *                      on.
 * @param $lastRevIsRedirect Boolean: if given, will optimize adding and
 *                           removing rows in redirect table.
 * @return bool true on success, false on failure
 * @private
 */
public function updateRevisionOn( $dbw, $revision, $lastRevision = null, $lastRevIsRedirect = null ) {
	global $wgContentHandlerUseDB;
	wfProfileIn( __METHOD__ );
	$content = $revision->getContent();
	$len = $content ? $content->getSize() : 0;
	$rt = $content ? $content->getUltimateRedirectTarget() : null;
	$conditions = array( 'page_id' => $this->getId() );
	if ( !is_null( $lastRevision ) ) {
		// An extra check against threads stepping on each other
		$conditions['page_latest'] = $lastRevision;
	}
	$now = wfTimestampNow();
	$row = array( /* SET */
		'page_latest' => $revision->getId(),
		'page_touched' => $dbw->timestamp( $now ),
		'page_is_new' => ( $lastRevision === 0 ) ? 1 : 0,
		'page_is_redirect' => $rt !== null ? 1 : 0,
		'page_len' => $len,
	);
	if ( $wgContentHandlerUseDB ) {
		$row['page_content_model'] = $revision->getContentModel();
	}
	$dbw->update( 'page', $row, $conditions, __METHOD__ );
	// Zero affected rows means the page row was missing or page_latest moved.
	$result = $dbw->affectedRows() > 0;
	if ( $result ) {
		$this->updateRedirectOn( $dbw, $rt, $lastRevIsRedirect );
		$this->setLastEdit( $revision );
		$this->setCachedLastEditTime( $now );
		$this->mLatest = $revision->getId();
		$this->mIsRedirect = (bool)$rt;
		// Update the LinkCache.
		LinkCache::singleton()->addGoodLinkObj( $this->getId(), $this->mTitle, $len, $this->mIsRedirect, $this->mLatest, $revision->getContentModel() );
	}
	wfProfileOut( __METHOD__ );
	return $result;
}
/**
 * Get the pageid of a specified page
 *
 * Returns '' for an unparseable title, 0 for titles that cannot have an
 * id, the cached id when the link cache knows it, a freshly looked-up id
 * when the expensive-function budget allows, or null when it does not.
 *
 * @param Parser $parser
 * @param string $title Title to get the pageid from
 * @return int|null|string
 * @since 1.23
 */
public static function pageid($parser, $title = null) {
	$target = Title::newFromText($title);
	if ($target === null) {
		return '';
	}
	// Use title from parser to have correct pageid after edit
	if ($target->equals($parser->getTitle())) {
		$target = $parser->getTitle();
		return $target->getArticleID();
	}
	// These can't have ids
	if (!$target->canExist() || $target->isExternal()) {
		return 0;
	}
	// Check the link cache, maybe something already looked it up.
	$cache = LinkCache::singleton();
	$cacheKey = $target->getPrefixedDBkey();
	$cachedId = $cache->getGoodLinkID($cacheKey);
	if ($cachedId != 0) {
		$parser->mOutput->addLink($target, $cachedId);
		return $cachedId;
	}
	if ($cache->isBadLink($cacheKey)) {
		// Known to not exist; record the red link and return 0.
		$parser->mOutput->addLink($target, 0);
		return $cachedId;
	}
	// We need to load it from the DB, so mark expensive
	if ($parser->incrementExpensiveFunctionCount()) {
		$freshId = $target->getArticleID();
		$parser->mOutput->addLink($target, $freshId);
		return $freshId;
	}
	// Expensive-call budget exhausted.
	return null;
}
/**
 * Insert a new page
 *
 * @param string $pageName Page name
 * @param string $text Page's content
 * @param int $ns Default namespace for the Title lookup
 *        (NOTE(review): the old docblock said "Unused", but $ns is passed
 *        through to Title::newFromText() below)
 * @return bool always true
 */
protected function insertPage($pageName, $text, $ns) {
	$title = Title::newFromText($pageName, $ns);
	$user = User::newFromName('WikiSysop');
	$comment = 'Search Test';
	// avoid memory leak...?
	LinkCache::singleton()->clear();
	$page = WikiPage::factory($title);
	$page->doEditContent(ContentHandler::makeContent($text, $title), $comment, 0, false, $user);
	// Remember the created page so tearDown can delete it.
	$this->pageList[] = array($title, $page->getId());
	return true;
}
/**
 * Import a single old revision: creates the page row if it does not exist
 * yet, inserts the revision, bumps page_latest when the revision is newer,
 * and fires the matching create/edit updates.
 *
 * @return bool always true
 */
function importOldRevision() {
	$fname = "WikiImporter::importOldRevision";
	$dbw =& wfGetDB(DB_MASTER);
	# Sneak a single revision into place
	$user = User::newFromName($this->getUser());
	if ($user) {
		$userId = intval($user->getId());
		$userText = $user->getName();
	} else {
		// Unknown user name: attribute the revision to the raw name text.
		$userId = 0;
		$userText = $this->getUser();
	}
	// avoid memory leak...?
	$linkCache =& LinkCache::singleton();
	$linkCache->clear();
	$article = new Article($this->title);
	$pageId = $article->getId();
	if ($pageId == 0) {
		# must create the page...
		$pageId = $article->insertOn($dbw);
		$created = true;
	} else {
		$created = false;
	}
	# FIXME: Check for exact conflicts
	# FIXME: Use original rev_id optionally
	# FIXME: blah blah blah
	#if( $numrows > 0 ) {
	#	return wfMsg( "importhistoryconflict" );
	#}
	# Insert the row
	$revision = new Revision(array('page' => $pageId, 'text' => $this->getText(), 'comment' => $this->getComment(), 'user' => $userId, 'user_text' => $userText, 'timestamp' => $this->timestamp, 'minor_edit' => $this->minor));
	$revId = $revision->insertOn($dbw);
	// Only move page_latest forward if this revision is newer than current.
	$changed = $article->updateIfNewerOn($dbw, $revision);
	if ($created) {
		wfDebug(__METHOD__ . ": running onArticleCreate\n");
		Article::onArticleCreate($this->title);
	} else {
		if ($changed) {
			wfDebug(__METHOD__ . ": running onArticleEdit\n");
			Article::onArticleEdit($this->title);
		}
	}
	if ($created || $changed) {
		wfDebug(__METHOD__ . ": running edit updates\n");
		$article->editUpdates($this->getText(), $this->getComment(), $this->minor, $this->timestamp, $revId);
	}
	return true;
}
/**
 * Print basic definitions a list of pages ordered by their page id.
 * Offset and limit refer to the count of existing pages, not to the
 * page id.
 *
 * Serializes an RDF declaration for each page in the namespaces with
 * semantic links enabled, then appends a link to the next result page.
 *
 * @param integer $offset the number of the first (existing) page to
 * serialize a declaration for
 * @param integer $limit the number of pages to serialize
 */
public function printPageList($offset = 0, $limit = 30) {
	global $smwgNamespacesWithSemanticLinks;
	$db = wfGetDB(DB_SLAVE);
	$this->prepareSerialization();
	$this->delay_flush = 35; // don't do intermediate flushes with default parameters
	$linkCache = LinkCache::singleton();
	$this->serializer->startSerialization();
	$this->serializer->serializeExpData(SMWExporter::getInstance()->getOntologyExpData(''));
	// Build an OR condition over all namespaces with semantic links enabled.
	$query = '';
	foreach ($smwgNamespacesWithSemanticLinks as $ns => $enabled) {
		if ($enabled) {
			if ($query !== '') {
				$query .= ' OR ';
			}
			$query .= 'page_namespace = ' . $db->addQuotes($ns);
		}
	}
	$res = $db->select($db->tableName('page'), 'page_id,page_title,page_namespace', $query, 'SMW::RDF::PrintPageList', array('ORDER BY' => 'page_id ASC', 'OFFSET' => $offset, 'LIMIT' => $limit));
	$foundpages = false;
	foreach ($res as $row) {
		$foundpages = true;
		try {
			$diPage = new SMWDIWikiPage($row->page_title, $row->page_namespace, '');
			$this->serializePage($diPage, 0);
			$this->flush();
			// Clear the link cache per page to keep memory bounded on long exports.
			$linkCache->clear();
		} catch (SMWDataItemException $e) {
			// strange data, who knows, not our DB table, keep calm and carry on
		}
	}
	if ($foundpages) {
		// add link to next result page
		if (strpos(SMWExporter::getInstance()->expandURI('&wikiurl;'), '?') === false) {
			// check whether we have title as a first parameter or in URL
			$nexturl = SMWExporter::getInstance()->expandURI('&export;?offset=') . ($offset + $limit);
		} else {
			$nexturl = SMWExporter::getInstance()->expandURI('&export;&offset=') . ($offset + $limit);
		}
		$expData = new SMWExpData(new SMWExpResource($nexturl));
		$ed = new SMWExpData(SMWExporter::getInstance()->getSpecialNsResource('owl', 'Thing'));
		$expData->addPropertyObjectValue(SMWExporter::getInstance()->getSpecialNsResource('rdf', 'type'), $ed);
		$ed = new SMWExpData(new SMWExpResource($nexturl));
		$expData->addPropertyObjectValue(SMWExporter::getInstance()->getSpecialNsResource('rdfs', 'isDefinedBy'), $ed);
		$this->serializer->serializeExpData($expData);
	}
	$this->serializer->finishSerialization();
	$this->flush(true);
}
/**
 * Do the query and add the results to the LinkCache object
 *
 * Convenience wrapper that targets the process-wide LinkCache singleton.
 *
 * @return Array mapping PDBK to ID
 */
public function execute() {
	return $this->executeInto(LinkCache::singleton());
}
/**
 * Get an array of Title objects linked from this Title
 * Also stores the IDs in the link cache.
 *
 * WARNING: do not use this function on arbitrary user-supplied titles!
 * On heavily-used templates it will max out the memory.
 *
 * @param array $options May be FOR UPDATE
 * @param string $table Table name
 * @param string $prefix Fields prefix
 * @return array Array of Title objects linking here
 */
public function getLinksFrom($options = array(), $table = 'pagelinks', $prefix = 'pl') {
	$id = $this->getArticleID();
	# If the page doesn't exist; there can't be any link from this page
	if (!$id) {
		return array();
	}
	// Any query option (e.g. FOR UPDATE) forces the read onto the master.
	if (count($options) > 0) {
		$db = wfGetDB(DB_MASTER);
	} else {
		$db = wfGetDB(DB_SLAVE);
	}
	$blNamespace = "{$prefix}_namespace";
	$blTitle = "{$prefix}_title";
	// LEFT JOIN against page so links to nonexistent pages are still
	// returned (their page fields come back NULL).
	$res = $db->select(array($table, 'page'), array_merge(array($blNamespace, $blTitle), WikiPage::selectFields()), array("{$prefix}_from" => $id), __METHOD__, $options, array('page' => array('LEFT JOIN', array("page_namespace={$blNamespace}", "page_title={$blTitle}"))));
	$retVal = array();
	$linkCache = LinkCache::singleton();
	foreach ($res as $row) {
		if ($row->page_id) {
			$titleObj = Title::newFromRow($row);
		} else {
			// Target page does not exist: record it as a bad link.
			$titleObj = Title::makeTitle($row->{$blNamespace}, $row->{$blTitle});
			$linkCache->addBadLinkObj($titleObj);
		}
		$retVal[] = $titleObj;
	}
	return $retVal;
}
/**
 * Set the singleton instance to a given object.
 *
 * Since we do not have an interface for LinkCache, you have to be sure the
 * given object implements all the LinkCache public methods.
 *
 * @param LinkCache $instance
 * @since 1.22
 */
public static function setSingleton(LinkCache $instance) {
	// Replace the stored instance (NOTE(review): presumably the one
	// returned by singleton() — that accessor is not visible here).
	self::$instance = $instance;
}
/**
 * Rename a page whose title row is inconsistent (interwiki prefix,
 * illegal form, or colliding with an existing page) to a legal
 * "Broken/..." title, updating the page table directly.
 *
 * @param object $row Page row (page_id, page_namespace, page_title)
 * @param Title $title Title constructed from the row
 */
protected function moveInconsistentPage($row, $title) {
	if ($title->exists() || $title->getInterwiki() || !$title->canExist()) {
		if ($title->getInterwiki() || !$title->canExist()) {
			$prior = $title->getPrefixedDBkey();
		} else {
			$prior = $title->getDBkey();
		}
		# Old cleanupTitles could move articles there. See bug 23147.
		$ns = $row->page_namespace;
		if ($ns < 0) {
			$ns = 0;
		}
		# Namespace which no longer exists. Put the page in the main namespace
		# since we don't have any idea of the old namespace name. See bug 68501.
		if (!MWNamespace::exists($ns)) {
			$ns = 0;
		}
		$clean = 'Broken/' . $prior;
		$verified = Title::makeTitleSafe($ns, $clean);
		if (!$verified || $verified->exists()) {
			// Fall back to a title derived from the page id, which is unique.
			$blah = "Broken/id:" . $row->page_id;
			$this->output("Couldn't legalize; form '{$clean}' exists; using '{$blah}'\n");
			$verified = Title::makeTitleSafe($ns, $blah);
		}
		$title = $verified;
	}
	if (is_null($title)) {
		// Even the fallback title could not be constructed; abort.
		$this->error("Something awry; empty title.", true);
	}
	$ns = $title->getNamespace();
	$dest = $title->getDBkey();
	if ($this->dryrun) {
		$this->output("DRY RUN: would rename {$row->page_id} ({$row->page_namespace}," . "'{$row->page_title}') to ({$ns},'{$dest}')\n");
	} else {
		$this->output("renaming {$row->page_id} ({$row->page_namespace}," . "'{$row->page_title}') to ({$ns},'{$dest}')\n");
		$dbw = wfGetDB(DB_MASTER);
		$dbw->update('page', array('page_namespace' => $ns, 'page_title' => $dest), array('page_id' => $row->page_id), __METHOD__);
		// The rename invalidates any cached title lookups.
		LinkCache::singleton()->clear();
	}
}
/**
 * Run jobs of the specified number/type for the specified time
 *
 * The response map has a 'job' field that lists status of each job, including:
 *   - type : the job type
 *   - status : ok/failed
 *   - error : any error message string
 *   - time : the job run time in ms
 * The response map also has:
 *   - backoffs : the (job type => seconds) map of backoff times
 *   - elapsed : the total time spent running tasks in ms
 *   - reached : the reason the script finished, one of (none-ready, job-limit, time-limit)
 *
 * This method outputs status information only if a debug handler was set.
 * Any exceptions are caught and logged, but are not reported as output.
 *
 * @param array $options Map of parameters:
 *   - type : the job type (or false for the default types)
 *   - maxJobs : maximum number of jobs to run
 *   - maxTime : maximum time in seconds before stopping
 *   - throttle : whether to respect job backoff configuration
 * @return array Summary response that can easily be JSON serialized
 */
public function run(array $options) {
	global $wgJobClasses, $wgTrxProfilerLimits;

	$response = array('jobs' => array(), 'reached' => 'none-ready');

	$type = isset($options['type']) ? $options['type'] : false;
	$maxJobs = isset($options['maxJobs']) ? $options['maxJobs'] : false;
	$maxTime = isset($options['maxTime']) ? $options['maxTime'] : false;
	$noThrottle = isset($options['throttle']) && !$options['throttle'];

	// An explicitly requested type must be a registered job class
	if ($type !== false && !isset($wgJobClasses[$type])) {
		$response['reached'] = 'none-possible';
		return $response;
	}

	// Bail out if in read-only mode
	if (wfReadOnly()) {
		$response['reached'] = 'read-only';
		return $response;
	}

	// Catch huge single updates that lead to slave lag
	$trxProfiler = Profiler::instance()->getTransactionProfiler();
	$trxProfiler->setLogger(LoggerFactory::getInstance('DBPerformance'));
	$trxProfiler->setExpectations($wgTrxProfilerLimits['JobRunner'], __METHOD__);

	// Bail out if there is too much DB lag.
	// This check should not block as we want to try other wiki queues.
	$maxAllowedLag = 3;
	list(, $maxLag) = wfGetLB(wfWikiID())->getMaxLag();
	if ($maxLag >= $maxAllowedLag) {
		$response['reached'] = 'slave-lag-limit';
		return $response;
	}

	$group = JobQueueGroup::singleton();

	// Flush any pending DB writes for sanity
	wfGetLBFactory()->commitAll();

	// Some jobs types should not run until a certain timestamp
	$backoffs = array(); // map of (type => UNIX expiry)
	$backoffDeltas = array(); // map of (type => seconds)
	$wait = 'wait'; // block to read backoffs the first time

	$stats = RequestContext::getMain()->getStats();
	$jobsPopped = 0;
	$timeMsTotal = 0;
	$flags = JobQueueGroup::USE_CACHE;
	$startTime = microtime(true); // time since jobs started running
	$checkLagPeriod = 1.0; // check slave lag this many seconds
	$lastCheckTime = 1; // timestamp of last slave check; 1 forces a check on the first pass
	do {
		// Sync the persistent backoffs with concurrent runners
		$backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
		$blacklist = $noThrottle ? array() : array_keys($backoffs);
		$wait = 'nowait'; // less important now

		if ($type === false) {
			$job = $group->pop(JobQueueGroup::TYPE_DEFAULT, $flags, $blacklist);
		} elseif (in_array($type, $blacklist)) {
			$job = false; // requested queue in backoff state
		} else {
			$job = $group->pop($type); // job from a single queue
		}

		if ($job) { // found a job
			$popTime = time();
			$jType = $job->getType();

			// Back off of certain jobs for a while (for throttling and for errors)
			$ttw = $this->getBackoffTimeToWait($job);
			if ($ttw > 0) {
				// Always add the delta for other runners in case the time running the
				// job negated the backoff for each individually but not collectively.
				$backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
				$backoffs = $this->syncBackoffDeltas($backoffs, $backoffDeltas, $wait);
			}

			$msg = $job->toString() . " STARTING";
			$this->logger->debug($msg);
			$this->debugCallback($msg);

			// Run the job...
			$jobStartTime = microtime(true);
			try {
				++$jobsPopped;
				$status = $job->run();
				$error = $job->getLastError();
				$this->commitMasterChanges($job);
				// Run any deferred updates the job queued, then commit those as well
				DeferredUpdates::doUpdates();
				$this->commitMasterChanges($job);
			} catch (Exception $e) {
				MWExceptionHandler::rollbackMasterChangesAndLog($e);
				$status = false;
				$error = get_class($e) . ': ' . $e->getMessage();
				MWExceptionHandler::logException($e);
			}
			// Commit all outstanding connections that are in a transaction
			// to get a fresh repeatable read snapshot on every connection.
			// Note that jobs are still responsible for handling slave lag.
			wfGetLBFactory()->commitAll();
			// Clear out title cache data from prior snapshots
			LinkCache::singleton()->clear();
			$timeMs = intval((microtime(true) - $jobStartTime) * 1000);
			$timeMsTotal += $timeMs;

			// Record how long jobs wait before getting popped
			$readyTs = $job->getReadyTimestamp();
			if ($readyTs) {
				$pickupDelay = $popTime - $readyTs;
				$stats->timing('jobqueue.pickup_delay.all', 1000 * $pickupDelay);
				$stats->timing("jobqueue.pickup_delay.{$jType}", 1000 * $pickupDelay);
			}
			// Record root job age for jobs being run
			$root = $job->getRootJobParams();
			if ($root['rootJobTimestamp']) {
				$age = $popTime - wfTimestamp(TS_UNIX, $root['rootJobTimestamp']);
				$stats->timing("jobqueue.pickup_root_age.{$jType}", 1000 * $age);
			}
			// Track the execution time for jobs
			$stats->timing("jobqueue.run.{$jType}", $timeMs);

			// Mark the job as done on success or when the job cannot be retried
			if ($status !== false || !$job->allowRetries()) {
				$group->ack($job); // done
			}

			// Back off of certain jobs for a while (for throttling and for errors)
			// NOTE: $ttw here reuses the pre-run value from getBackoffTimeToWait()
			if ($status === false && mt_rand(0, 49) == 0) {
				$ttw = max($ttw, 30); // too many errors
				$backoffDeltas[$jType] = isset($backoffDeltas[$jType]) ? $backoffDeltas[$jType] + $ttw : $ttw;
			}

			if ($status === false) {
				$msg = $job->toString() . " t={$timeMs} error={$error}";
				$this->logger->error($msg);
				$this->debugCallback($msg);
			} else {
				$msg = $job->toString() . " t={$timeMs} good";
				$this->logger->info($msg);
				$this->debugCallback($msg);
			}

			$response['jobs'][] = array('type' => $jType, 'status' => $status === false ? 'failed' : 'ok', 'error' => $error, 'time' => $timeMs);

			// Break out if we hit the job count or wall time limits...
			if ($maxJobs && $jobsPopped >= $maxJobs) {
				$response['reached'] = 'job-limit';
				break;
			} elseif ($maxTime && microtime(true) - $startTime > $maxTime) {
				$response['reached'] = 'time-limit';
				break;
			}

			// Don't let any of the main DB slaves get backed up.
			// This only waits for so long before exiting and letting
			// other wikis in the farm (on different masters) get a chance.
			$timePassed = microtime(true) - $lastCheckTime;
			if ($timePassed >= $checkLagPeriod || $timePassed < 0) {
				if (!wfWaitForSlaves($lastCheckTime, false, '*', $maxAllowedLag)) {
					$response['reached'] = 'slave-lag-limit';
					break;
				}
				$lastCheckTime = microtime(true);
			}
			// Don't let any queue slaves/backups fall behind
			if ($jobsPopped > 0 && $jobsPopped % 100 == 0) {
				$group->waitForBackups();
			}

			// Bail if near-OOM instead of in a job
			if (!$this->checkMemoryOK()) {
				$response['reached'] = 'memory-limit';
				break;
			}
		}
	} while ($job); // stop when there are no jobs

	// Sync the persistent backoffs for the next runJobs.php pass
	if ($backoffDeltas) {
		$this->syncBackoffDeltas($backoffs, $backoffDeltas, 'wait');
	}

	$response['backoffs'] = $backoffs;
	$response['elapsed'] = $timeMsTotal;

	return $response;
}
/**
 * Static function to get a template
 * Can be overridden via ParserOptions::setTemplateCallback().
 *
 * @param Title $title Title of the template to fetch
 * @param Parser|bool $parser Calling parser, or false (passed through to hooks)
 * @return array ('text' => string|bool, 'finalTitle' => Title, 'deps' => array)
 *   'text' is false when the template was skipped or could not be fetched;
 *   'deps' lists each (title, page_id, rev_id) consulted along the way.
 */
static function statelessFetchTemplate($title, $parser = false) {
	$text = $skip = false;
	$finalTitle = $title;
	$deps = array();

	// Loop to fetch the article, with up to 1 redirect
	for ($i = 0; $i < 2 && is_object($title); $i++) {
		# Give extensions a chance to select the revision instead
		$id = false; // Assume current
		// NOTE: the hook may mutate $title, $skip and $id by reference
		wfRunHooks('BeforeParserFetchTemplateAndtitle',
			array($parser, &$title, &$skip, &$id));

		if ($skip) {
			// An extension vetoed the fetch; record the dependency and stop
			$text = false;
			$deps[] = array('title' => $title, 'page_id' => $title->getArticleID(), 'rev_id' => null);
			break;
		}

		$rev = $id ? Revision::newFromId($id) : Revision::newFromTitle($title);
		$rev_id = $rev ? $rev->getId() : 0;
		// If there is no current revision, there is no page
		if ($id === false && !$rev) {
			$linkCache = LinkCache::singleton();
			$linkCache->addBadLinkObj($title);
		}

		$deps[] = array('title' => $title, 'page_id' => $title->getArticleID(), 'rev_id' => $rev_id);

		if ($rev) {
			$text = $rev->getText();
		} elseif ($title->getNamespace() == NS_MEDIAWIKI) {
			// Missing MediaWiki-namespace page: fall back to the default
			// system message text, if one exists.
			global $wgContLang;
			$message = $wgContLang->lcfirst($title->getText());
			$text = wfMsgForContentNoTrans($message);
			if (wfEmptyMsg($message, $text)) {
				$text = false;
				break;
			}
		} else {
			break;
		}
		if ($text === false) {
			break;
		}
		// Redirect?
		$finalTitle = $title;
		$title = Title::newFromRedirect($text);
	}
	return array('text' => $text, 'finalTitle' => $finalTitle, 'deps' => $deps);
}
/**
 * Add an array of categories, with names in the keys
 *
 * @param array $categories Mapping category name => sort key
 */
public function addCategoryLinks(array $categories) {
	global $wgContLang;

	if (!is_array($categories) || count($categories) == 0) {
		return;
	}

	# Batch up the category titles for a single existence query
	$batch = new LinkBatch();
	$batch->setArray(array(NS_CATEGORY => $categories));

	# Fetch existence plus the hiddencat property
	$dbr = wfGetDB(DB_SLAVE);
	$fields = array('page_id', 'page_namespace', 'page_title', 'page_len',
		'page_is_redirect', 'page_latest', 'pp_value');
	if ($this->getConfig()->get('ContentHandlerUseDB')) {
		$fields[] = 'page_content_model';
	}
	$res = $dbr->select(
		array('page', 'page_props'),
		$fields,
		$batch->constructSet('page', $dbr),
		__METHOD__,
		array(),
		array('page_props' => array(
			'LEFT JOIN',
			array('pp_propname' => 'hiddencat', 'pp_page = page_id'),
		))
	);

	# Add the results to the link cache
	$batch->addResultToCache(LinkCache::singleton(), $res);

	# Default every category to 'normal', then mark the hidden ones
	$categories = array_fill_keys(array_keys($categories), 'normal');
	foreach ($res as $row) {
		if (isset($row->pp_value)) {
			$categories[$row->page_title] = 'hidden';
		}
	}

	# Add the remaining categories to the skin, unless a hook handler
	# already took care of building the links itself
	if (!Hooks::run('OutputPageMakeCategoryLinks', array(&$this, $categories, &$this->mCategoryLinks))) {
		return;
	}
	foreach ($categories as $category => $type) {
		$origcategory = $category;
		$title = Title::makeTitleSafe(NS_CATEGORY, $category);
		if (!$title) {
			continue;
		}
		# Language-variant conversion may rewrite $category and $title
		$wgContLang->findVariantLink($category, $title, true);
		if ($category != $origcategory && array_key_exists($category, $categories)) {
			# The converted name is already in the list; skip the duplicate
			continue;
		}
		$text = $wgContLang->convertHtml($title->getText());
		$this->mCategories[] = $title->getText();
		$this->mCategoryLinks[$type][] = Linker::link($title, $text);
	}
}
/**
 * Sets a hook to force that a page exists, and sets a current revision callback to return a
 * revision with custom content when the current revision of the page is requested.
 *
 * @since 1.25
 * @param Title $title
 * @param Content $content
 * @param User $user The user that the fake revision is attributed to
 * @return ScopedCallback to unset the hook
 */
public function setupFakeRevision($title, $content, $user) {
	// $previousCallback is captured by reference because it is only assigned
	// after setCurrentRevisionCallback() returns the callback it replaced.
	$previousCallback = $this->setCurrentRevisionCallback(
		function ($queriedTitle, $parser = false) use ($title, $content, $user, &$previousCallback) {
			if (!$queriedTitle->equals($title)) {
				// Not our faked page: defer to whatever callback was installed before
				return call_user_func($previousCallback, $queriedTitle, $parser);
			}
			return new Revision(array(
				'page' => $title->getArticleID(),
				'user_text' => $user->getName(),
				'user' => $user->getId(),
				'parent_id' => $title->getLatestRevId(),
				'title' => $title,
				'content' => $content,
			));
		}
	);

	// Force existence checks for the faked title to succeed
	global $wgHooks;
	$wgHooks['TitleExists'][] = function ($queriedTitle, &$exists) use ($title) {
		if ($queriedTitle->equals($title)) {
			$exists = true;
		}
	};
	// Remember the array index of the hook we just appended so the
	// teardown callback can unregister exactly this handler.
	end($wgHooks['TitleExists']);
	$hookKey = key($wgHooks['TitleExists']);

	// Drop any cached "page does not exist" entry for the faked title
	LinkCache::singleton()->clearBadLink($title->getPrefixedDBkey());

	return new ScopedCallback(function () use ($title, $hookKey) {
		global $wgHooks;
		unset($wgHooks['TitleExists'][$hookKey]);
		LinkCache::singleton()->clearLink($title);
	});
}