/**
 * Fetch the text of a single text-table row.
 *
 * May throw a database error if, say, the server dies during query.
 *
 * @param Database $db
 * @param int $id The old_id
 * @return string|bool Revision text, or false if the row was missing
 *   or its text could not be expanded.
 */
private function doGetText($db, $id) {
	$textRow = $db->selectRow(
		'text',
		['old_text', 'old_flags'],
		['old_id' => intval($id)],
		__METHOD__
	);

	// getRevisionText() already yields false on a missing/bad row,
	// which is exactly what we want to propagate.
	return Revision::getRevisionText($textRow);
}
/**
 * Fetch the text of a single text-table row.
 *
 * May throw a database error if, say, the server dies during query.
 *
 * @param Database $db
 * @param int $id The old_id to look up
 * @return string|bool Revision text, or false if unavailable
 */
function doGetText($db, $id) {
	$id = intval($id);
	// Use __METHOD__ so the profiling/debug label stays in sync with the
	// actual method; the previous hard-coded 'TextPassDumper::getText'
	// was stale (this method is doGetText).
	$row = $db->selectRow(
		'text',
		array('old_text', 'old_flags'),
		array('old_id' => $id),
		__METHOD__
	);
	$text = Revision::getRevisionText($row);
	if ($text === false) {
		return false;
	}
	return $text;
}
/**
 * Look up the latest text of a MediaWiki-namespace message page on the
 * shared "message commons" wiki database.
 *
 * Joins the remote wiki's page, revision and text tables directly
 * (cross-database query on the same server; table names are built from
 * the $egMessageCommonsDatabase / $egMessageCommonsPrefix config globals,
 * which are trusted configuration, not user input).
 *
 * @param string $msg Message name (page title in NS_MEDIAWIKI)
 * @return string|null Message text, or null if the page does not exist;
 *   may also be false if the text blob cannot be expanded (propagated
 *   from Revision::getRevisionText()).
 */
function efMessageCommonsGetMsg($msg) {
	global $egMessageCommonsDatabase, $egMessageCommonsPrefix;
	$title = Title::makeTitle(NS_MEDIAWIKI, $msg);
	$dbr = wfGetDB(DB_SLAVE);
	// Implicit join: the unqualified conditions below tie page -> latest
	// revision -> text row.
	$row = $dbr->selectRow(
		array(
			"`{$egMessageCommonsDatabase}`.`{$egMessageCommonsPrefix}page`",
			"`{$egMessageCommonsDatabase}`.`{$egMessageCommonsPrefix}revision`",
			"`{$egMessageCommonsDatabase}`.`{$egMessageCommonsPrefix}text`"
		),
		array('*'),
		array(
			'page_namespace' => $title->getNamespace(),
			'page_title' => $title->getDBkey(),
			'page_latest = rev_id',
			'old_id = rev_text_id'
		)
	);
	if (!$row) {
		return null;
	}
	return Revision::getRevisionText($row);
}
/**
 * Fetches contents for pagenames in given namespace without side effects.
 *
 * @param string|string[] $titles Database page names.
 * @param int $namespace The number of the namespace.
 * @return array ( string => array ( string, string ) ) Tuples of page
 *   text and last author indexed by page name.
 */
public static function getContents($titles, $namespace) {
	$dbr = wfGetDB(DB_SLAVE);

	// One query: join page -> latest revision -> text for every
	// requested title.
	$res = $dbr->select(
		array('page', 'revision', 'text'),
		array('page_title', 'old_text', 'old_flags', 'rev_user_text'),
		array(
			'page_namespace' => $namespace,
			'page_latest=rev_id',
			'rev_text_id=old_id',
			'page_title' => $titles,
		),
		__METHOD__
	);

	$contents = array();
	foreach ($res as $row) {
		$contents[$row->page_title] = array(
			Revision::getRevisionText($row),
			$row->rev_user_text,
		);
	}
	$res->free();

	return $contents;
}
/**
 * Populates the search index with content from all pages
 */
protected function populateSearchIndex() {
	// The highest page_id bounds the id ranges walked below.
	$res = $this->db->select('page', 'MAX(page_id) AS count');
	$row = $this->db->fetchObject($res);
	$count = $row->count;
	$this->output("Rebuilding index fields for {$count} pages...\n");

	$start = 0;
	while ($start < $count) {
		$this->output($start . "\n");
		$end = $start + self::RTI_CHUNK_SIZE - 1;

		// Latest-revision text for every page id in this chunk.
		$res = $this->db->select(
			array('page', 'revision', 'text'),
			array('page_id', 'page_namespace', 'page_title', 'old_flags', 'old_text'),
			array(
				"page_id BETWEEN {$start} AND {$end}",
				'page_latest = rev_id',
				'rev_text_id = old_id',
			),
			__METHOD__
		);

		foreach ($res as $row) {
			$text = Revision::getRevisionText($row);
			$update = new SearchUpdate($row->page_id, $row->page_title, $text);
			$update->doUpdate();
		}
		$this->db->freeResult($res);

		$start += self::RTI_CHUNK_SIZE;
	}
}
/**
 * Scans all pages in the given message namespace(s) for the fuzzy marker
 * and records a 'fuzzy' rev tag for each latest revision that contains it.
 *
 * Pages are processed in LIMIT/OFFSET batches of 100.
 */
public function execute() {
	global $wgTranslateMessageNamespaces;

	$namespace = $this->getOption('namespace', $wgTranslateMessageNamespaces);
	if (is_string($namespace)) {
		// A string option may be a canonical namespace name rather than
		// a numeric index; try to resolve it.
		if (!MWNamespace::exists($namespace)) {
			$namespace = MWNamespace::getCanonicalIndex($namespace);
			if ($namespace === null) {
				$this->error('Bad namespace', true);
			}
		}
	}

	$db = wfGetDB(DB_MASTER);
	$tables = array('page', 'text', 'revision');
	$fields = array('page_id', 'page_title', 'page_namespace', 'rev_id', 'old_text', 'old_flags');
	$conds = array(
		'page_latest = rev_id',
		'old_id = rev_text_id',
		'page_namespace' => $namespace,
	);

	$limit = 100;
	$offset = 0;
	while (true) {
		$inserts = array();
		$this->output('.', 0);
		$options = array('LIMIT' => $limit, 'OFFSET' => $offset);
		$res = $db->select($tables, $fields, $conds, __METHOD__, $options);
		if (!$res->numRows()) {
			break;
		}

		foreach ($res as $r) {
			$text = Revision::getRevisionText($r);
			if (strpos($text, TRANSLATE_FUZZY) !== false) {
				$inserts[] = array(
					'rt_page' => $r->page_id,
					'rt_revision' => $r->rev_id,
					'rt_type' => RevTag::getType('fuzzy'),
				);
			}
		}

		$offset += $limit;

		// Fix: only hit the database when this batch actually found
		// fuzzy revisions; previously an empty replace() was issued for
		// every batch, which is a pointless query at best.
		if (count($inserts)) {
			$db->replace('revtag', 'rt_type_page_revision', $inserts, __METHOD__);
		}
	}
}
/**
 * Loads all or main part of cacheable messages from the database.
 *
 * Fills $this->mCache with the text of every non-redirect page in the
 * MediaWiki namespace, then negative-caches (value false) every known
 * core and extension message key that has no DB override, so later
 * lookups can skip the database entirely.
 *
 * @throws MWException If a database connection cannot be obtained.
 */
function loadFromDB() {
	global $wgAllMessagesEn, $wgLang;
	$fname = 'MessageCache::loadFromDB';
	$dbr =& wfGetDB(DB_SLAVE);
	if (!$dbr) {
		throw new MWException('Invalid database object');
	}
	// Fix: removed the unused $conditions array that duplicated the raw
	// SQL condition string passed to select() below.
	$res = $dbr->select(
		array('page', 'revision', 'text'),
		array('page_title', 'old_text', 'old_flags'),
		'page_is_redirect=0 AND page_namespace=' . NS_MEDIAWIKI .
			' AND page_latest=rev_id AND rev_text_id=old_id',
		$fname
	);

	$this->mCache = array();
	for ($row = $dbr->fetchObject($res); $row; $row = $dbr->fetchObject($res)) {
		$this->mCache[$row->page_title] = Revision::getRevisionText($row);
	}

	# Negative caching
	# Go through the language array and the extension array and make a note of
	# any keys missing from the cache
	foreach ($wgAllMessagesEn as $key => $value) {
		$uckey = $wgLang->ucfirst($key);
		if (!array_key_exists($uckey, $this->mCache)) {
			$this->mCache[$uckey] = false;
		}
	}

	# Make sure all extension messages are available
	wfLoadAllExtensions();
	# Add them to the cache
	foreach ($this->mExtensionMessages as $key => $value) {
		$uckey = $wgLang->ucfirst($key);
		if (!array_key_exists($uckey, $this->mCache) &&
			(isset($this->mExtensionMessages[$key][$wgLang->getCode()]) ||
				isset($this->mExtensionMessages[$key]['en']))
		) {
			$this->mCache[$uckey] = false;
		}
	}

	$dbr->freeResult($res);
}
*/ $result = $dbw->select('archive', array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id', 'ar_page_id', 'ar_len'), array('ar_namespace' => $page->getNamespace(), 'ar_title' => $page->getDBkey(), $oldones), __METHOD__, array('ORDER BY' => 'ar_timestamp')); $revision = null; $restored = 0; while ($row = $dbw->fetchObject($result)) { if ($row->ar_text_id) { // Revision was deleted in 1.5+; text is in // the regular text table, use the reference. // Specify null here so the so the text is // dereferenced for page length info if needed. $revText = null; } else { // Revision was deleted in 1.4 or earlier. // Text is squashed into the archive row, and // a new text table entry will be created for it. $revText = Revision::getRevisionText($row, 'ar_'); } $revision = new Revision(array('page' => $pageId, 'id' => $row->ar_rev_id, 'text' => $revText, 'comment' => $row->ar_comment, 'user' => $row->ar_user, 'user_text' => $row->ar_user_text, 'timestamp' => $row->ar_timestamp, 'minor_edit' => $row->ar_minor_edit, 'text_id' => $row->ar_text_id, 'len' => $row->ar_len)); $revision->insertOn($dbw); $restored++; } // Was anything restored at all? if ($restored == 0) { print "Nothing was restored\n"; exit(1); } if ($revision) { // Attach the latest revision to the page... $wasnew = $article->updateIfNewerOn($dbw, $revision, $previousRevId); if ($newid || $wasnew) { // Update site stats, link tables, etc
/**
 * API query module: enumerates deleted revisions from the archive table
 * for the requested titles, optionally including content, and builds the
 * per-page result structure.
 *
 * Requires the 'deletedhistory' right; including content additionally
 * requires 'undelete'.
 */
public function execute() {
	global $wgUser;
	// Before doing anything at all, let's check permissions
	if (!$wgUser->isAllowed('deletedhistory')) {
		$this->dieUsage('You don\'t have permission to view deleted revision information', 'permissiondenied');
	}

	$db = $this->getDB();
	$params = $this->extractRequestParams(false);
	$prop = array_flip($params['prop']);
	$fld_revid = isset($prop['revid']);
	$fld_user = isset($prop['user']);
	$fld_comment = isset($prop['comment']);
	$fld_minor = isset($prop['minor']);
	$fld_len = isset($prop['len']);
	$fld_content = isset($prop['content']);
	$fld_token = isset($prop['token']);

	$result = $this->getResult();
	$pageSet = $this->getPageSet();
	$titles = $pageSet->getTitles();
	$data = array();

	$this->addTables('archive');
	$this->addFields(array('ar_title', 'ar_namespace', 'ar_timestamp'));
	if ($fld_revid) {
		$this->addFields('ar_rev_id');
	}
	if ($fld_user) {
		$this->addFields('ar_user_text');
	}
	if ($fld_comment) {
		$this->addFields('ar_comment');
	}
	if ($fld_minor) {
		$this->addFields('ar_minor_edit');
	}
	if ($fld_len) {
		$this->addFields('ar_len');
	}
	if ($fld_content) {
		$this->addTables('text');
		$this->addFields(array('ar_text', 'ar_text_id', 'old_text', 'old_flags'));
		$this->addWhere('ar_text_id = old_id');

		// This also means stricter restrictions
		if (!$wgUser->isAllowed('undelete')) {
			$this->dieUsage('You don\'t have permission to view deleted revision content', 'permissiondenied');
		}
	}
	// Check limits
	$userMax = $fld_content ? ApiBase::LIMIT_SML1 : ApiBase::LIMIT_BIG1;
	$botMax = $fld_content ? ApiBase::LIMIT_SML2 : ApiBase::LIMIT_BIG2;

	// Fix: $limit was previously tested before ever being assigned;
	// resolve it from the request parameters first.
	$limit = $params['limit'];
	if ($limit == 'max') {
		$limit = $this->getMain()->canApiHighLimits() ? $botMax : $userMax;
		$this->getResult()->addValue('limits', 'limit', $limit);
		// Write the resolved numeric limit back so the LIMIT option and
		// continuation check below never see the literal string 'max'.
		$params['limit'] = $limit;
	}
	$this->validateLimit('limit', $params['limit'], 1, $userMax, $botMax);

	if ($fld_token) {
		// Undelete tokens are identical for all pages, so we cache one here
		$token = $wgUser->editToken();
	}

	// We need a custom WHERE clause that matches all titles.
	if (count($titles) > 0) {
		$lb = new LinkBatch($titles);
		$where = $lb->constructSet('ar', $db);
		$this->addWhere($where);
	}

	// Fetch one extra row to detect whether a continuation is needed.
	$this->addOption('LIMIT', $params['limit'] + 1);
	$this->addWhereRange('ar_timestamp', $params['dir'], $params['start'], $params['end']);
	if (isset($params['namespace'])) {
		$this->addWhereFld('ar_namespace', $params['namespace']);
	}
	$res = $this->select(__METHOD__);
	$pages = array();
	$count = 0;
	// First populate the $pages array
	while ($row = $db->fetchObject($res)) {
		if ($count++ == $params['limit']) {
			// We've had enough
			$this->setContinueEnumParameter('start', wfTimestamp(TS_ISO_8601, $row->ar_timestamp));
			break;
		}

		$rev = array();
		$rev['timestamp'] = wfTimestamp(TS_ISO_8601, $row->ar_timestamp);
		if ($fld_revid) {
			$rev['revid'] = $row->ar_rev_id;
		}
		if ($fld_user) {
			$rev['user'] = $row->ar_user_text;
		}
		if ($fld_comment) {
			$rev['comment'] = $row->ar_comment;
		}
		if ($fld_minor) {
			if ($row->ar_minor_edit == 1) {
				$rev['minor'] = '';
			}
		}
		if ($fld_len) {
			$rev['len'] = $row->ar_len;
		}
		if ($fld_content) {
			ApiResult::setContent($rev, Revision::getRevisionText($row));
		}

		$t = Title::makeTitle($row->ar_namespace, $row->ar_title);
		if (!isset($pages[$t->getPrefixedText()])) {
			$pages[$t->getPrefixedText()] = array(
				'title' => $t->getPrefixedText(),
				'ns' => intval($row->ar_namespace),
				'revisions' => array($rev),
			);
			if ($fld_token) {
				$pages[$t->getPrefixedText()]['token'] = $token;
			}
		} else {
			$pages[$t->getPrefixedText()]['revisions'][] = $rev;
		}
	}
	$db->freeResult($res);

	// We don't want entire pagenames as keys, so let's make this array indexed
	foreach ($pages as $page) {
		$result->setIndexedTagName($page['revisions'], 'rev');
		$data[] = $page;
	}
	$result->setIndexedTagName($data, 'page');
	$result->addValue('query', $this->getModuleName(), $data);
}
/**
 * Dumps a "<revision>" section on the output stream, with
 * data filled in from the given database row.
 *
 * Output order (id, parentid, timestamp, contributor, minor, comment,
 * sha1, text) must match the XML dump schema; fields suppressed by
 * rev_deleted bits are emitted as empty elements with a deleted="deleted"
 * attribute instead of being omitted.
 *
 * @param $row object
 * @return string
 * @access private
 */
function writeRevision($row) {
	wfProfileIn(__METHOD__);

	$out = " <revision>\n";
	$out .= " " . Xml::element('id', null, strval($row->rev_id)) . "\n";
	if ($row->rev_parent_id) {
		$out .= " " . Xml::element('parentid', null, strval($row->rev_parent_id)) . "\n";
	}

	$out .= $this->writeTimestamp($row->rev_timestamp);

	if ($row->rev_deleted & Revision::DELETED_USER) {
		$out .= " " . Xml::element('contributor', array('deleted' => 'deleted')) . "\n";
	} else {
		$out .= $this->writeContributor($row->rev_user, $row->rev_user_text);
	}

	if ($row->rev_minor_edit) {
		$out .= " <minor/>\n";
	}
	if ($row->rev_deleted & Revision::DELETED_COMMENT) {
		$out .= " " . Xml::element('comment', array('deleted' => 'deleted')) . "\n";
	} elseif ($row->rev_comment != '') {
		// elementClean strips characters that are invalid in XML.
		$out .= " " . Xml::elementClean('comment', array(), strval($row->rev_comment)) . "\n";
	}

	if ($row->rev_sha1 && !($row->rev_deleted & Revision::DELETED_TEXT)) {
		$out .= " " . Xml::element('sha1', null, strval($row->rev_sha1)) . "\n";
	} else {
		$out .= " <sha1/>\n";
	}

	$text = '';
	if ($row->rev_deleted & Revision::DELETED_TEXT) {
		$out .= " " . Xml::element('text', array('deleted' => 'deleted')) . "\n";
	} elseif (isset($row->old_text)) {
		// Raw text from the database may have invalid chars
		$text = strval(Revision::getRevisionText($row));
		$out .= " " . Xml::elementClean('text', array('xml:space' => 'preserve', 'bytes' => intval($row->rev_len)), strval($text)) . "\n";
	} else {
		// Stub output: no text joined in, reference the text row by id
		// so a later pass can fill it in.
		$out .= " " . Xml::element('text', array('id' => $row->rev_text_id, 'bytes' => intval($row->rev_len)), "") . "\n";
	}

	// Extensions may append extra elements before the revision closes.
	wfRunHooks('XmlDumpWriterWriteRevision', array(&$this, &$out, $row, $text));

	$out .= " </revision>\n";
	wfProfileOut(__METHOD__);
	return $out;
}
/**
 * Filters list of keys according to whether the current translation
 * differs from the commited translation.
 * @param string[] $keys List of keys to filter.
 * @param bool $condition True to filter changed translations, false
 *   to filter unchanged translations.
 * @return string[] Filtered keys.
 */
protected function filterChanged(array $keys, $condition) {
	$this->loadData($keys);

	// Keep a snapshot of the unfiltered list when we need to invert
	// the result at the end.
	$unfiltered = $condition === false ? $keys : array();

	foreach ($this->dbData as $row) {
		$key = $this->rowToKey($row);
		if (!isset($this->infile[$key])) {
			continue;
		}
		if ($this->infile[$key] === Revision::getRevisionText($row)) {
			// Identical to the committed translation: drop as unchanged.
			unset($keys[$key]);
		}
	}

	if ($condition === false) {
		// Invert: return the keys that were removed above instead.
		$keys = $this->filterOnCondition($keys, $unfiltered, false);
	}

	return $keys;
}
/**
 * Get the text from an archive row containing ar_text, ar_flags and ar_text_id
 *
 * @param object $row Database row
 * @return string
 */
function getTextFromRow($row) {
	if (is_null($row->ar_text_id)) {
		// MediaWiki 1.4-era row: the compressed text is embedded
		// directly in the archive row.
		return Revision::getRevisionText($row, 'ar_');
	}

	// 1.5+ row: follow the reference into the text table.
	$dbr = wfGetDB(DB_SLAVE);
	$textRow = $dbr->selectRow(
		'text',
		array('old_text', 'old_flags'),
		array('old_id' => $row->ar_text_id),
		__METHOD__
	);

	return Revision::getRevisionText($textRow);
}
/**
 * Dumps a "<revision>" section on the output stream, with
 * data filled in from the given database row.
 *
 * ContentHandler-aware variant: falls back to the title's default
 * content model/format when the rev_content_* columns are absent
 * ($wgContentHandlerUseDB = false), and runs revision text through the
 * handler's exportTransform() before emitting it.
 *
 * @param object $row
 * @return string
 * @access private
 */
function writeRevision($row) {
	wfProfileIn(__METHOD__);

	$out = " <revision>\n";
	$out .= " " . Xml::element('id', null, strval($row->rev_id)) . "\n";
	if (isset($row->rev_parent_id) && $row->rev_parent_id) {
		$out .= " " . Xml::element('parentid', null, strval($row->rev_parent_id)) . "\n";
	}

	$out .= $this->writeTimestamp($row->rev_timestamp);

	if (isset($row->rev_deleted) && $row->rev_deleted & Revision::DELETED_USER) {
		$out .= " " . Xml::element('contributor', array('deleted' => 'deleted')) . "\n";
	} else {
		$out .= $this->writeContributor($row->rev_user, $row->rev_user_text);
	}

	if (isset($row->rev_minor_edit) && $row->rev_minor_edit) {
		$out .= " <minor/>\n";
	}
	if (isset($row->rev_deleted) && $row->rev_deleted & Revision::DELETED_COMMENT) {
		$out .= " " . Xml::element('comment', array('deleted' => 'deleted')) . "\n";
	} elseif ($row->rev_comment != '') {
		$out .= " " . Xml::elementClean('comment', array(), strval($row->rev_comment)) . "\n";
	}

	if (isset($row->rev_content_model) && !is_null($row->rev_content_model)) {
		$content_model = strval($row->rev_content_model);
	} else {
		// probably using $wgContentHandlerUseDB = false;
		$title = Title::makeTitle($row->page_namespace, $row->page_title);
		$content_model = ContentHandler::getDefaultModelFor($title);
	}

	$content_handler = ContentHandler::getForModelID($content_model);

	if (isset($row->rev_content_format) && !is_null($row->rev_content_format)) {
		$content_format = strval($row->rev_content_format);
	} else {
		// probably using $wgContentHandlerUseDB = false;
		$content_format = $content_handler->getDefaultFormat();
	}

	$text = '';
	if (isset($row->rev_deleted) && $row->rev_deleted & Revision::DELETED_TEXT) {
		$out .= " " . Xml::element('text', array('deleted' => 'deleted')) . "\n";
	} elseif (isset($row->old_text)) {
		// Raw text from the database may have invalid chars
		$text = strval(Revision::getRevisionText($row));
		$text = $content_handler->exportTransform($text, $content_format);
		$out .= " " . Xml::elementClean('text', array('xml:space' => 'preserve', 'bytes' => intval($row->rev_len)), strval($text)) . "\n";
	} else {
		// Stub output: no text joined in, reference the text row by id.
		$out .= " " . Xml::element('text', array('id' => $row->rev_text_id, 'bytes' => intval($row->rev_len)), "") . "\n";
	}

	// NOTE(review): unlike every other rev_deleted access in this method,
	// the bit-test below reads $row->rev_deleted without an isset() guard —
	// looks like it could notice on rows lacking that column; confirm.
	if (isset($row->rev_sha1) && $row->rev_sha1 && !($row->rev_deleted & Revision::DELETED_TEXT)) {
		$out .= " " . Xml::element('sha1', null, strval($row->rev_sha1)) . "\n";
	} else {
		$out .= " <sha1/>\n";
	}

	$out .= " " . Xml::element('model', null, strval($content_model)) . "\n";
	$out .= " " . Xml::element('format', null, strval($content_format)) . "\n";

	// Extensions may append extra elements before the revision closes.
	wfRunHooks('XmlDumpWriterWriteRevision', array(&$this, &$out, $row, $text));

	$out .= " </revision>\n";
	wfProfileOut(__METHOD__);
	return $out;
}
/**
 * Compress the text in chunks after concatenating the revisions.
 *
 * Walks pages by page_id, gathers each page's non-latest revisions in
 * chunks of up to $maxChunkSize, concatenates their text into a
 * ConcatenatedGzipHistoryBlob, and rewrites the text rows: the first
 * revision of a chunk holds the serialized blob, later ones become
 * HistoryBlobStub pointers (or external-storage URLs when $extdb is set).
 * Each chunk is written inside its own transaction.
 *
 * @param int $startId
 * @param int $maxChunkSize
 * @param string $beginDate
 * @param string $endDate
 * @param string $extdb
 * @param bool|int $maxPageId
 * @return bool False on invalid date arguments or external-storage
 *   failure, true on completion.
 */
private function compressWithConcat($startId, $maxChunkSize, $beginDate, $endDate, $extdb = "", $maxPageId = false) {
	$loadStyle = self::LS_CHUNKED;

	$dbr = wfGetDB(DB_SLAVE);
	$dbw = wfGetDB(DB_MASTER);

	# Set up external storage
	if ($extdb != '') {
		$storeObj = new ExternalStoreDB();
	}

	# Get all articles by page_id
	if (!$maxPageId) {
		$maxPageId = $dbr->selectField('page', 'max(page_id)', '', __METHOD__);
	}
	$this->output("Starting from {$startId} of {$maxPageId}\n");
	$pageConds = array();

	/*
	if ( $exclude_ns0 ) {
		print "Excluding main namespace\n";
		$pageConds[] = 'page_namespace<>0';
	}
	if ( $queryExtra ) {
		$pageConds[] = $queryExtra;
	}
	*/

	# For each article, get a list of revisions which fit the criteria
	# No recompression, use a condition on old_flags
	# Don't compress object type entities, because that might produce data loss when
	# overwriting bulk storage concat rows. Don't compress external references, because
	# the script doesn't yet delete rows from external storage.
	$conds = array('old_flags NOT ' . $dbr->buildLike($dbr->anyString(), 'object', $dbr->anyString()) . ' AND old_flags NOT ' . $dbr->buildLike($dbr->anyString(), 'external', $dbr->anyString()));

	if ($beginDate) {
		if (!preg_match('/^\\d{14}$/', $beginDate)) {
			$this->error("Invalid begin date \"{$beginDate}\"\n");
			return false;
		}
		$conds[] = "rev_timestamp>'" . $beginDate . "'";
	}
	if ($endDate) {
		if (!preg_match('/^\\d{14}$/', $endDate)) {
			$this->error("Invalid end date \"{$endDate}\"\n");
			return false;
		}
		$conds[] = "rev_timestamp<'" . $endDate . "'";
	}

	if ($loadStyle == self::LS_CHUNKED) {
		// Chunked mode: join the text table up front and lock the rows.
		$tables = array('revision', 'text');
		$fields = array('rev_id', 'rev_text_id', 'old_flags', 'old_text');
		$conds[] = 'rev_text_id=old_id';
		$revLoadOptions = 'FOR UPDATE';
	} else {
		// Individual mode: fetch text rows one at a time inside the loop.
		$tables = array('revision');
		$fields = array('rev_id', 'rev_text_id');
		$revLoadOptions = array();
	}

	# Don't work with current revisions
	# Don't lock the page table for update either -- TS 2006-04-04
	#$tables[] = 'page';
	#$conds[] = 'page_id=rev_page AND rev_id != page_latest';

	for ($pageId = $startId; $pageId <= $maxPageId; $pageId++) {
		wfWaitForSlaves();

		# Wake up
		$dbr->ping();

		# Get the page row
		$pageRes = $dbr->select('page', array('page_id', 'page_namespace', 'page_title', 'page_latest'), $pageConds + array('page_id' => $pageId), __METHOD__);
		if ($pageRes->numRows() == 0) {
			continue;
		}
		$pageRow = $dbr->fetchObject($pageRes);

		# Display progress
		$titleObj = Title::makeTitle($pageRow->page_namespace, $pageRow->page_title);
		$this->output("{$pageId}\t" . $titleObj->getPrefixedDBkey() . " ");

		# Load revisions
		$revRes = $dbw->select($tables, $fields, array_merge(array('rev_page' => $pageRow->page_id, 'rev_id < ' . $pageRow->page_latest), $conds), __METHOD__, $revLoadOptions);
		$revs = array();
		foreach ($revRes as $revRow) {
			$revs[] = $revRow;
		}

		if (count($revs) < 2) {
			# No revisions matching, no further processing
			$this->output("\n");
			continue;
		}

		# For each chunk
		$i = 0;
		while ($i < count($revs)) {
			if ($i < count($revs) - $maxChunkSize) {
				$thisChunkSize = $maxChunkSize;
			} else {
				$thisChunkSize = count($revs) - $i;
			}

			$chunk = new ConcatenatedGzipHistoryBlob();
			$stubs = array();
			$dbw->begin(__METHOD__);
			$usedChunk = false;
			$primaryOldid = $revs[$i]->rev_text_id;

			// @codingStandardsIgnoreStart Ignore avoid function calls in a FOR loop test part warning
			# Get the text of each revision and add it to the object
			for ($j = 0; $j < $thisChunkSize && $chunk->isHappy(); $j++) {
				// @codingStandardsIgnoreEnd
				$oldid = $revs[$i + $j]->rev_text_id;

				# Get text
				if ($loadStyle == self::LS_INDIVIDUAL) {
					$textRow = $dbw->selectRow('text', array('old_flags', 'old_text'), array('old_id' => $oldid), __METHOD__, 'FOR UPDATE');
					$text = Revision::getRevisionText($textRow);
				} else {
					$text = Revision::getRevisionText($revs[$i + $j]);
				}

				if ($text === false) {
					$this->error("\nError, unable to get text in old_id {$oldid}");
					#$dbw->delete( 'old', array( 'old_id' => $oldid ) );
				}

				if ($extdb == "" && $j == 0) {
					// First revision of a locally-stored chunk becomes the
					// carrier row for the whole blob.
					$chunk->setText($text);
					$this->output('.');
				} else {
					# Don't make a stub if it's going to be longer than the article
					# Stubs are typically about 100 bytes
					if (strlen($text) < 120) {
						$stub = false;
						$this->output('x');
					} else {
						$stub = new HistoryBlobStub($chunk->addItem($text));
						$stub->setLocation($primaryOldid);
						$stub->setReferrer($oldid);
						$this->output('.');
						$usedChunk = true;
					}
					$stubs[$j] = $stub;
				}
			}
			$thisChunkSize = $j;

			# If we couldn't actually use any stubs because the pages were too small, do nothing
			if ($usedChunk) {
				if ($extdb != "") {
					# Move blob objects to External Storage
					$stored = $storeObj->store($extdb, serialize($chunk));
					if ($stored === false) {
						$this->error("Unable to store object");
						return false;
					}
					# Store External Storage URLs instead of Stub placeholders
					foreach ($stubs as $stub) {
						if ($stub === false) {
							continue;
						}
						# $stored should provide base path to a BLOB
						$url = $stored . "/" . $stub->getHash();
						$dbw->update('text', array('old_text' => $url, 'old_flags' => 'external,utf-8'), array('old_id' => $stub->getReferrer()));
					}
				} else {
					# Store the main object locally
					$dbw->update('text', array('old_text' => serialize($chunk), 'old_flags' => 'object,utf-8'), array('old_id' => $primaryOldid));

					# Store the stub objects
					for ($j = 1; $j < $thisChunkSize; $j++) {
						# Skip if not compressing and don't overwrite the first revision
						if ($stubs[$j] !== false && $revs[$i + $j]->rev_text_id != $primaryOldid) {
							$dbw->update('text', array('old_text' => serialize($stubs[$j]), 'old_flags' => 'object,utf-8'), array('old_id' => $revs[$i + $j]->rev_text_id));
						}
					}
				}
			}

			# Done, next
			$this->output("/");
			$dbw->commit(__METHOD__);
			$i += $thisChunkSize;
			wfWaitForSlaves();
		}
		$this->output("\n");
	}

	return true;
}
/**
 * Fetch a text row by old_id and return it normalized for output.
 *
 * May throw a database error if, say, the server dies during query.
 *
 * @param int $id old_id of the text row
 * @return string|bool Normalized text with CRs stripped, or false if
 *   the text could not be loaded.
 */
private function getTextDb($id) {
	global $wgContLang;

	$row = $this->db->selectRow(
		'text',
		array('old_text', 'old_flags'),
		array('old_id' => $id),
		__METHOD__
	);

	$text = Revision::getRevisionText($row);
	if ($text === false) {
		return false;
	}

	// Strip carriage returns, then apply content-language Unicode
	// normalization.
	return $wgContLang->normalize(str_replace("\r", "", $text));
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * On success the restored rows are removed from the archive table.
 *
 * @param array $timestamps Pass an empty array to restore all revisions,
 *   otherwise list the ones to undelete.
 * @return int|bool Number of revisions restored, or false if some of the
 *   requested timestamps could not be found in the archive.
 */
private function undeleteRevisions($timestamps) {
	global $wgParser, $wgDBtype;

	$restoreAll = empty($timestamps);
	$dbw =& wfGetDB(DB_MASTER);
	extract($dbw->tableNames('page', 'archive'));

	# Does this page already exist? We'll have to update it...
	$article = new Article($this->title);
	// Postgres does not support FOR UPDATE here the same way.
	$options = $wgDBtype == 'postgres' ? '' : 'FOR UPDATE';
	$page = $dbw->selectRow('page', array('page_id', 'page_latest'), array('page_namespace' => $this->title->getNamespace(), 'page_title' => $this->title->getDBkey()), __METHOD__, $options);
	if ($page) {
		# Page already exists. Import the history, and if necessary
		# we'll update the latest revision field in the record.
		$newid = 0;
		$pageId = $page->page_id;
		$previousRevId = $page->page_latest;
	} else {
		# Have to create a new article...
		$newid = $article->insertOn($dbw);
		$pageId = $newid;
		$previousRevId = 0;
	}

	if ($restoreAll) {
		$oldones = '1 = 1'; # All revisions...
	} else {
		// Quote each requested timestamp for the IN() clause.
		$oldts = implode(',', array_map(array(&$dbw, 'addQuotes'), array_map(array(&$dbw, 'timestamp'), $timestamps)));
		$oldones = "ar_timestamp IN ( {$oldts} )";
	}

	/**
	 * Restore each revision...
	 */
	$result = $dbw->select('archive', array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id'), array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), __METHOD__, array('ORDER BY' => 'ar_timestamp'));
	if ($dbw->numRows($result) < count($timestamps)) {
		wfDebug(__METHOD__ . ": couldn't find all requested rows\n");
		return false;
	}

	$revision = null;
	$newRevId = $previousRevId;
	$restored = 0;

	while ($row = $dbw->fetchObject($result)) {
		if ($row->ar_text_id) {
			// Revision was deleted in 1.5+; text is in
			// the regular text table, use the reference.
			// Specify null here so the text is
			// dereferenced for page length info if needed.
			$revText = null;
		} else {
			// Revision was deleted in 1.4 or earlier.
			// Text is squashed into the archive row, and
			// a new text table entry will be created for it.
			$revText = Revision::getRevisionText($row, 'ar_');
		}
		$revision = new Revision(array('page' => $pageId, 'id' => $row->ar_rev_id, 'text' => $revText, 'comment' => $row->ar_comment, 'user' => $row->ar_user, 'user_text' => $row->ar_user_text, 'timestamp' => $row->ar_timestamp, 'minor_edit' => $row->ar_minor_edit, 'text_id' => $row->ar_text_id));
		$newRevId = $revision->insertOn($dbw);
		$restored++;
	}

	if ($revision) {
		# FIXME: Update latest if newer as well...
		if ($newid) {
			// Attach the latest revision to the page...
			$article->updateRevisionOn($dbw, $revision, $previousRevId);
			// Update site stats, link tables, etc
			$article->createUpdates($revision);
		}

		if ($newid) {
			Article::onArticleCreate($this->title);
		} else {
			Article::onArticleEdit($this->title);
		}
	} else {
		# Something went terribly wrong!
		# NOTE(review): no rows matched but we fall through and delete from
		# the archive below anyway — confirm this is intended.
	}

	# Now that it's safely stored, take it out of the archive
	$dbw->delete('archive', array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), __METHOD__);

	return $restored;
}
/**
 * Round-trips a UTF-8 string through compressRevisionText() with
 * compression enabled: the flags must record utf-8 + gzip, the stored
 * blob must gzinflate back to the original, and getRevisionText() must
 * recover the original transparently.
 */
function testCompressRevisionTextUtf8Gzip() {
	global $wgCompressRevisions;

	// Fix: save and restore the global so this test no longer leaks
	// $wgCompressRevisions = true into subsequently-run tests.
	$oldCompressRevisions = $wgCompressRevisions;
	$wgCompressRevisions = true;

	$row = new stdClass();
	$row->old_text = "Wiki est l'école superieur !";
	$row->old_flags = Revision::compressRevisionText($row->old_text);
	$this->assertTrue(false !== strpos($row->old_flags, 'utf-8'), "Flags should contain 'utf-8'");
	$this->assertTrue(false !== strpos($row->old_flags, 'gzip'), "Flags should contain 'gzip'");
	$this->assertEquals("Wiki est l'école superieur !", gzinflate($row->old_text), "Direct check");
	$this->assertEquals("Wiki est l'école superieur !", Revision::getRevisionText($row), "getRevisionText");

	$wgCompressRevisions = $oldCompressRevisions;
}
/**
 * Lazy-load the revision's text.
 * Currently hardcoded to the 'text' table storage engine.
 *
 * @return string
 * @access private
 */
function loadText() {
	$fname = 'Revision::loadText';
	wfProfileIn($fname);

	// Try the replica first; if the row is missing (e.g. replication
	// lag after a fresh write), retry against the master.
	$dbr =& wfGetDB(DB_SLAVE);
	$textRow = $dbr->selectRow(
		'text',
		array('old_text', 'old_flags'),
		array('old_id' => $this->getTextId()),
		$fname
	);
	if (!$textRow) {
		$dbw =& wfGetDB(DB_MASTER);
		$textRow = $dbw->selectRow(
			'text',
			array('old_text', 'old_flags'),
			array('old_id' => $this->getTextId()),
			$fname
		);
	}

	$text = Revision::getRevisionText($textRow);
	wfProfileOut($fname);
	return $text;
}
/**
 * prop=revisions API: fetches revision metadata (and optionally the full
 * revision text) for the pages or revision IDs selected by the page set.
 *
 * Runs in one of three modes:
 *  - enumeration: all revisions of exactly one page, triggered by any of
 *    the limit/startid/endid/start/end/dir parameters;
 *  - multi-page: only the latest revision of each of several pages;
 *  - revids: exactly the revisions named by the revids= parameter.
 */
public function execute() {
	$limit = $startid = $endid = $start = $end = $dir = $prop = null;
	extract($this->extractRequestParams());

	// If any of those parameters are used, work in 'enumeration' mode.
	// Enum mode can only be used when exactly one page is provided.
	// Enumerating revisions on multiple pages make it extremelly
	// difficult to manage continuations and require additional sql indexes
	$enumRevMode = !is_null($limit) || !is_null($startid) || !is_null($endid) || $dir === 'newer' || !is_null($start) || !is_null($end);

	$pageSet = $this->getPageSet();
	$pageCount = $pageSet->getGoodTitleCount();
	$revCount = $pageSet->getRevisionCount();

	// Optimization -- nothing to do
	if ($revCount === 0 && $pageCount === 0) {
		return;
	}

	if ($revCount > 0 && $enumRevMode) {
		$this->dieUsage('The revids= parameter may not be used with the list options (limit, startid, endid, dirNewer, start, end).', 'revids');
	}

	if ($pageCount > 1 && $enumRevMode) {
		$this->dieUsage('titles, pageids or a generator was used to supply multiple pages, but the limit, startid, endid, dirNewer, start, and end parameters may only be used on a single page.', 'multpages');
	}

	$this->addTables('revision');
	$this->addFields(array('rev_id', 'rev_page', 'rev_text_id', 'rev_minor_edit'));
	$this->addWhere('rev_deleted=0');

	$showContent = false;
	if (!is_null($prop)) {
		$prop = array_flip($prop);
		$this->addFieldsIf('rev_timestamp', isset($prop['timestamp']));
		$this->addFieldsIf('rev_comment', isset($prop['comment']));
		if (isset($prop['user'])) {
			$this->addFields('rev_user');
			$this->addFields('rev_user_text');
		}
		if (isset($prop['content'])) {
			// Content requires joining the text table; rows become much
			// heavier, hence the reduced limits computed below.
			$this->addTables('text');
			$this->addWhere('rev_text_id=old_id');
			$this->addFields('old_id');
			$this->addFields('old_text');
			$this->addFields('old_flags');
			$showContent = true;
		}
	}

	$userMax = $showContent ? 50 : 500;
	$botMax = $showContent ? 200 : 10000;

	if ($enumRevMode) {
		// This is mostly to prevent parameter errors (and optimize sql?)
		if (!is_null($startid) && !is_null($start)) {
			$this->dieUsage('start and startid cannot be used together', 'badparams');
		}
		if (!is_null($endid) && !is_null($end)) {
			$this->dieUsage('end and endid cannot be used together', 'badparams');
		}

		// This code makes an assumption that sorting by rev_id and rev_timestamp produces
		// the same result. This way users may request revisions starting at a given time,
		// but to page through results use the rev_id returned after each page.
		// Switching to rev_id removes the potential problem of having more than
		// one row with the same timestamp for the same page.
		// The order needs to be the same as start parameter to avoid SQL filesort.
		if (is_null($startid)) {
			$this->addWhereRange('rev_timestamp', $dir, $start, $end);
		} else {
			$this->addWhereRange('rev_id', $dir, $startid, $endid);
		}

		// must manually initialize unset limit
		if (is_null($limit)) {
			$limit = 10;
		}
		$this->validateLimit($this->encodeParamName('limit'), $limit, 1, $userMax, $botMax);

		// There is only one ID, use it
		$this->addWhereFld('rev_page', current(array_keys($pageSet->getGoodTitles())));
	} elseif ($pageCount > 0) {
		// When working in multi-page non-enumeration mode,
		// limit to the latest revision only
		$this->addTables('page');
		$this->addWhere('page_id=rev_page');
		$this->addWhere('page_latest=rev_id');
		$this->validateLimit('page_count', $pageCount, 1, $userMax, $botMax);

		// Get all page IDs
		$this->addWhereFld('page_id', array_keys($pageSet->getGoodTitles()));

		$limit = $pageCount; // assumption testing -- we should never get more then $pageCount rows.
	} elseif ($revCount > 0) {
		$this->validateLimit('rev_count', $revCount, 1, $userMax, $botMax);

		// Get all revision IDs
		$this->addWhereFld('rev_id', array_keys($pageSet->getRevisionIDs()));

		$limit = $revCount; // assumption testing -- we should never get more then $revCount rows.
	} else {
		ApiBase::dieDebug(__METHOD__, 'param validation?');
	}

	// Fetch one row beyond the limit to detect that more results exist.
	$this->addOption('LIMIT', $limit + 1);

	$data = array();
	$count = 0;
	$res = $this->select(__METHOD__);
	$db =& $this->getDB();
	while ($row = $db->fetchObject($res)) {
		if (++$count > $limit) {
			// We've reached the one extra which shows that there are additional pages to be had. Stop here...
			if (!$enumRevMode) {
				ApiBase::dieDebug(__METHOD__, 'Got more rows then expected');
			} // bug report
			$this->setContinueEnumParameter('startid', $row->rev_id);
			break;
		}
		$vals = $this->addRowInfo('rev', $row);
		if ($vals) {
			if ($showContent) {
				ApiResult::setContent($vals, Revision::getRevisionText($row));
			}
			$this->getResult()->addValue(array('query', 'pages', intval($row->rev_page), 'revisions'), intval($row->rev_id), $vals);
		}
	}
	$db->freeResult($res);

	// Ensure that all revisions are shown as '<rev>' elements
	$result = $this->getResult();
	if ($result->getIsRawMode()) {
		$data =& $result->getData();
		foreach ($data['query']['pages'] as &$page) {
			if (is_array($page) && array_key_exists('revisions', $page)) {
				$result->setIndexedTagName($page['revisions'], 'rev');
			}
		}
	}
}
/**
 * Dumps a <revision> section on the output stream, with
 * data filled in from the given database row.
 *
 * Builds the whole fragment in a buffer and emits it with a single
 * print; registered users get <username>/<id>, anonymous ones <ip>.
 *
 * @param object $row
 * @access private
 */
function dumpRev($row) {
	$fname = 'WikiExporter::dumpRev';
	wfProfileIn($fname);

	$xml = " <revision>\n";
	$xml .= " " . wfElement('id', null, $row->rev_id) . "\n";
	$xml .= " " . wfElement('timestamp', null, wfTimestamp2ISO8601($row->rev_timestamp)) . "\n";
	$xml .= " <contributor>";
	$xml .= $row->rev_user
		? wfElementClean('username', null, $row->rev_user_text) . wfElement('id', null, $row->rev_user)
		: wfElementClean('ip', null, $row->rev_user_text);
	$xml .= "</contributor>\n";

	if ($row->rev_minor_edit) {
		$xml .= " <minor/>\n";
	}
	if ($row->rev_comment != '') {
		$xml .= " " . wfElementClean('comment', null, $row->rev_comment) . "\n";
	}

	$xml .= " " . wfElementClean('text', array('xml:space' => 'preserve'), Revision::getRevisionText($row)) . "\n";
	$xml .= " </revision>\n";
	print $xml;

	wfProfileOut($fname);
	if (isset($this->revCallback)) {
		call_user_func($this->revCallback, $row);
	}
}
/**
 * Checks if $this can be moved to a given Title
 * - Selects for update, so don't call it unless you mean business
 *
 * Returns true only when the target page is a redirect whose first
 * wikilink points back to this page (or to the target itself — a broken
 * self-redirect) AND the target has no edit history beyond its latest
 * revision.
 *
 * @param Title &$nt the new title to check
 * @return bool
 */
public function isValidMoveTarget($nt) {
	$fname = 'Title::isValidMoveTarget';
	$dbw = wfGetDB(DB_MASTER);

	# Is it a redirect?
	# (Also fetches the latest text in the same locked read.)
	$id = $nt->getArticleID();
	$obj = $dbw->selectRow(array('page', 'revision', 'text'), array('page_is_redirect', 'old_text', 'old_flags'), array('page_id' => $id, 'page_latest=rev_id', 'rev_text_id=old_id'), $fname, 'FOR UPDATE');

	if (!$obj || 0 == $obj->page_is_redirect) {
		# Not a redirect
		wfDebug(__METHOD__ . ": not a redirect\n");
		return false;
	}
	$text = Revision::getRevisionText($obj);

	# Does the redirect point to the source?
	# Or is it a broken self-redirect, usually caused by namespace collisions?
	# The pattern captures the first [[link]] target, stopping at '|' or ']'.
	$m = array();
	if (preg_match("/\\[\\[\\s*([^\\]\\|]*)]]/", $text, $m)) {
		$redirTitle = Title::newFromText($m[1]);
		if (!is_object($redirTitle) || $redirTitle->getPrefixedDBkey() != $this->getPrefixedDBkey() && $redirTitle->getPrefixedDBkey() != $nt->getPrefixedDBkey()) {
			wfDebug(__METHOD__ . ": redirect points to other page\n");
			return false;
		}
	} else {
		# Fail safe
		wfDebug(__METHOD__ . ": failsafe\n");
		return false;
	}

	# Does the article have a history?
	$row = $dbw->selectRow(array('page', 'revision'), array('rev_id'), array('page_namespace' => $nt->getNamespace(), 'page_title' => $nt->getDBkey(), 'page_id=rev_page AND page_latest != rev_id'), $fname, 'FOR UPDATE');

	# Return true if there was no history
	return $row === false;
}
/**
 * Fetches one revision's text straight from the text table, then strips
 * carriage returns and applies UTF-8 cleanup.
 *
 * May throw a database error if, say, the server dies during query.
 *
 * @param int $id The old_id of the text row
 * @return string|false Normalized text, or false when the text could
 *  not be loaded/expanded.
 */
private function getTextDb($id) {
	$textRow = $this->db->selectRow(
		'text',
		array('old_text', 'old_flags'),
		array('old_id' => intval($id)),
		'TextPassDumper::getText'
	);

	$raw = Revision::getRevisionText($textRow);
	if ($raw === false) {
		return false;
	}

	// Normalize line endings, then clean up invalid UTF-8 sequences.
	return UtfNormal::cleanUp(str_replace("\r", "", $raw));
}
/**
 * Returns text contents by given text_id from foreign wiki
 *
 * @see Revision::loadText
 * @param $textId int Foreign wiki text id
 * @return String|false Text content, or false when unavailable
 */
protected function getContentByTextId($textId) {
	global $wgMemc;

	$key = wfForeignMemcKey($this->getDatabaseName(), null, 'revisiontext', 'textid', $textId);
	$text = $wgMemc->get($key);
	// NOTE(review): empty() also rejects cached '' / '0' values, forcing a
	// re-fetch for those; nothing in this method writes the key back, so
	// it is presumably populated elsewhere -- verify against callers.
	if (!empty($text)) {
		return $text;
	}

	// copied from Article::loadText()
	// Text data is immutable; check slaves first.
	// (Fix: removed a dead `$row = null; if (!$row)` wrapper that always
	// executed this slave query anyway.)
	$dbr = $this->getConnection(DB_SLAVE);
	$row = $dbr->selectRow('text', array('old_text', 'old_flags'), array('old_id' => $textId), __METHOD__);

	if (!$row && wfGetLB()->getServerCount() > 1) {
		// Possible slave lag! Fall back to the master.
		$dbw = $this->getConnection(DB_MASTER);
		$row = $dbw->selectRow('text', array('old_text', 'old_flags'), array('old_id' => $textId), __METHOD__);
	}

	$text = Revision::getRevisionText($row);
	if (!is_string($text)) {
		$text = false;
	}
	return $text;
}
/**
 * Fetches the latest text of every page whose title prefix-matches one
 * of $this->titles, grouped by the titles' namespaces. Titles in NS_MAIN
 * are instead searched across all $wgTranslateMessageNamespaces. Pages
 * whose last '/'-separated title segment is in $this->skipLanguages are
 * excluded.
 *
 * @return array List of array( Title, string ) pairs: page title plus
 *  the text of its latest revision.
 */
private function getPages() {
	global $wgTranslateMessageNamespaces;
	$dbr = wfGetDB(DB_SLAVE);

	// Collect one prefix-LIKE pattern per title, bucketed by namespace.
	$search = array();
	foreach ($this->titles as $title) {
		$title = Title::newFromText($title);
		$ns = $title->getNamespace();
		if (!isset($search[$ns])) {
			$search[$ns] = array();
		}
		$search[$ns][] = 'page_title' . $dbr->buildLike($title->getDBKey(), $dbr->anyString());
	}

	// One AND-group per namespace bucket, OR-ed together below.
	$title_conds = array();
	foreach ($search as $ns => $names) {
		if ($ns == NS_MAIN) {
			$ns = $wgTranslateMessageNamespaces;
		}
		$titles = $dbr->makeList($names, LIST_OR);
		$title_conds[] = $dbr->makeList(array('page_namespace' => $ns, $titles), LIST_AND);
	}

	$conds = array('page_latest=rev_id', 'rev_text_id=old_id', $dbr->makeList($title_conds, LIST_OR));

	if (count($this->skipLanguages)) {
		// Filter out skipped languages by the title's trailing subpage code.
		$skiplist = $dbr->makeList($this->skipLanguages);
		$conds[] = "substring_index(page_title, '/', -1) NOT IN ({$skiplist})";
	}

	$rows = $dbr->select(array('page', 'revision', 'text'), array('page_title', 'page_namespace', 'old_text', 'old_flags'), $conds, __METHOD__);

	$messagesContents = array();
	foreach ($rows as $row) {
		$title = Title::makeTitle($row->page_namespace, $row->page_title);
		$messagesContents[] = array($title, Revision::getRevisionText($row));
	}
	$rows->free();

	return $messagesContents;
}
/**
 * Returns the translation text: taken from the attached database row
 * when one is set, otherwise falling back to $this->infile().
 */
public function translation() {
	return isset($this->row)
		? Revision::getRevisionText($this->row)
		: $this->infile();
}
/**
 * Dumps a <revision> section on the output stream, with
 * data filled in from the given database row.
 *
 * Fields suppressed via rev_deleted bits (contributor, comment, text)
 * are emitted as empty elements with a deleted="deleted" attribute.
 * When the row carries no old_text column, a stub <text> element that
 * only references rev_text_id is written instead of the content.
 *
 * @param $row object
 * @return string
 * @access private
 */
function writeRevision($row) {
	$fname = 'WikiExporter::dumpRev';
	wfProfileIn($fname);

	$out = " <revision>\n";
	$out .= " " . Xml::element('id', null, strval($row->rev_id)) . "\n";
	$out .= $this->writeTimestamp($row->rev_timestamp);

	if ($row->rev_deleted & Revision::DELETED_USER) {
		$out .= " " . Xml::element('contributor', array('deleted' => 'deleted')) . "\n";
	} else {
		$out .= $this->writeContributor($row->rev_user, $row->rev_user_text);
	}

	if ($row->rev_minor_edit) {
		$out .= " <minor/>\n";
	}
	if ($row->rev_deleted & Revision::DELETED_COMMENT) {
		$out .= " " . Xml::element('comment', array('deleted' => 'deleted')) . "\n";
	} elseif ($row->rev_comment != '') {
		$out .= " " . Xml::elementClean('comment', null, strval($row->rev_comment)) . "\n";
	}

	if ($row->rev_deleted & Revision::DELETED_TEXT) {
		$out .= " " . Xml::element('text', array('deleted' => 'deleted')) . "\n";
	} elseif (isset($row->old_text)) {
		// Raw text from the database may have invalid chars
		$text = strval(Revision::getRevisionText($row));
		$out .= " " . Xml::elementClean('text', array('xml:space' => 'preserve'), strval($text)) . "\n";
	} else {
		// Stub output
		$out .= " " . Xml::element('text', array('id' => $row->rev_text_id), "") . "\n";
	}

	$out .= " </revision>\n";
	wfProfileOut($fname);
	return $out;
}
/**
 * Loads cacheable messages from the database. Messages bigger than
 * $wgMaxMsgCacheEntrySize are assigned a special value, and are loaded
 * on-demand from the database later.
 *
 * @param string $code Language code
 * @param integer $mode Use MessageCache::FOR_UPDATE to skip process cache
 * @return array Loaded messages for storing in caches
 */
function loadFromDB($code, $mode = null) {
	global $wgMaxMsgCacheEntrySize, $wgLanguageCode, $wgAdaptiveMessageCache;

	// FOR_UPDATE reads from the master to see uncommitted-to-slave data.
	$dbr = wfGetDB($mode == self::FOR_UPDATE ? DB_MASTER : DB_SLAVE);
	$cache = array();

	# Common conditions
	$conds = array('page_is_redirect' => 0, 'page_namespace' => NS_MEDIAWIKI);

	$mostused = array();
	if ($wgAdaptiveMessageCache && $code !== $wgLanguageCode) {
		// Restrict to the keys present in the content-language cache,
		// rewritten as "key/$code" subpage titles.
		if (!isset($this->mCache[$wgLanguageCode])) {
			$this->load($wgLanguageCode);
		}
		$mostused = array_keys($this->mCache[$wgLanguageCode]);
		foreach ($mostused as $key => $value) {
			$mostused[$key] = "{$value}/{$code}";
		}
	}

	if (count($mostused)) {
		$conds['page_title'] = $mostused;
	} elseif ($code !== $wgLanguageCode) {
		$conds[] = 'page_title' . $dbr->buildLike($dbr->anyString(), '/', $code);
	} else {
		# Effectively disallows use of '/' character in NS_MEDIAWIKI for uses
		# other than language code.
		$conds[] = 'page_title NOT' . $dbr->buildLike($dbr->anyString(), '/', $dbr->anyString());
	}

	# Conditions to fetch oversized pages to ignore them
	$bigConds = $conds;
	$bigConds[] = 'page_len > ' . intval($wgMaxMsgCacheEntrySize);

	# Load titles for all oversized pages in the MediaWiki namespace
	$res = $dbr->select('page', 'page_title', $bigConds, __METHOD__ . "({$code})-big");
	foreach ($res as $row) {
		$cache[$row->page_title] = '!TOO BIG';
	}

	# Conditions to load the remaining pages with their contents
	$smallConds = $conds;
	$smallConds[] = 'page_latest=rev_id';
	$smallConds[] = 'rev_text_id=old_id';
	$smallConds[] = 'page_len <= ' . intval($wgMaxMsgCacheEntrySize);

	$res = $dbr->select(array('page', 'revision', 'text'), array('page_title', 'old_text', 'old_flags'), $smallConds, __METHOD__ . "({$code})-small");
	foreach ($res as $row) {
		$text = Revision::getRevisionText($row);
		if ($text === false) {
			// Failed to fetch data; possible ES errors?
			// Store a marker to fetch on-demand as a workaround...
			$entry = '!TOO BIG';
			wfDebugLog('MessageCache', __METHOD__ . ": failed to load message page text for {$row->page_title} ({$code})");
		} else {
			// Leading space marks "present"; stripped by consumers elsewhere.
			$entry = ' ' . $text;
		}
		$cache[$row->page_title] = $entry;
	}

	$cache['VERSION'] = MSG_CACHE_VERSION;
	ksort($cache);
	$cache['HASH'] = md5(serialize($cache));
	$cache['EXPIRY'] = wfTimestamp(TS_MW, time() + $this->mExpiry);

	return $cache;
}
/**
 * Replaces or fuzzies all translations of a message.
 *
 * Operates on every subpage of the base $title (one per language): the
 * 'en' page and the documentation-language page receive the imported
 * $message text verbatim, while every other language's current text is
 * marked fuzzy before being re-saved through self::doImport().
 *
 * Fix: the `global $wgTranslateDocumentationLanguageCode;` declaration
 * was previously inside the foreach loop body; it is now hoisted to the
 * top of the function alongside the other global.
 *
 * @static
 * @param Title $title Base title of the message
 * @param $message string Imported (source) text
 * @param $comment string Edit summary used for the saves
 * @param $user User|null Editing user; FuzzyBot is used when null
 * @param int $editFlags Flags passed through to self::doImport()
 * @return array|String Error message string when the current user lacks
 *  the translate-manage right; otherwise a message spec array describing
 *  the processed pages.
 */
public static function doFuzzy( $title, $message, $comment, $user, $editFlags = 0 ) {
	global $wgUser, $wgTranslateDocumentationLanguageCode;

	if ( !$wgUser->isAllowed( 'translate-manage' ) ) {
		return wfMsg( 'badaccess-group0' );
	}

	$dbw = wfGetDB( DB_MASTER );

	// Work on all subpages of base title.
	$messageInfo = TranslateEditAddons::figureMessage( $title );
	$titleText = $messageInfo[0];

	$conds = array(
		'page_namespace' => $title->getNamespace(),
		'page_latest=rev_id',
		'rev_text_id=old_id',
		'page_title' . $dbw->buildLike( "$titleText/", $dbw->anyString() ),
	);

	$rows = $dbw->select(
		array( 'page', 'revision', 'text' ),
		array( 'page_title', 'page_namespace', 'old_text', 'old_flags' ),
		$conds,
		__METHOD__
	);

	// Edit with fuzzybot if there is no user.
	if ( !$user ) {
		$user = FuzzyBot::getUser();
	}

	// Process all rows.
	$changed = array();
	foreach ( $rows as $row ) {
		$ttitle = Title::makeTitle( $row->page_namespace, $row->page_title );

		// No fuzzy for English original or documentation language code.
		if ( $ttitle->getSubpageText() == 'en'
			|| $ttitle->getSubpageText() == $wgTranslateDocumentationLanguageCode
		) {
			// Use imported text, not database text.
			$text = $message;
		} else {
			$text = Revision::getRevisionText( $row );
			$text = self::makeTextFuzzy( $text );
		}

		// Do actual import
		$changed[] = self::doImport( $ttitle, $text, $comment, $user, $editFlags );
	}

	// Format return text
	$text = '';
	foreach ( $changed as $c ) {
		$key = array_shift( $c );
		$text .= "* " . wfMsgExt( $key, array(), $c ) . "\n";
	}

	return array( 'translate-manage-import-fuzzy', "\n" . $text );
}
/**
 * Move an orphan text_id to the new cluster
 *
 * Loads each orphan row's (uncompressed) text and batches it into
 * CgzCopyTransaction blobs, committing a blob and starting a fresh
 * transaction whenever addItem() reports the current blob is full.
 * NOTE(review): this assumes CgzCopyTransaction::addItem() stores the
 * item even when it returns false ("blob full"); if it does not, the
 * row that triggered the commit would be silently dropped -- verify
 * against CgzCopyTransaction.
 *
 * @param array $textIds List of old_id values to move
 */
function doOrphanList($textIds) {
	// Finish incomplete moves
	if (!$this->copyOnly) {
		$this->finishIncompleteMoves(array('bt_text_id' => $textIds));
		$this->syncDBs();
	}

	$trx = new CgzCopyTransaction($this, $this->orphanBlobClass);
	// Only rows tracked as unmoved (bt_moved = 0) are candidates.
	$res = wfGetDB(DB_SLAVE)->select(array('text', 'blob_tracking'), array('old_id', 'old_text', 'old_flags'), array('old_id' => $textIds, 'bt_text_id=old_id', 'bt_moved' => 0), __METHOD__, array('DISTINCT'));
	foreach ($res as $row) {
		$text = Revision::getRevisionText($row);
		if ($text === false) {
			$this->critical("Error: cannot load revision text for old_id={$row->old_id}");
			continue;
		}

		if (!$trx->addItem($text, $row->old_id)) {
			$this->debug("[orphan]: committing blob with " . $trx->getSize() . " rows");
			$trx->commit();
			$trx = new CgzCopyTransaction($this, $this->orphanBlobClass);
			$this->waitForSlaves();
		}
	}
	// Flush the final, partially-filled blob.
	$this->debug("[orphan]: committing blob with " . $trx->getSize() . " rows");
	$trx->commit();
}
/**
 * list=deletedrevs API: enumerates deleted revisions from the archive
 * table.
 *
 * Operates in three modes:
 *  'revs': list deleted revs for the given titles;
 *  'user': list deleted revs by a certain user;
 *  'all':  list all deleted revs, optionally filtered by namespace.
 * Listing anything requires the 'deletedhistory' right; fetching content
 * additionally requires 'undelete'.
 */
public function execute() {
	global $wgUser;
	// Before doing anything at all, let's check permissions
	if (!$wgUser->isAllowed('deletedhistory')) {
		$this->dieUsage('You don\'t have permission to view deleted revision information', 'permissiondenied');
	}

	$db = $this->getDB();
	$params = $this->extractRequestParams(false);
	$prop = array_flip($params['prop']);
	$fld_revid = isset($prop['revid']);
	$fld_user = isset($prop['user']);
	$fld_comment = isset($prop['comment']);
	$fld_minor = isset($prop['minor']);
	$fld_len = isset($prop['len']);
	$fld_content = isset($prop['content']);
	$fld_token = isset($prop['token']);

	$result = $this->getResult();
	$pageSet = $this->getPageSet();
	$titles = $pageSet->getTitles();
	$data = array();

	// This module operates in three modes:
	// 'revs': List deleted revs for certain titles
	// 'user': List deleted revs by a certain user
	// 'all': List all deleted revs
	$mode = 'all';
	if (count($titles) > 0) {
		$mode = 'revs';
	} else {
		if (!is_null($params['user'])) {
			$mode = 'user';
		}
	}

	if (!is_null($params['user']) && !is_null($params['excludeuser'])) {
		$this->dieUsage('user and excludeuser cannot be used together', 'badparams');
	}

	$this->addTables('archive');
	$this->addWhere('ar_deleted = 0');
	$this->addFields(array('ar_title', 'ar_namespace', 'ar_timestamp'));
	if ($fld_revid) {
		$this->addFields('ar_rev_id');
	}
	if ($fld_user) {
		$this->addFields('ar_user_text');
	}
	if ($fld_comment) {
		$this->addFields('ar_comment');
	}
	if ($fld_minor) {
		$this->addFields('ar_minor_edit');
	}
	if ($fld_len) {
		$this->addFields('ar_len');
	}
	if ($fld_content) {
		$this->addTables('text');
		$this->addFields(array('ar_text', 'ar_text_id', 'old_text', 'old_flags'));
		$this->addWhere('ar_text_id = old_id');

		// This also means stricter restrictions
		if (!$wgUser->isAllowed('undelete')) {
			$this->dieUsage('You don\'t have permission to view deleted revision content', 'permissiondenied');
		}
	}
	// Check limits
	$userMax = $fld_content ? ApiBase::LIMIT_SML1 : ApiBase::LIMIT_BIG1;
	$botMax = $fld_content ? ApiBase::LIMIT_SML2 : ApiBase::LIMIT_BIG2;

	$limit = $params['limit'];
	if ($limit == 'max') {
		$limit = $this->getMain()->canApiHighLimits() ? $botMax : $userMax;
		$this->getResult()->addValue('limits', $this->getModuleName(), $limit);
	}
	$this->validateLimit('limit', $limit, 1, $userMax, $botMax);

	if ($fld_token) {
		// Undelete tokens are identical for all pages, so we cache one here
		$token = $wgUser->editToken();
	}

	// We need a custom WHERE clause that matches all titles.
	if ($mode == 'revs') {
		$lb = new LinkBatch($titles);
		$where = $lb->constructSet('ar', $db);
		$this->addWhere($where);
	} elseif ($mode == 'all') {
		$this->addWhereFld('ar_namespace', $params['namespace']);
		if (!is_null($params['from'])) {
			$from = $this->getDB()->strencode($this->titleToKey($params['from']));
			$this->addWhere("ar_title >= '{$from}'");
		}
	}

	if (!is_null($params['user'])) {
		$this->addWhereFld('ar_user_text', $params['user']);
	} elseif (!is_null($params['excludeuser'])) {
		$this->addWhere('ar_user_text != ' . $this->getDB()->addQuotes($params['excludeuser']));
	}

	if (!is_null($params['continue']) && ($mode == 'all' || $mode == 'revs')) {
		// Continuation is a 'ns|title|timestamp' triple; rebuild the
		// lexicographic resume condition from it.
		$cont = explode('|', $params['continue']);
		if (count($cont) != 3) {
			$this->dieUsage("Invalid continue param. You should pass the original value returned by the previous query", "badcontinue");
		}
		$ns = intval($cont[0]);
		$title = $this->getDB()->strencode($this->titleToKey($cont[1]));
		$ts = $this->getDB()->strencode($cont[2]);
		$op = $params['dir'] == 'newer' ? '>' : '<';
		$this->addWhere("ar_namespace {$op} {$ns} OR " . "(ar_namespace = {$ns} AND " . "(ar_title {$op} '{$title}' OR " . "(ar_title = '{$title}' AND " . "ar_timestamp = '{$ts}')))");
	}

	// Fetch one row beyond the limit to detect continuation.
	$this->addOption('LIMIT', $limit + 1);
	$this->addOption('USE INDEX', array('archive' => $mode == 'user' ? 'usertext_timestamp' : 'name_title_timestamp'));
	if ($mode == 'all') {
		if ($params['unique']) {
			$this->addOption('GROUP BY', 'ar_title');
			$this->addOption('ORDER BY', 'ar_title');
		} else {
			$this->addOption('ORDER BY', 'ar_title, ar_timestamp');
		}
	} else {
		if ($mode == 'revs') {
			// Sort by ns and title in the same order as timestamp for efficiency
			$this->addWhereRange('ar_namespace', $params['dir'], null, null);
			$this->addWhereRange('ar_title', $params['dir'], null, null);
		}
		$this->addWhereRange('ar_timestamp', $params['dir'], $params['start'], $params['end']);
	}

	$res = $this->select(__METHOD__);
	$pageMap = array(); // Maps ns&title to (fake) pageid
	$count = 0;
	$newPageID = 0;
	while ($row = $db->fetchObject($res)) {
		if (++$count > $limit) {
			// We've had enough
			if ($mode == 'all' || $mode == 'revs') {
				$this->setContinueEnumParameter('continue', intval($row->ar_namespace) . '|' . $this->keyToTitle($row->ar_title) . '|' . $row->ar_timestamp);
			} else {
				$this->setContinueEnumParameter('start', wfTimestamp(TS_ISO_8601, $row->ar_timestamp));
			}
			break;
		}

		$rev = array();
		$rev['timestamp'] = wfTimestamp(TS_ISO_8601, $row->ar_timestamp);
		if ($fld_revid) {
			$rev['revid'] = intval($row->ar_rev_id);
		}
		if ($fld_user) {
			$rev['user'] = $row->ar_user_text;
		}
		if ($fld_comment) {
			$rev['comment'] = $row->ar_comment;
		}
		if ($fld_minor) {
			if ($row->ar_minor_edit == 1) {
				$rev['minor'] = '';
			}
		}
		if ($fld_len) {
			$rev['len'] = $row->ar_len;
		}
		if ($fld_content) {
			ApiResult::setContent($rev, Revision::getRevisionText($row));
		}

		if (!isset($pageMap[$row->ar_namespace][$row->ar_title])) {
			// First revision seen for this page: allocate a fake page ID
			// and emit the page entry with its first revision.
			$pageID = $newPageID++;
			$pageMap[$row->ar_namespace][$row->ar_title] = $pageID;
			$t = Title::makeTitle($row->ar_namespace, $row->ar_title);
			$a['revisions'] = array($rev);
			$result->setIndexedTagName($a['revisions'], 'rev');
			ApiQueryBase::addTitleInfo($a, $t);
			if ($fld_token) {
				$a['token'] = $token;
			}
			$fit = $result->addValue(array('query', $this->getModuleName()), $pageID, $a);
		} else {
			// Append this revision to the page entry emitted earlier.
			$pageID = $pageMap[$row->ar_namespace][$row->ar_title];
			$fit = $result->addValue(array('query', $this->getModuleName(), $pageID, 'revisions'), null, $rev);
		}
		if (!$fit) {
			if ($mode == 'all' || $mode == 'revs') {
				$this->setContinueEnumParameter('continue', intval($row->ar_namespace) . '|' . $this->keyToTitle($row->ar_title) . '|' . $row->ar_timestamp);
			} else {
				$this->setContinueEnumParameter('start', wfTimestamp(TS_ISO_8601, $row->ar_timestamp));
			}
			break;
		}
	}
	$db->freeResult($res);
	$result->setIndexedTagName_internal(array('query', $this->getModuleName()), 'page');
}