/**
 * Find every page whose page_latest field is 0 and report it; with the
 * --fix option, repoint page_latest at the page's newest revision.
 * Without --fix this is a dry run that only prints what it would change.
 */
public function execute() {
    $this->output("Looking for pages with page_latest set to 0...\n");

    $dbw = wfGetDB(DB_MASTER);
    // All page rows currently lacking a latest-revision pointer.
    $zeroLatest = $dbw->select(
        'page',
        array('page_id', 'page_namespace', 'page_title'),
        array('page_latest' => 0),
        __METHOD__
    );

    $fix = $this->hasOption('fix');
    $processed = 0;
    foreach ($zeroLatest as $pageRow) {
        $pageId = intval($pageRow->page_id);
        $pageTitle = Title::makeTitle($pageRow->page_namespace, $pageRow->page_title);
        $name = $pageTitle->getPrefixedText();

        // Newest revision timestamp on record for this page, if any.
        $latestTime = $dbw->selectField(
            'revision',
            'MAX(rev_timestamp)',
            array('rev_page' => $pageId),
            __METHOD__
        );
        if (!$latestTime) {
            $this->output(wfWikiID() . " {$pageId} [[{$name}]] can't find latest rev time?!\n");
            continue;
        }

        $revision = Revision::loadFromTimestamp($dbw, $pageTitle, $latestTime);
        if (is_null($revision)) {
            $this->output(wfWikiID() . " {$pageId} [[{$name}]] latest time {$latestTime}, can't find revision id\n");
            continue;
        }

        $id = $revision->getId();
        $this->output(wfWikiID() . " {$pageId} [[{$name}]] latest time {$latestTime}, rev id {$id}\n");

        if ($fix) {
            // Repoint page_latest (and related page fields) at this revision.
            $article = new Article($pageTitle);
            $article->updateRevisionOn($dbw, $revision);
        }
        $processed++;
    }
    $dbw->freeResult($zeroLatest);

    $this->output("Done! Processed {$processed} pages.\n");
    if (!$fix) {
        $this->output("This was a dry run; rerun with --fix to update page_latest.\n");
    }
}
/**
 * Carry out the history merge: move revisions of the source ("target")
 * page that are older than the cutoff timestamp onto the destination
 * page, and turn the source into a redirect if it has no revisions left.
 *
 * Uses $this->mTargetID, $this->mDestID, $this->mTimestamp and
 * $this->mComment, which are expected to be set up by the caller.
 *
 * @return bool true on success, false on any validation or DB failure
 */
function merge() {
    global $wgOut, $wgUser;
    # Get the titles directly from the IDs, in case the target page params
    # were spoofed. The queries are done based on the IDs, so it's best to
    # keep it consistent...
    $targetTitle = Title::newFromID($this->mTargetID);
    $destTitle = Title::newFromID($this->mDestID);
    if (is_null($targetTitle) || is_null($destTitle)) {
        return false; // validate these
    }
    // Merging a page into itself makes no sense.
    if ($targetTitle->getArticleId() == $destTitle->getArticleId()) {
        return false;
    }
    # Verify that this timestamp is valid
    # Must be older than the destination page
    $dbw = wfGetDB(DB_MASTER);
    # Get timestamp into DB format
    $this->mTimestamp = $this->mTimestamp ? $dbw->timestamp($this->mTimestamp) : '';
    # Max timestamp should be min of destination page
    $maxtimestamp = $dbw->selectField('revision', 'MIN(rev_timestamp)',
        array('rev_page' => $this->mDestID), __METHOD__);
    # Destination page must exist with revisions
    if (!$maxtimestamp) {
        $wgOut->addWikiMsg('mergehistory-fail');
        return false;
    }
    # Get the latest timestamp of the source
    $lasttimestamp = $dbw->selectField(array('page', 'revision'), 'rev_timestamp',
        array('page_id' => $this->mTargetID, 'page_latest = rev_id'), __METHOD__);
    # $this->mTimestamp must be older than $maxtimestamp
    if ($this->mTimestamp >= $maxtimestamp) {
        $wgOut->addWikiMsg('mergehistory-fail');
        return false;
    }
    # Update the revisions
    # FIX: the cutoff timestamp was previously interpolated into the SQL
    # unquoted; run it through addQuotes() so the WHERE clause is valid
    # (and not injectable) for string-format timestamps.
    if ($this->mTimestamp) {
        $timewhere = 'rev_timestamp <= ' . $dbw->addQuotes($this->mTimestamp);
        $TimestampLimit = wfTimestamp(TS_MW, $this->mTimestamp);
    } else {
        $timewhere = 'rev_timestamp <= ' . $dbw->addQuotes($maxtimestamp);
        $TimestampLimit = wfTimestamp(TS_MW, $lasttimestamp);
    }
    # Do the moving...
    $dbw->update('revision', array('rev_page' => $this->mDestID),
        array('rev_page' => $this->mTargetID, $timewhere), __METHOD__);
    $count = $dbw->affectedRows();
    # Make the source page a redirect if no revisions are left
    $haveRevisions = $dbw->selectField('revision', 'rev_timestamp',
        array('rev_page' => $this->mTargetID), __METHOD__, array('FOR UPDATE'));
    if (!$haveRevisions) {
        if ($this->mComment) {
            $comment = wfMsgForContent('mergehistory-comment',
                $targetTitle->getPrefixedText(), $destTitle->getPrefixedText(), $this->mComment);
        } else {
            $comment = wfMsgForContent('mergehistory-autocomment',
                $targetTitle->getPrefixedText(), $destTitle->getPrefixedText());
        }
        $mwRedir = MagicWord::get('redirect');
        $redirectText = $mwRedir->getSynonym(0) . ' [[' . $destTitle->getPrefixedText() . "]]\n";
        $redirectArticle = new Article($targetTitle);
        $redirectRevision = new Revision(array(
            'page' => $this->mTargetID,
            'comment' => $comment,
            'text' => $redirectText));
        $redirectRevision->insertOn($dbw);
        $redirectArticle->updateRevisionOn($dbw, $redirectRevision);
        # Now, we record the link from the redirect to the new title.
        # It should have no other outgoing links...
        # FIX: these rows belong to the source (now-redirect) page, so
        # pl_from must be the target page ID, not the destination ID.
        $dbw->delete('pagelinks', array('pl_from' => $this->mTargetID), __METHOD__);
        $dbw->insert('pagelinks', array(
            'pl_from' => $this->mTargetID,
            'pl_namespace' => $destTitle->getNamespace(),
            'pl_title' => $destTitle->getDBkey()), __METHOD__);
    } else {
        $targetTitle->invalidateCache(); // update histories
    }
    $destTitle->invalidateCache(); // update histories
    # Check if this did anything
    if (!$count) {
        $wgOut->addWikiMsg('mergehistory-fail');
        return false;
    }
    # Update our logs
    $log = new LogPage('merge');
    $log->addEntry('merge', $targetTitle, $this->mComment,
        array($destTitle->getPrefixedText(), $TimestampLimit));
    $wgOut->addHTML(wfMsgExt('mergehistory-success', array('parseinline'),
        $targetTitle->getPrefixedText(), $destTitle->getPrefixedText(), $count));
    wfRunHooks('ArticleMergeComplete', array($targetTitle, $destTitle));
    return true;
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * @param array $timestamps Pass an empty array to restore all revisions, otherwise list the ones to undelete.
 *
 * @return int|false Number of revisions restored, or false if not all
 *   requested archive rows could be found.
 */
private function undeleteRevisions($timestamps) {
    global $wgParser, $wgDBtype;
    $restoreAll = empty($timestamps);
    $dbw =& wfGetDB(DB_MASTER);
    // Pulls $page and $archive table-name locals into scope.
    extract($dbw->tableNames('page', 'archive'));
    # Does this page already exist? We'll have to update it...
    $article = new Article($this->title);
    // Postgres does not accept FOR UPDATE in this position.
    $options = $wgDBtype == 'postgres' ? '' : 'FOR UPDATE';
    $page = $dbw->selectRow('page', array('page_id', 'page_latest'),
        array('page_namespace' => $this->title->getNamespace(),
            'page_title' => $this->title->getDBkey()),
        __METHOD__, $options);
    if ($page) {
        # Page already exists. Import the history, and if necessary
        # we'll update the latest revision field in the record.
        $newid = 0;
        $pageId = $page->page_id;
        $previousRevId = $page->page_latest;
    } else {
        # Have to create a new article...
        $newid = $article->insertOn($dbw);
        $pageId = $newid;
        $previousRevId = 0;
    }
    // Build the WHERE fragment selecting which archive rows to restore.
    if ($restoreAll) {
        $oldones = '1 = 1'; # All revisions...
    } else {
        $oldts = implode(',', array_map(array(&$dbw, 'addQuotes'),
            array_map(array(&$dbw, 'timestamp'), $timestamps)));
        $oldones = "ar_timestamp IN ( {$oldts} )";
    }
    /**
     * Restore each revision...
     */
    $result = $dbw->select('archive',
        array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text',
            'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id'),
        array('ar_namespace' => $this->title->getNamespace(),
            'ar_title' => $this->title->getDBkey(), $oldones),
        __METHOD__, array('ORDER BY' => 'ar_timestamp'));
    // Bail out rather than partially restore if some requested
    // timestamps matched no archive row.
    if ($dbw->numRows($result) < count($timestamps)) {
        wfDebug(__METHOD__ . ": couldn't find all requested rows\n");
        return false;
    }
    $revision = null;
    $newRevId = $previousRevId;
    $restored = 0;
    while ($row = $dbw->fetchObject($result)) {
        if ($row->ar_text_id) {
            // Revision was deleted in 1.5+; text is in
            // the regular text table, use the reference.
            // Specify null here so the text is
            // dereferenced for page length info if needed.
            $revText = null;
        } else {
            // Revision was deleted in 1.4 or earlier.
            // Text is squashed into the archive row, and
            // a new text table entry will be created for it.
            $revText = Revision::getRevisionText($row, 'ar_');
        }
        $revision = new Revision(array(
            'page' => $pageId,
            'id' => $row->ar_rev_id,
            'text' => $revText,
            'comment' => $row->ar_comment,
            'user' => $row->ar_user,
            'user_text' => $row->ar_user_text,
            'timestamp' => $row->ar_timestamp,
            'minor_edit' => $row->ar_minor_edit,
            'text_id' => $row->ar_text_id));
        $newRevId = $revision->insertOn($dbw);
        $restored++;
    }
    // $revision now holds the chronologically newest restored revision
    // (rows were ordered by ar_timestamp ascending).
    if ($revision) {
        # FIXME: Update latest if newer as well...
        if ($newid) {
            // Attach the latest revision to the page...
            $article->updateRevisionOn($dbw, $revision, $previousRevId);
            // Update site stats, link tables, etc
            $article->createUpdates($revision);
        }
        if ($newid) {
            Article::onArticleCreate($this->title);
        } else {
            Article::onArticleEdit($this->title);
        }
    } else {
        # Something went terribly wrong! No rows were restored; the
        # freshly-inserted page row (if any) is left without revisions.
    }
    # Now that it's safely stored, take it out of the archive
    $dbw->delete('archive',
        array('ar_namespace' => $this->title->getNamespace(),
            'ar_title' => $this->title->getDBkey(), $oldones),
        __METHOD__);
    return $restored;
}
/**
 * Move page to a title which is either a redirect to the
 * source page or nonexistent
 *
 * @param $nt Title the page to move to, which should be a redirect or nonexistent
 * @param $reason String The reason for the move
 * @param $createRedirect Bool Whether to leave a redirect at the old title. Ignored
 *   if the user doesn't have the suppressredirect right
 */
private function moveToInternal(&$nt, $reason = '', $createRedirect = true) {
    global $wgUser, $wgContLang;
    // Choose the edit-summary message depending on whether we are
    // replacing an existing redirect at the destination.
    $moveOverRedirect = $nt->exists();
    $commentMsg = $moveOverRedirect ? '1movedto2_redir' : '1movedto2';
    $comment = wfMsgForContent($commentMsg, $this->getPrefixedText(), $nt->getPrefixedText());
    if ($reason) {
        $comment .= wfMsgForContent('colon-separator') . $reason;
    }
    # Truncate for whole multibyte characters.
    $comment = $wgContLang->truncate($comment, 255);
    $oldid = $this->getArticleID();
    $latest = $this->getLatestRevID();
    $dbw = wfGetDB(DB_MASTER);
    if ($moveOverRedirect) {
        $rcts = $dbw->timestamp($nt->getEarliestRevTime());
        $newid = $nt->getArticleID();
        $newns = $nt->getNamespace();
        $newdbk = $nt->getDBkey();
        # Delete the old redirect. We don't save it to history since
        # by definition if we've got here it's rather uninteresting.
        # We have to remove it so that the next step doesn't trigger
        # a conflict on the unique namespace+title index...
        $dbw->delete('page', array('page_id' => $newid), __METHOD__);
        // If the database cannot cascade deletes, manually clean up
        // every table that references the deleted page row by ID.
        if (!$dbw->cascadingDeletes()) {
            $dbw->delete('revision', array('rev_page' => $newid), __METHOD__);
            global $wgUseTrackbacks;
            if ($wgUseTrackbacks) {
                $dbw->delete('trackbacks', array('tb_page' => $newid), __METHOD__);
            }
            $dbw->delete('pagelinks', array('pl_from' => $newid), __METHOD__);
            $dbw->delete('imagelinks', array('il_from' => $newid), __METHOD__);
            $dbw->delete('categorylinks', array('cl_from' => $newid), __METHOD__);
            $dbw->delete('templatelinks', array('tl_from' => $newid), __METHOD__);
            $dbw->delete('externallinks', array('el_from' => $newid), __METHOD__);
            $dbw->delete('langlinks', array('ll_from' => $newid), __METHOD__);
            $dbw->delete('iwlinks', array('iwl_from' => $newid), __METHOD__);
            $dbw->delete('redirect', array('rd_from' => $newid), __METHOD__);
        }
        // If the target page was recently created, it may have an entry in recentchanges still
        $dbw->delete('recentchanges',
            array('rc_timestamp' => $rcts, 'rc_namespace' => $newns,
                'rc_title' => $newdbk, 'rc_new' => 1),
            __METHOD__);
    }
    # Save a null revision in the page's history notifying of the move
    $nullRevision = Revision::newNullRevision($dbw, $oldid, $comment, true);
    if (!is_object($nullRevision)) {
        throw new MWException('No valid null revision produced in ' . __METHOD__);
    }
    $nullRevId = $nullRevision->insertOn($dbw);
    $now = wfTimestampNow();
    # Change the name of the target page:
    $dbw->update('page',
        array('page_touched' => $dbw->timestamp($now),
            'page_namespace' => $nt->getNamespace(),
            'page_title' => $nt->getDBkey(),
            'page_latest' => $nullRevId),
        array('page_id' => $oldid), __METHOD__);
    // The moved page keeps its old article ID under the new title.
    $nt->resetArticleID($oldid);
    $article = new Article($nt);
    wfRunHooks('NewRevisionFromEditComplete', array($article, $nullRevision, $latest, $wgUser));
    $article->setCachedLastEditTime($now);
    # Recreate the redirect, this time in the other direction.
    if ($createRedirect || !$wgUser->isAllowed('suppressredirect')) {
        $mwRedir = MagicWord::get('redirect');
        $redirectText = $mwRedir->getSynonym(0) . ' [[' . $nt->getPrefixedText() . "]]\n";
        $redirectArticle = new Article($this);
        $newid = $redirectArticle->insertOn($dbw);
        $redirectRevision = new Revision(array(
            'page' => $newid,
            'comment' => $comment,
            'text' => $redirectText));
        $redirectRevision->insertOn($dbw);
        $redirectArticle->updateRevisionOn($dbw, $redirectRevision, 0);
        wfRunHooks('NewRevisionFromEditComplete', array($redirectArticle, $redirectRevision, false, $wgUser));
        # Now, we record the link from the redirect to the new title.
        # It should have no other outgoing links...
        $dbw->delete('pagelinks', array('pl_from' => $newid), __METHOD__);
        $dbw->insert('pagelinks',
            array('pl_from' => $newid,
                'pl_namespace' => $nt->getNamespace(),
                'pl_title' => $nt->getDBkey()),
            __METHOD__);
        $redirectSuppressed = false;
    } else {
        // No redirect left behind: the old title no longer has a page row.
        $this->resetArticleID(0);
        $redirectSuppressed = true;
    }
    # Log the move
    $log = new LogPage('move');
    $logType = $moveOverRedirect ? 'move_redir' : 'move';
    $log->addEntry($logType, $this, $reason,
        array(1 => $nt->getPrefixedText(), 2 => $redirectSuppressed));
    # Purge caches for old and new titles
    if ($moveOverRedirect) {
        # A simple purge is enough when moving over a redirect
        $nt->purgeSquid();
    } else {
        # Purge caches as per article creation, including any pages that link to this title
        Article::onArticleCreate($nt);
    }
    $this->purgeSquid();
}
/**
 * Report one imported page to the user and record it in the import log.
 * When any revisions were imported, a null revision is also created so
 * the import shows up in the page history.
 *
 * @param $title Title the local page that was imported into
 * @param $origTitle Title the page's title on the source wiki
 * @param $revisionCount int total revisions in the import stream (unused here)
 * @param $successCount int revisions actually imported
 */
function reportPage($title, $origTitle, $revisionCount, $successCount) {
    global $wgOut, $wgUser, $wgLang, $wgContLang;
    $skin = $wgUser->getSkin();
    $this->mPageCount++;
    // Format the count once in the user language (for the HTML report)
    // and once in the content language (for the log entry).
    $localCount = $wgLang->formatNum($successCount);
    $contentCount = $wgContLang->formatNum($successCount);
    if ($successCount > 0) {
        $wgOut->addHTML("<li>" . $skin->makeKnownLinkObj($title) . " " .
            wfMsgExt('import-revision-count', array('parsemag', 'escape'), $localCount) .
            "</li>\n");
        $log = new LogPage('import');
        if ($this->mIsUpload) {
            // XML-file upload import.
            $detail = wfMsgExt('import-logentry-upload-detail',
                array('content', 'parsemag'), $contentCount);
            if ($this->reason) {
                $detail .= wfMsgForContent('colon-separator') . $this->reason;
            }
            $log->addEntry('upload', $title, $detail);
        } else {
            // Transwiki (interwiki) import; link back to the source page.
            $interwiki = '[[:' . $this->mInterwiki . ':' . $origTitle->getPrefixedText() . ']]';
            $detail = wfMsgExt('import-logentry-interwiki-detail',
                array('content', 'parsemag'), $contentCount, $interwiki);
            if ($this->reason) {
                $detail .= wfMsgForContent('colon-separator') . $this->reason;
            }
            $log->addEntry('interwiki', $title, $detail);
        }
        $comment = $detail; // quick
        $dbw = wfGetDB(DB_MASTER);
        $latest = $title->getLatestRevID();
        // Null revision marks the import in the page history.
        $nullRevision = Revision::newNullRevision($dbw, $title->getArticleId(), $comment, true);
        $nullRevision->insertOn($dbw);
        $article = new Article($title);
        # Update page record
        $article->updateRevisionOn($dbw, $nullRevision);
        wfRunHooks('NewRevisionFromEditComplete',
            array($article, $nullRevision, $latest, $wgUser));
    } else {
        $wgOut->addHTML('<li>' . wfMsgHtml('import-nonewrevisions') . '</li>');
    }
}
/**
 * Seed a freshly-created wiki with its main page and an initial
 * site_stats row. The page text comes from the 'farmernewwikimainpage'
 * message and is attributed to the "MediaWiki default" pseudo-user.
 */
protected function _createMainPageForWiki() {
    $dbw = $this->getDatabase();

    // Resolve the main page title in the new wiki's content language,
    // bypassing the message cache's database layer.
    $mainPageTitle = Title::newFromText(
        wfMessage('mainpage')->inContentLanguage()->useDatabase(false)->plain()
    );

    // Create the page row, then attach its first revision.
    $mainPage = new Article($mainPageTitle);
    $pageId = $mainPage->insertOn($dbw);
    $firstRevision = new Revision(array(
        'page' => $pageId,
        'text' => wfMsgForContent('farmernewwikimainpage'),
        'comment' => '',
        'user' => 0,
        'user_text' => 'MediaWiki default',
    ));
    $firstRevision->insertOn($dbw);
    $mainPage->updateRevisionOn($dbw, $firstRevision);

    // site_stats table entry
    $dbw->insert('site_stats', array(
        'ss_row_id' => 1,
        'ss_total_views' => 0,
        'ss_total_edits' => 0,
        'ss_good_articles' => 0,
    ));
}
/**
 * Record a file upload in the upload log and the image table
 *
 * @param $oldver string archive name for the previous version (used when
 *   this upload overwrites an existing file)
 * @param $comment string upload comment / image description
 * @param $pageText string text for a newly-created description page
 * @param $props array|false file properties, or false to (re)derive them
 * @param $timestamp mixed upload timestamp, or false for "now"
 * @return bool false if the file is missing on disk, true otherwise
 */
function recordUpload2($oldver, $comment, $pageText, $props = false, $timestamp = false) {
    global $wgUser;
    $dbw = $this->repo->getMasterDB();
    if (!$props) {
        $props = $this->repo->getFileProps($this->getVirtualUrl());
    }
    // Stamp uploader and description onto the property set before
    // applying it to this File object.
    $props['description'] = $comment;
    $props['user'] = $wgUser->getID();
    $props['user_text'] = $wgUser->getName();
    $props['timestamp'] = wfTimestamp(TS_MW);
    $this->setProps($props);
    // Delete thumbnails and refresh the metadata cache
    $this->purgeThumbnails();
    $this->saveToCache();
    wfPurgeSquidServers(array($this->getURL()));
    // Fail now if the file isn't there
    if (!$this->fileExists) {
        wfDebug(__METHOD__ . ": File " . $this->getPath() . " went missing!\n");
        return false;
    }
    $reupload = false;
    if ($timestamp === false) {
        $timestamp = $dbw->timestamp();
    }
    # Test to see if the row exists using INSERT IGNORE
    # This avoids race conditions by locking the row until the commit, and also
    # doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
    $dbw->insert('image',
        array('img_name' => $this->getName(),
            'img_size' => $this->size,
            'img_width' => intval($this->width),
            'img_height' => intval($this->height),
            'img_bits' => $this->bits,
            'img_media_type' => $this->media_type,
            'img_major_mime' => $this->major_mime,
            'img_minor_mime' => $this->minor_mime,
            'img_timestamp' => $timestamp,
            'img_description' => $comment,
            'img_user' => $wgUser->getID(),
            'img_user_text' => $wgUser->getName(),
            'img_metadata' => $this->metadata,
            'img_sha1' => $this->sha1),
        __METHOD__, 'IGNORE');
    if ($dbw->affectedRows() == 0) {
        $reupload = true;
        # Collision, this is an update of a file
        # Insert previous contents into oldimage
        $dbw->insertSelect('oldimage', 'image',
            array('oi_name' => 'img_name',
                'oi_archive_name' => $dbw->addQuotes($oldver),
                'oi_size' => 'img_size',
                'oi_width' => 'img_width',
                'oi_height' => 'img_height',
                'oi_bits' => 'img_bits',
                'oi_timestamp' => 'img_timestamp',
                'oi_description' => 'img_description',
                'oi_user' => 'img_user',
                'oi_user_text' => 'img_user_text',
                'oi_metadata' => 'img_metadata',
                'oi_media_type' => 'img_media_type',
                'oi_major_mime' => 'img_major_mime',
                'oi_minor_mime' => 'img_minor_mime',
                'oi_sha1' => 'img_sha1'),
            array('img_name' => $this->getName()), __METHOD__);
        # Update the current image row
        $dbw->update('image',
            array('img_size' => $this->size,
                'img_width' => intval($this->width),
                'img_height' => intval($this->height),
                'img_bits' => $this->bits,
                'img_media_type' => $this->media_type,
                'img_major_mime' => $this->major_mime,
                'img_minor_mime' => $this->minor_mime,
                'img_timestamp' => $timestamp,
                'img_description' => $comment,
                'img_user' => $wgUser->getID(),
                'img_user_text' => $wgUser->getName(),
                'img_metadata' => $this->metadata,
                'img_sha1' => $this->sha1),
            array('img_name' => $this->getName()), __METHOD__);
    } else {
        # This is a new file
        # Update the image count
        $site_stats = $dbw->tableName('site_stats');
        $dbw->query("UPDATE {$site_stats} SET ss_images=ss_images+1", __METHOD__);
    }
    $descTitle = $this->getTitle();
    $article = new Article($descTitle);
    # Add the log entry
    $log = new LogPage('upload');
    $action = $reupload ? 'overwrite' : 'upload';
    $log->addEntry($action, $descTitle, $comment);
    if ($descTitle->exists()) {
        # Create a null revision
        $nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleId(),
            $log->getRcComment(), false);
        $nullRevision->insertOn($dbw);
        $article->updateRevisionOn($dbw, $nullRevision);
        # Invalidate the cache for the description page
        $descTitle->invalidateCache();
        $descTitle->purgeSquid();
    } else {
        // New file; create the description page.
        // There's already a log entry, so don't make a second RC entry
        $article->doEdit($pageText, $comment, EDIT_NEW | EDIT_SUPPRESS_RC);
    }
    # Hooks, hooks, the magic of hooks...
    wfRunHooks('FileUpload', array($this));
    # Commit the transaction now, in case something goes wrong later
    # The most important thing is that files don't get lost, especially archives
    $dbw->immediateCommit();
    # Invalidate cache for all pages using this file
    $update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
    $update->doUpdate();
    return true;
}
/**
 * Move page to non-existing title.
 * @param &$nt \type{Title} the new Title
 * @param $reason \type{\string} The reason for the move
 * @param $createRedirect \type{\bool} Whether to create a redirect from the old title to the new title
 *   Ignored if the user doesn't have the suppressredirect right
 * @return array|void error array from the file move on failure, nothing on success
 */
private function moveToNewTitle(&$nt, $reason = '', $createRedirect = true) {
    global $wgUseSquid, $wgUser;
    // NOTE(review): $fname names MovePageForm but this method appears to
    // live elsewhere (cf. the &$nt/Title usage); kept as-is since it is
    // only a profiling/debug label passed to the DB layer.
    $fname = 'MovePageForm::moveToNewTitle';
    $comment = wfMsgForContent('1movedto2', $this->getPrefixedText(), $nt->getPrefixedText());
    if ($reason) {
        $comment .= wfMsgExt('colon-separator', array('escapenoentities', 'content'));
        $comment .= $reason;
    }
    $newid = $nt->getArticleID();
    $oldid = $this->getArticleID();
    $latest = $this->getLatestRevId();
    $dbw = wfGetDB(DB_MASTER);
    $now = $dbw->timestamp();
    # Save a null revision in the page's history notifying of the move
    $nullRevision = Revision::newNullRevision($dbw, $oldid, $comment, true);
    $nullRevId = $nullRevision->insertOn($dbw);
    $article = new Article($this);
    wfRunHooks('NewRevisionFromEditComplete', array($article, $nullRevision, $latest, $wgUser));
    # Rename page entry
    $dbw->update('page',
        array('page_touched' => $now,
            'page_namespace' => $nt->getNamespace(),
            'page_title' => $nt->getDBkey(),
            'page_latest' => $nullRevId),
        array('page_id' => $oldid), $fname);
    // The moved page keeps its old article ID under the new title.
    $nt->resetArticleID($oldid);
    if ($createRedirect || !$wgUser->isAllowed('suppressredirect')) {
        # Insert redirect
        $mwRedir = MagicWord::get('redirect');
        $redirectText = $mwRedir->getSynonym(0) . ' [[' . $nt->getPrefixedText() . "]]\n";
        $redirectArticle = new Article($this);
        $newid = $redirectArticle->insertOn($dbw);
        $redirectRevision = new Revision(array(
            'page' => $newid,
            'comment' => $comment,
            'text' => $redirectText));
        $redirectRevision->insertOn($dbw);
        $redirectArticle->updateRevisionOn($dbw, $redirectRevision, 0);
        wfRunHooks('NewRevisionFromEditComplete', array($redirectArticle, $redirectRevision, false, $wgUser));
        # Record the just-created redirect's linking to the page
        $dbw->insert('pagelinks',
            array('pl_from' => $newid,
                'pl_namespace' => $nt->getNamespace(),
                'pl_title' => $nt->getDBkey()),
            $fname);
        $redirectSuppressed = false;
    } else {
        // No redirect left behind: the old title no longer has a page row.
        $this->resetArticleID(0);
        $redirectSuppressed = true;
    }
    # Move an image if this is a file
    if ($this->getNamespace() == NS_FILE) {
        $file = wfLocalFile($this);
        if ($file->exists()) {
            $status = $file->move($nt);
            if (!$status->isOk()) {
                // Abort the whole move if the underlying file move failed.
                $dbw->rollback();
                return $status->getErrorsArray();
            }
        }
    }
    # Log the move
    $log = new LogPage('move');
    $log->addEntry('move', $this, $reason,
        array(1 => $nt->getPrefixedText(), 2 => $redirectSuppressed));
    # Purge caches as per article creation
    Article::onArticleCreate($nt);
    # Purge old title from squid
    # The new title, and links to the new title, are purged in Article::onArticleCreate()
    $this->purgeSquid();
}
/**
 * Record an InstantCommons file download in the log and the ic_image
 * table, creating or updating the image row and bumping site stats for
 * new files.
 *
 * @param $comment string description for the image row / log entry
 * @param $timestamp mixed timestamp for the row, or false for "now"
 * @return bool true once the DB work is committed
 */
private function recordDownload($comment = '', $timestamp = false) {
    global $wgUser;
    $dbw = $this->repo->getMasterDB();
    if ($timestamp === false) {
        $timestamp = $dbw->timestamp();
    }
    // Split the stored mime string into major/minor halves once; both
    // branches below must use these same values.
    list($major, $minor) = self::splitMime($this->mime);
    # Test to see if the row exists using INSERT IGNORE
    # This avoids race conditions by locking the row until the commit, and also
    # doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
    $dbw->insert('ic_image',
        array('img_name' => $this->getName(),
            'img_size' => $this->size,
            'img_width' => intval($this->width),
            'img_height' => intval($this->height),
            'img_bits' => $this->bits,
            'img_media_type' => $this->type,
            'img_major_mime' => $major,
            'img_minor_mime' => $minor,
            'img_timestamp' => $timestamp,
            'img_description' => $comment,
            'img_user' => $wgUser->getID(),
            'img_user_text' => $wgUser->getName(),
            'img_metadata' => $this->metadata),
        __METHOD__, 'IGNORE');
    if ($dbw->affectedRows() == 0) {
        # Collision, this is an update of a file
        # Update the current image row
        # FIX: this branch previously read $this->media_type, $this->major_mime
        # and $this->minor_mime, but (as the insert above shows) this class
        # keeps the media type in $this->type and derives the mime halves via
        # self::splitMime() into $major/$minor. Use the same sources so an
        # update writes the same values as a fresh insert.
        $dbw->update('ic_image',
            array('img_size' => $this->size,
                'img_width' => intval($this->width),
                'img_height' => intval($this->height),
                'img_bits' => $this->bits,
                'img_media_type' => $this->type,
                'img_major_mime' => $major,
                'img_minor_mime' => $minor,
                'img_timestamp' => $timestamp,
                'img_description' => $comment,
                'img_user' => $wgUser->getID(),
                'img_user_text' => $wgUser->getName(),
                'img_metadata' => $this->metadata),
            array('img_name' => $this->getName()), __METHOD__);
    } else {
        # This is a new file
        # Update the image count
        $site_stats = $dbw->tableName('site_stats');
        $dbw->query("UPDATE {$site_stats} SET ss_images=ss_images+1", __METHOD__);
    }
    $descTitle = $this->getTitle();
    $article = new Article($descTitle);
    # Add the log entry
    $log = new LogPage('icdownload');
    $log->addEntry('InstantCommons download', $descTitle, $comment);
    if ($descTitle->exists()) {
        # Create a null revision so the download shows in the page history
        $nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleId(),
            $log->getRcComment(), false);
        $nullRevision->insertOn($dbw);
        $article->updateRevisionOn($dbw, $nullRevision);
        # Invalidate the cache for the description page
        $descTitle->invalidateCache();
        $descTitle->purgeSquid();
    }
    # Commit the transaction now, in case something goes wrong later
    # The most important thing is that files don't get lost, especially archives
    $dbw->immediateCommit();
    # Invalidate cache for all pages using this file
    $update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
    $update->doUpdate();
    return true;
}
// Maintenance scan: find page rows whose page_latest is 0 and, when the
// $fixit flag is set (--fix), repoint each at the page's newest revision.
// Relies on variables set up earlier in the script: $fname (profiling
// label), $wgDBname, and $fixit.
echo "Looking for pages with page_latest set to 0...\n";
$dbw =& wfGetDB(DB_MASTER);
$brokenPages = $dbw->select(
    'page',
    array('page_id', 'page_namespace', 'page_title'),
    array('page_latest' => 0),
    $fname
);
$processed = 0;
while ($pageRow = $dbw->fetchObject($brokenPages)) {
    $pageId = intval($pageRow->page_id);
    $title = Title::makeTitle($pageRow->page_namespace, $pageRow->page_title);
    $name = $title->getPrefixedText();

    // Newest revision timestamp recorded for this page, if any.
    $latestTime = $dbw->selectField(
        'revision',
        'MAX(rev_timestamp)',
        array('rev_page' => $pageId),
        $fname
    );
    if (!$latestTime) {
        echo "{$wgDBname} {$pageId} [[{$name}]] can't find latest rev time?!\n";
        continue;
    }

    $revision = Revision::loadFromTimestamp($dbw, $title, $latestTime);
    if (is_null($revision)) {
        echo "{$wgDBname} {$pageId} [[{$name}]] latest time {$latestTime}, can't find revision id\n";
        continue;
    }

    $id = $revision->getId();
    echo "{$wgDBname} {$pageId} [[{$name}]] latest time {$latestTime}, rev id {$id}\n";

    if ($fixit) {
        // Repoint page_latest (and related page fields) at this revision.
        $article = new Article($title);
        $article->updateRevisionOn($dbw, $revision);
    }
    $processed++;
}
$dbw->freeResult($brokenPages);

echo "Done! Processed {$processed} pages.\n";
if (!$fixit) {
    echo "This was a dry run; rerun with --fix to update page_latest.\n";
}
/**
 * Check every page's page_latest pointer against the actual newest
 * revision timestamp for that page, reporting mismatches and, when $fix
 * is true, repointing page_latest at the newest revision. Locks the
 * page/revision/text tables for the duration when fixing.
 *
 * @param $fix bool whether to update mismatched rows (otherwise dry run)
 */
function checkSeparation($fix) {
    $dbw = wfGetDB(DB_MASTER);
    $page = $dbw->tableName('page');
    $revision = $dbw->tableName('revision');
    $text = $dbw->tableName('text');
    if ($fix) {
        // Hold write locks so the fix is consistent with what we scanned.
        $dbw->query("LOCK TABLES {$page} WRITE, {$revision} WRITE, {$text} WRITE");
    }
    echo "\nChecking for pages whose page_latest links are incorrect... (this may take a while on a large wiki)\n";
    // Join each page to the revision its page_latest points at; the LEFT
    // OUTER JOIN keeps pages whose pointer matches no revision at all.
    $result = $dbw->query("\n\t\tSELECT *\n\t\tFROM {$page} LEFT OUTER JOIN {$revision} ON page_latest=rev_id\n\t");
    $found = 0;
    while ($row = $dbw->fetchObject($result)) {
        // One extra query per page: the true newest revision timestamp.
        $result2 = $dbw->query("\n\t\t\tSELECT MAX(rev_timestamp) as max_timestamp\n\t\t\tFROM {$revision}\n\t\t\tWHERE rev_page={$row->page_id}\n\t\t");
        $row2 = $dbw->fetchObject($result2);
        $dbw->freeResult($result2);
        if ($row2) {
            if ($row->rev_timestamp != $row2->max_timestamp) {
                // Print the table header only before the first hit.
                if ($found == 0) {
                    printf("%10s %10s %14s %14s\n",
                        'page_id', 'rev_id', 'timestamp', 'max timestamp');
                }
                ++$found;
                printf("%10d %10d %14s %14s\n",
                    $row->page_id, $row->page_latest,
                    $row->rev_timestamp, $row2->max_timestamp);
                if ($fix) {
                    # Look up the revision carrying the newest timestamp
                    # and make it the page's latest revision.
                    $maxId = $dbw->selectField('revision', 'rev_id',
                        array('rev_page' => $row->page_id,
                            'rev_timestamp' => $row2->max_timestamp));
                    echo "... updating to revision {$maxId}\n";
                    $maxRev = Revision::newFromId($maxId);
                    $title = Title::makeTitle($row->page_namespace, $row->page_title);
                    $article = new Article($title);
                    $article->updateRevisionOn($dbw, $maxRev);
                }
            }
        } else {
            // Should be unreachable: MAX() always yields a row.
            echo "wtf\n";
        }
    }
    if ($found) {
        echo "Found {$found} pages with incorrect latest revision.\n";
    } else {
        echo "No pages with incorrect latest revision. Yay!\n";
    }
    if (!$fix && $found > 0) {
        echo "Run again with --fix to remove these entries automatically.\n";
    }
    if ($fix) {
        $dbw->query("UNLOCK TABLES");
    }
}
/**
 * Duplicate one page to another, including full histories
 * Does some basic error-catching, but not as much as the code above [should]
 *
 * @param $source Title to duplicate
 * @param $dest Title to save to
 * @return bool
 */
private function duplicate(&$source, &$dest) {
    global $wgUser, $wgBot;
    if (!$source->exists() || $dest->exists()) {
        return false;
    } # Source doesn't exist, or destination does
    $dbw = wfGetDB(DB_MASTER);
    $dbw->begin();
    $sid = $source->getArticleId();
    # Create an article representing the destination page and save it
    $destArticle = new Article($dest);
    $aid = $destArticle->insertOn($dbw);
    # Perform the revision duplication
    # An INSERT...SELECT here seems to f**k things up
    $res = $dbw->select('revision', '*', array('rev_page' => $sid), __METHOD__);
    if ($res && $dbw->numRows($res) > 0) {
        while ($row = $dbw->fetchObject($res)) {
            // Copy each source revision onto the new page; rev_id is
            // deliberately omitted so each copy gets a fresh ID.
            // (Every key is reassigned each pass, so reusing $values
            // across iterations is safe.)
            $values['rev_page'] = $aid;
            $values['rev_text_id'] = $row->rev_text_id;
            $values['rev_comment'] = $row->rev_comment;
            $values['rev_user'] = $row->rev_user;
            $values['rev_user_text'] = $row->rev_user_text;
            $values['rev_timestamp'] = $row->rev_timestamp;
            $values['rev_minor_edit'] = $row->rev_minor_edit;
            $values['rev_deleted'] = $row->rev_deleted;
            $dbw->insert('revision', $values, __METHOD__);
        }
        $dbw->freeResult($res);
    }
    # Update page record
    // Point the new page at the highest of its freshly-inserted revisions.
    $latest = $dbw->selectField('revision', 'MAX(rev_id)',
        array('rev_page' => $aid), __METHOD__);
    $rev = Revision::newFromId($latest);
    $destArticle->updateRevisionOn($dbw, $rev);
    # Commit transaction
    $dbw->commit();
    # Create a null revision with an explanation; do cache clearances, etc.
    $dbw->begin();
    $comment = wfMsgForContent('duplicator-summary', $source->getPrefixedText());
    $nr = Revision::newNullRevision($dbw, $aid, $comment, true);
    $nid = $nr->insertOn($dbw);
    $destArticle->updateRevisionOn($dbw, $nr);
    $destArticle->createUpdates($nr);
    Article::onArticleCreate($dest);
    // Flag the RC entry as a bot edit when the duplicating user may bot.
    $bot = $wgUser->isAllowed('bot');
    RecentChange::notifyNew($nr->getTimestamp(), $dest, true, $wgUser, $comment, $bot);
    $dest->invalidateCache();
    $dbw->commit();
    return true;
}
/**
 * Create a redirect; fails if the title already exists; does
 * not notify RC
 *
 * @param Title $dest the destination of the redirect
 * @param string $comment the comment string describing the move
 * @return bool true on success
 * @access public
 */
function createRedirect($dest, $comment) {
    // Refuse to clobber an existing article at this title.
    if ($this->getArticleID()) {
        return false;
    }
    $fname = 'Title::createRedirect';
    $dbw =& wfGetDB(DB_MASTER);

    // Create the page row, then attach one revision holding the
    // redirect wikitext.
    $redirectPage = new Article($this);
    $pageId = $redirectPage->insertOn($dbw);
    $target = $dest->getPrefixedText();
    $redirectRev = new Revision(array(
        'page' => $pageId,
        'comment' => $comment,
        'text' => "#REDIRECT [[{$target}]]\n",
    ));
    $redirectRev->insertOn($dbw);
    $redirectPage->updateRevisionOn($dbw, $redirectRev, 0);

    # Link table: register the redirect's single outgoing link.
    $dbw->insert('pagelinks', array(
        'pl_from' => $pageId,
        'pl_namespace' => $dest->getNamespace(),
        'pl_title' => $dest->getDbKey(),
    ), $fname);

    Article::onArticleCreate($this);
    return true;
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 * The deletion log will be updated with an undeletion notice.
 *
 * Returns true on success.
 *
 * @param array $timestamps Pass an empty array to restore all revisions, otherwise list the ones to undelete.
 * @return bool
 */
function undelete($timestamps) {
    global $wgUser, $wgOut, $wgLang, $wgDeferredUpdateList;
    global $wgUseSquid, $wgInternalServer, $wgLinkCache;
    global $wgDBtype;
    $fname = "doUndeleteArticle";
    $restoreAll = empty($timestamps);
    $restoreRevisions = count($timestamps);
    $dbw =& wfGetDB(DB_MASTER);
    // Pulls $page and $archive table-name locals into scope.
    extract($dbw->tableNames('page', 'archive'));
    # Does this page already exist? We'll have to update it...
    $article = new Article($this->title);
    // NOTE(review): elsewhere in this file $wgDBtype is compared against
    // 'postgres' (lowercase); this 'PostgreSQL' spelling may never match
    // -- verify against how $wgDBtype is actually set.
    $options = $wgDBtype == 'PostgreSQL' ? '' : 'FOR UPDATE';
    $page = $dbw->selectRow('page', array('page_id', 'page_latest'),
        array('page_namespace' => $this->title->getNamespace(),
            'page_title' => $this->title->getDBkey()),
        $fname, $options);
    if ($page) {
        # Page already exists. Import the history, and if necessary
        # we'll update the latest revision field in the record.
        $newid = 0;
        $pageId = $page->page_id;
        $previousRevId = $page->page_latest;
        // NOTE(review): rev_timestamp is not in the column list selected
        // above, so this reads an undefined property -- confirm intent.
        $previousTimestamp = $page->rev_timestamp;
    } else {
        # Have to create a new article...
        $newid = $article->insertOn($dbw);
        $pageId = $newid;
        $previousRevId = 0;
        $previousTimestamp = 0;
    }
    // Build the WHERE fragment selecting which archive rows to restore.
    if ($restoreAll) {
        $oldones = '1'; # All revisions...
    } else {
        $oldts = implode(',', array_map(array(&$dbw, 'addQuotes'),
            array_map(array(&$dbw, 'timestamp'), $timestamps)));
        $oldones = "ar_timestamp IN ( {$oldts} )";
    }
    /**
     * Restore each revision...
     */
    $result = $dbw->select('archive',
        array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text',
            'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id'),
        array('ar_namespace' => $this->title->getNamespace(),
            'ar_title' => $this->title->getDBkey(), $oldones),
        $fname, array('ORDER BY' => 'ar_timestamp'));
    $revision = null;
    while ($row = $dbw->fetchObject($result)) {
        $revision = new Revision(array(
            'page' => $pageId,
            'id' => $row->ar_rev_id,
            'text' => Revision::getRevisionText($row, 'ar_'),
            'comment' => $row->ar_comment,
            'user' => $row->ar_user,
            'user_text' => $row->ar_user_text,
            'timestamp' => $row->ar_timestamp,
            'minor_edit' => $row->ar_minor_edit,
            'text_id' => $row->ar_text_id));
        $revision->insertOn($dbw);
    }
    // $revision now holds the chronologically newest restored revision
    // (rows were ordered by ar_timestamp ascending).
    if ($revision) {
        # FIXME: Update latest if newer as well...
        if ($newid) {
            # FIXME: update article count if changed...
            $article->updateRevisionOn($dbw, $revision, $previousRevId);
            # Finally, clean up the link tables
            $wgLinkCache = new LinkCache();
            # Select for update
            $wgLinkCache->forUpdate(true);
            # Create a dummy OutputPage to update the outgoing links
            $dummyOut = new OutputPage();
            $dummyOut->addWikiText($revision->getText());
            $u = new LinksUpdate($newid, $this->title->getPrefixedDBkey());
            array_push($wgDeferredUpdateList, $u);
            #TODO: SearchUpdate, etc.
        }
        if ($newid) {
            Article::onArticleCreate($this->title);
        } else {
            Article::onArticleEdit($this->title);
        }
    } else {
        # Something went terribly wrong! No rows were restored.
    }
    # Now that it's safely stored, take it out of the archive
    $dbw->delete('archive',
        array('ar_namespace' => $this->title->getNamespace(),
            'ar_title' => $this->title->getDBkey(), $oldones),
        $fname);
    # Touch the log!
    $log = new LogPage('delete');
    if ($restoreAll) {
        $reason = '';
    } else {
        $reason = wfMsgForContent('undeletedrevisions', $restoreRevisions);
    }
    $log->addEntry('restore', $this->title, $reason);
    return true;
}
/**
 * Move page to non-existing title.
 *
 * @param Title &$nt the new Title
 * @param string $reason the reason for the move, appended to the
 *        auto-generated move summary when non-empty
 */
private function moveToNewTitle(&$nt, $reason = '') {
	global $wgUseSquid;
	$fname = 'MovePageForm::moveToNewTitle';
	$comment = wfMsgForContent('1movedto2', $this->getPrefixedText(), $nt->getPrefixedText());
	if ($reason) {
		$comment .= ": {$reason}";
	}

	$oldid = $this->getArticleID();
	$dbw = wfGetDB(DB_MASTER);
	$now = $dbw->timestamp();
	$linkCache =& LinkCache::singleton();

	# Save a null revision in the page's history notifying of the move
	$nullRevision = Revision::newNullRevision($dbw, $oldid, $comment, true);
	# Guard against a failed null-revision creation; without this we would
	# set page_latest to a bogus id below (matches the newer implementation
	# of this method elsewhere in this file).
	if (!is_object($nullRevision)) {
		throw new MWException('No valid null revision produced in ' . $fname);
	}
	$nullRevId = $nullRevision->insertOn($dbw);

	# Rename cur entry: repoint the page row at the new title and at the
	# freshly created null revision.
	$dbw->update('page',
		array(
			'page_touched' => $now,
			'page_namespace' => $nt->getNamespace(),
			'page_title' => $nt->getDBkey(),
			'page_latest' => $nullRevId,
		),
		array('page_id' => $oldid),
		$fname);
	$linkCache->clearLink($nt->getPrefixedDBkey());

	# Insert redirect at the old title pointing to the new one
	$mwRedir = MagicWord::get('redirect');
	$redirectText = $mwRedir->getSynonym(0) . ' [[' . $nt->getPrefixedText() . "]]\n";
	$redirectArticle = new Article($this);
	$newid = $redirectArticle->insertOn($dbw);
	$redirectRevision = new Revision(array(
		'page' => $newid,
		'comment' => $comment,
		'text' => $redirectText,
	));
	$redirectRevision->insertOn($dbw);
	$redirectArticle->updateRevisionOn($dbw, $redirectRevision, 0);
	$linkCache->clearLink($this->getPrefixedDBkey());

	# Log the move
	$log = new LogPage('move');
	$log->addEntry('move', $this, $reason, array(1 => $nt->getPrefixedText()));

	# Purge caches as per article creation
	Article::onArticleCreate($nt);

	# Record the just-created redirect's linking to the page
	$dbw->insert('pagelinks',
		array(
			'pl_from' => $newid,
			'pl_namespace' => $nt->getNamespace(),
			'pl_title' => $nt->getDBkey(),
		),
		$fname);

	# Purge old title from squid
	# The new title, and links to the new title, are purged in Article::onArticleCreate()
	$this->purgeSquid();
}
# Grant the newly-created account the standard admin groups.
$u->addGroup("sysop");
$u->addGroup("bureaucrat");
print "<li>Created sysop account <tt>" . htmlspecialchars($conf->SysopName) . "</tt>.</li>\n";
} else {
	print "<li>Could not create user - already exists!</li>\n";
}
}
} else {
	print "<li>Skipped sysop account creation, no name given.</li>\n";
}

# Seed the wiki with its default Main Page: create the page row, insert a
# single revision attributed to the pseudo-user "MediaWiki default", and
# point page_latest at it.
$titleobj = Title::newFromText(wfMsgNoDB("mainpage"));
$article = new Article($titleobj);
$newid = $article->insertOn($wgDatabase);
$revision = new Revision(array(
	'page' => $newid,
	'text' => wfMsg('mainpagetext') . "\n\n" . wfMsgNoTrans('mainpagedocfooter'),
	'comment' => '',
	'user' => 0,
	'user_text' => 'MediaWiki default'));
$revid = $revision->insertOn($wgDatabase);
$article->updateRevisionOn($wgDatabase, $revision);
}

/* Write out the config file now that all is well */
print "<li style=\"list-style: none\">\n";
print "<p>Creating LocalSettings.php...</p>\n\n";
# Split "<?php" so this generated string cannot be mistaken for a PHP open
# tag in this file itself.
$localSettings = "<" . "?php{$endl}{$local}";
// Fix up a common line-ending problem (due to CVS on Windows)
$localSettings = str_replace("\r\n", "\n", $localSettings);
# 'xt' mode fails if the file already exists, so an existing
# LocalSettings.php is never overwritten.
$f = fopen("LocalSettings.php", 'xt');
if ($f == false) {
	print "</li>\n";
	dieout("<p>Couldn't write out LocalSettings.php. Check that the directory permissions are correct and that there isn't already a file of that name here...</p>\n" . "<p>Here's the file that would have been written, try to paste it into place manually:</p>\n" . "<pre>\n" . htmlspecialchars($localSettings) . "</pre>\n");
}
if (fwrite($f, $localSettings)) {
	fclose($f);
	print "<hr/>\n";
/**
 * Move page to non-existing title.
 *
 * @param $nt \type{Title} the new Title
 * @param $reason \type{\string} The reason for the move
 * @param $createRedirect \type{\bool} Whether to create a redirect from the
 *        old title to the new title. Ignored if the user doesn't have the
 *        suppressredirect right
 */
private function moveToNewTitle(&$nt, $reason = '', $createRedirect = true) {
	global $wgUser, $wgContLang;

	# Build the edit summary for the null revision and the redirect.
	$summary = wfMsgForContent('1movedto2', $this->getPrefixedText(), $nt->getPrefixedText());
	if ($reason) {
		$summary .= wfMsgExt('colon-separator', array('escapenoentities', 'content'));
		$summary .= $reason;
	}
	# Truncate for whole multibyte characters. +5 bytes for ellipsis
	$summary = $wgContLang->truncate($summary, 250);

	$oldPageId = $this->getArticleID();
	$latestRevId = $this->getLatestRevId();

	$dbw = wfGetDB(DB_MASTER);
	$touched = $dbw->timestamp();

	# Record the move itself as a null revision in the page history.
	$nullRevision = Revision::newNullRevision($dbw, $oldPageId, $summary, true);
	if (!is_object($nullRevision)) {
		throw new MWException('No valid null revision produced in ' . __METHOD__);
	}
	$nullRevisionId = $nullRevision->insertOn($dbw);

	$article = new Article($this);
	wfRunHooks('NewRevisionFromEditComplete',
		array($article, $nullRevision, $latestRevId, $wgUser));

	# Repoint the page row at the new title and the null revision.
	$dbw->update('page',
		array(
			'page_touched'   => $touched,
			'page_namespace' => $nt->getNamespace(),
			'page_title'     => $nt->getDBkey(),
			'page_latest'    => $nullRevisionId,
		),
		array('page_id' => $oldPageId),
		__METHOD__);
	$nt->resetArticleID($oldPageId);

	# A redirect is left behind unless the user both asked to suppress it
	# and holds the right to do so.
	$suppressRedirect = !$createRedirect && $wgUser->isAllowed('suppressredirect');
	if ($suppressRedirect) {
		# The old title no longer has a page behind it.
		$this->resetArticleID(0);
		$redirectSuppressed = true;
	} else {
		# Insert redirect at the old title.
		$redirMagic = MagicWord::get('redirect');
		$redirectWikitext = $redirMagic->getSynonym(0) . ' [[' . $nt->getPrefixedText() . "]]\n";
		$redirectArticle = new Article($this);
		$redirectPageId = $redirectArticle->insertOn($dbw);
		$redirectRevision = new Revision(array(
			'page'    => $redirectPageId,
			'comment' => $summary,
			'text'    => $redirectWikitext,
		));
		$redirectRevision->insertOn($dbw);
		$redirectArticle->updateRevisionOn($dbw, $redirectRevision, 0);

		wfRunHooks('NewRevisionFromEditComplete',
			array($redirectArticle, $redirectRevision, false, $wgUser));

		# Record the just-created redirect's linking to the page
		$dbw->insert('pagelinks',
			array(
				'pl_from'      => $redirectPageId,
				'pl_namespace' => $nt->getNamespace(),
				'pl_title'     => $nt->getDBkey(),
			),
			__METHOD__);
		$redirectSuppressed = false;
	}

	# Log the move
	$log = new LogPage('move');
	$log->addEntry('move', $this, $reason,
		array(1 => $nt->getPrefixedText(), 2 => $redirectSuppressed));

	# Purge caches as per article creation
	Article::onArticleCreate($nt);

	# Purge old title from squid
	# The new title, and links to the new title, are purged in Article::onArticleCreate()
	$this->purgeSquid();
}