/**
 * Record a file upload in the image table and purge related caches
 * (Wikia variant: takes an explicit $user and purges the wiki logo URL).
 *
 * Inserts the `image` row (or, on re-upload, archives the old row into
 * `oldimage` and updates the current one), refreshes cached file metadata
 * and thumbnails, and invalidates every page embedding the file.
 *
 * @param string $oldver Archive name for the previous version; written into
 *                       oi_archive_name when the image row already exists
 * @param string $comment Upload description; stored as img_description
 * @param string $pageText Description-page text — NOTE(review): not referenced
 *                         in this body; presumably consumed by a caller/override
 * @param bool|array $props File properties, or false to compute them from the
 *                          virtual URL
 * @param string|bool $timestamp DB timestamp to store, or false for "now"
 * @param User|null $user Uploading user; defaults to the global $wgUser
 * @return bool False if the file is missing on disk, true otherwise
 */
function recordUpload2($oldver, $comment, $pageText, $props = false, $timestamp = false, $user = null) {
	if (is_null($user)) {
		global $wgUser;
		$user = $wgUser;
	}
	$dbw = $this->repo->getMasterDB();
	$dbw->begin();
	if (!$props) {
		$props = $this->repo->getFileProps($this->getVirtualUrl());
	}
	// Stamp ownership/description/time onto the in-memory props before caching
	$props['description'] = $comment;
	$props['user'] = $user->getId();
	$props['user_text'] = $user->getName();
	$props['timestamp'] = wfTimestamp(TS_MW);
	$this->setProps($props);
	// Delete thumbnails and refresh the metadata cache
	$this->purgeThumbnails();
	$this->saveToCache();
	SquidUpdate::purge(array($this->getURL()));
	/* Wikia change begin - @author: Marooned, see RT#44185 */
	// If this file is the wiki logo, purge it by its direct URL as well
	global $wgLogo;
	if ($this->url == $wgLogo) {
		SquidUpdate::purge(array($this->url));
	}
	/* Wikia change end */
	// Fail now if the file isn't there
	// NOTE(review): this early return leaves the transaction opened by
	// $dbw->begin() above uncommitted — confirm the caller rolls back.
	if (!$this->fileExists) {
		wfDebug(__METHOD__ . ": File " . $this->getPath() . " went missing!\n");
		return false;
	}
	$reupload = false;
	if ($timestamp === false) {
		$timestamp = $dbw->timestamp();
	}
	# Test to see if the row exists using INSERT IGNORE
	# This avoids race conditions by locking the row until the commit, and also
	# doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
	$dbw->insert('image', array(
		'img_name' => $this->getName(),
		'img_size' => $this->size,
		'img_width' => intval($this->width),
		'img_height' => intval($this->height),
		'img_bits' => $this->bits,
		'img_media_type' => $this->media_type,
		'img_major_mime' => $this->major_mime,
		'img_minor_mime' => $this->minor_mime,
		'img_timestamp' => $timestamp,
		'img_description' => $comment,
		'img_user' => $user->getId(),
		'img_user_text' => $user->getName(),
		'img_metadata' => $this->metadata,
		'img_sha1' => $this->sha1
	), __METHOD__, 'IGNORE');
	if ($dbw->affectedRows() == 0) {
		// NOTE(review): $reupload is set but never read again in this variant
		$reupload = true;
		# Collision, this is an update of a file
		# Insert previous contents into oldimage
		$dbw->insertSelect('oldimage', 'image', array(
			'oi_name' => 'img_name',
			'oi_archive_name' => $dbw->addQuotes($oldver),
			'oi_size' => 'img_size',
			'oi_width' => 'img_width',
			'oi_height' => 'img_height',
			'oi_bits' => 'img_bits',
			'oi_timestamp' => 'img_timestamp',
			'oi_description' => 'img_description',
			'oi_user' => 'img_user',
			'oi_user_text' => 'img_user_text',
			'oi_metadata' => 'img_metadata',
			'oi_media_type' => 'img_media_type',
			'oi_major_mime' => 'img_major_mime',
			'oi_minor_mime' => 'img_minor_mime',
			'oi_sha1' => 'img_sha1'
		), array('img_name' => $this->getName()), __METHOD__);
		# Update the current image row
		$dbw->update('image', array(
			'img_size' => $this->size,
			'img_width' => intval($this->width),
			'img_height' => intval($this->height),
			'img_bits' => $this->bits,
			'img_media_type' => $this->media_type,
			'img_major_mime' => $this->major_mime,
			'img_minor_mime' => $this->minor_mime,
			'img_timestamp' => $timestamp,
			'img_description' => $comment,
			'img_user' => $user->getId(),
			'img_user_text' => $user->getName(),
			'img_metadata' => $this->metadata,
			'img_sha1' => $this->sha1
		), array('img_name' => $this->getName()), __METHOD__);
	} else {
		# This is a new file
		# Update the image count
		$site_stats = $dbw->tableName('site_stats');
		$dbw->query("UPDATE {$site_stats} SET ss_images=ss_images+1", __METHOD__);
	}
	# Commit the transaction now, in case something goes wrong later
	# The most important thing is that files don't get lost, especially archives
	$dbw->commit();
	# Invalidate cache for all pages using this file
	$update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
	$update->doUpdate();
	return true;
}
/**
 * Hook callback: refresh what-links-here data after a Source page is saved.
 *
 * People and family pages citing this source may render differently once the
 * citation text changes, so their caches are invalidated via pagelinks.
 *
 * @return bool Always true, so other hooks still get called
 */
function updateSourceWLH(&$article, &$user, &$text, &$summary, $minor, $dummy1, $dummy2, &$flags) {
	$title = $article->getTitle();
	if ($title->getNamespace() != NS_SOURCE) {
		return true;
	}
	// update people and families that link to this source, because source citation could have changed
	$cacheUpdate = new HTMLCacheUpdate($title, 'pagelinks');
	$cacheUpdate->doUpdate();
	return true;
}
// NOTE(review): fragment — the opening of the enclosing routine (and the
// definitions of $revision, $newid, $page, $dbw, $oldones, $restored,
// $previousRevId) lie outside this view. Looks like the tail of a
// command-line undelete script; confirm against the full file.
exit(1); }
if ($revision) {
	// Attach the latest revision to the page...
	$wasnew = $article->updateIfNewerOn($dbw, $revision, $previousRevId);
	if ($newid || $wasnew) {
		// Update site stats, link tables, etc
		$article->createUpdates($revision);
	}
	if ($newid) {
		Article::onArticleCreate($page);
	} else {
		Article::onArticleEdit($page);
	}
	// Image pages additionally need embedding pages re-rendered
	if ($page->getNamespace() == NS_IMAGE) {
		$update = new HTMLCacheUpdate($page, 'imagelinks');
		$update->doUpdate();
	}
} else {
	// Revision couldn't be created. This is very weird
	print "We got an unknown error\n";
	exit(1);
}
# Now that it's safely stored, take it out of the archive
$dbw->delete('archive', array('ar_namespace' => $page->getNamespace(), 'ar_title' => $page->getDBkey(), $oldones), __METHOD__);
// Report the restored page; $restored is set by code above this fragment
print $page->getPrefixedText();
if ($restored) {
	print "\n";
} else {
	print " FAILED\n";
}
/**
 * Invalidate link lists affected by page property changes.
 *
 * $wgPagePropLinkInvalidations maps a property name to one link table name
 * (or a list of them); an HTMLCacheUpdate is run against each mapped table
 * for every changed property.
 *
 * @param array $changed Map of property name => new value
 */
private function invalidateProperties($changed) {
	global $wgPagePropLinkInvalidations;
	foreach (array_keys($changed) as $name) {
		if (!isset($wgPagePropLinkInvalidations[$name])) {
			continue;
		}
		$tables = $wgPagePropLinkInvalidations[$name];
		// A single table name may be given bare; normalise to a list
		if (!is_array($tables)) {
			$tables = array($tables);
		}
		foreach ($tables as $table) {
			$job = new HTMLCacheUpdate($this->mTitle, $table);
			$job->doUpdate();
		}
	}
}
/**
 * Update page_touched timestamps and send squid purge messages for
 * pages linking to this title. May be sent to the job queue depending
 * on the number of links. Typically called on create and delete.
 */
public function touchLinks() {
	$tables = array('pagelinks');
	// Category pages also need their member-list renderings refreshed
	if ($this->getNamespace() == NS_CATEGORY) {
		$tables[] = 'categorylinks';
	}
	foreach ($tables as $table) {
		$update = new HTMLCacheUpdate($this, $table);
		$update->doUpdate();
	}
}
/**
 * Invalidate the cache of every page embedding the given image title.
 *
 * @param Title $title Image title whose imagelinks should be purged
 */
function touch(&$title) {
	wfProfileIn(__METHOD__);
	$cacheUpdate = new HTMLCacheUpdate($title, 'imagelinks');
	$cacheUpdate->doUpdate();
	wfProfileOut(__METHOD__);
}
/**
 * Override handling of action=purge: flush the file's caches and
 * re-render every page embedding it, then fall through to the
 * normal page purge.
 */
function doPurge() {
	$this->img = new Image($this->mTitle);
	if (!$this->img->exists()) {
		wfDebug("ImagePage::doPurge no image\n");
	} else {
		wfDebug("ImagePage::doPurge purging " . $this->img->getName() . "\n");
		// Pages rendering this image must be re-parsed
		$linksUpdate = new HTMLCacheUpdate($this->mTitle, 'imagelinks');
		$linksUpdate->doUpdate();
		$this->img->purgeCache();
	}
	parent::doPurge();
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * @param array $timestamps Pass an empty array to restore all revisions,
 *   otherwise list the ones to undelete.
 * @param bool $unsuppress Remove all ar_deleted/fa_deleted restrictions of seletected revs
 * @param string $comment
 * @throws ReadOnlyError
 * @return Status Status object containing the number of revisions restored on success
 */
private function undeleteRevisions($timestamps, $unsuppress = false, $comment = '') {
	if (wfReadOnly()) {
		throw new ReadOnlyError();
	}
	$restoreAll = empty($timestamps);
	$dbw = wfGetDB(DB_MASTER);
	# Does this page already exist? We'll have to update it...
	$article = WikiPage::factory($this->title);
	# Load latest data for the current page (bug 31179)
	$article->loadPageData('fromdbmaster');
	$oldcountable = $article->isCountable();
	// Lock the page row for the duration of the restore
	$page = $dbw->selectRow('page', array('page_id', 'page_latest'), array('page_namespace' => $this->title->getNamespace(), 'page_title' => $this->title->getDBkey()), __METHOD__, array('FOR UPDATE'));
	if ($page) {
		$makepage = false;
		# Page already exists. Import the history, and if necessary
		# we'll update the latest revision field in the record.
		$previousRevId = $page->page_latest;
		# Get the time span of this page
		$previousTimestamp = $dbw->selectField('revision', 'rev_timestamp', array('rev_id' => $previousRevId), __METHOD__);
		if ($previousTimestamp === false) {
			// Broken page_latest pointer: warn and bail with zero restored
			wfDebug(__METHOD__ . ": existing page refers to a page_latest that does not exist\n");
			$status = Status::newGood(0);
			$status->warning('undeleterevision-missing');
			return $status;
		}
	} else {
		# Have to create a new article...
		$makepage = true;
		$previousRevId = 0;
		$previousTimestamp = 0;
	}
	// Build the archive-row filter; restrict to requested timestamps if given
	$oldWhere = array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey());
	if (!$restoreAll) {
		$oldWhere['ar_timestamp'] = array_map(array(&$dbw, 'timestamp'), $timestamps);
	}
	$fields = array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id', 'ar_deleted', 'ar_page_id', 'ar_len', 'ar_sha1');
	// ContentHandler columns only exist when the wiki stores them
	if ($this->config->get('ContentHandlerUseDB')) {
		$fields[] = 'ar_content_format';
		$fields[] = 'ar_content_model';
	}
	/**
	 * Select each archived revision...
	 */
	$result = $dbw->select('archive', $fields, $oldWhere, __METHOD__, array('ORDER BY' => 'ar_timestamp'));
	$rev_count = $result->numRows();
	if (!$rev_count) {
		wfDebug(__METHOD__ . ": no revisions to restore\n");
		$status = Status::newGood(0);
		$status->warning("undelete-no-results");
		return $status;
	}
	$result->seek($rev_count - 1); // move to last
	$row = $result->fetchObject(); // get newest archived rev
	$oldPageId = (int) $row->ar_page_id; // pass this to ArticleUndelete hook
	$result->seek(0); // move back
	// grab the content to check consistency with global state before restoring the page.
	$revision = Revision::newFromArchiveRow($row, array('title' => $article->getTitle()));
	$user = User::newFromName($revision->getUserText(Revision::RAW), false);
	$content = $revision->getContent(Revision::RAW);
	// NOTE: article ID may not be known yet. prepareSave() should not modify the database.
	$status = $content->prepareSave($article, 0, -1, $user);
	if (!$status->isOK()) {
		return $status;
	}
	if ($makepage) {
		// Check the state of the newest to-be version...
		if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
			return Status::newFatal("undeleterevdel");
		}
		// Safe to insert now...
		$newid = $article->insertOn($dbw);
		$pageId = $newid;
	} else {
		// Check if a deleted revision will become the current revision...
		if ($row->ar_timestamp > $previousTimestamp) {
			// Check the state of the newest to-be version...
			if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
				return Status::newFatal("undeleterevdel");
			}
		}
		$newid = false;
		$pageId = $article->getId();
	}
	$revision = null;
	$restored = 0;
	foreach ($result as $row) {
		// Check for key dupes due to shitty archive integrity.
		if ($row->ar_rev_id) {
			$exists = $dbw->selectField('revision', '1', array('rev_id' => $row->ar_rev_id), __METHOD__);
			if ($exists) {
				continue; // don't throw DB errors
			}
		}
		// Insert one revision at a time...maintaining deletion status
		// unless we are specifically removing all restrictions...
		$revision = Revision::newFromArchiveRow($row, array('page' => $pageId, 'title' => $this->title, 'deleted' => $unsuppress ? 0 : $row->ar_deleted));
		$revision->insertOn($dbw);
		$restored++;
		Hooks::run('ArticleRevisionUndeleted', array(&$this->title, $revision, $row->ar_page_id));
	}
	# Now that it's safely stored, take it out of the archive
	$dbw->delete('archive', $oldWhere, __METHOD__);
	// Was anything restored at all?
	if ($restored == 0) {
		return Status::newGood(0);
	}
	$created = (bool) $newid;
	// Attach the latest revision to the page...
	$wasnew = $article->updateIfNewerOn($dbw, $revision, $previousRevId);
	if ($created || $wasnew) {
		// Update site stats, link tables, etc
		$user = User::newFromName($revision->getUserText(Revision::RAW), false);
		$article->doEditUpdates($revision, $user, array('created' => $created, 'oldcountable' => $oldcountable));
	}
	Hooks::run('ArticleUndelete', array(&$this->title, $created, $comment, $oldPageId));
	// File pages also need embedding pages re-rendered
	if ($this->title->getNamespace() == NS_FILE) {
		$update = new HTMLCacheUpdate($this->title, 'imagelinks');
		$update->doUpdate();
	}
	return Status::newGood($restored);
}
/**
 * Record a file upload in the upload log and the image table
 * (older variant: attributes the upload to the global $wgUser).
 *
 * Inserts the `image` row (or, on re-upload, archives the previous row into
 * `oldimage` and updates the current one), writes an upload log entry,
 * creates or null-edits the description page, and purges caches of pages
 * embedding the file.
 *
 * @param string $oldver Archive name for the previous version; written into
 *                       oi_archive_name when the image row already exists
 * @param string $comment Upload description; stored as img_description
 * @param string $pageText Text for a newly created description page
 * @param bool|array $props File properties, or false to compute them from
 *                          the virtual URL
 * @param string|bool $timestamp DB timestamp to store, or false for "now"
 * @return bool False if the file is missing on disk, true otherwise
 */
function recordUpload2($oldver, $comment, $pageText, $props = false, $timestamp = false) {
	global $wgUser;
	$dbw = $this->repo->getMasterDB();
	if (!$props) {
		$props = $this->repo->getFileProps($this->getVirtualUrl());
	}
	// Stamp ownership/description/time onto the in-memory props before caching
	$props['description'] = $comment;
	$props['user'] = $wgUser->getID();
	$props['user_text'] = $wgUser->getName();
	$props['timestamp'] = wfTimestamp(TS_MW);
	$this->setProps($props);
	// Delete thumbnails and refresh the metadata cache
	$this->purgeThumbnails();
	$this->saveToCache();
	wfPurgeSquidServers(array($this->getURL()));
	// Fail now if the file isn't there
	if (!$this->fileExists) {
		wfDebug(__METHOD__ . ": File " . $this->getPath() . " went missing!\n");
		return false;
	}
	$reupload = false;
	if ($timestamp === false) {
		$timestamp = $dbw->timestamp();
	}
	# Test to see if the row exists using INSERT IGNORE
	# This avoids race conditions by locking the row until the commit, and also
	# doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
	$dbw->insert('image', array(
		'img_name' => $this->getName(),
		'img_size' => $this->size,
		'img_width' => intval($this->width),
		'img_height' => intval($this->height),
		'img_bits' => $this->bits,
		'img_media_type' => $this->media_type,
		'img_major_mime' => $this->major_mime,
		'img_minor_mime' => $this->minor_mime,
		'img_timestamp' => $timestamp,
		'img_description' => $comment,
		'img_user' => $wgUser->getID(),
		'img_user_text' => $wgUser->getName(),
		'img_metadata' => $this->metadata,
		'img_sha1' => $this->sha1
	), __METHOD__, 'IGNORE');
	if ($dbw->affectedRows() == 0) {
		$reupload = true;
		# Collision, this is an update of a file
		# Insert previous contents into oldimage
		$dbw->insertSelect('oldimage', 'image', array(
			'oi_name' => 'img_name',
			'oi_archive_name' => $dbw->addQuotes($oldver),
			'oi_size' => 'img_size',
			'oi_width' => 'img_width',
			'oi_height' => 'img_height',
			'oi_bits' => 'img_bits',
			'oi_timestamp' => 'img_timestamp',
			'oi_description' => 'img_description',
			'oi_user' => 'img_user',
			'oi_user_text' => 'img_user_text',
			'oi_metadata' => 'img_metadata',
			'oi_media_type' => 'img_media_type',
			'oi_major_mime' => 'img_major_mime',
			'oi_minor_mime' => 'img_minor_mime',
			'oi_sha1' => 'img_sha1'
		), array('img_name' => $this->getName()), __METHOD__);
		# Update the current image row
		$dbw->update('image', array(
			'img_size' => $this->size,
			'img_width' => intval($this->width),
			'img_height' => intval($this->height),
			'img_bits' => $this->bits,
			'img_media_type' => $this->media_type,
			'img_major_mime' => $this->major_mime,
			'img_minor_mime' => $this->minor_mime,
			'img_timestamp' => $timestamp,
			'img_description' => $comment,
			'img_user' => $wgUser->getID(),
			'img_user_text' => $wgUser->getName(),
			'img_metadata' => $this->metadata,
			'img_sha1' => $this->sha1
		), array('img_name' => $this->getName()), __METHOD__);
	} else {
		# This is a new file
		# Update the image count
		$site_stats = $dbw->tableName('site_stats');
		$dbw->query("UPDATE {$site_stats} SET ss_images=ss_images+1", __METHOD__);
	}
	$descTitle = $this->getTitle();
	$article = new Article($descTitle);
	# Add the log entry
	$log = new LogPage('upload');
	$action = $reupload ? 'overwrite' : 'upload';
	$log->addEntry($action, $descTitle, $comment);
	if ($descTitle->exists()) {
		# Create a null revision
		$nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleId(), $log->getRcComment(), false);
		$nullRevision->insertOn($dbw);
		$article->updateRevisionOn($dbw, $nullRevision);
		# Invalidate the cache for the description page
		$descTitle->invalidateCache();
		$descTitle->purgeSquid();
	} else {
		// New file; create the description page.
		// There's already a log entry, so don't make a second RC entry
		$article->doEdit($pageText, $comment, EDIT_NEW | EDIT_SUPPRESS_RC);
	}
	# Hooks, hooks, the magic of hooks...
	wfRunHooks('FileUpload', array($this));
	# Commit the transaction now, in case something goes wrong later
	# The most important thing is that files don't get lost, especially archives
	$dbw->immediateCommit();
	# Invalidate cache for all pages using this file
	$update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
	$update->doUpdate();
	return true;
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * NOTE(review): the original docblock also listed $comment and $fileVersions,
 * which this signature does not accept — they belong to a newer variant.
 *
 * @param array $timestamps Pass an empty array to restore all revisions,
 *   otherwise list the ones to undelete.
 * @return mixed Number of revisions restored, self::UNDELETE_UNKNOWNERR on
 *   revision-creation failure, or false on failure
 */
private function undeleteRevisions($timestamps) {
	if (wfReadOnly()) {
		return false;
	}
	$restoreAll = empty($timestamps);
	$dbw = wfGetDB(DB_MASTER);
	# Does this page already exist? We'll have to update it...
	$article = new Article($this->title);
	$options = 'FOR UPDATE';
	$page = $dbw->selectRow('page', array('page_id', 'page_latest'), array('page_namespace' => $this->title->getNamespace(), 'page_title' => $this->title->getDBkey()), __METHOD__, $options);
	if ($page) {
		# Page already exists. Import the history, and if necessary
		# we'll update the latest revision field in the record.
		$newid = 0;
		$pageId = $page->page_id;
		$previousRevId = $page->page_latest;
	} else {
		# Have to create a new article...
		$newid = $article->insertOn($dbw);
		$pageId = $newid;
		$previousRevId = 0;
	}
	if ($restoreAll) {
		$oldones = '1 = 1'; # All revisions...
	} else {
		// Quote and normalise the requested timestamps for the IN clause
		$oldts = implode(',', array_map(array(&$dbw, 'addQuotes'), array_map(array(&$dbw, 'timestamp'), $timestamps)));
		$oldones = "ar_timestamp IN ( {$oldts} )";
	}
	/**
	 * Restore each revision...
	 */
	$result = $dbw->select('archive', array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id', 'ar_page_id', 'ar_len'), array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), __METHOD__, array('ORDER BY' => 'ar_timestamp'));
	if ($dbw->numRows($result) < count($timestamps)) {
		wfDebug(__METHOD__ . ": couldn't find all requested rows\n");
		return false;
	}
	$revision = null;
	$restored = 0;
	while ($row = $dbw->fetchObject($result)) {
		if ($row->ar_text_id) {
			// Revision was deleted in 1.5+; text is in
			// the regular text table, use the reference.
			// Specify null here so the so the text is
			// dereferenced for page length info if needed.
			$revText = null;
		} else {
			// Revision was deleted in 1.4 or earlier.
			// Text is squashed into the archive row, and
			// a new text table entry will be created for it.
			$revText = Revision::getRevisionText($row, 'ar_');
		}
		$revision = new Revision(array('page' => $pageId, 'id' => $row->ar_rev_id, 'text' => $revText, 'comment' => $row->ar_comment, 'user' => $row->ar_user, 'user_text' => $row->ar_user_text, 'timestamp' => $row->ar_timestamp, 'minor_edit' => $row->ar_minor_edit, 'text_id' => $row->ar_text_id, 'len' => $row->ar_len));
		$revision->insertOn($dbw);
		$restored++;
		wfRunHooks('ArticleRevisionUndeleted', array(&$this->title, $revision, $row->ar_page_id));
	}
	// Was anything restored at all?
	if ($restored == 0) {
		return 0;
	}
	if ($revision) {
		// Attach the latest revision to the page...
		$wasnew = $article->updateIfNewerOn($dbw, $revision, $previousRevId);
		if ($newid || $wasnew) {
			// Update site stats, link tables, etc
			$article->createUpdates($revision);
		}
		if ($newid) {
			wfRunHooks('ArticleUndelete', array(&$this->title, true));
			Article::onArticleCreate($this->title);
		} else {
			wfRunHooks('ArticleUndelete', array(&$this->title, false));
			Article::onArticleEdit($this->title);
		}
		// Image pages additionally need embedding pages re-rendered
		if ($this->title->getNamespace() == NS_IMAGE) {
			$update = new HTMLCacheUpdate($this->title, 'imagelinks');
			$update->doUpdate();
		}
	} else {
		// Revision couldn't be created. This is very weird
		return self::UNDELETE_UNKNOWNERR;
	}
	# Now that it's safely stored, take it out of the archive
	$dbw->delete('archive', array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), __METHOD__);
	return $restored;
}
/**
 * Record an InstantCommons file download in the log and the ic_image table.
 *
 * Inserts the `ic_image` row (or updates it on collision), writes an
 * icdownload log entry, null-edits an existing description page, and purges
 * caches of pages embedding the file.
 *
 * @param string $comment Description; stored as img_description
 * @param string|bool $timestamp DB timestamp to store, or false for "now"
 * @return bool Always true
 */
private function recordDownload($comment = '', $timestamp = false) {
	global $wgUser;
	$dbw = $this->repo->getMasterDB();
	if ($timestamp === false) {
		$timestamp = $dbw->timestamp();
	}
	list($major, $minor) = self::splitMime($this->mime);
	# Test to see if the row exists using INSERT IGNORE
	# This avoids race conditions by locking the row until the commit, and also
	# doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
	$dbw->insert('ic_image', array(
		'img_name' => $this->getName(),
		'img_size' => $this->size,
		'img_width' => intval($this->width),
		'img_height' => intval($this->height),
		'img_bits' => $this->bits,
		'img_media_type' => $this->type,
		'img_major_mime' => $major,
		'img_minor_mime' => $minor,
		'img_timestamp' => $timestamp,
		'img_description' => $comment,
		'img_user' => $wgUser->getID(),
		'img_user_text' => $wgUser->getName(),
		'img_metadata' => $this->metadata
	), __METHOD__, 'IGNORE');
	if ($dbw->affectedRows() == 0) {
		# Collision, this is an update of a file
		# Update the current image row
		// FIX: use the same media-type/MIME values as the insert branch above.
		// The previous code read $this->media_type / $this->major_mime /
		// $this->minor_mime here, which are not the fields this method
		// populates ($this->type and splitMime($this->mime) are), so a
		// re-download would have written empty MIME columns.
		$dbw->update('ic_image', array(
			'img_size' => $this->size,
			'img_width' => intval($this->width),
			'img_height' => intval($this->height),
			'img_bits' => $this->bits,
			'img_media_type' => $this->type,
			'img_major_mime' => $major,
			'img_minor_mime' => $minor,
			'img_timestamp' => $timestamp,
			'img_description' => $comment,
			'img_user' => $wgUser->getID(),
			'img_user_text' => $wgUser->getName(),
			'img_metadata' => $this->metadata
		), array('img_name' => $this->getName()), __METHOD__);
	} else {
		# This is a new file
		# Update the image count
		$site_stats = $dbw->tableName('site_stats');
		$dbw->query("UPDATE {$site_stats} SET ss_images=ss_images+1", __METHOD__);
	}
	$descTitle = $this->getTitle();
	$article = new Article($descTitle);
	# Add the log entry
	$log = new LogPage('icdownload');
	$log->addEntry('InstantCommons download', $descTitle, $comment);
	if ($descTitle->exists()) {
		# Create a null revision
		$nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleId(), $log->getRcComment(), false);
		$nullRevision->insertOn($dbw);
		$article->updateRevisionOn($dbw, $nullRevision);
		# Invalidate the cache for the description page
		$descTitle->invalidateCache();
		$descTitle->purgeSquid();
	}
	# Commit the transaction now, in case something goes wrong later
	# The most important thing is that files don't get lost, especially archives
	$dbw->immediateCommit();
	# Invalidate cache for all pages using this file
	$update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
	$update->doUpdate();
	return true;
}
/**
 * Clean up caches and related state after a page deletion: the paired
 * subject/talk page, linking pages, the file cache, the message cache,
 * image embeds, and new-talk notifications, as the namespace requires.
 *
 * @param Title $title The deleted page's title
 */
public static function onArticleDelete($title) {
	global $wgMessageCache;
	# Update existence markers on article/talk tabs...
	$other = $title->isTalkPage() ? $title->getSubjectPage() : $title->getTalkPage();
	$other->invalidateCache();
	$other->purgeSquid();
	$title->touchLinks();
	$title->purgeSquid();
	# File cache
	HTMLFileCache::clearFileCache($title);
	$ns = $title->getNamespace();
	# Messages
	if ($ns == NS_MEDIAWIKI) {
		$wgMessageCache->replace($title->getDBkey(), false);
	}
	# Images
	if ($ns == NS_FILE) {
		$cacheUpdate = new HTMLCacheUpdate($title, 'imagelinks');
		$cacheUpdate->doUpdate();
	}
	# User talk pages
	if ($ns == NS_USER_TALK) {
		$user = User::newFromName($title->getText(), false);
		$user->setNewtalk(false);
	}
}
/**
 * Hook callback run after a MySource page is moved: purge pages citing
 * either the old or new title and copy the page links across.
 *
 * @return bool Always true, so other hooks still get called
 */
function propagateMySourceMove(&$title, &$newTitle, &$user, $pageId, $redirPageId) {
	if ($title->getNamespace() != NS_MYSOURCE) {
		return true;
	}
	// update people and families that link to this mysource, because source citation has changed
	foreach (array($title, $newTitle) as $affected) {
		$cacheUpdate = new HTMLCacheUpdate($affected, 'pagelinks');
		$cacheUpdate->doUpdate();
	}
	StructuredData::copyPageLinks($title, $newTitle);
	return true;
}
/**
 * Purges the memcache entry for this poll and any articles that have the
 * current poll transcluded.
 */
public function purge() {
	global $wgMemc;
	wfProfileIn(__METHOD__);
	// clear data cache
	$wgMemc->delete($this->mMemcacheKey);
	$this->mData = null;
	$article = Article::newFromId($this->mPollId);
	if (!empty($article)) {
		// purge poll page
		$article->doPurge();
		// purge articles embedding this poll
		$embedUpdate = new HTMLCacheUpdate($article->getTitle(), 'templatelinks');
		$embedUpdate->doUpdate();
		// apply changes to page_touched fields
		wfGetDB(DB_MASTER)->commit();
	}
	wfDebug(__METHOD__ . ": purged poll #{$this->mPollId}\n");
	wfProfileOut(__METHOD__);
}
/**
 * Clear caches referencing a page that has just been deleted: linking
 * pages, squid, the HTML file cache, the message cache, and image embeds.
 *
 * @param Title $title The deleted page's title
 */
static function onArticleDelete($title) {
	global $wgUseFileCache, $wgMessageCache;
	$title->touchLinks();
	$title->purgeSquid();
	# File cache
	if ($wgUseFileCache) {
		$fileCache = new HTMLFileCache($title);
		@unlink($fileCache->fileCacheName());
	}
	$ns = $title->getNamespace();
	if ($ns == NS_MEDIAWIKI) {
		// Deleted message pages fall back to the built-in default text
		$wgMessageCache->replace($title->getDBkey(), false);
	}
	if ($ns == NS_IMAGE) {
		// Re-render pages that embedded the image
		$cacheUpdate = new HTMLCacheUpdate($title, 'imagelinks');
		$cacheUpdate->doUpdate();
	}
}
/**
 * Update page_touched timestamps and send squid purge messages for
 * pages linking to this title. May be sent to the job queue depending
 * on the number of links. Typically called on create and delete.
 */
public function touchLinks() {
	$ns = $this->getNamespace();
	$tables = array('pagelinks');
	// Invalidate caches of articles which include this page.
	// Typically the main namespace video gets purged anyway when it gets
	// updated, so templatelinks is skipped there; this could cause a small
	// bug on boundary cases of the page staying in the cache.
	if ($ns != NS_VIDEO) {
		$tables[] = 'templatelinks';
	}
	if ($ns == NS_CATEGORY) {
		$tables[] = 'categorylinks';
	}
	foreach ($tables as $table) {
		$update = new HTMLCacheUpdate($this, $table);
		$update->doUpdate();
	}
}
/**
 * Override handling of action=purge: refresh the file row and caches and
 * re-render embedding pages, then fall through to the normal page purge.
 */
public function doPurge() {
	$this->loadFile();
	if (!$this->img->exists()) {
		wfDebug("ImagePage::doPurge no image for " . $this->img->getName() . "; limiting purge to cache only\n");
		// even if the file supposedly doesn't exist, force any cached information
		// to be updated (in case the cached information is wrong)
		$this->img->purgeCache();
	} else {
		wfDebug("ImagePage::doPurge purging " . $this->img->getName() . "\n");
		// Re-render every page embedding this image
		$linksUpdate = new HTMLCacheUpdate($this->mTitle, 'imagelinks');
		$linksUpdate->doUpdate();
		$this->img->upgradeRow();
		$this->img->purgeCache();
	}
	parent::doPurge();
}
/**
 * Override handling of action=purge: refresh the file row, force thumbnail
 * regeneration, drop the repo's redirect cache, and re-render embedding
 * pages before the normal page purge.
 * @return bool
 */
public function doPurge() {
	$this->loadFile();
	if (!$this->mFile->exists()) {
		wfDebug('ImagePage::doPurge no image for ' . $this->mFile->getName() . "; limiting purge to cache only\n");
		// even if the file supposedly doesn't exist, force any cached information
		// to be updated (in case the cached information is wrong)
		$this->mFile->purgeCache(array('forThumbRefresh' => true));
	} else {
		wfDebug('ImagePage::doPurge purging ' . $this->mFile->getName() . "\n");
		// Re-render every page embedding this image
		$linksUpdate = new HTMLCacheUpdate($this->mTitle, 'imagelinks');
		$linksUpdate->doUpdate();
		$this->mFile->upgradeRow();
		$this->mFile->purgeCache(array('forThumbRefresh' => true));
	}
	if ($this->mRepo) {
		// Purge redirect cache
		$this->mRepo->invalidateImageRedirect($this->mTitle);
	}
	return parent::doPurge();
}
/**
 * Set the deletion-visibility bitfield on a set of old image versions.
 *
 * Loads the requested oldimage rows in one query, applies the new bitfield
 * to each (moving the physical file between public and hidden storage as
 * needed), logs the change, and purges caches for pages using the file.
 *
 * @param Title $title the page these events apply to
 * @param array $items list of oldimage timestamps identifying the versions
 * @param int $bitfield new oi_deleted value
 * @param string $comment Comment for log records
 * @return bool True when every requested version was found and permitted
 */
function setOldImgVisibility($title, $items, $bitfield, $comment) {
	global $wgOut;
	$userAllowedAll = $success = true;
	$count = 0;
	$set = array();
	// FIX: both accumulators below were previously used without being
	// initialized, raising notices — and with an empty $items, $where was
	// undefined when passed to select().
	$where = array();
	$filesObjs = array();
	// Run through and pull all our data in one query
	foreach ($items as $timestamp) {
		$where[] = $timestamp . '!' . $title->getDBKey();
	}
	$result = $this->dbw->select('oldimage', '*', array('oi_name' => $title->getDBKey(), 'oi_archive_name' => $where), __METHOD__);
	while ($row = $this->dbw->fetchObject($result)) {
		$filesObjs[$row->oi_archive_name] = RepoGroup::singleton()->getLocalRepo()->newFileFromRow($row);
		$filesObjs[$row->oi_archive_name]->user = $row->oi_user;
		$filesObjs[$row->oi_archive_name]->user_text = $row->oi_user_text;
	}
	// To work!
	foreach ($items as $timestamp) {
		$archivename = $timestamp . '!' . $title->getDBKey();
		if (!isset($filesObjs[$archivename])) {
			$success = false;
			continue; // Must exist
		} else {
			if (!$filesObjs[$archivename]->userCan(File::DELETED_RESTRICTED)) {
				$userAllowedAll = false;
				continue;
			}
		}
		$transaction = true;
		// Which revisions did we change anything about?
		if ($filesObjs[$archivename]->deleted != $bitfield) {
			$count++;
			$this->dbw->begin();
			$this->updateOldFiles($filesObjs[$archivename], $bitfield);
			// If this image is currently hidden...
			if ($filesObjs[$archivename]->deleted & File::DELETED_FILE) {
				if ($bitfield & File::DELETED_FILE) {
					# Leave it alone if we are not changing this...
					$set[] = $archivename;
					$transaction = true;
				} else {
					# We are moving this out
					$transaction = $this->makeOldImagePublic($filesObjs[$archivename]);
					$set[] = $transaction;
				}
			// Is it just now becoming hidden?
			} else {
				if ($bitfield & File::DELETED_FILE) {
					$transaction = $this->makeOldImagePrivate($filesObjs[$archivename]);
					$set[] = $transaction;
				} else {
					$set[] = $timestamp;
				}
			}
			// If our file operations fail, then revert back the db
			if ($transaction == false) {
				$this->dbw->rollback();
				return false;
			}
			$this->dbw->commit();
		}
	}
	// Log if something was changed
	if ($count > 0) {
		$this->updateLog($title, $count, $bitfield, $filesObjs[$archivename]->deleted, $comment, $title, 'oldimage', $set);
		# Purge page/history
		$file = wfLocalFile($title);
		$file->purgeCache();
		$file->purgeHistory();
		# Invalidate cache for all pages using this file
		$update = new HTMLCacheUpdate($title, 'imagelinks');
		$update->doUpdate();
	}
	// Where all revs allowed to be set?
	if (!$userAllowedAll) {
		$wgOut->permissionRequired('suppressrevision');
		return false;
	}
	return $success;
}
/**
 * This is the meaty bit -- restores archived revisions of the given page
 * to the cur/old tables. If the page currently exists, all revisions will
 * be stuffed into old, otherwise the most recent will go into cur.
 *
 * @param $timestamps Array: pass an empty array to restore all revisions, otherwise list the ones to undelete.
 * @param $unsuppress Boolean: remove all ar_deleted/fa_deleted restrictions of seletected revs
 * @param $comment String
 *
 * @return Mixed: number of revisions restored or false on failure
 */
private function undeleteRevisions($timestamps, $unsuppress = false, $comment = '') {
	if (wfReadOnly()) {
		return false;
	}
	$restoreAll = empty($timestamps);
	$dbw = wfGetDB(DB_MASTER);
	# Does this page already exist? We'll have to update it...
	$article = WikiPage::factory($this->title);
	# Load latest data for the current page (bug 31179)
	$article->loadPageData('fromdbmaster');
	$oldcountable = $article->isCountable();
	// Lock the page row for the duration of the restore
	$page = $dbw->selectRow('page', array('page_id', 'page_latest'), array('page_namespace' => $this->title->getNamespace(), 'page_title' => $this->title->getDBkey()), __METHOD__, array('FOR UPDATE'));
	if ($page) {
		$makepage = false;
		# Page already exists. Import the history, and if necessary
		# we'll update the latest revision field in the record.
		$newid = 0;
		$pageId = $page->page_id;
		$previousRevId = $page->page_latest;
		# Get the time span of this page
		$previousTimestamp = $dbw->selectField('revision', 'rev_timestamp', array('rev_id' => $previousRevId), __METHOD__);
		if ($previousTimestamp === false) {
			// Broken page_latest pointer: warn and bail with zero restored
			wfDebug(__METHOD__ . ": existing page refers to a page_latest that does not exist\n");
			return 0;
		}
	} else {
		# Have to create a new article...
		$makepage = true;
		$previousRevId = 0;
		$previousTimestamp = 0;
	}
	if ($restoreAll) {
		$oldones = '1 = 1'; # All revisions...
	} else {
		// Quote and normalise the requested timestamps for the IN clause
		$oldts = implode(',', array_map(array(&$dbw, 'addQuotes'), array_map(array(&$dbw, 'timestamp'), $timestamps)));
		$oldones = "ar_timestamp IN ( {$oldts} )";
	}
	/**
	 * Select each archived revision...
	 */
	$result = $dbw->select('archive', array('ar_rev_id', 'ar_text', 'ar_comment', 'ar_user', 'ar_user_text', 'ar_timestamp', 'ar_minor_edit', 'ar_flags', 'ar_text_id', 'ar_deleted', 'ar_page_id', 'ar_len', 'ar_sha1'), array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), __METHOD__, array('ORDER BY' => 'ar_timestamp'));
	$ret = $dbw->resultObject($result);
	$rev_count = $dbw->numRows($result);
	if (!$rev_count) {
		wfDebug(__METHOD__ . ": no revisions to restore\n");
		return false; // ???
	}
	$ret->seek($rev_count - 1); // move to last
	$row = $ret->fetchObject(); // get newest archived rev
	$ret->seek(0); // move back
	if ($makepage) {
		// Check the state of the newest to-be version...
		if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
			return false; // we can't leave the current revision like this!
		}
		// Safe to insert now...
		$newid = $article->insertOn($dbw);
		$pageId = $newid;
	} else {
		// Check if a deleted revision will become the current revision...
		if ($row->ar_timestamp > $previousTimestamp) {
			// Check the state of the newest to-be version...
			if (!$unsuppress && $row->ar_deleted & Revision::DELETED_TEXT) {
				return false; // we can't leave the current revision like this!
			}
		}
	}
	$revision = null;
	$restored = 0;
	foreach ($ret as $row) {
		// Check for key dupes due to shitty archive integrity.
		if ($row->ar_rev_id) {
			$exists = $dbw->selectField('revision', '1', array('rev_id' => $row->ar_rev_id), __METHOD__);
			if ($exists) {
				continue; // don't throw DB errors
			}
		}
		// Insert one revision at a time...maintaining deletion status
		// unless we are specifically removing all restrictions...
		$revision = Revision::newFromArchiveRow($row, array('page' => $pageId, 'deleted' => $unsuppress ? 0 : $row->ar_deleted));
		$revision->insertOn($dbw);
		$restored++;
		wfRunHooks('ArticleRevisionUndeleted', array(&$this->title, $revision, $row->ar_page_id));
	}
	# Now that it's safely stored, take it out of the archive
	$dbw->delete('archive', array('ar_namespace' => $this->title->getNamespace(), 'ar_title' => $this->title->getDBkey(), $oldones), __METHOD__);
	// Was anything restored at all?
	if ($restored == 0) {
		return 0;
	}
	$created = (bool) $newid;
	// Attach the latest revision to the page...
	$wasnew = $article->updateIfNewerOn($dbw, $revision, $previousRevId);
	if ($created || $wasnew) {
		// Update site stats, link tables, etc
		$user = User::newFromName($revision->getRawUserText(), false);
		$article->doEditUpdates($revision, $user, array('created' => $created, 'oldcountable' => $oldcountable));
	}
	wfRunHooks('ArticleUndelete', array(&$this->title, $created, $comment));
	// File pages additionally need embedding pages re-rendered
	if ($this->title->getNamespace() == NS_FILE) {
		$update = new HTMLCacheUpdate($this->title, 'imagelinks');
		$update->doUpdate();
	}
	return $restored;
}
/**
 * Record a file upload in the upload log and the image table.
 *
 * Runs inside a single DB transaction up to the main commit; caching and
 * purging are deliberately deferred until after the commit (bug 24978).
 *
 * @param string $oldver archive name of the previous version (may be empty, see bug 34993)
 * @param string $comment upload summary, also used as img_description
 * @param string $pageText initial description page text (new uploads only)
 * @param bool|array $props file properties; computed from the file if false
 * @param string|bool $timestamp upload timestamp in DB format, or false for "now"
 * @param null|User $user uploader; defaults to $wgUser
 * @return bool true on success, false if the file is missing from storage
 */
function recordUpload2($oldver, $comment, $pageText, $props = false, $timestamp = false, $user = null) {
    wfProfileIn(__METHOD__);
    if (is_null($user)) {
        global $wgUser;
        $user = $wgUser;
    }
    $dbw = $this->repo->getMasterDB();
    $dbw->begin(__METHOD__);
    if (!$props) {
        wfProfileIn(__METHOD__ . '-getProps');
        $props = $this->repo->getFileProps($this->getVirtualUrl());
        wfProfileOut(__METHOD__ . '-getProps');
    }
    if ($timestamp === false) {
        $timestamp = $dbw->timestamp();
    }
    $props['description'] = $comment;
    $props['user'] = $user->getId();
    $props['user_text'] = $user->getName();
    $props['timestamp'] = wfTimestamp(TS_MW, $timestamp); // DB -> TS_MW
    $this->setProps($props);
    # Fail now if the file isn't there
    if (!$this->fileExists) {
        wfDebug(__METHOD__ . ": File " . $this->getRel() . " went missing!\n");
        wfProfileOut(__METHOD__);
        return false;
    }
    $reupload = false;
    # Test to see if the row exists using INSERT IGNORE
    # This avoids race conditions by locking the row until the commit, and also
    # doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
    $dbw->insert('image', array('img_name' => $this->getName(), 'img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $dbw->encodeBlob($this->metadata), 'img_sha1' => $this->sha1), __METHOD__, 'IGNORE');
    if ($dbw->affectedRows() == 0) {
        # (bug 34993) Note: $oldver can be empty here, if the previous
        # version of the file was broken. Allow registration of the new
        # version to continue anyway, because that's better than having
        # an image that's not fixable by user operations.
        $reupload = true;
        # Collision, this is an update of a file
        # Insert previous contents into oldimage
        $dbw->insertSelect('oldimage', 'image', array('oi_name' => 'img_name', 'oi_archive_name' => $dbw->addQuotes($oldver), 'oi_size' => 'img_size', 'oi_width' => 'img_width', 'oi_height' => 'img_height', 'oi_bits' => 'img_bits', 'oi_timestamp' => 'img_timestamp', 'oi_description' => 'img_description', 'oi_user' => 'img_user', 'oi_user_text' => 'img_user_text', 'oi_metadata' => 'img_metadata', 'oi_media_type' => 'img_media_type', 'oi_major_mime' => 'img_major_mime', 'oi_minor_mime' => 'img_minor_mime', 'oi_sha1' => 'img_sha1'), array('img_name' => $this->getName()), __METHOD__);
        # Update the current image row
        $dbw->update('image', array('img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $dbw->encodeBlob($this->metadata), 'img_sha1' => $this->sha1), array('img_name' => $this->getName()), __METHOD__);
    } else {
        # This is a new file, so update the image count
        DeferredUpdates::addUpdate(SiteStatsUpdate::factory(array('images' => 1)));
    }
    $descTitle = $this->getTitle();
    $wikiPage = new WikiFilePage($descTitle);
    $wikiPage->setFile($this);
    # Add the log entry
    $action = $reupload ? 'overwrite' : 'upload';
    $logEntry = new ManualLogEntry('upload', $action);
    $logEntry->setPerformer($user);
    $logEntry->setComment($comment);
    $logEntry->setTarget($descTitle);
    // Allow people using the api to associate log entries with the upload.
    // Log has a timestamp, but sometimes different from upload timestamp.
    $logEntry->setParameters(array('img_sha1' => $this->sha1, 'img_timestamp' => $timestamp));
    // Note we keep $logId around since during new image
    // creation, page doesn't exist yet, so log_page = 0
    // but we want it to point to the page we're making,
    // so we later modify the log entry.
    // For a similar reason, we avoid making an RC entry
    // now and wait until the page exists.
    $logId = $logEntry->insert();
    $exists = $descTitle->exists();
    if ($exists) {
        // Page exists, do RC entry now (otherwise we wait for later).
        $logEntry->publish($logId);
    }
    wfProfileIn(__METHOD__ . '-edit');
    if ($exists) {
        # Create a null revision
        $latest = $descTitle->getLatestRevID();
        $editSummary = LogFormatter::newFromEntry($logEntry)->getPlainActionText();
        $nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleID(), $editSummary, false);
        # newNullRevision() may return null; only attach the revision if it exists
        if (!is_null($nullRevision)) {
            $nullRevision->insertOn($dbw);
            wfRunHooks('NewRevisionFromEditComplete', array($wikiPage, $nullRevision, $latest, $user));
            $wikiPage->updateRevisionOn($dbw, $nullRevision);
        }
    }
    # Commit the transaction now, in case something goes wrong later
    # The most important thing is that files don't get lost, especially archives
    # NOTE: once we have support for nested transactions, the commit may be moved
    # to after $wikiPage->doEdit has been called.
    $dbw->commit(__METHOD__);
    # Save to memcache.
    # We shall not saveToCache before the commit since otherwise
    # in case of a rollback there is an usable file from memcached
    # which in fact doesn't really exist (bug 24978)
    $this->saveToCache();
    if ($exists) {
        # Invalidate the cache for the description page
        $descTitle->invalidateCache();
        $descTitle->purgeSquid();
    } else {
        # New file; create the description page.
        # There's already a log entry, so don't make a second RC entry
        # Squid and file cache for the description page are purged by doEditContent.
        $content = ContentHandler::makeContent($pageText, $descTitle);
        $status = $wikiPage->doEditContent($content, $comment, EDIT_NEW | EDIT_SUPPRESS_RC, false, $user);
        $dbw->begin(__METHOD__); // XXX; doEdit() uses a transaction
        // Now that the page exists, make an RC entry.
        $logEntry->publish($logId);
        if (isset($status->value['revision'])) {
            # Retarget the log entry from page 0 to the page we just created
            $dbw->update('logging', array('log_page' => $status->value['revision']->getPage()), array('log_id' => $logId), __METHOD__);
        }
        $dbw->commit(__METHOD__); // commit before anything bad can happen
    }
    wfProfileOut(__METHOD__ . '-edit');
    if ($reupload) {
        # Delete old thumbnails
        wfProfileIn(__METHOD__ . '-purge');
        $this->purgeThumbnails();
        wfProfileOut(__METHOD__ . '-purge');
        # Remove the old file from the squid cache
        SquidUpdate::purge(array($this->getURL()));
    }
    # Hooks, hooks, the magic of hooks...
    wfProfileIn(__METHOD__ . '-hooks');
    wfRunHooks('FileUpload', array($this, $reupload, $descTitle->exists()));
    wfProfileOut(__METHOD__ . '-hooks');
    # Invalidate cache for all pages using this file
    $update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
    $update->doUpdate();
    if (!$reupload) {
        # First upload: schedule recursive re-parses of embedding pages
        LinksUpdate::queueRecursiveJobsForTable($this->getTitle(), 'imagelinks');
    }
    wfProfileOut(__METHOD__);
    return true;
}
/**
 * Record an image upload in the upload log and the image table.
 *
 * Legacy variant: reads the uploader from $wgUser and builds the description
 * page wikitext from the license/copyright fields before registering the
 * image row (INSERT IGNORE, then oldimage/image update on collision).
 *
 * @param string $oldver archive name for the previous version on re-upload
 * @param string $desc upload description / log comment
 * @param string $license license template name, '' for none
 * @param string $copyStatus copyright status text (used only with $wgUseCopyrightUpload)
 * @param string $source source text (used only with $wgUseCopyrightUpload)
 * @param bool $watch whether the uploader wants to watch the description page
 * @return bool true on success, false if the file is missing or shared
 */
function recordUpload($oldver, $desc, $license = '', $copyStatus = '', $source = '', $watch = false) {
    global $wgUser, $wgUseCopyrightUpload;
    $dbw = wfGetDB(DB_MASTER);
    $this->checkDBSchema($dbw);
    // Delete thumbnails and refresh the metadata cache
    $this->purgeCache();
    // Fail now if the image isn't there
    if (!$this->fileExists || $this->fromSharedDirectory) {
        wfDebug("Image::recordUpload: File " . $this->imagePath . " went missing!\n");
        return false;
    }
    if ($wgUseCopyrightUpload) {
        // NOTE(review): $licensetxt is only set when $license != '' but is
        // interpolated below unconditionally — emits an undefined-variable
        // notice when no license is given; verify intended behaviour.
        if ($license != '') {
            $licensetxt = '== ' . wfMsgForContent('license') . " ==\n" . '{{' . $license . '}}' . "\n";
        }
        $textdesc = '== ' . wfMsg('filedesc') . " ==\n" . $desc . "\n" . '== ' . wfMsgForContent('filestatus') . " ==\n" . $copyStatus . "\n" . "{$licensetxt}" . '== ' . wfMsgForContent('filesource') . " ==\n" . $source;
    } else {
        if ($license != '') {
            $filedesc = $desc == '' ? '' : '== ' . wfMsg('filedesc') . " ==\n" . $desc . "\n";
            $textdesc = $filedesc . '== ' . wfMsgForContent('license') . " ==\n" . '{{' . $license . '}}' . "\n";
        } else {
            $textdesc = $desc;
        }
    }
    $now = $dbw->timestamp();
    # split mime type into major/minor parts
    if (strpos($this->mime, '/') !== false) {
        list($major, $minor) = explode('/', $this->mime, 2);
    } else {
        $major = $this->mime;
        $minor = "unknown";
    }
    # Test to see if the row exists using INSERT IGNORE
    # This avoids race conditions by locking the row until the commit, and also
    # doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
    $dbw->insert('image', array('img_name' => $this->name, 'img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->type, 'img_major_mime' => $major, 'img_minor_mime' => $minor, 'img_timestamp' => $now, 'img_description' => $desc, 'img_user' => $wgUser->getID(), 'img_user_text' => $wgUser->getName(), 'img_metadata' => $this->metadata), __METHOD__, 'IGNORE');
    if ($dbw->affectedRows() == 0) {
        # Collision, this is an update of an image
        # Insert previous contents into oldimage
        $dbw->insertSelect('oldimage', 'image', array('oi_name' => 'img_name', 'oi_archive_name' => $dbw->addQuotes($oldver), 'oi_size' => 'img_size', 'oi_width' => 'img_width', 'oi_height' => 'img_height', 'oi_bits' => 'img_bits', 'oi_timestamp' => 'img_timestamp', 'oi_description' => 'img_description', 'oi_user' => 'img_user', 'oi_user_text' => 'img_user_text'), array('img_name' => $this->name), __METHOD__);
        # Update the current image row
        $dbw->update('image', array('img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->type, 'img_major_mime' => $major, 'img_minor_mime' => $minor, 'img_timestamp' => $now, 'img_description' => $desc, 'img_user' => $wgUser->getID(), 'img_user_text' => $wgUser->getName(), 'img_metadata' => $this->metadata), array('img_name' => $this->name), __METHOD__);
    } else {
        # This is a new image
        # Update the image count
        $site_stats = $dbw->tableName('site_stats');
        $dbw->query("UPDATE {$site_stats} SET ss_images=ss_images+1", __METHOD__);
    }
    $descTitle = $this->getTitle();
    $article = new Article($descTitle);
    $minor = false;
    $watch = $watch || $wgUser->isWatched($descTitle);
    $suppressRC = true; // There's already a log entry, so don't double the RC load
    if ($descTitle->exists()) {
        // TODO: insert a null revision into the page history for this update.
        if ($watch) {
            $wgUser->addWatch($descTitle);
        }
        # Invalidate the cache for the description page
        $descTitle->invalidateCache();
        $descTitle->purgeSquid();
    } else {
        // New image; create the description page.
        $article->insertNewArticle($textdesc, $desc, $minor, $watch, $suppressRC);
    }
    # Hooks, hooks, the magic of hooks...
    wfRunHooks('FileUpload', array($this));
    # Add the log entry
    $log = new LogPage('upload');
    $log->addEntry('upload', $descTitle, $desc);
    # Commit the transaction now, in case something goes wrong later
    # The most important thing is that images don't get lost, especially archives
    $dbw->immediateCommit();
    # Invalidate cache for all pages using this image
    $update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
    $update->doUpdate();
    return true;
}
public function run() { $update = new HTMLCacheUpdate($this->title, $this->table, $this->start, $this->end); $update->doUpdate(); return true; }
/**
 * Propagate data in xml property to other articles if necessary.
 *
 * Pushes family-member and image changes out to the linked Person pages,
 * keeps the ESIN footer tag present, refreshes watchlist summaries, and —
 * when this page has become a redirect — copies its members to the target.
 *
 * @param string $oldText contains text being replaced
 * @param String $text which we never touch when propagating places
 * @param bool $textChanged which we never touch when propagating places
 * @return bool true if propagation was successful
 */
protected function propagateEditData($oldText, &$text, &$textChanged) {
    global $wrIsGedcomUpload, $wgESINHandler;
    $result = true;
    // cache new xml - it's used right away to generate family badges on the related person pages,
    // if you don't cache it, the badges pick up the old html
    $this->cachePageXml();
    // update people that link to this family, because the family-badge contents could have changed
    // TODO this could be made more efficient by only invalidating if names, birthdates, or deathdates have changed
    $u = new HTMLCacheUpdate($this->title, 'pagelinks');
    $u->doUpdate();
    // get current info
    $propagatedData = Family::getPropagatedData($this->xml);
    $redirTitle = Title::newFromRedirect($text);
    // get original info (empty baseline until we can parse the old text)
    $origPropagatedData = Family::getPropagatedData(null);
    // don't bother construction page text from WLH in a gedcom upload because nothing will link to this new page
    if (!@$wrIsGedcomUpload && (!$oldText || mb_strpos($oldText, '<family>') === false)) {
        // oldText contains MediaWiki:noarticletext if the article is being created
        // construct <family> text from What Links Here
        $oldText = $this->getPageTextFromWLH(false);
    }
    $origXml = null;
    if ($oldText) {
        $origXml = StructuredData::getXml('family', $oldText);
        if (isset($origXml)) {
            $origPropagatedData = Family::getPropagatedData($origXml);
        }
    }
    // TODO!!!
    // Revert, Unmerge, and eventually Undo should be getting the current attrs for existing people from origPropagatedData
    // and getting the current attrs and redirect-titles for newly-added people from the Person pages when adding the family title to them
    // then unmerge wouldn't need to get them in unmerge, and revert wouldn't be broken, and undo won't break things.
    // This duplicates the functionality found in fromEditFields, but it allows us to update the pages without going through fromEditFields
    // and it doesn't require reading any pages that we weren't reading already.
    // Also, instead of isMerging, if this Family page is on the propagation manager blacklist, then you can't trust the prior version
    // and we should get the person attrs from the Person pages for _all_ family members.
    // Finally, make sure that after redirects we don't have 2 links to the same Person (and also two links to the same Family on Person pages).
    // ignore changes of the husband <-> wife role for the same person
    $temp = array_diff($propagatedData['husbands'], $origPropagatedData['wives']);
    $origPropagatedData['wives'] = array_diff($origPropagatedData['wives'], $propagatedData['husbands']);
    $propagatedData['husbands'] = $temp;
    $temp = array_diff($propagatedData['wives'], $origPropagatedData['husbands']);
    $origPropagatedData['husbands'] = array_diff($origPropagatedData['husbands'], $propagatedData['wives']);
    $propagatedData['wives'] = $temp;
    // propagate adds/removes for each family-member role in turn
    $result = $result && $this->propagateFamilyMemberEditData($propagatedData['husbands'], $origPropagatedData['husbands'], 'husband', 'spouse_of_family', $text, $textChanged);
    $result = $result && $this->propagateFamilyMemberEditData($propagatedData['wives'], $origPropagatedData['wives'], 'wife', 'spouse_of_family', $text, $textChanged);
    $result = $result && $this->propagateFamilyMemberEditData($propagatedData['children'], $origPropagatedData['children'], 'child', 'child_of_family', $text, $textChanged);
    if (StructuredData::removeDuplicateLinks('husband|wife|child', $text)) {
        $textChanged = true;
    }
    $result = $result && $wgESINHandler->propagateSINEdit($this->title, 'family', $this->titleString, $propagatedData, $origPropagatedData, $text, $textChanged);
    // ensure footer tag is still there (might have been removed by editing the last section)
    if ($redirTitle == null && strpos($text, ESINHandler::ESIN_FOOTER_TAG) === false) {
        if (strlen($text) > 0 && substr($text, strlen($text) - 1) != "\n") {
            $text .= "\n";
        }
        $text .= ESINHandler::ESIN_FOOTER_TAG;
        $textChanged = true;
    }
    // update watchlist summary if changed
    $summary = Family::getSummary($this->xml, $this->title);
    $origSummary = Family::getSummary($origXml, $this->title);
    if ($summary != $origSummary) {
        StructuredData::updateWatchlistSummary($this->title, $summary);
    }
    // if it's a redirect, add the people, families, and images that were on this page to the redirect target
    // but don't bother updating the redir target during a merge
    if ($redirTitle != null && PropagationManager::isPropagatablePage($redirTitle)) {
        // get the text of the redir page
        $article = StructuredData::getArticle($redirTitle, true);
        if ($article) {
            $content =& $article->fetchContent();
            $updated = false;
            // add husbands from this page to the redir page
            foreach ($origPropagatedData['husbands'] as $p) {
                // get propagated data for p
                $pd = Person::getPropagatedData(StructuredData::getXmlForTitle('person', Title::newFromText($p, NS_PERSON)));
                Family::updatePersonLink('husband', $p, $p, $pd, 'spouse_of_family', $content, $updated);
            }
            // add wives from this page to the redir page
            foreach ($origPropagatedData['wives'] as $p) {
                $pd = Person::getPropagatedData(StructuredData::getXmlForTitle('person', Title::newFromText($p, NS_PERSON)));
                Family::updatePersonLink('wife', $p, $p, $pd, 'spouse_of_family', $content, $updated);
            }
            // add children from this page to the redir page
            foreach ($origPropagatedData['children'] as $p) {
                $pd = Person::getPropagatedData(StructuredData::getXmlForTitle('person', Title::newFromText($p, NS_PERSON)));
                Family::updatePersonLink('child', $p, $p, $pd, 'child_of_family', $content, $updated);
            }
            // add images from this page to the redir page
            foreach ($origPropagatedData['images'] as $i) {
                ESINHandler::updateImageLink('family', $i['filename'], $i['filename'], $i['caption'], $content, $updated);
            }
            // update the redir page if necessary
            if ($updated) {
                $result = $result && $article->doEdit($content, 'Copy data from [[' . $this->title->getPrefixedText() . ']]', PROPAGATE_EDIT_FLAGS);
            }
        }
    }
    if (!$result) {
        error_log("ERROR! Family edit/rollback not propagated: {$this->titleString}\n");
    }
    return $result;
}
/** * Clears caches when article is deleted * * @param $title Title */ public static function onArticleDelete( $title ) { // Update existence markers on article/talk tabs... if ( $title->isTalkPage() ) { $other = $title->getSubjectPage(); } else { $other = $title->getTalkPage(); } $other->invalidateCache(); $other->purgeSquid(); $title->touchLinks(); $title->purgeSquid(); // File cache HTMLFileCache::clearFileCache( $title ); InfoAction::invalidateCache( $title ); // Messages if ( $title->getNamespace() == NS_MEDIAWIKI ) { MessageCache::singleton()->replace( $title->getDBkey(), false ); } // Images if ( $title->getNamespace() == NS_FILE ) { $update = new HTMLCacheUpdate( $title, 'imagelinks' ); $update->doUpdate(); } // User talk pages if ( $title->getNamespace() == NS_USER_TALK ) { $user = User::newFromName( $title->getText(), false ); if ( $user ) { $user->setNewtalk( false ); } } // Image redirects RepoGroup::singleton()->getLocalRepo()->invalidateImageRedirect( $title ); }
/**
 * Record a file upload in the upload log and the image table.
 *
 * Registers the file row (INSERT IGNORE, falling back to oldimage+update on
 * collision), writes the upload log entry, attaches a null revision or
 * creates the description page, and finally purges/refreshes caches. The
 * whole registration runs in one transaction opened on the repo master.
 *
 * @param string $oldver archive name of the previous version on re-upload
 * @param string $comment upload summary, also used as img_description
 * @param string $pageText initial description page text (new uploads only)
 * @param bool|array $props file properties; computed from the file if false
 * @param string|bool $timestamp upload timestamp in DB format, or false for "now"
 * @param null|User $user uploader; defaults to $wgUser
 * @return bool true on success, false if the file is missing from storage
 */
function recordUpload2($oldver, $comment, $pageText, $props = false, $timestamp = false, $user = null) {
    if (is_null($user)) {
        global $wgUser;
        $user = $wgUser;
    }
    $dbw = $this->repo->getMasterDB();
    $dbw->begin();
    if (!$props) {
        $props = $this->repo->getFileProps($this->getVirtualUrl());
    }
    if ($timestamp === false) {
        $timestamp = $dbw->timestamp();
    }
    $props['description'] = $comment;
    $props['user'] = $user->getId();
    $props['user_text'] = $user->getName();
    $props['timestamp'] = wfTimestamp(TS_MW, $timestamp); // DB -> TS_MW
    $this->setProps($props);
    # Delete thumbnails
    $this->purgeThumbnails();
    # The file is already on its final location, remove it from the squid cache
    SquidUpdate::purge(array($this->getURL()));
    # Fail now if the file isn't there
    if (!$this->fileExists) {
        wfDebug(__METHOD__ . ": File " . $this->getRel() . " went missing!\n");
        return false;
    }
    $reupload = false;
    # Test to see if the row exists using INSERT IGNORE
    # This avoids race conditions by locking the row until the commit, and also
    # doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
    $dbw->insert('image', array('img_name' => $this->getName(), 'img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $this->metadata, 'img_sha1' => $this->sha1), __METHOD__, 'IGNORE');
    if ($dbw->affectedRows() == 0) {
        $reupload = true;
        # Collision, this is an update of a file
        # Insert previous contents into oldimage
        $dbw->insertSelect('oldimage', 'image', array('oi_name' => 'img_name', 'oi_archive_name' => $dbw->addQuotes($oldver), 'oi_size' => 'img_size', 'oi_width' => 'img_width', 'oi_height' => 'img_height', 'oi_bits' => 'img_bits', 'oi_timestamp' => 'img_timestamp', 'oi_description' => 'img_description', 'oi_user' => 'img_user', 'oi_user_text' => 'img_user_text', 'oi_metadata' => 'img_metadata', 'oi_media_type' => 'img_media_type', 'oi_major_mime' => 'img_major_mime', 'oi_minor_mime' => 'img_minor_mime', 'oi_sha1' => 'img_sha1'), array('img_name' => $this->getName()), __METHOD__);
        # Update the current image row
        $dbw->update('image', array('img_size' => $this->size, 'img_width' => intval($this->width), 'img_height' => intval($this->height), 'img_bits' => $this->bits, 'img_media_type' => $this->media_type, 'img_major_mime' => $this->major_mime, 'img_minor_mime' => $this->minor_mime, 'img_timestamp' => $timestamp, 'img_description' => $comment, 'img_user' => $user->getId(), 'img_user_text' => $user->getName(), 'img_metadata' => $this->metadata, 'img_sha1' => $this->sha1), array('img_name' => $this->getName()), __METHOD__);
    } else {
        # This is a new file: update the image count.
        # FIX: the previous code wrapped this in its own begin()/commit() pair.
        # The DB layer does not support nested transactions, so that inner
        # commit() prematurely committed the outer transaction opened above
        # and broke the atomicity of the image-row registration. Run the
        # statement inside the existing transaction instead.
        $site_stats = $dbw->tableName('site_stats');
        $dbw->query("UPDATE {$site_stats} SET ss_images=ss_images+1", __METHOD__);
    }
    $descTitle = $this->getTitle();
    $article = new ImagePage($descTitle);
    $article->setFile($this);
    # Add the log entry
    $log = new LogPage('upload');
    $action = $reupload ? 'overwrite' : 'upload';
    $log->addEntry($action, $descTitle, $comment, array(), $user);
    if ($descTitle->exists()) {
        # Create a null revision
        $latest = $descTitle->getLatestRevID();
        $nullRevision = Revision::newNullRevision($dbw, $descTitle->getArticleId(), $log->getRcComment(), false);
        # FIX: newNullRevision() returns null when the page row has vanished;
        # calling insertOn() on null was a fatal error. Guard it like the
        # newer recordUpload2() implementation does.
        if (!is_null($nullRevision)) {
            $nullRevision->insertOn($dbw);
            wfRunHooks('NewRevisionFromEditComplete', array($article, $nullRevision, $latest, $user));
            $article->updateRevisionOn($dbw, $nullRevision);
        }
        # Invalidate the cache for the description page
        $descTitle->invalidateCache();
        $descTitle->purgeSquid();
    } else {
        # New file; create the description page.
        # There's already a log entry, so don't make a second RC entry
        # Squid and file cache for the description page are purged by doEdit.
        $article->doEdit($pageText, $comment, EDIT_NEW | EDIT_SUPPRESS_RC);
    }
    # Commit the transaction now, in case something goes wrong later
    # The most important thing is that files don't get lost, especially archives
    $dbw->commit();
    # Save to cache and purge the squid
    # We shall not saveToCache before the commit since otherwise
    # in case of a rollback there is an usable file from memcached
    # which in fact doesn't really exist (bug 24978)
    $this->saveToCache();
    # Hooks, hooks, the magic of hooks...
    wfRunHooks('FileUpload', array($this, $reupload, $descTitle->exists()));
    # Invalidate cache for all pages using this file
    $update = new HTMLCacheUpdate($this->getTitle(), 'imagelinks');
    $update->doUpdate();
    # Invalidate cache for all pages that redirects on this page
    $redirs = $this->getTitle()->getRedirectsHere();
    foreach ($redirs as $redir) {
        $update = new HTMLCacheUpdate($redir, 'imagelinks');
        $update->doUpdate();
    }
    return true;
}
/** * Purge metadata and all affected pages when the file is created, * deleted, or majorly updated. */ function purgeEverything() { // Delete thumbnails and refresh file metadata cache $this->purgeCache(); $this->purgeDescription(); // Purge cache of all pages using this file $title = $this->getTitle(); if ($title) { $update = new HTMLCacheUpdate($title, 'imagelinks'); $update->doUpdate(); } }
function run() { $update = new HTMLCacheUpdate($this->title, $this->table); $fromField = $update->getFromField(); $conds = $update->getToCondition(); if ($this->start) { $conds[] = "{$fromField} >= {$this->start}"; } if ($this->end) { $conds[] = "{$fromField} <= {$this->end}"; } $dbr =& wfGetDB(DB_SLAVE); $res = $dbr->select($this->table, $fromField, $conds, __METHOD__); $update->invalidateIDs(new ResultWrapper($dbr, $res)); $dbr->freeResult($res); return true; }