/**
  * Purge the cache for backlinking pages (that is, pages containing
  * a reference to the Title associated with this task)
  *
  * @param string|array $tables
  */
 public function purge($tables)
 {
     global $wgUseFileCache, $wgUseSquid;
     $affectedTitles = $this->getAffectedTitles((array) $tables);
     $affectedCount = count($affectedTitles);
     $this->info("Purge Request", ['title' => $this->title->getPrefixedText(), 'count' => $affectedCount, 'tables' => $tables]);
     // abort if no pages link to the associated Title
     if ($affectedCount == 0) {
         return 0;
     }
     $dbw = wfGetDB(DB_MASTER);
      (new \WikiaSQL())
          ->UPDATE('page')
          ->SET('page_touched', $dbw->timestamp())
          ->WHERE('page_id')
          ->IN(array_map(function ($t) {
              return $t->getArticleID();
          }, $affectedTitles))
          ->run($dbw);
     // Update squid/varnish
     if ($wgUseSquid) {
         \SquidUpdate::newFromTitles($affectedTitles)->doUpdate();
     }
     // Update file cache
     if ($wgUseFileCache) {
         foreach ($affectedTitles as $title) {
             \HTMLFileCache::clearFileCache($title);
         }
     }
     return $affectedCount;
 }
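For readers unfamiliar with the WikiaSQL query builder, the chained UPDATE above is just a page_touched bump; a minimal sketch of the equivalent call through the core database layer (the pattern the later examples use directly), shown here for illustration only:

// Sketch: the same page_touched bump via the core DatabaseBase::update() API,
// mirroring the update pattern used in the other examples below.
$pageIds = array_map(function ($t) {
    return $t->getArticleID();
}, $affectedTitles);
$dbw->update(
    'page',
    array('page_touched' => $dbw->timestamp()),
    array('page_id' => $pageIds),
    __METHOD__
);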
Example #2
 /**
  * Invalidate a set of pages, right now
  */
 public function invalidate($startId = false, $endId = false)
 {
     global $wgUseFileCache, $wgUseSquid;
     $titleArray = $this->mCache->getLinks($this->mTable, $startId, $endId);
     if ($titleArray->count() == 0) {
         return;
     }
     $dbw = wfGetDB(DB_MASTER);
     $timestamp = $dbw->timestamp();
     # Get all IDs in this query into an array
     $ids = array();
     foreach ($titleArray as $title) {
         $ids[] = $title->getArticleID();
     }
     # Update page_touched
     $dbw->update('page', array('page_touched' => $timestamp), array('page_id IN (' . $dbw->makeList($ids) . ')'), __METHOD__);
     # Update squid
     if ($wgUseSquid) {
         $u = SquidUpdate::newFromTitles($titleArray);
         $u->doUpdate();
     }
     # Update file cache
     if ($wgUseFileCache) {
         foreach ($titleArray as $title) {
             HTMLFileCache::clearFileCache($title);
         }
     }
 }
 /**
  * Invalidate an array (or iterator) of Title objects, right now
  * @param $titleArray array
  */
 protected function invalidateTitles($titleArray)
 {
     global $wgUseFileCache, $wgUseSquid;
     $dbw = wfGetDB(DB_MASTER);
     $timestamp = $dbw->timestamp();
     # Get all IDs in this query into an array
     $ids = array();
     foreach ($titleArray as $title) {
         $ids[] = $title->getArticleID();
     }
     if (!$ids) {
         return;
     }
     # Update page_touched
     $batches = array_chunk($ids, $this->mRowsPerQuery);
     foreach ($batches as $batch) {
         $dbw->update('page', array('page_touched' => $timestamp), array('page_id' => $batch), __METHOD__);
     }
     # Update squid
     if ($wgUseSquid) {
         $u = SquidUpdate::newFromTitles($titleArray);
         $u->doUpdate();
     }
     # Update file cache
     if ($wgUseFileCache) {
         foreach ($titleArray as $title) {
             HTMLFileCache::clearFileCache($title);
         }
     }
 }
Example #4
 function recordUpload($oldver, $desc, $copyStatus = '', $source = '', $watch = false)
 {
     global $wgUser, $wgLang, $wgTitle, $wgDeferredUpdateList;
     global $wgUseCopyrightUpload, $wgUseSquid, $wgPostCommitUpdateList;
     $img = Image::newFromName($this->mUploadSaveName);
     $fname = 'Image::recordUpload';
     $dbw =& wfGetDB(DB_MASTER);
     Image::checkDBSchema($dbw);
     // Delete thumbnails and refresh the metadata cache
     $img->purgeCache();
     // Fail now if the image isn't there
     if (!$img->fileExists || $img->fromSharedDirectory) {
         wfDebug("Image::recordUpload: File " . $img->imagePath . " went missing!\n");
         return false;
     }
     if ($wgUseCopyrightUpload) {
         $textdesc = '== ' . wfMsg('filedesc') . " ==\n" . $desc . "\n" . '== ' . wfMsg('filestatus') . " ==\n" . $copyStatus . "\n" . '== ' . wfMsg('filesource') . " ==\n" . $source;
     } else {
         $textdesc = $desc;
     }
     $now = $dbw->timestamp();
      # Split MIME type
     if (strpos($img->mime, '/') !== false) {
         list($major, $minor) = explode('/', $img->mime, 2);
     } else {
         $major = $img->mime;
         $minor = "unknown";
     }
     # Test to see if the row exists using INSERT IGNORE
     # This avoids race conditions by locking the row until the commit, and also
     # doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
      $dbw->insert('image', array(
          'img_name' => $img->name,
          'img_size' => $img->size,
          'img_width' => intval($img->width),
          'img_height' => intval($img->height),
          'img_bits' => $img->bits,
          'img_media_type' => $img->type,
          'img_major_mime' => $major,
          'img_minor_mime' => $minor,
          'img_timestamp' => $now,
          'img_description' => $desc,
          'img_user' => $wgUser->getID(),
          'img_user_text' => $wgUser->getName(),
          'img_metadata' => $img->metadata,
      ), $fname, 'IGNORE');
     $descTitle = $img->getTitle();
     $purgeURLs = array();
     $article = new Article($descTitle);
     $minor = false;
     $watch = $watch || $wgUser->isWatched($descTitle);
     $suppressRC = true;
     // There's already a log entry, so don't double the RC load
     if ($descTitle->exists()) {
         // TODO: insert a null revision into the page history for this update.
         if ($watch) {
             $wgUser->addWatch($descTitle);
         }
         # Invalidate the cache for the description page
         $descTitle->invalidateCache();
         $purgeURLs[] = $descTitle->getInternalURL();
     } else {
         $this->insertNewArticle($article, $textdesc, $desc, $minor, $watch, $suppressRC);
     }
     # Invalidate cache for all pages using this image
     $linksTo = $img->getLinksTo();
     if ($wgUseSquid) {
         $u = SquidUpdate::newFromTitles($linksTo, $purgeURLs);
         array_push($wgPostCommitUpdateList, $u);
     }
     Title::touchArray($linksTo);
     $log = new LogPage('upload');
     $log->addEntry('upload', $descTitle, $desc);
     return true;
 }
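The comment above the image insert explains why INSERT IGNORE is preferred over SELECT ... FOR UPDATE. Example #10 below makes the consequence explicit by branching on affectedRows(); a condensed sketch of that pattern (the $imageRow name is hypothetical, standing in for the field map built above):

// Condensed sketch of the INSERT IGNORE race-avoidance pattern:
// the insert either creates the image row or silently does nothing,
// so affectedRows() tells a brand-new upload apart from a re-upload.
$dbw->insert('image', $imageRow, $fname, 'IGNORE');
if ($dbw->affectedRows()) {
    // New image: create its description page
} else {
    // Existing image: copy the old row into oldimage, then update in place
}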
 /**
  * Invalidate an array (or iterator) of Title objects, right now
  * @param $titleArray array
  */
 protected function invalidateTitles($titleArray)
 {
     global $wgUseFileCache, $wgUseSquid;
     $dbw = wfGetDB(DB_MASTER);
     $timestamp = $dbw->timestamp();
     # Get all IDs in this query into an array
     $ids = array();
     foreach ($titleArray as $title) {
         $ids[] = $title->getArticleID();
     }
     if (!$ids) {
         return;
     }
      # Don't invalidate pages that were already invalidated
      $touchedCond = isset($this->params['rootJobTimestamp'])
          ? array("page_touched < " . $dbw->addQuotes($dbw->timestamp($this->params['rootJobTimestamp'])))
          : array();
     # Update page_touched
     $batches = array_chunk($ids, $this->rowsPerQuery);
     foreach ($batches as $batch) {
         $dbw->update('page', array('page_touched' => $timestamp), array('page_id' => $batch) + $touchedCond, __METHOD__);
     }
     # Update squid
     if ($wgUseSquid) {
         $u = SquidUpdate::newFromTitles($titleArray);
         $u->doUpdate();
     }
     # Update file cache
     if ($wgUseFileCache) {
         foreach ($titleArray as $title) {
             HTMLFileCache::clearFileCache($title);
         }
     }
 }
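The rootJobTimestamp guard above skips rows whose page_touched is already newer than the root job, so overlapping jobs do not re-invalidate the same pages. A small sketch of what the combined condition amounts to, using a hypothetical root timestamp value:

// Hypothetical root job timestamp, for illustration only
$rootTs = $dbw->timestamp('20150101000000');
$conds = array('page_id' => $batch) + array("page_touched < " . $dbw->addQuotes($rootTs));
// Effective SQL, roughly:
//   UPDATE page SET page_touched = ... WHERE page_id IN (...) AND page_touched < '20150101000000'
$dbw->update('page', array('page_touched' => $timestamp), $conds, __METHOD__);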
 /**
  * Invalidate a set of IDs, right now
  */
 function invalidateIDs(ResultWrapper $res)
 {
     global $wgUseFileCache, $wgUseSquid;
     if ($res->numRows() == 0) {
         return;
     }
     $dbw =& wfGetDB(DB_MASTER);
     $timestamp = $dbw->timestamp();
     $done = false;
     while (!$done) {
         # Get all IDs in this query into an array
         $ids = array();
         for ($i = 0; $i < $this->mRowsPerQuery; $i++) {
             $row = $res->fetchRow();
             if ($row) {
                 $ids[] = $row[0];
             } else {
                 $done = true;
                 break;
             }
         }
         if (!count($ids)) {
             break;
         }
         # Update page_touched
         $dbw->update('page', array('page_touched' => $timestamp), array('page_id IN (' . $dbw->makeList($ids) . ')'), __METHOD__);
         # Update squid
         if ($wgUseSquid || $wgUseFileCache) {
             $titles = Title::newFromIDs($ids);
             if ($wgUseSquid) {
                 $u = SquidUpdate::newFromTitles($titles);
                 $u->doUpdate();
             }
             # Update file cache
             if ($wgUseFileCache) {
                 foreach ($titles as $title) {
                     $cm = new CacheManager($title);
                     @unlink($cm->fileCacheName());
                 }
             }
         }
     }
 }
 /**
  * @param array $pages Map of (page ID => (namespace, DB key)) entries
  */
 protected function invalidateTitles(array $pages)
 {
     global $wgUpdateRowsPerQuery, $wgUseFileCache;
     // Get all page IDs in this query into an array
     $pageIds = array_keys($pages);
     if (!$pageIds) {
         return;
     }
     // The page_touched field will need to be bumped for these pages.
     // Only bump it to the present time if no "rootJobTimestamp" was known.
     // If it is known, it can be used instead, which avoids invalidating output
     // that was in fact generated *after* the relevant dependency change time
      // (e.g. template edit). This is particularly useful since refreshLinks jobs
      // save back parser output and usually run alongside htmlCacheUpdate jobs;
     // their saved output would be invalidated by using the current timestamp.
     if (isset($this->params['rootJobTimestamp'])) {
         $touchTimestamp = $this->params['rootJobTimestamp'];
     } else {
         $touchTimestamp = wfTimestampNow();
     }
     $dbw = wfGetDB(DB_MASTER);
     // Update page_touched (skipping pages already touched since the root job).
     // Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
     foreach (array_chunk($pageIds, $wgUpdateRowsPerQuery) as $batch) {
         $dbw->commit(__METHOD__, 'flush');
         wfWaitForSlaves();
          $dbw->update(
              'page',
              array('page_touched' => $dbw->timestamp($touchTimestamp)),
              array(
                  'page_id' => $batch,
                  "page_touched < " . $dbw->addQuotes($dbw->timestamp($touchTimestamp)),
              ),
              __METHOD__
          );
     }
     // Get the list of affected pages (races only mean something else did the purge)
      $titleArray = TitleArray::newFromResult($dbw->select(
          'page',
          array('page_namespace', 'page_title'),
          array('page_id' => $pageIds, 'page_touched' => $dbw->timestamp($touchTimestamp)),
          __METHOD__
      ));
     // Update squid
     $u = SquidUpdate::newFromTitles($titleArray);
     $u->doUpdate();
     // Update file cache
     if ($wgUseFileCache) {
         foreach ($titleArray as $title) {
             HTMLFileCache::clearFileCache($title);
         }
     }
 }
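Unlike the earlier variants, this one receives a prebuilt map of page ID => (namespace, DB key) pairs rather than a TitleArray. A minimal sketch of how a caller might assemble such a map from the page table (the NS_TEMPLATE filter is only an example):

// Sketch: build the page ID => array(namespace, DB key) map the method expects
$res = $dbw->select(
    'page',
    array('page_id', 'page_namespace', 'page_title'),
    array('page_namespace' => NS_TEMPLATE), // example filter, illustration only
    __METHOD__
);
$pages = array();
foreach ($res as $row) {
    $pages[(int)$row->page_id] = array((int)$row->page_namespace, $row->page_title);
}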
 /**
  * Back-end article deletion
  * Deletes the article with database consistency, writes logs, purges caches
  * Returns success
  */
 function doDeleteArticle($reason)
 {
     global $wgUser;
     global $wgUseSquid, $wgDeferredUpdateList, $wgInternalServer, $wgPostCommitUpdateList;
     global $wgUseTrackbacks;
     $fname = 'Article::doDeleteArticle';
     wfDebug($fname . "\n");
     $dbw =& wfGetDB(DB_MASTER);
     $ns = $this->mTitle->getNamespace();
     $t = $this->mTitle->getDBkey();
     $id = $this->mTitle->getArticleID();
     if ($t == '' || $id == 0) {
         return false;
     }
     $u = new SiteStatsUpdate(0, 1, -$this->isCountable($this->getContent(true)), -1);
     array_push($wgDeferredUpdateList, $u);
     $linksTo = $this->mTitle->getLinksTo();
     # Squid purging
     if ($wgUseSquid) {
         $urls = array($this->mTitle->getInternalURL(), $this->mTitle->getInternalURL('history'));
         $u = SquidUpdate::newFromTitles($linksTo, $urls);
         array_push($wgPostCommitUpdateList, $u);
     }
     # Client and file cache invalidation
     Title::touchArray($linksTo);
     // For now, shunt the revision data into the archive table.
     // Text is *not* removed from the text table; bulk storage
     // is left intact to avoid breaking block-compression or
     // immutable storage schemes.
     //
     // For backwards compatibility, note that some older archive
     // table entries will have ar_text and ar_flags fields still.
     //
     // In the future, we may keep revisions and mark them with
     // the rev_deleted field, which is reserved for this purpose.
      $dbw->insertSelect('archive', array('page', 'revision'), array(
          'ar_namespace' => 'page_namespace',
          'ar_title' => 'page_title',
          'ar_comment' => 'rev_comment',
          'ar_user' => 'rev_user',
          'ar_user_text' => 'rev_user_text',
          'ar_timestamp' => 'rev_timestamp',
          'ar_minor_edit' => 'rev_minor_edit',
          'ar_rev_id' => 'rev_id',
          'ar_text_id' => 'rev_text_id',
      ), array('page_id' => $id, 'page_id = rev_page'), $fname);
     # Now that it's safely backed up, delete it
     $dbw->delete('revision', array('rev_page' => $id), $fname);
     $dbw->delete('page', array('page_id' => $id), $fname);
     if ($wgUseTrackbacks) {
         $dbw->delete('trackbacks', array('tb_page' => $id), $fname);
     }
     # Clean up recentchanges entries...
     $dbw->delete('recentchanges', array('rc_namespace' => $ns, 'rc_title' => $t), $fname);
     # Finally, clean up the link tables
     $t = $this->mTitle->getPrefixedDBkey();
     Article::onArticleDelete($this->mTitle);
     # Delete outgoing links
     $dbw->delete('pagelinks', array('pl_from' => $id));
     $dbw->delete('imagelinks', array('il_from' => $id));
     $dbw->delete('categorylinks', array('cl_from' => $id));
     # Log the deletion
     $log = new LogPage('delete');
     $log->addEntry('delete', $this->mTitle, $reason);
     # Clear the cached article id so the interface doesn't act like we exist
     $this->mTitle->resetArticleID(0);
     $this->mTitle->mArticleID = 0;
     return true;
 }
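Title::touchArray($linksTo) above is the same invalidation primitive the invalidate* helpers implement with explicit UPDATE calls: it bumps page_touched for each backlinking page so its cached HTML is regenerated. A rough sketch of the equivalent update (not the actual core implementation):

// Rough equivalent of Title::touchArray($linksTo): bump page_touched
// for every backlink so cached output is regenerated on the next view.
$ids = array();
foreach ($linksTo as $linkTitle) {
    $ids[] = $linkTitle->getArticleID();
}
if ($ids) {
    $dbw->update(
        'page',
        array('page_touched' => $dbw->timestamp()),
        array('page_id' => $ids),
        __METHOD__
    );
}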
 /**
  * Invalidate a set of IDs, right now
  */
 public function invalidateIDs(ResultWrapper $res)
 {
     global $wgUseFileCache, $wgUseSquid;
      if ($res->numRows() == 0) {
          return; // sanity check
      }
      $dbw = wfGetDB(DB_MASTER);
     $timestamp = $dbw->timestamp();
     $done = false;
     while (!$done) {
         # Get all IDs in this query into an array
         $ids = array();
         for ($i = 0; $i < $this->mRowsPerQuery; $i++) {
             $row = $res->fetchRow();
             if ($row) {
                 $ids[] = $row[0];
             } else {
                 $done = true;
                 break;
             }
         }
         if (count($ids) == 0) {
             break;
         }
         # Update page_touched
         $dbw->update('page', array('page_touched' => $timestamp), array('page_id' => $ids), __METHOD__);
         # Update static caches
         if ($wgUseSquid || $wgUseFileCache) {
             $titles = Title::newFromIDs($ids);
             # Update squid cache
             if ($wgUseSquid) {
                 $u = SquidUpdate::newFromTitles($titles);
                 $u->doUpdate();
             }
             # Update file cache
             if ($wgUseFileCache) {
                 foreach ($titles as $title) {
                     HTMLFileCache::clearFileCache($title);
                 }
             }
         }
     }
 }
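invalidateIDs() walks a database result whose first column is a page ID, consuming mRowsPerQuery rows per batch. A minimal sketch of a result set a caller might feed it, assuming the classic templatelinks schema where tl_from holds the embedding page's ID and $title is the template being edited:

// Sketch: IDs of every page embedding a given template, suitable for invalidateIDs()
$res = $dbw->select(
    'templatelinks',
    array('tl_from'),
    array(
        'tl_namespace' => $title->getNamespace(),
        'tl_title' => $title->getDBkey(),
    ),
    __METHOD__
);
$this->invalidateIDs($res);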
Example #10
 /**
  * Record an image upload in the upload log and the image table
  */
 function recordUpload($oldver, $desc, $copyStatus = '', $source = '')
 {
     global $wgUser, $wgLang, $wgTitle, $wgDeferredUpdateList;
     global $wgUseCopyrightUpload, $wgUseSquid, $wgPostCommitUpdateList;
     $fname = 'Image::recordUpload';
     $dbw =& wfGetDB(DB_MASTER);
     $this->checkDBSchema($dbw);
     // Delete thumbnails and refresh the metadata cache
     $this->purgeCache();
     // Fail now if the image isn't there
     if (!$this->fileExists || $this->fromSharedDirectory) {
         wfDebug("Image::recordUpload: File " . $this->imagePath . " went missing!\n");
         return false;
     }
     if ($wgUseCopyrightUpload) {
         $textdesc = '== ' . wfMsg('filedesc') . " ==\n" . $desc . "\n" . '== ' . wfMsg('filestatus') . " ==\n" . $copyStatus . "\n" . '== ' . wfMsg('filesource') . " ==\n" . $source;
     } else {
         $textdesc = $desc;
     }
     $now = $dbw->timestamp();
      # Split MIME type
     if (strpos($this->mime, '/') !== false) {
         list($major, $minor) = explode('/', $this->mime, 2);
     } else {
         $major = $this->mime;
         $minor = "unknown";
     }
     # Test to see if the row exists using INSERT IGNORE
     # This avoids race conditions by locking the row until the commit, and also
     # doesn't deadlock. SELECT FOR UPDATE causes a deadlock for every race condition.
      $dbw->insert('image', array(
          'img_name' => $this->name,
          'img_size' => $this->size,
          'img_width' => intval($this->width),
          'img_height' => intval($this->height),
          'img_bits' => $this->bits,
          'img_media_type' => $this->type,
          'img_major_mime' => $major,
          'img_minor_mime' => $minor,
          'img_timestamp' => $now,
          'img_description' => $desc,
          'img_user' => $wgUser->getID(),
          'img_user_text' => $wgUser->getName(),
          'img_metadata' => $this->metadata,
      ), $fname, 'IGNORE');
     $descTitle = $this->getTitle();
     $purgeURLs = array();
     if ($dbw->affectedRows()) {
         # Successfully inserted, this is a new image
         $id = $descTitle->getArticleID();
         if ($id == 0) {
             $article = new Article($descTitle);
             $article->insertNewArticle($textdesc, $desc, false, false, true);
         }
     } else {
         # Collision, this is an update of an image
         # Insert previous contents into oldimage
          $dbw->insertSelect('oldimage', 'image', array(
              'oi_name' => 'img_name',
              'oi_archive_name' => $dbw->addQuotes($oldver),
              'oi_size' => 'img_size',
              'oi_width' => 'img_width',
              'oi_height' => 'img_height',
              'oi_bits' => 'img_bits',
              'oi_timestamp' => 'img_timestamp',
              'oi_description' => 'img_description',
              'oi_user' => 'img_user',
              'oi_user_text' => 'img_user_text',
          ), array('img_name' => $this->name), $fname);
         # Update the current image row
          $dbw->update('image', array(
              'img_size' => $this->size,
              'img_width' => intval($this->width),
              'img_height' => intval($this->height),
              'img_bits' => $this->bits,
              'img_media_type' => $this->type,
              'img_major_mime' => $major,
              'img_minor_mime' => $minor,
              'img_timestamp' => $now,
              'img_description' => $desc,
              'img_user' => $wgUser->getID(),
              'img_user_text' => $wgUser->getName(),
              'img_metadata' => $this->metadata,
          ), array('img_name' => $this->name), $fname);
         # Invalidate the cache for the description page
         $descTitle->invalidateCache();
         $purgeURLs[] = $descTitle->getInternalURL();
     }
     # Invalidate cache for all pages using this image
     $linksTo = $this->getLinksTo();
     if ($wgUseSquid) {
         $u = SquidUpdate::newFromTitles($linksTo, $purgeURLs);
         array_push($wgPostCommitUpdateList, $u);
     }
     Title::touchArray($linksTo);
     $log = new LogPage('upload');
     $log->addEntry('upload', $descTitle, $desc);
     return true;
 }
 function doDelete()
 {
     global $wgOut, $wgUser, $wgContLang, $wgRequest;
     global $wgUseSquid, $wgInternalServer, $wgPostCommitUpdateList;
     $fname = 'ImagePage::doDelete';
     $reason = $wgRequest->getVal('wpReason');
     $oldimage = $wgRequest->getVal('oldimage');
     $dbw =& wfGetDB(DB_MASTER);
     if (!is_null($oldimage)) {
         if (strlen($oldimage) < 16) {
             $wgOut->unexpectedValueError('oldimage', htmlspecialchars($oldimage));
             return;
         }
         if (strstr($oldimage, "/") || strstr($oldimage, "\\")) {
             $wgOut->unexpectedValueError('oldimage', htmlspecialchars($oldimage));
             return;
         }
         # Invalidate description page cache
         $this->mTitle->invalidateCache();
         # Squid purging
         if ($wgUseSquid) {
             $urlArr = array($wgInternalServer . wfImageArchiveUrl($oldimage), $wgInternalServer . $this->mTitle->getFullURL());
             wfPurgeSquidServers($urlArr);
         }
         $this->doDeleteOldImage($oldimage);
         $dbw->delete('oldimage', array('oi_archive_name' => $oldimage));
         $deleted = $oldimage;
     } else {
         $image = $this->mTitle->getDBkey();
         $dest = wfImageDir($image);
         $archive = wfImageDir($image);
         # Delete the image file if it exists; due to sync problems
         # or manual trimming sometimes the file will be missing.
         $targetFile = "{$dest}/{$image}";
         if (file_exists($targetFile) && !@unlink($targetFile)) {
             # If the deletion operation actually failed, bug out:
             $wgOut->fileDeleteError($targetFile);
             return;
         }
         $dbw->delete('image', array('img_name' => $image));
         $res = $dbw->select('oldimage', array('oi_archive_name'), array('oi_name' => $image));
         # Purge archive URLs from the squid
         $urlArr = array();
         while ($s = $dbw->fetchObject($res)) {
             $this->doDeleteOldImage($s->oi_archive_name);
             $urlArr[] = $wgInternalServer . wfImageArchiveUrl($s->oi_archive_name);
         }
         # And also the HTML of all pages using this image
         $linksTo = $this->img->getLinksTo();
         if ($wgUseSquid) {
             $u = SquidUpdate::newFromTitles($linksTo, $urlArr);
             array_push($wgPostCommitUpdateList, $u);
         }
         $dbw->delete('oldimage', array('oi_name' => $image));
         # Image itself is now gone, and database is cleaned.
         # Now we remove the image description page.
         $article = new Article($this->mTitle);
         $article->doDeleteArticle($reason);
         # ignore errors
         # Invalidate parser cache and client cache for pages using this image
         # This is left until relatively late to reduce lock time
         Title::touchArray($linksTo);
         /* Delete thumbnails and refresh image metadata cache */
         $this->img->purgeCache();
         $deleted = $image;
     }
     $wgOut->setPagetitle(wfMsg('actioncomplete'));
     $wgOut->setRobotpolicy('noindex,nofollow');
     $loglink = '[[Special:Log/delete|' . wfMsg('deletionlog') . ']]';
     $text = wfMsg('deletedtext', $deleted, $loglink);
     $wgOut->addWikiText($text);
     $wgOut->returnToMain(false, $this->mTitle->getPrefixedText());
 }