/**
 * Bump page_touched for the given pages and purge their CDN/file cache entries.
 *
 * @param array $pages Map of (page ID => (namespace, DB key)) entries
 */
protected function invalidateTitles(array $pages) {
	global $wgUpdateRowsPerQuery, $wgUseFileCache;

	// Collect the page IDs from this job; an empty map means nothing to do.
	$pageIds = array_keys($pages);
	if ($pageIds === []) {
		return;
	}

	// page_touched needs a bump for these pages. If a "rootJobTimestamp" is
	// known, prefer it over the present time: that avoids invalidating output
	// generated *after* the relevant dependency change (e.g. a template edit).
	// This is particularly useful since refreshLinks jobs save back parser
	// output and usually run alongside htmlCacheUpdate jobs; their saved
	// output would be invalidated by using the current timestamp.
	$newTouchTime = isset($this->params['rootJobTimestamp'])
		? $this->params['rootJobTimestamp']
		: wfTimestampNow();

	$dbw = wfGetDB(DB_MASTER);
	// Update page_touched in batches, skipping pages already touched since the
	// root job. $wgUpdateRowsPerQuery bounds each batch; jobs are already
	// sized by that value, so this is a sanity limit.
	foreach (array_chunk($pageIds, $wgUpdateRowsPerQuery) as $idBatch) {
		$dbw->commit(__METHOD__, 'flush');
		wfGetLBFactory()->waitForReplication();
		$dbw->update(
			'page',
			['page_touched' => $dbw->timestamp($newTouchTime)],
			[
				'page_id' => $idBatch,
				"page_touched < " . $dbw->addQuotes($dbw->timestamp($newTouchTime))
			],
			__METHOD__
		);
	}

	// Re-select the pages that actually received the new timestamp; losing a
	// race here only means something else already performed the purge.
	$affectedTitles = TitleArray::newFromResult($dbw->select(
		'page',
		['page_namespace', 'page_title'],
		['page_id' => $pageIds, 'page_touched' => $dbw->timestamp($newTouchTime)],
		__METHOD__
	));

	// Purge the CDN entries for every affected title.
	$cdnPurge = CdnCacheUpdate::newFromTitles($affectedTitles);
	$cdnPurge->doUpdate();

	// Also clear the on-disk HTML file cache when that feature is enabled.
	if ($wgUseFileCache) {
		foreach ($affectedTitles as $affectedTitle) {
			HTMLFileCache::clearFileCache($affectedTitle);
		}
	}
}
/**
 * Bump page_touched for the given pages and purge their CDN/file cache entries.
 *
 * @param array $pages Map of (page ID => (namespace, DB key)) entries
 */
protected function invalidateTitles(array $pages) {
	global $wgUpdateRowsPerQuery, $wgUseFileCache;

	// Collect the page IDs from this job; an empty map means nothing to do.
	$pageIds = array_keys($pages);
	if ($pageIds === []) {
		return;
	}

	// Bump page_touched to the current timestamp. This used to use the root
	// job timestamp (e.g. template/file edit time), which was a bit more
	// efficient when template edits are rare and don't affect the same pages
	// much. However, the current timestamp allows for better de-duplication,
	// which is much more useful for wikis with high edit rates. Note that
	// RefreshLinksJob, which is enqueued alongside HTMLCacheUpdateJob, saves
	// the parser output since it has to parse anyway. The vast majority of
	// cache jobs are assumed to finish before the link jobs, so using "now"
	// instead of the root timestamp should not invalidate those cache entries
	// too often.
	$newTouchTime = wfTimestampNow();

	$dbw = wfGetDB(DB_MASTER);
	$lbFactory = wfGetLBFactory();
	$ticket = $lbFactory->getEmptyTransactionTicket(__METHOD__);
	// Update page_touched in batches, skipping pages already touched since the
	// root job. $wgUpdateRowsPerQuery bounds each batch; jobs are already
	// sized by that value, so this is a sanity limit.
	foreach (array_chunk($pageIds, $wgUpdateRowsPerQuery) as $idBatch) {
		$lbFactory->commitAndWaitForReplication(__METHOD__, $ticket);
		$dbw->update(
			'page',
			['page_touched' => $dbw->timestamp($newTouchTime)],
			[
				'page_id' => $idBatch,
				"page_touched < " . $dbw->addQuotes($dbw->timestamp($newTouchTime))
			],
			__METHOD__
		);
	}

	// Re-select the pages that actually received the new timestamp; losing a
	// race here only means something else already performed the purge.
	$affectedTitles = TitleArray::newFromResult($dbw->select(
		'page',
		['page_namespace', 'page_title'],
		['page_id' => $pageIds, 'page_touched' => $dbw->timestamp($newTouchTime)],
		__METHOD__
	));

	// Purge the CDN entries for every affected title.
	$cdnPurge = CdnCacheUpdate::newFromTitles($affectedTitles);
	$cdnPurge->doUpdate();

	// Also clear the on-disk HTML file cache when that feature is enabled.
	if ($wgUseFileCache) {
		foreach ($affectedTitles as $affectedTitle) {
			HTMLFileCache::clearFileCache($affectedTitle);
		}
	}
}