/**
 * This is based on part of HistoryBlobStub::getText().
 * Determine if the text can be retrieved from the row in the normal way.
 *
 * @param array $stub Stub data; must contain a 'hash' key identifying the item
 *   inside the blob.
 * @param stdClass $secondaryRow Text-table row providing old_flags and old_text.
 * @return bool True if the stub's item can be fetched from the blob row,
 *   false if the row is malformed, not a blob object, or the item is missing.
 */
function isUnbrokenStub( $stub, $secondaryRow ) {
	$flags = explode( ',', $secondaryRow->old_flags );
	$text = $secondaryRow->old_text;
	if ( in_array( 'external', $flags, true ) ) {
		$url = $text;
		// Validate the URL shape explicitly instead of suppressing the
		// list() notice with '@' and relying on a loose "" comparison
		// (same pattern HistoryBlobStub::getText() uses).
		$parts = explode( '://', $url, 2 );
		if ( !isset( $parts[1] ) || $parts[1] === '' ) {
			return false;
		}
		$text = ExternalStore::fetchFromUrl( $url );
	}
	if ( !in_array( 'object', $flags, true ) ) {
		return false;
	}
	if ( in_array( 'gzip', $flags, true ) ) {
		$obj = unserialize( gzinflate( $text ) );
	} else {
		$obj = unserialize( $text );
	}
	if ( !is_object( $obj ) ) {
		// Correct for old double-serialization bug.
		$obj = unserialize( $obj );
	}
	if ( !is_object( $obj ) ) {
		return false;
	}
	$obj->uncompress();
	$text = $obj->getItem( $stub['hash'] );
	return $text !== false;
}
<?php
/**
 * Script tests fetching revision texts from external clusters
 *
 * @package MediaWiki
 * @addtopackage maintenance
 *
 * @author Władysław Bodzek
 */

ini_set( "include_path", dirname( __FILE__ ) . "/../../../maintenance/" );

$optionsWithArgs = array();
require_once "commandLine.inc";

// Blob URLs to probe; the huge final id presumably points at a missing
// blob to exercise the failure path — verify against the target cluster.
$testUrls = array(
	"DB://archive1/191",
	"DB://archive1/211",
	"DB://archive1/1910000000000",
);

foreach ( $testUrls as $blobUrl ) {
	$raw = ExternalStore::fetchFromUrl( $blobUrl );
	$inflated = gzinflate( $raw );
	var_dump( $blobUrl, $raw, $inflated );
}
/**
 * Get the item text for this stub from the text table, following an
 * external-store pointer if present.
 *
 * Successfully loaded blob objects are cached in self::$blobCache, keyed
 * by text row id, so pulling many items from the same blob reuses it.
 *
 * @return string|false The item text, or false if the text row is missing,
 *   the external URL is malformed, or the row does not deserialize to a
 *   blob object.
 */
function getText() {
	if ( isset( self::$blobCache[$this->mOldId] ) ) {
		$obj = self::$blobCache[$this->mOldId];
	} else {
		$dbr = wfGetDB( DB_SLAVE );
		$row = $dbr->selectRow( 'text', array( 'old_flags', 'old_text' ), array( 'old_id' => $this->mOldId ) );
		if ( !$row ) {
			return false;
		}
		$flags = explode( ',', $row->old_flags );
		if ( in_array( 'external', $flags ) ) {
			$url = $row->old_text;
			$parts = explode( '://', $url, 2 );
			if ( !isset( $parts[1] ) || $parts[1] == '' ) {
				// BUG FIX: this path previously called wfProfileOut()
				// with no matching wfProfileIn() anywhere in the method,
				// unbalancing the profiler stack. The stray call (and the
				// now-unused $fname) have been removed.
				return false;
			}
			$row->old_text = ExternalStore::fetchFromUrl( $url );
		}
		if ( !in_array( 'object', $flags ) ) {
			return false;
		}
		if ( in_array( 'gzip', $flags ) ) {
			// This shouldn't happen, but a bug in the compress script
			// may at times gzip-compress a HistoryBlob object row.
			$obj = unserialize( gzinflate( $row->old_text ) );
		} else {
			$obj = unserialize( $row->old_text );
		}
		if ( !is_object( $obj ) ) {
			// Correct for old double-serialization bug.
			$obj = unserialize( $obj );
		}
		if ( !is_object( $obj ) ) {
			return false;
		}
		// Save this item for reference; if pulling many
		// items in a row we'll likely use it again.
		$obj->uncompress();
		self::$blobCache = array( $this->mOldId => $obj );
	}
	return $obj->getItem( $this->mHash );
}
/**
 * @brief Gets old article's text
 *
 * @desc Returns the article's content from the text table; on any failure
 * (missing row, failed external fetch) it returns an empty string.
 *
 * @param integer $textId article's text id in text table
 *
 * @return string The revision text, or '' on failure.
 * @author Andrzej 'nAndy' Łukaszewski
 */
public function getDeletedArticleTitleTxt( $textId ) {
	$dbr = wfGetDB( DB_SLAVE );
	$row = $dbr->selectRow(
		'text',
		array( 'old_text', 'old_flags' ),
		array( 'old_id' => $textId ),
		__METHOD__
	);
	if ( empty( $row->old_text ) || empty( $row->old_flags ) ) {
		return '';
	}
	$flags = explode( ',', $row->old_flags );
	$text = $row->old_text;
	// BUG FIX: only treat old_text as an external-store URL when the
	// 'external' flag is actually set. The previous code called
	// ExternalStore::fetchFromUrl() for every gzipped row, which broke
	// locally-stored gzipped text (cf. HistoryBlobStub::getText()).
	if ( in_array( 'external', $flags ) ) {
		$text = ExternalStore::fetchFromUrl( $text );
		if ( $text === false ) {
			return '';
		}
	}
	if ( in_array( 'gzip', $flags ) ) {
		$text = gzinflate( $text );
	}
	// Generalized: plain (non-gzip) rows now return their text instead of
	// always falling through to ''.
	return is_string( $text ) ? $text : '';
}