/**
 * Perform an XML-RPC call against the configured blocker service.
 *
 * @access private
 * @static
 * @param string $method RPC method name to invoke
 * @param array $params Arguments; each is converted via MWBlocker::outParam()
 * @return mixed Decoded value of the RPC response
 * @throws MWException on transport failure or an RPC-level fault
 */
function sendRPC($method, $params = array()) {
    global $mwBlockerHost, $mwBlockerPort, $mwBlockerDebug;

    $rpcClient = new XML_RPC_Client('/Blocker', $mwBlockerHost, $mwBlockerPort);
    if ($mwBlockerDebug) {
        $rpcClient->debug = true;
    }

    // Convert each argument into its XML-RPC representation.
    $converted = array_map(array('MWBlocker', 'outParam'), $params);
    $request = new XML_RPC_Message($method, $converted);

    // The client library may emit warnings on network trouble; keep them
    // quiet and time the round trip instead.
    wfSuppressWarnings();
    $before = wfTime();
    $response = $rpcClient->send($request);
    $elapsed = wfTime() - $before;
    wfRestoreWarnings();

    $debug = sprintf("MWBlocker::sendRPC for %s took %0.2fms\n", $method, $elapsed * 1000.0);
    wfDebug($debug);
    if ($mwBlockerDebug) {
        echo $debug;
    }

    if (!is_object($response)) {
        throw new MWException("Unknown XML-RPC error");
    }
    if ($response->faultCode()) {
        throw new MWException($response->faultCode() . ': ' . $response->faultString());
    }
    return $response->value()->getval();
}
/**
 * Maintenance entry point: import revisions either from a single text file
 * (--file, title derived from the file name) or from an XML dump read on
 * stdin (--dump=-). Exactly one of the two options must be supplied.
 *
 * Prints revision count, throughput, and peak memory usage when done.
 */
public function execute() {
    // XOR: require exactly one of --file / --dump.
    if (!($this->hasOption('file') ^ $this->hasOption('dump'))) {
        $this->error("You must provide a file or dump", true);
    }
    $this->checkOptions();
    if ($this->hasOption('file')) {
        $revision = new WikiRevision();
        $revision->setText(file_get_contents($this->getOption('file')));
        // Title comes from the file name minus the .txt extension.
        $revision->setTitle(Title::newFromText(rawurldecode(basename($this->getOption('file'), '.txt'))));
        $this->handleRevision($revision);
        return;
    }
    $this->startTime = wfTime();
    if ($this->getOption('dump') == '-') {
        $source = new ImportStreamSource($this->getStdin());
    } else {
        // Fixed message wording: "in the meantime" (was "on the meantime").
        $this->error("Sorry, I don't support dump filenames yet. Use - and provide it on stdin in the meantime.", true);
    }
    $importer = new WikiImporter($source);
    $importer->setRevisionCallback(array(&$this, 'handleRevision'));
    $this->from = $this->getOption('from', null);
    $this->count = 0;
    $importer->doImport();
    $this->conclusions();
    $delta = wfTime() - $this->startTime;
    $this->error("Done {$this->count} revisions in " . round($delta, 2) . " seconds ");
    if ($delta > 0) {
        $this->error(round($this->count / $delta, 2) . " pages/sec");
    }
    # Perform the memory_get_peak_usage() when all the other data has been output so there's no damage if it dies.
    # It is only available since 5.2.0 (since 5.2.1 if you haven't compiled with --enable-memory-limit)
    $this->error("Memory peak usage of " . memory_get_peak_usage() . " bytes\n");
}
/**
 * Sanity check: wfTime() returns floats and is monotonically increasing
 * across two consecutive calls.
 */
function testTime() {
    $first = wfTime();
    $this->assertType('float', $first);
    $second = wfTime();
    $this->assertTrue($second > $first, "Time is running backwards!");
}
/**
 * Read an XML dump from stdin and pass every revision to handleRevision().
 *
 * @return mixed Result of WikiImporter::doImport()
 */
function run() {
    $this->startTime = wfTime();
    $stdin = fopen('php://stdin', 'rt');
    $importer = new WikiImporter(new ImportStreamSource($stdin));
    $importer->setRevisionCallback(array(&$this, 'handleRevision'));
    return $importer->doImport();
}
/**
 * Maintenance entry point: stream an XML dump from stdin through
 * handleRevision(), writing results under --output-dir.
 *
 * @return mixed Result of WikiImporter::doImport()
 */
public function execute() {
    $this->outputDirectory = $this->getOption('output-dir');
    $this->startTime = wfTime();
    $importer = new WikiImporter(new ImportStreamSource($this->getStdin()));
    $importer->setRevisionCallback(array(&$this, 'handleRevision'));
    return $importer->doImport();
}
/**
 * Time the overhead of dispatching the (empty) 'Test' hook.
 *
 * @param int $trials Number of hook invocations to average over
 * @return string Formatted per-trial wall time in seconds
 */
private function benchHooks($trials = 10) {
    $begin = wfTime();
    for ($run = 0; $run < $trials; $run++) {
        wfRunHooks('Test');
    }
    $elapsed = wfTime() - $begin;
    return sprintf("Took %6.2fs", $elapsed / $trials);
}
/**
 * Time Squid cache purges for a batch of URLs.
 *
 * @param array $urls URLs to purge
 * @param int $trials Number of purge rounds to average over
 * @return string Formatted per-trial and per-title timings in ms
 */
function benchSquid($urls, $trials = 1) {
    $begin = wfTime();
    for ($run = 0; $run < $trials; $run++) {
        SquidUpdate::purge($urls);
    }
    $perTrial = (wfTime() - $begin) / $trials;
    return sprintf("%4d titles in %6.2fms (%6.2fms each)",
        count($urls), $perTrial * 1000.0, ($perTrial / count($urls)) * 1000.0);
}
/**
 * Update the special pages localization cache.
 *
 * Runs each registered $wgSpecialPageCacheUpdates callback against the
 * master DB, then recaches every expensive query page in $wgQueryPages
 * (honouring $wgDisableQueryPageUpdate and per-page row limits), waiting
 * for slaves after each step to avoid replication lag.
 *
 * @throws \InvalidArgumentException if a cache-update callback is not callable
 * @throws \DBError if a query page recache fails
 */
public function rebuildLocalizationCache() {
    global $IP, $wgSpecialPageCacheUpdates, $wgQueryPages, $wgQueryCacheLimit, $wgDisableQueryPageUpdate;
    $dbw = wfGetDB(DB_MASTER);
    // Phase 1: run the registered per-special-page cache update callbacks.
    foreach ($wgSpecialPageCacheUpdates as $special => $call) {
        if (!is_callable($call)) {
            throw new \InvalidArgumentException("Uncallable function '{$call}' for special page {$special}");
        }
        $start = wfTime();
        call_user_func($call, $dbw);
        $end = wfTime();
        $this->info(sprintf("%-30s completed in %.2fs", $special, $end - $start));
        // Wait for the slave to catch up
        wfWaitForSlaves();
    }
    // This is needed to initialise $wgQueryPages
    require_once "{$IP}/includes/QueryPage.php";
    // O(1) lookup table of pages excluded from updates.
    $disabledPages = $wgDisableQueryPageUpdate ? array_flip($wgDisableQueryPageUpdate) : [];
    // Phase 2: recache expensive query pages.
    foreach ($wgQueryPages as $page) {
        list($class, $special) = $page;
        // Optional third element overrides the global cache row limit.
        $limit = isset($page[2]) ? $page[2] : $wgQueryCacheLimit;
        $queryPage = $this->getQueryPage($special, $class);
        if (array_key_exists($special, $disabledPages)) {
            // skip disabled pages
            $this->info(sprintf("%-30s disabled", $special));
            continue;
        }
        if (!$queryPage->isExpensive()) {
            // don't bother with cheap pages
            $this->info(sprintf("%-30s skipped", $special));
            continue;
        }
        $start = wfTime();
        $num = $queryPage->recache($limit);
        $end = wfTime();
        if ($num === false) {
            throw new \DBError($dbw, "database error");
        }
        $this->info(sprintf("%-30s updated %d rows in %.2fs", $special, $num, $end - $start));
        if (wfGetLB()->pingAll()) {
            // commit the changes if all connections are still open
            $dbw->commit();
        } else {
            // Reopen any connections that have closed
            $count = 6;
            do {
                sleep(10);
            } while ($count-- > 0 && !wfGetLB()->pingAll());
        }
        // Wait for the slave to catch up
        wfWaitForSlaves();
    }
}
/**
 * Record progress and emit a status line (with ETA) every 100 rows.
 *
 * @param int $updated Number of rows actually changed by this step
 */
function progress($updated) {
    $this->updated += $updated;
    $this->processed++;
    if ($this->processed % 100 != 0) {
        return; // only report every 100th processed row
    }
    $now = wfTime();
    $elapsed = $now - $this->startTime;
    $fractionDone = $this->processed / $this->count;
    $changedRatio = $this->updated / $this->processed;
    // Linear extrapolation of total run time from the fraction completed.
    $eta = $this->startTime + $elapsed / $fractionDone;
    printf("%s: %6.2f%% done on %s; ETA %s [%d/%d] %.2f/sec <%.2f%% updated>\n",
        wfTimestamp(TS_DB, intval($now)), $fractionDone * 100.0, $this->table,
        wfTimestamp(TS_DB, intval($eta)), $this->processed, $this->count,
        $this->processed / $elapsed, $changedRatio * 100.0);
    flush();
}
/**
 * Benchmark DELETE vs TRUNCATE on a scratch table: create a temporary
 * `test` table, fill it, time emptying it with each statement, print the
 * elapsed seconds, and drop the table afterwards.
 */
public function execute() {
    $dbw = wfGetDB(DB_MASTER);
    $test = $dbw->tableName('test');
    $dbw->query("CREATE TABLE IF NOT EXISTS /*_*/{$test} (\n test_id int unsigned NOT NULL PRIMARY KEY AUTO_INCREMENT,\n text varbinary(255) NOT NULL\n);");
    $this->insertData($dbw);
    $start = wfTime();
    $this->delete($dbw);
    $end = wfTime();
    // Bug fix: '.' and '-' share precedence (left-assoc) on PHP < 8, so the
    // original `"Delete: " . $end - $start` evaluated as
    // `("Delete: " . $end) - $start` and printed a meaningless number.
    echo "Delete: " . ($end - $start);
    echo "\r\n";
    $this->insertData($dbw);
    $start = wfTime();
    $this->truncate($dbw);
    $end = wfTime();
    echo "Truncate: " . ($end - $start);
    echo "\r\n";
    $dbw->dropTable('test');
}
/**
 * Maintenance entry point: parse every page in an XML dump from stdin
 * (optionally with a custom parser class via --parser) and report
 * rendering throughput.
 */
public function execute() {
    $this->outputDirectory = $this->getOption('output-dir');
    $this->prefix = $this->getOption('prefix', 'wiki');
    $this->startTime = wfTime();
    if ($this->hasOption('parser')) {
        global $wgParserConf;
        // Swap in the requested parser class and tag output files with it.
        $wgParserConf['class'] = $this->getOption('parser');
        $this->prefix .= "-{$wgParserConf['class']}";
    }
    $importer = new WikiImporter(new ImportStreamSource($this->getStdin()));
    $importer->setRevisionCallback(array(&$this, 'handleRevision'));
    $importer->doImport();
    $elapsed = wfTime() - $this->startTime;
    $this->error("Rendered {$this->count} pages in " . round($elapsed, 2) . " seconds ");
    if ($elapsed > 0) {
        $this->error(round($this->count / $elapsed, 2) . " pages/sec");
    }
    $this->error("\n");
}
/**
 * Run each benchmark --count times and record total and average wall time
 * per benchmark in $this->results (keyed 1..n).
 *
 * @param array $benchs Entries with a 'function' callback and optional 'args'
 */
public function bench(array $benchs) {
    $count = $this->getOption('count', 100);
    $index = 0;
    foreach ($benchs as $bench) {
        // Default to an empty argument list.
        if (!array_key_exists('args', $bench)) {
            $bench['args'] = array();
        }
        $index++;
        $begin = wfTime();
        for ($run = 0; $run < $count; $run++) {
            call_user_func_array($bench['function'], $bench['args']);
        }
        $elapsed = wfTime() - $begin;
        // Render object-method callbacks as "Class->method" for the report.
        if (is_array($bench['function'])) {
            $bench['function'] = get_class($bench['function'][0]) . '->' . $bench['function'][1];
        }
        $this->results[$index] = array(
            'function' => $bench['function'],
            'arguments' => $bench['args'],
            'count' => $count,
            'delta' => $elapsed,
            'average' => $elapsed / $count,
        );
    }
}
/**
 * Spell-check a comma-separated list of words from the request
 * ('words' parameter) in the requested language ('lang' parameter),
 * log timing statistics, and return the checker result (with the
 * elapsed time folded into its 'info' section).
 *
 * @return array Spell-checker response including suggestions
 */
static function checkWords() {
    wfProfileIn(__METHOD__);
    $request = F::app()->getGlobal('wgRequest');
    // Request parameters.
    $language = $request->getVal('lang', false);
    $wordList = explode(',', $request->getVal('words', ''));
    // Benchmark the spell-check call.
    $time = wfTime();
    $checker = new SpellCheckerService($language);
    $result = $checker->checkWords($wordList);
    // BugId:2570 - log statistics
    $wordsCount = count($wordList);
    $suggestionsCount = count($result['suggestions']);
    $time = round(wfTime() - $time, 4);
    if (!empty($result)) {
        $result['info']['time'] = $time;
    }
    Wikia::log(__METHOD__, __LINE__, "{$wordsCount} words checked / {$suggestionsCount} suggestions / done in {$time} sec.", true);
    wfProfileOut(__METHOD__);
    return $result;
}
/**
 * Chunked inserts: flush the buffered rows once the chunk limit is
 * reached, printing a progress report with an estimated completion time.
 *
 * @param array &$chunk Buffered rows; emptied after a flush.
 * @param int $key Optional progress key used in place of the running row
 *                 count — pass this when setChunkScale() was given a max
 *                 key value rather than a row count as the final number.
 * @access private
 */
function addChunk(&$chunk, $key = null) {
    if (count($chunk) < $this->chunkSize) {
        return; // buffer not full yet
    }
    $this->insertChunk($chunk);
    $this->chunkCount += count($chunk);
    $now = wfTime();
    $elapsed = $now - $this->chunkStartTime;
    $rate = $this->chunkCount / $elapsed;
    $completed = is_null($key) ? $this->chunkCount : $key;
    $portion = $completed / $this->chunkFinal;
    // Linear extrapolation of the total run time from the portion done.
    $eta = $this->chunkStartTime + $elapsed / $portion;
    printf("%s: %6.2f%% done on %s; ETA %s [%d/%d] %.2f/sec\n",
        wfTimestamp(TS_DB, intval($now)), $portion * 100.0, $this->chunkTable,
        wfTimestamp(TS_DB, intval($eta)), $completed, $this->chunkFinal, $rate);
    flush();
    $chunk = array();
}
/**
 * Refresh outdated cache entries for a single webservice.
 *
 * For every stored parameter set of $ws, re-calls the webservice when the
 * cached result is missing or older than the service's query policy allows,
 * stores the fresh response, and collects the articles that use the
 * refreshed parameter sets so they can be re-rendered.
 *
 * @param object $ws The webservice descriptor being updated
 * @return array Articles affected by refreshed parameter sets
 */
private function updateWSResults($ws) {
    $log = SGAGardeningIssuesAccess::getGardeningIssuesAccess();
    echo "updating " . $ws->getName() . "\n";
    $parameterSets = WSStorage::getDatabase()->getWSUsages($ws->getArticleID());
    $updatedEntries = 0;
    $affectedArticles = array();
    foreach ($parameterSets as $parameterSet) {
        echo "\t updating paramater set " . $parameterSet["paramSetId"] . "\n";
        $cacheResult = WSStorage::getDatabase()->getResultFromCache($ws->getArticleID(), $parameterSet["paramSetId"]);
        // Refresh when there is no cached result at all...
        $refresh = false;
        if (count($cacheResult) < 1) {
            $refresh = true;
        }
        // ...or when the entry is older than the query policy (minutes) permits.
        if (!$refresh) {
            if ($ws->getQueryPolicy() > 0) {
                if (wfTime() - wfTimestamp(TS_UNIX, $cacheResult["lastUpdate"]) > $ws->getQueryPolicy() * 60) {
                    $refresh = true;
                }
            }
        }
        if ($refresh) {
            echo "\t\t update necessary\n";
            // Throttle consecutive webservice calls after the first one.
            if ($updatedEntries > 0) {
                sleep($ws->getUpdateDelay());
                echo "\t\t sleeping " . $ws->getUpdateDelay() . "\n";
            }
            $parameters = WSStorage::getDatabase()->getParameters($parameterSet["paramSetId"]);
            $parameters = $ws->initializeCallParameters($parameters);
            $response = $ws->getWSClient()->call($ws->getMethod(), $parameters);
            // A string response signals an error; record a gardening issue
            // and skip storing it.
            $goon = true;
            if (is_string($response)) {
                $log->addGardeningIssueAboutValue($this->id, SMW_GARDISSUE_ERROR_WSCACHE_ENTRIES, Title::newFromText($ws->getName()), 0);
                $goon = false;
            }
            if ($goon) {
                WSStorage::getDatabase()->storeCacheEntry($ws->getArticleID(), $parameterSet["paramSetId"], serialize($response), wfTimeStamp(TS_MW, wfTime()), wfTimeStamp(TS_MW, wfTime()));
                echo "\t\t update was successfully\n";
                //get articles which have to be refreshed
            }
        }
        $tempAffectedArticles = WSStorage::getDatabase()->getUsedWSParameterSetPairs($ws->getArticleID(), $parameterSet["paramSetId"]);
        if ($ws->getQueryPolicy() > 0) {
            if ($refresh || count($tempAffectedArticles) > 1) {
                $affectedArticles = array_merge($affectedArticles, $tempAffectedArticles);
            }
            $updatedEntries += 1;
        }
    }
    return $affectedArticles;
}
/**
 * Returns an HTML comment reporting the wall-clock time elapsed since the
 * start of the request. This method has no side effects.
 *
 * @return string
 */
function wfReportTime() {
    global $wgRequestTime;
    $elapsed = wfTime() - $wgRequestTime;
    return sprintf("<!-- Served by %s in %01.3f secs. -->", wfHostname(), $elapsed);
}
/**
 * Maintenance entry point: (re)build the HTML file cache for all content
 * pages, batched by page_id, starting from an optional page id argument.
 *
 * Pass 'overwrite' as the second argument to re-render pages that already
 * have a good cache entry. Relies on temporarily mutating several globals
 * ($wgTitle, $wgArticle, $wgOut, $wgUseFileCache) to drive the normal
 * article view path into the file cache — the exact ordering of those
 * mutations is load-bearing.
 */
public function execute() {
    global $wgUseFileCache, $wgDisableCounters, $wgContentNamespaces, $wgRequestTime;
    global $wgTitle, $wgArticle, $wgOut, $wgUser;
    if (!$wgUseFileCache) {
        $this->error("Nothing to do -- \$wgUseFileCache is disabled.", true);
    }
    // Don't count these synthetic views.
    $wgDisableCounters = false;
    $start = $this->getArg(0, "0");
    if (!ctype_digit($start)) {
        $this->error("Invalid value for start parameter.", true);
    }
    $start = intval($start);
    $overwrite = $this->hasArg(1) && $this->getArg(1) === 'overwrite';
    $this->output("Building content page file cache from page {$start}!\n");
    $dbr = wfGetDB(DB_SLAVE);
    // Default to the full page_id range when no start was given.
    $start = $start > 0 ? $start : $dbr->selectField('page', 'MIN(page_id)', false, __FUNCTION__);
    $end = $dbr->selectField('page', 'MAX(page_id)', false, __FUNCTION__);
    if (!$start) {
        $this->error("Nothing to do.", true);
    }
    $_SERVER['HTTP_ACCEPT_ENCODING'] = 'bgzip'; // hack, no real client
    OutputPage::setEncodings(); # Not really used yet
    # Do remaining chunk
    $end += $this->mBatchSize - 1;
    $blockStart = $start;
    $blockEnd = $start + $this->mBatchSize - 1;
    $dbw = wfGetDB(DB_MASTER);
    // Go through each page and save the output
    while ($blockEnd <= $end) {
        // Get the pages
        $res = $dbr->select('page', array('page_namespace', 'page_title', 'page_id'), array('page_namespace' => $wgContentNamespaces, "page_id BETWEEN {$blockStart} AND {$blockEnd}"), array('ORDER BY' => 'page_id ASC', 'USE INDEX' => 'PRIMARY'));
        foreach ($res as $row) {
            $rebuilt = false;
            $wgRequestTime = wfTime(); # bug 22852
            $wgTitle = Title::makeTitleSafe($row->page_namespace, $row->page_title);
            if (null == $wgTitle) {
                $this->output("Page {$row->page_id} has bad title\n");
                continue; // broken title?
            }
            $wgOut->setTitle($wgTitle); // set display title
            $wgUser->getSkin($wgTitle); // set skin title
            $wgArticle = new Article($wgTitle);
            // If the article is cacheable, then load it
            if ($wgArticle->isFileCacheable()) {
                $cache = new HTMLFileCache($wgTitle);
                if ($cache->isFileCacheGood()) {
                    if ($overwrite) {
                        $rebuilt = true;
                    } else {
                        $this->output("Page {$row->page_id} already cached\n");
                        continue; // done already!
                    }
                }
                ob_start(array(&$cache, 'saveToFileCache')); // save on ob_end_clean()
                $wgUseFileCache = false; // hack, we don't want $wgArticle fiddling with filecache
                $wgArticle->view();
                @$wgOut->output(); // header notices
                $wgUseFileCache = true;
                ob_end_clean(); // clear buffer
                $wgOut = new OutputPage(); // empty out any output page garbage
                if ($rebuilt) {
                    $this->output("Re-cached page {$row->page_id}\n");
                } else {
                    $this->output("Cached page {$row->page_id}\n");
                }
            } else {
                $this->output("Page {$row->page_id} not cacheable\n");
            }
            $dbw->commit(); // commit any changes
        }
        $blockStart += $this->mBatchSize;
        $blockEnd += $this->mBatchSize;
        wfWaitForSlaves(5);
    }
    $this->output("Done!\n");
    // Remove these to be safe
    if (isset($wgTitle)) {
        unset($wgTitle);
    }
    if (isset($wgArticle)) {
        unset($wgArticle);
    }
}
/**
 * Write request profiling data (elapsed time, proxy/forwarding info, URI)
 * to the debug log when profiling is enabled.
 *
 * NOTE(review): assumes $wgRequestTime holds a microtime(false)-style
 * "usec sec" string rather than a float — confirm against the entry point
 * that sets it.
 */
function logProfilingData() {
    global $wgRequestTime, $wgDebugLogFile, $wgDebugRawPage, $wgRequest;
    global $wgProfiling, $wgProfileStack, $wgProfileLimit, $wgUser;
    $now = wfTime();
    // Reassemble the request start time from the "usec sec" pair.
    list($usec, $sec) = explode(' ', $wgRequestTime);
    $start = (double) $sec + (double) $usec;
    $elapsed = $now - $start;
    if ($wgProfiling) {
        $prof = wfGetProfilingOutput($start, $elapsed);
        // Collect any proxy/forwarding headers for the log line.
        $forward = '';
        if (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) {
            $forward = ' forwarded for ' . $_SERVER['HTTP_X_FORWARDED_FOR'];
        }
        if (!empty($_SERVER['HTTP_CLIENT_IP'])) {
            $forward .= ' client IP ' . $_SERVER['HTTP_CLIENT_IP'];
        }
        if (!empty($_SERVER['HTTP_FROM'])) {
            $forward .= ' from ' . $_SERVER['HTTP_FROM'];
        }
        if ($forward) {
            $forward = "\t(proxied via {$_SERVER['REMOTE_ADDR']}{$forward})";
        }
        if ($wgUser->isAnon()) {
            $forward .= ' anon';
        }
        $log = sprintf("%s\t%04.3f\t%s\n", gmdate('YmdHis'), $elapsed, urldecode($_SERVER['REQUEST_URI'] . $forward));
        // Skip logging for action=raw unless explicitly enabled.
        if ('' != $wgDebugLogFile && ($wgRequest->getVal('action') != 'raw' || $wgDebugRawPage)) {
            error_log($log . $prof, 3, $wgDebugLogFile);
        }
    }
}
/**
 * Puts the specified file to Amazon S3 storage via s3cmd.
 *
 * If $bPublic, the file will be readable by all users. If $sMimeType is
 * set the given mime type is passed to s3cmd, otherwise AmazonS3 decides.
 *
 * @param string $sPath Local path of the file to upload
 * @param bool $bPublic Whether to apply --acl-public
 * @param string|null $sMimeType Optional explicit mime type
 * @return int Exit status of the s3cmd invocation
 */
public static function putToAmazonS3($sPath, $bPublic = true, $sMimeType = null) {
    $time = wfTime();
    $target = wfEscapeShellArg('s3://wikia_xml_dumps/' . DumpsOnDemand::getPath(basename($sPath)));
    $sPath = wfEscapeShellArg($sPath);
    $command = 'sudo /usr/bin/s3cmd -c /root/.s3cfg --add-header=Content-Disposition:attachment';
    if (!is_null($sMimeType)) {
        $sMimeType = wfEscapeShellArg($sMimeType);
        $command .= " --mime-type={$sMimeType}";
    }
    if ($bPublic) {
        $command .= ' --acl-public';
    }
    $command .= " put {$sPath} {$target}";
    // $iStatus is filled by reference with the command's exit code.
    wfShellExec($command, $iStatus);
    $time = Wikia::timeDuration(wfTime() - $time);
    Wikia::log(__METHOD__, "info", "Put {$sPath} to Amazon S3 storage: status: {$iStatus}, time: {$time}", true, true);
    return $iStatus;
}
/**
 * Main entry point: create a wiki with the prepared parameters.
 *
 * Validates preconditions (read-only mode, founder, executables, domain),
 * creates the wiki database on the target cluster, registers the wiki in
 * WikiFactory (city_list / city_domains / city_variables), populates the
 * new database (tables, starter content, founder rights, default logo),
 * assigns vertical/category/language tags, and finally queues the async
 * post-creation task. Statement order matters throughout: each step
 * depends on the side effects (DB handles, ids, globals) of the previous.
 *
 * @throws CreateWikiException an exception with status of operation set
 */
public function create() {
    global $wgExternalSharedDB, $wgSharedDB, $wgUser;
    $then = microtime(true);
    // Set this flag to ensure that all select operations go against master
    // Slave lag can cause random errors during wiki creation process
    global $wgForceMasterDatabase;
    $wgForceMasterDatabase = true;
    wfProfileIn(__METHOD__);
    if (wfReadOnly()) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('DB is read only', self::ERROR_READONLY);
    }
    // check founder
    if ($this->mFounder->isAnon()) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Founder is anon', self::ERROR_USER_IN_ANON);
    }
    // check executables
    $status = $this->checkExecutables();
    if ($status != 0) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('checkExecutables() failed', $status);
    }
    // check domains
    $status = $this->checkDomain();
    if ($status != 0) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Check domain failed', $status);
    }
    // prepare all values needed for creating wiki
    $this->prepareValues();
    // prevent domain to be registered more than once
    if (!AutoCreateWiki::lockDomain($this->mDomain)) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Domain name taken', self::ERROR_DOMAIN_NAME_TAKEN);
    }
    // start counting time
    $this->mCurrTime = wfTime();
    // check and create database
    $this->mDBw = wfGetDB(DB_MASTER, array(), $wgExternalSharedDB); # central
    ///
    // local database handled is handler to cluster we create new wiki.
    // It doesn't have to be the same like wikifactory cluster or db cluster
    // where Special:CreateWiki exists.
    //
    // @todo do not use hardcoded name, code below is only for test
    //
    // set $activeCluster to false if you want to create wikis on first
    // cluster
    //
    $this->mClusterDB = self::ACTIVE_CLUSTER ? "wikicities_" . self::ACTIVE_CLUSTER : "wikicities";
    $this->mNewWiki->dbw = wfGetDB(DB_MASTER, array(), $this->mClusterDB); // database handler, old $dbwTarget
    // check if database is creatable
    // @todo move all database creation checkers to canCreateDatabase
    if (!$this->canCreateDatabase()) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('DB exists - ' . $this->mNewWiki->dbname, self::ERROR_DATABASE_ALREADY_EXISTS);
    } else {
        $this->mNewWiki->dbw->query(sprintf("CREATE DATABASE `%s`", $this->mNewWiki->dbname));
        wfDebugLog("createwiki", "Database {$this->mNewWiki->dbname} created\n", true);
    }
    /**
     * create position in wiki.factory
     * (I like sprintf construction, so sue me)
     */
    if (!$this->addToCityList()) {
        wfDebugLog("createwiki", __METHOD__ . ": Cannot set data in city_list table\n", true);
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Cannot add wiki to city_list', self::ERROR_DATABASE_WRITE_TO_CITY_LIST_BROKEN);
    }
    // set new city_id
    $this->mNewWiki->city_id = $this->mDBw->insertId();
    if (empty($this->mNewWiki->city_id)) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Cannot set data in city_list table. city_id is empty after insert', self::ERROR_DATABASE_WIKI_FACTORY_TABLES_BROKEN);
    }
    wfDebugLog("createwiki", __METHOD__ . ": Row added added into city_list table, city_id = {$this->mNewWiki->city_id}\n", true);
    /**
     * add domain and www.domain to the city_domains table
     */
    if (!$this->addToCityDomains()) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Cannot set data in city_domains table', self::ERROR_DATABASE_WRITE_TO_CITY_DOMAINS_BROKEN);
    }
    wfDebugLog("createwiki", __METHOD__ . ": Row added into city_domains table, city_id = {$this->mNewWiki->city_id}\n", true);
    /**
     * create image folder
     */
    global $wgEnableSwiftFileBackend;
    if (empty($wgEnableSwiftFileBackend)) {
        wfMkdirParents("{$this->mNewWiki->images_dir}");
        wfDebugLog("createwiki", __METHOD__ . ": Folder {$this->mNewWiki->images_dir} created\n", true);
    }
    // Force initialize uploader user from correct shared db
    $uploader = User::newFromName('CreateWiki script');
    $uploader->getId();
    $oldUser = $wgUser;
    $wgUser = $uploader;
    /**
     * wikifactory variables
     */
    wfDebugLog("createwiki", __METHOD__ . ": Populating city_variables\n", true);
    $this->setWFVariables();
    $tmpSharedDB = $wgSharedDB;
    $wgSharedDB = $this->mNewWiki->dbname;
    $this->mDBw->commit(__METHOD__); // commit shared DB changes
    /**
     * we got empty database created, now we have to create tables and
     * populate it with some default values
     */
    wfDebugLog("createwiki", __METHOD__ . ": Creating tables in database\n", true);
    $this->mNewWiki->dbw = wfGetDB(DB_MASTER, array(), $this->mNewWiki->dbname);
    if (!$this->createTables()) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Creating tables not finished', self::ERROR_SQL_FILE_BROKEN);
    }
    /**
     * import language starter
     */
    if (!$this->importStarter()) {
        wfProfileOut(__METHOD__);
        throw new CreateWikiException('Starter import failed', self::ERROR_SQL_FILE_BROKEN);
    }
    /**
     * making the wiki founder a sysop/bureaucrat
     */
    wfDebugLog("createwiki", __METHOD__ . ": Create user sysop/bureaucrat for user: {$this->mNewWiki->founderId} \n", true);
    if (!$this->addUserToGroups()) {
        wfDebugLog("createwiki", __METHOD__ . ": Create user sysop/bureaucrat for user: {$this->mNewWiki->founderId} failed \n", true);
    }
    /**
     * init site_stats table (add empty row)
     */
    $this->mNewWiki->dbw->insert("site_stats", array("ss_row_id" => "1"), __METHOD__);
    /**
     * copy default logo
     */
    $res = ImagesService::uploadImageFromUrl(self::CREATEWIKI_LOGO, (object) ['name' => 'Wiki.png'], $uploader);
    if ($res['status'] === true) {
        wfDebugLog("createwiki", __METHOD__ . ": Default logo has been uploaded\n", true);
    } else {
        wfDebugLog("createwiki", __METHOD__ . ": Default logo has not been uploaded - " . print_r($res['errors'], true) . "\n", true);
    }
    /**
     * destroy connection to newly created database
     */
    $this->waitForSlaves(__METHOD__);
    $wgSharedDB = $tmpSharedDB;
    $oHub = WikiFactoryHub::getInstance();
    $oHub->setVertical($this->mNewWiki->city_id, $this->mNewWiki->vertical, "CW Setup");
    wfDebugLog("createwiki", __METHOD__ . ": Wiki added to the vertical: {$this->mNewWiki->vertical} \n", true);
    for ($i = 0; $i < count($this->mNewWiki->categories); $i++) {
        $oHub->addCategory($this->mNewWiki->city_id, $this->mNewWiki->categories[$i]);
        wfDebugLog("createwiki", __METHOD__ . ": Wiki added to the category: {$this->mNewWiki->categories[$i]} \n", true);
    }
    /**
     * define wiki type
     */
    $wiki_type = 'default';
    /**
     * modify variables
     */
    global $wgUniversalCreationVariables;
    if (!empty($wgUniversalCreationVariables) && !empty($wiki_type) && isset($wgUniversalCreationVariables[$wiki_type])) {
        $this->addCustomSettings(0, $wgUniversalCreationVariables[$wiki_type], "universal");
        wfDebugLog("createwiki", __METHOD__ . ": Custom settings added for wiki_type: {$wiki_type} \n", true);
    }
    /**
     * set variables per language
     */
    global $wgLangCreationVariables;
    $langCreationVar = isset($wgLangCreationVariables[$wiki_type]) ? $wgLangCreationVariables[$wiki_type] : $wgLangCreationVariables;
    $this->addCustomSettings($this->mNewWiki->language, $langCreationVar, "language");
    wfDebugLog("createwiki", __METHOD__ . ": Custom settings added for wiki_type: {$wiki_type} and language: {$this->mNewWiki->language} \n", true);
    /**
     * set tags per language and per hub
     * @FIXME the switch is !@#$ creazy, but I didn't find a core function
     */
    $tags = new WikiFactoryTags($this->mNewWiki->city_id);
    $langTag = $this->mNewWiki->language;
    if ($langTag !== 'en') {
        // Collapse language variants onto their base language tag.
        switch ($langTag) {
            case 'pt-br':
                $langTag = 'pt';
                break;
            case 'zh-tw':
            case 'zh-hk':
            case 'zh-clas':
            case 'zh-class':
            case 'zh-classical':
            case 'zh-cn':
            case 'zh-hans':
            case 'zh-hant':
            case 'zh-min-':
            case 'zh-min-n':
            case 'zh-mo':
            case 'zh-sg':
            case 'zh-yue':
                $langTag = 'zh';
                break;
        }
        $tags->addTagsByName($langTag);
    }
    /**
     * move main page -> this code exists in CreateWikiLocalJob - so it is not needed anymore
     */
    /**
     * Unset database from mNewWiki, because database objects cannot be serialized from MW1.19
     */
    unset($this->mNewWiki->dbw);
    // Restore wgUser
    $wgUser = $oldUser;
    unset($oldUser);
    /**
     * Schedule an async task
     */
    $creationTask = new \Wikia\Tasks\Tasks\CreateNewWikiTask();
    $job_params = new stdClass();
    foreach ($this->mNewWiki as $id => $value) {
        if (!is_object($value)) {
            $job_params->{$id} = $value;
        }
    }
    // BugId:15644 - I need to pass this to CreateWikiLocalJob::changeStarterContributions
    $job_params->sDbStarter = $this->sDbStarter;
    $task_id = (new \Wikia\Tasks\AsyncTaskList())->wikiId($this->mNewWiki->city_id)->prioritize()->add($creationTask->call('postCreationSetup', $job_params))->add($creationTask->call('maintenance', rtrim($this->mNewWiki->url, "/")))->queue();
    wfDebugLog("createwiki", __METHOD__ . ": Local maintenance task added as {$task_id}\n", true);
    $this->info(__METHOD__ . ': done', ['task_id' => $task_id, 'took' => microtime(true) - $then]);
    wfProfileOut(__METHOD__);
}
/**
 * Pop and run jobs from the job queue at the end of a request, honouring
 * $wgJobRunRate. A fractional rate runs a single job with that
 * probability; an integral rate runs that many jobs. No-op when the rate
 * is non-positive or the wiki is read-only.
 */
function doJobs() {
    global $wgJobRunRate;
    if ($wgJobRunRate <= 0 || wfReadOnly()) {
        return;
    }
    if ($wgJobRunRate < 1) {
        // Probabilistic single run for fractional rates.
        $max = mt_getrandmax();
        if (mt_rand(0, $max) > $max * $wgJobRunRate) {
            return;
        }
        $n = 1;
    } else {
        $n = intval($wgJobRunRate);
    }
    while ($n-- && false != ($job = Job::pop())) {
        $msg = $job->toString() . "\n";
        // Measure wall time of the job run in milliseconds.
        $t = -wfTime();
        $ok = $job->run();
        $t += wfTime();
        $t = round($t * 1000);
        if ($ok) {
            $msg .= "Success, Time: {$t} ms\n";
        } else {
            $msg .= "Error: " . $job->getLastError() . ", Time: {$t} ms\n";
        }
        wfDebugLog('jobqueue', $msg);
    }
}
/**
 * This does all the heavy lifting for outputWikitext, except it returns
 * the parser output instead of sending it straight to $wgOut. Makes
 * things nice and simple for, say, embedding thread pages within a
 * discussion system (LiquidThreads).
 *
 * @param $text string wikitext to parse
 * @param $cache boolean whether to save the result to the parser cache
 * @param $parserOptions parsing options, defaults to false
 * @return ParserOutput for the parsed text
 */
public function getOutputFromWikitext($text, $cache = true, $parserOptions = false) {
    global $wgParser, $wgEnableParserCache, $wgUseFileCache;
    if (!$parserOptions) {
        $parserOptions = $this->getParserOptions();
    }
    // Time the parse and log pages that take longer than 3 seconds.
    $started = wfTime();
    $this->mParserOutput = $wgParser->parse($text, $this->mTitle, $parserOptions, true, true, $this->getRevIdFetched());
    $elapsed = wfTime() - $started;
    # Timing hack
    if ($elapsed > 3) {
        wfDebugLog('slow-parse', sprintf("%-5.2f %s", $elapsed, $this->mTitle->getPrefixedDBkey()));
    }
    if ($wgEnableParserCache && $cache && $this->mParserOutput->isCacheable()) {
        ParserCache::singleton()->save($this->mParserOutput, $this, $parserOptions);
    }
    // Make sure file cache is not used on uncacheable content.
    // Output that has magic words in it can still use the parser cache
    // (if enabled), though it will generally expire sooner.
    if (!$this->mParserOutput->isCacheable() || $this->mParserOutput->containsOldMagic()) {
        $wgUseFileCache = false;
    }
    $this->doCascadeProtectionUpdates($this->mParserOutput);
    return $this->mParserOutput;
}
/**
 * Append a "finished" entry with the elapsed query time to the SQL log
 * file. No-op when query logging is disabled or the log file cannot be
 * opened for appending.
 */
function logFinishedQuery() {
    global $wgSqlLogFile, $wgLogQueries;
    if (!$wgLogQueries) {
        return;
    }
    $interval = wfTime() - $this->starttime;
    $f = fopen($wgSqlLogFile, 'a');
    if ($f === false) {
        // Robustness fix: the original wrote to an unchecked handle,
        // raising warnings whenever the log file was unwritable.
        return;
    }
    fputs($f, 'finished at ' . wfTimestampNow() . "; took {$interval} secs\n");
    fclose($f);
}
/**
 * This is the default action of the script: just view the page of
 * the given title.
 *
 * Handles, in order: file-cache / ETag / Last-Modified short-circuits,
 * redirects requested by getOldID(), diff views, parser-cache hits,
 * missing pages (404), permission checks, old-revision and deleted-text
 * handling, raw CSS/JS pages, wiki redirects, normal parsing (with slow
 * parse logging), and finally page-title fixup, anon-talk notices, patrol
 * links and trackbacks. The branch ordering is load-bearing: several
 * branches return early after disabling or redirecting $wgOut.
 */
public function view() {
    global $wgUser, $wgOut, $wgRequest, $wgContLang;
    global $wgEnableParserCache, $wgStylePath, $wgParser;
    global $wgUseTrackbacks, $wgNamespaceRobotPolicies, $wgArticleRobotPolicies;
    global $wgDefaultRobotPolicy;
    wfProfileIn(__METHOD__);
    # Get variables from query string
    $oldid = $this->getOldID();
    # Try file cache
    if ($oldid === 0 && $this->checkTouched()) {
        global $wgUseETag;
        if ($wgUseETag) {
            $parserCache = ParserCache::singleton();
            $wgOut->setETag($parserCache->getETag($this, $wgUser));
        }
        // 304 Not Modified short-circuit.
        if ($wgOut->checkLastModified($this->getTouched())) {
            wfProfileOut(__METHOD__);
            return;
        } else {
            if ($this->tryFileCache()) {
                # tell wgOut that output is taken care of
                $wgOut->disable();
                $this->viewUpdates();
                wfProfileOut(__METHOD__);
                return;
            }
        }
    }
    $ns = $this->mTitle->getNamespace(); # shortcut
    $sk = $wgUser->getSkin();
    # getOldID may want us to redirect somewhere else
    if ($this->mRedirectUrl) {
        $wgOut->redirect($this->mRedirectUrl);
        wfProfileOut(__METHOD__);
        return;
    }
    $diff = $wgRequest->getVal('diff');
    $rcid = $wgRequest->getVal('rcid');
    $rdfrom = $wgRequest->getVal('rdfrom');
    $diffOnly = $wgRequest->getBool('diffonly', $wgUser->getOption('diffonly'));
    $purge = $wgRequest->getVal('action') == 'purge';
    $return404 = false;
    $wgOut->setArticleFlag(true);
    # Discourage indexing of printable versions, but encourage following
    if ($wgOut->isPrintable()) {
        $policy = 'noindex,follow';
    } elseif (isset($wgArticleRobotPolicies[$this->mTitle->getPrefixedText()])) {
        $policy = $wgArticleRobotPolicies[$this->mTitle->getPrefixedText()];
    } elseif (isset($wgNamespaceRobotPolicies[$ns])) {
        # Honour customised robot policies for this namespace
        $policy = $wgNamespaceRobotPolicies[$ns];
    } else {
        $policy = $wgDefaultRobotPolicy;
    }
    $wgOut->setRobotPolicy($policy);
    # If we got diff and oldid in the query, we want to see a
    # diff page instead of the article.
    if (!is_null($diff)) {
        $wgOut->setPageTitle($this->mTitle->getPrefixedText());
        $diff = $wgRequest->getVal('diff');
        $htmldiff = $wgRequest->getVal('htmldiff', false);
        $de = new DifferenceEngine($this->mTitle, $oldid, $diff, $rcid, $purge, $htmldiff);
        // DifferenceEngine directly fetched the revision:
        $this->mRevIdFetched = $de->mNewid;
        $de->showDiffPage($diffOnly);
        // Needed to get the page's current revision
        $this->loadPageData();
        if ($diff == 0 || $diff == $this->mLatest) {
            # Run view updates for current revision only
            $this->viewUpdates();
        }
        wfProfileOut(__METHOD__);
        return;
    }
    # Should the parser cache be used?
    $pcache = $this->useParserCache($oldid);
    wfDebug('Article::view using parser cache: ' . ($pcache ? 'yes' : 'no') . "\n");
    if ($wgUser->getOption('stubthreshold')) {
        wfIncrStats('pcache_miss_stub');
    }
    $wasRedirected = false;
    if (isset($this->mRedirectedFrom)) {
        // This is an internally redirected page view.
        // We'll need a backlink to the source page for navigation.
        if (wfRunHooks('ArticleViewRedirect', array(&$this))) {
            $redir = $sk->makeKnownLinkObj($this->mRedirectedFrom, '', 'redirect=no');
            $s = wfMsgExt('redirectedfrom', array('parseinline', 'replaceafter'), $redir);
            $wgOut->setSubtitle($s);
            // Set the fragment if one was specified in the redirect
            if (strval($this->mTitle->getFragment()) != '') {
                $fragment = Xml::escapeJsString($this->mTitle->getFragmentForURL());
                $wgOut->addInlineScript("redirectToFragment(\"{$fragment}\");");
            }
            $wasRedirected = true;
        }
    } elseif (!empty($rdfrom)) {
        // This is an externally redirected view, from some other wiki.
        // If it was reported from a trusted site, supply a backlink.
        global $wgRedirectSources;
        if ($wgRedirectSources && preg_match($wgRedirectSources, $rdfrom)) {
            $redir = $sk->makeExternalLink($rdfrom, $rdfrom);
            $s = wfMsgExt('redirectedfrom', array('parseinline', 'replaceafter'), $redir);
            $wgOut->setSubtitle($s);
            $wasRedirected = true;
        }
    }
    $outputDone = false;
    wfRunHooks('ArticleViewHeader', array(&$this, &$outputDone, &$pcache));
    if ($pcache && $wgOut->tryParserCache($this, $wgUser)) {
        // Ensure that UI elements requiring revision ID have
        // the correct version information.
        $wgOut->setRevisionId($this->mLatest);
        $outputDone = true;
    }
    # Fetch content and check for errors
    if (!$outputDone) {
        # If the article does not exist and was deleted, show the log
        if ($this->getID() == 0) {
            $this->showDeletionLog();
        }
        $text = $this->getContent();
        if ($text === false) {
            # Failed to load, replace text with error message
            $t = $this->mTitle->getPrefixedText();
            if ($oldid) {
                $d = wfMsgExt('missingarticle-rev', array('escape'), $oldid);
                $text = wfMsg('missing-article', $t, $d);
            } else {
                $text = wfMsg('noarticletext');
            }
        }
        # Non-existent pages
        if ($this->getID() === 0) {
            $wgOut->setRobotPolicy('noindex,nofollow');
            $text = "<div class='noarticletext'>\n{$text}\n</div>";
            if (!$this->hasViewableContent()) {
                // If there's no backing content, send a 404 Not Found
                // for better machine handling of broken links.
                $return404 = true;
            }
        }
        if ($return404) {
            $wgRequest->response()->header("HTTP/1.x 404 Not Found");
        }
        # Another whitelist check in case oldid is altering the title
        if (!$this->mTitle->userCanRead()) {
            $wgOut->loginToUse();
            $wgOut->output();
            $wgOut->disable();
            wfProfileOut(__METHOD__);
            return;
        }
        # For ?curid=x urls, disallow indexing
        if ($wgRequest->getInt('curid')) {
            $wgOut->setRobotPolicy('noindex,follow');
        }
        # We're looking at an old revision
        if (!empty($oldid)) {
            $wgOut->setRobotPolicy('noindex,nofollow');
            if (is_null($this->mRevision)) {
                // FIXME: This would be a nice place to load the 'no such page' text.
            } else {
                $this->setOldSubtitle(isset($this->mOldId) ? $this->mOldId : $oldid);
                if ($this->mRevision->isDeleted(Revision::DELETED_TEXT)) {
                    if (!$this->mRevision->userCan(Revision::DELETED_TEXT)) {
                        $wgOut->addWikiMsg('rev-deleted-text-permission');
                        $wgOut->setPageTitle($this->mTitle->getPrefixedText());
                        wfProfileOut(__METHOD__);
                        return;
                    } else {
                        $wgOut->addWikiMsg('rev-deleted-text-view');
                        // and we are allowed to see...
                    }
                }
            }
        }
        $wgOut->setRevisionId($this->getRevIdFetched());
        // Pages containing custom CSS or JavaScript get special treatment
        if ($this->mTitle->isCssOrJsPage() || $this->mTitle->isCssJsSubpage()) {
            $wgOut->addHTML(wfMsgExt('clearyourcache', 'parse'));
            // Give hooks a chance to customise the output
            if (wfRunHooks('ShowRawCssJs', array($this->mContent, $this->mTitle, $wgOut))) {
                // Wrap the whole lot in a <pre> and don't parse
                $m = array();
                preg_match('!\\.(css|js)$!u', $this->mTitle->getText(), $m);
                $wgOut->addHTML("<pre class=\"mw-code mw-{$m[1]}\" dir=\"ltr\">\n");
                $wgOut->addHTML(htmlspecialchars($this->mContent));
                $wgOut->addHTML("\n</pre>\n");
            }
        } else {
            if ($rt = Title::newFromRedirect($text)) {
                # Don't append the subtitle if this was an old revision
                $wgOut->addHTML($this->viewRedirect($rt, !$wasRedirected && $this->isCurrent()));
                $parseout = $wgParser->parse($text, $this->mTitle, ParserOptions::newFromUser($wgUser));
                $wgOut->addParserOutputNoText($parseout);
            } else {
                if ($pcache) {
                    # Display content and save to parser cache
                    $this->outputWikiText($text);
                } else {
                    # Display content, don't attempt to save to parser cache
                    # Don't show section-edit links on old revisions... this way lies madness.
                    if (!$this->isCurrent()) {
                        $oldEditSectionSetting = $wgOut->parserOptions()->setEditSection(false);
                    }
                    # Display content and don't save to parser cache
                    # With timing hack -- TS 2006-07-26
                    $time = -wfTime();
                    $this->outputWikiText($text, false);
                    $time += wfTime();
                    # Timing hack
                    if ($time > 3) {
                        wfDebugLog('slow-parse', sprintf("%-5.2f %s", $time, $this->mTitle->getPrefixedDBkey()));
                    }
                    if (!$this->isCurrent()) {
                        $wgOut->parserOptions()->setEditSection($oldEditSectionSetting);
                    }
                }
            }
        }
    }
    /* title may have been set from the cache */
    $t = $wgOut->getPageTitle();
    if (empty($t)) {
        $wgOut->setPageTitle($this->mTitle->getPrefixedText());
        # For the main page, overwrite the <title> element with the con-
        # tents of 'pagetitle-view-mainpage' instead of the default (if
        # that's not empty).
        if ($this->mTitle->equals(Title::newMainPage()) && wfMsgForContent('pagetitle-view-mainpage') !== '') {
            $wgOut->setHTMLTitle(wfMsgForContent('pagetitle-view-mainpage'));
        }
    }
    # check if we're displaying a [[User talk:x.x.x.x]] anonymous talk page
    if ($ns == NS_USER_TALK && IP::isValid($this->mTitle->getText())) {
        $wgOut->addWikiMsg('anontalkpagetext');
    }
    # If we have been passed an &rcid= parameter, we want to give the user a
    # chance to mark this new article as patrolled.
    if (!empty($rcid) && $this->mTitle->exists() && $this->mTitle->userCan('patrol')) {
        $wgOut->addHTML("<div class='patrollink'>" . wfMsgHtml('markaspatrolledlink', $sk->makeKnownLinkObj($this->mTitle, wfMsgHtml('markaspatrolledtext'), "action=markpatrolled&rcid={$rcid}")) . '</div>');
    }
    # Trackbacks
    if ($wgUseTrackbacks) {
        $this->addTrackbacks();
    }
    $this->viewUpdates();
    wfProfileOut(__METHOD__);
}
/**
 * Overridden to include prefetch ratio if enabled.
 */
function showReport() {
	// Without prefetching there is nothing extra to add to the report.
	if (!$this->prefetch) {
		return parent::showReport();
	}
	if (!$this->reporting) {
		return;
	}

	$elapsed = wfTime() - $this->startTime;
	$timestamp = wfTimestamp(TS_DB);

	if ($elapsed) {
		$pageRate = $this->pageCount / $elapsed;
		$revRate = $this->revCount / $elapsed;
		// Extrapolate the ETA from the fraction of revisions done so far.
		$done = $this->revCount / $this->maxCount;
		$etaTimestamp = wfTimestamp(TS_DB, intval($this->startTime + $elapsed / $done));
		$prefetchRate = 100.0 * $this->prefetchCount / $this->fetchCount;
	} else {
		// No measurable elapsed time yet; print placeholders instead of rates.
		$pageRate = '-';
		$revRate = '-';
		$etaTimestamp = '-';
		$prefetchRate = '-';
	}

	$this->progress(sprintf(
		"%s: %s %d pages (%0.3f/sec), %d revs (%0.3f/sec), %0.1f%% prefetched, ETA %s [max %d]",
		$timestamp,
		wfWikiID(),
		$this->pageCount,
		$pageRate,
		$this->revCount,
		$revRate,
		$prefetchRate,
		$etaTimestamp,
		$this->maxCount
	));
}
/**
 * Run a WikiImporter over an already-open stream handle.
 *
 * @param resource $handle open stream to read the XML dump from
 * @return mixed result of WikiImporter::doImport()
 */
function importFromHandle($handle) {
	// Remember when the import began, for progress/rate reporting.
	$this->startTime = wfTime();

	$importer = new WikiImporter(new ImportStreamSource($handle));
	$importer->setDebug($this->debug);
	$importer->setPageCallback(array(&$this, 'reportPage'));

	// Keep whatever the callback setters return (presumably the previously
	// registered callbacks — TODO confirm against WikiImporter).
	$this->importCallback = $importer->setRevisionCallback(array(&$this, 'handleRevision'));
	$this->uploadCallback = $importer->setUploadCallback(array(&$this, 'handleUpload'));
	$this->logItemCallback = $importer->setLogItemCallback(array(&$this, 'handleLogItem'));

	return $importer->doImport();
}
/**
 * Run XML dump backups for a range of wikis.
 *
 * Selects wikis from city_list, shells out to maintenance/dumpBackup.php
 * for each one (full and/or current dump), and writes an index.json
 * describing the produced files into each wiki's backup directory.
 *
 * @param int|false $from    lower bound (inclusive) of the city_id range, or false for all
 * @param int|false $to      upper bound (exclusive) of the city_id range, or false for all
 * @param bool      $full    produce the full-history dump (otherwise current revisions)
 * @param array     $options CLI options: "db", "id", "even", "odd", "both", "hide"
 */
function runBackups($from, $to, $full, $options) {
	global $IP, $wgWikiaLocalSettingsPath, $wgWikiaAdminSettingsPath, $wgMaxShellTime, $wgMaxShellFileSize, $wgDumpsDisabledWikis;

	$range = array();

	// shortcut for producing full & current dumps together
	$both = isset($options["both"]) ? true : false;

	// store backup in another folder, not available for users
	$hide = isset($options["hide"]) ? true : false;

	// If an explicit wiki is requested (by db name resolved to an id, or by
	// id directly), collapse the range to that single wiki. "even"/"odd"
	// split the active wikis in half by city_id parity instead.
	if (isset($options["db"]) && is_string($options["db"])) {
		$city_id = WikiFactory::DBtoID($options["db"]);
		if ($city_id) {
			$from = $to = $city_id;
			$to++; // $to is exclusive, so bump it to cover exactly one id
		}
	} elseif (isset($options["id"]) && is_numeric($options["id"])) {
		$from = $to = $options["id"];
		$to++;
	} elseif (isset($options["even"])) {
		$range[] = "city_id % 2 = 0";
		$range[] = "city_public = 1";
	} elseif (isset($options["odd"])) {
		$range[] = "city_id % 2 <> 0";
		$range[] = "city_public = 1";
	} else {
		// default: all wikis, but only active (public) ones
		$range[] = "city_public = 1";
	}

	// exclude wikis with dumps explicitly disabled
	if (!empty($wgDumpsDisabledWikis) && is_array($wgDumpsDisabledWikis)) {
		$range[] = 'city_id NOT IN (' . implode(',', $wgDumpsDisabledWikis) . ')';
	}

	// switch off shell limits for dumps — they can run long and write large files
	$wgMaxShellTime = 0;
	$wgMaxShellFileSize = 0;

	if ($from !== false && $to !== false) {
		$range[] = sprintf("city_id >= %d AND city_id < %d", $from, $to);
		Wikia::log(__METHOD__, "info", "Running from {$from} to {$to}", true, true);
	} else {
		Wikia::log(__METHOD__, "info", "Running for all wikis", true, true);
	}

	$dbw = Wikifactory::db(DB_MASTER);
	$sth = $dbw->select(array("city_list"), array("city_id", "city_dbname"), $range, __METHOD__, array("ORDER BY" => "city_id"));
	while ($row = $dbw->fetchObject($sth)) {
		// get cluster for this wiki
		// NOTE(review): $cluster is assigned but never used below — verify intent
		$cluster = WikiFactory::getVarValueByName("wgDBcluster", $row->city_id);
		$server = wfGetDB(DB_SLAVE, 'dumps', $row->city_dbname)->getProperty("mServer");

		// build and run the dumpBackup.php command(s)
		$status = false;
		$basedir = getDirectory($row->city_dbname, $hide);

		if ($full || $both) {
			// full-history dump
			$path = sprintf("%s/pages_full.xml.gz", $basedir);
			$time = wfTime();
			Wikia::log(__METHOD__, "info", "{$row->city_id} {$row->city_dbname} {$path}", true, true);
			$cmd = array("SERVER_ID={$row->city_id}", "php", "{$IP}/maintenance/dumpBackup.php", "--conf {$wgWikiaLocalSettingsPath}", "--aconf {$wgWikiaAdminSettingsPath}", "--full", "--xml", "--quiet", "--server={$server}", "--output=gzip:{$path}");
			wfShellExec(implode(" ", $cmd), $status);
			$time = Wikia::timeDuration(wfTime() - $time);
			Wikia::log(__METHOD__, "info", "{$row->city_id} {$row->city_dbname} status: {$status}, time: {$time}", true, true);
		}
		if (!$full || $both) {
			// current-revisions dump
			$path = sprintf("%s/pages_current.xml.gz", $basedir);
			$time = wfTime();
			Wikia::log(__METHOD__, "info", "{$row->city_id} {$row->city_dbname} {$path}", true, true);
			$cmd = array("SERVER_ID={$row->city_id}", "php", "{$IP}/maintenance/dumpBackup.php", "--conf {$wgWikiaLocalSettingsPath}", "--aconf {$wgWikiaAdminSettingsPath}", "--current", "--xml", "--quiet", "--server={$server}", "--output=gzip:{$path}");
			wfShellExec(implode(" ", $cmd), $status);
			$time = Wikia::timeDuration(wfTime() - $time);
			Wikia::log(__METHOD__, "info", "{$row->city_id} {$row->city_dbname} status: {$status}, time: {$time}", true, true);
		}

		// generate index.json listing the files now present in the backup dir
		$jsonfile = sprintf("%s/index.json", $basedir);
		$json = array();

		// open dir and read info about files
		if (is_dir($basedir)) {
			$dh = opendir($basedir);
			while (($file = readdir($dh)) !== false) {
				$fullpath = $basedir . "/" . $file;
				if (is_file($fullpath)) {
					$json[$file] = array("name" => $file, "timestamp" => filectime($fullpath), "mwtimestamp" => wfTimestamp(TS_MW, filectime($fullpath)));
				}
			}
			closedir($dh);
		}
		if (count($json)) {
			file_put_contents($jsonfile, json_encode($json));
		}
	}
}
/**
 * Returns a HTML comment with the elapsed time since request.
 * This method has no side effects.
 *
 * @return string
 */
function wfReportTime() {
	global $wgRequestTime, $wgShowHostnames;

	// Time spent serving this request so far, in seconds.
	$elapsed = wfTime() - $wgRequestTime;

	if ($wgShowHostnames) {
		return sprintf('<!-- Served by %s in %01.3f secs. -->', wfHostname(), $elapsed);
	}

	return sprintf('<!-- Served in %01.3f secs. -->', $elapsed);
}
/**
 * Drive a WikiImporter over an open stream handle, wiring up the
 * configured callbacks and options before starting the import.
 *
 * @param resource $handle open stream to read the XML dump from
 * @return mixed result of WikiImporter::doImport()
 */
function importFromHandle($handle) {
	// Note the start time so throughput can be reported later.
	$this->startTime = wfTime();

	$importer = new WikiImporter(new ImportStreamSource($handle));

	if ($this->hasOption('debug')) {
		$importer->setDebug(true);
	}

	$importer->setPageCallback(array(&$this, 'reportPage'));

	// Keep the values returned by the callback setters (presumably the
	// previously registered callbacks — TODO confirm against WikiImporter).
	$this->importCallback = $importer->setRevisionCallback(array(&$this, 'handleRevision'));
	$this->uploadCallback = $importer->setUploadCallback(array(&$this, 'handleUpload'));
	$this->logItemCallback = $importer->setLogItemCallback(array(&$this, 'handleLogItem'));

	if ($this->uploads) {
		$importer->setImportUploads(true);
	}
	if ($this->imageBasePath) {
		$importer->setImageBasePath($this->imageBasePath);
	}
	if ($this->dryRun) {
		// Dry run: clear the page-out callback.
		$importer->setPageOutCallback(null);
	}

	return $importer->doImport();
}
/**
 * Run a WikiImporter over an already-open stream handle.
 *
 * @param resource $handle open stream to read the XML dump from
 * @return mixed result of WikiImporter::doImport()
 */
function importFromHandle($handle) {
	// Record the start time so progress reporting can compute rates.
	$this->startTime = wfTime();

	$source = new ImportStreamSource($handle);
	$importer = new WikiImporter($source);
	$importer->setPageCallback(array(&$this, 'reportPage'));
	$this->importCallback = $importer->setRevisionCallback(array(&$this, 'handleRevision'));

	// Consistency fix: return the import status to the caller, as the other
	// importFromHandle() implementations in this file do. Previously the
	// result of doImport() was silently discarded.
	return $importer->doImport();
}