/**
 * Run queued jobs, optionally forking multiple worker processes first.
 *
 * Honors --procs (1..1000 forked workers), --result=json (machine-readable
 * output instead of debug lines), --type, --maxjobs, --maxtime and
 * --nothrottle. Delegates the actual work to JobRunner.
 */
public function execute() {
	global $wgCommandLineMode;

	// --procs: fork the requested number of workers; the parent exits and
	// each child carries on below.
	if ( $this->hasOption( 'procs' ) ) {
		$forkCount = intval( $this->getOption( 'procs' ) );
		if ( $forkCount < 1 || $forkCount > 1000 ) {
			$this->error( "Invalid argument to --procs", true );
		} elseif ( $forkCount != 1 ) {
			$forker = new ForkController( $forkCount );
			if ( $forker->start() != 'child' ) {
				exit( 0 ); // parent process
			}
		}
	}

	$wantJson = ( $this->getOption( 'result' ) === 'json' );

	// Enable DBO_TRX for atomicity; JobRunner manages transactions
	// and works well in web server mode already (@TODO: this is a hack)
	$wgCommandLineMode = false;

	$runner = new JobRunner( LoggerFactory::getInstance( 'runJobs' ) );
	if ( !$wantJson ) {
		// Human-readable mode: stream per-job debug output as jobs run
		$runner->setDebugHandler( array( $this, 'debugInternal' ) );
	}
	$response = $runner->run( array(
		'type' => $this->getOption( 'type', false ),
		'maxJobs' => $this->getOption( 'maxjobs', false ),
		'maxTime' => $this->getOption( 'maxtime', false ),
		'throttle' => !$this->hasOption( 'nothrottle' ),
	) );
	if ( $wantJson ) {
		$this->output( FormatJson::encode( $response, true ) );
	}

	// Restore the normal CLI setting
	$wgCommandLineMode = true;
}
/**
 * Run queued jobs unless the wiki is in read-only mode.
 *
 * Honors --procs (1..1000 forked workers), --result=json, --type,
 * --maxjobs, --maxtime and --nothrottle; delegates the work to JobRunner.
 */
public function execute() {
	// Jobs perform writes; refuse to run while the wiki is read-only.
	if ( wfReadOnly() ) {
		$this->error( "Unable to run jobs; the wiki is in read-only mode.", 1 ); // die
	}

	// --procs: fork the requested number of workers; the parent exits and
	// each child carries on below.
	if ( $this->hasOption( 'procs' ) ) {
		$procCount = intval( $this->getOption( 'procs' ) );
		if ( $procCount < 1 || $procCount > 1000 ) {
			$this->error( "Invalid argument to --procs", true );
		} elseif ( $procCount != 1 ) {
			$controller = new ForkController( $procCount );
			if ( $controller->start() != 'child' ) {
				exit( 0 ); // parent process
			}
		}
	}

	$wantsJson = ( $this->getOption( 'result' ) === 'json' );

	$runner = new JobRunner();
	if ( !$wantsJson ) {
		// Human-readable mode: stream per-job debug output as jobs run
		$runner->setDebugHandler( array( $this, 'debugInternal' ) );
	}
	$response = $runner->run( array(
		'type' => $this->getOption( 'type', false ),
		'maxJobs' => $this->getOption( 'maxjobs', false ),
		'maxTime' => $this->getOption( 'maxtime', false ),
		'throttle' => !$this->hasOption( 'nothrottle' ),
	) );
	if ( $wantsJson ) {
		$this->output( FormatJson::encode( $response, true ) );
	}
}
/**
 * Handle a signed job-execution request (Special:RunJobs).
 *
 * Verifies an HMAC signature and expiry over the POSTed parameters, then
 * runs jobs via JobRunner. In async mode an HTTP 202 is flushed first so
 * the client can disconnect while jobs run.
 *
 * @param string $par Unused subpage parameter
 */
public function execute( $par = '' ) {
	// Raw output; this endpoint emits plain text, not skinned HTML
	$this->getOutput()->disable();

	if ( wfReadOnly() ) {
		// HTTP 423 Locked
		HttpStatus::header( 423 );
		print 'Wiki is in read-only mode';
		return;
	} elseif ( !$this->getRequest()->wasPosted() ) {
		HttpStatus::header( 400 );
		print 'Request must be POSTed';
		return;
	}

	$optional = array( 'maxjobs' => 0, 'maxtime' => 30, 'type' => false, 'async' => true );
	$required = array_flip( array( 'title', 'tasks', 'signature', 'sigexpiry' ) );

	$params = array_intersect_key( $this->getRequest()->getValues(), $required + $optional );
	$missing = array_diff_key( $required, $params );
	if ( count( $missing ) ) {
		HttpStatus::header( 400 );
		print 'Missing parameters: ' . implode( ', ', array_keys( $missing ) );
		return;
	}

	// Verify the signature over the query (minus the signature itself),
	// using a constant-time comparison, and reject stale signatures.
	$squery = $params;
	unset( $squery['signature'] );
	$correctSignature = self::getQuerySignature( $squery, $this->getConfig()->get( 'SecretKey' ) );
	$providedSignature = $params['signature'];
	$verified = is_string( $providedSignature )
		&& hash_equals( $correctSignature, $providedSignature );
	if ( !$verified || $params['sigexpiry'] < time() ) {
		HttpStatus::header( 400 );
		print 'Invalid or stale signature provided';
		return;
	}

	// Apply any default parameter values
	$params += $optional;

	if ( $params['async'] ) {
		// Client will usually disconnect before checking the response,
		// but it needs to know when it is safe to disconnect. Until this
		// reaches ignore_user_abort(), it is not safe as the jobs won't run.
		ignore_user_abort( true ); // jobs may take a bit of time
		// HTTP 202 Accepted
		HttpStatus::header( 202 );
		ob_flush();
		flush();
		// Once the client receives this response, it can disconnect
	}

	// Do all of the specified tasks...
	if ( in_array( 'jobs', explode( '|', $params['tasks'] ) ) ) {
		$runner = new JobRunner( LoggerFactory::getInstance( 'runJobs' ) );
		$response = $runner->run( array(
			'type' => $params['type'],
			'maxJobs' => $params['maxjobs'] ? $params['maxjobs'] : 1,
			// Fixed: this previously passed $params['maxjobs'] by mistake,
			// clobbering the time limit with the job-count limit.
			'maxTime' => $params['maxtime'] ? $params['maxtime'] : 30
		) );
		if ( !$params['async'] ) {
			print FormatJson::encode( $response, true );
		}
	}
}
/**
 * Run pending jobs, optionally forking workers (--procs) and optionally
 * looping ("--wait") until the runner reports hitting one of its limits.
 */
public function execute() {
	global $wgCommandLineMode;

	// --procs: fork N worker processes (1..1000); the parent exits
	// immediately and each child continues below.
	if ($this->hasOption('procs')) {
		$procs = intval($this->getOption('procs'));
		if ($procs < 1 || $procs > 1000) {
			$this->error("Invalid argument to --procs", true);
		} elseif ($procs != 1) {
			$fc = new ForkController($procs);
			if ($fc->start() != 'child') {
				exit(0); // parent process; children fall through
			}
		}
	}

	$outputJSON = $this->getOption('result') === 'json';
	$wait = $this->hasOption('wait');

	// Enable DBO_TRX for atomicity; JobRunner manages transactions
	// and works well in web server mode already (@TODO: this is a hack)
	$wgCommandLineMode = false;

	$runner = new JobRunner(LoggerFactory::getInstance('runJobs'));
	if (!$outputJSON) {
		// Human-readable mode: stream per-job debug output as jobs run
		$runner->setDebugHandler([$this, 'debugInternal']);
	}

	$type = $this->getOption('type', false);
	$maxJobs = $this->getOption('maxjobs', false);
	$maxTime = $this->getOption('maxtime', false);
	$throttle = !$this->hasOption('nothrottle');

	while (true) {
		$response = $runner->run(['type' => $type,
			'maxJobs' => $maxJobs,
			'maxTime' => $maxTime,
			'throttle' => $throttle]);
		if ($outputJSON) {
			$this->output(FormatJson::encode($response, true));
		}
		// Stop after one pass unless --wait was given; with --wait, also
		// stop once the runner reports reaching any of its own limits.
		if (!$wait
			|| $response['reached'] === 'time-limit'
			|| $response['reached'] === 'job-limit'
			|| $response['reached'] === 'memory-limit'
		) {
			break;
		}
		// Carry the remaining job budget over into the next iteration
		if ($maxJobs !== false) {
			$maxJobs -= count($response['jobs']);
		}
		sleep(1); // avoid busy-polling an empty queue
	}

	// Restore the normal CLI setting
	$wgCommandLineMode = true;
}
/**
 * Run pending jobs, optionally forking workers (--procs) and optionally
 * looping ("--wait") until the runner reports hitting one of its limits.
 */
public function execute() {
	// --procs: fork the requested number of workers (1..1000); the parent
	// exits immediately and each child carries on below.
	if ( $this->hasOption( 'procs' ) ) {
		$workerCount = intval( $this->getOption( 'procs' ) );
		if ( $workerCount < 1 || $workerCount > 1000 ) {
			$this->error( "Invalid argument to --procs", true );
		} elseif ( $workerCount != 1 ) {
			$forker = new ForkController( $workerCount );
			if ( $forker->start() != 'child' ) {
				exit( 0 ); // parent process
			}
		}
	}

	$jsonOutput = ( $this->getOption( 'result' ) === 'json' );
	$keepWaiting = $this->hasOption( 'wait' );

	$runner = new JobRunner( LoggerFactory::getInstance( 'runJobs' ) );
	if ( !$jsonOutput ) {
		// Human-readable mode: stream per-job debug output as jobs run
		$runner->setDebugHandler( [ $this, 'debugInternal' ] );
	}

	$jobType = $this->getOption( 'type', false );
	$jobBudget = $this->getOption( 'maxjobs', false );
	$timeLimit = $this->getOption( 'maxtime', false );
	$useThrottle = !$this->hasOption( 'nothrottle' );

	do {
		$response = $runner->run( [
			'type' => $jobType,
			'maxJobs' => $jobBudget,
			'maxTime' => $timeLimit,
			'throttle' => $useThrottle,
		] );
		if ( $jsonOutput ) {
			$this->output( FormatJson::encode( $response, true ) );
		}
		// Stop after one pass unless --wait was given; with --wait, also
		// stop once the runner reports reaching any of its own limits.
		$hitLimit = in_array(
			$response['reached'],
			[ 'time-limit', 'job-limit', 'memory-limit' ],
			true
		);
		if ( !$keepWaiting || $hitLimit ) {
			break;
		}
		// Carry the remaining job budget over into the next iteration
		if ( $jobBudget !== false ) {
			$jobBudget -= count( $response['jobs'] );
		}
		sleep( 1 ); // avoid busy-polling an empty queue
	} while ( true );
}
/**
 * Potentially open a socket and send an HTTP request back to the server
 * to run a specified number of jobs. This registers a callback to cleanup
 * the socket once it's done.
 */
public function triggerJobs() {
	$jobRunRate = $this->config->get('JobRunRate');
	if ($this->getTitle()->isSpecial('RunJobs')) {
		return; // recursion guard
	} elseif ($jobRunRate <= 0 || wfReadOnly()) {
		return;
	}

	// Interpret $wgJobRunRate: a fractional rate triggers one job run on a
	// random fraction of requests; rates >= 1 run that many jobs per request.
	if ($jobRunRate < 1) {
		$max = mt_getrandmax();
		if (mt_rand(0, $max) > $max * $jobRunRate) {
			return; // the higher the job run rate, the less likely we return here
		}
		$n = 1;
	} else {
		$n = intval($jobRunRate);
	}

	$runJobsLogger = LoggerFactory::getInstance('runJobs');

	// Fall back to running the job(s) while the user waits if needed
	if (!$this->config->get('RunJobsAsync')) {
		$runner = new JobRunner($runJobsLogger);
		$runner->run(['maxJobs' => $n]);
		return;
	}

	// Do not send request if there are probably no jobs
	try {
		$group = JobQueueGroup::singleton();
		if (!$group->queuesHaveJobs(JobQueueGroup::TYPE_DEFAULT)) {
			return;
		}
	} catch (JobQueueError $e) {
		MWExceptionHandler::logException($e);
		return; // do not make the site unavailable
	}

	// Build a short-lived signed query so Special:RunJobs will accept it
	$query = ['title' => 'Special:RunJobs',
		'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5];
	$query['signature'] = SpecialRunJobs::getQuerySignature(
		$query, $this->config->get('SecretKey'));

	$errno = $errstr = null;
	// Determine host/port from $wgCanonicalServer; wfParseUrl() may return
	// false, in which case $host stays null and no socket is opened below.
	$info = wfParseUrl($this->config->get('CanonicalServer'));
	$host = $info ? $info['host'] : null;
	$port = 80;
	if (isset($info['scheme']) && $info['scheme'] == 'https') {
		// Use the TLS stream wrapper for HTTPS servers
		$host = "tls://" . $host;
		$port = 443;
	}
	if (isset($info['port'])) {
		$port = $info['port'];
	}

	MediaWiki\suppressWarnings();
	$sock = $host ? fsockopen(
		$host,
		$port,
		$errno,
		$errstr,
		// NOTE(review): 100ms connect timeout to ourselves — presumably
		// anything longer indicates a local problem; confirm intent.
		0.1
	) : false;
	MediaWiki\restoreWarnings();

	$invokedWithSuccess = true;
	if ($sock) {
		$special = SpecialPageFactory::getPage('RunJobs');
		$url = $special->getPageTitle()->getCanonicalURL($query);
		$req = "POST {$url} HTTP/1.1\r\n" .
			"Host: {$info['host']}\r\n" .
			"Connection: Close\r\n" .
			"Content-Length: 0\r\n\r\n";

		$runJobsLogger->info("Running {$n} job(s) via '{$url}'");
		// Send a cron API request to be performed in the background.
		// Give up if this takes too long to send (which should be rare).
		stream_set_timeout($sock, 2);
		$bytes = fwrite($sock, $req);
		if ($bytes !== strlen($req)) {
			$invokedWithSuccess = false;
			$runJobsLogger->error("Failed to start cron API (socket write error)");
		} else {
			// Do not wait for the response (the script should handle client aborts).
			// Make sure that we don't close before that script reaches ignore_user_abort().
			$start = microtime(true);
			$status = fgets($sock);
			$sec = microtime(true) - $start;
			if (!preg_match('#^HTTP/\\d\\.\\d 202 #', $status)) {
				$invokedWithSuccess = false;
				$runJobsLogger->error("Failed to start cron API: received '{$status}' ({$sec})");
			}
		}
		fclose($sock);
	} else {
		$invokedWithSuccess = false;
		$runJobsLogger->error("Failed to start cron API (socket error {$errno}): {$errstr}");
	}

	// Fall back to running the job(s) while the user waits if needed
	if (!$invokedWithSuccess) {
		$runJobsLogger->warning("Jobs switched to blocking; Special:RunJobs disabled");
		$runner = new JobRunner($runJobsLogger);
		$runner->run(['maxJobs' => $n]);
	}
}
/**
 * Potentially open a socket and send an HTTP request back to the server
 * to run a specified number of jobs. This registers a callback to cleanup
 * the socket once it's done.
 */
public function triggerJobs() {
	$jobRunRate = $this->config->get('JobRunRate');
	if ($jobRunRate <= 0 || wfReadOnly()) {
		return;
	} elseif ($this->getTitle()->isSpecial('RunJobs')) {
		return; // recursion guard
	}

	// Interpret $wgJobRunRate: a fractional rate triggers one job run on a
	// random fraction of requests; rates >= 1 run that many jobs per request.
	if ($jobRunRate < 1) {
		$max = mt_getrandmax();
		if (mt_rand(0, $max) > $max * $jobRunRate) {
			return; // the higher the job run rate, the less likely we return here
		}
		$n = 1;
	} else {
		$n = intval($jobRunRate);
	}

	$runJobsLogger = LoggerFactory::getInstance('runJobs');

	if (!$this->config->get('RunJobsAsync')) {
		// Fall back to running the job here while the user waits
		$runner = new JobRunner($runJobsLogger);
		$runner->run(array('maxJobs' => $n));
		return;
	}

	try {
		if (!JobQueueGroup::singleton()->queuesHaveJobs(JobQueueGroup::TYPE_DEFAULT)) {
			return; // do not send request if there are probably no jobs
		}
	} catch (JobQueueError $e) {
		MWExceptionHandler::logException($e);
		return; // do not make the site unavailable
	}

	// Build a short-lived signed query that Special:RunJobs will accept
	$query = array('title' => 'Special:RunJobs',
		'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5);
	$query['signature'] = SpecialRunJobs::getQuerySignature(
		$query, $this->config->get('SecretKey'));

	$errno = $errstr = null;
	$info = wfParseUrl($this->config->get('Server'));
	// Fixed: wfParseUrl() returns false for unparsable URLs; previously
	// $info['host'] was read unconditionally, producing a bogus
	// fsockopen() call. Fall back to a synchronous run instead.
	if (!$info) {
		$runJobsLogger->error("Failed to start cron API (could not parse \$wgServer)");
		// Fall back to running the job here while the user waits
		$runner = new JobRunner($runJobsLogger);
		$runner->run(array('maxJobs' => $n));
		return;
	}

	MediaWiki\suppressWarnings();
	$sock = fsockopen(
		$info['host'],
		isset($info['port']) ? $info['port'] : 80,
		$errno,
		$errstr,
		0.1 // short connect timeout; we are connecting to ourselves
	);
	MediaWiki\restoreWarnings();
	if (!$sock) {
		$runJobsLogger->error("Failed to start cron API (socket error {$errno}): {$errstr}");
		// Fall back to running the job here while the user waits
		$runner = new JobRunner($runJobsLogger);
		$runner->run(array('maxJobs' => $n));
		return;
	}

	$url = wfAppendQuery(wfScript('index'), $query);
	$req = "POST {$url} HTTP/1.1\r\n" .
		"Host: {$info['host']}\r\n" .
		"Connection: Close\r\n" .
		"Content-Length: 0\r\n\r\n";

	$runJobsLogger->info("Running {$n} job(s) via '{$url}'");
	// Send a cron API request to be performed in the background.
	// Give up if this takes too long to send (which should be rare).
	stream_set_timeout($sock, 1);
	$bytes = fwrite($sock, $req);
	if ($bytes !== strlen($req)) {
		$runJobsLogger->error("Failed to start cron API (socket write error)");
	} else {
		// Do not wait for the response (the script should handle client aborts).
		// Make sure that we don't close before that script reaches ignore_user_abort().
		$status = fgets($sock);
		if (!preg_match('#^HTTP/\\d\\.\\d 202 #', $status)) {
			$runJobsLogger->error("Failed to start cron API: received '{$status}'");
		}
	}
	fclose($sock);
}
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @author Aaron Schulz
 */

// HTTP entry point for running jobs: only loopback POST requests are
// accepted, since running jobs is a privileged local operation.
if (!in_array($_SERVER['REMOTE_ADDR'], array('127.0.0.1', '0:0:0:0:0:0:0:1', '::1'), true)) {
	die("Only loopback requests are allowed.\n");
} elseif ($_SERVER['REQUEST_METHOD'] !== 'POST') {
	die("Request must use POST.\n");
}

// Bootstrap the target wiki (multiversion farm setup); the wiki is
// selected via the "wiki" query parameter.
require_once __DIR__ . '/../multiversion/MWVersion.php';
$wiki = isset($_GET['wiki']) ? $_GET['wiki'] : '';
require getMediaWiki('includes/WebStart.php', $wiki);

error_reporting(E_ERROR); // fatals but not random I/O warnings
ini_set('display_errors', 1);
$wgShowExceptionDetails = true;

if (method_exists('LBFactory', 'disableChronologyProtection')) {
	// This is not helpful here and will slow things down in some cases
	wfGetLBFactory()->disableChronologyProtection();
}

try {
	$mediawiki = new MediaWiki();
	$runner = new JobRunner();
	// Run jobs per the optional "type"/"maxjobs"/"maxtime" query parameters
	// and report the runner's response as pretty-printed JSON.
	$response = $runner->run(array(
		'type' => isset($_GET['type']) ? $_GET['type'] : false,
		'maxJobs' => isset($_GET['maxjobs']) ? $_GET['maxjobs'] : false,
		'maxTime' => isset($_GET['maxtime']) ? $_GET['maxtime'] : 30));
	print json_encode($response, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES | JSON_UNESCAPED_UNICODE);
	$mediawiki->restInPeace(); // run deferred updates and shut down
} catch (Exception $e) {
	MWExceptionHandler::handleException($e);
}