Example #1
    public static function makeArchiver($url, InputInterface $input) // TODO: remove url from this function
    {
        $archiver = new CronArchive();

        $archiver->disableScheduledTasks = $input->getOption('disable-scheduled-tasks');
        $archiver->acceptInvalidSSLCertificate = $input->getOption("accept-invalid-ssl-certificate");
        $archiver->shouldArchiveAllSites = (bool) $input->getOption("force-all-websites");
        $archiver->shouldStartProfiler = (bool) $input->getOption("xhprof");
        $archiver->shouldArchiveSpecifiedSites = self::getSitesListOption($input, "force-idsites");
        $archiver->shouldSkipSpecifiedSites = self::getSitesListOption($input, "skip-idsites");
        $archiver->forceTimeoutPeriod = $input->getOption("force-timeout-for-periods");
        $archiver->shouldArchiveAllPeriodsSince = $input->getOption("force-all-periods");
        $archiver->restrictToDateRange = $input->getOption("force-date-range");

        $restrictToPeriods = $input->getOption("force-periods");
        $restrictToPeriods = explode(',', $restrictToPeriods);
        $archiver->restrictToPeriods = array_map('trim', $restrictToPeriods);

        $archiver->dateLastForced = $input->getOption('force-date-last-n');
        $archiver->concurrentRequestsPerWebsite = $input->getOption('concurrent-requests-per-website');

        $archiver->disableSegmentsArchiving = $input->getOption('skip-all-segments');

        $segmentIds = $input->getOption('force-idsegments');
        $segmentIds = explode(',', $segmentIds);
        $segmentIds = array_map('trim', $segmentIds);
        $archiver->setSegmentsToForceFromSegmentIds($segmentIds);

        return $archiver;
    }
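A minimal sketch of the configuration this factory would produce for a hypothetical invocation such as --force-idsites=1,2 --force-periods=day,week --concurrent-requests-per-website=3 (the option values are illustrative only; the property names and the comma-split/trim handling come from the factory above):

    // Sketch only: hand-built equivalent of makeArchiver() for the hypothetical options above.
    $archiver = new CronArchive();
    $archiver->shouldArchiveSpecifiedSites  = array(1, 2);          // --force-idsites=1,2
    $archiver->restrictToPeriods            = array('day', 'week'); // --force-periods, split on ',' and trimmed
    $archiver->concurrentRequestsPerWebsite = 3;                    // --concurrent-requests-per-website=3
    $archiver->disableSegmentsArchiving     = false;                // --skip-all-segments not passed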
Example #2
 /**
  * Tracker requests will automatically trigger the Scheduled tasks.
  * This is useful for users who don't set up the cron,
  * but still want daily/weekly/monthly PDF reports emailed automatically.
  *
  * This is similar to calling the API CoreAdminHome.runScheduledTasks
  */
 public function runScheduledTasks()
 {
     $now = time();
      // Currently there are no hourly tasks. When some are added,
      // this minimum interval could be too aggressive (some hours would be skipped in case of low traffic)
      $minimumInterval = TrackerConfig::getConfigValue('scheduled_tasks_min_interval');
      // If the user disabled browser archiving, they have already set up a cron.
      // To avoid parallel requests triggering the Scheduled Tasks,
      // get the last time the tasks started executing.
     $cache = Cache::getCacheGeneral();
     if ($minimumInterval <= 0 || empty($cache['isBrowserTriggerEnabled'])) {
         Common::printDebug("-> Scheduled tasks not running in Tracker: Browser archiving is disabled.");
         return;
     }
     $nextRunTime = $cache['lastTrackerCronRun'] + $minimumInterval;
      if ((defined('DEBUG_FORCE_SCHEDULED_TASKS') && DEBUG_FORCE_SCHEDULED_TASKS) || $cache['lastTrackerCronRun'] === false || $nextRunTime < $now) {
         $cache['lastTrackerCronRun'] = $now;
         Cache::setCacheGeneral($cache);
         Tracker::initCorePiwikInTrackerMode();
         Option::set('lastTrackerCronRun', $cache['lastTrackerCronRun']);
         Common::printDebug('-> Scheduled Tasks: Starting...');
         // save current user privilege and temporarily assume Super User privilege
         $isSuperUser = Piwik::hasUserSuperUserAccess();
         // Scheduled tasks assume Super User is running
         Piwik::setUserHasSuperUserAccess();
         $tokens = CronArchive::getSuperUserTokenAuths();
         $tokenAuth = reset($tokens);
         $invokeScheduledTasksUrl = SettingsPiwik::getPiwikUrl() . "?module=API&format=csv&convertToUnicode=0&method=CoreAdminHome.runScheduledTasks&trigger=archivephp&token_auth={$tokenAuth}";
         $cliMulti = new CliMulti();
         $responses = $cliMulti->request(array($invokeScheduledTasksUrl));
         $resultTasks = reset($responses);
         // restore original user privilege
         Piwik::setUserHasSuperUserAccess($isSuperUser);
         Common::printDebug($resultTasks);
         Common::printDebug('Finished Scheduled Tasks.');
     } else {
         Common::printDebug("-> Scheduled tasks not triggered.");
     }
     Common::printDebug("Next run will be from: " . date('Y-m-d H:i:s', $nextRunTime) . ' UTC');
 }
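The docblock above notes that this is similar to calling the API method CoreAdminHome.runScheduledTasks. A minimal sketch of doing that manually over HTTP, reusing the URL format built in the method; the base URL, token and timeout are placeholders, and Http::sendHttpRequest() is Piwik's generic HTTP helper (its use here is an assumption, not taken from the example):

// Sketch only: trigger the scheduled tasks by hand via the API endpoint used above.
$piwikUrl  = 'http://piwik.example.org/';   // placeholder install URL
$tokenAuth = 'a-super-user-token-auth';     // placeholder token
$url = $piwikUrl
     . '?module=API&format=csv&convertToUnicode=0'
     . '&method=CoreAdminHome.runScheduledTasks'
     . '&trigger=archivephp&token_auth=' . $tokenAuth;
$response = \Piwik\Http::sendHttpRequest($url, 600); // 600 second timeout
Common::printDebug($response);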
Example #3
 /**
  * Tracker requests will automatically trigger the Scheduled tasks.
  * This is useful for users who don't set up the cron,
  * but still want daily/weekly/monthly PDF reports emailed automatically.
  *
  * This is similar to calling the API CoreAdminHome.runScheduledTasks
  */
 protected static function runScheduledTasks()
 {
     $now = time();
      // Currently there are no hourly tasks. When some are added,
      // this minimum interval could be too aggressive (some hours would be skipped in case of low traffic)
      $minimumInterval = Config::getInstance()->Tracker['scheduled_tasks_min_interval'];
      // If the user disabled browser archiving, they have already set up a cron.
      // To avoid parallel requests triggering the Scheduled Tasks,
      // get the last time the tasks started executing.
     $cache = Cache::getCacheGeneral();
     if ($minimumInterval <= 0 || empty($cache['isBrowserTriggerEnabled'])) {
         Common::printDebug("-> Scheduled tasks not running in Tracker: Browser archiving is disabled.");
         return;
     }
     $nextRunTime = $cache['lastTrackerCronRun'] + $minimumInterval;
      if ((isset($GLOBALS['PIWIK_TRACKER_DEBUG_FORCE_SCHEDULED_TASKS']) && $GLOBALS['PIWIK_TRACKER_DEBUG_FORCE_SCHEDULED_TASKS']) || $cache['lastTrackerCronRun'] === false || $nextRunTime < $now) {
         $cache['lastTrackerCronRun'] = $now;
         Cache::setCacheGeneral($cache);
         self::initCorePiwikInTrackerMode();
         Option::set('lastTrackerCronRun', $cache['lastTrackerCronRun']);
         Common::printDebug('-> Scheduled Tasks: Starting...');
         // save current user privilege and temporarily assume Super User privilege
         $isSuperUser = Piwik::hasUserSuperUserAccess();
         // Scheduled tasks assume Super User is running
         Piwik::setUserHasSuperUserAccess();
          // While each plugin should ensure that the necessary languages are loaded,
          // we make sure that at least the English translations are loaded
         Translate::loadEnglishTranslation();
         ob_start();
         CronArchive::$url = SettingsPiwik::getPiwikUrl();
         $cronArchive = new CronArchive();
         $cronArchive->runScheduledTasksInTrackerMode();
         $resultTasks = ob_get_contents();
         ob_clean();
         // restore original user privilege
         Piwik::setUserHasSuperUserAccess($isSuperUser);
         foreach (explode('</pre>', $resultTasks) as $resultTask) {
             Common::printDebug(str_replace('<pre>', '', $resultTask));
         }
         Common::printDebug('Finished Scheduled Tasks.');
     } else {
         Common::printDebug("-> Scheduled tasks not triggered.");
     }
     Common::printDebug("Next run will be from: " . date('Y-m-d H:i:s', $nextRunTime) . ' UTC');
 }
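For testing, the condition above honors a global debug flag before checking the minimum interval. A minimal sketch of forcing the Tracker-triggered tasks to run on the next request (where this flag is set, e.g. a local bootstrap file, is up to you and not specified by the example):

// Sketch only: force scheduled tasks to run regardless of scheduled_tasks_min_interval.
// Note that the early return above still requires browser archiving to be enabled
// and a positive minimum interval.
$GLOBALS['PIWIK_TRACKER_DEBUG_FORCE_SCHEDULED_TASKS'] = true;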
Example #4
use Exception;
/*
Ideas for improvements:
    - Known limitation: when new segments to preprocess are added, the script will assume that data
      was already processed for these segments in the past.
      Workaround: run --force-all-websites --force-all-periods=10000000 to archive everything.
    - Possible performance improvement:
      - Process first the websites which are fastest to process (weighted by visits and/or time to generate the last daily report).
        This would make sure that huge websites do not 'block' processing of smaller websites' reports.
*/
if (!defined('PIWIK_INCLUDE_PATH')) {
    define('PIWIK_INCLUDE_PATH', realpath(dirname(__FILE__) . "/../.."));
}
if (!defined('PIWIK_USER_PATH')) {
    define('PIWIK_USER_PATH', PIWIK_INCLUDE_PATH);
}
define('PIWIK_ENABLE_DISPATCH', false);
define('PIWIK_ENABLE_ERROR_HANDLER', false);
define('PIWIK_ENABLE_SESSION_START', false);
if (!defined('PIWIK_MODE_ARCHIVE')) {
    define('PIWIK_MODE_ARCHIVE', true);
}
require_once PIWIK_INCLUDE_PATH . "/index.php";
$archiving = new CronArchive();
try {
    $archiving->init();
    $archiving->run();
    $archiving->runScheduledTasks();
    $archiving->end();
} catch (Exception $e) {
    $archiving->logFatalError($e->getMessage());
}
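Because CronArchive exposes its options as public properties (see Example #1) and a static $url (see Example #3), the same bootstrap can be customized before init(). A minimal sketch, with a placeholder URL; skipping runScheduledTasks() here is just an illustrative choice:

// Sketch only: same bootstrap, with a pinned URL and scheduled tasks disabled.
CronArchive::$url = 'http://piwik.example.org/';  // placeholder; static $url as set in Example #3
$archiving = new CronArchive();
$archiving->disableScheduledTasks = true;         // property shown in Example #1
try {
    $archiving->init();
    $archiving->run();
    $archiving->end();
} catch (Exception $e) {
    $archiving->logFatalError($e->getMessage());
}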
Example #5
 /**
  * Initiates cron archiving via web request.
  *
  * @hideExceptForSuperUser
  */
 public function runCronArchiving()
 {
     Piwik::checkUserHasSuperUserAccess();
      // HTTP request: logs need to be dumped in the HTTP response (on top of the existing log destinations)
     /** @var \Monolog\Logger $logger */
     $logger = StaticContainer::get('Psr\\Log\\LoggerInterface');
     $handler = new StreamHandler('php://output', Logger::INFO);
     $handler->setFormatter(StaticContainer::get('Piwik\\Plugins\\Monolog\\Formatter\\LineMessageFormatter'));
     $logger->pushHandler($handler);
     $archiver = new CronArchive();
     $archiver->main();
 }
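Since this is an API method restricted to Super Users, it can also be invoked programmatically. A minimal sketch using Piwik's internal API dispatcher; Request::processRequest() is not shown in the example and its use here is an assumption:

// Sketch only: trigger the same cron archiving run through the API layer.
// This requires an authenticated Super User context, as enforced by
// Piwik::checkUserHasSuperUserAccess() in the method above.
$response = \Piwik\API\Request::processRequest('CoreAdminHome.runCronArchiving');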
Example #6
    public function test_output()
    {
        \Piwik\Tests\Framework\Mock\FakeCliMulti::$specifiedResults = array('/method=API.get/' => serialize(array(array('nb_visits' => 1))));
        Fixture::createWebsite('2014-12-12 00:01:02');
        SegmentAPI::getInstance()->add('foo', 'actions>=2', 1, true, true);
        SegmentAPI::getInstance()->add('burr', 'actions>=4', 1, true, true);
        $logger = new FakeLogger();
        $archiver = new CronArchive(null, $logger);
        $archiver->shouldArchiveAllSites = true;
        $archiver->shouldArchiveAllPeriodsSince = true;
        $archiver->segmentsToForce = array('actions>=2;browserCode=FF', 'actions>=2');
        $archiver->init();
        $archiver->run();
        $expected = <<<LOG
---------------------------
INIT
Running Piwik %s as Super User
---------------------------
NOTES
- If you execute this script at least once per hour (or more often) in a crontab, you may disable 'Browser trigger archiving' in Piwik UI > Settings > General Settings.
  See the doc at: http://piwik.org/docs/setup-auto-archiving/
- Reports for today will be processed at most every %s seconds. You can change this value in Piwik UI > Settings > General Settings.
- Reports for the current week/month/year will be refreshed at most every %s seconds.
- Will process all 1 websites
- Limiting segment archiving to following segments:
  * actions>=2;browserCode=FF
  * actions>=2
---------------------------
START
Starting Piwik reports archiving...
Will pre-process for website id = 1, day period
- pre-processing all visits
- skipping segment archiving for 'actions>=4'.
- pre-processing segment 1/1 actions>=2
Archived website id = 1, period = day, 1 segments, 0 visits in last %s days, 0 visits today, Time elapsed: %s
Will pre-process for website id = 1, week period
- pre-processing all visits
- skipping segment archiving for 'actions>=4'.
- pre-processing segment 1/1 actions>=2
Archived website id = 1, period = week, 1 segments, 1 visits in last %s weeks, 1 visits this week, Time elapsed: %s
Will pre-process for website id = 1, month period
- pre-processing all visits
- skipping segment archiving for 'actions>=4'.
- pre-processing segment 1/1 actions>=2
Archived website id = 1, period = month, 1 segments, 1 visits in last %s months, 1 visits this month, Time elapsed: %s
Will pre-process for website id = 1, year period
- pre-processing all visits
- skipping segment archiving for 'actions>=4'.
- pre-processing segment 1/1 actions>=2
Archived website id = 1, period = year, 1 segments, 1 visits in last %s years, 1 visits this year, Time elapsed: %s
Archived website id = 1, %s API requests, Time elapsed: %s [1/1 done]
Done archiving!
---------------------------
SUMMARY
Total visits for today across archived websites: 1
Archived today's reports for 1 websites
Archived week/month/year for 1 websites
Skipped 0 websites: no new visit since the last script execution
Skipped 0 websites day archiving: existing daily reports are less than 150 seconds old
Skipped 0 websites week/month/year archiving: existing periods reports are less than 3600 seconds old
Total API requests: %s
done: 1/1 100%, 1 vtoday, 1 wtoday, 1 wperiods, %s req, %s ms, no error
Time elapsed: %s

LOG;
        $this->assertStringMatchesFormat($expected, $logger->output);
    }
    public function test_shouldNotStopProcessingWhenOneSiteIsInvalid()
    {
        \Piwik\Tests\Framework\Mock\FakeCliMulti::$specifiedResults = array('/method=API.get/' => serialize(array(array('nb_visits' => 1))));
        Fixture::createWebsite('2014-12-12 00:01:02');
        $logger = new FakeLogger();
        $archiver = new CronArchive(null, $logger);
        $archiver->shouldArchiveSpecifiedSites = array(99999, 1);
        $archiver->init();
        $archiver->run();
        $expected = <<<LOG
- Will process 2 websites (--force-idsites)
Will ignore websites and help finish a previous started queue instead. IDs: 1
---------------------------
START
Starting Piwik reports archiving...
Will pre-process for website id = 1, period = day, date = last52
- pre-processing all visits
LOG;
        $this->assertContains($expected, $logger->output);
    }