getPurgeDataSettings() public static method

Returns the settings for the data purging feature.
public static getPurgeDataSettings() : array
return array The purge settings, keyed by option name.
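A minimal usage sketch (not taken from the examples below; the fully qualified class name \Piwik\Plugins\PrivacyManager\PrivacyManager is assumed): call the method statically and read the returned settings array, whose keys include the options used throughout the examples.

 use Piwik\Plugins\PrivacyManager\PrivacyManager;

 $settings = PrivacyManager::getPurgeDataSettings();

 // keys such as 'delete_logs_enable' and 'delete_logs_older_than' appear in the examples below
 if ($settings['delete_logs_enable']) {
     // raw log data older than this many days is scheduled for deletion
     $retentionInDays = (int) $settings['delete_logs_older_than'];
 }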
Example #1
 public function test_getPurgeDataSettings_shouldAlsoUseOptionValuesIfUIisEnabled()
 {
     $this->setUIEnabled(true);
     $settings = $this->manager->getPurgeDataSettings();
     $expected = $this->getDefaultPurgeSettings();
     $expected['delete_logs_enable'] = 1;
     $expected['delete_logs_older_than'] = 270;
     $expected['delete_reports_keep_week_reports'] = 1;
     $this->assertEquals($expected, $settings);
 }
Example #2
 public function redirectDashboardToWelcomePage(&$module, &$action)
 {
     if ($module !== 'CoreHome' || $action !== 'index') {
         return;
     }
     $siteId = Common::getRequestVar('idSite', false, 'int');
     if (!$siteId) {
         return;
     }
     // Skip the screen if purging logs is enabled
     $settings = PrivacyManager::getPurgeDataSettings();
     if ($settings['delete_logs_enable'] == 1) {
         return;
     }
     // if the site has not tracked any data yet, show the "site without data" page instead of the dashboard
     $trackerModel = new TrackerModel();
     if ($trackerModel->isSiteEmpty($siteId)) {
         $module = 'SitesManager';
         $action = 'siteWithoutData';
     }
 }
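The handler above is presumably registered as an event hook; a hedged sketch of how a Piwik 2.x plugin could wire it up (the 'Request.dispatch' event name is an assumption based on the by-reference $module / $action parameters):

 public function getListHooksRegistered()
 {
     return array(
         'Request.dispatch' => 'redirectDashboardToWelcomePage',
     );
 }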
Example #3
 /**
  * When tracking data in the past (using the Tracking API), this function
  * can be used to invalidate reports for the idSites and dates where new data
  * was added.
  * DEV: If you call this API, the UI should display the data correctly, but reports will be
  *      processed in real time, which can be very slow after large data imports.
  *      After calling this function via REST, you can manually force all data
  *      to be reprocessed by visiting the script as the Super User:
  *      http://example.net/piwik/misc/cron/archive.php?token_auth=$SUPER_USER_TOKEN_AUTH_HERE
  * REQUIREMENTS: On large Piwik setups, you will need to set max_execution_time = 0 in your PHP configuration.
  *    We recommend running the script on an hourly schedule.
  *    More information: http://piwik.org/setup-auto-archiving/
  *
  * @param string $idSites Comma separated list of idSite that have had data imported for the specified dates
  * @param string $dates Comma separated list of dates to invalidate for all these websites
  * @throws Exception
  * @return array
  */
 public function invalidateArchivedReports($idSites, $dates)
 {
     $idSites = Site::getIdSitesFromIdSitesString($idSites);
     if (empty($idSites)) {
         throw new Exception("Specify a value for &idSites= as a comma separated list of website IDs, for which your token_auth has 'admin' permission");
     }
     Piwik::checkUserHasAdminAccess($idSites);
     // Ensure the specified dates are valid
     $toInvalidate = $invalidDates = array();
     $dates = explode(',', trim($dates));
     $dates = array_unique($dates);
     foreach ($dates as $theDate) {
         $theDate = trim($theDate);
         try {
             $date = Date::factory($theDate);
         } catch (Exception $e) {
             $invalidDates[] = $theDate;
             continue;
         }
         if ($date->toString() == $theDate) {
             $toInvalidate[] = $date;
         } else {
             $invalidDates[] = $theDate;
         }
     }
     // If using the feature "Delete logs older than N days"...
     $purgeDataSettings = PrivacyManager::getPurgeDataSettings();
     $logsAreDeletedBeforeThisDate = $purgeDataSettings['delete_logs_older_than'];
     $logsDeleteEnabled = $purgeDataSettings['delete_logs_enable'];
     $minimumDateWithLogs = false;
     if ($logsDeleteEnabled && $logsAreDeletedBeforeThisDate) {
         $minimumDateWithLogs = Date::factory('today')->subDay($logsAreDeletedBeforeThisDate);
     }
     // Given the list of dates, process which tables they should be deleted from
     $minDate = false;
     $warningDates = $processedDates = $datesByMonth = array();
     /* @var $date Date */
     foreach ($toInvalidate as $date) {
         // only invalidate reports for dates that still have raw log data (newer than the log deletion threshold)
         if ($minimumDateWithLogs && $date->isEarlier($minimumDateWithLogs)) {
             $warningDates[] = $date->toString();
         } else {
             $processedDates[] = $date->toString();
         }
         $month = $date->toString('Y_m');
         // For a given date, we must invalidate in the monthly archive table
         $datesByMonth[$month][] = $date->toString();
         // But also the year stored in January
         $year = $date->toString('Y_01');
         $datesByMonth[$year][] = $date->toString();
         // and also weeks overlapping several months, which are stored in the month where the week starts
         /* @var $week Week */
         $week = Period\Factory::build('week', $date);
         $weekAsString = $week->getDateStart()->toString('Y_m');
         $datesByMonth[$weekAsString][] = $date->toString();
         // Keep track of the minimum date for each website
         if ($minDate === false || $date->isEarlier($minDate)) {
             $minDate = $date;
         }
     }
     if (empty($minDate)) {
         throw new Exception("Check that the 'dates' parameter contains at least one valid date.");
     }
     // In each table, invalidate day/week/month/year containing this date
     $archiveTables = ArchiveTableCreator::getTablesArchivesInstalled();
     foreach ($archiveTables as $table) {
         // Extract Y_m from table name
         $suffix = ArchiveTableCreator::getDateFromTableName($table);
         if (!isset($datesByMonth[$suffix])) {
             continue;
         }
         // Dates which are to be deleted from this table
         $datesToDeleteInTable = $datesByMonth[$suffix];
         // Build one statement to delete all dates from the given table
         $sql = $bind = array();
         $datesToDeleteInTable = array_unique($datesToDeleteInTable);
         foreach ($datesToDeleteInTable as $dateToDelete) {
             $sql[] = '(date1 <= ? AND ? <= date2)';
             $bind[] = $dateToDelete;
             $bind[] = $dateToDelete;
         }
         $sql = implode(" OR ", $sql);
         $query = "DELETE FROM {$table} " . " WHERE ( {$sql} ) " . " AND idsite IN (" . implode(",", $idSites) . ")";
         Db::query($query, $bind);
     }
     \Piwik\Plugins\SitesManager\API::getInstance()->updateSiteCreatedTime($idSites, $minDate);
     // Force to re-process data for these websites in the next cron core:archive command run
     $invalidatedIdSites = self::getWebsiteIdsToInvalidate();
     $invalidatedIdSites = array_merge($invalidatedIdSites, $idSites);
     $invalidatedIdSites = array_unique($invalidatedIdSites);
     $invalidatedIdSites = array_values($invalidatedIdSites);
     Option::set(self::OPTION_INVALIDATED_IDSITES, serialize($invalidatedIdSites));
     Site::clearCache();
     $output = array();
     // output logs
     if ($warningDates) {
         $output[] = 'Warning: the following dates have not been invalidated, because they are earlier than your log deletion limit: ' . implode(", ", $warningDates) . "\n The last day with logs is " . $minimumDateWithLogs . ". " . "\n Please disable 'Delete old Logs' or set it to a higher deletion threshold (e.g. 180 or 365 days).";
     }
     $output[] = "Success. The following dates were invalidated successfully: " . implode(", ", $processedDates);
     return $output;
 }
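A hedged usage sketch of calling this method (assuming it is exposed on a plugin API class reachable via getInstance(), e.g. CoreAdminHome; the site IDs and dates are illustrative):

 $output = \Piwik\Plugins\CoreAdminHome\API::getInstance()
     ->invalidateArchivedReports('1,2', '2014-01-01,2014-01-02');

 foreach ($output as $message) {
     echo $message . "\n";
 }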
Example #4
 protected function getDeleteDataInfo()
 {
     Piwik::checkUserHasSuperUserAccess();
     $deleteDataInfos = array();
     $taskScheduler = new TaskScheduler();
     $deleteDataInfos["config"] = PrivacyManager::getPurgeDataSettings();
     $deleteDataInfos["deleteTables"] = "<br/>" . implode(", ", LogDataPurger::getDeleteTableLogTables());
     $scheduleTimetable = $taskScheduler->getScheduledTimeForMethod("PrivacyManager", "deleteLogTables");
     $optionTable = Option::get(self::OPTION_LAST_DELETE_PIWIK_LOGS);
     // if the task was already rescheduled, read the time from the task timetable; otherwise, calculate the next possible runtime
     if (!empty($scheduleTimetable) && $scheduleTimetable - time() > 0) {
         $nextPossibleSchedule = (int) $scheduleTimetable;
     } else {
         $date = Date::factory("today");
         $nextPossibleSchedule = $date->addDay(1)->getTimestamp();
     }
     // the deletion task has not run before
     if (empty($optionTable)) {
         $deleteDataInfos["lastRun"] = false;
         // next run ASAP (with the next scheduled run)
         $deleteDataInfos["nextScheduleTime"] = $nextPossibleSchedule;
     } else {
         $deleteDataInfos["lastRun"] = $optionTable;
         $deleteDataInfos["lastRunPretty"] = Date::factory((int) $optionTable)->getLocalized('%day% %shortMonth% %longYear%');
         // calculate the next run based on the last run + the configured interval
         $nextScheduleRun = (int) ($deleteDataInfos["lastRun"] + $deleteDataInfos["config"]["delete_logs_schedule_lowest_interval"] * 24 * 60 * 60);
         // if the calculated next run is in the past (e.g. the plugin was disabled in the meantime), run ASAP
         if ($nextScheduleRun - time() <= 0) {
             $deleteDataInfos["nextScheduleTime"] = $nextPossibleSchedule;
         } else {
             $deleteDataInfos["nextScheduleTime"] = $nextScheduleRun;
         }
     }
     $deleteDataInfos["nextRunPretty"] = MetricsFormatter::getPrettyTimeFromSeconds($deleteDataInfos["nextScheduleTime"] - time());
     return $deleteDataInfos;
 }
Example #5
 private function findOlderDateWithLogs()
 {
     // If using the feature "Delete logs older than N days"...
     $purgeDataSettings = PrivacyManager::getPurgeDataSettings();
     $logsDeletedWhenOlderThanDays = $purgeDataSettings['delete_logs_older_than'];
     $logsDeleteEnabled = $purgeDataSettings['delete_logs_enable'];
     if ($logsDeleteEnabled && $logsDeletedWhenOlderThanDays) {
         $this->minimumDateWithLogs = Date::factory('today')->subDay($logsDeletedWhenOlderThanDays);
     }
 }