/**
 * Ensures the blob and numeric archive tables for March 2010 exist
 * before table data is cached.
 */
protected static function beforeTableDataCached()
{
    $march2010 = Date::factory('2010-03-01');

    $creator = new ArchiveTableCreator();
    $creator->getBlobTable($march2010);
    $creator->getNumericTable($march2010);
}
public static function setUpBeforeClass()
{
    parent::setUpBeforeClass();

    // Make sure a numeric archive table exists for each of the three test months.
    foreach (array('2015-01-01', '2015-02-02', '2015-03-03') as $day) {
        ArchiveTableCreator::getNumericTable(Date::factory($day));
    }
}
public function setUp()
{
    parent::setUp();

    // Resolve the DAO under test from the fixture's DI container.
    $container = self::$fixture->piwikEnvironment->getContainer();
    $this->archiveTableDao = $container->get('Piwik\\DataAccess\\ArchiveTableDao');

    // Pre-create the January 2015 archive tables used by the tests.
    $january = Date::factory('2015-01-01');
    ArchiveTableCreator::getBlobTable($january);
    ArchiveTableCreator::getNumericTable($january);
}
public function setUp()
{
    // Only trigger archiving when no archive tables exist yet.
    $existingArchiveTables = ArchiveTableCreator::getTablesArchivesInstalled();
    if (!empty($existingArchiveTables)) {
        return;
    }

    $this->archivingLaunched = true;
    APIVisitsSummary::getInstance()->get(
        self::$fixture->idSite,
        self::$fixture->period,
        self::$fixture->date
    );
}
public function tearDown()
{
    // Drop any archive tables created during the test run.
    $archiveTables = ArchiveTableCreator::getTablesArchivesInstalled();
    if (!empty($archiveTables)) {
        Db::dropTables($archiveTables);
    }

    parent::tearDown();
}
public function test_UpdateCommand_ReturnsCorrectExitCode_WhenErrorOccurs()
{
    // create a blob table, then drop it manually so update 2.10.0-b10 will fail
    $blobTable = ArchiveTableCreator::getBlobTable(Date::factory('2015-01-01'));
    Db::exec("DROP TABLE {$blobTable}");

    $exitCode = $this->applicationTester->run(array('command' => 'core:update', '--yes' => true));

    $this->assertEquals(1, $exitCode, $this->getCommandDisplayOutputErrorMessage());
    $this->assertContains(
        "Piwik could not be updated! See above for more information.",
        $this->applicationTester->getDisplay()
    );
}
public static function getSql()
{
    $sqls = array();

    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $table) {
        // Only blob tables hold the record to rename.
        if (ArchiveTableCreator::getTypeFromTableName($table) != ArchiveTableCreator::BLOB_TABLE) {
            continue;
        }

        $sqls["UPDATE " . $table . " SET name = 'DevicePlugins_plugin' WHERE name = 'UserSettings_plugin'"] = false;
    }

    return $sqls;
}
public function getMigrations(Updater $updater)
{
    $migrations = array();

    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $table) {
        // Only blob tables hold the record to rename.
        if (ArchiveTableCreator::getTypeFromTableName($table) != ArchiveTableCreator::BLOB_TABLE) {
            continue;
        }

        $migrations[] = $this->migration->db->sql(
            "UPDATE {$table} SET name = 'DevicePlugins_plugin' WHERE name = 'UserSettings_plugin'"
        );
    }

    return $migrations;
}
/**
 * Returns all available archive blob tables.
 *
 * The result is memoised in self::$archiveBlobTables and sorted from the
 * most recent month to the oldest.
 *
 * @return array
 */
public static function getAllArchiveBlobTables()
{
    if (empty(self::$archiveBlobTables)) {
        $blobTables = array();
        foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $table) {
            if (ArchiveTableCreator::getTypeFromTableName($table) == ArchiveTableCreator::BLOB_TABLE) {
                $blobTables[] = $table;
            }
        }

        // sort tables so we have them in order of their date
        rsort($blobTables);

        self::$archiveBlobTables = $blobTables;
    }

    return (array) self::$archiveBlobTables;
}
public function getMigrationQueries(Updater $updater)
{
    $sqls = array();

    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $table) {
        // Only blob tables hold the record to rename.
        if (ArchiveTableCreator::getTypeFromTableName($table) != ArchiveTableCreator::BLOB_TABLE) {
            continue;
        }

        $sqls["UPDATE " . $table . " SET name = 'UserLanguage_language' WHERE name = 'UserSettings_language'"] = false;
    }

    return $sqls;
}
/**
 * Deletes the given archive IDs from the numeric and blob tables of the
 * given month, in batches of 1000 IDs per DELETE statement.
 */
protected static function deleteArchiveIds(Date $date, $idArchivesToDelete)
{
    foreach (array_chunk($idArchivesToDelete, 1000) as $batch) {
        $deleteSql = "DELETE FROM %s WHERE idarchive IN (" . implode(',', $batch) . ")";

        Db::query(sprintf($deleteSql, ArchiveTableCreator::getNumericTable($date)));

        try {
            Db::query(sprintf($deleteSql, ArchiveTableCreator::getBlobTable($date)));
        } catch (Exception $e) {
            // Individual blob tables could be missing
        }
    }
}
/**
 * Adds an INSERT migration per numeric archive table that seeds the
 * sequence table with a safe starting idarchive value.
 */
private function addArchivingIdMigrationQueries($sql)
{
    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $table) {
        if (ArchiveTableCreator::getTypeFromTableName($table) !== ArchiveTableCreator::NUMERIC_TABLE) {
            continue;
        }

        $maxId = Db::fetchOne('SELECT MAX(idarchive) FROM ' . $table);

        // Leave a 500-id safety margin above the current maximum; start at 1
        // for empty tables.
        $nextValue = empty($maxId) ? 1 : (int) $maxId + 500;

        $sql[] = $this->migration->db->insert($this->sequenceTable, array('name' => $table, 'value' => $nextValue));
    }

    return $sql;
}
/**
 * Adds a sequence-creation query per numeric archive table, seeding each
 * sequence with a safe starting idarchive value.
 */
private static function addArchivingIdMigrationQueries($sql)
{
    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $table) {
        if (ArchiveTableCreator::getTypeFromTableName($table) !== ArchiveTableCreator::NUMERIC_TABLE) {
            continue;
        }

        $maxId = Db::fetchOne('SELECT MAX(idarchive) FROM ' . $table);

        // Leave a 500-id safety margin above the current maximum; start at 1
        // for empty tables.
        $nextValue = empty($maxId) ? 1 : (int) $maxId + 500;

        $query = self::getQueryToCreateSequence($table, $nextValue);

        // refs #6696, ignores Integrity constraint violation: 1062 Duplicate entry 'piwik_archive_numeric_2010_01' for key 'PRIMARY'
        $sql[$query] = '1062';
    }

    return $sql;
}
/**
 * Drops all archive tables.
 */
public static function deleteArchiveTables()
{
    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $archiveTable) {
        Log::debug("Dropping table {$archiveTable}");
        Db::query("DROP TABLE IF EXISTS `{$archiveTable}`");
    }

    // The cached table list is now stale; force a reload.
    ArchiveTableCreator::refreshTableList($forceReload = true);
}
/**
 * When tracking data in the past (using Tracking API), this function
 * can be used to invalidate reports for the idSites and dates where new data
 * was added.
 * DEV: If you call this API, the UI should display the data correctly, but will process
 * in real time, which could be very slow after large data imports.
 * After calling this function via REST, you can manually force all data
 * to be reprocessed by visiting the script as the Super User:
 * http://example.net/piwik/misc/cron/archive.php?token_auth=$SUPER_USER_TOKEN_AUTH_HERE
 * REQUIREMENTS: On large piwik setups, you will need in PHP configuration: max_execution_time = 0
 * We recommend to use an hourly schedule of the script.
 * More information: http://piwik.org/setup-auto-archiving/
 *
 * @param string $idSites Comma separated list of idSite that have had data imported for the specified dates
 * @param string $dates Comma separated list of dates to invalidate for all these websites
 * @throws Exception
 * @return array
 */
public function invalidateArchivedReports($idSites, $dates)
{
    $idSites = Site::getIdSitesFromIdSitesString($idSites);
    if (empty($idSites)) {
        throw new Exception("Specify a value for &idSites= as a comma separated list of website IDs, for which your token_auth has 'admin' permission");
    }
    Piwik::checkUserHasAdminAccess($idSites);

    // Ensure the specified dates are valid
    $toInvalidate = $invalidDates = array();

    $dates = explode(',', trim($dates));
    $dates = array_unique($dates);
    foreach ($dates as $theDate) {
        $theDate = trim($theDate);
        try {
            $date = Date::factory($theDate);
        } catch (Exception $e) {
            $invalidDates[] = $theDate;
            continue;
        }
        if ($date->toString() == $theDate) {
            $toInvalidate[] = $date;
        } else {
            $invalidDates[] = $theDate;
        }
    }

    // If using the feature "Delete logs older than N days"...
    $purgeDataSettings = PrivacyManager::getPurgeDataSettings();
    $logsAreDeletedBeforeThisDate = $purgeDataSettings['delete_logs_schedule_lowest_interval'];
    $logsDeleteEnabled = $purgeDataSettings['delete_logs_enable'];
    $minimumDateWithLogs = false;
    if ($logsDeleteEnabled && $logsAreDeletedBeforeThisDate) {
        $minimumDateWithLogs = Date::factory('today')->subDay($logsAreDeletedBeforeThisDate);
    }

    // Given the list of dates, process which tables they should be deleted from
    $minDate = false;
    $warningDates = $processedDates = array();

    // FIX: $datesByMonth was previously appended to without initialization,
    // which raises a PHP notice on first use.
    $datesByMonth = array();

    /* @var $date Date */
    foreach ($toInvalidate as $date) {
        // we should only delete reports for dates that are more recent than N days
        if ($minimumDateWithLogs && $date->isEarlier($minimumDateWithLogs)) {
            $warningDates[] = $date->toString();
        } else {
            $processedDates[] = $date->toString();
        }

        $month = $date->toString('Y_m');
        // For a given date, we must invalidate in the monthly archive table
        $datesByMonth[$month][] = $date->toString();

        // But also the year stored in January
        $year = $date->toString('Y_01');
        $datesByMonth[$year][] = $date->toString();

        // but also weeks overlapping several months stored in the month where the week is starting
        /* @var $week Week */
        $week = Period\Factory::build('week', $date);
        $weekAsString = $week->getDateStart()->toString('Y_m');
        $datesByMonth[$weekAsString][] = $date->toString();

        // Keep track of the minimum date for each website
        if ($minDate === false || $date->isEarlier($minDate)) {
            $minDate = $date;
        }
    }

    if (empty($minDate)) {
        throw new Exception("Check the 'dates' parameter is a valid date.");
    }

    // In each table, invalidate day/week/month/year containing this date
    $archiveTables = ArchiveTableCreator::getTablesArchivesInstalled();
    foreach ($archiveTables as $table) {
        // Extract Y_m from table name
        $suffix = ArchiveTableCreator::getDateFromTableName($table);
        if (!isset($datesByMonth[$suffix])) {
            continue;
        }

        // Dates which are to be deleted from this table
        $datesToDeleteInTable = $datesByMonth[$suffix];

        // Build one statement to delete all dates from the given table
        $sql = $bind = array();
        $datesToDeleteInTable = array_unique($datesToDeleteInTable);
        foreach ($datesToDeleteInTable as $dateToDelete) {
            $sql[] = '(date1 <= ? AND ? <= date2)';
            $bind[] = $dateToDelete;
            $bind[] = $dateToDelete;
        }
        $sql = implode(" OR ", $sql);

        $query = "DELETE FROM {$table} " . " WHERE ( {$sql} ) " . " AND idsite IN (" . implode(",", $idSites) . ")";
        Db::query($query, $bind);
    }

    \Piwik\Plugins\SitesManager\API::getInstance()->updateSiteCreatedTime($idSites, $minDate);

    // Force to re-process data for these websites in the next cron core:archive command run
    $invalidatedIdSites = self::getWebsiteIdsToInvalidate();
    $invalidatedIdSites = array_merge($invalidatedIdSites, $idSites);
    $invalidatedIdSites = array_unique($invalidatedIdSites);
    $invalidatedIdSites = array_values($invalidatedIdSites);
    Option::set(self::OPTION_INVALIDATED_IDSITES, serialize($invalidatedIdSites));

    Site::clearCache();

    $output = array();

    // output logs
    if ($warningDates) {
        // FIX: message previously read "365 years" (typo for days) and ended
        // with a stray quote character.
        $output[] = 'Warning: the following Dates have not been invalidated, because they are earlier than your Log Deletion limit: '
            . implode(", ", $warningDates)
            . "\n The last day with logs is " . $minimumDateWithLogs . ". "
            . "\n Please disable 'Delete old Logs' or set it to a higher deletion threshold (eg. 180 or 365 days).";
    }

    $output[] = "Success. The following dates were invalidated successfully: " . implode(", ", $processedDates);
    return $output;
}
/**
 * Queries and returns archive data using a set of archive IDs.
 *
 * @param array $archiveIds The IDs of the archives to get data from, keyed by period range string.
 * @param array $recordNames The names of the data to retrieve (ie, nb_visits, nb_actions, etc.)
 * @param string $archiveDataType The archive data type (either, 'blob' or 'numeric').
 * @param bool $loadAllSubtables Whether to pre-load all subtables
 * @throws Exception
 * @return array
 */
public static function getArchiveData($archiveIds, $recordNames, $archiveDataType, $loadAllSubtables)
{
    // create the SQL to select archive data
    $inNames = Common::getSqlStringFieldsArray($recordNames);
    if ($loadAllSubtables) {
        $name = reset($recordNames);

        // select blobs w/ name like "$name_[0-9]+" w/o using RLIKE
        // (matches the base record plus "name_<digit>..." subtable rows)
        $nameEnd = strlen($name) + 2;
        $whereNameIs = "(name = ?\n OR (name LIKE ?\n AND SUBSTRING(name, {$nameEnd}, 1) >= '0'\n AND SUBSTRING(name, {$nameEnd}, 1) <= '9') )";
        $bind = array($name, $name . '%');
    } else {
        $whereNameIs = "name IN ({$inNames})";
        $bind = array_values($recordNames);
    }

    // %s placeholders are substituted per-table/per-id-list below
    $getValuesSql = "SELECT value, name, idsite, date1, date2, ts_archived\n FROM %s\n WHERE idarchive IN (%s)\n AND " . $whereNameIs;

    // get data from every table we're querying
    $rows = array();
    foreach ($archiveIds as $period => $ids) {
        if (empty($ids)) {
            throw new Exception("Unexpected: id archive not found for period '{$period}' '");
        }

        // $period = "2009-01-04,2009-01-04",
        // the first 10 characters are the start date, which selects the monthly table
        $date = Date::factory(substr($period, 0, 10));
        if ($archiveDataType == 'numeric') {
            $table = ArchiveTableCreator::getNumericTable($date);
        } else {
            $table = ArchiveTableCreator::getBlobTable($date);
        }

        $sql = sprintf($getValuesSql, $table, implode(',', $ids));
        $dataRows = Db::fetchAll($sql, $bind);
        foreach ($dataRows as $row) {
            $rows[] = $row;
        }
    }

    return $rows;
}
/**
 * Returns the archive table month suffixes for the last $lastN months,
 * starting with the previous month.
 *
 * @param int $lastN
 * @return string[]
 */
private function getLastNTableMonths($lastN)
{
    $currentDate = Date::factory('now');

    $months = array();
    for ($monthsAgo = 1; $monthsAgo <= $lastN; $monthsAgo++) {
        $months[] = ArchiveTableCreator::getTableMonthFromDate($currentDate->subMonth($monthsAgo));
    }

    return $months;
}
/**
 * Determines whether the installed table list contains enough base
 * (non-archive) Piwik tables for the database to be reused.
 */
private function hasEnoughTablesToReuseDb($tablesInstalled)
{
    if (empty($tablesInstalled) || !is_array($tablesInstalled)) {
        return false;
    }

    $minimumCountPiwikTables = 12;

    // Archive tables don't count towards the base install.
    $numArchiveTables = count(ArchiveTableCreator::getTablesArchivesInstalled());
    $numBaseTables = count($tablesInstalled) - $numArchiveTables;

    return $numBaseTables >= $minimumCountPiwikTables;
}
/**
 * Performs the full test teardown: runs the subclass tearDown, unloads
 * plugins, optionally drops the test database and resets every piece of
 * static/global state so it does not leak into the next test.
 */
public function performTearDown()
{
    // Note: avoid running SQL in the *tearDown() methods because it randomly fails on Travis CI
    // with error Error while sending QUERY packet. PID=XX
    $this->tearDown();

    self::unloadAllPlugins();

    if ($this->dropDatabaseInTearDown) {
        $this->dropDatabase();
    }

    // Reset in-memory caches and static singletons.
    DataTableManager::getInstance()->deleteAll();
    Option::clearCache();
    Site::clearCache();
    Cache::deleteTrackerCache();
    Config::getInstance()->clear();
    ArchiveTableCreator::clear();
    \Piwik\Plugins\ScheduledReports\API::$cache = array();
    \Piwik\Registry::unsetInstance();
    \Piwik\EventDispatcher::getInstance()->clearAllObservers();

    // Clear superglobals populated by the test.
    $_GET = $_REQUEST = array();

    Translate::unloadEnglishTranslation();
    Config::unsetInstance();

    \Piwik\Config::getInstance()->Plugins; // make sure Plugins exists in a config object for next tests that use Plugin\Manager
    // since Plugin\Manager uses getFromGlobalConfig which doesn't init the config object
}
/**
 * Creates a monthly archive table from the template table's CREATE
 * statement, plus an id sequence for numeric tables. Existing tables and
 * sequences are tolerated.
 */
public function createArchiveTable($tableName, $tableNamePrefix)
{
    $db = Db::get();

    // Fetch the CREATE TABLE statement of the template table, then swap in
    // the concrete (monthly) table name.
    $createSql = DbHelper::getTableCreateSql($tableNamePrefix);
    $createSql = str_replace(Common::prefixTable($tableNamePrefix), $tableName, $createSql);

    try {
        $db->query($createSql);
    } catch (Exception $e) {
        // accept mysql error 1050: table already exists, throw otherwise
        if (!$db->isErrNo($e, '1050')) {
            throw $e;
        }
    }

    try {
        if (ArchiveTableCreator::NUMERIC_TABLE === ArchiveTableCreator::getTypeFromTableName($tableName)) {
            $sequence = new Sequence($tableName);
            $sequence->create();
        }
    } catch (Exception $e) {
        // best-effort: the sequence may already exist
    }
}
/**
 * Inserts one metric row and one done-flag row into the numeric archive
 * table for the given site/date/period. The done flag variant rotates with
 * the archive id so the fixture covers plain, all-plugins, single-plugin
 * and segmented archives.
 */
private function insertArchiveRow($idSite, $date, $periodLabel)
{
    $periodObj = \Piwik\Period\Factory::build($periodLabel, $date);
    $dateStart = $periodObj->getDateStart();
    $dateEnd = $periodObj->getDateEnd();

    $table = ArchiveTableCreator::getNumericTable($dateStart);

    // Next free idarchive among the existing done rows.
    $idArchive = (int) Db::fetchOne("SELECT MAX(idarchive) FROM {$table} WHERE name LIKE 'done%'");
    $idArchive = $idArchive + 1;

    $periodId = Piwik::$idPeriods[$periodLabel];

    $doneFlag = 'done';
    if ($idArchive % 5 == 1) {
        $doneFlag = Rules::getDoneFlagArchiveContainsAllPlugins(self::$segment1);
    } elseif ($idArchive % 5 == 2) {
        $doneFlag .= '.VisitsSummary';
    } elseif ($idArchive % 5 == 3) {
        $doneFlag = Rules::getDoneFlagArchiveContainsOnePlugin(self::$segment1, 'UserCountry');
    } elseif ($idArchive % 5 == 4) {
        $doneFlag = Rules::getDoneFlagArchiveContainsAllPlugins(self::$segment2);
    }

    $sql = "INSERT INTO {$table} (idarchive, name, idsite, date1, date2, period, ts_archived)\n VALUES ({$idArchive}, 'nb_visits', {$idSite}, '{$dateStart}', '{$dateEnd}', {$periodId}, NOW()),\n ({$idArchive}, '{$doneFlag}', {$idSite}, '{$dateStart}', '{$dateEnd}', {$periodId}, NOW())";
    Db::query($sql);
}
/**
 * @return string Name of the numeric archive table for this report's start date.
 */
protected function getTableNumeric()
{
    $numericTable = ArchiveTableCreator::getNumericTable($this->dateStart);

    return $numericTable;
}
/**
 * @param $idSites
 * @param $period string
 * @param $datesByMonth array
 * @throws \Exception
 */
private function markArchivesInvalidatedFor($idSites, $period, $datesByMonth)
{
    $invalidateForPeriodId = $this->getPeriodId($period);

    // In each numeric table, invalidate day/week/month/year containing this date.
    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $table) {
        if (ArchiveTableCreator::getTypeFromTableName($table) != ArchiveTableCreator::NUMERIC_TABLE) {
            continue;
        }

        // Extract Y_m from table name; skip tables that have no dates to invalidate.
        $tableMonth = ArchiveTableCreator::getDateFromTableName($table);
        if (!isset($datesByMonth[$tableMonth])) {
            continue;
        }

        self::getModel()->updateArchiveAsInvalidated($table, $idSites, $invalidateForPeriodId, $datesByMonth[$tableMonth]);
    }
}
/**
 * Test that purgeData works when there's no data.
 *
 * @group Integration
 */
public function testPurgeDataDeleteLogsNoData()
{
    // Start from an empty schema: truncate everything and drop all archive tables.
    \Piwik\DbHelper::truncateAllTables();
    foreach (ArchiveTableCreator::getTablesArchivesInstalled() as $archiveTable) {
        Db::exec("DROP TABLE {$archiveTable}");
    }

    // The purge estimate should predict there is nothing to delete.
    $estimate = PrivacyManager::getPurgeEstimate();
    $this->assertEquals(array(), $estimate);

    // Run the purge.
    $this->_setTimeToRun();
    $this->assertTrue($this->instance->deleteLogData());
    $this->assertTrue($this->instance->deleteReportData());

    // All log tables must still be empty.
    $this->assertEquals(0, $this->_getTableCount('log_visit'));
    $this->assertEquals(0, $this->_getTableCount('log_conversion'));
    $this->assertEquals(0, $this->_getTableCount('log_link_visit_action'));
    $this->assertEquals(0, $this->_getTableCount('log_conversion_item'));

    // And no archive table should have been re-created.
    $archiveTables = self::_getArchiveTableNames();
    $this->assertFalse($this->_tableExists($archiveTables['numeric'][0])); // January
    $this->assertFalse($this->_tableExists($archiveTables['numeric'][1])); // February
    $this->assertFalse($this->_tableExists($archiveTables['blob'][0])); // January
    $this->assertFalse($this->_tableExists($archiveTables['blob'][1])); // February
}
/**
 * Fetches every row from the blob archive table for $this->date's month.
 */
private function getAllRowsFromArchiveBlobTable()
{
    $blobTable = ArchiveTableCreator::getBlobTable(Date::factory($this->date));

    return Db::fetchAll("SELECT * FROM " . $blobTable);
}
/**
 * @dataProvider getTestDataForGetTablesArchivesInstalled
 */
public function test_getTablesArchivesInstalled_CorrectlyFiltersTableNames($type, $expectedTables)
{
    // Seed the static table cache with the fixture table list.
    ArchiveTableCreator::$tablesAlreadyInstalled = $this->tables;

    $actualTables = ArchiveTableCreator::getTablesArchivesInstalled($type);

    $this->assertEquals($expectedTables, $actualTables);
}
/**
 * Asserts the numeric and blob archive tables hold the expected number of
 * rows for each of the given archive IDs.
 */
public function assertArchivesExist($expectedPresentArchiveIds, $archiveDate)
{
    $expectedArchiveCount = count($expectedPresentArchiveIds);

    // two metrics + 1 done row per archive in the numeric table
    $numericTable = ArchiveTableCreator::getNumericTable($archiveDate);
    $numericRowCount = $this->getArchiveRowCountWithId($numericTable, $expectedPresentArchiveIds);
    $this->assertEquals($expectedArchiveCount * 3, $numericRowCount);

    // two blob rows per archive in the blob table
    $blobTable = ArchiveTableCreator::getBlobTable($archiveDate);
    $blobRowCount = $this->getArchiveRowCountWithId($blobTable, $expectedPresentArchiveIds);
    $this->assertEquals($expectedArchiveCount * 2, $blobRowCount);
}
/**
 * Deletes by batches Archive IDs in the specified month.
 *
 * @param Date $date
 * @param $idArchivesToDelete
 * @return int Number of rows deleted from both numeric + blob table.
 */
protected function deleteArchiveIds(Date $date, $idArchivesToDelete)
{
    $numericTable = ArchiveTableCreator::getNumericTable($date);
    $blobTable = ArchiveTableCreator::getBlobTable($date);

    $deletedCount = 0;
    foreach (array_chunk($idArchivesToDelete, 1000) as $batch) {
        $deletedCount += $this->model->deleteArchiveIds($numericTable, $blobTable, $batch);
    }

    return $deletedCount;
}
/**
 * Testing plain inserts (BLOB)
 * @group Core
 */
public function testTableInsertBatchIterateBlob()
{
    $blobTable = ArchiveTableCreator::getBlobTable(Date::factory('2011-03-31'));
    $fields = array('idarchive', 'name', 'idsite', 'date1', 'date2', 'period', 'ts_archived', 'value');

    $data = $this->_getBlobDataInsert();
    BatchInsert::tableInsertBatchIterate($blobTable, $fields, $data);
    $this->_checkTableIsExpectedBlob($blobTable, $data);

    // If we insert AGAIN, expect to throw an error because the primary key already exist
    try {
        BatchInsert::tableInsertBatchIterate($blobTable, $fields, $data, $ignoreWhenDuplicate = false);
    } catch (Exception $e) {
        // However if we insert with keyword REPLACE, then the new data should be saved
        BatchInsert::tableInsertBatchIterate($blobTable, $fields, $data, $ignoreWhenDuplicate = true);
        $this->_checkTableIsExpectedBlob($blobTable, $data);
        return;
    }
    $this->fail('Exception expected');
}
/**
 * Resets all in-memory and static caches so state does not leak between
 * tests: archive/data-table/option/site/tracker caches, the transient,
 * eager and lazy Piwik caches, singletons, archiver state, superglobals
 * and translations.
 */
public function clearInMemoryCaches()
{
    Archive::clearStaticCache();
    DataTableManager::getInstance()->deleteAll();
    Option::clearCache();
    Site::clearCache();
    Cache::deleteTrackerCache();
    PiwikCache::getTransientCache()->flushAll();
    PiwikCache::getEagerCache()->flushAll();
    PiwikCache::getLazyCache()->flushAll();
    ArchiveTableCreator::clear();
    \Piwik\Plugins\ScheduledReports\API::$cache = array();
    Singleton::clearAll();
    PluginsArchiver::$archivers = array();

    // Clear request superglobals populated during the test.
    $_GET = $_REQUEST = array();

    Translate::reset();

    self::getConfig()->Plugins; // make sure Plugins exists in a config object for next tests that use Plugin\Manager
    // since Plugin\Manager uses getFromGlobalConfig which doesn't init the config object
}