tableInsertBatch() public static method

Performs a batch insert into a specific table using either LOAD DATA INFILE or, as a fallback, plain INSERTs. On MySQL, LOAD DATA INFILE is 20x faster than a series of plain INSERTs.
public static tableInsertBatch ( string $tableName, array $fields, array $values, boolean $throwException = false, string $charset = 'utf8' ) : boolean
$tableName string PREFIXED table name: you must call Common::prefixTable() before passing the table name
$fields array Array of unquoted field names
$values array Array of rows to insert
$throwException boolean Whether to rethrow an exception caught while attempting LOAD DATA INFILE instead of silently falling back to plain INSERTs
$charset string The charset to use; defaults to utf8
Return boolean True if the bulk LOAD was used, false if we fell back to plain INSERTs
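A minimal calling sketch, assuming a bootstrapped Piwik environment; the table, column names, and row values below are illustrative, and only the tableInsertBatch() call itself follows the signature documented above:
 use Piwik\Common;
 use Piwik\Db\BatchInsert;

 // Illustrative data: insert two rows into the (prefixed) `option` table.
 $table = Common::prefixTable('option');          // prefix the raw table name first, e.g. "piwik_option"
 $fields = array('option_name', 'option_value');  // unquoted column names
 $values = array(
     array('example_option_1', '1'),
     array('example_option_2', '2'),
 );

 // Returns true when LOAD DATA INFILE was used, false when it fell back to plain INSERTs.
 // With $throwException = true, a failure of LOAD DATA INFILE is rethrown instead of
 // silently triggering the fallback.
 $usedBulkLoad = BatchInsert::tableInsertBatch($table, $fields, $values, $throwException = false, $charset = 'utf8');
The diagnostic examples below (Example #1 and Example #4) follow this same pattern against the `option` table and delete their test rows afterwards, so the availability of LOAD DATA INFILE can be checked without leaving data behind.
Example #1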
 public function execute()
 {
     $isPiwikInstalling = !Config::getInstance()->existsLocalConfig();
     if ($isPiwikInstalling) {
         // Skip the diagnostic if Piwik is being installed
         return array();
     }
     $label = $this->translator->translate('Installation_DatabaseAbilities');
     $optionTable = Common::prefixTable('option');
     $testOptionNames = array('test_system_check1', 'test_system_check2');
     $loadDataInfile = false;
     $errorMessage = null;
     try {
         $loadDataInfile = Db\BatchInsert::tableInsertBatch($optionTable, array('option_name', 'option_value'), array(array($testOptionNames[0], '1'), array($testOptionNames[1], '2')), $throwException = true);
     } catch (\Exception $ex) {
         $errorMessage = str_replace("\n", "<br/>", $ex->getMessage());
     }
     // delete the temporary rows that were created
     Db::exec("DELETE FROM `{$optionTable}` WHERE option_name IN ('" . implode("','", $testOptionNames) . "')");
     if ($loadDataInfile) {
         return array(DiagnosticResult::singleResult($label, DiagnosticResult::STATUS_OK, 'LOAD DATA INFILE'));
     }
     $comment = sprintf('LOAD DATA INFILE<br/>%s<br/>%s', $this->translator->translate('Installation_LoadDataInfileUnavailableHelp', array('LOAD DATA INFILE', 'FILE')), $this->translator->translate('Installation_LoadDataInfileRecommended'));
     if ($errorMessage) {
         $comment .= sprintf('<br/><strong>%s:</strong> %s<br/>%s', $this->translator->translate('General_Error'), $errorMessage, 'Troubleshooting: <a target="_blank" href="?module=Proxy&action=redirect&url=http://piwik.org/faq/troubleshooting/%23faq_194">FAQ on piwik.org</a>');
     }
     return array(DiagnosticResult::singleResult($label, DiagnosticResult::STATUS_WARNING, $comment));
 }
Example #2
 /**
  * Testing batch insert (BLOB)
  * @group Core
  */
 public function testTableInsertBatchBlob()
 {
     $dateLabel = '2011-03-31';
     $table = ArchiveTableCreator::getBlobTable(Date::factory($dateLabel));
     $data = $this->_getBlobDataInsert();
     try {
         $didWeUseBulk = BatchInsert::tableInsertBatch($table, array('idarchive', 'name', 'idsite', 'date1', 'date2', 'period', 'ts_archived', 'value'), $data, $throwException = true);
     } catch (Exception $e) {
         $didWeUseBulk = $e->getMessage();
     }
     $this->_checkLoadDataInFileWasUsed($didWeUseBulk);
     // If bulk wasn't used the exception was caught and the INSERT didn't work
     if ($didWeUseBulk === true) {
         $this->_checkTableIsExpectedBlob($table, $data);
     }
      // INSERT the bulk data again. Because we use the LOCAL keyword, the data will be REPLACED automatically (see the MySQL docs)
     $didWeUseBulk = BatchInsert::tableInsertBatch($table, array('idarchive', 'name', 'idsite', 'date1', 'date2', 'period', 'ts_archived', 'value'), $data);
     if ($didWeUseBulk === true) {
         $this->_checkTableIsExpectedBlob($table, $data);
     }
 }
Example #3
 static function update()
 {
     $returningMetrics = array('nb_visits_returning', 'nb_actions_returning', 'max_actions_returning', 'sum_visit_length_returning', 'bounce_count_returning', 'nb_visits_converted_returning', 'nb_uniq_visitors_returning');
     $now = Date::factory('now')->getDatetime();
     $archiveNumericTables = Db::get()->fetchCol("SHOW TABLES LIKE '%archive_numeric%'");
     // for each numeric archive table, copy *_returning metrics to VisitsSummary metrics w/ the appropriate
     // returning visit segment
     foreach ($archiveNumericTables as $table) {
         // get archives w/ *._returning
         $sql = "SELECT idarchive, idsite, period, date1, date2\n                      FROM {$table}\n                     WHERE name IN ('" . implode("','", $returningMetrics) . "')\n                  GROUP BY idarchive";
         $idArchivesWithReturning = Db::fetchAll($sql);
         // get archives for visitssummary returning visitor segment
         $sql = "SELECT idarchive, idsite, period, date1, date2\n                      FROM {$table}\n                     WHERE name = ?\n                  GROUP BY idarchive";
         $visitSummaryReturningSegmentDone = Rules::getDoneFlagArchiveContainsOnePlugin(new Segment(VisitFrequencyApi::RETURNING_VISITOR_SEGMENT, $idSites = array()), 'VisitsSummary');
         $idArchivesWithVisitReturningSegment = Db::fetchAll($sql, array($visitSummaryReturningSegmentDone));
          // collect info for new visitssummary archives that have to be created to match archives w/ *._returning
         // metrics
         $missingIdArchives = array();
         $idArchiveMappings = array();
         foreach ($idArchivesWithReturning as $row) {
             $withMetricsIdArchive = $row['idarchive'];
             foreach ($idArchivesWithVisitReturningSegment as $segmentRow) {
                 if ($row['idsite'] == $segmentRow['idsite'] && $row['period'] == $segmentRow['period'] && $row['date1'] == $segmentRow['date1'] && $row['date2'] == $segmentRow['date2']) {
                     $idArchiveMappings[$withMetricsIdArchive] = $segmentRow['idarchive'];
                 }
             }
             if (!isset($idArchiveMappings[$withMetricsIdArchive])) {
                 $missingIdArchives[$withMetricsIdArchive] = $row;
             }
         }
         // if there are missing idarchives, fill out new archive row values
         if (!empty($missingIdArchives)) {
             $newIdArchiveStart = Db::fetchOne("SELECT MAX(idarchive) FROM {$table}") + 1;
             foreach ($missingIdArchives as $withMetricsIdArchive => &$rowToInsert) {
                 $idArchiveMappings[$withMetricsIdArchive] = $newIdArchiveStart;
                 $rowToInsert['idarchive'] = $newIdArchiveStart;
                 $rowToInsert['ts_archived'] = $now;
                 $rowToInsert['name'] = $visitSummaryReturningSegmentDone;
                 $rowToInsert['value'] = ArchiveWriter::DONE_OK;
                 ++$newIdArchiveStart;
             }
             // add missing archives
             try {
                 $params = array();
                 foreach ($missingIdArchives as $missingIdArchive) {
                     $params[] = array_values($missingIdArchive);
                 }
                 BatchInsert::tableInsertBatch($table, array_keys(reset($missingIdArchives)), $params, $throwException = false);
             } catch (\Exception $ex) {
                 Updater::handleQueryError($ex, "<batch insert>", false, __FILE__);
             }
         }
         // update idarchive & name columns in rows with *._returning metrics
         $updateSqlPrefix = "UPDATE {$table}\n                                   SET idarchive = CASE idarchive ";
         $updateSqlSuffix = " END, name = CASE name ";
         foreach ($returningMetrics as $metric) {
             $newMetricName = substr($metric, 0, strlen($metric) - strlen(VisitFrequencyApi::COLUMN_SUFFIX));
             $updateSqlSuffix .= "WHEN '{$metric}' THEN '" . $newMetricName . "' ";
         }
         $updateSqlSuffix .= " END WHERE idarchive IN (%s)\n                                        AND name IN ('" . implode("','", $returningMetrics) . "')";
         // update only 1000 rows at a time so we don't send too large an SQL query to MySQL
         foreach (array_chunk($missingIdArchives, 1000, $preserveKeys = true) as $chunk) {
             $idArchives = array();
             $updateSql = $updateSqlPrefix;
             foreach ($chunk as $withMetricsIdArchive => $row) {
                 $updateSql .= "WHEN {$withMetricsIdArchive} THEN {$row['idarchive']} ";
                 $idArchives[] = $withMetricsIdArchive;
             }
             $updateSql .= sprintf($updateSqlSuffix, implode(',', $idArchives));
             Updater::executeMigrationQuery($updateSql, false, __FILE__);
         }
     }
 }
Example #4
 private static function checkLoadDataInfile(&$result)
 {
     // check if LOAD DATA INFILE works
     $optionTable = Common::prefixTable('option');
     $testOptionNames = array('test_system_check1', 'test_system_check2');
     $result['load_data_infile_available'] = false;
     try {
         $result['load_data_infile_available'] = \Piwik\Db\BatchInsert::tableInsertBatch($optionTable, array('option_name', 'option_value'), array(array($testOptionNames[0], '1'), array($testOptionNames[1], '2')), $throwException = true);
     } catch (\Exception $ex) {
         $result['load_data_infile_error'] = str_replace("\n", "<br/>", $ex->getMessage());
     }
     // delete the temporary rows that were created
     Db::exec("DELETE FROM `{$optionTable}` WHERE option_name IN ('" . implode("','", $testOptionNames) . "')");
 }
Example #5
 protected function insertBulkRecords($records)
 {
     // Using standard plain INSERT if there is only one record to insert
     if ($DEBUG_DO_NOT_USE_BULK_INSERT = false || count($records) == 1) {
         foreach ($records as $record) {
             $this->insertRecord($record[0], $record[1]);
         }
         return true;
     }
     $bindSql = $this->getInsertRecordBind();
     $values = array();
     $valueSeen = false;
     foreach ($records as $record) {
         // don't record zero
         if (empty($record[1])) {
             continue;
         }
         $bind = $bindSql;
          $bind[] = $record[0]; // name
          $bind[] = $record[1]; // value
         $values[] = $bind;
         $valueSeen = $record[1];
     }
     if (empty($values)) {
         return true;
     }
     $tableName = $this->getTableNameToInsert($valueSeen);
     $fields = $this->getInsertFields();
     BatchInsert::tableInsertBatch($tableName, $fields, $values);
     return true;
 }
Example #6
 public function exec()
 {
     Db\BatchInsert::tableInsertBatch($this->table, $this->columnNames, $this->values, $this->throwException, $this->charset);
 }