)
        ENGINE = SummingMergeTree(event_date, (site_id, hash_id, event_time, event_date), 8192)
    ');
    echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";
    echo "----------------------------------- CREATE csv file -----------------------------------------------------------------\n";
    $file_data_names = ['/tmp/clickHouseDB_test.part.1.data', '/tmp/clickHouseDB_test.part.2.data', '/tmp/clickHouseDB_test.part.3.data'];
    $c = 0;
    foreach ($file_data_names as $file_name) {
        $c++;
        makeSomeDataFileBigOldDates($file_name, $c);
    }
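    // makeSomeDataFileBigOldDates() is defined elsewhere in the full example; below is a
    // minimal hypothetical sketch of such a generator (suffixed _sketch so it cannot clash
    // with the real helper). The column order must match the insertBatchFiles() call below,
    // and event_time is pushed into the past so dropOldPartitions() later has work to do.
    function makeSomeDataFileBigOldDates_sketch(string $file_name, int $index): void
    {
        $handle = fopen($file_name, 'wb');
        for ($row = 1; $row <= 1000; $row++) {
            // older dates for higher $index => older partitions
            $event_time = date('Y-m-d H:i:s', strtotime("-{$index} months"));
            fputcsv($handle, [$event_time, $row % 10, 'hash_' . $row, 1]);
        }
        fclose($handle);
    }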
    echo "--------------------------------------- insert -------------------------------------------------------------\n";
    echo "insert ALL file async + GZIP:\n";
    $db->enableHttpCompression(true);
    $time_start = microtime(true);
    $result_insert = $db->insertBatchFiles('summing_partions_views', $file_data_names, ['event_time', 'site_id', 'hash_id', 'views']);
    echo "use time:" . round(microtime(true) - $time_start, 2) . " sec.\n";
    foreach ($result_insert as $fileName => $state) {
        echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
    }
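    // Each entry in $result_insert is a Statement, so besides info_upload() you can
    // probe each file's upload for transport/server errors. isError() is assumed here
    // from this client's Statement API; a sketch, not part of the original flow:
    foreach ($result_insert as $fileName => $state) {
        if ($state->isError()) {
            echo "upload FAILED: {$fileName}\n";
        }
    }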
}
echo "--------------------------------------- select -------------------------------------------------------------\n";
print_r($db->select('select min(event_date), max(event_date) from summing_partions_views')->rows());
echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
echo "tableSize    : " . json_encode($db->tableSize('summing_partions_views')) . "\n";
echo "partitions    : " . json_encode($db->partitions('summing_partions_views', 2)) . "\n";
echo "--------------------------------------- drop partitions -------------------------------------------------------------\n";
echo "dropOldPartitions -30 days    : " . json_encode($db->dropOldPartitions('summing_partions_views', 30)) . "\n";
echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
// ------------------------------------------------------------------------------------------------
$db->write("DROP TABLE IF EXISTS summing_url_views");
$db->write('
    CREATE TABLE IF NOT EXISTS summing_url_views (
        event_date Date DEFAULT toDate(event_time),
        event_time DateTime,
        url_hash String,
        site_id Int32,
        views Int32,
        v_00 Int32,
        v_55 Int32
    )
    ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";
// ------------------------------------------------------------------------------------------------------
echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
$file_data_names = ['/tmp/clickHouseDB_test.b.1.data', '/tmp/clickHouseDB_test.b.2.data', '/tmp/clickHouseDB_test.b.3.data', '/tmp/clickHouseDB_test.b.4.data', '/tmp/clickHouseDB_test.b.5.data'];
$c = 0;
foreach ($file_data_names as $file_name) {
    $c++;
    makeSomeDataFileBig($file_name, 40 * $c);
}
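// Not in the original: a quick tally of the bytes queued for upload, handy when
// comparing the plain vs. GZIP timings below:
$total_bytes = 0;
foreach ($file_data_names as $file_name) {
    $total_bytes += filesize($file_name);
}
echo 'Prepared ' . round($total_bytes / 1024 / 1024, 1) . " MB of CSV\n";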
echo "----------------------------------------------------------------------------------------------------\n";
echo "insert ALL file async NO gzip:\n";
$db->settings()->max_execution_time(200);
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
foreach ($result_insert as $state) {
    echo "Info : " . json_encode($state->info_upload()) . "\n";
}
print_r($db->select('select sum(views) from summing_url_views')->rows());
echo "--------------------------------------- enableHttpCompression -------------------------------------------------------------\n";
echo "insert ALL file async + GZIP:\n";
$db->enableHttpCompression(true);
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
foreach ($result_insert as $fileName => $state) {
    echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
}
print_r($db->select('select sum(views) from summing_url_views')->rows());
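// The same aggregate fetched as a scalar instead of print_r() over rows().
// fetchOne() is assumed here from this client's Statement API:
echo 'sum(views) = ' . $db->select('select sum(views) AS v from summing_url_views')->fetchOne('v') . "\n";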
// ------------------------------------------------------------------------------------------------
$db->write("DROP TABLE IF EXISTS summing_url_views");
$db->write('
    CREATE TABLE IF NOT EXISTS summing_url_views (
        event_date Date DEFAULT toDate(event_time),
        event_time DateTime,
        url_hash String,
        site_id Int32,
        views Int32,
        v_00 Int32,
        v_55 Int32
    )
    ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTS: " . json_encode($db->showTables()) . "\n";
// --------------------------------  CREATE csv file ----------------------------------------------------------------
// ----------------------------------------------------------------------------------------------------
$file_data_names = ['/tmp/clickHouseDB_test.1.data', '/tmp/clickHouseDB_test.2.data', '/tmp/clickHouseDB_test.3.data', '/tmp/clickHouseDB_test.4.data', '/tmp/clickHouseDB_test.5.data'];
foreach ($file_data_names as $file_name) {
    makeSomeDataFile($file_name, 5);
}
// ----------------------------------------------------------------------------------------------------
echo "insert ONE file:\n";
$time_start = microtime(true);
$stat = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
echo "insert ALL file async:\n";
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
// ------------------------------------------------------------------------------------------------
foreach ($file_data_names as $fileName) {
    echo $fileName . " : " . $result_insert[$fileName]->totalTimeRequest() . "\n";
}
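// ------------------------------------------------------------------------------------------------
// The async batch runs the uploads concurrently, so wall-clock time for the batch is
// close to the slowest single request rather than the sum of all five:
$slowest = 0;
foreach ($file_data_names as $fileName) {
    $slowest = max($slowest, $result_insert[$fileName]->totalTimeRequest());
}
echo 'slowest single upload: ' . $slowest . " sec.\n";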
// ------------------------------------------------------------------------------------------------
/*
Table EXISTS: [{"name":"summing_url_views"}]
Created file [/tmp/clickHouseDB_test.1.data]: 22200 rows...
*/
// ------------------------------------------------------------------------------------------------
// Example #4
$db->write("DROP TABLE IF EXISTS testRFCCSVWrite");
$db->write('CREATE TABLE testRFCCSVWrite (
           event_date Date DEFAULT toDate(event_time),
           event_time DateTime,
           strs String,
           flos Float32,
           ints Int32,
           arr1 Array(UInt8),
           arrs Array(String)
        ) ENGINE = Log');
$fileName = '/tmp/testRFCCSVWrite.CSV';
@unlink($fileName);
$data = [
    ['event_time' => date('Y-m-d H:i:s'), 'strs' => 'SOME STRING', 'flos' => 1.1, 'ints' => 1, 'arr1' => [1, 2, 3], 'arrs' => ["A", "B"]],
    ['event_time' => date('Y-m-d H:i:s'), 'strs' => 'SOME STRING', 'flos' => 2.3, 'ints' => 2, 'arr1' => [1, 2, 3], 'arrs' => ["A", "B"]],
    ['event_time' => date('Y-m-d H:i:s'), 'strs' => 'SOME\'STRING', 'flos' => 0, 'ints' => 0, 'arr1' => [1, 2, 3], 'arrs' => ["A", "B"]],
    ['event_time' => date('Y-m-d H:i:s'), 'strs' => 'SOME\'"TRING', 'flos' => 0, 'ints' => 0, 'arr1' => [1, 2, 3], 'arrs' => ["A", "B"]],
    ['event_time' => date('Y-m-d H:i:s'), 'strs' => "SOMET\nRI\n\"N\"G\\XX_ABCDEFG", 'flos' => 0, 'ints' => 0, 'arr1' => [1, 2, 3], 'arrs' => ["A", "B\nD\nC"]],
    ['event_time' => date('Y-m-d H:i:s'), 'strs' => "ID_ARRAY", 'flos' => 0, 'ints' => 0, 'arr1' => [1, 2, 3], 'arrs' => ["A", "B\nD\nC"]],
];
// Float32 rounding: 1.1 + 2.3 comes back from the table as 3.3999999761581, not 3.4
foreach ($data as $row) {
    file_put_contents($fileName, \ClickHouseDB\FormatLine::CSV($row) . "\n", FILE_APPEND);
}
//
echo "FILE:\n\n";
echo file_get_contents($fileName) . "\n\n----\n";
//
$db->insertBatchFiles('testRFCCSVWrite', [$fileName], ['event_time', 'strs', 'flos', 'ints', 'arr1', 'arrs']);
$st = $db->select('SELECT * FROM testRFCCSVWrite');
print_r($st->rows());
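// Quick round-trip check (not in the original): all six rows written above should
// come back from the table. fetchOne() is assumed from this client's Statement API:
echo 'rows in testRFCCSVWrite: ' . $db->select('SELECT count() AS c FROM testRFCCSVWrite')->fetchOne('c') . "\n";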
//
echo "\n<<<<< TAB >>>>\n";
$fileName = '/tmp/testRFCCSVWrite.TAB';
@unlink($fileName);
$db->write("DROP TABLE IF EXISTS testTABWrite");
$db->write('CREATE TABLE testTABWrite (
           event_date Date DEFAULT toDate(event_time),
           event_time DateTime,
           strs String,
           flos Float32,
           ints Int32,
           arr1 Array(UInt8),
           arrs Array(String)