url_hash String,
        site_id Int32,
        views Int32,
        v_00 Int32,
        v_55 Int32
    ) 
    ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
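// NOTE: the ENGINE clause above uses the legacy MergeTree syntax
// (date column, primary-key tuple, index granularity). On current ClickHouse
// servers the assumed modern equivalent would be:
//
//   ENGINE = SummingMergeTree()
//   PARTITION BY toYYYYMM(event_date)
//   ORDER BY (site_id, url_hash, event_time, event_date)
//   SETTINGS index_granularity = 8192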
echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";
// ------------------------------------------------------------------------------------------------------
echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
$file_data_names = ['/tmp/clickHouseDB_test.b.1.data', '/tmp/clickHouseDB_test.b.2.data', '/tmp/clickHouseDB_test.b.3.data', '/tmp/clickHouseDB_test.b.4.data', '/tmp/clickHouseDB_test.b.5.data'];
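// Hypothetical sketch of the makeSomeDataFileBig() helper (the real one is defined
// elsewhere in the examples): it writes CSV rows in the column order used by the
// insertBatchFiles() calls below -- event_time, url_hash, site_id, views, v_00, v_55.
// The second argument is assumed to be a target file size in MB, the third a day
// offset applied to event_time.
if (!function_exists('makeSomeDataFileBig')) {
    function makeSomeDataFileBig($file_name, $size_mb, $shift_days = 0)
    {
        $handle = fopen($file_name, 'w');
        $event_time = date('Y-m-d H:i:s', strtotime($shift_days . ' days'));
        $bytes = 0;
        while ($bytes < $size_mb * 1024 * 1024) {
            $bytes += fputcsv($handle, [
                $event_time,             // event_time
                md5((string) mt_rand()), // url_hash
                mt_rand(1, 32),          // site_id
                1,                       // views
                mt_rand(0, 1),           // v_00
                mt_rand(0, 1),           // v_55
            ]);
        }
        fclose($handle);
    }
}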
$c = 0;
foreach ($file_data_names as $file_name) {
    $c++;
    makeSomeDataFileBig($file_name, 40 * $c);
}
echo "----------------------------------------------------------------------------------------------------\n";
echo "insert ALL file async NO gzip:\n";
$db->settings()->max_execution_time(200);
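// max_execution_time is a per-query ClickHouse setting in seconds; 200 s of
// headroom keeps the server from killing the long batch insert below.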
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
foreach ($result_insert as $state) {
    echo "Info : " . json_encode($state->info_upload()) . "\n";
}
print_r($db->select('select sum(views) from summing_url_views')->rows());
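// SummingMergeTree sums the non-key numeric columns (views, v_00, v_55) when it
// merges rows sharing the same primary key; sum(views) stays correct either way,
// since the aggregation runs across all parts, merged or not.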
echo "--------------------------------------- enableHttpCompression -------------------------------------------------------------\n";
echo "insert ALL file async + GZIP:\n";
$db->enableHttpCompression(true);
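// With HTTP compression enabled the client gzips each request body before sending
// it, trading CPU time for network bandwidth on the upload.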
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "Elapsed time: " . round(microtime(true) - $time_start, 2) . " sec\n";
foreach ($result_insert as $state) {
    echo "Info : " . json_encode($state->info_upload()) . "\n";
}
print_r($db->select('select sum(views) from summing_url_views')->rows());
echo "------------------------------- CREATE table with sampling support --------------------------------------------------\n";
$db->write('DROP TABLE IF EXISTS summing_url_views_cityHash64_site_id');
$db->write('
    CREATE TABLE summing_url_views_cityHash64_site_id (
        event_date Date DEFAULT toDate(event_time),
        event_time DateTime,
        url_hash String,
        site_id Int32,
        views Int32,
        v_00 Int32,
        v_55 Int32
    )
    ENGINE = SummingMergeTree(event_date, cityHash64(site_id,event_time),(site_id, url_hash, event_time, event_date,cityHash64(site_id,event_time)), 8192)
');
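// cityHash64(site_id, event_time) is the sampling expression: it has to be part of
// the primary-key tuple, and it is what lets SELECT ... SAMPLE 0.1 below read a
// deterministic ~10% slice of the table.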
    echo "Table EXISTS:" . print_r($db->showTables()) . "\n";
// ------------------------------------------------------------------------------------------------------
echo "----------------------------------- CREATE big CSV files ------------------------------------------------------------\n";
$file_data_names = ['/tmp/clickHouseDB_test.big.1.data', '/tmp/clickHouseDB_test.big.2.data', '/tmp/clickHouseDB_test.big.3.data'];
$c = 0;
foreach ($file_data_names as $file_name) {
    $c++;
    $shift_days = -1 * $c * 3;
    makeSomeDataFileBig($file_name, 23 * $c, $shift_days);
}
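// Each file is shifted a few days back in time, so the rows spread across several
// distinct event_date values and the per-date comparison below gets more than one
// group to work with.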
    echo "----------------------------------------------------------------------------------------------------\n";
    echo "insert ALL file async + GZIP:\n";
    $db->enableHttpCompression(true);
    $time_start = microtime(true);
    $result_insert = $db->insertBatchFiles('summing_url_views_cityHash64_site_id', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
    echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
    foreach ($result_insert as $fileName => $state) {
        echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
    }
}
echo "------------------------------- COMPARE event_date ---------------------------------------------------------------------\n";
$rows = $db->select('select event_date,sum(views) as v from summing_url_views_cityHash64_site_id GROUP BY event_date ORDER BY event_date')->rowsAsTree('event_date');
$samp = $db->select('select event_date,(sum(views)*10) as v from summing_url_views_cityHash64_site_id SAMPLE 0.1 GROUP BY event_date ORDER BY event_date ')->rowsAsTree('event_date');
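// SAMPLE 0.1 reads roughly 10% of the rows, so the sampled aggregate is scaled
// back up by 10 (sum(views) * 10) to be comparable with the exact per-day totals.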
foreach ($rows as $event_date => $data) {