');
// NOTE(review): the quote/paren above closes a $db->write('CREATE TABLE ...') call
// that begins BEFORE this chunk — confirm against the full file.
echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";

echo "----------------------------------- CREATE csv file -----------------------------------------------------------------\n";
// Build three CSV fixture files with old event dates (helper defined in lib_example.php).
$file_data_names = ['/tmp/clickHouseDB_test.part.1.data', '/tmp/clickHouseDB_test.part.2.data', '/tmp/clickHouseDB_test.part.3.data'];
$c = 0;
foreach ($file_data_names as $file_name) {
    $c++;
    makeSomeDataFileBigOldDates($file_name, $c);
}

echo "--------------------------------------- insert -------------------------------------------------------------\n";
echo "insert ALL file async + GZIP:\n";
// Turn on gzip for the HTTP transport before the bulk upload.
$db->enableHttpCompression(true);
$time_start = microtime(true);
// Async batch insert of all fixture files into the target table.
$result_insert = $db->insertBatchFiles('summing_partions_views', $file_data_names, ['event_time', 'site_id', 'hash_id', 'views']);
echo "use time:" . round(microtime(true) - $time_start, 2) . " sec.\n";
// Per-file upload statistics reported by the client.
foreach ($result_insert as $fileName => $state) {
    echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
}
// NOTE(review): this brace closes an `if` opened before this chunk — verify.
}

echo "--------------------------------------- select -------------------------------------------------------------\n";
print_r($db->select('select min(event_date),max(event_date) from summing_partions_views ')->rows());

echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
echo "tableSize : " . json_encode($db->tableSize('summing_partions_views')) . "\n";
echo "partitions : " . json_encode($db->partitions('summing_partions_views', 2)) . "\n";

echo "--------------------------------------- drop partitions -------------------------------------------------------------\n";
// Drop partitions older than 30 days, then re-list sizes to show the effect.
echo "dropOldPartitions -30 days : " . json_encode($db->dropOldPartitions('summing_partions_views', 30)) . "\n";

echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
echo "tableSize : " . json_encode($db->tableSize('summing_partions_views')) . "\n";
echo "partitions : " . json_encode($db->partitions('summing_partions_views', 2)) . "\n";
<?php
// Example: create a SummingMergeTree table sampled/keyed via intHash32(event_time, site_id)
// and prepare big CSV fixture files for bulk insertion.
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/lib_example.php';

$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$db = new ClickHouseDB\Client($config);

$_flag_create_table = false;

// NOTE(review): this unconditional DROP makes the tableSize() probe below always
// report "false", so the table is re-created on every run — confirm that is intended
// (the sibling cityHash64/big examples do not pre-drop).
$db->write("DROP TABLE IF EXISTS summing_url_views_intHash32_site_id");

$size = $db->tableSize('summing_url_views_intHash32_site_id');
echo "Site table summing_url_views_intHash32_site_id : " . (isset($size['size']) ? $size['size'] : 'false') . "\n";

if (!isset($size['size'])) {
    $_flag_create_table = true;
}

if ($_flag_create_table) {
    $db->write("DROP TABLE IF EXISTS summing_url_views_intHash32_site_id");
    // SQL kept byte-identical to the original statement.
    $db->write(' CREATE TABLE IF NOT EXISTS summing_url_views_intHash32_site_id ( event_date Date DEFAULT toDate(event_time), event_time DateTime, url_hash String, site_id Int32, views Int32, v_00 Int32, v_55 Int32 ) ENGINE = SummingMergeTree(event_date, intHash32(event_time,site_id),(site_id, url_hash, event_time, event_date,intHash32(event_time,site_id)), 8192) ');
    // BUG FIX: print_r() without $return = true echoes the dump itself and returns bool,
    // so the old code printed "Table EXISTS:1". Passing true returns the string instead.
    echo "Table EXISTS:" . print_r($db->showTables(), true) . "\n";

    // ------------------------------------------------------------------------------------------------------
    echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
    $file_data_names = ['/tmp/clickHouseDB_test.big.1.data', '/tmp/clickHouseDB_test.big.2.data', '/tmp/clickHouseDB_test.big.3.data'];
<?php
// Example: create a SummingMergeTree table keyed via cityHash64(site_id, event_time)
// and prepare big CSV fixture files for bulk insertion.
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/lib_example.php';

$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$db = new ClickHouseDB\Client($config);

$_flag_create_table = false;

// Probe the table; tableSize() yields no 'size' key when the table is absent.
$size = $db->tableSize('summing_url_views_cityHash64_site_id');
echo "Site table summing_url_views_cityHash64_site_id : " . (isset($size['size']) ? $size['size'] : 'false') . "\n";

if (!isset($size['size'])) {
    $_flag_create_table = true;
}

if ($_flag_create_table) {
    $db->write("DROP TABLE IF EXISTS summing_url_views_cityHash64_site_id");
    // SQL kept byte-identical to the original statement.
    $db->write(' CREATE TABLE IF NOT EXISTS summing_url_views_cityHash64_site_id ( event_date Date DEFAULT toDate(event_time), event_time DateTime, url_hash String, site_id Int32, views Int32, v_00 Int32, v_55 Int32 ) ENGINE = SummingMergeTree(event_date, cityHash64(site_id,event_time),(site_id, url_hash, event_time, event_date,cityHash64(site_id,event_time)), 8192) ');
    // BUG FIX: print_r() without $return = true echoes the dump itself and returns bool,
    // so the old code printed "Table EXISTS:1". Passing true returns the string instead.
    echo "Table EXISTS:" . print_r($db->showTables(), true) . "\n";

    // ------------------------------------------------------------------------------------------------------
    echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
    $file_data_names = ['/tmp/clickHouseDB_test.big.1.data', '/tmp/clickHouseDB_test.big.2.data', '/tmp/clickHouseDB_test.big.3.data'];
    $c = 0;
<?php
// Example: create a plain SummingMergeTree table (no hash in the key) and
// prepare big CSV fixture files for bulk insertion.
include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/lib_example.php';

$db = new ClickHouseDB\Client([
    'host'     => '192.168.1.20',
    'port'     => '8123',
    'username' => 'default',
    'password' => '',
]);

// tableSize() carries no 'size' key when the table does not exist yet.
$size = $db->tableSize('summing_url_views_big');
echo "Site table summing_url_views_big : " . ($size['size'] ?? 'false') . "\n";

// Re-create the table only when the probe above found nothing.
$needCreateTable = !isset($size['size']);

if ($needCreateTable) {
    $db->write("DROP TABLE IF EXISTS summing_url_views_big");
    // SQL kept byte-identical to the original statement.
    $db->write(' CREATE TABLE IF NOT EXISTS summing_url_views_big ( event_date Date DEFAULT toDate(event_time), event_time DateTime, url_hash String, site_id Int32, views Int32, v_00 Int32, v_55 Int32 ) ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192) ');
    echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";

    // ------------------------------------------------------------------------------------------------------
    echo "----------------------------------- CREATE big csv file -----------------------------------------------------------------\n";
    $file_data_names = ['/tmp/clickHouseDB_test.big.1.data', '/tmp/clickHouseDB_test.big.2.data', '/tmp/clickHouseDB_test.big.3.data'];
    $c = 0;