exit;

// -------------------------------- CREATE csv file ----------------------------------------------------------------
// NOTE(review): this chunk was collapsed onto one physical line; the "//" banner above
// had swallowed every statement after it. Reconstructed as the intended multi-line script.

$file_data_names = ['/tmp/clickHouseDB_test.1.data', '/tmp/clickHouseDB_test.2.data'];

// Generate two sample data files to batch-insert below.
foreach ($file_data_names as $file_name) {
    makeSomeDataFile($file_name, 2);
}

// ----------------------------------------------------------------------------------------------------

echo "insert ALL file async:\n";
$time_start = microtime(true);

// Insert both files into summing_url_views in a single batch call.
$result_insert = $db->insertBatchFiles(
    'summing_url_views',
    $file_data_names,
    ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']
);

echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());

// ------------------------------------------------------------------------------------------------
// Stream a SELECT result straight into a local CSV file.
$WriteToFile = new ClickHouseDB\WriteToFile('/tmp/_1_select.csv');
$statement = $db->select('select * from summing_url_views', [], null, $WriteToFile);
print_r($statement->info());

// $db->selectAsync('select * from summing_url_views limit 4', [], null, new ClickHouseDB\WriteToFile('/tmp/_2_select.csv'));
$db->selectAsync('select * from summing_url_views limit 4', [], null, new ClickHouseDB\WriteToFile('/tmp/_3_select.tab', true, 'TabSeparatedWithNames'));
$db->selectAsync('select * from summing_url_views limit 4', [], null, new ClickHouseDB\WriteToFile('/tmp/_4_select.tab', true, 'TabSeparated'));
$statement = $db->selectAsync('select * from summing_url_views limit 54', [], null, new ClickHouseDB\WriteToFile('/tmp/_5_select.csv', true, ClickHouseDB\WriteToFile::FORMAT_CSV));

// Queued async selects are only sent once executeAsync() runs.
$db->executeAsync();
print_r($statement->info());
echo "END SELECT\n";

echo "TRY GZIP\n";

// Same streaming select, but gzip-compressed TabSeparatedWithNames output.
$WriteToFile = new ClickHouseDB\WriteToFile('/tmp/_0_select.csv.gz');
$WriteToFile->setFormat(ClickHouseDB\WriteToFile::FORMAT_TabSeparatedWithNames);
$WriteToFile->setGzip(true);

// cat /tmp/_0_select.csv.gz | gzip -dc > /tmp/w.result
$statement = $db->select('select * from summing_url_views', [], null, $WriteToFile);
print_r($statement->info());
// Conditional query-template demo: {if ...}/{else}/{/if} markers are expanded by the
// client when query conditions are enabled; :select_date is bound, {from_table}/{limit}
// are substituted.
// NOTE(review): reconstructed from a newline-collapsed chunk.
$db = new ClickHouseDB\Client($config);

$input_params = [
    'select_date' => ['2000-10-10', '2000-10-11', '2000-10-12'],
    'limit' => 5,
    'from_table' => 'table',
];

$db->enableQueryConditions();

$select = ' SELECT * FROM {from_table} WHERE {if select_date} event_date IN (:select_date) {else} event_date=today() {/if} {if limit} LIMIT {limit} {/if} ';

// With select_date set: the IN(...) branch plus LIMIT is rendered.
$statement = $db->selectAsync($select, $input_params);
echo $statement->sql();
echo "\n";
/*
SELECT * FROM table WHERE event_date IN ('2000-10-10','2000-10-11','2000-10-12') LIMIT 5 FORMAT JSON
*/

// With select_date falsy: the {else} branch (event_date=today()) is rendered instead.
$input_params['select_date'] = false;
$statement = $db->selectAsync($select, $input_params);
echo $statement->sql();
echo "\n";
/*
SELECT * FROM table WHERE event_date=today() LIMIT 5 FORMAT JSON
(expected output reconstructed — the comment was truncated in the source)
*/
// NOTE(review): the opening of this $db->write() call was lost when the file was
// collapsed; the CREATE TABLE head below is reconstructed from the column list used by
// insertBatchFiles() further down — TODO confirm against the original example file.
$db->write('
    CREATE TABLE IF NOT EXISTS summing_url_views (
        event_date Date DEFAULT toDate(event_time),
        event_time DateTime,
        url_hash String,
        site_id Int32,
        views Int32,
        v_00 Int32,
        v_55 Int32
    ) ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');

// Typo fixed: was "Table EXISTSs:".
echo "Table exists:" . json_encode($db->showTables()) . "\n";

// -------------------------------- CREATE csv file ----------------------------------------------------------------

$file_data_names = ['/tmp/clickHouseDB_test.1.data', '/tmp/clickHouseDB_test.2.data'];

foreach ($file_data_names as $file_name) {
    makeSomeDataFile($file_name, 1);
}

// ----------------------------------------------------------------------------------------------------

echo "insert ONE file:\n";
$time_start = microtime(true);

$version_test = 3;

// Version 1: calling insertBatchFiles() while async selects are still queued -> throws.
if ($version_test == 1) {
    $statselect1 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
    $statselect2 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
    $stat = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
    // 'Exception' with message 'Queue must be empty, before insertBatch,need executeAsync'
}
//
// Version 2: reading rows() from a queued async select before executeAsync() -> throws.
if ($version_test == 2) {
    $statselect1 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
    print_r($statselect1->rows());
    // 'Exception' with message 'Not have response'
}
// good
// Version 3: flush the async queue first, then batch-insert — the supported order.
// NOTE(review): the closing brace was missing from this truncated chunk; restored.
if ($version_test == 3) {
    $statselect2 = $db->selectAsync('SELECT * FROM summing_url_views LIMIT 1');
    $db->executeAsync();
    $stat = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
}
<?php
// Basic sync + async SELECT demo for the ClickHouseDB client.
// NOTE(review): reconstructed from a newline-collapsed chunk — in the collapsed form
// "//$db->verbose();" had commented out every statement after it on the line.

include_once __DIR__ . '/../include.php';

$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];

$db = new ClickHouseDB\Client($config);
//$db->verbose();
$db->settings()->readonly(false);

// Synchronous select: {key} is substituted into the SQL text, :value is bound.
$result = $db->select('SELECT 12 as {key} WHERE {key} = :value', ['key' => 'ping', 'value' => 12]);

if ($result->fetchOne('ping') != 12) {
    echo "Error : ? \n";
}

print_r($result->fetchOne());

// ---------------------------- ASYNC SELECT ----------------------------
// Queue two selects; nothing is sent to the server until executeAsync().
$state1 = $db->selectAsync('SELECT 1 as {key} WHERE {key} = :value', ['key' => 'ping', 'value' => 1]);
$state2 = $db->selectAsync('SELECT 2 as ping');
$db->executeAsync();

print_r($state1->fetchOne());
print_r($state1->rows());
print_r($state2->fetchOne('ping'));

//----------------------------------------//----------------------------------------
// NOTE(review): $sql, $whereIn, $bindings, $file_name_data1/2 are defined earlier in
// the original file (outside this chunk).
$result = $db->select($sql, [], $whereIn);
print_r($result->rows());

// ----------------------------------------------- ASYNC ------------------------------------------------------------------------------------------
echo "\n----------------------- ASYNC ------------ \n";

$sql = ' SELECT site_id, group, SUM(views) as views FROM aggr.summing_url_views WHERE event_date = today() AND site_id IN (SELECT site_id FROM namex) GROUP BY site_id, group ORDER BY views DESC LIMIT {limit} ';

$bindings['limit'] = 3;
$statements = [];

// First async select: attach the first CSV as the external table "namex".
$whereIn = new \ClickHouseDB\WhereInFile();
$whereIn->attachFile($file_name_data1, 'namex', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\WhereInFile::FORMAT_CSV);
$statements[0] = $db->selectAsync($sql, $bindings, $whereIn);

// change data file - for statement two
$whereIn = new \ClickHouseDB\WhereInFile();
$whereIn->attachFile($file_name_data2, 'namex', ['site_id' => 'Int32', 'site_hash' => 'String'], \ClickHouseDB\WhereInFile::FORMAT_CSV);
$statements[1] = $db->selectAsync($sql, $bindings, $whereIn);

// Both queued selects are sent together; each carries its own external file.
$db->executeAsync();

foreach ($statements as $statement) {
    print_r($statement->rows());
}

/*
(translated from Russian; the comment was left unterminated in the source)

Do not enumerate too large a number of values (millions) explicitly. If the set is
large, it is better to load it into a temporary table (for example, see the section
"External data for query processing") and then use a subquery.

External data for query processing:
When the HTTP interface is used, external data is passed in multipart/form-data
format. Each table is sent as a separate file, and the table name is taken from the
file name. The query_string carries the parameters name_format, name_types and
name_structure, where "name" is the table these parameters belong to. The meaning of
the parameters is the same as when using the command-line client.
*/