<?php

include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/lib_example.php';
/**
 * Example query "degeneration": a pre-processor applied to the SQL text
 * before it is sent to ClickHouse (registered via addQueryDegeneration()).
 *
 * process() performs two case-insensitive rewrites:
 *  - every bound key K replaces its %K% placeholder in the SQL;
 *  - the literal token "XXXX" is replaced with "SELECT".
 */
class CustomDegeneration implements \ClickHouseDB\Query\Degeneration
{
    /** @var array<string, string> placeholder name => replacement value */
    private $bindings = [];

    /**
     * Store the bindings that process() will substitute into the SQL.
     *
     * @param array $bindings map of placeholder name => replacement value
     */
    public function bindParams(array $bindings)
    {
        $this->bindings = $bindings;
    }

    /**
     * Apply the placeholder and XXXX->SELECT rewrites to the SQL string.
     *
     * @param string $sql raw SQL, possibly containing %key% placeholders and "XXXX"
     * @return string rewritten SQL
     */
    public function process($sql)
    {
        // count() is the canonical spelling; sizeof() is only a legacy alias.
        if (count($this->bindings) > 0) {
            foreach ($this->bindings as $key => $value) {
                // Case-insensitive substitution of the %key% placeholder.
                $sql = str_ireplace('%' . $key . '%', $value, $sql);
            }
        }
        // Demo rewrite: lets queries be written as "XXXX 1 as ping".
        return str_ireplace('XXXX', 'SELECT', $sql);
    }
}
// Connection settings for the example ClickHouse server.
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$db = new ClickHouseDB\Client($config);
// Sanity check: a trivial round-trip SELECT.
print_r($db->select('SELECT 1 as ping')->fetchOne());
// CustomConditions
$db->addQueryDegeneration(new CustomDegeneration());
// strreplace XXXX=>SELECT
print_r($db->select('XXXX 1 as ping')->fetchOne());
// SELECT 1 as ping
// Both rewrites at once: XXXX -> SELECT and %ZX% -> ping (see CustomDegeneration).
print_r($db->select('XXXX 1 as %ZX%', ['ZX' => 'ping'])->fetchOne());
// NOTE(review): truncated fragment — the "');" below closes a multi-line SQL
// string (and the trailing "}" closes a block) whose openings lie outside the
// visible chunk.
    ');
    echo "Table EXISTS:" . json_encode($db->showTables()) . "\n";
    echo "----------------------------------- CREATE csv file -----------------------------------------------------------------\n";
    // Generate three CSV data files (helper from lib_example.php).
    $file_data_names = ['/tmp/clickHouseDB_test.part.1.data', '/tmp/clickHouseDB_test.part.2.data', '/tmp/clickHouseDB_test.part.3.data'];
    $c = 0;
    foreach ($file_data_names as $file_name) {
        $c++;
        makeSomeDataFileBigOldDates($file_name, $c);
    }
    echo "--------------------------------------- insert -------------------------------------------------------------\n";
    echo "insert ALL file async + GZIP:\n";
    // Upload all files in one async batch with HTTP compression enabled.
    $db->enableHttpCompression(true);
    $time_start = microtime(true);
    $result_insert = $db->insertBatchFiles('summing_partions_views', $file_data_names, ['event_time', 'site_id', 'hash_id', 'views']);
    echo "use time:" . round(microtime(true) - $time_start, 2) . " sec.\n";
    // Per-file upload statistics.
    foreach ($result_insert as $fileName => $state) {
        echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
    }
}
echo "--------------------------------------- select -------------------------------------------------------------\n";
print_r($db->select('select min(event_date),max(event_date) from summing_partions_views ')->rows());
echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
echo "tableSize    : " . json_encode($db->tableSize('summing_partions_views')) . "\n";
echo "partitions    : " . json_encode($db->partitions('summing_partions_views', 2)) . "\n";
echo "--------------------------------------- drop partitions -------------------------------------------------------------\n";
// Drop partitions older than 30 days, then re-list sizes to show the effect.
echo "dropOldPartitions -30 days    : " . json_encode($db->dropOldPartitions('summing_partions_views', 30)) . "\n";
echo "--------------------------------------- list partitions -------------------------------------------------------------\n";
echo "databaseSize : " . json_encode($db->databaseSize()) . "\n";
echo "tableSize    : " . json_encode($db->tableSize('summing_partions_views')) . "\n";
echo "partitions    : " . json_encode($db->partitions('summing_partions_views', 2)) . "\n";
// Example #3
// 0
// Check the connection to the database
$client->ping();
// Create the schema: database + events table
$client->write('CREATE DATABASE IF NOT EXISTS articles');
$client->write('DROP TABLE IF EXISTS articles.events');
$client->write("\n    CREATE TABLE articles.events (\n        event_date Date DEFAULT toDate(event_time),\n        event_time DateTime,\n        event_type Enum8('VIEWS' = 1, 'CLICKS' = 2),\n        site_id Int32,\n        article_id Int32,\n        ip String,\n        city String,\n        user_uuid String,\n        referer String,\n        utm String DEFAULT extractURLParameter(referer, 'utm_campaign')\n    ) ENGINE = MergeTree(event_date, (site_id, event_type, article_id), 8192)\n");
// Select the default database
$client->database('articles');
// Get the list of tables
print_r($client->showTables());
// Force the timezone for simplicity
date_default_timezone_set('Europe/Moscow');
// Simple data insert: `$db->insert(table_name, [rows], [columns]);`
$client->insert('events', [[time(), 'CLICKS', 1, 1234, '192.168.1.11', 'Moscow', 'user_11', ''], [time(), 'CLICKS', 1, 1235, '192.168.1.11', 'Moscow', 'user_11', 'http://yandex.ru?utm_campaign=abc'], [time(), 'CLICKS', 1, 1236, '192.168.1.11', 'Moscow', 'user_11', 'http://smi2.ru?utm_campaign=abc'], [time(), 'CLICKS', 1, 1237, '192.168.1.11', 'Moscow', 'user_11', ''], [time(), 'CLICKS', 1, 1237, '192.168.1.13', 'Moscow', 'user_13', ''], [time(), 'CLICKS', 1, 1237, '192.168.1.14', 'Moscow', 'user_14', ''], [time(), 'VIEWS', 1, 1237, '192.168.1.11', 'Moscow', 'user_11', ''], [time(), 'VIEWS', 1, 1237, '192.168.1.12', 'Moscow', 'user_12', ''], [time(), 'VIEWS', 1, 1237, '192.168.1.1', 'Rwanda', 'user_55', 'http://smi2.ru?utm_campaign=abc'], [time(), 'VIEWS', 1, 1237, '192.168.1.1', 'Banaadir', 'user_54', 'http://smi2.ru?utm_campaign=abc'], [time(), 'VIEWS', 1, 1237, '192.168.1.1', 'Tobruk', 'user_32', 'http://smi2.ru?utm_campaign=CM1'], [time(), 'VIEWS', 1, 1237, '192.168.1.1', 'Gisborne', 'user_12', 'http://smi2.ru?utm_campaign=CM1'], [time(), 'VIEWS', 1, 1237, '192.168.1.1', 'Moscow', 'user_43', 'http://smi2.ru?utm_campaign=CM3']], ['event_time', 'event_type', 'site_id', 'article_id', 'ip', 'city', 'user_uuid', 'referer']);
// Fetch back what was just inserted
print_r($client->select('SELECT * FROM events')->rows());
// E.g.: count how many unique users viewed per day
print_r($client->select('
        SELECT 
            event_date, 
            uniqCombined(user_uuid) as count_users 
        FROM 
            events 
        WHERE 
            site_id=1    
        GROUP BY 
            event_date 
        ORDER BY 
            event_date
    ')->rows());
// How many users both viewed and clicked
// NOTE(review): truncated fragment — the statement this ENGINE clause belongs
// to is outside the visible chunk.
    ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTS: " . json_encode($db->showTables()) . "\n";
echo $db->showCreateTable('summing_url_views');
exit;
// --------------------------------  CREATE csv file ----------------------------------------------------------------
$file_data_names = ['/tmp/clickHouseDB_test.1.data', '/tmp/clickHouseDB_test.2.data'];
foreach ($file_data_names as $file_name) {
    makeSomeDataFile($file_name, 2);
}
// ----------------------------------------------------------------------------------------------------
// Async batch insert of both files, with timing.
echo "insert ALL file async:\n";
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
// ------------------------------------------------------------------------------------------------
// Stream a SELECT result straight into a local file instead of PHP memory.
$WriteToFile = new ClickHouseDB\WriteToFile('/tmp/_1_select.csv');
$statement = $db->select('select * from summing_url_views', [], null, $WriteToFile);
print_r($statement->info());
//
// Several async selects into files with different output formats.
$db->selectAsync('select * from summing_url_views limit 4', [], null, new ClickHouseDB\WriteToFile('/tmp/_2_select.csv'));
$db->selectAsync('select * from summing_url_views limit 4', [], null, new ClickHouseDB\WriteToFile('/tmp/_3_select.tab', true, 'TabSeparatedWithNames'));
$db->selectAsync('select * from summing_url_views limit 4', [], null, new ClickHouseDB\WriteToFile('/tmp/_4_select.tab', true, 'TabSeparated'));
$statement = $db->selectAsync('select * from summing_url_views limit 54', [], null, new ClickHouseDB\WriteToFile('/tmp/_5_select.csv', true, ClickHouseDB\WriteToFile::FORMAT_CSV));
$db->executeAsync();
print_r($statement->info());
echo "END SELECT\n";
echo "TRY GZIP\n";
// Write a gzip-compressed result file.
$WriteToFile = new ClickHouseDB\WriteToFile('/tmp/_0_select.csv.gz');
$WriteToFile->setFormat(ClickHouseDB\WriteToFile::FORMAT_TabSeparatedWithNames);
// NOTE(review): truncated fragment — the ")" / ENGINE lines below are the tail
// of a CREATE TABLE string whose opening is outside the visible chunk.
    ) 
    ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo "Table EXISTS: " . json_encode($db->showTables()) . "\n";
// --------------------------------  CREATE csv file ----------------------------------------------------------------
// ----------------------------------------------------------------------------------------------------
$file_data_names = ['/tmp/clickHouseDB_test.1.data', '/tmp/clickHouseDB_test.2.data', '/tmp/clickHouseDB_test.3.data', '/tmp/clickHouseDB_test.4.data', '/tmp/clickHouseDB_test.5.data'];
foreach ($file_data_names as $file_name) {
    makeSomeDataFile($file_name, 5);
}
// ----------------------------------------------------------------------------------------------------
// First: insert a single file, with timing.
echo "insert ONE file:\n";
$time_start = microtime(true);
$stat = $db->insertBatchFiles('summing_url_views', ['/tmp/clickHouseDB_test.1.data'], ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
// Then: insert all five files as one async batch, with timing.
echo "insert ALL file async:\n";
$time_start = microtime(true);
$result_insert = $db->insertBatchFiles('summing_url_views', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
print_r($db->select('select sum(views) from summing_url_views')->rows());
// ------------------------------------------------------------------------------------------------
// Per-file request timing.
foreach ($file_data_names as $fileName) {
    echo $fileName . " : " . $result_insert[$fileName]->totalTimeRequest() . "\n";
}
// ------------------------------------------------------------------------------------------------
/*
Table EXISTSs:[{"name":"summing_url_views"}]
Created file  [/tmp/clickHouseDB_test.1.data]: 22200 rows...
Created file  [/tmp/clickHouseDB_test.2.data]: 22200 rows...
Created file  [/tmp/clickHouseDB_test.3.data]: 22200 rows...
        $c++;
        $shift_days = -1 * $c * 3;
        makeSomeDataFileBig($file_name, 4 * $c, $shift_days);
    }
    echo "----------------------------------------------------------------------------------------------------\n";
    echo "insert ALL file async + GZIP:\n";
    $db->enableHttpCompression(true);
    $time_start = microtime(true);
    $result_insert = $db->insertBatchFiles('summing_url_views_intHash32_site_id', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
    echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
    foreach ($result_insert as $fileName => $state) {
        echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
    }
}
echo "------------------------------- COMPARE event_date ---------------------------------------------------------------------\n";
// Compare the exact aggregate with a 50% SAMPLE, scaled back up by 1/0.5.
$rows = $db->select('select event_date,sum(views) as v from summing_url_views_intHash32_site_id GROUP BY event_date ORDER BY event_date')->rowsAsTree('event_date');
$samp = $db->select('select event_date,sum(views) as v from summing_url_views_intHash32_site_id SAMPLE 0.5 GROUP BY event_date ORDER BY event_date ')->rowsAsTree('event_date');
foreach ($rows as $event_date => $data) {
    echo $event_date . "\t" . $data['v'] . "\t" . @$samp[$event_date]['v'] * (1 / 0.5) . "\n";
}
$rows = $db->select('select site_id,sum(views) as v from summing_url_views_intHash32_site_id GROUP BY site_id ORDER BY site_id')->rowsAsTree('site_id');
$samp = $db->select('select site_id,(sum(views)) as v from summing_url_views_intHash32_site_id SAMPLE 0.5 GROUP BY site_id ORDER BY site_id ')->rowsAsTree('site_id');
foreach ($rows as $event_date => $data) {
    echo $event_date . "\t" . $data['v'] . "\t" . intval(@$samp[$event_date]['v']) . "\n";
}
/*

When we sample data by the intHash32(site_id) key and fetch it GROUP BY site_id,
the per-site_id sum of views is exact, but only the configured percentage
of rows is actually selected.

select site_id,(sum(views)) as v from summing_url_views_intHash32_site_id SAMPLE 0.1 GROUP BY site_id ORDER BY site_id
        $c++;
        $shift_days = -1 * $c * 3;
        makeSomeDataFileBig($file_name, 23 * $c, $shift_days);
    }
    echo "----------------------------------------------------------------------------------------------------\n";
    echo "insert ALL file async + GZIP:\n";
    $db->enableHttpCompression(true);
    $time_start = microtime(true);
    $result_insert = $db->insertBatchFiles('summing_url_views_cityHash64_site_id', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
    echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
    foreach ($result_insert as $fileName => $state) {
        echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
    }
}
echo "------------------------------- COMPARE event_date ---------------------------------------------------------------------\n";
// Compare the exact aggregate with a 10% SAMPLE scaled back up by *10.
$rows = $db->select('select event_date,sum(views) as v from summing_url_views_cityHash64_site_id GROUP BY event_date ORDER BY event_date')->rowsAsTree('event_date');
$samp = $db->select('select event_date,(sum(views)*10) as v from summing_url_views_cityHash64_site_id SAMPLE 0.1 GROUP BY event_date ORDER BY event_date ')->rowsAsTree('event_date');
foreach ($rows as $event_date => $data) {
    echo $event_date . "\t" . $data['v'] . "\t" . @$samp[$event_date]['v'] . "\n";
}
$rows = $db->select('select site_id,sum(views) as v from summing_url_views_cityHash64_site_id GROUP BY site_id ORDER BY site_id')->rowsAsTree('site_id');
$samp = $db->select('select site_id,(sum(views)) as v from summing_url_views_cityHash64_site_id SAMPLE 0.5 GROUP BY site_id ORDER BY site_id ')->rowsAsTree('site_id');
foreach ($rows as $event_date => $data) {
    echo $event_date . "\t" . $data['v'] . "\t" . intval(@$samp[$event_date]['v']) . "\n";
}
// Sweep SAMPLE from 0.1 to 0.9 for a single site.
for ($f = 1; $f <= 9; $f++) {
    $SAMPLE = $f / 10;
    $CQL = 'select site_id,(sum(views)) as v from summing_url_views_cityHash64_site_id SAMPLE ' . $SAMPLE . ' WHERE site_id=34 GROUP BY site_id ORDER BY site_id ';
    echo $CQL . "\n";
    $rows = $db->select('select site_id,sum(views) as v from summing_url_views_cityHash64_site_id WHERE site_id=34 GROUP BY site_id ORDER BY site_id')->rowsAsTree('site_id');
    $samp = $db->select($CQL)->rowsAsTree('site_id');
    // NOTE(review): truncated fragment — the remainder of this loop body is
    // outside the visible chunk.
<?php

include_once __DIR__ . '/../include.php';
// Example: enable server-side query logging and read back system.query_log.
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$db = new ClickHouseDB\Client($config);
// Log every query on the server and compress HTTP traffic.
$db->enableLogQueries()->enableHttpCompression();
//----------------------------------------
print_r($db->select('SELECT * FROM system.query_log')->rows());
//----------------------------------------
<?php

include_once __DIR__ . '/../include.php';
// Example: query bindings and per-query statistics.
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$db = new ClickHouseDB\Client($config);
//$db->verbose();
$db->settings()->readonly(false);
// Two binding styles in one query: {key} and :value placeholders.
$result = $db->select('SELECT 12 as {key} WHERE {key} = :value', ['key' => 'ping', 'value' => 12]);
if ($result->fetchOne('ping') != 12) {
    echo "Error : ? \n";
}
print_r($result->fetchOne());
// Statistics reported by ClickHouse for the executed query.
echo 'elapsed   :' . $result->statistics('elapsed') . "\n";
echo 'rows_read :' . $result->statistics('rows_read') . "\n";
echo 'bytes_read:' . $result->statistics('bytes_read') . "\n";
//
$result = $db->select("SELECT 12 as ping");
print_r($result->statistics());
/*
	"statistics":
	{
		"elapsed": 0.000029702,
		"rows_read": 1,
		"bytes_read": 1
	}
*/
// Example #10
// 0
    // NOTE(review): truncated fragment — the block this code sits in is opened
    // outside the visible chunk (note the closing "}" below).
    $file_data_names = ['/tmp/clickHouseDB_test.big.1.data', '/tmp/clickHouseDB_test.big.2.data', '/tmp/clickHouseDB_test.big.3.data'];
    $c = 0;
    foreach ($file_data_names as $file_name) {
        $c++;
        makeSomeDataFileBig($file_name, 40 * $c);
    }
    echo "----------------------------------------------------------------------------------------------------\n";
    echo "insert ALL file async + GZIP:\n";
    $db->enableHttpCompression(true);
    $time_start = microtime(true);
    $result_insert = $db->insertBatchFiles('summing_url_views_big', $file_data_names, ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
    echo "use time:" . round(microtime(true) - $time_start, 2) . "\n";
    // Per-file upload statistics.
    foreach ($result_insert as $fileName => $state) {
        echo "{$fileName} => " . json_encode($state->info_upload()) . "\n";
    }
    print_r($db->select('select sum(views) from summing_url_views_big')->rows());
    echo "----------------------------------------------------------------------------------------------------\n";
}
// Measure PHP memory behaviour of large result sets.
echo "php_ini.memory_limit = " . ini_get("memory_limit") . "\n";
ini_set("memory_limit", "1256M");
echo "php_ini.memory_limit = " . ini_get("memory_limit") . "\n";
memoryUsage::show();
$sql = 'select * from summing_url_views_big LIMIT 50000';
echo ">>> {$sql}\n";
$db->select($sql);
memoryUsage::show();
$rows = $db->select($sql)->rows();
memoryUsage::show('select rows');
unset($rows);
memoryUsage::show('unset rows ');
// NOTE(review): rawData() presumably returns the decoded response without
// per-row conversion — confirm against the library docs.
$rows = $db->select($sql)->rawData();
    // NOTE(review): truncated fragment — the loop this body belongs to is
    // opened outside the visible chunk (note the closing "}" below).
    $client->database('articles');
    $client->enableHttpCompression(true);
    echo "\n> insertBatchFiles....\n";
    $result_insert = $client->insertBatchTSVFiles('events', [$fileName], ['event_date', 'event_time', 'event_type', 'site_id', 'article_id', 'ip', 'city', 'user_uuid', 'referer', 'utm']);
    echo "insert done\n";
    echo $fileName . " : " . $result_insert[$fileName]->totalTimeRequest() . "\n";
}
$client->database('articles');
// E.g.: count how many unique users viewed per day
print_r($client->select('
        SELECT
            event_date,
            uniqCombined(user_uuid) as count_users
        FROM
            events
        WHERE
            site_id=1
        GROUP BY
            event_date
        ORDER BY
            event_date
        LIMIT 4
    ')->rows());
//
//
// How many users both viewed and clicked
print_r($client->select("\n        SELECT\n            user_uuid,\n            count() as clicks\n        FROM\n            articles.events\n        WHERE\n            event_type IN ( 'CLICKS' )\n            AND site_id = 1\n            AND user_uuid IN  (\n                SELECT\n                    user_uuid\n                FROM\n                    articles.events\n                WHERE\n                    event_type IN ( 'VIEWS' ) AND site_id = 1\n                GROUP BY\n                    user_uuid\n            )\n        GROUP BY user_uuid\n        LIMIT 5\n    ")->rows());
//
//
// Which UTM tags produced a large number of views and clicks
print_r($client->select("\n        SELECT\n            utm,\n            countIf(event_type IN('VIEWS')) as views,\n            countIf(event_type IN('CLICKS')) as clicks\n        FROM\n            events\n        WHERE\n            event_date = today()\n            AND site_id = 1\n        GROUP BY\n            utm\n        ORDER BY\n            views DESC\n        LIMIT 15\n    ")->rows());
// Example #12
// 0
// Configuration
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$client = new \ClickHouseDB\Client($config);
// Check the connection to the database
$client->ping();
// Create the schema: database + events table
$client->write('CREATE DATABASE IF NOT EXISTS articles');
$client->write('DROP TABLE IF EXISTS articles.events');
$client->write("\n    CREATE TABLE articles.events (\n        event_date Date DEFAULT toDate(event_time),\n        event_time DateTime,\n        event_type Enum8('VIEWS' = 1, 'CLICKS' = 2),\n        site_id Int32,\n        article_id Int32,\n        ip String,\n        city String,\n        user_uuid String,\n        referer String,\n        utm String DEFAULT extractURLParameter(referer, 'utm_campaign')\n    ) ENGINE = MergeTree(event_date, (site_id, event_type, article_id), 8192)\n");
// Select the default database
$client->database('articles');
// Get the list of tables
print_r($client->showTables());
// Force the timezone for simplicity
date_default_timezone_set('Europe/Moscow');
// Simple data insert: `$db->insert(table_name, [rows], [columns]);`
$client->insert('events', [[time(), 'CLICKS', 1, 1234, '192.168.1.11', 'Moscow', 'user_11', ''], [time(), 'CLICKS', 1, 1235, '192.168.1.11', 'Moscow', 'user_11', 'http://yandex.ru?utm_campaign=abc'], [time(), 'CLICKS', 1, 1236, '192.168.1.11', 'Moscow', 'user_11', 'http://smi2.ru?utm_campaign=abc'], [time(), 'CLICKS', 1, 1237, '192.168.1.11', 'Moscow', 'user_11', ''], [time(), 'CLICKS', 1, 1237, '192.168.1.13', 'Moscow', 'user_13', ''], [time(), 'CLICKS', 1, 1237, '192.168.1.14', 'Moscow', 'user_14', ''], [time(), 'VIEWS', 1, 1237, '192.168.1.11', 'Moscow', 'user_11', ''], [time(), 'VIEWS', 1, 1237, '192.168.1.12', 'Moscow', 'user_12', ''], [time(), 'VIEWS', 1, 27, '192.168.1.1', 'Rwanda', 'user_55', 'http://smi2.ru?utm_campaign=abc'], [time(), 'VIEWS', 1, 27, '192.168.1.1', 'Banaadir', 'user_54', 'http://smi2.ru?utm_campaign=abc'], [time(), 'VIEWS', 1, 27, '192.168.1.1', 'Tobruk', 'user_32', 'http://smi2.ru?utm_campaign=CM1'], [time(), 'VIEWS', 1, 28, '192.168.1.1', 'Gisborne', 'user_12', 'http://smi2.ru?utm_campaign=CM1'], [time(), 'VIEWS', 1, 26, '192.168.1.1', 'Moscow', 'user_43', 'http://smi2.ru?utm_campaign=CM3']], ['event_time', 'event_type', 'site_id', 'article_id', 'ip', 'city', 'user_uuid', 'referer']);
print_r($client->select('SELECT count() as count_rows FROM articles.events')->fetchOne());
// For WHERE IN: build a one-column CSV file of article ids to attach as
// external data (see the WhereInFile usage below in the original examples).
$articleIds = [1237, 27, 1234];
$csv = fopen('/tmp/articles_list.csv', 'w');
foreach ($articleIds as $articleId) {
    fputcsv($csv, [$articleId]);
}
fclose($csv);
//
// Attach the CSV as an external temporary table "namex" with one Int32 column.
$whereIn = new \ClickHouseDB\WhereInFile();
$whereIn->attachFile('/tmp/articles_list.csv', 'namex', ['article_id' => 'Int32'], \ClickHouseDB\WhereInFile::FORMAT_CSV);
//
// The query can then select from "namex" as if it were a real table.
$sql = "\n    SELECT \n        article_id, \n        countIf(event_type = 'CLICKS') as count_clicks, \n        countIf(event_type = 'VIEWS') as count_views \n    FROM \n        articles.events\n    WHERE \n        article_id IN (SELECT article_id FROM namex)\n    GROUP BY \n        article_id\n    ORDER BY \n        count_views DESC\n";
$statement = $client->select($sql, [], $whereIn);
print_r($statement->rows());
// Example #13
// 0
include_once __DIR__ . '/../include.php';
// Example: Array column types.
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$db = new ClickHouseDB\Client($config);
$db->write("DROP TABLE IF EXISTS arrays_test");
$res = $db->write('
    CREATE TABLE IF NOT EXISTS arrays_test (
        s_key String,
        s_arr Array(UInt8)
    ) ENGINE = Memory
');
//------------------------------------------------------------------------------
echo "Insert\n";
// Insert PHP arrays directly into an Array(UInt8) column.
$stat = $db->insert('arrays_test', [['HASH1', [11, 22, 33]], ['HASH1', [11, 22, 55]]], ['s_key', 's_arr']);
echo "Insert Done\n";
// Read the rows back via ARRAY JOIN on s_arr.
print_r($db->select('SELECT s_key, s_arr FROM arrays_test ARRAY JOIN s_arr')->rows());
$db->write("DROP TABLE IF EXISTS arrays_test_string");
$res = $db->write('
    CREATE TABLE IF NOT EXISTS arrays_test_string (
        s_key String,
        s_arr Array(String)
    ) ENGINE = Memory
');
echo "Insert\n";
// String arrays, including a value containing quote and tab characters.
$stat = $db->insert('arrays_test_string', [['HASH1', ["a", "dddd", "xxx"]], ['HASH1', ["b'\tx"]]], ['s_key', 's_arr']);
echo "Insert Done\n";
print_r($db->select('SELECT s_key, s_arr FROM arrays_test_string ARRAY JOIN s_arr')->rows());
echo "\ntestRFCCSVWrite>>>>\n";
$fileName = '/tmp/testRFCCSVWrite.CSV';
date_default_timezone_set('Europe/Moscow');
$db->write("DROP TABLE IF EXISTS testRFCCSVWrite");
// Example #14
// 0
        // NOTE(review): truncated fragment — the column lines below are the
        // tail of a CREATE TABLE statement whose opening is outside the
        // visible chunk.
        v_00 Int32,
        v_55 Int32
    ) 
    ENGINE = SummingMergeTree(event_date, (site_id, url_hash, event_time, event_date), 8192)
');
echo 'Table EXISTS: ' . json_encode($db->showTables()) . PHP_EOL;
/*
Table EXISTS: [{"name": "summing_url_views"}]
*/
//------------------------------------------------------------------------------
echo "Insert\n";
$stat = $db->insert('summing_url_views', [[time(), 'HASH1', 2345, 22, 20, 2], [time(), 'HASH2', 2345, 12, 9, 3], [time(), 'HASH3', 5345, 33, 33, 0], [time(), 'HASH3', 5345, 55, 0, 55]], ['event_time', 'url_hash', 'site_id', 'views', 'v_00', 'v_55']);
echo "Insert Done\n";
//------------------------------------------------------------------------------
echo "Try select \n";
$st = $db->select('SELECT * FROM summing_url_views LIMIT 2');
// NOTE(review): count() presumably reports rows in this result and countAll()
// the rows-before-limit figure — confirm against the Statement API docs.
echo "Count select rows:" . $st->count() . "\n";
echo "Count all rows:" . $st->countAll() . "\n";
echo "First row:\n";
print_r($st->fetchOne());
echo "extremes_min:\n";
print_r($st->extremesMin());
echo "totals:\n";
print_r($st->totals());
// WITH TOTALS adds an aggregate "totals" row retrievable via totals().
$st = $db->select('SELECT event_date,url_hash,sum(views),avg(views) FROM summing_url_views WHERE site_id<3333 GROUP BY event_date,url_hash WITH TOTALS');
echo "Count select rows:" . $st->count() . "\n";
/*
2
*/
echo "Count all rows:" . $st->countAll() . "\n";
/*
// Example #15
// 0
// Example: connection and query error handling.
// Unreachable host: ping() is expected to throw QueryException.
$config = ['host' => 'x', 'port' => '8123', 'username' => 'x', 'password' => 'x'];
$db = new ClickHouseDB\Client($config);
try {
    $db->ping();
} catch (ClickHouseDB\QueryException $E) {
    echo "ERROR:" . $E->getMessage() . "\nOK\n";
}
// ------------------
// Reachable host, bad credentials.
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'x', 'password' => 'x'];
$db = new ClickHouseDB\Client($config);
try {
    $db->ping();
} catch (ClickHouseDB\QueryException $E) {
    echo "ERROR:" . $E->getMessage() . "\nOK\n";
}
// ------------------
// Valid connection: ping() succeeds.
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$db = new ClickHouseDB\Client($config);
try {
    $db->ping();
    echo "PING : OK!\n";
} catch (ClickHouseDB\QueryException $E) {
    echo "ERROR:" . $E->getMessage() . "\nOK\n";
}
// Querying a missing table is expected to raise DatabaseException.
try {
    $db->select("SELECT xxx as PPPP FROM ZZZZZ ")->rows();
} catch (ClickHouseDB\DatabaseException $E) {
    echo "ERROR : DatabaseException : " . $E->getMessage() . "\n";
    // Table default.ZZZZZ doesn't exist.
}
// ----------------------------
// Example #16
// 0
<?php

include_once __DIR__ . '/../include.php';
include_once __DIR__ . '/lib_example.php';
include_once __DIR__ . '/../../_clickhouse_config_product.php';
$config = ['host' => '192.168.1.20', 'port' => '8123', 'username' => 'default', 'password' => ''];
$start_time = microtime(true);
$db = new ClickHouseDB\Client($config);
// Work against the "aggr" database.
$db->database('aggr');
print_r($db->select('
    SELECT event_date, site_id, group, SUM(views) as views FROM aggr.summing_url_views
    WHERE event_date = today() AND site_id = 14776
    GROUP BY event_date, site_id, group
    ORDER BY views DESC
    LIMIT 3
')->rows());
// Query that filters site_id through two external-data tables (namex, site_keys);
// the files backing them are prepared below.
$sql = '
    SELECT site_id, group, SUM(views) as views FROM aggr.summing_url_views
    WHERE 
       event_date = today() 
       AND ( 
          site_id IN (SELECT site_id FROM namex)
          OR 
          site_id IN (SELECT site_id FROM site_keys)
       )
    GROUP BY site_id, group
    ORDER BY views DESC
    LIMIT 5
';
// some file names to data
$file_name_data1 = "/tmp/temp_csv.txt";