Code Example #1
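This example benchmarks bulk sample creation: it generates a day of random decimal samples, creates a scalar for each pass, and times the random-data generation, createScalar(), and per-sample createSample() phases separately.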
define('HASHMARK_CREATESAMPLES_TYPE', 'decimal');
define('HASHMARK_CREATESAMPLES_SCALARS', 1);
define('HASHMARK_CREATESAMPLES_COUNT', 10000);
define('HASHMARK_CREATESAMPLES_STARTDATE', '2009-01-01 00:00:00 UTC');
define('HASHMARK_CREATESAMPLES_ENDDATE', '2009-01-01 23:59:59 UTC');
// Connect to the unit-test database and load the Core and Partition modules.
$db = Hashmark::getModule('DbHelper')->openDb('unittest');
$core = Hashmark::getModule('Core', '', $db);
$partition = $core->getModule('Partition');
// Per-phase timing accumulators and the running sample count.
$rndSampleTime = 0;
$createScalarTime = 0;
$createSampleTime = 0;
$totalSampleCnt = 0;
$startDatetime = gmdate(HASHMARK_DATETIME_FORMAT);
for ($scalars = 0; $scalars < HASHMARK_CREATESAMPLES_SCALARS; $scalars++) {
    $start = microtime(true);
    $samples = hashmark_random_samples(HASHMARK_CREATESAMPLES_TYPE,
                                       HASHMARK_CREATESAMPLES_STARTDATE,
                                       HASHMARK_CREATESAMPLES_ENDDATE,
                                       HASHMARK_CREATESAMPLES_COUNT);
    $end = microtime(true);
    $rndSampleTime += $end - $start;
    $scalarFields = array('type' => HASHMARK_CREATESAMPLES_TYPE, 'name' => Hashmark_Util::randomSha1());
    $start = microtime(true);
    $scalarId = $core->createScalar($scalarFields);
    $end = microtime(true);
    $createScalarTime += $end - $start;
    $sampleCnt = count($samples);
    // Time only the per-sample insert loop below.
    $start = microtime(true);
    foreach ($samples as $timeData => $value) {
        // Each key encodes the sample time before the '=' delimiter.
        list($time) = explode('=', $timeData);
        $partition->createSample($scalarId, $value, $time);
    }
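    // The original example is truncated here; the lines below are a minimal
    // completion (our assumption, not part of the source) that finishes the
    // timing loop and reports totals.
    $end = microtime(true);
    $createSampleTime += $end - $start;
    $totalSampleCnt += $sampleCnt;
}
$endDatetime = gmdate(HASHMARK_DATETIME_FORMAT);
printf("Created %d samples between %s and %s\n", $totalSampleCnt, $startDatetime, $endDatetime);
printf("Timing: random data %.2fs, createScalar() %.2fs, createSample() %.2fs\n",
       $rndSampleTime, $createScalarTime, $createSampleTime);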
Code Example #2
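This example writes a SQL dump of random samples to a file: it reuses the base samples table definition, assigns scalar IDs manually, and buffers CREATE TABLE and INSERT statements for each interval partition.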
// Reuse the base samples table definition, stripping newlines and the original
// table name so it can be reapplied to each interval partition.
$baseTableDef = $partition->getPartitionDefinition(HASHMARK_DUMP_RANDOMSAMPLES_TYPE);
$tableDef = preg_replace('/\\n|CREATE TABLE `samples_' . HASHMARK_DUMP_RANDOMSAMPLES_TYPE . '`/', '', $baseTableDef);
// We will increment scalar IDs manually, seeded from the scalars table's AUTO_INCREMENT.
$info = $partition->getTableInfo('scalars');
$scalarId = $info['AUTO_INCREMENT'];
for ($scalars = 0; $scalars < HASHMARK_DUMP_RANDOMSAMPLES_SCALARS; $scalars++) {
    $sql = 'INSERT IGNORE INTO `scalars` (`id`, `type`) '
         . 'VALUES (' . $scalarId++ . ', \'' . HASHMARK_DUMP_RANDOMSAMPLES_TYPE . "');\n";
    // Append so statements from every scalar accumulate in the dump file.
    file_put_contents(HASHMARK_DUMP_RANDOMSAMPLES_FILE, $sql, FILE_APPEND);
    $createdTables = array();
    // Chunk random samples into sets to avoid exceeding the memory limit.
    $scalarSampleCnt = 0;
    while ($scalarSampleCnt < HASHMARK_DUMP_RANDOMSAMPLES_COUNT) {
        // Last parameter will sort $samples by date ascending.
        $samples = hashmark_random_samples(HASHMARK_DUMP_RANDOMSAMPLES_TYPE,
                                           HASHMARK_DUMP_RANDOMSAMPLES_STARTDATE,
                                           HASHMARK_DUMP_RANDOMSAMPLES_ENDDATE,
                                           min(HASHMARK_DUMP_RANDOMSAMPLES_RANDOM_SET_MAX,
                                               HASHMARK_DUMP_RANDOMSAMPLES_COUNT - $scalarSampleCnt),
                                           false, false, null, null, true);
        $scalarSampleCnt += count($samples);
        $buffer = '';
        $bufferSize = 0;
        foreach ($samples as $timeData => $value) {
            list($time) = explode('=', $timeData);
            $sampleDate = Hashmark_Util::toDatetime($time);
            if ('string' == HASHMARK_DUMP_RANDOMSAMPLES_TYPE) {
                $value = $partition->escape($value);
            }
            // Create partitions as needed based on sample date.
            $tableName = $partition->getIntervalTableName($scalarId, $sampleDate);
            if (!isset($createdTables[$tableName])) {
                $createdTables[$tableName] = 1;
                $buffer .= "CREATE TABLE IF NOT EXISTS `{$tableName}` {$tableDef} AUTO_INCREMENT=1;\n";
                $bufferSize++;
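            }
            // The original example is truncated above; the rest of these loop
            // bodies are a minimal sketch, not the source code. The
            // (`end`, `value`) column list is an assumed samples schema.
            $buffer .= "INSERT INTO `{$tableName}` (`end`, `value`) "
                     . "VALUES ('{$sampleDate}', {$value});\n";
            $bufferSize++;
        }
        // Flush this chunk's buffered statements to the dump file.
        file_put_contents(HASHMARK_DUMP_RANDOMSAMPLES_FILE, $buffer, FILE_APPEND);
    }
}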