$igbinary_pack += $profiling[1]['diff']; $igbinary_unpack += $profiling[2]['diff']; $igbinary_size += strlen($pack); if ($unpack === $value || is_object($value) && $unpack == $value) { $igbinary_status = 'OK'; } } //msgpack $pack = null; $unpack = null; $t = new Benchmark_Timer(); $t->start(); for ($i = 0; $i < $loop; $i++) { $pack = msgpack_serialize($value); } $t->setMarker('msgpack_serialize'); for ($i = 0; $i < $loop; $i++) { $unpack = msgpack_unserialize($pack); } $t->stop(); //$t->display(); $profiling = $t->getProfiling(); unset($t); $msgpack_pack += $profiling[1]['diff']; $msgpack_unpack += $profiling[2]['diff']; $msgpack_size += strlen($pack); if ($unpack === $value || is_object($value) && $unpack == $value) { $msgpack_status = 'OK'; } } $serialize_pack /= $retry;
<?php require 'Benchmark/Timer.php'; $timer = new Benchmark_Timer(); $mysql = new mysqli("localhost", "testuser", "testpass", "test"); $points = array(1000, 10000, 100000, 500000, 1000000); foreach ($points as $val) { flush_mysql(); $startmark = $val . ' start'; $stopmark = $val . ' stop'; $timer->setMarker($startmark); cycle($val); $timer->setMarker($stopmark); print $val . ': ' . $timer->timeElapsed($startmark, $stopmark) . "\n"; } $mysql->close(); function flush_mysql() { global $mysql; $mysql->query("DROP TABLE IF EXISTS `php_insert_bm`"); $mysql->query("DROP TABLE IF EXISTS `php_insert_bm_coords`"); $mysql->query("CREATE TABLE `php_insert_bm_coords`(\n `id` int unsigned not null auto_increment,\n `x` int not null,\n `y` int not null,\n PRIMARY KEY (`id`)\n )"); $mysql->query("CREATE TABLE `php_insert_bm`(\n `id` int unsigned not null auto_increment,\n `text` varchar(255) not null,\n `count` int not null,\n `coords_id` int unsigned not null,\n PRIMARY KEY (`id`)\n )"); # empty init entries $mysql->query("INSERT php_insert_bm_coords(x,y) VALUES(0, 0)"); $mysql->query("INSERT php_insert_bm(text, count, coords_id) VALUES('', 0, " . $mysql->insert_id . ')'); } function cycle($c) { global $mysql; for ($i = 0; $i < $c; $i++) {
<title>ベンチマークを取得したい(1)</title> </head> <body> <div> <?php
// Include PEAR::Benchmark_Timer
require_once 'Benchmark/Timer.php';
// Create the timer object with the new operator
$timer = new Benchmark_Timer();
// Start the measurement
$timer->start();
// Measurement target (1): run the sha1() function 1000 times
for ($i = 0; $i < 1000; $i++) {
    sha1($i);
}
// Set a marker after the first phase (marker label is runtime output, left unchanged)
$timer->setMarker('sha1()関数を1000回位実行');
// Measurement target (2): run the md5() function 1000 times
for ($i = 0; $i < 1000; $i++) {
    md5($i);
}
$timer->setMarker('md5()関数を1000回実行');
// Stop the measurement and display the results
$timer->stop();
$timer->display();
?> </div> </body> </html>
include_once dirname(__FILE__) . '/../code/PEG.php'; include_once 'Benchmark/Timer.php'; /** * メモ化の有無での処理時間の差を見るサンプル * ここではメモ化するのとしないのとでは著しく違いが出る文法規則を元にパーサを組み立てている */ $t = new Benchmark_Timer(); $str = '((((((((1))))))))'; $t->start(); // メモ化していないパーサ $a = PEG::ref($a_ref); $p = PEG::ref($p_ref); $a_ref = PEG::choice(PEG::seq($p, '+', $a), PEG::seq($p, '-', $a), $p); $p_ref = PEG::choice(PEG::seq('(', $a, ')'), '1'); $a->parse(PEG::context($str)); $t->setMarker('no memoize'); // メモ化しているパーサ $a = PEG::ref($a_ref); $p = PEG::ref($p_ref); $a_ref = PEG::memo(PEG::choice(PEG::seq($p, '+', $a), PEG::seq($p, '-', $a), $p)); $p_ref = PEG::memo(PEG::choice(PEG::seq('(', $a, ')'), '1')); $a->parse($c = PEG::context($str)); $t->setMarker('memoize'); $t->stop(); $t->display(); /* 結果 --------------------------------------------------------- marker time index ex time perct --------------------------------------------------------- Start 1242400475.10093900 - 0.00% ---------------------------------------------------------
} $db_type = $_SERVER['argv'][1]; // db select is contained with bmark_connect if (strlen($db_type) <= 0) { $db_type = 'drizzle'; } // phpinfo(); die(); // use software partition $timer = new Benchmark_Timer(); $timer->start(); $login = '******'; $dao = new Dao($db_type, 'db.yaml'); $dao->connect(); $dao->find('users', 'login', $login, '='); $dao->close(); $timer->setMarker('Test_Code_Partition'); echo "Elapsed time between Start and Test_Code_Partition: " . $timer->timeElapsed('Start', 'Test_Code_Partition') . "\n"; // use backend partition $dao2 = new Dao($db_type, 'db.yaml'); $dao2->connect(); $dao2->find('users', 'login', $login, '=', 'mysql'); $dao2->close(); $timer->setMarker('DB_Partition'); echo "Elapsed time between Test_Code_Partition and DB_Partition: " . $timer->timeElapsed('Test_Code_Partition', 'DB_Partition') . "\n"; // use no partition $dao3 = new Dao($db_type, 'db.yaml'); $dao3->connect(); $dao3->find('users', 'login', $login, '=', 'nopart'); $dao3->close(); $timer->setMarker('No_Partition'); echo "Elapsed time between DB_Partition and No_Partition: " . $timer->timeElapsed('DB_Partition', 'No_Partition') . "\n";
if (!isset($files_path)) { $files_path = ''; } if (!isset($server_encoding)) { $server_encoding = ''; } if (!isset($browser_encoding)) { $browser_encoding = ''; } include $files_path . 'date_time.php'; //--------------------------------------- if (isset($use_benchmark) && $use_benchmark) { require_once $_SERVER['DOCUMENT_ROOT'] . '/' . $root_folder . '_modules/benchmark/library/Timer.php'; $timer = new Benchmark_Timer(); $timer->start(); $timer->setMarker('Mark1'); } //--------------------------------------- // if (!$site_blocked) { include $files_path . 'week.php'; } $metaKeywords = ''; $pg_author = ''; $pg_description = ''; $q_val = 'select pu.`description`,pu.`meta_keywords`, pu.`title`,u.fio_short as author from `pg_uploads` pu left join users u on u.id=pu.user_id_insert where pu.`name` like "' . $curpage . '"'; $pg_stat = getRowSqlVar($q_val); if (isset($pg_stat[0])) { $pg_stat = $pg_stat[0];
<?php declare (ticks=1); if (PHP_SAPI != 'cli') { die('Hey, CLI only!'); } define('TEST_ITERATIONS', 1000); define('TEST_CREATIVES', 10); define('TEST_ZONES', 10); require "Benchmark/Timer.php"; require 'testCases/BucketDB.php'; $t = new Benchmark_Timer(); $t->start(); $aTests = array(25, 50, 75, 100); $t->setMarker('Script init'); foreach ($aTests as $concurrency) { $oTest = bucketDB::factory(array('type' => 'MySQL', 'host' => 'localhost', 'user' => 'root', 'password' => 'password', 'dbname' => 'test_bucket', 'engine' => 'MyISAM')); test_update($oTest, $concurrency, $t, 'MyISAM'); $oTest = bucketDB::factory(array('type' => 'MySQL', 'host' => 'localhost', 'user' => 'root', 'password' => '', 'dbname' => 'test_bucket', 'engine' => 'MyISAM', 'pkIndexType' => 'USING HASH')); test_update($oTest, $concurrency, $t, 'MyISAMHashPk'); $oTest = bucketDB::factory(array('type' => 'MySQL', 'host' => 'localhost', 'user' => 'root', 'password' => '', 'dbname' => 'test_bucket', 'engine' => 'InnoDB')); test_update($oTest, $concurrency, $t, 'InnoDB'); $oTest = bucketDB::factory(array('type' => 'MySQL', 'host' => 'localhost', 'user' => 'root', 'password' => '', 'dbname' => 'test_bucket', 'engine' => 'InnoDB', 'pkIndexType' => 'USING HASH')); test_update($oTest, $concurrency, $t, 'InnoDBHashPk'); $oTest = bucketDB::factory(array('type' => 'MySQL', 'host' => 'localhost', 'user' => 'root', 'password' => '', 'dbname' => 'test_bucket', 'engine' => 'MEMORY')); test_update($oTest, $concurrency, $t, 'MEMORY'); $oTest = bucketDB::factory(array('type' => 'MySQL', 'host' => 'localhost', 'user' => 'root', 'password' => 'password', 'dbname' => 'test_bucket', 'engine' => 'MEMORY', 'additionalIndexes' => ', INDEX USING BTREE (date_time)')); //Test using additional B-TREE index on date_time - slower than MEMORY //test_update($oTest, $concurrency, $t, 'MEMBTR'); $oTest = bucketDB::factory(array('type' => 'MySQL', 'host' => 'localhost', 'user' => 'root', 'password' => 'password', 'dbname' => 
'test_bucket', 'engine' => 'MEMORY', 'pkIndexType' => 'USING BTREE')); //Test using Primay Key as B-TREE index
/**
 * Controller action: handle an uploaded FIT session file.
 *
 * Validates the upload form, shells out to `/usr/local/bin/fitdecode` to
 * extract the session summary (-s), per-interval records (-r) and laps (-l)
 * as XML, parses each part, and stores the session, records, detected
 * climbs and laps in the database inside one transaction. On any exception
 * the transaction is rolled back and an error status is rendered.
 *
 * NOTE(review): depends on helpers defined elsewhere in this project
 * (parseSessions/parseRecords/parseLaps, Module_Sessions_API, Core_View).
 */
function viewUpload() {
    $form = new SessionUploadForm();
    $view = Core_View::factory('sessionsfileupload');
    $view->UploadStatusMsg = "";
    $view->UploadStatus = "Error";
    if ($form->validate()) {
        // Benchmark_Timer markers profile each processing phase below.
        $timer = new Benchmark_Timer();
        $timer->start();
        $upload = $form->getSubmitValue('upload');
        $timer->setMarker('Decode Sessions - Start');
        // NOTE(review): tmp_name is interpolated into the shell command unquoted;
        // presumably safe because PHP generates the temp path, but
        // escapeshellarg() would be more defensive — confirm.
        exec('/usr/local/bin/fitdecode -s ' . $upload['tmp_name'], $xml_session);
        $xml_session = implode("\n", $xml_session);
        $sessions = parseSessions($xml_session);
        $timer->setMarker('Decode Sessions - End');
        /* There should only be one session */
        if (is_array($sessions)) {
            $session = $sessions[0];
            unset($sessions);
        }
        $db = Zend_Registry::get('db');
        $db->beginTransaction();
        try {
            $api = new Module_Sessions_API();
            /* Insert the session data into the database */
            $api->createSessionFull($session->start_time, 'E1', 'Untitled', $session->total_timer_time, $session->total_distance, $session->total_calories, $session->avg_heart_rate, $session->max_heart_rate, $session->avg_speed, $session->max_speed, $session->total_ascent, $session->total_descent, '');
            /* Find the seconds since epoch so we can do simple maths */
            // Month fixed to 1 with day = tm_yday + 1: relies on mktime()
            // normalising day-of-year overflow into the correct month.
            $ftime = strptime($session->start_time, '%FT%T%z');
            $session_epoch = mktime($ftime['tm_hour'], $ftime['tm_min'], $ftime['tm_sec'], 1, $ftime['tm_yday'] + 1, $ftime['tm_year'] + 1900);
            $session_timestamp = $session->start_time;
            unset($session);
            unset($sessions);
            $timer->setMarker('Decode Records - Start');
            exec('/usr/local/bin/fitdecode -r ' . $upload['tmp_name'], $xml_records);
            $xml_records = implode("\n", $xml_records);
            $records_input = parseRecords($xml_records, $session_epoch);
            $timer->setMarker('Decode Records - End');
            if (is_array($records_input)) {
                $record_prev = $records_input[0];
            }
            /* Get the array of records, removing duplicates (same ->interval) */
            $records = array();
            foreach ($records_input as $record) {
                if (!isset($record_last) || $record_last->interval != $record->interval) {
                    $records[] = $record;
                }
                $record_last = $record;
            }
            unset($records_input);
            unset($record_last);
            $UserAPI = Module_UserManagement_API::getInstance();
            $user = $UserAPI->getUser();
            /* Add the matching data points */
            foreach ($records as $record) {
                /* Skip duplicates, they will cause issues in graphs */
                if (!isset($record->power)) {
                    // Estimate power from gradient/speed deltas and rider+bike weight.
                    $record->power = $api->getPower($record->gradient, $record->temperature, $record->altitude, $record->speed, $record->speed - $record_prev->speed, $record->interval - $record_prev->interval, $user['rider_weight'], $user['bike_weight']);
                }
                $record_prev = $record;
            }
            unset($user);
            unset($UserAPI);
            $timer->setMarker('Record insertion - start');
            $api->insertAllSessionData($session_timestamp, $records);
            /* Insert all the data */
            $timer->setMarker('Record insertion - end');
            /* Calculate the climbs */
            $climbs = $api->getClimbCategories();
            $timer->setMarker('Climb - Start');
            $min_climb = $climbs[0];
            /* 500m with an average gradient of more than 3% (cat 5) */
            /* Find the points that have a distance of 500m */
            // Sliding window [$back, $front] over the records; the windowed
            // gradient decides where climbs start and end.
            $window_distance = 0;
            $window_altitude = 0;
            $cat = -1;
            $climb_num = 1;
            $num_records = count($records);
            $num_climbs = count($climbs);
            for ($front = 0, $back = 0; $front < $num_records; $front++) {
                $window_distance += $records[$front]->delta_distance * 1000;
                $window_altitude += $records[$front]->delta_altitude;
                if ($window_distance > $min_climb['min_distance']) {
                    $window_gradient = $window_altitude / $window_distance * 100;
                    /* Check if we have found the start of a climb */
                    if ($cat == -1 && $window_gradient >= $climbs[$cat + 1]['min_gradient']) {
                        $cat++;
                        /* Go through and find the minimum height */
                        $min = $back;
                        for ($i = $back; $i < $front; $i++) {
                            if ($records[$i]->altitude <= $records[$min]->altitude) {
                                $min = $i;
                            }
                        }
                        $climb['bottom'] = $records[$min]->interval;
                        $climb['min_altitude'] = $records[$min]->altitude;
                    }
                    /* Check if we have finished the climb */
                    if ($cat != -1 && $window_gradient < $climbs[$cat]['min_gradient']) {
                        /* Need to go back and find the maximum altitude */
                        $max = $back;
                        for ($i = $back; $i < $front; $i++) {
                            if ($records[$i]->altitude > $records[$max]->altitude) {
                                $max = $i;
                            }
                        }
                        $climb['top'] = $records[$max]->interval;
                        $climb['max_altitude'] = $records[$max]->altitude;
                        /* Get the max gradient over the climb span */
                        $climb['gradient_max'] = $records[$min]->gradient;
                        for ($i = $min; $i <= $max; $i++) {
                            if ($climb['gradient_max'] < $records[$i]->gradient) {
                                $climb['gradient_max'] = $records[$i]->gradient;
                            }
                        }
                        /* Tally the totals */
                        $climb['total_climbed'] = 0;
                        for ($i = $min + 1; $i <= $max; $i++) {
                            $climb['total_climbed'] += $records[$i]->delta_altitude;
                        }
                        $climb['total_distance'] = round($records[$max]->distance - $records[$min]->distance, 2);
                        $climb['gradient_avg'] = round($climb['total_climbed'] / ($climb['total_distance'] * 1000) * 100, 2);
                        /* Find the category of the climb (highest category whose
                         * gradient/distance/height thresholds are all met) */
                        $cat = -1;
                        while ($cat + 1 < $num_climbs && $climb['gradient_avg'] >= $climbs[$cat + 1]['min_gradient'] && $climb['total_distance'] * 1000 >= $climbs[$cat + 1]['min_distance'] && $climb['total_climbed'] >= $climbs[$cat + 1]['min_height']) {
                            $cat++;
                        }
                        $climb['cat'] = $cat;
                        if ($cat != -1) {
                            /* Store it into the database */
                            $api->insertClimb($session_timestamp, $climb_num++, $climb['bottom'], $climb['top'], $climb['gradient_avg'], $climb['gradient_max'], $climb['total_distance'], $climb['total_climbed'], $climb['min_altitude'], $climb['max_altitude']);
                            /* Start search for the next climb */
                            $front = $max;
                            $back = $max;
                            $window_distance = 0;
                            $window_altitude = 0;
                        } else {
                            /* It was a false climb: either not steep enough or
                             * too short, and the window just masked this.
                             * Keep searching for the next climb. */
                        }
                        $cat = -1;
                    }
                    /* Move the back of the window up */
                    while ($window_distance > $min_climb['min_distance'] && $back < $num_records) {
                        $window_distance -= $records[$back]->delta_distance * 1000;
                        $window_altitude -= $records[$back]->delta_altitude;
                        $back++;
                    }
                }
            }
            $timer->setMarker('Climb - End');
            /*
             * Bikes
             * userid
             * name
             * description
             * type, TT or Road
             * weight
             * picture?
             * Assign a bike to an exercise session at creation time?
             */
            unset($records);
            $timer->setMarker('Laps - Start');
            exec('/usr/local/bin/fitdecode -l ' . $upload['tmp_name'], $xml_laps);
            $xml_laps = implode("\n", $xml_laps);
            $laps = parseLaps($xml_laps);
            $timer->setMarker('Laps - End');
            $lap_num = 1;
            foreach ($laps as $lap) {
                // Lap start time becomes an offset (seconds) from the session start.
                $ftime = strptime($lap->start_time, '%FT%T%z');
                $start_epoch = mktime($ftime['tm_hour'], $ftime['tm_min'], $ftime['tm_sec'], 1, $ftime['tm_yday'] + 1, $ftime['tm_year'] + 1900);
                $lap_start = $start_epoch - $session_epoch;
                $api->insertLap($session_timestamp, $lap_num, $lap_start, $lap->start_position_lat, $lap->start_position_long, $lap->total_timer_time, $lap->total_elapsed_time, $lap->total_calories, $lap->avg_heart_rate, $lap->max_heart_rate, $lap->avg_speed, $lap->max_speed, $lap->total_ascent, $lap->total_descent, $lap->total_distance);
                $lap_num++;
            }
            //$timer->display();
            $db->commit();
            $plans = Module_Plans_API::getInstance();
            $view->planned = $plans->getClosestPlan($session_timestamp);
            $view->session_timestamp = $session_timestamp;
            $view->UploadStatusMsg = "Is this session the planned exercise session on at ere";
            $view->UploadStatus = "Success";
        } catch (Exception $e) {
            // Roll back everything inserted in this request on any failure.
            $db->rollback();
            $view->UploadStatusMsg = "Failed to upload";
            $view->UploadStatus = "Error";
            echo $e->getMessage();
        }
        $timer->display();
    }
    $view->addForm($form);
    $view->subTemplate = 'genericForm.tpl';
    echo $view->render();
}
<?php
// Minimal Benchmark_Timer usage example: mark several phases of a script
// and print the timing report at the end. The `true` constructor argument
// is the PEAR auto-start/auto-display flag — confirm against the installed
// Benchmark_Timer version.
require_once 'Benchmark/Timer.php';

$timer = new Benchmark_Timer(true);
$timer->start();
// some setup code here
$timer->setMarker('setup');
// some more code executed here
$timer->setMarker('middle');
// even yet still more code here
// Fixed casing: was ->setmarker(). PHP method names are case-insensitive so
// behaviour is unchanged, but the call now matches the other markers.
$timer->setMarker('done');
// and a last bit of code here
$timer->stop();
$timer->display();
/**
 * Parse the XML record dump (from `fitdecode -r`) into an array of record
 * objects, then derive for each record: an interval (seconds from session
 * start), distance/altitude deltas versus the previous record, and a
 * smoothed gradient computed over a Tukey-windowed neighbourhood.
 *
 * @param string $xml           Raw XML text of the records.
 * @param int    $session_epoch Session start (seconds since epoch); record
 *                              timestamps become offsets from this value.
 * @return array Records produced by parseRecord(), augmented in place.
 */
function parseRecords($xml, $session_epoch) {
    $records = array();
    $timer = new Benchmark_Timer();
    $timer->start();
    $timer->setMarker('Parse XML to tags - start');
    /* Parse the XML into tags */
    $parser = xml_parser_create();
    xml_parser_set_option($parser, XML_OPTION_CASE_FOLDING, 0);
    xml_parser_set_option($parser, XML_OPTION_SKIP_WHITE, 1);
    xml_parse_into_struct($parser, $xml, $values, $tags);
    xml_parser_free($parser);
    // loop through the structures
    $timer->setMarker('tags to arrays - start');
    foreach ($tags as $key => $value) {
        if ($key == "record") {
            $molranges = $value;
            // each contiguous pair of array entries are the
            // lower and upper index range for one <record> element
            for ($i = 0; $i < count($molranges); $i += 2) {
                $offset = $molranges[$i] + 1;
                $len = $molranges[$i + 1] - $offset;
                $records[] = parseRecord($values, $offset, $len);
            }
        } else {
            continue;
        }
    }
    $timer->setMarker('tags to array - done');
    $i = 0;
    $timer->setMarker('gradient calcs - start');
    /* Gradient calc constants */
    // 11-sample smoothing window centred on the current record.
    // ("SAMPES" typo kept: a rename would not be a doc-only change.)
    $NUM_GRADIENT_SAMPES = 11;
    $LOW_OFFSET = floor($NUM_GRADIENT_SAMPES / 2);
    $HIGH_OFFSET = floor($NUM_GRADIENT_SAMPES / 2);
    /* Create the window function */
    /* Tukey (tapered cosine) window, alpha = 0.5 */
    $alpha = 0.5;
    $window = array();
    for ($i = 0; $i < $NUM_GRADIENT_SAMPES; $i++) {
        if ($i <= $alpha * $NUM_GRADIENT_SAMPES / 2) {
            // Rising cosine taper
            $window[$i] = 0.5 * (1 + cos(M_PI * (2 * $i / ($alpha * $NUM_GRADIENT_SAMPES) - 1)));
        } else {
            if ($i <= $NUM_GRADIENT_SAMPES * (1 - $alpha / 2)) {
                // Flat middle section
                $window[$i] = 1.0;
            } else {
                // Falling cosine taper
                $window[$i] = 0.5 * (1 + cos(M_PI * (2 * $i / ($alpha * $NUM_GRADIENT_SAMPES) - 2 / $alpha + 1)));
            }
        }
    }
    $i = 0;
    $num_records = count($records);
    foreach ($records as $record) {
        /* Convert the timestamp into an interval */
        // Month fixed to 1 with day = tm_yday + 1: relies on mktime()
        // normalising day-of-year overflow into the correct month.
        $ftime = strptime($record->timestamp, '%FT%T%z');
        $record_epoch = mktime($ftime['tm_hour'], $ftime['tm_min'], $ftime['tm_sec'], 1, $ftime['tm_yday'] + 1, $ftime['tm_year'] + 1900);
        $record->interval = $record_epoch - $session_epoch;
        if ($i > 0) {
            $record->delta_distance = $record->distance - $records[$i - 1]->distance;
            $record->delta_altitude = round($record->altitude - $records[$i - 1]->altitude, 2);
        } else {
            // First record has no predecessor to diff against.
            $record->delta_distance = 0;
            $record->delta_altitude = 0;
        }
        /* Calculate the average gradient over the windowed neighbourhood */
        $total_rise = 0;
        $total_distance = 0;
        unset($first_distance);
        $last_distance = 0;
        for ($g = $i - $LOW_OFFSET, $j = 0; $g <= $i + $HIGH_OFFSET; $g++, $j++) {
            // Skip window positions that fall outside the record range.
            if ($g >= 0 && $g < $num_records) {
                if (!isset($first_distance)) {
                    $first_distance = $records[$g]->distance;
                }
                $total_rise += ($records[$g]->altitude - $record->altitude) * $window[$j];
                $last_distance = $records[$g]->distance;
            }
        }
        $avg_rise = $total_rise / $NUM_GRADIENT_SAMPES;
        // Distance appears to be in km here (the * 1000 converts to metres) —
        // NOTE(review): confirm units against parseRecord().
        $avg_distance = ($last_distance - $first_distance) / $NUM_GRADIENT_SAMPES * 1000;
        if ($avg_distance) {
            $record->gradient = round($avg_rise / $avg_distance * 100, 1);
        } else {
            // Avoid division by zero when the window covers no distance.
            $record->gradient = 0;
        }
        /* TODO: Calculate the power */
        $i++;
    }
    $timer->setMarker('gradient calcs - done');
    //$timer->display();
    return $records;
}
<?php //testA
// Benchmark a value lookup implemented as array_flip() + direct key access
// on the sample data set. ($array is defined by sampleData.php.)
require_once "Benchmark/Timer.php";
require_once "./sampleData.php";

$timer = new Benchmark_Timer();
$timer->start();

// Alternative approach kept for reference:
//$r = array_search("kFJeaj08OKSMc7jD", $array);
//var_dump($array[$r]);

// Invert the array once so the target value becomes a key, then look it up.
$lookup = array_flip($array);
$timer->setMarker('function END');
var_dump($lookup["kFJeaj08OKSMc7jD"]);

$timer->stop();
$timer->display();
<li>5</li> <li>6</li> <li>7</li> <li>8</li> <li>9</li> </ul> </root> EOS; $css = <<<EOS h1:not(.foo) EOS; $timer = new Benchmark_Timer(); //$nb_iterations = 1; $timer->start(); $source = new Source\String($css); $timer->setMarker("Source init"); $lexer = new Css\Lexer(); $lexer->setSource($source); $timer->setMarker("Lexer init"); $token = $lexer->nextToken(); while ($token->type !== Css\Lexer::T_EOF) { echo $lexer->getLiteral($token) . PHP_EOL; $token = $lexer->nextToken(); } echo $lexer->getLiteral($token) . PHP_EOL; $timer->setMarker("Tokenization end"); $parser = new Css\Parser($lexer); $selector = $parser->parseSelector(); $timer->setMarker("Parsing end"); //var_dump($selector); //echo $selector->toXpath() . PHP_EOL;
$xhprof_on = false; if (defined('_PS_DEBUG')) { $xhprof_on = true; if (extension_loaded('xhprof')) { include_once LIB_ROOT . 'include/xhprof/utils/xhprof_lib.php'; include_once LIB_ROOT . 'include/xhprof/utils/xhprof_runs.php'; xhprof_enable(XHPROF_FLAGS_CPU + XHPROF_FLAGS_MEMORY); } } defined('LIB_ROOT') || define('LIB_ROOT', CONF_ROOT . '/../library/'); if (defined('ENABLE_BENCHMARK') && TRUE === ENABLE_BENCHMARK) { // for Benchmark require_once LIB_ROOT . 'Benchmark/Timer.php'; $g_timer = new Benchmark_Timer(); $g_timer->start(); $g_timer->setMarker('web.init: start'); } // Global Loader include_once LIB_ROOT . 'class/Loader.php'; isset($g_timer) && $g_timer->setMarker('lib.loader loaded'); if (PHP_SAPI === 'cli') { // command line isset($argv) || ($argv = $_SERVER['argv']); } elseif (isset($_SERVER['HTTP_HOST'])) { // http mod, cgi, cgi-fcgi if (headers_sent()) { exit('headers already sent'); } $format = 'html'; if (isset($_GET['format'])) { $format = $_GET['format'];
<?php
// Benchmark ju1ius\Text\MultiByte::str_pad() by left-padding a multi-byte
// string to 60 characters, repeated many times, then print timing output.
require_once 'Benchmark/Timer.php';
require_once 'autoload.php';

$timer = new Benchmark_Timer();
$nb_iterations = 1000000;
$timer->start();

$subject = "Sàéœpïô¬";
$padding = "ð";
$iteration = 0;
while ($iteration < $nb_iterations) {
    $result = ju1ius\Text\MultiByte::str_pad($subject, 60, $padding, STR_PAD_LEFT);
    $iteration++;
}
var_dump($result);
$timer->setMarker('mb_strpad');
echo $timer->getOutput();
h1{ color: red; rotation: 77\$\$ } p { color:green; color{;color:maroon} color:blue; color:yellow; border:none } p{color:red} p{ foo:bar; bar:calc(2 + 5 * (3-6)); baz:boo } */ /* Discards the rule til end of stylesheet, since no matching bracket can be found */ /*p{ foo:bar; foo{;bar("baz)};"; baz:boo } h1{}*/ EOS; $timer = new Benchmark_Timer(); //$nb_iterations = 1; $timer->start(); $source = new Source\String($css); $timer->setMarker(sprintf("Source init: %s", $source->getEncoding())); $lexer = new Css\Lexer(); $lexer->setSource($source); $timer->setMarker("Lexer init"); //$token = $lexer->nextToken(); //while ($token->type !== Css\Lexer::T_EOF) { //echo $lexer->getLiteral($token) . PHP_EOL; //$token = $lexer->nextToken(); //} //echo $lexer->getLiteral($token) . PHP_EOL; //$timer->setMarker("Tokenization end"); $parser = new Css\Parser($lexer); $parser->setStrict(false); $stylesheet = $parser->parseStyleSheet(); $timer->setMarker("Parsing end"); foreach ($parser->errors as $error) {
<?php
// Benchmark a single-row PostgreSQL fetch using PEAR Benchmark_Timer.
require_once 'Benchmark/Timer.php';

$timer = new Benchmark_Timer();
$timer->start();

$id = 9919;

// Fail fast if the connection cannot be established (pg_connect returns
// false on failure and every later pg_* call would emit warnings).
$conn = pg_connect("user=juno dbname=sandbox");
if ($conn === false) {
    die('Unable to connect to PostgreSQL');
}

// Parameterised query instead of string concatenation: never interpolate
// values into SQL, even when (as here) the value happens to be a literal.
$result = pg_query_params($conn, 'SELECT * FROM people WHERE id = $1', array($id));
if ($result === false) {
    $error = pg_last_error($conn);
    pg_close($conn);
    die('Query failed: ' . $error);
}
$person = pg_fetch_array($result);
$timer->setMarker('fetched');
pg_close($conn);

print_r($person);
$timer->stop();
$timer->display();
ob_start('ob_gzhandler'); // Create an output buffer to capture console output, separately from the // gzip handler. ob_start(); // start benchmarking $timer = new Benchmark_Timer(); $timer->start(); // load the client $client = new NDB_Client(); $client->initialize(); // require additional libraries $TestName = isset($_REQUEST['test_name']) ? $_REQUEST['test_name'] : 'dashboard'; $subtest = isset($_REQUEST['subtest']) ? $_REQUEST['subtest'] : ''; // make local instances of objects $config =& NDB_Config::singleton(); $timer->setMarker('Loaded client'); //-------------------------------------------------- /** * Extracts a parameter from request in a safe way, * and sets the $tpl_data to said request. If request does * not contain parameter, will set the smarty variable to * empty * * @param string $param The $_REQUEST parameter to convert to a smarty variable * * @return none, side effect of modifying $tpl_data */ function tplFromRequest($param) { global $tpl_data; if (isset($_REQUEST[$param])) {
//Look likes if I send a JSON object it gets automatically converted in an array. if (!isset($value['f']) || !isset($value['l']) || !isset($value['v'])) { error('missing parameters'); } } elseif (isset($_REQUEST['f']) && $_REQUEST['f'] == "insert_user_XX") { $col = 'login'; $value = $_REQUEST['v']; //Look likes if I send a JSON object it gets automatically converted in an array. if (!isset($value['f']) || !isset($value['l']) || !isset($value['v']) || !isset($value['u'])) { error('missing parameters'); } $u = $value['u']; } else { error('unrecognized command'); } $timer->setMarker('setup'); //print "u: $u, ids_text: $ids_text<br>"; if ($col == 'login') { if ($realUser == $u) { //Do not overwrite name in mock mode! $good = User::SetDataFromFB($u, $value['f'], $value['l'], $value['v']); } } else { if (!User::SetColumn($u, $col, $value)) { error('Problem on the server, this field cannot be updated. Please retry later.'); } } $timer->setMarker('query'); InsertBenchmarkDB($timer); //Save the data on DB echo '{"result": "OK"}';
$dsn = $argv[1]; } $testSuite = new PHPUnit2_Framework_TestSuite($dsn); // ---------------------------------------------------------------------------- // TESTS ---------------------------------------------------------------------- // ---------------------------------------------------------------------------- $timer = new Benchmark_Timer(); $timer->start(); // (1) Add Generic (non-Driver) Tests // ---------------------------------- require_once 'creole/CreoleTest.php'; $testSuite->addTestSuite(new ReflectionClass('CreoleTest')); require_once 'creole/util/sql/SQLStatementExtractorTest.php'; $testSuite->addTestSuite(new ReflectionClass('SQLStatementExtractorTest')); // (2) Driver Tests // ---------------- include_once 'creole/drivers/DriverTestManager.php'; print "--------------------------------------\n"; print "| Running driver tests |\n"; print "--------------------------------------\n"; $timer->setMarker("start driver tests"); print "DSN: " . $dsn . "\n\n"; print "[It is safe to ignore any errors related to dropping nonexistant tables.]\n\n"; try { DriverTestManager::addSuite($testSuite, $dsn); PHPUnit2_TextUI_TestRunner::run($testSuite); } catch (Exception $e) { print "Could not add suite for " . $dsn . ": " . $e->getMessage(); } $timer->stop(); $timer->display();
$s_mid_table = padNumber($mid_table, 2); $sql = "drop table if exists users_no_partition"; $result = bmark_query($sql, $dbh); # print $sql . "\n"; $sql = "CREATE TABLE users_no_partition ( id INT NOT NULL primary key AUTO_INCREMENT , login varchar(255), email varchar(255), im varchar(255), twitter varchar(255), pass varchar(255), datejoined datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8"; $result = bmark_query($sql, $dbh); # print $sql . "\n"; $sql = 'create index login_index on users_no_partition (login)'; $result = bmark_query($sql, $dbh); # print $sql . "\n"; for ($i = 0; $i < $max_rows; $i++) { $sql = "insert into users_no_partition (login, pass) values (\"" . md5(rand(1, 5000) . microtime()) . "user{$i}\", \"" . md5("pass{$i}") . "\")"; $result = bmark_query($sql, $dbh); # print $sql . "\n"; } $timer->setMarker('No_Partition'); echo "Elapsed time between Start and Test_Code_Partition: " . $timer->timeElapsed('Start', 'No_Partition') . "\n"; $prefix = "users_"; $k = 1; for ($i = 0; $i < $parts; $i++) { $table = $prefix . padNumber($i, 2); $sql = "drop table if exists {$table}"; # print "table: $table\n"; # print $sql . "\n"; $result = bmark_query($sql, $dbh); $sql = "CREATE TABLE {$table} ( id INT NOT NULL primary key AUTO_INCREMENT , login varchar(255), email varchar(255), im varchar(255), twitter varchar(255), pass varchar(255), datejoined datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8"; $result = bmark_query($sql, $dbh); $sql = "create index login_index on {$table} (login)"; $result = bmark_query($sql, $dbh); for ($j = 0; $j < $perpart; $j++) { $sql = "insert into {$table} (id, login, pass) values ({$k}, \"" . md5(rand(1, 5000) . microtime()) . "user{$j}\", \"" . md5('pass$j') . "\")";
<?php
/* This test script requires PEAR/Benchmark to measure the script runtime. */
// Collect code coverage for the include when Xdebug is loaded.
if (function_exists('xdebug_start_code_coverage')) {
    xdebug_start_code_coverage();
}
require_once 'Benchmark/Timer.php';
$timer = new Benchmark_Timer();
$timer->start();
// Measure how long including the xajax core takes.
require '../xajax_core/xajax.inc.php';
//require( '../xajax_core/xajaxAIO.inc.php' );
$timer->setMarker('xajax included');
// -- testing session serialization for xajax object
// (experiment kept commented out: caches the xajax instance in the session)
//session_start();
//
//unset($_SESSION['xjxcore']);
//
//$xajax = null;
//
//if (false == isset($_SESSION['xjxcore']))
//{
//	$xajax = new xajax();
//
//	$_SESSION['xjxcore'] = $xajax;
//}
//else
//{
//	$xajax = $_SESSION['xjxcore'];
use ju1ius\Css; $timer = new Benchmark_Timer(); $timer->start(); $dir = dir(__DIR__ . '/../files/full/'); $files = array(); while ($file = $dir->read()) { if ($file === '.' || $file === '..') { continue; } $path = $dir->path . '/' . $file; $size = filesize($path); echo "Loading file {$file} ({$size} bytes) \n"; $files[$size] = file_get_contents($path); } ksort($files, SORT_NUMERIC); $timer->setMarker("Files loaded"); foreach ($files as $size => $string) { $before = memory_get_usage(); $lines = mb_split('\\r\\n|\\n', $string); $after = memory_get_usage(); $size /= 1024; printf("Split %s lines, filesize: %s\n", count($lines), $size); printf("Memory allocated: %s Ko\n", ($after - $before) / 1024); $timer->setMarker("Splitting " . count($lines) . " lines - {$size} Ko total"); unset($lines); } /* echo "Substr test\n"; // substr test foreach ($files as $size => $string) { $result = b_substr($string);
# Execute a SPARQL query against s3db data.
#
# $I is unpacked with extract(); from the visible reads it is expected to carry
# at least: $in (the request array: 'url', 'key', 'query', 'output', 'format'),
# $user_id, $db, $default_uri, and optionally $goparallel, $complete,
# $return_file_name, $redirect, $start — TODO confirm the full contract at the
# call sites.
#
# Pipeline (as visible in this body):
#   1. parse the SPARQL query into triples, translate each triple into S3QL
#      queries via sparql_navigator(), grouped into dependency "iterations";
#   2. run the S3QL queries (parallel cURL when curl + $goparallel, otherwise
#      sequential localQ()/remoteQ() calls);
#   3. re-encode the answers as RDF/XML via ARC2, write a temp .rdf file;
#   4. either return the file name, redirect to an external SPARQL endpoint,
#      or run the query on an rdf-api model and format the rows.
#
# Returns array(true, $payload) on success, array(false) / false on failure;
# the 'html.form' and default branches may echo and exit instead of returning.
#
# NOTE(review): ereg/eregi/ereg_replace were removed in PHP 7 — this function
# only runs on PHP <= 5.x as written; a preg_* migration is needed.
# NOTE(review): unserialize() is applied to bytes fetched from remote endpoints
# — PHP object-injection risk; the payloads should be JSON or validated.
# NOTE(review): $bq is appended to ('.=') without initialization, and several
# accumulators ($localQueries, $remoteQueries, $Q, $form, ...) rely on
# auto-vivification — notice-level errors under strict error reporting.
function sparql($I)
{
    ##Parse the query and build the dataset
    #global $timer;
    # Optional profiling: $timer stays undefined when PEAR Benchmark is absent,
    # so every use below is guarded by if ($timer).
    if (is_file(S3DB_SERVER_ROOT . '/pearlib/Benchmark/Timer.php')) {
        require_once S3DB_SERVER_ROOT . '/pearlib/Benchmark/Timer.php';
        $timer = new Benchmark_Timer();
        $timer->start();
    }
    extract($I);
    ##To use SPARQL with ARC library, we will need it to work with a remote endpoint. That means that we do not want to configure ARC as a datastore, but rather to retrieve the data from s3db deployments, convert it to RDF and then use ARC to run the query on it
    /* ARC2 static class inclusion */
    ini_set("include_path", S3DB_SERVER_ROOT . "/pearlib/arc" . PATH_SEPARATOR . ini_get("include_path"));
    include_once "ARC2.php";
    # Target deployment and API key: explicit request values win, otherwise
    # fall back to the default URI / the user's stored key.
    $s3ql['url'] = $in['url'] != '' ? $in['url'] : $default_uri;
    $s3ql['key'] = $in['key'] != '' ? $in['key'] : get_user_key($user_id, $db);
    $q = $in['query'];
    # Decompose the query text into clause arrays, raw triples and prefixes.
    list($query, $triples, $prefixes) = parse_sparql_query($q, $s3ql);
    # Rebuild a normalized query string ($bq) from the parsed clauses.
    $bq .= "PREFIX " . implode("\n PREFIX ", $query['prefix']) . "\n ";
    $bq .= "SELECT " . $query['select'][0] . "\n ";
    $bq .= "FROM" . implode(" FROM ", $query['from']) . "\n ";
    $bq .= "WHERE " . $query['where'][0] . "\n ";
    # Collect the distinct ?variables used anywhere in the query.
    preg_match_all('(\\?[A-Za-z0-9]+) ', $bq, $vars);
    if ($vars[0]) {
        $vars = array_unique($vars[0]);
        $sparql_vars = implode(" ", $vars);
    }
    # An explicit SELECT list (not "*") restricts the output columns.
    if ($query['select'][0] != "" && $query['select'][0] != "*") {
        $outputCols = explode(" ", trim($query['select'][0]));
        $outputCols = array_filter($outputCols);
        $outputCols = array_intersect($vars, $outputCols);
    }
    # The in-memory model query must not carry a FROM clause.
    $sparql = ereg_replace("FROM(.*)WHERE", "WHERE", $bq);
    #lets preprocess the order by which the must be queries must be performed to optimize speedness
    list($iterations, $scrambled) = iterationOrder($triples, $prefixes, true);
    ##$rdf_results will contain the totality of triples retrieved from s3db;
    ##Start a rdf-api model
    $iterations = array_values($iterations);
    $rdf = S3DB_URI_BASE . '/s3dbcore/model.n3';
    #base s3db rdf model
    $filename = md5($rdf);
    $file_place = $GLOBALS['uploads'] . '/';
    #$queryModel = rdf2php($rdf);
    #$data = $queryModel->sparqlQuery($sparql);
    #echo '<pre>';print_r($data);exit;
    if ($timer) {
        $timer->setMarker('Core model read into results');
    }
    $rdf_results = array();
    $performedQueries = array();
    # $r indexes $queried_elements in step with the answers gathered below.
    $r = 0;
    # Each "iteration" is a batch of triples whose queries can run together.
    foreach ($iterations as $it => $triples2query) {
        $S3QL = array();
        $S3QLfinal = array();
        foreach ($triples2query as $i => $tripleInd) {
            $tripleString = $tripleInd;
            # A triple is "<subject> <predicate> <object>"; strip the <>.
            list($subject, $predicate, $object) = explode(' ', trim($tripleString));
            $subject = ereg_replace('^<|>$', '', $subject);
            $predicate = ereg_replace('^<|>$', '', $predicate);
            $object = ereg_replace('^<|>$', '', $object);
            $triple = compact('subject', 'predicate', 'object');
            #sparql triple is used to calculate the values of the variables in the triple
            #$sparql_triple = $sparql_prefixes_default.' SELECT * WHERE { '.ltrim($tripleString).' . }';
            #now lets interpret the triple to explore the space of possible queries on S3QL
            $pack = compact('triple', 's3ql', 'user_id', 'db', 'prefixes', 'varType', 'discoveredData', 'it', 'varTypeWhere', 'collected_data', 'performedQueries');
            # sparql_navigator() maps one triple to candidate S3QL queries;
            # extract() pulls $S3QL, $element, $localQueries, $remoteQueries, ...
            # back into this scope — TODO confirm the exact set it returns.
            $sp = sparql_navigator($pack);
            extract($sp);
            # if($timer) $timer->setMarker('Built query '.$i);
            ##Remove queries that were already performed
            if ($S3QL[0]) {
                # NOTE(review): $q is reused here as the loop value, clobbering
                # the original query string read earlier from $in['query'].
                foreach ($S3QL as $s => $q) {
                    $S3QLfinal[] = $q;
                    $queried_elements[] = $element[$s];
                }
                $localQueries[$tripleString] = $localQueries[0];
                $remoteQueries[$tripleString] = $remoteQueries[0];
                $localQueries = array_filter($localQueries);
                $remoteQueries = array_filter($remoteQueries);
            }
        }
        $S3QL = $S3QLfinal;
        ##Remove repeated queries
        $S3QL = array_unique($S3QL);
        #if only the s3ql is requested, we can return it now
        if ($in['output'] == 'S3QL') {
            foreach ($localQueries as $sparqlVersion => $s3qlVersion) {
                $Q[]['S3QL'] = S3QLQuery($s3qlVersion);
            }
            foreach ($remoteQueries as $rq) {
                $Q[]['S3QL'] = $rq;
            }
            $root = 's3ql';
            #root is just the word that xml should parse as the root for each entry
            $data = $Q;
            $cols = array('S3QL');
            $format = $in['format'] == '' ? 'html' : $in['format'];
            $z = compact('data', 'cols', 'format', 'root');
            $out = outputFormat($z);
            return array(true, $out);
        }
        #If paralel library is activated, use it for the data. Otherwise use the custom version
        #$query_answers_file = 'sparql_query_ans'.rand(100,200); $a=fopen($query_answers_file, 'a');
        if (!empty($S3QL)) {
            if (extension_loaded('curl') && $goparallel) {
                // Create cURL handlers
                if ($timer) {
                    $timer->setMarker('Starting queries from group ' . $it);
                }
                foreach ($S3QL as $k => $url) {
                    $qURL = $url;
                    $ch[$k] = curl_init();
                    // Set options
                    # &format=php asks the remote s3db to return serialize()d PHP.
                    curl_setopt($ch[$k], CURLOPT_URL, $qURL . '&format=php');
                    curl_setopt($ch[$k], CURLOPT_RETURNTRANSFER, 1);
                }
                # Fan the handles out over a multi-handle and drive them to completion.
                $mh = curl_multi_init();
                foreach ($S3QL as $k => $url) {
                    curl_multi_add_handle($mh, $ch[$k]);
                }
                $running = null;
                do {
                    curl_multi_exec($mh, $running);
                    # NOTE(review): $k here is whatever the last foreach left it as;
                    # this marker does not identify the query that just finished.
                    if ($timer) {
                        $timer->setMarker('Query ' . $k . ' of group ' . $it . ' executed');
                    }
                } while ($running > 0);
                foreach ($S3QL as $k => $url) {
                    $answer[$k] = curl_multi_getcontent($ch[$k]);
                    if (!empty($answer[$k])) {
                        #@fwrite($a, $answer[$k]);
                        ##This is what takes the longest after the query, can it be replaced?
                        # NOTE(review): unserialize() of a remote response —
                        # object-injection risk on untrusted endpoints.
                        $ans = unserialize($answer[$k]);
                        # First letter of the queried element tags the result bucket.
                        $letter = $queried_elements[$r][0];
                        if (empty($ans)) {
                            ##is this query part is not optional, then the result will be null
                            ##TO BE DEVELOPED SOON
                        } else {
                            $rdf_results[$letter][] = $ans;
                        }
                        $r++;
                        ##Add the triples to already existing triples
                        #Line up the answer with the model
                        if ($timer) {
                            $timer->setMarker('Query ' . $it . '=>' . $k . ' converted to php ');
                        }
                    }
                }
                curl_multi_close($mh);
                ####Time count
                #$time_end = microtime(true);
                #$time = $time_end - $time_start;
                #echo "Query took ".$time." seconds\n";exit;
                ###
            } else {
                #Now solve the remaining triples with the constants found in this one
                # Sequential fallback: answers are merged into $queryModel instead
                # of the $rdf_results buckets used by the parallel path.
                if (is_array($localQueries) && !empty($localQueries)) {
                    foreach ($localQueries as $sparql_triple => $s3ql) {
                        $s3ql = array_filter(array_diff_key($s3ql, array('url' => '')));
                        $answer = localQ($s3ql);
                        if (!empty($answer)) {
                            $rdfanswer = rdf2php($answer);
                            #Line up the answer with the model
                            $queryModel->addModel($rdfanswer);
                            #Now perform the query on the small model to find a constant for the remaining queries
                            #list($data,$discovered, $discoveredData,$queryModel) = executeQuery($queryModel,$sparql_triple,$discovered,$format);
                        }
                    }
                }
                if (is_array($remoteQueries) && !empty($remoteQueries)) {
                    foreach ($remoteQueries as $remoteQuery) {
                        $answer = remoteQ($remoteQuery);
                        if (!empty($answer)) {
                            $rdfanswer = rdf2php($answer);
                            #Line up the answer with the model
                            $queryModel->addModel($rdfanswer);
                            #Now perform the query on the small model to find a constant for the remaining queries
                            #list($data,$discovered, $discoveredData,$queryModel) = executeQuery($queryModel,$sparql_triple,$discovered,$format);
                        }
                    }
                }
            }
        }
    }
    ##Get the data from the file
    ##Now, add the dictionary data
    # Optionally append the user's link and namespace dictionaries ('E' bucket).
    if ($complete) {
        include_once S3DB_SERVER_ROOT . '/s3dbcore/dictionary.php';
        $s3qlN = compact('user_id', 'db');
        $s3qlN['from'] = 'link';
        $s3qlN['format'] = 'php';
        $links = query_user_dictionaries($s3qlN, $db, $user_id);
        $links = unserialize($links);
        $rdf_results['E'][0] = $links;
        $s3qlN = compact('user_id', 'db');
        $s3qlN['from'] = 'namespaces';
        $s3qlN['format'] = 'php';
        $ns = query_user_dictionaries($s3qlN, $db, $user_id);
        $ns = unserialize($ns);
        if ($timer) {
            $timer->setMarker('Dictionary links retrieved');
        }
    }
    ##Convert the result into an RDF file
    $data_triples = array();
    if (is_array($rdf_results)) {
        foreach ($rdf_results as $letter => $results2rdfize) {
            # Bucket letter drives rdf_encode flags: 'S' results skip serialized
            # fields; 'C'/'R'/'P' results keep the core name.
            $dont_skip_core_name = false;
            $dont_skip_serialized = true;
            if (ereg('S', $letter)) {
                $dont_skip_serialized = false;
            }
            if (ereg('C|R|P', $letter)) {
                $dont_skip_core_name = true;
            }
            foreach ($results2rdfize as $k => $data) {
                $tmp_triples = rdf_encode($data, $letter, 'array', $s3ql['db'], $ns, $collected_data, $dont_skip_serialized, $dont_skip_core_name);
                if (is_array($tmp_triples)) {
                    $data_triples = array_merge($data_triples, $tmp_triples);
                }
            }
        }
    }
    if (!empty($data_triples)) {
        $tmp['ns'] = $prefixes;
        /* #this one for turtle $parser = ARC2::getComponent('TurtleParser', $a); $index = ARC2::getSimpleIndex($triples, false) ; # false -> non-flat version $rdf_doc = $parser->toTurtle($index,$prefixes); */
        # Serialize all gathered triples as RDF/XML into a random temp file.
        $parser = ARC2::getComponent('RDFXMLParser', $tmp);
        $index = ARC2::getSimpleIndex($data_triples, false);
        /* false -> non-flat version */
        $rdf_doc = $parser->toRDFXML($index, $prefixes);
        $filename = S3DB_SERVER_ROOT . '/tmp/' . random_string(15) . '.rdf';
        $rr = fopen($filename, 'a+');
        fwrite($rr, $rdf_doc);
        fclose($rr);
        if ($timer) {
            $timer->setMarker(count($data_triples) . ' triples written to file ' . $filename);
        }
        ##The better strategy would be to let the client cpu resolve the query; return the graphs with the rdf so that a sparql on the client can handle it
        # Early exit 1: caller only wants the RDF file path.
        if ($return_file_name) {
            if (filesize($filename) > 0) {
                return array(true, $filename);
            } else {
                return array(false);
            }
            exit;
        }
        # Early exit 2: delegate query resolution to the public sparql.org endpoint.
        if ($redirect) {
            ##And now use an external service ( I gave up with ARC) to parse the query
            $url2search = str_replace(S3DB_SERVER_ROOT, S3DB_URI_BASE, $filename);
            ##Giving up on ARC, surrender to sparql.com
            $remote_endpoint = "http://sparql.org/sparql?query=";
            $bq = ereg_replace("FROM <.*>", "FROM <" . $url2search . ">", $bq);
            $bq = urlencode($bq);
            $remote_endpoint .= $bq . '&default-graph-uri=&stylesheet=/xml-to-html.xsl';
            return array(true, $remote_endpoint);
        }
        #echo $filename;exit;
        #And finally perform the query on the model.
        $queryModel = rdf2php($filename);
        $format = $in['format'] != '' ? $in['format'] : 'html';
        # The temp RDF file is consumed by rdf2php and no longer needed.
        unlink($filename);
        if ($timer) {
            $timer->setMarker('Data converted to a model the rdf-api can query');
        }
        # Branch on requested output format.
        if (eregi('^(sparql-xml|sparql-html)$', $format)) {
            switch ($format) {
                case 'sparql-xml':
                    $result = $queryModel->sparqlQuery($sparql, 'XML');
                    break;
                case 'sparql-html':
                    $result = $queryModel->sparqlQuery($sparql, 'HTML');
                    # NOTE(review): $timer is used unguarded here — fatal if
                    # Benchmark was unavailable and su3d is requested.
                    if ($_REQUEST['su3d']) {
                        $timer->stop();
                        $profiling = $timer->getProfiling();
                        echo "Query took " . $profiling[count($profiling) - 1]['total'] . ' sec';
                    }
                    break;
            }
            if ($result) {
                return array(true, $result);
            } else {
                return false;
            }
        } elseif ($format == 'html.form') {
            # Interactive HTML form pre-filled with the current query.
            # NOTE(review): $data is never populated on this branch, so the row
            # count below is always 0 and array(false) is returned.
            $form .= ' <html> <head> </head><body> <form method="GET" action="sparql.php" id="sparqlform"> <h5>Target Deployment(s)</h5> <input type="hidden" name="key" value="' . $s3ql['key'] . '"/> <input type="hidden" name="format" value="' . $_REQUEST['format'] . '"/> <input type = "text" id="url" size = "100%" value="' . $GLOBALS['url'] . '" name="url"> <h5>SPARQL <a href="http://www.w3.org/TR/rdf-sparql-query/" target="_blank">(help!!)</a></h5> <br /> <textarea cols="100" id="sparql" rows="10" name = "query">' . stripslashes($sparql) . '</textarea><br /> <input type="submit" value="SPARQL this!" id="submitsparql"></body> </form> ';
            $form .= '<br />' . count($data) . " rows";
            $form .= '<br />Query took ' . (strtotime(date('His')) - $start) . ' sec';
            if (count($data) > 0) {
                return array(true, $form);
            } else {
                return array(false);
            }
        } else {
            #and output the result according to requested format
            $data = $queryModel->sparqlQuery($sparql);
            if ($timer) {
                $timer->setMarker('Query on SPARQL data executed by rdf-api.');
            }
            if (is_array($outputCols) && !empty($outputCols)) {
                ##only this one are to be shown in the final result
                $vars = $outputCols;
            }
            # Strip the leading '?' to get clean column names.
            $cleanCols = array();
            foreach ($vars as $varname) {
                $cleanCols[] = ereg_replace('^\\?', '', $varname);
            }
            # Flatten rdf-api result objects into plain string rows:
            # prefer ->uri, then ->label, else empty string.
            $outputData = array();
            if (is_array($data)) {
                foreach ($data as $s => $sparql_line) {
                    foreach ($sparql_line as $sparql_var => $sparql_var_value) {
                        if ($sparql_var_value->uri != '') {
                            $outputData[$s][ereg_replace('^\\?', '', $sparql_var)] = $sparql_var_value->uri;
                        } elseif ($sparql_var_value->label != '') {
                            $outputData[$s][ereg_replace('^\\?', '', $sparql_var)] = $sparql_var_value->label;
                        } else {
                            $outputData[$s][ereg_replace('^\\?', '', $sparql_var)] = "";
                        }
                    }
                }
            }
            if ($timer) {
                $timer->setMarker('Data converted in a format that fun outputformat can read');
            }
            #$timer ->display();
            #root is just the word that xml should parse as the root for each entry
            $root = 'sparql';
            if ($timer) {
                $timer->setMarker('All variables fitted into their places to represent in the final output');
            }
            $data = $outputData;
            $cols = $cleanCols;
            if ($_REQUEST['su3d']) {
                $timer->stop();
                $profiling = $timer->getProfiling();
                echo "Query took " . $profiling[count($profiling) - 1]['total'] . ' sec<br>';
            }
            $z = compact('data', 'cols', 'format', 'root');
            $out = outputFormat($z);
            # NOTE(review): echo + exit makes the return statements below dead
            # code — this branch never returns to the caller.
            echo $out;
            exit;
            if (count($data) > 0) {
                return array(true, $out);
            } else {
                return array(false);
            }
        }
    } else {
        # No triples were gathered: nothing to query.
        return array(false);
    }
    #else {
    #$out= formatReturn($GLOBALS['error_codes']['no_results'], 'Your query did not return any results.', $format,'');
    #}
}
# --- Fragment of an Okuyama key-value-store client benchmark (cut at both ends) ---
# Finishes timing one adapter (get/remove/close on keys key_0..key_N built from
# $range), then starts the same set/get cycle against the raw
# \Net\Okuyama\Adapter\Socket adapter for comparison.
# The foreach this first line belongs to is opened before this fragment.
        $client->getValue('key_' . $v);
}
# NOTE(review): reset($range) is redundant — foreach rewinds its own copy of
# the array; these calls are kept as-is to preserve the original behavior.
reset($range);
# Markers are set *before* the phase they name; elapsed time per phase is the
# gap between consecutive markers.
$t->setMarker('Remove 1000 items.');
foreach ($range as $v) {
    $client->removeValue('key_' . $v);
}
reset($range);
$t->setMarker('Close connection.');
$client->close();
$t->stop();
$t->display();
# Second run: benchmark the low-level socket adapter.
echo '\\Net\\Okuyama\\Adapter\\Socket' . PHP_EOL;
$t = new Benchmark_Timer();
$t->start();
$t->setMarker('Connect to Okuyama.');
require_once 'Net/Okuyama/Adapter.php';
require_once 'Net/Okuyama/Exception.php';
require_once 'Net/Okuyama/Adapter/Socket.php';
$client = new \Net\Okuyama\Adapter\Socket();
$client->autoConnect($hosts);
$t->setMarker('Set 1000 items.');
foreach ($range as $v) {
    $client->set('key_' . $v, 'val_' . $v);
}
reset($range);
$t->setMarker('Get 1000 items.');
foreach ($range as $v) {
    $client->get('key_' . $v);
}
reset($range);