<?php
xp_import('time');

// Requested wakeup interval, in milliseconds.
$precision = 10;

// Signal that fires every $precision milliseconds.
$signal = new \time\SIG_Awake($precision, TIME_MILLISECONDS);
$signal->time = milliseconds();

// Collected [drift, 0] samples, consumed by chart.php on shutdown.
$precision_timing = [];

// Runs 100 times; each run records how far the wakeup drifted from the
// requested interval.
$function = xp_exhaust(100, function ($signal) use ($precision, &$precision_timing) {
    // Drift = elapsed ms since the previous wakeup, minus the requested interval.
    $timing = (int) (milliseconds() - $signal->time) - $precision;
    echo $timing . PHP_EOL;
    $precision_timing[] = [$timing, 0];
    $signal->time = milliseconds();
});

xp_signal($signal, $function);

// On shutdown, render the collected drift samples into an HTML chart.
xp_on_shutdown(function () use (&$precision_timing) {
    // Drop the first sample: it measures startup overhead, not the interval.
    array_shift($precision_timing);
    // $results is read by the included chart.php (include shares this scope).
    $results = ['msPrecision' => $precision_timing];
    ob_start();
    include dirname(realpath(__FILE__)) . '/chart.php';
    $data = ob_get_contents();
    ob_end_clean();
    file_put_contents('millisecond_precision.html', $data);
    echo "Performance chart in millisecond_precision.html" . PHP_EOL;
});
/**
 * Registers a standard output mechanism for test results.
 *
 * Hooks the processor start/shutdown events to time the run, then, on
 * shutdown, walks the signal history tallying assertion results and printing
 * a summary plus details for every failed assertion.
 *
 * @return void
 */
function generate_output() {
    // The signal history is required so the executed tests can be replayed
    // on shutdown.
    xp_set_signal_history(true);
    // Startup: record when the test run began.
    xp_on_start(function () {
        if (XPSPL_DEBUG) {
            logger(XPSPL_LOG)->info('Unittest begin');
        }
        define('UNITTEST_START_TIME', milliseconds());
    });
    // Shutdown: tally results from the signal history and print the report.
    xp_on_shutdown(function () {
        if (XPSPL_DEBUG) {
            logger(XPSPL_LOG)->info('Unittest end');
        }
        define('UNITTEST_END_TIME', milliseconds());
        $tests = 0;
        $pass = 0;
        $fail = 0;
        $skip = 0;
        $output = Output::instance();
        $tests_run = [];
        foreach (xp_signal_history() as $_record) {
            // Only test signals contribute to the report.
            if (!$_record[0] instanceof SIG_Test) {
                continue;
            }
            $tests++;
            $tests_run[] = $_record[0];
            $failures = [];
            // Count passed / skipped / failed assertions for this test.
            foreach ($_record[0]->get_assertion_results() as $_result) {
                if ($_result[0] === true) {
                    $pass++;
                } elseif ($_result[0] === null) {
                    $skip++;
                } else {
                    $fail++;
                    $failures[] = $_result;
                }
            }
            // Print the details of every failed assertion.
            if (count($failures) != 0) {
                $output->send_linebreak(Output::ERROR);
                foreach ($failures as $_failed) {
                    $output->send("FAILURE", Output::ERROR);
                    $output->send("ASSERTION : " . $_failed[1], Output::ERROR, true);
                    $output->send("MESSAGE : " . $_failed[0], Output::ERROR, true);
                    $output->send(sprintf('ARGUMENTS : %s', $output->variable($_failed[2])), Output::ERROR, true);
                    $trace = $_failed[3][1];
                    $output->send("FILE : " . $trace["file"], Output::ERROR, true);
                    $output->send("LINE : " . $trace["line"], Output::ERROR);
                    $output->send_linebreak(Output::ERROR);
                }
            }
        }
        // Human-readable byte-count formatter.
        // NOTE: of unknown original authorship (kept from the prior revision).
        $format_bytes = function ($bytes) {
            $units = array(" Bytes", "KB", "MB", "GB", "TB", "PB", " EB", "ZB", "YB");
            if (!$bytes) {
                return '0 Bytes';
            }
            $exp = floor(log($bytes, 1024));
            return round($bytes / pow(1024, $exp), 2) . $units[$exp];
        };
        $output->send_linebreak();
        $output->send(sprintf(
            "Ran %s tests in %sms and used %s memory",
            $tests,
            UNITTEST_END_TIME - UNITTEST_START_TIME,
            $format_bytes(memory_get_peak_usage())
        ), Output::SYSTEM, true);
        $output->send(sprintf(
            "%s Assertions: %s Passed, %s Failed, %s Skipped",
            $pass + $fail + $skip,
            $pass,
            $fail,
            $skip
        ), Output::SYSTEM, true);
    });
}
// Shutdown hook: compute and print per-file and total code coverage
// gathered by Xdebug during the test run.
xp_on_shutdown(function () {
    // Path fragments (matched case-insensitively) excluded from the report.
    $exclude = ['api.php', 'XPSPL.php', '__init__.php', 'examples', 'tests', 'module'];
    $coverage = xdebug_get_code_coverage();
    xdebug_stop_code_coverage();
    // Every .php file under the XPSPL source tree.
    $dir = new \RegexIterator(
        new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator(XPSPL_PATH)),
        '/^.+\\.php$/i',
        \RecursiveRegexIterator::GET_MATCH
    );
    // Re-key the coverage data by lowercased path so lookups below are
    // case-insensitive.
    $c = [];
    $avg = [];
    foreach ($coverage as $_k => $_v) {
        $c[strtolower($_k)] = $_v;
    }
    $coverage = $c;
    unset($c);
    foreach ($dir as $_file) {
        // $_file is a GET_MATCH array; map over its entries (the matched paths).
        array_map(function ($i) use ($coverage, &$avg, $exclude) {
            $file = strtolower($i);
            foreach ($exclude as $_exclude) {
                if (stripos($file, $_exclude) !== false) {
                    return;
                }
            }
            if (isset($coverage[$file])) {
                $lines = 0;
                $total = 0;
                foreach ($coverage[$file] as $_v) {
                    // -2 marks non-executable (dead) lines; everything else counts
                    // toward the executable-line total.
                    if ($_v != -2) {
                        $lines++;
                    }
                    // >= 1 means the line was executed at least once.
                    if ($_v >= 1) {
                        $total++;
                    }
                }
                // Guard: a file with no executable lines would otherwise divide
                // by zero.
                $avg[$file] = $lines > 0 ? round($total / $lines * 100, 2) : 0;
            } else {
                // File was never loaded during the run: 0% coverage.
                $avg[$file] = 0;
            }
        }, $_file);
    }
    $total = 0.0;
    foreach ($avg as $_c) {
        $total += $_c;
    }
    // Guard: an empty file set would otherwise divide by zero.
    $overall = count($avg) > 0 ? round($total / (count($avg) * 100) * 100, 2) : 0;
    \unittest\Output::send('--------------------', \unittest\Output::DEBUG, true);
    \unittest\Output::send(sprintf('Total Test Coverage : %s%%', $overall), \unittest\Output::DEBUG, true);
    \unittest\Output::send('--------------------', \unittest\Output::DEBUG, true);
    foreach ($avg as $_k => $_c) {
        \unittest\Output::send(sprintf('File : %s', $_k), \unittest\Output::DEBUG, true);
        \unittest\Output::send(sprintf('Coverage : %s%%', $_c), \unittest\Output::DEBUG, true);
        \unittest\Output::send('--------------------', \unittest\Output::DEBUG, true);
    }
});