/**
 * Runs the test suite.
 */
public function run() {
  $results = array();

  $command = '(mkdir -p build && cd build && cmake ..)';
  $command .= ' && make -C build all';
  $command .= ' && make -C build test';

  // Execute the test command and time it.
  $timeStart = microtime(true);
  $future = new ExecFuture($command);
  do {
    $future->read();
    // sleep() only takes whole seconds, so poll with usleep() instead.
    usleep(500000);
  } while (!$future->isReady());
  list($error, $stdout, $stderr) = $future->resolve();
  $timeEnd = microtime(true);

  // Create a unit test result structure.
  $result = new ArcanistUnitTestResult();
  $result->setNamespace('DerpVision');
  $result->setName('Core');
  $result->setDuration($timeEnd - $timeStart);

  if ($error == 0) {
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
  } else {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    $result->setUserData($stdout.$stderr);
  }

  $results[] = $result;
  return $results;
}
private function parseOutput($output) {
  $results = array();
  $json = json_decode($output, true);

  foreach ($json['examples'] as $example) {
    $result = new ArcanistUnitTestResult();
    $result->setName($example['full_description']);
    if (array_key_exists('run_time', $example)) {
      $result->setDuration($example['run_time']);
    }

    switch ($example['status']) {
      case 'passed':
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        break;
      case 'failed':
        $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
        $result->setUserData($example['exception']['message']);
        break;
      case 'pending':
        $result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
        break;
    }

    $results[] = $result;
  }
  return $results;
}
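// For reference, a minimal sketch of the RSpec JSON formatter output this
// parser expects (field names as produced by `rspec --format json`; the
// concrete values here are illustrative, not from a real run):
//
//   {
//     "examples": [
//       {"full_description": "Widget renders", "status": "passed",
//        "run_time": 0.012},
//       {"full_description": "Widget handles bad input", "status": "failed",
//        "exception": {"message": "expected true, got false"}},
//       {"full_description": "Widget resizes", "status": "pending"}
//     ]
//   }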
private function parseTestResults($test, $err, $stdout, $stderr) {
  $result = new ArcanistUnitTestResult();
  $result->setName($test);
  $result->setUserData($stdout.$stderr);
  $result->setResult($err == 0
    ? ArcanistUnitTestResult::RESULT_PASS
    : ArcanistUnitTestResult::RESULT_FAIL);
  if (preg_match("/# ELAPSED: (\\d+)ms/", $stderr, $M)) {
    $result->setDuration($M[1] / 1000);
  }
  return $result;
}
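// The "# ELAPSED: <n>ms" marker parsed above appears to be a convention of
// the harness this engine wraps (a TAP-style diagnostic line). A run would
// presumably end its stderr with something like:
//
//   # ELAPSED: 1532ms
//
// which the regex converts to a duration of 1.532 seconds.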
/**
 * Parse test results from phpunit json report.
 *
 * @param string $path         Path to test
 * @param string $test_results String containing phpunit json report
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  $report = $this->getJsonReport($test_results);

  // Coverage is for all testcases in the executed $path.
  $coverage = array();
  if ($this->enableCoverage !== false) {
    $coverage = $this->readCoverage();
  }

  $results = array();
  foreach ($report as $event) {
    if ('test' != $event->event) {
      continue;
    }

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    if ('fail' == $event->status) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $user_data .= $event->message."\n";
      foreach ($event->trace as $trace) {
        $user_data .= sprintf("\n%s:%s", $trace->file, $trace->line);
      }
    } else if ('error' == $event->status) {
      if (strpos($event->message, 'Skipped Test') !== false) {
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        $user_data .= $event->message;
      } else if (strpos($event->message, 'Incomplete Test') !== false) {
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        $user_data .= $event->message;
      } else {
        $status = ArcanistUnitTestResult::RESULT_BROKEN;
        $user_data .= $event->message;
        foreach ($event->trace as $trace) {
          $user_data .= sprintf("\n%s:%s", $trace->file, $trace->line);
        }
      }
    }

    $name = preg_replace('/ \\(.*\\)/', '', $event->test);

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    $result->setDuration($event->time);
    $result->setCoverage($coverage);
    $result->setUserData($user_data);

    $results[] = $result;
  }
  return $results;
}
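// Sketch of the event stream produced by PHPUnit's (since removed)
// --log-json option, which getJsonReport() is assumed to decode into
// objects; the values are illustrative:
//
//   {"event":"testStart","suite":"ExampleTest","test":"ExampleTest::testOk"}
//   {"event":"test","suite":"ExampleTest","test":"ExampleTest::testOk",
//    "status":"pass","time":0.003,"trace":[],"message":""}
//   {"event":"test","suite":"ExampleTest","test":"ExampleTest::testBad",
//    "status":"fail","time":0.001,
//    "trace":[{"file":"tests/ExampleTest.php","line":17}],
//    "message":"Failed asserting that false is true."}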
public function runJs() {
  // First, check to see if karma is on $PATH:
  list($err, $stdout, $_) = exec_manual('which karma');
  if ($err != 0) {
    $result = new ArcanistUnitTestResult();
    $result->setName('Karma not found. Skipping js tests...');
    $result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
    $result->setDuration(0);
    return array($result);
  }

  // Karma IS on the path: run it from the client js directory.
  $old_dir = getcwd();
  $project_root = $this->getWorkingCopy()->getProjectRoot();
  chdir($project_root.'/client/js');
  exec_manual('karma start karma-conf-oneshot.js');
  chdir($old_dir);

  // Read the test-results.xml file, then destroy it.
  $xml = file_get_contents($project_root.'/client/test-results.xml');
  $doc = new SimpleXMLElement($xml);
  unlink($project_root.'/client/test-results.xml');

  // Extract all the test cases and convert each to an
  // ArcanistUnitTestResult. Cast SimpleXML attributes to native types.
  $results = array();
  foreach ($doc->testsuite as $suite) {
    $suite_name = (string)$suite['name'];
    foreach ($suite->testcase as $case) {
      $case_name = (string)$case['name'];
      $time = (float)$case['time'];
      $fixture_name = substr(
        (string)$case['classname'],
        strlen($suite_name) + 1);

      // Did we fail?
      $failure = (string)$case->failure;

      $result = new ArcanistUnitTestResult();
      $result->setName($fixture_name.' '.$case_name);
      $result->setResult($failure
        ? ArcanistUnitTestResult::RESULT_FAIL
        : ArcanistUnitTestResult::RESULT_PASS);
      $result->setUserData($failure);
      $result->setDuration($time);
      $results[] = $result;
    }
  }
  return $results;
}
public function run() {
  $results = array();
  $build_start = microtime(true);
  $config_manager = $this->getConfigurationManager();

  if ($this->getEnableCoverage() !== false) {
    $command = $config_manager->getConfigFromAnySource('unit.engine.tap.cover');
  } else {
    $command = $config_manager->getConfigFromAnySource('unit.engine.tap.command');
  }

  $timeout = $config_manager->getConfigFromAnySource('unit.engine.tap.timeout');
  if (!$timeout) {
    $timeout = 15;
  }

  $future = new ExecFuture('%C', $command);
  $future->setTimeout($timeout);

  $result = new ArcanistUnitTestResult();
  $result->setName($command ? $command : 'unknown');

  try {
    list($stdout, $stderr) = $future->resolvex();
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
    if ($this->getEnableCoverage() !== false) {
      $coverage = $this->readCoverage('coverage/cobertura-coverage.xml');
      $result->setCoverage($coverage);
    }
  } catch (CommandException $exc) {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    if ($future->getWasKilledByTimeout()) {
      print
        "Process stdout:\n".$exc->getStdout().
        "\nProcess stderr:\n".$exc->getStderr().
        "\nExceeded timeout of {$timeout} secs.\nMake unit tests faster.";
    } else {
      $result->setUserData($exc->getStdout().$exc->getStderr());
    }
  }

  $result->setDuration(microtime(true) - $build_start);
  $results[] = $result;
  return $results;
}
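// This engine is driven entirely by configuration. A minimal sketch of the
// corresponding .arcconfig entries (the key names come from the code above;
// the command values are illustrative):
//
//   {
//     "unit.engine.tap.command": "node test/all.js --tap",
//     "unit.engine.tap.cover":   "istanbul cover test/all.js",
//     "unit.engine.tap.timeout": 30
//   }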
/**
 * Parse test results from Go test report
 * (e.g. `go test -v`).
 *
 * @param string $path         Path to test
 * @param string $test_results String containing Go test output
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  $test_results = explode("\n", $test_results);
  $results = array();
  // We'll get our full test case name at the end and add it back in.
  $test_case_name = '';
  // Temp store for test case results (in case we run multiple test cases).
  $test_case_results = array();

  foreach ($test_results as $i => $line) {
    if (strncmp($line, '--- PASS', 8) === 0) {
      // We have a passing test.
      $meta = array();
      preg_match(
        '/^--- PASS: (?P<test_name>.+) \\((?P<time>.+) seconds\\).*/',
        $line,
        $meta);

      $result = new ArcanistUnitTestResult();
      // For now set name without test case, we'll add it later.
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      $result->setDuration($meta['time']);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- FAIL', 8) === 0) {
      // We have a failing test.
      $reason = trim($test_results[$i + 1]);
      $meta = array();
      preg_match(
        '/^--- FAIL: (?P<test_name>.+) \\((?P<time>.+) seconds\\).*/',
        $line,
        $meta);

      $result = new ArcanistUnitTestResult();
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setDuration($meta['time']);
      $result->setUserData($reason."\n");
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, 'ok', 2) === 0) {
      $meta = array();
      preg_match(
        '/^ok[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+(?P<time>.*)s.*/',
        $line,
        $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);

      // Our test case passed; check to make sure we were in verbose (-v)
      // mode.
      if (empty($test_case_results)) {
        // We weren't in verbose mode: create one successful result for the
        // whole test case.
        $test_name = 'Go::TestCase::'.$test_case_name;

        $result = new ArcanistUnitTestResult();
        $result->setName($test_name);
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        $result->setDuration($meta['time']);
        $results[] = $result;
      } else {
        $test_case_results = $this->fixNames(
          $test_case_results,
          $test_case_name);
        $results = array_merge($results, $test_case_results);
        $test_case_results = array();
      }
      continue;
    }

    if (strncmp($line, "FAIL\t", 5) === 0) {
      $meta = array();
      preg_match(
        '/^FAIL[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+.*/',
        $line,
        $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      $test_case_results = $this->fixNames(
        $test_case_results,
        $test_case_name);
      $results = array_merge($results, $test_case_results);
      $test_case_results = array();
      continue;
    }
  }
  return $results;
}
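// Sketch of the `go test -v` output this parser walks. Note the regexes
// above expect the duration spelled as "seconds", which older Go toolchains
// emitted; the package name and timings here are illustrative:
//
//   === RUN TestAdd
//   --- PASS: TestAdd (0.00 seconds)
//   === RUN TestSub
//   --- FAIL: TestSub (0.01 seconds)
//       sub_test.go:14: expected 1, got 2
//   FAIL
//   FAIL	example.com/calc	0.015s
//
// A fully passing non-verbose run collapses to a single summary line such
// as "ok	example.com/calc	0.012s", which is why the "ok" branch
// synthesizes one result for the whole test case.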
private function parseTestResults($targets, $status) {
  $code = $status[0];
  $output = $status[1];
  $lines = explode("\n", $output);

  if ($code == 4) {
    print "No tests affected...\n";
    return [];
  } else if ($code == 1) {
    throw new Exception($output."\n".$status[2]);
  }

  $query_command = $this->bazelCommand(["query", "-k", "%s"]);
  $future = new ExecFuture(
    $query_command,
    'tests(set('.join(" ", $targets).'))');
  $future->setCWD($this->project_root);
  $testTargets = explode("\n", trim($future->resolvex()[0]));

  $results = array();
  foreach ($testTargets as $test) {
    $data = $this->parseTestResultFile($test);

    $result = new ArcanistUnitTestResult();
    $result->setName($test);
    if (property_exists($data, "test_case")) {
      $testCase = $data->{"test_case"};
      if (property_exists($testCase, "run_duration_millis")) {
        $result->setDuration($testCase->{"run_duration_millis"} / 1000);
      }
    }
    if ($data->{"test_passed"}) {
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
    } else if ($data->{"status"} == 4) {
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    } else {
      $result->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
    }
    $results[] = $result;
  }
  return $results;
}
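// The query built above expands the affected targets into the concrete test
// targets they contain; `tests()` and `set()` are standard Bazel query
// functions and `-k` keeps going past errors. Roughly (target names
// illustrative):
//
//   bazel query -k 'tests(set(//foo:all //bar:bar_test))'
//   //foo:foo_test
//   //bar:bar_test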
private final function resultTest($test_result, $reason) {
  $coverage = $this->endCoverage();

  $result = new ArcanistUnitTestResult();
  $result->setCoverage($coverage);
  $result->setNamespace(get_class($this));
  $result->setName($this->runningTest);
  $result->setLink($this->getLink($this->runningTest));
  $result->setResult($test_result);
  $result->setDuration(microtime(true) - $this->testStartTime);
  $result->setUserData($reason);
  $this->results[] = $result;

  if ($this->renderer) {
    echo $this->renderer->renderUnitResult($result);
  }
}
private function processCoverageResults($project_root, $results) {
  $time_start = microtime_float();

  // Generate annotated source files to find out which lines have coverage;
  // limit files to only those "*.py" files in getPaths().
  $pythonPathsStr = join(" ", $this->getPythonPaths());
  $future = new ExecFuture("coverage annotate {$pythonPathsStr}");
  $future->setCWD($project_root);
  try {
    $future->resolvex();
  } catch (CommandException $exc) {
    if ($exc->getError() > 1) {
      // 'nose' returns 1 when tests are failing/broken.
      throw $exc;
    }
  }

  // Store all the coverage results for this project.
  $coverageArray = array();
  $lines_total = 0;
  $lines_executable = 0;
  $lines_not_executable = 0;
  $lines_covered = 0;
  $lines_not_covered = 0;

  // Walk through the project directory, searching for all ",cover" files
  // that coverage.py left behind.
  $iterator = new RecursiveIteratorIterator(
    new RecursiveDirectoryIterator("."));
  foreach ($iterator as $path) {
    // Paths are given as "./path/to/file.py,cover", so match the
    // "path/to/file.py" part.
    if (!preg_match(":^\\./(.*),cover\$:", $path, $matches)) {
      continue;
    }
    $srcFilePath = $matches[1];
    $coverageStr = "";

    foreach (file($path) as $coverLine) {
      /*
        python coverage:
          >  executed
          !  missing (not executed)
          -  excluded

        phab coverage:
          N  Not executable. This is a comment or whitespace which should
             be ignored when computing test coverage.
          C  Covered. This line has test coverage.
          U  Uncovered. This line is executable but has no test coverage.
          X  Unreachable. If your coverage analysis can detect unreachable
             code, you can report it here.
      */
      $lines_total++;
      switch ($coverLine[0]) {
        case '>':
          $lines_covered++;
          $lines_executable++;
          $coverageStr .= 'C';
          break;
        case '!':
          $lines_not_covered++;
          $lines_executable++;
          $coverageStr .= 'U';
          break;
        case ' ':
          $lines_not_executable++;
          $coverageStr .= 'N';
          break;
        case '-':
          $coverageStr .= 'X';
          break;
        default:
          break;
      }
    }

    // Delete the ",cover" file.
    unlink($path);

    // Only add to the coverage report if the path was originally specified
    // by arc.
    if (in_array($srcFilePath, $this->getPaths())) {
      $coverageArray[$srcFilePath] = $coverageStr;
    }
  }

  $lines_percentage = bcdiv($lines_covered, $lines_executable, 4) * 100;
  $time_end = microtime_float();
  $time = $time_end - $time_start;

  // Python does not support per-test coverage results, so just put all the
  // coverage in a single 'coverage test'.
  $result = new ArcanistUnitTestResult();
  $result->setNamespace('coverage');
  $result->setName('coverage');
  $result->setResult('pass');
  $result->setDuration($time);
  $result->setUserData(
    "coverage: {$lines_percentage}% ".
    "executable: {$lines_executable} / covered: {$lines_covered}");
  $result->setCoverage($coverageArray);

  $results[] = $result;
  return $results;
}
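// Sketch of a ",cover" file as coverage.py's `annotate` command emits it:
// each source line is reproduced with a one-character prefix, which the
// switch above maps to Phabricator's C/U/N/X codes (source is illustrative;
// lines starting with a space are non-executable, e.g. blanks or comments):
//
//   > def add(a, b):
//   >     return a + b
//
//   ! def never_called():
//   !     return None
//   - def excluded():  # pragma: no cover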
/**
 * Mark the current running test as skipped.
 *
 * @param string Description for why this test was skipped.
 * @return void
 * @task internal
 */
private final function skipTest($reason) {
  $coverage = $this->endCoverage();

  $result = new ArcanistUnitTestResult();
  $result->setCoverage($coverage);
  $result->setName($this->runningTest);
  $result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
  $result->setDuration(microtime(true) - $this->testStartTime);
  $result->setUserData($reason);
  $this->results[] = $result;
}
public function parseTestResults($path, $xunit_tmp, $cover_tmp) {
  // xunit xsd: https://gist.github.com/959290
  $xunit_dom = new DOMDocument();
  $xunit_dom->loadXML(Filesystem::readFile($xunit_tmp));

  // Coverage is for all testcases in the executed $path.
  $coverage = array();
  if ($this->getEnableCoverage() !== false) {
    $coverage = $this->readCoverage($cover_tmp);
  }

  $results = array();
  $testcases = $xunit_dom->getElementsByTagName("testcase");
  foreach ($testcases as $testcase) {
    $classname = $testcase->getAttribute("classname");
    $name = $testcase->getAttribute("name");
    $time = $testcase->getAttribute("time");

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = "";

    // A skipped test is a test which was ignored using framework
    // mechanisms (e.g. @skip decorator).
    $skipped = $testcase->getElementsByTagName("skipped");
    if ($skipped->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_SKIP;
      $messages = array();
      for ($ii = 0; $ii < $skipped->length; $ii++) {
        $messages[] = trim($skipped->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages);
    }

    // Failure is a test which the code has explicitly failed by using the
    // mechanisms for that purpose, e.g. via an assertEquals.
    $failures = $testcase->getElementsByTagName("failure");
    if ($failures->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $messages = array();
      for ($ii = 0; $ii < $failures->length; $ii++) {
        $messages[] = trim($failures->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages)."\n";
    }

    // An errored test is one that had an unanticipated problem, e.g. an
    // unchecked throwable, or a problem with an implementation of the
    // test.
    $errors = $testcase->getElementsByTagName("error");
    if ($errors->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_BROKEN;
      $messages = array();
      for ($ii = 0; $ii < $errors->length; $ii++) {
        $messages[] = trim($errors->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages)."\n";
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($classname.".".$name);
    $result->setResult($status);
    $result->setDuration($time);
    $result->setCoverage($coverage);
    $result->setUserData($user_data);

    $results[] = $result;
  }
  return $results;
}
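// Sketch of the JUnit-style report this parser (and the near-identical one
// further below) consumes, per the "xunit xsd" linked above; names and
// messages are illustrative:
//
//   <testsuite name="ExampleSuite" tests="3">
//     <testcase classname="example.Widget" name="renders" time="0.012"/>
//     <testcase classname="example.Widget" name="fails" time="0.002">
//       <failure>expected:&lt;1&gt; but was:&lt;2&gt;</failure>
//     </testcase>
//     <testcase classname="example.Widget" name="skipped" time="0">
//       <skipped>not supported on this platform</skipped>
//     </testcase>
//   </testsuite>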
/**
 * Parse test results from phpunit json report.
 *
 * @param string $path         Path to test
 * @param string $test_results String containing phpunit json report
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  if (!$test_results) {
    $result = id(new ArcanistUnitTestResult())
      ->setName($path)
      ->setUserData($this->stderr)
      ->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
    return array($result);
  }

  $report = $this->getJsonReport($test_results);

  // Coverage is for all testcases in the executed $path.
  $coverage = array();
  if ($this->enableCoverage !== false) {
    $coverage = $this->readCoverage();
  }

  $last_test_finished = true;

  $results = array();
  foreach ($report as $event) {
    switch (idx($event, 'event')) {
      case 'test':
        break;
      case 'testStart':
        $last_test_finished = false;
        // fall through
      default:
        continue 2; // switch + loop
    }

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    if ('fail' == idx($event, 'status')) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $user_data .= idx($event, 'message')."\n";
      foreach (idx($event, 'trace') as $trace) {
        $user_data .= sprintf(
          "\n%s:%s",
          idx($trace, 'file'),
          idx($trace, 'line'));
      }
    } else if ('error' == idx($event, 'status')) {
      if (strpos(idx($event, 'message'), 'Skipped Test') !== false) {
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        $user_data .= idx($event, 'message');
      } else if (strpos(idx($event, 'message'), 'Incomplete Test') !== false) {
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        $user_data .= idx($event, 'message');
      } else {
        $status = ArcanistUnitTestResult::RESULT_BROKEN;
        $user_data .= idx($event, 'message');
        foreach (idx($event, 'trace') as $trace) {
          $user_data .= sprintf(
            "\n%s:%s",
            idx($trace, 'file'),
            idx($trace, 'line'));
        }
      }
    }

    $name = preg_replace('/ \\(.*\\)/s', '', idx($event, 'test'));

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    $result->setDuration(idx($event, 'time'));
    $result->setCoverage($coverage);
    $result->setUserData($user_data);

    $results[] = $result;
    $last_test_finished = true;
  }

  if (!$last_test_finished) {
    // The last 'testStart' event had no matching 'test' event, so the test
    // presumably crashed the runner; report it as broken.
    $results[] = id(new ArcanistUnitTestResult())
      ->setName(idx($event, 'test'))
      ->setUserData($this->stderr)
      ->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
  }
  return $results;
}
public function run() {
  $projectRoot = $this->getWorkingCopy()->getProjectRoot();
  $cwd = getcwd();
  $buildDir = $this->findBuildDirectory($projectRoot, $cwd);

  $polliObjDir = $buildDir;
  $polliSubDir = $buildDir.DIRECTORY_SEPARATOR."tools".
    DIRECTORY_SEPARATOR."polly".DIRECTORY_SEPARATOR."tools".
    DIRECTORY_SEPARATOR."polli";
  if (is_dir($polliSubDir)) {
    $polliObjDir = $polliSubDir;
  }
  $polliTestDir = $polliObjDir.DIRECTORY_SEPARATOR."test";

  if (is_dir($buildDir.DIRECTORY_SEPARATOR."bin") &&
      file_exists($buildDir.DIRECTORY_SEPARATOR."bin".
        DIRECTORY_SEPARATOR."llvm-lit")) {
    $lit = $buildDir.DIRECTORY_SEPARATOR."bin".
      DIRECTORY_SEPARATOR."llvm-lit";
    $cmd = "ninja -C ".$buildDir;
    print "Running ninja (".$cmd.")\n";
    exec($cmd);
  } else {
    $makeVars = $this->getMakeVars($buildDir);
    $lit = $this->findLitExecutable($makeVars);
  }
  print "Using lit executable '{$lit}'\n";

  // We have to modify the format string, because llvm-lit does not like a
  // '' argument.
  $cmd = '%s %s 2>&1';
  $litFuture = new ExecFuture($cmd, $lit, $polliTestDir);

  $out = "";
  $results = array();
  $lastTime = microtime(true);
  $ready = false;
  $dots = "";
  $numTests = 0;

  while (!$ready) {
    $ready = $litFuture->isReady();
    $newout = $litFuture->readStdout();
    if (strlen($newout) == 0) {
      usleep(100);
      continue;
    }
    $out .= $newout;
    if ($ready && strlen($out) > 0 && substr($out, -1) != "\n") {
      $out .= "\n";
    }

    while (($nlPos = strpos($out, "\n")) !== FALSE) {
      $line = substr($out, 0, $nlPos + 1);
      $out = substr($out, $nlPos + 1);

      $res = ArcanistUnitTestResult::RESULT_UNSOUND;
      if (substr($line, 0, 6) == "PASS: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 6) == "FAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XPASS: ") {
        // An unexpected pass counts as a failure.
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XFAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 13) == "UNSUPPORTED: ") {
        $res = ArcanistUnitTestResult::RESULT_SKIP;
      } elseif (!$numTests &&
                preg_match('/Testing: ([0-9]+) tests/', $line, $matches)) {
        $numTests = (int)$matches[1];
      }

      // "\x1b[K" clears the current line and "\x1b[0A" moves the cursor
      // up, so the progress line is redrawn in place.
      if ($res == ArcanistUnitTestResult::RESULT_FAIL) {
        print "\x1b[0A";
      }
      if ($res != ArcanistUnitTestResult::RESULT_SKIP &&
          $res != ArcanistUnitTestResult::RESULT_PASS) {
        print "\x1b[K\x1b[0A".$line.self::progress($results, $numTests);
      }
      if ($res == ArcanistUnitTestResult::RESULT_UNSOUND) {
        continue;
      }

      $result = new ArcanistUnitTestResult();
      $result->setName(trim(substr($line, strpos($line, ':') + 1)));
      $result->setResult($res);
      $newTime = microtime(true);
      $result->setDuration($newTime - $lastTime);
      $lastTime = $newTime;
      $results[] = $result;
      $dots .= ".";
      print "\r\x1b[K\x1b[0A".self::progress($results, $numTests);
    }
  }

  list($out1, $out2) = $litFuture->read();
  print $out1;
  if ($out2) {
    throw new Exception(
      'There was error output, even though it should have been '.
      'redirected to stdout.');
  }
  print "\n";

  // Only report tests that failed or were slower than the threshold.
  $timeThreshold = 0.05;
  $interestingTests = array();
  foreach ($results as $result) {
    if ($result->getResult() != "pass") {
      $interestingTests[] = $result;
    }
    if ($result->getDuration() > $timeThreshold) {
      $interestingTests[] = $result;
    }
  }
  return $interestingTests;
}
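// Sketch of the lit output stream the loop above scans line by line (test
// names and counts are illustrative):
//
//   -- Testing: 42 tests, 8 threads --
//   PASS: Polly :: Isl/CodeGen/simple.ll (1 of 42)
//   FAIL: Polly :: ScopInfo/broken.ll (2 of 42)
//   XFAIL: Polly :: Known/failing.ll (3 of 42)
//   UNSUPPORTED: Polly :: GPU/cuda.ll (4 of 42)
//
// Everything after the first ": " becomes the result name; the "Testing:"
// header seeds the total used by self::progress().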
public function runIntegrationTests($tests) {
  // Now find all the test programs.
  $root = $this->getProjectRoot();
  $test_dir = $root."/tests/integration/";

  if (!$tests) {
    $paths = glob($test_dir."*.php");
    $paths[] = 'ruby/ruby-watchman/spec/ruby_watchman_spec.rb';
  } else {
    $paths = $tests;
  }

  foreach (array(
    '/tmp/watchman-test.log',
    '/tmp/watchman-valgrind.log',
    '/tmp/watchman-valgrind.xml',
    '/tmp/watchman-callgrind.txt') as $log) {
    @unlink($log);
  }

  foreach ($paths as $path) {
    if (preg_match("/\\.php\$/", $path) && file_exists($path)) {
      // Don't pull in files starting with "_"; we're using those as
      // helpers for triggers.
      $base = basename($path);
      if ($base[0] != '_') {
        require_once $path;
      }
    }
  }

  // We test for this in a test case.
  putenv("WATCHMAN_EMPTY_ENV_VAR=");

  $coverage = $this->getEnableCoverage();
  if (!$this->first_inst) {
    $this->first_inst = new WatchmanInstance($root, $coverage);
  }
  $first_inst = $this->first_inst;
  $instances = array($first_inst);

  // Helper for python or other language tests.
  putenv("WATCHMAN_SOCK=".$first_inst->getFullSockName());

  // Exercise the different serialization combinations.
  $cli_matrix = array();

  // Find all the test cases that were declared.
  $results = array();
  foreach (get_declared_classes() as $name) {
    $ref = new ReflectionClass($name);
    if (!$ref->isSubclassOf('WatchmanTestCase')) {
      continue;
    }

    // Good enough; let's use it.
    $test_case = newv($name, array());
    $config = $test_case->getGlobalConfig();
    if ($config) {
      $instance = new WatchmanInstance($root, $coverage, $config);
      $instances[] = $instance;
    } else {
      $instance = $first_inst;
    }
    $test_case->setWatchmanInstance($instance);

    if (!$instance->getProcessID()) {
      $res = new ArcanistUnitTestResult();
      $res->setName('dead');
      $res->setUserData('died before test start');
      $res->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $results[] = array($res);
      break;
    }

    $test_case->setRoot($root);
    $test_case->setPaths($paths);
    $results[] = $test_case->run();

    if (!$test_case->needsLiveConnection()) {
      foreach ($cli_matrix as $mname => $args) {
        $test_case->useCLI($args);
        $cli_results = $test_case->run();
        foreach ($cli_results as $res) {
          $res->setName($res->getName()." [CLI: {$mname}]");
        }
        $results[] = $cli_results;
      }
    }
  }

  foreach ($paths as $path) {
    if (!preg_match('/\\.rb$/', $path)) {
      continue;
    }
    if (!file_exists($path)) {
      // Was deleted in this (pending) rev.
      continue;
    }
    $start = microtime(true);
    $future = new ExecFuture(
      "PATH=\"{$root}:\$PATH\" \${MAKE:-make} rb-tests");
    $future->setTimeout(10);
    list($status, $out, $err) = $future->resolve();
    $end = microtime(true);

    $res = new ArcanistUnitTestResult();
    $res->setName($path);
    $res->setUserData($out.$err);
    $res->setDuration($end - $start);
    $res->setResult($status == 0
      ? ArcanistUnitTestResult::RESULT_PASS
      : ArcanistUnitTestResult::RESULT_FAIL);
    $results[] = array($res);
  }

  foreach ($instances as $instance) {
    $results[] = $instance->generateValgrindTestResults();
  }

  $results = array_mergev($results);
  return $results;
}
/**
 * Parses the test results from xUnit.
 *
 * @param string The name of the xUnit results file.
 * @param string The name of the coverage file if one was provided by
 *               `buildTestFuture`. This is passed through to
 *               `parseCoverageResult`.
 * @return array Test results.
 */
private function parseTestResult($xunit_tmp, $coverage) {
  $xunit_dom = new DOMDocument();
  $xunit_dom->loadXML(Filesystem::readFile($xunit_tmp));

  $results = array();
  $tests = $xunit_dom->getElementsByTagName('test');
  foreach ($tests as $test) {
    $name = $test->getAttribute('name');
    $time = $test->getAttribute('time');

    $status = ArcanistUnitTestResult::RESULT_UNSOUND;
    switch ($test->getAttribute('result')) {
      case 'Pass':
        $status = ArcanistUnitTestResult::RESULT_PASS;
        break;
      case 'Fail':
        $status = ArcanistUnitTestResult::RESULT_FAIL;
        break;
      case 'Skip':
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        break;
    }

    $userdata = '';
    $reason = $test->getElementsByTagName('reason');
    $failure = $test->getElementsByTagName('failure');
    if ($reason->length > 0 || $failure->length > 0) {
      $node = ($reason->length > 0) ? $reason : $failure;
      $message = $node->item(0)->getElementsByTagName('message');
      if ($message->length > 0) {
        $userdata = $message->item(0)->nodeValue;
      }
      $stacktrace = $node->item(0)->getElementsByTagName('stack-trace');
      if ($stacktrace->length > 0) {
        $userdata .= "\n".$stacktrace->item(0)->nodeValue;
      }
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    $result->setDuration($time);
    $result->setUserData($userdata);
    if ($coverage != null) {
      $result->setCoverage($this->parseCoverageResult($coverage));
    }
    $results[] = $result;
  }
  return $results;
}
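// Unlike the JUnit-style reports elsewhere in this section, this parser
// reads xUnit.net's own (v1-style) format, where <test> elements carry a
// "result" attribute. A sketch with illustrative contents:
//
//   <assembly name="Example.Tests.dll">
//     <class name="Example.Tests.WidgetTests">
//       <test name="Example.Tests.WidgetTests.Renders"
//             result="Pass" time="0.021"/>
//       <test name="Example.Tests.WidgetTests.Fails"
//             result="Fail" time="0.004">
//         <failure exception-type="Xunit.Sdk.TrueException">
//           <message>Assert.True() Failure</message>
//           <stack-trace>at Example.Tests.WidgetTests.Fails() ...</stack-trace>
//         </failure>
//       </test>
//     </class>
//   </assembly>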
/**
 * Parse test results from provided input and return an array
 * of @{class:ArcanistUnitTestResult}.
 *
 * @param string $test_results String containing test results
 *
 * @return array ArcanistUnitTestResult
 */
public function parseTestResults($test_results) {
  if (!strlen($test_results)) {
    throw new Exception(
      pht(
        '%s argument to %s must not be empty',
        'test_results',
        'parseTestResults()'));
  }

  // xunit xsd: https://gist.github.com/959290
  $xunit_dom = new DOMDocument();
  $load_success = @$xunit_dom->loadXML($test_results);

  if (!$load_success) {
    $input_start = id(new PhutilUTF8StringTruncator())
      ->setMaximumGlyphs(150)
      ->truncateString($test_results);
    throw new Exception(
      sprintf(
        "%s\n\n%s",
        pht('Failed to load XUnit report; Input starts with:'),
        $input_start));
  }

  $results = array();
  $testcases = $xunit_dom->getElementsByTagName('testcase');
  foreach ($testcases as $testcase) {
    $classname = $testcase->getAttribute('classname');
    $name = $testcase->getAttribute('name');
    $time = $testcase->getAttribute('time');

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    // A skipped test is a test which was ignored using framework
    // mechanisms (e.g. @skip decorator).
    $skipped = $testcase->getElementsByTagName('skipped');
    if ($skipped->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_SKIP;
      $messages = array();
      for ($ii = 0; $ii < $skipped->length; $ii++) {
        $messages[] = trim($skipped->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages);
    }

    // Failure is a test which the code has explicitly failed by using
    // the mechanisms for that purpose. e.g., via an assertEquals.
    $failures = $testcase->getElementsByTagName('failure');
    if ($failures->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $messages = array();
      for ($ii = 0; $ii < $failures->length; $ii++) {
        $messages[] = trim($failures->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages)."\n";
    }

    // An errored test is one that had an unanticipated problem. e.g., an
    // unchecked throwable, or a problem with an implementation of the
    // test.
    $errors = $testcase->getElementsByTagName('error');
    if ($errors->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_BROKEN;
      $messages = array();
      for ($ii = 0; $ii < $errors->length; $ii++) {
        $messages[] = trim($errors->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages)."\n";
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($classname.'.'.$name);
    $result->setResult($status);
    $result->setDuration((double)$time);
    $result->setUserData($user_data);

    $results[] = $result;
  }
  return $results;
}
public function run() {
  $projectRoot = $this->getWorkingCopy()->getProjectRoot();
  $cwd = getcwd();
  $buildDir = $this->findBuildDirectory($projectRoot, $cwd);
  print "Using build directory '{$buildDir}'\n";
  $makeVars = $this->getMakeVars($buildDir);
  $lit = $this->findLitExecutable($makeVars);
  print "Using lit executable '{$lit}'\n";

  // We have to modify the format string, because llvm-lit does not like a
  // '' argument.
  $cmd = '%s '.($this->getEnableAsyncTests() ? '' : '-j1 ').'%s 2>&1';
  $litFuture = new ExecFuture($cmd, $lit, $buildDir."/test");

  $out = "";
  $results = array();
  $lastTime = microtime(true);
  $ready = false;
  $dots = "";
  $numTests = 0;

  while (!$ready) {
    $ready = $litFuture->isReady();
    $newout = $litFuture->readStdout();
    if (strlen($newout) == 0) {
      usleep(100);
      continue;
    }
    $out .= $newout;
    if ($ready && strlen($out) > 0 && substr($out, -1) != "\n") {
      $out .= "\n";
    }

    while (($nlPos = strpos($out, "\n")) !== FALSE) {
      $line = substr($out, 0, $nlPos + 1);
      $out = substr($out, $nlPos + 1);

      $res = ArcanistUnitTestResult::RESULT_UNSOUND;
      if (substr($line, 0, 6) == "PASS: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 6) == "FAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XPASS: ") {
        // An unexpected pass counts as a failure.
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XFAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 13) == "UNSUPPORTED: ") {
        $res = ArcanistUnitTestResult::RESULT_SKIP;
      } elseif (!$numTests &&
                preg_match('/Testing: ([0-9]+) tests/', $line, $matches)) {
        $numTests = (int)$matches[1];
      }

      // "\x1b[K" clears the current line and "\x1b[1A" moves the cursor up
      // one line, so the progress indicator is redrawn in place.
      if ($res == ArcanistUnitTestResult::RESULT_FAIL) {
        print "\x1b[1A";
      }
      if ($res != ArcanistUnitTestResult::RESULT_SKIP &&
          $res != ArcanistUnitTestResult::RESULT_PASS) {
        print "\r\x1b[K\x1b[1A".$line.self::progress($results, $numTests);
      }
      if ($res == ArcanistUnitTestResult::RESULT_UNSOUND) {
        continue;
      }

      $result = new ArcanistUnitTestResult();
      $result->setName(trim(substr($line, strpos($line, ':') + 1)));
      $result->setResult($res);
      $newTime = microtime(true);
      $result->setDuration($newTime - $lastTime);
      $lastTime = $newTime;
      $results[] = $result;
      $dots .= ".";
      print "\r\x1b[K\x1b[1A".self::progress($results, $numTests);
    }
  }

  list($out1, $out2) = $litFuture->read();
  print $out1;
  if ($out2) {
    throw new Exception(
      'There was error output, even though it should have been '.
      'redirected to stdout.');
  }
  print "\n";
  return $results;
}
/**
 * Parse test results from Go test report
 * (e.g. `go test -v`).
 *
 * @param string $path   Path to test
 * @param string $stdout the Stdout of the command.
 * @param string $stderr the Stderr of the command.
 *
 * @return array
 */
public function parseTestResults($path, $stdout, $stderr = '') {
  $test_results = $stderr.$stdout;
  $test_results = explode("\n", $test_results);

  $results = array();
  // We'll get our full test case name at the end and add it back in.
  $test_case_name = '';
  // Temp store for test case results (in case we run multiple test cases).
  $test_case_results = array();

  for ($i = 0; $i < count($test_results); $i++) {
    $line = $test_results[$i];

    if (strlen($line) >= 18 &&
        strncmp($line, '==================', 18) === 0 &&
        strncmp($test_results[$i + 1], 'WARNING: DATA RACE', 18) === 0) {
      // We have a race condition.
      $i++; // Advance to WARNING: DATA RACE.
      $reason = '';
      $test_name = '';
      // Loop to collect all data and move to the closing "===" line.
      while (strncmp($test_results[$i], '==================', 18) !== 0) {
        if (strncmp($test_results[$i], 'Goroutine', 9) === 0) {
          $meta = array();
          preg_match(
            '/^.*\\.(?P<test_name>[^\\.]+)$/',
            $test_results[$i + 1],
            $meta);
          $test_name = $meta['test_name'].' Race Detected';
        }
        $reason .= $test_results[$i++]."\n";
        // Are we out of lines? (">=" so we never index past the end.)
        if ($i >= count($test_results)) {
          return false;
        }
      }

      $result = new ArcanistUnitTestResult();
      $result->setName($test_name);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setUserData($reason);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- PASS', 8) === 0) {
      // We have a passing test.
      $meta = array();
      preg_match(
        '/^--- PASS: (?P<test_name>.+) \\((?P<time>.+)\\s*s(?:econds)?\\).*/',
        $line,
        $meta);

      $result = new ArcanistUnitTestResult();
      // For now set name without test case, we'll add it later.
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      $result->setDuration((double)$meta['time']);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- FAIL', 8) === 0) {
      // We have a failing test.
      $reason = trim($test_results[$i + 1]);
      $meta = array();
      preg_match(
        '/^--- FAIL: (?P<test_name>.+) \\((?P<time>.+)\\s*s(?:econds)?\\).*/',
        $line,
        $meta);

      $result = new ArcanistUnitTestResult();
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setDuration((double)$meta['time']);
      $result->setUserData($reason."\n");
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, 'ok', 2) === 0) {
      $meta = array();
      preg_match(
        '/^ok[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+(?P<time>.*)s.*/',
        $line,
        $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);

      // Our test case passed; check to make sure we were in verbose (-v)
      // mode.
      if (empty($test_case_results)) {
        // We weren't in verbose mode: create one successful result for the
        // whole test case.
        $test_name = 'Go::TestCase::'.$test_case_name;

        $result = new ArcanistUnitTestResult();
        $result->setName($test_name);
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        $result->setDuration((double)$meta['time']);
        $results[] = $result;
      } else {
        $test_case_results = $this->fixNames(
          $test_case_results,
          $test_case_name);
        $results = array_merge($results, $test_case_results);
        $test_case_results = array();
      }
      continue;
    }

    if (strncmp($line, "FAIL\t", 5) === 0) {
      $meta = array();
      preg_match(
        '/^FAIL[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+.*/',
        $line,
        $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      $test_case_results = $this->fixNames(
        $test_case_results,
        $test_case_name);
      $results = array_merge($results, $test_case_results);
      $test_case_results = array();
      continue;
    }
  }
  return $results;
}
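// Sketch of the race-detector block the first branch above scans for
// (`go test -race`); addresses, goroutine numbers, and names are
// illustrative:
//
//   ==================
//   WARNING: DATA RACE
//   Write at 0x00c000120010 by goroutine 8:
//     example.com/calc.increment()
//         calc.go:12 +0x44
//   Goroutine 8 (running) created at:
//     example.com/calc.TestConcurrent
//         calc_test.go:9 +0x7c
//   ==================
//
// The line following "Goroutine ..." is matched to pull out the test name
// ("TestConcurrent" here), and the whole block becomes the failure reason.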
public function runIntegrationTests($tests) {
  $this->make('all');

  // Now find all the test programs.
  $root = $this->getProjectRoot();
  $test_dir = $root."/tests/integration/";

  if (!$tests) {
    $paths = glob($test_dir."*.php");
    foreach (glob('python/tests/*.py') as $file) {
      $paths[] = $file;
    }
    $paths[] = 'ruby/ruby-watchman/spec/ruby_watchman_spec.rb';
  } else {
    $paths = $tests;
  }

  foreach (array(
    '/tmp/watchman-test.log',
    '/tmp/watchman-valgrind.log',
    '/tmp/watchman-valgrind.xml',
    '/tmp/watchman-callgrind.txt') as $log) {
    @unlink($log);
  }

  foreach ($paths as $path) {
    if (preg_match("/\\.php\$/", $path) && file_exists($path)) {
      require_once $path;
    }
  }

  // We test for this in a test case.
  putenv("WATCHMAN_EMPTY_ENV_VAR=");

  $coverage = $this->getEnableCoverage();
  $first_inst = new WatchmanInstance($root, $coverage);
  $instances = array($first_inst);

  // Helper for python or other language tests.
  putenv("WATCHMAN_SOCK=".$first_inst->getFullSockName());

  // Exercise the different serialization combinations.
  $cli_matrix = array(
    'bser/json' => '--server-encoding=bser --output-encoding=json',
    'json/json' => '--server-encoding=json --output-encoding=json',
  );

  // Find all the test cases that were declared.
  $results = array();
  foreach (get_declared_classes() as $name) {
    $ref = new ReflectionClass($name);
    if (!$ref->isSubclassOf('WatchmanTestCase')) {
      continue;
    }

    // Good enough; let's use it.
    $test_case = newv($name, array());
    $config = $test_case->getGlobalConfig();
    if ($config) {
      $instance = new WatchmanInstance($root, $coverage, $config);
      $instances[] = $instance;
    } else {
      $instance = $first_inst;
    }
    $test_case->setWatchmanInstance($instance);

    if (!$instance->getProcessID()) {
      $res = new ArcanistUnitTestResult();
      $res->setName('dead');
      $res->setUserData('died before test start');
      $res->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $results[] = array($res);
      break;
    }

    $test_case->setRoot($root);
    $test_case->setPaths($paths);
    $results[] = $test_case->run();

    if (!$test_case->needsLiveConnection()) {
      foreach ($cli_matrix as $mname => $args) {
        $test_case->useCLI($args);
        $cli_results = $test_case->run();
        foreach ($cli_results as $res) {
          $res->setName($res->getName()." [CLI: {$mname}]");
        }
        $results[] = $cli_results;
      }
    }
  }

  // Also run the python tests if we built them.
  foreach ($paths as $path) {
    if (!preg_match('/test.*\\.py$/', $path)) {
      continue;
    }
    if (!file_exists($path)) {
      // Was deleted in this (pending) rev.
      continue;
    }
    if (!file_exists("python/pywatchman/bser.so")) {
      // Not enabled by the build.
      continue;
    }

    // Note that this implicitly starts the instance if we haven't yet
    // done so. This is important if the only test paths are python paths.
    if (!$first_inst->getProcessID()) {
      $res = new ArcanistUnitTestResult();
      $res->setName('dead');
      $res->setUserData('died before test start');
      $res->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $results[] = array($res);
      break;
    }

    // Our Makefile contains the detected python, so just run the rule
    // from the makefile to pick it up.
    $start = microtime(true);
    $future = new ExecFuture(
      "PATH=\"{$root}:\$PATH\" PYTHONPATH={$root}/python ".
      "TESTNAME={$path} \${MAKE:-make} py-tests");
    $future->setTimeout(10);
    list($status, $out, $err) = $future->resolve();
    $end = microtime(true);

    $res = new ArcanistUnitTestResult();
    $res->setName($path);
    $res->setUserData($out.$err);
    $res->setDuration($end - $start);
    $res->setResult($status == 0
      ? ArcanistUnitTestResult::RESULT_PASS
      : ArcanistUnitTestResult::RESULT_FAIL);
    $results[] = array($res);
  }

  foreach ($paths as $path) {
    if (!preg_match('/\\.rb$/', $path)) {
      continue;
    }
    if (!file_exists($path)) {
      // Was deleted in this (pending) rev.
      continue;
    }
    $start = microtime(true);
    $future = new ExecFuture(
      "PATH=\"{$root}:\$PATH\" \${MAKE:-make} rb-tests");
    $future->setTimeout(10);
    list($status, $out, $err) = $future->resolve();
    $end = microtime(true);

    $res = new ArcanistUnitTestResult();
    $res->setName($path);
    $res->setUserData($out.$err);
    $res->setDuration($end - $start);
    $res->setResult($status == 0
      ? ArcanistUnitTestResult::RESULT_PASS
      : ArcanistUnitTestResult::RESULT_FAIL);
    $results[] = array($res);
  }

  foreach ($instances as $instance) {
    $results[] = $instance->generateValgrindTestResults();
  }

  $results = array_mergev($results);
  return $results;
}