/**
 * Parse RSpec JSON formatter output into unit test results.
 *
 * @param string $output JSON report produced by the RSpec JSON formatter.
 * @return array List of ArcanistUnitTestResult objects.
 */
private function parseOutput($output) {
  $results = array();
  $json = json_decode($output, true);

  // BUG FIX: json_decode() returns null on malformed input, and iterating
  // a missing 'examples' key would be a fatal error. Return no results
  // rather than crashing the whole test run.
  if (!is_array($json) || !array_key_exists('examples', $json)) {
    return $results;
  }

  foreach ($json['examples'] as $example) {
    $result = new ArcanistUnitTestResult();
    $result->setName($example['full_description']);

    // 'run_time' is not emitted by every RSpec version, so probe first.
    if (array_key_exists('run_time', $example)) {
      $result->setDuration($example['run_time']);
    }

    switch ($example['status']) {
      case 'passed':
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        break;
      case 'failed':
        $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
        $result->setUserData($example['exception']['message']);
        break;
      case 'pending':
        // Pending specs are reported as skipped.
        $result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
        break;
    }

    $results[] = $result;
  }

  return $results;
}
/**
 * Runs the test suite.
 *
 * Configures and builds the project with CMake/make, runs `make test`,
 * and reports the whole run as one pass/fail result.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
public function run() {
  $results = array();

  $command = '(mkdir -p build && cd build && cmake ..)';
  $command .= '&& make -C build all';
  $command .= '&& make -C build test';

  // Execute the test command & time it.
  $timeStart = microtime(true);
  $future = new ExecFuture($command);
  do {
    $future->read();
    // BUG FIX: sleep() takes an integer number of seconds, so sleep(0.5)
    // truncated to sleep(0) and busy-waited. usleep() gives the intended
    // 500ms pause between polls.
    usleep(500000);
  } while (!$future->isReady());
  list($error, $stdout, $stderr) = $future->resolve();
  $timeEnd = microtime(true);

  // Create a unit test result structure for the whole build+test run.
  $result = new ArcanistUnitTestResult();
  $result->setNamespace('DerpVision');
  $result->setName('Core');
  $result->setDuration($timeEnd - $timeStart);

  if ($error == 0) {
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
  } else {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    $result->setUserData($stdout . $stderr);
  }

  $results[] = $result;
  return $results;
}
/**
 * Parse Cucumber JSON output into unit test results.
 *
 * One result is produced per scenario element; a scenario fails when any
 * of its steps reports a 'failed' status.
 *
 * @param string $output Cucumber JSON report.
 * @return array List of ArcanistUnitTestResult objects.
 */
private function parseOutput($output) {
  $results = array();
  $features = json_decode($output);

  foreach ($features as $feature) {
    if (!property_exists($feature, 'elements')) {
      continue;
    }

    foreach ($feature->elements as $scenario) {
      $has_failure = false;
      $result = new ArcanistUnitTestResult();
      $result->setName($feature->description);

      foreach ($scenario->steps as $step) {
        if ($step->result->status === 'failed') {
          $has_failure = true;
          $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
          $result->setUserData($step->result->error_message);
        }
      }

      if (!$has_failure) {
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      }

      $results[] = $result;
    }
  }

  return $results;
}
/**
 * Parse TAP ("ok" / "not ok") output into unit test results.
 *
 * @param string $output Raw TAP stream.
 * @return array List of ArcanistUnitTestResult objects.
 */
private function parseOutput($output) {
  $results = array();
  $lines = explode(PHP_EOL, $output);

  foreach ($lines as $index => $line) {
    // Match lines like "ok 1 - test name" / "not ok 2 - test name".
    preg_match('/^(not ok|ok)\s+\d+\s+-?(.*)/', $line, $matches);
    if (count($matches) < 3) {
      continue;
    }

    $result = new ArcanistUnitTestResult();
    $result->setName(trim($matches[2]));

    switch (trim($matches[1])) {
      case 'ok':
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        break;
      case 'not ok':
        // The TAP diagnostic for a failure follows on the next line.
        // BUG FIX: guard the lookahead so a trailing "not ok" line does
        // not read past the end of the array.
        $exception_message = isset($lines[$index + 1])
          ? trim($lines[$index + 1])
          : '';
        $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
        $result->setUserData($exception_message);
        break;
      default:
        // BUG FIX: a bare 'continue' inside switch is equivalent to
        // 'break' in PHP, so the old code fell through and appended a
        // result with no status. 'continue 2' skips the line as intended.
        continue 2;
    }

    $results[] = $result;
  }

  return $results;
}
/**
 * Execute the configured test script and collect its XML results.
 *
 * Stale result files are removed first so old results can never be
 * reported, and the changed paths are exported to the child process via
 * the ARCANIST_DIFF_PATHS environment variable.
 *
 * @return array List of ArcanistUnitTestResult objects.
 */
private function runTests() {
  $root = $this->getWorkingCopy()->getProjectRoot();
  $script = $this->getConfiguredScript();
  $path = $this->getConfiguredTestResultPath();

  // Remove existing files so we cannot report old results.
  $stale_files = glob($root . DIRECTORY_SEPARATOR . $path . "/*.xml");
  foreach ($stale_files as $stale_file) {
    $this->unlink($stale_file);
  }

  // Provide changed paths to the process under test.
  putenv("ARCANIST_DIFF_PATHS=" . implode(PATH_SEPARATOR, $this->getPaths()));

  $future = new ExecFuture('%C %s', $script, $path);
  $future->setCWD($root);

  $failure = null;
  try {
    $future->resolvex();
  } catch (CommandException $exc) {
    $failure = $exc;
  }

  $results = $this->parseTestResults($root . DIRECTORY_SEPARATOR . $path);

  // If the script itself failed, append a synthetic BROKEN result so the
  // failure is visible even when no result files were written.
  if ($failure !== null) {
    $result = new ArcanistUnitTestResult();
    $result->setName('Unit Test Script');
    $result->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
    $result->setUserData(
      "ERROR: Command failed with code {$failure->getError()}\n" .
      "COMMAND: `{$failure->getCommand()}`");
    $results[] = $result;
  }

  return $results;
}
/**
 * Check that the commit message has a "Test Plan:" section containing
 * per-platform tests, printing a human-readable summary as it goes.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
private function checkNonEmptyTestPlan() {
  $result = new ArcanistUnitTestResult();
  $result->setName("Test Plan");

  $message_text = implode(' ', $this->getMessage());
  if (!preg_match('/\sTest Plan:/', $message_text)) {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    print 'Test Plan not found!';
    return array($result);
  }

  $platforms = $this->getPlatformTestsFromLines($this->getMessage());
  if ($platforms) {
    print "Found tests for the following platforms:\n";
    foreach ($platforms as $platform => $tests) {
      print "{$platform}:\n ";
      print implode("\n ", $tests);
      print "\n";
    }
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
    print 'Test Plan found!';
    return array($result);
  }

  // A test plan exists but lists nothing runnable on CI.
  $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
  print "No tests found to run on CI! Check your repo's README for instructions\n";
  return array($result);
}
/**
 * Run a shell command and report it as a single unit test result.
 *
 * @param string $command Shell command to execute.
 * @return array List containing one ArcanistUnitTestResult (pass iff the
 *               command exited with status 0).
 */
private function runCommand($command) {
  exec($command, $output, $return_code);

  $result = new ArcanistUnitTestResult();
  $result->setName($command);
  if ($return_code == 0) {
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
  } else {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
  }

  return array($result);
}
/**
 * Emit a single placeholder pass result.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
public function run() {
  // For a call to `arc call-conduit differential.updateunitresults` to
  // succeed we need at least one entry here.
  $placeholder = new ArcanistUnitTestResult();
  $placeholder->setName("dummy_placeholder_entry");
  $placeholder->setResult(ArcanistUnitTestResult::RESULT_PASS);
  return array($placeholder);
}
/**
 * Verify that the commit message contains a non-empty "Test Plan:"
 * section.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
private function checkNonEmptyTestPlan() {
  $result = new ArcanistUnitTestResult();
  $message_text = implode(' ', $this->getMessage());

  if (!preg_match('/\sTest Plan:/', $message_text)) {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    $result->setName('Test Plan not found!');
  } else if (preg_match('/\sTest Plan:\s*?Reviewers:/', $message_text)) {
    // "Test Plan:" immediately followed by "Reviewers:" means the
    // section has no content.
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    $result->setName('Test Plan cannot be empty!');
  } else {
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
    $result->setName('Test Plan found!');
  }

  return array($result);
}
/**
 * Build a unit test result from a finished test process.
 *
 * @param string $test   Test name.
 * @param int    $err    Process exit code (0 means success).
 * @param string $stdout Captured standard output.
 * @param string $stderr Captured standard error.
 * @return ArcanistUnitTestResult
 */
private function parseTestResults($test, $err, $stdout, $stderr) {
  $result = new ArcanistUnitTestResult();
  $result->setName($test);
  $result->setUserData($stdout . $stderr);

  $status = ($err == 0)
    ? ArcanistUnitTestResult::RESULT_PASS
    : ArcanistUnitTestResult::RESULT_FAIL;
  $result->setResult($status);

  // The harness reports its own wall time on stderr, e.g. "# ELAPSED: 250ms";
  // convert milliseconds to seconds when present.
  $matches = null;
  if (preg_match("/# ELAPSED: (\d+)ms/", $stderr, $matches)) {
    $result->setDuration($matches[1] / 1000);
  }

  return $result;
}
/**
 * Run `mvn test` in any top-level Maven directory that contains modified
 * paths, then collect results from the report files written by the run.
 *
 * @return array List of ArcanistUnitTestResult objects.
 */
public function run() {
  $working_copy = $this->getWorkingCopy();
  $this->project_root = $working_copy->getProjectRoot();

  // We only want to report results for tests that actually ran, so
  // we'll compare the test result files' timestamps to the start time
  // of the test run. This will probably break if multiple test runs
  // are happening in parallel, but if that's happening then we can't
  // count on the results files being intact anyway.
  $start_time = time();

  $maven_top_dirs = $this->findTopLevelMavenDirectories();

  // We'll figure out if any of the modified files we're testing are in
  // Maven directories. We won't want to run a bunch of Java tests for
  // changes to CSS files or whatever.
  $modified_paths = $this->getModifiedPaths();

  $maven_failed = false;
  foreach ($maven_top_dirs as $dir) {
    $dir_with_trailing_slash = $dir . '/';
    foreach ($modified_paths as $path) {
      // Prefix match: is this modified path inside this Maven directory?
      if ($dir_with_trailing_slash === substr($path, 0, strlen($dir_with_trailing_slash))) {
        $future = new ExecFuture('mvn test');
        $future->setCWD($dir);
        list($status, $stdout, $stderr) = $future->resolve();
        if ($status) {
          // Maven exits with a nonzero status if there were test failures
          // or if there was a compilation error.
          $maven_failed = true;
          break 2;
        }
        // One matching path is enough to have run this directory's tests;
        // move on to the next Maven directory.
        break;
      }
    }
  }

  $testResults = $this->parseTestResultsSince($start_time);

  if ($maven_failed) {
    // If there wasn't a test failure, then synthesize one to represent
    // the failure of the test run as a whole, since it probably means the
    // code failed to compile.
    $found_failure = false;
    foreach ($testResults as $testResult) {
      if ($testResult->getResult() === ArcanistUnitTestResult::RESULT_FAIL || $testResult->getResult() === ArcanistUnitTestResult::RESULT_BROKEN) {
        $found_failure = true;
        break;
      }
    }
    if (!$found_failure) {
      $testResult = new ArcanistUnitTestResult();
      $testResult->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
      $testResult->setName('mvn test');
      $testResults[] = $testResult;
    }
  }

  return $testResults;
}
/**
 * Register an asynchronous placeholder result to be filled in later.
 *
 * @return array List containing one postponed ArcanistUnitTestResult.
 */
public function run() {
  // Here we create a new unit test "jenkins_async_test" and promise we'll
  // update the results later. Jenkins updates the results using
  // `arc call-conduit differential.updateunitresults`. If you change the
  // name here, also make sure to change the name in the Jenkins script
  // that updates the test result -- they have to be the same.
  $placeholder = new ArcanistUnitTestResult();
  $placeholder->setName("jenkins_async_test");
  $placeholder->setResult(ArcanistUnitTestResult::RESULT_POSTPONED);
  return array($placeholder);
}
/**
 * Verify that the commit message contains a non-empty "Revert Plan:"
 * section.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
private function checkNonEmptyRevertPlan() {
  $result = new ArcanistUnitTestResult();
  $message_text = implode(' ', $this->getMessage());

  if (!preg_match('/\sRevert Plan:/', $message_text)) {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    $result->setName('Revert Plan not found! (See http://t.uber.com/revert for more info)');
  } else if (preg_match('/\sRevert Plan:\s*?$/', $message_text)) {
    // "Revert Plan:" with nothing after it means the section is empty.
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    $result->setName('Revert Plan cannot be empty! (See http://t.uber.com/revert for more info)');
  } else {
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
    $result->setName('Revert Plan found!');
  }

  return array($result);
}
/**
 * Parse test results from phpunit json report.
 *
 * @param string $path Path to test
 * @param string $test_results String containing phpunit json report
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  $report = $this->getJsonReport($test_results);

  // Coverage is for all testcases in the executed $path.
  $coverage = array();
  if ($this->enableCoverage !== false) {
    $coverage = $this->readCoverage();
  }

  $results = array();
  foreach ($report as $event) {
    if ('test' != $event->event) {
      continue;
    }

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    if ('fail' == $event->status) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $user_data .= $event->message . "\n";
      foreach ($event->trace as $trace) {
        $user_data .= sprintf("\n%s:%s", $trace->file, $trace->line);
      }
    } else if ('error' == $event->status) {
      // PHPUnit reports skipped and incomplete tests as errors with a
      // recognizable message prefix; map both to SKIP instead of BROKEN.
      // (Flattened from a deeply nested else{if{...}} ladder; the two
      // skip branches were identical.)
      if (strpos($event->message, 'Skipped Test') !== false ||
          strpos($event->message, 'Incomplete Test') !== false) {
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        $user_data .= $event->message;
      } else {
        $status = ArcanistUnitTestResult::RESULT_BROKEN;
        $user_data .= $event->message;
        foreach ($event->trace as $trace) {
          $user_data .= sprintf("\n%s:%s", $trace->file, $trace->line);
        }
      }
    }

    // Strip the " (ClassName)" suffix PHPUnit appends to test names.
    $name = preg_replace('/ \(.*\)/', '', $event->test);

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    $result->setDuration($event->time);
    $result->setCoverage($coverage);
    $result->setUserData($user_data);

    $results[] = $result;
  }

  return $results;
}
/**
 * Run the project's npm coverage script and report it as one result.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
public function run() {
  $working_copy = $this->getWorkingCopy();
  $this->projectRoot = $working_copy->getProjectRoot();

  $future = new ExecFuture('npm run coverage');
  $future->setCWD($this->projectRoot);
  list($err, $stdout, $stderr) = $future->resolve();

  $result = new ArcanistUnitTestResult();
  $result->setName("Node test engine");
  $result->setUserData($stdout);
  $result->setResult($err
    ? ArcanistUnitTestResult::RESULT_FAIL
    : ArcanistUnitTestResult::RESULT_PASS);

  return array($result);
}
/**
 * Run the build either asynchronously (returning a postponed placeholder
 * result) or synchronously through the build server.
 *
 * @return array Unit test results: a single postponed placeholder in
 *               async mode, otherwise empty.
 */
public function run() {
  // If we are running asynchronously, mark all tests as postponed and
  // return those results. Otherwise, run the tests and collect the
  // actual results.
  if (!$this->getEnableAsyncTests()) {
    $server = new FacebookBuildServer();
    $server->startProjectBuilds(false);
    return array();
  }

  $postponed = new ArcanistUnitTestResult();
  $postponed->setName("jcommon_build");
  $postponed->setResult(ArcanistUnitTestResult::RESULT_POSTPONED);
  return array($postponed);
}
/**
 * Run the JavaScript test suite through Karma and parse its JUnit-style
 * XML report into unit test results.
 *
 * Skips (with a SKIP result) when the `karma` binary is not on $PATH.
 *
 * @return array List of ArcanistUnitTestResult objects.
 */
public function runJs() {
  // First, check to see if karma is on $PATH:
  list($err, $stdout, $_) = exec_manual("which karma");
  if ($err != 0) {
    $result = new ArcanistUnitTestResult();
    $result->setName("Karma not found. Skipping js tests...");
    $result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
    $result->setDuration(0);
    return array($result);
  }

  // Karma IS on the path; run the one-shot config from the js client dir.
  $old_dir = getcwd();
  $project_root = $this->getWorkingCopy()->getProjectRoot();
  chdir($project_root . '/client/js');
  exec_manual("karma start karma-conf-oneshot.js");
  chdir($old_dir);

  // Read from the test-results.xml file.
  $xml = file_get_contents($project_root . '/client/test-results.xml');
  $doc = new SimpleXMLElement($xml);

  // Destroy the test-results.xml file.
  unlink($project_root . '/client/test-results.xml');

  // Extract all the test cases.
  $results = array();
  foreach ($doc->testsuite as $suite) {
    // BUG FIX: SimpleXMLElement attribute access yields objects, not
    // scalars; cast so string operations and setDuration() receive the
    // types they expect.
    $suite_name = (string) $suite['name'];
    foreach ($suite->testcase as $case) {
      $case_name = (string) $case['name'];
      $time = (float) $case['time'];
      // The classname attribute is "<suite> <fixture>"; strip the suite
      // prefix to recover the fixture name.
      $fixture_name = substr((string) $case['classname'], strlen($suite_name) + 1);

      // Did we fail?
      $failure = (string) $case->failure;

      // Convert each to an ArcanistUnitTestResult.
      $result = new ArcanistUnitTestResult();
      $result->setName($fixture_name . ' ' . $case_name);
      $result->setResult($failure ? ArcanistUnitTestResult::RESULT_FAIL : ArcanistUnitTestResult::RESULT_PASS);
      $result->setUserData($failure);
      $result->setDuration($time);
      $results[] = $result;
    }
  }
  return $results;
}
/**
 * Run the configured TAP test command (optionally with coverage) and
 * report the whole run as one result.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
public function run() {
  $results = array();
  $build_start = microtime(true);
  $config_manager = $this->getConfigurationManager();

  // Pick the coverage-enabled command when coverage is requested.
  if ($this->getEnableCoverage() !== false) {
    $command = $config_manager->getConfigFromAnySource('unit.engine.tap.cover');
  } else {
    $command = $config_manager->getConfigFromAnySource('unit.engine.tap.command');
  }

  $timeout = $config_manager->getConfigFromAnySource('unit.engine.tap.timeout');
  if (!$timeout) {
    $timeout = 15;
  }

  $future = new ExecFuture('%C', $command);
  $future->setTimeout($timeout);

  $result = new ArcanistUnitTestResult();
  $result->setName($command ? $command : 'unknown');

  try {
    list($stdout, $stderr) = $future->resolvex();
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
    if ($this->getEnableCoverage() !== false) {
      $coverage = $this->readCoverage('coverage/cobertura-coverage.xml');
      $result->setCoverage($coverage);
    }
  } catch (CommandException $exc) {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    if ($future->getWasKilledByTimeout()) {
      print "Process stdout:\n" . $exc->getStdout() .
        "\nProcess stderr:\n" . $exc->getStderr() .
        "\nExceeded timeout of {$timeout} secs.\nMake unit tests faster.";
      // BUG FIX: the timeout branch only printed the output, leaving the
      // recorded result without any user data.
      $result->setUserData($exc->getStdout() . $exc->getStderr());
    } else {
      // BUG FIX: was setUserdata(), which only worked because PHP method
      // names are case-insensitive; use the canonical casing.
      $result->setUserData($exc->getStdout() . $exc->getStderr());
    }
  }

  $result->setDuration(microtime(true) - $build_start);
  $results[] = $result;
  return $results;
}
/**
 * Collect valgrind results after the process under test has terminated.
 *
 * Parses the valgrind XML log (which may contain several concatenated
 * XML documents) and buckets reported errors into definite leaks,
 * possible leaks, and other errors, emitting one summary result per
 * bucket.
 *
 * @return array List of ArcanistUnitTestResult objects.
 */
function generateValgrindTestResults() {
  $this->terminateProcess();
  if ($this->coverage) {
    return $this->generateCoverageResults();
  }
  if (!$this->valgrind) {
    // Nothing to report when valgrind was not enabled for this run.
    return array();
  }

  $definite_leaks = array();
  $possible_leaks = array();
  $errors = array();
  $descriptors = array();

  // valgrind seems to use an interesting definition of valid XML.
  // Tolerate having multiple documents in one file.
  // Confluence of weird bugs; hhvm has very low preg_match limits
  // so we have to grovel around to make sure that we read this
  // stuff in properly :-/
  $documents = array();
  $in_doc = false;
  $doc = null;
  foreach (file($this->vg_log . '.xml') as $line) {
    if ($in_doc) {
      $doc[] = $line;
      if (preg_match(',</valgrindoutput>,', $line)) {
        // End of one embedded document; flush it.
        $documents[] = implode("\n", $doc);
        $doc = null;
      }
    } else {
      if (preg_match(',<valgrindoutput>,', $line)) {
        $doc = array($line);
        $in_doc = true;
      }
    }
  }

  libxml_use_internal_errors(true);
  foreach ($documents as $data) {
    libxml_clear_errors();
    $vg = @simplexml_load_string($data);
    if (is_object($vg)) {
      foreach ($vg->error as $err) {
        $render = $this->renderVGResult($err);
        switch ($err->kind) {
          case 'Leak_DefinitelyLost':
            $definite_leaks[] = $render;
            break;
          case 'Leak_PossiblyLost':
            $possible_leaks[] = $render;
            break;
          default:
            $errors[] = $render;
        }
      }
      // These look like fd leak records, but they're not documented
      // as such. These go away if we turn off track-fds
      foreach ($vg->stack as $stack) {
        // Suppressing this for now: posix_spawn seems to confuse
        // some valgrind's, particularly the version we run on travis,
        // as it records open descriptors from the exec'ing child
        // $descriptors[] = $this->renderVGStack($stack);
      }
    } else {
      // XML parse failure: report the libxml errors with a few lines of
      // surrounding context to aid debugging.
      $why = 'failed to parse xml';
      $lines = explode("\n", $data);
      foreach (libxml_get_errors() as $err) {
        $slice = array_slice($lines, $err->line - 3, 6);
        $slice = implode("\n", $slice);
        $why .= sprintf("\n%s (line %d col %d) %s", $err->message, $err->line, $err->column, $slice);
      }
      printf("parsing valgrind output: %s\n", $why);
    }
  }

  $results = array();

  // Possible leaks are only advisory: reported as SKIP rather than FAIL.
  $res = new ArcanistUnitTestResult();
  $res->setName('valgrind possible leaks');
  $res->setUserData(implode("\n\n", $possible_leaks));
  $res->setResult(count($possible_leaks) ? ArcanistUnitTestResult::RESULT_SKIP : ArcanistUnitTestResult::RESULT_PASS);
  $results[] = $res;

  $res = new ArcanistUnitTestResult();
  $res->setName('descriptor leaks');
  $res->setUserData(implode("\n\n", $descriptors));
  $res->setResult(count($descriptors) ? ArcanistUnitTestResult::RESULT_FAIL : ArcanistUnitTestResult::RESULT_PASS);
  $results[] = $res;

  $res = new ArcanistUnitTestResult();
  $res->setName('valgrind leaks');
  $res->setUserData(implode("\n\n", $definite_leaks));
  $leak_res = count($definite_leaks) ? ArcanistUnitTestResult::RESULT_FAIL : ArcanistUnitTestResult::RESULT_PASS;
  if ($leak_res == ArcanistUnitTestResult::RESULT_FAIL && getenv('TRAVIS') == 'true') {
    // Travis has false positives at this time, downgrade
    $leak_res = ArcanistUnitTestResult::RESULT_SKIP;
  }
  $res->setResult($leak_res);
  $results[] = $res;

  $res = new ArcanistUnitTestResult();
  $res->setName('valgrind errors');
  $res->setUserData(implode("\n\n", $errors));
  $res->setResult(count($errors) ? ArcanistUnitTestResult::RESULT_FAIL : ArcanistUnitTestResult::RESULT_PASS);
  $results[] = $res;

  return $results;
}
/**
 * Parses the test results from xUnit.
 *
 * @param string The name of the xUnit results file.
 * @param string The name of the coverage file if one was provided by
 *               `buildTestFuture`. This is passed through to
 *               `parseCoverageResult`.
 * @return array Test results.
 */
private function parseTestResult($xunit_tmp, $coverage) {
  $xunit_dom = new DOMDocument();
  $xunit_dom->loadXML(Filesystem::readFile($xunit_tmp));

  $results = array();
  $tests = $xunit_dom->getElementsByTagName('test');
  foreach ($tests as $test) {
    $name = $test->getAttribute('name');
    $time = $test->getAttribute('time');

    // Unrecognized result attributes are reported as UNSOUND.
    $status = ArcanistUnitTestResult::RESULT_UNSOUND;
    switch ($test->getAttribute('result')) {
      case 'Pass':
        $status = ArcanistUnitTestResult::RESULT_PASS;
        break;
      case 'Fail':
        $status = ArcanistUnitTestResult::RESULT_FAIL;
        break;
      case 'Skip':
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        break;
    }

    // Collect the message and stack trace from either the <reason> or
    // <failure> element, whichever is present.
    $userdata = '';
    $reason = $test->getElementsByTagName('reason');
    $failure = $test->getElementsByTagName('failure');
    if ($reason->length > 0 || $failure->length > 0) {
      $node = $reason->length > 0 ? $reason : $failure;
      $message = $node->item(0)->getElementsByTagName('message');
      if ($message->length > 0) {
        $userdata = $message->item(0)->nodeValue;
      }
      $stacktrace = $node->item(0)->getElementsByTagName('stack-trace');
      if ($stacktrace->length > 0) {
        $userdata .= "\n" . $stacktrace->item(0)->nodeValue;
      }
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    // BUG FIX: DOM attribute values are strings; cast so the duration is
    // stored as a number, matching the other xUnit parsers in this file.
    $result->setDuration((double) $time);
    $result->setUserData($userdata);
    if ($coverage != null) {
      $result->setCoverage($this->parseCoverageResult($coverage));
    }
    $results[] = $result;
  }

  return $results;
}
/**
 * Parse test results from provided input and return an array
 * of @{class:ArcanistUnitTestResult}.
 *
 * @param string $test_results String containing test results
 *
 * @return array ArcanistUnitTestResult
 */
public function parseTestResults($test_results) {
  if (!strlen($test_results)) {
    throw new Exception(pht('%s argument to %s must not be empty', 'test_results', 'parseTestResults()'));
  }

  // xunit xsd: https://gist.github.com/959290
  $xunit_dom = new DOMDocument();
  $load_success = @$xunit_dom->loadXML($test_results);

  if (!$load_success) {
    // Include the start of the input in the error so the caller can see
    // what was actually received instead of valid XML.
    $input_start = id(new PhutilUTF8StringTruncator())->setMaximumGlyphs(150)->truncateString($test_results);
    throw new Exception(sprintf("%s\n\n%s", pht('Failed to load XUnit report; Input starts with:'), $input_start));
  }

  $results = array();
  $testcases = $xunit_dom->getElementsByTagName('testcase');
  foreach ($testcases as $testcase) {
    $classname = $testcase->getAttribute('classname');
    $name = $testcase->getAttribute('name');
    $time = $testcase->getAttribute('time');

    // Default to PASS; downgraded below if skip/failure/error elements
    // are present.
    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    // A skipped test is a test which was ignored using framework
    // mechanisms (e.g. @skip decorator)
    $skipped = $testcase->getElementsByTagName('skipped');
    if ($skipped->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_SKIP;
      $messages = array();
      for ($ii = 0; $ii < $skipped->length; $ii++) {
        $messages[] = trim($skipped->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages);
    }

    // Failure is a test which the code has explicitly failed by using
    // the mechanisms for that purpose. e.g., via an assertEquals
    $failures = $testcase->getElementsByTagName('failure');
    if ($failures->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $messages = array();
      for ($ii = 0; $ii < $failures->length; $ii++) {
        $messages[] = trim($failures->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages) . "\n";
    }

    // An errored test is one that had an unanticipated problem. e.g., an
    // unchecked throwable, or a problem with an implementation of the
    // test.
    $errors = $testcase->getElementsByTagName('error');
    if ($errors->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_BROKEN;
      $messages = array();
      for ($ii = 0; $ii < $errors->length; $ii++) {
        $messages[] = trim($errors->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages) . "\n";
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($classname . '.' . $name);
    $result->setResult($status);
    $result->setDuration((double) $time);
    $result->setUserData($user_data);

    $results[] = $result;
  }

  return $results;
}
/**
 * Parse test results from an xUnit XML report file.
 *
 * @param string $path      Path whose tests were executed (coverage scope).
 * @param string $xunit_tmp Path to the xUnit XML report file.
 * @param string $cover_tmp Path to the coverage report file.
 * @return array List of ArcanistUnitTestResult objects.
 */
public function parseTestResults($path, $xunit_tmp, $cover_tmp) {
  // xunit xsd: https://gist.github.com/959290
  $xunit_dom = new DOMDocument();
  $xunit_dom->loadXML(Filesystem::readFile($xunit_tmp));

  // Coverage is for all testcases in the executed $path.
  $coverage = array();
  if ($this->getEnableCoverage() !== false) {
    $coverage = $this->readCoverage($cover_tmp);
  }

  $results = array();
  $testcases = $xunit_dom->getElementsByTagName("testcase");
  foreach ($testcases as $testcase) {
    $classname = $testcase->getAttribute("classname");
    $name = $testcase->getAttribute("name");
    $time = $testcase->getAttribute("time");

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = "";

    // A skipped test is a test which was ignored using framework
    // mechanisms (e.g. @skip decorator).
    $skipped = $testcase->getElementsByTagName("skipped");
    if ($skipped->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_SKIP;
      $messages = array();
      for ($ii = 0; $ii < $skipped->length; $ii++) {
        $messages[] = trim($skipped->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages);
    }

    // Failure is a test which the code has explicitly failed by using
    // the mechanisms for that purpose. e.g., via an assertEquals.
    $failures = $testcase->getElementsByTagName("failure");
    if ($failures->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $messages = array();
      for ($ii = 0; $ii < $failures->length; $ii++) {
        $messages[] = trim($failures->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages) . "\n";
    }

    // An errored test is one that had an unanticipated problem. e.g., an
    // unchecked throwable, or a problem with an implementation of the
    // test.
    $errors = $testcase->getElementsByTagName("error");
    if ($errors->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_BROKEN;
      $messages = array();
      for ($ii = 0; $ii < $errors->length; $ii++) {
        $messages[] = trim($errors->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages) . "\n";
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($classname . "." . $name);
    $result->setResult($status);
    // BUG FIX: DOM attribute values are strings; cast the duration to a
    // number for consistency with the other xUnit parser in this file.
    $result->setDuration((double) $time);
    $result->setCoverage($coverage);
    $result->setUserData($user_data);
    $results[] = $result;
  }
  return $results;
}
/**
 * Parse test results from Go test report
 * (e.g. `go test -v`)
 *
 * @param string $path Path to test
 * @param string $test_results String containing Go test output
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  $lines = explode("\n", $test_results);
  $results = array();

  // We'll get our full test case name at the end and add it back in.
  $test_case_name = '';
  // Temp store for per-test results (in case we run multiple test cases).
  $test_case_results = array();

  foreach ($lines as $i => $line) {
    if (strncmp($line, '--- PASS', 8) === 0) {
      // A passing test from verbose (-v) output.
      $meta = array();
      // BUG FIX: the match result was previously unchecked, producing
      // undefined-index notices on unrecognized "--- PASS" lines.
      if (!preg_match('/^--- PASS: (?P<test_name>.+) \((?P<time>.+) seconds\).*/', $line, $meta)) {
        continue;
      }
      $result = new ArcanistUnitTestResult();
      // For now set name without test case; we'll add it later.
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      $result->setDuration((float) $meta['time']);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- FAIL', 8) === 0) {
      // A failing test; the reason is on the following line.
      // BUG FIX: guard the lookahead so a trailing failure line does not
      // read past the end of the array.
      $reason = isset($lines[$i + 1]) ? trim($lines[$i + 1]) : '';
      $meta = array();
      if (!preg_match('/^--- FAIL: (?P<test_name>.+) \((?P<time>.+) seconds\).*/', $line, $meta)) {
        continue;
      }
      $result = new ArcanistUnitTestResult();
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setDuration((float) $meta['time']);
      $result->setUserData($reason . "\n");
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, 'ok', 2) === 0) {
      // The whole test case passed.
      $meta = array();
      if (!preg_match('/^ok[\s\t]+(?P<test_name>\w.*)[\s\t]+(?P<time>.*)s.*/', $line, $meta)) {
        continue;
      }
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      if (empty($test_case_results)) {
        // We weren't in verbose mode, so create one successful result
        // for the whole test case.
        $test_name = 'Go::TestCase::' . $test_case_name;
        $result = new ArcanistUnitTestResult();
        $result->setName($test_name);
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        $result->setDuration((float) $meta['time']);
        $results[] = $result;
      } else {
        // Prefix the buffered per-test results with the test case name
        // and flush them.
        $test_case_results = $this->fixNames($test_case_results, $test_case_name);
        $results = array_merge($results, $test_case_results);
        $test_case_results = array();
      }
      continue;
    }

    if (strncmp($line, "FAIL\t", 5) === 0) {
      // The whole test case failed; flush the buffered per-test results.
      $meta = array();
      if (!preg_match('/^FAIL[\s\t]+(?P<test_name>\w.*)[\s\t]+.*/', $line, $meta)) {
        continue;
      }
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      $test_case_results = $this->fixNames($test_case_results, $test_case_name);
      $results = array_merge($results, $test_case_results);
      $test_case_results = array();
      continue;
    }
  }

  return $results;
}
/**
 * Build the project (via ninja when a CMake-style build dir with llvm-lit
 * exists, otherwise via make variables) and run the polli test suite
 * through llvm-lit, streaming per-test results with a progress display.
 *
 * @return array Only the "interesting" results: failures and slow tests.
 */
public function run() {
  $projectRoot = $this->getWorkingCopy()->getProjectRoot();
  $cwd = getcwd();
  $buildDir = $this->findBuildDirectory($projectRoot, $cwd);

  // Prefer the polli object directory nested inside the polly tree when
  // it exists.
  $polliObjDir = $buildDir;
  $nestedDir = $buildDir . DIRECTORY_SEPARATOR . "tools" . DIRECTORY_SEPARATOR . "polly" . DIRECTORY_SEPARATOR . "tools" . DIRECTORY_SEPARATOR . "polli";
  if (is_dir($nestedDir)) {
    $polliObjDir = $nestedDir;
  }
  $polliTestDir = $polliObjDir . DIRECTORY_SEPARATOR . "test";

  if (is_dir($buildDir . DIRECTORY_SEPARATOR . "bin") && file_exists($buildDir . DIRECTORY_SEPARATOR . "bin" . DIRECTORY_SEPARATOR . "llvm-lit")) {
    $lit = $buildDir . DIRECTORY_SEPARATOR . "bin" . DIRECTORY_SEPARATOR . "llvm-lit";
    $cmd = "ninja -C " . $buildDir;
    print "Running ninja (" . $cmd . ")\n";
    exec($cmd);
  } else {
    $makeVars = $this->getMakeVars($buildDir);
    $lit = $this->findLitExecutable($makeVars);
  }
  print "Using lit executable '{$lit}'\n";

  // We have to modify the format string, because llvm-lit does not like
  // a '' argument.
  $cmd = '%s %s 2>&1';
  $litFuture = new ExecFuture($cmd, $lit, $polliTestDir);

  $out = "";
  $results = array();
  $lastTime = microtime(true);
  $ready = false;
  $numTests = 0;
  while (!$ready) {
    $ready = $litFuture->isReady();
    $newout = $litFuture->readStdout();
    if (strlen($newout) == 0) {
      usleep(100);
      continue;
    }
    $out .= $newout;
    // Ensure the final chunk is newline-terminated so the line loop
    // below consumes it.
    if ($ready && strlen($out) > 0 && substr($out, -1) != "\n") {
      $out .= "\n";
    }
    while (($nlPos = strpos($out, "\n")) !== FALSE) {
      $line = substr($out, 0, $nlPos + 1);
      $out = substr($out, $nlPos + 1);

      // NOTE(review): this classification block was corrupted in the
      // source (the "PASS: " and "XPASS: " result assignments were
      // redacted); reconstructed from lit's output format, where an
      // unexpected pass (XPASS) counts as a failure and an expected
      // failure (XFAIL) counts as a pass — confirm against the original.
      $res = ArcanistUnitTestResult::RESULT_UNSOUND;
      if (substr($line, 0, 6) == "PASS: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 6) == "FAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XPASS: ") {
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XFAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 13) == "UNSUPPORTED: ") {
        $res = ArcanistUnitTestResult::RESULT_SKIP;
      } elseif (!$numTests && preg_match('/Testing: ([0-9]+) tests/', $line, $matches)) {
        $numTests = (int) $matches[1];
      }

      // NOTE(review): the cursor-control strings below appear to have
      // lost their ESC (\033) prefix during extraction; they rewrite the
      // progress line in place — confirm against the original file.
      if ($res == ArcanistUnitTestResult::RESULT_FAIL) {
        print "[0A";
      }
      if ($res != ArcanistUnitTestResult::RESULT_SKIP && $res != ArcanistUnitTestResult::RESULT_PASS) {
        print "[K[0A" . $line . self::progress($results, $numTests);
      }
      if ($res == ArcanistUnitTestResult::RESULT_UNSOUND) {
        continue;
      }

      $result = new ArcanistUnitTestResult();
      $result->setName(trim(substr($line, strpos($line, ':') + 1)));
      $result->setResult($res);
      $newTime = microtime(true);
      $result->setDuration($newTime - $lastTime);
      $lastTime = $newTime;
      $results[] = $result;
      print "\r[K[0A" . self::progress($results, $numTests);
    }
  }

  list($out1, $out2) = $litFuture->read();
  print $out1;
  if ($out2) {
    throw new Exception('There was error output, even though it should have been redirected to stdout.');
  }
  print "\n";

  // Report only "interesting" tests: non-passing results and slow tests.
  $timeThreshold = 0.05;
  $interestingTests = array();
  foreach ($results as $result) {
    // BUG FIX: a slow failing test was previously appended twice (two
    // independent if-blocks each pushed the same result).
    if ($result->getResult() != "pass" || $result->getDuration() > $timeThreshold) {
      $interestingTests[] = $result;
    }
  }
  return $interestingTests;
}
/**
 * Mark the current running test as skipped.
 *
 * @param string Description for why this test was skipped.
 * @return void
 * @task internal
 */
private final function skipTest($reason) {
  $coverage = $this->endCoverage();

  $skip_result = new ArcanistUnitTestResult();
  $skip_result->setCoverage($coverage);
  $skip_result->setName($this->runningTest);
  $skip_result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
  $skip_result->setDuration(microtime(true) - $this->testStartTime);
  $skip_result->setUserData($reason);

  $this->results[] = $skip_result;
}
/**
 * Run the project's test suite through llvm-lit, streaming a progress
 * line to the terminal while tests execute.
 *
 * NOTE(review): the PASS/FAIL prefix chain below was corrupted in the
 * original text and has been reconstructed from its obvious parallel
 * structure.
 *
 * @return array List of ArcanistUnitTestResult, one per executed test.
 */
public function run() {
  $projectRoot = $this->getWorkingCopy()->getProjectRoot();
  $cwd = getcwd();
  $buildDir = $this->findBuildDirectory($projectRoot, $cwd);
  print "Using build directory '{$buildDir}'\n";
  $makeVars = $this->getMakeVars($buildDir);
  $lit = $this->findLitExecutable($makeVars);
  print "Using lit executable '{$lit}'\n";

  // We have to modify the format string, because llvm-lit does not like
  // a '' argument. "-j1" forces serial execution unless async tests are
  // enabled.
  $cmd = '%s '.($this->getEnableAsyncTests() ? '' : '-j1 ').'%s 2>&1';
  $litFuture = new ExecFuture($cmd, $lit, $buildDir."/test");

  $out = "";
  $results = array();
  $lastTime = microtime(true);
  $ready = false;
  $numTests = 0;
  while (!$ready) {
    $ready = $litFuture->isReady();
    $newout = $litFuture->readStdout();
    if (strlen($newout) == 0) {
      usleep(100); // 100us back-off while lit produces no output.
      continue;
    }
    $out .= $newout;
    // Ensure the very last line is newline-terminated so it is parsed too.
    if ($ready && strlen($out) > 0 && substr($out, -1) != "\n") {
      $out .= "\n";
    }
    while (($nlPos = strpos($out, "\n")) !== false) {
      $line = substr($out, 0, $nlPos + 1);
      $out = substr($out, $nlPos + 1);

      // Map lit's result prefix to an arcanist result. XPASS (unexpected
      // pass) counts as failure; XFAIL (expected failure) counts as pass.
      $res = ArcanistUnitTestResult::RESULT_UNSOUND;
      if (substr($line, 0, 6) == "PASS: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 6) == "FAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XPASS: ") {
        $res = ArcanistUnitTestResult::RESULT_FAIL;
      } elseif (substr($line, 0, 7) == "XFAIL: ") {
        $res = ArcanistUnitTestResult::RESULT_PASS;
      } elseif (substr($line, 0, 13) == "UNSUPPORTED: ") {
        $res = ArcanistUnitTestResult::RESULT_SKIP;
      } elseif (!$numTests &&
          preg_match('/Testing: ([0-9]+) tests/', $line, $matches)) {
        $numTests = (int)$matches[1];
      }

      // "\033[1A" moves the cursor up one line, "\033[K" clears it; this
      // keeps the progress line pinned under the scrolling failure output.
      if ($res == ArcanistUnitTestResult::RESULT_FAIL) {
        print "\033[1A";
      }
      if ($res != ArcanistUnitTestResult::RESULT_SKIP &&
          $res != ArcanistUnitTestResult::RESULT_PASS) {
        print "\r\033[K\033[1A".$line.self::progress($results, $numTests);
      }
      // Lines that matched no prefix (headers, counters) produce no result.
      if ($res == ArcanistUnitTestResult::RESULT_UNSOUND) {
        continue;
      }

      $result = new ArcanistUnitTestResult();
      $result->setName(trim(substr($line, strpos($line, ':') + 1)));
      $result->setResult($res);
      // Approximate per-test duration as wall time since the previous
      // result line.
      $newTime = microtime(true);
      $result->setDuration($newTime - $lastTime);
      $lastTime = $newTime;
      $results[] = $result;
      print "\r\033[K\033[1A".self::progress($results, $numTests);
    }
  }

  list($out1, $out2) = $litFuture->read();
  print $out1;
  if ($out2) {
    throw new Exception(
      'There was error output, even though it should have been '.
      'redirected to stdout.');
  }
  print "\n";
  return $results;
}
private final function resultTest($test_result, $reason) { $coverage = $this->endCoverage(); $result = new ArcanistUnitTestResult(); $result->setCoverage($coverage); $result->setNamespace(get_class($this)); $result->setName($this->runningTest); $result->setLink($this->getLink($this->runningTest)); $result->setResult($test_result); $result->setDuration(microtime(true) - $this->testStartTime); $result->setUserData($reason); $this->results[] = $result; if ($this->renderer) { echo $this->renderer->renderUnitResult($result); } }
private function parseTestResults($targets, $status) { $code = $status[0]; $output = $status[1]; $lines = explode("\n", $output); if ($code == 4) { print "No tests affected...\n"; return []; } else { if ($code == 1) { throw new Exception($output . "\n" . $status[2]); } } $query_command = $this->bazelCommand(["query", "-k", "%s"]); $future = new ExecFuture($query_command, 'tests(set(' . join(" ", $targets) . '))'); $future->setCWD($this->project_root); $testTargets = explode("\n", trim($future->resolvex()[0])); $results = array(); foreach ($testTargets as $test) { $data = $this->parseTestResultFile($test); $result = new ArcanistUnitTestResult(); $result->setName($test); if (property_exists($data, "test_case")) { $testCase = $data->{"test_case"}; if (property_exists($testCase, "run_duration_millis")) { $result->setDuration($testCase->{"run_duration_millis"} / 1000); } } if ($data->{"test_passed"}) { $result->setResult(ArcanistUnitTestResult::RESULT_PASS); } else { if ($data->{"status"} == 4) { $result->setResult(ArcanistUnitTestResult::RESULT_FAIL); } else { $result->setResult(ArcanistUnitTestResult::RESULT_BROKEN); } } $results[] = $result; } return $results; }
/**
 * Parse test results from Go test report
 * (e.g. `go test -v`)
 *
 * @param string $path Path to test
 * @param string $stdout the Stdout of the command.
 * @param string $stderr the Stderr of the command.
 *
 * @return array|false List of ArcanistUnitTestResult objects, or false
 *                     when a data-race report is truncated mid-stream.
 */
public function parseTestResults($path, $stdout, $stderr = '') {
  $lines = explode("\n", $stderr . $stdout);
  $line_count = count($lines);

  $results = array();
  // The full test case name only arrives on the trailing "ok"/"FAIL"
  // line, so per-test results are buffered here and renamed afterwards.
  $test_case_name = '';
  $test_case_results = array();

  for ($i = 0; $i < $line_count; $i++) {
    $line = $lines[$i];

    // Race detector block, delimited by "==========" markers with a
    // "WARNING: DATA RACE" header on the next line. The isset() guard
    // avoids reading past the end when "===..." is the final line.
    if (strlen($line) >= 18 &&
        strncmp($line, '==================', 18) === 0 &&
        isset($lines[$i + 1]) &&
        strncmp($lines[$i + 1], 'WARNING: DATA RACE', 18) === 0) {
      $i++; // Advance to WARNING: DATA RACE
      $reason = '';
      $test_name = '';
      // Collect the whole report up to the closing "===..." marker.
      while (strncmp($lines[$i], '==================', 18) !== 0) {
        if (strncmp($lines[$i], 'Goroutine', 9) === 0) {
          // The line after the goroutine header names the racing test.
          $meta = array();
          preg_match('/^.*\\.(?P<test_name>[^\\.]+)$/',
            $lines[$i + 1], $meta);
          $test_name = $meta['test_name'] . ' Race Detected';
        }
        $reason .= $lines[$i++] . "\n";
        // Out of lines before the closing marker: truncated report.
        // (Was `$i > count(...)`, which let the while condition read one
        // element past the end of the array before giving up.)
        if ($i >= $line_count) {
          return false;
        }
      }
      $result = new ArcanistUnitTestResult();
      $result->setName($test_name);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setUserData($reason);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- PASS', 8) === 0) {
      // A passing test (verbose mode only).
      $meta = array();
      preg_match(
        '/^--- PASS: (?P<test_name>.+) \\((?P<time>.+)\\s*s(?:econds)?\\).*/',
        $line, $meta);
      $result = new ArcanistUnitTestResult();
      // For now set name without test case; fixNames() adds it later.
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      $result->setDuration((double)$meta['time']);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- FAIL', 8) === 0) {
      // A failing test; the next line carries the failure reason.
      $reason = trim($lines[$i + 1]);
      $meta = array();
      preg_match(
        '/^--- FAIL: (?P<test_name>.+) \\((?P<time>.+)\\s*s(?:econds)?\\).*/',
        $line, $meta);
      $result = new ArcanistUnitTestResult();
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setDuration((double)$meta['time']);
      $result->setUserData($reason . "\n");
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, 'ok', 2) === 0) {
      // Test case summary line: the whole case passed.
      $meta = array();
      preg_match(
        '/^ok[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+(?P<time>.*)s.*/',
        $line, $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      if (empty($test_case_results)) {
        // Not run with -v: there were no per-test lines, so synthesize a
        // single passing result for the whole test case.
        $result = new ArcanistUnitTestResult();
        $result->setName('Go::TestCase::' . $test_case_name);
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        $result->setDuration((double)$meta['time']);
        $results[] = $result;
      } else {
        $test_case_results =
          $this->fixNames($test_case_results, $test_case_name);
        $results = array_merge($results, $test_case_results);
        $test_case_results = array();
      }
      continue;
    }

    if (strncmp($line, "FAIL\t", 5) === 0) {
      // Test case summary line: the case had failures; flush the buffer.
      $meta = array();
      preg_match(
        '/^FAIL[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+.*/',
        $line, $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      $test_case_results =
        $this->fixNames($test_case_results, $test_case_name);
      $results = array_merge($results, $test_case_results);
      $test_case_results = array();
      continue;
    }
  }
  return $results;
}
/**
 * Parse test results from phpunit json report
 *
 * @param string $path Path to test
 * @param string $test_results String containing phpunit json report
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  // No output at all: phpunit itself broke before producing a report.
  if (!$test_results) {
    $result = id(new ArcanistUnitTestResult())
      ->setName($path)
      ->setUserData($this->stderr)
      ->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
    return array($result);
  }

  $report = $this->getJsonReport($test_results);

  // coverage is for all testcases in the executed $path
  $coverage = array();
  if ($this->enableCoverage !== false) {
    $coverage = $this->readCoverage();
  }

  // Tracks whether the last 'testStart' event was matched by a 'test'
  // event; if not, that test crashed the runner mid-flight.
  $last_test_finished = true;

  $results = array();
  foreach ($report as $event) {
    switch (idx($event, 'event')) {
      case 'test':
        // A finished test: handled below.
        break;
      case 'testStart':
        $last_test_finished = false;
        // fall through
      default:
        // Skip non-result events (suiteStart, testStart, etc.).
        continue 2; // switch + loop
    }

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    if ('fail' == idx($event, 'status')) {
      // Assertion failure: record the message plus the stack trace.
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $user_data .= idx($event, 'message') . "\n";
      foreach (idx($event, 'trace') as $trace) {
        $user_data .= sprintf(
          "\n%s:%s",
          idx($trace, 'file'),
          idx($trace, 'line'));
      }
    } else {
      if ('error' == idx($event, 'status')) {
        // phpunit reports skipped/incomplete tests as 'error' events and
        // only distinguishes them by message text.
        if (strpos(idx($event, 'message'), 'Skipped Test') !== false) {
          $status = ArcanistUnitTestResult::RESULT_SKIP;
          $user_data .= idx($event, 'message');
        } else {
          if (strpos(idx($event, 'message'), 'Incomplete Test') !== false) {
            $status = ArcanistUnitTestResult::RESULT_SKIP;
            $user_data .= idx($event, 'message');
          } else {
            // A genuine PHP error: the test is broken, not failing.
            $status = ArcanistUnitTestResult::RESULT_BROKEN;
            $user_data .= idx($event, 'message');
            foreach (idx($event, 'trace') as $trace) {
              $user_data .= sprintf(
                "\n%s:%s",
                idx($trace, 'file'),
                idx($trace, 'line'));
            }
          }
        }
      }
    }

    // Strip the "(ClassName)" suffix phpunit appends to test names.
    $name = preg_replace('/ \\(.*\\)/s', '', idx($event, 'test'));

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    $result->setDuration(idx($event, 'time'));
    $result->setCoverage($coverage);
    $result->setUserData($user_data);

    $results[] = $result;
    $last_test_finished = true;
  }

  // A dangling 'testStart' means the final test ($event still holds it)
  // took the whole runner down; report it as broken.
  if (!$last_test_finished) {
    $results[] = id(new ArcanistUnitTestResult())
      ->setName(idx($event, 'test')) // use last event
      ->setUserData($this->stderr)
      ->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
  }
  return $results;
}