/**
 * Convert Cucumber JSON output into Arcanist unit test results.
 *
 * Each scenario ("element") becomes one result named after its parent
 * feature's description; a scenario fails if any of its steps failed.
 *
 * @param string $output Raw JSON produced by the cucumber run.
 * @return array List of ArcanistUnitTestResult objects.
 */
private function parseOutput($output) {
  $results = array();
  $features = json_decode($output);

  foreach ($features as $feature) {
    if (!property_exists($feature, 'elements')) {
      continue;
    }

    foreach ($feature->elements as $element) {
      $result = new ArcanistUnitTestResult();
      $result->setName($feature->description);

      $all_steps_passed = true;
      foreach ($element->steps as $step) {
        if ($step->result->status === 'failed') {
          $all_steps_passed = false;
          $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
          $result->setUserData($step->result->error_message);
        }
      }

      if ($all_steps_passed) {
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      }

      $results[] = $result;
    }
  }

  return $results;
}
/**
 * Convert an RSpec JSON report into Arcanist unit test results.
 *
 * @param string $output JSON document containing an 'examples' list.
 * @return array List of ArcanistUnitTestResult objects.
 */
private function parseOutput($output) {
  $report = json_decode($output, true);
  $results = array();

  foreach ($report['examples'] as $example) {
    $result = new ArcanistUnitTestResult();
    $result->setName($example['full_description']);

    // Older formatters may omit per-example timing.
    if (array_key_exists('run_time', $example)) {
      $result->setDuration($example['run_time']);
    }

    $status = $example['status'];
    if ($status == 'passed') {
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
    } else if ($status == 'failed') {
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setUserData($example['exception']['message']);
    } else if ($status == 'pending') {
      $result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
    }

    $results[] = $result;
  }

  return $results;
}
/**
 * Runs the test suite.
 *
 * Configures, builds and runs the CMake test target, reporting the
 * entire build + test run as a single unit test result.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
public function run() {
  $results = array();

  $command = '(mkdir -p build && cd build && cmake ..)';
  $command .= '&& make -C build all';
  $command .= '&& make -C build test';

  // Execute the test command & time it.
  $timeStart = microtime(true);
  $future = new ExecFuture($command);
  do {
    $future->read();
    // BUGFIX: sleep() takes whole seconds, so sleep(0.5) truncated to
    // sleep(0) and busy-waited the CPU. usleep() gives the intended
    // half-second poll interval.
    usleep(500000);
  } while (!$future->isReady());
  list($error, $stdout, $stderr) = $future->resolve();
  $timeEnd = microtime(true);

  // Create a unit test result structure.
  $result = new ArcanistUnitTestResult();
  $result->setNamespace('DerpVision');
  $result->setName('Core');
  $result->setDuration($timeEnd - $timeStart);

  if ($error == 0) {
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
  } else {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
    // Only attach the (potentially large) output on failure.
    $result->setUserData($stdout . $stderr);
  }

  $results[] = $result;
  return $results;
}
/**
 * Execute the configured test script and collect the XML results it
 * writes under the configured result path.
 *
 * @return array List of ArcanistUnitTestResult objects; a trailing
 *               BROKEN result is appended if the script itself failed.
 */
private function runTests() {
  $root = $this->getWorkingCopy()->getProjectRoot();
  $script = $this->getConfiguredScript();
  $path = $this->getConfiguredTestResultPath();

  // Remove existing files so we cannot report old results.
  foreach (glob($root . DIRECTORY_SEPARATOR . $path . "/*.xml") as $stale_report) {
    $this->unlink($stale_report);
  }

  // Provide changed paths to process.
  putenv("ARCANIST_DIFF_PATHS=" . implode(PATH_SEPARATOR, $this->getPaths()));

  $future = new ExecFuture('%C %s', $script, $path);
  $future->setCWD($root);

  $err = null;
  try {
    $future->resolvex();
  } catch (CommandException $exc) {
    $err = $exc;
  }

  $results = $this->parseTestResults($root . DIRECTORY_SEPARATOR . $path);

  // A non-zero exit from the script is surfaced as its own result.
  if ($err) {
    $result = new ArcanistUnitTestResult();
    $result->setName('Unit Test Script');
    $result->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
    $result->setUserData("ERROR: Command failed with code {$err->getError()}\nCOMMAND: `{$err->getCommand()}`");
    $results[] = $result;
  }

  return $results;
}
/**
 * Parse TAP ("ok" / "not ok") output into Arcanist unit test results.
 *
 * @param string $output Raw TAP stream, one assertion per line.
 * @return array List of ArcanistUnitTestResult objects.
 */
private function parseOutput($output) {
  $results = array();
  $lines = explode(PHP_EOL, $output);

  foreach ($lines as $index => $line) {
    preg_match('/^(not ok|ok)\\s+\\d+\\s+-?(.*)/', $line, $matches);
    if (count($matches) < 3) {
      // Not a TAP assertion line (plan, diagnostics, etc.).
      continue;
    }

    $result = new ArcanistUnitTestResult();
    $result->setName(trim($matches[2]));

    switch (trim($matches[1])) {
      case 'ok':
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        break;
      case 'not ok':
        // The failure diagnostic is conventionally on the next line.
        // BUGFIX: guard against a failure on the very last line, which
        // previously read past the end of $lines.
        $exception_message = isset($lines[$index + 1])
          ? trim($lines[$index + 1])
          : '';
        $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
        $result->setUserData($exception_message);
        break;
      default:
        // BUGFIX: a bare "continue" inside switch acts like "break" in
        // PHP, so an unrecognized status still appended an empty
        // result. "continue 2" targets the foreach as intended.
        continue 2;
    }

    $results[] = $result;
  }

  return $results;
}
/**
 * Build a single unit test result from a finished test process.
 *
 * @param string $test   Test name.
 * @param int    $err    Process exit code; zero means success.
 * @param string $stdout Captured standard output.
 * @param string $stderr Captured standard error.
 * @return ArcanistUnitTestResult
 */
private function parseTestResults($test, $err, $stdout, $stderr) {
  $result = new ArcanistUnitTestResult();
  $result->setName($test);
  $result->setUserData($stdout . $stderr);

  if ($err == 0) {
    $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
  } else {
    $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
  }

  // The harness prints "# ELAPSED: <n>ms" on stderr; convert to seconds.
  $matches = array();
  if (preg_match("/# ELAPSED: (\\d+)ms/", $stderr, $matches)) {
    $result->setDuration($matches[1] / 1000);
  }

  return $result;
}
/**
 * Parse test results from phpunit json report
 *
 * @param string $path Path to test
 * @param string $test_results String containing phpunit json report
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  $report = $this->getJsonReport($test_results);

  // Coverage applies to every test case executed under $path.
  $coverage = array();
  if ($this->enableCoverage !== false) {
    $coverage = $this->readCoverage();
  }

  $results = array();
  foreach ($report as $event) {
    if ('test' != $event->event) {
      continue;
    }

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    if ('fail' == $event->status) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $user_data .= $event->message . "\n";
      foreach ($event->trace as $frame) {
        $user_data .= sprintf("\n%s:%s", $frame->file, $frame->line);
      }
    } elseif ('error' == $event->status) {
      // phpunit reports skips and incomplete tests as 'error' events;
      // distinguish them by the message prefix.
      if (strpos($event->message, 'Skipped Test') !== false) {
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        $user_data .= $event->message;
      } elseif (strpos($event->message, 'Incomplete Test') !== false) {
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        $user_data .= $event->message;
      } else {
        $status = ArcanistUnitTestResult::RESULT_BROKEN;
        $user_data .= $event->message;
        foreach ($event->trace as $frame) {
          $user_data .= sprintf("\n%s:%s", $frame->file, $frame->line);
        }
      }
    }

    // Strip the " (ClassName)" suffix phpunit appends to test names.
    $name = preg_replace('/ \\(.*\\)/', '', $event->test);

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    $result->setDuration($event->time);
    $result->setCoverage($coverage);
    $result->setUserData($user_data);
    $results[] = $result;
  }

  return $results;
}
/**
 * Run the JS test suite via "npm run coverage" and report the whole
 * run as a single pass/fail result.
 *
 * @return array List containing one ArcanistUnitTestResult.
 */
public function run() {
  $working_copy = $this->getWorkingCopy();
  $this->projectRoot = $working_copy->getProjectRoot();

  $future = new ExecFuture('npm run coverage');
  $future->setCWD($this->projectRoot);
  list($err, $stdout, $stderr) = $future->resolve();

  $result = new ArcanistUnitTestResult();
  $result->setName("Node test engine");
  $result->setUserData($stdout);
  $result->setResult($err
    ? ArcanistUnitTestResult::RESULT_FAIL
    : ArcanistUnitTestResult::RESULT_PASS);

  return array($result);
}
/**
 * Run the Karma JS test suite and convert its JUnit-style XML report
 * into Arcanist unit test results.
 *
 * Reports a single SKIP result (rather than failing) when the karma
 * binary is not installed on $PATH.
 *
 * @return array List of ArcanistUnitTestResult objects.
 */
public function runJs() {
  // First, check to see if karma is on $PATH:
  list($err, $stdout, $_) = exec_manual("which karma");
  if ($err != 0) {
    $result = new ArcanistUnitTestResult();
    $result->setName("Karma not found. Skipping js tests...");
    $result->setResult(ArcanistUnitTestResult::RESULT_SKIP);
    $result->setDuration(0);
    return array($result);
  }

  // Karma IS on the path; run it from the JS client directory.
  $old_dir = getcwd();
  $project_root = $this->getWorkingCopy()->getProjectRoot();
  chdir($project_root . '/client/js');
  exec_manual("karma start karma-conf-oneshot.js");
  chdir($old_dir);

  // Read from the text-results.xml file.
  $xml = file_get_contents($project_root . '/client/test-results.xml');
  $doc = new SimpleXMLElement($xml);

  // Destroy the test-results.xml file.
  unlink($project_root . '/client/test-results.xml');

  // Extract all the test cases.
  $results = array();
  foreach ($doc->testsuite as $suite) {
    // BUGFIX: SimpleXML attribute access yields SimpleXMLElement
    // objects, not native types; cast before string manipulation and
    // before handing the duration to setDuration().
    $suite_name = (string) $suite['name'];
    foreach ($suite->testcase as $case) {
      $case_name = (string) $case['name'];
      $time = (double) $case['time'];
      $fixture_name = substr((string) $case['classname'], strlen($suite_name) + 1);

      // Did we fail?
      $failure = (string) $case->failure;

      // Convert each to an ArcanistUnitTestResult.
      $result = new ArcanistUnitTestResult();
      $result->setName($fixture_name . ' ' . $case_name);
      $result->setResult($failure
        ? ArcanistUnitTestResult::RESULT_FAIL
        : ArcanistUnitTestResult::RESULT_PASS);
      $result->setUserData($failure);
      $result->setDuration($time);
      $results[] = $result;
    }
  }
  return $results;
}
/**
 * Record the outcome of the currently running test, attaching coverage,
 * timing, a link, and an optional human-readable reason.
 *
 * @param string $test_result One of the ArcanistUnitTestResult::RESULT_*
 *                            constants.
 * @param string $reason      Explanation stored as user data.
 * @return void
 */
private final function resultTest($test_result, $reason) {
  $coverage = $this->endCoverage();

  $result = new ArcanistUnitTestResult();
  $result->setCoverage($coverage);
  $result->setNamespace(get_class($this));
  $result->setName($this->runningTest);
  $result->setLink($this->getLink($this->runningTest));
  $result->setResult($test_result);
  $result->setDuration(microtime(true) - $this->testStartTime);
  $result->setUserData($reason);

  $this->results[] = $result;

  // Stream the result immediately when a renderer is attached.
  if ($this->renderer) {
    echo $this->renderer->renderUnitResult($result);
  }
}
/**
 * Convert valgrind output (or coverage data) from the watchman process
 * into unit test results.
 *
 * Terminates the process first so logs are complete, then reads the
 * valgrind XML log, classifies findings into definite leaks, possible
 * leaks and other errors, and reports each category as its own result.
 *
 * @return array List of ArcanistUnitTestResult objects (empty when
 *               valgrind was not enabled).
 */
function generateValgrindTestResults() {
  $this->terminateProcess();

  // Coverage and valgrind runs are mutually exclusive modes.
  if ($this->coverage) {
    return $this->generateCoverageResults();
  }

  if (!$this->valgrind) {
    return array();
  }

  $definite_leaks = array();
  $possible_leaks = array();
  $errors = array();
  $descriptors = array();

  // valgrind seems to use an interesting definition of valid XML.
  // Tolerate having multiple documents in one file.
  // Confluence of weird bugs; hhvm has very low preg_match limits
  // so we have to grovel around to make sure that we read this
  // stuff in properly :-/
  $documents = array();
  $in_doc = false;
  $doc = null;
  // NOTE(review): $in_doc is never reset to false after a document
  // closes, so any lines between documents get swept into the next
  // $doc — confirm that multi-document logs really parse as intended.
  foreach (file($this->vg_log . '.xml') as $line) {
    if ($in_doc) {
      $doc[] = $line;
      if (preg_match(',</valgrindoutput>,', $line)) {
        $documents[] = implode("\n", $doc);
        $doc = null;
      }
    } else {
      if (preg_match(',<valgrindoutput>,', $line)) {
        $doc = array($line);
        $in_doc = true;
      }
    }
  }

  libxml_use_internal_errors(true);
  foreach ($documents as $data) {
    libxml_clear_errors();
    // Parse errors are collected through libxml below, so suppress the
    // warning the loader would otherwise emit here.
    $vg = @simplexml_load_string($data);
    if (is_object($vg)) {
      // Bucket each <error> record by its leak kind.
      foreach ($vg->error as $err) {
        $render = $this->renderVGResult($err);
        switch ($err->kind) {
          case 'Leak_DefinitelyLost':
            $definite_leaks[] = $render;
            break;
          case 'Leak_PossiblyLost':
            $possible_leaks[] = $render;
            break;
          default:
            $errors[] = $render;
        }
      }
      // These look like fd leak records, but they're not documented
      // as such. These go away if we turn off track-fds
      foreach ($vg->stack as $stack) {
        // Suppressing this for now: posix_spawn seems to confuse
        // some valgrind's, particularly the version we run on travis,
        // as it records open descriptors from the exec'ing child
        // $descriptors[] = $this->renderVGStack($stack);
      }
    } else {
      // Surface libxml parse failures with a few lines of context
      // around each reported location so the broken region is findable.
      $why = 'failed to parse xml';
      $lines = explode("\n", $data);
      foreach (libxml_get_errors() as $err) {
        $slice = array_slice($lines, $err->line - 3, 6);
        $slice = implode("\n", $slice);
        $why .= sprintf("\n%s (line %d col %d) %s", $err->message, $err->line, $err->column, $slice);
      }
      printf("parsing valgrind output: %s\n", $why);
    }
  }

  $results = array();

  // Possible leaks are advisory: reported as SKIP, not FAIL.
  $res = new ArcanistUnitTestResult();
  $res->setName('valgrind possible leaks');
  $res->setUserData(implode("\n\n", $possible_leaks));
  $res->setResult(count($possible_leaks) ? ArcanistUnitTestResult::RESULT_SKIP : ArcanistUnitTestResult::RESULT_PASS);
  $results[] = $res;

  // $descriptors is never populated while the loop above is suppressed,
  // so this result currently always passes.
  $res = new ArcanistUnitTestResult();
  $res->setName('descriptor leaks');
  $res->setUserData(implode("\n\n", $descriptors));
  $res->setResult(count($descriptors) ? ArcanistUnitTestResult::RESULT_FAIL : ArcanistUnitTestResult::RESULT_PASS);
  $results[] = $res;

  $res = new ArcanistUnitTestResult();
  $res->setName('valgrind leaks');
  $res->setUserData(implode("\n\n", $definite_leaks));
  $leak_res = count($definite_leaks) ? ArcanistUnitTestResult::RESULT_FAIL : ArcanistUnitTestResult::RESULT_PASS;
  if ($leak_res == ArcanistUnitTestResult::RESULT_FAIL && getenv('TRAVIS') == 'true') {
    // Travis has false positives at this time, downgrade
    $leak_res = ArcanistUnitTestResult::RESULT_SKIP;
  }
  $res->setResult($leak_res);
  $results[] = $res;

  $res = new ArcanistUnitTestResult();
  $res->setName('valgrind errors');
  $res->setUserData(implode("\n\n", $errors));
  $res->setResult(count($errors) ? ArcanistUnitTestResult::RESULT_FAIL : ArcanistUnitTestResult::RESULT_PASS);
  $results[] = $res;

  return $results;
}
/**
 * Parses the test results from xUnit.
 *
 * @param string $xunit_tmp The name of the xUnit results file.
 * @param string $coverage  The name of the coverage file if one was
 *                          provided by `buildTestFuture`. This is passed
 *                          through to `parseCoverageResult`.
 * @return array Test results.
 */
private function parseTestResult($xunit_tmp, $coverage) {
  $xunit_dom = new DOMDocument();
  $xunit_dom->loadXML(Filesystem::readFile($xunit_tmp));

  // Coverage is identical for every test in this run, so parse it once
  // up front instead of once per <test> element.
  $parsed_coverage = null;
  if ($coverage != null) {
    $parsed_coverage = $this->parseCoverageResult($coverage);
  }

  $results = array();
  $tests = $xunit_dom->getElementsByTagName('test');
  foreach ($tests as $test) {
    $name = $test->getAttribute('name');
    $time = $test->getAttribute('time');

    // Any result value we do not recognize is reported as UNSOUND.
    $status = ArcanistUnitTestResult::RESULT_UNSOUND;
    switch ($test->getAttribute('result')) {
      case 'Pass':
        $status = ArcanistUnitTestResult::RESULT_PASS;
        break;
      case 'Fail':
        $status = ArcanistUnitTestResult::RESULT_FAIL;
        break;
      case 'Skip':
        $status = ArcanistUnitTestResult::RESULT_SKIP;
        break;
    }

    // Pull the message (and stack trace, if present) from either the
    // <reason> or <failure> child node.
    $userdata = '';
    $reason = $test->getElementsByTagName('reason');
    $failure = $test->getElementsByTagName('failure');
    if ($reason->length > 0 || $failure->length > 0) {
      $node = $reason->length > 0 ? $reason : $failure;
      $message = $node->item(0)->getElementsByTagName('message');
      if ($message->length > 0) {
        $userdata = $message->item(0)->nodeValue;
      }
      $stacktrace = $node->item(0)->getElementsByTagName('stack-trace');
      if ($stacktrace->length > 0) {
        $userdata .= "\n" . $stacktrace->item(0)->nodeValue;
      }
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    // BUGFIX: the XML attribute is a string; cast so setDuration() gets
    // a float (consistent with the other xunit parser in this file).
    $result->setDuration((double) $time);
    $result->setUserData($userdata);
    if ($parsed_coverage !== null) {
      $result->setCoverage($parsed_coverage);
    }
    $results[] = $result;
  }

  return $results;
}
/**
 * Parse test results from phpunit json report
 *
 * @param string $path Path to test
 * @param string $test_results String containing phpunit json report
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  // An empty report means phpunit crashed before emitting anything;
  // surface stderr as a single BROKEN result.
  if (!$test_results) {
    $result = id(new ArcanistUnitTestResult())
      ->setName($path)
      ->setUserData($this->stderr)
      ->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
    return array($result);
  }

  $report = $this->getJsonReport($test_results);

  // coverage is for all testcases in the executed $path
  $coverage = array();
  if ($this->enableCoverage !== false) {
    $coverage = $this->readCoverage();
  }

  // Tracks whether the most recent 'testStart' event was matched by a
  // completing 'test' event; if not, phpunit died mid-test.
  $last_test_finished = true;

  $results = array();
  foreach ($report as $event) {
    switch (idx($event, 'event')) {
      case 'test':
        // A completed test: fall out of the switch and process it.
        break;
      case 'testStart':
        $last_test_finished = false;
        // fall through
      default:
        continue 2; // switch + loop
    }

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    if ('fail' == idx($event, 'status')) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $user_data .= idx($event, 'message') . "\n";
      foreach (idx($event, 'trace') as $trace) {
        $user_data .= sprintf("\n%s:%s", idx($trace, 'file'), idx($trace, 'line'));
      }
    } else {
      if ('error' == idx($event, 'status')) {
        // phpunit reports skips and incomplete tests as 'error' events;
        // distinguish them by the message prefix.
        if (strpos(idx($event, 'message'), 'Skipped Test') !== false) {
          $status = ArcanistUnitTestResult::RESULT_SKIP;
          $user_data .= idx($event, 'message');
        } else {
          if (strpos(idx($event, 'message'), 'Incomplete Test') !== false) {
            $status = ArcanistUnitTestResult::RESULT_SKIP;
            $user_data .= idx($event, 'message');
          } else {
            $status = ArcanistUnitTestResult::RESULT_BROKEN;
            $user_data .= idx($event, 'message');
            foreach (idx($event, 'trace') as $trace) {
              $user_data .= sprintf("\n%s:%s", idx($trace, 'file'), idx($trace, 'line'));
            }
          }
        }
      }
    }

    // Strip the " (ClassName)" suffix phpunit appends to test names.
    $name = preg_replace('/ \\(.*\\)/s', '', idx($event, 'test'));

    $result = new ArcanistUnitTestResult();
    $result->setName($name);
    $result->setResult($status);
    $result->setDuration(idx($event, 'time'));
    $result->setCoverage($coverage);
    $result->setUserData($user_data);
    $results[] = $result;
    $last_test_finished = true;
  }

  // A test started but never finished: report it as BROKEN using the
  // last event observed by the loop above.
  if (!$last_test_finished) {
    $results[] = id(new ArcanistUnitTestResult())
      ->setName(idx($event, 'test'))
      ->setUserData($this->stderr)
      ->setResult(ArcanistUnitTestResult::RESULT_BROKEN);
  }
  return $results;
}
/**
 * Run the watchman integration test suite.
 *
 * Discovers WatchmanTestCase subclasses (plus the ruby spec), runs each
 * against a live WatchmanInstance, and gathers all results, including
 * valgrind findings from every instance used.
 *
 * @param array $tests Explicit list of test paths; empty to discover
 *                     everything under tests/integration/.
 * @return array Flattened list of ArcanistUnitTestResult objects.
 */
public function runIntegrationTests($tests) {
  // Now find all the test programs
  $root = $this->getProjectRoot();
  $test_dir = $root . "/tests/integration/";

  if (!$tests) {
    $paths = glob($test_dir . "*.php");
    $paths[] = 'ruby/ruby-watchman/spec/ruby_watchman_spec.rb';
  } else {
    $paths = $tests;
  }

  // Clear logs from any earlier run; errors are suppressed because the
  // files may simply not exist yet.
  foreach (array('/tmp/watchman-test.log', '/tmp/watchman-valgrind.log', '/tmp/watchman-valgrind.xml', '/tmp/watchman-callgrind.txt') as $log) {
    @unlink($log);
  }

  foreach ($paths as $path) {
    if (preg_match("/\\.php\$/", $path) && file_exists($path)) {
      // Don't pull in files starting with "_"; we're using
      // those as helpers for triggers
      $base = basename($path);
      if ($base[0] != '_') {
        require_once $path;
      }
    }
  }

  // We test for this in a test case
  putenv("WATCHMAN_EMPTY_ENV_VAR=");

  $coverage = $this->getEnableCoverage();
  // Reuse the cached instance across invocations when available.
  if (!$this->first_inst) {
    $this->first_inst = new WatchmanInstance($root, $coverage);
  }
  $first_inst = $this->first_inst;
  $instances = array($first_inst);

  // Helper for python or other language tests
  putenv("WATCHMAN_SOCK=" . $first_inst->getFullSockName());

  // Exercise the different serialization combinations.
  // NOTE(review): the matrix is empty here, so the CLI re-run loop
  // below never executes — confirm whether that is intentional.
  $cli_matrix = array();

  // Find all the test cases that were declared
  $results = array();
  foreach (get_declared_classes() as $name) {
    $ref = new ReflectionClass($name);
    if (!$ref->isSubclassOf('WatchmanTestCase')) {
      continue;
    }
    // Good enough; let's use it
    $test_case = newv($name, array());

    // Cases that need bespoke global config get their own instance.
    $config = $test_case->getGlobalConfig();
    if ($config) {
      $instance = new WatchmanInstance($root, $coverage, $config);
      $instances[] = $instance;
    } else {
      $instance = $first_inst;
    }
    $test_case->setWatchmanInstance($instance);

    // If the instance is already dead, report it and stop running.
    if (!$instance->getProcessID()) {
      $res = new ArcanistUnitTestResult();
      $res->setName('dead');
      $res->setUserData('died before test start');
      $res->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $results[] = array($res);
      break;
    }

    $test_case->setRoot($root);
    $test_case->setPaths($paths);
    $results[] = $test_case->run();

    if (!$test_case->needsLiveConnection()) {
      foreach ($cli_matrix as $mname => $args) {
        $test_case->useCLI($args);
        $cli_results = $test_case->run();
        foreach ($cli_results as $res) {
          $res->setName($res->getName() . " [CLI: {$mname}]");
        }
        $results[] = $cli_results;
      }
    }
  }

  // Run the ruby spec via the Makefile rule, timed, with a 10s cap.
  foreach ($paths as $path) {
    if (!preg_match('/\\.rb$/', $path)) {
      continue;
    }
    if (!file_exists($path)) {
      // Was deleted in this (pending) rev
      continue;
    }
    $start = microtime(true);
    $future = new ExecFuture("PATH=\"{$root}:\$PATH\" \${MAKE:-make} rb-tests");
    $future->setTimeout(10);
    list($status, $out, $err) = $future->resolve();
    $end = microtime(true);
    $res = new ArcanistUnitTestResult();
    $res->setName($path);
    $res->setUserData($out . $err);
    $res->setDuration($end - $start);
    $res->setResult($status == 0 ? ArcanistUnitTestResult::RESULT_PASS : ArcanistUnitTestResult::RESULT_FAIL);
    $results[] = array($res);
  }

  // Every instance used may contribute valgrind leak/error results.
  foreach ($instances as $instance) {
    $results[] = $instance->generateValgrindTestResults();
  }

  // $results is a list of lists; flatten it.
  $results = array_mergev($results);
  return $results;
}
/**
 * Run every discovered django test suite and collect the results.
 *
 * @return array Results, keyed by test name for suite-level failures.
 */
public function run() {
  $working_copy = $this->getWorkingCopy();
  $project_root = $working_copy->getProjectRoot();

  $this->setEnableCoverage($this->getConfig("unit.coverage", true));

  // Run everything relative to project root, so that our paths match up
  // with $this->getPaths().
  chdir($this->getWorkingCopy()->getProjectRoot());

  $all_results = array();

  // Find all test files.
  $test_file_name = $this->getPythonTestFileName();
  $test_file_dirs = $this->getTestFileDirs();

  // Delete the previous test results.
  $this->removeDirectory("test_results");

  // Each test found is a django project to test.
  foreach ($test_file_dirs as $test_file_dir) {
    $test_file_path = $test_file_dir . $test_file_name;
    $suite_outcome = $this->runPythonTestSuite($project_root, $test_file_path);
    $test_lines = $suite_outcome["testLines"];
    $test_exit_code = $suite_outcome["testExitCode"];
    $results = $suite_outcome["results"];

    // No parsed tests plus a nonzero exit code means the whole suite
    // failed to run; report that as a single failure carrying the raw
    // output of the run.
    if (count($results) == 0 && $test_exit_code != 0) {
      $fail_test_name = "Failed to run: " . $test_file_path;
      $result = new ArcanistUnitTestResult();
      $result->setName($fail_test_name);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setUserData(join("\n", $test_lines));
      $all_results[$fail_test_name] = $result;
      // Skip coverage as there is none.
      continue;
    }

    if ($this->getEnableCoverage() !== false) {
      $results = $this->processCoverageResults($project_root, $results);
    }
    $all_results = array_merge($all_results, $results);
  }

  return $all_results;
}
/**
 * Parse test results from Go test report
 * (e.g. `go test -v`)
 *
 * @param string $path Path to test
 * @param string $test_results String containing Go test output
 *
 * @return array
 */
public function parseTestResults($path, $test_results) {
  $test_results = explode("\n", $test_results);
  $results = array();
  // We'll get our full test case name at the end and add it back in
  $test_case_name = '';
  // Temp store for test case results (in case we run multiple test cases)
  $test_case_results = array();

  foreach ($test_results as $i => $line) {
    if (strncmp($line, '--- PASS', 8) === 0) {
      // We have a passing test
      $meta = array();
      preg_match('/^--- PASS: (?P<test_name>.+) \\((?P<time>.+) seconds\\).*/', $line, $meta);
      $result = new ArcanistUnitTestResult();
      // For now set name without test case, we'll add it later
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      // BUGFIX: the regex capture is a string; cast so setDuration()
      // gets a float (consistent with the newer Go parser in this file).
      $result->setDuration((double) $meta['time']);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- FAIL', 8) === 0) {
      // The failure explanation follows on the next line. BUGFIX: guard
      // against a FAIL record on the very last line of output.
      $reason = isset($test_results[$i + 1])
        ? trim($test_results[$i + 1])
        : '';
      $meta = array();
      preg_match('/^--- FAIL: (?P<test_name>.+) \\((?P<time>.+) seconds\\).*/', $line, $meta);
      $result = new ArcanistUnitTestResult();
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setDuration((double) $meta['time']);
      $result->setUserData($reason . "\n");
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, 'ok', 2) === 0) {
      $meta = array();
      preg_match('/^ok[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+(?P<time>.*)s.*/', $line, $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      // Our test case passed. If no per-test results were collected we
      // were not in verbose (-v) mode, so emit one result for the case.
      if (empty($test_case_results)) {
        $test_name = 'Go::TestCase::' . $test_case_name;
        $result = new ArcanistUnitTestResult();
        $result->setName($test_name);
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        $result->setDuration((double) $meta['time']);
        $results[] = $result;
      } else {
        $test_case_results = $this->fixNames($test_case_results, $test_case_name);
        $results = array_merge($results, $test_case_results);
        $test_case_results = array();
      }
      continue;
    }

    if (strncmp($line, "FAIL\t", 5) === 0) {
      // The whole test case failed; attach its name to the collected
      // per-test results.
      $meta = array();
      preg_match('/^FAIL[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+.*/', $line, $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      $test_case_results = $this->fixNames($test_case_results, $test_case_name);
      $results = array_merge($results, $test_case_results);
      $test_case_results = array();
      continue;
    }
  }

  return $results;
}
/**
 * Parse an xunit XML report (plus optional coverage data) into Arcanist
 * unit test results.
 *
 * @param string $path      Path that was tested.
 * @param string $xunit_tmp Path to the xunit XML report file.
 * @param string $cover_tmp Path to the coverage report file.
 * @return array List of ArcanistUnitTestResult objects.
 */
public function parseTestResults($path, $xunit_tmp, $cover_tmp) {
  // xunit xsd: https://gist.github.com/959290
  $xunit_dom = new DOMDocument();
  $xunit_dom->loadXML(Filesystem::readFile($xunit_tmp));

  // coverage is for all testcases in the executed $path
  $coverage = array();
  if ($this->getEnableCoverage() !== false) {
    $coverage = $this->readCoverage($cover_tmp);
  }

  $results = array();
  $testcases = $xunit_dom->getElementsByTagName("testcase");
  foreach ($testcases as $testcase) {
    $classname = $testcase->getAttribute("classname");
    $name = $testcase->getAttribute("name");
    $time = $testcase->getAttribute("time");

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = "";

    // A skipped test is a test which was ignored using framework
    // mechanisms (e.g. @skip decorator).
    $skipped = $testcase->getElementsByTagName("skipped");
    if ($skipped->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_SKIP;
      $messages = array();
      for ($ii = 0; $ii < $skipped->length; $ii++) {
        $messages[] = trim($skipped->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages);
    }

    // Failure is a test which the code has explicitly failed by using
    // the mechanisms for that purpose, e.g. via an assertEquals.
    $failures = $testcase->getElementsByTagName("failure");
    if ($failures->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $messages = array();
      for ($ii = 0; $ii < $failures->length; $ii++) {
        $messages[] = trim($failures->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages) . "\n";
    }

    // An errored test is one that had an unanticipated problem, e.g. an
    // unchecked throwable, or a problem with an implementation of the
    // test.
    $errors = $testcase->getElementsByTagName("error");
    if ($errors->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_BROKEN;
      $messages = array();
      for ($ii = 0; $ii < $errors->length; $ii++) {
        $messages[] = trim($errors->item($ii)->nodeValue, " \n");
      }
      $user_data .= implode("\n", $messages) . "\n";
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($classname . "." . $name);
    $result->setResult($status);
    // BUGFIX: the XML attribute is a string; cast so setDuration() gets
    // a float (consistent with the other xunit parser in this file).
    $result->setDuration((double) $time);
    $result->setCoverage($coverage);
    $result->setUserData($user_data);
    $results[] = $result;
  }

  return $results;
}
/**
 * Parse test results from provided input and return an array
 * of @{class:ArcanistUnitTestResult}.
 *
 * @param string $test_results String containing test results
 *
 * @return array ArcanistUnitTestResult
 */
public function parseTestResults($test_results) {
  if (!strlen($test_results)) {
    throw new Exception(pht('%s argument to %s must not be empty', 'test_results', 'parseTestResults()'));
  }

  // xunit xsd: https://gist.github.com/959290
  $xunit_dom = new DOMDocument();
  $load_success = @$xunit_dom->loadXML($test_results);

  if (!$load_success) {
    // Include the head of the input in the error so the bad report is
    // identifiable from the exception alone.
    $input_start = id(new PhutilUTF8StringTruncator())
      ->setMaximumGlyphs(150)
      ->truncateString($test_results);
    throw new Exception(sprintf("%s\n\n%s", pht('Failed to load XUnit report; Input starts with:'), $input_start));
  }

  // Gather and join the trimmed text of every node in a DOMNodeList.
  $gather_messages = function ($nodes) {
    $messages = array();
    for ($ii = 0; $ii < $nodes->length; $ii++) {
      $messages[] = trim($nodes->item($ii)->nodeValue, " \n");
    }
    return implode("\n", $messages);
  };

  $results = array();
  foreach ($xunit_dom->getElementsByTagName('testcase') as $testcase) {
    $classname = $testcase->getAttribute('classname');
    $name = $testcase->getAttribute('name');
    $time = $testcase->getAttribute('time');

    $status = ArcanistUnitTestResult::RESULT_PASS;
    $user_data = '';

    // A skipped test is a test which was ignored using framework
    // mechanisms (e.g. @skip decorator)
    $skipped = $testcase->getElementsByTagName('skipped');
    if ($skipped->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_SKIP;
      $user_data .= $gather_messages($skipped);
    }

    // Failure is a test which the code has explicitly failed by using
    // the mechanisms for that purpose. e.g., via an assertEquals
    $failures = $testcase->getElementsByTagName('failure');
    if ($failures->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_FAIL;
      $user_data .= $gather_messages($failures) . "\n";
    }

    // An errored test is one that had an unanticipated problem. e.g., an
    // unchecked throwable, or a problem with an implementation of the test.
    $errors = $testcase->getElementsByTagName('error');
    if ($errors->length > 0) {
      $status = ArcanistUnitTestResult::RESULT_BROKEN;
      $user_data .= $gather_messages($errors) . "\n";
    }

    $result = new ArcanistUnitTestResult();
    $result->setName($classname . '.' . $name);
    $result->setResult($status);
    $result->setDuration((double) $time);
    $result->setUserData($user_data);

    $results[] = $result;
  }

  return $results;
}
/**
 * Mark the current running test as skipped.
 *
 * @param string Description for why this test was skipped.
 * @return void
 * @task internal
 */
private final function skipTest($reason) {
  $skip = new ArcanistUnitTestResult();
  $skip->setCoverage($this->endCoverage());
  $skip->setName($this->runningTest);
  $skip->setResult(ArcanistUnitTestResult::RESULT_SKIP);
  $skip->setDuration(microtime(true) - $this->testStartTime);
  $skip->setUserData($reason);

  $this->results[] = $skip;
}
/**
 * Parse test results from Go test report
 * (e.g. `go test -v`)
 *
 * @param string $path Path to test
 * @param string $stdout the Stdout of the command.
 * @param string $stderr the Stderr of the command.
 *
 * @return array|false Unit test results, or false if a data-race report
 *                     was truncated mid-stream.
 */
public function parseTestResults($path, $stdout, $stderr = '') {
  $test_results = $stderr . $stdout;
  $test_results = explode("\n", $test_results);

  $results = array();
  // We'll get our full test case name at the end and add it back in
  $test_case_name = '';
  // Temp store for test case results (in case we run multiple test cases)
  $test_case_results = array();

  for ($i = 0; $i < count($test_results); $i++) {
    $line = $test_results[$i];

    // Race-detector reports are fenced by "==========" lines with a
    // "WARNING: DATA RACE" header on the following line. BUGFIX: guard
    // the $i + 1 lookahead so a fence on the final line does not read
    // past the end of the array.
    if (strlen($line) >= 18 &&
        strncmp($line, '==================', 18) === 0 &&
        isset($test_results[$i + 1]) &&
        strncmp($test_results[$i + 1], 'WARNING: DATA RACE', 18) === 0) {
      $i++; // Advance to WARNING: DATA RACE

      $reason = '';
      $test_name = '';
      // Collect all report lines up to the closing "==========" fence.
      while (strncmp($test_results[$i], '==================', 18) !== 0) {
        if (strncmp($test_results[$i], 'Goroutine', 9) === 0) {
          $meta = array();
          preg_match('/^.*\\.(?P<test_name>[^\\.]+)$/', $test_results[$i + 1], $meta);
          $test_name = $meta['test_name'] . ' Race Detected';
        }
        $reason .= $test_results[$i++] . "\n";
        // Are we out of lines? BUGFIX: the check was "$i > count(...)",
        // an off-by-one which still allowed one read past the end of
        // the array before bailing.
        if ($i >= count($test_results)) {
          return false;
        }
      }

      $result = new ArcanistUnitTestResult();
      $result->setName($test_name);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setUserData($reason);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- PASS', 8) === 0) {
      // We have a passing test
      $meta = array();
      preg_match('/^--- PASS: (?P<test_name>.+) \\((?P<time>.+)\\s*s(?:econds)?\\).*/', $line, $meta);
      $result = new ArcanistUnitTestResult();
      // For now set name without test case, we'll add it later
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
      $result->setDuration((double) $meta['time']);
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, '--- FAIL', 8) === 0) {
      // We have a failing test; the explanation is on the next line.
      $reason = trim($test_results[$i + 1]);
      $meta = array();
      preg_match('/^--- FAIL: (?P<test_name>.+) \\((?P<time>.+)\\s*s(?:econds)?\\).*/', $line, $meta);
      $result = new ArcanistUnitTestResult();
      $result->setName($meta['test_name']);
      $result->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $result->setDuration((double) $meta['time']);
      $result->setUserData($reason . "\n");
      $test_case_results[] = $result;
      continue;
    }

    if (strncmp($line, 'ok', 2) === 0) {
      $meta = array();
      preg_match('/^ok[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+(?P<time>.*)s.*/', $line, $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      // Our test case passed. If no per-test results were collected we
      // were not in verbose (-v) mode, so emit one result for the case.
      if (empty($test_case_results)) {
        $test_name = 'Go::TestCase::' . $test_case_name;
        $result = new ArcanistUnitTestResult();
        $result->setName($test_name);
        $result->setResult(ArcanistUnitTestResult::RESULT_PASS);
        $result->setDuration((double) $meta['time']);
        $results[] = $result;
      } else {
        $test_case_results = $this->fixNames($test_case_results, $test_case_name);
        $results = array_merge($results, $test_case_results);
        $test_case_results = array();
      }
      continue;
    }

    if (strncmp($line, "FAIL\t", 5) === 0) {
      // The whole test case failed; attach its name to the collected
      // per-test results.
      $meta = array();
      preg_match('/^FAIL[\\s\\t]+(?P<test_name>\\w.*)[\\s\\t]+.*/', $line, $meta);
      $test_case_name = str_replace('/', '::', $meta['test_name']);
      $test_case_results = $this->fixNames($test_case_results, $test_case_name);
      $results = array_merge($results, $test_case_results);
      $test_case_results = array();
      continue;
    }
  }

  return $results;
}
/**
 * Builds the project and runs the full integration test suite: PHP-based
 * WatchmanTestCase classes, Python tests, and the Ruby spec, plus any
 * valgrind results collected from the watchman instances that were started.
 *
 * @param $tests  Explicit list of test paths to run, or a falsy value to
 *                discover the default set (tests/integration/*.php,
 *                python/tests/*.py and the ruby-watchman spec).
 * @return        Flat list of ArcanistUnitTestResult objects.
 */
public function runIntegrationTests($tests) {
  // Build everything first; tests exercise the built binaries.
  $this->make('all');

  // Now find all the test programs
  $root = $this->getProjectRoot();
  $test_dir = $root . "/tests/integration/";

  if (!$tests) {
    // No explicit selection: discover the default test set.
    $paths = glob($test_dir . "*.php");
    foreach (glob('python/tests/*.py') as $file) {
      $paths[] = $file;
    }
    $paths[] = 'ruby/ruby-watchman/spec/ruby_watchman_spec.rb';
  } else {
    $paths = $tests;
  }

  // Clear out stale logs from a previous run; errors are suppressed
  // because the files may legitimately not exist.
  foreach (array('/tmp/watchman-test.log',
                 '/tmp/watchman-valgrind.log',
                 '/tmp/watchman-valgrind.xml',
                 '/tmp/watchman-callgrind.txt') as $log) {
    @unlink($log);
  }

  // Load the PHP test cases so their classes are declared and can be
  // discovered via get_declared_classes() below.
  foreach ($paths as $path) {
    if (preg_match("/\\.php\$/", $path) && file_exists($path)) {
      require_once $path;
    }
  }

  // We test for this in a test case
  putenv("WATCHMAN_EMPTY_ENV_VAR=");

  $coverage = $this->getEnableCoverage();
  $first_inst = new WatchmanInstance($root, $coverage);
  $instances = array($first_inst);

  // Helper for python or other language tests
  putenv("WATCHMAN_SOCK=" . $first_inst->getFullSockName());

  // Exercise the different serialization combinations
  $cli_matrix = array(
    'bser/json' => '--server-encoding=bser --output-encoding=json',
    'json/json' => '--server-encoding=json --output-encoding=json');

  // Find all the test cases that were declared
  // NOTE: $results accumulates lists of results (one list per test case /
  // runner); it is flattened with array_mergev() at the end.
  $results = array();
  foreach (get_declared_classes() as $name) {
    $ref = new ReflectionClass($name);
    if (!$ref->isSubclassOf('WatchmanTestCase')) {
      continue;
    }

    // Good enough; let's use it
    $test_case = newv($name, array());

    // Test cases that need a custom watchman config get their own
    // dedicated instance; everyone else shares $first_inst.
    $config = $test_case->getGlobalConfig();
    if ($config) {
      $instance = new WatchmanInstance($root, $coverage, $config);
      $instances[] = $instance;
    } else {
      $instance = $first_inst;
    }
    $test_case->setWatchmanInstance($instance);

    // If the instance failed to start, record a synthetic failure and
    // stop running PHP test cases entirely (break, not continue).
    if (!$instance->getProcessID()) {
      $res = new ArcanistUnitTestResult();
      $res->setName('dead');
      $res->setUserData('died before test start');
      $res->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $results[] = array($res);
      break;
    }

    $test_case->setRoot($root);
    $test_case->setPaths($paths);
    $results[] = $test_case->run();

    // Re-run the same case through the CLI transport for each
    // serialization combination, unless it needs a live connection.
    if (!$test_case->needsLiveConnection()) {
      foreach ($cli_matrix as $mname => $args) {
        $test_case->useCLI($args);
        $cli_results = $test_case->run();
        foreach ($cli_results as $res) {
          // Tag results so CLI runs are distinguishable from the
          // direct-connection run of the same test.
          $res->setName($res->getName() . " [CLI: {$mname}]");
        }
        $results[] = $cli_results;
      }
    }
  }

  // Also run the python tests if we built them
  foreach ($paths as $path) {
    if (!preg_match('/test.*\\.py$/', $path)) {
      continue;
    }
    if (!file_exists($path)) {
      // Was deleted in this (pending) rev
      continue;
    }
    if (!file_exists("python/pywatchman/bser.so")) {
      // Not enabled by the build
      continue;
    }
    // Note that this implicitly starts the instance if we haven't
    // yet done so.  This is important if the only test paths are
    // python paths
    if (!$first_inst->getProcessID()) {
      $res = new ArcanistUnitTestResult();
      $res->setName('dead');
      $res->setUserData('died before test start');
      $res->setResult(ArcanistUnitTestResult::RESULT_FAIL);
      $results[] = array($res);
      break;
    }
    // our Makefile contains the detected python, so just run the
    // rule from the makefile to pick it up
    $start = microtime(true);
    $future = new ExecFuture(
      "PATH=\"{$root}:\$PATH\" PYTHONPATH={$root}/python " .
      "TESTNAME={$path} \${MAKE:-make} py-tests");
    $future->setTimeout(10);
    list($status, $out, $err) = $future->resolve();
    $end = microtime(true);
    $res = new ArcanistUnitTestResult();
    $res->setName($path);
    // Combined stdout+stderr is surfaced as the test's user data so
    // failures carry the full process output.
    $res->setUserData($out . $err);
    $res->setDuration($end - $start);
    $res->setResult($status == 0 ?
      ArcanistUnitTestResult::RESULT_PASS :
      ArcanistUnitTestResult::RESULT_FAIL);
    $results[] = array($res);
  }

  // Run the ruby spec(s) via the Makefile's rb-tests rule.
  foreach ($paths as $path) {
    if (!preg_match('/\\.rb$/', $path)) {
      continue;
    }
    if (!file_exists($path)) {
      // Was deleted in this (pending) rev
      continue;
    }
    $start = microtime(true);
    // NOTE(review): unlike py-tests, no TESTNAME is passed here, so
    // rb-tests presumably runs the whole ruby suite per matched path.
    $future = new ExecFuture(
      "PATH=\"{$root}:\$PATH\" \${MAKE:-make} rb-tests");
    $future->setTimeout(10);
    list($status, $out, $err) = $future->resolve();
    $end = microtime(true);
    $res = new ArcanistUnitTestResult();
    $res->setName($path);
    $res->setUserData($out . $err);
    $res->setDuration($end - $start);
    $res->setResult($status == 0 ?
      ArcanistUnitTestResult::RESULT_PASS :
      ArcanistUnitTestResult::RESULT_FAIL);
    $results[] = array($res);
  }

  // Collect valgrind results from every instance we started.
  foreach ($instances as $instance) {
    $results[] = $instance->generateValgrindTestResults();
  }

  // Flatten the list-of-lists into a single flat result list.
  $results = array_mergev($results);
  return $results;
}