public function custom_test_support_check()
{
	// As of Phoronix Test Suite 4.4, a test profile may bundle an optional
	// 'support-check.sh' script performing arbitrary compatibility probes not
	// covered by the rest of the PTS testing architecture (e.g. checking that
	// systemd is present on the target system). If the script determines the
	// system is unsupported, it writes a custom error message to the file named
	// by the $TEST_CUSTOM_ERROR environment variable; that message is returned
	// here so the client can abort the test installation with it.
	// Returns true when no support-check script exists or the check passed.
	$script_path = $this->get_resource_dir() . 'support-check.sh';

	if (!(PTS_IS_CLIENT && is_file($script_path))) {
		// Nothing to check (or not running as the PTS client): assume supported.
		return true;
	}

	// Hand the script a unique scratch file it can write a failure reason into.
	$env = array('TEST_CUSTOM_ERROR' => pts_client::temporary_directory() . '/PTS-' . $this->get_identifier_base_name() . '-' . rand(1000, 9999));
	pts_tests::call_test_script($this, 'support-check', null, null, $env, false);

	if (is_file($env['TEST_CUSTOM_ERROR'])) {
		// The script flagged the system as unsupported; relay its message.
		$failure_message = pts_file_io::file_get_contents($env['TEST_CUSTOM_ERROR']);
		pts_file_io::delete($env['TEST_CUSTOM_ERROR']);
		return $failure_message;
	}

	return true;
}
/**
 * Install a single test profile: verify free disk space, download the needed
 * files, run the profile's install script, and validate the installation.
 *
 * @param object $test_install_request  Install request wrapping the test profile;
 *                                      modified in place (install_time_duration,
 *                                      install_error are written on this object).
 * @param bool   $no_prompts            When true, the interactive license-agreement
 *                                      prompt is skipped (agreement is still echoed).
 * @return bool  true if the test installed (or the profile ships no installer),
 *               false on any failure (space, download, agreement, installer error,
 *               or failed validate-install check).
 */
protected static function install_test_process(&$test_install_request, $no_prompts)
{
	// Install a test
	$identifier = $test_install_request->test_profile->get_identifier();
	$test_install_directory = $test_install_request->test_profile->get_install_dir();
	pts_file_io::mkdir(dirname($test_install_directory));
	pts_file_io::mkdir($test_install_directory);
	$installed = false;
	// Space checks are in MiB (1048576 bytes) with a 128 MiB safety margin.
	if (ceil(disk_free_space($test_install_directory) / 1048576) < $test_install_request->test_profile->get_download_size() + 128)
	{
		self::test_install_error(null, $test_install_request, 'There is not enough space at ' . $test_install_directory . ' for the test files.');
	}
	else
	{
		// Second check: space for the unpacked/built environment, not just the downloads.
		if (ceil(disk_free_space($test_install_directory) / 1048576) < $test_install_request->test_profile->get_environment_size(false) + 128)
		{
			self::test_install_error(null, $test_install_request, 'There is not enough space at ' . $test_install_directory . ' for this test.');
		}
		else
		{
			pts_test_installer::setup_test_install_directory($test_install_request, true);

			// Download test files
			$download_test_files = pts_test_installer::download_test_files($test_install_request, false, $no_prompts);

			if ($download_test_files == false)
			{
				self::test_install_error(null, $test_install_request, 'Downloading of needed test files failed.');
				return false;
			}

			if ($test_install_request->test_profile->get_file_installer() != false)
			{
				self::create_compiler_mask($test_install_request);
				pts_module_manager::module_process('__pre_test_install', $identifier);
				pts_client::$display->test_install_begin($test_install_request);

				$pre_install_message = $test_install_request->test_profile->get_pre_install_message();
				$post_install_message = $test_install_request->test_profile->get_post_install_message();
				$install_agreement = $test_install_request->test_profile->get_installation_agreement_message();

				if (!empty($install_agreement))
				{
					// The agreement may be an URL pointing at the actual agreement text.
					if (pts_strings::is_url($install_agreement))
					{
						$install_agreement = pts_network::http_get_contents($install_agreement);

						if (empty($install_agreement))
						{
							self::test_install_error(null, $test_install_request, 'The user agreement could not be found. Test installation aborted.');
							return false;
						}
					}

					echo $install_agreement . PHP_EOL;
					if (!$no_prompts)
					{
						$user_agrees = pts_user_io::prompt_bool_input('Do you agree to these terms', false, 'INSTALL_AGREEMENT');

						if (!$user_agrees)
						{
							self::test_install_error(null, $test_install_request, 'User agreement failed; this test will not be installed.');
							return false;
						}
					}
				}

				pts_client::$display->display_interrupt_message($pre_install_message);
				$install_time_length_start = microtime(true);
				// Run the profile's install script; returns captured script output (if any).
				$install_log = pts_tests::call_test_script($test_install_request->test_profile, 'install', null, $test_install_directory, $test_install_request->special_environment_vars, false);
				$test_install_request->install_time_duration = ceil(microtime(true) - $install_time_length_start);
				pts_client::$display->display_interrupt_message($post_install_message);

				if (!empty($install_log))
				{
					file_put_contents($test_install_directory . 'install.log', $install_log);
					pts_file_io::unlink($test_install_directory . 'install-failed.log');
					pts_client::$display->test_install_output($install_log);
				}

				if (is_file($test_install_directory . 'install-exit-status'))
				{
					// If the installer writes its exit status to ~/install-exit-status, if it's non-zero the install failed
					$install_exit_status = pts_file_io::file_get_contents($test_install_directory . 'install-exit-status');
					unlink($test_install_directory . 'install-exit-status');

					// NOTE(review): exit-status failures are deliberately ignored on Windows here.
					if ($install_exit_status != 0 && phodevi::is_windows() == false)
					{
						$install_error = null;

						// TODO: perhaps better way to handle this than to remove pts-install.xml
						pts_file_io::unlink($test_install_directory . 'pts-install.xml');

						if (is_file($test_install_directory . 'install.log'))
						{
							$install_log = pts_file_io::file_get_contents($test_install_directory . 'install.log');
							$install_error = pts_tests::scan_for_error($install_log, $test_install_directory);
							copy($test_install_directory . 'install.log', $test_install_directory . 'install-failed.log');
						}

						//pts_test_installer::setup_test_install_directory($test_install_request, true); // Remove installed files from the bunked installation
						self::test_install_error(null, $test_install_request, 'The installer exited with a non-zero exit status.');

						if ($install_error != null)
						{
							$test_install_request->install_error = pts_tests::pretty_error_string($install_error);

							if ($test_install_request->install_error != null)
							{
								self::test_install_error(null, $test_install_request, 'ERROR: ' . $test_install_request->install_error);
							}
						}
						pts_client::$display->test_install_error('LOG: ' . str_replace(pts_core::user_home_directory(), '~/', $test_install_directory) . 'install-failed.log' . PHP_EOL);

						if (pts_client::do_anonymous_usage_reporting())
						{
							// If anonymous usage reporting enabled, report test install failure to OpenBenchmarking.org
							pts_openbenchmarking_client::upload_usage_data('test_install_failure', array($test_install_request, $install_error));
						}
						return false;
					}
				}

				pts_module_manager::module_process('__post_test_install', $identifier);
				$installed = true;

				if (pts_config::read_bool_config('PhoronixTestSuite/Options/Installation/RemoveDownloadFiles', 'FALSE'))
				{
					// Remove original downloaded files
					foreach ($test_install_request->get_download_objects() as $download_object)
					{
						pts_file_io::unlink($test_install_directory . $download_object->get_filename());
					}
				}
			}
			else
			{
				// Profile ships no install script: nothing to run, treat as installed.
				pts_client::$display->test_install_error('No installation script found.');
				$installed = true;
			}

			// Additional validation checks?
			// A non-empty, non-true-ish output from validate-install marks the install as failed.
			$custom_validated_output = pts_tests::call_test_script($test_install_request->test_profile, 'validate-install', PHP_EOL . 'Validating Installation...' . PHP_EOL, $test_install_directory, null, false);
			if (!empty($custom_validated_output) && !pts_strings::string_bool($custom_validated_output))
			{
				$installed = false;
			}
		}
	}
	echo PHP_EOL;

	return $installed;
}
/**
 * Execute a test profile: run pre/interim/post scripts, launch the test binary
 * for the configured number of runs (optionally restoring results from a cache
 * share), parse each run's result into the active result buffer, dynamically
 * extend the run count when needed, and finalize the aggregate result.
 *
 * @param object $test_run_manager  Orchestrating run manager (results identifier,
 *                                  dynamic run count policy, stress-run mode).
 * @param object $test_run_request  The test to run; heavily mutated (active result
 *                                  buffer, used arguments, standard output, etc.).
 * @return object|false  The active pts_test_result_buffer_active on completion,
 *                       or false on any fatal error/abort along the way.
 */
public static function run_test(&$test_run_manager, &$test_run_request)
{
	$test_identifier = $test_run_request->test_profile->get_identifier();
	$extra_arguments = $test_run_request->get_arguments();
	$arguments_description = $test_run_request->get_arguments_description();
	$full_output = pts_config::read_bool_config('PhoronixTestSuite/Options/General/FullOutput', 'FALSE');

	// Do the actual test running process
	$test_directory = $test_run_request->test_profile->get_install_dir();

	if (!is_dir($test_directory))
	{
		return false;
	}

	// A per-test lock prevents concurrent executions of the same test,
	// except when the manager is intentionally doing a multi-test stress run.
	$lock_file = $test_directory . 'run_lock';
	if (pts_client::create_lock($lock_file) == false && $test_run_manager->is_multi_test_stress_run() == false)
	{
		self::test_run_error($test_run_manager, $test_run_request, 'The ' . $test_identifier . ' test is already running.');
		return false;
	}

	$active_result_buffer = new pts_test_result_buffer_active();
	$test_run_request->active =& $active_result_buffer;
	$execute_binary = $test_run_request->test_profile->get_test_executable();
	$times_to_run = $test_run_request->test_profile->get_times_to_run();
	$ignore_runs = $test_run_request->test_profile->get_runs_to_ignore();
	$test_type = $test_run_request->test_profile->get_test_hardware_type();
	$allow_cache_share = $test_run_request->test_profile->allow_cache_share();
	$min_length = $test_run_request->test_profile->get_min_length();
	$max_length = $test_run_request->test_profile->get_max_length();

	// Disk check in MiB (1048576 bytes) against the profile's testing-time footprint.
	if ($test_run_request->test_profile->get_environment_testing_size() > 1 && ceil(disk_free_space($test_directory) / 1048576) < $test_run_request->test_profile->get_environment_testing_size())
	{
		// Ensure enough space is available on disk during testing process
		self::test_run_error($test_run_manager, $test_run_request, 'There is not enough space (at ' . $test_directory . ') for this test to run.');
		pts_client::release_lock($lock_file);
		return false;
	}

	$to_execute = $test_run_request->test_profile->get_test_executable_dir();
	// Default args + user extras (with defaults stripped from extras to avoid duplication) + post args.
	$pts_test_arguments = trim($test_run_request->test_profile->get_default_arguments() . ' ' . str_replace($test_run_request->test_profile->get_default_arguments(), '', $extra_arguments) . ' ' . $test_run_request->test_profile->get_default_post_arguments());
	$extra_runtime_variables = pts_tests::extra_environmental_variables($test_run_request->test_profile);

	// Start
	// Cache shares let identical runs within one PTS invocation (PTS_INIT_TIME) reuse recorded results.
	$cache_share_pt2so = $test_directory . 'cache-share-' . PTS_INIT_TIME . '.pt2so';
	$cache_share_present = $allow_cache_share && is_file($cache_share_pt2so);
	$test_run_request->set_used_arguments_description($arguments_description);
	pts_module_manager::module_process('__pre_test_run', $test_run_request);

	$time_test_start = time();
	pts_client::$display->test_run_start($test_run_manager, $test_run_request);

	if (!$cache_share_present)
	{
		$pre_output = pts_tests::call_test_script($test_run_request->test_profile, 'pre', 'Running Pre-Test Script', $pts_test_arguments, $extra_runtime_variables, true);

		if ($pre_output != null && (pts_client::is_debug_mode() || $full_output))
		{
			pts_client::$display->test_run_instance_output($pre_output);
		}

		if (is_file($test_directory . 'pre-test-exit-status'))
		{
			// If the pre script writes its exit status to ~/pre-test-exit-status, if it's non-zero the test run failed
			$exit_status = pts_file_io::file_get_contents($test_directory . 'pre-test-exit-status');
			unlink($test_directory . 'pre-test-exit-status');

			if ($exit_status != 0)
			{
				self::test_run_instance_error($test_run_manager, $test_run_request, 'The pre run script exited with a non-zero exit status.' . PHP_EOL);
				self::test_run_error($test_run_manager, $test_run_request, 'This test execution has been abandoned.');
				return false;
			}
		}
	}

	pts_client::$display->display_interrupt_message($test_run_request->test_profile->get_pre_run_message());
	$runtime_identifier = time();
	$execute_binary_prepend = '';

	if ($test_run_request->exec_binary_prepend != null)
	{
		$execute_binary_prepend = $test_run_request->exec_binary_prepend;
	}

	if (!$cache_share_present && $test_run_request->test_profile->is_root_required())
	{
		if (phodevi::is_root() == false)
		{
			pts_client::$display->test_run_error('This test must be run as the root / administrator account.');
		}

		// Wrap the binary invocation via root-access.sh to elevate privileges.
		$execute_binary_prepend .= ' ' . PTS_CORE_STATIC_PATH . 'root-access.sh ';
	}

	if ($allow_cache_share && !is_file($cache_share_pt2so))
	{
		// No cache yet: record this execution's results so later runs can reuse them.
		$cache_share = new pts_storage_object(false, false);
	}

	if ($test_run_manager->get_results_identifier() != null && $test_run_manager->get_file_name() != null && pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/SaveTestLogs', 'FALSE'))
	{
		$backup_test_log_dir = PTS_SAVE_RESULTS_PATH . $test_run_manager->get_file_name() . '/test-logs/active/' . $test_run_manager->get_results_identifier() . '/';
		pts_file_io::delete($backup_test_log_dir);
		pts_file_io::mkdir($backup_test_log_dir, 0777, true);
	}
	else
	{
		$backup_test_log_dir = false;
	}

	// Main run loop. $times_to_run may grow at runtime (dynamic run count), hard-capped at 256.
	for ($i = 0, $abort_testing = false, $time_test_start_actual = time(), $defined_times_to_run = $times_to_run; $i < $times_to_run && $i < 256 && !$abort_testing; $i++)
	{
		pts_client::$display->test_run_instance_header($test_run_request);
		$test_log_file = $test_directory . basename($test_identifier) . '-' . $runtime_identifier . '-' . ($i + 1) . '.log';
		$is_expected_last_run = $i == $times_to_run - 1;
		$test_extra_runtime_variables = array_merge($extra_runtime_variables, array('LOG_FILE' => $test_log_file, 'DISPLAY' => getenv('DISPLAY'), 'PATH' => getenv('PATH')));
		$restored_from_cache = false;

		if ($cache_share_present)
		{
			// Replay run $i's result and log file from the cache share instead of executing.
			$cache_share = pts_storage_object::recover_from_file($cache_share_pt2so);

			if ($cache_share)
			{
				$test_result = $cache_share->read_object('test_results_output_' . $i);
				$test_extra_runtime_variables['LOG_FILE'] = $cache_share->read_object('log_file_location_' . $i);

				if ($test_extra_runtime_variables['LOG_FILE'] != null)
				{
					file_put_contents($test_extra_runtime_variables['LOG_FILE'], $cache_share->read_object('log_file_' . $i));
					$test_run_time = 0; // This wouldn't be used for a cache share since it would always be the same, but declare the value so the variable is at least initialized
					$restored_from_cache = true;
				}
			}

			unset($cache_share);
		}

		if ($restored_from_cache == false)
		{
			$test_run_command = 'cd ' . $to_execute . ' && ' . $execute_binary_prepend . './' . $execute_binary . ' ' . $pts_test_arguments . ' 2>&1';

			pts_client::test_profile_debug_message('Test Run Command: ' . $test_run_command);
			$is_monitoring = pts_test_result_parser::system_monitor_task_check($test_run_request->test_profile);
			$test_run_time_start = time();

			if (phodevi::is_windows() || pts_client::read_env('USE_PHOROSCRIPT_INTERPRETER') != false)
			{
				// Windows (or forced): interpret the test script via the PhoroScript interpreter.
				$phoroscript = new pts_phoroscript_interpreter($to_execute . '/' . $execute_binary, $test_extra_runtime_variables, $to_execute);
				$phoroscript->execute_script($pts_test_arguments);
				$test_result = null;
			}
			else
			{
				//$test_result = pts_client::shell_exec($test_run_command, $test_extra_runtime_variables);
				// Run the binary via proc_open so stdout can be streamed and modules can
				// observe the live process handle ('exec' avoids an intermediate shell process).
				$descriptorspec = array(0 => array('pipe', 'r'), 1 => array('pipe', 'w'), 2 => array('pipe', 'w'));
				$test_process = proc_open('exec ' . $execute_binary_prepend . './' . $execute_binary . ' ' . $pts_test_arguments . ' 2>&1', $descriptorspec, $pipes, $to_execute, array_merge($_ENV, pts_client::environmental_variables(), $test_extra_runtime_variables));

				if (is_resource($test_process))
				{
					//echo proc_get_status($test_process)['pid'];
					pts_module_manager::module_process('__test_running', $test_process);
					$test_result = stream_get_contents($pipes[1]);
					fclose($pipes[1]);
					fclose($pipes[2]);
					$return_value = proc_close($test_process);
				}
			}

			$test_run_time = time() - $test_run_time_start;
			$monitor_result = $is_monitoring ? pts_test_result_parser::system_monitor_task_post_test($test_run_request->test_profile) : 0;
		}

		// Echo captured stdout when it is short (< 10 KiB) or verbose output is requested.
		if (!isset($test_result[10240]) || pts_client::is_debug_mode() || $full_output)
		{
			pts_client::$display->test_run_instance_output($test_result);
		}

		if (is_file($test_log_file) && trim($test_result) == null && (filesize($test_log_file) < 10240 || pts_client::is_debug_mode() || $full_output))
		{
			// stdout was empty; fall back to showing the log file contents.
			$test_log_file_contents = file_get_contents($test_log_file);
			pts_client::$display->test_run_instance_output($test_log_file_contents);
			unset($test_log_file_contents);
		}

		$test_run_request->test_result_standard_output = $test_result;

		$exit_status_pass = true;
		if (is_file($test_directory . 'test-exit-status'))
		{
			// If the test script writes its exit status to ~/test-exit-status, if it's non-zero the test run failed
			$exit_status = pts_file_io::file_get_contents($test_directory . 'test-exit-status');
			unlink($test_directory . 'test-exit-status');

			if ($exit_status != 0)
			{
				self::test_run_instance_error($test_run_manager, $test_run_request, 'The test exited with a non-zero exit status.');
				if ($is_expected_last_run && is_file($test_log_file))
				{
					$scan_log = pts_file_io::file_get_contents($test_log_file);
					$test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir());

					if ($test_run_error)
					{
						self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . $test_run_error);
					}
				}
				$exit_status_pass = false;
			}
		}

		if (!in_array($i + 1, $ignore_runs) && $exit_status_pass)
		{
			if (isset($monitor_result) && $monitor_result != 0)
			{
				// A system monitor (e.g. power/thermal) supplies the result instead of the log parser.
				$test_run_request->active->active_result = $monitor_result;
			}
			else
			{
				pts_test_result_parser::parse_result($test_run_request, $test_extra_runtime_variables['LOG_FILE']);
			}

			pts_client::test_profile_debug_message('Test Result Value: ' . $test_run_request->active->active_result);

			if (!empty($test_run_request->active->active_result))
			{
				if ($test_run_time < 2 && intval($test_run_request->active->active_result) == $test_run_request->active->active_result && $test_run_request->test_profile->get_estimated_run_time() > 60 && !$restored_from_cache)
				{
					// If the test ended in less than two seconds, outputted some int, and normally the test takes much longer, then it's likely some invalid run
					self::test_run_instance_error($test_run_manager, $test_run_request, 'The test run ended prematurely.');
					if ($is_expected_last_run && is_file($test_log_file))
					{
						$scan_log = pts_file_io::file_get_contents($test_log_file);
						$test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir());

						if ($test_run_error)
						{
							self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . $test_run_error);
						}
					}
				}
				else
				{
					// TODO integrate active_result into active buffer
					$active_result_buffer->add_trial_run_result($test_run_request->active->active_result, $test_run_request->active->active_min_result, $test_run_request->active->active_max_result);
				}
			}
			else
			{
				if ($test_run_request->test_profile->get_display_format() != 'NO_RESULT')
				{
					self::test_run_instance_error($test_run_manager, $test_run_request, 'The test run did not produce a result.');
					if ($is_expected_last_run && is_file($test_log_file))
					{
						$scan_log = pts_file_io::file_get_contents($test_log_file);
						$test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir());

						if ($test_run_error)
						{
							self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . $test_run_error);
						}
					}
				}
			}

			if ($allow_cache_share && !is_file($cache_share_pt2so))
			{
				// Record this run's output, log location, and log contents for future cache replay.
				$cache_share->add_object('test_results_output_' . $i, $test_run_request->active->active_result);
				$cache_share->add_object('log_file_location_' . $i, $test_extra_runtime_variables['LOG_FILE']);
				$cache_share->add_object('log_file_' . $i, is_file($test_log_file) ? file_get_contents($test_log_file) : null);
			}
		}

		if ($is_expected_last_run && $active_result_buffer->get_trial_run_count() > floor(($i - 2) / 2) && !$cache_share_present && $test_run_manager->do_dynamic_run_count())
		{
			// The later check above ensures if the test is failing often the run count won't uselessly be increasing
			// Should we increase the run count?
			$increase_run_count = false;

			if ($defined_times_to_run == $i + 1 && $active_result_buffer->get_trial_run_count() > 0 && $active_result_buffer->get_trial_run_count() < $defined_times_to_run && $i < 64)
			{
				// At least one run passed, but at least one run failed to produce a result. Increase count to try to get more successful runs
				$increase_run_count = $defined_times_to_run - $active_result_buffer->get_trial_run_count();
			}
			else
			{
				if ($active_result_buffer->get_trial_run_count() >= 2)
				{
					// Dynamically increase run count if needed for statistical significance or other reasons
					$increase_run_count = $test_run_manager->increase_run_count_check($active_result_buffer, $defined_times_to_run, $test_run_time);

					if ($increase_run_count === -1)
					{
						// Manager signals the results are hopeless; stop testing entirely.
						$abort_testing = true;
					}
					else
					{
						if ($increase_run_count == true)
						{
							// Just increase the run count one at a time
							$increase_run_count = 1;
						}
					}
				}
			}

			if ($increase_run_count > 0)
			{
				$times_to_run += $increase_run_count;
				$is_expected_last_run = false;
				//$test_run_request->test_profile->set_times_to_run($times_to_run);
			}
		}

		if ($times_to_run > 1 && $i < $times_to_run - 1)
		{
			if ($cache_share_present == false)
			{
				$interim_output = pts_tests::call_test_script($test_run_request->test_profile, 'interim', 'Running Interim Test Script', $pts_test_arguments, $extra_runtime_variables, true);

				if ($interim_output != null && (pts_client::is_debug_mode() || $full_output))
				{
					pts_client::$display->test_run_instance_output($interim_output);
				}
				//sleep(2); // Rest for a moment between tests
			}

			pts_module_manager::module_process('__interim_test_run', $test_run_request);
		}

		if (is_file($test_log_file))
		{
			if ($is_expected_last_run)
			{
				// For now just passing the last test log file...
				// TODO XXX: clean this up with log files to preserve when needed, let multiple log files exist for extra_data, etc
				pts_test_result_parser::generate_extra_data($test_run_request, $test_log_file);
			}

			if ($backup_test_log_dir)
			{
				copy($test_log_file, $backup_test_log_dir . basename($test_log_file));
			}

			// In debug mode the message returns non-false and the log is kept for inspection.
			if (pts_client::test_profile_debug_message('Log File At: ' . $test_log_file) == false)
			{
				unlink($test_log_file);
			}
		}

		// User-dropped sentinel files allow halting all testing or skipping this test.
		if (is_file(PTS_USER_PATH . 'halt-testing') || is_file(PTS_USER_PATH . 'skip-test'))
		{
			pts_client::release_lock($lock_file);
			return false;
		}

		pts_client::$display->test_run_instance_complete($test_run_request);
	}

	$time_test_end_actual = time();

	if ($cache_share_present == false)
	{
		$post_output = pts_tests::call_test_script($test_run_request->test_profile, 'post', 'Running Post-Test Script', $pts_test_arguments, $extra_runtime_variables, true);

		if ($post_output != null && (pts_client::is_debug_mode() || $full_output))
		{
			pts_client::$display->test_run_instance_output($post_output);
		}

		if (is_file($test_directory . 'post-test-exit-status'))
		{
			// If the post script writes its exit status to ~/post-test-exit-status, if it's non-zero the test run failed
			$exit_status = pts_file_io::file_get_contents($test_directory . 'post-test-exit-status');
			unlink($test_directory . 'post-test-exit-status');

			if ($exit_status != 0)
			{
				self::test_run_instance_error($test_run_manager, $test_run_request, 'The post run script exited with a non-zero exit status.' . PHP_EOL);
				$abort_testing = true;
			}
		}
	}

	if ($abort_testing)
	{
		self::test_run_error($test_run_manager, $test_run_request, 'This test execution has been abandoned.');
		return false;
	}

	// End
	$time_test_end = time();
	$time_test_elapsed = $time_test_end - $time_test_start;
	// "Actual" elapsed excludes pre/post script time: it spans only the run loop.
	$time_test_elapsed_actual = $time_test_end_actual - $time_test_start_actual;

	if (!empty($min_length))
	{
		if ($min_length > $time_test_elapsed_actual)
		{
			// The test ended too quickly, results are not valid
			self::test_run_error($test_run_manager, $test_run_request, 'This test ended prematurely.');
			return false;
		}
	}

	if (!empty($max_length))
	{
		if ($max_length < $time_test_elapsed_actual)
		{
			// The test took too much time, results are not valid
			self::test_run_error($test_run_manager, $test_run_request, 'This test run was exhausted.');
			return false;
		}
	}

	if ($allow_cache_share && !is_file($cache_share_pt2so) && $cache_share instanceof pts_storage_object)
	{
		$cache_share->save_to_file($cache_share_pt2so);
		unset($cache_share);
	}

	if ($test_run_manager->get_results_identifier() != null && pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/SaveInstallationLogs', 'FALSE'))
	{
		if (is_file($test_run_request->test_profile->get_install_dir() . 'install.log'))
		{
			$backup_log_dir = PTS_SAVE_RESULTS_PATH . $test_run_manager->get_file_name() . '/installation-logs/' . $test_run_manager->get_results_identifier() . '/';
			pts_file_io::mkdir($backup_log_dir, 0777, true);
			copy($test_run_request->test_profile->get_install_dir() . 'install.log', $backup_log_dir . basename($test_identifier) . '.log');
		}
	}

	// Fill in missing test details
	if (empty($arguments_description))
	{
		$arguments_description = $test_run_request->test_profile->get_test_subtitle();
	}

	// Test scripts may drop small files in the install dir to override result metadata;
	// each tuple is (file name, profile setter, run-request setter).
	$file_var_checks = array(array('pts-results-scale', 'set_result_scale', null), array('pts-results-proportion', 'set_result_proportion', null), array('pts-results-quantifier', 'set_result_quantifier', null), array('pts-test-version', 'set_version', null), array('pts-test-description', null, 'set_used_arguments_description'), array('pts-footnote', null, null));

	foreach ($file_var_checks as &$file_check)
	{
		list($file, $set_function, $result_set_function) = $file_check;

		if (is_file($test_directory . $file))
		{
			$file_contents = pts_file_io::file_get_contents($test_directory . $file);
			unlink($test_directory . $file);

			if (!empty($file_contents))
			{
				if ($set_function != null)
				{
					call_user_func(array($test_run_request->test_profile, $set_function), $file_contents);
				}
				else
				{
					if ($result_set_function != null)
					{
						if ($result_set_function == 'set_used_arguments_description')
						{
							// Defer: $arguments_description is applied near the end of this method.
							$arguments_description = $file_contents;
						}
						else
						{
							call_user_func(array($test_run_request, $result_set_function), $file_contents);
						}
					}
					else
					{
						if ($file == 'pts-footnote')
						{
							$test_run_request->test_profile->test_installation->set_install_footnote($file_contents);
						}
					}
				}
			}
		}
	}

	if (empty($arguments_description))
	{
		$arguments_description = 'Phoronix Test Suite v' . PTS_VERSION;
	}

	// Expand $ENV_VAR placeholders in the description and (for most vars) the arguments.
	foreach (pts_client::environmental_variables() as $key => $value)
	{
		$arguments_description = str_replace('$' . $key, $value, $arguments_description);

		if (!in_array($key, array('VIDEO_MEMORY', 'NUM_CPU_CORES', 'NUM_CPU_JOBS')))
		{
			$extra_arguments = str_replace('$' . $key, $value, $extra_arguments);
		}
	}

	// Any device notes to add to PTS test notes area?
	foreach (phodevi::read_device_notes($test_type) as $note)
	{
		pts_test_notes_manager::add_note($note);
	}

	// As of PTS 4.4, this is removed and superceded effectively by reporting the notes to table
	// Any special information (such as forced AA/AF levels for graphics) to add to the description string of the result?
	/* if(($special_string = phodevi::read_special_settings_string($test_type)) != null) { if(strpos($arguments_description, $special_string) === false) { if($arguments_description != null) { $arguments_description .= ' | '; } $arguments_description .= $special_string; } } */

	// Result Calculation
	$test_run_request->set_used_arguments_description($arguments_description);
	$test_run_request->set_used_arguments($extra_arguments);
	pts_test_result_parser::calculate_end_result($test_run_request, $active_result_buffer); // Process results
	pts_client::$display->test_run_end($test_run_request);
	pts_client::$display->display_interrupt_message($test_run_request->test_profile->get_post_run_message());
	pts_module_manager::module_process('__post_test_run', $test_run_request);
	// Only report elapsed time when the run actually executed (no cache) and produced a result.
	$report_elapsed_time = $cache_share_present == false && $test_run_request->active->get_result() != 0;
	pts_tests::update_test_install_xml($test_run_request->test_profile, $report_elapsed_time ? $time_test_elapsed : 0);
	pts_storage_object::add_in_file(PTS_CORE_STORAGE, 'total_testing_time', $time_test_elapsed / 60);

	if ($report_elapsed_time && pts_client::do_anonymous_usage_reporting() && $time_test_elapsed >= 60)
	{
		// If anonymous usage reporting enabled, report test run-time to OpenBenchmarking.org
		pts_openbenchmarking_client::upload_usage_data('test_complete', array($test_run_request, $time_test_elapsed));
	}

	// Remove lock
	pts_client::release_lock($lock_file);

	return $active_result_buffer;
}