/**
 * Initialize the client flag state.
 *
 * Assigns a distinct bit position to each supported flag, applies a canned
 * flag set for known live-CD images (matched by the SHA1 of the system
 * vendor identifier), and then folds in any flags requested through
 * environment variables or the user configuration file.
 */
public static function init()
{
	self::$flags = 0;
	self::$os_identifier_sha1 = sha1(phodevi::read_property('system', 'vendor-identifier'));

	// One bit per flag
	self::$is_live_cd = 1 << 1;
	self::$no_network_communication = 1 << 2;
	self::$no_openbenchmarking_reporting = 1 << 3;
	self::$user_agreement_skip = 1 << 4;
	self::$skip_md5_checks = 1 << 5;
	self::$remove_test_on_completion = 1 << 6;
	self::$no_phodevi_cache = 1 << 7;
	self::$no_external_dependencies = 1 << 8;
	self::$upload_to_openbenchmarking = 1 << 9;

	// Recognized live-CD/demo images get a pre-defined flag set
	if (self::$os_identifier_sha1 == 'b28d6a7148b34595c5b397dfcf5b12ac7932b3dc')
	{
		// Moscow 2011-04 client
		self::$flags = self::$is_live_cd | self::$no_network_communication | self::$no_openbenchmarking_reporting | self::$user_agreement_skip | self::$skip_md5_checks | self::$remove_test_on_completion;
	}

	// Environment variable overrides
	if (pts_client::read_env('NO_FILE_HASH_CHECKS') != false || pts_client::read_env('NO_MD5_CHECKS') != false)
	{
		self::$flags |= self::$skip_md5_checks;
	}

	// User configuration file options
	if (pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/RemoveTestInstallOnCompletion', 'FALSE'))
	{
		self::$flags |= self::$remove_test_on_completion;
	}
	if (pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/AlwaysUploadResultsToOpenBenchmarking', 'FALSE'))
	{
		self::$flags |= self::$upload_to_openbenchmarking;
	}

	if (pts_client::read_env('NO_PHODEVI_CACHE') != false)
	{
		self::$flags |= self::$no_phodevi_cache;
	}
	if (pts_client::read_env('NO_EXTERNAL_DEPENDENCIES') != false || pts_client::read_env('SKIP_EXTERNAL_DEPENDENCIES') == 1)
	{
		// NO_EXTERNAL_DEPENDENCIES was deprecated in PTS 3.6 and replaced by more versatile SKIP_EXTERNAL_DEPENDENCIES
		self::$flags |= self::$no_external_dependencies;
	}
}
/**
 * Install a single test: create its install directory, verify free disk
 * space, download the needed files, run the profile's install script
 * (after any user agreement prompt), and validate the result.
 *
 * @param object $test_install_request  Install request carrying the test_profile
 *                                      (passed by reference; install_error /
 *                                      install_time_duration are written back to it)
 * @param bool   $no_prompts            When true, skip interactive prompts such as
 *                                      the installation agreement confirmation
 * @return bool  True when the test installed (or no install script exists),
 *               false on any failure path
 */
protected static function install_test_process(&$test_install_request, $no_prompts)
{
	// Install a test
	$identifier = $test_install_request->test_profile->get_identifier();
	$test_install_directory = $test_install_request->test_profile->get_install_dir();
	pts_file_io::mkdir(dirname($test_install_directory));
	pts_file_io::mkdir($test_install_directory);
	$installed = false;

	// Disk space checks are in MiB (1048576 bytes) with a 128 MiB safety margin.
	// NOTE(review): disk_free_space() can return false on failure — then the
	// comparison silently passes; TODO confirm whether that is acceptable here.
	if (ceil(disk_free_space($test_install_directory) / 1048576) < $test_install_request->test_profile->get_download_size() + 128)
	{
		self::test_install_error(null, $test_install_request, 'There is not enough space at ' . $test_install_directory . ' for the test files.');
	}
	else
	{
		// Second check: enough room for the unpacked/installed environment itself
		if (ceil(disk_free_space($test_install_directory) / 1048576) < $test_install_request->test_profile->get_environment_size(false) + 128)
		{
			self::test_install_error(null, $test_install_request, 'There is not enough space at ' . $test_install_directory . ' for this test.');
		}
		else
		{
			pts_test_installer::setup_test_install_directory($test_install_request, true);

			// Download test files
			$download_test_files = pts_test_installer::download_test_files($test_install_request, false, $no_prompts);

			if ($download_test_files == false)
			{
				self::test_install_error(null, $test_install_request, 'Downloading of needed test files failed.');
				return false;
			}

			if ($test_install_request->test_profile->get_file_installer() != false)
			{
				self::create_compiler_mask($test_install_request);
				pts_module_manager::module_process('__pre_test_install', $identifier);
				pts_client::$display->test_install_begin($test_install_request);

				$pre_install_message = $test_install_request->test_profile->get_pre_install_message();
				$post_install_message = $test_install_request->test_profile->get_post_install_message();
				$install_agreement = $test_install_request->test_profile->get_installation_agreement_message();

				if (!empty($install_agreement))
				{
					// The agreement may be inline text or a URL to fetch
					if (pts_strings::is_url($install_agreement))
					{
						$install_agreement = pts_network::http_get_contents($install_agreement);

						if (empty($install_agreement))
						{
							self::test_install_error(null, $test_install_request, 'The user agreement could not be found. Test installation aborted.');
							return false;
						}
					}

					echo $install_agreement . PHP_EOL;

					// Require explicit user consent unless running non-interactively
					if (!$no_prompts)
					{
						$user_agrees = pts_user_io::prompt_bool_input('Do you agree to these terms', false, 'INSTALL_AGREEMENT');

						if (!$user_agrees)
						{
							self::test_install_error(null, $test_install_request, 'User agreement failed; this test will not be installed.');
							return false;
						}
					}
				}

				pts_client::$display->display_interrupt_message($pre_install_message);
				$install_time_length_start = microtime(true);
				$install_log = pts_tests::call_test_script($test_install_request->test_profile, 'install', null, $test_install_directory, $test_install_request->special_environment_vars, false);
				$test_install_request->install_time_duration = ceil(microtime(true) - $install_time_length_start);
				pts_client::$display->display_interrupt_message($post_install_message);

				if (!empty($install_log))
				{
					// Keep the fresh install log and clear any stale failure log
					file_put_contents($test_install_directory . 'install.log', $install_log);
					pts_file_io::unlink($test_install_directory . 'install-failed.log');
					pts_client::$display->test_install_output($install_log);
				}

				if (is_file($test_install_directory . 'install-exit-status'))
				{
					// If the installer writes its exit status to ~/install-exit-status, if it's non-zero the install failed
					$install_exit_status = pts_file_io::file_get_contents($test_install_directory . 'install-exit-status');
					unlink($test_install_directory . 'install-exit-status');

					// Exit status is ignored on Windows
					if ($install_exit_status != 0 && phodevi::is_windows() == false)
					{
						$install_error = null;

						// TODO: perhaps better way to handle this than to remove pts-install.xml
						pts_file_io::unlink($test_install_directory . 'pts-install.xml');

						if (is_file($test_install_directory . 'install.log'))
						{
							// Scan the log for a recognizable error and preserve it as install-failed.log
							$install_log = pts_file_io::file_get_contents($test_install_directory . 'install.log');
							$install_error = pts_tests::scan_for_error($install_log, $test_install_directory);
							copy($test_install_directory . 'install.log', $test_install_directory . 'install-failed.log');
						}

						//pts_test_installer::setup_test_install_directory($test_install_request, true); // Remove installed files from the bunked installation
						self::test_install_error(null, $test_install_request, 'The installer exited with a non-zero exit status.');

						if ($install_error != null)
						{
							$test_install_request->install_error = pts_tests::pretty_error_string($install_error);

							if ($test_install_request->install_error != null)
							{
								self::test_install_error(null, $test_install_request, 'ERROR: ' . $test_install_request->install_error);
							}
						}
						pts_client::$display->test_install_error('LOG: ' . str_replace(pts_core::user_home_directory(), '~/', $test_install_directory) . 'install-failed.log' . PHP_EOL);

						if (pts_client::do_anonymous_usage_reporting())
						{
							// If anonymous usage reporting enabled, report test install failure to OpenBenchmarking.org
							pts_openbenchmarking_client::upload_usage_data('test_install_failure', array($test_install_request, $install_error));
						}
						return false;
					}
				}

				pts_module_manager::module_process('__post_test_install', $identifier);
				$installed = true;

				if (pts_config::read_bool_config('PhoronixTestSuite/Options/Installation/RemoveDownloadFiles', 'FALSE'))
				{
					// Remove original downloaded files
					foreach ($test_install_request->get_download_objects() as $download_object)
					{
						pts_file_io::unlink($test_install_directory . $download_object->get_filename());
					}
				}
			}
			else
			{
				// No install script: treat the test as installed after downloads
				pts_client::$display->test_install_error('No installation script found.');
				$installed = true;
			}

			// Additional validation checks?
			$custom_validated_output = pts_tests::call_test_script($test_install_request->test_profile, 'validate-install', PHP_EOL . 'Validating Installation...' . PHP_EOL, $test_install_directory, null, false);
			if (!empty($custom_validated_output) && !pts_strings::string_bool($custom_validated_output))
			{
				$installed = false;
			}
		}
	}
	echo PHP_EOL;

	return $installed;
}
/**
 * Whether the user has opted in to anonymous usage reporting.
 *
 * @return bool The AnonymousUsageReporting user-configuration option (defaults to off)
 */
public static function do_anonymous_usage_reporting()
{
	$reporting_enabled = pts_config::read_bool_config('PhoronixTestSuite/Options/OpenBenchmarking/AnonymousUsageReporting', 0);

	return $reporting_enabled;
}
/**
 * Upload a test result file to OpenBenchmarking.org, optionally attaching
 * zipped system logs, and report the resulting public URL.
 *
 * @param object $object            Either a pts_test_run_manager (uploads its saved
 *                                  result file) or a pts_result_file directly
 * @param bool   $return_json_data  When true, return the decoded 'upload' response
 *                                  array rather than just the URL
 * @return mixed Upload URL string, upload response array, or false on failure
 */
public static function upload_test_result(&$object, $return_json_data = false)
{
	// Normalize the input object into a result file + local identifiers
	if ($object instanceof pts_test_run_manager)
	{
		$result_file = new pts_result_file($object->get_file_name());
		$local_file_name = $object->get_file_name();
		$results_identifier = $object->get_results_identifier();
	}
	else
	{
		if ($object instanceof pts_result_file)
		{
			$result_file =& $object;
			$local_file_name = $result_file->get_identifier();
			$results_identifier = null;
		}
	}

	// Ensure the results can be shared
	if (self::result_upload_supported($result_file) == false)
	{
		return false;
	}

	if (pts_network::internet_support_available() == false)
	{
		echo PHP_EOL . 'No network support available.' . PHP_EOL;
		return false;
	}

	$composite_xml = $result_file->getRawXml();
	$system_log_dir = PTS_SAVE_RESULTS_PATH . $result_file->get_identifier() . '/system-logs/';
	$upload_system_logs = false;

	// Decide whether to attach system logs: config option first, then any
	// client settings default, finally an interactive prompt
	if (is_dir($system_log_dir))
	{
		if (pts_config::read_bool_config('PhoronixTestSuite/Options/OpenBenchmarking/AlwaysUploadSystemLogs', 'FALSE'))
		{
			$upload_system_logs = true;
		}
		else
		{
			if (isset(self::$client_settings['UploadSystemLogsByDefault']))
			{
				$upload_system_logs = self::$client_settings['UploadSystemLogsByDefault'];
			}
			else
			{
				if (is_dir($system_log_dir))
				{
					$upload_system_logs = pts_user_io::prompt_bool_input('Would you like to attach the system logs (lspci, dmesg, lsusb, etc) to the test result', true, 'UPLOAD_SYSTEM_LOGS');
				}
			}
		}
	}

	$system_logs = null;
	$system_logs_hash = null;
	if ($upload_system_logs)
	{
		// Sanity-check the log tree: only sub-directories of plain text files
		// are accepted (anything else aborts the attachment entirely)
		$is_valid_log = true;
		$finfo = function_exists('finfo_open') ? finfo_open(FILEINFO_MIME_TYPE) : false;

		foreach (pts_file_io::glob($system_log_dir . '*') as $log_dir)
		{
			if ($is_valid_log == false || !is_dir($log_dir))
			{
				$is_valid_log = false;
				break;
			}

			foreach (pts_file_io::glob($log_dir . '/*') as $log_file)
			{
				if (!is_file($log_file))
				{
					$is_valid_log = false;
					break;
				}

				// Reject any file whose detected MIME type is not text/*
				if ($finfo && substr(finfo_file($finfo, $log_file), 0, 5) != 'text/')
				{
					$is_valid_log = false;
					break;
				}
			}
		}

		if ($is_valid_log)
		{
			$system_logs_zip = pts_client::create_temporary_file('.zip');
			pts_compression::zip_archive_create($system_logs_zip, $system_log_dir);

			if (filesize($system_logs_zip) < 2097152)
			{
				// If it's over 2MB, probably too big
				$system_logs = base64_encode(file_get_contents($system_logs_zip));
				$system_logs_hash = sha1($system_logs);
			}
			else
			{
				trigger_error('The systems log attachment is too large to upload to OpenBenchmarking.org.', E_USER_WARNING);
			}

			unlink($system_logs_zip);
		}
	}

	$composite_xml_hash = sha1($composite_xml);
	$composite_xml_type = 'composite_xml';

	// Compress the result file XML if it's big
	if (isset($composite_xml[50000]) && function_exists('gzdeflate'))
	{
		$composite_xml_gz = gzdeflate($composite_xml);

		if ($composite_xml_gz != false)
		{
			$composite_xml = $composite_xml_gz;
			$composite_xml_type = 'composite_xml_gz';
		}
	}

	// Assemble the POST payload for the OpenBenchmarking.org API
	$to_post = array($composite_xml_type => base64_encode($composite_xml), 'composite_xml_hash' => $composite_xml_hash, 'local_file_name' => $local_file_name, 'this_results_identifier' => $results_identifier, 'system_logs_zip' => $system_logs, 'system_logs_hash' => $system_logs_hash);

	if (isset(self::$client_settings['ResultUploadsDefaultDisplayStatus']) && is_numeric(self::$client_settings['ResultUploadsDefaultDisplayStatus']))
	{
		$to_post['display_status'] = self::$client_settings['ResultUploadsDefaultDisplayStatus'];
	}

	$json_response = pts_openbenchmarking::make_openbenchmarking_request('upload_test_result', $to_post);
	$json_response = json_decode($json_response, true);

	if (!is_array($json_response))
	{
		trigger_error('Unhandled Exception', E_USER_ERROR);
		return false;
	}

	if (isset($json_response['openbenchmarking']['upload']['error']))
	{
		trigger_error($json_response['openbenchmarking']['upload']['error'], E_USER_ERROR);
	}

	if (isset($json_response['openbenchmarking']['upload']['url']))
	{
		echo PHP_EOL . 'Results Uploaded To: ' . $json_response['openbenchmarking']['upload']['url'] . PHP_EOL;
		pts_module_manager::module_process('__event_openbenchmarking_upload', $json_response);
	}

	//$json['openbenchmarking']['upload']['id']

	// Optionally delete the local copy once it lives on OpenBenchmarking.org
	if (isset(self::$client_settings['RemoveLocalResultsOnUpload']) && self::$client_settings['RemoveLocalResultsOnUpload'] && $local_file_name != null)
	{
		pts_client::remove_saved_result_file($local_file_name);
	}

	if ($return_json_data)
	{
		return isset($json_response['openbenchmarking']['upload']) ? $json_response['openbenchmarking']['upload'] : false;
	}

	return isset($json_response['openbenchmarking']['upload']['url']) ? $json_response['openbenchmarking']['upload']['url'] : false;
}
/**
 * Collect the directories (and URLs) to consult for cached test downloads.
 *
 * Sources, in order: system-wide PTS cache paths, user-defined caches
 * (self::$extra_caches plus the colon-separated PTS_DOWNLOAD_CACHE
 * environment variable), and optionally removable media mount points.
 * The computed list is memoized for the life of the process.
 *
 * @return array List of directory paths / URLs, each with a trailing slash
 */
public static function download_cache_locations()
{
	static $cache_directories = null;

	if ($cache_directories == null)
	{
		$cache_directories = array();

		// Phoronix Test Suite System Cache Directories — use the first one present
		foreach (array('/var/cache/phoronix-test-suite/download-cache/', '/var/cache/phoronix-test-suite/') as $system_dir)
		{
			if (is_dir($system_dir))
			{
				$cache_directories[] = $system_dir;
				break;
			}
		}

		// User Defined Directory Checking
		$env_cache = ($dir = pts_client::read_env('PTS_DOWNLOAD_CACHE')) != false ? $dir : null;
		$user_candidates = array_merge(self::$extra_caches, pts_strings::colon_explode($env_cache));

		foreach ($user_candidates as $candidate)
		{
			if ($candidate == null)
			{
				continue;
			}

			$candidate = pts_strings::parse_for_home_directory($candidate);

			// Accept remote URLs as-is; local paths must exist
			if (pts_strings::is_url($candidate) == false && !is_dir($candidate))
			{
				continue;
			}

			$cache_directories[] = pts_strings::add_trailing_slash($candidate);
		}

		// Scan removable media / extra volumes for download-cache directories
		if (pts_config::read_bool_config('PhoronixTestSuite/Options/Installation/SearchMediaForCache', 'TRUE'))
		{
			$media_caches = array_merge(pts_file_io::glob('/media/*/download-cache/'), pts_file_io::glob('/media/*/*/download-cache/'), pts_file_io::glob('/run/media/*/*/download-cache/'), pts_file_io::glob('/Volumes/*/download-cache/'));
			$cache_directories = array_merge($cache_directories, $media_caches);
		}
	}

	return $cache_directories;
}
/**
 * Configure client networking at startup: resolve any HTTP proxy (from the
 * user configuration or the http_proxy environment variable), set the
 * network timeout, honor the network/Internet disable options, and probe
 * actual Internet connectivity against the PTS server.
 *
 * Side effects: writes self::$network_proxy, self::$network_timeout,
 * self::$disable_network_support, self::$disable_internet_support and may
 * echo warnings / sleep while waiting for the network to come up.
 */
public static function client_startup()
{
	// Proxy from user configuration takes precedence over the environment
	if (($proxy_address = pts_config::read_user_config('PhoronixTestSuite/Options/Networking/ProxyAddress', false)) && ($proxy_port = pts_config::read_user_config('PhoronixTestSuite/Options/Networking/ProxyPort', false)))
	{
		self::$network_proxy['proxy'] = $proxy_address . ':' . $proxy_port;
		self::$network_proxy['address'] = $proxy_address;
		self::$network_proxy['port'] = $proxy_port;
	}
	else
	{
		// Fall back to an http_proxy environment variable of the form host:port
		if (($env_proxy = getenv('http_proxy')) != false && count($env_proxy = pts_strings::colon_explode($env_proxy)) == 2)
		{
			self::$network_proxy['proxy'] = $env_proxy[0] . ':' . $env_proxy[1];
			self::$network_proxy['address'] = $env_proxy[0];
			self::$network_proxy['port'] = $env_proxy[1];
		}
	}

	self::$network_timeout = pts_config::read_user_config('PhoronixTestSuite/Options/Networking/Timeout', 20);

	// Network support requires allow_url_fopen in the PHP configuration
	if (ini_get('allow_url_fopen') == 'Off')
	{
		if (!defined('PHOROMATIC_SERVER'))
		{
			echo PHP_EOL . 'The allow_url_fopen option in your PHP configuration must be enabled for network support.' . PHP_EOL . PHP_EOL;
		}

		self::$disable_network_support = true;
	}
	else
	{
		// User configuration can disable Internet-only or all network communication
		if (pts_config::read_bool_config('PhoronixTestSuite/Options/Networking/NoInternetCommunication', 'FALSE'))
		{
			if (!defined('PHOROMATIC_SERVER'))
			{
				echo PHP_EOL . 'Internet Communication Is Disabled Per Your User Configuration.' . PHP_EOL . PHP_EOL;
			}

			self::$disable_internet_support = true;
		}
		else
		{
			if (pts_config::read_bool_config('PhoronixTestSuite/Options/Networking/NoNetworkCommunication', 'FALSE'))
			{
				if (!defined('PHOROMATIC_SERVER'))
				{
					echo PHP_EOL . 'Network Communication Is Disabled Per Your User Configuration.' . PHP_EOL . PHP_EOL;
				}

				self::$disable_network_support = true;
			}
			else
			{
				// Flags (e.g. live-CD images) may also force networking off
				if (pts_flags::no_network_communication() == true)
				{
					//echo PHP_EOL . 'Network Communication Is Disabled Per Your User Configuration.' . PHP_EOL . PHP_EOL;
					self::$disable_network_support = true;
				}
				else
				{
					if (!PTS_IS_WEB_CLIENT)
					{
						// Probe connectivity by fetching a known token from the PTS server
						$server_response = pts_network::http_get_contents('http://www.phoronix-test-suite.com/PTS', false, false);

						if ($server_response != 'PTS')
						{
							// Failed to connect to PTS server
							// As a last resort, see if it can resolve IP to Google.com as a test for Internet connectivity...
							// i.e. in case Phoronix server is down or some other issue, so just see if Google will resolve
							// If google.com fails to resolve, it will simply return the original string
							if (gethostbyname('google.com') == 'google.com')
							{
								echo PHP_EOL;

								if (PTS_IS_DAEMONIZED_SERVER_PROCESS)
								{
									// Wait some seconds in case network is still coming up
									foreach (array(20, 40) as $time_to_wait)
									{
										sleep($time_to_wait);
										$server_response = pts_network::http_get_contents('http://www.phoronix-test-suite.com/PTS', false, false);

										if ($server_response != 'PTS' && gethostbyname('google.com') == 'google.com')
										{
											trigger_error('No Internet Connectivity After Wait', E_USER_WARNING);
											self::$disable_internet_support = true;
										}
										else
										{
											// Network came up; stop retrying
											self::$disable_internet_support = false;
											break;
										}
									}
								}
								else
								{
									trigger_error('No Internet Connectivity', E_USER_WARNING);
									self::$disable_internet_support = true;
								}
							}
						}
					}
				}
			}
		}
	}

	// Uploads additionally require file_uploads in the PHP configuration
	if (pts_network::network_support_available() == false && ini_get('file_uploads') == 'Off')
	{
		echo PHP_EOL . 'The file_uploads option in your PHP configuration must be enabled for network support.' . PHP_EOL . PHP_EOL;
	}
}
public static function run_test(&$test_run_manager, &$test_run_request) { $test_identifier = $test_run_request->test_profile->get_identifier(); $extra_arguments = $test_run_request->get_arguments(); $arguments_description = $test_run_request->get_arguments_description(); $full_output = pts_config::read_bool_config('PhoronixTestSuite/Options/General/FullOutput', 'FALSE'); // Do the actual test running process $test_directory = $test_run_request->test_profile->get_install_dir(); if (!is_dir($test_directory)) { return false; } $lock_file = $test_directory . 'run_lock'; if (pts_client::create_lock($lock_file) == false && $test_run_manager->is_multi_test_stress_run() == false) { self::test_run_error($test_run_manager, $test_run_request, 'The ' . $test_identifier . ' test is already running.'); return false; } $active_result_buffer = new pts_test_result_buffer_active(); $test_run_request->active =& $active_result_buffer; $execute_binary = $test_run_request->test_profile->get_test_executable(); $times_to_run = $test_run_request->test_profile->get_times_to_run(); $ignore_runs = $test_run_request->test_profile->get_runs_to_ignore(); $test_type = $test_run_request->test_profile->get_test_hardware_type(); $allow_cache_share = $test_run_request->test_profile->allow_cache_share(); $min_length = $test_run_request->test_profile->get_min_length(); $max_length = $test_run_request->test_profile->get_max_length(); if ($test_run_request->test_profile->get_environment_testing_size() > 1 && ceil(disk_free_space($test_directory) / 1048576) < $test_run_request->test_profile->get_environment_testing_size()) { // Ensure enough space is available on disk during testing process self::test_run_error($test_run_manager, $test_run_request, 'There is not enough space (at ' . $test_directory . 
') for this test to run.'); pts_client::release_lock($lock_file); return false; } $to_execute = $test_run_request->test_profile->get_test_executable_dir(); $pts_test_arguments = trim($test_run_request->test_profile->get_default_arguments() . ' ' . str_replace($test_run_request->test_profile->get_default_arguments(), '', $extra_arguments) . ' ' . $test_run_request->test_profile->get_default_post_arguments()); $extra_runtime_variables = pts_tests::extra_environmental_variables($test_run_request->test_profile); // Start $cache_share_pt2so = $test_directory . 'cache-share-' . PTS_INIT_TIME . '.pt2so'; $cache_share_present = $allow_cache_share && is_file($cache_share_pt2so); $test_run_request->set_used_arguments_description($arguments_description); pts_module_manager::module_process('__pre_test_run', $test_run_request); $time_test_start = time(); pts_client::$display->test_run_start($test_run_manager, $test_run_request); if (!$cache_share_present) { $pre_output = pts_tests::call_test_script($test_run_request->test_profile, 'pre', 'Running Pre-Test Script', $pts_test_arguments, $extra_runtime_variables, true); if ($pre_output != null && (pts_client::is_debug_mode() || $full_output)) { pts_client::$display->test_run_instance_output($pre_output); } if (is_file($test_directory . 'pre-test-exit-status')) { // If the pre script writes its exit status to ~/pre-test-exit-status, if it's non-zero the test run failed $exit_status = pts_file_io::file_get_contents($test_directory . 'pre-test-exit-status'); unlink($test_directory . 'pre-test-exit-status'); if ($exit_status != 0) { self::test_run_instance_error($test_run_manager, $test_run_request, 'The pre run script exited with a non-zero exit status.' . 
PHP_EOL); self::test_run_error($test_run_manager, $test_run_request, 'This test execution has been abandoned.'); return false; } } } pts_client::$display->display_interrupt_message($test_run_request->test_profile->get_pre_run_message()); $runtime_identifier = time(); $execute_binary_prepend = ''; if ($test_run_request->exec_binary_prepend != null) { $execute_binary_prepend = $test_run_request->exec_binary_prepend; } if (!$cache_share_present && $test_run_request->test_profile->is_root_required()) { if (phodevi::is_root() == false) { pts_client::$display->test_run_error('This test must be run as the root / administrator account.'); } $execute_binary_prepend .= ' ' . PTS_CORE_STATIC_PATH . 'root-access.sh '; } if ($allow_cache_share && !is_file($cache_share_pt2so)) { $cache_share = new pts_storage_object(false, false); } if ($test_run_manager->get_results_identifier() != null && $test_run_manager->get_file_name() != null && pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/SaveTestLogs', 'FALSE')) { $backup_test_log_dir = PTS_SAVE_RESULTS_PATH . $test_run_manager->get_file_name() . '/test-logs/active/' . $test_run_manager->get_results_identifier() . '/'; pts_file_io::delete($backup_test_log_dir); pts_file_io::mkdir($backup_test_log_dir, 0777, true); } else { $backup_test_log_dir = false; } for ($i = 0, $abort_testing = false, $time_test_start_actual = time(), $defined_times_to_run = $times_to_run; $i < $times_to_run && $i < 256 && !$abort_testing; $i++) { pts_client::$display->test_run_instance_header($test_run_request); $test_log_file = $test_directory . basename($test_identifier) . '-' . $runtime_identifier . '-' . ($i + 1) . 
'.log'; $is_expected_last_run = $i == $times_to_run - 1; $test_extra_runtime_variables = array_merge($extra_runtime_variables, array('LOG_FILE' => $test_log_file, 'DISPLAY' => getenv('DISPLAY'), 'PATH' => getenv('PATH'))); $restored_from_cache = false; if ($cache_share_present) { $cache_share = pts_storage_object::recover_from_file($cache_share_pt2so); if ($cache_share) { $test_result = $cache_share->read_object('test_results_output_' . $i); $test_extra_runtime_variables['LOG_FILE'] = $cache_share->read_object('log_file_location_' . $i); if ($test_extra_runtime_variables['LOG_FILE'] != null) { file_put_contents($test_extra_runtime_variables['LOG_FILE'], $cache_share->read_object('log_file_' . $i)); $test_run_time = 0; // This wouldn't be used for a cache share since it would always be the same, but declare the value so the variable is at least initialized $restored_from_cache = true; } } unset($cache_share); } if ($restored_from_cache == false) { $test_run_command = 'cd ' . $to_execute . ' && ' . $execute_binary_prepend . './' . $execute_binary . ' ' . $pts_test_arguments . ' 2>&1'; pts_client::test_profile_debug_message('Test Run Command: ' . $test_run_command); $is_monitoring = pts_test_result_parser::system_monitor_task_check($test_run_request->test_profile); $test_run_time_start = time(); if (phodevi::is_windows() || pts_client::read_env('USE_PHOROSCRIPT_INTERPRETER') != false) { $phoroscript = new pts_phoroscript_interpreter($to_execute . '/' . $execute_binary, $test_extra_runtime_variables, $to_execute); $phoroscript->execute_script($pts_test_arguments); $test_result = null; } else { //$test_result = pts_client::shell_exec($test_run_command, $test_extra_runtime_variables); $descriptorspec = array(0 => array('pipe', 'r'), 1 => array('pipe', 'w'), 2 => array('pipe', 'w')); $test_process = proc_open('exec ' . $execute_binary_prepend . './' . $execute_binary . ' ' . $pts_test_arguments . 
' 2>&1', $descriptorspec, $pipes, $to_execute, array_merge($_ENV, pts_client::environmental_variables(), $test_extra_runtime_variables)); if (is_resource($test_process)) { //echo proc_get_status($test_process)['pid']; pts_module_manager::module_process('__test_running', $test_process); $test_result = stream_get_contents($pipes[1]); fclose($pipes[1]); fclose($pipes[2]); $return_value = proc_close($test_process); } } $test_run_time = time() - $test_run_time_start; $monitor_result = $is_monitoring ? pts_test_result_parser::system_monitor_task_post_test($test_run_request->test_profile) : 0; } if (!isset($test_result[10240]) || pts_client::is_debug_mode() || $full_output) { pts_client::$display->test_run_instance_output($test_result); } if (is_file($test_log_file) && trim($test_result) == null && (filesize($test_log_file) < 10240 || pts_client::is_debug_mode() || $full_output)) { $test_log_file_contents = file_get_contents($test_log_file); pts_client::$display->test_run_instance_output($test_log_file_contents); unset($test_log_file_contents); } $test_run_request->test_result_standard_output = $test_result; $exit_status_pass = true; if (is_file($test_directory . 'test-exit-status')) { // If the test script writes its exit status to ~/test-exit-status, if it's non-zero the test run failed $exit_status = pts_file_io::file_get_contents($test_directory . 'test-exit-status'); unlink($test_directory . 'test-exit-status'); if ($exit_status != 0) { self::test_run_instance_error($test_run_manager, $test_run_request, 'The test exited with a non-zero exit status.'); if ($is_expected_last_run && is_file($test_log_file)) { $scan_log = pts_file_io::file_get_contents($test_log_file); $test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir()); if ($test_run_error) { self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . 
$test_run_error); } } $exit_status_pass = false; } } if (!in_array($i + 1, $ignore_runs) && $exit_status_pass) { if (isset($monitor_result) && $monitor_result != 0) { $test_run_request->active->active_result = $monitor_result; } else { pts_test_result_parser::parse_result($test_run_request, $test_extra_runtime_variables['LOG_FILE']); } pts_client::test_profile_debug_message('Test Result Value: ' . $test_run_request->active->active_result); if (!empty($test_run_request->active->active_result)) { if ($test_run_time < 2 && intval($test_run_request->active->active_result) == $test_run_request->active->active_result && $test_run_request->test_profile->get_estimated_run_time() > 60 && !$restored_from_cache) { // If the test ended in less than two seconds, outputted some int, and normally the test takes much longer, then it's likely some invalid run self::test_run_instance_error($test_run_manager, $test_run_request, 'The test run ended prematurely.'); if ($is_expected_last_run && is_file($test_log_file)) { $scan_log = pts_file_io::file_get_contents($test_log_file); $test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir()); if ($test_run_error) { self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . 
$test_run_error); } } } else { // TODO integrate active_result into active buffer $active_result_buffer->add_trial_run_result($test_run_request->active->active_result, $test_run_request->active->active_min_result, $test_run_request->active->active_max_result); } } else { if ($test_run_request->test_profile->get_display_format() != 'NO_RESULT') { self::test_run_instance_error($test_run_manager, $test_run_request, 'The test run did not produce a result.'); if ($is_expected_last_run && is_file($test_log_file)) { $scan_log = pts_file_io::file_get_contents($test_log_file); $test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir()); if ($test_run_error) { self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . $test_run_error); } } } } if ($allow_cache_share && !is_file($cache_share_pt2so)) { $cache_share->add_object('test_results_output_' . $i, $test_run_request->active->active_result); $cache_share->add_object('log_file_location_' . $i, $test_extra_runtime_variables['LOG_FILE']); $cache_share->add_object('log_file_' . $i, is_file($test_log_file) ? file_get_contents($test_log_file) : null); } } if ($is_expected_last_run && $active_result_buffer->get_trial_run_count() > floor(($i - 2) / 2) && !$cache_share_present && $test_run_manager->do_dynamic_run_count()) { // The later check above ensures if the test is failing often the run count won't uselessly be increasing // Should we increase the run count? $increase_run_count = false; if ($defined_times_to_run == $i + 1 && $active_result_buffer->get_trial_run_count() > 0 && $active_result_buffer->get_trial_run_count() < $defined_times_to_run && $i < 64) { // At least one run passed, but at least one run failed to produce a result. 
Increase count to try to get more successful runs $increase_run_count = $defined_times_to_run - $active_result_buffer->get_trial_run_count(); } else { if ($active_result_buffer->get_trial_run_count() >= 2) { // Dynamically increase run count if needed for statistical significance or other reasons $increase_run_count = $test_run_manager->increase_run_count_check($active_result_buffer, $defined_times_to_run, $test_run_time); if ($increase_run_count === -1) { $abort_testing = true; } else { if ($increase_run_count == true) { // Just increase the run count one at a time $increase_run_count = 1; } } } } if ($increase_run_count > 0) { $times_to_run += $increase_run_count; $is_expected_last_run = false; //$test_run_request->test_profile->set_times_to_run($times_to_run); } } if ($times_to_run > 1 && $i < $times_to_run - 1) { if ($cache_share_present == false) { $interim_output = pts_tests::call_test_script($test_run_request->test_profile, 'interim', 'Running Interim Test Script', $pts_test_arguments, $extra_runtime_variables, true); if ($interim_output != null && (pts_client::is_debug_mode() || $full_output)) { pts_client::$display->test_run_instance_output($interim_output); } //sleep(2); // Rest for a moment between tests } pts_module_manager::module_process('__interim_test_run', $test_run_request); } if (is_file($test_log_file)) { if ($is_expected_last_run) { // For now just passing the last test log file... // TODO XXX: clean this up with log files to preserve when needed, let multiple log files exist for extra_data, etc pts_test_result_parser::generate_extra_data($test_run_request, $test_log_file); } if ($backup_test_log_dir) { copy($test_log_file, $backup_test_log_dir . basename($test_log_file)); } if (pts_client::test_profile_debug_message('Log File At: ' . $test_log_file) == false) { unlink($test_log_file); } } if (is_file(PTS_USER_PATH . 'halt-testing') || is_file(PTS_USER_PATH . 
'skip-test')) { pts_client::release_lock($lock_file); return false; } pts_client::$display->test_run_instance_complete($test_run_request); } $time_test_end_actual = time(); if ($cache_share_present == false) { $post_output = pts_tests::call_test_script($test_run_request->test_profile, 'post', 'Running Post-Test Script', $pts_test_arguments, $extra_runtime_variables, true); if ($post_output != null && (pts_client::is_debug_mode() || $full_output)) { pts_client::$display->test_run_instance_output($post_output); } if (is_file($test_directory . 'post-test-exit-status')) { // If the post script writes its exit status to ~/post-test-exit-status, if it's non-zero the test run failed $exit_status = pts_file_io::file_get_contents($test_directory . 'post-test-exit-status'); unlink($test_directory . 'post-test-exit-status'); if ($exit_status != 0) { self::test_run_instance_error($test_run_manager, $test_run_request, 'The post run script exited with a non-zero exit status.' . PHP_EOL); $abort_testing = true; } } } if ($abort_testing) { self::test_run_error($test_run_manager, $test_run_request, 'This test execution has been abandoned.'); return false; } // End $time_test_end = time(); $time_test_elapsed = $time_test_end - $time_test_start; $time_test_elapsed_actual = $time_test_end_actual - $time_test_start_actual; if (!empty($min_length)) { if ($min_length > $time_test_elapsed_actual) { // The test ended too quickly, results are not valid self::test_run_error($test_run_manager, $test_run_request, 'This test ended prematurely.'); return false; } } if (!empty($max_length)) { if ($max_length < $time_test_elapsed_actual) { // The test took too much time, results are not valid self::test_run_error($test_run_manager, $test_run_request, 'This test run was exhausted.'); return false; } } if ($allow_cache_share && !is_file($cache_share_pt2so) && $cache_share instanceof pts_storage_object) { $cache_share->save_to_file($cache_share_pt2so); unset($cache_share); } if 
($test_run_manager->get_results_identifier() != null && pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/SaveInstallationLogs', 'FALSE')) { if (is_file($test_run_request->test_profile->get_install_dir() . 'install.log')) { $backup_log_dir = PTS_SAVE_RESULTS_PATH . $test_run_manager->get_file_name() . '/installation-logs/' . $test_run_manager->get_results_identifier() . '/'; pts_file_io::mkdir($backup_log_dir, 0777, true); copy($test_run_request->test_profile->get_install_dir() . 'install.log', $backup_log_dir . basename($test_identifier) . '.log'); } } // Fill in missing test details if (empty($arguments_description)) { $arguments_description = $test_run_request->test_profile->get_test_subtitle(); } $file_var_checks = array(array('pts-results-scale', 'set_result_scale', null), array('pts-results-proportion', 'set_result_proportion', null), array('pts-results-quantifier', 'set_result_quantifier', null), array('pts-test-version', 'set_version', null), array('pts-test-description', null, 'set_used_arguments_description'), array('pts-footnote', null, null)); foreach ($file_var_checks as &$file_check) { list($file, $set_function, $result_set_function) = $file_check; if (is_file($test_directory . $file)) { $file_contents = pts_file_io::file_get_contents($test_directory . $file); unlink($test_directory . $file); if (!empty($file_contents)) { if ($set_function != null) { call_user_func(array($test_run_request->test_profile, $set_function), $file_contents); } else { if ($result_set_function != null) { if ($result_set_function == 'set_used_arguments_description') { $arguments_description = $file_contents; } else { call_user_func(array($test_run_request, $result_set_function), $file_contents); } } else { if ($file == 'pts-footnote') { $test_run_request->test_profile->test_installation->set_install_footnote($file_contents); } } } } } } if (empty($arguments_description)) { $arguments_description = 'Phoronix Test Suite v' . 
PTS_VERSION; } foreach (pts_client::environmental_variables() as $key => $value) { $arguments_description = str_replace('$' . $key, $value, $arguments_description); if (!in_array($key, array('VIDEO_MEMORY', 'NUM_CPU_CORES', 'NUM_CPU_JOBS'))) { $extra_arguments = str_replace('$' . $key, $value, $extra_arguments); } } // Any device notes to add to PTS test notes area? foreach (phodevi::read_device_notes($test_type) as $note) { pts_test_notes_manager::add_note($note); } // As of PTS 4.4, this is removed and superceded effectively by reporting the notes to table // Any special information (such as forced AA/AF levels for graphics) to add to the description string of the result? /* if(($special_string = phodevi::read_special_settings_string($test_type)) != null) { if(strpos($arguments_description, $special_string) === false) { if($arguments_description != null) { $arguments_description .= ' | '; } $arguments_description .= $special_string; } } */ // Result Calculation $test_run_request->set_used_arguments_description($arguments_description); $test_run_request->set_used_arguments($extra_arguments); pts_test_result_parser::calculate_end_result($test_run_request, $active_result_buffer); // Process results pts_client::$display->test_run_end($test_run_request); pts_client::$display->display_interrupt_message($test_run_request->test_profile->get_post_run_message()); pts_module_manager::module_process('__post_test_run', $test_run_request); $report_elapsed_time = $cache_share_present == false && $test_run_request->active->get_result() != 0; pts_tests::update_test_install_xml($test_run_request->test_profile, $report_elapsed_time ? 
$time_test_elapsed : 0); pts_storage_object::add_in_file(PTS_CORE_STORAGE, 'total_testing_time', $time_test_elapsed / 60); if ($report_elapsed_time && pts_client::do_anonymous_usage_reporting() && $time_test_elapsed >= 60) { // If anonymous usage reporting enabled, report test run-time to OpenBenchmarking.org pts_openbenchmarking_client::upload_usage_data('test_complete', array($test_run_request, $time_test_elapsed)); } // Remove lock pts_client::release_lock($lock_file); return $active_result_buffer; }
/**
 * Expand the user-supplied run objects into the manager's test-result queue.
 *
 * Accepts a mixed array of run objects and dispatches on their type:
 *  - pts_test_profile: validated, converted via test_prompts_to_result_objects()
 *  - pts_test_suite: pre/post run messages and PCQS mode captured, contained
 *    result objects queued
 *  - pts_virtual_test_queue: contained result objects queued
 *  - pts_result_file: save name / title / description adopted from the file,
 *    one pts_test_result queued per contained result object
 *  - pts_virtual_test_suite: supported+valid profiles collected, user prompted
 *    (unless auto/batch mode) for which to run, selections queued
 * Anything else raises an E_USER_ERROR via trigger_error().
 *
 * Side effects on $this: populates tests_to_run (optionally sorted per the
 * AutoSortRunQueue config option, or shuffled when RUN_TESTS_IN_RANDOM_ORDER
 * is set), and sets run_description, file_name_title, pre/post_run_message,
 * is_pcqs, prompt_save_results and force_save_results.
 *
 * @param array $to_run_objects  Run objects to expand (taken by reference,
 *                               matching the historical signature; the array
 *                               itself is not modified here).
 * @return void
 */
public function determine_tests_to_run(&$to_run_objects)
{
	$unique_test_count = count(array_unique($to_run_objects));
	$run_contains_a_no_result_type = false;
	$request_results_save = false;

	foreach($to_run_objects as &$run_object)
	{
		// TODO: determine whether to print the titles of what's being run?
		if($run_object instanceof pts_test_profile)
		{
			// Skip profiles that are unnamed, untitled, or fail validation
			if($run_object->get_identifier() == null || $run_object->get_title() == null || $this->validate_test_to_run($run_object) == false)
			{
				continue;
			}

			if($run_contains_a_no_result_type == false && $run_object->get_display_format() == 'NO_RESULT')
			{
				$run_contains_a_no_result_type = true;
			}
			if($request_results_save == false && $run_object->do_auto_save_results())
			{
				$request_results_save = true;
			}

			foreach(self::test_prompts_to_result_objects($run_object) as $result_object)
			{
				$this->add_test_result_object($result_object);
			}
		}
		elseif($run_object instanceof pts_test_suite)
		{
			$this->pre_run_message = $run_object->get_pre_run_message();
			$this->post_run_message = $run_object->get_post_run_message();

			if($run_object->get_run_mode() == 'PCQS')
			{
				$this->is_pcqs = true;
			}

			foreach($run_object->get_contained_test_result_objects() as $result_object)
			{
				$this->add_test_result_object($result_object);
			}
		}
		elseif($run_object instanceof pts_virtual_test_queue)
		{
			foreach($run_object->get_contained_test_result_objects() as $result_object)
			{
				$this->add_test_result_object($result_object);
			}
		}
		elseif($run_object instanceof pts_result_file)
		{
			// Re-running an existing result file: adopt its identity/metadata
			// Print the $to_run ?
			$this->run_description = $run_object->get_description();
			$preset_vars = $run_object->get_preset_environment_variables();
			$result_objects = $run_object->get_result_objects();
			$this->set_save_name($run_object->get_identifier(), false);
			$this->file_name_title = $run_object->get_title();
			pts_module_manager::process_environment_variables_string_to_set($preset_vars);

			foreach($result_objects as &$result_object)
			{
				if($result_object->test_profile->get_identifier() == null)
				{
					continue;
				}

				$test_result = new pts_test_result($result_object->test_profile);
				$test_result->set_used_arguments($result_object->get_arguments());
				$test_result->set_used_arguments_description($result_object->get_arguments_description());
				$this->add_test_result_object($test_result);
			}
		}
		elseif($run_object instanceof pts_virtual_test_suite)
		{
			$virtual_suite_tests = $run_object->get_contained_test_profiles();

			// Drop unsupported or invalid profiles before presenting choices
			foreach(array_keys($virtual_suite_tests) as $i)
			{
				if($virtual_suite_tests[$i]->is_supported(false) == false || $this->validate_test_to_run($virtual_suite_tests[$i]) == false)
				{
					unset($virtual_suite_tests[$i]);
				}
			}
			sort($virtual_suite_tests);

			if(count($virtual_suite_tests) > 1)
			{
				// Offer a run-everything option at the end of the menu
				array_push($virtual_suite_tests, 'All Tests In Suite');
			}

			if(!$this->auto_mode && !$this->batch_mode)
			{
				$run_index = explode(',', pts_user_io::prompt_text_menu('Select the tests in the virtual suite to run', $virtual_suite_tests, true, true));
			}
			else
			{
				// Sentinel meaning "no interactive selection was made"
				$run_index = -1;
			}

			// BUGFIX: check the -1 sentinel first and guard in_array() with
			// is_array() — previously in auto/batch mode in_array() was handed
			// the integer -1 as a haystack (warning on legacy PHP, TypeError
			// on PHP 8) before the == -1 fallback was ever reached.
			if($run_index == -1 || (count($virtual_suite_tests) > 2 && is_array($run_index) && in_array(count($virtual_suite_tests) - 1, $run_index)))
			{
				// The appended 'All Tests In Suite' was selected, so run all
			}
			else
			{
				// Keep only the indexes the user picked from the menu
				foreach(array_keys($virtual_suite_tests) as $i)
				{
					if(!in_array($i, $run_index))
					{
						unset($virtual_suite_tests[$i]);
					}
				}
			}

			foreach($virtual_suite_tests as &$test_profile)
			{
				// Entries may include the 'All Tests In Suite' string; only
				// queue actual profile objects
				if($test_profile instanceof pts_test_profile)
				{
					// The user is to configure virtual suites manually
					foreach(self::test_prompts_to_result_objects($test_profile) as $result_object)
					{
						$this->add_test_result_object($result_object);
					}
				}
			}
		}
		else
		{
			trigger_error($run_object . ' is not recognized.', E_USER_ERROR);
			continue;
		}
	}

	// AlwaysUploadResultsToOpenBenchmarking AutoSortRunQueue
	if(pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/AutoSortRunQueue', 'TRUE') && $this->force_save_results == false)
	{
		// Not that it matters much, but if $this->force_save_results is set that means likely running from a result file...
		// so if running a result file, don't change the ordering of the existing results
		// Sort the run order so that all tests that are similar are grouped together, etc
		usort($this->tests_to_run, array('pts_test_run_manager', 'cmp_result_object_sort'));
	}

	if(pts_client::read_env('RUN_TESTS_IN_RANDOM_ORDER'))
	{
		shuffle($this->tests_to_run);
	}

	// Only prompt to save when something will produce a result, or when
	// multiple distinct tests are queued
	$this->prompt_save_results = $run_contains_a_no_result_type == false || $unique_test_count > 1;
	$this->force_save_results = $this->force_save_results || $request_results_save;
}