/**
 * Execute a test profile's run phase: pre/interim/post test scripts, the timed
 * test runs themselves, result parsing, and log/lock housekeeping.
 * Returns the active result buffer on success or false on failure.
 */
public static function run_test(&$test_run_manager, &$test_run_request)
{
	$test_identifier = $test_run_request->test_profile->get_identifier();
	$extra_arguments = $test_run_request->get_arguments();
	$arguments_description = $test_run_request->get_arguments_description();
	$full_output = pts_config::read_bool_config('PhoronixTestSuite/Options/General/FullOutput', 'FALSE');

	// Do the actual test running process
	$test_directory = $test_run_request->test_profile->get_install_dir();

	if (!is_dir($test_directory))
	{
		return false;
	}

	$lock_file = $test_directory . 'run_lock';
	if (pts_client::create_lock($lock_file) == false && $test_run_manager->is_multi_test_stress_run() == false)
	{
		self::test_run_error($test_run_manager, $test_run_request, 'The ' . $test_identifier . ' test is already running.');
		return false;
	}

	$active_result_buffer = new pts_test_result_buffer_active();
	$test_run_request->active =& $active_result_buffer;
	$execute_binary = $test_run_request->test_profile->get_test_executable();
	$times_to_run = $test_run_request->test_profile->get_times_to_run();
	$ignore_runs = $test_run_request->test_profile->get_runs_to_ignore();
	$test_type = $test_run_request->test_profile->get_test_hardware_type();
	$allow_cache_share = $test_run_request->test_profile->allow_cache_share();
	$min_length = $test_run_request->test_profile->get_min_length();
	$max_length = $test_run_request->test_profile->get_max_length();

	if ($test_run_request->test_profile->get_environment_testing_size() > 1 && ceil(disk_free_space($test_directory) / 1048576) < $test_run_request->test_profile->get_environment_testing_size())
	{
		// Ensure enough space is available on disk during the testing process
		self::test_run_error($test_run_manager, $test_run_request, 'There is not enough space (at ' . $test_directory . ') for this test to run.');
		pts_client::release_lock($lock_file);
		return false;
	}

	$to_execute = $test_run_request->test_profile->get_test_executable_dir();
	$pts_test_arguments = trim($test_run_request->test_profile->get_default_arguments() . ' ' . str_replace($test_run_request->test_profile->get_default_arguments(), '', $extra_arguments) . ' ' . $test_run_request->test_profile->get_default_post_arguments());
	$extra_runtime_variables = pts_tests::extra_environmental_variables($test_run_request->test_profile);

	// Start
	$cache_share_pt2so = $test_directory . 'cache-share-' . PTS_INIT_TIME . '.pt2so';
	$cache_share_present = $allow_cache_share && is_file($cache_share_pt2so);
	$test_run_request->set_used_arguments_description($arguments_description);
	pts_module_manager::module_process('__pre_test_run', $test_run_request);

	$time_test_start = time();
	pts_client::$display->test_run_start($test_run_manager, $test_run_request);

	if (!$cache_share_present)
	{
		$pre_output = pts_tests::call_test_script($test_run_request->test_profile, 'pre', 'Running Pre-Test Script', $pts_test_arguments, $extra_runtime_variables, true);

		if ($pre_output != null && (pts_client::is_debug_mode() || $full_output))
		{
			pts_client::$display->test_run_instance_output($pre_output);
		}

		if (is_file($test_directory . 'pre-test-exit-status'))
		{
			// If the pre script writes its exit status to ~/pre-test-exit-status and it's non-zero, the test run failed
			$exit_status = pts_file_io::file_get_contents($test_directory . 'pre-test-exit-status');
			unlink($test_directory . 'pre-test-exit-status');

			if ($exit_status != 0)
			{
				self::test_run_instance_error($test_run_manager, $test_run_request, 'The pre run script exited with a non-zero exit status.' . PHP_EOL);
				self::test_run_error($test_run_manager, $test_run_request, 'This test execution has been abandoned.');
				return false;
			}
		}
	}

	pts_client::$display->display_interrupt_message($test_run_request->test_profile->get_pre_run_message());
	$runtime_identifier = time();
	$execute_binary_prepend = '';

	if ($test_run_request->exec_binary_prepend != null)
	{
		$execute_binary_prepend = $test_run_request->exec_binary_prepend;
	}

	if (!$cache_share_present && $test_run_request->test_profile->is_root_required())
	{
		if (phodevi::is_root() == false)
		{
			pts_client::$display->test_run_error('This test must be run as the root / administrator account.');
		}

		$execute_binary_prepend .= ' ' . PTS_CORE_STATIC_PATH . 'root-access.sh ';
	}

	if ($allow_cache_share && !is_file($cache_share_pt2so))
	{
		$cache_share = new pts_storage_object(false, false);
	}

	if ($test_run_manager->get_results_identifier() != null && $test_run_manager->get_file_name() != null && pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/SaveTestLogs', 'FALSE'))
	{
		$backup_test_log_dir = PTS_SAVE_RESULTS_PATH . $test_run_manager->get_file_name() . '/test-logs/active/' . $test_run_manager->get_results_identifier() . '/';
		pts_file_io::delete($backup_test_log_dir);
		pts_file_io::mkdir($backup_test_log_dir, 0777, true);
	}
	else
	{
		$backup_test_log_dir = false;
	}

	for ($i = 0, $abort_testing = false, $time_test_start_actual = time(), $defined_times_to_run = $times_to_run; $i < $times_to_run && $i < 256 && !$abort_testing; $i++)
	{
		pts_client::$display->test_run_instance_header($test_run_request);
		$test_log_file = $test_directory . basename($test_identifier) . '-' . $runtime_identifier . '-' . ($i + 1) . '.log';
		$is_expected_last_run = $i == $times_to_run - 1;

		$test_extra_runtime_variables = array_merge($extra_runtime_variables, array(
			'LOG_FILE' => $test_log_file,
			'DISPLAY' => getenv('DISPLAY'),
			'PATH' => getenv('PATH')
			));

		$restored_from_cache = false;
		if ($cache_share_present)
		{
			$cache_share = pts_storage_object::recover_from_file($cache_share_pt2so);

			if ($cache_share)
			{
				$test_result = $cache_share->read_object('test_results_output_' . $i);
				$test_extra_runtime_variables['LOG_FILE'] = $cache_share->read_object('log_file_location_' . $i);

				if ($test_extra_runtime_variables['LOG_FILE'] != null)
				{
					file_put_contents($test_extra_runtime_variables['LOG_FILE'], $cache_share->read_object('log_file_' . $i));
					$test_run_time = 0; // This wouldn't be used for a cache share since it would always be the same, but declare the value so the variable is at least initialized
					$restored_from_cache = true;
				}
			}

			unset($cache_share);
		}

		if ($restored_from_cache == false)
		{
			$test_run_command = 'cd ' . $to_execute . ' && ' . $execute_binary_prepend . './' . $execute_binary . ' ' . $pts_test_arguments . ' 2>&1';

			pts_client::test_profile_debug_message('Test Run Command: ' . $test_run_command);

			$is_monitoring = pts_test_result_parser::system_monitor_task_check($test_run_request->test_profile);
			$test_run_time_start = time();

			if (phodevi::is_windows() || pts_client::read_env('USE_PHOROSCRIPT_INTERPRETER') != false)
			{
				$phoroscript = new pts_phoroscript_interpreter($to_execute . '/' . $execute_binary, $test_extra_runtime_variables, $to_execute);
				$phoroscript->execute_script($pts_test_arguments);
				$test_result = null;
			}
			else
			{
				//$test_result = pts_client::shell_exec($test_run_command, $test_extra_runtime_variables);
				$descriptorspec = array(
					0 => array('pipe', 'r'),
					1 => array('pipe', 'w'),
					2 => array('pipe', 'w')
					);

				$test_process = proc_open('exec ' . $execute_binary_prepend . './' . $execute_binary . ' ' . $pts_test_arguments . ' 2>&1', $descriptorspec, $pipes, $to_execute, array_merge($_ENV, pts_client::environmental_variables(), $test_extra_runtime_variables));

				if (is_resource($test_process))
				{
					//echo proc_get_status($test_process)['pid'];
					pts_module_manager::module_process('__test_running', $test_process);

					$test_result = stream_get_contents($pipes[1]);
					fclose($pipes[1]);
					fclose($pipes[2]);
					$return_value = proc_close($test_process);
				}
			}

			$test_run_time = time() - $test_run_time_start;
			$monitor_result = $is_monitoring ? pts_test_result_parser::system_monitor_task_post_test($test_run_request->test_profile) : 0;
		}

		if (!isset($test_result[10240]) || pts_client::is_debug_mode() || $full_output)
		{
			pts_client::$display->test_run_instance_output($test_result);
		}

		if (is_file($test_log_file) && trim($test_result) == null && (filesize($test_log_file) < 10240 || pts_client::is_debug_mode() || $full_output))
		{
			$test_log_file_contents = file_get_contents($test_log_file);
			pts_client::$display->test_run_instance_output($test_log_file_contents);
			unset($test_log_file_contents);
		}

		$test_run_request->test_result_standard_output = $test_result;

		$exit_status_pass = true;
		if (is_file($test_directory . 'test-exit-status'))
		{
			// If the test script writes its exit status to ~/test-exit-status and it's non-zero, the test run failed
			$exit_status = pts_file_io::file_get_contents($test_directory . 'test-exit-status');
			unlink($test_directory . 'test-exit-status');

			if ($exit_status != 0)
			{
				self::test_run_instance_error($test_run_manager, $test_run_request, 'The test exited with a non-zero exit status.');

				if ($is_expected_last_run && is_file($test_log_file))
				{
					$scan_log = pts_file_io::file_get_contents($test_log_file);
					$test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir());

					if ($test_run_error)
					{
						self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . $test_run_error);
					}
				}

				$exit_status_pass = false;
			}
		}

		if (!in_array($i + 1, $ignore_runs) && $exit_status_pass)
		{
			if (isset($monitor_result) && $monitor_result != 0)
			{
				$test_run_request->active->active_result = $monitor_result;
			}
			else
			{
				pts_test_result_parser::parse_result($test_run_request, $test_extra_runtime_variables['LOG_FILE']);
			}

			pts_client::test_profile_debug_message('Test Result Value: ' . $test_run_request->active->active_result);

			if (!empty($test_run_request->active->active_result))
			{
				if ($test_run_time < 2 && intval($test_run_request->active->active_result) == $test_run_request->active->active_result && $test_run_request->test_profile->get_estimated_run_time() > 60 && !$restored_from_cache)
				{
					// If the test ended in less than two seconds, output only an integer, and the test normally takes much longer, it is likely an invalid run
					self::test_run_instance_error($test_run_manager, $test_run_request, 'The test run ended prematurely.');

					if ($is_expected_last_run && is_file($test_log_file))
					{
						$scan_log = pts_file_io::file_get_contents($test_log_file);
						$test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir());

						if ($test_run_error)
						{
							self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . $test_run_error);
						}
					}
				}
				else
				{
					// TODO integrate active_result into active buffer
					$active_result_buffer->add_trial_run_result($test_run_request->active->active_result, $test_run_request->active->active_min_result, $test_run_request->active->active_max_result);
				}
			}
			else
			{
				if ($test_run_request->test_profile->get_display_format() != 'NO_RESULT')
				{
					self::test_run_instance_error($test_run_manager, $test_run_request, 'The test run did not produce a result.');

					if ($is_expected_last_run && is_file($test_log_file))
					{
						$scan_log = pts_file_io::file_get_contents($test_log_file);
						$test_run_error = pts_tests::scan_for_error($scan_log, $test_run_request->test_profile->get_test_executable_dir());

						if ($test_run_error)
						{
							self::test_run_instance_error($test_run_manager, $test_run_request, 'E: ' . $test_run_error);
						}
					}
				}
			}

			if ($allow_cache_share && !is_file($cache_share_pt2so))
			{
				$cache_share->add_object('test_results_output_' . $i, $test_run_request->active->active_result);
				$cache_share->add_object('log_file_location_' . $i, $test_extra_runtime_variables['LOG_FILE']);
				$cache_share->add_object('log_file_' . $i, is_file($test_log_file) ? file_get_contents($test_log_file) : null);
			}
		}

		if ($is_expected_last_run && $active_result_buffer->get_trial_run_count() > floor(($i - 2) / 2) && !$cache_share_present && $test_run_manager->do_dynamic_run_count())
		{
			// The latter check above ensures that if the test is failing often, the run count won't uselessly keep increasing
			// Should we increase the run count?
			$increase_run_count = false;

			if ($defined_times_to_run == $i + 1 && $active_result_buffer->get_trial_run_count() > 0 && $active_result_buffer->get_trial_run_count() < $defined_times_to_run && $i < 64)
			{
				// At least one run passed, but at least one run failed to produce a result. Increase the count to try to get more successful runs
				$increase_run_count = $defined_times_to_run - $active_result_buffer->get_trial_run_count();
			}
			else
			{
				if ($active_result_buffer->get_trial_run_count() >= 2)
				{
					// Dynamically increase the run count if needed for statistical significance or other reasons
					$increase_run_count = $test_run_manager->increase_run_count_check($active_result_buffer, $defined_times_to_run, $test_run_time);

					if ($increase_run_count === -1)
					{
						$abort_testing = true;
					}
					else
					{
						if ($increase_run_count == true)
						{
							// Just increase the run count one at a time
							$increase_run_count = 1;
						}
					}
				}
			}

			if ($increase_run_count > 0)
			{
				$times_to_run += $increase_run_count;
				$is_expected_last_run = false;
				//$test_run_request->test_profile->set_times_to_run($times_to_run);
			}
		}

		if ($times_to_run > 1 && $i < $times_to_run - 1)
		{
			if ($cache_share_present == false)
			{
				$interim_output = pts_tests::call_test_script($test_run_request->test_profile, 'interim', 'Running Interim Test Script', $pts_test_arguments, $extra_runtime_variables, true);

				if ($interim_output != null && (pts_client::is_debug_mode() || $full_output))
				{
					pts_client::$display->test_run_instance_output($interim_output);
				}
				//sleep(2); // Rest for a moment between tests
			}

			pts_module_manager::module_process('__interim_test_run', $test_run_request);
		}

		if (is_file($test_log_file))
		{
			if ($is_expected_last_run)
			{
				// For now just passing the last test log file...
				// TODO XXX: clean this up with log files to preserve when needed, let multiple log files exist for extra_data, etc
				pts_test_result_parser::generate_extra_data($test_run_request, $test_log_file);
			}

			if ($backup_test_log_dir)
			{
				copy($test_log_file, $backup_test_log_dir . basename($test_log_file));
			}

			if (pts_client::test_profile_debug_message('Log File At: ' . $test_log_file) == false)
			{
				unlink($test_log_file);
			}
		}

		if (is_file(PTS_USER_PATH . 'halt-testing') || is_file(PTS_USER_PATH . 'skip-test'))
		{
			pts_client::release_lock($lock_file);
			return false;
		}

		pts_client::$display->test_run_instance_complete($test_run_request);
	}

	$time_test_end_actual = time();

	if ($cache_share_present == false)
	{
		$post_output = pts_tests::call_test_script($test_run_request->test_profile, 'post', 'Running Post-Test Script', $pts_test_arguments, $extra_runtime_variables, true);

		if ($post_output != null && (pts_client::is_debug_mode() || $full_output))
		{
			pts_client::$display->test_run_instance_output($post_output);
		}

		if (is_file($test_directory . 'post-test-exit-status'))
		{
			// If the post script writes its exit status to ~/post-test-exit-status and it's non-zero, the test run failed
			$exit_status = pts_file_io::file_get_contents($test_directory . 'post-test-exit-status');
			unlink($test_directory . 'post-test-exit-status');

			if ($exit_status != 0)
			{
				self::test_run_instance_error($test_run_manager, $test_run_request, 'The post run script exited with a non-zero exit status.' . PHP_EOL);
				$abort_testing = true;
			}
		}
	}

	if ($abort_testing)
	{
		self::test_run_error($test_run_manager, $test_run_request, 'This test execution has been abandoned.');
		return false;
	}

	// End
	$time_test_end = time();
	$time_test_elapsed = $time_test_end - $time_test_start;
	$time_test_elapsed_actual = $time_test_end_actual - $time_test_start_actual;

	if (!empty($min_length))
	{
		if ($min_length > $time_test_elapsed_actual)
		{
			// The test ended too quickly, results are not valid
			self::test_run_error($test_run_manager, $test_run_request, 'This test ended prematurely.');
			return false;
		}
	}

	if (!empty($max_length))
	{
		if ($max_length < $time_test_elapsed_actual)
		{
			// The test took too much time, results are not valid
			self::test_run_error($test_run_manager, $test_run_request, 'This test run was exhausted.');
			return false;
		}
	}

	if ($allow_cache_share && !is_file($cache_share_pt2so) && $cache_share instanceof pts_storage_object)
	{
		$cache_share->save_to_file($cache_share_pt2so);
		unset($cache_share);
	}

	if ($test_run_manager->get_results_identifier() != null && pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/SaveInstallationLogs', 'FALSE'))
	{
		if (is_file($test_run_request->test_profile->get_install_dir() . 'install.log'))
		{
			$backup_log_dir = PTS_SAVE_RESULTS_PATH . $test_run_manager->get_file_name() . '/installation-logs/' . $test_run_manager->get_results_identifier() . '/';
			pts_file_io::mkdir($backup_log_dir, 0777, true);
			copy($test_run_request->test_profile->get_install_dir() . 'install.log', $backup_log_dir . basename($test_identifier) . '.log');
		}
	}

	// Fill in missing test details
	if (empty($arguments_description))
	{
		$arguments_description = $test_run_request->test_profile->get_test_subtitle();
	}

	$file_var_checks = array(
		array('pts-results-scale', 'set_result_scale', null),
		array('pts-results-proportion', 'set_result_proportion', null),
		array('pts-results-quantifier', 'set_result_quantifier', null),
		array('pts-test-version', 'set_version', null),
		array('pts-test-description', null, 'set_used_arguments_description'),
		array('pts-footnote', null, null)
		);

	foreach ($file_var_checks as &$file_check)
	{
		list($file, $set_function, $result_set_function) = $file_check;

		if (is_file($test_directory . $file))
		{
			$file_contents = pts_file_io::file_get_contents($test_directory . $file);
			unlink($test_directory . $file);

			if (!empty($file_contents))
			{
				if ($set_function != null)
				{
					call_user_func(array($test_run_request->test_profile, $set_function), $file_contents);
				}
				else
				{
					if ($result_set_function != null)
					{
						if ($result_set_function == 'set_used_arguments_description')
						{
							$arguments_description = $file_contents;
						}
						else
						{
							call_user_func(array($test_run_request, $result_set_function), $file_contents);
						}
					}
					else
					{
						if ($file == 'pts-footnote')
						{
							$test_run_request->test_profile->test_installation->set_install_footnote($file_contents);
						}
					}
				}
			}
		}
	}

	if (empty($arguments_description))
	{
		$arguments_description = 'Phoronix Test Suite v' . PTS_VERSION;
	}

	foreach (pts_client::environmental_variables() as $key => $value)
	{
		$arguments_description = str_replace('$' . $key, $value, $arguments_description);

		if (!in_array($key, array('VIDEO_MEMORY', 'NUM_CPU_CORES', 'NUM_CPU_JOBS')))
		{
			$extra_arguments = str_replace('$' . $key, $value, $extra_arguments);
		}
	}

	// Any device notes to add to PTS test notes area?
	foreach (phodevi::read_device_notes($test_type) as $note)
	{
		pts_test_notes_manager::add_note($note);
	}

	// As of PTS 4.4, this is removed and effectively superseded by reporting the notes to the table
	// Any special information (such as forced AA/AF levels for graphics) to add to the description string of the result?
	/*
	if(($special_string = phodevi::read_special_settings_string($test_type)) != null)
	{
		if(strpos($arguments_description, $special_string) === false)
		{
			if($arguments_description != null)
			{
				$arguments_description .= ' | ';
			}

			$arguments_description .= $special_string;
		}
	}
	*/

	// Result Calculation
	$test_run_request->set_used_arguments_description($arguments_description);
	$test_run_request->set_used_arguments($extra_arguments);
	pts_test_result_parser::calculate_end_result($test_run_request, $active_result_buffer); // Process results

	pts_client::$display->test_run_end($test_run_request);
	pts_client::$display->display_interrupt_message($test_run_request->test_profile->get_post_run_message());
	pts_module_manager::module_process('__post_test_run', $test_run_request);

	$report_elapsed_time = $cache_share_present == false && $test_run_request->active->get_result() != 0;
	pts_tests::update_test_install_xml($test_run_request->test_profile, $report_elapsed_time ? $time_test_elapsed : 0);
	pts_storage_object::add_in_file(PTS_CORE_STORAGE, 'total_testing_time', $time_test_elapsed / 60);

	if ($report_elapsed_time && pts_client::do_anonymous_usage_reporting() && $time_test_elapsed >= 60)
	{
		// If anonymous usage reporting is enabled, report the test run-time to OpenBenchmarking.org
		pts_openbenchmarking_client::upload_usage_data('test_complete', array($test_run_request, $time_test_elapsed));
	}

	// Remove lock
	pts_client::release_lock($lock_file);

	return $active_result_buffer;
}
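/**
 * Resolve and install the external (distribution-provided) dependencies needed
 * by a set of test profiles. Skipped entirely on Windows/macOS or when external
 * dependencies are disabled. Returns true in most cases; in unattended (auto)
 * mode without root it returns whether no dependencies remain to be installed.
 */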
public static function install_dependencies(&$test_profiles)
{
	// PTS External Dependencies install on distribution
	if (phodevi::is_windows() || phodevi::is_macosx() || pts_flags::no_external_dependencies())
	{
		// Windows doesn't use any external dependencies
		return true;
	}

	// Find all the tests that need to be checked
	$tests_to_check = array();
	foreach ($test_profiles as $test_profile)
	{
		if (!in_array($test_profile, $tests_to_check) && $test_profile->is_supported())
		{
			array_push($tests_to_check, $test_profile);
		}
	}

	// Find all of the POSSIBLE test dependencies
	$required_test_dependencies = array();
	foreach ($tests_to_check as &$test_profile)
	{
		foreach ($test_profile->get_dependencies() as $test_dependency)
		{
			if (empty($test_dependency))
			{
				continue;
			}

			if (isset($required_test_dependencies[$test_dependency]) == false)
			{
				$required_test_dependencies[$test_dependency] = array();
			}

			array_push($required_test_dependencies[$test_dependency], $test_profile);
		}
	}

	if (pts_c::$test_flags & pts_c::skip_tests_with_missing_dependencies)
	{
		// Remove tests that have external dependencies that aren't satisfied and then return
		$generic_packages_needed = array();
		$required_test_dependencies_copy = $required_test_dependencies;
		$dependencies_to_install = self::check_dependencies_missing_from_system($required_test_dependencies_copy, $generic_packages_needed);
		self::remove_tests_with_missing_dependencies($test_profiles, $generic_packages_needed, $required_test_dependencies);
		return true;
	}

	if (!empty($required_test_dependencies))
	{
		// The 'common-dependencies' package covers general, not explicitly required but nice-to-have packages like mesa-utils for providing glxinfo about the system
		// So if we're going to be installing external dependencies anyway, might as well try to see that the common-dependencies are satisfied
		$required_test_dependencies['common-dependencies'] = array();
	}

	// Does the user wish to skip any particular dependencies?
	if (pts_client::read_env('SKIP_EXTERNAL_DEPENDENCIES'))
	{
		$dependencies_to_skip = explode(',', pts_client::read_env('SKIP_EXTERNAL_DEPENDENCIES'));

		foreach ($dependencies_to_skip as $dependency_name)
		{
			if (isset($required_test_dependencies[$dependency_name]))
			{
				unset($required_test_dependencies[$dependency_name]);
			}
		}
	}

	// Make a copy for use at the end of the process to check whether all dependencies were actually found
	$required_test_dependencies_copy = $required_test_dependencies;

	// Find the dependencies that are actually missing from the system
	$dependencies_to_install = self::check_dependencies_missing_from_system($required_test_dependencies);

	// If it's automated and can't install without root, return true if there are no dependencies to install, otherwise false
	if (pts_c::$test_flags & pts_c::auto_mode && phodevi::is_root() == false)
	{
		return count($dependencies_to_install) == 0;
	}

	// Do the actual dependency install process
	if (count($dependencies_to_install) > 0)
	{
		self::install_packages_on_system($dependencies_to_install);
	}

	// Some dependencies are not supported on this OS or are missing from the distro's XML file
	if (count($required_test_dependencies) > 0 && count($dependencies_to_install) == 0)
	{
		$exdep_generic_parser = new pts_exdep_generic_parser();
		$to_report = array();

		foreach (array_keys($required_test_dependencies) as $dependency)
		{
			$dependency_data = $exdep_generic_parser->get_package_data($dependency);

			if ($dependency_data['possible_packages'] != null)
			{
				array_push($to_report, $dependency_data['title'] . PHP_EOL . 'Possible Package Names: ' . $dependency_data['possible_packages']);
			}
		}

		if (count($to_report) > 0)
		{
			echo PHP_EOL . 'Some additional dependencies are required, but they could not be installed automatically for your operating system.' . PHP_EOL . 'Below are the software packages that must be installed.' . PHP_EOL . PHP_EOL;

			foreach ($to_report as $report)
			{
				pts_client::$display->generic_heading($report);
			}

			if (pts_c::$test_flags ^ pts_c::batch_mode && pts_c::$test_flags ^ pts_c::auto_mode)
			{
				echo 'The above dependencies should be installed before proceeding. Press any key when you\'re ready to continue.';
				pts_user_io::read_user_input();
				echo PHP_EOL;
			}
		}
	}

	// Find the dependencies that are still missing from the system
	if (pts_c::$test_flags ^ pts_c::batch_mode && pts_c::$test_flags ^ pts_c::auto_mode && !defined('PHOROMATIC_PROCESS'))
	{
		$generic_packages_needed = array();
		$required_test_dependencies = $required_test_dependencies_copy;
		$dependencies_to_install = self::check_dependencies_missing_from_system($required_test_dependencies_copy, $generic_packages_needed);

		if (count($generic_packages_needed) > 0)
		{
			echo PHP_EOL . 'There are dependencies still missing from the system:' . PHP_EOL;
			echo pts_user_io::display_text_list(self::generic_names_to_titles($generic_packages_needed));

			$actions = array(
				'IGNORE' => 'Ignore missing dependencies and proceed with installation.',
				'SKIP_TESTS_WITH_MISSING_DEPS' => 'Skip installing the tests with missing dependencies.',
				'REATTEMPT_DEP_INSTALL' => 'Re-attempt to install the missing dependencies.',
				'QUIT' => 'Quit the current Phoronix Test Suite process.'
				);

			$selected_action = pts_user_io::prompt_text_menu('Missing dependencies action', $actions, false, true);

			switch ($selected_action)
			{
				case 'IGNORE':
					break;
				case 'SKIP_TESTS_WITH_MISSING_DEPS':
					// Unset the tests that have dependencies still missing
					self::remove_tests_with_missing_dependencies($test_profiles, $generic_packages_needed, $required_test_dependencies);
					break;
				case 'REATTEMPT_DEP_INSTALL':
					self::install_packages_on_system($dependencies_to_install);
					break;
				case 'QUIT':
					exit(0);
			}
		}
	}

	return true;
}
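/**
 * Check whether a test profile is runnable on the current system: display
 * server availability, display driver 3D support, file-system type, SKIP_*
 * environment variables, and root requirements in batch mode. Returns true if
 * the profile appears runnable (or SKIP_ALL_TEST_SUPPORT_CHECKS is set),
 * false otherwise.
 */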
public function test_profile_system_compatibility_check(&$test_profile, $report_errors = false)
{
	$valid_test_profile = true;
	$test_type = $test_profile->get_test_hardware_type();
	$skip_tests = pts_client::read_env('SKIP_TESTS') ? pts_strings::comma_explode(pts_client::read_env('SKIP_TESTS')) : false;
	$skip_test_subsystems = pts_client::read_env('SKIP_TESTING_SUBSYSTEMS') ? pts_strings::comma_explode(strtolower(pts_client::read_env('SKIP_TESTING_SUBSYSTEMS'))) : false;
	$display_driver = phodevi::read_property('system', 'display-driver');
	$gpu = phodevi::read_name('gpu');

	if ($test_profile->is_supported(false) == false)
	{
		$valid_test_profile = false;
	}
	else
	{
		if ($test_type == 'Graphics' && pts_client::read_env('DISPLAY') == false && pts_client::read_env('WAYLAND_DISPLAY') == false && phodevi::is_windows() == false && phodevi::is_macosx() == false)
		{
			$report_errors && pts_client::$display->test_run_error('No display server was found, cannot run ' . $test_profile);
			$valid_test_profile = false;
		}
		else
		{
			if ($test_type == 'Graphics' && in_array($display_driver, array('vesa', 'nv', 'cirrus')) && stripos($gpu, 'LLVM') === false)
			{
				// These display drivers end up being in known configurations without 3D hardware support, so unless an LLVM-based string is reported as the GPU, don't advertise 3D tests
				$report_errors && pts_client::$display->test_run_error('3D acceleration support not available, cannot run ' . $test_profile);
				$valid_test_profile = false;
			}
			else
			{
				if ($test_type == 'Disk' && stripos(phodevi::read_property('system', 'filesystem'), 'SquashFS') !== false)
				{
					$report_errors && pts_client::$display->test_run_error('Running on a RAM-based live file-system, cannot run ' . $test_profile);
					$valid_test_profile = false;
				}
				else
				{
					if (pts_client::read_env('NO_' . strtoupper($test_type) . '_TESTS') || $skip_tests && (in_array($test_profile, $skip_tests) || in_array($test_type, $skip_tests) || in_array($test_profile->get_identifier(false), $skip_tests) || in_array($test_profile->get_identifier_base_name(), $skip_tests)))
					{
						$report_errors && pts_client::$display->test_run_error('Due to a pre-set environmental variable, skipping ' . $test_profile);
						$valid_test_profile = false;
					}
					else
					{
						if ($skip_test_subsystems && in_array(strtolower($test_profile->get_test_hardware_type()), $skip_test_subsystems))
						{
							$report_errors && pts_client::$display->test_run_error('Due to a pre-set environmental variable, skipping ' . $test_profile);
							$valid_test_profile = false;
						}
						else
						{
							if ($test_profile->is_root_required() && $this->batch_mode && phodevi::is_root() == false)
							{
								$report_errors && pts_client::$display->test_run_error('Cannot run ' . $test_profile . ' in batch mode as root access is required.');
								$valid_test_profile = false;
							}
						}
					}
				}
			}
		}
	}

	if ($valid_test_profile == false && pts_client::read_env('SKIP_ALL_TEST_SUPPORT_CHECKS'))
	{
		$report_errors && pts_client::$display->test_run_error('SKIP_ALL_TEST_SUPPORT_CHECKS is set for ' . $test_profile . '.');
		$valid_test_profile = true;
	}

	return $valid_test_profile;
}
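/*
	Illustrative call sequence (a sketch only, not part of the original source):
	the compatibility check gates whether a profile is attempted, the dependency
	installer prepares the system, and run_test() performs the timed runs and
	returns a pts_test_result_buffer_active on success or false on failure.
	The variable names, the grouping of these methods onto $run_manager /
	pts_test_execution, and the call sites shown are assumptions for the example.

	if ($run_manager->test_profile_system_compatibility_check($test_profile, true))
	{
		$profiles = array($test_profile);
		self::install_dependencies($profiles); // hypothetical call site for the dependency step

		$active_buffer = pts_test_execution::run_test($run_manager, $test_run_request);
		if ($active_buffer instanceof pts_test_result_buffer_active)
		{
			// Trial results were collected; calculate_end_result() has already folded them into $test_run_request
		}
	}
*/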