Best Phoronix Test Suite code snippet using pts_env::read
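pts_env::read() returns the value of the named environment variable override, or false when it is not set, which is what the checks throughout pts_test_run_manager.php assume. Three recurring consumption patterns from the source below, condensed into a minimal sketch (the local variable names here are illustrative only):

// Boolean-style override: a forced run count disables dynamic run-count handling
$use_dynamic_run_count = (pts_env::read('FORCE_TIMES_TO_RUN') == false);

// Numeric override, validated before use, with a fallback default
$total_loop_count = (($t = pts_env::read('TOTAL_LOOP_COUNT')) && is_numeric($t) && $t > 0) ? $t : 1;

// Comma-separated list override split into an array
$skip_tests = ($e = pts_env::read('SKIP_TESTS')) ? pts_strings::comma_explode($e) : false;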
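The increase_run_count_check() logic in the source below can also hand result validation off to an external script set via PhoronixTestSuite/Options/TestResultValidation/ExportResultsTo: the script is invoked with the buffered result values appended to its command line, and its exit status is read back as 0 = results are valid, 1 = run the test again, 2 = abandon testing and discard the results. A minimal sketch of such a hook script follows; the file name, argument parsing, and spread thresholds are assumptions for illustration, not anything shipped with the Phoronix Test Suite:

#!/usr/bin/env php
<?php
// validate-results.php: hypothetical ExportResultsTo hook.
// Exit 0 = accept the results, exit 1 = request another run, exit 2 = abandon and discard.
$raw = implode(' ', array_slice($argv, 1)); // values may arrive as separate args or as one delimited string
$values = array_values(array_filter(preg_split('/[\s:,]+/', $raw), 'is_numeric'));
if(count($values) < 2)
{
	exit(1); // too few samples to judge, ask for another run
}
$mean = array_sum($values) / count($values);
$relative_spread = (max($values) - min($values)) / ($mean != 0 ? abs($mean) : 1);
if($relative_spread > 0.5)
{
	exit(2); // wildly inconsistent results, discard them
}
else if($relative_spread > 0.1)
{
	exit(1); // noisy but possibly salvageable, run the test again
}
exit(0); // results look stable, accept them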
Source: pts_test_run_manager.php
...
public $test_run_success_counter = 0;
public $remove_tests_on_completion = false;

public function __construct($batch_mode = false, $auto_mode = false)
{
	$this->do_dynamic_run_count = pts_config::read_bool_config('PhoronixTestSuite/Options/TestResultValidation/DynamicRunCount', 'TRUE') && pts_env::read('FORCE_TIMES_TO_RUN') == false;
	$this->dynamic_run_count_on_length_or_less = 60; //pts_config::read_user_config('PhoronixTestSuite/Options/TestResultValidation/LimitIncreasingRunCountForTestsOverLength', 60);
	$this->dynamic_run_count_std_deviation_threshold = pts_config::read_user_config('PhoronixTestSuite/Options/TestResultValidation/StandardDeviationThreshold', 3.0);
	$this->dynamic_run_count_export_script = pts_config::read_user_config('PhoronixTestSuite/Options/TestResultValidation/ExportResultsTo', null);
	$this->drop_noisy_results = pts_config::read_bool_config('PhoronixTestSuite/Options/TestResultValidation/DropNoisyResults', 'FALSE');
	$this->result_file = new pts_result_file(null);
	if($batch_mode)
	{
		$this->set_batch_mode($batch_mode);
	}
	// 1/true is normal auto mode, 2 = auto + default benchmark mode
	$this->auto_mode = $auto_mode;
	$this->benchmark_log = new pts_logger(null, 'phoronix-test-suite-benchmark.log');
	$this->test_run_success_counter = 0;
	$this->remove_tests_on_completion = pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/RemoveTestInstallOnCompletion', 'FALSE') || pts_env::read('REMOVE_TESTS_ON_COMPLETION');
	pts_module_manager::module_process('__run_manager_setup', $this);
}
public function is_interactive_mode()
{
	return $this->auto_mode == false && $this->batch_mode == false;
}
public function do_dynamic_run_count()
{
	return $this->do_dynamic_run_count;
}
public function allow_test_cache_share()
{
	return $this->allow_test_cache_share;
}
public function disable_dynamic_run_count()
{
	$this->do_dynamic_run_count = false;
}
public function auto_upload_to_openbenchmarking($do = true)
{
	$this->auto_upload_to_openbenchmarking = ($do == true);
}
public function do_skip_post_execution_options()
{
	$this->skip_post_execution_options = true;
}
public function increase_run_count_check(&$test_run_request, &$active_result_buffer, $scheduled_times_to_run, $time_test_started = 0)
{
	// Returning false will not yield an extra test run, returning true will yield an additional run, and returning -1 will abort/not-save the current test result
	if(max($active_result_buffer->results) > 100)
	{
		// On some tests, due to handling bugs, "1" will get printed as a result when it clearly shouldn't be...
		// This check seeks to clear those out and then increase the run count
		// TODO XXX: this could potentially be expanded in the future, e.g. if a given result is 1000x off from the other results, just outright clear it?
		$was_hit = false;
		foreach($active_result_buffer->results as $i => $r)
		{
			if($r === 1)
			{
				// Clear out the bogus result
				unset($active_result_buffer->results[$i]);
				$was_hit = true; // note the hit so the run count gets bumped below
			}
		}
		if($was_hit)
		{
			return true;
		}
	}
	if($time_test_started && ($min_duration = pts_env::read('FORCE_MIN_DURATION_PER_TEST')) != false && is_numeric($min_duration) && $min_duration > 0)
	{
		// FORCE_MIN_DURATION_PER_TEST forces a test to keep running for at least the given amount of time (in minutes)
		$time_test_elapsed_so_far = microtime(true) - $time_test_started;
		if($time_test_elapsed_so_far < ($min_duration * 60))
		{
			return true;
		}
	}
	// Compute the average time taken per test run (in seconds)
	$avg_test_run_time = pts_math::arithmetic_mean($test_run_request->test_run_times);
	// First make sure this test doesn't take too long to run, where we don't want dynamic handling
	if(floor($avg_test_run_time / 60) > $this->dynamic_run_count_on_length_or_less)
	{
		// The default cut-off for skipping dynamic run counts is tests taking greater than 1 hour (60 minutes)
		// For tests taking an enormous amount of time, by default don't increase the run count...
		return false;
	}
	// Determine if the results are statistically significant, otherwise up the run count
	$std_dev = pts_math::percent_standard_deviation($active_result_buffer->results);
	if($std_dev >= $this->dynamic_run_count_std_deviation_threshold || pts_math::values_outside_three_sigma_limits($active_result_buffer->results))
	{
		static $test_run_pos; // keeps track of which test in the run queue we are at
		static $run_std_devs; // an array of the standard deviations recorded so far for the current test
		$times_already_ran = count($active_result_buffer->results); // times the test has run so far
		if($this->test_run_pos != $test_run_pos)
		{
			// We're now onto a new test, so clear out the array
			$test_run_pos = $this->test_run_pos;
			$run_std_devs = array();
		}
		$run_std_devs[$times_already_ran] = $std_dev;
		if($avg_test_run_time < 120)
		{
			// If the test run time is 2 minutes or less, safely use a 4x multiple for how many times to run for statistical accuracy...
			$maximum_times_to_run = $scheduled_times_to_run * 4;
		}
		else if($avg_test_run_time < 240)
		{
			// If the test run time is 4 minutes or less, safely use a 3x multiple for how many times to run for statistical accuracy...
			$maximum_times_to_run = $scheduled_times_to_run * 3;
		}
		else
		{
			// For longer-running tests, just consider going up to 2x the original expected run count
			$maximum_times_to_run = $scheduled_times_to_run * 2;
		}
		// If we haven't yet reached the computed maximum times to run, increase the count straight away
		if($times_already_ran < $maximum_times_to_run)
		{
			return true;
		}
		else if($times_already_ran < ($maximum_times_to_run + $scheduled_times_to_run))
		{
			// More aggressive determination of whether to keep increasing the run count beyond the expected maximum...
			$first_and_last_diff_in_deviation = abs(pts_arrays::first_element($run_std_devs) - pts_arrays::last_element($run_std_devs));
			// Keep increasing the run count if it looks to be helping...
			if($first_and_last_diff_in_deviation < (pts_arrays::first_element($run_std_devs) / 2))
			{
				// If we are at least making progress in the right direction, increase the run count some more
				return true;
			}
			// could add more checks and take better advantage of the array of data to better determine if it's still worth increasing
		}
	}
	// Check to see if there is an external/custom script to export the results to in determining whether results are valid
	if(($ex_file = $this->dynamic_run_count_export_script) != null && is_executable($ex_file) || is_executable(($ex_file = PTS_USER_PATH . $this->dynamic_run_count_export_script)))
	{
		$exit_status = trim(shell_exec($ex_file . ' ' . $active_result_buffer->get_values_as_string() . ' > /dev/null 2>&1; echo $?'));
		switch($exit_status)
		{
			case 1:
				// Run the test again
				return true;
			case 2:
				// Results are bad, abandon testing and do not record results
				return -1;
			case 0:
			default:
				// Return was 0 or something else, results are valid, or was some other exit status
				break;
		}
	}
	// see if we should be dropping noisy results
	if($this->drop_noisy_results && $std_dev > 25.0)
	{
		// drop the result from being saved as the noise level exceeds threshold
		return -1;
	}
	// No reason to increase the run count with none of the previous checks requesting otherwise
	return false;
}
protected function add_test_result_object(&$test_result)
{
	$hash = $test_result->get_comparison_hash(true, false);
	if(!isset($this->hashes_of_tests_to_run[$hash]))
	{
		if($test_result->test_profile->test_installation == false || $test_result->test_profile->test_installation->is_installed() == false)
		{
			// Test is not installed, see if should upgrade to new minor release
			$tp = pts_openbenchmarking_client::test_profile_newer_minor_version_available($test_result->test_profile);
			$tests_missing = array();
			if($tp && $this->cleanup_test_profile_valid($tp, $tests_missing, false))
			{
				pts_client::$display->display_interrupt_message('Using ' . $tp->get_identifier() . ' in place of ' .
					$test_result->test_profile->get_identifier());
				$current_overrides = $test_result->test_profile->get_override_values();
				$test_result->test_profile = $tp;
				$test_result->test_profile->set_override_values($current_overrides);
			}
		}
		if($this->validate_test_to_run($test_result->test_profile))
		{
			$this->hashes_of_tests_to_run[$hash] = $hash;
			$this->tests_to_run[] = $test_result;
		}
	}
}
public function get_tests_to_run()
{
	return $this->tests_to_run;
}
public function get_tests_to_run_identifiers()
{
	$identifiers = array();
	foreach($this->tests_to_run as &$test_run_request)
	{
		$identifiers[] = $test_run_request->test_profile->get_identifier();
	}
	$identifiers = array_unique($identifiers);
	return $identifiers;
}
public function get_estimated_run_time($index = -1)
{
	if($index == -1)
	{
		$index = $this->last_test_run_index;
	}
	$already_added = array();
	$estimated_time = 0;
	foreach(array_slice($this->tests_to_run, $index) as $test_run_request)
	{
		if($test_run_request->test_profile->has_test_options() == false && in_array($test_run_request->test_profile->get_identifier(), $already_added))
		{
			continue;
		}
		$estimated_time += $test_run_request->get_estimated_run_time();
		$already_added[] = $test_run_request->test_profile->get_identifier();
	}
	return $estimated_time;
}
public function get_percent_complete()
{
	return round($this->last_test_run_index / count($this->tests_to_run) * 100);
}
public function get_test_to_run($index)
{
	$this->last_test_run_index = $index;
	return is_numeric($index) && isset($this->tests_to_run[$index]) ? $this->tests_to_run[$index] : false;
}
public function get_test_count()
{
	return count($this->tests_to_run);
}
public function force_results_save()
{
	$this->force_save_results = true;
}
public function do_save_results()
{
	return $this->file_name != null;
}
public function get_file_name()
{
	return $this->file_name;
}
public function get_title()
{
	return $this->file_name_title;
}
public function get_results_identifier()
{
	return $this->results_identifier;
}
public function get_results_identifier_simplified()
{
	return pts_strings::simplify_string_for_file_handling($this->get_results_identifier());
}
public function get_description()
{
	return $this->run_description;
}
public function get_notes()
{
	return null; // TODO: Not Yet Implemented
}
public function get_internal_tags()
{
	return null;
}
public function get_reference_id()
{
	return null;
}
public function get_preset_environment_variables()
{
	return pts_module_manager::var_store_string();
}
public function result_already_contains_identifier()
{
	$contains = false;
	foreach($this->result_file->get_systems() as $s)
	{
		if($s->get_identifier() == $this->results_identifier)
		{
			$contains = true;
			break;
		}
	}
	return $contains;
}
public function set_save_name($save_name, $is_new_save = true)
{
	if(empty($save_name))
	{
		$save_name = date('Y-m-d-Hi', pts_client::current_time());
	}
	$this->file_name = self::clean_save_name($save_name, $is_new_save);
	$this->file_name_title = $save_name;
	$this->force_save_results = true;
	$this->result_file = new pts_result_file($this->file_name);
	$this->benchmark_log->log('SAVE IDENTIFIER: ' .
$this->file_name);351 $this->is_new_result_file = $this->result_file->get_system_count() == 0;352 return $this->file_name;353 }354 public function set_results_identifier($identifier)355 {356 $this->results_identifier = self::clean_results_identifier($identifier);357 }358 public function prompt_save_name()359 {360 if($this->file_name != null)361 {362 return $this->file_name;363 }364 // Prompt to save a file when running a test365 $save_name = null;366 if(($env = pts_env::read('TEST_RESULTS_NAME')))367 {368 $save_name = $env;369 //echo 'Saving Results To: ' . $proposed_name . PHP_EOL;370 }371 if(!$this->batch_mode || $this->batch_mode['PromptSaveName'])372 {373 $is_reserved_word = false;374 // Be of help to the user by showing recently saved test results375 if($save_name == null)376 {377 pts_tests::recently_saved_results(' ');378 }379 $save_name_length = $save_name != null ? strlen($save_name) : 0;380 while(empty($save_name) || ($is_reserved_word = pts_types::is_test_or_suite($save_name)) || $save_name_length > 126)381 {382 if($is_reserved_word)383 {384 echo PHP_EOL . 'The name of the saved file cannot be the same as a test/suite: ' . $save_name . PHP_EOL;385 $is_reserved_word = false;386 }387 if($save_name_length > 126)388 {389 echo PHP_EOL . 'The name of the saved file must have between 2 and 126 characters in length.' . PHP_EOL;390 }391 $prompt = ' Enter a name for the result file: ';392 if(function_exists('readline') && function_exists('readline_completion_function'))393 {394 pts_user_io::$readline_completion_possibilities = pts_tests::test_results_by_date();395 readline_completion_function(array('pts_user_io', 'readline_completion_handler'));396 $save_name = readline($prompt);397 }398 else399 {400 $save_name = pts_user_io::read_user_input($prompt);401 }402 }403 }404 return $this->set_save_name($save_name);405 }406 public function prompt_results_identifier()407 {408 if(!empty($this->results_identifier))409 {410 return $this->results_identifier;411 }412 // Prompt for a results identifier413 $results_identifier = null;414 $show_identifiers = array();415 $no_repeated_tests = true;416 if(!$this->is_new_result_file)417 {418 // Running on an already-saved result419 $current_identifiers = array();420 $current_hardware = array();421 $current_software = array();422 foreach($this->result_file->get_systems() as $s)423 {424 $current_hardware[] = $s->get_hardware();425 $current_software[] = $s->get_software();426 $current_identifiers[] = $s->get_identifier();427 }428 $hashes = array();429 foreach($this->result_file->get_result_objects() as $result)430 {431 $hashes[] = $result->get_comparison_hash(true, false);432 }433 foreach($this->tests_to_run as &$run_request)434 {435 if($run_request instanceof pts_test_result && in_array($run_request->get_comparison_hash(true, false), $hashes))436 {437 $no_repeated_tests = false;438 break;439 }440 }441 }442 else443 {444 // Fresh run445 $current_identifiers = array();446 $current_hardware = array();447 $current_software = array();448 }449 if((!$this->batch_mode || $this->batch_mode['PromptForTestIdentifier']) && !$this->auto_mode)450 {451 if(count($current_identifiers) > 0)452 {453 echo PHP_EOL . pts_client::cli_just_bold('Current Test Identifiers:') . PHP_EOL;454 echo pts_user_io::display_text_list($current_identifiers);455 echo PHP_EOL;456 }457 $times_tried = 0;458 do459 {460 if($times_tried == 0 && ($env_identifier = pts_env::read('TEST_RESULTS_IDENTIFIER')))461 {462 $results_identifier = isset($env_identifier) ? 
self::clean_results_identifier($env_identifier) : null;463 echo 'Test Identifier: ' . $results_identifier . PHP_EOL;464 }465 else466 {467 $prompt = ' Enter a unique name to describe this test run / configuration: ';468 if(function_exists('readline') && function_exists('readline_completion_function'))469 {470 pts_user_io::$readline_completion_possibilities = array_map(array('pts_strings', 'trim_search_query'), array_merge(phodevi::system_hardware(false), phodevi::system_software(false)));471 readline_completion_function(array('pts_user_io', 'readline_completion_handler'));472 $results_identifier = readline($prompt);473 }474 else475 {476 $results_identifier = pts_user_io::read_user_input($prompt);477 }478 $results_identifier = self::clean_results_identifier($results_identifier);479 }480 $times_tried++;481 $identifier_pos = (($p = array_search($results_identifier, $current_identifiers)) !== false ? $p : -1);482 }483 while((!$no_repeated_tests && $identifier_pos != -1) || (isset($current_hardware[$identifier_pos]) && $current_hardware[$identifier_pos] != phodevi::system_hardware(true)) || (isset($current_software[$identifier_pos]) && $current_software[$identifier_pos] != phodevi::system_software(true)));484 }485 else if(($env_identifier = pts_env::read('TEST_RESULTS_IDENTIFIER')))486 {487 $results_identifier = self::clean_results_identifier($env_identifier);488 }489 if(empty($results_identifier))490 {491 $results_identifier = $this->auto_generate_results_identifier();492 }493 $this->results_identifier = $results_identifier;494 return $this->results_identifier;495 }496 public function auto_generate_results_identifier()497 {498 // If the save result identifier is empty, try to come up with something based upon the tests being run.499 $results_identifier = null;500 $subsystem_r = array();501 $subsystems_to_test = $this->subsystems_under_test();502 if(!$this->is_new_result_file)503 {504 $result_file_intent = pts_result_file_analyzer::analyze_result_file_intent($this->result_file);505 if(is_array($result_file_intent) && $result_file_intent[0] != 'Unknown')506 {507 array_unshift($subsystems_to_test, $result_file_intent[0]);508 }509 }510 foreach($subsystems_to_test as $subsystem)511 {512 $components = pts_result_file_analyzer::system_component_string_to_array(phodevi::system_hardware(true) . ', ' . phodevi::system_software(true));513 if($subsystem != null && isset($components[$subsystem]))514 {515 $subsystem_name = trim(pts_strings::trim_search_query($components[$subsystem]));516 if(!empty($subsystem_name) && phodevi::is_vendor_string($subsystem_name) && !in_array($subsystem_name, $subsystem_r))517 {518 $subsystem_r[] = $subsystem_name;519 }520 if(isset($subsystem_r[2]) || isset($subsystem_name[19]))521 {522 break;523 }524 }525 }526 if(isset($subsystem_r[0]))527 {528 $results_identifier = implode(' - ', $subsystem_r);529 }530 if(empty($results_identifier) && !$this->batch_mode)531 {532 $results_identifier = phodevi::read_property('cpu', 'model') . ' - ' . phodevi::read_property('gpu', 'model') . ' - ' . phodevi::read_property('motherboard', 'identifier');533 }534 if(strlen($results_identifier) > 55)535 {536 $results_identifier = substr($results_identifier, 0, 54);537 $results_identifier = substr($results_identifier, 0, strrpos($results_identifier, ' '));538 }539 if(empty($results_identifier))540 {541 $results_identifier = date('Y-m-d H:i', pts_client::current_time());542 }543 $this->results_identifier = $results_identifier;544 $this->benchmark_log->log('RESULTS IDENTIFIER: ' . 
$results_identifier);545 return $results_identifier;546 }547 public static function clean_results_identifier($results_identifier)548 {549 $results_identifier = trim(pts_client::swap_variables($results_identifier, array('pts_test_run_manager', 'user_run_save_variables')));550 $results_identifier = pts_strings::remove_redundant(pts_strings::keep_in_string($results_identifier, pts_strings::CHAR_LETTER | pts_strings::CHAR_NUMERIC | pts_strings::CHAR_DASH | pts_strings::CHAR_UNDERSCORE | pts_strings::CHAR_COLON | pts_strings::CHAR_COMMA | pts_strings::CHAR_SLASH | pts_strings::CHAR_SPACE | pts_strings::CHAR_DECIMAL | pts_strings::CHAR_AT | pts_strings::CHAR_PLUS | pts_strings::CHAR_SEMICOLON | pts_strings::CHAR_EQUAL), ' ');551 return $results_identifier;552 }553 public function get_test_run_position()554 {555 return ($this->get_test_count() * ($this->loop_run_pos - 1)) + $this->test_run_pos + 1;556 }557 public function get_test_run_count_reported()558 {559 return $this->test_run_count;560 }561 public function call_test_runs()562 {563 // Create a lock564 $lock_path = pts_client::temporary_directory() . '/phoronix-test-suite.active';565 pts_client::create_lock($lock_path);566 if($this->pre_run_message != null)567 {568 pts_client::$display->display_interrupt_message($this->pre_run_message);569 }570 // Hook into the module framework571 self::$test_run_process_active = true;572 pts_module_manager::module_process('__pre_run_process', $this);573 pts_file_io::unlink(PTS_USER_PATH . 'halt-testing');574 pts_file_io::unlink(PTS_USER_PATH . 'skip-test');575 $continue_test_flag = true;576 $tests_to_run_count = $this->get_test_count();577 pts_client::$display->test_run_process_start($this);578 $total_loop_count = (($t = pts_env::read('TOTAL_LOOP_COUNT')) && is_numeric($t) && $t > 0) ? $t : 1;579 $total_loop_time = (($t = pts_env::read('TOTAL_LOOP_TIME')) && is_numeric($t) && $t > 9) ? ($t * 60) : -1;580 $loop_end_time = $total_loop_time != -1 ? (time() + $total_loop_time) : false;581 $this->test_run_count = ($tests_to_run_count * $total_loop_count);582 for($loop = 1; $loop <= $total_loop_count && $continue_test_flag; $loop++)583 {584 $this->loop_run_pos = $loop;585 for($i = 0; $i < $tests_to_run_count && $continue_test_flag; $i++)586 {587 $this->test_run_pos = $i;588 $continue_test_flag = $this->process_test_run_request($i);589 if($continue_test_flag === 'SKIP')590 {591 $continue_test_flag = true;592 continue;593 }594 if($this->remove_tests_on_completion)595 {596 // Remove the installed test if it's no longer needed in this run queue597 $this_test_profile_identifier = $this->get_test_to_run($this->test_run_pos)->test_profile->get_identifier();598 $still_in_queue = false;599 for($j = ($this->test_run_pos + 1); $j < $tests_to_run_count && $still_in_queue == false; $j++)600 {601 if($this->get_test_to_run($j)->test_profile->get_identifier() == $this_test_profile_identifier)602 {603 $still_in_queue = true;604 }605 }606 if($still_in_queue == false)607 {608 pts_tests::remove_installed_test($this->get_test_to_run($this->test_run_pos)->test_profile);609 }610 }611 if($loop_end_time)612 {613 if(time() > $loop_end_time)614 {615 $continue_test_flag = false;616 }617 else if($this->test_run_count == ($i + 1))618 {619 // There's still time remaining so increase the run count....620 $this->test_run_count += $tests_to_run_count;621 }622 }623 }624 }625 pts_file_io::unlink(PTS_SAVE_RESULTS_PATH . $this->get_file_name() . 
'/active.xml');626 foreach($this->tests_to_run as &$run_request)627 {628 // Remove cache shares629 foreach(pts_file_io::glob($run_request->test_profile->get_install_dir() . 'cache-share-*.pt2so') as $cache_share_file)630 {631 unlink($cache_share_file);632 }633 }634 pts_triggered_system_events::test_requested_queued_reboot_check();635 if($this->post_run_message != null)636 {637 pts_client::$display->display_interrupt_message($this->post_run_message);638 }639 self::$test_run_process_active = -1;640 pts_module_manager::module_process('__post_run_process', $this);641 pts_client::release_lock($lock_path);642 // Report any tests that failed to properly run643 if(pts_client::is_debug_mode() || $this->get_test_count() > 3)644 {645 if(count($this->failed_tests_to_run) > 0)646 {647 echo PHP_EOL . PHP_EOL . 'The following tests failed to properly run:' . PHP_EOL . PHP_EOL;648 foreach($this->failed_tests_to_run as &$run_request)649 {650 echo ' - ' . $run_request->test_profile->get_identifier() . ($run_request->get_arguments_description() != null ? ': ' . $run_request->get_arguments_description() : null) . PHP_EOL;651 }652 echo PHP_EOL;653 }654 }655 }656 public static function test_run_process_active()657 {658 return self::$test_run_process_active = true;659 }660 public function process_test_run_request($run_index)661 {662 $result = false;663 if($this->do_save_results())664 {665 $this->result_file->get_xml(PTS_SAVE_RESULTS_PATH . $this->get_file_name() . '/composite.xml');666 }667 if(is_object($run_index))668 {669 $test_run_request = $run_index;670 $run_index = 0;671 }672 else673 {674 $test_run_request = $this->get_test_to_run($run_index);675 }676 if($test_run_request == false)677 {678 return;679 }680 if($this->result_file->has_matching_test_and_run_identifier($test_run_request, $this->get_results_identifier()) && pts_env::read('TOTAL_LOOP_COUNT') == false && pts_env::read('TOTAL_LOOP_TIME') == false)681 {682 // There already is a match for this test in this particular result buffer683 // except if using one of the loop controls where it may be repeated...684 return true;685 }686 $skip_tests_with_args = ($e = pts_env::read('SKIP_TESTS_HAVING_ARGS')) ? pts_strings::comma_explode($e) : false;687 if($skip_tests_with_args)688 {689 foreach($skip_tests_with_args as $skip_test_if_arg_matches)690 {691 if(stripos($test_run_request->get_arguments_description(), $skip_test_if_arg_matches) !== false)692 {693 return true;694 }695 }696 }697 if(($run_index != 0 && count(pts_file_io::glob($test_run_request->test_profile->get_install_dir() . 'cache-share-*.pt2so')) == 0))698 {699 // Sleep for six seconds between tests by default700 sleep(6);701 }702 $this->benchmark_log->log('Executing Test: ' . $test_run_request->test_profile->get_identifier());703 $test_successful = pts_test_execution::run_test($this, $test_run_request);704 if(pts_file_io::unlink(PTS_USER_PATH . 'halt-testing'))705 {706 // Stop the testing process entirely707 return false;708 }709 else if(pts_file_io::unlink(PTS_USER_PATH . 
'skip-test'))710 {711 // Just skip the current test and do not save the results, but continue testing712 return 'SKIP';713 }714 else if(pts_env::read('LIMIT_ELAPSED_TEST_TIME') > 0 && (PTS_INIT_TIME + (pts_env::read('LIMIT_ELAPSED_TEST_TIME') * 60)) > time())715 {716 // Allocated amount of time has expired717 return false;718 }719 if($test_successful == false && $test_run_request->test_profile->get_identifier() != null)720 {721 $this->failed_tests_to_run[] = $test_run_request;722 }723 pts_module_manager::module_process('__post_test_run_process', $this->result_file);724 return true;725 }726 public static function process_json_report_attributes(&$test_run_request, $report_error = null)727 {728 // XXX : add to attributes JSON here729 $json_report_attributes = null;730 if(is_object($test_run_request->test_profile->test_installation))731 {732 if(($t = $test_run_request->test_profile->test_installation->get_compiler_data()))733 {734 $json_report_attributes['compiler-options'] = $t;735 }736 if(($t = $test_run_request->test_profile->test_installation->get_install_footnote()))737 {738 $json_report_attributes['install-footnote'] = $t;739 }740 }741 if(($t = $test_run_request->active->get_min_result()) != 0)742 {743 $json_report_attributes['min-result'] = $t;744 }745 if(($t = $test_run_request->active->get_max_result()) != 0)746 {747 $json_report_attributes['max-result'] = $t;748 }749 if(!empty($test_run_request->test_run_times))750 {751 $json_report_attributes['test-run-times'] = implode(':', $test_run_request->test_run_times);752 }753 if(!empty($report_error))754 {755 $json_report_attributes['error'] = $report_error;756 }757 return $json_report_attributes;758 }759 public static function clean_save_name($input, $is_new_save = true)760 {761 $input = pts_client::swap_variables($input, array('pts_test_run_manager', 'user_run_save_variables'));762 $input = pts_strings::remove_redundant(pts_strings::keep_in_string(str_replace(' ', '-', trim($input)), pts_strings::CHAR_LETTER | pts_strings::CHAR_NUMERIC | pts_strings::CHAR_DASH), '-');763 if($is_new_save)764 {765 $input = strtolower($input);766 }767 if(strlen($input) > 126)768 {769 $input = substr($input, 0, 126);770 }771 return $input;772 }773 public function initial_checks(&$to_run, $override_display_mode = false)774 {775 // Refresh the pts_client::$display in case we need to run in debug mode776 if(pts_client::$display == false || !(pts_client::$display instanceof pts_websocket_display_mode))777 {778 pts_client::init_display_mode($override_display_mode);779 }780 $to_run = pts_types::identifiers_to_objects($to_run);781 if($this->batch_mode && $this->batch_mode['Configured'] == false && !$this->auto_mode)782 {783 trigger_error('The batch mode must first be configured.' . PHP_EOL . 'To configure, run phoronix-test-suite batch-setup', E_USER_ERROR);784 return false;785 }786 if(!is_writable(pts_client::test_install_root_path()))787 {788 trigger_error('The test installation directory is not writable.' . PHP_EOL . 'Location: ' . pts_client::test_install_root_path(), E_USER_ERROR);789 return false;790 }791 $mount_options = phodevi::read_property('disk', 'mount-options');792 if(isset($mount_options['mount-options']) && strpos($mount_options['mount-options'], 'noexec') !== false)793 {794 trigger_error('The test installation directory is on a file-system mounted with the \'noexec\' mount option. Re-mount the file-system appropriately or change the Phoronix Test Suite user configuration file to point to an alternative mount point.' . PHP_EOL . 'Location: ' . 
pts_client::test_install_root_path(), E_USER_ERROR);795 return false;796 }797 // Cleanup tests to run798 if($this->cleanup_tests_to_run($to_run) == false)799 {800 return false;801 }802 else if(count($to_run) == 0)803 {804 //trigger_error('You must enter at least one test, suite, or result identifier to run.', E_USER_ERROR);805 return false;806 }807 return true;808 }809 public function pre_execution_process()810 {811 if($this->is_new_result_file || $this->result_already_contains_identifier() == false)812 {813 $this->result_file->set_title($this->file_name_title);814 $this->result_file->set_description($this->run_description);815 $this->result_file->set_notes($this->get_notes());816 $this->result_file->set_internal_tags($this->get_internal_tags());817 $this->result_file->set_reference_id($this->get_reference_id());818 $this->result_file->set_preset_environment_variables($this->get_preset_environment_variables());819 // TODO XXX JSON In null and notes820 $json_attr = $this->generate_json_system_attributes();821 $sys = new pts_result_file_system($this->results_identifier, phodevi::system_hardware(true), phodevi::system_software(true), $json_attr, pts_client::current_user(), null, date('Y-m-d H:i:s', pts_client::current_time()), PTS_VERSION, $this->result_file);822 $this->result_file->add_system($sys);823 }824 if($this->do_save_results())825 {826 pts_client::setup_test_result_directory($this->get_file_name());827 }828 }829 protected function generate_json_system_attributes()830 {831 $test_external_dependencies = array();832 $test_hardware_types = array();833 $test_internal_tags = array();834 foreach($this->tests_to_run as &$test_to_run)835 {836 $test_external_dependencies = array_merge($test_external_dependencies, $test_to_run->test_profile->get_external_dependencies());837 $test_internal_tags = array_merge($test_internal_tags, $test_to_run->test_profile->get_internal_tags());838 pts_arrays::unique_push($test_hardware_types, $test_to_run->test_profile->get_test_hardware_type());839 }840 return self::pull_test_notes(false, $test_external_dependencies, $test_internal_tags, $test_hardware_types);841 }842 public static function pull_test_notes($show_all = false, $test_external_dependencies = array(), $test_internal_tags = array(), $test_hardware_types = array())843 {844 $notes = null;845 if($show_all || in_array('build-utilities', $test_external_dependencies))846 {847 // So compiler tests were run....848 $test = false;849 $compiler_mask_dir = pts_test_installer::create_compiler_mask($test);850 if($compiler_mask_dir && is_executable($compiler_mask_dir . 'cc'))851 {852 $compiler_configuration = phodevi_system::sw_compiler_build_configuration($compiler_mask_dir . 
'cc');853 pts_file_io::delete($compiler_mask_dir, null, true);854 if(!empty($compiler_configuration))855 {856 $notes['compiler-configuration'] = $compiler_configuration;857 }858 }859 }860 if($show_all || in_array('OpenCL', $test_internal_tags) || in_array('opencl', $test_external_dependencies))861 {862 // So OpenCL tests were run....863 $gpu_compute_cores = phodevi::read_property('gpu', 'compute-cores');864 if($gpu_compute_cores > 0)865 {866 $notes['graphics-compute-cores'] = $gpu_compute_cores;867 }868 }869 if($show_all || in_array('Disk', $test_hardware_types))870 {871 // A disk test was run so report some disk information...872 $disk_scheduler = phodevi::read_property('disk', 'scheduler');873 if($disk_scheduler)874 {875 $notes['disk-scheduler'] = $disk_scheduler;876 }877 $mount_options = phodevi::read_property('disk', 'mount-options');878 if(isset($mount_options['mount-options']) && $mount_options['mount-options'] != null)879 {880 $notes['disk-mount-options'] = $mount_options['mount-options'];881 }882 $extra = phodevi::read_property('disk', 'extra-disk-details');883 if($extra != null)884 {885 $notes['disk-details'] = $extra;886 }887 }888 if(true || $show_all || in_array('Processor', $test_hardware_types) || in_array('System', $test_hardware_types))889 {890 // makes sense always reporting the CPU scaling governor891 $scaling_governor = phodevi::read_property('cpu', 'scaling-governor');892 if($scaling_governor)893 {894 $notes['cpu-scaling-governor'] = $scaling_governor;895 }896 $cpu_microcode = phodevi::read_property('cpu', 'microcode-version');897 if($cpu_microcode)898 {899 $notes['cpu-microcode'] = $cpu_microcode;900 }901 if(phodevi::is_linux() && pts_client::executable_in_path('thermald') && (pts_client::is_process_running('thermald') || phodevi_linux_parser::systemctl_active('thermald')))902 {903 $thermald_version = trim(shell_exec('thermald --version 2>/dev/null'));904 if(!empty($thermald_version) && pts_strings::is_version($thermald_version))905 {906 $notes['cpu-thermald'] = $thermald_version;907 }908 }909 // POWER processors have configurable SMT, 1-8 per core.910 $smt = phodevi::read_property('cpu', 'smt');911 if($smt)912 {913 $notes['cpu-smt'] = $smt;914 }915 $cpu_pm = phodevi::read_property('cpu', 'power-management');916 if($cpu_pm)917 {918 $notes['cpu-pm'] = $cpu_pm;919 }920 $platform_profile = phodevi::read_property('system', 'platform-profile');921 if($platform_profile)922 {923 $notes['platform-profile'] = $platform_profile;924 }925 }926 if($show_all || in_array('Graphics', $test_hardware_types))927 {928 $accel_2d = phodevi::read_property('gpu', '2d-acceleration');929 if($accel_2d)930 {931 $notes['graphics-2d-acceleration'] = $accel_2d;932 }933 $aa = phodevi::read_property('gpu', 'aa-level');934 if($aa)935 {936 $notes['graphics-aa'] = $aa;937 }938 $af = phodevi::read_property('gpu', 'af-level');939 if($af)940 {941 $notes['graphics-af'] = $af;942 }943 $oc_offset = phodevi::read_property('gpu', 'oc-offset-string');944 if(!empty($oc_offset))945 {946 $notes['graphics-oc'] = $oc_offset;947 }948 $bar1_visible_vram_rebar = phodevi::read_property('gpu', 'bar1-visible-vram');949 if(!empty($bar1_visible_vram_rebar))950 {951 $notes['bar1-visible-vram'] = $bar1_visible_vram_rebar;952 }953 }954 if($show_all || phodevi::read_property('system', 'kernel-parameters'))955 {956 $notes['kernel-parameters'] = phodevi::read_property('system', 'kernel-parameters');957 }958 if($show_all || phodevi::read_property('system', 'kernel-extra-details'))959 {960 $notes['kernel-extra-details'] = 
phodevi::read_property('system', 'kernel-extra-details');961 }962 if($show_all || phodevi::read_property('system', 'environment-variables', false))963 {964 $notes['environment-variables'] = phodevi::read_property('system', 'environment-variables', false);965 }966 if($show_all || in_array('Java', $test_internal_tags) || in_array('java', $test_external_dependencies))967 {968 $notes['java'] = phodevi::read_property('system', 'java-version');969 }970 if($show_all || in_array('Python', $test_internal_tags) || in_array('python', $test_external_dependencies))971 {972 $notes['python'] = phodevi::read_property('system', 'python-version');973 }974 if(in_array('wine', $test_external_dependencies))975 {976 phodevi_system::$report_wine_override = true;977 }978 $notes['security'] = phodevi::read_property('system', 'security-features');979 foreach($notes as $key => $value)980 {981 if(empty($value))982 {983 unset($notes[$key]);984 }985 }986 return $notes;987 }988 public function post_execution_process()989 {990 $this->benchmark_log->log('Test Run Process Ended');991 if($this->do_save_results() && !$this->skip_post_execution_options)992 {993 // Save the results994 echo PHP_EOL;995 pts_module_manager::module_process('__event_results_process', $this);996 pts_client::save_test_result($this->get_file_name() . '/composite.xml', $this->result_file->get_xml(), true, $this->results_identifier);997 pts_module_manager::module_process('__event_results_saved', $this);998 if($this->test_run_success_counter == 0 && $this->is_new_result_file)999 {1000 return false;1001 }1002 }1003 if($this->test_run_success_counter > 3 && pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/ShowPostRunStatistics', 'TRUE'))1004 {1005 // Show any post run statistics1006 pts_module_manager::module_process('__event_post_run_stats', $this);1007 if($this->result_file->get_system_count() == 2)1008 {1009 $highlights = pts_result_file_analyzer::display_results_baseline_two_way_compare($this->result_file, true, false, true, ' ');1010 if($highlights)1011 {1012 echo ' ' . pts_client::cli_just_bold('Result Highlights') . PHP_EOL;1013 echo $highlights . PHP_EOL;1014 }1015 }1016 if($this->result_file->get_system_count() > 2)1017 {1018 // Display winners and losers1019 echo pts_result_file_analyzer::display_results_wins_losses($this->result_file, $this->get_results_identifier(), ' ') . PHP_EOL;1020 }1021 if($this->result_file->get_system_count() > 1)1022 {1023 echo pts_result_file_analyzer::display_result_file_stats_pythagorean_means($this->result_file, $this->get_results_identifier());1024 }1025 }1026 if($this->do_save_results() && !$this->skip_post_execution_options)1027 {1028 // See if the results should be displayed1029 //echo PHP_EOL . 'Results Saved To: ; . PTS_SAVE_RESULTS_PATH . $this->get_file_name() . ;/composite.xml' . 
PHP_EOL;1030 if(!$this->auto_mode)1031 {1032 if($this->batch_mode)1033 {1034 if($this->batch_mode['OpenBrowser'])1035 {1036 pts_client::display_result_view($this->result_file, true, null);1037 }1038 }1039 else1040 {1041 if(!phodevi::is_display_server_active() && !defined('PHOROMATIC_PROCESS'))1042 {1043 pts_client::display_result_view($this->result_file, false, 'Do you want to view the text results of the testing');1044 }1045 else1046 {1047 pts_client::display_result_view($this->result_file, false, '');1048 }1049 }1050 }1051 if($this->allow_sharing_of_results && pts_network::internet_support_available() && pts_openbenchmarking::ob_upload_support_available())1052 {1053 if($this->auto_upload_to_openbenchmarking || pts_openbenchmarking_client::auto_upload_results() || pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/AlwaysUploadResultsToOpenBenchmarking', 'FALSE'))1054 {1055 $upload_results = true;1056 }1057 else if($this->batch_mode)1058 {1059 $upload_results = $this->batch_mode['UploadResults'];1060 }1061 else if(!$this->auto_mode)1062 {1063 $upload_results = pts_user_io::prompt_bool_input('Would you like to upload the results to OpenBenchmarking.org', -1);1064 }1065 else1066 {1067 $upload_results = false;1068 }1069 if($upload_results)1070 {1071 $this->openbenchmarking_results_data = pts_openbenchmarking::upload_test_result($this, true, (!$this->auto_mode && !$this->batch_mode));1072 if($this->get_results_url())1073 {1074 if(!$this->auto_mode && !$this->batch_mode && pts_openbenchmarking_client::auto_upload_results() == false)1075 {1076 pts_client::display_web_page($this->get_results_url(), 'Do you want to launch OpenBenchmarking.org', true);1077 }1078 }1079 else1080 {1081 echo PHP_EOL . 'Results Failed To Upload.' . PHP_EOL;1082 }1083 }1084 }1085 }1086 }1087 public function get_results_url()1088 {1089 return isset($this->openbenchmarking_results_data['url']) ? $this->openbenchmarking_results_data['url'] : false;1090 }1091 public function set_batch_mode($custom_preset = false)1092 {1093 $this->batch_mode = array(1094 'UploadResults' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/UploadResults', 'TRUE'),1095 'SaveResults' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/SaveResults', 'TRUE'),1096 'PromptForTestDescription' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/PromptForTestDescription', 'FALSE'),1097 'RunAllTestCombinations' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/RunAllTestCombinations', 'TRUE'),1098 'PromptSaveName' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/PromptSaveName', 'FALSE'),1099 'PromptForTestIdentifier' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/PromptForTestIdentifier', 'TRUE'),1100 'Configured' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/Configured', 'FALSE'),1101 'OpenBrowser' => pts_config::read_bool_config('PhoronixTestSuite/Options/BatchMode/OpenBrowser', 'FALSE'),1102 );1103 if($custom_preset && is_array($custom_preset))1104 {1105 foreach($custom_preset as $key => $value)1106 {1107 $this->batch_mode[$key] = $value;1108 }1109 $this->batch_mode['Configured'] = true;1110 }1111 }1112 public function cleanup_test_profile_valid(&$test_profile, &$tests_missing, $check_for_new_on_fail = true)1113 {1114 if($test_profile->get_title() == null)1115 {1116 pts_client::$display->generic_sub_heading('Not A Test: ' . 
$test_profile);1117 return false;1118 }1119 else1120 {1121 if($test_profile->is_supported(false) == false)1122 {1123 return false;1124 }1125 if(!$test_profile->test_installation || $test_profile->test_installation->is_installed() == false)1126 {1127 // If the desired test version is not installed, see if a newer version in that release stream (minor version difference) is installed1128 if($check_for_new_on_fail)1129 {1130 $tp = pts_openbenchmarking_client::test_profile_newer_minor_version_available($test_profile);1131 if($tp && $this->cleanup_test_profile_valid($tp, $tests_missing, false))1132 {1133 return $tp;1134 }1135 }1136 if($test_profile->get_identifier(false) != null)1137 {1138 $tests_missing[] = $test_profile;1139 }1140 return false;1141 }1142 }1143 return $test_profile;1144 }1145 public function cleanup_tests_to_run(&$to_run_objects)1146 {1147 $skip_tests = ($e = pts_env::read('SKIP_TESTS')) ? pts_strings::comma_explode($e) : false;1148 $tests_verified = array();1149 $tests_missing = array();1150 foreach($to_run_objects as &$run_object)1151 {1152 if($skip_tests && (in_array($run_object->get_identifier(false), $skip_tests) || ($run_object instanceof pts_test_profile && in_array($run_object->get_identifier_base_name(), $skip_tests))))1153 {1154 pts_client::$display->generic_sub_heading('Skipping: ' . $run_object->get_identifier());1155 continue;1156 }1157 else if($run_object instanceof pts_test_profile)1158 {1159 $valid = $this->cleanup_test_profile_valid($run_object, $tests_missing);1160 if($valid == false)1161 {1162 continue;1163 }1164 // Set $valid to $run_object in case it's a newer version of the test profile that was upgraded1165 $run_object = $valid;1166 }1167 else if($run_object instanceof pts_result_file)1168 {1169 $num_installed = 0;1170 foreach($run_object->get_contained_test_profiles() as $test_profile)1171 {1172 $valid = $this->cleanup_test_profile_valid($test_profile, $tests_missing);1173 if($valid == false)1174 {1175 continue;1176 }1177 $num_installed++;1178 }1179 if($num_installed == 0)1180 {1181 continue;1182 }1183 }1184 else if($run_object instanceof pts_test_suite)1185 {1186 if($run_object->is_core_version_supported() == false)1187 {1188 pts_client::$display->generic_sub_heading($run_object->get_title() . ' is a suite not supported by this version of the Phoronix Test Suite.');1189 continue;1190 }1191 $num_installed = 0;1192 foreach($run_object->get_contained_test_profiles() as $test_profile)1193 {1194 $valid = $this->cleanup_test_profile_valid($test_profile, $tests_missing);1195 if($valid == false)1196 {1197 continue;1198 }1199 $num_installed++;1200 }1201 if($num_installed == 0)1202 {1203 continue;1204 }1205 }1206 else1207 {1208 pts_client::$display->generic_sub_heading('Not Recognized: ' . $run_object);1209 continue;1210 }1211 $tests_verified[] = $run_object;1212 }1213 $to_run_objects = $tests_verified;1214 if(count($tests_missing) > 0 && !defined('PHOROMATIC_PROCESS'))1215 {1216 $tests_missing = array_unique($tests_missing);1217 if(count($tests_missing) == 1)1218 {1219 trigger_error($tests_missing[0] . ' is not installed.', E_USER_ERROR);1220 // PHP_EOL . 'To install, run: phoronix-test-suite install ' . $tests_missing[0]1221 }1222 else1223 {1224 $message = PHP_EOL . PHP_EOL . 'Multiple tests are not installed:' . PHP_EOL . PHP_EOL;1225 $message .= pts_user_io::display_text_list($tests_missing);1226 //$message .= PHP_EOL . 'To install, run: phoronix-test-suite install ' . implode(' ', $tests_missing) . PHP_EOL . 
PHP_EOL;1227 echo $message;1228 }1229 if(!$this->batch_mode && !$this->auto_mode && pts_client::current_command() != 'benchmark')1230 {1231 $stop_and_install = pts_user_io::prompt_bool_input('Would you like to stop and install these tests now', true);1232 if($stop_and_install)1233 {1234 pts_test_installer::standard_install($tests_missing);1235 $to_run_objects = array_merge($to_run_objects, $tests_missing);1236 $this->cleanup_tests_to_run($to_run_objects);1237 }1238 }1239 }1240 return true;1241 }1242 public function auto_save_results($save_name, $result_identifier, $description = null, $is_new_save = false)1243 {1244 $this->set_save_name($save_name, $is_new_save);1245 $this->set_results_identifier($result_identifier);1246 $this->set_description($description);1247 }1248 public function set_description($description)1249 {1250 $this->run_description = $description == null ? self::auto_generate_description() : $description;1251 }1252 public function subsystems_under_test()1253 {1254 $subsystems_to_test = array();1255 foreach($this->tests_to_run as &$test_run_request)1256 {1257 pts_arrays::unique_push($subsystems_to_test, $test_run_request->test_profile->get_test_hardware_type());1258 }1259 return $subsystems_to_test;1260 }1261 protected function auto_generate_description()1262 {1263 $hw_components = array(pts_result_file_analyzer::system_component_string_to_array(phodevi::system_hardware(true)));1264 $sw_components = array(pts_result_file_analyzer::system_component_string_to_array(phodevi::system_software(true)));1265 if($this->is_new_result_file)1266 {1267 $existing_identifiers = array();1268 $hw_components = array();1269 $sw_components = array();1270 foreach($this->result_file->get_systems() as $s)1271 {1272 $hw_components[] = pts_result_file_analyzer::system_component_string_to_array($s->get_hardware());1273 $sw_components[] = pts_result_file_analyzer::system_component_string_to_array($s->get_software());1274 $existing_identifiers[] = $s->get_identifier();1275 }1276 $existing_identifier_count = count($existing_identifiers);1277 }1278 else1279 {1280 $existing_identifier_count = 0;1281 }1282 $auto_description = 'Running ' . implode(', ', array_unique($this->get_tests_to_run_identifiers()));1283 $subsystems_to_test = $this->subsystems_under_test();1284 // TODO: hook into $hw_components and $sw_components for leveraging existing result file data for comparisons already in existent1285 // dropped: count($subsystems_to_test) == 1 && $1286 if($existing_identifier_count == 0)1287 {1288 switch($subsystems_to_test)1289 {1290 case 'Graphics':1291 $auto_description = phodevi::read_property('gpu', 'model') . ' graphics testing with ' . phodevi::read_property('system', 'display-driver-string') . ' / ' . phodevi::read_property('system', 'opengl-driver');1292 break;1293 case 'Disk':1294 $auto_description = phodevi::read_name('disk') . ' testing on ' . phodevi::read_property('system', 'operating-system') . ' with a ' . phodevi::read_property('system', 'filesystem') . ' file-system';1295 break;1296 case 'Memory':1297 case 'Processor':1298 $auto_description = phodevi::read_property('cpu', 'model') . ' testing with a ' . phodevi::read_name('motherboard') . ' on ' . phodevi::read_property('system', 'operating-system');1299 break;1300 default:1301 if(phodevi::read_property('system', 'system-layer'))1302 {1303 // Virtualization, Wine testing...1304 $auto_description = phodevi::read_property('system', 'system-layer') . ' testing on ' . 
phodevi::read_property('system', 'operating-system');1305 }1306 else if(phodevi::read_name('motherboard') != null && phodevi::read_property('gpu', 'model') != null)1307 {1308 // Standard description1309 $auto_description = phodevi::read_property('cpu', 'model') . ' testing with a ' . phodevi::read_name('motherboard') . ' and ' . phodevi::read_property('gpu', 'model') . ' on ' . phodevi::read_property('system', 'operating-system');1310 }1311 else1312 {1313 // A virtualized environment or a BSD or other OS where not all hardware info is available...1314 $auto_description = phodevi::read_property('cpu', 'model') . ' testing on ' . phodevi::read_property('system', 'operating-system');1315 }1316 break;1317 }1318 }1319 else1320 {1321 if($this->is_new_result_file)1322 {1323 $result_file_intent = pts_result_file_analyzer::analyze_result_file_intent($this->result_file);1324 if(is_array($result_file_intent) && $result_file_intent[0] != 'Unknown')1325 {1326 $auto_description = 'A ' . $result_file_intent[0] . ' comparison';1327 }1328 }1329 }1330 $auto_description .= ' via the Phoronix Test Suite.';1331 return $auto_description;1332 }1333 public function save_results_prompt()1334 {1335 if(!$this->auto_mode)1336 {1337 pts_client::$display->generic_heading('System Information');1338 echo phodevi::system_centralized_view() . PHP_EOL;1339 }1340 if(($this->prompt_save_results || $this->force_save_results) && count($this->tests_to_run) > 0) // or check for DO_NOT_SAVE_RESULTS == false1341 {1342 if($this->force_save_results || pts_env::read('TEST_RESULTS_NAME'))1343 {1344 $save_results = true;1345 }1346 else if($this->batch_mode)1347 {1348 $save_results = $this->batch_mode['SaveResults'];1349 }1350 else if(pts_client::is_debug_mode())1351 {1352 $save_results = false;1353 }1354 else1355 {1356 $save_results = pts_user_io::prompt_bool_input('Would you like to save these test results', true);1357 }1358 if($save_results)1359 {1360 // Prompt Save File Name1361 $this->prompt_save_name();1362 // Prompt Identifier1363 $this->prompt_results_identifier();1364 if(!isset($this->run_description[16]) || strpos($this->run_description, 'via the Phoronix Test Suite') !== false)1365 {1366 // Write the auto-description if nothing is set or attempt to auto-detect if it was a previous auto-description saved1367 $this->run_description = self::auto_generate_description();1368 }1369 // Prompt Description1370 if(!$this->batch_mode || $this->batch_mode['PromptForTestDescription'])1371 {1372 if($this->run_description == null)1373 {1374 $this->run_description = 'N/A';1375 }1376 if(($td = pts_env::read('TEST_RESULTS_DESCRIPTION')) !== false)1377 {1378 $this->run_description = $td;1379 echo 'Test Description: ' . $this->run_description . PHP_EOL;1380 }1381 else if(!$this->auto_mode)1382 {1383 //echo PHP_EOL . 'Current Title: ' . $this->file_name_title . PHP_EOL;1384 pts_client::$display->generic_heading('If desired, enter a new description below to better describe this result set / system configuration under test.' . PHP_EOL . 'Press ENTER to proceed without changes.');1385 echo pts_client::cli_just_bold('Current Description: ') . $this->run_description . PHP_EOL . PHP_EOL . 
pts_client::cli_just_bold('New Description: ');1386 $new_test_description = pts_user_io::read_user_input();1387 if(!empty($new_test_description))1388 {1389 $this->run_description = $new_test_description;1390 }1391 }1392 }1393 }1394 }1395 }1396 public function load_tests_to_run(&$to_run_objects)1397 {1398 // Determine what to run1399 $unique_test_count = count(array_unique($to_run_objects));1400 $run_contains_a_no_result_type = false;1401 $request_results_save = false;1402 foreach($to_run_objects as &$run_object)1403 {1404 if($run_object instanceof pts_test_profile)1405 {1406 if($run_object->get_identifier() == null || $run_object->get_title() == null || $this->validate_test_to_run($run_object) == false)1407 {1408 continue;1409 }1410 if($run_contains_a_no_result_type == false && $run_object->get_display_format() == 'NO_RESULT')1411 {1412 $run_contains_a_no_result_type = true;1413 }1414 if($request_results_save == false && $run_object->do_auto_save_results())1415 {1416 $request_results_save = true;1417 }1418 foreach(self::test_prompts_to_result_objects($run_object) as $result_object)1419 {1420 $this->add_test_result_object($result_object);1421 }1422 }1423 else if($run_object instanceof pts_virtual_test_suite)1424 {1425 $virtual_suite_tests = $run_object->get_contained_test_profiles();1426 foreach(array_keys($virtual_suite_tests) as $i)1427 {1428 if($virtual_suite_tests[$i]->is_supported(false) == false || $this->validate_test_to_run($virtual_suite_tests[$i]) == false)1429 {1430 unset($virtual_suite_tests[$i]);1431 }1432 }1433 sort($virtual_suite_tests);1434 if(count($virtual_suite_tests) > 1)1435 {1436 $virtual_suite_tests[] = 'All Tests In Suite';1437 }1438 if(!$this->auto_mode && !$this->batch_mode)1439 {1440 $run_index = pts_user_io::prompt_text_menu('Select the tests in the virtual suite to run', $virtual_suite_tests, true, true);1441 }1442 else1443 {1444 $run_index = -1;1445 }1446 if((count($virtual_suite_tests) > 2 && is_array($run_index) && in_array((count($virtual_suite_tests) - 1), $run_index)) || $run_index == -1)1447 {1448 // The appended 'All Tests In Suite' was selected, so run all1449 }1450 else1451 {1452 foreach(array_keys($virtual_suite_tests) as $i)1453 {1454 if(!in_array($i, $run_index))1455 {1456 unset($virtual_suite_tests[$i]);1457 }1458 }1459 }1460 foreach($virtual_suite_tests as &$test_profile)1461 {1462 if($test_profile instanceof pts_test_profile)1463 {1464 // The user is to configure virtual suites manually1465 foreach(self::test_prompts_to_result_objects($test_profile) as $result_object)1466 {1467 $this->add_test_result_object($result_object);1468 }1469 }1470 }1471 }1472 else if($run_object instanceof pts_test_suite)1473 {1474 $this->pre_run_message = $run_object->get_pre_run_message();1475 $this->post_run_message = $run_object->get_post_run_message();1476 $tests_contained = $run_object->get_contained_test_result_objects();1477 if($this->prompt_to_test_subset() && !$this->auto_mode && !$this->batch_mode)1478 {1479 $this->prompt_subset_of_result_objects_to_run($tests_contained);1480 }1481 foreach($tests_contained as $result_object)1482 {1483 $this->add_test_result_object($result_object);1484 }1485 }1486 else if($run_object instanceof pts_result_file)1487 {1488 // Print the $to_run ?1489 $this->run_description = $run_object->get_description();1490 $preset_vars = $run_object->get_preset_environment_variables();1491 $result_objects = $run_object->get_result_objects();1492 $this->set_save_name($run_object->get_identifier(), false);1493 $this->file_name_title = 
$run_object->get_title();1494 pts_module_manager::process_environment_variables_string_to_set($preset_vars);1495 if($this->prompt_to_test_subset() && !$this->auto_mode && !$this->batch_mode)1496 {1497 $this->prompt_subset_of_result_objects_to_run($result_objects);1498 }1499 foreach($result_objects as &$result_object)1500 {1501 if($result_object->test_profile->get_identifier() == null)1502 {1503 continue;1504 }1505 // Check to ensure that nothing extra may have somehow wound up in the execution argument string of a saved result file...1506 if(pts_strings::has_in_string($result_object->get_arguments(), array('; ', '&&', '|')))1507 {1508 continue;1509 }1510 $test_result = new pts_test_result($result_object->test_profile);1511 $test_result->set_used_arguments($result_object->get_arguments());1512 $test_result->set_used_arguments_description($result_object->get_arguments_description());1513 $this->add_test_result_object($test_result);1514 }1515 }1516 else1517 {1518 trigger_error($run_object . ' is not recognized.', E_USER_ERROR);1519 continue;1520 }1521 }1522 // AutoSortRunQueue1523 if(pts_config::read_bool_config('PhoronixTestSuite/Options/Testing/AutoSortRunQueue', 'TRUE') && ($this->force_save_results == false || pts_env::read('TEST_EXECUTION_SORT')))1524 {1525 // Not that it matters much, but if $this->force_save_results is set that means likely running from a result file...1526 // so if running a result file, don't change the ordering of the existing results1527 // Sort the run order so that all tests that are similar are grouped together, etc1528 switch(strtolower(pts_env::read('TEST_EXECUTION_SORT')))1529 {1530 case 'none': // natural order1531 break;1532 case 'random':1533 shuffle($this->tests_to_run);1534 break;1535 case 'dependencies':1536 usort($this->tests_to_run, array('pts_test_run_manager', 'compare_result_objects_by_dependencies'));1537 break;1538 case 'test-estimated-time':1539 usort($this->tests_to_run, array('pts_test_run_manager', 'compare_result_objects_by_estimated_time'));1540 break;1541 case 'test-estimated-time-desc':1542 usort($this->tests_to_run, array('pts_test_run_manager', 'compare_result_objects_by_estimated_time'));1543 $this->tests_to_run = array_reverse($this->tests_to_run);1544 break;1545 case 'test':1546 usort($this->tests_to_run, array('pts_test_run_manager', 'compare_result_objects_by_test_identifier'));1547 break;1548 case 'default':1549 default:1550 usort($this->tests_to_run, array('pts_test_run_manager', 'compare_result_objects_by_subsystem_and_types'));1551 break;1552 }1553 }1554 $this->prompt_save_results = $run_contains_a_no_result_type == false || $unique_test_count > 1;1555 $this->force_save_results = $this->force_save_results || $request_results_save;1556 // Is there something to run?1557 return $this->get_test_count() > 0;1558 }1559 public function load_result_file_to_run($save_name, $result_identifier, &$result_file, $tests_to_complete = null)1560 {1561 // Determine what to run1562 $this->auto_save_results($save_name, $result_identifier);1563 $this->run_description = $result_file->get_description();1564 $result_objects = $result_file->get_result_objects();1565 // Unset result objects that shouldn't be run1566 if(is_array($tests_to_complete))1567 {1568 foreach(array_keys($result_objects) as $i)1569 {1570 if(!in_array($i, $tests_to_complete))1571 {1572 unset($result_objects[$i]);1573 }1574 }1575 }1576 if(count($result_objects) == 0)1577 {1578 return false;1579 }1580 foreach($result_objects as &$result_object)1581 {1582 
if($this->validate_test_to_run($result_object->test_profile))1583 {1584 // Check to ensure that nothing extra may have somehow wound up in the execution argument string of a saved result file...1585 if(pts_strings::has_in_string($result_object->get_arguments(), array('; ', '&&', '|')))1586 {1587 echo PHP_EOL . 'Exception loading a result object.' . PHP_EOL;1588 continue;1589 }1590 $test_result = new pts_test_result($result_object->test_profile);1591 $test_result->set_used_arguments($result_object->get_arguments());1592 $test_result->set_used_arguments_description($result_object->get_arguments_description());1593 $this->add_test_result_object($test_result);1594 }1595 }1596 // Is there something to run?1597 return $this->get_test_count() > 0;1598 }1599 public function is_multi_test_stress_run()1600 {1601 return $this->multi_test_stress_run;1602 }1603 protected function test_prompts_to_result_objects(&$test_profile)1604 {1605 $result_objects = array();1606 if($this->batch_mode && $this->batch_mode['RunAllTestCombinations'])1607 {1608 $opts = pts_test_run_options::batch_user_options($test_profile);1609 }1610 else if($this->batch_mode && (pts_env::read('PRESET_OPTIONS') || pts_env::read('PRESET_OPTIONS_VALUES')))1611 {1612 $opts = pts_test_run_options::prompt_user_options($test_profile, null, true);1613 }1614 else if($this->auto_mode == 2)1615 {1616 $opts = pts_test_run_options::default_user_options($test_profile);1617 }1618 else1619 {1620 $opts = pts_test_run_options::prompt_user_options($test_profile);1621 }1622 if($opts == false)1623 {1624 return array();1625 }1626 list($test_arguments, $test_arguments_description) = $opts;1627 foreach(array_keys($test_arguments) as $i)1628 {1629 $test_result = new pts_test_result($test_profile);1630 $test_result->set_used_arguments($test_arguments[$i]);1631 $test_result->set_used_arguments_description($test_arguments_description[$i]);1632 $result_objects[] = $test_result;1633 }1634 return $result_objects;1635 }1636 public function prompt_subset_of_result_objects_to_run(&$result_objects_contained)1637 {1638 $ros = array();1639 foreach($result_objects_contained as $key => $ro)1640 {1641 $ros[$key] = trim($ro->test_profile->get_title() . PHP_EOL . $ro->get_arguments_description());1642 }1643 $run_ids = pts_user_io::prompt_text_menu('Select the test(s) to run', $ros, true, true);1644 foreach($result_objects_contained as $id => $ro)1645 {1646 if(!in_array($id, $run_ids))1647 {1648 unset($result_objects_contained[$id]);1649 }1650 }1651 }1652 public function do_prompt_to_test_subset()1653 {1654 $this->test_subset = true;1655 }1656 public function prompt_to_test_subset()1657 {1658 return $this->test_subset;1659 }1660 public static function compare_result_objects_by_subsystem_and_types($a, $b)1661 {1662 $a_comp = $a->test_profile->get_test_hardware_type() . $a->test_profile->get_test_software_type() . $a->test_profile->get_internal_tags_raw() . $a->test_profile->get_result_scale_formatted() . $a->test_profile->get_identifier(true);1663 $b_comp = $b->test_profile->get_test_hardware_type() . $b->test_profile->get_test_software_type() . $b->test_profile->get_internal_tags_raw() . $b->test_profile->get_result_scale_formatted() . $b->test_profile->get_identifier(true);1664 if($a_comp == $b_comp)1665 {1666 // So it's the same test being compared... 
try to sort in ascending order (such that 800 x 600 resolution comes before 1024 x 768), below way is an attempt to recognize such in weird manner1667 if(strlen($a->get_arguments_description()) == strlen($b->get_arguments_description()))1668 {1669 return strcmp($a->get_arguments_description(), $b->get_arguments_description());1670 }1671 else1672 {1673 return strcmp(strlen($a->get_arguments_description()), strlen($b->get_arguments_description()));1674 }1675 }1676 return strcmp($a_comp, $b_comp);1677 }1678 public static function compare_result_objects_by_test_identifier($a, $b)1679 {1680 return strcmp($a->test_profile->get_identifier(), $b->test_profile->get_identifier());1681 }1682 public static function compare_result_objects_by_estimated_time($a, $b)1683 {1684 return $a->get_estimated_run_time() < $b->get_estimated_run_time() ? -1 : 1;1685 }1686 public static function compare_result_objects_by_dependencies($a, $b)1687 {1688 $a_exdeps = $a->test_profile->get_external_dependencies();1689 $b_exdeps = $a->test_profile->get_external_dependencies();1690 sort($a_exdeps);1691 sort($b_exdeps);1692 return strcmp(implode(' ', $a_exdeps), implode(' ', $b_exdeps));1693 }1694 public static function test_result_system_compatibility_check(&$test_result, $report_errors = false)1695 {1696 $error = null;1697 if(pts_test_run_options::validate_test_arguments_compatibility($test_result->get_arguments_description(), $test_result->test_profile, $error) == false)1698 {1699 if($report_errors)1700 {1701 self::test_pre_run_error($test_result->test_profile, '[' . $test_result->test_profile->get_identifier() . ' ' . $test_result->get_arguments_description() . '] ' . $error);1702 }1703 return false;1704 }1705 return true;1706 }1707 public static function test_profile_system_compatibility_check(&$test_profile, $report_errors = false, $is_batch_mode = false)1708 {1709 $valid_test_profile = true;1710 $test_type = $test_profile->get_test_hardware_type();1711 $skip_tests = pts_env::read('SKIP_TESTS') ? pts_strings::comma_explode(pts_env::read('SKIP_TESTS')) : false;1712 $skip_test_subsystems = pts_env::read('SKIP_TESTING_SUBSYSTEMS') ? pts_strings::comma_explode(strtolower(pts_env::read('SKIP_TESTING_SUBSYSTEMS'))) : false;1713 $display_driver = phodevi::read_property('system', 'display-driver');1714 $gpu = phodevi::read_name('gpu');1715 $test_error = null;1716 if($test_profile->is_supported(false, $test_error) == false)1717 {1718 $valid_test_profile = false;1719 }1720 else if($test_profile->is_display_required() && !phodevi::is_display_server_active())1721 {1722 $test_error = 'No display server was found, skipping ' . $test_profile;1723 $valid_test_profile = false;1724 }1725 else if($test_profile->is_network_required() && !pts_network::network_support_available())1726 {1727 $test_error = 'No network connection was found or is disabled, skipping ' . $test_profile;1728 $valid_test_profile = false;1729 }1730 else if($test_profile->is_internet_required() && !pts_network::internet_support_available())1731 {1732 $test_error = 'No Internet connection was found or is disabled, skipping ' . $test_profile;1733 $valid_test_profile = false;1734 }1735 else if($test_type == 'Graphics' && in_array($display_driver, array('vesa', 'nv', 'cirrus')) && stripos($gpu, 'LLVM') === false)1736 {1737 // These display drivers end up being in known configurations without 3D hardware support so unless an LLVM-based string is reported as the GPU, don't advertise 3D tests1738 $test_error = '3D acceleration support not available, skipping ' . 
$test_profile;1739 $valid_test_profile = false;1740 }1741 else if($test_type == 'Disk' && stripos(phodevi::read_property('system', 'filesystem'), 'SquashFS') !== false)1742 {1743 $test_error = 'Running on a RAM-based live file-system, skipping ' . $test_profile;1744 $valid_test_profile = false;1745 }1746 else if(($test_type != null && getenv('NO_' . strtoupper($test_type) . '_TESTS')) ||($skip_tests && (in_array($test_profile, $skip_tests) || in_array($test_type, $skip_tests) || in_array($test_profile->get_identifier(false), $skip_tests) || in_array($test_profile->get_identifier_base_name(), $skip_tests))))1747 {1748 $test_error = 'Due to a pre-set environment variable, skipping ' . $test_profile;1749 $valid_test_profile = false;1750 }1751 else if($skip_test_subsystems && in_array(strtolower($test_profile->get_test_hardware_type()), $skip_test_subsystems))1752 {1753 $test_error = 'Due to a pre-set environment variable, skipping ' . $test_profile;1754 $valid_test_profile = false;1755 }1756 else if($test_profile->is_root_required() && $is_batch_mode && phodevi::is_root() == false)1757 {1758 $test_error = 'Running in batch mode as a user but this test requires root access, skipping ' . $test_profile;1759 $valid_test_profile = false;1760 }1761 if($valid_test_profile == false && getenv('SKIP_ALL_TEST_SUPPORT_CHECKS'))1762 {1763 $test_error = 'SKIP_ALL_TEST_SUPPORT_CHECKS is set for ' . $test_profile;1764 $valid_test_profile = true;1765 }1766 if($report_errors && !empty($test_error))1767 {1768 self::test_pre_run_error($test_profile, $test_error);1769 }1770 return $valid_test_profile;1771 }1772 protected function validate_test_to_run(&$test_profile)1773 {1774 static $test_checks = null;1775 if(!isset($test_checks[$test_profile->get_identifier()]))1776 {1777 $valid_test_profile = true;1778 if(self::test_profile_system_compatibility_check($test_profile, true, $this->batch_mode) == false)1779 {1780 $valid_test_profile = false;1781 }1782 else if($test_profile->get_test_executable_dir() == null)1783 {1784 self::test_pre_run_error($test_profile, 'The test executable for ' . pts_client::cli_just_bold($test_profile) . ' could not be located. Looking for ' . pts_client::cli_just_bold($test_profile->get_test_executable()) . ' in ' . 
pts_client::cli_just_italic($test_profile->get_install_dir()));1785 $valid_test_profile = false;1786 }1787 if($valid_test_profile && $this->allow_sharing_of_results && $test_profile->allow_results_sharing() == false)1788 {1789 $this->allow_sharing_of_results = false;1790 }1791 $test_checks[$test_profile->get_identifier()] = $valid_test_profile;1792 }1793 return $test_checks[$test_profile->get_identifier()];1794 }1795 protected static function test_pre_run_error(&$test_profile, $error_msg)1796 {1797 pts_client::$display->test_run_error($error_msg);1798 $error_obj = array($test_profile, $error_msg);1799 pts_module_manager::module_process('__event_pre_run_error', $error_obj);1800 }1801 public function standard_run($to_run)1802 {1803 if($this->initial_checks($to_run) == false)1804 {1805 return false;1806 }1807 // Load the tests to run1808 if($this->load_tests_to_run($to_run) == false)1809 {1810 return false;1811 }1812 // Save results?1813 $this->save_results_prompt();1814 // Run the actual tests1815 $this->pre_execution_process();1816 $this->call_test_runs();1817 $this->post_execution_process();1818 }1819 public static function user_run_save_variables()1820 {1821 static $runtime_variables = null;1822 if($runtime_variables == null)1823 {1824 $runtime_variables = array(1825 'VIDEO_RESOLUTION' => phodevi::read_property('gpu', 'screen-resolution-string'),1826 'VIDEO_CARD' => phodevi::read_name('gpu'),1827 'VIDEO_DRIVER' => phodevi::read_property('system', 'display-driver-string'),1828 'OPENGL_DRIVER' => str_replace('(', '', phodevi::read_property('system', 'opengl-driver')),1829 'OPERATING_SYSTEM' => phodevi::read_property('system', 'operating-system'),1830 'PROCESSOR' => phodevi::read_name('cpu'),1831 'MOTHERBOARD' => phodevi::read_name('motherboard'),1832 'CHIPSET' => phodevi::read_name('chipset'),1833 'KERNEL_VERSION' => phodevi::read_property('system', 'kernel'),1834 'COMPILER' => phodevi::read_property('system', 'compiler'),1835 'HOSTNAME' => phodevi::read_property('system', 'hostname')1836 );1837 }1838 return $runtime_variables;1839 }1840}1841?>...
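The run manager above resolves its runtime tuning knobs through pts_env::read(), including TEST_EXECUTION_SORT, SKIP_TESTS, and SKIP_TESTING_SUBSYSTEMS. Below is a minimal sketch, not the pts-core implementation, of how the AutoSortRunQueue switch reacts to TEST_EXECUTION_SORT; it assumes pts-core is already loaded so that pts_env::read() resolves, and the queue identifiers are made-up examples.
<?php
// Simplified stand-in for the AutoSortRunQueue switch in load_tests_to_run() above.
// Assumes pts-core is loaded; the $queue identifiers are hypothetical.
$queue = array('pts/x264', 'pts/compress-gzip', 'pts/fio');
switch(strtolower((string) pts_env::read('TEST_EXECUTION_SORT')))
{
	case 'none':
		// natural order, leave the queue untouched
		break;
	case 'random':
		shuffle($queue);
		break;
	case 'test':
		sort($queue); // stand-in for sorting by test identifier
		break;
	default:
		sort($queue); // the real default groups tests by subsystem and result type
		break;
}
print_r($queue);
?>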
watchdog.php
Source:watchdog.php
...29 return array('WATCHDOG_SENSOR', 'WATCHDOG_SENSOR_THRESHOLD', 'WATCHDOG_MAXIMUM_WAIT');30 }31 public static function __run_manager_setup(&$test_run_manager)32 {33 $sensor_list = pts_strings::comma_explode(pts_env::read('WATCHDOG_SENSOR'));34 $to_monitor = array();35 // A LOT OF THIS CODE IN THIS FUNCTION PORTED OVER FROM system_monitor MODULE36 foreach($sensor_list as $sensor)37 {38 $sensor_split = pts_strings::trim_explode('.', $sensor);39 $type = &$sensor_split[0];40 $name = &$sensor_split[1];41 $parameter = &$sensor_split[2];42 if(empty($to_monitor[$type][$name]))43 {44 $to_monitor[$type][$name] = array();45 }46 if($parameter !== NULL)47 {48 $to_monitor[$type][$name][] = $parameter;49 }50 }51 foreach(phodevi::supported_sensors() as $sensor)52 {53 if(array_key_exists($sensor[0], $to_monitor) && array_key_exists($sensor[1], $to_monitor[$sensor[0]]))54 {55 $supported_devices = call_user_func(array($sensor[2], 'get_supported_devices'));56 $instance_no = 0;57 if($supported_devices === NULL)58 {59 self::create_single_sensor_instance($sensor, 0, NULL);60 }61 else62 {63 foreach($supported_devices as $device)64 {65 self::create_single_sensor_instance($sensor, $instance_no++, $device);66 }67 }68 }69 }70 // END OF PORTED CODE FROM system_monitor71 if(empty(self::$to_monitor))72 {73 echo PHP_EOL . 'UNLOADING WATCHDOG AS NO SENSORS TO MONITOR' . PHP_EOL;74 return pts_module::MODULE_UNLOAD;75 }76 $watchdog_threshold = pts_env::read('WATCHDOG_SENSOR_THRESHOLD');77 if(!is_numeric($watchdog_threshold) || $watchdog_threshold < 2)78 {79 echo PHP_EOL . 'UNLOADING WATCHDOG AS NO USEFUL DATA SET FOR WATCHDOG_SENSOR_THRESHOLD ENVIRONMENT VARIABLE' . PHP_EOL;80 return pts_module::MODULE_UNLOAD;81 }82 self::$monitor_threshold = $watchdog_threshold;83 echo PHP_EOL . pts_client::cli_just_bold('WATCHDOG ACTIVATED - TESTS WILL ABORT/DELAY IF ANY SENSOR CROSSES: ' . self::$monitor_threshold) . PHP_EOL;84 echo 'WATCHDOG MONITORING: ' . PHP_EOL;85 $monitors = array();86 foreach(self::$to_monitor as $sensor)87 {88 $monitors[] = array(strtoupper(phodevi::sensor_object_name($sensor)), phodevi::read_sensor($sensor), strtoupper(phodevi::read_sensor_object_unit($sensor)));89 }90 echo pts_user_io::display_text_table($monitors, ' ', 1) . PHP_EOL . PHP_EOL;91 $min_maximum_wait = pts_env::read('WATCHDOG_MAXIMUM_WAIT');92 if(is_numeric($min_maximum_wait) && $min_maximum_wait >= 1)93 {94 self::$maximum_wait = $min_maximum_wait;95 }96 echo PHP_EOL . pts_client::cli_just_bold('WATCHDOG WILL SLEEP SYSTEM UP TO ' . pts_strings::plural_handler(self::$maximum_wait, 'MINUTE') . ' IF/WHEN THRESHOLD BREACHED') . PHP_EOL;97 }98 public static function __pre_run_process()99 {100 self::check_watchdog();101 }102 public static function __pre_test_run()103 {104 self::check_watchdog();105 }106 public static function __interim_test_run()107 {108 self::check_watchdog();109 }110 protected static function check_watchdog()111 {112 foreach(self::$to_monitor as $sensor)113 {114 $val = phodevi::read_sensor($sensor);115 if($val > self::$monitor_threshold)116 {117 pts_client::$display->test_run_message(pts_client::cli_colored_text('Watchdog ' . phodevi::sensor_object_name($sensor) . ' Exceeded Threshold: ' . $val . ' ' . phodevi::read_sensor_object_unit($sensor), 'red', true));118 $freq_to_poll = 10;119 pts_client::$display->test_run_message(pts_client::cli_colored_text('Suspending testing; will wait up to ' . pts_strings::plural_handler(self::$maximum_wait, 'minute') . 
' to settle.', 'red', false));120 for($i = 0; $i < (self::$maximum_wait * 60); $i += $freq_to_poll)121 {122 sleep($freq_to_poll);123 if(phodevi::read_sensor($sensor) < self::$monitor_threshold)124 {125 pts_client::$display->test_run_message(pts_client::cli_colored_text('Watchdog Restoring Process: ' . phodevi::sensor_object_name($sensor) . ': ' . phodevi::read_sensor($sensor) . ' ' . phodevi::read_sensor_object_unit($sensor), 'green', true));126 return true;127 }128 }129 pts_client::$display->test_run_message('Watchdog waited ' . pts_strings::plural_handler(self::$maximum_wait, 'minute') . ' but ' . phodevi::sensor_object_name($sensor) . ' at ' . phodevi::read_sensor($sensor) . ' ' . phodevi::read_sensor_object_unit($sensor));130 //exit;131 }132 }133 }134 private static function create_single_sensor_instance($sensor, $instance, $param)135 {136 if(call_user_func(array($sensor[2], 'parameter_check'), $param) === true)137 {138 $sensor_object = new $sensor[2]($instance, $param);139 self::$to_monitor[] = $sensor_object;140 }141 }142}143?>...
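The watchdog module's entire configuration comes from the three environment variables it reads through pts_env::read(). The following is a minimal sketch with hypothetical values showing how those variables are interpreted, mirroring the checks in the module above: the sensor string is split on '.' into type/name/(optional) parameter, the threshold must be numeric and at least 2, and the maximum wait is a number of minutes.
<?php
// Hypothetical values only; the module itself resolves these through pts_env::read()
// and reads the actual sensor values through phodevi.
putenv('WATCHDOG_SENSOR=cpu.temp');        // <type>.<name>[.<parameter>]
putenv('WATCHDOG_SENSOR_THRESHOLD=85');    // pause/abort testing above this reading
putenv('WATCHDOG_MAXIMUM_WAIT=10');        // minutes to wait for the sensor to settle

$sensor_split = explode('.', getenv('WATCHDOG_SENSOR'));
$threshold = getenv('WATCHDOG_SENSOR_THRESHOLD');
if(is_numeric($threshold) && $threshold >= 2)
{
	echo 'Watchdog would monitor ' . $sensor_split[0] . ' ' . $sensor_split[1] . ' and hold testing above ' . $threshold . '.' . PHP_EOL;
}
else
{
	echo 'Watchdog would unload: no useful WATCHDOG_SENSOR_THRESHOLD set.' . PHP_EOL;
}
?>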
read
Using AI Code Generation
A minimal usage sketch for pts_env::read(), the static helper exercised throughout the pts_test_run_manager.php and watchdog.php excerpts above. It assumes pts-core is already loaded; the variable names and values shown are illustrative only.
<?php
// pts_env::read($name) returns the environment variable's value, or false when it is unset,
// so callers can test the result directly and fall back to their defaults.
$skip_tests = pts_env::read('SKIP_TESTS'); // e.g. launched with SKIP_TESTS=pts/x264,pts/fio exported
if($skip_tests != false)
{
	// Same comma-separated handling the compatibility check above applies before skipping tests.
	print_r(pts_strings::comma_explode($skip_tests));
}

// Unset variables come back as false, which is how the run manager falls back on its default
// queue ordering and how the watchdog module decides to unload itself.
if(pts_env::read('WATCHDOG_SENSOR') == false)
{
	echo 'WATCHDOG_SENSOR is not set, so the watchdog module would unload itself.' . PHP_EOL;
}
?>
Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.
You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.
Execute automation tests with pts_env.read on a cloud-based grid of 3000+ real browsers and operating systems, for both web and mobile applications.