def _add_timeout_command(self, commands, max_test_runtime, expected_suite_runtime):
    """
    Add an evergreen command to override the default timeouts to the list of commands.

    :param commands: List of commands to add timeout command to.
    :param max_test_runtime: Maximum runtime of any test in the sub-suite.
    :param expected_suite_runtime: Expected runtime of the entire sub-suite.
    """
    repeat_factor = self.options.repeat_suites
    if max_test_runtime or expected_suite_runtime:
        cmd_timeout = CmdTimeoutUpdate()
        if max_test_runtime:
            timeout = calculate_timeout(max_test_runtime, 3) * repeat_factor
            LOGGER.debug("Setting timeout", timeout=timeout, max_runtime=max_test_runtime,
                         factor=repeat_factor)
            cmd_timeout.timeout(timeout)
        if expected_suite_runtime:
            exec_timeout = calculate_timeout(expected_suite_runtime, 3) * repeat_factor
            LOGGER.debug("Setting exec_timeout", exec_timeout=exec_timeout,
                         suite_runtime=expected_suite_runtime, factor=repeat_factor)
            cmd_timeout.exec_timeout(exec_timeout)
        commands.append(cmd_timeout.validate().resolve())
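calculate_timeout is called here (and in the snippets below) but is not defined in these excerpts. A minimal sketch of a compatible helper, assuming it scales the observed runtime by the given factor and enforces a floor; the MIN_TIMEOUT_SECS constant and the rounding behavior are assumptions, not the actual implementation:

import math

MIN_TIMEOUT_SECS = 5 * 60  # assumed floor so very short suites still get a workable timeout


def calculate_timeout(avg_runtime, scaling_factor):
    """Return a timeout in seconds: the runtime scaled by `scaling_factor`, never below the floor.

    Sketch only; the real helper may round or pad differently.
    """
    return max(MIN_TIMEOUT_SECS, int(math.ceil(avg_runtime * scaling_factor)))

With a 240-second max test runtime and a scaling factor of 3, this sketch yields max(300, 720) = 720 seconds, which the caller above then multiplies by the repeat factor.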
def generate_task(sub_suite_name, sub_task_name, max_test_runtime=None,
                  expected_suite_runtime=None):
    """Generate evergreen config for a resmoke task."""
    task_names.append(sub_task_name)
    spec = TaskSpec(sub_task_name)
    if options.use_large_distro:
        spec.distro(options.large_distro_name)
    task_specs.append(spec)
    task = evg_config.task(sub_task_name)

    target_suite_file = os.path.join(CONFIG_DIR, sub_suite_name)
    run_tests_vars = {
        "resmoke_args": "--suites={0}.yml {1}".format(target_suite_file, options.resmoke_args),
        "run_multiple_jobs": options.run_multiple_jobs,
        "task": options.task,
    }

    if options.resmoke_jobs_max:
        run_tests_vars["resmoke_jobs_max"] = options.resmoke_jobs_max

    commands = []
    if max_test_runtime or expected_suite_runtime:
        cmd_timeout = CmdTimeoutUpdate()
        if max_test_runtime:
            cmd_timeout.timeout(int(math.ceil(max_test_runtime * 3)))
        if expected_suite_runtime:
            cmd_timeout.exec_timeout(int(math.ceil(expected_suite_runtime * 3)))
        commands.append(cmd_timeout.validate().resolve())
    commands += [
        CommandDefinition().function("do setup"),
        CommandDefinition().function("run generated tests").vars(run_tests_vars),
    ]
    task.dependency(TaskDependency("compile")).commands(commands)
def _generate_timeouts(options, commands, test, task_avg_test_runtime_stats):
    """
    Add timeout.update command to list of commands for a burn in execution task.

    :param options: Command line options.
    :param commands: List of commands for a burn in execution task.
    :param test: Test name.
    :param task_avg_test_runtime_stats: Teststat data.
    """
    if task_avg_test_runtime_stats:
        avg_test_runtime = _parse_avg_test_runtime(test, task_avg_test_runtime_stats)
        if avg_test_runtime:
            cmd_timeout = CmdTimeoutUpdate()
            LOGGER.debug("Avg test runtime", test=test, runtime=avg_test_runtime)

            timeout = _calculate_timeout(avg_test_runtime)
            cmd_timeout.timeout(timeout)

            exec_timeout = _calculate_exec_timeout(options, avg_test_runtime)
            cmd_timeout.exec_timeout(exec_timeout)

            commands.append(cmd_timeout.validate().resolve())
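_parse_avg_test_runtime is also not included in these excerpts. A plausible shape for it, assuming each stats entry exposes test_name and runtime attributes (both attribute names are assumptions):

def _parse_avg_test_runtime(test, task_avg_test_runtime_stats):
    """Return the average runtime recorded for `test`, or None if it has no history.

    Sketch under assumed attribute names; the real stats objects may differ.
    """
    for test_stats in task_avg_test_runtime_stats:
        if test_stats.test_name == test:
            return test_stats.runtime
    return None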
def _add_timeout_command(commands, max_test_runtime, expected_suite_runtime):
    """Append a timeout.update command when runtime estimates are available."""
    if max_test_runtime or expected_suite_runtime:
        cmd_timeout = CmdTimeoutUpdate()
        if max_test_runtime:
            # Allow each test three times its longest observed runtime.
            cmd_timeout.timeout(calculate_timeout(max_test_runtime, 3))
        if expected_suite_runtime:
            # Allow the whole suite three times its expected runtime.
            cmd_timeout.exec_timeout(calculate_timeout(expected_suite_runtime, 3))
        commands.append(cmd_timeout.validate().resolve())
def test_params(self):
    """Both timeout values should appear in the resolved command's params."""
    c = CmdTimeoutUpdate()
    c.exec_timeout(100)
    c.timeout(60)

    p = params(c)

    assert 100 == p["exec_timeout_secs"]
    assert 60 == p["timeout_secs"]
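The params helper used in this test is not shown. One way it could be written, assuming the resolved CommandDefinition can be serialized to a dict via to_map() and that the timeout values live under a "params" key (both are assumptions about the shrub API surface):

def params(cmd):
    """Resolve the timeout command and return its params dict (test helper; assumed shape)."""
    return cmd.validate().resolve().to_map()["params"]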
def cmd(self):
    """Create a command that sets timeouts as specified."""
    if not self.use_defaults:
        timeout_cmd = CmdTimeoutUpdate()
        if self.timeout:
            timeout_cmd.timeout(self.timeout)

        if self.exec_timeout:
            timeout_cmd.exec_timeout(self.exec_timeout)

        return timeout_cmd.validate().resolve()

    return None
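This cmd() method reads use_defaults, timeout, and exec_timeout off self, but the enclosing class is not part of these excerpts. A minimal container consistent with those attribute reads; the class name TimeoutInfo, its NamedTuple base, and its defaults are assumptions:

from typing import NamedTuple, Optional


class TimeoutInfo(NamedTuple):
    """Holds optional timeout overrides for a task; a sketch of the implied container."""

    use_defaults: bool
    exec_timeout: Optional[int] = None
    timeout: Optional[int] = None

With such a container (and cmd() attached to it), TimeoutInfo(use_defaults=False, timeout=600, exec_timeout=7200).cmd() would yield a resolved timeout.update command, while TimeoutInfo(use_defaults=True).cmd() would return None.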
def _add_timeout_command(self, commands, max_test_runtime, expected_suite_runtime):
    repeat_factor = self.options.repeat_suites
    if max_test_runtime or expected_suite_runtime:
        cmd_timeout = CmdTimeoutUpdate()
        if max_test_runtime:
            timeout = calculate_timeout(max_test_runtime, 3) * repeat_factor
            LOGGER.debug("Setting timeout to: %d (max=%d, repeat=%d)", timeout, max_test_runtime,
                         repeat_factor)
            cmd_timeout.timeout(timeout)
        if expected_suite_runtime:
            exec_timeout = calculate_timeout(expected_suite_runtime, 3) * repeat_factor
            LOGGER.debug("Setting exec_timeout to: %d (runtime=%d, repeat=%d)", exec_timeout,
                         expected_suite_runtime, repeat_factor)
            cmd_timeout.exec_timeout(exec_timeout)
        commands.append(cmd_timeout.validate().resolve())
def _add_timeout_command(self, commands, max_test_runtime, expected_suite_runtime):
    """
    Add an evergreen command to override the default timeouts to the list of commands.

    :param commands: List of commands to add timeout command to.
    :param max_test_runtime: Maximum runtime of any test in the sub-suite.
    :param expected_suite_runtime: Expected runtime of the entire sub-suite.
    """
    repeat_factor = self.options.repeat_suites
    if max_test_runtime or expected_suite_runtime:
        cmd_timeout = CmdTimeoutUpdate()
        if max_test_runtime:
            timeout = calculate_timeout(max_test_runtime, 3) * repeat_factor
            LOGGER.debug("Setting timeout to: %d (max=%d, repeat=%d)", timeout, max_test_runtime,
                         repeat_factor)
            cmd_timeout.timeout(timeout)
        if expected_suite_runtime:
            exec_timeout = calculate_timeout(expected_suite_runtime, 3) * repeat_factor
            LOGGER.debug("Setting exec_timeout to: %d (runtime=%d, repeat=%d)", exec_timeout,
                         expected_suite_runtime, repeat_factor)
            cmd_timeout.exec_timeout(exec_timeout)
        commands.append(cmd_timeout.validate().resolve())
def test_invalid_timeout(self):
    """A non-integer timeout value should be rejected."""
    c = CmdTimeoutUpdate()

    with pytest.raises(TypeError):
        c.timeout("hello")
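A companion check sketching the happy path for the same API; it relies only on calls already used in the snippets above, and the assertion that a resolved command is returned is an assumption:

def test_valid_timeout(self):
    """An integer timeout should validate and resolve without raising (sketch)."""
    c = CmdTimeoutUpdate()
    c.timeout(60)

    assert c.validate().resolve() is not None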