def _generate_sub_task(self, mixed_version_config, task_index, suite, num_suites):
    """Generate a sub task to be run with the provided suite and mixed version config.

    :param mixed_version_config: Mixed version configuration appended to the task name.
    :param task_index: Index of this sub task among the generated sub tasks.
    :param suite: Resmoke suite the sub task should run.
    :param num_suites: Total number of sub tasks being generated.
    """
    # Create a sub task name appended with the task_index and build variant name.
    # Use a dedicated name for the string so it is not clobbered by the Task object
    # created below (the original reused `task` for both, unlike the newer variant
    # of this method elsewhere in this file).
    base_task_name = "{0}_{1}".format(self.task, mixed_version_config)
    sub_task_name = taskname.name_generated_task(base_task_name, task_index, num_suites,
                                                 self.options.variant)
    self.task_names.append(sub_task_name)
    self.task_specs.append(TaskSpec(sub_task_name))
    task = self.evg_config.task(sub_task_name)

    commands = [CommandDefinition().function("do setup")]
    # Fetch and download the proper mongod binaries before running multiversion tests.
    commands.append(CommandDefinition().function("do multiversion setup"))
    exclude_tags = "requires_fcv_44"
    # TODO(SERVER-43306): Remove --dryRun command line option once we start turning on
    # multiversion tests.
    run_tests_vars = {
        "resmoke_args":
            "{0} --suite={1} --mixedBinVersions={2} --excludeWithAnyTags={3} --dryRun=tests".
            format(self.options.resmoke_args, suite, mixed_version_config, exclude_tags),
        "task": self.task,
    }
    commands.append(CommandDefinition().function("run generated tests").vars(run_tests_vars))
    task.dependency(TaskDependency("compile")).commands(commands)
def _generate_sub_task(self, mixed_version_config, task, task_index, suite, num_suites,
                       burn_in_test=None):  # pylint: disable=too-many-arguments
    """
    Generate a sub task to be run with the provided suite and mixed version config.

    :param mixed_version_config: Mixed version configuration appended to the task name.
    :param task: Base name of the task being generated.
    :param task_index: Index of this sub task among the generated sub tasks.
    :param suite: Resmoke suite the sub task should run.
    :param num_suites: Total number of sub tasks being generated.
    :param burn_in_test: Optional burn-in test arguments appended to resmoke_args.
    """
    # Create a sub task name appended with the task_index and build variant name.
    task_name = "{0}_{1}".format(task, mixed_version_config)
    sub_task_name = taskname.name_generated_task(task_name, task_index, num_suites,
                                                 self.options.variant)
    self.task_names.append(sub_task_name)
    self.task_specs.append(TaskSpec(sub_task_name))
    task = self.evg_config.task(sub_task_name)
    # Burn-in runs report under the burn-in task name instead of this generator's task.
    gen_task_name = BURN_IN_TASK if burn_in_test is not None else self.task

    commands = [
        CommandDefinition().function("do setup"),
        # Fetch and download the proper mongod binaries before running multiversion tests.
        CommandDefinition().function("do multiversion setup")
    ]
    # TODO(SERVER-43306): Remove --dryRun command line option once we start turning on
    # multiversion tests.
    run_tests_vars = {
        "resmoke_args":
            "{0} --suite={1} --mixedBinVersions={2} --excludeWithAnyTags={3} ".format(
                self.options.resmoke_args, suite, mixed_version_config, EXCLUDE_TAGS),
        "task": gen_task_name,
    }

    if burn_in_test is not None:
        run_tests_vars["resmoke_args"] += burn_in_test

    commands.append(CommandDefinition().function("run generated tests").vars(run_tests_vars))
    task.dependency(TaskDependency("compile")).commands(commands)
def _generate_task(self, sub_suite_name, sub_task_name, max_test_runtime=None,
                   expected_suite_runtime=None):
    """Generate evergreen config for a resmoke task.

    :param sub_suite_name: Name of the suite file (joined under CONFIG_DIR) to run.
    :param sub_task_name: Name of the generated sub task.
    :param max_test_runtime: Historic longest single-test runtime, used for timeouts.
    :param expected_suite_runtime: Historic whole-suite runtime, used for timeouts.
    """
    spec = TaskSpec(sub_task_name)
    self._set_task_distro(spec)
    self.task_specs.append(spec)

    self.task_names.append(sub_task_name)
    task = self.evg_config.task(sub_task_name)
    target_suite_file = os.path.join(CONFIG_DIR, sub_suite_name)
    run_tests_vars = self._get_run_tests_vars(target_suite_file)

    commands = []
    # Timeout command is added first, before setup and run commands.
    self._add_timeout_command(commands, max_test_runtime, expected_suite_runtime)
    commands.append(CommandDefinition().function("do setup"))
    if self.options.use_multiversion:
        commands.append(CommandDefinition().function("do multiversion setup"))
    commands.append(CommandDefinition().function("run generated tests").vars(run_tests_vars))

    self._add_dependencies(task).commands(commands)
def all_tasks_legacy(tasks: List[GeneratedTask]) -> Configuration:
    """Build a legacy-style Configuration with one prepared/deploy/run/analyze task each."""
    config = Configuration()
    # Maybe make this a static/constant thing. It never changes.
    timeout_params = {"exec_timeout_secs": 86400, "timeout_secs": 7200}  # 24 hours
    for generated in tasks:
        prep_vars = {
            "test": generated.name,
            "auto_workload_path": generated.workload.relative_path,
        }
        if generated.mongodb_setup:
            prep_vars["setup"] = generated.mongodb_setup

        steps = [
            CommandDefinition().command("timeout.update").params(timeout_params),
            CommandDefinition().function("prepare environment").vars(prep_vars),
            CommandDefinition().function("deploy cluster"),
            CommandDefinition().function("run test"),
            CommandDefinition().function("analyze"),
        ]
        task_config = config.task(generated.name)
        task_config.priority(5)
        task_config.commands(steps)
    return config
def create_generate_tasks_file(options, tests_by_task):
    """Create the Evergreen generate.tasks file.

    :param options: Command-line options (buildvariant, distro, repeat options, output file).
    :param tests_by_task: Dict mapping task name to its tests and resmoke configuration.
    """
    evg_config = Configuration()
    task_specs = []
    task_names = [BURN_IN_TESTS_GEN_TASK]
    for task in sorted(tests_by_task):
        multiversion_path = tests_by_task[task].get("use_multiversion")
        for test_num, test in enumerate(tests_by_task[task]["tests"]):
            # One generated sub task per test file.
            sub_task_name = _sub_task_name(options.buildvariant, task, test_num)
            task_names.append(sub_task_name)
            evg_sub_task = evg_config.task(sub_task_name)
            evg_sub_task.dependency(TaskDependency("compile"))
            task_spec = TaskSpec(sub_task_name)
            if options.distro:
                task_spec.distro(options.distro)
            task_specs.append(task_spec)
            run_tests_vars = {
                "resmoke_args":
                    "{} {} {}".format(tests_by_task[task]["resmoke_args"],
                                      get_resmoke_repeat_options(options), test),
            }
            commands = []
            commands.append(CommandDefinition().function("do setup"))
            if multiversion_path:
                run_tests_vars["task_path_suffix"] = multiversion_path
                commands.append(CommandDefinition().function("do multiversion setup"))
            commands.append(CommandDefinition().function("run tests").vars(run_tests_vars))
            evg_sub_task.commands(commands)

    # Group all generated sub tasks under a single display task on the run build variant.
    display_task = DisplayTaskDefinition(BURN_IN_TESTS_TASK).execution_tasks(task_names)
    evg_config.variant(_get_run_buildvariant(options)).tasks(task_specs).display_task(display_task)

    _write_json_file(evg_config.to_map(), options.generate_tasks_file)
def test_variants_can_be_added(self):
    """A single variant and a list of variants should all be recorded."""
    command = CommandDefinition()
    command.variant("variant 0").variants(["variant 1", "variant 2"])

    recorded = command.to_map()["variants"]
    for expected in ("variant 0", "variant 1", "variant 2"):
        assert expected in recorded
def test_variables_can_be_added(self):
    """Variables added via var() and vars() should both be recorded."""
    command = CommandDefinition()
    command.var("x", 5).vars({"y": 6, "z": 7})

    recorded = command.to_map()["vars"]
    assert recorded["x"] == 5
    assert recorded["y"] == 6
    assert recorded["z"] == 7
def test_parameters_can_be_added(self):
    """Parameters added via param() and params() should both be recorded."""
    command = CommandDefinition()
    command.param("x", 5).params({"y": 6, "z": 7})

    recorded = command.to_map()["params"]
    assert recorded["x"] == 5
    assert recorded["y"] == 6
    assert recorded["z"] == 7
def test_flat_values_in_map(self):
    """Scalar attributes should map to their expected top-level keys."""
    command = (CommandDefinition().function("function name").type("test")
               .name("display name").command("command name").timeout(300))

    mapped = command.to_map()
    assert mapped["func"] == "function name"
    assert mapped["type"] == "test"
    assert mapped["display_name"] == "display name"
    assert mapped["command"] == "command name"
    assert mapped["timeout_secs"] == 300
def _generate_evg_tasks(options):
    """
    Generate an evergreen configuration for fuzzers based on the options given.

    :param options: task options.
    :return: An evergreen configuration.
    """
    evg_config = Configuration()

    task_names = []
    task_specs = []

    for task_index in range(options.num_tasks):
        name = taskname.name_generated_task(options.name, task_index, options.num_tasks,
                                            options.variant)
        task_names.append(name)
        task_specs.append(TaskSpec(name))
        task = evg_config.task(name)

        commands = [CommandDefinition().function("do setup")]
        if options.use_multiversion:
            commands.append(CommandDefinition().function("do multiversion setup"))
        commands.append(CommandDefinition().function("setup jstestfuzz"))
        commands.append(CommandDefinition().function("run jstestfuzz").vars({
            "jstestfuzz_vars":
                "--numGeneratedFiles {0} {1}".format(options.num_files, options.jstestfuzz_vars),
            "npm_command": options.npm_command
        }))
        run_tests_vars = {
            "continue_on_failure": options.continue_on_failure,
            "resmoke_args": options.resmoke_args,
            "resmoke_jobs_max": options.resmoke_jobs_max,
            "should_shuffle": options.should_shuffle,
            "task_path_suffix": options.use_multiversion,
            "timeout_secs": options.timeout_secs,
        }
        commands.append(CommandDefinition().function("run tests").vars(run_tests_vars))
        task.dependency(TaskDependency("compile")).commands(commands)

    # Group the generated tasks, plus the "<name>_gen" task itself, under a display task.
    dt = DisplayTaskDefinition(options.name).execution_tasks(task_names)\
        .execution_task("{0}_gen".format(options.name))
    evg_config.variant(options.variant).tasks(task_specs).display_task(dt)
    return evg_config
def construct_all_tasks_json():
    """
    Construct the JSON representation of tasks for all workloads.

    Scans the /src/workloads directory relative to the genny root and creates one task
    per .yml workload file found.

    :return: json representation of tasks for all workloads in the /src/workloads
             directory relative to the genny root.
    """
    c = Configuration()

    workload_dir = '{}/src/workloads'.format(get_project_root())
    all_workloads = glob.glob('{}/**/*.yml'.format(workload_dir), recursive=True)
    # Keep only the path relative to the workloads directory.
    all_workloads = [s.split('/src/workloads/')[1] for s in all_workloads]

    for fname in all_workloads:
        basename = os.path.basename(fname)
        base_parts = os.path.splitext(basename)
        if base_parts[1] != '.yml':
            # Not a .yml workload file, ignore it.
            continue
        task_name = to_snake_case(base_parts[0])
        t = c.task(task_name)
        t.priority(5)  # The default priority in system_perf.yml

        prepare_environment_vars = {
            'test': task_name,
            'auto_workload_path': fname
        }
        full_filename = '{}/src/workloads/{}'.format(get_project_root(), fname)
        with open(full_filename, 'r') as handle:
            # Best effort: a workload whose YAML cannot be parsed still gets a task,
            # just without any AutoRun-specified environment overrides. (Original bound
            # the exception to an unused name; the swallow itself is deliberate.)
            try:
                workload_dict = yaml.safe_load(handle)
                autorun_spec = AutoRunSpec.create_from_workload_yaml(workload_dict)
                if autorun_spec is not None and autorun_spec.prepare_environment_with is not None:
                    prepare_environment_vars.update(autorun_spec.prepare_environment_with)
            except Exception:  # pylint: disable=broad-except
                pass

        t.commands([
            CommandDefinition().function('prepare environment').vars(prepare_environment_vars),
            CommandDefinition().function('deploy cluster'),
            CommandDefinition().function('run test'),
            CommandDefinition().function('analyze'),
        ])

    return c.to_json()
def _cmd_by_name(cmd_name):
    """
    Create a command definition of a function with the given name.

    :param cmd_name: Name of function.
    :return: Command Definition for function.
    """
    definition = CommandDefinition()
    return definition.function(cmd_name)
def create_generate_tasks_config(evergreen_api, evg_config, options, tests_by_task, include_gen_task): """Create the config for the Evergreen generate.tasks file.""" # pylint: disable=too-many-locals task_specs = [] task_names = [] if include_gen_task: task_names.append(BURN_IN_TESTS_GEN_TASK) for task in sorted(tests_by_task): multiversion_path = tests_by_task[task].get("use_multiversion") task_avg_test_runtime_stats = _get_task_runtime_history( evergreen_api, options.project, task, options.buildvariant) for test_num, test in enumerate(tests_by_task[task]["tests"]): sub_task_name = _sub_task_name(options, task, test_num) task_names.append(sub_task_name) evg_sub_task = evg_config.task(sub_task_name) evg_sub_task.dependency(TaskDependency("compile")) task_spec = TaskSpec(sub_task_name) if options.distro: task_spec.distro(options.distro) task_specs.append(task_spec) run_tests_vars = { "resmoke_args": "{} {} {}".format(tests_by_task[task]["resmoke_args"], get_resmoke_repeat_options(options), test), } commands = [] _generate_timeouts(options, commands, test, task_avg_test_runtime_stats) commands.append(CommandDefinition().function("do setup")) if multiversion_path: run_tests_vars["task_path_suffix"] = multiversion_path commands.append( CommandDefinition().function("do multiversion setup")) commands.append( CommandDefinition().function("run tests").vars(run_tests_vars)) evg_sub_task.commands(commands) display_task = DisplayTaskDefinition(BURN_IN_TESTS_TASK).execution_tasks( task_names) evg_config.variant(_get_run_buildvariant(options)).tasks( task_specs).display_task(display_task) return evg_config
def test_functions(self):
    """Functions are created on demand; re-fetching by name returns the same one."""
    config = Configuration()
    first = config.function("func 0")
    config.function("func 1")
    config.function("func 2")

    first.add(CommandDefinition().function("f"))

    assert config.function("func 0").to_map()[0]["func"] == "f"
    assert len(config.to_map()["functions"]) == 3
def generate_task(sub_suite_name, sub_task_name, max_test_runtime=None,
                  expected_suite_runtime=None):
    """Generate evergreen config for a resmoke task."""
    task_names.append(sub_task_name)
    spec = TaskSpec(sub_task_name)
    if options.use_large_distro:
        spec.distro(options.large_distro_name)
    task_specs.append(spec)

    task = evg_config.task(sub_task_name)
    target_suite_file = os.path.join(CONFIG_DIR, sub_suite_name)

    run_tests_vars = {
        "resmoke_args": "--suites={0}.yml {1}".format(target_suite_file, options.resmoke_args),
        "run_multiple_jobs": options.run_multiple_jobs,
        "task": options.task,
    }
    if options.resmoke_jobs_max:
        run_tests_vars["resmoke_jobs_max"] = options.resmoke_jobs_max

    commands = []
    if max_test_runtime or expected_suite_runtime:
        # Historic runtimes are padded by 3x when turned into timeouts.
        timeout_cmd = CmdTimeoutUpdate()
        if max_test_runtime:
            timeout_cmd.timeout(int(math.ceil(max_test_runtime * 3)))
        if expected_suite_runtime:
            timeout_cmd.exec_timeout(int(math.ceil(expected_suite_runtime * 3)))
        commands.append(timeout_cmd.validate().resolve())

    commands.append(CommandDefinition().function("do setup"))
    commands.append(CommandDefinition().function("run generated tests").vars(run_tests_vars))

    task.dependency(TaskDependency("compile")).commands(commands)
def function(self, fn):
    """
    Append a function to the end of the command sequence.

    :param fn: function to append.
    :return: instance of task being modified.
    :raises TypeError: if fn is not a string.
    """
    if not isinstance(fn, str):
        raise TypeError("function only accepts a str")
    definition = CommandDefinition().function(fn)
    self._commands.add(definition)
    return self
def configure():
    """Build the evergreen configuration for the aggregation multiversion fuzzer tasks."""
    n_tasks = 10
    c = Configuration()

    task_names = []
    task_specs = []

    for i in range(n_tasks):
        # Zero-padded task names: aggregation_multiversion_fuzzer_000 ... _009.
        name = "aggregation_multiversion_fuzzer_{0:03d}".format(i)
        task_names.append(name)
        task_specs.append(TaskSpec(name))
        t = c.task(name)
        t.dependency(TaskDependency("compile")).commands([
            CommandDefinition().function("do setup"),
            CommandDefinition().function("do multiversion setup"),
            CommandDefinition().function("run jstestfuzz").vars({
                "jstestfuzz_var": "--numGeneratedFiles 5",
                "npm_command": "agg-fuzzer",
            }),
            CommandDefinition().function("run tests").vars({
                "continue_on_failure": "false",
                "resmoke_args": "--suites=generational_fuzzer",
                "should_shuffle": "false",
                "task_path_suffix": "false",
                "timeout_secs": "1800",
            })
        ])

    # Group the generated tasks under one display task on the linux-64 variant.
    dt = DisplayTaskDefinition("aggregation_multiversion_fuzzer")\
        .execution_tasks(task_names)
    c.variant("linux-64").tasks(task_specs).display_task(dt)

    return c
def teardown_group(self):
    """
    Create a new command definition and append it to the teardown group.

    The teardown group sequence is created lazily on first use.

    :return: command definition that was added.
    """
    if not self._teardown_group:
        self._teardown_group = CommandSequence()
    definition = CommandDefinition()
    self._teardown_group.add(definition)
    return definition
def all_tasks_modern(tasks: List[GeneratedTask]) -> Configuration:
    """Build a modern-style Configuration with one DSI workload task per generated task."""
    config = Configuration()
    config.exec_timeout(64800)  # 18 hours
    for generated in tasks:
        bootstrap = {
            "test_control": generated.name,
            "auto_workload_path": generated.workload.relative_path,
        }
        if generated.mongodb_setup:
            bootstrap["mongodb_setup"] = generated.mongodb_setup

        timeout_cmd = CommandDefinition().command("timeout.update").params(
            {"exec_timeout_secs": 86400, "timeout_secs": 7200})  # 24 hours
        run_cmd = CommandDefinition().function("f_run_dsi_workload").vars(bootstrap)

        task_config = config.task(generated.name)
        task_config.priority(5)
        task_config.commands([timeout_cmd, run_cmd])
    return config
def generate_task(sub_suite_name, sub_task_name):
    """Generate evergreen config for a resmoke task."""
    task_names.append(sub_task_name)
    task_specs.append(TaskSpec(sub_task_name))
    task = evg_config.task(sub_task_name)

    suite_file = os.path.join(CONFIG_DIR, sub_suite_name)
    run_tests_vars = {
        "resmoke_args": "--suites={0} {1}".format(suite_file, options.resmoke_args),
        "run_multiple_jobs": options.run_multiple_jobs,
    }
    if options.resmoke_jobs_max:
        run_tests_vars["resmoke_jobs_max"] = options.resmoke_jobs_max

    setup_cmd = CommandDefinition().function("do setup")
    run_cmd = CommandDefinition().function("run tests").vars(run_tests_vars)
    task.dependency(TaskDependency("compile")).commands([setup_cmd, run_cmd])
def construct_all_tasks_json():
    """
    :return: json representation of tasks for all workloads in the /src/workloads
             directory relative to the genny root.
    """
    c = Configuration()
    c.exec_timeout(64800)  # 18 hours

    workload_dir = '{}/src/workloads'.format(get_project_root())
    all_workloads = glob.glob('{}/**/*.yml'.format(workload_dir), recursive=True)
    # Keep only the path relative to the workloads directory.
    all_workloads = [s.split('/src/workloads/')[1] for s in all_workloads]

    for fname in all_workloads:
        basename = os.path.basename(fname)
        base_parts = os.path.splitext(basename)
        if base_parts[1] != '.yml':
            # Not a .yml workload file, ignore it.
            continue
        task_name = to_snake_case(base_parts[0])
        prepare_environment_vars = get_prepare_environment_vars(task_name, fname)

        # One task per prepare-environment entry; a workload may expand to several.
        for prep_var in prepare_environment_vars:
            t = c.task(prep_var['test'])
            t.priority(5)  # The default priority in system_perf.yml
            t.commands([
                CommandDefinition().function('prepare environment').vars(prep_var),
                CommandDefinition().function('deploy cluster'),
                CommandDefinition().function('run test'),
                CommandDefinition().function('analyze'),
            ])

    return c.to_json()
def function_with_vars(self, fn, var_map):
    """
    Append a function with variables to the end of the command sequence.

    :param fn: function to append
    :param var_map: dictionary of variables to pass to function.
    :return: instance of task being modified.
    :raises TypeError: if fn is not a string or var_map is not a dict.
    """
    if not isinstance(fn, str):
        raise TypeError("function_with_vars only accepts a str")
    if not isinstance(var_map, dict):
        raise TypeError("function_with_vars only accepts a dict")
    definition = CommandDefinition().function(fn).vars(var_map)
    self._commands.add(definition)
    return self
def resolve(self):
    """Create a CommandDefinition from this object."""
    command_name = self._command_type()
    parameters = self._export_params()
    return CommandDefinition().command(command_name).params(parameters)
def test_invalid_param(self):
    """A non-string parameter name is rejected with a TypeError."""
    command = CommandDefinition()
    with pytest.raises(TypeError):
        command.param(42, "v")
def test_invalid_vars(self):
    """A non-dict vars argument is rejected with a TypeError."""
    command = CommandDefinition()
    with pytest.raises(TypeError):
        command.vars(42)
def test_invalid_timeout(self):
    """A non-numeric timeout is rejected with a TypeError."""
    command = CommandDefinition()
    with pytest.raises(TypeError):
        command.timeout("hello world")
def test_empty_command_definition(self):
    """A freshly created command definition maps to an empty dict."""
    command = CommandDefinition()
    assert command.to_map() == {}
def test_invalid_name(self):
    """A non-string display name is rejected with a TypeError."""
    command = CommandDefinition()
    with pytest.raises(TypeError):
        command.name(42)
def test_invalid_stepback(self):
    # NOTE(review): the name says "stepback" but the body exercises function();
    # presumably a copy/paste misnomer — consider renaming to test_invalid_function.
    cd = CommandDefinition()
    with pytest.raises(TypeError):
        cd.function(42)
def generate_evg_tasks(options, evg_config, task_name_suffix=None, display_task=None):
    """
    Generate an evergreen configuration for fuzzers based on the options given.

    :param options: task options.
    :param evg_config: evergreen configuration.
    :param task_name_suffix: suffix to be appended to each task name.
    :param display_task: an existing display task definition to append to.
    :return: An evergreen configuration.
    """
    task_names = []
    task_specs = []

    for task_index in range(options.num_tasks):
        task_name = options.name if not task_name_suffix else f"{options.name}_{task_name_suffix}"
        name = taskname.name_generated_task(task_name, task_index, options.num_tasks,
                                            options.variant)
        task_names.append(name)
        task_specs.append(TaskSpec(name))
        task = evg_config.task(name)

        commands = [CommandDefinition().function("do setup")]
        if options.use_multiversion:
            commands.append(CommandDefinition().function("do multiversion setup"))
        commands.append(CommandDefinition().function("setup jstestfuzz"))
        commands.append(CommandDefinition().function("run jstestfuzz").vars({
            "jstestfuzz_vars":
                "--numGeneratedFiles {0} {1}".format(options.num_files, options.jstestfuzz_vars),
            "npm_command": options.npm_command
        }))

        # Unix path separators are used because Evergreen only runs this script in unix shells,
        # even on Windows.
        suite_arg = f"--suites={CONFIG_DIRECTORY}/{options.suite}.yml"
        run_tests_vars = {
            "continue_on_failure": options.continue_on_failure,
            "resmoke_args": f"{suite_arg} {options.resmoke_args}",
            "resmoke_jobs_max": options.resmoke_jobs_max,
            "should_shuffle": options.should_shuffle,
            "task_path_suffix": options.use_multiversion,
            "timeout_secs": options.timeout_secs,
            "task": options.name
        }  # yapf: disable
        commands.append(CommandDefinition().function("run generated tests").vars(run_tests_vars))
        task.dependency(TaskDependency("compile")).commands(commands)

    # Create a new DisplayTaskDefinition or append to the one passed in.
    dt = DisplayTaskDefinition(task_name) if not display_task else display_task
    dt.execution_tasks(task_names)

    evg_config.variant(options.variant).tasks(task_specs)
    if not display_task:
        # Only a freshly created display task gets the _gen task and is attached here;
        # a caller-supplied display task is assumed to be attached by the caller.
        dt.execution_task("{0}_gen".format(options.name))
        evg_config.variant(options.variant).display_task(dt)

    return evg_config