def test_dflt_var_complex(self):
    file1 = """
job-group:
  setup-vars:
  - var: var1
    val: val1
  jobs:
  - name: j1
  - name: j2
    setup-vars:
    - var: var1
      val: val2
"""
    sp = StreamProviderTesting({"job.yaml": file1})
    loader = JobspecLoader(sp)
    loader.tool_s.append("default")
    specs: JobSpecSet = loader.load("job.yaml")
    self.assertEqual(len(specs.jobspecs), 2)
    self.assertEqual(specs.jobspecs[0].name, "j1")
    self.assertEqual(specs.jobspecs[0].fullname, "j1")
    self.assertEqual(specs.jobspecs[1].name, "j2")
    self.assertEqual(specs.jobspecs[1].fullname, "j2")
    self.assertEqual(specs.jobspecs[0].setupvars["var1"], "val1")
    self.assertEqual(specs.jobspecs[1].setupvars["var1"], "val2")
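# A minimal sketch (not JobspecLoader's actual code) of the setup-var
# defaulting that test_dflt_var_complex() checks above: group-level
# setup-vars act as defaults, and a job-level entry of the same name
# overrides them. resolve_setup_vars() is a hypothetical helper name.
def resolve_setup_vars(group_vars, job_vars):
    merged = dict(group_vars)  # group entries provide the defaults
    merged.update(job_vars)    # job entries override by name
    return merged

# j1 inherits the group default; j2 overrides var1
assert resolve_setup_vars({"var1": "val1"}, {}) == {"var1": "val1"}
assert resolve_setup_vars({"var1": "val1"}, {"var1": "val2"}) == {"var1": "val2"}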
def runLoadJobSpec(self, file_m: Dict[str, str], exp_s: Set[str], specfile=None):
    # Write each file in the map to disk, creating directories as needed
    for f in file_m.keys():
        f_dir = os.path.dirname(f)
        if f_dir != "" and not os.path.isdir(f_dir):
            os.makedirs(f_dir)
        with open(f, "w") as fp:
            fp.write(file_m[f])

    loader = JobspecLoader()
    jobspec_s = loader.load(
        os.getcwd(),
        specfile,
        prefix="test")

    # Expand any job-spec generators into concrete job specs
    for i, gen in enumerate(jobspec_s.jobspec_gen):
        dir = "gen_%d" % i
        os.makedirs(dir)
        jobset_sg = JobSpecGenLoader(dir).load(gen)
        jobspec_s.jobspecs.extend(jobset_sg.jobspecs)

    # Every loaded job must be expected, and every expected job loaded
    for j in jobspec_s.jobspecs:
        self.assertIn(j.fullname, exp_s)
        exp_s.remove(j.fullname)
    self.assertEqual(0, len(exp_s))
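# An illustrative test written against the runLoadJobSpec() helper above.
# The file content and expected fullnames are hypothetical; they assume the
# loader discovers the written specfile in the working directory and that
# prefix="test" is prepended (dot-separated) to each job's fullname.
def test_load_job_group_via_helper(self):
    file1 = """
job-group:
  jobs:
  - name: j1
  - name: j2
"""
    self.runLoadJobSpec(
        {"mkdv.yaml": file1},
        {"test.j1", "test.j2"})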
def test_load_single_job_group_cascading_tool(self):
    text = """
job-group:
  name: abc
  tool: tool1
  labels:
  - def
  - def
  jobs:
  - name: abc
  - name: def
    tool: tool2
"""
    sp = StreamProviderTesting({"job.yaml": text})
    loader = JobspecLoader(sp)
    loader.tool_s.append("default")
    specs: JobSpecSet = loader.load("job.yaml")
    self.assertEqual(len(specs.jobspecs), 2)
    self.assertEqual(specs.jobspecs[0].name, "abc")
    self.assertEqual(specs.jobspecs[0].fullname, "abc.abc")
    self.assertEqual(specs.jobspecs[0].tool, "tool1")
    self.assertEqual(specs.jobspecs[1].name, "def")
    self.assertEqual(specs.jobspecs[1].fullname, "abc.def")
    self.assertEqual(specs.jobspecs[1].tool, "tool2")
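# A sketch of the tool-cascading rule the test above exercises: the most
# specific setting wins (job tool, then group tool, then the loader
# default). resolve_tool() is illustrative, not the loader's real API.
def resolve_tool(job_tool, group_tool, default_tool="default"):
    if job_tool is not None:
        return job_tool
    if group_tool is not None:
        return group_tool
    return default_tool

# abc inherits tool1 from the group; def overrides it with tool2
assert resolve_tool(None, "tool1") == "tool1"
assert resolve_tool("tool2", "tool1") == "tool2"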
def test_load_single_job_group_two_files(self):
    file1 = """
job-group:
  name: abc
  labels:
  - def
  - def
  jobs:
  - path: file2.yaml
"""
    file2 = """
job-group:
  jobs:
  - name: abc
  - name: def
"""
    sp = StreamProviderTesting({
        "job.yaml": file1,
        "file2.yaml": file2})
    loader = JobspecLoader(sp)
    loader.tool_s.append("default")
    specs: JobSpecSet = loader.load("job.yaml")
    self.assertEqual(len(specs.jobspecs), 2)
    self.assertEqual(specs.jobspecs[0].name, "abc")
    self.assertEqual(specs.jobspecs[1].name, "def")
    self.assertEqual(specs.jobspecs[0].fullname, "abc.abc")
    self.assertEqual(specs.jobspecs[1].fullname, "abc.def")
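# A sketch of the fullname composition the group tests above rely on: a
# named group prefixes its name (dot-separated) onto each child job, even
# when the jobs are loaded from a second file. compose_fullname() is an
# illustrative helper, not the loader's real API.
def compose_fullname(group_name, job_name):
    if group_name is None:
        return job_name
    return "%s.%s" % (group_name, job_name)

assert compose_fullname("abc", "def") == "abc.def"
assert compose_fullname(None, "abc") == "abc"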
def test_same_runner_two_diff_settings(self):
    text = """
job-group:
  name: top
  jobs:
  - name: j1
    setup-vars:
    - var1: val1
  - name: j2
    setup-vars:
    - var1: val2
  - name: j3
    setup-vars:
    - var1: val2
"""
    sp = StreamProviderTesting({"job.yaml": text})
    loader = JobspecLoader(sp)
    loader.tool_s.append("default")
    specs: JobSpecSet = loader.load("job.yaml")
    self.assertEqual(len(specs.jobspecs), 3)

    queue_s = JobQueueBuilder().build(specs.jobspecs)
    self.assertEqual(len(queue_s.queues), 2)
    self.assertEqual(len(queue_s.queues[0].jobs), 2)
    self.assertEqual(len(queue_s.queues[1].jobs), 3)
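# A sketch (an assumption about JobQueueBuilder, not its actual algorithm)
# of grouping jobs by their setup-vars signature, so that jobs sharing a
# runner configuration can share a queue:
def group_by_setup_vars(jobspecs):
    queues = {}
    for j in jobspecs:
        # Jobs with identical setup-vars land in the same bucket
        key = tuple(sorted(j.setupvars.items()))
        queues.setdefault(key, []).append(j)
    return list(queues.values())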
def test_load_single_job(self):
    text = """
job:
  name: abc
"""
    sp = StreamProviderTesting({"job.yaml": text})
    loader = JobspecLoader(sp)
    loader.tool_s.append("default")
    specs: JobSpecSet = loader.load("job.yaml")
    self.assertEqual(len(specs.jobspecs), 1)
    self.assertEqual(specs.jobspecs[0].name, "abc")
    self.assertEqual(specs.jobspecs[0].fullname, "abc")
def test_load_single_job_setup_generator(self):
    file1 = """
job:
  name: abc
  setup-generators:
  - id: abc
    config:
      a: b
      c: d
  runner:
    id: makefile
    config:
      path: ${basedir}/Makefile
      properties:
      - a
      - b
"""
    file2 = """
job-group:
  jobs:
  - job:
      name: abc
  - job:
      name: def
"""
    sp = StreamProviderTesting({
        "job.yaml": file1,
        "file2.yaml": file2})
    loader = JobspecLoader(sp)
    loader.tool_s.append("default")
    specs: JobSpecSet = loader.load("job.yaml")
    self.assertEqual(len(specs.jobspecs), 1)
    self.assertEqual(specs.jobspecs[0].name, "abc")
    self.assertEqual(specs.jobspecs[0].fullname, "abc")
def load(self, spec: JobSpecGen):
    if not os.path.isdir(self.rundir):
        os.makedirs(self.rundir)

    # Write the generation command to an executable script
    with open(os.path.join(self.rundir, "cmd.sh"), "w") as fp:
        fp.write("#!/bin/sh\n")
        fp.write("set -e\n")
        fp.write(spec.cmd)
    os.chmod(os.path.join(self.rundir, "cmd.sh"), 0o755)

    # TODO: how do we know which shell?
    env = os.environ.copy()
    env["MKDV_MK_DIR"] = os.path.dirname(spec.mkdv_mk)
    env["MKDV_MK_JOBID"] = spec.root_id

    # Note: subprocess.run() takes a boolean 'shell' flag; the shell
    # binary itself is selected via 'executable'
    ret = subprocess.run(
        "./cmd.sh",
        cwd=self.rundir,
        shell=True,
        executable="/bin/bash",
        env=env)

    if ret.returncode != 0:
        raise Exception("Job-spec generation command failed")

    if not os.path.isfile(os.path.join(self.rundir, spec.path)):
        raise Exception(
            "Expected to find generated jobspec file \"%s\"" % spec.path)

    # Load the generated specfile, qualifying job names with the root id
    jobset_s = JobspecLoader().load(
        self.rundir,
        specfile=os.path.join(self.rundir, spec.path),
        dflt_mkdv_mk=spec.mkdv_mk,
        prefix=spec.root_id)
    return jobset_s
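# Usage sketch for load() above. The generation command runs from
# self.rundir with MKDV_MK_DIR and MKDV_MK_JOBID set in its environment,
# and must create the file named by spec.path in that directory. The
# JobSpecGen construction and all values below are hypothetical; only the
# fields that load() reads (cmd, mkdv_mk, root_id, path) are shown.
gen = JobSpecGen()                    # hypothetical no-arg construction
gen.root_id = "gen1"                  # hypothetical id
gen.mkdv_mk = "/proj/ip/mkdv.mk"      # hypothetical makefile path
gen.path = "job.yaml"                 # file the command must produce
gen.cmd = "echo 'job:' > job.yaml ; echo '  name: abc' >> job.yaml\n"

jobset_s = JobSpecGenLoader("gen_rundir").load(gen)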
def run(args):
    loader = JobspecLoader()
    specs = loader.load(os.getcwd())
def list_tests(args):
    loader = JobspecLoader()
    specs = loader.load(os.getcwd())
def cmd_regress(args):
    specfiles = []
    cm = CoreManagerW()

    packages_dir = get_packages_dir()
    project_dir = os.path.dirname(packages_dir)
    cm.add_library(Library("project", project_dir))

    # Register user-specified libraries; note whether the cwd is one of them
    cwd_is_lib = False
    if args.library_path is not None:
        for path in args.library_path:
            if os.getcwd().startswith(path):
                cwd_is_lib = True
            cm.add_library(Library(os.path.basename(path), path))

    if not cwd_is_lib:
        cm.add_library(Library(os.path.basename(os.getcwd()), os.getcwd()))

    if hasattr(args, "jobspecs") and args.jobspecs is not None:
        specfiles.extend(args.jobspecs)

    # Fall back to mkdv.yaml in the current directory
    if len(specfiles) == 0:
        if os.path.isfile(os.path.join(os.getcwd(), "mkdv.yaml")):
            specfiles.append(os.path.join(os.getcwd(), "mkdv.yaml"))
        else:
            raise Exception("No specfiles specified")

    loader = JobspecLoader(core_mgr=cm)
    jobset_s = loader.load_specs(specfiles)

    regress = os.path.join(os.getcwd(), "regress")
    rundir = os.path.join(regress, datetime.now().strftime("%Y%m%d_%H%M%S"))
    gendir = os.path.join(rundir, "gen")

    specs = jobset_s.jobspecs.copy()

    # Expand job-spec generators into concrete specs
    for s in jobset_s.jobspec_gen:
        spec_gendir = os.path.join(gendir, s.root_id)
        os.makedirs(spec_gendir)
        gen_jobset_s = JobSpecGenLoader(spec_gendir).load(s)
        specs.extend(gen_jobset_s.jobspecs)

    # Filter specs
    specs = JobSpecFilter(
        args.include if args.include is not None else [],
        args.exclude if args.exclude is not None else []).filter(specs)

    # Expand any jobs that have a count >1
    specs_exp = JobCountExpander.expand(specs)

    # Now, assign each unique job an id and seed
    for i, s in enumerate(specs_exp):
        s.id = i
        s.seed = i

    os.makedirs(rundir, exist_ok=True)

    backend = backends.backend(args.backend)
    r = JobRunner(rundir, backend, specs_exp)

    if hasattr(args, "limit_time") and args.limit_time is not None:
        r.limit_time = args.limit_time

    r.tool = args.tool
    r.rerun_failing = args.rerun_failing

    # TODO: should query the job runner
    if args.max_par is not None:
        r.maxpar = int(args.max_par)

    print("--> run " + str(r))
    loop = asyncio.get_event_loop()
    loop.run_until_complete(r.runjobs())
    print("<-- run")
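# An illustrative invocation of cmd_regress() with an argparse-style
# namespace. The attribute names match exactly what cmd_regress() reads
# above; the backend and tool values are hypothetical.
import argparse

args = argparse.Namespace(
    library_path=None,
    jobspecs=["mkdv.yaml"],
    include=None,
    exclude=None,
    backend="local",      # hypothetical backend name
    limit_time=None,
    tool="default",       # hypothetical tool name
    rerun_failing=False,
    max_par="4")          # parsed with int() inside cmd_regress
cmd_regress(args)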