Ejemplo n.º 1
0
 def test_trigger_success_listener(self):
     """A job whose script triggers success must end up with SUCCESS status."""
     from slurmy import JobHandler, Status
     handler = JobHandler(
         work_dir=self.test_dir,
         verbosity=0,
         name='test_trigger_success_listener')
     handler.add_job(run_script=self.run_script_trigger_success, name='test')
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
Ejemplo n.º 2
0
 def test_run_max(self):
     """run_jobs must terminate even when run_max throttles submission to 1."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_run_max',
                          run_max=1)
     job_names = ['test_{}'.format(idx) for idx in range(3)]
     for job_name in job_names:
         handler.add_job(run_script=self.run_script, name=job_name)
     handler.run_jobs()
     ## All three jobs must have finished successfully despite the throttle
     for job_name in job_names:
         self.assertIs(handler[job_name].status, Status.SUCCESS)
Ejemplo n.º 3
0
 def test_post_process(self):
     """A post_func attached to a job runs after it and creates its file."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_post_process')
     expected_file = os.path.join(handler.config.output_dir, 'test')
     handler.add_job(run_script=self.run_script,
                     name='test',
                     post_func=TestPostFunction(expected_file))
     handler.run_jobs()
     ## Give the post-processing callback a moment to write its output
     time.sleep(1)
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
     self.assertTrue(os.path.isfile(expected_file))
Ejemplo n.º 4
0
 def test_chain_fail(self):
     """A failing parent job must get its chained child CANCELLED.

     Fix: the JobHandler name was 'test_chain' — a copy-paste from
     test_chain — so both tests shared the same handler name (and thus
     the same working area under work_dir). It is now unique.
     """
     from slurmy import JobHandler, Status
     jh = JobHandler(work_dir=self.test_dir,
                     verbosity=0,
                     name='test_chain_fail',
                     listens=False)
     ## Parent job that is expected to fail
     jh.add_job(run_script=self.run_script_fail,
                name='test_parent',
                tags='parent')
     ## Child chained to the parent via its tag
     jh.add_job(run_script=self.run_script_success,
                name='test_child',
                parent_tags='parent')
     jh.run_jobs()
     self.assertIs(jh.jobs.test_parent.status, Status.FAILED)
     self.assertIs(jh.jobs.test_child.status, Status.CANCELLED)
Ejemplo n.º 5
0
 def test_output_listener(self):
     """Output listener: SUCCESS when the output file exists, else FAILED."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_output_listener')
     handler.add_job(run_script=self.run_script_touch_file,
                     name='test',
                     output=self.output_file)
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
     ## Rerun pointing at an output path that will never exist: must FAIL
     handler.reset()
     job_config = handler.jobs.test.config
     job_config.backend.run_script = self.run_script_success
     job_config.output = 'jwoigjwoijegoijwoijegoiwoeg'
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.FAILED)
Ejemplo n.º 6
0
 def test_chain(self):
     """A child chained to a successful parent must also succeed."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_chain',
                          listens=False)
     ## Parent creates a file, child lists it — so the child can only
     ## succeed if the parent ran first
     handler.add_job(run_script=self.run_script_touch_file,
                     name='test_parent',
                     tags='parent')
     handler.add_job(run_script=self.run_script_ls_file,
                     name='test_child',
                     parent_tags='parent')
     handler.run_jobs()
     self.assertIs(handler.jobs.test_parent.status, Status.SUCCESS)
     self.assertIs(handler.jobs.test_child.status, Status.SUCCESS)
Ejemplo n.º 7
0
 def test_batch_listener(self):
     """Retrying a failed job resubmits it with a fresh id and succeeds."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_batch_listener')
     handler.add_job(run_script=self.run_script_fail, name='test')
     handler.run_jobs()
     first_status = handler.jobs.test.status
     first_id = handler.jobs.test.id
     ## Swap in a succeeding script and retry the same job
     handler.jobs.test.config.backend.run_script = self.run_script_success
     handler.run_jobs(retry=True)
     second_status = handler.jobs.test.status
     second_id = handler.jobs.test.id
     self.assertIsNot(first_id, second_id)
     self.assertIs(first_status, Status.FAILED)
     self.assertIs(second_status, Status.SUCCESS)
Ejemplo n.º 8
0
def main():
    """Batch submission using slurmy."""
    ## JobHandler: up to 8 concurrent local jobs, dynamic local/batch
    ## assignment, every run script wrapped in the checkmate docker image
    handler = JobHandler(
        local_max=8,
        local_dynamic=True,
        work_dir=mkdir('batch'),
        printer_bar_mode=True,
        wrapper=SingularityWrapper('docker://philippgadow/checkmate'))
    ## Submit a single local job and process it
    handler.add_job(run_script='/work/run_example_in_docker.sh',
                    job_type=Type.LOCAL)
    handler.run_jobs()
Ejemplo n.º 9
0
 def test_reset(self):
     """reset() returns a job to CONFIGURED; a rerun assigns a new id."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_reset',
                          listens=False)
     handler.add_job(run_script=self.run_script, name='test')
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
     first_id = handler.jobs.test.id
     ## After a reset the job must look freshly configured again
     handler.reset()
     self.assertIs(handler.jobs.test.status, Status.CONFIGURED)
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
     self.assertIsNot(first_id, handler.jobs.test.id)
Ejemplo n.º 10
0
 def test_type_local(self):
     """A job added with job_type=Type.LOCAL must report that type."""
     from slurmy import JobHandler, Type
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_jobconfig_type_local',
                          do_snapshot=False,
                          local_max=1)
     local_job = handler.add_job(run_script=self.run_script,
                                 job_type=Type.LOCAL)
     self.assertIs(local_job.type, Type.LOCAL)
Ejemplo n.º 11
0
 def test_mix_batch_local_listener(self):
     """With dynamic local assignment the two jobs get different types;
     both fail first, then both succeed on retry."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_mix_batch_local_listener',
                          local_max=1,
                          local_dynamic=True)
     handler.add_job(run_script=self.run_script_fail, name='test_1')
     handler.add_job(run_script=self.run_script_fail, name='test_2')
     handler.run_jobs()
     job_one = handler.jobs.test_1
     job_two = handler.jobs.test_2
     ## One job should have been promoted to local, the other stayed batch
     self.assertIsNot(job_one.type, job_two.type)
     self.assertIs(job_one.status, Status.FAILED)
     self.assertIs(job_two.status, Status.FAILED)
     ## Swap in succeeding scripts and retry both
     job_one.config.backend.run_script = self.run_script_success
     job_two.config.backend.run_script = self.run_script_success
     handler.run_jobs(retry=True)
     self.assertIs(job_one.status, Status.SUCCESS)
     self.assertIs(job_two.status, Status.SUCCESS)
Ejemplo n.º 12
0
 def test_local_listener(self):
     """Local job in test mode: FAILED on first run, SUCCESS after retry."""
     from slurmy import JobHandler, Status, Type, test_mode
     test_mode(True)
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_local_listener',
                          local_max=1)
     handler.add_job(run_script=self.run_script_fail,
                     name='test',
                     job_type=Type.LOCAL)
     handler.run_jobs()
     failed_status = handler.jobs.test.status
     ## Swap in a succeeding script and retry
     handler.jobs.test.config.backend.run_script = self.run_script_success
     handler.run_jobs(retry=True)
     success_status = handler.jobs.test.status
     ## Leave test mode before asserting so it is reset even on failure order
     test_mode(False)
     self.assertIs(failed_status, Status.FAILED)
     self.assertIs(success_status, Status.SUCCESS)
Ejemplo n.º 13
0
 def test_chain_multiparent(self):
     """A child with two parent tags must wait for both parents' outputs."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_chain_multiparent',
                          listens=False)
     ## Two parents, each touching its own output file
     output_one = '@SLURMY.output_dir/parent1'
     handler.add_job(
         run_script='#!/bin/bash\ntouch {}; sleep 2;'.format(output_one),
         name='test_parent1',
         tags='parent1',
         output=output_one)
     output_two = '@SLURMY.output_dir/parent2'
     handler.add_job(
         run_script='#!/bin/bash\ntouch {}; sleep 2;'.format(output_two),
         name='test_parent2',
         tags='parent2',
         output=output_two)
     ## Child lists both outputs, so it only succeeds after both parents
     handler.add_job(
         run_script='#!/bin/bash\nls {} {};'.format(output_one, output_two),
         name='test_child',
         parent_tags=['parent1', 'parent2'])
     handler.run_jobs()
     self.assertIs(handler.jobs.test_parent1.status, Status.SUCCESS)
     self.assertIs(handler.jobs.test_parent2.status, Status.SUCCESS)
     self.assertIs(handler.jobs.test_child.status, Status.SUCCESS)
Ejemplo n.º 14
0
    "MET_SoftTrk_ScaleUp",
    "MUON_ID",
    "MUON_MS",
    "MUON_SAGITTA_RESBIAS",
    "MUON_SAGITTA_RHO",
    "MUON_SCALE",
    # "NoSys",
]

## Merge the per-campaign (mc16a/d/e) ntuples for every process and
## systematic variation into one hadd job each, then run all jobs.
## NOTE(review): Python 2 print statements — this script requires py2.
## `processes`, `systematics`, `output_path`, `tag`, `jh` and
## `merge_script` are defined earlier in the file.
for process in processes:
    print "Starting with process {}".format(process)
    for sys in systematics:
        # Target merged file and base job name for this process/systematic
        outputfile= os.path.join(output_path, "{}_merged_{}.root".format(process,sys))
        name = "{}_merged_{}".format(process,sys)

        # Collect matching merged input files from all three MC campaigns
        to_merge = []
        for campaign in ["mc16e","mc16d","mc16a"]:
            # for f in glob.glob("/project/etp4/eschanet/ntuples/common/{}/{}/merged/{}_*tree_{}*done_noLHE.root".format(campaign,tag,process,sys)):
            for f in glob.glob("/project/etp4/eschanet/ntuples/common/{}/{}/merged/{}_*merged_processed_tree_{}*.root*".format(campaign,tag,process,sys)):
                # Skip the all-hadronic sample when merging inclusive ttbar
                if process == "ttbar" and "ttbar_allhad" in f:
                    continue
                print f
                to_merge.append(f)
        # if len(to_merge)%3!=0:
        #     print("{} probably incomplete!".format(process))
        print(len(to_merge))
        # Trigger marks the job as finished once the output file appears
        ft = FinishedTrigger(outputfile)
        jh.add_job(name=name+"_hadd", run_script=merge_script.format(outputfile=outputfile,finished_func=ft, inputfiles=" ".join(to_merge)))

jh.run_jobs()
Ejemplo n.º 15
0
        # print(inputfile)
        for skipEvents in range(0, totalEvents - maxEvents + 1, maxEvents):
            # print("Adding job for inputfile={}, skipEvents={}".format(inputfile, skipEvents))
            dirname = os.path.basename(os.path.normpath(indirectory))
            dirname = dirname.replace("EVNT",
                                      "DAOD_TRUTH3").replace("evgen", "deriv")
            outputfile = os.path.join(outdir, dirname, inputfile).replace(
                "EVNT", "DAOD_TRUTH3")
            myoutdir = os.path.join(outdir, dirname)
            outputfile = outputfile + "_{}".format(skipEvents)

            st = SuccessTrigger(outputfile, 20)
            test_finished = os.path.join(outdir, dirname,
                                         inputfile + "_testfile")
            # print(test_finished)
            jh.add_job(
                run_script=run_script.format(inputfile=inputfile,
                                             indir=indirectory,
                                             outdir=myoutdir,
                                             outputfile=outputfile,
                                             finished_file=test_finished,
                                             skipEvents=skipEvents,
                                             maxEvents=maxEvents),
                name="pantagruel_{}_{}".format(inputfile.replace(".", "_"),
                                               skipEvents),
                output=outputfile,
                success_func=st,
                backend=Slurm(mem="3500mb"))

jh.run_jobs()
Ejemplo n.º 16
0
class Test(unittest.TestCase):
    """Unit tests for job-configuration options of slurmy's JobHandler.

    Fixes: string comparisons now use assertEqual instead of assertIs
    (identity of equal strings is a CPython interning artefact, not a
    contract) and assertTrue(a == b); unused `Status` imports removed.
    """

    def setUp(self):
        from slurmy import JobHandler, test_mode
        ## Test mode avoids real batch submission during unit tests
        test_mode(True)
        self.test_dir = 'slurmy_unittest/jobconfig'
        self.jh = JobHandler(work_dir=self.test_dir,
                             verbosity=0,
                             name='test_jobconfig',
                             do_snapshot=False)
        self.run_script = 'echo "test"'
        self.run_script_trigger = '@SLURMY.FINISHED; @SLURMY.SUCCESS;'

    def tearDown(self):
        from slurmy import test_mode
        test_mode(False)

    ##TODO: run_script test --> with direct string and path to file

    def test_run_args(self):
        """run_args passed to add_job is stored on the backend config."""
        job = self.jh.add_job(run_script=self.run_script, run_args='test')
        self.assertEqual(job.config.backend.run_args, 'test')

    def test_name(self):
        """The job name is stored and the job is registered under it."""
        job = self.jh.add_job(run_script=self.run_script, name='test')
        self.assertEqual(job.name, 'test')
        self.assertIn('test', self.jh.jobs)
        self.assertEqual(self.jh.jobs.test.name, 'test')

    def test_type_local(self):
        """job_type=Type.LOCAL is reflected by the job's type property."""
        from slurmy import JobHandler, Type
        jh = JobHandler(work_dir=self.test_dir,
                        verbosity=0,
                        name='test_jobconfig_type_local',
                        do_snapshot=False,
                        local_max=1)
        job = jh.add_job(run_script=self.run_script, job_type=Type.LOCAL)
        self.assertIs(job.type, Type.LOCAL)

    def test_finished_func(self):
        """A finished_func puts RUNNING evaluation into ACTIVE mode."""
        from slurmy import Status, Mode
        job = self.jh.add_job(run_script=self.run_script,
                              finished_func=lambda x: x)
        self.assertIs(job.get_mode(Status.RUNNING), Mode.ACTIVE)
        self.assertTrue(job.config.finished_func(True))

    def test_success_func(self):
        """A success_func puts FINISHED evaluation into ACTIVE mode."""
        from slurmy import Status, Mode
        job = self.jh.add_job(run_script=self.run_script,
                              success_func=lambda x: x)
        self.assertIs(job.get_mode(Status.FINISHED), Mode.ACTIVE)
        self.assertTrue(job.config.success_func(True))

    def test_post_func(self):
        """A post_func is stored on the job config and is callable."""
        job = self.jh.add_job(run_script=self.run_script,
                              post_func=lambda x: x)
        self.assertTrue(job.config.post_func(True))

    def test_output(self):
        """Defining an output switches FINISHED evaluation to PASSIVE mode."""
        from slurmy import Status, Mode
        job = self.jh.add_job(run_script=self.run_script, output='test')
        self.assertIs(job.get_mode(Status.FINISHED), Mode.PASSIVE)
        self.assertIsNotNone(job.output)

    def test_tags(self):
        """Tags can be given as a single string or a list of strings."""
        job = self.jh.add_job(run_script=self.run_script, tags='hans')
        self.assertIn('hans', job.tags)
        job = self.jh.add_job(run_script=self.run_script,
                              tags=['hans', 'horst'])
        self.assertIn('hans', job.tags)
        self.assertIn('horst', job.tags)

    def test_parent_tags(self):
        """Parent tags accept a single string or a list of strings."""
        job = self.jh.add_job(run_script=self.run_script, parent_tags='hans')
        self.assertIn('hans', job.parent_tags)
        job = self.jh.add_job(run_script=self.run_script,
                              parent_tags=['hans', 'horst'])
        self.assertIn('hans', job.parent_tags)
        self.assertIn('horst', job.parent_tags)

    def test_variable_substitution(self):
        """@SLURMY.output_dir in the output path is expanded to the real dir."""
        job = self.jh.add_job(run_script=self.run_script,
                              output='@SLURMY.output_dir/test')
        output = os.path.join(self.jh.config.output_dir, 'test')
        self.assertEqual(job.output, output)
Ejemplo n.º 17
0
                print point
                #TODO: make nicer
                if "onestep" in point.lower():
                    #onestep points have different formatting at tree- and AOD-level
                    point = point.replace("onestepCC", "oneStep")
                elif "LQ" in point:
                    point = "leptoquark_" + point.replace('ld_0p3_beta_0p5_hnd_1p0_','') + "_"
                searcher = "{}*merged_processed*.root".format(point)
                print(searcher)
                files_mc16a = glob.glob("/project/etp4/eschanet/ntuples/common/mc16a/{}/merged/{}".format(args.production,searcher))
                files_mc16d = glob.glob("/project/etp4/eschanet/ntuples/common/mc16d/{}/merged/{}".format(args.production,searcher))
                files_mc16e = glob.glob("/project/etp4/eschanet/ntuples/common/mc16e/{}/merged/{}".format(args.production,searcher))

                if len(files_mc16a)==0 or len(files_mc16d)==0 or len(files_mc16e)==0:
                    print "mc16a: %i"%len(files_mc16a)
                    print "mc16d: %i"%len(files_mc16d)
                    print "mc16e: %i"%len(files_mc16e)
                    print "WARNING  -  {} is not complete".format(point)

                    if not args.ignore_incomplete:
                        continue
                    print "WARNING  -  Ignoring incomplete!".format(point)

                inputfiles = " ".join(sorted(files_mc16a)) + " " + " ".join(sorted(files_mc16d)) + " " + " ".join(sorted(files_mc16e))
                #pprint.pprint(inputfiles)
                outputfile = output_path + "{}_merged_processed.root".format(point)
                jh.add_job(name=point+"_campaignHadder", run_script=merge_script.format(outputfile=outputfile, inputfiles=inputfiles), output=outputfile)

jh.run_jobs()
Ejemplo n.º 18
0
def run():
    """Parse CLI options and submit run_xAODNtMaker ntuple jobs via slurmy."""
    parser = argparse.ArgumentParser(description='Run stuff locally on etp')
    parser.add_argument('files', type=argparse.FileType('r'), nargs='+')
    parser.add_argument("-s",help="input sample",default=None)
    parser.add_argument("-selector",help="selector",default="OneLep")
    parser.add_argument("-writeTrees",help="sys or nominal",default="1")
    parser.add_argument("-deepConfig",help="input sample",default="SusySkim1LInclusive_Rel21.config")
    parser.add_argument("-outputPath",help="output path",default=None)
    parser.add_argument("-process",help="process tag to find your output",default=None)

    args = parser.parse_args()


    ## Batch-side shell script; the $variables are presumably filled in per
    ## job via the Slurm(export=...) option below — TODO confirm.
    jobscript = """
    echo Running on host `hostname`
    echo Time is `date`
    echo Directory is `pwd`

    shopt -s expand_aliases

    export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
    source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh

    pushd $testarea

    set -- ""
    acmSetup

    popd

    echo "run_xAODNtMaker"
    echo "    -s $sample"
    echo "    -selector $selector"
    echo "    -writeTrees $writeTrees"
    echo "    -deepConfig $deepConfig"
    echo "    -MaxEvents $maxEvents"
    echo "    -SkipEvents $skipEvents"

    run_xAODNtMaker -s $sample -selector $selector -writeTrees $writeTrees -deepConfig $deepConfig -MaxEvents $maxEvents -SkipEvents $skipEvents

    [[ "$?" = "0" ]] && mv ${outputPath}/submitDir/data-tree/  ${outputPath}/${groupset}/${mytag}/merged/${process}_${minEvent}_${maxEvent}_merged_processed_${writeTrees}.root
    """

    ## Every job runs inside the ATLAS centos7 singularity image
    sw = SingularityWrapper('/cvmfs/atlas.cern.ch/repo/containers/images/singularity/x86_64-centos7.img')
    jh = JobHandler(wrapper = sw,work_dir="/project/etp2/eschanet/collect", name="run_ntuple")#, run_max=50)

    # sf = SuccessOutputFile()

    ## NOTE(review): `params` is not defined in this function — presumably a
    ## module-level iterable of (exportopts, optDict) pairs; confirm upstream.
    for exportopts, optDict in params:
        slurm = Slurm(export = exportopts)

        ## Marker file whose appearance signals the job completed
        outputfile = os.path.abspath("{path}/{groupset}/{mytag}/merged/{process}_{minEvent}_{maxEvent}_merged_processed_{sys}.root.done".format(**optDict))

        jobname = "run_xAODNtMaker_{groupset}_{process}_{sys}_{minEvent}".format(**optDict)

        jobname = clean_jobname(jobname)

        print(jobname)
        ## Job counts as successful once the .done marker file exists
        ft = FinishedTrigger(outputfile)
        jh.add_job(backend = slurm, run_script = jobscript, output = outputfile, success_func = ft, name = jobname)

    jh.run_jobs()
Ejemplo n.º 19
0
            ['"{}"'.format(b) for b in lhe_weights])

# if there are more than 50 trees, split into several jobs and merge later
if len(treenames) > 50:
    i = 1
    to_merge = []
    for chunk in chunks(treenames, 25):
        print("creating chunk #{:02d}".format(i))
        trees = " ,".join(['"{}"'.format(t) for t in chunk])
        basename = "{:}_part{:03d}.root".format(
            os.path.splitext(os.path.basename(f))[0], i)
        outputfile = os.path.join(output_path, basename)
        jh.add_job(name=name + "_part{:03d}".format(i),
                   run_script=skim_script.format(inputfile=f,
                                                 outputfile=outputfile,
                                                 treenames=trees,
                                                 branches=branchnames,
                                                 selection=selection),
                   tags=name + "_skim")
        to_merge.append(outputfile)
        i += 1
    outputfile = os.path.join(output_path, os.path.basename(f))
    ft = FinishedTrigger(outputfile)
    jh.add_job(name=name + "_merge",
               run_script=merge_script.format(outputfile=outputfile,
                                              inputfiles=" ".join(to_merge)),
               finished_func=ft,
               parent_tags=name + '_skim')
else:
    treenames = " ,".join(['"{}"'.format(t) for t in treenames])
    outputfile = os.path.join(output_path, os.path.basename(f))