Example #1
0
 def test_chain_multiparent(self):
     """A child chained on two parent tags succeeds once both parents finish.

     Each parent touches its own output file; the child lists both files,
     so it can only succeed after both parents have run.
     """
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_chain_multiparent',
                          listens=False)
     parent_outputs = []
     for tag in ('parent1', 'parent2'):
         output = '@SLURMY.output_dir/{}'.format(tag)
         parent_outputs.append(output)
         handler.add_job(run_script='#!/bin/bash\ntouch {}; sleep 2;'.format(output),
                         name='test_{}'.format(tag),
                         tags=tag,
                         output=output)
     handler.add_job(run_script='#!/bin/bash\nls {} {};'.format(*parent_outputs),
                     name='test_child',
                     parent_tags=['parent1', 'parent2'])
     handler.run_jobs()
     self.assertIs(handler.jobs.test_parent1.status, Status.SUCCESS)
     self.assertIs(handler.jobs.test_parent2.status, Status.SUCCESS)
     self.assertIs(handler.jobs.test_child.status, Status.SUCCESS)
Example #2
0
 def test_trigger_success_listener(self):
     """A job running the success-trigger script must end with SUCCESS status."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_trigger_success_listener')
     handler.add_job(run_script=self.run_script_trigger_success, name='test')
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
Example #3
0
 def test_run_max(self):
     """With run_max=1 throttling submissions, run_jobs must still finish.

     This effectively only tests whether run_jobs terminates.
     """
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_run_max',
                          run_max=1)
     job_names = ['test_{}'.format(index) for index in range(3)]
     for job_name in job_names:
         handler.add_job(run_script=self.run_script, name=job_name)
     handler.run_jobs()
     for job_name in job_names:
         self.assertIs(handler[job_name].status, Status.SUCCESS)
Example #4
0
 def test_post_process(self):
     """The post-processing callable must run and create its marker file."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_post_process')
     marker_path = os.path.join(handler.config.output_dir, 'test')
     handler.add_job(run_script=self.run_script,
                     name='test',
                     post_func=TestPostFunction(marker_path))
     handler.run_jobs()
     # Give the post-processing hook a moment to write the marker file.
     time.sleep(1)
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
     self.assertTrue(os.path.isfile(marker_path))
Example #5
0
 def test_output_listener(self):
     """Job status must follow the presence of the registered output file."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_output_listener')
     handler.add_job(run_script=self.run_script_touch_file,
                     name='test',
                     output=self.output_file)
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.SUCCESS)
     # Rerun with a script that exits cleanly but never creates the (bogus)
     # registered output: the output listener must flag the job as FAILED.
     handler.reset()
     handler.jobs.test.config.backend.run_script = self.run_script_success
     handler.jobs.test.config.output = 'jwoigjwoijegoijwoijegoiwoeg'
     handler.run_jobs()
     self.assertIs(handler.jobs.test.status, Status.FAILED)
Example #6
0
 def test_chain_fail(self):
     """A failing parent job must cause its chained child to be cancelled."""
     from slurmy import JobHandler, Status
     ## Use a handler name matching this test: the original reused
     ## 'test_chain', sharing a work-dir name with test_chain.
     jh = JobHandler(work_dir=self.test_dir,
                     verbosity=0,
                     name='test_chain_fail',
                     listens=False)
     jh.add_job(run_script=self.run_script_fail,
                name='test_parent',
                tags='parent')
     jh.add_job(run_script=self.run_script_success,
                name='test_child',
                parent_tags='parent')
     jh.run_jobs()
     self.assertIs(jh.jobs.test_parent.status, Status.FAILED)
     self.assertIs(jh.jobs.test_child.status, Status.CANCELLED)
Example #7
0
 def test_chain(self):
     """A child chained on a parent tag runs after the parent and succeeds."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_chain',
                          listens=False)
     handler.add_job(run_script=self.run_script_touch_file,
                     name='test_parent',
                     tags='parent')
     handler.add_job(run_script=self.run_script_ls_file,
                     name='test_child',
                     parent_tags='parent')
     handler.run_jobs()
     self.assertIs(handler.jobs.test_parent.status, Status.SUCCESS)
     self.assertIs(handler.jobs.test_child.status, Status.SUCCESS)
Example #8
0
 def test_reset(self):
     """reset() returns the job to CONFIGURED and a rerun assigns a new id."""
     from slurmy import JobHandler, Status
     jh = JobHandler(work_dir=self.test_dir,
                     verbosity=0,
                     name='test_reset',
                     listens=False)
     jh.add_job(run_script=self.run_script, name='test')
     jh.run_jobs()
     self.assertIs(jh.jobs.test.status, Status.SUCCESS)
     id_first = jh.jobs.test.id
     jh.reset()
     self.assertIs(jh.jobs.test.status, Status.CONFIGURED)
     jh.run_jobs()
     self.assertIs(jh.jobs.test.status, Status.SUCCESS)
     id_second = jh.jobs.test.id
     ## Compare ids by value: assertIsNot checks object identity, which is
     ## unreliable here (equal-but-distinct id objects would pass; interned
     ## equal ints would share identity unpredictably).
     self.assertNotEqual(id_first, id_second)
Example #9
0
 def test_batch_listener(self):
     """A failed job retried with a fixed script gets a new id and succeeds."""
     from slurmy import JobHandler, Status
     jh = JobHandler(work_dir=self.test_dir,
                     verbosity=0,
                     name='test_batch_listener')
     jh.add_job(run_script=self.run_script_fail, name='test')
     jh.run_jobs()
     status_fail = jh.jobs.test.status
     id_first = jh.jobs.test.id
     jh.jobs.test.config.backend.run_script = self.run_script_success
     jh.run_jobs(retry=True)
     status_success = jh.jobs.test.status
     id_second = jh.jobs.test.id
     ## Compare ids by value: assertIsNot checks object identity, which is
     ## unreliable for equal-but-distinct id objects (e.g. interned ints).
     self.assertNotEqual(id_first, id_second)
     self.assertIs(status_fail, Status.FAILED)
     self.assertIs(status_success, Status.SUCCESS)
Example #10
0
def main():
    """Batch submission using slurmy."""
    # Up to 8 local jobs, dynamically scheduled, run inside the checkmate
    # Singularity container.
    work_dir = mkdir('batch')
    container = SingularityWrapper('docker://philippgadow/checkmate')
    handler = JobHandler(
        local_max=8,
        local_dynamic=True,
        work_dir=work_dir,
        printer_bar_mode=True,
        wrapper=container)
    # Register a single local job, then execute everything.
    handler.add_job(run_script='/work/run_example_in_docker.sh',
                    job_type=Type.LOCAL)
    handler.run_jobs()
Example #11
0
 def test_mix_batch_local_listener(self):
     """Jobs split across local and batch dispatch can both fail and be retried."""
     from slurmy import JobHandler, Status
     handler = JobHandler(work_dir=self.test_dir,
                          verbosity=0,
                          name='test_mix_batch_local_listener',
                          local_max=1,
                          local_dynamic=True)
     handler.add_job(run_script=self.run_script_fail, name='test_1')
     handler.add_job(run_script=self.run_script_fail, name='test_2')
     handler.run_jobs()
     # The two jobs must have been dispatched with different job types.
     self.assertIsNot(handler.jobs.test_1.type, handler.jobs.test_2.type)
     self.assertIs(handler.jobs.test_1.status, Status.FAILED)
     self.assertIs(handler.jobs.test_2.status, Status.FAILED)
     # Swap in succeeding scripts and retry: both jobs must now succeed.
     for job in (handler.jobs.test_1, handler.jobs.test_2):
         job.config.backend.run_script = self.run_script_success
     handler.run_jobs(retry=True)
     self.assertIs(handler.jobs.test_1.status, Status.SUCCESS)
     self.assertIs(handler.jobs.test_2.status, Status.SUCCESS)
Example #12
0
 def test_local_listener(self):
     """A local job can fail and then be retried to success (in test mode)."""
     from slurmy import JobHandler, Status, Type, test_mode
     test_mode(True)
     ## Always switch test mode back off, even if run_jobs raises;
     ## otherwise the global flag would leak into subsequent tests.
     try:
         jh = JobHandler(work_dir=self.test_dir,
                         verbosity=0,
                         name='test_local_listener',
                         local_max=1)
         jh.add_job(run_script=self.run_script_fail,
                    name='test',
                    job_type=Type.LOCAL)
         jh.run_jobs()
         status_fail = jh.jobs.test.status
         jh.jobs.test.config.backend.run_script = self.run_script_success
         jh.run_jobs(retry=True)
         status_success = jh.jobs.test.status
     finally:
         test_mode(False)
     self.assertIs(status_fail, Status.FAILED)
     self.assertIs(status_success, Status.SUCCESS)
Example #13
0
        outputfile = os.path.join(output_path, basename)
        jh.add_job(name=name + "_part{:03d}".format(i),
                   run_script=skim_script.format(inputfile=f,
                                                 outputfile=outputfile,
                                                 treenames=trees,
                                                 branches=branchnames,
                                                 selection=selection),
                   tags=name + "_skim")
        to_merge.append(outputfile)
        i += 1
    outputfile = os.path.join(output_path, os.path.basename(f))
    ft = FinishedTrigger(outputfile)
    jh.add_job(name=name + "_merge",
               run_script=merge_script.format(outputfile=outputfile,
                                              inputfiles=" ".join(to_merge)),
               finished_func=ft,
               parent_tags=name + '_skim')
else:
    treenames = " ,".join(['"{}"'.format(t) for t in treenames])
    outputfile = os.path.join(output_path, os.path.basename(f))
    ft = FinishedTrigger(outputfile)
    jh.add_job(name=name,
               run_script=skim_script.format(inputfile=f,
                                             outputfile=outputfile,
                                             treenames=treenames,
                                             branches=branchnames,
                                             finished_func=ft,
                                             selection=selection))

jh.run_jobs()
Example #14
0
def run():
    """Submit xAOD ntuple-maker jobs to SLURM via slurmy, one per parameter set.

    Parses command-line options, builds a shell job script that sets up the
    ATLAS environment and runs ``run_xAODNtMaker``, then submits one job per
    entry in ``params`` inside a CentOS 7 Singularity container.
    """
    parser = argparse.ArgumentParser(description='Run stuff locally on etp')
    parser.add_argument('files', type=argparse.FileType('r'), nargs='+')
    parser.add_argument("-s",help="input sample",default=None)
    parser.add_argument("-selector",help="selector",default="OneLep")
    parser.add_argument("-writeTrees",help="sys or nominal",default="1")
    parser.add_argument("-deepConfig",help="input sample",default="SusySkim1LInclusive_Rel21.config")
    parser.add_argument("-outputPath",help="output path",default=None)
    parser.add_argument("-process",help="process tag to find your output",default=None)

    # NOTE(review): `args` is parsed but never used below; job parameters come
    # from the module-level `params` instead — confirm this is intentional.
    args = parser.parse_args()


    # Shell script template: variables like $sample/$selector are expected to
    # arrive via the environment exported by the Slurm backend (see `export`
    # below), not via Python formatting.
    jobscript = """
    echo Running on host `hostname`
    echo Time is `date`
    echo Directory is `pwd`

    shopt -s expand_aliases

    export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase
    source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh

    pushd $testarea

    set -- ""
    acmSetup

    popd

    echo "run_xAODNtMaker"
    echo "    -s $sample"
    echo "    -selector $selector"
    echo "    -writeTrees $writeTrees"
    echo "    -deepConfig $deepConfig"
    echo "    -MaxEvents $maxEvents"
    echo "    -SkipEvents $skipEvents"

    run_xAODNtMaker -s $sample -selector $selector -writeTrees $writeTrees -deepConfig $deepConfig -MaxEvents $maxEvents -SkipEvents $skipEvents

    [[ "$?" = "0" ]] && mv ${outputPath}/submitDir/data-tree/  ${outputPath}/${groupset}/${mytag}/merged/${process}_${minEvent}_${maxEvent}_merged_processed_${writeTrees}.root
    """

    # Run every job inside the ATLAS CentOS 7 container.
    sw = SingularityWrapper('/cvmfs/atlas.cern.ch/repo/containers/images/singularity/x86_64-centos7.img')
    jh = JobHandler(wrapper = sw,work_dir="/project/etp2/eschanet/collect", name="run_ntuple")#, run_max=50)

    # sf = SuccessOutputFile()

    # One job per (exported-environment, option-dict) pair from `params`.
    for exportopts, optDict in params:
        slurm = Slurm(export = exportopts)

        # Expected final output; the trailing .done marker signals completion.
        outputfile = os.path.abspath("{path}/{groupset}/{mytag}/merged/{process}_{minEvent}_{maxEvent}_merged_processed_{sys}.root.done".format(**optDict))

        jobname = "run_xAODNtMaker_{groupset}_{process}_{sys}_{minEvent}".format(**optDict)

        jobname = clean_jobname(jobname)

        print(jobname)
        # Job counts as successful once the .done marker file exists.
        ft = FinishedTrigger(outputfile)
        jh.add_job(backend = slurm, run_script = jobscript, output = outputfile, success_func = ft, name = jobname)

    jh.run_jobs()