Example 1
def submit_job(brunel_app, reco_type, input_files=None, local=RUN_LOCAL):
    # Set EvtMax depending on whether this is a local job
    brunel_app.extraOpts += 'from Configurables import Brunel\n'
    brunel_app.extraOpts += 'Brunel().EvtMax = {}'.format(2 * int(local) - 1)
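    # 2 * int(local) - 1 evaluates to 1 when local is True (a quick one-event
    # test) and to -1 (no event limit) otherwise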

    # Configure the corresponding Job
    job = Job(name='VP hybrid distortions',
              comment='{reco_type} reconstruction {suffix}'.format(
                  reco_type=reco_type, suffix=['', '(local)'][local]),
              application=brunel_app,
              splitter=SplitByFiles(filesPerJob=1, ignoremissing=True),
              parallel_submit=True)

    if local:
        job.backend = Local()
        job.outputfiles = [LocalFile('*.xdst'), LocalFile('*.root')]
        job.inputdata = dataset[:1]
    else:
        job.backend = Dirac()
        job.outputfiles = [DiracFile('*.xdst'), DiracFile('*.root')]
        job.inputdata = dataset

    job.inputfiles = input_files or []

    queues.add(job.submit)
    return True
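A minimal usage sketch for the helper above (the 'nominal' reconstruction type is hypothetical; it assumes brunel_app, dataset, queues and RUN_LOCAL already exist in the Ganga session, just as the snippet itself does):

# Quick local test of a hypothetical 'nominal' reconstruction
submit_job(brunel_app, 'nominal', input_files=None, local=True)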
Example 2
def test_lcg(gpi):
    from Ganga.GPI import Job, LCG, VomsProxy, credential_store, jobs

    logger.info('Submitting first job')
    j1 = Job()
    j1.backend = LCG()
    j1.submit()

    logger.info('Submitting second job')
    j2 = Job()
    j2.backend = LCG(credential_requirements=VomsProxy(vo='lhcb'))
    j2.submit()

    # Wipe out all the credentials to make sure they can be created on cue
    for cred in credential_store:
        cred.destroy()
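    # (First wipe: the proxies themselves are destroyed while credential_store
    # still holds cached entries; the passes below clear the cache instead,
    # and finally do both)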

    logger.info('Monitoring jobs')
    for j in jobs:
        stripProxy(j).backend.master_updateMonitoringInformation(
            [stripProxy(j)])

    # Wipe out all the credentials to make sure they can be created on cue
    credential_store.clear()

    logger.info('Monitoring jobs')
    for j in jobs:
        stripProxy(j).backend.master_updateMonitoringInformation(
            [stripProxy(j)])

    # Wipe out all the credentials to make sure they can be created on cue
    for cred in credential_store:
        cred.destroy()
    credential_store.clear()

    logger.info('Monitoring jobs')
    for j in jobs:
        stripProxy(j).backend.master_updateMonitoringInformation(
            [stripProxy(j)])

    # Wipe out all the credentials to make sure they can be created on cue
    for cred in credential_store:
        cred.destroy()
    credential_store.clear()

    logger.info('Killing jobs')
    for j in jobs:
        j.kill()
Example 3
    def Savannah19123(self):
        from Ganga.GPI import Job, Local

        from GangaTest.Framework.utils import sleep_until_state

        # Check whether stdout and stderr exist; the flag indicates whether the files are required to exist

        def check(exists_flag):
            for fn in ['stdout','stderr']:
                fn = os.path.join(j.outputdir,fn)
                file_exists = os.path.exists(fn)
                if exists_flag:
                    self.assertTrue(file_exists, 'file %s should exist but it does not' % fn)
                else:
                    self.assertFalse(file_exists, 'file %s should not exist but it does' % fn)

        j = Job()
        j.application.exe = 'bash'
        j.application.args = ['-c', 'for i in `seq 1 30`; do echo $i; sleep 1; done']
        j.backend = Local()

        j.submit()

        check(False)

        sleep_until_state(j, 5, 'running')

        j.kill()

        check(True)
Example 4
    def test_a_CreateJob(self):
        from Ganga.GPI import jobs, Job, Executable, Local

        jobs.remove()
        j = Job()
        j.application = Executable()
        j.backend = Local()
Example 5
def test_job_complete(gpi):
    from Ganga.GPI import Job, LCG

    j = Job()
    j.backend = LCG()
    j.submit()
    assert run_until_completed(j, timeout=1200, sleep_period=10), 'Timeout on job submission: job is still not finished'
Example 6
def test_submit_kill_resubmit(gpi):
    """
    Test that a simple submit-kill-resubmit-kill cycle works
    """

    from Ganga.GPI import Job, LCG
    j = Job()
    j.backend = LCG()

    with patch('Ganga.Lib.LCG.Grid.submit', return_value='https://example.com:9000/42') as submit:
        j.submit()
        submit.assert_called_once()
        assert j.backend.id == 'https://example.com:9000/42'

    with patch('Ganga.Lib.LCG.Grid.cancel', return_value=True) as cancel:
        j.kill()
        cancel.assert_called_once()
        assert j.status == 'killed'

    with patch('Ganga.Lib.LCG.Grid.submit', return_value='https://example.com:9000/43') as submit:
        j.resubmit()
        submit.assert_called_once()
        assert j.backend.id == 'https://example.com:9000/43'

    with patch('Ganga.Lib.LCG.Grid.cancel', return_value=True):
        j.kill()
Example 7
def test_job_kill(gpi):
    from Ganga.GPI import Job, LCG

    j = Job()
    j.backend = LCG()
    j.submit()
    j.kill()
Example 8
def test_job_kill(gpi):
    from Ganga.GPI import Job, CREAM

    vo = getConfig('LCG')['VirtualOrganisation']
    call = subprocess.Popen(['lcg-infosites', 'ce', 'cream', '--vo', vo],
                            stdout=subprocess.PIPE)
    stdout, stderr = call.communicate()

    # Based on output of:
    #
    # #   CPU    Free Total Jobs      Running Waiting ComputingElement
    # ----------------------------------------------------------------
    #   19440    2089      17760        17351     409 arc-ce01.gridpp.rl.ac.uk:2811/nordugrid-Condor-grid3000M
    #    3240       0       1594         1250     344 carceri.hec.lancs.ac.uk:8443/cream-sge-grid
    #    1176      30       1007          587     420 ce01.tier2.hep.manchester.ac.uk:8443/cream-pbs-long
    #
    # Select the CREAM CEs (URL path starts with '/cream') and how many free slots they have
    ces = re.findall(
        r'^\s*\d+\s*(?P<free>\d+)\s*\d+\s*\d+\s*\d+\s*(?P<ce>[^:/\s]+uk:\d+/cream.*)$',
        stdout, re.MULTILINE)
    # Grab the CE with the most free slots (compare the counts numerically
    # rather than lexicographically)
    ce = max(ces, key=lambda c: int(c[0]))[1]

    j = Job()
    j.backend = CREAM(CE=ce)
    j.submit()
    j.kill()
Example 9
def test_job_kill(gpi):
    from Ganga.GPI import Job, LCG

    j = Job()
    j.backend = LCG()
    j.submit()
    j.kill()
Example 10
def test_submit_kill_resubmit(gpi):
    """
    Test that a simple submit-kill-resubmit-kill cycle works
    """

    from Ganga.GPI import Job, LCG
    j = Job()
    j.backend = LCG()

    with patch('Ganga.Lib.LCG.Grid.submit',
               return_value='https://example.com:9000/42') as submit:
        j.submit()
        submit.assert_called_once()
        assert j.backend.id == 'https://example.com:9000/42'

    with patch('Ganga.Lib.LCG.Grid.cancel', return_value=True) as cancel:
        j.kill()
        cancel.assert_called_once()
        assert j.status == 'killed'

    with patch('Ganga.Lib.LCG.Grid.submit',
               return_value='https://example.com:9000/43') as submit:
        j.resubmit()
        submit.assert_called_once()
        assert j.backend.id == 'https://example.com:9000/43'

    with patch('Ganga.Lib.LCG.Grid.cancel', return_value=True):
        j.kill()
Example 11
    def test_e_testInMemory(self):
        """
        Test the resubmit on a job in memory vs a job which has been loaded from disk
        """
        from Ganga.GPI import Job, Local

        j = Job()
        j.splitter = self._getSplitter()
        j.backend = Local()
        j.submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j)

        # Job has been created, split, run and now exists in Memory (NOT SJXML)

        from Ganga.Utility.Config import setConfigOption
        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'True')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).resubmit()

        # We should get here if calling resubmit doesn't stall

        j.subjobs(0).force_status('failed', force=True)

        j.resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test resubmit from the master job worked

        j.subjobs(0).force_status('failed')

        j.subjobs(0).resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test that the resubmit from the subjob worked

        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'False')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).force_status('failed')

        j.resubmit()

        sleep_until_completed(j)
Example 12
    def test_e_testInMemory(self):
        """
        Test the resubmit on a job in memory vs a job which has been loaded from disk
        """
        from Ganga.GPI import Job, Local

        j = Job()
        j.splitter = self._getSplitter()
        j.backend = Local()
        j.submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j)

        # Job has been created, split, run and now exists in Memory (NOT SJXML)

        from Ganga.Utility.Config import setConfigOption
        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'True')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).resubmit()

        # We should get here if calling resubmit doesn't stall

        j.subjobs(0).force_status('failed')

        j.resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test resubmit from the master job worked

        j.subjobs(0).force_status('failed')

        j.subjobs(0).resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test that the resubmit from the subjob worked

        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'False')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).force_status('failed')

        j.resubmit()

        sleep_until_completed(j)
Example 13
def test_lcg(gpi):
    from Ganga.GPI import Job, LCG, VomsProxy, credential_store, jobs

    logger.info("Submitting first job")
    j1 = Job()
    j1.backend = LCG()
    j1.submit()

    logger.info("Submitting second job")
    j2 = Job()
    j2.backend = LCG(credential_requirements=VomsProxy(vo="lhcb"))
    j2.submit()

    # Wipe out all the credentials to make sure they can be created on cue
    for cred in credential_store:
        cred.destroy()

    logger.info("Monitoring jobs")
    for j in jobs:
        stripProxy(j).backend.master_updateMonitoringInformation([stripProxy(j)])

    # Wipe out all the credentials to make sure they can be created on cue
    credential_store.clear()

    logger.info("Monitoring jobs")
    for j in jobs:
        stripProxy(j).backend.master_updateMonitoringInformation([stripProxy(j)])

    # Wipe out all the credentials to make sure they can be created on cue
    for cred in credential_store:
        cred.destroy()
    credential_store.clear()

    logger.info("Monitoring jobs")
    for j in jobs:
        stripProxy(j).backend.master_updateMonitoringInformation([stripProxy(j)])

    # Wipe out all the credentials to make sure they can be created on cue
    for cred in credential_store:
        cred.destroy()
    credential_store.clear()

    logger.info("Killing jobs")
    for j in jobs:
        j.kill()
Example 14
def test_job_submit_and_monitor(gpi):
    from Ganga.GPI import Job, LCG

    j = Job()
    j.backend = LCG()
    j.submit()

    assert j.status != 'new'
    stripProxy(LCG).master_updateMonitoringInformation([stripProxy(j)])
Example 15
def test_job_submit_and_monitor(gpi):
    from Ganga.GPI import Job, LCG

    j = Job()
    j.backend = LCG()
    j.submit()

    assert j.status != 'new'
    stripProxy(LCG).master_updateMonitoringInformation([stripProxy(j)])
Example 16
    def testInterfaceLookFeel(self):

        from Ganga.GPI import Job, LSF, Executable, DaVinci

        j1 = Job(name='my', application='DaVinci')
        j2 = Job(application=DaVinci())

        j1.backend = LSF()
        j1.backend.queue = '8nm'
        j2.backend = j1.backend # deepcopy
        j2.backend.queue = '16nh' # shortcut
        bk2 = j2.backend # reference

        assert j2.backend.queue == '16nh'
        bk2.queue = '100nh'
        assert j2.backend.queue == '100nh'
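        # bk2 is a live reference: the mutation above is visible through
        # j2.backend, whereas assigning j1.backend to j2.backend made a copy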

        ap = Executable()

        j1.application = ap # deepcopy
Example 17
    def test_e_UsingDifferentBackends(self):
        from Ganga.GPI import Job, plugins, Local

        # -- USINGDIFFERENTBACKENDS PLUGINS START
        plugins("backends")
        # -- USINGDIFFERENTBACKENDS PLUGINS STOP

        # -- USINGDIFFERENTBACKENDS LOCAL START
        j = Job()
        j.backend = Local()
        j.submit()
Example 18
    def test_e_UsingDifferentBackends(self):
        from Ganga.GPI import Job, plugins, Local

        # -- USINGDIFFERENTBACKENDS PLUGINS START
        plugins("backends")
        # -- USINGDIFFERENTBACKENDS PLUGINS STOP

        # -- USINGDIFFERENTBACKENDS LOCAL START
        j = Job()
        j.backend = Local()
        j.submit()
Example 19
    def testInterfaceLookFeel(self):
        """
        This test checks that the Executable and DaVinci applications are assignable
        """
        from Ganga.GPI import Job, LSF, Executable, DaVinci

        j1 = Job(name='my', application='DaVinci')
        j2 = Job(application=DaVinci())

        j1.backend = LSF()
        j1.backend.queue = '8nm'
        j2.backend = j1.backend # deepcopy
        j2.backend.queue = '16nh' # shortcut
        bk2 = j2.backend # reference

        assert j2.backend.queue == '16nh'
        bk2.queue = '100nh'
        assert j2.backend.queue == '100nh'

        ap = Executable()

        j1.application = ap # deepcopy
Example 20
    def testMergeThatAlwaysFailsOverwrite(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        j.submit()

        assert run_until_state(j, 'failed', timeout=60)
        assert os.path.exists(os.path.join(j.outputdir, 'out.txt.merge_summary')), 'Summary file should be created'
Example 21
    def test_a_JobSubmission(self):
        """
        Create lots of subjobs and submit them
        """
        from Ganga.GPI import Job, Local
        j = Job()
        j.application.exe = "sleep"
        j.splitter = self._getSplitter()
        j.backend = Local()
        j.submit()

        # Test that we can submit a job; below we check that the subjobs are created

        assert len(j.subjobs) == TestSJSubmit.n_subjobs
Example 22
    def testMergeThatAlwaysFailsOverwrite(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        j.submit()

        assert run_until_state(j, 'failed', timeout=60)
        assert os.path.exists(os.path.join(j.outputdir, 'out.txt.merge_summary')), 'Summary file should be created'
Example 23
    def test_a_JobSubmission(self):
        """
        Create lots of subjobs and submit them
        """
        from Ganga.GPI import Job, Local
        j = Job()
        j.application.exe = "sleep"
        j.splitter = self._getSplitter()
        j.backend = Local()
        j.submit()

        # Test that we can submit a job; below we check that the subjobs are created

        assert len(j.subjobs) == TestSJSubmit.n_subjobs
Example 24
    def testLargeJobSubmission(self):
        """
        Create lots of subjobs and submit them
        """
        from Ganga.GPI import Job, GenericSplitter, Local
        j = Job()
        j.application.exe = "sleep"
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['400'] for _ in range(0, 20)]
        j.backend = Local()
        j.submit()

        assert len(j.subjobs) == 20
Example 25
    def testLargeJobSubmission(self):
        """
        Create lots of subjobs and submit them
        """
        from Ganga.GPI import Job, GenericSplitter, Local
        j = Job()
        j.application.exe = "sleep"
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['400'] for _ in range(0, 20)]
        j.backend = Local()
        j.submit()

        assert len(j.subjobs) == 20
Example 26
    def test_Savannah44116(self):
        from Ganga.GPI import Job, TestApplication, TestSubmitter

        from GangaTest.Framework.utils import sleep_until_state

        j = Job()
        j.application = TestApplication()
        j.application.postprocess_mark_as_failed = True
        j.backend = TestSubmitter()
        j.backend.time = 1

        j.submit()

        self.assertTrue(sleep_until_state(j, 10, 'failed'), 'Job is not marked as failed despite app.postprocess() hook')
Example 27
    def test_SubmitAndRemoval(self):
        from Ganga.GPI import Job, TestSubmitter
        
        j = Job()
        j.backend = TestSubmitter(time=1, update_delay=5)
        j.submit()
        all_jobs = [j]  # renamed so the builtin all() is not shadowed

        for i in range(10):
            j2 = j.copy()
            j2.submit()
            all_jobs.append(j2)

        for j in all_jobs:
            j.remove()
Example 28
    def testMergeThatAlwaysFailsIgnoreFailed(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], ignorefailed=True)

        j.submit()

        sleep_until_completed(j, 60)
        assert j.status == 'failed'
        assert os.path.exists(os.path.join(j.outputdir, 'out.txt.merge_summary')), 'Summary file should be created'
Example 29
    def test_SubmitAndRemoval(self):
        from Ganga.GPI import Job, TestSubmitter

        j = Job()
        j.backend = TestSubmitter(time=1, update_delay=5)
        j.submit()
        all_jobs = [j]  # renamed so the builtin all() is not shadowed

        for i in range(10):
            j2 = j.copy()
            j2.submit()
            all_jobs.append(j2)

        for j in all_jobs:
            j.remove()
Example 30
    def test_Savannah15630(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        from GangaTest.Framework.utils import sleep_until_completed
        j = Job()
        j.application = Executable(exe='touch', args=['out.dat'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.dat')]
        j.submit()
        self.assertTrue(sleep_until_completed(j, 60), 'Timeout on job submission: job is still not finished')

        import os.path
        p = os.path.join(j.outputdir, j.application.args[0])

        self.assertTrue(os.path.exists(p))
Example 31
    def testMergeThatAlwaysFailsFlagsSet(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe="sh", args=["-c", "echo foo > out.txt"])
        j.backend = Local()
        j.outputfiles = [LocalFile("out.txt")]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=["out.txt"], ignorefailed=True, overwrite=True)

        j.submit()

        run_until_completed(j, timeout=60)
        assert j.status == "failed"
        assert os.path.exists(os.path.join(j.outputdir, "out.txt.merge_summary")), "Summary file should be created"
Example 32
    def test_Savannah44116(self):
        from Ganga.GPI import Job, TestApplication, TestSubmitter

        from GangaTest.Framework.utils import sleep_until_state

        j = Job()
        j.application = TestApplication()
        j.application.postprocess_mark_as_failed = True
        j.backend = TestSubmitter()
        j.backend.time = 1

        j.submit()

        self.assertTrue(
            sleep_until_state(j, 10, 'failed'),
            'Job is not marked as failed despite app.postprocess() hook')
Example 33
    def test_Savannah15630(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        from GangaTest.Framework.utils import sleep_until_completed
        j = Job()
        j.application = Executable(exe='touch', args=['out.dat'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.dat')]
        j.submit()
        self.assertTrue(
            sleep_until_completed(j, 60),
            'Timeout on job submission: job is still not finished')

        import os.path
        p = os.path.join(j.outputdir, j.application.args[0])

        self.assertTrue(os.path.exists(p))
Example 34
    def test_Savannah19123(self):
        from Ganga.GPI import Job, Local

        from GangaTest.Framework.utils import sleep_until_state

        # Check whether stdout and stderr exist; the flag indicates whether the files are required to exist

        def check(exists_flag):
            for fn in ['stdout', 'stderr']:
                fn = os.path.join(j.outputdir, fn)
                file_exists = os.path.exists(fn)
                if exists_flag:
                    self.assertTrue(
                        file_exists,
                        'file %s should exist but it does not' % fn)
                else:
                    self.assertFalse(
                        file_exists,
                        'file %s should not exist but it does' % fn)

        j = Job()
        j.application.exe = 'bash'
        j.application.args = [
            '-c', 'for i in `seq 1 30`; do echo $i; sleep 1; done'
        ]
        j.backend = Local()

        j.submit()

        check(False)

        if not sleep_until_state(j, 5, 'running'):
            # problem with the test - print out stdout/stderr and assert
            for fn in ['stdout', 'stderr']:
                fn = os.path.join(j.outputdir, fn)
                print " ----  Contents of " + fn
                if os.path.exists(fn):
                    print open(fn).read()
                else:
                    print "NO FILE AVAILABLE"

            self.assertEqual(j.status, 'running')

        j.kill()

        check(True)
Example 35
    def testJobCopy(self):
        """Test that a job copy copies everything properly"""
        from Ganga.GPI import Job, ARC, GenericSplitter, GangaDataset, LocalFile, FileChecker
        from Ganga.GPIDev.Base.Proxy import isType

        j = Job()
        j.application.exe = "sleep"
        j.application.args = ['myarg']
        j.backend = ARC()
        j.backend.CE = "my.ce"
        j.inputdata = GangaDataset()
        j.inputdata.files = [ LocalFile("*.txt") ]
        j.inputfiles = [ LocalFile("*.txt") ]
        j.name = "testname"
        j.outputfiles = [ LocalFile("*.txt") ]
        j.postprocessors = FileChecker(files=['stdout'], searchStrings = ['my search'])
        j.splitter = GenericSplitter()
        j.splitter.attribute = "application.args"
        j.splitter.values = ['arg 1', 'arg 2', 'arg 3']
        j2 = j.copy()

        # test the copy has worked
        self.assertTrue( isType(j2, Job) )
        self.assertEqual( j2.application.exe, "sleep" )
        self.assertEqual( j2.application.args, ["myarg"] )
        self.assertTrue( isType(j2.backend, ARC) )
        self.assertEqual( j2.backend.CE, "my.ce" )
        self.assertTrue( isType(j2.inputdata, GangaDataset) )
        self.assertEqual( len(j2.inputdata.files), 1 )
        self.assertTrue( isType(j2.inputdata.files[0], LocalFile) )
        self.assertEqual( j2.inputdata.files[0].namePattern, "*.txt" )
        self.assertEqual( len(j2.inputfiles), 1 )
        self.assertTrue( isType(j2.inputfiles[0], LocalFile) )
        self.assertEqual( j2.inputfiles[0].namePattern, "*.txt" )
        self.assertEqual( j2.name, "testname" )
        self.assertEqual( len(j2.outputfiles), 1 )
        self.assertTrue( isType(j2.outputfiles[0], LocalFile) )
        self.assertEqual( j2.outputfiles[0].namePattern, "*.txt" )
        self.assertEqual( len(j2.postprocessors), 1 )
        self.assertTrue( isType(j2.postprocessors[0], FileChecker) )
        self.assertEqual( j2.postprocessors[0].files, ["stdout"] )
        self.assertEqual( j2.postprocessors[0].searchStrings, ["my search"] )
        self.assertTrue( isType(j2.splitter, GenericSplitter) )
        self.assertEqual( j2.splitter.attribute, "application.args" )
        self.assertEqual( j2.splitter.values, ['arg 1', 'arg 2', 'arg 3'])
Example 36
    def test_CondorConfigDefaults(self):
        from Ganga.GPI import Job, TestSplitter, TestSubmitter

        j = Job()
        j.splitter = TestSplitter()
        j.splitter.backs = [TestSubmitter(), TestSubmitter(), TestSubmitter()]
        j.backend = TestSubmitter()
        b = j.splitter.backs[1]
        b.fail = 'submit'

        assert j.status == 'new'

        j.submit(keep_going=False)
        assert j.status in ['submitted', 'running']
        assert j.subjobs[0].status in ['submitted', 'running']

        assert j.subjobs[1].status == 'new'
        assert j.subjobs[2].status == 'new'
Example 37
        def testInterfaceLookFeel(self):

            # Just testing that the job construction works

            from Ganga.GPI import Job, Im3ShapeApp, Im3ShapeSplitter, DiracFile, LocalFile, GangaDataset, Dirac

            j = Job()
            app = Im3ShapeApp(
                            im3_location=DiracFile(lfn='/lsst/y1a1-v2-z/software/2016-02-24/im3shape-grid.tar.gz'),
                            ini_location=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/params_disc.ini'),
                            blacklist=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/blacklist-y1.txt')
                                )
            j.application = app
            j.backend = Dirac()
            mydata = GangaDataset()
            mydata.append(DiracFile(lfn='/lsst/DES0005+0043-z-meds-y1a1-gamma.fits.fz'))
            j.inputdata = mydata
            j.splitter = Im3ShapeSplitter(size=20)
            j.outputfiles = [DiracFile('*.main.txt'), DiracFile('*.epoch.txt')]
Example 38
    def test_CondorConfigDefaults(self):
        # A test with sequential submission
        from Ganga.GPI import Job, TestSplitter, TestSubmitter

        j = Job()
        j.splitter = TestSplitter()
        j.splitter.backs = [TestSubmitter(), TestSubmitter(), TestSubmitter()]
        j.backend = TestSubmitter()
        b = j.splitter.backs[1]
        b.fail = 'submit'
        j.parallel_submit = False

        assert j.status == 'new'

        with pytest.raises(IncompleteJobSubmissionError):
            j.submit(keep_going=True)
        assert j.subjobs[0].status in ['submitted', 'running']

        assert j.subjobs[1].status == 'new'
        assert j.subjobs[2].status == 'new'
Example 39
    def test_CondorConfigDefaults(self):
        # A test with sequential submission
        from Ganga.GPI import Job, TestSplitter, TestSubmitter

        j = Job()
        j.splitter = TestSplitter()
        j.splitter.backs = [TestSubmitter(), TestSubmitter(), TestSubmitter()]
        j.backend = TestSubmitter()
        b = j.splitter.backs[1]
        b.fail = 'submit'
        j.parallel_submit = False

        assert j.status == 'new'

        with pytest.raises(IncompleteJobSubmissionError):
            j.submit(keep_going=True)
        assert j.subjobs[0].status in ['submitted', 'running']

        assert j.subjobs[1].status == 'new'
        assert j.subjobs[2].status == 'new'
Example 40
    def test_Savannah19123(self):
        from Ganga.GPI import Job, Local

        from GangaTest.Framework.utils import sleep_until_state

        # Check whether stdout and stderr exist; the flag indicates whether the files are required to exist

        def check(exists_flag):
            for fn in ['stdout','stderr']:
                fn = os.path.join(j.outputdir,fn)
                file_exists = os.path.exists(fn)
                if exists_flag:
                    self.assertTrue(file_exists, 'file %s should exist but it does not' % fn)
                else:
                    self.assertFalse(file_exists, 'file %s should not exist but it does' % fn)

        j = Job()
        j.application.exe = 'bash'
        j.application.args = ['-c', 'for i in `seq 1 30`; do echo $i; sleep 1; done']
        j.backend = Local()

        j.submit()

        check(False)

        if not sleep_until_state(j, 5, 'running'):
            # problem with the test - print out stdout/stderr and assert
            for fn in ['stdout','stderr']:
                fn = os.path.join(j.outputdir,fn)
                print " ----  Contents of " + fn
                if os.path.exists(fn):
                    print open(fn).read()
                else:
                    print "NO FILE AVAILABLE"

            self.assertEqual(j.status, 'running')

        j.kill()

        check(True)
Example 41
def test_submit_monitor(gpi):
    """
    Test that an LCG job can be monitored
    """

    from Ganga.GPI import Job, LCG
    j = Job()
    j.backend = LCG()

    job_id = 'https://example.com:9000/42'

    with patch('Ganga.Lib.LCG.Grid.submit', return_value=job_id) as submit:
        j.submit()
        submit.assert_called_once()
        assert j.backend.id == job_id

    status_info = {
        'status': 'Submitted',
        'name': '',
        'destination': '',
        'reason': '',
        'exit': '',
        'is_node': False,
        'id': job_id
    }

    status_results = [
        ([status_info], []),  # Once for the proper status call
        ([], [])  # Once for the bulk monitoring call
    ]

    with patch('Ganga.Lib.LCG.Grid.status',
               side_effect=status_results) as status:
        stripProxy(j).backend.master_updateMonitoringInformation(
            [stripProxy(j)])
        assert status.call_count == 1

    with patch('Ganga.Lib.LCG.Grid.cancel', return_value=True):
        j.kill()
Example 42
    def testKilling(self):
        """
        Create some subjobs and kill them
        """
        from Ganga.GPI import Job, GenericSplitter, Local
        from GangaTest.Framework.utils import sleep_until_state
        j = Job()
        j.application.exe = "sleep"
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['400'] for _ in range(0, 5)]
        j.backend = Local()
        j.submit()

        sleep_until_state(j, None, 'running')
        assert j.status == 'running'

        j.subjobs(0).kill()
        assert j.subjobs(0).status == 'killed'
        assert j.subjobs(1).status != 'killed'
        j.kill()
        assert j.status == 'killed'
        assert all(sj.status == 'killed' for sj in j.subjobs)
Example 43
    def testMergeRemoval(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

        # see Savannah 33710
        j = Job()
        jobID = j.id
        # job will run for at least 20 seconds
        j.application = Executable(exe="sh", args=["-c", "sleep 20; echo foo > out.txt"])
        j.backend = Local()
        j.outputfiles = [LocalFile("out.txt")]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=["out.txt"])

        j.postprocessors[0].ignorefailed = True
        j.postprocessors[0].alwaysfail = True
        j.postprocessors[0].wait = 10

        j.submit()
        run_until_state(j, state="running")
        j.remove()

        with pytest.raises(KeyError):
            jobs(jobID)
Example 44
    def testKilling(self):
        """
        Create some subjobs and kill them
        """
        from Ganga.GPI import Job, GenericSplitter, Local
        from GangaTest.Framework.utils import sleep_until_state
        j = Job()
        j.application.exe = "sleep"
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['400'] for _ in range(0, 5)]
        j.backend = Local()
        j.submit()

        sleep_until_state(j, None, 'running')
        assert j.status == 'running'

        j.subjobs(0).kill()
        assert j.subjobs(0).status == 'killed'
        assert j.subjobs(1).status != 'killed'
        j.kill()
        assert j.status == 'killed'
        assert all(sj.status == 'killed' for sj in j.subjobs)
Example 45
def submit(N, K):
    jobs = []
    for i in range(K):
        j = Job()
        j.backend = LCG()
        j.backend.middleware = 'GLITE'
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['x']] * N
        j.submit()
        jobs.append(j)
    import time

    def finished():
        for j in jobs:
            if j.status not in ['failed', 'completed']:
                return False
        return True

    while not finished():
        time.sleep(1)

    return jobs
Example 46
def submit(N, K):
    jobs = []
    for i in range(K):
        j = Job()
        j.backend = LCG()
        j.backend.middleware = 'GLITE'
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['x']] * N
        j.submit()
        jobs.append(j)
    import time

    def finished():
        for j in jobs:
            if j.status not in ['failed', 'completed']:
                return False
        return True

    while not finished():
        time.sleep(1)

    return jobs
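A hedged usage sketch for submit() above, assuming Job, LCG and GenericSplitter have been imported from Ganga.GPI as in the other examples:

# Hypothetical run: two LCG jobs with three subjobs each; submit() blocks
# until every job reaches 'failed' or 'completed'
finished = submit(N=3, K=2)
print [(j.id, j.status) for j in finished]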
Example 47
def test_submit_monitor(gpi):
    """
    Test that an LCG job can be monitored
    """

    from Ganga.GPI import Job, LCG
    j = Job()
    j.backend = LCG()

    job_id = 'https://example.com:9000/42'

    with patch('Ganga.Lib.LCG.Grid.submit', return_value=job_id) as submit:
        j.submit()
        submit.assert_called_once()
        assert j.backend.id == job_id

    status_info = {
        'status': 'Submitted',
        'name': '',
        'destination': '',
        'reason': '',
        'exit': '',
        'is_node': False,
        'id': job_id
    }

    status_results = [
        ([status_info], []),  # Once for the proper status call
        ([], [])  # Once for the bulk monitoring call
    ]

    with patch('Ganga.Lib.LCG.Grid.status', side_effect=status_results) as status:
        stripProxy(j).backend.master_updateMonitoringInformation([stripProxy(j)])
        assert status.call_count == 2

    with patch('Ganga.Lib.LCG.Grid.cancel', return_value=True):
        j.kill()
Example 48
    def testMergeRemoval(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

        # see Savannah 33710
        j = Job()
        jobID = j.id
        # job will run for at least 20 seconds
        j.application = Executable(exe='sh',
                                   args=['-c', 'sleep 20; echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'])

        j.postprocessors[0].ignorefailed = True
        j.postprocessors[0].alwaysfail = True
        j.postprocessors[0].wait = 10

        j.submit()
        run_until_state(j, state='running')
        j.remove()

        with pytest.raises(KeyError):
            jobs(jobID)
Example 49
def test_job_kill(gpi):
    from Ganga.GPI import Job, CREAM

    vo = getConfig("LCG")["VirtualOrganisation"]
    call = subprocess.Popen(["lcg-infosites", "ce", "cream", "--vo", vo], stdout=subprocess.PIPE)
    stdout, stderr = call.communicate()

    # Based on output of:
    #
    # #   CPU    Free Total Jobs      Running Waiting ComputingElement
    # ----------------------------------------------------------------
    #   19440    2089      17760        17351     409 arc-ce01.gridpp.rl.ac.uk:2811/nordugrid-Condor-grid3000M
    #    3240       0       1594         1250     344 carceri.hec.lancs.ac.uk:8443/cream-sge-grid
    #    1176      30       1007          587     420 ce01.tier2.hep.manchester.ac.uk:8443/cream-pbs-long
    #
    # Select the CREAM CEs (URL path starts with '/cream') and how many free slots they have
    ces = re.findall(r"^\s*\d+\s*(?P<free>\d+)\s*\d+\s*\d+\s*\d+\s*(?P<ce>[^:/\s]+:\d+/cream.*)$", stdout, re.MULTILINE)
    # Grab the CE with the most free slots (compare the counts numerically
    # rather than lexicographically)
    ce = max(ces, key=lambda c: int(c[0]))[1]

    j = Job()
    j.backend = CREAM(CE=ce)
    j.submit()
    j.kill()
Example 50
    def Savannah19123(self):
        from Ganga.GPI import Job, Local

        from GangaTest.Framework.utils import sleep_until_state

        # Check whether stdout and stderr exist; the flag indicates whether the files are required to exist

        def check(exists_flag):
            for fn in ['stdout', 'stderr']:
                fn = os.path.join(j.outputdir, fn)
                file_exists = os.path.exists(fn)
                if exists_flag:
                    self.assertTrue(
                        file_exists,
                        'file %s should exist but it does not' % fn)
                else:
                    self.assertFalse(
                        file_exists,
                        'file %s should not exist but it does' % fn)

        j = Job()
        j.application.exe = 'bash'
        j.application.args = [
            '-c', 'for i in `seq 1 30`; do echo $i; sleep 1; done'
        ]
        j.backend = Local()

        j.submit()

        check(False)

        sleep_until_state(j, 5, 'running')

        j.kill()

        check(True)
Example 51
def makeIPResolutionsJob( jobName, dataFile, brunelVersion="", dataType = '2012', extraFiles = [],
                          ddDBTag = None, condDBTag = None ) :
    """Call this method to make a job that will run Brunel with the IP resolutions
    ntupling algorithm, using the given config file (data type, etc) and data file.
    Add this method to your ~/.ganga.py file to have it automatically loaded whenever
    you run ganga."""

    print "Creating an IP resolutions monitoring job named \'%s\'" % jobName
    dataFile = os.path.expandvars(dataFile)
    if not os.path.exists(dataFile) :
        print "Could not find the data file \"%s\"!" % dataFile
        return None
    dataFile = os.path.abspath(dataFile)
    print "Using data file \'%s\'" % dataFile
        
    print "Parsing data file for options."
    stepInfos = parseDataFileForSteps(dataFile)
    productionOptsFiles = None
    if len(stepInfos) > 0 :
        for step in stepInfos :
            # Get any additional options files used for the Brunel step.
            if step['ApplicationName'] == "Brunel" :
                productionOptsFiles = []
                for optsFile in step['OptionFiles'].split(";") :
                    productionOptsFiles.append(optsFile.replace(" \n",""))
            # Get the DB tags used for the Gauss step. This should be the
            # same as for the Brunel step but it seems there's a bug in bkk.
            if step['ApplicationName'] == "Gauss" or step['ApplicationName'] == 'DaVinci' or step['ApplicationName'] == 'Brunel' :
                if ddDBTag == None :
                    ddDBTag = step['DDDB']
                if condDBTag == None :
                    condDBTag = step['CONDDB']
    if ddDBTag is None and len(extraFiles) == 0 :
        print "The DB tags could not be retrieved from the data file and no extra options\
files have been specified! The job cannot be made."
        return None
    
    j = Job( name = jobName )
    if brunelVersion == "" :
        j.application = Brunel()
    else :
        j.application = Brunel(version=brunelVersion)

    j.application.extraopts = 'Brunel().OutputLevel = 5\nBrunel().PrintFreq = 10000\nBrunel().DataType = "{0}"\n'.format(dataType)
    if ddDBTag is not None :
        print "Using DDDBTag \"%s\" and CondDBTag \"%s\"" % (ddDBTag, condDBTag)
        j.application.extraopts += "\nBrunel().DDDBtag = \"%s\"\nBrunel().CondDBtag = \"%s\"\n" % (ddDBTag, condDBTag)
        if 'sim' in ddDBTag.lower() :
            j.application.extraopts += '\nBrunel().Simulation = True\n'
            #j.application.extraopts += '\nBrunel().WithMC = True\n'
    else :
        print "The DB tags could not be retrieved from the data file."
        print "If they are not defined in one of the extra options files default values will be used."

    j.application.optsfile = [ mainIPConfigFile ] + extraFiles
    print "Using options files:"
    for optsFile in j.application.optsfile :
        print optsFile.name
    if productionOptsFiles is not None and len(productionOptsFiles) > 0 :
        for optsFile in productionOptsFiles :
            print optsFile
            j.application.extraopts += "\nimportOptions(\"%s\")\n" % optsFile
    print "Reading in data ..."
    j.inputdata = j.application.readInputData( dataFile )
    print "Data read. %s files found." % len(j.inputdata.files)
    if len(j.inputdata.files) > 0 :
        j.application.extraopts += '\nBrunel().InputType = "{0}"\n'.format(j.inputdata.files[0].name.split('.')[-1].upper())
    j.splitter = SplitByFiles( filesPerJob = 10 )
    
    j.backend = Dirac()
    
    j.outputfiles = [DiracFile('*.root')]
    
    return j
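A minimal usage sketch for the factory above, run from a Ganga session in which mainIPConfigFile is already defined, as the function assumes (the data-file path here is hypothetical):

j = makeIPResolutionsJob('IPres_2012_test', '$HOME/bkk/ipres_data.py',
                         dataType='2012')
if j is not None:
    j.submit()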
Example 52
    def test_f_InputAndOutputData(self):
        from Ganga.GPI import Job, File, LocalFile, GangaDataset, Local, plugins
        # -- INPUTANDOUTPUTDATA BASIC START
        # create a script to send
        open('my_script2.sh', 'w').write("""#!/bin/bash
        ls -ltr
        more "my_input.txt"
        echo "TESTING" > my_output.txt
        """)
        import os
        os.system('chmod +x my_script2.sh')

        # create a script to send
        open('my_input.txt', 'w').write('Input Testing works!')

        j = Job()
        j.application.exe = File('my_script2.sh')
        j.inputfiles = [ LocalFile('my_input.txt') ]
        j.outputfiles = [ LocalFile('my_output.txt') ]
        j.submit()
        # -- INPUTANDOUTPUTDATA BASIC STOP

        # -- INPUTANDOUTPUTDATA PEEKOUTPUT START
        j.peek()   # list output dir contents
        j.peek('my_output.txt')
        # -- INPUTANDOUTPUTDATA PEEKOUTPUT STOP

        # -- INPUTANDOUTPUTDATA FAILJOB START
        # This job will fail
        j = Job()
        j.application.exe = File('my_script2.sh')
        j.inputfiles = [ LocalFile('my_input.txt') ]
        j.outputfiles = [ LocalFile('my_output_FAIL.txt') ]
        j.submit()
        # -- INPUTANDOUTPUTDATA FAILJOB STOP

        # -- INPUTANDOUTPUTDATA WILDCARD START
        # This job will pick up both 'my_input.txt' and 'my_output.txt'
        j = Job()
        j.application.exe = File('my_script2.sh')
        j.inputfiles = [LocalFile('my_input.txt')]
        j.outputfiles = [LocalFile('*.txt')]
        j.submit()
        # -- INPUTANDOUTPUTDATA WILDCARD STOP

        # -- INPUTANDOUTPUTDATA OUTPUTFILES START
        j.outputfiles
        # -- INPUTANDOUTPUTDATA OUTPUTFILES STOP

        # -- INPUTANDOUTPUTDATA INPUTDATA START
        # Create a test script
        open('my_script3.sh', 'w').write("""#!/bin/bash
        echo $PATH
        ls -ltr
        more __GangaInputData.txt__
        echo "MY TEST FILE" > output_file.txt
        """)
        import os
        os.system('chmod +x my_script3.sh')

        # Submit a job
        j = Job()
        j.application.exe = File('my_script3.sh')
        j.inputdata = GangaDataset(files=[LocalFile('*.sh')])
        j.backend = Local()
        j.submit()
        # -- INPUTANDOUTPUTDATA INPUTDATA STOP

        # -- INPUTANDOUTPUTDATA GANGAFILES START
        plugins('gangafiles')
Example 53
    def test_f_InputAndOutputData(self):
        from Ganga.GPI import Job, File, LocalFile, GangaDataset, Local, plugins
        # -- INPUTANDOUTPUTDATA BASIC START
        # create a script to send
        open('my_script2.sh', 'w').write("""#!/bin/bash
        ls -ltr
        more "my_input.txt"
        echo "TESTING" > my_output.txt
        """)
        import os
        os.system('chmod +x my_script2.sh')

        # create a script to send
        open('my_input.txt', 'w').write('Input Testing works!')

        j = Job()
        j.application.exe = File('my_script2.sh')
        j.inputfiles = [LocalFile('my_input.txt')]
        j.outputfiles = [LocalFile('my_output.txt')]
        j.submit()
        # -- INPUTANDOUTPUTDATA BASIC STOP

        # -- INPUTANDOUTPUTDATA PEEKOUTPUT START
        j.peek()  # list output dir contents
        j.peek('my_output.txt')
        # -- INPUTANDOUTPUTDATA PEEKOUTPUT STOP

        # -- INPUTANDOUTPUTDATA FAILJOB START
        # This job will fail
        j = Job()
        j.application.exe = File('my_script2.sh')
        j.inputfiles = [LocalFile('my_input.txt')]
        j.outputfiles = [LocalFile('my_output_FAIL.txt')]
        j.submit()
        # -- INPUTANDOUTPUTDATA FAILJOB STOP

        # -- INPUTANDOUTPUTDATA WILDCARD START
        # This job will pick up both 'my_input.txt' and 'my_output.txt'
        j = Job()
        j.application.exe = File('my_script2.sh')
        j.inputfiles = [LocalFile('my_input.txt')]
        j.outputfiles = [LocalFile('*.txt')]
        j.submit()
        # -- INPUTANDOUTPUTDATA WILDCARD STOP

        # -- INPUTANDOUTPUTDATA OUTPUTFILES START
        j.outputfiles
        # -- INPUTANDOUTPUTDATA OUTPUTFILES STOP

        # -- INPUTANDOUTPUTDATA INPUTDATA START
        # Create a test script
        open('my_script3.sh', 'w').write("""#!/bin/bash
        echo $PATH
        ls -ltr
        more __GangaInputData.txt__
        echo "MY TEST FILE" > output_file.txt
        """)
        import os
        os.system('chmod +x my_script3.sh')

        # Submit a job
        j = Job()
        j.application.exe = File('my_script3.sh')
        j.inputdata = GangaDataset(files=[LocalFile('*.sh')])
        j.backend = Local()
        j.submit()
        # -- INPUTANDOUTPUTDATA INPUTDATA STOP

        # -- INPUTANDOUTPUTDATA GANGAFILES START
        plugins('gangafiles')
Example 54
j = Job(name=JNAME.format(
    polarity, year, mode
))
j.comment = (
    '{1} {2} MC {0} ntuple creation for k3pi mixing measurement.'
    .format(event_type, year, polarity)
)
j.application = DaVinci(version='v41r3')
j.application.optsfile = [s.format(path=base, year=year) for s in OPTIONS]

if args.test:
    # If testing, run over a couple of files locally,
    # saving the results to the sandbox
    j.inputdata = dataset[0:1]
    j.backend = Local()
    # Prepend test string to job name
    j.name = 'TEST_{0}'.format(j.name)
    j.outputfiles = [LocalFile(tfn)]
else:
    # If not testing, run over everything on the grid, splitting jobs
    # into groups of 5 files, notifying me on job completion/subjob failure,
    # and saving the results on the grid storage
    j.inputdata = dataset
    j.backend = Dirac()
    j.backend.settings['CPUTime'] = 60*60*24*7
    j.do_auto_resubmit = True
    j.splitter = SplitByFiles(filesPerJob=5, ignoremissing=True)
    j.postprocessors = [Notifier(address=email)]
    j.outputfiles = [DiracFile(tfn)]