Example #1
    def test_e_MultipleFiles(self):
        """Test that the wildcards work"""

        from Ganga.GPI import LocalFile, MassStorageFile, Job, ArgSplitter

        _ext = '.root'
        _ext2 = '.txt'
        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        file_3 = generate_unique_temp_file(_ext2)
        TestMassStorageWN._managed_files.append(file_1)
        TestMassStorageWN._managed_files.append(file_2)
        TestMassStorageWN._managed_files.append(file_3)

        j = Job()
        j.inputfiles = [
            LocalFile(file_1),
            LocalFile(file_2),
            LocalFile(file_3)
        ]
        j.splitter = ArgSplitter(
            args=[[_] for _ in range(0, TestMassStorageWN.sj_len)])
        j.outputfiles = [
            MassStorageFile(namePattern='*' + _ext,
                            outputfilenameformat='{jid}/{sjid}/{fname}'),
            MassStorageFile(namePattern='*' + _ext2)
        ]
        j.submit()
Example #2
    def test_j_Queues(self):
        from Ganga.GPI import queues, Job, GenericSplitter

        queues
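        # The bare 'queues' expression above displays the status of Ganga's worker-thread queues in an interactive session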

        # -- QUEUES EXAMPLE START
        for i in range(1, 10):
            j = Job()
            queues.add(j.submit)
        # -- QUEUES EXAMPLE STOP

        # -- QUEUES FUNCTION START
        def f(x):
            print(x)

        queues.add(f, args=(123, ))
        # -- QUEUES FUNCTION STOP

        # -- QUEUES SPLIT START
        j = Job()
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [i for i in range(0, 10)]
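        # parallel_submit makes the subjob submission run concurrently on the worker threads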
        j.parallel_submit = True
        j.submit()
Example #3
    def test_e_testInMemory(self):
        """
        Test the resubmit on a job in memory vs a job which has been loaded from disk
        """
        from Ganga.GPI import Job, Local

        j = Job()
        j.splitter = self._getSplitter()
        j.backend = Local()
        j.submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j)

        # Job has been created, split, run and now exists in memory (NOT SJXML)

        from Ganga.Utility.Config import setConfigOption
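        # While resubmitOnlyFailedSubjobs is True, a master-job resubmit() only resubmits failed subjobs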
        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'True')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).resubmit()

        # We should get here if calling resubmit doesn't stall

        j.subjobs(0).force_status('failed')

        j.resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test resubmit from the master job worked

        j.subjobs(0).force_status('failed')

        j.subjobs(0).resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test that the resubmit from the subjob worked

        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'False')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).force_status('failed')

        j.resubmit()

        sleep_until_completed(j)
Example #4
    def test_e_testInMemory(self):
        """
        Test the resubmit on a job in memory vs a job which has been loaded from disk
        """
        from Ganga.GPI import Job, Local

        j = Job()
        j.splitter = self._getSplitter()
        j.backend = Local()
        j.submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j)

        # Job has been created, split, run and now exists in memory (NOT SJXML)

        from Ganga.Utility.Config import setConfigOption
        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'True')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).resubmit()

        # We should get here if calling resubmit doesn't stall

        j.subjobs(0).force_status('failed', force=True)

        j.resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test resubmit from the master job worked

        j.subjobs(0).force_status('failed')

        j.subjobs(0).resubmit()

        sleep_until_completed(j)

        assert j.subjobs(0).status == 'completed'

        # Test that the resubmit from the subjob worked

        setConfigOption('Configuration', 'resubmitOnlyFailedSubjobs', 'False')

        j.resubmit()

        sleep_until_completed(j)

        j.subjobs(0).force_status('failed')

        j.resubmit()

        sleep_until_completed(j)
Example #5
    def test_c_onlyCreate(self):
        """here for testing job create"""
        from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile

        j = Job()
        j.application = Executable(exe='touch')
        j.splitter = ArgSplitter(args=[['abc.txt'], ['def.txt']])
        j.outputfiles = [
            MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}',
                            namePattern='*.txt')
        ]
Example #6
    def testArgSplitter(self):
        from Ganga.GPI import Job, ArgSplitter
        from GangaTest.Framework.utils import sleep_until_completed

        j = Job()
        j.splitter = ArgSplitter(args=[['1'], ['2'], ['3']])
        j.submit()

        self.assertTrue(sleep_until_completed(j, 60), 'Timeout on completing job')

        self.assertEqual(len(j.subjobs), 3)
Example #7
    def test_a_JobConstruction(self):
        """ First construct the Job object (singular)"""
        from Ganga.Utility.Config import getConfig
        self.assertFalse(getConfig('TestingFramework')['AutoCleanup'])

        from Ganga.GPI import Job, jobs, ArgSplitter
        j = Job()
        assert len(jobs) == 1

        j.splitter = ArgSplitter()
        j.splitter.args = getNestedList()

        assert j.splitter.args == getNestedList()
Example #8
    def test_g_Splitters(self):
        from Ganga.GPI import Job, GenericSplitter, GangaDataset, GangaDatasetSplitter, LocalFile

        # -- SPLITTERS BASICUSE START
        j = Job()
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['hello', 1], ['world', 2], ['again', 3]]
        j.submit()
        # -- SPLITTERS BASICUSE STOP

        # -- SPLITTERS SUBJOBCHECK START
        j.subjobs
        j.subjobs(0).peek("stdout")
        # -- SPLITTERS SUBJOBCHECK STOP

        # -- SPLITTERS MULTIATTRS START
        j = Job()
        j.splitter = GenericSplitter()
        j.splitter.multi_attrs = {'application.args': ['hello1', 'hello2'],
                                  'application.env': [{'MYENV': 'test1'}, {'MYENV': 'test2'}]}
        j.submit()
        # -- SPLITTERS MULTIATTRS STOP

        # -- SPLITTERS DATASETSPLITTER START
        j = Job()
        j.application.exe = 'more'
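        # At run time each subjob finds its share of the dataset listed in the __GangaInputData.txt__ file in its workdir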
        j.application.args = ['__GangaInputData.txt__']
        j.inputdata = GangaDataset(files=[LocalFile('*.txt')])
        j.splitter = GangaDatasetSplitter()
        j.splitter.files_per_subjob = 2
        j.submit()
Example #9
    def testMergeThatAlwaysFailsOverwrite(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        j.submit()

        assert run_until_state(j, 'failed', timeout=60)
        assert os.path.exists(os.path.join(j.outputdir, 'out.txt.merge_summary')), 'Summary file should be created'
Example #10
    def test_a_JobSubmission(self):
        """
        Create lots of subjobs and submit them
        """
        from Ganga.GPI import Job, Local
        j = Job()
        j.application.exe = "sleep"
        j.splitter = self._getSplitter()
        j.backend = Local()
        j.submit()

        # Test that we can submit a job and then check that the subjobs are created

        assert len(j.subjobs) == TestSJSubmit.n_subjobs
Example #11
    def testLargeJobSubmission(self):
        """
        Create lots of subjobs and submit them
        """
        from Ganga.GPI import Job, GenericSplitter, Local
        j = Job()
        j.application.exe = "sleep"
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['400'] for _ in range(0, 20)]
        j.backend = Local()
        j.submit()

        assert len(j.subjobs) == 20
Example #12
    def test_c_SplitJob(self):
        """Test submitting subjobs"""
        from Ganga.GPI import Job, LocalFile, MassStorageFile, ArgSplitter

        _ext = '.txt2'

        file_1 = generate_unique_temp_file(_ext)
        TestMassStorageWN._managed_files.append(file_1)

        j = Job()
        j.inputfiles = [LocalFile(file_1)]
        j.splitter = ArgSplitter(args=[[_] for _ in range(0, TestMassStorageWN.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + _ext, outputfilenameformat='{jid}/{sjid}/{fname}')]
        j.submit()
Example #13
    def testSubjobsSubmit(self):

        from Ganga.GPI import Job, Executable, TestSubmitter, ArgSplitter
        from Ganga.GPIDev.Lib.GangaList.GangaList import GangaList as gangaList
        from Ganga.GPIDev.Base.Proxy import isType

        j = Job(application=Executable(), backend=TestSubmitter(time=1))
        j.splitter = ArgSplitter(args=[['A'], ['B'], ['C']])

        j.submit()
        assert run_until_completed(j), 'Job must complete'
        assert len(j.subjobs) == 3, 'splitting must occur'
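        # The master of each subjob should be a single Job, not wrapped in a GangaList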
        for jj in j.subjobs:
            assert not isType(jj.master, gangaList)
Example #14
    def testMergeThatAlwaysFailsFlagsSet(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe="sh", args=["-c", "echo foo > out.txt"])
        j.backend = Local()
        j.outputfiles = [LocalFile("out.txt")]
        j.splitter = CopySplitter()
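        # ignorefailed tolerates failed subjobs during the merge; overwrite replaces existing merge output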
        j.postprocessors = MergerTester(files=["out.txt"], ignorefailed=True, overwrite=True)

        j.submit()

        run_until_completed(j, timeout=60)
        assert j.status == "failed"
        assert os.path.exists(os.path.join(j.outputdir, "out.txt.merge_summary")), "Summary file should be created"
Example #15
    def test_a_jobSubmit(self):
        """here for testing a submit"""
        from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile

        j = Job()
        j.application = Executable(exe='touch')
        j.splitter = ArgSplitter(args=[['abc.txt'], ['def.txt']])
        j.outputfiles = [
            MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}',
                            namePattern='*.txt')
        ]
        j.submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j)
Example #16
    def testMergeThatAlwaysFailsIgnoreFailed(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], ignorefailed=True)

        j.submit()

        sleep_until_completed(j, 60)
        assert j.status == 'failed'
        assert os.path.exists(os.path.join(j.outputdir, 'out.txt.merge_summary')), 'Summary file should be created'
Example #17
    def test_a_JobConstruction(self):
        """ First construct the Job object (singular)"""
        from Ganga.Utility.Config import getConfig
        self.assertFalse(getConfig('TestingFramework')['AutoCleanup'])

        from Ganga.GPI import Job, jobs, ArgSplitter
        j = Job()
        self.assertEqual(len(jobs), 1)  # Don't really gain anything from assertEqual...

        j.splitter = ArgSplitter(args=[[i] for i in range(global_subjob_num)])
        j.submit()

        self.assertEqual(len(j.subjobs), global_subjob_num)
        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j, 60)
Example #18
    def test_a_testClientSideSubmit(self):
        """Test the client side code whilst stil using the Local backend"""

        from Ganga.GPI import LocalFile, Job, ArgSplitter

        file_1 = generate_unique_temp_file(TestLocalFileClient._ext)
        file_2 = generate_unique_temp_file(TestLocalFileClient._ext)
        TestLocalFileClient._managed_files.append(file_1)
        TestLocalFileClient._managed_files.append(file_2)

        j = Job()
        j.inputfiles = [LocalFile(file_1), LocalFile(file_2)]
        j.splitter = ArgSplitter(
            args=[[_] for _ in range(TestLocalFileClient.sj_len)])
        j.outputfiles = [LocalFile(namePattern='*' + TestLocalFileClient._ext)]
        j.submit()
Example #19
    def test_g_MultipleFiles(self):
        """Test that the wildcards work"""

        from Ganga.GPI import LocalFile, MassStorageFile, Job, ArgSplitter

        _ext = '.root'
        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        TestMassStorageWN._managed_files.append(file_1)
        TestMassStorageWN._managed_files.append(file_2)

        j = Job()
        j.inputfiles = [LocalFile(file_1), LocalFile(file_2)]
        j.splitter = ArgSplitter(args=[[_] for _ in range(0, TestMassStorageWN.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + _ext, outputfilenameformat='{jid}_{sjid}_{fname}')]
        j.submit()
Example #20
    def testJobCopy(self):
        """Test that a job copy copies everything properly"""
        from Ganga.GPI import Job, ARC, GenericSplitter, GangaDataset, LocalFile, FileChecker
        from Ganga.GPIDev.Base.Proxy import isType

        j = Job()
        j.application.exe = "sleep"
        j.application.args = ['myarg']
        j.backend = ARC()
        j.backend.CE = "my.ce"
        j.inputdata = GangaDataset()
        j.inputdata.files = [LocalFile("*.txt")]
        j.inputfiles = [LocalFile("*.txt")]
        j.name = "testname"
        j.outputfiles = [LocalFile("*.txt")]
        j.postprocessors = FileChecker(files=['stdout'], searchStrings=['my search'])
        j.splitter = GenericSplitter()
        j.splitter.attribute = "application.args"
        j.splitter.values = ['arg 1', 'arg 2', 'arg 3']
        j2 = j.copy()

        # test the copy has worked
        self.assertTrue(isType(j2, Job))
        self.assertEqual(j2.application.exe, "sleep")
        self.assertEqual(j2.application.args, ["myarg"])
        self.assertTrue(isType(j2.backend, ARC))
        self.assertEqual(j2.backend.CE, "my.ce")
        self.assertTrue(isType(j2.inputdata, GangaDataset))
        self.assertEqual(len(j2.inputdata.files), 1)
        self.assertTrue(isType(j2.inputdata.files[0], LocalFile))
        self.assertEqual(j2.inputdata.files[0].namePattern, "*.txt")
        self.assertEqual(len(j2.inputfiles), 1)
        self.assertTrue(isType(j2.inputfiles[0], LocalFile))
        self.assertEqual(j2.inputfiles[0].namePattern, "*.txt")
        self.assertEqual(j2.name, "testname")
        self.assertEqual(len(j2.outputfiles), 1)
        self.assertTrue(isType(j2.outputfiles[0], LocalFile))
        self.assertEqual(j2.outputfiles[0].namePattern, "*.txt")
        self.assertEqual(len(j2.postprocessors), 1)
        self.assertTrue(isType(j2.postprocessors[0], FileChecker))
        self.assertEqual(j2.postprocessors[0].files, ["stdout"])
        self.assertEqual(j2.postprocessors[0].searchStrings, ["my search"])
        self.assertTrue(isType(j2.splitter, GenericSplitter))
        self.assertEqual(j2.splitter.attribute, "application.args")
        self.assertEqual(j2.splitter.values, ['arg 1', 'arg 2', 'arg 3'])
Example #21
    def test_c_SplitJob(self):
        """Test submitting subjobs"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import Job, LocalFile, ArgSplitter

        _ext = '.txt2'

        file_1 = generate_unique_temp_file(_ext)
        TestMassStorageWN._managed_files.append(file_1)

        j = Job()
        j.inputfiles = [LocalFile(file_1)]
        j.splitter = ArgSplitter(args=[[_] for _ in range(0, TestMassStorageWN.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + _ext, outputfilenameformat=self.extendedFormat)]
        j.submit()

        for f in j.outputfiles:
            assert f.outputfilenameformat == self.extendedFormat
Example #22
    def test_CondorConfigDefaults(self):
        from Ganga.GPI import Job, TestSplitter, TestSubmitter

        j = Job()
        j.splitter = TestSplitter()
        j.splitter.backs = [TestSubmitter(), TestSubmitter(), TestSubmitter()]
        j.backend = TestSubmitter()
        b = j.splitter.backs[1]
        b.fail = 'submit'

        assert j.status == 'new'

        j.submit(keep_going=False)
        assert j.status in ['submitted', 'running']
        assert j.subjobs[0].status in ['submitted', 'running']

        assert j.subjobs[1].status == 'new'
        assert j.subjobs[2].status == 'new'
Example #23
    def testInterfaceLookFeel(self):

        # Just testing that the job construction works

        from Ganga.GPI import Job, Im3ShapeApp, Im3ShapeSplitter, DiracFile, LocalFile, GangaDataset, Dirac

        j = Job()
        app = Im3ShapeApp(
            im3_location=DiracFile(lfn='/lsst/y1a1-v2-z/software/2016-02-24/im3shape-grid.tar.gz'),
            ini_location=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/params_disc.ini'),
            blacklist=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/blacklist-y1.txt'))
        j.application = app
        j.backend = Dirac()
        mydata = GangaDataset()
        mydata.append(DiracFile(lfn='/lsst/DES0005+0043-z-meds-y1a1-gamma.fits.fz'))
        j.inputdata = mydata
        j.splitter = Im3ShapeSplitter(size=20)
        j.outputfiles = [DiracFile('*.main.txt'), DiracFile('*.epoch.txt')]
Example #24
    def test_a_Savannah32342(self):
        """Basic splitting test"""
        from Ganga.GPI import Job, ArgSplitter, jobs

        j = Job()
        j.splitter = ArgSplitter(args=[['A'], ['B']])
        j.submit()

        assert len(j.subjobs) == 2, 'Splitting must have occurred'
        for jj in j.subjobs:
            assert jj._impl._getParent(), 'Parent must be set'

        # make sure we have our job in the repository
        job_seen = False
        for jj in jobs:
            if j is jj:
                job_seen = True
                break

        assert job_seen, 'Job must be in the repository'
Example #25
    def test_CondorConfigDefaults(self):
        # A test with sequential submission
        from Ganga.GPI import Job, TestSplitter, TestSubmitter

        j = Job()
        j.splitter = TestSplitter()
        j.splitter.backs = [TestSubmitter(), TestSubmitter(), TestSubmitter()]
        j.backend = TestSubmitter()
        b = j.splitter.backs[1]
        b.fail = 'submit'
        j.parallel_submit = False

        assert j.status == 'new'

        with pytest.raises(IncompleteJobSubmissionError):
            j.submit(keep_going=True)
        assert j.subjobs[0].status in ['submitted', 'running']

        assert j.subjobs[1].status == 'new'
        assert j.subjobs[2].status == 'new'
Example #26
    def test_a_testClientSideSubmit(self):
        """Test the client side code whilst stil using the Local backend"""

        MassStorageFile = self.fileClass

        from Ganga.GPI import LocalFile, Job, ArgSplitter

        TestMassStorageClient.cleanUp()

        assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

        file_1 = generate_unique_temp_file(TestMassStorageClient._ext)
        file_2 = generate_unique_temp_file(TestMassStorageClient._ext)
        TestMassStorageClient._managed_files.append(file_1)
        TestMassStorageClient._managed_files.append(file_2)

        j = Job()
        j.inputfiles = [LocalFile(file_1), LocalFile(file_2)]
        j.splitter = ArgSplitter(args=[[_] for _ in range(TestMassStorageClient.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + TestMassStorageClient._ext)]
        j.submit()
Example #27
    def test_a_testClientInputSubmit(self):
        """Test that a job can be submitted with inputfiles in the input"""

        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        _ext = '.root'
        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        self._managed_files.append(file_1)
        self._managed_files.append(file_2)
        msf_1 = MassStorageFile(file_1)
        msf_2 = MassStorageFile(file_2)
        msf_1.put()
        msf_2.put()

        j = Job()
        j.inputfiles = [msf_1, msf_2]
        j.splitter = ArgSplitter(args=[[_] for _ in range(self.sj_len)])
        j.outputfiles = [LocalFile(namePattern='*' + _ext)]
        j.submit()
Example #28
    def test_a_JobConstruction(self):
        """ First construct the Job object (singular)"""
        from Ganga.Utility.Config import getConfig
        self.assertFalse(getConfig('TestingFramework')['AutoCleanup'])

        from Ganga.GPIDev.Base.Proxy import stripProxy
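        # stripProxy returns the implementation object behind a GPI proxy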
        from Ganga.GPI import Job, jobs, ArgSplitter
        j = Job()
        orig_sj_proxy = j.subjobs
        j.splitter = ArgSplitter()
        j.splitter.args = [[0], [1]]
        i = 0
        for sj in stripProxy(j.splitter).split(stripProxy(j)):
            sj.id = i
            stripProxy(j).subjobs.append(sj)
            i = i + 1
        assert len(jobs) == 1
        assert len(j.subjobs) == 2

        sj_proxy = j.subjobs
        assert sj_proxy is j.subjobs
        assert orig_sj_proxy is sj_proxy

        for sj in j.subjobs:
            assert isinstance(sj, Job)

        global global_AutoStartReg
        global_AutoStartReg = False

        stripProxy(sj)._getRegistry().flush_all()

        for sj in j.subjobs:
            stripProxy(sj)._setDirty()

        stripProxy(sj)._getRegistry().flush_all()

        for sj in j.subjobs:
            stripProxy(sj)._setDirty()
Example #29
    def testKilling(self):
        """
        Create some subjobs and kill them
        """
        from Ganga.GPI import Job, GenericSplitter, Local
        from GangaTest.Framework.utils import sleep_until_state
        j = Job()
        j.application.exe = "sleep"
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['400'] for _ in range(0, 5)]
        j.backend = Local()
        j.submit()

        sleep_until_state(j, None, 'running')
        assert j.status == 'running'

        j.subjobs(0).kill()
        assert j.subjobs(0).status == 'killed'
        assert j.subjobs(1).status != 'killed'
        j.kill()
        assert j.status == 'killed'
        assert all(sj.status == 'killed' for sj in j.subjobs)
Example #30
def submit(N, K):
    jobs = []
    for i in range(K):
        j = Job()
        j.backend = LCG()
        j.backend.middleware = 'GLITE'
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['x']] * N
        j.submit()
        jobs.append(j)
    import time

    def finished():
        for j in jobs:
            if j.status not in ['failed', 'completed']:
                return False
        return True

    while not finished():
        time.sleep(1)

    return jobs
Example #31
    def test_d_testXMLContent(self):
        # Check content of XML is as expected
        from Ganga.Core.GangaRepository.VStreamer import to_file, from_file

        from Ganga.GPI import jobs, Job, ArgSplitter
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from tempfile import NamedTemporaryFile

        j = jobs(0)
        assert path.isfile(getXMLFile(j))
        with open(getXMLFile(j)) as handler:
            tmpobj, errs = from_file(handler)

            assert tmpobj.splitter
            assert tmpobj.splitter.args == getNestedList()

            ignore_subs = ''

            with NamedTemporaryFile(delete=False) as new_temp_file:

                to_file(stripProxy(j), new_temp_file, ignore_subs)
                new_temp_file.flush()

            with NamedTemporaryFile(delete=False) as new_temp_file2:
                j2 = Job()
                j2.splitter = ArgSplitter()
                j2.splitter.args = getNestedList()

                to_file(stripProxy(j2), new_temp_file2, ignore_subs)
                new_temp_file2.flush()

            assert open(handler.name).read() == open(new_temp_file.name).read()
            assert open(handler.name).read() != open(new_temp_file2.name).read()

            unlink(new_temp_file.name)
            unlink(new_temp_file2.name)
Example #32
    def testMergeRemoval(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

        # see Savannah 33710
        j = Job()
        jobID = j.id
        # job will run for at least 20 seconds
        j.application = Executable(exe='sh',
                                   args=['-c', 'sleep 20; echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'])

        j.postprocessors[0].ignorefailed = True
        j.postprocessors[0].alwaysfail = True
        j.postprocessors[0].wait = 10

        j.submit()
        run_until_state(j, state='running')
        j.remove()

        with pytest.raises(KeyError):
            jobs(jobID)
Example #33
    def test_g_MultipleFiles(self):
        """Test that the wildcards work"""

        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        _ext = '.root'
        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        TestMassStorageWN._managed_files.append(file_1)
        TestMassStorageWN._managed_files.append(file_2)

        j = Job()
        j.inputfiles = [LocalFile(file_1), LocalFile(file_2)]
        j.splitter = ArgSplitter(args=[[_] for _ in range(0, TestMassStorageWN.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + _ext, outputfilenameformat=self.customOutputFormat)]
        
        for f in j.outputfiles:
            assert f.outputfilenameformat == self.customOutputFormat

        j.submit()

        for f in j.outputfiles:
            assert f.outputfilenameformat == self.customOutputFormat
Example #34
j = Job(name=JNAME.format(polarity, year))
# j.application = make_exec_app(version='v41r2p1')
# j.application.options = [path.format(basedir) for path in OPTIONS]

# Old submission method
j.application = DaVinci(version='v41r2p1')
# j.application = DaVinci()
j.application.optsfile = [path.format(basedir) for path in OPTIONS]

# If testing, run over a couple of files locally, saving
# the results to the sandbox.
# Else, run over everything on the grid, splitting jobs into groups of 200
# files, notifying me on job completion/subjob failure,
# and save the results on the grid storage
if args.test:
    j.inputdata = dataset[0:1]
    j.backend = Local()
    # Prepend test string to job name
    j.name = 'TEST_{0}'.format(j.name)
    j.outputfiles = [LocalFile(tfn)]
else:
    j.inputdata = dataset
    j.backend = Dirac()
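    # Automatically resubmit subjobs that fail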
    j.do_auto_resubmit = True
    j.splitter = SplitByFiles(filesPerJob=200)
    j.postprocessors = [Notifier(address=email)]
    j.outputfiles = [DiracFile(tfn)]

if not args.inspect_job:
    queues.add(j.submit)  # noqa
Example #35
def makeIPResolutionsJob(jobName, dataFile, brunelVersion="", dataType='2012',
                         extraFiles=[], ddDBTag=None, condDBTag=None):
    """Call this method to make a job that will run Brunel with the IP resolutions
    ntupling algorithm, using the given config file (data type, etc.) and data file.
    Add this method to your ~/.ganga.py file to have it automatically loaded whenever
    you run ganga."""

    print("Creating an IP resolutions monitoring job named '%s'" % jobName)
    dataFile = os.path.expandvars(dataFile)
    if not os.path.exists(dataFile):
        print("Could not find the data file \"%s\"!" % dataFile)
        return None
    dataFile = os.path.abspath(dataFile)
    print("Using data file '%s'" % dataFile)

    print("Parsing data file for options.")
    stepInfos = parseDataFileForSteps(dataFile)
    productionOptsFiles = None
    if len(stepInfos) > 0:
        for step in stepInfos:
            # Get any additional options files used for the Brunel step.
            if step['ApplicationName'] == "Brunel":
                productionOptsFiles = []
                for optsFile in step['OptionFiles'].split(";"):
                    productionOptsFiles.append(optsFile.replace(" \n", ""))
            # Get the DB tags used for the Gauss step. This should be the
            # same as for the Brunel step but it seems there's a bug in bkk.
            if step['ApplicationName'] in ("Gauss", "DaVinci", "Brunel"):
                if ddDBTag is None:
                    ddDBTag = step['DDDB']
                if condDBTag is None:
                    condDBTag = step['CONDDB']
    if ddDBTag is None and len(extraFiles) == 0:
        print("The DB tags could not be retrieved from the data file and no extra "
              "options files have been specified! The job cannot be made.")
        return None

    j = Job(name=jobName)
    if brunelVersion == "":
        j.application = Brunel()
    else:
        j.application = Brunel(version=brunelVersion)

    j.application.extraopts = 'Brunel().OutputLevel = 5\nBrunel().PrintFreq = 10000\nBrunel().DataType = "{0}"\n'.format(dataType)
    if ddDBTag is not None:
        print("Using DDDBTag \"%s\" and CondDBTag \"%s\"" % (ddDBTag, condDBTag))
        j.application.extraopts += "\nBrunel().DDDBtag = \"%s\"\nBrunel().CondDBtag = \"%s\"\n" % (ddDBTag, condDBTag)
        if 'sim' in ddDBTag.lower():
            j.application.extraopts += '\nBrunel().Simulation = True\n'
            # j.application.extraopts += '\nBrunel().WithMC = True\n'
    else:
        print("The DB tags could not be retrieved from the data file.")
        print("If they are not defined in one of the extra options files default values will be used.")

    j.application.optsfile = [mainIPConfigFile] + extraFiles
    print("Using options files:")
    for optsFile in j.application.optsfile:
        print(optsFile.name)
    if productionOptsFiles is not None and len(productionOptsFiles) > 0:
        for optsFile in productionOptsFiles:
            print(optsFile)
            j.application.extraopts += "\nimportOptions(\"%s\")\n" % optsFile
    print("Reading in data ...")
    j.inputdata = j.application.readInputData(dataFile)
    print("Data read. %s files found." % len(j.inputdata.files))
    if len(j.inputdata.files) > 0:
        j.application.extraopts += '\nBrunel().InputType = "{0}"\n'.format(j.inputdata.files[0].name.split('.')[-1].upper())
    j.splitter = SplitByFiles(filesPerJob=10)

    j.backend = Dirac()

    j.outputfiles = [DiracFile('*.root')]

    return j
Example #36
j.comment = (
    '{1} {2} MC {0} ntuple creation for k3pi mixing measurement.'
    .format(event_type, year, polarity)
)
j.application = DaVinci(version='v41r3')
j.application.optsfile = [s.format(path=base, year=year) for s in OPTIONS]

if args.test:
    # If testing, run over a couple of files locally,
    # saving the results to the sandbox
    j.inputdata = dataset[0:1]
    j.backend = Local()
    # Prepend test string to job name
    j.name = 'TEST_{0}'.format(j.name)
    j.outputfiles = [LocalFile(tfn)]
else:
    # If not testing, run over everything on the grid, splitting jobs
    # into groups of 5 files, notifying me on job completion/subjob failure,
    # and save the results on the grid storage
    j.inputdata = dataset
    j.backend = Dirac()
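    # 60*60*24*7 seconds: request one week of CPU time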
    j.backend.settings['CPUTime'] = 60*60*24*7
    j.do_auto_resubmit = True
    j.splitter = SplitByFiles(filesPerJob=5, ignoremissing=True)
    j.postprocessors = [Notifier(address=email)]
    j.outputfiles = [DiracFile(tfn)]

if not args.inspect_job:
    queues.add(j.submit)  # noqa