Example #1 (score: 0)
    def testMergeThatAlwaysFailsOverwrite(self):
        """A merger that always fails (overwrite=True) must drive the job
        to 'failed' while still producing a merge summary file."""
        from Ganga.GPI import Job, Executable, Local, LocalFile

        job = Job()
        job.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        job.backend = Local()
        job.outputfiles = [LocalFile('out.txt')]
        job.splitter = CopySplitter()
        job.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        job.submit()

        # The failing merger must fail the job, but a summary is still written.
        assert run_until_state(job, 'failed', timeout=60)
        summary = os.path.join(job.outputdir, 'out.txt.merge_summary')
        assert os.path.exists(summary), 'Summary file should be created'
Example #2 (score: 0)
    def testMergeThatAlwaysFailsOverwrite(self):
        """Check that a permanently-failing merger with overwrite enabled
        fails the job yet still records a merge summary."""
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        # Independent attribute assignments — order chosen for readability.
        j.splitter = CopySplitter()
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        j.submit()

        assert run_until_state(j, 'failed', timeout=60)
        summary_path = os.path.join(j.outputdir, 'out.txt.merge_summary')
        assert os.path.exists(summary_path), 'Summary file should be created'
Example #3 (score: 0)
    def testMergeThatAlwaysFailsFlagsSet(self):
        """With both ignorefailed and overwrite set, an always-failing
        merger still fails the job and leaves a merge summary behind."""
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe="sh", args=["-c", "echo foo > out.txt"])
        j.backend = Local()
        j.outputfiles = [LocalFile("out.txt")]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(
            files=["out.txt"], ignorefailed=True, overwrite=True)

        j.submit()

        # Wait for the job to finish, then confirm failure plus summary file.
        run_until_completed(j, timeout=60)
        assert j.status == "failed"
        summary = os.path.join(j.outputdir, "out.txt.merge_summary")
        assert os.path.exists(summary), "Summary file should be created"
    def testMergeThatAlwaysFailsIgnoreFailed(self):
        """ignorefailed=True on a failing merger: the job still ends up
        'failed' and a merge summary is produced."""
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.backend = Local()
        j.splitter = CopySplitter()
        j.outputfiles = [LocalFile('out.txt')]
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.postprocessors = MergerTester(files=['out.txt'], ignorefailed=True)

        j.submit()

        sleep_until_completed(j, 60)
        assert j.status == 'failed'
        summary = os.path.join(j.outputdir, 'out.txt.merge_summary')
        assert os.path.exists(summary), 'Summary file should be created'
Example #5 (score: 0)
    def testJobCopy(self):
        """Test that a job copy copies everything properly"""
        from Ganga.GPI import Job, ARC, GenericSplitter, GangaDataset, LocalFile, FileChecker
        from Ganga.GPIDev.Base.Proxy import isType

        # Build a job that exercises every major configurable attribute.
        orig = Job()
        orig.application.exe = "sleep"
        orig.application.args = ['myarg']
        orig.backend = ARC()
        orig.backend.CE = "my.ce"
        orig.inputdata = GangaDataset()
        orig.inputdata.files = [LocalFile("*.txt")]
        orig.inputfiles = [LocalFile("*.txt")]
        orig.name = "testname"
        orig.outputfiles = [LocalFile("*.txt")]
        orig.postprocessors = FileChecker(files=['stdout'], searchStrings=['my search'])
        orig.splitter = GenericSplitter()
        orig.splitter.attribute = "application.args"
        orig.splitter.values = ['arg 1', 'arg 2', 'arg 3']

        clone = orig.copy()

        # Every attribute must survive the copy with type and value intact.
        self.assertTrue(isType(clone, Job))
        self.assertEqual(clone.application.exe, "sleep")
        self.assertEqual(clone.application.args, ["myarg"])
        self.assertTrue(isType(clone.backend, ARC))
        self.assertEqual(clone.backend.CE, "my.ce")
        self.assertTrue(isType(clone.inputdata, GangaDataset))
        self.assertEqual(len(clone.inputdata.files), 1)
        self.assertTrue(isType(clone.inputdata.files[0], LocalFile))
        self.assertEqual(clone.inputdata.files[0].namePattern, "*.txt")
        self.assertEqual(len(clone.inputfiles), 1)
        self.assertTrue(isType(clone.inputfiles[0], LocalFile))
        self.assertEqual(clone.inputfiles[0].namePattern, "*.txt")
        self.assertEqual(clone.name, "testname")
        self.assertEqual(len(clone.outputfiles), 1)
        self.assertTrue(isType(clone.outputfiles[0], LocalFile))
        self.assertEqual(clone.outputfiles[0].namePattern, "*.txt")
        self.assertEqual(len(clone.postprocessors), 1)
        self.assertTrue(isType(clone.postprocessors[0], FileChecker))
        self.assertEqual(clone.postprocessors[0].files, ["stdout"])
        self.assertEqual(clone.postprocessors[0].searchStrings, ["my search"])
        self.assertTrue(isType(clone.splitter, GenericSplitter))
        self.assertEqual(clone.splitter.attribute, "application.args")
        self.assertEqual(clone.splitter.values, ['arg 1', 'arg 2', 'arg 3'])
Example #6 (score: 0)
    def testMergeRemoval(self):
        """Removing a job while it is still running (with a slow,
        always-failing merger attached) must fully deregister it.
        See Savannah 33710."""
        from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

        j = Job()
        saved_id = j.id
        # The payload sleeps so the job is reliably 'running' when removed.
        j.application = Executable(exe="sh", args=["-c", "sleep 20; echo foo > out.txt"])
        j.backend = Local()
        j.outputfiles = [LocalFile("out.txt")]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=["out.txt"])

        j.postprocessors[0].ignorefailed = True
        j.postprocessors[0].alwaysfail = True
        j.postprocessors[0].wait = 10

        j.submit()
        run_until_state(j, state="running")
        j.remove()

        # Looking the removed job up by its old id must now raise KeyError.
        with pytest.raises(KeyError):
            jobs(saved_id)
Example #7 (score: 0)
    def testMergeRemoval(self):
        """Regression test for Savannah 33710: a still-running job with a
        slow, always-failing merger can be removed cleanly."""
        from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

        j = Job()
        removed_id = j.id

        # job will run for at least 20 seconds
        cmd = 'sleep 20; echo foo > out.txt'
        j.application = Executable(exe='sh', args=['-c', cmd])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'])

        # Configure the merger to fail every time, slowly and quietly.
        j.postprocessors[0].ignorefailed = True
        j.postprocessors[0].alwaysfail = True
        j.postprocessors[0].wait = 10

        j.submit()
        run_until_state(j, state='running')
        j.remove()

        # The removed job's id must no longer resolve in the repository.
        with pytest.raises(KeyError):
            jobs(removed_id)
Example #8 (score: 0)
    def test_h_PostProcessors(self):
        """Construct every documented postprocessor type (mergers, file
        checkers, custom checker, notifier) and attach a selection to a job.

        The ``# -- ... START/STOP`` marker comments appear to delimit
        snippets extracted for documentation — keep them verbatim.
        NOTE(review): confirm against the doc-extraction tooling.
        """

        from Ganga.GPI import Job, RootMerger, TextMerger, CustomMerger, SmartMerger, RootFileChecker, FileChecker, \
            Notifier, CustomChecker

        j = Job()

        # -- POSTPROCESSORS APPEND START
        j.postprocessors.append(RootMerger(files = ['thesis_data.root'],ignorefailed = True,overwrite = True))
        # -- POSTPROCESSORS APPEND STOP

        # -- POSTPROCESSORS TEXTMERGER START
        # Constructed but not attached — snippet only demonstrates creation.
        TextMerger(compress = True)
        # -- POSTPROCESSORS TEXTMERGER STOP

        # -- POSTPROCESSORS ROOTMERGER START
        RootMerger(args = '-T')
        # -- POSTPROCESSORS ROOTMERGER STOP

        # -- POSTPROCESSORS CUSTOMMERGER START
        CustomMerger().module = '~/mymerger.py'
        # -- POSTPROCESSORS CUSTOMMERGER STOP

        # -- POSTPROCESSORS SMARTMERGER START
        SmartMerger(files = ['thesis_data.root','stdout'],overwrite = True)
        # -- POSTPROCESSORS SMARTMERGER STOP

        # -- POSTPROCESSORS SMARTMERGERAPPEND START
        j.postprocessors.append(SmartMerger(files = ['thesis_data.root','stdout'],overwrite = True))
        # -- POSTPROCESSORS SMARTMERGERAPPEND STOP

        # -- POSTPROCESSORS SMARTMERGERAPPEND2 START
        j.postprocessors.append(TextMerger(files = ['stdout'],overwrite = True))
        j.postprocessors.append(RootMerger(files = ['thesis_data.root'],overwrite = False))
        # -- POSTPROCESSORS SMARTMERGERAPPEND2 STOP

        # -- POSTPROCESSORS FILECHECKER START
        fc = FileChecker(files = ['stdout'], searchStrings = ['Segmentation'])
        # -- POSTPROCESSORS FILECHECKER STOP

        # -- POSTPROCESSORS FILECHECKERMUSTEXIST START
        fc.filesMustExist = True
        # -- POSTPROCESSORS FILECHECKERMUSTEXIST STOP

        # -- POSTPROCESSORS FILECHECKEROPTS START
        fc.searchStrings = ['SUCCESS']
        fc.failIfFound = False
        # -- POSTPROCESSORS FILECHECKEROPTS STOP

        # NOTE(review): marker name below duplicates FILECHECKEROPTS above —
        # presumably meant to be ROOTFILECHECKER; confirm with the doc tooling.
        # -- POSTPROCESSORS FILECHECKEROPTS START
        rfc = RootFileChecker(files = ["*.root"])
        rfc.files = ["*.root"]
        j.postprocessors.append(rfc)
        # -- POSTPROCESSORS FILECHECKEROPTS STOP

        # -- POSTPROCESSORS CUSTOMCHECKER START
        cc = CustomChecker(module = '~/mychecker.py')
        # -- POSTPROCESSORS CUSTOMCHECKER STOP

        # -- POSTPROCESSORS NOTIFIER START
        n = Notifier(address = 'myaddress.cern.ch')
        # -- POSTPROCESSORS NOTIFIER STOP

        # -- POSTPROCESSORS NOTIFIEROPTS START
        n.verbose = True
        # -- POSTPROCESSORS NOTIFIEROPTS STOP

        # -- POSTPROCESSORS MULTIPLE START
        tm = TextMerger(files=['stdout'], compress=True)
        rm = RootMerger(files=['thesis_data.root'], args='-f6')
        fc = FileChecker(files=['stdout'], searchStrings=['Segmentation'])
        cc = CustomChecker(module='~/mychecker.py')
        # NOTE(review): 'myadress' looks like a typo for 'myaddress';
        # runtime string left unchanged.
        n = Notifier(address='myadress.cern.ch')

        # Replaces everything appended so far with this fixed list.
        j.postprocessors = [tm, rm, fc, cc, n]
        # -- POSTPROCESSORS MULTIPLE STOP

        # -- POSTPROCESSORS MULTIPLE2 START
        j.postprocessors.append(fc)
        j.postprocessors.append(tm)
        j.postprocessors.append(rm)
        j.postprocessors.append(cc)
        j.postprocessors.append(n)
        # -- POSTPROCESSORS MULTIPLE2 STOP

        # Remove by matching a freshly-constructed (default) FileChecker.
        j.postprocessors.remove(FileChecker())
Example #9 (score: 0)
    def test_h_PostProcessors(self):
        """Exercise creation and job-attachment of all postprocessor types.

        The ``# -- ... START/STOP`` markers appear to be anchors for
        documentation snippet extraction — keep them byte-identical.
        NOTE(review): confirm against the doc-extraction tooling.
        """

        from Ganga.GPI import Job, RootMerger, TextMerger, CustomMerger, SmartMerger, RootFileChecker, FileChecker, \
            Notifier, CustomChecker

        j = Job()

        # -- POSTPROCESSORS APPEND START
        j.postprocessors.append(
            RootMerger(files=['thesis_data.root'],
                       ignorefailed=True,
                       overwrite=True))
        # -- POSTPROCESSORS APPEND STOP

        # -- POSTPROCESSORS TEXTMERGER START
        # Constructed but not attached — snippet only demonstrates creation.
        TextMerger(compress=True)
        # -- POSTPROCESSORS TEXTMERGER STOP

        # -- POSTPROCESSORS ROOTMERGER START
        RootMerger(args='-T')
        # -- POSTPROCESSORS ROOTMERGER STOP

        # -- POSTPROCESSORS CUSTOMMERGER START
        CustomMerger().module = '~/mymerger.py'
        # -- POSTPROCESSORS CUSTOMMERGER STOP

        # -- POSTPROCESSORS SMARTMERGER START
        SmartMerger(files=['thesis_data.root', 'stdout'], overwrite=True)
        # -- POSTPROCESSORS SMARTMERGER STOP

        # -- POSTPROCESSORS SMARTMERGERAPPEND START
        j.postprocessors.append(
            SmartMerger(files=['thesis_data.root', 'stdout'], overwrite=True))
        # -- POSTPROCESSORS SMARTMERGERAPPEND STOP

        # -- POSTPROCESSORS SMARTMERGERAPPEND2 START
        j.postprocessors.append(TextMerger(files=['stdout'], overwrite=True))
        j.postprocessors.append(
            RootMerger(files=['thesis_data.root'], overwrite=False))
        # -- POSTPROCESSORS SMARTMERGERAPPEND2 STOP

        # -- POSTPROCESSORS FILECHECKER START
        fc = FileChecker(files=['stdout'], searchStrings=['Segmentation'])
        # -- POSTPROCESSORS FILECHECKER STOP

        # -- POSTPROCESSORS FILECHECKERMUSTEXIST START
        fc.filesMustExist = True
        # -- POSTPROCESSORS FILECHECKERMUSTEXIST STOP

        # -- POSTPROCESSORS FILECHECKEROPTS START
        fc.searchStrings = ['SUCCESS']
        fc.failIfFound = False
        # -- POSTPROCESSORS FILECHECKEROPTS STOP

        # NOTE(review): marker name below duplicates FILECHECKEROPTS above —
        # presumably meant to be ROOTFILECHECKER; confirm with the doc tooling.
        # -- POSTPROCESSORS FILECHECKEROPTS START
        rfc = RootFileChecker(files=["*.root"])
        rfc.files = ["*.root"]
        j.postprocessors.append(rfc)
        # -- POSTPROCESSORS FILECHECKEROPTS STOP

        # -- POSTPROCESSORS CUSTOMCHECKER START
        cc = CustomChecker(module='~/mychecker.py')
        # -- POSTPROCESSORS CUSTOMCHECKER STOP

        # -- POSTPROCESSORS NOTIFIER START
        n = Notifier(address='myaddress.cern.ch')
        # -- POSTPROCESSORS NOTIFIER STOP

        # -- POSTPROCESSORS NOTIFIEROPTS START
        n.verbose = True
        # -- POSTPROCESSORS NOTIFIEROPTS STOP

        # -- POSTPROCESSORS MULTIPLE START
        tm = TextMerger(files=['stdout'], compress=True)
        rm = RootMerger(files=['thesis_data.root'], args='-f6')
        fc = FileChecker(files=['stdout'], searchStrings=['Segmentation'])
        cc = CustomChecker(module='~/mychecker.py')
        # NOTE(review): 'myadress' looks like a typo for 'myaddress';
        # runtime string left unchanged.
        n = Notifier(address='myadress.cern.ch')

        # Replaces everything appended so far with this fixed list.
        j.postprocessors = [tm, rm, fc, cc, n]
        # -- POSTPROCESSORS MULTIPLE STOP

        # -- POSTPROCESSORS MULTIPLE2 START
        j.postprocessors.append(fc)
        j.postprocessors.append(tm)
        j.postprocessors.append(rm)
        j.postprocessors.append(cc)
        j.postprocessors.append(n)
        # -- POSTPROCESSORS MULTIPLE2 STOP

        # Remove by matching a freshly-constructed (default) FileChecker.
        j.postprocessors.remove(FileChecker())
Example #10 (score: 0)
))
# Chunk of a submission script: configures job `j` (created before this
# view) for a DaVinci ntuple-production run, then submits it.
# Assumes event_type, year, polarity, base, OPTIONS, dataset, tfn, email,
# args and queues are defined earlier in the script — TODO confirm.
j.comment = (
    '{1} {2} MC {0} ntuple creation for k3pi mixing measurement.'
    .format(event_type, year, polarity)
)
j.application = DaVinci(version='v41r3')
j.application.optsfile = [s.format(path=base, year=year) for s in OPTIONS]

if args.test:
    # If testing, run over a couple of files locally,
    # saving the results to the sandbox
    j.inputdata = dataset[0:1]
    j.backend = Local()
    # Prepend test string to job name
    j.name = 'TEST_{0}'.format(j.name)
    j.outputfiles = [LocalFile(tfn)]
else:
    # If not testing, run over everything on the grid, splitting jobs
    # into groups of 10 files, notifying me on job completion/subjob failure,
    # and save the results on the grid storage
    # NOTE(review): comment above says "groups of 10 files" but
    # filesPerJob=5 below — one of the two is stale; confirm.
    j.inputdata = dataset
    j.backend = Dirac()
    j.backend.settings['CPUTime'] = 60*60*24*7
    j.do_auto_resubmit = True
    j.splitter = SplitByFiles(filesPerJob=5, ignoremissing=True)
    j.postprocessors = [Notifier(address=email)]
    j.outputfiles = [DiracFile(tfn)]

if not args.inspect_job:
    # Submission is deferred to the queue system; j.submit is passed as a
    # bound method (not called here).
    queues.add(j.submit)  # noqa