def test_g_MultipleFiles(self):
    """Test that the wildcards work"""
    from Ganga.GPI import LocalFile, MassStorageFile, Job, ArgSplitter

    ext = '.root'
    # Two throwaway input files, registered for cleanup by the fixture
    temp_files = [generate_unique_temp_file(ext) for _ in range(2)]
    TestMassStorageWN._managed_files.extend(temp_files)

    job = Job()
    job.inputfiles = [LocalFile(name) for name in temp_files]
    job.splitter = ArgSplitter(
        args=[[i] for i in range(0, TestMassStorageWN.sj_len)])
    # Wildcard output pattern must collect every matching file per subjob
    job.outputfiles = [
        MassStorageFile(namePattern='*' + ext,
                        outputfilenameformat='{jid}_{sjid}_{fname}')
    ]
    job.submit()
def test_Savannah13979(self):
    """Application args of various shapes must survive export/load."""
    from Ganga.GPI import Job, Executable, export, load

    self.fname = 'test_savannah_13979.ganga'
    j = Job(application=Executable())

    args_cases = [
        ['a'],
        ['a b'],
        ['a b', 'simple', 'normal\nnewline', 'another multiline'],
    ]
    # Each argument list is exported and re-loaded; it must round-trip intact.
    for args in args_cases:
        j.application.args = args
        export(j, self.fname)
        reloaded = load(self.fname)[0]
        self.assertEqual(reloaded.application.args, args)
def test_Savannah74531(self):
    """jobtree must accept slices, select() results and plain lists."""
    from Ganga.GPI import Job, jobs, jobtree

    # Populate the repository with ten jobs
    for _ in range(10):
        Job()

    slice_group = jobs[0:4]
    select_group = jobs.select(4, 7)
    explicit_group = [jobs[8], jobs[9]]

    jobtree.cd()
    jobtree.mkdir('testTreeOne')
    for group in (slice_group, select_group, explicit_group):
        jobtree.add(group, 'testTreeOne')
    jobtree.rm('testTreeOne')
def test_a_Savannah32342(self):
    """Basic splitting test"""
    from Ganga.GPI import Job, ArgSplitter, jobs

    j = Job()
    j.splitter = ArgSplitter(args=[['A'], ['B']])
    j.submit()

    assert len(j.subjobs) == 2, 'Splitting must have occured'
    for sub in j.subjobs:
        assert sub._impl._getParent(), 'Parent must be set'

    # make sure we have our job in the repository
    assert any(j is entry for entry in jobs), 'Job must be in the repository'
def test_CondorConfigDefaults(self):
    # Sequential submission where the middle subjob is rigged to fail:
    # the first subjob must go through, the later ones must stay 'new'.
    from Ganga.GPI import Job, TestSplitter, TestSubmitter

    j = Job()
    j.splitter = TestSplitter()
    j.splitter.backs = [TestSubmitter(), TestSubmitter(), TestSubmitter()]
    j.backend = TestSubmitter()

    broken_backend = j.splitter.backs[1]
    broken_backend.fail = 'submit'
    j.parallel_submit = False

    assert j.status == 'new'
    with pytest.raises(IncompleteJobSubmissionError):
        j.submit(keep_going=True)

    assert j.subjobs[0].status in ['submitted', 'running']
    assert j.subjobs[1].status == 'new'
    assert j.subjobs[2].status == 'new'
def testDatasetsFunctions(self):
    """Exercise LHCbDataset construction, job assignment and set operations.

    'lfn:' prefixed names must become DiracFile entries, 'pfn:' prefixed
    ones LocalFile entries.
    """
    # NOTE: dropped the unused PhysicalFile import carried by the original.
    from Ganga.GPI import DiracFile, LHCbDataset, Job, LocalFile

    # test constructors/setters
    ds = LHCbDataset(['lfn:a', 'pfn:b'])
    assert len(ds) == 2
    print(ds[0])
    assert isinstance(ds[0], DiracFile)
    assert isinstance(ds[1], LocalFile)

    ds = LHCbDataset()
    ds.files = ['lfn:a', 'pfn:b']
    assert isinstance(ds[0], DiracFile)
    assert isinstance(ds[1], LocalFile)
    assert ds.getFullFileNames() == ['LFN:a', 'PFN:b']
    ds.files.append('lfn:c')
    assert isinstance(ds[-1], DiracFile)

    # check job assignments
    j = Job()
    j.inputdata = ['lfn:a', 'pfn:b']
    assert isinstance(j.inputdata, LHCbDataset)
    j.outputfiles = ['a', DiracFile('b')]
    assert isinstance(j.outputfiles[0], LocalFile)
    print(type(j.outputfiles[1]))
    assert isinstance(j.outputfiles[1], DiracFile)

    # check the LHCbDataset functions:
    assert ds.getLFNs() == ['a', 'c']
    assert ds.getPFNs() == ['b']

    ds2 = LHCbDataset(['lfn:a', 'lfn:d'])
    ds.extend(ds2, True)
    assert len(ds) == 4

    # check the useful difference functions etc
    assert sorted(ds.difference(ds2).getFileNames()) == ['b', 'c']
    assert sorted(ds.symmetricDifference(ds2).getFileNames()) == ['b', 'c']
    assert sorted(ds.intersection(ds2).getFileNames()) == ['a', 'd']
    assert sorted(ds.union(ds2).getFileNames()) == ['a', 'b', 'c', 'd']
def test_a_testClientSideSubmit(self):
    """Test the client side code whilst still using the Local backend"""
    MassStorageFile = self.fileClass
    from Ganga.GPI import LocalFile, Job, ArgSplitter

    TestMassStorageClient.cleanUp()

    # The Local backend must be configured for client-side post-processing
    assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

    temp_files = [generate_unique_temp_file(TestMassStorageClient._ext)
                  for _ in range(2)]
    TestMassStorageClient._managed_files.extend(temp_files)

    j = Job()
    j.inputfiles = [LocalFile(name) for name in temp_files]
    j.splitter = ArgSplitter(
        args=[[i] for i in range(TestMassStorageClient.sj_len)])
    j.outputfiles = [
        MassStorageFile(namePattern='*' + TestMassStorageClient._ext)]
    j.submit()
def Savannah28511(self):
    """Resubmission must work after completion and after a forced failure."""
    # NOTE: dropped the unused TestSubmitter import carried by the original.
    from Ganga.GPI import Job
    from GangaTest.Framework.utils import sleep_until_completed, sleep_until_state

    j = Job()

    j.submit()
    self.assertTrue(sleep_until_completed(j, 20), 'Job is not completed')

    j.resubmit()
    self.assertTrue(sleep_until_completed(j, 30),
                    'Job is not completed after fail during resubmit')

    # Force the job into 'failed' and check that resubmit recovers it
    j._impl.updateStatus('failed')
    self.assertTrue(sleep_until_state(j, 20, 'failed'), 'Job is not failed')

    j.resubmit()
    self.assertTrue(sleep_until_completed(j, 20),
                    'Job is not completed after resubmit')
def test_Savannah19059(self):
    """The Interactive backend must copy the output sandbox back."""
    import os.path
    from Ganga.GPI import Executable, Job, Interactive, LocalFile
    from GangaTest.Framework.utils import sleep_until_completed

    self.fname = 'abc'

    # 'touch <fname>' creates the file we expect back in the sandbox
    app = Executable()
    app.exe = 'touch'
    app.args = [self.fname]

    self.j = Job(backend=Interactive(), application=app,
                 outputfiles=[LocalFile(self.fname)])
    self.j.submit()

    self.assertTrue(sleep_until_completed(self.j, 60),
                    'Timeout on registering Interactive job as completed')
    self.assertTrue(
        os.path.exists(os.path.join(self.j.outputdir, self.fname)))
def testPrepareJob(self):
    """A GaudiExec job with a local options file must prepare cleanly."""
    from Ganga.GPI import Job, LocalFile, prepareGaudiExec
    import os

    # Start from a clean release area
    release_dir = TestExternalGaudiExec.tmpdir_release
    if os.path.exists(release_dir):
        os.system("rm -rf %s/*" % release_dir)

    j = Job(application=prepareGaudiExec(
        'DaVinci', latestDaVinci(), release_dir))

    opts_file = path.join(release_dir, 'hello.py')
    FileBuffer('hello.py', 'print("Hello")').create(opts_file)
    assert path.isfile(opts_file)

    j.application.options = [LocalFile(opts_file)]
    j.prepare()
def test_a_testClientInputSubmit(self):
    """Test that a job can be submitted with inputfiles in the input"""
    MassStorageFile = self.fileClass
    from Ganga.GPI import LocalFile, Job, ArgSplitter

    ext = '.root'

    # Upload two temp files to mass storage and use them as job input
    stored_files = []
    for _ in range(2):
        local_name = generate_unique_temp_file(ext)
        self._managed_files.append(local_name)
        msf = MassStorageFile(local_name)
        msf.put()
        stored_files.append(msf)

    j = Job()
    j.inputfiles = stored_files
    j.splitter = ArgSplitter(args=[[i] for i in range(self.sj_len)])
    j.outputfiles = [LocalFile(namePattern='*' + ext)]
    j.submit()
def testInternal(self):
    """GaudiExec must resolve its options file and accept DiracFile options."""
    from Ganga.GPI import GaudiExec, Job, LocalFile, DiracFile

    test_fol = path.join(gettempdir(), 'GaudiExecTest')
    shutil.rmtree(test_fol, ignore_errors=True)
    makedirs(test_fol)

    test_opts = path.join(test_fol, 'testOpts.py')
    with open(test_opts, 'w+') as opt_handle:
        opt_handle.write("print('hello')")
    assert path.exists(test_opts)

    gr = GaudiExec(directory=test_fol, options=[LocalFile(test_opts)])

    # The resolved options file must be a LocalFile pointing back at test_opts
    raw_opts = stripProxy(gr).getOptsFiles()[0]
    assert isinstance(raw_opts, stripProxy(LocalFile))
    rebuilt_path = path.join(raw_opts.localDir, raw_opts.namePattern)
    assert rebuilt_path == test_opts
    assert open(rebuilt_path).read() == "print('hello')"

    j = Job()
    j.application = gr
    assert isinstance(j.application, GaudiExec)

    # A DiracFile option must keep its LFN through assignment
    df = DiracFile(lfn='/not/some/file')
    gr.options = [df]
    assert gr.options[0].lfn == df.lfn

    shutil.rmtree(test_fol, ignore_errors=True)
def _constructJob():
    """
    Helper constructing a fresh GaudiExec job object for submission testing.
    This just helps reduce repeat code between tests.
    """
    import os
    release_dir = TestExternalGaudiExec.tmpdir_release
    if os.path.exists(release_dir):
        os.system("rm -fr %s/" % release_dir)

    from Ganga.GPI import Job, LocalFile, prepareGaudiExec
    j = Job(application=prepareGaudiExec(
        'DaVinci', latestDaVinci(), release_dir))

    opts_path = path.join(release_dir, 'testfile.py')
    FileBuffer('testfile.py', 'print("ThisIsATest")').create(opts_path)
    j.application.options = [LocalFile(opts_path)]
    return j
def test_submit_monitor(gpi):
    """
    Test that an LCG job can be monitored
    """
    from Ganga.GPI import Job, LCG

    j = Job()
    j.backend = LCG()

    fake_id = 'https://example.com:9000/42'

    with patch('Ganga.Lib.LCG.Grid.submit', return_value=fake_id) as submit:
        j.submit()
        submit.assert_called_once()
        assert j.backend.id == fake_id

    status_info = {
        'status': 'Submitted',
        'name': '',
        'destination': '',
        'reason': '',
        'exit': '',
        'is_node': False,
        'id': fake_id,
    }
    status_results = [
        ([status_info], []),  # Once for the proper status call
        ([], []),             # Once for the bulk monitoring call
    ]

    with patch('Ganga.Lib.LCG.Grid.status', side_effect=status_results) as status:
        stripProxy(j).backend.master_updateMonitoringInformation(
            [stripProxy(j)])
        assert status.call_count == 1

    with patch('Ganga.Lib.LCG.Grid.cancel', return_value=True):
        j.kill()
def testSubmitJobComplete(self):
    """
    Test that the job completes successfully and stdout holds the
    expected markers.
    """
    from Ganga.GPI import jobs
    from Ganga.GPI import Job, LocalFile, prepareGaudiExec
    import os

    if os.path.exists(TestExternalGaudiExec.tmpdir_release):
        os.system("rm -rf %s/*" % TestExternalGaudiExec.tmpdir_release)

    j = Job(application=prepareGaudiExec(
        'DaVinci', latestDaVinci(), TestExternalGaudiExec.tmpdir_release))

    myOpts = path.join(TestExternalGaudiExec.tmpdir_release, 'testfile.py')
    FileBuffer('testfile.py', 'print("ThisIsATest")').create(myOpts)
    j.application.options = [LocalFile(myOpts)]
    j.submit()

    run_until_completed(j)
    assert j.status == 'completed'

    outputfile = path.join(j.outputdir, 'stdout')
    assert path.isfile(outputfile)

    # FIX: read stdout once instead of re-opening the file for every
    # assertion (the original leaked four open file handles).
    with open(outputfile) as out_handle:
        stdout_text = out_handle.read()
    assert 'testfile.py' in stdout_text
    assert 'data.py' in stdout_text
    assert 'ThisIsATest' in stdout_text
    assert j.application.platform in stdout_text
def test_a_JobConstruction(self):
    """ First construct the Job object (singular)"""
    # AutoCleanup must be off so the constructed job survives for later tests
    from Ganga.Utility.Config import getConfig
    self.assertFalse(getConfig('TestingFramework')['AutoCleanup'])

    from Ganga.GPIDev.Base.Proxy import stripProxy
    from Ganga.GPI import Job, jobs, ArgSplitter

    j = Job()
    # Keep the first subjobs proxy to check proxy identity is stable below
    orig_sj_proxy = j.subjobs
    j.splitter = ArgSplitter()
    j.splitter.args = [[0], [1]]

    # Split manually and register the subjobs on the master job
    i = 0
    for sj in stripProxy(j.splitter).split(stripProxy(j)):
        sj.id = i
        stripProxy(j).subjobs.append(sj)
        i = i + 1

    assert len(jobs) == 1
    assert len(j.subjobs) == 2

    # Repeated accesses of j.subjobs must hand back the very same proxy
    sj_proxy = j.subjobs
    assert sj_proxy is j.subjobs
    assert orig_sj_proxy is sj_proxy

    for sj in j.subjobs:
        assert isinstance(sj, Job)

    # Disable registry auto-start before flushing
    # NOTE(review): relies on module-level global_AutoStartReg — confirm
    global global_AutoStartReg
    global_AutoStartReg = False

    # Flush everything, re-dirty the subjobs, and flush again to exercise
    # the dirty-tracking path of the registry
    stripProxy(sj)._getRegistry().flush_all()

    for sj in j.subjobs:
        stripProxy(sj)._setDirty()

    stripProxy(sj)._getRegistry().flush_all()

    for sj in j.subjobs:
        stripProxy(sj)._setDirty()
def testKilling(self):
    """
    Create some subjobs and kill them
    """
    from Ganga.GPI import Job, GenericSplitter, Local
    from GangaTest.Framework.utils import sleep_until_state

    j = Job()
    j.application.exe = "sleep"
    j.splitter = GenericSplitter()
    j.splitter.attribute = 'application.args'
    # Five subjobs, each sleeping long enough to be killed mid-run
    j.splitter.values = [['400'] for _ in range(5)]
    j.backend = Local()
    j.submit()

    sleep_until_state(j, None, 'running')
    assert j.status == 'running'

    # Killing one subjob must not touch its siblings
    j.subjobs(0).kill()
    assert j.subjobs(0).status == 'killed'
    assert j.subjobs(1).status != 'killed'

    # Killing the master job must kill every subjob
    j.kill()
    assert j.status == 'killed'
    assert all(sj.status == 'killed' for sj in j.subjobs)
def testFailure(self):
    """
    Check a simple job fails and raises the correct exception
    """
    import time
    from Ganga.GPI import Job, Dirac, Executable

    # 'ech' is a deliberate typo so the executable cannot be found
    j = Job(backend=Dirac())
    j.application = Executable(exe='ech')
    j.application.args = ['Hello World']
    j.submit()
    assert run_until_state(j, 'failed', 220)

    filepath = os.path.join(j.outputdir, 'Ganga_Executable.log')
    # Poll for the log file to arrive, up to ~50 seconds
    for _ in range(10):
        if os.path.exists(filepath):
            break
        time.sleep(5)

    found = False
    with open(filepath, 'r') as logfile:
        for line in logfile:
            if "Exception occured in running process: ['ech', 'Hello World']" in line:
                found = True
    assert found
def submit(N, K):
    """Submit K LCG jobs, each split into N subjobs, and wait for them.

    Blocks until every job reaches a terminal state ('failed' or
    'completed'), then returns the list of submitted jobs.
    """
    import time

    # FIX: renamed the local from 'jobs' so it no longer shadows the
    # GPI-level 'jobs' registry name used elsewhere in these tests.
    submitted = []
    for _ in range(K):
        j = Job()
        j.backend = LCG()
        j.backend.middleware = 'GLITE'
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['x']] * N
        j.submit()
        submitted.append(j)

    def finished():
        # A job only counts as done in a terminal state
        # (idiom fix: 'not x in y' -> 'x not in y', folded into all())
        return all(j.status in ['failed', 'completed'] for j in submitted)

    while not finished():
        time.sleep(1)
    return submitted
def Savannah47814(self):
    """A job with a non-existent executable must end up 'failed'."""
    import os.path
    from Ganga.GPI import Job, Executable
    from GangaTest.Framework.utils import sleep_until_state, file_contains

    j = Job()
    j.application = Executable(exe='ThisScriptDoesNotExist')
    j.submit()

    failed = sleep_until_state(
        j, 60, state='failed',
        break_states=['new', 'killed', 'completed', 'unknown', 'removed'])
    self.assertTrue(
        failed,
        'Job with illegal script should fail. Instead it went into the state %s' % j.status)

    status_file = os.path.join(j.outputdir, '__jobstatus__')
    self.assertTrue(file_contains(status_file, 'No such file or directory'),
                    '__jobstatus__ file should contain error')
def testDatasets(self):
    """LHCbDataset construction, job assignment and replica lookup."""
    from Ganga.GPI import DiracFile, PhysicalFile, LHCbDataset, Job, LocalFile

    # Constructors/setters: 'lfn:' -> DiracFile, 'pfn:' -> PhysicalFile
    ds = LHCbDataset(['lfn:a', 'pfn:b'])
    assert len(ds) == 2
    print(ds[0])
    assert isinstance(ds[0], DiracFile)
    assert isinstance(ds[1], PhysicalFile)

    ds = LHCbDataset()
    ds.files = ['lfn:a', 'pfn:b']
    assert isinstance(ds[0], DiracFile)
    assert isinstance(ds[1], PhysicalFile)
    ds.files.append('lfn:c')
    assert isinstance(ds[-1], DiracFile)

    # OutputData keeps plain strings
    d = OutputData(['a', 'b'])
    assert isinstance(d.files[0], str)
    assert isinstance(d.files[1], str)

    # Job assignments coerce to the right dataset/file types
    j = Job()
    j.inputdata = ['lfn:a', 'pfn:b']
    assert isinstance(j.inputdata, LHCbDataset)
    j.outputfiles = ['a', DiracFile('b')]
    assert isinstance(j.outputfiles[0], LocalFile)
    print(type(j.outputfiles[1]))
    assert isinstance(j.outputfiles[1], DiracFile)

    lfn_data = [
        'LFN:/lhcb/LHCb/Collision11/DIMUON.DST/00016768/0000/00016768_00000005_1.dimuon.dst',
        'LFN:/lhcb/LHCb/Collision11/DIMUON.DST/00016768/0000/00016768_00000006_1.dimuon.dst',
    ]
    ds = LHCbDataset(lfn_data)
    assert len(ds.getReplicas().keys()) == 2
    assert ds.getCatalog()
def test_d_testXMLContent(self):
    """Check that the XML on disk matches a re-streamed copy of the same job
    and differs from the XML of a different job."""
    from Ganga.Core.GangaRepository.VStreamer import to_file, from_file
    from Ganga.GPI import jobs, Job, ArgSplitter
    from Ganga.GPIDev.Base.Proxy import stripProxy
    from tempfile import NamedTemporaryFile

    j = jobs(0)
    assert path.isfile(getXMLFile(j))

    with open(getXMLFile(j)) as handler:
        tmpobj, errs = from_file(handler)

        assert tmpobj.splitter
        assert tmpobj.splitter.args == getNestedList()

        ignore_subs = ''

        # Stream the same job out again
        with NamedTemporaryFile(delete=False) as new_temp_file:
            to_file(stripProxy(j), new_temp_file, ignore_subs)
            new_temp_file.flush()

        # Stream a different (freshly built) job with the same splitter args
        with NamedTemporaryFile(delete=False) as new_temp_file2:
            j2 = Job()
            j2.splitter = ArgSplitter()
            j2.splitter.args = getNestedList()
            to_file(stripProxy(j2), new_temp_file2, ignore_subs)
            new_temp_file2.flush()

        # On-disk XML equals a re-streamed copy of the same job...
        assert open(handler.name).read() == open(new_temp_file.name).read()
        # ...but differs from a different job's XML.
        # BUG FIX: the original compared the file OBJECT (not its contents)
        # against a string, which is always True and never tested anything.
        assert open(handler.name).read() != open(new_temp_file2.name).read()

    unlink(new_temp_file.name)
    unlink(new_temp_file2.name)
def test_A_Construction(self):
    """Prepare a job and attach two files to its shared area."""
    from Ganga.GPI import Job, LocalFile

    j = Job()
    # is_prepared is populated only once prepare() has run
    # (idiom fix: '== None' / '!= None' -> 'is None' / 'is not None')
    assert (j.application.is_prepared is None)
    j.prepare()
    assert (j.application.is_prepared is not None)

    TestShared.shared_area_location = j.application.is_prepared.path()
    assert (path.isdir(TestShared.shared_area_location))

    TestShared.a_file_location = path.join(
        j.application.is_prepared.path(), 'a.txt')
    TestShared.b_file_location = path.join(
        j.application.is_prepared.path(), 'b.txt')

    # Touch the two files in the shared area
    open(TestShared.a_file_location, 'w').close()
    open(TestShared.b_file_location, 'w').close()

    j.application.is_prepared.associated_files.append(
        LocalFile(TestShared.a_file_location))
    j.application.is_prepared.associated_files.append(
        LocalFile(TestShared.b_file_location))
def test_job_kill(gpi):
    """Submit a job to the CREAM CE with the most free slots, then kill it."""
    from Ganga.GPI import Job, CREAM

    vo = getConfig('LCG')['VirtualOrganisation']
    call = subprocess.Popen(['lcg-infosites', 'ce', 'cream', '--vo', vo],
                            stdout=subprocess.PIPE)
    stdout, stderr = call.communicate()

    # Based on output of:
    #
    #   #   CPU    Free Total Jobs      Running Waiting ComputingElement
    #   ----------------------------------------------------------------
    #   19440    2089      17760        17351     409 arc-ce01.gridpp.rl.ac.uk:2811/nordugrid-Condor-grid3000M
    #    3240       0       1594         1250     344 carceri.hec.lancs.ac.uk:8443/cream-sge-grid
    #    1176      30       1007          587     420 ce01.tier2.hep.manchester.ac.uk:8443/cream-pbs-long
    #
    # Select the CREAM CEs (URL path starts with '/cream') and how many free slots they have
    ces = re.findall(r'^\s*\d+\s*(?P<free>\d+)\s*\d+\s*\d+\s*\d+\s*(?P<ce>[^:/\s]+:\d+/cream.*)$', stdout, re.MULTILINE)

    # Grab the one with the most empty slots.
    # BUG FIX: the original used sorted(ces)[-1], which orders the free-slot
    # counts LEXICOGRAPHICALLY as strings (so '9' outranks '1000'); compare
    # the counts numerically instead.
    ce = max(ces, key=lambda entry: int(entry[0]))[1]

    j = Job()
    j.backend = CREAM(CE=ce)
    j.submit()
    j.kill()
def testMergeRemoval(self):
    """Removing a running job mid-merge must drop it from the repository.

    See Savannah 33710.
    """
    from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

    j = Job()
    jobID = j.id

    # job will run for at least 20 seconds
    j.application = Executable(exe='sh',
                               args=['-c', 'sleep 20; echo foo > out.txt'])
    j.backend = Local()
    j.outputfiles = [LocalFile('out.txt')]
    j.splitter = CopySplitter()
    j.postprocessors = MergerTester(files=['out.txt'])

    merger = j.postprocessors[0]
    merger.ignorefailed = True
    merger.alwaysfail = True
    merger.wait = 10

    j.submit()
    run_until_state(j, state='running')
    j.remove()

    # The removed job's id must no longer resolve
    with pytest.raises(KeyError):
        jobs(jobID)
def Savannah19123(self):
    """stdout/stderr must appear only after the job has been killed."""
    from Ganga.GPI import Job, Local
    from GangaTest.Framework.utils import sleep_until_state

    # check if stdout and stderr exists or not, flag indicates if files are required to exist or not
    def check(exists_flag):
        for base_name in ('stdout', 'stderr'):
            full_path = os.path.join(j.outputdir, base_name)
            present = os.path.exists(full_path)
            if exists_flag:
                self.assertTrue(
                    present, 'file %s should exist but it does not' % full_path)
            else:
                self.assertFalse(
                    present, 'file %s should not exist but it does' % full_path)

    j = Job()
    j.application.exe = 'bash'
    j.application.args = [
        '-c', 'for i in `seq 1 30`; do echo $i; sleep 1; done'
    ]
    j.backend = Local()

    j.submit()
    check(False)

    sleep_until_state(j, 5, 'running')
    j.kill()
    check(True)
def test_Savannah18272(self):
    """A File assigned to application.exe must compare equal afterwards."""
    from Ganga.GPI import Job, File

    exe_file = File('/hello')
    j = Job()
    j.application.exe = exe_file
    self.assertEqual(j.application.exe, File('/hello'))
def test_a_TestJobDirs(self):
    """A fresh job must come with non-empty input and output directories."""
    from Ganga.GPI import Job

    j = Job()
    for job_dir in (j.inputdir, j.outputdir):
        assert job_dir != ''
def test_f_InputAndOutputData(self):
    """Documentation snippets covering input/output file handling.

    The START/STOP marker comments delimit spans extracted into the docs;
    they must be left in place.
    """
    from Ganga.GPI import Job, File, LocalFile, GangaDataset, Local, plugins
    import os

    # -- INPUTANDOUTPUTDATA BASIC START
    # create a script to send
    # FIX: use context managers so the file handles are closed before the
    # scripts are chmod-ed and shipped (the originals were never closed).
    with open('my_script2.sh', 'w') as script:
        script.write("""#!/bin/bash
ls -ltr
more "my_input.txt"
echo "TESTING" > my_output.txt
""")
    os.system('chmod +x my_script2.sh')

    # create an input file to send
    with open('my_input.txt', 'w') as input_file:
        input_file.write('Input Testing works!')

    j = Job()
    j.application.exe = File('my_script2.sh')
    j.inputfiles = [LocalFile('my_input.txt')]
    j.outputfiles = [LocalFile('my_output.txt')]
    j.submit()
    # -- INPUTANDOUTPUTDATA BASIC STOP

    # -- INPUTANDOUTPUTDATA PEEKOUTPUT START
    j.peek()  # list output dir contents
    j.peek('my_output.txt')
    # -- INPUTANDOUTPUTDATA PEEKOUTPUT STOP

    # -- INPUTANDOUTPUTDATA FAILJOB START
    # This job will fail
    j = Job()
    j.application.exe = File('my_script2.sh')
    j.inputfiles = [LocalFile('my_input.txt')]
    j.outputfiles = [LocalFile('my_output_FAIL.txt')]
    j.submit()
    # -- INPUTANDOUTPUTDATA FAILJOB STOP

    # -- INPUTANDOUTPUTDATA WILDCARD START
    # This job will pick up both 'my_input.txt' and 'my_output.txt'
    j = Job()
    j.application.exe = File('my_script2.sh')
    j.inputfiles = [LocalFile('my_input.txt')]
    j.outputfiles = [LocalFile('*.txt')]
    j.submit()
    # -- INPUTANDOUTPUTDATA WILDCARD STOP

    # -- INPUTANDOUTPUTDATA OUTPUTFILES START
    j.outputfiles
    # -- INPUTANDOUTPUTDATA OUTPUTFILES STOP

    # -- INPUTANDOUTPUTDATA INPUTDATA START
    # Create a test script
    with open('my_script3.sh', 'w') as script:
        script.write("""#!/bin/bash
echo $PATH
ls -ltr
more __GangaInputData.txt__
echo "MY TEST FILE" > output_file.txt
""")
    os.system('chmod +x my_script3.sh')

    # Submit a job
    j = Job()
    j.application.exe = File('my_script3.sh')
    j.inputdata = GangaDataset(files=[LocalFile('*.sh')])
    j.backend = Local()
    j.submit()
    # -- INPUTANDOUTPUTDATA INPUTDATA STOP

    # -- INPUTANDOUTPUTDATA GANGAFILES START
    plugins('gangafiles')
def test_h_PostProcessors(self):
    """Documentation snippets demonstrating the postprocessor types.

    The START/STOP marker comments delimit spans extracted into the docs;
    they must be left in place (some names, e.g. FILECHECKEROPTS, repeat).
    """
    from Ganga.GPI import Job, RootMerger, TextMerger, CustomMerger, SmartMerger, RootFileChecker, FileChecker, \
        Notifier, CustomChecker

    j = Job()

    # -- POSTPROCESSORS APPEND START
    j.postprocessors.append(
        RootMerger(files=['thesis_data.root'], ignorefailed=True,
                   overwrite=True))
    # -- POSTPROCESSORS APPEND STOP

    # -- POSTPROCESSORS TEXTMERGER START
    TextMerger(compress=True)
    # -- POSTPROCESSORS TEXTMERGER STOP

    # -- POSTPROCESSORS ROOTMERGER START
    RootMerger(args='-T')
    # -- POSTPROCESSORS ROOTMERGER STOP

    # -- POSTPROCESSORS CUSTOMMERGER START
    CustomMerger().module = '~/mymerger.py'
    # -- POSTPROCESSORS CUSTOMMERGER STOP

    # -- POSTPROCESSORS SMARTMERGER START
    SmartMerger(files=['thesis_data.root', 'stdout'], overwrite=True)
    # -- POSTPROCESSORS SMARTMERGER STOP

    # -- POSTPROCESSORS SMARTMERGERAPPEND START
    j.postprocessors.append(
        SmartMerger(files=['thesis_data.root', 'stdout'], overwrite=True))
    # -- POSTPROCESSORS SMARTMERGERAPPEND STOP

    # -- POSTPROCESSORS SMARTMERGERAPPEND2 START
    j.postprocessors.append(TextMerger(files=['stdout'], overwrite=True))
    j.postprocessors.append(
        RootMerger(files=['thesis_data.root'], overwrite=False))
    # -- POSTPROCESSORS SMARTMERGERAPPEND2 STOP

    # -- POSTPROCESSORS FILECHECKER START
    fc = FileChecker(files=['stdout'], searchStrings=['Segmentation'])
    # -- POSTPROCESSORS FILECHECKER STOP

    # -- POSTPROCESSORS FILECHECKERMUSTEXIST START
    fc.filesMustExist = True
    # -- POSTPROCESSORS FILECHECKERMUSTEXIST STOP

    # -- POSTPROCESSORS FILECHECKEROPTS START
    fc.searchStrings = ['SUCCESS']
    fc.failIfFound = False
    # -- POSTPROCESSORS FILECHECKEROPTS STOP

    # -- POSTPROCESSORS FILECHECKEROPTS START
    rfc = RootFileChecker(files=["*.root"])
    rfc.files = ["*.root"]
    j.postprocessors.append(rfc)
    # -- POSTPROCESSORS FILECHECKEROPTS STOP

    # -- POSTPROCESSORS CUSTOMCHECKER START
    cc = CustomChecker(module='~/mychecker.py')
    # -- POSTPROCESSORS CUSTOMCHECKER STOP

    # -- POSTPROCESSORS NOTIFIER START
    n = Notifier(address='myaddress.cern.ch')
    # -- POSTPROCESSORS NOTIFIER STOP

    # -- POSTPROCESSORS NOTIFIEROPTS START
    n.verbose = True
    # -- POSTPROCESSORS NOTIFIEROPTS STOP

    # -- POSTPROCESSORS MULTIPLE START
    tm = TextMerger(files=['stdout'], compress=True)
    rm = RootMerger(files=['thesis_data.root'], args='-f6')
    fc = FileChecker(files=['stdout'], searchStrings=['Segmentation'])
    cc = CustomChecker(module='~/mychecker.py')
    n = Notifier(address='myadress.cern.ch')
    j.postprocessors = [tm, rm, fc, cc, n]
    # -- POSTPROCESSORS MULTIPLE STOP

    # -- POSTPROCESSORS MULTIPLE2 START
    j.postprocessors.append(fc)
    j.postprocessors.append(tm)
    j.postprocessors.append(rm)
    j.postprocessors.append(cc)
    j.postprocessors.append(n)
    # -- POSTPROCESSORS MULTIPLE2 STOP

    j.postprocessors.remove(FileChecker())