def subDDSim():
    """Build a DDSim simulation job from the global ``_clip`` options and submit it.

    Reads input/output settings from the module-level ``_clip`` option holder,
    configures a :class:`UserJob` carrying one DDSim application, and submits
    it through DiracILC (in local mode when ``_clip.isLocal`` is set).
    Exits the process with status -1 when no input file was given.
    """
    # Decide parameters for a job.
    # NOTE(review): the original assigned "KEK-SRM" and immediately overwrote
    # it with "KEK-DISK"; only the effective value is kept here.
    outputSE = "KEK-DISK"

    isLocal = _clip.isLocal
    nbevts = 10 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "ddsim_example.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir

    inputFile = _clip.inputFile
    if inputFile == "":
        # BUG FIX: message read "does not given" in the original.
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission
    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"])  # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v02")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer.py "
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)
    # ddsim.setRandomSeed(1234565)
    # ddsim.setStartFrom(20)  # Number of events to skip before starting ddsim

    job.append(ddsim)

    if outputDir != "":
        job.setOutputData([outputFile], OutputPath=outputDir, OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
def getJob(self):
    """Return the generic test job; its configuration is always the same."""
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    job = UserJob()
    # Basic identity, limits and logging.
    job.setName("Testing")
    job.setJobGroup("Tests")
    job.setCPUTime(30000)
    job.dontPromptMe()
    job.setLogLevel("VERBOSE")
    job.setPlatform("x86_64-slc5-gcc43-opt")
    job.setOutputSandbox(["*.log", "*.xml", "*.sh"])
    # Inject workflow parameters directly (test-only use of the private API).
    for pname, ptype, pvalue, pdescr in (
            ('TestFailover', 'String', True, 'Test failoverRequest'),
            ('Platform', 'JDL', "x86_64-slc5-gcc43-opt", 'OS Platform')):
        job._addParameter(job.workflow, pname, ptype, pvalue, pdescr)
    if self.ildConfig:
        job.setILDConfig(self.ildConfig)
    return job
def subDDSim(clip1):
    """Build and return a configured DDSim application for a user job.

    :param clip1: parsed command-line option holder; its ``numberOfEvents``,
                  ``outputFile`` and ``inputFile`` attributes are read.
    :returns: the configured :class:`DDSim` application object.

    Exits the process with status -1 when no input file was given.
    """
    # Decide parameters for a job.
    # NOTE(review): the original conditionals were identity expressions
    # ("0 if n == 0 else n" and '"" if s == "" else s'), and it also computed
    # outputSE/isLocal/outputDir which this function never used; both
    # simplifications are behavior-preserving.
    nbevts = clip1.numberOfEvents
    outputFile = clip1.outputFile

    inputFile = clip1.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission
    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"])  # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v05")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer_July26.py"
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)
    return ddsim
def getJob(self):
    """Create the one generic job every test uses, configured identically each time."""
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    generic = UserJob()
    generic.setName("Testing")
    generic.setJobGroup("Tests")
    generic.setCPUTime(30000)
    generic.dontPromptMe()
    generic.setLogLevel("VERBOSE")
    generic.setPlatform("x86_64-slc5-gcc43-opt")
    generic.setOutputSandbox(["*.log", "*.xml", "*.sh"])
    # Workflow parameters are set through the private helper on purpose:
    # the tests later inspect them on the workflow object.
    wf = generic.workflow
    generic._addParameter(wf, 'TestFailover', 'String', True, 'Test failoverRequest')
    generic._addParameter(wf, 'Platform', 'JDL', "x86_64-slc5-gcc43-opt", 'OS Platform')
    if not self.ildConfig:
        return generic
    generic.setILDConfig(self.ildConfig)
    return generic
# --- Marlin reconstruction application setup --------------------------------
# NOTE(review): this is a chunk of a larger script; `ma`, `detectorModel`,
# `lcoutputSIM`, `energy`, `evtsPerRun`, `lcoutputDST`, `lcoutputREC`,
# `RECoutput`, `jobname`, `jobGrName`, `ILDConfigVer` and `lcinputREC` are
# defined earlier in the file.
ma.setExtraCLIArguments(" --constant.DetectorModel=%s " % (detectorModel))
ma.setLogFile("marlin.log")
#ma.getInputFromApp(sim)
ma.setInputFile([lcoutputSIM])
ma.setEnergy(energy)
ma.setNumberOfEvents(evtsPerRun)
ma.setOutputDstFile(lcoutputDST)
ma.setOutputRecFile(lcoutputREC)
# Both reconstruction outputs are registered for upload as job output data.
RECoutput.append(lcoutputDST)
RECoutput.append(lcoutputREC)

# --- User job definition -----------------------------------------------------
job = UserJob()
job.setName(jobname)
job.setJobGroup(jobGrName)
job.setILDConfig(ILDConfigVer)
job.setCPUTime(86400)
job.setInputData([lcinputREC])
job.setInputSandbox(["runRecoSplit_all_Tmp.py"])
job.setOutputSandbox(["*.log", "*.sh", "MarlinStdRecoParsed.xml", "marlin*.xml", "*.py "])
#job.setOutputSandbox(["*.log","*.sh","MarlinStdRecoParsed.xml","marlin*.xml","*.py ","*.root"])
#job.setDestinationCE('lyogrid07.in2p3.fr')
job.dontPromptMe()
job.setBannedSites(['LCG.QMUL.uk'])
#job.setBannedSites(['LCG.IN2P3-CC.fr','LCG.DESYZN.de','LCG.DESY-HH.de','LCG.KEK.jp','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us'])

# run Marlin reco jobs
mares = job.append(ma)
if not mares['OK']:
    # BUG FIX: the original did  print mares['Not ok appending Marlin to job'],
    # indexing the DIRAC result dict with the literal message text, which raises
    # KeyError instead of reporting the failure; the error text lives under
    # the 'Message' key of an S_ERROR structure.
    print(mares['Message'])
def subOverlay():
    """Build and submit a Marlin job that overlays beam background onto the input file.

    Reads its options from the module-level ``_clip`` option holder, appends one
    OverlayInput application per background sample plus a Marlin reconstruction,
    and submits the job through DiracILC (locally when ``_clip.isLocal`` is set).
    Exits the process with status -1 when no input file was given.
    """
    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    # NOTE(review): the original first computed
    # "50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents" and then
    # unconditionally overwrote it; only the effective value is kept.
    nbevts = 0  # To analyze all input events
    outputFilePrefix = "overlay_example" if _clip.outputFilePrefix == "" else _clip.outputFilePrefix
    outputDir = _clip.outputDir

    inputFile = _clip.inputFile
    if inputFile == "":
        # BUG FIX: the original message said "Input file for ddsim does not
        # given." — wrong application name and bad grammar.
        gLogger.error("Input file for overlay is not given.")
        exit(-1)

    recfile = outputFilePrefix + ".rec.slcio"
    dstfile = outputFilePrefix + ".dst.slcio"

    detector_model = "ILD_l5_o1_v02"
    key = detector_model.split('_')
    # Simulation samples are named without the option field ("o1").
    sim_detectorModel = "_".join([key[0], key[1], key[3]])

    # Create DIRAC objects for job submission
    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myoverlayjob")
    job.setName("myoverlay")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    # job.setBannedSites([])  # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    # Create Overlay applications, one per background sample.
    ovldata = [{"ProcessorName": "BgOverlayWW", "evttype": "aa_lowpt_WW", "ProdID": 10237,
                "expBG": 0.211, "subdir": "000"},
               {"ProcessorName": "BgOverlayWB", "evttype": "aa_lowpt_WB", "ProdID": 10241,
                "expBG": 0.24605, "subdir": "000"},
               {"ProcessorName": "BgOverlayBW", "evttype": "aa_lowpt_BW", "ProdID": 10239,
                "expBG": 0.243873, "subdir": "000"},
               {"ProcessorName": "BgOverlayBB", "evttype": "aa_lowpt_BB", "ProdID": 10235,
                "expBG": 0.35063, "subdir": "000"},
               {"ProcessorName": "PairBgOverlay", "evttype": "seeablepairs", "ProdID": 10233,
                "expBG": 1.0, "subdir": "100"}]

    BXOverlay = 1
    NbSigEvtsPerJob = 100
    numberOfSignalEvents = NbSigEvtsPerJob
    basebkgpath = "/ilc/prod/ilc/mc-opt-3/ild/sim/500-TDR_ws"
    energy = "500"

    for ovl in ovldata:
        print("### OverlayInput ... " + ovl["ProcessorName"])
        ovlapp = OverlayInput()
        ovlpath = "%s/%s/%s/v02-00-01/%8.8d/%s" % \
            (basebkgpath, ovl["evttype"], sim_detectorModel, ovl["ProdID"], ovl["subdir"])
        print(" OverlayPath ... " + ovlpath)
        ovlapp.setMachine("ilc_dbd")
        # ovlapp.setEnergy(energy)
        # ovlapp.setDetectorModel(sim_detectorModel)
        ovlapp.setProcessorName(ovl["ProcessorName"])
        ovlapp.setBkgEvtType(ovl["evttype"])
        ovlapp.setPathToFiles(ovlpath)
        ovlapp.setGGToHadInt(ovl["expBG"])
        ovlapp.setBXOverlay(BXOverlay)
        ovlapp.setNbSigEvtsPerJob(NbSigEvtsPerJob)
        ovlapp.setNumberOfSignalEventsPerJob(numberOfSignalEvents)
        res = job.append(ovlapp)
        if not res['OK']:
            print(res['Message'])
            exit(1)

    # Create Marlin application
    marlin = Marlin()
    marlin.setVersion("ILCSoft-02-00-02_gcc49")
    marlin.setDetectorModel(detector_model)
    marlin.setSteeringFile("MarlinStdReco.xml")
    marlin.setInputFile(inputFile)
    marlin.setNumberOfEvents(nbevts)
    marlin.setOutputDstFile(dstfile)
    marlin.setOutputRecFile(recfile)
    extraCLIArguments = " --constant.DetectorModel=%s " % detector_model
    extraCLIArguments += " --constant.RunOverlay=true --constant.CMSEnergy=%s " % str(energy)
    extraCLIArguments += " --global.Verbosity=MESSAGE "
    marlin.setExtraCLIArguments(extraCLIArguments)

    job.append(marlin)

    if outputDir != "":
        job.setOutputData([dstfile, recfile], OutputPath=outputDir, OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
# Tutorial script: run two GenericApplication shell scripts over ILD merged-DST
# input files as a single ILCDirac user job.
# Input LFNs: two merged-DST files from an ILD mc-opt-3 production.
datadir = "/ilc/prod/ilc/mc-opt-3/ild/dst-merged/500-TDR_ws/higgs_ffh/ILD_l5_o1_v02/v02-00-01/"
filepref = "rv02-00-01.sv02-00-01.mILD_l5_o1_v02.E500-TDR_ws.I106523.Pnnh.eL.pR.n001.d_dstm_10763_"
indata = [datadir + filepref + "0.slcio", datadir + filepref + "1.slcio"]

# DiracILC instance with a local repository file to track submitted jobs.
d = DiracILC(True, "repo.rep")

################################################
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("GenericExec")
# Both shell scripts are shipped with the job; the input data are the LFNs above.
j.setInputSandbox(["mypre.sh", "myanal.sh"])
j.setInputData(indata)
# Request the marlin software tag so the matching ILCSoft release is available
# on the worker node (test-only use of the private API).
j._setSoftwareTags(["marlin.ILCSoft-02-00-02_gcc49"])
# j._setSoftwareTags(["lcio.ILCSoft-02-00-01_gcc49"])
j.setILDConfig("v02-00-02")

################################################
# First step: the "pre" script.
appre = GenericApplication()
appre.setScript("mypre.sh")
appre.setArguments("This is input arguments")
res = j.append(appre)
if not res['OK']:
    print res['Message']
    exit(1)

################################################
# Second step: the analysis script.
# NOTE(review): the result of this append is not checked here — presumably the
# script continues beyond this chunk; confirm against the full file.
appost = GenericApplication()
appost.setScript("myanal.sh")
appost.setArguments("This is my analysis step")
res = j.append(appost)
class UserJobTestCase(unittest.TestCase):
  """Base class for the UserJob test cases."""

  def setUp(self):
    """Set up the objects."""
    self.log_mock = Mock(name="SubMock")
    # getProxyInfo is patched out so constructing a UserJob needs no proxy.
    with patch('%s.getProxyInfo' % MODULE_NAME, new=Mock(return_value=None)):
      self.ujo = UserJob()

  def test_submit_noproxy( self ):
    """Submit must fail when no proxy information is available."""
    self.ujo.proxyinfo = S_ERROR()
    assertDiracFailsWith( self.ujo.submit(),
                          "Not allowed to submit a job, you need a ['ilc_user', 'calice_user'] proxy",
                          self )

  def test_submit_wrongproxygroup( self ):
    """Submit must fail when the proxy group is not in the allowed list."""
    self.ujo.proxyinfo = S_OK( { 'group' : 'my_test_group.notInallowed_list' } )
    assertDiracFailsWith( self.ujo.submit(),
                          "Not allowed to submit job, you need a ['ilc_user', 'calice_user'] proxy",
                          self )

  def test_submit_noproxygroup( self ):
    """Submit must fail when the proxy info carries no 'group' key at all."""
    self.ujo.proxyinfo = S_OK( { 'some_key' : 'Value', True : 1, False : [], 135 : {} } )
    assertDiracFailsWith( self.ujo.submit(),
                          'Could not determine group, you do not have the right proxy', self )

  def test_submit_addtoworkflow_fails( self ):
    """An _addToWorkflow error is propagated out of submit (ilc_user group)."""
    self.ujo.proxyinfo = S_OK( { 'group' : 'ilc_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME,
               new=Mock(return_value=S_ERROR('workflow_testadd_error'))):
      assertDiracFailsWith( self.ujo.submit(), 'workflow_testadd_error', self )

  def test_submit_addtoworkflow_fails_2( self ):
    """An _addToWorkflow error is propagated out of submit (calice_user group)."""
    self.ujo.proxyinfo = S_OK( { 'group' : 'calice_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME,
               new=Mock(return_value=S_ERROR('err_workflow_testadd'))):
      assertDiracFailsWith( self.ujo.submit(), 'err_workflow_testadd', self )

  def test_submit_createnew_dirac_instance( self ):
    """When no dirac instance is passed, submit creates a DiracILC itself."""
    ilc_mock = Mock()
    ilc_mock().submitJob.return_value = S_OK('test_submission_successful')
    self.ujo.proxyinfo = S_OK( { 'group' : 'ilc_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_OK())), \
         patch('%s.DiracILC' % MODULE_NAME, new=ilc_mock):
      assertDiracSucceedsWith_equals( self.ujo.submit(), 'test_submission_successful', self )
    ilc_mock().submitJob.assert_called_once_with(self.ujo, 'wms')
    assert self.ujo.oktosubmit

  def test_submit_existing_dirac_instance(self):
    """Test submit with dirac instance."""
    ilc_mock = Mock()
    ilc_mock.submitJob.return_value = S_OK('test_submission_successful')
    self.ujo.proxyinfo = S_OK({'group': 'ilc_user'})
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_OK())):
      assertDiracSucceedsWith_equals(self.ujo.submit(diracinstance=ilc_mock),
                                     'test_submission_successful', self)
    ilc_mock.submitJob.assert_called_once_with(self.ujo, 'wms')
    assert self.ujo.oktosubmit

  def test_setinputdata_failed( self ):
    """setInputData rejects a dict argument."""
    assertDiracFailsWith( self.ujo.setInputData( { '/mylfn1' : True, '/mylfn2' : False } ),
                          'expected lfn string or list of lfns for input data', self )

  def test_setinputdata(self):
    """Test setting input data."""
    assertDiracSucceeds(self.ujo.setInputData(['LFN:/mylfn1', 'LFN:/mylfn2']), self)
    # The LFN: prefix is stripped in the stored workflow parameter.
    self.assertEqual(self.ujo.workflow.parameters.find('InputData').getValue(), '/mylfn1;/mylfn2')
    assertDiracSucceeds(self.ujo.setInputData('/mylfn1'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('InputData').getValue(), '/mylfn1')

  def test_inputsandbox( self ):
    """setInputSandbox extends the sandbox list with the given file."""
    self.ujo.inputsandbox = Mock()
    assertDiracSucceeds( self.ujo.setInputSandbox( 'LFN:/ilc/user/u/username/libraries.tar.gz' ), self )
    self.ujo.inputsandbox.extend.assert_called_once_with( [ 'LFN:/ilc/user/u/username/libraries.tar.gz' ] )

  def test_inputsandbox_dictpassed( self ):
    """setInputSandbox rejects a dict argument."""
    assertDiracFailsWith( self.ujo.setInputSandbox( { '/some/file' : True, '/my/dict' : True } ),
                          'File passed must be either single file or list of files', self )

  def test_setOutputData(self):
    """Test setting output data."""
    assertDiracSucceeds(self.ujo.setOutputData(['/myFile1', '/myFile2']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(),
                     '/myFile1;/myFile2')
    assertDiracSucceeds(self.ujo.setOutputData('/myFile2'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    # A single OutputSE string is stored as-is ...
    assertDiracSucceeds(self.ujo.setOutputData('/myFile2', OutputSE="MY-SE"), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputSE').getValue(), 'MY-SE')
    # ... and a list of SEs is joined with semicolons.
    assertDiracSucceeds(self.ujo.setOutputData('/myFile2', OutputSE=["MY-SE", 'YOUR-SE']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputSE').getValue(), 'MY-SE;YOUR-SE')

  def test_setoutputdata_dictpassed( self ):
    """setOutputData rejects a dict argument."""
    assertDiracFailsWith( self.ujo.setOutputData( { '/mydict' : True } ),
                          'Expected file name string or list of file names for output data', self )

  def test_setoutputdata_nolistse( self ):
    """setOutputData rejects a dict OutputSE but still records the data files."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith(self.ujo.setOutputData(['mylfn1', 'other_lfn', 'last___lfn'],
                                                  OutputSE={'mydict': True}),
                           'Expected string or list for OutputSE', self)
      addparam_mock.assert_called_once_with(wf_mock, 'UserOutputData', 'JDL',
                                            'mylfn1;other_lfn;last___lfn',
                                            'List of output data files')

  def test_setoutputdata_outputpath_nostring( self ):
    """setOutputData rejects a dict OutputPath but still records the data files."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = { 'mydict' : True } ),
                            'Expected string for OutputPath', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn',
                                             'List of output data files' )

  def test_setoutputdata_invalid_outputpath_1( self ):
    """OutputPath must not point into the /ilc/user/ namespace (direct prefix)."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = '//ilc/user/somedir/output.xml' ),
                            'Output path contains /ilc/user/ which is not what you want', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn',
                                             'List of output data files' )

  def test_setoutputdata_invalid_outputpath_2( self ):
    """OutputPath must not contain /ilc/user/ anywhere in the path."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = '//some/dir/ilc/user/somedir/output.xml' ),
                            'Output path contains /ilc/user/ which is not what you want', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn',
                                             'List of output data files' )

  def test_setoutputdata( self ):
    """A valid OutputPath is stored with its leading slashes stripped."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracSucceeds( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                   OutputPath = '//some/dir/somedir/output.xml' ), self )
      assertMockCalls( addparam_mock,
                       [ ( wf_mock, 'UserOutputData', 'JDL', 'mylfn1;other_lfn;last___lfn',
                           'List of output data files' ),
                         ( wf_mock, 'UserOutputPath', 'JDL', 'some/dir/somedir/output.xml',
                           'User specified Output Path' ) ], self )

  def test_setoutputsandbox( self ):
    """A single output sandbox file is stored as one workflow parameter."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracSucceeds( self.ujo.setOutputSandbox( '/my/dir/myfile.txt' ), self )
      addparam_mock.assert_called_once_with( wf_mock, 'OutputSandbox', 'JDL',
                                             '/my/dir/myfile.txt', 'Output sandbox file' )

  def test_setoutputsandbox_successes(self):
    """Test setting output sandbox."""
    assertDiracSucceeds(self.ujo.setOutputSandbox(['myfile.txt', 'myfile.doc']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('OutputSandbox').getValue(),
                     'myfile.txt;myfile.doc')

  def test_setoutputsandbox_dictpassed( self ):
    """setOutputSandbox rejects a dict and adds no parameter at all."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputSandbox( { 'mydict' : True } ),
                            'Expected file string or list of files for output sandbox contents', self )
    self.assertFalse( addparam_mock.called )

  def test_configs(self):
    """Test setting different config packages."""
    assertDiracSucceeds(self.ujo.setILDConfig('123.4'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('ILDConfigPackage').getValue(), 'ILDConfig123.4')
    assertDiracSucceeds(self.ujo.setCLICConfig('567.8'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('ClicConfigPackage').getValue(), 'ClicConfig567.8')
    # Both packages must be accumulated in SoftwarePackages, not overwritten.
    self.assertIn('ildconfig', self.ujo.workflow.parameters.find('SoftwarePackages').getValue())
    self.assertIn('clicconfig', self.ujo.workflow.parameters.find('SoftwarePackages').getValue())

  def test_submit_split(self):
    """Test submitting with automatic splitting."""
    self.ujo._splittingOption = True
    self.ujo._split = Mock(return_value=S_OK())
    self.ujo.proxyinfo = S_OK({'group': 'ilc_user'})
    ilc_mock = Mock()
    ilc_mock.submitJob.return_value = S_OK('test_submission_successful')
    assertDiracSucceeds(self.ujo.submit(diracinstance=ilc_mock), self)
    ilc_mock.submitJob.assert_called_once_with(self.ujo, 'wms')
    # A failing _split must abort the submission with its own message.
    self.ujo._splittingOption = True
    self.ujo._split = Mock(return_value=S_ERROR("Splitting InValid"))
    assertDiracFailsWith(self.ujo.submit(), "Splitting InValid", self)

  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_OK()))
  def test_split_bydata(self):
    """Test splitting by data."""
    self.ujo._eventsPerJob = "1"
    self.ujo._numberOfJobs = "1"
    self.ujo._splittingOption = "byData"
    self.ujo._switch['byData'] = Mock(return_value=[("InputData", ["/ilc/user/u/username/data1"], True)])
    with patch("%s.UserJob.setParameterSequence" % MODULE_NAME) as mock_parametric, \
         patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      info_message = "Job splitting successful"
      assertDiracSucceeds(self.ujo._split(), self)
      self.log_mock.notice.assert_called_with(info_message)
      mock_parametric.assert_any_call("InputData", ["/ilc/user/u/username/data1"], True)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s.UserJob._checkSplitConsistency" % MODULE_NAME, new=Mock(return_value=S_OK()))
  def test_split_byevents(self):
    """Test splitting by events."""
    self.ujo._splittingOption = "byEvents"
    self.ujo._switch['byEvents'] = Mock(return_value=[('NumberOfEvents', [1, 2], 'NbOfEvts')])
    with patch("%s.UserJob.setParameterSequence" % MODULE_NAME) as mock_parametric, \
         patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      info_message = "Job splitting successful"
      assertDiracSucceeds(self.ujo._split(), self)
      self.log_mock.notice.assert_called_with(info_message)
      mock_parametric.assert_any_call('NumberOfEvents', [1, 2], 'NbOfEvts')

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s.UserJob._checkSplitConsistency" % MODULE_NAME, new=Mock(return_value=S_OK()))
  def test_split_atomicsubmission(self):
    """Test splitting atomic."""
    # No splitting option set: _split still succeeds (single atomic job).
    self.ujo._splittingOption = None
    info_message = "Job splitting successful"
    with patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      assertDiracSucceeds(self.ujo._split(), self)
    self.log_mock.notice.assert_called_with(info_message)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=False))
  def test_split_inputparameters_failed(self):
    """Test splitting input parameters with failure."""
    assertDiracFailsWith( self.ujo._split(), "Splitting: Invalid values for splitting", self )

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_ERROR('failed')))
  def test_split_checkSplitConsistency_failed(self):
    """Test splitting check consistency with failure."""
    assertDiracFailsWith(self.ujo._split(), 'failed', self)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_OK()))
  def test_split_sequencer_fails(self):
    """Test splitting when the sequencer fails."""
    self.ujo._splittingOption = "bySequence"
    self.ujo._switch['bySequence'] = Mock(return_value=[])
    self.ujo.setParameterSequence = Mock()
    self.ujo._split()
    # An empty sequence list means no parameter sequence may be installed.
    self.ujo.setParameterSequence.assert_not_called()

  def test_checkSplitconsistency(self):
    """Test splitting consistency check."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x}
    self.ujo._splittingOption = "byEvents"
    self.assertTrue(self.ujo._checkSplitConsistency())

  def test_checkjobconsistency_bad_split_parameter(self):
    """Test splitting consistency check with bad split parameters."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x }
    # "byHand" is not a key of _switch, so the check must fail and register
    # itself in errorDict.
    self.ujo._splittingOption = "byHand"
    self.assertFalse(self.ujo._checkSplitConsistency()['OK'])
    self.assertIn('_checkSplitConsistency', self.ujo.errorDict)

  def test_checkjobconsistency_no_same_events( self ):
    """Consistency check fails when the applications disagree on event counts."""
    app1 = Fcc()
    app2 = Fcc()
    app1.numberOfEvents = 1
    app2.numberOfEvents = 2
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x }
    self.ujo._splittingOption = "byEvents"
    with patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      resCheck = self.ujo._checkSplitConsistency()
    self.assertFalse(resCheck['OK'])
    self.assertIn("have the same number", resCheck['Message'])

  def test_checkjobconsistency_negative_events( self ):
    """A shared event count of -1 (run over everything) is accepted."""
    app1 = Fcc()
    app2 = Fcc()
    app1.numberOfEvents = app2.numberOfEvents = -1
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = ["byEvents"]
    self.ujo._splittingOption = "byEvents"
    self.assertTrue(self.ujo._checkSplitConsistency())

  def test_splitbydata( self ):
    """_splitByData yields one parametric input-data entry per LFN."""
    self.ujo._data = ['data1', 'data2']
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    assertEqualsImproved(self.ujo._splitByData(),
                         [("InputData", [['data1'], ['data2']], 'ParametricInputData')], self)

  def test_splitbydata_no_data(self):
    """Test splitting without data."""
    self.ujo._data = None
    self.assertFalse(self.ujo._splitByData())
    self.assertIn('_splitByData', self.ujo.errorDict)

  def test_splitbydata_incorrectparameter(self):
    """Test splitting with data."""
    # More files per job requested than files available: must fail.
    self.ujo._data = ["/path/to/data1","/path/to/data2"]
    self.ujo._numberOfFilesPerJob = 3
    self.assertFalse(self.ujo._splitByData())
    self.assertIn('_splitByData', self.ujo.errorDict)

  def test_splitbyevents_1st_case(self):
    """Test splitting by events."""
    # eventsPerJob + numberOfJobs given: each of the 3 jobs gets 2 events.
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 2
    self.ujo._numberOfJobs = 3
    map_event_job = [2, 2, 2]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_2nd_case( self ):
    """eventsPerJob + totalNumberOfEvents: the remainder goes to the last job."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 3
    self.ujo._totalNumberOfEvents = 5
    map_event_job = [3, 2]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_2nd_case_failed( self ):
    """eventsPerJob larger than the total number of events must fail."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 3
    self.ujo._totalNumberOfEvents = 2
    self.assertFalse(self.ujo._splitByEvents())

  def test_splitbyevents_3rd_case(self):
    """Test splitting by events case 3."""
    # numberOfJobs + totalNumberOfEvents: events are distributed as evenly as
    # possible, surplus on the earlier jobs.
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._numberOfJobs = 2
    self.ujo._totalNumberOfEvents = 2
    map_event_job = [1, 1]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)
    self.ujo._numberOfJobs = 3
    self.ujo._totalNumberOfEvents = 5
    map_event_job = [2, 2, 1]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_3rd_case_failed(self):
    """Test splitting by events case 3 fails."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._numberOfJobs = 2
    self.ujo._totalNumberOfEvents = None
    self.assertFalse(self.ujo._splitByEvents())

  def test_setsplitevents(self):
    """Test splitting set split events."""
    self.ujo.setSplitEvents(42, 42, 126)
    assertEqualsImproved(self.ujo._totalNumberOfEvents, 126, self)
    assertEqualsImproved(self.ujo._eventsPerJob, 42, self)
    assertEqualsImproved(self.ujo._numberOfJobs, 42, self)
    assertEqualsImproved(self.ujo._splittingOption, "byEvents", self)

  def test_setsplitInputdata(self):
    """Test set split input data."""
    input_data = ["/path/to/data1", "/path/to/data2"]
    self.ujo.setSplitInputData(input_data)
    for data in input_data:
      self.assertIn(data, self.ujo._data)
    assertEqualsImproved(self.ujo._splittingOption, "byData", self)

  def test_setSplitFiles(self):
    """Test set split files over jobs."""
    self.ujo.setSplitFilesAcrossJobs('myLFN', 20, 20)
    self.assertEqual(self.ujo._data, ['myLFN'])
    self.assertEqual(self.ujo._eventsPerFile, 20)
    self.assertEqual(self.ujo._eventsPerJob, 20)

  def test_splitBySkip(self):
    """Test set split with skip."""
    # 13 events per file, 5 per job: 3 jobs per file, last one short (3 events).
    self.ujo._eventsPerFile = 13
    self.ujo._eventsPerJob = 5
    self.ujo._data = ['lfn_%d' % d for d in [1, 2]]
    result = self.ujo._splitBySkip()
    self.assertEqual([('InputData', ['lfn_1', 'lfn_1', 'lfn_1', 'lfn_2', 'lfn_2', 'lfn_2'], 'InputData'),
                      ('startFrom', [0, 5, 10, 0, 5, 10], 'startFrom'),
                      ('NumberOfEvents', [5, 5, 3, 5, 5, 3], 'NbOfEvts')], result)
    # 15 events per file divides evenly: all jobs get 5 events.
    self.ujo._eventsPerFile = 15
    self.ujo._eventsPerJob = 5
    self.ujo._data = ['lfn_%d' % d for d in [1, 2]]
    result = self.ujo._splitBySkip()
    self.assertEqual([('InputData', ['lfn_1', 'lfn_1', 'lfn_1', 'lfn_2', 'lfn_2', 'lfn_2'], 'InputData'),
                      ('startFrom', [0, 5, 10, 0, 5, 10], 'startFrom'),
                      ('NumberOfEvents', [5, 5, 5, 5, 5, 5], 'NbOfEvts')], result)

  def test_setSplittingStartIndex(self):
    """Test setting start index."""
    res = self.ujo.setSplittingStartIndex(111)
    self.assertTrue(res['OK'])
    self.assertEqual(self.ujo._startJobIndex, 111)
    # Negative indices are rejected and leave the previous value untouched.
    self.ujo._startJobIndex = 0
    res = self.ujo.setSplittingStartIndex(-111)
    self.assertFalse(res['OK'])
    self.assertIn('setSplittingStartIndex', self.ujo.errorDict)
    self.assertEqual(self.ujo._startJobIndex, 0)

  def test_doNotAlter(self):
    """Test setting not altering the output."""
    self.ujo.setSplitDoNotAlterOutputFilename()
    self.assertIsNotNone(self.ujo.workflow.parameters.find('DoNotAlterOutputData'))
    self.assertEqual(self.ujo.workflow.parameters.find('DoNotAlterOutputData').getValue(), "True")
    self.ujo.setSplitDoNotAlterOutputFilename(False)
    self.assertIsNotNone(self.ujo.workflow.parameters.find('DoNotAlterOutputData'))
    self.assertEqual(self.ujo.workflow.parameters.find('DoNotAlterOutputData').getValue(), "False")

  def test_setSplitJobIndexList(self):
    """Test the setSplitJobIndexList function."""
    res = self.ujo.setSplitJobIndexList(range(0, 7, 3))
    self.assertTrue(res['OK'])
    self.assertEqual([0, 3, 6], self.ujo._jobIndexList)
    # A set is not an accepted argument type; the stored list must survive.
    res = self.ujo.setSplitJobIndexList(set(range(1, 7, 3)))
    self.assertFalse(res['OK'])
    self.assertIn('Invalid argument type', res['Message'])
    self.assertEqual([0, 3, 6], self.ujo._jobIndexList)
def main(argv): # Input arguments ildconfig_version = "$ILDCONFIGVER" ilcsoft_version = "$ILCSOFTVER" evts_per_run = $EVTSPERRUN detector_model = "$DETECTOR" sim_input = "$SIMINPUT" process_name = "$PROCESS" index = $IND sim_input = diracpath_from_pnfspath( sim_input ) sim_detector_model = detector_model_wo_option( detector_model ) job_group = ilcsoft_version + "_" + ildconfig_version + "_" + process_name + "_" + detector_model dirac = DiracILC(True,job_group+".rep") # outputs to be saved onto grid SE RECoutput = [] # DDSim evtStart = (index-1)*evts_per_run evtEnd = index*evts_per_run - 1 RandSeed = random.randrange(11623, 99999) lcinputSIM = "LFN:" + sim_input lcoutputSIM = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_SIM.slcio"%(str(evtStart),(str)(evtEnd)) sim = DDSim() sim.setVersion(ilcsoft_version) sim.setDetectorModel(sim_detector_model) sim.setInputFile(lcinputSIM) sim.setSteeringFile("ddsim_steer.py") sim.setNumberOfEvents(evts_per_run) sim.setRandomSeed(RandSeed) sim.setEnergy(1000) sim.setStartFrom(evtStart) sim.setOutputFile(lcoutputSIM) # Marlin lcoutputDST = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." 
+ process_name + ".eLpR.evt%s-%s_DST.slcio"%(str(evtStart),(str)(evtEnd)) ma = Marlin() ma.setVersion(ilcsoft_version) ma.setDetectorModel(detector_model) ma.setSteeringFile("MarlinStdReco.xml") ma.setExtraCLIArguments( "--constant.lcgeo_DIR=$lcgeo_DIR --constant.DetectorModel={} --global.MaxRecordNumber=0".format(detector_model) ) ma.setLogFile("marlin.log") ma.getInputFromApp(sim) ma.setEnergy(1000) ma.setOutputDstFile(lcoutputDST) RECoutput.append(lcoutputDST) # ILCDirac user job job = UserJob() job.setName("user_sim_reco") job.setJobGroup(job_group) job.setILDConfig(ildconfig_version) job.setCPUTime(86400) tmp_file_name = process_name + "_sim_reco_job_tmp.py" job.setInputSandbox([tmp_file_name]) job.setOutputSandbox(["*.log","MarlinStdRecoParsed.xml","marlin*.xml","*.py "]) job.dontPromptMe() job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us']) # run simulation job simres = job.append(sim) if not simres['OK']: print simres['Not ok appending ddsim to job'] quit() # run Malrin reco jobs mares = job.append(ma) if not mares['OK']: print mares['Not ok appending Marlin to job'] quit() job.setOutputData(RECoutput,"ILDPerformance/WWZZSeparation/{}_ILDConfig_{}_{}".format(ilcsoft_version,ildconfig_version,detector_model),"DESY-SRM") print RECoutput submit_output = job.submit(dirac) print submit_output