def test_CreateAndSubmit(self):
    """Create a JobWrapper for a simple hello-world job and submit it to an InProcess CE."""
    jobParams = {
        'JobID': '1',
        'JobType': 'Merge',
        'CPUTime': '1000000',
        'Executable': 'dirac-jobexec',
        'Arguments': "helloWorld.xml -o LogLevel=DEBUG --cfg pilot.cfg",
        'InputSandbox': ['helloWorld.xml', 'exe-script.py'],
    }
    resourceParams = {}
    optimizerParams = {}

    # Obtain an in-process computing element from the factory
    ceResult = ComputingElementFactory().getCE('InProcess')
    self.assertTrue(ceResult['OK'])
    computingElement = ceResult['Value']

    # When a local pilot configuration is present, forward it to the wrapper
    wrapperKwargs = {'logLevel': 'DEBUG'}
    if 'pilot.cfg' in os.listdir('.'):
        jobParams.setdefault('ExtraOptions', 'pilot.cfg')
        wrapperKwargs['extraOptions'] = 'pilot.cfg'
    res = createJobWrapper(2, jobParams, resourceParams, optimizerParams, **wrapperKwargs)
    self.assertTrue(res['OK'], res.get('Message'))
    wrapperFile = res['Value']

    res = computingElement.submitJob(wrapperFile, self.payloadProxy)
    self.assertTrue(res['OK'], res.get('Message'))
def test_CreateAndSubmit( self ):
    """ Create a JobWrapper for a hello-world job and submit it to an InProcess CE.

        Fix: the deprecated ``TestCase.assert_`` alias (removed from unittest in
        Python 3.12) is replaced by ``assertTrue``; behavior is otherwise unchanged.
    """
    jobParams = {'JobID': '1', 'JobType': 'Merge', 'CPUTime': '1000000',
                 'Executable': '$DIRACROOT/scripts/dirac-jobexec',
                 'Arguments': "helloWorld.xml -o LogLevel=DEBUG pilot.cfg",
                 'InputSandbox': ['helloWorld.xml', 'exe-script.py']}
    resourceParams = {}
    optimizerParams = {}

    ceFactory = ComputingElementFactory()
    ceInstance = ceFactory.getCE( 'InProcess' )
    self.assertTrue( ceInstance['OK'] )
    computingElement = ceInstance['Value']

    # Use the local pilot configuration when present
    if 'pilot.cfg' in os.listdir( '.' ):
        jobParams.setdefault( 'ExtraOptions', 'pilot.cfg' )
        res = createJobWrapper( 2, jobParams, resourceParams, optimizerParams,
                                extraOptions = 'pilot.cfg', logLevel = 'DEBUG' )
    else:
        res = createJobWrapper( 2, jobParams, resourceParams, optimizerParams, logLevel = 'DEBUG' )
    self.assertTrue( res['OK'] )
    wrapperFile = res['Value']

    res = computingElement.submitJob( wrapperFile, self.payloadProxy )
    self.assertTrue( res['OK'] )
def __submitJob( self, jobID, jobParams, resourceParams, optimizerParams, proxyChain, processors, wholeNode = False ):
    """ Submit job to the Computing Element instance after creating a custom
        Job Wrapper with the available job parameters.

        :param jobID: identifier of the job being submitted
        :param jobParams: job parameters handed to the JobWrapper
        :param resourceParams: description of the local resource
        :param optimizerParams: parameters set by the optimizers
        :param proxyChain: proxy chain; its string dump is passed to the CE as payload proxy
        :param processors: number of processors requested for the job
        :param wholeNode: whether the job requests the whole worker node
        :return: S_OK/S_ERROR; on success the result may carry a 'PayloadFailed' key
    """
    logLevel = self.am_getOption( 'DefaultLogLevel', 'INFO' )
    defaultWrapperLocation = self.am_getOption( 'JobWrapperTemplate', 'DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py' )
    jobDesc = { "jobID": jobID,
                "jobParams": jobParams,
                "resourceParams": resourceParams,
                "optimizerParams": optimizerParams,
                "extraOptions": self.extraOptions,
                "defaultWrapperLocation": defaultWrapperLocation }
    result = createJobWrapper( log = self.log, logLevel = logLevel, **jobDesc )
    if not result['OK']:
        return result
    wrapperFile = result['Value']
    self.__report( jobID, 'Matched', 'Submitted To CE' )
    self.log.info( 'Submitting JobWrapper %s to %sCE' % ( os.path.basename( wrapperFile ), self.ceName ) )

    # Pass proxy to the CE
    proxy = proxyChain.dumpAllToString()
    if not proxy['OK']:
        self.log.error( 'Invalid proxy', proxy )
        return S_ERROR( 'Payload Proxy Not Found' )
    payloadProxy = proxy['Value']

    submission = self.computingElement.submitJob( wrapperFile, payloadProxy,
                                                  numberOfProcessors = processors,
                                                  wholeNode = wholeNode,
                                                  jobDesc = jobDesc,
                                                  log = self.log,
                                                  logLevel = logLevel )
    ret = S_OK( 'Job submitted' )
    if submission['OK']:
        batchID = submission['Value']
        self.log.info( 'Job %s submitted as %s' % ( jobID, batchID ) )
        self.log.verbose( 'Set JobParameter: Local batch ID %s' % ( batchID ) )
        self.__setJobParam( jobID, 'LocalBatchID', str( batchID ) )
        if 'PayloadFailed' in submission:
            # Submission itself succeeded but the payload failed: propagate the flag
            ret['PayloadFailed'] = submission['PayloadFailed']
            return ret
        # Throttle consecutive submissions
        time.sleep( self.jobSubmissionDelay )
    else:
        self.log.error( 'Job submission failed', jobID )
        self.__setJobParam( jobID, 'ErrorMessage', '%s CE Submission Error' % ( self.ceName ) )
        if 'ReschedulePayload' in submission:
            rescheduleFailedJob( jobID, submission['Message'] )
            return S_OK()  # Without this job is marked as failed at line 265 above
        else:
            # NOTE(review): S_ERROR result deliberately carrying a 'Value' key (exit code)
            if 'Value' in submission:
                self.log.error( 'Error in DIRAC JobWrapper:', 'exit code = %s' % ( str( submission['Value'] ) ) )
            return S_ERROR( '%s CE Error: %s' % ( self.ceName, submission['Message'] ) )
    return ret
def test_CreateAndSubmit(self):
    """End-to-end check: build a JobWrapper and run it through the InProcess CE."""
    jobParams = {
        "JobID": "1",
        "JobType": "Merge",
        "CPUTime": "1000000",
        "Executable": "dirac-jobexec",
        "Arguments": "helloWorld.xml -o LogLevel=DEBUG --cfg pilot.cfg",
        "InputSandbox": ["helloWorld.xml", "exe-script.py"],
    }
    resourceParams = {}
    optimizerParams = {}

    factoryResult = ComputingElementFactory().getCE("InProcess")
    self.assertTrue(factoryResult["OK"])
    ce = factoryResult["Value"]

    usePilotCfg = "pilot.cfg" in os.listdir(".")
    if usePilotCfg:
        jobParams.setdefault("ExtraOptions", "pilot.cfg")
        res = createJobWrapper(2, jobParams, resourceParams, optimizerParams,
                               extraOptions="pilot.cfg", logLevel="DEBUG")
    else:
        res = createJobWrapper(2, jobParams, resourceParams, optimizerParams, logLevel="DEBUG")
    self.assertTrue(res["OK"], res.get("Message"))
    # first element of the returned Value is the wrapper script path
    wrapperFile = res["Value"][0]

    res = ce.submitJob(wrapperFile, self.payloadProxy)
    self.assertTrue(res["OK"], res.get("Message"))
def test_submitJob():
    """Submit a plain script, then a wrapped multi-processor job, to an InProcess CE."""
    # First submission: the script itself, no JobWrapper
    with open("testJob.py", "w") as execFile:
        execFile.write(jobScript % "1")
    os.chmod("testJob.py", 0o755)

    ce = InProcessComputingElement("InProcessCE")
    submitResult = ce.submitJob("testJob.py", None)
    assert submitResult["OK"] is True
    statusResult = ce.getCEStatus()
    assert statusResult["OK"] is True
    assert statusResult["SubmittedJobs"] == 1
    _stopJob(1)
    for leftover in ("testJob.py", "stop_job_2", "job.info", "std.out"):
        if os.path.isfile(leftover):
            os.remove(leftover)

    # Second submission: via a JobWrapper, with MP parameters
    with open("testJob.py", "w") as execFile:
        execFile.write(jobScript % "2")
    os.chmod("testJob.py", 0o755)
    jobParams = {"JobType": "User", "Executable": "testJob.py"}
    resourceParams = {"GridCE": "some_CE"}
    optimizerParams = {}
    # Wrapper creation is not under test; assume it works fine
    wrapperFile = createJobWrapper(2, jobParams, resourceParams, optimizerParams,
                                   logLevel="DEBUG")["Value"][0]
    submitResult = ce.submitJob(
        wrapperFile,
        proxy=None,
        numberOfProcessors=4,
        maxNumberOfProcessors=8,
        wholeNode=False,
        mpTag=True,
        jobDesc={"jobParams": jobParams,
                 "resourceParams": resourceParams,
                 "optimizerParams": optimizerParams},
    )
    assert submitResult["OK"] is True
    statusResult = ce.getCEStatus()
    assert statusResult["OK"] is True
    assert statusResult["SubmittedJobs"] == 2
    _stopJob(2)
    for leftover in ("testJob.py", "stop_job_2", "job.info", "std.out"):
        if os.path.isfile(leftover):
            os.remove(leftover)
    if os.path.isdir("job"):
        shutil.rmtree("job")
def __submitJob( self, jobID, jobParams, resourceParams, optimizerParams, proxyChain ):
    """Submit job to the Computing Element instance after creating a custom
       Job Wrapper with the available job parameters.

       :param jobID: identifier of the job being submitted
       :param jobParams: job parameters handed to the JobWrapper
       :param resourceParams: description of the local resource
       :param optimizerParams: parameters set by the optimizers
       :param proxyChain: proxy chain; its string dump is passed to the CE as payload proxy
       :return: S_OK/S_ERROR; on success the result may carry a 'PayloadFailed' key
    """
    logLevel = self.am_getOption( 'DefaultLogLevel', 'INFO' )
    defaultWrapperLocation = self.am_getOption( 'JobWrapperTemplate', 'DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py' )
    result = createJobWrapper( jobID, jobParams, resourceParams, optimizerParams,
                               extraOptions = self.extraOptions,
                               defaultWrapperLocation = defaultWrapperLocation,
                               log = self.log,
                               logLevel = logLevel )
    if not result['OK']:
        return result
    wrapperFile = result['Value']
    self.__report( jobID, 'Matched', 'Submitted To CE' )
    self.log.info( 'Submitting %s to %sCE' % ( os.path.basename( wrapperFile ), self.ceName ) )

    #Pass proxy to the CE
    proxy = proxyChain.dumpAllToString()
    if not proxy['OK']:
        self.log.error( proxy )
        return S_ERROR( 'Payload Proxy Not Found' )
    payloadProxy = proxy['Value']

    # FIXME: how can we set the batchID before we submit, this makes no sense
    # NOTE(review): this preassignment is immediately overwritten on successful submission
    batchID = 'dc%s' % ( jobID )
    submission = self.computingElement.submitJob( wrapperFile, payloadProxy )
    ret = S_OK( 'Job submitted' )
    if submission['OK']:
        batchID = submission['Value']
        self.log.info( 'Job %s submitted as %s' % ( jobID, batchID ) )
        self.log.verbose( 'Set JobParameter: Local batch ID %s' % ( batchID ) )
        self.__setJobParam( jobID, 'LocalBatchID', str( batchID ) )
        if 'PayloadFailed' in submission:
            # Submission itself succeeded but the payload failed: propagate the flag
            ret['PayloadFailed'] = submission['PayloadFailed']
            return ret
        # Throttle consecutive submissions
        time.sleep( self.jobSubmissionDelay )
    else:
        self.log.error( 'Job submission failed', jobID )
        self.__setJobParam( jobID, 'ErrorMessage', '%s CE Submission Error' % ( self.ceName ) )
        if 'ReschedulePayload' in submission:
            rescheduleFailedJob( jobID, submission['Message'], self.__report )
        else:
            if 'Value' in submission:
                self.log.error( 'Error in DIRAC JobWrapper:', 'exit code = %s' % ( str( submission['Value'] ) ) )
        # make sure the Job is declared Failed
        self.__report( jobID, 'Failed', submission['Message'] )
        return S_ERROR( '%s CE Submission Error: %s' % ( self.ceName, submission['Message'] ) )
    return ret
def test_submitJob():
    """Run a bare script and then a wrapped multi-processor job through the InProcess CE."""
    # Plain executable, no JobWrapper
    with open('testJob.py', 'w') as execFile:
        execFile.write(jobScript % '1')
    os.chmod('testJob.py', 0o755)

    ce = InProcessComputingElement('InProcessCE')
    submitResult = ce.submitJob('testJob.py', None)
    assert submitResult['OK'] is True
    statusResult = ce.getCEStatus()
    assert statusResult['OK'] is True
    assert statusResult['SubmittedJobs'] == 1
    _stopJob(1)
    for leftover in ('testJob.py', 'stop_job_2', 'job.info', 'std.out'):
        if os.path.isfile(leftover):
            os.remove(leftover)

    # With a job wrapper and some MP parameters
    with open('testJob.py', 'w') as execFile:
        execFile.write(jobScript % '2')
    os.chmod('testJob.py', 0o755)
    jobParams = {'JobType': 'User', 'Executable': 'testJob.py'}
    resourceParams = {'GridCE': 'some_CE'}
    optimizerParams = {}
    # Wrapper creation itself is not under test; assume it works fine
    wrapperFile = createJobWrapper(2, jobParams, resourceParams, optimizerParams,
                                   logLevel='DEBUG')['Value']
    submitResult = ce.submitJob(wrapperFile,
                                proxy=None,
                                numberOfProcessors=4,
                                maxNumberOfProcessors=8,
                                wholeNode=False,
                                mpTag=True,
                                jobDesc={"jobParams": jobParams,
                                         "resourceParams": resourceParams,
                                         "optimizerParams": optimizerParams})
    assert submitResult['OK'] is True
    statusResult = ce.getCEStatus()
    assert statusResult['OK'] is True
    assert statusResult['SubmittedJobs'] == 2
    _stopJob(2)
    for leftover in ('testJob.py', 'stop_job_2', 'job.info', 'std.out'):
        if os.path.isfile(leftover):
            os.remove(leftover)
    if os.path.isdir('job'):
        shutil.rmtree('job')
def test_submitJobWrapper():
    """Submit a wrapped MP job through the Singularity CE; the image lookup is expected to fail."""
    with open("testJob.py", "w") as execFile:
        execFile.write(jobScript % "2")
    os.chmod("testJob.py", 0o755)

    jobParams = {"JobType": "User", "Executable": "testJob.py"}
    resourceParams = {"GridCE": "some_CE"}
    optimizerParams = {}
    # Wrapper creation is not under test; assume it works fine
    wrapperFile = createJobWrapper(2, jobParams, resourceParams, optimizerParams,
                                   logLevel="DEBUG")["Value"][0]
    shutil.copy(fj, os.curdir)

    ce = SingularityComputingElement("SingularityComputingElement")
    submitResult = ce.submitJob(
        wrapperFile,
        proxy=None,
        numberOfProcessors=4,
        maxNumberOfProcessors=8,
        wholeNode=False,
        mpTag=True,
        jobDesc={"jobParams": jobParams,
                 "resourceParams": resourceParams,
                 "optimizerParams": optimizerParams},
    )
    # Submission fails because the container image can't be found
    assert submitResult["OK"] is False
    assert submitResult["ReschedulePayload"] is True
    statusResult = ce.getCEStatus()
    assert statusResult["OK"] is True
    expectedSubmitted = 0 if six.PY2 else 1
    assert statusResult["SubmittedJobs"] == expectedSubmitted

    _stopJob(2)
    for leftover in ("testJob.py", "stop_job_2", "job.info", "std.out", "pilot.json"):
        if os.path.isfile(leftover):
            os.remove(leftover)
    if os.path.isdir("job"):
        shutil.rmtree("job")
def test_submitJobWrapper():
    """Submit a wrapped MP job via the Singularity CE (expected to fail on image lookup)."""
    with open('testJob.py', 'w') as execFile:
        execFile.write(jobScript % '2')
    os.chmod('testJob.py', 0o755)

    jobParams = {'JobType': 'User', 'Executable': 'testJob.py'}
    resourceParams = {'GridCE': 'some_CE'}
    optimizerParams = {}
    # Wrapper creation is not under test here; assume it works fine
    wrapperFile = createJobWrapper(2, jobParams, resourceParams, optimizerParams,
                                   logLevel='DEBUG')['Value']
    shutil.copy(fj, os.curdir)

    ce = SingularityComputingElement('SingularityComputingElement')
    submitResult = ce.submitJob(wrapperFile,
                                proxy=None,
                                numberOfProcessors=4,
                                maxNumberOfProcessors=8,
                                wholeNode=False,
                                mpTag=True,
                                jobDesc={"jobParams": jobParams,
                                         "resourceParams": resourceParams,
                                         "optimizerParams": optimizerParams})
    # Submission fails because the container image can't be found
    assert submitResult['OK'] is False
    assert submitResult['ReschedulePayload'] is True
    statusResult = ce.getCEStatus()
    assert statusResult['OK'] is True
    assert statusResult['SubmittedJobs'] == 1

    _stopJob(2)
    for leftover in ('testJob.py', 'stop_job_2', 'job.info', 'std.out', 'pilot.json'):
        if os.path.isfile(leftover):
            os.remove(leftover)
    if os.path.isdir('job'):
        shutil.rmtree('job')
def _submitJob(
    self,
    jobID,
    jobParams,
    resourceParams,
    optimizerParams,
    proxyChain,
    jobReport,
    processors=1,
    wholeNode=False,
    maxNumberOfProcessors=0,
    mpTag=False,
):
    """Submit job to the Computing Element instance after creating a custom
    Job Wrapper with the available job parameters.

    :param jobID: identifier of the job being submitted
    :param jobParams: job parameters (mutated here: 'ExecutionEnvironment' is extended)
    :param resourceParams: description of the local resource
    :param optimizerParams: parameters set by the optimizers
    :param proxyChain: proxy chain; its string dump is passed to the CE as payload proxy
    :param jobReport: reporter used for job status and parameters
    :param processors: number of processors requested for the job
    :param wholeNode: whether the job requests the whole worker node
    :param maxNumberOfProcessors: upper bound on processors (0 presumably means "no limit" -- TODO confirm)
    :param mpTag: whether the job carries the multi-processor tag
    :return: S_OK/S_ERROR; on payload failure the result carries a 'PayloadFailed' key
    """
    logLevel = self.am_getOption("DefaultLogLevel", "INFO")
    defaultWrapperLocation = self.am_getOption(
        "JobWrapperTemplate",
        "DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py")

    # Add the number of requested processors to the job environment
    if "ExecutionEnvironment" in jobParams:
        if isinstance(jobParams["ExecutionEnvironment"], six.string_types):
            # a single ';'-separated string becomes a list of assignments
            jobParams["ExecutionEnvironment"] = jobParams[
                "ExecutionEnvironment"].split(";")
    jobParams.setdefault("ExecutionEnvironment",
                         []).append("DIRAC_JOB_PROCESSORS=%d" % processors)

    jobDesc = {
        "jobID": jobID,
        "jobParams": jobParams,
        "resourceParams": resourceParams,
        "optimizerParams": optimizerParams,
        "extraOptions": self.extraOptions,
        "defaultWrapperLocation": defaultWrapperLocation,
    }
    result = createJobWrapper(log=self.log, logLevel=logLevel, **jobDesc)
    if not result["OK"]:
        return result
    # first element is the wrapper script; any remaining entries are extra inputs for the CE
    wrapperFile = result["Value"][0]
    inputs = list(result["Value"][1:])
    jobReport.setJobStatus(minorStatus="Submitting To CE")

    self.log.info(
        "Submitting JobWrapper",
        "%s to %sCE" % (os.path.basename(wrapperFile), self.ceName))

    # Pass proxy to the CE
    proxy = proxyChain.dumpAllToString()
    if not proxy["OK"]:
        self.log.error("Invalid proxy", proxy)
        return S_ERROR("Payload Proxy Not Found")
    payloadProxy = proxy["Value"]

    submission = self.computingElement.submitJob(
        wrapperFile,
        payloadProxy,
        numberOfProcessors=processors,
        maxNumberOfProcessors=maxNumberOfProcessors,
        wholeNode=wholeNode,
        mpTag=mpTag,
        jobDesc=jobDesc,
        log=self.log,
        logLevel=logLevel,
        inputs=inputs,
    )
    submissionResult = S_OK("Job submitted")
    if submission["OK"]:
        batchID = submission["Value"]
        # NOTE(review): unbalanced '(' in this log message text
        self.log.info("Job submitted",
                      "(DIRAC JobID: %s; Batch ID: %s" % (jobID, batchID))
        if "PayloadFailed" in submission:
            # Submission succeeded but the payload failed: propagate the flag
            submissionResult["PayloadFailed"] = submission["PayloadFailed"]
        # Throttle consecutive submissions
        time.sleep(self.jobSubmissionDelay)
    else:
        self.log.error("Job submission failed", jobID)
        jobReport.setJobParameter(par_name="ErrorMessage",
                                  par_value="%s CE Submission Error" %
                                  (self.ceName),
                                  sendFlag=False)
        if "ReschedulePayload" in submission:
            result = self._rescheduleFailedJob(jobID, submission["Message"])
            self._finish(result["Message"], self.stopOnApplicationFailure)
            return S_OK()  # Without this, the job is marked as failed
        else:
            if "Value" in submission:  # yes, it's "correct", S_ERROR with 'Value' key
                self.log.error(
                    "Error in DIRAC JobWrapper or inner CE execution:",
                    "exit code = %s" % (str(submission["Value"])),
                )
            self.log.error("CE Error",
                           "%s : %s" % (self.ceName, submission["Message"]))
            submissionResult = submission
    return submissionResult
def _submitJob(self, jobID, jobParams, resourceParams, optimizerParams,
               proxyChain, processors=1, wholeNode=False,
               maxNumberOfProcessors=0, mpTag=False):
    """ Submit job to the Computing Element instance after creating a custom
        Job Wrapper with the available job parameters.

        :param jobID: identifier of the job being submitted
        :param jobParams: job parameters handed to the JobWrapper
        :param resourceParams: description of the local resource
        :param optimizerParams: parameters set by the optimizers
        :param proxyChain: proxy chain; its string dump is passed to the CE as payload proxy
        :param processors: number of processors requested for the job
        :param wholeNode: whether the job requests the whole worker node
        :param maxNumberOfProcessors: upper bound on processors (0 presumably means "no limit" -- TODO confirm)
        :param mpTag: whether the job carries the multi-processor tag
        :return: S_OK/S_ERROR; on success the result may carry a 'PayloadFailed' key
    """
    logLevel = self.am_getOption('DefaultLogLevel', 'INFO')
    defaultWrapperLocation = self.am_getOption(
        'JobWrapperTemplate',
        'DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py')
    jobDesc = {
        "jobID": jobID,
        "jobParams": jobParams,
        "resourceParams": resourceParams,
        "optimizerParams": optimizerParams,
        "extraOptions": self.extraOptions,
        "defaultWrapperLocation": defaultWrapperLocation
    }
    result = createJobWrapper(log=self.log, logLevel=logLevel, **jobDesc)
    if not result['OK']:
        return result
    wrapperFile = result['Value']
    self.__report(jobID, 'Matched', 'Submitted To CE')

    self.log.info(
        'Submitting JobWrapper',
        '%s to %sCE' % (os.path.basename(wrapperFile), self.ceName))

    # Pass proxy to the CE
    proxy = proxyChain.dumpAllToString()
    if not proxy['OK']:
        self.log.error('Invalid proxy', proxy)
        return S_ERROR('Payload Proxy Not Found')
    payloadProxy = proxy['Value']

    submission = self.computingElement.submitJob(
        wrapperFile,
        payloadProxy,
        numberOfProcessors=processors,
        maxNumberOfProcessors=maxNumberOfProcessors,
        wholeNode=wholeNode,
        mpTag=mpTag,
        jobDesc=jobDesc,
        log=self.log,
        logLevel=logLevel)
    ret = S_OK('Job submitted')
    if submission['OK']:
        batchID = submission['Value']
        self.log.info('Job submitted', '%s as %s' % (jobID, batchID))
        if 'PayloadFailed' in submission:
            # Submission succeeded but the payload failed: propagate the flag
            ret['PayloadFailed'] = submission['PayloadFailed']
            return ret
        # Throttle consecutive submissions
        time.sleep(self.jobSubmissionDelay)
    else:
        self.log.error('Job submission failed', jobID)
        self.__setJobParam(jobID, 'ErrorMessage',
                           '%s CE Submission Error' % (self.ceName))
        if 'ReschedulePayload' in submission:
            rescheduleFailedJob(jobID, submission['Message'])
            return S_OK()  # Without this, the job is marked as failed
        else:
            # NOTE(review): S_ERROR result deliberately carrying a 'Value' key (exit code)
            if 'Value' in submission:
                self.log.error(
                    'Error in DIRAC JobWrapper:',
                    'exit code = %s' % (str(submission['Value'])))
            return S_ERROR('%s CE Error: %s' % (self.ceName, submission['Message']))
    return ret
def _submitJob(self, jobID, jobParams, resourceParams, optimizerParams,
               proxyChain, jobReport, processors=1, wholeNode=False,
               maxNumberOfProcessors=0, mpTag=False):
    """ Submit job to the Computing Element instance after creating a custom
        Job Wrapper with the available job parameters.

        :param jobID: identifier of the job being submitted
        :param jobParams: job parameters (mutated here: 'ExecutionEnvironment' is extended)
        :param resourceParams: description of the local resource
        :param optimizerParams: parameters set by the optimizers
        :param proxyChain: proxy chain; its string dump is passed to the CE as payload proxy
        :param jobReport: reporter used for job status and parameters
        :param processors: number of processors requested for the job
        :param wholeNode: whether the job requests the whole worker node
        :param maxNumberOfProcessors: upper bound on processors (0 presumably means "no limit" -- TODO confirm)
        :param mpTag: whether the job carries the multi-processor tag
        :return: S_OK/S_ERROR; on payload failure the result carries a 'PayloadFailed' key
    """
    logLevel = self.am_getOption('DefaultLogLevel', 'INFO')
    defaultWrapperLocation = self.am_getOption(
        'JobWrapperTemplate',
        'DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py')

    # Add the number of requested processors to the job environment
    if 'ExecutionEnvironment' in jobParams:
        if isinstance(jobParams['ExecutionEnvironment'], six.string_types):
            # a single ';'-separated string becomes a list of assignments
            jobParams['ExecutionEnvironment'] = jobParams[
                'ExecutionEnvironment'].split(';')
    jobParams.setdefault('ExecutionEnvironment',
                         []).append('DIRAC_JOB_PROCESSORS=%d' % processors)

    jobDesc = {
        "jobID": jobID,
        "jobParams": jobParams,
        "resourceParams": resourceParams,
        "optimizerParams": optimizerParams,
        "extraOptions": self.extraOptions,
        "defaultWrapperLocation": defaultWrapperLocation
    }
    result = createJobWrapper(log=self.log, logLevel=logLevel, **jobDesc)
    if not result['OK']:
        return result
    wrapperFile = result['Value']
    jobReport.setJobStatus(status='Matched', minor='Submitting To CE')

    # Record where the job is going, from the local site configuration
    gridCE = gConfig.getValue('/LocalSite/GridCE', '')
    queue = gConfig.getValue('/LocalSite/CEQueue', '')
    jobReport.setJobParameter(par_name='GridCE',
                              par_value=gridCE,
                              sendFlag=False)
    jobReport.setJobParameter(par_name='CEQueue',
                              par_value=queue,
                              sendFlag=False)

    self.log.info(
        'Submitting JobWrapper',
        '%s to %sCE' % (os.path.basename(wrapperFile), self.ceName))

    # Pass proxy to the CE
    proxy = proxyChain.dumpAllToString()
    if not proxy['OK']:
        self.log.error('Invalid proxy', proxy)
        return S_ERROR('Payload Proxy Not Found')
    payloadProxy = proxy['Value']

    submission = self.computingElement.submitJob(
        wrapperFile,
        payloadProxy,
        numberOfProcessors=processors,
        maxNumberOfProcessors=maxNumberOfProcessors,
        wholeNode=wholeNode,
        mpTag=mpTag,
        jobDesc=jobDesc,
        log=self.log,
        logLevel=logLevel)
    submissionResult = S_OK('Job submitted')
    if submission['OK']:
        batchID = submission['Value']
        # NOTE(review): unbalanced '(' in this log message text
        self.log.info('Job submitted',
                      '(DIRAC JobID: %s; Batch ID: %s' % (jobID, batchID))
        if 'PayloadFailed' in submission:
            # Submission succeeded but the payload failed: propagate the flag
            submissionResult['PayloadFailed'] = submission['PayloadFailed']
        # Throttle consecutive submissions
        time.sleep(self.jobSubmissionDelay)
    else:
        self.log.error('Job submission failed', jobID)
        jobReport.setJobParameter(par_name='ErrorMessage',
                                  par_value='%s CE Submission Error' %
                                  (self.ceName),
                                  sendFlag=False)
        if 'ReschedulePayload' in submission:
            self._rescheduleFailedJob(jobID, submission['Message'],
                                      self.stopOnApplicationFailure)
            return S_OK()  # Without this, the job is marked as failed
        else:
            if 'Value' in submission:
                self.log.error(
                    'Error in DIRAC JobWrapper or inner CE execution:',
                    'exit code = %s' % (str(submission['Value'])))
            self.log.error("CE Error",
                           "%s : %s" % (self.ceName, submission['Message']))
            submissionResult = submission
    return submissionResult
def __submitJob(self, jobID, jobParams, resourceParams, optimizerParams, proxyChain):
    """Submit job to the Computing Element instance after creating a custom
    Job Wrapper with the available job parameters.

    :param jobID: identifier of the job being submitted
    :param jobParams: job parameters handed to the JobWrapper
    :param resourceParams: description of the local resource
    :param optimizerParams: parameters set by the optimizers
    :param proxyChain: proxy chain; its string dump is passed to the CE as payload proxy
    :return: S_OK/S_ERROR; on success the result may carry a 'PayloadFailed' key
    """
    logLevel = self.am_getOption("DefaultLogLevel", "INFO")
    defaultWrapperLocation = self.am_getOption(
        "JobWrapperTemplate", "DIRAC/WorkloadManagementSystem/JobWrapper/JobWrapperTemplate.py"
    )
    result = createJobWrapper(
        jobID,
        jobParams,
        resourceParams,
        optimizerParams,
        extraOptions=self.extraOptions,
        defaultWrapperLocation=defaultWrapperLocation,
        log=self.log,
        logLevel=logLevel,
    )
    if not result["OK"]:
        return result
    wrapperFile = result["Value"]
    self.__report(jobID, "Matched", "Submitted To CE")

    self.log.info("Submitting %s to %sCE" % (os.path.basename(wrapperFile), self.ceName))

    # Pass proxy to the CE
    proxy = proxyChain.dumpAllToString()
    if not proxy["OK"]:
        self.log.error("Invalid proxy", proxy)
        return S_ERROR("Payload Proxy Not Found")
    payloadProxy = proxy["Value"]

    # FIXME: how can we set the batchID before we submit, this makes no sense
    # NOTE(review): this preassignment is immediately overwritten on successful submission
    batchID = "dc%s" % (jobID)
    submission = self.computingElement.submitJob(wrapperFile, payloadProxy)
    ret = S_OK("Job submitted")
    if submission["OK"]:
        batchID = submission["Value"]
        self.log.info("Job %s submitted as %s" % (jobID, batchID))
        self.log.verbose("Set JobParameter: Local batch ID %s" % (batchID))
        self.__setJobParam(jobID, "LocalBatchID", str(batchID))
        if "PayloadFailed" in submission:
            # Submission succeeded but the payload failed: propagate the flag
            ret["PayloadFailed"] = submission["PayloadFailed"]
            return ret
        # Throttle consecutive submissions
        time.sleep(self.jobSubmissionDelay)
    else:
        self.log.error("Job submission failed", jobID)
        self.__setJobParam(jobID, "ErrorMessage", "%s CE Submission Error" % (self.ceName))
        if "ReschedulePayload" in submission:
            rescheduleFailedJob(jobID, submission["Message"])
            return S_OK()  # Without this job is marked as failed at line 265 above
        else:
            if "Value" in submission:
                self.log.error("Error in DIRAC JobWrapper:", "exit code = %s" % (str(submission["Value"])))
        # make sure the Job is declared Failed
        self.__report(jobID, "Failed", submission["Message"])
        return S_ERROR("%s CE Submission Error: %s" % (self.ceName, submission["Message"]))
    return ret