def submitProbeJobs(self, ce):
    """ Submit some jobs to the CEs """
    # need credentials, should be there since the initialize
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job
    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
    import DIRAC

    dirac = Dirac()
    ops = Operations()
    # NOTE(review): "SofwareManagementScript" looks like a typo but it is the
    # actual CS option name — do not "fix" it without migrating the config.
    scriptname = ops.getValue("ResourceStatus/SofwareManagementScript", self.script)

    probe = Job()
    probe.setDestinationCE(ce)
    probe.setCPUTime(1000)
    probe.setName("Probe %s" % ce)
    probe.setJobGroup("SoftwareProbe")
    probe.setExecutable("%s/GlastDIRAC/ResourceStatusSystem/Client/%s" % (DIRAC.rootPath, scriptname),
                        logFile='SoftwareProbe.log')
    probe.setOutputSandbox('*.log')

    res = dirac.submit(probe)
    if not res['OK']:
        return res
    return S_OK()
def cleanProd3sw(args=None):
    """ Simple wrapper to remove Software package """
    # positional arguments: package version site
    package = args[0]
    version = args[1]
    site = args[2]

    job = Job()
    dirac = Dirac()

    step = job.setExecutable('./cta-cleansw.py',
                             arguments='%s %s' % (package, version),
                             logFile='cleanSoftware_Log.txt')
    step['Value']['name'] = 'Step_cleanSoftware'
    step['Value']['descr_short'] = 'clean Software'

    # override for testing
    job.setName('CleanProd3Sw')
    # send job at Lyon CC
    job.setDestination([site])

    # run job
    # res = dirac.submit( job, "local" )
    res = dirac.submit(job)
    Script.gLogger.notice('Submission Result: ', res)
    return DIRAC.S_OK('Done')
def submitProbeJobs(self, ce):
    """ Submit some jobs to the CEs """
    # need credentials, should be there since the initialize
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job
    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
    import os

    dirac = Dirac()
    ops = Operations("glast.org")
    # NOTE(review): "SofwareManagementScript" looks misspelled but is the real
    # CS option name; keep it as-is for config compatibility.
    scriptname = ops.getValue("ResourceStatus/SofwareManagementScript", self.script)

    probe = Job()
    probe.setDestinationCE(ce)
    probe.setCPUTime(1000)
    probe.setName("Probe %s" % ce)
    probe.setJobGroup("SoftwareProbe")
    probe.setExecutable("%s/GlastDIRAC/ResourceStatusSystem/Client/%s" % (os.environ['DIRAC'], scriptname),
                        logFile='SoftwareProbe.log')
    probe.setOutputSandbox('*.log')

    res = dirac.submit(probe)
    if not res['OK']:
        return res
    return S_OK()
def runLocal(self, dirac=None):
    """ The dirac (API) object is for local submission. """
    # fall back to a fresh Dirac API instance when the caller supplied none
    submitter = Dirac() if dirac is None else dirac
    return submitter.submit(self, mode='local')
def submitWMS(job):
    """ Submit the job to the WMS """
    # pin the destination site before submitting
    job.setDestination('LCG.IN2P3-CC.fr')
    result = Dirac().submit(job)
    Script.gLogger.notice('Submission Result: ', result)
    return result
def runLocal(self, dirac=None):
    """ The dirac (API) object is for local submission. """
    api = dirac if dirac is not None else Dirac()
    # 'local' mode runs the workflow on this machine instead of the grid
    return api.submit(self, mode='local')
def submit_WMS(job, infileList):
    """ Submit the job locally or to the WMS """
    job.setInputData(infileList)
    job.setJobGroup('SimpleCtapipe-test')
    result = Dirac().submit(job)
    Script.gLogger.notice('Submission Result: ', result)
    return result
def submit_WMS(job, infileList):
    """ Submit the job locally or to the WMS """
    job.setInputData(infileList)
    job.setJobGroup('SimtelJob')
    result = Dirac().submit(job)
    # only log the job id on success; the full result dict is returned either way
    if result['OK']:
        Script.gLogger.info('Submission Result: ', result['Value'])
    return result
def submit_wms(job):
    """ Submit the job to the WMS

    @todo launch job locally
    """
    job.setJobGroup('Prod4CorsikaSSTJob')
    result = Dirac().submit(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
def submitWMS(job, infileList):
    """ Submit the job locally or to the WMS """
    # job.setDestination( 'LCG.IN2P3-CC.fr' )
    # only the first two input files are used
    job.setInputData(infileList[:2])
    job.setInputSandbox(['cta-prod3-get-matching-data.py'])
    dirac = Dirac()
    # res = dirac.submit( job, "local" )
    result = dirac.submit(job)
    Script.gLogger.notice('Submission Result: ', result)
    return result
def submitWMS(job, infileList):
    """ Submit the job locally or to the WMS.

    Sets the parametric input data, output data pattern and name on the job,
    then submits it and logs the outcome.

    :param job: DIRAC Job object to configure and submit
    :param infileList: list of LFNs used as parametric input data
    :return: the S_OK/S_ERROR result dict from Dirac().submit
    """
    dirac = Dirac()
    job.setParametricInputData(infileList)
    job.setOutputData(['*simtel-dst0.gz'])
    job.setName('readctajob')
    res = dirac.submit(job)
    # BUGFIX: the original logged res['Value'] unconditionally, which raises
    # KeyError when submission fails (failed results carry 'Message', not 'Value').
    if res['OK']:
        Script.gLogger.info('Submission Result: ', res['Value'])
    else:
        Script.gLogger.error('Submission failed: ', res['Message'])
    return res
def do_installonsite(self, argss): """ Install a release on a grid site : installonsite tag site """ args = argss.split() if len(args) < 2: print self.do_installonsite.__doc__ return tag = args[0] site = args[1] #print "Check if the software with the tag '"+tag+"' exists on the rsync server..." #res = self.client.getSitesForTag(tag) #if not res['OK']: #print res['Message'] #return #print "tag found !" from DIRAC.Interfaces.API.Dirac import Dirac d = Dirac() from DIRAC.Interfaces.API.Job import Job from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations import os ops = Operations() scriptname = "InstallSoftware.py" j = Job() j.setDestination(site) j.setCPUTime(1000) j.setName("Installation " + tag) j.setExecutable(os.environ['DIRAC'] + "/GlastDIRAC/ResourceStatusSystem/Client/externals/" + scriptname, logFile='SoftwareInstallation.log') j.setOutputSandbox('*.log') res = d.submit(j) if not res['OK']: print "Could not submit the installation at site %s, message %s" % ( site, res['Message']) return print "Job submitted, id = " + str(res['Value']) print "Add tag :" res = self.client.addTagAtSite(tag, site) if not res['OK']: print "Could not register tag %s at site %s, message %s" % ( tag, site, res['Message']) return print "Added %s to %i CEs" % (tag, len(res['Value'][tag]))
def submitWMS(job, infileList):
    """ Submit the job locally or to the WMS.

    Configures parametric input data, output data/sandbox and name, then submits.

    :param job: DIRAC Job object to configure and submit
    :param infileList: list of LFNs used as parametric input data
    :return: the S_OK/S_ERROR result dict from Dirac().submit
    """
    job.setParametricInputData(infileList)
    job.setOutputData(['*simtel.gz'])
    job.setOutputSandbox(['*Log.txt'])
    job.setInputSandbox(['mycfg'])
    job.setName('simteljob')
    dirac = Dirac()
    res = dirac.submit(job)
    # BUGFIX: guard on res['OK'] — the original read res['Value'] even when the
    # submission failed, which raises KeyError (failures only carry 'Message').
    if res['OK']:
        Script.gLogger.info('Submission Result: ', res['Value'])
    else:
        Script.gLogger.error('Submission failed: ', res['Message'])
    return res
def submitWMS(job, infileList):
    """ Submit the job locally or to the WMS """
    job.setInputData(infileList)
    job.setJobGroup('EvnDisp3-SCT-test')
    job.setInputSandbox(['cta-prod3-get-matching-data.py'])
    result = Dirac().submit(job)
    Script.gLogger.notice('Submission Result: ', result)
    return result
def run_test_job(args):
    """ Submit a minimal 'Hello World' test job using the first simtel file
    listed in the file named by args[0] as input data. """
    simtel_files = load_files_from_list(args[0])

    dirac = Dirac()
    job = Job()
    job.setCPUTime(500)
    job.setInputData(simtel_files[0])
    job.setExecutable('echo', 'Hello World!')
    job.setName('Hello World')

    res = dirac.submit(job)
    print('Submission Result: {}'.format(res))
    return res
def submit_paramWMS(job, infileList):
    """ Submit parametric jobs to the WMS """
    # one sub-job per entry of infileList
    job.setParameterSequence('InputData', infileList, addToWorkflow='ParametricInputData')
    job.setName('prov_ctapipe')
    result = Dirac().submit(job)
    if result['OK']:
        Script.gLogger.notice('Submission Result: ', result['Value'])
    return result
def do_installonsite(self,argss): """ Install a release on a grid site : installonsite tag site """ args = argss.split() if len(args)<2: print self.do_installonsite.__doc__ return tag = args[0] site = args[1] #print "Check if the software with the tag '"+tag+"' exists on the rsync server..." #res = self.client.getSitesForTag(tag) #if not res['OK']: #print res['Message'] #return #print "tag found !" from DIRAC.Interfaces.API.Dirac import Dirac d = Dirac() from DIRAC.Interfaces.API.Job import Job from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations import os ops = Operations() scriptname = "InstallSoftware.py" j = Job() j.setDestination(site) j.setCPUTime(1000) j.setName("Installation "+tag) j.setExecutable(os.environ['DIRAC']+"/GlastDIRAC/ResourceStatusSystem/Client/"+scriptname , logFile='SoftwareInstallation.log') j.setOutputSandbox('*.log') res = d.submit(j) if not res['OK']: print "Could not submit the installation at site %s, message %s"%(site,res['Message']) return print "Job submitted, id = "+str(res['Value']) print "Add tag :" res = self.client.addTagAtSite(tag,site) if not res['OK']: print "Could not register tag %s at site %s, message %s"%(tag,site,res['Message']) return print "Added %s to %i CEs"%(tag,len(res['Value'][tag]))
def submitWMS(job, infileList):
    """ Submit the job to the WMS.

    :param job: DIRAC Job object to configure and submit
    :param infileList: list of LFNs used as parametric input data
    :return: the S_OK/S_ERROR result dict from Dirac().submit
    """
    dirac = Dirac()
    job.setParametricInputData(infileList)
    # to be used if DataManagement step in EvnDisp3UserJob is commented
    job.setOutputData(['./*evndisp.tar.gz'])
    # job.setJobGroup( 'EvnDisp-proton' )
    job.setName('evndispjob')
    job.setOutputSandbox(['*Log.txt'])
    # job.setInputSandbox( ['myconf'] )
    res = dirac.submit(job)
    # BUGFIX: only read res['Value'] on success — failed submissions carry
    # 'Message' instead, and the original raised KeyError in that case.
    if res['OK']:
        Script.gLogger.info('Submission Result: ', res['Value'])
    else:
        Script.gLogger.error('Submission failed: ', res['Message'])
    return res
def submitJob(jobPara): dirac = Dirac() j = Job() j.setName(jobPara['jobName']) j.setJobGroup(jobPara['jobGroup']) j.setExecutable(jobPara['jobScript'], logFile = jobPara['jobScriptLog']) j.setInputSandbox(jobPara['inputSandbox']) j.setOutputSandbox(jobPara['outputSandbox']) j.setOutputData(jobPara['outputData'], jobPara['SE']) j.setDestination(jobPara['sites']) j.setCPUTime(jobPara['CPUTime']) result = dirac.submit(j) if result['OK']: print 'Job %s submitted successfully. ID = %d' %(jobPara['jobName'],result['Value']) else: print 'Job %s submitted failed' %jobPara['jobName'] return result
def submit_wms(job):
    """ Submit the job to the WMS

    @todo launch job locally
    """
    base_path = '/vo.cta.in2p3.fr/MC/PROD4/LaPalma/gamma/corsika/1897/Data/000xxx'
    # two fixed corsika input files for this production test
    input_data = ['%s/run1_gamma_za20deg_South-lapalma-lstmagic.corsika.zst' % base_path,
                  '%s/run3_gamma_za20deg_South-lapalma-lstmagic.corsika.zst' % base_path]
    job.setInputData(input_data)
    job.setJobGroup('Prod4SimtelLSTMagicJob')
    result = Dirac().submit(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
def submit_wms(job):
    """ Submit the job to the WMS

    @todo launch job locally
    """
    base_path = '/vo.cta.in2p3.fr/user/b/bregeon/Paranal/proton/simtel/0000/Data/000xxx'
    # single fixed input file for this EvnDisp test
    input_data = ['%s/proton_20deg_0deg_tid123___cta-prod4-sst-1m_desert-2150m-Paranal-sst-1m_data.tar' % base_path]
    job.setInputData(input_data)
    job.setJobGroup('EvnDisp4SSTJob')
    result = Dirac().submit(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
def submitJob(jobPara): dirac = Dirac() j = Job() j.setName(jobPara['jobName']) j.setJobGroup(jobPara['jobGroup']) j.setExecutable(jobPara['jobScript'], logFile=jobPara['jobScriptLog']) j.setInputSandbox(jobPara['inputSandbox']) j.setOutputSandbox(jobPara['outputSandbox']) j.setOutputData(jobPara['outputData'], jobPara['SE']) j.setDestination(jobPara['sites']) j.setCPUTime(jobPara['CPUTime']) result = dirac.submit(j) if result['OK']: print 'Job %s submitted successfully. ID = %d' % (jobPara['jobName'], result['Value']) else: print 'Job %s submitted failed' % jobPara['jobName'] return result
def runProd3(args=None):
    """ Simple wrapper to create a Prod3MCUserJob and setup parameters from
    positional arguments given on the command line.

    Parameters:
    args -- a list of 3 strings corresponding to job arguments
            runMin runMax input_card
    """
    run_min = int(args[0])
    run_max = int(args[1])
    input_card = args[2]

    # Create the Prod3 user job; package version and corsika input card must be
    # set before setupWorkflow builds the executable steps.
    job = Prod3MCUserJob()
    job.setPackage('corsika_simhessarray')
    job.setVersion('2015-10-20-p3')
    job.setInputCard(input_card)
    job.setupWorkflow()

    # one parametric sub-job per run number in [runMin, runMax]
    job.setParameterSequence('run', [str(r) for r in range(run_min, run_max + 1)])

    # job attributes
    job.setName('corsika')
    job.setInputSandbox([input_card])
    job.setOutputSandbox(['*Log.txt'])
    job.setOutputData(['*corsika.gz'])

    # submit and log for debugging
    dirac = Dirac()
    res = dirac.submit(job)
    Script.gLogger.info('Submission Result: ', res)
    Script.gLogger.info(job.workflow)
    return res
def submitWMS(args):
    """ Build and submit a mandelbrot job; args[0] is the first line parameter. """
    first_line = args[0]

    dirac = Dirac()
    job = Job()
    job.setName('mandelbrot')
    # fetch the tool, then run it with the requested starting line
    job.setExecutable('git clone https://github.com/bregeon/mandel4ts.git')
    job.setExecutable('./mandel4ts/mandelbrot.py',
                      arguments="-P 0.0005 -M 1000 -L %s -N 200" % first_line)
    job.setOutputData(['data_*.bmp', 'data*.txt'])

    return dirac.submit(job)
def submit(self, param):
    """ Build a Job from the param dict and submit it through GridDirac.

    Returns a status dict with 'submit' (bool) and, on success, 'job_id'. """
    job = Job()
    job.setName(param['jobName'])
    job.setExecutable(param['jobScript'], logFile=param['jobScriptLog'])
    # destination / group are optional instance-level settings
    if self.site:
        job.setDestination(self.site)
    if self.jobGroup:
        job.setJobGroup(self.jobGroup)
    job.setInputSandbox(param['inputSandbox'])
    job.setOutputSandbox(param['outputSandbox'])
    job.setOutputData(param['outputData'], outputSE=self.outputSE, outputPath=self.outputPath)

    result = GridDirac().submit(job)
    status = {'submit': result['OK']}
    if status['submit']:
        status['job_id'] = result['Value']
    return status
def submit_wms(job):
    """ Submit the job to the WMS

    @todo launch job locally
    """
    base_path = '/vo.cta.in2p3.fr/MC/PROD3/LaPalma/proton/simtel/1602/Data/000xxx'
    # three fixed proton simtel runs for this test
    input_data = ['%s/proton_20deg_0deg_run100___cta-prod3-demo-2147m-LaPalma-baseline.simtel.gz' % base_path,
                  '%s/proton_20deg_0deg_run101___cta-prod3-demo-2147m-LaPalma-baseline.simtel.gz' % base_path,
                  '%s/proton_20deg_0deg_run102___cta-prod3-demo-2147m-LaPalma-baseline.simtel.gz' % base_path]
    job.setInputData(input_data)
    job.setJobGroup('DL1DataHandlerJob')
    result = Dirac().submit(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
def submit(self, param):
    """ Assemble a Job from param and submit it via GridDirac.

    :param param: dict describing the job (name, script, sandboxes, output data)
    :return: dict with 'submit' flag and 'job_id' when submission succeeded
    """
    job = Job()
    job.setName(param['jobName'])
    job.setExecutable(param['jobScript'], logFile=param['jobScriptLog'])
    if self.site:
        job.setDestination(self.site)
    if self.jobGroup:
        job.setJobGroup(self.jobGroup)
    job.setInputSandbox(param['inputSandbox'])
    job.setOutputSandbox(param['outputSandbox'])
    job.setOutputData(param['outputData'],
                      outputSE=self.outputSE,
                      outputPath=self.outputPath)

    result = GridDirac().submit(job)
    status = {}
    status['submit'] = result['OK']
    if status['submit']:
        status['job_id'] = result['Value']
    return status
def submitWMS(job, infileList):
    """ Submit the job to the WMS """
    # one sub-job per input file
    job.setParameterSequence('InputData', infileList, addToWorkflow='ParametricInputData')
    # to be used if DataManagement step in EvnDisp3UserJob is commented
    job.setOutputData(['./*evndisp.tar.gz'], outputPath='evndisp_data')
    # job.setJobGroup( 'EvnDisp-proton' )
    job.setName('evndispjob')
    job.setOutputSandbox(['*Log.txt'])
    # job.setInputSandbox( ['myconf'] )
    # To allow jobs run at other sites than the site where the InputData are located
    # job.setType( 'DataReprocessing' )
    result = Dirac().submit(job)
    if result['OK']:
        Script.gLogger.info('Submission Result: ', result['Value'])
    return result
dexit(1) sites = result[ 'Value' ] j.setDestination(sites) if not opts.stagein is None: input_stage_files = [] # we do add. input staging files = opts.stagein.split(",") for f in files: if f.startswith("LFN"): input_stage_files.append(f) else: input_stage_files+=extract_file(f) for f in input_stage_files: if not f.startswith("LFN"): gLogger.error("*ERROR* required inputfiles to be defined through LFN, could not find LFN in %s"%f) dexit(1) j.setInputData(input_stage_files) if opts.debug: gLogger.notice('*DEBUG* just showing the JDL of the job to be submitted') gLogger.notice(j._toJDL()) d = Dirac(True,"myRepo.rep") res = d.submit(j) if not res['OK']: gLogger.error("Error during Job Submission ",res['Message']) dexit(1) JobID = res['Value'] gLogger.notice("Your job %s (\"%s\") has been submitted."%(str(JobID),executable))
input_stage_files = [] if pipeline: xrd_keytab = op.getValue("Pipeline/XrdKey", None) if not xrd_keytab: gLogger.notice("*DEBUG* adding XrdKey file %s to input" % xrd_keytab) input_stage_files.append(xrd_keytab) if not opts.stagein is None: # we do add. input staging files = opts.stagein.split(",") for f in files: input_stage_files.append(f) if len(input_stage_files): if len(input_stage_files) == 1: j.setInputData(input_stage_files[-1]) else: j.setInputData(input_stage_files) if opts.debug: gLogger.notice( '*DEBUG* just showing the JDL of the job to be submitted') gLogger.notice(j._toJDL()) d = Dirac(True, "myRepo.rep") res = d.submit(j) if not res['OK']: gLogger.error("Error during Job Submission ", res['Message']) dexit(1) JobID = res['Value'] gLogger.notice("Your job %s (\"%s\") has been submitted." % (str(JobID), executable))
# dirac job created by ganga from DIRAC.Interfaces.API.Job import Job from DIRAC.Interfaces.API.Dirac import Dirac j = Job() dirac = Dirac() # default commands added by ganga j.setName("helloWorld-test") j.setInputSandbox( ['/afs/cern.ch/user/f/fstagni/userJobs/_inputHello.tar.bz2', '/afs/cern.ch/user/f/fstagni/userJobs/hello-script.py'] ) j.setExecutable("exe-script.py","","Ganga_Executable.log") # <-- user settings j.setCPUTime(172800) j.setBannedSites(['LCG.CERN.ch', 'LCG.CNAF.it', 'LCG.GRIDKA.de', 'LCG.IN2P3.fr', 'LCG.NIKHEF.nl', 'LCG.PIC.es', 'LCG.RAL.uk', 'LCG.SARA.nl']) # user settings --> #print j.workflow # submit the job to dirac result = dirac.submit(j) print result
class DiracDaemon(Daemonize):
    """Dirac Daemon: exposes a Dirac API instance over an XML-RPC server."""

    def __init__(self, address, **kwargs):
        """Initialise.

        Args:
            address: (host, port) tuple the XML-RPC server will bind to.
            **kwargs: forwarded to the Daemonize base class.
        """
        super(DiracDaemon, self).__init__(action=self.main, **kwargs)
        self._address = address
        self._dirac_api = Dirac()

    def main(self):
        """Daemon main."""
        # Defer creation of server to inside the daemon context otherwise the socket will be
        # closed when daemonising
        dirac_server = SimpleXMLRPCServer(self._address)
        dirac_server.register_introspection_functions()
        dirac_server.register_instance(self._dirac_api)
        # override Dirac().status to make sure that the keys are strings.
        dirac_server.register_function(self.status)
        dirac_server.register_function(self.submit_job)
        dirac_server.serve_forever()

    def status(self, ids):
        """
        Return the status of Dirac jobs with ids.

        This method will essentially be overriding Dirac().status to ensure that the
        dict keys which are the ids of the jobs are cast to strings such that they
        can be sent over the xmlrpc socket.
        """
        # .iteritems() => this module runs under Python 2
        return { str(k): v for k, v in
                 self._dirac_api.status(ids).get("Value", {}).iteritems() }

    def submit_job(self, request_id, executable, macro,
                   starting_seed=8000000, njobs=10, platform='ANY',
                   output_data_site='UKI-LT2-IC-HEP-disk',
                   output_log='lzproduction_output.log'):
        """
        Submit LZProduction job to DIRAC.

        Args:
            request_id (int): The id number of the associated request
            executable (str): The full path to the executable job script
            macro (str): The full path to the macro for this job
            starting_seed (int): The random seed for the first of the parametric jobs
            njobs (int): The number of parametric jobs to create
            platform (str): The required platform
            output_data_site (str): The name of the grid site to store the output data at
            output_log (str): The file name for the output log file

        Returns:
            list: The list of created parametric job DIRAC ids
        """
        j = Job()
        # '%(args)s' is substituted per parametric sub-job with the seed value
        j.setName(os.path.splitext(os.path.basename(macro))[0] + '%(args)s')
        j.setExecutable(os.path.basename(executable),
                        os.path.basename(macro) + ' %(args)s',
                        output_log)
        j.setInputSandbox([executable, macro])
        j.setOutputData('*.root', output_data_site, str(request_id))
        # one sub-job per seed in [starting_seed, starting_seed + njobs)
        j.setParameterSequence("args",
                               [str(i) for i in xrange(starting_seed, starting_seed + njobs)],
                               addToWorkflow=True)
        j.setPlatform(platform)
        return self.status(self._dirac_api.submit(j).get("Value", []))
# Integration-test script: submits hello-world jobs to the DIRAC.Jenkins.ch site.
gLogger.setLevel( 'DEBUG' )

cwd = os.path.realpath( '.' )

dirac = Dirac()

# Simple Hello Word job to DIRAC.Jenkins.ch
gLogger.info( "\n Submitting hello world job targeting DIRAC.Jenkins.ch" )
helloJ = Job()
helloJ.setName( "helloWorld-TEST-TO-Jenkins" )
helloJ.setInputSandbox( [find_all( 'exe-script.py', '..', '/DIRAC/tests/Workflow/' )[0]] )
helloJ.setExecutable( "exe-script.py", "", "helloWorld.log" )
helloJ.setCPUTime( 17800 )
helloJ.setDestination( 'DIRAC.Jenkins.ch' )
result = dirac.submit( helloJ )
gLogger.info( "Hello world job: ", result )
if not result['OK']:
  gLogger.error( "Problem submitting job", result['Message'] )
  exit( 1 )

# Simple Hello Word job to DIRAC.Jenkins.ch, that needs to be matched by a MP WN
gLogger.info( "\n Submitting hello world job targeting DIRAC.Jenkins.ch and a MP WN" )
helloJMP = Job()
helloJMP.setName( "helloWorld-TEST-TO-Jenkins-MP" )
helloJMP.setInputSandbox( [find_all( 'exe-script.py', '..', '/DIRAC/tests/Workflow/' )[0]] )
helloJMP.setExecutable( "exe-script.py", "", "helloWorld.log" )
helloJMP.setCPUTime( 17800 )
helloJMP.setDestination( 'DIRAC.Jenkins.ch' )
# require a multi-processor worker node for this variant
helloJMP.setTag('MultiProcessor')
result = dirac.submit( helloJMP ) # this should make the difference!
class CEBaseTest( TestBase ): """ CEBaseTest is base class for all the CE test classes. Real CE test should implement its _judge method. """ def __init__( self, args = None, apis = None ): super( CEBaseTest, self ).__init__( args, apis ) self.timeout = self.args.get( 'timeout', 1800 ) self.vo = self.args.get( 'VO' ) self.testType = self.args[ 'TestType' ] self.executable = self.args[ 'executable' ] self.__logPath = '/opt/dirac/pro/BESDIRAC/ResourceStatusSystem/SAM/log' self.__scriptPath = '/opt/dirac/pro/BESDIRAC/ResourceStatusSystem/SAM/sam_script' if 'WMSAdministrator' in self.apis: self.wmsAdmin = self.apis[ 'WMSAdministrator' ] else: self.wmsAdmin = RPCClient( 'WorkloadManagement/WMSAdministrator' ) if 'Dirac' in self.apis: self.dirac = self.apis[ 'Dirac' ] else: self.dirac = Dirac() def doTest( self, elementDict ): """ submit test job to the specified ce or cloud.. """ elementName = elementDict[ 'ElementName' ] elementType = elementDict[ 'ElementType' ] vos = elementDict[ 'VO' ] site = None; ce = None if elementType == 'ComputingElement': ce = elementName if elementType == 'CLOUD': site = elementName if self.vo: submitVO = self.vo elif vos: submitVO = vos[ 0 ] else: submitVO = 'bes' submissionTime = datetime.utcnow().replace( microsecond = 0 ) sendRes = self.__submit( site, ce, submitVO ) if not sendRes[ 'OK' ]: return sendRes jobID = sendRes[ 'Value' ] result = { 'Result' : { 'JobID' : jobID, 'VO' : submitVO, 'SubmissionTime' : submissionTime }, 'Finish' : False } return S_OK( result ) def __submit( self, site, CE, vo ): """ set the job and submit. 
""" job = Job() job.setName( self.testType ) job.setJobGroup( 'CE-Test' ) job.setExecutable( self.executable ) job.setInputSandbox( '%s/%s' % ( self.__scriptPath, self.executable ) ) if site and not CE: job.setDestination( site ) if CE: job.setDestinationCE( CE ) LOCK.acquire() proxyPath = BESUtils.getProxyByVO( 'zhangxm', vo ) if not proxyPath[ 'OK' ]: LOCK.release() return proxyPath proxyPath = proxyPath[ 'Value' ] oldProxy = os.environ.get( 'X509_USER_PROXY' ) os.environ[ 'X509_USER_PROXY' ] = proxyPath result = self.dirac.submit( job ) if oldProxy is None: del os.environ[ 'X509_USER_PROXY' ] else: os.environ[ 'X509_USER_PROXY' ] = oldProxy LOCK.release() return result def getTestResult( self, elementName, vo, jobID, submissionTime ): """ download output sandbox and judge the test status from the log file. """ isFinish = False res = self.__getJobOutput( jobID, vo ) if not res[ 'OK' ]: return res output = res[ 'Value' ] status = res[ 'Status' ] resDict = { 'CompletionTime' : None, 'Status' : None, 'Log' : None, 'ApplicationTime' : None } utcNow = datetime.utcnow().replace( microsecond = 0 ) if output: isFinish = True resDict[ 'CompletionTime' ] = utcNow log = output[ 'Log' ] if not output[ 'Download' ]: resDict[ 'Status' ] = 'Unknown' resDict[ 'Log' ] = 'Fail to download log file for job %s: %s' % ( jobID, log ) else: resDict[ 'Log' ] = log resDict[ 'Status' ] = self._judge( log ) resDict[ 'AppliactionTime' ] = self.__getAppRunningTime( log ) else: if utcNow - submissionTime >= timedelta( seconds = self.timeout ): isFinish = True if elementName.split( '.' 
)[ 0 ] == 'CLOUD': site = elementName else: site = BESUtils.getSiteForCE( elementName ) jobCount = self.wmsAdmin.getSiteSummaryWeb( { 'Site' : site }, [], 0, 0 ) if not jobCount[ 'OK' ]: return jobCount params = jobCount[ 'Value' ][ 'ParameterNames' ] records = jobCount[ 'Value' ][ 'Records' ][ 0 ] run = records[ params.index( 'Running' ) ] done = records[ params.index( 'Done' ) ] if status == 'Waiting' and run == 0 and done == 0: resDict[ 'Status' ] = 'Bad' resDict[ 'Log' ] = 'The test job is waiting for %d seconds, but no running and done jobs at this site.' % self.timeout else: if run != 0: resDict[ 'Status' ] = 'Busy' resDict[ 'Log' ] = 'Site %s is too busy to execute this test job, job status is %s' % ( site, status ) else: resDict[ 'Status' ] = 'Unknown' resDict[ 'Log' ] = 'Test did not complete within the timeout of %d seconds, job status is %s' % ( self.timeout, status ) self.dirac.kill( jobID ) if not isFinish: return S_OK() else: return S_OK( resDict ) def __getJobOutput( self, jobID, vo ): status = self.dirac.status( jobID ) if not status[ 'OK' ]: return status status = status[ 'Value' ][ jobID ][ 'Status' ] if status in ( 'Done', 'Failed' ): LOCK.acquire() proxyPath = BESUtils.getProxyByVO( 'zhangxm', vo ) if not proxyPath[ 'OK' ]: LOCK.release() return proxyPath proxyPath = proxyPath[ 'Value' ] oldProxy = os.environ.get( 'X509_USER_PROXY' ) os.environ[ 'X509_USER_PROXY' ] = proxyPath outputRes = self.dirac.getOutputSandbox( jobID, self.__logPath ) if oldProxy is None: del os.environ[ 'X509_USER_PROXY' ] else: os.environ[ 'X509_USER_PROXY' ] = oldProxy LOCK.release() if not outputRes[ 'OK' ]: ret = S_OK( { 'Download' : False, 'Log' : outputRes[ 'Message' ] } ) else: try: logfile = open( '%s/%d/Script1_CodeOutput.log' % ( self.__logPath, jobID ), 'r' ) log = logfile.read() logfile.close() except IOError, e: raise IOError os.system( 'rm -rf %s/%d' % ( self.__logPath, jobID ) ) ret = S_OK( { 'Download' : True, 'Log' : log } ) else:
print print("OutputData: {}{}".format(output_path, output_filename_wave)) print("OutputData: {}{}".format(output_path, output_filename_tail)) j.setOutputData([output_filename_wave, output_filename_tail], outputSE=None, outputPath=output_path) # check if we should somehow stop doing what we are doing if "dry" in sys.argv: print("\nrunning dry -- not submitting") exit() # this sends the job to the GRID and uploads all the # files into the input sandbox in the process print("\nsubmitting job") print('Submission Result: {}\n'.format(dirac.submit(j)['Value'])) # break if this is only a test submission if "test" in sys.argv: print("test run -- only submitting one job") exit() try: os.remove("datapipe.tar.gz") os.remove("tino_cta.tar.gz") except: pass print("\nall done -- exiting now") exit()
##################### if (DO_NOT_SUBMIT): sys.exit(os.EX_USAGE) ### ALWAYS, INFO, VERBOSE, WARN, DEBUG j.setLogLevel('debug') j.setDestination(site_dirac) JOB_IDX = first_job + 1 + idx JOB_NAME = PROD_NAME + " IDX_" + str(JOB_IDX) print '\nJOB NAME is : ', JOB_NAME j.setName(JOB_NAME) j.setCPUTime(JOB_CPUTIME) ## 4 days run_corsika_sim_args = input_file_base + " " + corsika_version + " " + corsika_bin j.setExecutable( './run_corsika_sim', arguments = run_corsika_sim_args, logFile='run_sim.log') if (TEST_JOB) : jobID = dirac.submit(j,mode='local') else : jobID = dirac.submit(j) id = str(jobID) + "\n" print 'Submission Result: ',jobID with open('jobids.list', 'a') as f_id_log: f_id_log.write(id + '\n')
print("OutputData: {}{}".format(output_path, output_filenames[mode])) #print("OutputData: {}{}".format(output_path, output_filename_wave)) #print("OutputData: {}{}".format(output_path, output_filename_tail)) #j.setOutputData([output_filename_wave, output_filename_tail], j.setOutputData(outputs, outputSE=None, outputPath=output_path) # check if we should somehow stop doing what we are doing if "dry" in sys.argv: print("\nrunning dry -- not submitting") break # this sends the job to the GRID and uploads all the # files into the input sandbox in the process print("\nsubmitting job") print('Submission Result: {}\n'.format(dirac.submit(j)['Value'])) # break if this is only a test submission if "test" in sys.argv: print("test run -- only submitting one job") break # since there are two nested loops, need to break again if "dry" in sys.argv or "test" in sys.argv: break try: os.remove("datapipe.tar.gz") os.remove("modules.tar.gz") except: pass
print "Usage %s <scriptName> <jobName> <nbJobs>"%sys.argv[0] sys.exit(1) scriptName = sys.argv[1] jobName = sys.argv[2] nbJobs = int(sys.argv[3]) if not os.path.exists(jobName): os.makedirs(jobName) os.makedirs("%s/Done"%jobName) os.makedirs("%s/Failed"%jobName) else: print "Folder %s exists"%jobName sys.exit(1) f = open("%s/jobIdList.txt"%jobName, 'w') for i in xrange(nbJobs): j = Job() j.setCPUTime(10000) j.setExecutable(scriptName) j.addToOutputSandbox.append('myLog.txt') j.addToOutputSandbox.append('clock.txt') j.addToOutputSandbox.append('time.txt') dirac = Dirac() jobID = dirac.submit(j) realId = jobID.get('JobID') f.write("%s\n"%realId) f.close()
def main(dataset, chunksize, test):
    '''
    The DATASET argument is a list of paths to MC files on the grid. Like the output
    of cta-prod3-dump-dataset for example. See also
    https://forge.in2p3.fr/projects/cta_dirac/wiki/CTA-DIRAC_MC_PROD3_Status
    Keep in mind that for some effing reason this needs to be executed within this
    weird 'dirac' environment which comes with its own glibc, python and pip.
    I guess the real Mr. Dirac would turn in his grave.
    '''
    dirac = Dirac()

    with open(dataset) as f:
        simtel_files = f.readlines()
        print('Analysing {}'.format(len(simtel_files)))

    # NOTE(review): server_list and desy_server appear unused in this function —
    # probably leftovers; verify before removing.
    server_list = [
        "TORINO-USER", "CYF-STORM-USER", "CYF-STORM-Disk", "M3PEC-Disk",
        "OBSPM-Disk", "POLGRID-Disk", "FRASCATI-USER", "LAL-Disk",
        "CIEMAT-Disk", "CIEMAT-USER", "CPPM-Disk", "LAL-USER",
        "CYFRONET-Disk", "DESY-ZN-USER", "M3PEC-USER", "LPNHE-Disk",
        "LPNHE-USER", "LAPP-USER", "LAPP-Disk"
    ]
    desy_server = 'DESY-ZN-USER'

    servers_with_miniconda = [
        'LCG.IN2P3-CC.fr', 'LCG.DESY-ZEUTHEN.de', 'LCG.CNAF.it',
        'LCG.GRIF.fr', 'LCG.CYFRONET.pl', 'LCG.Prague.cz', 'LCG.CIEMAT.es'
    ]

    # split the file list into roughly equal chunks of ~chunksize files each
    chunks = np.array_split(sorted(simtel_files),
                            int(len(simtel_files) / chunksize))
    print('Got a total of {} chunks'.format(len(chunks)))
    for c, simtel_filenames in tqdm(enumerate(
            chunks[0:2])):  # send just 2 jobs for now.
        # convert chunk to a list of strings because this dirac thing
        # can't take numpy arrays; SCT files are filtered out
        simtel_filenames = [
            str(s).strip() for s in simtel_filenames if 'SCT' not in s
        ]
        print('Starting processing for chunk {}'.format(c))
        print(simtel_filenames)
        j = Job()
        # set runtime to 0.5h
        j.setCPUTime(30 * 60)
        j.setName('cta_preprocessing_{}'.format(c))
        j.setInputData(simtel_filenames)
        j.setOutputData(['./processing_output/*.hdf5'],
                        outputSE=None,
                        outputPath='cta_preprocessing/')
        j.setInputSandbox(
            ['../process_simtel.py', './install_dependencies.py'])
        j.setOutputSandbox(['cta_preprocessing.log'])
        j.setExecutable('./job_script.sh')
        # These servers seem to have mini conda installed
        # destination = np.random.choice(servers_with_miniconda)
        j.setDestination(servers_with_miniconda)
        value = dirac.submit(j)
        print('Number {} Submission Result: {}'.format(c, value))
# Integration-test script: submits hello-world jobs to the DIRAC.Jenkins.ch site.
gLogger.setLevel('DEBUG')

cwd = os.path.realpath('.')

dirac = Dirac()

# Simple Hello Word job to DIRAC.Jenkins.ch
gLogger.info("\n Submitting hello world job targeting DIRAC.Jenkins.ch")
helloJ = Job()
helloJ.setName("helloWorld-TEST-TO-Jenkins")
helloJ.setInputSandbox(
    [find_all('exe-script.py', '..', '/DIRAC/tests/Workflow/')[0]])
helloJ.setExecutable("exe-script.py", "", "helloWorld.log")
helloJ.setCPUTime(17800)
helloJ.setDestination('DIRAC.Jenkins.ch')
result = dirac.submit(helloJ)
gLogger.info("Hello world job: ", result)
if not result['OK']:
  gLogger.error("Problem submitting job", result['Message'])
  exit(1)

# Simple Hello Word job to DIRAC.Jenkins.ch, that needs to be matched by a MP WN
gLogger.info(
    "\n Submitting hello world job targeting DIRAC.Jenkins.ch and a MP WN")
helloJMP = Job()
helloJMP.setName("helloWorld-TEST-TO-Jenkins-MP")
helloJMP.setInputSandbox(
    [find_all('exe-script.py', '..', '/DIRAC/tests/Workflow/')[0]])
helloJMP.setExecutable("exe-script.py", "", "helloWorld.log")
helloJMP.setCPUTime(17800)
helloJMP.setDestination('DIRAC.Jenkins.ch')