def submitProbeJobs(self, ce):
    """ Submit a probe job to the given CE
    """

    # need credentials; they should be there since initialize()
    from DIRAC import S_OK
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job
    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
    import DIRAC

    d = Dirac()
    ops = Operations()
    scriptname = ops.getValue("ResourceStatus/SofwareManagementScript", self.script)

    j = Job()
    j.setDestinationCE(ce)
    j.setCPUTime(1000)
    j.setName("Probe %s" % ce)
    j.setJobGroup("SoftwareProbe")
    j.setExecutable("%s/GlastDIRAC/ResourceStatusSystem/Client/%s" % (DIRAC.rootPath, scriptname),
                    logFile='SoftwareProbe.log')
    j.setOutputSandbox('*.log')
    res = d.submit(j)
    if not res['OK']:
        return res

    return S_OK()
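
# A minimal usage sketch (not from the original source): assuming the method above lives
# on an agent-like class with `self.script` set, one could loop over a list of CE names
# and inspect the standard DIRAC S_OK/S_ERROR result dict returned for each submission.
# The helper name `probeAllCEs` and the idea of collecting failures are illustrative only.
def probeAllCEs(agent, ce_list):
    failed = {}
    for ce in ce_list:
        res = agent.submitProbeJobs(ce)
        if not res['OK']:
            # keep the DIRAC-style error message for later reporting
            failed[ce] = res.get('Message', 'unknown error')
    return failed
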
    def createJob(self):
        job = Job()
        job.setName(self.__stepName)
        job.setOutputSandbox(['*log'])

        job.setExecutable('/usr/bin/wget',
                          arguments='"{0}/{1}"'.format(URL_ROOT,
                                                       self.__executable))
        job.setExecutable('/bin/chmod',
                          arguments='+x "{0}"'.format(self.__executable))

        arguments = '"{0}" "{1}" "{2}" "{3}" "{4}" "{5}" @{{JOB_ID}}'.format(
            self.__softwareVersion, self.__application, self.__outputPath,
            self.__outputPattern, self.__outputSE, self.__outputMode)
        if self.__extraArgs:
            arguments += ' ' + self.__extraArgs
        job.setExecutable(self.__executable, arguments=arguments)

        # failover for failed jobs
        job.setExecutable('/bin/ls -l',
                          modulesList=['Script', 'FailoverRequest'])

        if self.__inputData:
            job.setInputData(self.__inputData)

        if self.__site:
            job.setDestination(self.__site)

        if self.__bannedsite:
            job.setBannedSites(self.__bannedsite)

        job.setOutputSandbox(['app.out', 'app.err', 'Script3_CodeOutput.log'])

        self.__job = job
    def submitProbeJobs(self, ce):
        """ Submit a probe job to the given CE
        """
        # need credentials; they should be there since initialize()
        from DIRAC import S_OK
        from DIRAC.Interfaces.API.Dirac import Dirac
        from DIRAC.Interfaces.API.Job import Job
        from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
        import os

        d = Dirac()
        ops = Operations("glast.org")
        scriptname = ops.getValue("ResourceStatus/SofwareManagementScript",
                                  self.script)

        j = Job()
        j.setDestinationCE(ce)
        j.setCPUTime(1000)
        j.setName("Probe %s" % ce)
        j.setJobGroup("SoftwareProbe")
        j.setExecutable("%s/GlastDIRAC/ResourceStatusSystem/Client/%s" %
                        (os.environ['DIRAC'], scriptname),
                        logFile='SoftwareProbe.log')
        j.setOutputSandbox('*.log')
        res = d.submit(j)
        if not res['OK']:
            return res

        return S_OK()
Example #4
def createWorkflowBodyStep2():
    job = Job()
    job.setName("merge mandelbrot")
    job.setOutputSandbox(["*log"])

    # define the job workflow in 3 steps
    # job step1: setup software
    job.setExecutable("git clone https://github.com/bregeon/mandel4ts.git")
    # job step2: run mandelbrot merge
    job.setExecutable("./mandel4ts/merge_data.py")
    # job step3: upload data and set metadata
    outputPath = os.path.join("/dirac/prodsys/mandelbrot/images/merged")
    outputPattern = "data_merged*txt"
    outputSE = "RAL-SE"
    nb_input_files = 7
    outputMetadata = json.dumps(
        {
            "application": "mandelbrot",
            "image_format": "ascii",
            "image_width": 7680,
            "image_height": 200 * nb_input_files,
        }
    )
    job.setExecutable(
        "./mandel4ts/dirac-add-files.py",
        arguments="%s '%s' %s '%s'" % (outputPath, outputPattern, outputSE, outputMetadata),
    )
    return job.workflow.toXML()
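
# A hedged sketch (assumptions noted inline) of how the workflow XML returned above is
# typically consumed: the same pattern appears in the Transformation examples further
# down this page, where the XML body is attached to a DIRAC Transformation. The
# transformation name and group size here are illustrative.
def createMergeTransformation():
    from DIRAC.TransformationSystem.Client.Transformation import Transformation

    t = Transformation()
    t.setTransformationName("mandelbrot_merge_example")  # assumed, must be unique
    t.setType("DataReprocessing")
    t.setDescription("Merge mandelbrot images")
    t.setLongDescription("Merge mandelbrot images")
    t.setGroupSize(7)  # matches nb_input_files used above
    t.setBody(createWorkflowBodyStep2())  # plug in the workflow XML as the body

    res = t.addTransformation()
    if res['OK']:
        t.setStatus("Active")
        t.setAgentType("Automatic")
    return res
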
Example #5
def createWorkflowBodyStep1():
    job = Job()
    job.setName("mandelbrot raw")
    job.setOutputSandbox(["*log"])
    # this is so that the JOB_ID within the transformation can be evaluated on the fly in the job application, see below
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False,
                  "Initialize JOB_ID"))
    # define the job workflow in 3 steps
    # job step1: setup software
    job.setExecutable("git clone https://github.com/bregeon/mandel4ts.git")
    # job step2: run mandelbrot application
    # note how the JOB_ID (within the transformation) is passed as an argument and will be evaluated on the fly
    job.setExecutable("./mandel4ts/mandelbrot.py",
                      arguments="-P 0.0005 -M 1000 -L @{JOB_ID} -N 200")
    # job step3: upload data and set metadata
    outputPath = os.path.join("/dirac/prodsys/mandelbrot/images/raw")
    outputPattern = "data_*txt"
    outputSE = "RAL-SE"
    outputMetadata = json.dumps({
        "application": "mandelbrot",
        "image_format": "ascii",
        "image_width": 7680,
        "image_height": 200
    })
    job.setExecutable(
        "./mandel4ts/dirac-add-files.py",
        arguments="%s '%s' %s '%s'" %
        (outputPath, outputPattern, outputSE, outputMetadata),
    )
    return job.workflow.toXML()
Example #6
def submit(name,
           job_group,
           task_id,
           input_sandbox,
           output_sandbox,
           executable,
           site=None,
           banned_site=None,
           sub_ids=[]):
    dirac = Dirac()

    submit_result = {'backend_job_ids': {}}
    jobInfos = {}

    for run in range(int((len(sub_ids) + 99) / 100)):
        ids_this_run = [x for x in sub_ids[run * 100:(run + 1) * 100]]
        job_names = ['%s.%s' % (name, sub_id) for sub_id in ids_this_run]
        j = Job()
        j.setName(name)
        j.setExecutable(executable)

        j.setParameterSequence('JobName', job_names, addToWorkflow=True)
        j.setParameterSequence('arguments', ids_this_run, addToWorkflow=True)

        if input_sandbox:
            j.setInputSandbox(input_sandbox)
        if output_sandbox:
            j.setOutputSandbox(output_sandbox)

        if job_group:
            j.setJobGroup(job_group)
        if site:  # set destination to a certain site; list not allowed
            j.setDestination(site)

        if banned_site:
            j.setBannedSites(banned_site)

        result = dirac.submitJob(j)

        if not result['OK']:
            sys.stdout.write('DIRAC job submit error: %s\n' %
                             result['Message'])
            sys.exit(1)

        for sub_id, dirac_id in zip(ids_this_run, result['Value']):
            submit_result['backend_job_ids'][sub_id] = dirac_id
            jobInfos[dirac_id] = {'SubID': sub_id}

    #Register on Task-manager Webapp of IHEPDIRAC
    task = RPCClient('WorkloadManagement/TaskManager')
    taskInfo = {'TaskName': name, 'JobGroup': job_group, 'JSUB-ID': task_id}
    task_result = task.createTask(name, taskInfo, jobInfos)
    task_web_id = task_result['Value']
    submit_result['backend_task_id'] = task_web_id

    return submit_result
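
# An illustrative call of the bulk-submission helper above (not from the original source).
# All concrete values -- task name, sandbox files, executable, sub-job ids -- are
# assumptions; the point is that `sub_ids` is split into chunks of 100 and each chunk
# becomes one parametric DIRAC job via setParameterSequence. Note that the final
# createTask() call needs the IHEPDIRAC TaskManager service to be reachable.
if __name__ == '__main__':
    result = submit(name='example_task',
                    job_group='example_group',
                    task_id=1,
                    input_sandbox=['job_script.sh'],
                    output_sandbox=['job.log'],
                    executable='job_script.sh',
                    site=None,
                    banned_site=None,
                    sub_ids=list(range(250)))  # -> 3 parametric jobs of up to 100 sub-jobs
    print(result['backend_job_ids'])
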
Example #7
def read_hessjob(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    if (len(args) != 1):
        Script.showHelp()

    version = args[0]

    user_script = './read_hess2dst.sh'

    sim_file = 'simtel_file.list'

    infileLFNList = [
        '/vo.cta.in2p3.fr/MC/PROD2/Config_310113/prod-2_21122012_corsika/gamma/prod-2_06052013_simtel_STD/Data/002xxx/gamma_20.0_180.0_alt2662.0_run002997.simtel.gz',
        '/vo.cta.in2p3.fr/MC/PROD2/Config_310113/prod-2_21122012_corsika/gamma/prod-2_06052013_simtel_STD/Data/002xxx/gamma_20.0_180.0_alt2662.0_run002998.simtel.gz'
    ]

    f = open(sim_file, 'w')

    for infileLFN in infileLFNList:
        filein = os.path.basename(infileLFN)
        f.write(filein)
        f.write('\n')

    f.close()

    j = Job()

    j.setInputData(infileLFNList)

    options = []
    options = [sim_file]

    executablestr = "%s %s %s" % (version, user_script, ' '.join(options))

    j.setExecutable('./cta-read_hess.py', executablestr)

    j.setInputSandbox(['cta-read_hess.py', user_script, sim_file])

    j.setOutputSandbox(['read_hess.log'])

    j.setOutputData(['*dst.gz'])

    j.setName(user_script)

    j.setCPUTime(100000)

    Script.gLogger.info(j._toJDL())

    Dirac().submit(j)
    def do_installonsite(self, argss):
        """ Install a release on a grid site : 
            installonsite tag site
        """
        args = argss.split()
        if len(args) < 2:
            print self.do_installonsite.__doc__
            return
        tag = args[0]
        site = args[1]

        #print "Check if the software with the tag '"+tag+"' exists on the rsync server..."
        #res = self.client.getSitesForTag(tag)
        #if not res['OK']:
        #print res['Message']
        #return
        #print "tag found !"

        from DIRAC.Interfaces.API.Dirac import Dirac
        d = Dirac()
        from DIRAC.Interfaces.API.Job import Job

        from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
        import os

        ops = Operations()
        scriptname = "InstallSoftware.py"
        j = Job()
        j.setDestination(site)
        j.setCPUTime(1000)
        j.setName("Installation " + tag)
        j.setExecutable(os.environ['DIRAC'] +
                        "/GlastDIRAC/ResourceStatusSystem/Client/externals/" +
                        scriptname,
                        logFile='SoftwareInstallation.log')
        j.setOutputSandbox('*.log')
        res = d.submit(j)
        if not res['OK']:
            print "Could not submit the installation at site %s, message %s" % (
                site, res['Message'])
            return

        print "Job submitted, id = " + str(res['Value'])

        print "Add tag :"
        res = self.client.addTagAtSite(tag, site)
        if not res['OK']:
            print "Could not register tag %s at site %s, message %s" % (
                tag, site, res['Message'])
            return
        print "Added %s to %i CEs" % (tag, len(res['Value'][tag]))
Example #9
def submitJob(jobPara):
    dirac = Dirac()
    j = Job()
    j.setName(jobPara['jobName'])
    j.setJobGroup(jobPara['jobGroup'])
    j.setExecutable(jobPara['jobScript'], logFile = jobPara['jobScriptLog'])
    j.setInputSandbox(jobPara['inputSandbox'])
    j.setOutputSandbox(jobPara['outputSandbox'])
    j.setOutputData(jobPara['outputData'], jobPara['SE'])
    j.setDestination(jobPara['sites'])
    j.setCPUTime(jobPara['CPUTime'])
    result = dirac.submit(j)
    if result['OK']:
        print 'Job %s submitted successfully. ID = %d' % (jobPara['jobName'], result['Value'])
    else:
        print 'Job %s submission failed' % jobPara['jobName']
    return result
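
# An illustrative jobPara dictionary for the helper above (not part of the original
# example). Every value -- script names, storage element, sites, CPU time -- is an
# assumption; the dictionary simply shows which keys submitJob() expects.
examplePara = {
    'jobName': 'example_job',
    'jobGroup': 'example_group',
    'jobScript': 'run_job.sh',
    'jobScriptLog': 'run_job.log',
    'inputSandbox': ['run_job.sh'],
    'outputSandbox': ['run_job.log', 'StdOut', 'StdErr'],
    'outputData': ['output.root'],
    'SE': 'SOME-USER-SE',          # assumed storage element name
    'sites': ['LCG.CERN.ch'],      # assumed destination site(s)
    'CPUTime': 86400,
}
# result = submitJob(examplePara)
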
    def do_installonsite(self, argss):
        """ Install a release on a grid site :
            installonsite tag site
        """
        args = argss.split()
        if len(args) < 2:
            print self.do_installonsite.__doc__
            return
        tag = args[0]
        site = args[1]

        #print "Check if the software with the tag '"+tag+"' exists on the rsync server..."
        #res = self.client.getSitesForTag(tag)
        #if not res['OK']:
        #print res['Message']
        #return
        #print "tag found !"

        from DIRAC.Interfaces.API.Dirac import Dirac
        d = Dirac()
        from DIRAC.Interfaces.API.Job import Job

        from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
        import os

        ops = Operations()
        scriptname = "InstallSoftware.py"
        j = Job()
        j.setDestination(site)
        j.setCPUTime(1000)
        j.setName("Installation " + tag)
        j.setExecutable(os.environ['DIRAC'] + "/GlastDIRAC/ResourceStatusSystem/Client/" + scriptname,
                        logFile='SoftwareInstallation.log')
        j.setOutputSandbox('*.log')
        res = d.submit(j)
        if not res['OK']:
            print "Could not submit the installation at site %s, message %s" % (site, res['Message'])
            return

        print "Job submitted, id = " + str(res['Value'])

        print "Add tag :"
        res = self.client.addTagAtSite(tag, site)
        if not res['OK']:
            print "Could not register tag %s at site %s, message %s" % (tag, site, res['Message'])
            return
        print "Added %s to %i CEs" % (tag, len(res['Value'][tag]))
Example #11
def submitJob(jobPara):
    dirac = Dirac()
    j = Job()
    j.setName(jobPara['jobName'])
    j.setJobGroup(jobPara['jobGroup'])
    j.setExecutable(jobPara['jobScript'], logFile=jobPara['jobScriptLog'])
    j.setInputSandbox(jobPara['inputSandbox'])
    j.setOutputSandbox(jobPara['outputSandbox'])
    j.setOutputData(jobPara['outputData'], jobPara['SE'])
    j.setDestination(jobPara['sites'])
    j.setCPUTime(jobPara['CPUTime'])
    result = dirac.submit(j)
    if result['OK']:
        print 'Job %s submitted successfully. ID = %d' % (jobPara['jobName'],
                                                          result['Value'])
    else:
        print 'Job %s submission failed' % jobPara['jobName']
    return result
Example #12
def simteljob(args = None ):

  from DIRAC.Interfaces.API.Dirac import Dirac
  from DIRAC.Interfaces.API.Job import Job

  if (len(args)!=1):
    Script.showHelp()

  version = args[0]

  user_script = './run_simtel.sh'
  
  infileLFNList = ['/vo.cta.in2p3.fr/MC/PROD2/Config_120213/prod-2_21122012_corsika/proton/Data/044xxx/proton_20.0_180.0_alt2662.0_run044019.corsika.gz',
'/vo.cta.in2p3.fr/MC/PROD2/Config_120213/prod-2_21122012_corsika/proton/Data/044xxx/proton_20.0_180.0_alt2662.0_run044085.corsika.gz']


  for infileLFN in infileLFNList:
    filein = os.path.basename(infileLFN)

    j = Job()

    j.setInputSandbox( ['cta-simtel.py', user_script] )  
    j.setInputData(infileLFN)
  
    user_args = []
    user_args = [filein]
  
    executablestr = "%s %s %s" % ( version, user_script, ' '.join( user_args ) )

    j.setExecutable('./cta-simtel.py', executablestr)

    sim_out = 'Data/sim_telarray/cta-ultra5/0.0deg/Data/*.simtel.gz'
    log_out = 'Data/sim_telarray/cta-ultra5/0.0deg/Log/*.log.gz'
    hist_out = 'Data/sim_telarray/cta-ultra5/0.0deg/Histograms/*.hdata.gz'
   
    j.setOutputData([sim_out,log_out,hist_out])
    j.setOutputSandbox('simtel.log')
    j.setName(user_script)
    j.setCPUTime(100000)

    Script.gLogger.info( j._toJDL() )

    Dirac().submit( j )
Example #13
 def basicTest(self):
     j = Job()
     j.setCPUTime(50000)
     j.setExecutable(
         '/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/myPythonScript.py'
     )
     # j.setExecutable('/bin/echo hello')
     j.setOwner('paterson')
     j.setType('test')
     j.setName('MyJobName')
     #j.setAncestorDepth(1)
     j.setInputSandbox([
         '/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/DV.opts',
         '/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/DV2.opts'
     ])
     j.setOutputSandbox(['firstfile.txt', 'anotherfile.root'])
     j.setInputData([
         '/lhcb/production/DC04/v2/DST/00000742_00003493_11.dst',
         '/lhcb/production/DC04/v2/DST/00000742_00003493_10.dst'
     ])
     j.setOutputData(['my.dst', 'myfile.log'])
     j.setDestination('LCG.CERN.ch')
     j.setPlatform('LCG')
     j.setSystemConfig('x86_64-slc5-gcc43-opt')
     j.setSoftwareTags(['VO-lhcb-Brunel-v30r17', 'VO-lhcb-Boole-v12r10'])
     #print j._toJDL()
     #print j.printObj()
     xml = j._toXML()
     testFile = 'jobDescription.xml'
     if os.path.exists(testFile):
         os.remove(testFile)
     xmlfile = open(testFile, 'w')
     xmlfile.write(xml)
     xmlfile.close()
     print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Creating code for the workflow'
     print j.createCode()
     print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Executing the workflow'
     j.execute()
     print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Trying to run the same workflow from generated XML file'
     workflow = fromXMLFile(testFile)
     code = workflow.createCode()
     print code
     workflow.execute()
Example #14
def launch_batch_pict(pitch_start, step, n_pict):

    j = Job()
    j.setCPUTime(500)
    j.setName('%s_%f' % (EXEC, pitch_start))
    j.setJobGroup(JOBGROUP)
    j.setInputSandbox([EXEC])
    out_bmp_list = []
    pitch = pitch_start

    for i in range(n_pict):
        out_bmp = 'out_%f.bmp' % pitch
        out_bmp_list.append(out_bmp)
        j.setExecutable(EXEC, arguments="-W 600 -H 600 -X -0.77568377 -Y -0.13646737 -P %f -M 500 %s" % (pitch, out_bmp))
        pitch += step

    j.setOutputSandbox(out_bmp_list + ["StdOut"] + ["StdErr"])
    result = dirac.submit(j)
    print 'Submission Result: ', result
    return result
Example #15
    def submit(self, param):        
        j = Job()
        j.setName(param['jobName'])
        j.setExecutable(param['jobScript'],logFile = param['jobScriptLog'])
        if self.site:
            j.setDestination(self.site)
        if self.jobGroup:
            j.setJobGroup(self.jobGroup)            
        j.setInputSandbox(param['inputSandbox'])
        j.setOutputSandbox(param['outputSandbox'])
        j.setOutputData(param['outputData'], outputSE = self.outputSE, outputPath = self.outputPath)

        dirac = GridDirac()
        result = dirac.submit(j)

        status = {}
        status['submit'] = result['OK']
        if status['submit']:
            status['job_id'] = result['Value']

        return status
Example #16
    def submit(self, param):
        j = Job()
        j.setName(param['jobName'])
        j.setExecutable(param['jobScript'], logFile=param['jobScriptLog'])
        if self.site:
            j.setDestination(self.site)
        if self.jobGroup:
            j.setJobGroup(self.jobGroup)
        j.setInputSandbox(param['inputSandbox'])
        j.setOutputSandbox(param['outputSandbox'])
        j.setOutputData(param['outputData'],
                        outputSE=self.outputSE,
                        outputPath=self.outputPath)

        dirac = GridDirac()
        result = dirac.submit(j)

        status = {}
        status['submit'] = result['OK']
        if status['submit']:
            status['job_id'] = result['Value']

        return status
Example #17
 def basicTest(self):
   j = Job()
   j.setCPUTime(50000)
   j.setExecutable('/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/myPythonScript.py')
  # j.setExecutable('/bin/echo hello')
   j.setOwner('paterson')
   j.setType('test')
   j.setName('MyJobName')
   #j.setAncestorDepth(1)
   j.setInputSandbox(['/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/DV.opts','/Users/stuart/dirac/workspace/DIRAC3/DIRAC/Interfaces/API/test/DV2.opts'])
   j.setOutputSandbox(['firstfile.txt','anotherfile.root'])
   j.setInputData(['/lhcb/production/DC04/v2/DST/00000742_00003493_11.dst',
                   '/lhcb/production/DC04/v2/DST/00000742_00003493_10.dst'])
   j.setOutputData(['my.dst','myfile.log'])
   j.setDestination('LCG.CERN.ch')
   j.setPlatform('LCG')
   j.setSystemConfig('x86_64-slc5-gcc43-opt')
   j.setSoftwareTags(['VO-lhcb-Brunel-v30r17','VO-lhcb-Boole-v12r10'])
   #print j._toJDL()
   #print j.printObj()
   xml = j._toXML()
   testFile = 'jobDescription.xml'
   if os.path.exists(testFile):
     os.remove(testFile)
   xmlfile = open(testFile,'w')
   xmlfile.write(xml)
   xmlfile.close()
   print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Creating code for the workflow'
   print j.createCode()
   print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Executing the workflow'
   j.execute()
   print '>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>Trying to run the same workflow from generated XML file'
   workflow = fromXMLFile(testFile)
   code = workflow.createCode()
   print code
   workflow.execute()
    numsensors = 1

    args = visit + ' ' + insidename + ' ' + str(startsensor) + ' ' + str(
        numsensors) + ' ' + str(idx)
    outputname = 'fits_' + visit + '_' + str(idx) + '.tar'

    j.setCPUTime(1209600)
    j.setExecutable('runimsim2.1.sh', arguments=args)
    j.stderr = "std.err"
    j.stdout = "std.out"
    #!!! May need the 2.1i directory here depending on visit number !!!
    j.setInputSandbox([
        "runimsim2.1.sh", "run_imsim_nersc.py",
        "LFN:/lsst/user/j/james.perry/instcats/2.1i/" + instcatname
    ])
    j.setOutputSandbox(["std.out", "std.err"])
    j.setTag(["8Processors"])
    #j.setOutputData([visit + "/" + outputname], outputPath="", outputSE=["IN2P3-CC-disk"])
    j.setOutputData([visit + "/" + outputname],
                    outputPath="",
                    outputSE=["UKI-NORTHGRID-LANCS-HEP-disk"])
    j.setPlatform("AnyPlatform")

    j.setDestination(site)

    jobID = dirac.submitJob(j)
    print("Submitted job to " + site + " as ID " + str(jobID))
    print "Status is:", dirac.status(jobID['JobID'])

    joblistfile.write(str(jobID['JobID']) + '\n')
Example #19
def submitTS():

    ########################################
    # Modify here with your dirac username
    owner = 'user02'
    ########################################

    ########################################
    # Job description
    ########################################
    job = Job()
    job.setName('mandelbrot raw')
    job.setOutputSandbox(['*log'])
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False,
                  "Initialize JOB_ID"))

    ## define the job workflow in 3 steps
    # job step1: setup software
    job.setExecutable('git clone https://github.com/bregeon/mandel4ts.git')
    # job step2: run mandelbrot application
    job.setExecutable('./mandel4ts/mandelbrot.py',
                      arguments="-P 0.0005 -M 1000 -L @{JOB_ID} -N 200")

    outputPath = os.path.join('/vo.france-grilles.fr/user', owner[0], owner,
                              'mandelbrot/images/raw')
    outputPattern = 'data_*txt'
    outputSE = 'DIRAC-USER'
    outputMetadata = json.dumps({
        "application": "mandelbrot",
        "image_format": "ascii",
        "image_width": 7680,
        "image_height": 200,
        "owner": owner
    })

    # job step3: upload data and set metadata
    job.setExecutable('./mandel4ts/dirac-add-files.py',
                      arguments="%s '%s' %s '%s'" %
                      (outputPath, outputPattern, outputSE, outputMetadata))

    ########################################
    # Transformation definition
    ########################################
    t = Transformation()

    t.setTransformationName(owner + '_step1')
    t.setType("MCSimulation")
    t.setDescription("Mandelbrot images production")
    t.setLongDescription("Mandelbrot images production")
    # set the job workflow to the transformation
    t.setBody(job.workflow.toXML())

    ########################################
    # Transformation submission
    ########################################
    res = t.addTransformation()

    if not res['OK']:
        print(res['Message'])
        DIRAC.exit(-1)

    t.setStatus("Active")
    t.setAgentType("Automatic")

    return res
    outputfiles.append("StdErr")

    ## The DIRAC job to submit.
    j = Job(stdout='StdOut', stderr='StdErr')

    # Set the name of the job (viewable in the web portal).
    j.setName(jobname)

    # As we're just copying the input sandbox to the storage element
    # via OutputData, we'll just list the files as a check for the
    # output written to StdOut.
    j.setExecutable('/bin/ls -l')

    # Here we add the names of the temporary copies of the frame data
    # files in the dataset to the input sandbox. These will be uploaded
    # to the grid with the job...
    j.setInputSandbox(retrieved_clusters)

    # These are the files retrieved with the local job output.
    j.setOutputSandbox(outputfiles)

    # You can set your preferred site here.
    j.setDestination(sitename)

    ## The DIRAC instance.
    dirac = Dirac()

#    # Submit the job and print the result.
#    result = dirac.submit(j)
#    print 'Submission result: ', result
Example #21
def submitTS():

    ########################################
    # Modify here with your dirac username
    owner = 'user02'
    ########################################

    ########################################
    # Job description
    ########################################
    job = Job()
    job.setName('merge mandelbrot')
    job.setOutputSandbox(['*log'])

    ## define the job workflow in 3 steps
    # job step1: setup software
    job.setExecutable('git clone https://github.com/bregeon/mandel4ts.git')
    # job step2: run mandelbrot merge
    job.setExecutable('./mandel4ts/merge_data.py')

    outputPath = os.path.join('/vo.france-grilles.fr/user', owner[0], owner,
                              'mandelbrot/images/merged')
    outputPattern = 'data_merged*txt'
    outputSE = 'DIRAC-USER'
    nb_input_files = 7
    outputMetadata = json.dumps({
        "application": "mandelbrot",
        "image_format": "ascii",
        "image_width": 7680,
        "image_height": 200 * nb_input_files,
        "owner": owner
    })

    # job step3: upload data and set metadata
    job.setExecutable('./mandel4ts/dirac-add-files.py',
                      arguments="%s '%s' %s '%s'" %
                      (outputPath, outputPattern, outputSE, outputMetadata))

    ########################################
    # Transformation definition
    ########################################
    t = Transformation()

    t.setTransformationName(owner + '_step2')
    t.setType("DataReprocessing")
    t.setDescription("Merge mandelbrot images production")
    t.setLongDescription("Merge mandelbrot images production")
    t.setGroupSize(nb_input_files)  # group input files
    # set the job workflow to the transformation
    t.setBody(job.workflow.toXML())

    # define input data by metadata query
    inputMetaquery = json.dumps({
        "application": "mandelbrot",
        "image_format": "ascii",
        "image_width": 7680,
        "image_height": 200,
        "owner": owner
    })
    t.setFileMask(inputMetaquery)

    ########################################
    # Transformation submission
    ########################################
    res = t.addTransformation()

    if not res['OK']:
        print(res['Message'])
        DIRAC.exit(-1)

    t.setStatus("Active")
    t.setAgentType("Automatic")

    return res
    # Set the name of the job (viewable in the web portal).
    j.setName(jobname)

    #
    j.setExecutable('/bin/sh', arguments='%s %s %s' % ('run.sh', '/cvmfs/cernatschool.gridpp.ac.uk/grid-klustering-001-00-07/', 'process-frames.py'))

    #
    j.setInputSandbox(inputfiles)

    #...and added to the desired storage element with the corresponding
    # LFN via the job's OutputData. You may wish to change:
    # * The Storage Element - by changing the outputSE parameter;
    # * The LFN base name   - by changing the outputPath parameter.
    j.setOutputData(kluster_file_names, \
                    outputSE='%s' % (se), \
                    outputPath='/%s/' % (gridoutdir)\
                   )

    # These are the files retrieved with the local job output.
    j.setOutputSandbox(['StdOut', 'StdErr', 'klusters.json', 'log_process_frames.log'])

    # You can set your preferred site here.
    j.setDestination(sitename)

    ## The DIRAC instance.
    dirac = Dirac()

#    # Submit the job and print the result.
#    result = dirac.submit(j)
#    print 'Submission result: ', result
def main(dataset, chunksize, test):
    '''
    The DATASET argument is a list of paths to MC files on the grid. Like the output of
    cta-prod3-dump-dataset for example. See also
    https://forge.in2p3.fr/projects/cta_dirac/wiki/CTA-DIRAC_MC_PROD3_Status

    Keep in mind that for some effing reason this needs to be executed within this weird 'dirac'
    environment which comes with its own glibc, python and pip. I guess the real Mr. Dirac would turn in his grave.

    '''
    dirac = Dirac()

    with open(dataset) as f:
        simtel_files = f.readlines()
        print('Analysing {} files'.format(len(simtel_files)))

    server_list = [
        "TORINO-USER", "CYF-STORM-USER", "CYF-STORM-Disk", "M3PEC-Disk",
        "OBSPM-Disk", "POLGRID-Disk", "FRASCATI-USER", "LAL-Disk",
        "CIEMAT-Disk", "CIEMAT-USER", "CPPM-Disk", "LAL-USER", "CYFRONET-Disk",
        "DESY-ZN-USER", "M3PEC-USER", "LPNHE-Disk", "LPNHE-USER", "LAPP-USER",
        "LAPP-Disk"
    ]
    desy_server = 'DESY-ZN-USER'

    servers_with_miniconda = [
        'LCG.IN2P3-CC.fr', 'LCG.DESY-ZEUTHEN.de', 'LCG.CNAF.it', 'LCG.GRIF.fr',
        'LCG.CYFRONET.pl', 'LCG.Prague.cz', 'LCG.CIEMAT.es'
    ]

    chunks = np.array_split(sorted(simtel_files),
                            int(len(simtel_files) / chunksize))

    print('Got a total of {} chunks'.format(len(chunks)))
    for c, simtel_filenames in tqdm(enumerate(
            chunks[0:2])):  # send just 2 jobs for now.
        # convert the chunk to a list of plain strings, because the DIRAC API can't take numpy arrays
        simtel_filenames = [
            str(s).strip() for s in simtel_filenames if 'SCT' not in s
        ]
        print('Starting processing for chunk {}'.format(c))
        print(simtel_filenames)
        j = Job()
        # set runtime to 0.5h
        j.setCPUTime(30 * 60)
        j.setName('cta_preprocessing_{}'.format(c))
        j.setInputData(simtel_filenames)
        j.setOutputData(['./processing_output/*.hdf5'],
                        outputSE=None,
                        outputPath='cta_preprocessing/')

        j.setInputSandbox(
            ['../process_simtel.py', './install_dependencies.py'])
        j.setOutputSandbox(['cta_preprocessing.log'])
        j.setExecutable('./job_script.sh')
        # These servers seem to have miniconda installed
        # destination = np.random.choice(servers_with_miniconda)
        j.setDestination(servers_with_miniconda)

        value = dirac.submit(j)
        print('Number {} Submission Result: {}'.format(c, value))
Example #24
 j.setPlatform('EL7')
 j.setTag([str(nprocs) + 'Processors'])
 j.setDestination(SitesList)
 j.setExecutable('RMSynthesis2.sh',
                 arguments=str(nprocs) + ' ' + str(id_start) + ' ' +
                 str(id_end) + ' ' + str(expmnt))
 # Input data
 j.setInputData(inputdata_list)
 j.setInputSandbox([
     'RMSynthesis2.sh', 'run2.sh',
     'prmon_1.0.1_x86_64-static-gnu72-opt.tar.gz'
 ])
 # Output data
 j.setOutputSandbox([
     'StdOut', 'StdErr',
     'outputtxt_' + str(id_start) + '_' + str(id_end - 1) + '.txt',
     'prmon' + str(id_start) + '_' + str(id_end - 1) + '.txt'
 ])
 o_data_file = lfn + 'second/results_experiment_' + str(
     expmnt) + '/' + 'LOS_' + str(id_start) + '_to_' + str(id_end -
                                                           1) + '.npy'
 try:
     output_process = subprocess.check_output('dirac-dms-remove-files ' +
                                              o_data_file,
                                              stderr=subprocess.STDOUT,
                                              shell=True)
 except subprocess.CalledProcessError as e:
     print 'Failed: ' + str(e.returncode) + ' ' + e.output
 else:
     print "Output: ", output_process
 j.setOutputData(
            if not os.path.isfile(executable):
                gLogger.error("file %s not found."%executable)
                dexit(1)
            os.chmod(executable,0755) # make file executable
            input_sandbox_files.append(executable)
        j.setExecutable(str(executable))
    else:
        gLogger.error("No executable defined.")
        dexit(1)
        
    j.setName("MC job")
    if not opts.name is None:
        j.setName(opts.name)

    j.setInputSandbox(input_sandbox_files) # all input files in the sandbox
    j.setOutputSandbox(output_sandbox_files)

    j.setCPUTime(opts.cpu)
    if not opts.site is None:
        j.setDestination(opts.site.split(","))#can also be a list
        
    if not opts.bannedSites is None:
        j.setBannedSites(opts.bannedSites.split(","))

    if not opts.release is None:
        tag = opts.release
        cl = SoftwareTagClient()
        result = cl.getSitesForTag(tag,'Valid') # keyword doesn't work there.
        if not result['OK']:
            gLogger.error("*ERROR* Could not get sites for Tag %s"%tag,result['Message'])
            dexit(1)
Example #26
inputSandbox = ['test.sh']
outputSandbox = [stdout, stderr, logfile, output]

# The executable here ('') is set later, so as not to confuse users
diracJob = Job('', stdout, stderr)

diracJob.setName(jobName)

# Set the program/executable, arguments, logFile, ...
diracJob.setExecutable(executable, arguments='test.sh', logFile=logfile)

# this file is needed remotely for the job
diracJob.setInputSandbox(inputSandbox)

# these files are created by the job regardless of the executable
diracJob.setOutputSandbox(outputSandbox)

# Set the job length, but not needed in this example
diracJob.setCPUTime(500)

print 'submitting job', jobName
dirac = Dirac()
result = dirac.submitJob(diracJob)
print 'Submission Result: ', result

# try to create job id file
try:
    jid = 'JobID'
    if jid in result.keys():
        jid_file = open('%s.jid' % (jobName), 'w')
        jid_file.write('%s\n' % (str(result[jid]).strip()))
def Stereo(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './stereo.sh'
    macro = './CTAstereo.C'

    if (len(args) != 5):
        Script.showHelp()

    particle = args[0]
    typeofdata = args[1]
    direction = args[2]
    zenith = args[3]
    diffuse = args[4]

    if typeofdata == 'train':
        # The master layout with all the telescopes
        candidates = './Prod3_3HB9All_Candidates.txt'
    elif typeofdata == 'test':
        # Different layouts
        candidates = './Prod3_3HB9_Candidates_Full.txt'
    else:
        print "Invalid type of data definition!"
        Script.showHelp()
        return 1

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        # deg = "180"
    elif direction == "S":
        directionName = "south"
        # deg = "0"
    else:
        print 'Wrong direction. It can only be "N" or "S".'
        Script.showHelp()
        return 1

    filesPerJob = 5

    site = "PARANAL"

    listname = './training/gamma_trainLUT_%s_%s_%s.lfns' % (zenName, diffName, direction)

    loop = 0
    iJob = 0
    # totalEntries /= (2*filesPerJob)
    # print totalEntries

    f = open(listname, 'r')
    totalEntries = sum(1 for _ in f)
    f = open(listname, 'r')
    fileList = []
    text_file_name = "lfnFiles_%s_%s_%s_%s.txt" % (particle, direction, zenName, diffuse)
    text_file = open(text_file_name, "w")
    for line in f:
        loop = loop+1
        infileLFN = line.strip()
        # filein = os.path.basename(infileLFN)
        fileList.append(infileLFN)
        text_file.write("%s\n" % infileLFN)
        remain = loop % filesPerJob

        if iJob == 10:
            break

        if loop == totalEntries:
            remain = 0

        if remain == 0:
            iJob = iJob+1

            j = Job()
            text_file.close()
            j.setInputSandbox([user_script, "setupPackageMARS.sh", text_file_name, candidates, macro])
            jobName = "%s %s %s %s %s %s %s %s" % (user_script, site, particle, typeofdata, directionName, zenName, diffName, iJob)
            jobOut = "%s_%s_%s_%s_%s.out" % (user_script, site, typeofdata, directionName, iJob)
            script_args = "%s %s %s %s %s %s %s" % (particle, typeofdata, direction, zenName, diffName, site, iJob)

            j.setExecutable(user_script, script_args)
            j.setOutputSandbox([jobOut, "applicationLog.txt"])
            j.setName(jobName)
            j.setBannedSites(['LCG.MSFG.fr', 'LCG.M3PEC.fr', 'LCG.OBSPM.fr', 'LCG.UNI-DORTMUND.de', 'LCG.UNIV-LILLE.fr', 'LCG.GRIF.fr', 'ARC.SE-SNIC-T2.se'])
            Script.gLogger.info(j._toJDL())

            print "Submitting job %s" % (jobName)
            Dirac().submit(j)
            fileList = []
            text_file = open(text_file_name, "w")
def dirLUT(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './dirLUT.sh'

    if (len(args) != 3):
        Script.showHelp()
    direction = args[0]
    zenith = args[1]
    diffuse = args[2]

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        # deg = "180"
    elif direction == "S":
        directionName = "south"
        # deg = "0"
    else:
        print 'Wrong direction. It can only be "N" or "S".'
        Script.showHelp()
        return 1

    listname = './training/gamma_trainLUT_%s_%s_%s.lfns' % (zenName, diffName,
                                                            direction)

    with open(listname) as f:
        totalEntries = sum(1 for _ in f)

    # Number of files used per job
    runN = 20

    runMin = 0
    runMax = totalEntries / runN

    for i in range(runMin, runMax):
        jobName = "%s_%s_%s_%s_%s" % (user_script, direction, zenName,
                                      diffName, i)
        jobOut = "%s_%s_%s%s.out" % (user_script, directionName, diffName, i)
        script_args = "%s %s %s %s %s" % (direction, zenName, diffName, i,
                                          runN)
        j = Job()
        j.setInputSandbox([
            user_script, listname, "setupPackageMARS.sh", "CheckFileZombie.C"
        ])
        j.setExecutable(user_script, script_args)
        j.setOutputSandbox([jobOut, "applicationLog.txt"])
        j.setName(jobName)
        j.setBannedSites([
            'LCG.MSFG.fr', 'LCG.M3PEC.fr', 'LCG.OBSPM.fr',
            'LCG.UNI-DORTMUND.de', 'LCG.UNIV-LILLE.fr'
        ])
        Script.gLogger.info(j._toJDL())
        print "Submitting job %s" % (script_args)
        Dirac().submit(j)
Example #29
def TrainERF(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './trainERF.sh'

    if (len(args) != 3):
        Script.showHelp()
    direction = args[0]
    zenith = args[1]
    diffuse = args[2]

    site = "PARANAL"

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        deg = "180"
    elif direction == "S":
        directionName = "south"
        deg = "0"
    else:
        print 'Wrong direction. It can only be "N" or "S".'
        Script.showHelp()
        return 1

    # List of files over which the training should be done
    LFN_file = './training/gamma_trainERF_%s_%s_%s.lfns' % (zenName, diffName,
                                                            direction)

    StatFile = './Statistic_train.txt'
    for telType in range(0, 6):
        jobName = "%s_%s_%s_%s_%s" % (user_script, directionName, telType,
                                      diffName, zenName)
        jobOut = "%s_%s_%s.out" % (user_script, directionName, telType)
        script_args = "%s %s %s %s %s" % (direction, site, diffName, zenName,
                                          telType)
        j = Job()
        # create LFN list
        # LFNList = []
        # f = open(LFN_file, 'r')
        # for line in f:
        #    infileLFN = line.strip()
        #    LFNList.append(infileLFN)
        j.setInputSandbox(
            [user_script, "setupPackageMARS.sh", LFN_file, StatFile])
        j.setExecutable(user_script, script_args)
        j.setOutputSandbox([jobOut, "applicationLog.txt"])
        j.setName(jobName)
        Script.gLogger.info(j._toJDL())
        print "Launching %s %s" % (user_script, script_args)
        Dirac().submit(j)
Example #30
def submitTS():

    ########################################
    # Modify here with your dirac username
    owner = 'user02'
    ########################################

    ########################################
    # Job description
    ########################################
    job = Job()
    job.setName('mandelbrot raw')
    job.setOutputSandbox(['*log'])
    job.setType('MCSimulation')

    # this is so that the JOB_ID within the transformation can be evaluated on the fly in the job application, see below
    job.workflow.addParameter(
        Parameter("JOB_ID", "000000", "string", "", "", True, False,
                  "Initialize JOB_ID"))

    ## define the job workflow in 3 steps
    # job step1: setup software
    job.setExecutable('git clone https://github.com/bregeon/mandel4ts.git')
    # job step2: run mandelbrot application
    # note how the JOB_ID (within the transformation) is passed as an argument and will be evaluated on the fly
    job.setExecutable('./mandel4ts/mandelbrot.py',
                      arguments="-P 0.0005 -M 1000 -L @{JOB_ID} -N 200")

    outputPath = os.path.join('/vo.france-grilles.fr/user', owner[0], owner,
                              'ts_mandelbrot/images/raw')
    outputPattern = 'data_*txt'
    outputSE = 'DIRAC-USER'
    outputMetadata = json.dumps({
        "application": "mandelbrot",
        "image_format": "ascii",
        "image_width": 7680,
        "image_height": 200,
        "owner": owner
    })

    # job step3: upload data and set metadata
    # pilot.cfg in arguments is necessary with pilot 3
    job.setExecutable('./mandel4ts/dirac-add-files.py',
                      arguments="pilot.cfg %s '%s' %s '%s'" %
                      (outputPath, outputPattern, outputSE, outputMetadata))

    # job step4: mark input files as done with the FailoverRequest (and a few other things)
    job.setExecutable('/bin/ls -l', modulesList=['Script', 'FailoverRequest'])

    ########################################
    # Transformation definition
    ########################################
    t = Transformation()

    t.setTransformationName(owner + '_step1')
    t.setType("MCSimulation")
    t.setDescription("Mandelbrot images production")
    t.setLongDescription("Mandelbrot images production")
    # set the job workflow to the transformation
    t.setBody(job.workflow.toXML())

    ########################################
    # Transformation submission
    ########################################
    res = t.addTransformation()

    if not res['OK']:
        print(res['Message'])
        DIRAC.exit(-1)

    t.setStatus("Active")
    t.setAgentType("Automatic")

    return res
def Flux(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job
    import time
    import os.path

    user_script = './flux.sh'
    modmacro = './CTAflux_speeed.C'
    site = "PARANAL"

    if (len(args) != 5):
        Script.showHelp()

    typeofdata = "test"
    particle = args[0]
    direction = args[1]
    MOD = args[2]
    exten = args[3]
    zenName = args[4]

    # List of files over which flux should be run

    LFN_file = "./stereofiles/lfn_%s_%s_%s_%s.lfns" % (particle, exten,
                                                       zenName, direction)

    fileLength = sum(1 for line in open(LFN_file))
    f = open(LFN_file, 'r')

    if particle == "proton":
        filesPerJob = 10
    else:
        filesPerJob = 20

    fileList = []
    text_file_name = "lfnStereoFiles_%s_%s_%s_%s.txt" % (particle, exten,
                                                         typeofdata, direction)
    text_file = open(text_file_name, "w")

    # File containing the id number of files already produced. The relaunch of these jobs will be skipped
    done_file_name = "./stereofiles/done/done_%s_%s_%s_%s.lfns" % (
        particle, exten, zenName, direction)

    if os.path.exists(done_file_name):
        done_content = [
            int(line.strip()) for line in open(done_file_name, 'r')
        ]
    else:
        done_content = []

    loop = 0
    iJob = 0

    for line in f:
        loop = loop + 1
        infileLFN = line.strip()

        fileList.append(infileLFN)
        text_file.write("%s\n" % infileLFN)
        remain = loop % filesPerJob

        if remain == 0 or loop == fileLength:
            iJob = iJob + 1

            # Skipping of already finished jobs
            if iJob in done_content:
                text_file.close()
                fileList = []
                text_file = open(text_file_name, "w")
                continue

            else:
                j = Job()
                text_file.close()
                j.setInputSandbox([
                    user_script, "setupPackageMARS.sh", "CheckFileZombie.C",
                    text_file_name, modmacro
                ])

                jobName = "%s_%s_%s_%s_%s_%s_%s" % (user_script, site,
                                                    particle, direction, iJob,
                                                    exten, zenName)
                jobOut = "%s_%s_%s_%s_%s.out" % (user_script, site, particle,
                                                 direction, iJob)
                script_args = "%s %s %s %s %s %s %s" % (
                    particle, site, iJob, direction, MOD, exten, zenName)

                j.setExecutable(user_script, script_args)
                j.setOutputSandbox([jobOut, "applicationLog.txt"])
                j.setName(jobName)
                j.setBannedSites([
                    'LCG.MSFG.fr', 'LCG.M3PEC.fr', 'LCG.OBSPM.fr',
                    'LCG.UNI-DORTMUND.de', 'LCG.UNIV-LILLE.fr',
                    'LCG.Prague.cz', 'LCG.GRIF.fr'
                ])
                Script.gLogger.info(j._toJDL())
                print "Submitting job %s %s %s %s %s %s" % (
                    user_script, zenName, particle, direction, site, iJob)
                time.sleep(3)
                Dirac().submit(j)
                fileList = []
                text_file = open(text_file_name, "w")
    # Here we add the names of the temporary copies of the frame data
    # files in the dataset to the input sandbox. These will be uploaded
    # to the grid with the job...
    j.setInputSandbox(file_dict.keys())

    #...and added to the desired storage element with the corresponding
    # LFN via the job's OutputData. You may wish to change:
    # * The Storage Element - by changing the outputSE parameter;
    # * The LFN base name   - by changing the outputPath parameter.
    j.setOutputData(file_dict.keys(), \
                    outputSE='%s' % (se), \
                    outputPath='/%s/' % (gridoutdir)\
                   )

    # These are the files retrieved with the local job output.
    j.setOutputSandbox(['StdOut', 'StdErr'])

    # You can set your preferred site here.
    j.setDestination(sitename)

    ## The DIRAC instance.
    dirac = Dirac()

#    # Submit the job and print the result.
#    result = dirac.submit(j)
#    print 'Submission result: ', result

    # Delete the (temporary) data files.
    for fn in file_dict.keys():
        os.remove(fn)
Example #33
def submitTS():

  ########################################
  # Modify here with your dirac username 
  owner = 'user02'
  ########################################

  
  ########################################
  # Job description
  ########################################
  job = Job()
  job.setName('build mandelbrot')
  job.setOutputSandbox( ['*log'] )
  job.setType('DataReprocessing')

  ## define the job workflow in 3 steps
  # job step1: setup software  
  job.setExecutable('git clone https://github.com/bregeon/mandel4ts.git')
  # job step2: run mandelbrot build image
  job.setExecutable('./mandel4ts/build_merged_img.py')

  outputPath = os.path.join('/vo.france-grilles.fr/user',owner[0],owner,'mandelbrot/images/final')
  outputPattern = 'merged_image.bmp'
  outputSE = 'DIRAC-USER'
  outputMetadata = json.dumps( {"application":"mandelbrot","image_format":"bmp", "image_width":7680, "image_height":4200, "owner":owner} )

  # job step3: upload data and set metadata
  job.setExecutable( './mandel4ts/dirac-add-files.py', arguments = "%s '%s' %s '%s'" % (outputPath, outputPattern, outputSE, outputMetadata ) )
  
  # job step4: mark input files as done with the FailoverRequest (and a few other things)
  job.setExecutable('/bin/ls -l', modulesList=['Script', 'FailoverRequest'])

  ########################################
  # Transformation definition
  ########################################
  t = Transformation()

  t.setTransformationName( owner+'_step3' )
  t.setType( "DataReprocessing" ) 
  t.setDescription( "Merge mandelbrot images production" )
  t.setLongDescription( "Merge mandelbrot images production" )
  t.setGroupSize( 3 ) # group input files
  # set the job workflow to the transformation
  t.setBody ( job.workflow.toXML() )

  # define input data by metadata query
  inputMetaquery = {"application":"mandelbrot","image_format":"ascii", "image_width":7680, "image_height":1400, "owner":owner} 
  t.setInputMetaQuery(inputMetaquery) 

  ########################################
  # Transformation submission
  ########################################
  res = t.addTransformation() 

  if not res['OK']:
    print(res['Message'])
    DIRAC.exit( -1 )

  t.setStatus( "Active" )
  t.setAgentType( "Automatic" )
  
  return res
def TrainRF(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './trainRF.sh'

    if (len(args) != 3):
        Script.showHelp()
    direction = args[0]
    zenith = args[1]
    diffuse = args[2]

    site = "PARANAL"

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        # deg = "180"
    elif direction == "S":
        directionName = "south"
        # deg = "0"
    else:
        print 'Wrong direction. It can only be "N" or "S".'
        Script.showHelp()
        return 1

    # Macro fixing the file check before continues.
    ROOTmacro = "CTAtrain.C"

    # List of files over which the training should be done
    LFN_file_gammas = './training/gamma_ghtrain_%s_%s_%s.lfns' % (
        zenName, diffName, direction)
    LFN_file_protons = './training/proton_ghtrain_%s_%s_%s.lfns' % (
        zenName, diffName, direction)

    StatFile = './Statistic_train.txt'

    for telType in range(0, 6):
        jobName = "%s_%s_%s_%s_%s" % (user_script, directionName, diffName,
                                      telType, zenName)
        jobOut = "%s_%s_%s_%s.out" % (user_script, directionName, diffName,
                                      telType)
        script_args = "%s %s %s %s %s" % (direction, site, diffName, telType,
                                          zenName)

        j = Job()

        j.setInputSandbox([
            user_script, "setupPackageMARS.sh", LFN_file_gammas,
            LFN_file_protons, ROOTmacro, StatFile
        ])
        j.setExecutable(user_script, script_args)
        j.setOutputSandbox([jobOut, "applicationLog.txt"])
        j.setName(jobName)
        Script.gLogger.info(j._toJDL())
        print "Launching %s %s" % (user_script, script_args)
        Dirac().submit(j)