Example #1
    def __init__(self):

        self.dIlc = DiracILC(False)

        self.job = UserJob()
        self.job.setJobGroup("FCC")
        self.job.setName("FCC APP")
        self.job.setOutputSandbox(["*.log", '*.root'])
        #self.job.setDestination('LCG.DESY-HH.de')

        #EOS public location
        self.EOS_MGM_URL = 'root://eospublic.cern.ch'
        #EOS environment
        self.setEOS = 'export EOS_MGM_URL=' + self.EOS_MGM_URL
        self.myclient = client.FileSystem(self.EOS_MGM_URL + ':1094')

        #sandbox
        self.InputSandbox = []
        self.folders_to_upload = []
        self.filtered_extensions = []
        self.excludes_or_includes = []
        self.temp_cwd = os.path.join(os.getcwd(), 'fcc_temp_dirac')
Example #2
def getJob():
    """ produce a job: it's always the same, so we don't need to put it in the main
  """
    j = UserJob()
    ma = Marlin()
    ma.setVersion("v0111Prod")
    ma.setSteeringFile("clic_ild_cdr_steering.xml")
    ma.setGearFile("clic_ild_cdr.gear")
    result = j.append(ma)
    if not result['OK']:
        gLogger.error(result["Message"])
        dexit(1)
    j.setCPUTime(10000)
    j.setOutputSandbox("*.log")
    return j
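A hedged usage sketch (not part of the original snippet): since getJob() always builds the same job, a small driver only needs a DiracILC instance to check and submit it. The repository file name below is illustrative, and the DiracILC import is assumed to be available as in the other examples of this collection.

d = DiracILC(True, "repo.rep")   # local repository file name is illustrative
j = getJob()
j.dontPromptMe()                 # skip the interactive confirmation
res = j.submit(d)
if not res['OK']:
    gLogger.error(res["Message"])
    dexit(1)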
Example #4
def all_jobs(name):
    d = DiracILC(True, "repo.rep")

    ################################################
    j = UserJob()
    j.setJobGroup("PM1")
    j.setName("Exec1")
    banned_sites = [
        "OSG.BNL.us", "LCG.UKI-NORTHGRID-LIV-HEP.uk", "OSG.UCSDT2.us",
        "LCG.SCOTGRIDDURHAM.uk", "LCG.NIKHEF.nl", "LCG.UKI-SOUTHGRID-RALPP.uk",
        "LCG.GRIF.fr", "LCG.Manchester.uk", "LCG.UKI-LT2-IC-HEP.uk",
        "LCG.Weizmann.il"
    ]

    j.setBannedSites(banned_sites)

    caindir = name
    #print('Cain directory is ',caindir)
    indata = [
        'LFN:/ilc/user/a/amustahid/cain.exe',
        str(caindir), 'LFN:/ilc/user/a/amustahid/runcain.sh',
        'LFN:/ilc/user/a/amustahid/convert_pairs_lcio.py',
        'LFN:/ilc/user/a/amustahid/pyLCIO.tar.gz',
        '/home/belle2/mustahid/useful/my.sh', './splitInput.py',
        './subddsim.py', './ddsim_steer_July26.py', './ILD_l5_v05.xml',
        './my2.sh', './dbd_500GeV.nung_1.xml',
        'LFN:/ilc/user/a/amustahid/myProcessors.tar.gz', './create_dir.py',
        './conf.py', './util.py', './testcain.sh', './beam_250.i'
    ]
    j.setInputSandbox(indata)

    ################################################

    #app = GenericApplication()
    #app.setScript("create_dir.py")
    #app.setInputFile("testcain.sh")
    #logf = 'create_dir.log'
    #app.setLogFile(logf)
    #app.setDebug(debug=True)
    #create_dirname = 'create_dir'
    #app.setName(create_dirname)
    #res=j.append(app)
    #if not res['OK']:
    #  print res['Message']
    #  exit(1)
    ################################################
    appre = GenericApplication()
    name = name.split('/')
    #print(name)
    cain_name = name[-1]
    subdir = name[-2]
    dirname = name[-3]

    #print('Cain file name ', cain_name)
    appre.setScript("LFN:/ilc/user/a/amustahid/runcain.sh")
    #appre.setScript("testcain.sh")
    ifile = cain_name.split('.')
    ifile = ifile[0] + '.' + ifile[1] + '.' + ifile[2]
    #print('ifile ',ifile)

    appre.setArguments(ifile)
    #direc = 'LFN:/ilc/user/a/amustahid/'
    #appre.setInputFile(ifile+".i")
    #appre.setArguments("This is input arguments")
    logf = ifile + '_' + subdir + '.log'
    appre.setLogFile(logf)
    appre.setDebug(debug=True)
    name = 'CAIN'
    appre.setName(name)
    res = j.append(appre)
    if not res['OK']:
        print res['Message']
        exit(1)
    ################################################

    ################################################
    #appost = GenericApplication()
    #appost.setScript("myanal.sh")
    #appost.setArguments("This is my analysis step")
    #res=j.append(appost)
    #if not res['OK']:
    #  print res['Message']
    #  exit(1)

    ap = GenericApplication()
    ap.setScript('my.sh')
    logf = 'my.log'
    ap.setLogFile(logf)
    ap.setDebug(debug=True)
    name = 'my'
    ap.setName(name)
    res = j.append(ap)
    if not res['OK']:
        print res['Message']
        exit(1)

    outfile = 'incoherent_pair.dat'
    appre.setOutputFile(outfile)

    ################################################
    direc = 'incoherent_pair'
    inputFile = direc + '/' + 'inco_pair_split.slcio'

    # global variables to hold command line parameters
    # ######################################
    base = '.'
    #outdir=base+'/'+dirname+'/slcio_test_2ndrun'
    outdir = base + '/' + dirname + '/Run_7'
    #print('outdir'+' '+str(outdir))
    geant_name = ifile
    outputFile = geant_name + '_' + subdir + '.slcio'

    #_clip = _Params(False,1,inputFile,outputFile,outdir)

    nbevents = 100
    clip = _Params(nbevents, inputFile, outputFile, outdir)
    ddsim = subDDSim(clip)
    ################################################

    res = j.append(ddsim)
    if not res['OK']:
        print res['Message']
        exit(1)

    j.setOutputData(outputFile, outdir, "KEK-SRM")
    j.setOutputSandbox(["*.log", "*.dat", "*.slcio"])
    j.dontPromptMe()
    res = j.submit(d)
    #res = j.submit(d, mode='local')
    if res['OK']:
        print str(res["Value"])
        #print "Dirac job, "+str(res["Value"])+", was submitted."
    else:
        print "Failed to submit Dirac job. return message was as follows."
        pprint.pprint(res)
Example #5
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

from DIRAC import exit as dexit

d = DiracILC(True, "repo.rep")

n_evts = 500
n_evts_per_job = 100
n_jobs = n_evts / n_evts_per_job

for i in range(n_jobs):
    j = UserJob()

    mo = Mokka()
    mo.setEnergy(3000)
    mo.setVersion("0706P08")
    mo.setSteeringFile("clic_ild_cdr.steer")
    mo.setMacFile("particlegun_electron.mac")
    mo.setOutputFile("MyFile.slcio")
    mo.setNbEvts(n_evts_per_job)
    res = j.append(mo)
    if not res['OK']:
        print res['Message']
        break
    ma = Marlin()
    ma.setVersion("v0111Prod")
    ma.setSteeringFile("clic_ild_cdr_steering.xml")
Example #6
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
import time

dIlc = DiracILC()
lcoutput = []
for i in range(1, 301):
    lcoutput = "aa_%d.root" % i
    job = UserJob()
    job.setDestination('LCG.CERN.ch')
    job.setInputSandbox([
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/PandoraLikelihoodData9EBin_CLIC_ILD.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/PandoraSettingsFast.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/MarlinRecoRootFiles/steering.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/clic_ild_cdr.gear",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/MarlinRecoRootFiles/lib.tar.gz"
    ])
    job.setInputData(
        "LFN:/ilc/user/k/kacarevic/hgamgam/Marlin/newPandora/aa/aa_%d.slcio" %
        i)
    job.setOutputSandbox(
        ["*.log", "*.sh", "*.py", "*.out", "*.xml", "*.steer "])
    job.setJobGroup("myRoot")
    job.setName("root_aa_%d" % i)
    marl = Marlin()
    marl.setVersion('ILCSoft-2016-09-27_gcc48')
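Example #6 stops after setting the Marlin version; below is a hedged sketch of the usual continuation inside the loop, reusing the steering and gear files already placed in the input sandbox above. The output call, output-data path and storage element are illustrative assumptions.

    marl.setSteeringFile("steering.xml")       # from the input sandbox above
    marl.setGearFile("clic_ild_cdr.gear")      # from the input sandbox above
    marl.setOutputFile(lcoutput)               # assumption: the ROOT file named above
    res = job.append(marl)
    if not res['OK']:
        print(res['Message'])
        continue
    job.setOutputData(lcoutput, "myRoot", "CERN-SRM")   # illustrative path and SE
    job.dontPromptMe()
    job.submit(dIlc)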
Example #7
mrgOutfile = "m" + ILDConfigVer + ".m" + detectorModel + ".LQGSP_BERT." + infile
lcoutputMRG  = mrgOutfile + "_%s.DST.slcio"%(mrg_range)
print "runDSTmerge:  lcoutputMRG= ", lcoutputMRG

jobGroup = idin + "_" + detModelSim
dirac = DiracILC(True,jobGroup+".rep")

MRGoutput = []
# outputs to be saved onto grid SE
outpath="MyProd_" + ILDConfigVer + "/E250-TDR_ws/" + chann + "/" +ireq+ "/mrg"

jobname="m" + idin + "_" + str(mrgix1)
print jobname

job = UserJob()
job.setName(jobname)
job.setJobGroup(jobGrName)
job.setILDConfig(ILDConfigVer)
job.setCPUTime(6400)
job.setInputSandbox(["runDSTmerge_Tmp.py"])
job.setOutputSandbox( ["*.log","*.sh","*.py"] )
job.setInputData(mergeList)
#job.setInputData(mergeList[0])
#job.setOutputData( lcoutputMRG, outpath, "CERN-SRM" )
job.setOutputData( lcoutputMRG, outpath, "IN2P3-SRM" )
job.dontPromptMe()

slcioconcat = SLCIOConcatenate()
slcioconcat.setInputFile(mergeList)
#slcioconcat.setInputFile(mergeList[0])
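Example #7 ends while configuring the SLCIOConcatenate step; here is a hedged sketch of the usual conclusion, assuming the generic setOutputFile call is available on the application and following the submit pattern used in the other examples of this collection.

slcioconcat.setOutputFile(lcoutputMRG)   # assumption: output name matches the output data registered above
res = job.append(slcioconcat)
if not res['OK']:
    print(res['Message'])
    exit(1)
res = job.submit(dirac)
print(res)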
Example #8
simfile="LFN:"+simdir+"sv01-14-01-p00.mILD_o1_v05.E500-TDR_ws.I108161.Pffh.eL.pL_sim_6762_1.slcio"

gearfile=basedir+"GearOutput.xml"
outdst = "toto.dst.slcio" #% i
outrec = "toto.rec.slcio" #% i

n_events_per_job=50000
energy=500
GGToHadInt500=1.7
BXOverlay=1

d= DiracILC(True,"repo.rep")

### In case one wants a loop, uncomment the following.
#for i in range(2):
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("MarlinExample")#%i)
j.setInputSandbox([pandoraLikelihoodData, bg_aver])

## Define the overlay
ov = OverlayInput()
ov.setMachine("ilc_dbd")
ov.setEnergy(energy)
ov.setNumberOfSignalEventsPerJob(n_events_per_job)
ov.setBXOverlay(BXOverlay)
ov.setGGToHadInt(GGToHadInt500)
ov.setBkgEvtType("aa_lowpt")
# ov.setBackgroundType("aa_lowpt")
ov.setDetectorModel("ILD_o1_v05")
res = j.append(ov)
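Example #8 stops right after appending the overlay; Example #9 below shows the same overlay-plus-Marlin pattern in full, and a hedged sketch of the usual continuation (version string and output sandbox are assumptions) is:

if not res['OK']:
    print(res['Message'])
    exit(1)

ma = Marlin()
ma.setDebug()
ma.setVersion("ILCSoft-01-17-09")          # assumption: same version as in Example #9
ma.setSteeringFile("marlin_stdreco.xml")
ma.setGearFile(gearfile)
ma.setInputFile(simfile)
ma.setOutputDstFile(outdst)
ma.setOutputRecFile(outrec)
res = j.append(ma)
if not res['OK']:
    print(res['Message'])
    exit(1)

j.setOutputSandbox(["*.log", "*.xml"])
j.dontPromptMe()
j.submit(d)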
Example #9
def getJob(dirac, jobid, jobpara):
  iser=jobid+100

  outdir = "/ilc/user/a/amiyamoto/myprod2/test/"
  outdst = "toto-ovl-%5.5i.dst.slcio"%iser
  outrec = "toto-ovl-%5.5i.rec.slcio"%iser
  dstlfn = outdir+"dst/"+outdst
  reclfn = outdir+"rec/"+outrec
  outsrm = "CERN-SRM"

### In case one wants a loop, uncomment the following.
#for i in range(2):
  j = UserJob()
  j.setJobGroup("Tutorial")
  j.setName("MarlinOverlayParametric%i"%iser)
  j.setInputSandbox(jobpara["setting_file"])

## Define the overlay
  ov = OverlayInput()
  ov.setMachine("ilc_dbd")
  ov.setEnergy(energy)
  ov.setNumberOfSignalEventsPerJob(int(jobpara["n_events_per_job"]))
  ov.setBXOverlay(int(jobpara["BXOverlay"]))
  ov.setGGToHadInt(float(jobpara["GGToHadInt500"]))
  ov.setBkgEvtType("aa_lowpt")
# ov.setBackgroundType("aa_lowpt")
  ov.setDetectorModel("ILD_o1_v05")
  res = j.append(ov)
  if not res['OK']:
    print res['Message']
    exit(1)

## Define Marlin job
  ma = Marlin()
  ma.setDebug()
  ma.setVersion("ILCSoft-01-17-09")
  ma.setSteeringFile("marlin_stdreco.xml")
  ma.setGearFile("GearOutput.xml")
#   ma.setInputFile(simfile)
  ma.setInputFile(simlists[jobid])
  ma.setOutputDstFile(outdst)
  ma.setOutputRecFile(outrec)
  res = j.append(ma)
  if not res['OK']:
    print res['Message']
    exit(1)

# Upload files to different directories
  upload_script="upload%i.sh"%iser
  upload = GenericApplication()
# Create a script to upload files.
  shfile = open(upload_script,"w")
  shfile.write("#!/bin/bash\n")
  shfile.write("/bin/ls -l \n")
  shfile.write("dirac-dms-add-file -ddd "+dstlfn+" "+outdst+" "+outsrm+" \n")
  shfile.write("dirac-dms-add-file -ddd "+reclfn+" "+outrec+" "+outsrm+" \n")
  shfile.close()
  os.chmod(upload_script,0755)
  upload.setScript(upload_script)

  res = j.append(upload)
  if not res['OK'] :
    print res['Message']
    exit(1)

#   j.setOutputData([outdst,outrec],"myprod2/test","PNNL-SRM")
  j.setInputSandbox([ setting_file, upload_script ] )
  j.setOutputSandbox(["*.log","*.xml","*.sh","TaggingEfficiency.root","PfoAnalysis.root"])
  j.setCPUTime(10000)
  j.dontPromptMe()

  res = j.submit(dirac)
  if not res["OK"] :
    print "Failed submit job, jobid=%s" %jobid
    print res


  os.remove(upload_script)

  return j
Example #10
from ILCDIRAC.Interfaces.API.NewInterface.Applications import LCSIM

#jobParams = [('slicTest8_mu+_theta90.mac','diracTest8_mu+_theta90.slcio',50),('slicTest7_mu+_theta_5-175.mac','diracTest_mu+_theta_5-175.slcio',50),('slicTest3_e+.mac','diracTest3_e+.slcio',10),('slicTest2_pi+.mac','diractTest2_pi+.slcio',10)]
#jobParams = [('slicTest10_mu+_100gev_theta70_testNewGeom.mac','diracTest10_mu+_100gev_theta70_testNewGeom.slcio',10),('slicTest10_mu+_100gev_theta90_testNewGeom.mac','diracTest10_mu+_100gev_theta90_testNewGeom.slcio',10)]
jobParams = [
    ('/users/detector/ssetru/SiDSim/detectors/detector_vtx_matbudghalf_nonsensitivelayer/slicmacros/slicTest8_mu+_100gev_theta60.mac',
     'diracTest_100gev_theta60_vtx_matbudghalf_nonsensitivelayer.slcio', 100),
    ('/users/detector/ssetru/SiDSim/detectors/detector_vtx_matbudghalf_nonsensitivelayer/slicmacros/slicTest8_mu+_10gev_theta60.mac',
     'diracTest_10gev_theta60_vtx_matbudghalf_nonsensitivelayer.slcio', 100)
]
#slicMacros = ['slicTest8_mu+_theta90.mac','slicTest7_mu+_theta_5-175.mac','slicTest3_e+.mac','slicTest2_pi+.mac']
#fileOutputs = ['diracTest2Loop1.slcio','diracTest2Loop2.slcio','diracTest2Loop3.slcio','diractTest2Loop4.slcio']
#slicNumEvents = [100,100,10,10]

for macro, output, nEvts in jobParams:
    job = UserJob()
    job.setName("ssetru_dirac_test1")
    job.setJobGroup("tests")
    job.setCPUTime(86400)
    #below ten mb, specified local path
    #larger input files are put into a grid storage unit, specified with grid path
    #job.setInputSandbox(["newDetector.zip"])
    job.setInputSandbox(["alias.properties"])
    #'/afs/cern.ch/user/s/ssetru/www/newDetector.zip'
    #job.setInputSandbox.append('/afs/cern.ch/user/s/ssetru/www/newDetector.zip')
    #has log files, also may want to specify *.xml, generally short term data
    job.setOutputSandbox(["*.log", "*.mac", "*.xml"])
    #stored forever, in grid storage until you delete, path specified goes after your user directory in dirac
    job.setOutputData(output, "test_vtx_matbudghalf_nonsensitivelayer",
                      "CERN-SRM")
Example #11
class Submit():

    #*****************************************#
    # Function name : __init__                #
    # role : create a DIRAC job, an instance  #
    # of DiracILC, and filter the output      #
    #*****************************************#

    def __init__(self):

        self.dIlc = DiracILC(False)

        self.job = UserJob()
        self.job.setJobGroup("FCC")
        self.job.setName("FCC APP")
        self.job.setOutputSandbox(["*.log", '*.root'])
        #self.job.setDestination('LCG.DESY-HH.de')

        #EOS public location
        self.EOS_MGM_URL = 'root://eospublic.cern.ch'
        #EOS environment
        self.setEOS = 'export EOS_MGM_URL=' + self.EOS_MGM_URL
        self.myclient = client.FileSystem(self.EOS_MGM_URL + ':1094')

        #sandbox
        self.InputSandbox = []
        self.folders_to_upload = []
        self.filtered_extensions = []
        self.excludes_or_includes = []
        self.temp_cwd = os.path.join(os.getcwd(), 'fcc_temp_dirac')

    #********************************************#
    # Function name : read_from_file             #
    # input : file_name                          #
    # role : read a file and return its content  #
    #********************************************#

    def read_from_file(self, file_name):

        try:

            with open(file_name, 'r') as f:
                content = f.read()
            return content
        except:
            return False

    #*********************************************************************************#
    # Function name : create_temp_tree_from_files                                     #
    # role : given a relative tree path of file to the local FCCSW folder of the user #
    # it looks for this file, recreates the relative tree in a temporary folder       #
    # containing only this file and not all files of the source folder                #
    # Finally, the folder will be added to the DIRAC sandbox                          #
    #*********************************************************************************#

    def create_temp_tree_from_files(self, files, fccsw_path):

        if files:
            for file in files:
                tree = os.path.dirname(file)
                tree_full_path = os.path.join(self.temp_cwd, tree)
                if not os.path.exists(tree_full_path):
                    os.makedirs(tree_full_path)
                root_folder = tree.split(os.path.sep)[0]
                root_folder_full_path = os.path.join(self.temp_cwd,
                                                     root_folder)

                if root_folder_full_path not in self.folders_to_upload:
                    self.folders_to_upload += [root_folder_full_path]

                source = os.path.join(fccsw_path, file)
                destination = os.path.join(self.temp_cwd, file)

                if not os.path.exists(source):
                    print '\nThe file : ' + source + ' does not exist\n'
                    quit()
                else:
                    copyfile(source, destination)

    #***************************************************#
    # Function name : compress                          #
    # role : it compress all folders, the job           #
    # will need once in the grid                        #
    #***************************************************#

    def compress(self, temp_folder, actual_folder, tar_extension,
                 filtered_extension, exclude_or_include):

        #print filtered_extension,exclude_or_include

        exclude_func = None

        if False is not filtered_extension:

            if exclude_or_include:
                exclude_func = lambda filename: filename.find(
                    filtered_extension) >= 0
            else:
                exclude_func = lambda filename: os.path.isfile(
                    filename) and filename.find(filtered_extension) < 0

        tar = tarfile.open(temp_folder + tar_extension, "w:gz")

        for name in os.listdir(actual_folder):
            #print name
            renamed = os.path.join(actual_folder, name)
            #print renamed
            #print os.path.basename(renamed)

            tar.add(renamed,
                    arcname=os.path.basename(renamed),
                    exclude=exclude_func)

        tar.close()

    #*************************************************#
    # Function name : find_eos_file                   #
    # input : file_name                               #
    # role : check if file exists on EOS              #
    # before sending the job to DIRAC                 #
    #*************************************************#

    def find_eos_file(self, file_name):
        #then the file is in EOS

        eos_file_full_path = self.EOS_MGM_URL + '/' + file_name

        with client.File() as eosFile:
            file_status = eosFile.open(eos_file_full_path, OpenFlags.UPDATE)

        #problem with file created directly on eos
        #no problem with files uploaded with the xrdcp cmd

        #print eos_file_full_path
        #print file_status
        status = self.XRootDStatus2Dictionnary(file_status)

        if 'False' == status[' ok'] or False == status:
            return file_name, False
        else:
            return eos_file_full_path, True

    #*************************************************#
    # Function name : find_eos_folder                 #
    # input : folder_name                             #
    # role : check if folder exists on eos            #
    # before sending the job to the worker            #
    #*************************************************#

    def find_eos_folder(self, folder_name):
        #then the file is in eos

        eos_folder_full_path = self.EOS_MGM_URL + '/' + folder_name

        status, listing = self.myclient.dirlist(folder_name, DirListFlags.STAT)

        if None == listing:
            return folder_name, False
        else:
            return eos_folder_full_path, True

    #*************************************************#
    # Function name : find_path                       #
    # input : file_name                               #
    # role : check if file/folder exists on afs       #
    # before checking on eos                          #
    #*************************************************#

    def find_path(self, path, file_or_dir='file'):

        #we suppose that the user enters an absolute or relative AFS path
        #or only an absolute EOS path

        if not path.startswith('/eos/'):
            #afs path are absolute or relative
            #because software are stored in this filesystem
            #and users generally submit their job from lxplus

            #absolute path provided
            #os.isabs not cross platform
            #startswith is used instead...
            if path.startswith('/afs/') and os.path.exists(path):
                return os.path.abspath(path), True
            #if relative (does not start with /afs/)
            #add cwd
            elif os.path.exists(os.path.abspath(path)):
                return os.path.abspath(path), True
            #maybe local user machine, print upload error message
            else:
                return path, False

        #absolute path
        elif path.startswith('/eos/'):
            #print "the file is in eos"

            #eos path are absolute

            file_path, is_file_exist = self.find_eos_file(path)

            folder_path, is_folder_exist = self.find_eos_folder(path)

            if is_file_exist:
                return file_path, is_file_exist
            elif is_folder_exist:
                return folder_path, is_folder_exist
            else:
                return path, False
        else:  #other file system
            return path, False

    #********************************************************#
    # Function name : upload_sandbox_with_application_files  #
    # role : upload all extra folders or files               #
    # specified by the user for an application               #
    # For now, we do not check if file exists on cvmfs       #
    #********************************************************#

    def upload_sandbox_with_application_files(self, paths):

        upload_path_message = " does not exist\nPlease ensure that your path exists in an accessible file system (EOS or AFS)\n"

        for path in paths:

            if not path.startswith('/cvmfs/'):

                path, is_exist = self.find_path(path)
                if False == is_exist:
                    message = "\nThe path '" + path + "'" + upload_path_message
                    print message
                    quit()
                else:
                    if path.startswith('/afs/'):
                        print '\nWARNING : STORING FILES ON AFS IS DEPRECATED\n'
                        print '\nYou plan to upload :' + path + ' which is stored on AFS\n'

                    #files are directly added to the sandbox
                    if os.path.isfile(path):
                        self.InputSandbox += [path]
                    else:  #folders are compressed before
                        self.folders_to_upload += [path]

    #********************************************************#
    # Function name : upload_sandbox_with_fccsw_files        #
    # role : upload files called in FCCSW configuration file #
    # and needed folders relative to FCCSW                   #
    #********************************************************#

    def upload_sandbox_with_fccsw_files(self, fccsw_path, fcc_conf_file):

        InstallArea_folder = os.path.join(fccsw_path, 'InstallArea')

        Detector_folder = os.path.join(fccsw_path, 'Detector')

        fccsw_folders = [InstallArea_folder, Detector_folder]

        #explanation
        #InstallArea_folder : dbg files are excluded
        #Detector_folder : only xml files are included
        self.filtered_extensions += ['.dbg', '.xml']

        self.excludes_or_includes += [True, False]

        content = self.read_from_file(fcc_conf_file)

        if False is content:
            print "\nError in reading configuration file :\n" + fcc_conf_file
            quit()

        #xml_files = re.findall(r'file:(.*.xml)',content)

        txt_files = re.findall(r'="(.*.txt)', content)

        cmd_files = re.findall(r'filename="(.*.cmd)', content)

        #print txt_files
        #print cmd_files

        folders = self.create_temp_tree_from_files(txt_files, fccsw_path)
        if None != folders:
            fccsw_folders += folders

        folders = self.create_temp_tree_from_files(cmd_files, fccsw_path)
        if None != folders:
            fccsw_folders += folders

        self.folders_to_upload = fccsw_folders + self.folders_to_upload

    #********************************************************#
    # Function name : update_sandbox                         #
    # role : check the files and folders required by the job #
    # and add them to the sandbox                            #
    # Indeed, it calls upload_sandbox_with_* functions       #
    #********************************************************#

    def update_sandbox(self, fccsw_path, paths, fcc_conf_file):

        #first, it creates a specific local working directory for the sandbox
        if not os.path.exists(self.temp_cwd):
            os.makedirs(self.temp_cwd)

        #update sandbox with application files
        if '' != paths:
            self.upload_sandbox_with_application_files(paths)

        #update sandbox with FCCSW application files
        if '' != fccsw_path:
            self.upload_sandbox_with_fccsw_files(fccsw_path, fcc_conf_file)

    #********************************************************#
    # Function name : compress_sandbox_subfolders            #
    # role : compress all local folders required by the job  #
    #********************************************************#

    def compress_sandbox_subfolders(self):

        compressed_folders = []

        for idx, actual_folder in enumerate(self.folders_to_upload):

            tar_extension = '.tgz'

            if idx < len(self.filtered_extensions):
                filtered_extension = self.filtered_extensions[idx]
                exclude_or_include = self.excludes_or_includes[idx]
            else:
                filtered_extension = False
                exclude_or_include = False

            temp_folder = os.path.join(self.temp_cwd,
                                       os.path.basename(actual_folder))

            #DIRAC already compressed the sandbox before submitting the job
            self.compress(temp_folder, actual_folder, tar_extension,
                          filtered_extension, exclude_or_include)

            compressed_folders += [temp_folder + tar_extension]

        self.InputSandbox += compressed_folders

    #**************************************************#
    # Function name : submit                           #
    # role : create DIRAC generic application for each #
    # FCC application and submit the DIRAC job         #
    # containing these generic DIRAC applications      #
    #**************************************************#

    def submit(self, applications, script_to_source):

        fcc_execution_module = os.path.join(os.getcwd(),
                                            "fcc_execution_module.py")
        fcc_installation_module = os.path.join(os.getcwd(),
                                               'fcc_installation_module.py')

        fcc_environment = script_to_source

        #Initialization of the sandbox
        paths = [fcc_execution_module, fcc_environment]
        self.upload_sandbox_with_application_files(paths)

        for application in applications:

            #application specification
            job_specification = application.job_specification

            #application specification details
            fcc_executable = job_specification['fcc_executable']
            fccsw_path = job_specification['fccsw_path']
            paths = job_specification['paths']
            fcc_conf_file = job_specification['fcc_conf_file']
            fcc_input_files = '' if '' == job_specification[
                'fcc_input_files'] else ' --ifiles ' + ' '.join(
                    job_specification['fcc_input_files'])
            fcc_output_file = '' if '' == job_specification[
                'fcc_output_file'] else ' --ofile ' + job_specification[
                    'fcc_output_file']
            fccsw = '' if '' == job_specification[
                'fccsw_path'] else ' --fccsw ' + job_specification['fccsw_path']
            number_of_events = '' if '' == job_specification[
                'number_of_events'] else ' -N ' + job_specification[
                    'number_of_events']

            #**************************************************MINIMUM REQUIREMENTS CHECKING*******************************************************#
            if '' == fcc_executable or '' == fcc_conf_file or '' == fcc_environment:
                print "\nError in parsing applications :\n" + fcc_executable
                print "\nYou have to provide at least an executable, a configuration file and a script to source for each application\n"
                quit()
            #**************************************************MINIMUM REQUIREMENTS CHECKING*******************************************************#

            #we update the sandbox according to the specification of the application
            self.update_sandbox(fccsw_path, paths, fcc_conf_file)

            #we instantiate a generic DIRAC application
            generic_dirac_application = GenericApplication()

            #we set the installation module as the main script
            generic_dirac_application.setScript(fcc_installation_module)

            #we set the arguments of the script
            arguments = '--source ' + fcc_environment + ' --exec ' + fcc_executable + ' --conf ' + fcc_conf_file + fcc_input_files + fcc_output_file + fccsw + number_of_events

            generic_dirac_application.setExtraCLIArguments(arguments)

            #we add the generic DIRAC application to the DIRAC job
            try:
                res = self.job.append(generic_dirac_application)
                if not res['OK']:
                    print res['Message']
                    quit()
            except:
                print "\nPlease, configure your proxy before submitting a job from DIRAC"
                print "If you do not set up a proxy, refer to the manual or maybe you have to refresh it"
                print "by typing :"
                print "dirac-proxy-init\n"
                quit()

        #before submitting the DIRAC job, we compress all folders of the sandbox
        self.compress_sandbox_subfolders()

        print '\n**********************************HERE IS THE CONTENT OF YOUR SANDBOX*********************************\n'
        print self.InputSandbox
        print '\n**********************************HERE IS THE CONTENT OF YOUR SANDBOX*********************************\n'

        #we set the sandbox
        self.job.setInputSandbox(self.InputSandbox)

        #-------------------------------now we submit the job containing all applications-------------------------------------------#

        res = self.job.submit(self.dIlc)
        if not res['OK']:
            print 'Please check your application requirements'
            print res['Message']
            quit()
        else:
            print "The Job you submited has the following ID : " + str(
                res['JobID'])
Example #12
def main():
	# Take the input arguments from the argument parser, and check they exist...
	args = parse_args()
	if not args:
		print 'Invalid Arguments'
		sys.exit(1)

	print args.chain[0]

	# softVersions = ["v3r0p3", "3.0-SNAPSHOT", "ILC_DBD", "0116"]
	softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "0116"] # Working (recommended)
	# softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working 
	# softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working
	# softVersions = ["v3r0p3", "HEAD", "ILCSoft-01-17-08", "0116"]

	check_events_arguments(args.events, args.split)
	detector = args.detector
	alias_properties(detector)
	outputPath, outputBase, repoName = input_output(args.Input, detector, args.chain, args.digiSteering)
	inputSandbox, outputSandbox = setup_sandboxes(args.macFile)

	dirac = DiracILC(True, repoName)

	# Prepares values for the job loop...
	if args.split < 0:
		nInputEvents = int(args.events)
		nOutputEvents = int(args.events)
	if args.split > 0:
		nInputEvents = int(args.events)
		nOutputEvents = int(args.split)

	# Loop that runs through the required number of jobs to be executed...
	for startEvent in range(0, nInputEvents, nOutputEvents):

################## Job Initialise ########################################		
		job = UserJob()
		job.setName(outputBase)
		job.setJobGroup('JobGroup')
		job.setInputSandbox(inputSandbox)
		fileNumber = startEvent/nOutputEvents
		print "Job ---> ", fileNumber

################## SLIC ##################################################
		if 1 in args.chain:
			slic = SLIC()
			slic.setVersion(softVersions[0])
			slic.setSteeringFile(args.macFile)
			# slic.setInputFile(lfn)
			slic.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_sim.slcio'))
			slic.setDetectorModel(detector)
			slic.setNumberOfEvents(nOutputEvents)
			slic.setStartFrom(startEvent)
			#print slic.listAttributes()
			result = job.append(slic)
			if not result['OK']:
				print result['Message']
				sys.exit(2)

################## Overlay ###############################################
		if 2 in args.chain:
			'''
			#Add the gghad background overlay.
			gghad = OverlayInput()
			#gghad.setProdID(1767)
			gghad.setEnergy(500.0)
			gghad.setBXOverlay('args.bunches')
			gghad.setGGToHadInt( 4.1 )
			gghad.setNbSigEvtsPerJob(nOutputEvents)
			gghad.setMachine('ilc_dbd')
			gghad.setDetectorModel('sidloi3')
			gghad.setBkgEvtType('aa_lowpt')
			result = job.append( gghad )
			if not result['OK']:
				print result['Message']
				sys.exit(2)
			
			#Add the pair background overlay.
			pairs = OverlayInput()
			pairs.setProdID(2)
			pairs.setEnergy(500.0)
			pairs.setBXOverlay('args.bunches')
			pairs.setGGToHadInt(1.)
			pairs.setNbSigEvtsPerJob(nOutputEvents)
			pairs.setMachine('ilc_dbd')
			pairs.setDetectorModel('sidloi3')
			pairs.setBkgEvtType('eepairs')
			result = job.append( pairs )
			if not result['OK']:
				print result['Message']
				sys.exit(2)
			'''
			gghad = OverlayInput()
			gghad.setPathToFiles('/ilc/user/j/jstrube/gghadron_lowpt/sidloi3/')
			gghad.setBXOverlay(int(args.bunches))
			gghad.setGGToHadInt( 4.1 )
			gghad.setNbSigEvtsPerJob(nOutputEvents)
			gghad.setBkgEvtType('aa_lowpt')		
			result = job.append( gghad )
			if not result['OK']:
				print result['Message']
				sys.exit(2)
			'''
			pairs = OverlayInput()
			pairs.setPathToFiles('/ilc/user/j/jstrube/GuineaPig/sidloi3/')
			pairs.setBXOverlay(int(args.bunches))
			pairs.setGGToHadInt(1.)
			pairs.setBkgEvtType('eepairs')
			pairs.setNbSigEvtsPerJob(nOutputEvents)
			result = job.append( pairs )
			if not result['OK']:
				print result['Message']
				sys.exit(2)
			'''
			
################## lcsim (digitization and tracking) #####################
		if 3 in args.chain:
			lcsim = LCSIM()
			lcsim.setVersion(softVersions[1])
			lcsim.setSteeringFile(args.digiSteering) # Another version is included in /steeringFiles
			if 1 in args.chain:
				lcsim.getInputFromApp(slic)
			lcsim.setTrackingStrategy('steeringFiles/sidloi3_trackingStrategies_default.xml')
			lcsim.setAliasProperties('steeringFiles/alias.properties')
			lcsim.setDetectorModel(detector+".zip")
			#lcsim.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_digiTracking.slcio'))
			lcsim.setOutputDstFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio')) #NEED TO CHANGE!!!
			lcsim.setNumberOfEvents(nOutputEvents)
			#print lcsim.listAttributes()
			result = job.append(lcsim)
			if not result['OK']:
				print result['Message']
				sys.exit(2)

################## slicPandora ###########################################
		if 4 in args.chain:
			slicPandora = SLICPandora()
			slicPandora.setVersion(softVersions[2])
			slicPandora.setDetectorModel(detector)
			slicPandora.getInputFromApp(lcsim)
			slicPandora.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_pandora.slcio'))
			slicPandora.setPandoraSettings('pandoraSettings.xml')
			slicPandora.setNumberOfEvents(nOutputEvents)
			#print slicPandora.listAttributes()
			result = job.append(slicPandora)
			if not result['OK']:
				print result['Message']
				sys.exit(2)

################## Marlin, LCFIPlus Vertexing ############################
		if 5 in args.chain:
			vertexing = Marlin()
			vertexing.setVersion(softVersions[3])
			vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml')
			vertexing.setGearFile('steeringFiles/' + detector + '.gear')
			vertexing.getInputFromApp(slicPandora)
			vertexing.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_vertexing.slcio'))
			vertexing.setNumberOfEvents(nOutputEvents)
			#print vertexing.listAttributes()
			result = job.append(vertexing)
			if not result['OK']:
				print result['Message']
				sys.exit(2)
################## lcsim (DST production) ################################
			lcsimDst = LCSIM()
			lcsimDst.setVersion(softVersions[1])
			lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml')
			lcsimDst.getInputFromApp(vertexing)
			lcsimDst.setNumberOfEvents(nOutputEvents)
			lcsimDst.setAliasProperties('steeringFiles/alias.properties')
			lcsimDst.setDetectorModel(detector+".zip")
			lcsimDst.setOutputRecFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_Rec.slcio'))
			lcsimDst.setOutputDstFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio'))
			#print lcsimDst.listAttributes()
			result = job.append(lcsimDst)
			if not result['OK']:
				print result['Message']
				sys.exit(2)

################## Marlin, LCFIPlus flavortag ############################
		if 6 in args.chain:
			flavortag = Marlin()
			flavortag.setVersion(softVersions[3])
			flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml')
			flavortag.setGearFile('steeringFiles/' + detector + '.gear')
			flavortag.setInputFile(lcsimDstOutput)
			flavortag.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_flavortag.slcio'))
			flavortag.setNumberOfEvents(nOutputEvents)
			#print flavortag.listAttributes()
			result = job.append(flavortag)
			if not result['OK']:
				print result['Message']
				sys.exit(2)

################## Job Finalise ##########################################

		# List of banned sites that the job shall not be sent to. These are sites that jobs tend to fail on;
		# this list is likely to change.
		job.setBannedSites(['LCG.IN2P3-CC.fr', 'LCG.RAL-LCG2.uk', 'LCG.DESY-HH.de', 'LCG.DESYZN.de', 'LCG.KEK.jp',
							'OSG.PNNL.us','OSG.UConn.us','OSG.GridUNESP_CENTRAL.br','LCG.SCOTGRIDDURHAM.uk',
							'LCG.TECHNIONself.il','LCG.UKI-SOUTHGRID-RALPP.uk','OSG.FNAL_FERMIGRID.us','LCG.UKI-LT2-IC-HEP.uk'])

		job.setCPUTime(50000)
		job.setPlatform('x86_64-slc5-gcc43-opt')

		# Sets the output data file according to whether -f is selected; ships output to your /ilc/user/a/aPerson/
		# directory on the grid.
		outputLevel = max(args.chain)
		if outputLevel == 1:
			job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_sim.slcio'), outputPath, 'CERN-SRM')
		if outputLevel == 3:
			#job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_digiTracking.slcio'), outputPath, 'CERN-SRM')
			job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio'), outputPath, 'CERN-SRM')
		if outputLevel == 4:
			job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_pandora.slcio'), outputPath, 'CERN-SRM')
		if outputLevel == 5:
			job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio'), outputPath, 'CERN-SRM')
		if outputLevel == 6:
			job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_flavortag.slcio'), outputPath, 'CERN-SRM')

		job.setOutputSandbox(outputSandbox)
		job.setInputData(args.Input)

		if args.dontPromptMe:
			job.dontPromptMe()
		# Submits Job!!!
		job.submit()

	return 0
Example #13
simfile="LFN:/ilc/prod/ilc/mc-dbd/ild/sim/500-TDR_ws/6f_ttbar/ILD_o1_v05/v01-14-01-p00/sv01-14-01-p00.mILD_o1_v05.E500-TDR_ws.I37623.P6f_bbcyyc.eR.pL-00001.slcio"
setting_file="LFN:/ilc/user/a/amiyamot/software/Settings/marlinSettings-v01-17-09_500.tar.gz"

n_events_per_job=100
energy=500
GGToHadInt500=1.7
BXOverlay=1

outdst = "toto-ovl.dst.slcio" #% i
outrec = "toto-ovl.rec.slcio" #% i

d= DiracILC(True,"repo.rep")

### In case one wants a loop, uncomment the following.
#for i in range(2):
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("MarlinOverlayExample")#%i)
j.setInputSandbox([setting_file])

## Define the overlay
ov = OverlayInput()
ov.setMachine("ilc_dbd")
ov.setEnergy(energy)
ov.setNumberOfSignalEventsPerJob(n_events_per_job)
ov.setBXOverlay(BXOverlay)
ov.setGGToHadInt(GGToHadInt500)
ov.setBkgEvtType("aa_lowpt")
# ov.setBackgroundType("aa_lowpt")
ov.setDetectorModel("ILD_o1_v05")
res = j.append(ov)
Example #14
def defGridJob(jobName, jobGroup, inputFile):
    '''Defines Grid job'''

    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    job = UserJob()
    job.setName(jobName)
    job.setJobGroup(jobGroup)
    job.setCPUTime(86400)
    job.dontPromptMe()
    job.setBannedSites([
        'LCG.IN2P3-CC.fr', 'OSG.UConn.us', 'LCG.Cracow.pl', 'OSG.MIT.us',
        'LCG.Glasgow.uk', 'OSG.CIT.us', 'OSG.BNL.us', 'LCG.Brunel.uk'
    ])

    job.setInputData(inputFile)
    job.setInputSandbox([
        'LFN:/ilc/user/l/lstroem/topasymmetry/lib_ilcsoft_2017-05-30_gcc62.tar.gz'
    ])  #only need LFN: for InputSandbox
    job.setOutputSandbox(["*.out", "*.log"])

    outputFile = jobName + ".slcio"
    rootFile = jobName + ".root"

    #job.setOutputData([outputFile, rootFile], jobGroup, "CERN-DST-EOS") #keep some files on the GRID for further processing
    job.setOutputData([rootFile], jobGroup, "CERN-DST-EOS")

    return job, outputFile, rootFile
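A hedged usage sketch for defGridJob() (not from the original source): the returned job still needs at least one application appended before submission. The DiracILC and Marlin imports, the input LFN, the Marlin version and the steering file below are illustrative assumptions.

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin

dirac = DiracILC(True, "repo.rep")
job, outputFile, rootFile = defGridJob("top_job_001", "topasymmetry",
                                       "LFN:/ilc/user/l/lstroem/topasymmetry/input_001.slcio")

marlin = Marlin()
marlin.setVersion("ILCSoft-2017-05-30_gcc62")   # assumption: matches the sandboxed library tarball
marlin.setSteeringFile("steering.xml")          # illustrative steering file
marlin.setOutputFile(outputFile)
res = job.append(marlin)
if not res['OK']:
    print(res['Message'])
else:
    job.submit(dirac)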
Example #15
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
d = DiracILC()                            # Provides job checking utilities
j = UserJob()                             # You are running a user job
m = Marlin()                              # Get an application instance
m.setVersion("v0116")                     # Define the version to use
m.setSteeringFile("clic_ild_cdr_steering.xml") #What the app should do
m.setInputFile("LFN:/ilc/prod/clic/3tev/ee_h_bb/ILD/DST/00000375/000/\
ee_h_bb_dst_375_999.slcio")               # Add some input
m.setGearFile("clic_ilc_cdr.gear")        # Application specific field
res = j.append(m)                         # Add the application to the job
if not res['OK']:
  print res['Message']                    # Catch any error
j.submit(d)                               # Submit the job
#Not shown here: metadata queries, chaining of applications
Example #16
def subDDSim():

    # Decide parameters for a job
    outputSE = "KEK-SRM"
    outputSE = "KEK-DISK"

    isLocal = _clip.isLocal
    nbevts = 10 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "ddsim_example.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir
    inputFile = _clip.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim does not given.")
        exit(-1)

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"])  # a list of sites not to submit the job to
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v02")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer.py "
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)

    # ddsim.setRandomSeed(1234565)
    # ddsim.setStartFrom(20)        # Number of events to skip before starting ddsim

    job.append(ddsim)

    if outputDir != "":
        job.setOutputData([outputFile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Example #17
def testAndProbeSites():
    """submits jobs to test sites"""
    clip = Params()
    clip.registerSwitches()
    Script.parseCommandLine()

    from DIRAC import gLogger, exit as dexit

    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import CheckWNs
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

    from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getQueues

    res = getQueues(siteList=clip.site, ceList=clip.ce)
    if not res['OK']:
        gLogger.error("Failed getting the queues", res['Message'])
        dexit(1)

    sitedict = res['Value']
    CEs = []

    for ces in sitedict.values():
        CEs.extend(ces.keys())

    gLogger.notice("Found %s CEs to look at." % len(CEs))

    d = DiracILC(True, "SiteProbe.rep")

    for CE in CEs:
        j = UserJob()
        j.setDestinationCE(CE)
        c = CheckWNs()
        res = j.append(c)
        if not res['OK']:
            gLogger.error(res['Message'])
            continue
        j.setOutputSandbox("*.log")
        j.setCPUTime(30000)
        j.dontPromptMe()
        res = j.submit(d)
        if not res['OK']:
            gLogger.error("Failed to submit job, aborting")
            dexit(1)

    dexit(0)
Example #18
Script.parseCommandLine()
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

simfile="LFN:/ilc/prod/ilc/mc-dbd/ild/sim/500-TDR_ws/6f_ttbar/ILD_o1_v05/v01-14-01-p00/sv01-14-01-p00.mILD_o1_v05.E500-TDR_ws.I37623.P6f_bbcyyc.eR.pL-00001.slcio"
setting_file="LFN:/ilc/user/a/amiyamot/software/Settings/marlinSettings-v01-17-09_500.tar.gz"

outdst = "toto-3.dst.slcio" #% i
outrec = "toto-3.rec.slcio" #% i

d= DiracILC(True,"repo.rep")

### In case one wants a loop, uncomment the following.
#for i in range(2):
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("MarlinExample")#%i)
j.setInputSandbox(setting_file)

ma = Marlin()
ma.setDebug()
ma.setVersion("ILCSoft-01-17-09")
ma.setSteeringFile("marlin_stdreco.xml")
ma.setGearFile("GearOutput.xml")
ma.setInputFile(simfile)
ma.setOutputDstFile(outdst)
ma.setOutputRecFile(outrec)

res = j.append(ma)
if not res['OK']:
  print res['Message']
  exit(1)
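Example #18 ends here; a hedged sketch of the usual final step, mirroring Example #15 and the other complete examples in this collection (the output sandbox pattern is an assumption):

j.setOutputSandbox(["*.log", "*.xml"])
j.dontPromptMe()
res = j.submit(d)
if not res['OK']:
    print(res['Message'])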
Example #19
class UserJobTestCase(unittest.TestCase):
  """Base class for the UserJob test cases."""

  def setUp(self):
    """Set up the objects."""
    self.log_mock = Mock(name="SubMock")
    with patch('%s.getProxyInfo' % MODULE_NAME, new=Mock(return_value=None)):
      self.ujo = UserJob()

  def test_submit_noproxy( self ):
    self.ujo.proxyinfo = S_ERROR()
    assertDiracFailsWith( self.ujo.submit(),
                          "Not allowed to submit a job, you need a ['ilc_user', 'calice_user'] proxy", self )

  def test_submit_wrongproxygroup( self ):
    self.ujo.proxyinfo = S_OK( { 'group' : 'my_test_group.notInallowed_list' } )
    assertDiracFailsWith( self.ujo.submit(),
                          "Not allowed to submit job, you need a ['ilc_user', 'calice_user'] proxy", self )

  def test_submit_noproxygroup( self ):
    self.ujo.proxyinfo = S_OK( { 'some_key' : 'Value', True : 1, False : [], 135 : {} } )
    assertDiracFailsWith( self.ujo.submit(), 'Could not determine group, you do not have the right proxy', self )

  def test_submit_addtoworkflow_fails( self ):
    self.ujo.proxyinfo = S_OK( { 'group' : 'ilc_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_ERROR('workflow_testadd_error'))):
      assertDiracFailsWith( self.ujo.submit(), 'workflow_testadd_error', self )

  def test_submit_addtoworkflow_fails_2( self ):
    self.ujo.proxyinfo = S_OK( { 'group' : 'calice_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_ERROR('err_workflow_testadd'))):
      assertDiracFailsWith( self.ujo.submit(), 'err_workflow_testadd', self )

  def test_submit_createnew_dirac_instance( self ):
    ilc_mock = Mock()
    ilc_mock().submitJob.return_value = S_OK('test_submission_successful')
    self.ujo.proxyinfo = S_OK( { 'group' : 'ilc_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_OK())), \
         patch('%s.DiracILC' % MODULE_NAME, new=ilc_mock):
      assertDiracSucceedsWith_equals( self.ujo.submit(), 'test_submission_successful', self )
      ilc_mock().submitJob.assert_called_once_with(self.ujo, 'wms')
      assert self.ujo.oktosubmit

  def test_submit_existing_dirac_instance(self):
    """Test submit with dirac instance."""
    ilc_mock = Mock()
    ilc_mock.submitJob.return_value = S_OK('test_submission_successful')
    self.ujo.proxyinfo = S_OK({'group': 'ilc_user'})
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_OK())):
      assertDiracSucceedsWith_equals(self.ujo.submit(diracinstance=ilc_mock), 'test_submission_successful', self)
      ilc_mock.submitJob.assert_called_once_with(self.ujo, 'wms')
      assert self.ujo.oktosubmit

  def test_setinputdata_failed( self ):
    assertDiracFailsWith( self.ujo.setInputData( { '/mylfn1' : True, '/mylfn2' : False } ),
                          'expected lfn string or list of lfns for input data', self )

  def test_setinputdata(self):
    """Test setting input data."""
    assertDiracSucceeds(self.ujo.setInputData(['LFN:/mylfn1', 'LFN:/mylfn2']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('InputData').getValue(), '/mylfn1;/mylfn2')
    assertDiracSucceeds(self.ujo.setInputData('/mylfn1'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('InputData').getValue(), '/mylfn1')

  def test_inputsandbox( self ):
    self.ujo.inputsandbox = Mock()
    assertDiracSucceeds( self.ujo.setInputSandbox( 'LFN:/ilc/user/u/username/libraries.tar.gz' ), self )
    self.ujo.inputsandbox.extend.assert_called_once_with( [ 'LFN:/ilc/user/u/username/libraries.tar.gz' ] )

  def test_inputsandbox_dictpassed( self ):
    assertDiracFailsWith( self.ujo.setInputSandbox( { '/some/file' : True, '/my/dict' : True } ),
                          'File passed must be either single file or list of files', self )

  def test_setOutputData(self):
    """Test setting output data."""
    assertDiracSucceeds(self.ujo.setOutputData(['/myFile1', '/myFile2']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile1;/myFile2')

    assertDiracSucceeds(self.ujo.setOutputData('/myFile2'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')

    assertDiracSucceeds(self.ujo.setOutputData('/myFile2', OutputSE="MY-SE"), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputSE').getValue(), 'MY-SE')

    assertDiracSucceeds(self.ujo.setOutputData('/myFile2', OutputSE=["MY-SE", 'YOUR-SE']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputSE').getValue(), 'MY-SE;YOUR-SE')


  def test_setoutputdata_dictpassed( self ):
    assertDiracFailsWith( self.ujo.setOutputData( { '/mydict' : True } ),
                          'Expected file name string or list of file names for output data', self )

  def test_setoutputdata_nolistse( self ):
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith(self.ujo.setOutputData(['mylfn1', 'other_lfn', 'last___lfn'],
                                                  OutputSE={'mydict': True}),
                           'Expected string or list for OutputSE', self)
      addparam_mock.assert_called_once_with(wf_mock, 'UserOutputData', 'JDL',
                                            'mylfn1;other_lfn;last___lfn', 'List of output data files')

  def test_setoutputdata_outputpath_nostring( self ):
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = { 'mydict' : True } ),
                            'Expected string for OutputPath', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn', 'List of output data files' )

  def test_setoutputdata_invalid_outputpath_1( self ):
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = '//ilc/user/somedir/output.xml' ),
                            'Output path contains /ilc/user/ which is not what you want', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn', 'List of output data files' )

  def test_setoutputdata_invalid_outputpath_2( self ):
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = '//some/dir/ilc/user/somedir/output.xml' ),
                            'Output path contains /ilc/user/ which is not what you want', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn', 'List of output data files' )

  def test_setoutputdata( self ):
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracSucceeds( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ], OutputPath =
                                                   '//some/dir/somedir/output.xml' ), self )
      assertMockCalls( addparam_mock, [
        ( wf_mock, 'UserOutputData', 'JDL', 'mylfn1;other_lfn;last___lfn', 'List of output data files' ),
        ( wf_mock, 'UserOutputPath', 'JDL', 'some/dir/somedir/output.xml', 'User specified Output Path' ) ],
                       self )

  def test_setoutputsandbox( self ):
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracSucceeds( self.ujo.setOutputSandbox( '/my/dir/myfile.txt' ), self )
      addparam_mock.assert_called_once_with( wf_mock, 'OutputSandbox', 'JDL',
                                             '/my/dir/myfile.txt', 'Output sandbox file' )

  def test_setoutputsandbox_successes(self):
    """Test setting output sandbox."""
    assertDiracSucceeds(self.ujo.setOutputSandbox(['myfile.txt', 'myfile.doc']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('OutputSandbox').getValue(), 'myfile.txt;myfile.doc')


  def test_setoutputsandbox_dictpassed( self ):
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputSandbox( { 'mydict' : True } ),
                            'Expected file string or list of files for output sandbox contents', self )
      self.assertFalse( addparam_mock.called )

  def test_configs(self):
    """Test setting different config packages."""
    assertDiracSucceeds(self.ujo.setILDConfig('123.4'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('ILDConfigPackage').getValue(), 'ILDConfig123.4')

    assertDiracSucceeds(self.ujo.setCLICConfig('567.8'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('ClicConfigPackage').getValue(), 'ClicConfig567.8')

    self.assertIn('ildconfig', self.ujo.workflow.parameters.find('SoftwarePackages').getValue())
    self.assertIn('clicconfig', self.ujo.workflow.parameters.find('SoftwarePackages').getValue())

  def test_submit_split(self):
    """Test submitting with automatic splitting."""
    self.ujo._splittingOption = True
    self.ujo._split = Mock(return_value=S_OK())
    self.ujo.proxyinfo = S_OK({'group': 'ilc_user'})
    ilc_mock = Mock()
    ilc_mock.submitJob.return_value = S_OK('test_submission_successful')
    assertDiracSucceeds(self.ujo.submit(diracinstance=ilc_mock), self)
    ilc_mock.submitJob.assert_called_once_with(self.ujo, 'wms')

    self.ujo._splittingOption = True
    self.ujo._split = Mock(return_value=S_ERROR("Splitting InValid"))
    assertDiracFailsWith(self.ujo.submit(), "Splitting InValid", self)

  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_OK()))
  def test_split_bydata(self):
    """Test splitting by data."""
    self.ujo._eventsPerJob = "1"
    self.ujo._numberOfJobs = "1"
    self.ujo._splittingOption = "byData"
    self.ujo._switch['byData'] = Mock(return_value=[("InputData", ["/ilc/user/u/username/data1"], True)])
    with patch("%s.UserJob.setParameterSequence" % MODULE_NAME) as mock_parametric, \
         patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      info_message = "Job splitting successful"
      assertDiracSucceeds(self.ujo._split(), self)
      self.log_mock.notice.assert_called_with(info_message)
      mock_parametric.assert_any_call("InputData", ["/ilc/user/u/username/data1"], True)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s.UserJob._checkSplitConsistency" % MODULE_NAME, new=Mock(return_value=S_OK()))
  def test_split_byevents(self):
    """Test splitting by events."""
    self.ujo._splittingOption = "byEvents"
    self.ujo._switch['byEvents'] = Mock(return_value=[('NumberOfEvents', [1, 2], 'NbOfEvts')])
    with patch("%s.UserJob.setParameterSequence" % MODULE_NAME) as mock_parametric, \
         patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      info_message = "Job splitting successful"
      assertDiracSucceeds(self.ujo._split(), self)
      self.log_mock.notice.assert_called_with(info_message)
      mock_parametric.assert_any_call('NumberOfEvents', [1, 2], 'NbOfEvts')

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s.UserJob._checkSplitConsistency" % MODULE_NAME, new=Mock(return_value=S_OK()))
  def test_split_atomicsubmission(self):
    """Test splitting atomic."""
    self.ujo._splittingOption = None
    info_message = "Job splitting successful"
    with patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      assertDiracSucceeds(self.ujo._split(), self)
    self.log_mock.notice.assert_called_with(info_message)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=False))
  def test_split_inputparameters_failed(self):
    """Test splitting input parameters with failure."""
    assertDiracFailsWith( self.ujo._split(), "Splitting: Invalid values for splitting", self )

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_ERROR('failed')))
  def test_split_checkSplitConsistency_failed(self):
    """Test splitting check consistency with failure."""
    assertDiracFailsWith(self.ujo._split(), 'failed', self)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_OK()))
  def test_split_sequencer_fails(self):
    """Test splitting when the sequencer fails."""
    self.ujo._splittingOption = "bySequence"
    self.ujo._switch['bySequence'] = Mock(return_value=[])
    self.ujo.setParameterSequence = Mock()
    self.ujo._split()
    self.ujo.setParameterSequence.assert_not_called()

  def test_checkSplitconsistency(self):
    """Test splitting consistency check."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x}
    self.ujo._splittingOption = "byEvents"
    self.assertTrue(self.ujo._checkSplitConsistency())

  def test_checkjobconsistency_bad_split_parameter(self):
    """Test splitting consistency check with bad split parameters."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x }
    self.ujo._splittingOption = "byHand"
    self.assertFalse(self.ujo._checkSplitConsistency()['OK'])
    self.assertIn('_checkSplitConsistency', self.ujo.errorDict)

  def test_checkjobconsistency_no_same_events( self ):
    app1 = Fcc()
    app2 = Fcc()
    app1.numberOfEvents = 1
    app2.numberOfEvents = 2
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x }
    self.ujo._splittingOption = "byEvents"
    with patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      resCheck = self.ujo._checkSplitConsistency()
    self.assertFalse(resCheck['OK'])
    self.assertIn("have the same number", resCheck['Message'])

  def test_checkjobconsistency_negative_events( self ):
    app1 = Fcc()
    app2 = Fcc()
    app1.numberOfEvents = app2.numberOfEvents = -1
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = ["byEvents"]
    self.ujo._splittingOption = "byEvents"
    self.assertTrue(self.ujo._checkSplitConsistency())

  def test_splitbydata( self ):
    self.ujo._data = ['data1', 'data2']
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    assertEqualsImproved(self.ujo._splitByData(), [("InputData", [['data1'], ['data2']], 'ParametricInputData')], self)

  def test_splitbydata_no_data(self):
    """Test splitting without data."""
    self.ujo._data = None
    self.assertFalse(self.ujo._splitByData())
    self.assertIn('_splitByData', self.ujo.errorDict)

  def test_splitbydata_incorrectparameter(self):
    """Test splitting with data."""
    self.ujo._data = ["/path/to/data1","/path/to/data2"]
    self.ujo._numberOfFilesPerJob = 3
    self.assertFalse(self.ujo._splitByData())
    self.assertIn('_splitByData', self.ujo.errorDict)

  def test_splitbyevents_1st_case(self):
    """Test splitting by events."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 2
    self.ujo._numberOfJobs = 3
    map_event_job = [2, 2, 2]
    assertEqualsImproved(self.ujo._splitByEvents(), [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_2nd_case( self ):
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 3
    self.ujo._totalNumberOfEvents = 5
    map_event_job = [3, 2]
    assertEqualsImproved(self.ujo._splitByEvents(), [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_2nd_case_failed( self ):
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 3
    self.ujo._totalNumberOfEvents = 2
    self.assertFalse(self.ujo._splitByEvents())

  def test_splitbyevents_3rd_case(self):
    """Test splitting by events case 3."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._numberOfJobs = 2
    self.ujo._totalNumberOfEvents = 2
    map_event_job = [1, 1]
    assertEqualsImproved(self.ujo._splitByEvents(), [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

    self.ujo._numberOfJobs = 3
    self.ujo._totalNumberOfEvents = 5
    map_event_job = [2, 2, 1]
    assertEqualsImproved(self.ujo._splitByEvents(), [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_3rd_case_failed(self):
    """Test splitting by events case 3 fails."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._numberOfJobs = 2
    self.ujo._totalNumberOfEvents = None
    self.assertFalse(self.ujo._splitByEvents())

  def test_setsplitevents(self):
    """Test splitting set split events."""
    self.ujo.setSplitEvents(42, 42, 126)
    assertEqualsImproved(self.ujo._totalNumberOfEvents, 126, self)
    assertEqualsImproved(self.ujo._eventsPerJob, 42, self)
    assertEqualsImproved(self.ujo._numberOfJobs, 42, self)
    assertEqualsImproved(self.ujo._splittingOption, "byEvents", self)

  def test_setsplitInputdata(self):
    """Test set split input data."""
    input_data = ["/path/to/data1", "/path/to/data2"]
    self.ujo.setSplitInputData(input_data)
    for data in input_data:
      self.assertIn(data, self.ujo._data)
    assertEqualsImproved(self.ujo._splittingOption, "byData", self)

  def test_setSplitFiles(self):
    """Test set split files over jobs."""
    self.ujo.setSplitFilesAcrossJobs('myLFN', 20, 20)
    self.assertEqual(self.ujo._data, ['myLFN'])
    self.assertEqual(self.ujo._eventsPerFile, 20)
    self.assertEqual(self.ujo._eventsPerJob, 20)

  def test_splitBySkip(self):
    """Test set split with skip."""
    self.ujo._eventsPerFile = 13
    self.ujo._eventsPerJob = 5
    self.ujo._data = ['lfn_%d' % d for d in [1, 2]]
    result = self.ujo._splitBySkip()
    self.assertEqual([('InputData', ['lfn_1', 'lfn_1', 'lfn_1', 'lfn_2', 'lfn_2', 'lfn_2'], 'InputData'),
                      ('startFrom', [0, 5, 10, 0, 5, 10], 'startFrom'),
                      ('NumberOfEvents', [5, 5, 3, 5, 5, 3], 'NbOfEvts')],
                     result)

    self.ujo._eventsPerFile = 15
    self.ujo._eventsPerJob = 5
    self.ujo._data = ['lfn_%d' % d for d in [1, 2]]
    result = self.ujo._splitBySkip()
    self.assertEqual([('InputData', ['lfn_1', 'lfn_1', 'lfn_1', 'lfn_2', 'lfn_2', 'lfn_2'], 'InputData'),
                      ('startFrom', [0, 5, 10, 0, 5, 10], 'startFrom'),
                      ('NumberOfEvents', [5, 5, 5, 5, 5, 5], 'NbOfEvts')],
                     result)

  def test_setSplittingStartIndex(self):
    """Test setting start index."""
    res = self.ujo.setSplittingStartIndex(111)
    self.assertTrue(res['OK'])
    self.assertEqual(self.ujo._startJobIndex, 111)

    self.ujo._startJobIndex = 0
    res = self.ujo.setSplittingStartIndex(-111)
    self.assertFalse(res['OK'])
    self.assertIn('setSplittingStartIndex', self.ujo.errorDict)
    self.assertEqual(self.ujo._startJobIndex, 0)

  def test_doNotAlter(self):
    """Test setting not altering the output."""
    self.ujo.setSplitDoNotAlterOutputFilename()
    self.assertIsNotNone(self.ujo.workflow.parameters.find('DoNotAlterOutputData'))
    self.assertEqual(self.ujo.workflow.parameters.find('DoNotAlterOutputData').getValue(), "True")

    self.ujo.setSplitDoNotAlterOutputFilename(False)
    self.assertIsNotNone(self.ujo.workflow.parameters.find('DoNotAlterOutputData'))
    self.assertEqual(self.ujo.workflow.parameters.find('DoNotAlterOutputData').getValue(), "False")

  def test_setSplitJobIndexList(self):
    """Test the setSplitJobIndexList function."""
    res = self.ujo.setSplitJobIndexList(range(0, 7, 3))
    self.assertTrue(res['OK'])
    self.assertEqual([0, 3, 6], self.ujo._jobIndexList)

    res = self.ujo.setSplitJobIndexList(set(range(1, 7, 3)))
    self.assertFalse(res['OK'])
    self.assertIn('Invalid argument type', res['Message'])
    self.assertEqual([0, 3, 6], self.ujo._jobIndexList)
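
The splitting tests above drive the user-facing helpers setSplitEvents, setSplitInputData, setSplitFilesAcrossJobs and setSplittingStartIndex. A minimal usage sketch, assuming a valid proxy; the version, steering file, LFNs and sandbox globs are placeholders, and the argument order noted for setSplitEvents is an assumption (the test above passes 42, 42, 126):

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin

dirac = DiracILC(True, "repo.rep")

job = UserJob()
job.setName("splitting_example")
# byEvents splitting; assumed order: events per job, number of jobs, total events
job.setSplitEvents(100, 10, 1000)
# byData alternative over a list of LFNs (placeholder paths):
# job.setSplitInputData(['/ilc/user/u/username/data1', '/ilc/user/u/username/data2'])

ma = Marlin()
ma.setVersion("v0111Prod")                       # placeholder version
ma.setSteeringFile("clic_ild_cdr_steering.xml")  # placeholder steering file
ma.setGearFile("clic_ild_cdr.gear")
res = job.append(ma)
if not res['OK']:
    print res['Message']

job.setOutputSandbox("*.log")
job.dontPromptMe()
# job.submit(dirac)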
Example #20
ma.setVersion(ILCSoftVer)
ma.setDetectorModel(detectorModel)
ma.setSteeringFile("MarlinStdReco.xml")
ma.setExtraCLIArguments(" --constant.DetectorModel=%s "%(detectorModel) )
ma.setLogFile("marlin.log")
#ma.getInputFromApp(sim)
ma.setInputFile([lcoutputSIM])
ma.setEnergy(energy)
ma.setNumberOfEvents(evtsPerRun)
ma.setOutputDstFile(lcoutputDST)
ma.setOutputRecFile(lcoutputREC)

RECoutput.append(lcoutputDST)
RECoutput.append(lcoutputREC)

job = UserJob()
job.setName(jobname)
job.setJobGroup(jobGrName)
job.setILDConfig(ILDConfigVer)
job.setCPUTime(86400)
job.setInputData([lcinputREC])
job.setInputSandbox(["runRecoSplit_all_Tmp.py"])
job.setOutputSandbox(["*.log","*.sh","MarlinStdRecoParsed.xml","marlin*.xml","*.py "])
#job.setOutputSandbox(["*.log","*.sh","MarlinStdRecoParsed.xml","marlin*.xml","*.py ","*.root"])
#job.setDestinationCE('lyogrid07.in2p3.fr')

job.dontPromptMe()
job.setBannedSites(['LCG.QMUL.uk'])
#job.setBannedSites(['LCG.IN2P3-CC.fr','LCG.DESYZN.de','LCG.DESY-HH.de','LCG.KEK.jp','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us'])

# run Marlin reco jobs
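
The Example #20 fragment ends before the reconstruction application is attached to the job; a plausible completion, following the append/submit pattern used throughout this collection (the DiracILC import, repository file name, output path and storage element are assumptions):

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

res = job.append(ma)
if not res['OK']:
    print res['Message']
    exit(1)

dirac = DiracILC(True, "repo.rep")                        # assumed repository file
job.setOutputData(RECoutput, "MyProd/rec", "IN2P3-SRM")   # placeholder path and SE
res = job.submit(dirac)
print res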
Example #21
 def setUp(self):
   """Set up the objects."""
   self.log_mock = Mock(name="SubMock")
   with patch('%s.getProxyInfo' % MODULE_NAME, new=Mock(return_value=None)):
     self.ujo = UserJob()
Example #22
 def getJob(self):
     """ Define a generic job, it should be always the same
 """
     from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
     myjob = UserJob()
     myjob.setName("Testing")
     myjob.setJobGroup("Tests")
     myjob.setCPUTime(30000)
     myjob.dontPromptMe()
     myjob.setLogLevel("VERBOSE")
     myjob.setPlatform("x86_64-slc5-gcc43-opt")
     myjob.setOutputSandbox(["*.log", "*.xml", "*.sh"])
     myjob._addParameter(myjob.workflow, 'TestFailover', 'String', True,
                         'Test failoverRequest')
     myjob._addParameter(myjob.workflow, 'Platform', 'JDL',
                         "x86_64-slc5-gcc43-opt", 'OS Platform')
     if self.ildConfig:
         myjob.setILDConfig(self.ildConfig)
     return myjob
Example #23
def main():
    # Take the input arguments from the argument parser, and check they exist...
    args = parse_args()
    if not args:
        print 'Invalid Arguments'
        sys.exit(1)

#### Software Versions ####
    softVersions = ["v3r0p3", "HEAD", "ILC_DBD",
                    "0116"]  # Working (recommended)
    # softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working
    # softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working

    # Check the --runs and --split arguments to make sure they are compatible, if not exit...
    if not check_events_arguments(args.events, args.split):
        sys.exit(1)

    # Check the input LFN given by user, it needs to have .stdhep extension and should not have LFN: at the beginning...
    lfn_check, lfn = check_input_LFN(args.stdhepInput)
    if not lfn_check:
        sys.exit(1)

    # Call when you begin ILC-DIRAC jobs, the true indicates a repository file is included...
    dirac = DiracILC(True,
                     setup_repository_name(args.stdhepInput, args.detector))

    # Prepares the input and output sandboxes, if -f, then adds the files required for flavortagging,
    # into the input sandbox
    inputSandbox, outputSandbox = setup_sandboxes(args.macFile, args.flavortag)

    # Prepares values for the job loop...
    if args.split < 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.events)
    if args.split > 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.split)

    # Loop that runs through the required number of jobs to be executed...
    for startEvent in range(0, nInputEvents, nOutputEvents):

        ################## Job Initialise ########################################
        job = UserJob()
        job.setName(path.basename(args.stdhepInput))
        job.setJobGroup('JobGroup')
        job.setInputSandbox(inputSandbox)
        fileNumber = startEvent / nOutputEvents
        print "Job ", fileNumber

        outputFiles = setup_output_dict(args.stdhepInput, args.detector,
                                        fileNumber, args.outputPath,
                                        softVersions)
        slicOutput = outputFiles['slicOutput']
        prePandoraOutput = outputFiles['prePandoraOutput']
        pandoraOutput = outputFiles['pandoraOutput']
        vertexingOutput = outputFiles['vertexingOutput']
        lcsimRecOutput = outputFiles['lcsimRecOutput']
        lcsimDstOutput = outputFiles['lcsimDstOutput']
        flavortagOutput = outputFiles['flavortagOutput']
        diracOutput = outputFiles['diracOutput']

        ################## SLIC ##################################################
        slic = SLIC()
        slic.setVersion(softVersions[0])
        slic.setSteeringFile(args.macFile)
        # slic.setInputFile(lfn)
        slic.setOutputFile(slicOutput)
        slic.setDetectorModel(args.detector)
        slic.setNumberOfEvents(nOutputEvents)
        slic.setStartFrom(startEvent)
        #print slic.listAttributes()
        result = job.append(slic)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## lcsim (digitization and tracking) #####################
        lcsim = LCSIM()
        lcsim.setVersion(softVersions[1])
        lcsim.setSteeringFile(
            'steeringFiles/sid_dbd_prePandora_noOverlay_v22.xml'
        )  # Another version is included in /steeringFiles
        lcsim.getInputFromApp(slic)
        lcsim.setTrackingStrategy(
            'steeringFiles/sidloi3_trackingStrategies_default.xml')
        # lcsim.setAliasProperties('alias.properties')
        lcsim.setDetectorModel('geometryFiles/sidloi3.zip')
        lcsim.setOutputFile(prePandoraOutput)
        lcsim.setNumberOfEvents(nOutputEvents)
        #print lcsim.listAttributes()
        result = job.append(lcsim)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## slicPandora ###########################################
        slicPandora = SLICPandora()
        slicPandora.setVersion(softVersions[2])
        slicPandora.setDetectorModel(args.detector)
        slicPandora.getInputFromApp(lcsim)
        slicPandora.setOutputFile(pandoraOutput)
        slicPandora.setPandoraSettings('pandoraSettings.xml')
        slicPandora.setNumberOfEvents(nOutputEvents)
        #print slicPandora.listAttributes()
        result = job.append(slicPandora)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## Marlin, LCFIPlus Vertexing ############################
        vertexing = Marlin()
        vertexing.setVersion(softVersions[3])
        vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml')
        vertexing.setGearFile('steeringFiles/sidloi3.gear')
        vertexing.getInputFromApp(slicPandora)
        vertexing.setOutputFile(vertexingOutput)
        vertexing.setNumberOfEvents(nOutputEvents)
        #print vertexing.listAttributes()
        result = job.append(vertexing)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## lcsim (DST production) ################################
        lcsimDst = LCSIM()
        lcsimDst.setVersion(softVersions[1])
        lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml')
        lcsimDst.getInputFromApp(vertexing)
        lcsimDst.setNumberOfEvents(nOutputEvents)
        # lcsimDst.setAliasProperties('alias.properties')
        lcsimDst.setDetectorModel('geometryFiles/sidloi3.zip')
        lcsimDst.setOutputRecFile(lcsimRecOutput)
        lcsimDst.setOutputDstFile(lcsimDstOutput)
        #print lcsimDst.listAttributes()
        result = job.append(lcsimDst)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## Marlin, LCFIPlus flavortag ############################
        if args.flavortag:
            flavortag = Marlin()
            flavortag.setVersion(softVersions[3])
            flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml')
            flavortag.setGearFile('steeringFiles/sidloi3.gear')
            flavortag.setInputFile(lcsimDstOutput)
            flavortag.setOutputFile(flavortagOutput)
            flavortag.setNumberOfEvents(nOutputEvents)
            #print flavortag.listAttributes()
            result = job.append(flavortag)
            if not result['OK']:
                print result['Message']
                sys.exit(2)


################## Job Finalise ##########################################

# List of banned sites that the job shall not be sent to. These are sites that jobs tend to fail on,
# This list is likely to change.
        job.setBannedSites([
            'LCG.IN2P3-CC.fr',
            'LCG.RAL-LCG2.uk',
            'LCG.DESY-HH.de',
            'LCG.DESYZN.de',
            'LCG.KEK.jp',
            'OSG.PNNL.us',
        ])

        job.setCPUTime(50000)
        job.setPlatform('x86_64-slc5-gcc43-opt')

        # Sets the output data file according to whether -f is selected, ships output to your /ilc/user/a/aPerson/
        # directory on the grid.
        if args.flavortag:
            job.setOutputData(flavortagOutput, diracOutput, args.SE)

        else:
            job.setOutputData(lcsimDstOutput, diracOutput, args.SE)

        job.setOutputSandbox(outputSandbox)
        job.setInputData(lfn)

        if args.dontPromptMe:
            job.dontPromptMe()
        # Submits Job!!!
        job.submit()

    return 0
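
check_events_arguments() and the other helpers called by main() are not included in this collection; purely as an illustration, a hypothetical sketch of the compatibility check it presumably performs on --events and --split (the real helper may differ):

def check_events_arguments(events, split):
    """Hypothetical sketch: a negative --split disables splitting; otherwise
    --split is the number of events per job and must be a positive value no
    larger than the total number of events."""
    events = int(events)
    split = int(split)
    if split < 0:
        return True
    if split == 0 or split > events:
        print 'Incompatible --events/--split values'
        return False
    return True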
Example #24
def create_job(inputData, saveName, outputDir, dontPromptMe):

    slcioFile = saveName + '.slcio'
    rootFile = saveName + '.root'

    if check_file_existence(outputDir, slcioFile, dontPromptMe):
        remove_file(outputDir, slcioFile, dontPromptMe)
    if check_file_existence(outputDir, rootFile, dontPromptMe):
        remove_file(outputDir, rootFile, dontPromptMe)

    dIlc = DiracILC()

    job = UserJob()
    job.setOutputSandbox(['*.out', '*.log', '*.sh', '*.py', '*.xml'])
    if SAVE_SLCIO:
        job.setOutputData([slcioFile, rootFile],
                          OutputPath=outputDir,
                          OutputSE=STORAGE_SE)
    else:
        job.setOutputData(rootFile, OutputPath=outputDir, OutputSE=STORAGE_SE)
    job.setJobGroup('myMarlinRun1')
    job.setName('MyMarlinJob1')
    # job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk'])
    job.setInputSandbox(['LFN:/ilc/user/a/amaier/mylibs.tar.gz'])

    marl = Marlin()
    marl.setVersion('ILCSoft-2017-11-15_gcc62')

    marl.setInputFile(inputData)
    job.setInputData(list(map(lambda x: x.lstrip('LFN:'), inputData)))
    # marl.setInputFile(['LFN:/ilc/prod/clic/1.4tev/qq_ln/ILD/DST/00003249/010/qq_ln_dst_3249_10000.slcio'])
    marl.setSteeringFile('marlin/FullChain.xml')
    # marl.setSteeringFile('marlin/FullChainNewDetModel.xml')
    marl.setOutputFile(slcioFile)
    gearFile = '/afs/cern.ch/user/a/amaier/projects/CLIC_analysis/grid/marlin/clic_ild_cdr.gear'
    if not os.path.isfile(gearFile):
        print('Error: gear file', gearFile,
              'does not exist! Abort submission.')
        return
    marl.setGearFile(gearFile)
    marl.setExtraCLIArguments(
        "--MyNtupleMaker.OutputFileName={rootOutfile}".format(
            rootOutfile=rootFile))
    # marl.setNumberOfEvents(1000)

    job.append(marl)
    if dontPromptMe:
        job.dontPromptMe()
    job.submit(dIlc)

    return False
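
A short usage sketch for create_job() above; the LFN, names and flag are placeholders, and SAVE_SLCIO, STORAGE_SE and the helper functions it calls (check_file_existence, remove_file) are assumed to be defined at module level as in the snippet:

if __name__ == '__main__':
    # placeholder input LFNs; create_job expects a list
    input_lfns = ['LFN:/ilc/user/u/username/somefile.slcio']
    create_job(input_lfns, 'my_run_1', 'my/output/dir', True)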
Example #25
def getJob(jobid, jobpara):
  iser=jobid+1

  outdst = "toto-ovl-%5.5i.dst.slcio"%iser
  outrec = "toto-ovl-%5.5i.rec.slcio"%iser

### In case one wants a loop: uncomment the following.
#for i in range(2):
  j = UserJob()
  j.setJobGroup("Tutorial")
  j.setName("MarlinOverlayParametric%i"%iser)
  j.setInputSandbox(jobpara["setting_file"])

## Define the overlay
  ov = OverlayInput()
  ov.setMachine("ilc_dbd")
  ov.setEnergy(energy)
  ov.setNumberOfSignalEventsPerJob(int(jobpara["n_events_per_job"]))
  ov.setBXOverlay(int(jobpara["BXOverlay"]))
  ov.setGGToHadInt(float(jobpara["GGToHadInt500"]))
  ov.setBkgEvtType("aa_lowpt")
# ov.setBackgroundType("aa_lowpt")
  ov.setDetectorModel("ILD_o1_v05")
  res = j.append(ov)
  if not res['OK']:
    print res['Message']
    exit(1)

## Define Marlin job
  ma = Marlin()
  ma.setDebug()
  ma.setVersion("ILCSoft-01-17-09")
  ma.setSteeringFile("marlin_ovl_stdreco.xml")
  ma.setGearFile("GearOutput.xml")
#   ma.setInputFile(simfile)
  ma.setInputFile(simlists[jobid])
  ma.setOutputDstFile(outdst)
  ma.setOutputRecFile(outrec)
  res = j.append(ma)
  if not res['OK']:
    print res['Message']
    exit(1)
  
  j.setOutputData([outdst,outrec],"myprod2/test","PNNL-SRM")
  j.setOutputSandbox(["*.log","*.xml","*.sh","TaggingEfficiency.root","PfoAnalysis.root"])
  j.setCPUTime(10000)
  j.dontPromptMe()
  return j
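
getJob() above returns a fully configured job; a plausible driver loop, assuming simlists, energy and the parameter values (all placeholders here) are defined in the submitting script:

d = DiracILC(True, "repo.rep")
jobpara = {"setting_file": "marlin_ovl_stdreco.xml",   # placeholder
           "n_events_per_job": "100",
           "BXOverlay": "1",
           "GGToHadInt500": "0.3"}
for jobid in range(len(simlists)):
    j = getJob(jobid, jobpara)
    res = j.submit(d)
    print res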
Example #26
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Whizard, Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob

from DIRAC import exit as dexit

dirac =DiracILC()

#wh.setOutputFile("myfile.stdhep")

j = UserJob()

wh = Whizard(processlist=dirac.getProcessList())
wh.setEnergy(3000)
wh.setEvtType("ee_h_mumu")
wh.setNbEvts(1)
wh.setEnergy(3000)
params = {}
params['USERB1']='F'
wh.setParameterDict(params)
wh.setModel("sm")
res = j.append(wh)
if not res['OK']:
    print res['Message']
    dexit(1)


mo = Mokka()
Example #27
def main():
	# Take the input arguments from the argument parser, and check they exist...
	args = parse_args()
	if not args:
		print 'Invalid Arguments'
		sys.exit(1)

#### Software Versions ####
	softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "0116"] # Working (recommended)
	# softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working 
	# softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working

	# Check the --runs and --split arguments to make sure they are compatible, if not exit... 
	if not check_events_arguments(args.events, args.split):
		sys.exit(1)

	# Check the input LFN given by user, it needs to have .stdhep extension and should not have LFN: at the beginning...
	lfn_check, lfn = check_input_LFN(args.stdhepInput)
	if not lfn_check:
		sys.exit(1)

	# Call when you begin ILC-DIRAC jobs, the true indicates a repository file is included...
	dirac = DiracILC(True, setup_repository_name(args.stdhepInput, args.detector))

	# Prepares the input and output sandboxes, if -f, then adds the files required for flavortagging,
	# into the input sandbox
	inputSandbox, outputSandbox = setup_sandboxes(args.macFile, args.flavortag)

	# Prepares values for the job loop...
	if args.split < 0:
		nInputEvents = int(args.events)
		nOutputEvents = int(args.events)
	if args.split > 0:
		nInputEvents = int(args.events)
		nOutputEvents = int(args.split)

	# Loop that runs through the required number of jobs to be executed...
	for startEvent in range(0, nInputEvents, nOutputEvents):

################## Job Initialise ########################################		
		job = UserJob()
		job.setName(path.basename(args.stdhepInput))
		job.setJobGroup('JobGroup')
		job.setInputSandbox(inputSandbox)
		fileNumber = startEvent/nOutputEvents
		print "Job ", fileNumber

		outputFiles = setup_output_dict(args.stdhepInput, args.detector, fileNumber, args.outputPath, softVersions)
		slicOutput=outputFiles['slicOutput']
		prePandoraOutput=outputFiles['prePandoraOutput']
		pandoraOutput=outputFiles['pandoraOutput']
		vertexingOutput=outputFiles['vertexingOutput']
		lcsimRecOutput=outputFiles['lcsimRecOutput']
		lcsimDstOutput=outputFiles['lcsimDstOutput']
		flavortagOutput=outputFiles['flavortagOutput']
		diracOutput=outputFiles['diracOutput']

################## SLIC ##################################################
		slic = SLIC()
		slic.setVersion(softVersions[0])
		slic.setSteeringFile(args.macFile)
		# slic.setInputFile(lfn)
		slic.setOutputFile(slicOutput)
		slic.setDetectorModel(args.detector)
		slic.setNumberOfEvents(nOutputEvents)
		slic.setStartFrom(startEvent)
		#print slic.listAttributes()
		result = job.append(slic)
		if not result['OK']:
			print result['Message']
			sys.exit(2)

################## lcsim (digitization and tracking) #####################
		lcsim = LCSIM()
		lcsim.setVersion(softVersions[1])
		lcsim.setSteeringFile('steeringFiles/sid_dbd_prePandora_noOverlay_v22.xml') # Another version is included in /steeringFiles
		lcsim.getInputFromApp(slic)
		lcsim.setTrackingStrategy('steeringFiles/sidloi3_trackingStrategies_default.xml')
		# lcsim.setAliasProperties('alias.properties')
		lcsim.setDetectorModel('geometryFiles/sidloi3.zip')
		lcsim.setOutputFile(prePandoraOutput)
		lcsim.setNumberOfEvents(nOutputEvents)
		#print lcsim.listAttributes()
		result = job.append(lcsim)
		if not result['OK']:
			print result['Message']
			sys.exit(2)

################## slicPandora ###########################################
		slicPandora = SLICPandora()
		slicPandora.setVersion(softVersions[2])
		slicPandora.setDetectorModel(args.detector)
		slicPandora.getInputFromApp(lcsim)
		slicPandora.setOutputFile(pandoraOutput)
		slicPandora.setPandoraSettings('pandoraSettings.xml')
		slicPandora.setNumberOfEvents(nOutputEvents)
		#print slicPandora.listAttributes()
		result = job.append(slicPandora)
		if not result['OK']:
			print result['Message']
			sys.exit(2)

################## Marlin, LCFIPlus Vertexing ############################
		vertexing = Marlin()
		vertexing.setVersion(softVersions[3])
		vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml')
		vertexing.setGearFile('steeringFiles/sidloi3.gear')
		vertexing.getInputFromApp(slicPandora)
		vertexing.setOutputFile(vertexingOutput)
		vertexing.setNumberOfEvents(nOutputEvents)
		#print vertexing.listAttributes()
		result = job.append(vertexing)
		if not result['OK']:
			print result['Message']
			sys.exit(2)

################## lcsim (DST production) ################################
		lcsimDst = LCSIM()
		lcsimDst.setVersion(softVersions[1])
		lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml')
		lcsimDst.getInputFromApp(vertexing)
		lcsimDst.setNumberOfEvents(nOutputEvents)
		# lcsimDst.setAliasProperties('alias.properties')
		lcsimDst.setDetectorModel('geometryFiles/sidloi3.zip')
		lcsimDst.setOutputRecFile(lcsimRecOutput)
		lcsimDst.setOutputDstFile(lcsimDstOutput)
		#print lcsimDst.listAttributes()
		result = job.append(lcsimDst)
		if not result['OK']:
			print result['Message']
			sys.exit(2)

################## Marlin, LCFIPlus flavortag ############################
		if args.flavortag:
			flavortag = Marlin()
			flavortag.setVersion(softVersions[3])
			flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml')
			flavortag.setGearFile('steeringFiles/sidloi3.gear')
			flavortag.setInputFile(lcsimDstOutput)
			flavortag.setOutputFile(flavortagOutput)
			flavortag.setNumberOfEvents(nOutputEvents)
			#print flavortag.listAttributes()
			result = job.append(flavortag)
			if not result['OK']:
				print result['Message']
				sys.exit(2)

################## Job Finalise ##########################################

		# List of banned sites that the job shall not be sent to. These are sites that jobs tend to fail on,
		# This list is likely to change.
		job.setBannedSites(['LCG.IN2P3-CC.fr', 'LCG.RAL-LCG2.uk', 'LCG.DESY-HH.de', 'LCG.DESYZN.de', 'LCG.KEK.jp',
							'OSG.PNNL.us',])

		job.setCPUTime(50000)
		job.setPlatform('x86_64-slc5-gcc43-opt')

		# Sets the output data file according to whether -f is selected, ships output to your /ilc/user/a/aPerson/
		# directory on the grid.
		if args.flavortag:
			job.setOutputData(flavortagOutput, diracOutput, args.SE)

		else: 
			job.setOutputData(lcsimDstOutput, diracOutput, args.SE)

		job.setOutputSandbox(outputSandbox)
		job.setInputData(lfn)

		if args.dontPromptMe:
			job.dontPromptMe()
		# Submits Job!!!
		job.submit()

	return 0
Example #28
from DIRAC.Core.Base import Script

Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Whizard, Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob

from DIRAC import exit as dexit

dirac = DiracILC()

# wh.setOutputFile("myfile.stdhep")

j = UserJob()

wh = Whizard(processlist=dirac.getProcessList())
wh.setEnergy(3000)
wh.setEvtType("ee_h_mumu")
wh.setNbEvts(1)
wh.setEnergy(3000)
params = {}
params["USERB1"] = "F"
wh.setParameterDict(params)
wh.setModel("sm")
res = j.append(wh)
if not res["OK"]:
    print res["Message"]
    dexit(1)

Example #29
import pprint

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import GenericApplication
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

datadir = "/ilc/prod/ilc/mc-opt-3/ild/dst-merged/500-TDR_ws/higgs_ffh/ILD_l5_o1_v02/v02-00-01/"
filepref = "rv02-00-01.sv02-00-01.mILD_l5_o1_v02.E500-TDR_ws.I106523.Pnnh.eL.pR.n001.d_dstm_10763_"
indata = [datadir + filepref + "0.slcio", datadir + filepref + "1.slcio"]

d = DiracILC(True, "repo.rep")

################################################
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("GenericExec")
j.setInputSandbox(["mypre.sh", "myanal.sh"])
j.setInputData(indata)
j._setSoftwareTags(["marlin.ILCSoft-02-00-02_gcc49"])
# j._setSoftwareTags(["lcio.ILCSoft-02-00-01_gcc49"])
j.setILDConfig("v02-00-02")

################################################
appre = GenericApplication()
appre.setScript("mypre.sh")
appre.setArguments("This is input arguments")
res = j.append(appre)
if not res['OK']:
    print res['Message']
Example #30
def subDDSim(clip1):

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = clip1.isLocal
    nbevts = 0 if clip1.numberOfEvents == 0 else clip1.numberOfEvents
    #print('inside subddsim(): nbevts ', nbevts)
    outputFile = "" if clip1.outputFile == "" else clip1.outputFile
    #print('inside subddsim outfile ', outputFile)
    outputDir = clip1.outputDir
    #print('inside subddsim outdir ', outputDir)
    inputFile = clip1.inputFile
    #print('inside subddsim inputFile ', inputFile)
    if inputFile == "":
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"
                        ])  # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v05")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer_July26.py"
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)

    return ddsim
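
subDDSim() above creates a UserJob and a DiracILC instance but returns only the DDSim application, so the snippet is presumably truncated; the other sub* helpers in this collection finish roughly as sketched below (kept as comments because it is not part of the original function):

    # res = job.append(ddsim)
    # if not res['OK']:
    #     print res['Message']
    #     exit(-1)
    # if outputDir != "":
    #     job.setOutputData([outputFile], OutputPath=outputDir, OutputSE=outputSE)
    # if isLocal:
    #     job.submit(dIlc, mode="local")
    # else:
    #     job.submit(dIlc)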
Example #31
if ixjob < 10:
   indx = "00" + str(ixjob)
elif (ixjob>9 and ixjob < 100)  :
   indx = "0" +str(ixjob)
else  :
   indx = str(ixjob)

jobname = "s" + idin + "_" + indx
lcinputSIM  = lcinputpath + genfile
lcoutputSIM  = simOutfile + "_%s.SIM.slcio"%(indx)
print lcinputSIM
print jobname

SIMoutput.append(lcoutputSIM)

job = UserJob()
job.setName(jobname)
job.setJobGroup(jobGrName)
job.setILDConfig(ILDConfigVer)
job.setCPUTime(86400)
job.setInputSandbox(["runSimSplit_any_Tmp.py"])
job.setOutputSandbox(["*.log","*.sh","*.py "])
#job.setOutputData(lcoutputSIM,OutputPath="MyTest/sim1",OutputSE="IN2P3-SRM")
job.setOutputData( SIMoutput,"MyProd_" + ILDConfigVer + "/E250-TDR_ws/" + chann + "/" +ireq+ "/sim","IN2P3-SRM")
#job.setDestinationCE('lyogrid07.in2p3.fr')

job.dontPromptMe()
job.setBannedSites(['LCG.Tau.il'])
#job.setBannedSites(['LCG.IN2P3-CC.fr','LCG.DESYZN.de','LCG.DESY-HH.de','LCG.KEK.jp','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us'])

Example #32
    nameSteeringMarlin = templateSteeringMarlin.replace('_template.xml',templateSuffix)

    #####################################################################      

    # #####################################################################      
    #prepare file for running marlin                                                                                     
    with open(templateSteeringMarlin) as f:
         open(nameSteeringMarlin,"w").write(f.read().replace(templateOutRoot,rootFile))

    # #####################################################################      


    #####################################################################      
    #job definition   
              
    job = UserJob() #use UserJob unless recommended differently      
    job.setName(nameJob)
    job.setJobGroup(nameJobGroup)
    job.setCPUTime(86400)
    job.setBannedSites(['LCG.UKI-LT2-IC-HEP.uk','LCG.KEK.jp','LCG.IN2P3-CC.fr','LCG.Tau.il','Weizmann.il','LCG.Weizmann.il','OSG.MIT.us','OSG.FNAL_FERMIGRID.us','OSG.GridUNESP_CENTRAL.br','OSG.SPRACE.br'])
    job.setInputSandbox([nameSteeringMarlin,'LFN:/ilc/user/o/oviazlo/FCCee_o5/ilcsoft_2017-06-21/lcgeo_28_06_2017_v3.tgz',detectorModel,'LFN:/ilc/user/o/oviazlo/PandoraSettings.tar.gz','LFN:/ilc/user/o/oviazlo/FCCee_o5/marlin_lib_simHits_v3.tgz'])
    job.setOutputSandbox(["*.log"])  #files that should be brought back when retrieving the job outputs 
    job.setOutputData([rootFile],nameDir,"CERN-DST-EOS")   
    
    #####################################################################    


    #####################################################################  
    #ddsim

    ddsim = DDSim()
Example #33
if localjob :
  genfile="/gpfs/home/ilc/miyamoto/ILDProd/examples/ddsim-v01-17-10/E0500-TDR_ws.Pea_lvv.Gwhizard-1.95.eL.pB.I37494.01_000.stdhep"

else:
  genfile="LFN:/ilc/prod/ilc/ild/test/temp1/gensplit/500-TDR_ws/3f/run002/E0500-TDR_ws.Pea_lvv.Gwhizard-1.95.eL.pB.I37494.01_000.stdhep"

now = datetime.now()
simfile="ddsim-data-%s.slcio" % now.strftime("%Y%m%d-%H%M%S")
steeringfile="/cvmfs/ilc.desy.de/sw/ILDConfig/v01-17-10-p01/StandardConfig/lcgeo_current/ddsim_steer.py"

d= DiracILC(True,"repo.rep")

### In case one wants a loop: uncomment the following.
#for i in range(2):
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("DDSim-example")  #%i)
# j.setInputSandbox(setting_file)

ddsim = DDSim()
ddsim.setVersion("ILCSoft-01-17-10")
ddsim.setDetectorModel("ILD_o1_v05")
ddsim.setInputFile(genfile)
ddsim.setRandomSeed(12345)
# ddsim.setStartFrom(1)
ddsim.setNumberOfEvents(5)  # Number of events should not exceed number of events in file.
                            # Otherwise, G4exception is thrown
# ddsim.setDebug()
ddsim.setSteeringFile(steeringfile)
ddsim.setOutputFile(simfile)
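
The DDSim example above stops before the application is attached to the job; a plausible completion, following the pattern of the other submission snippets here:

res = j.append(ddsim)
if not res['OK']:
    print res['Message']
    exit(1)

j.setOutputSandbox(["*.log"])
j.dontPromptMe()
# j.submit(d)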
Example #34
 def execute(self):
   """  First we update the site list and banned site
   """
   res = getProxyInfo(False, False)
   if not res['OK']:
     self.log.error("submitTasks: Failed to determine credentials for submission", res['Message'])
     return res
   proxyInfo = res['Value']
   owner = proxyInfo['username']
   ownerGroup = proxyInfo['group']
   self.log.info("submitTasks: Jobs will be submitted with the credentials %s:%s" % (owner, ownerGroup))    
   
   sites = self.diracadmin.getSiteMask()['Value']
   for site in sites:
     res = self.ppc.changeSiteStatus( {'SiteName' : site, 'Status' : 'OK'} )
     if not res['OK']:
       self.log.error('Cannot add or update site %s' % site)
       
   banned_sites = self.diracadmin.getBannedSites()['Value']
   for banned_site in banned_sites:
     res = self.ppc.changeSiteStatus( {'SiteName' : banned_site, 'Status' : 'Banned'} )
     if not res['OK']:
       self.log.error('Cannot mark as banned site %s' % banned_site)
       
   ##Then we need to get new installation tasks
   res = self.ppc.getInstallSoftwareTask()
   if not res['OK']:
     self.log.error('Failed to obtain task')
   task_dict = res['Value']
   for softdict in task_dict.values():
     self.log.info('Will install %s %s at %s' % (softdict['AppName'], softdict['AppVersion'], softdict['Sites']))
     for site in softdict['Sites']:
       j = UserJob()
       j.setPlatform(softdict['Platform'])
       j.dontPromptMe()
       j.setDestination(site)
       j.setJobGroup("Installation")
       j.setName('install_%s' % site)
       j._addSoftware(softdict['AppName'], softdict['AppVersion'])
       #Add the application here somehow.
       res  = j.append(SoftwareInstall())
       if not res['OK']:
         self.log.error(res['Message'])
         continue
       res = j.submit(self.dirac)
       #res = self.dirac.submit(j)
       if not res['OK']:
         self.log.error('Could not create the job')
         continue
       jobdict = {}
       jobdict['AppName'] = softdict['AppName']
       jobdict['AppVersion'] = softdict['AppVersion']
       jobdict['Platform'] = softdict['Platform']
       jobdict['JobID'] = res['Value']
       jobdict['Status'] = 'Waiting'
       jobdict['Site'] = site
       res = self.ppc.addOrUpdateJob(jobdict)
       if not res['OK']:
         self.log.error('Could not add job %s: %s' % (jobdict['JobID'], res['Message']))
   
   ##Monitor jobs
   jobs = {}
   res = self.ppc.getJobs()
   if not res['OK']:
     self.log.error('Could not retrieve jobs')
   else:
     jobs = res['Value']
     for job in jobs:
       res = self.dirac.status(job['JobID'])
       if res['OK']:
         jobstatuses = res['Value'] 
         job['Status'] = jobstatuses[job['JobID']]['Status']
         res = self.ppc.addOrUpdateJob(job)
         if not res['OK']:
           self.log.error("Failed to updated job %s: %s" % (job['JobID'], res['Message']))
       else:
         self.log.error("Failed to update job %s status" % job['JobID'])
         
   return S_OK()
Example #35

ovi = OverlayInput() 
ovi.setEnergy(500.)
ovi.setBXOverlay(300)
ovi.setGGToHadInt(0.3)
ovi.setNbSigEvtsPerJob(10)
ovi.setBkgEvtType("gghad")
ovi.setDetectorModel("CLIC_ILD_CDR")

overlay = [True,False]

for ov in overlay:
  d = DiracILC(True,"repo_overlay_%s.rep"%ov)
  for lfn in lfns:
    j = UserJob()
    steeringf = "clic_ild_cdr_steering.xml"
    if ov:
      steeringf = "clic_ild_cdr_steering_overlay.xml"
      res = j.append(ovi)
      if not res['OK']:
        print(res['Message'])
        continue
    ma = Marlin() 
    ma.setVersion("v0111Prod")
    ma.setGearFile("clic_ild_cdr.gear")
    ma.setSteeringFile(steeringf)
    ma.setInputFile("LFN:"+lfn)
    ma.setNbEvts(10)
    ma.setEnergy(500.)
    ma.setOutputRecFile("myrec_overlay_%s.slcio"%ov)
Example #36
 def getJob(self):
   """ Define a generic job, it should be always the same
   """
   from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
   myjob = UserJob()
   myjob.setName("Testing")
   myjob.setJobGroup("Tests")
   myjob.setCPUTime(30000)
   myjob.dontPromptMe()
   myjob.setLogLevel("VERBOSE")
   myjob.setPlatform("x86_64-slc5-gcc43-opt")
   myjob.setOutputSandbox(["*.log","*.xml", "*.sh"])
   myjob._addParameter( myjob.workflow, 'TestFailover', 'String', True, 'Test failoverRequest')
   myjob._addParameter( myjob.workflow, 'Platform', 'JDL', "x86_64-slc5-gcc43-opt", 'OS Platform')
   if self.ildConfig:
     myjob.setILDConfig(self.ildConfig)
   return myjob
Example #37
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
d= DiracILC(True,"repo.rep")

### In case one wants a loop: uncomment the following.
#for i in range(2):
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("example")#%i)

ma = Marlin()
ma.setVersion("v0111Prod")
ma.setSteeringFile("clic_ild_cdr_steering.xml")
ma.setGearFile("clic_ild_cdr.gear")
ma.setInputFile("LFN:/ilc/prod/clic/3tev/gghad/ILD/SIM/00000187/000/gghad_sim_187_97.slcio")
outdst = "toto.dst.slcio" #% i
outrec = "toto.rec.slcio" #% i
ma.setOutputDstFile(outdst)
ma.setOutputRecFile(outrec)

res = j.append(ma)
if not res['OK']:
    print res['Message']
    exit(1)
  
j.setOutputData([outdst,outrec],"some/path","KEK-SRM")
j.setOutputSandbox("*.log")
j.dontPromptMe()
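
The example stops after dontPromptMe(); the usual last step in these snippets is submitting against the DiracILC instance created above:

res = j.submit(d)
print res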
Example #38

basedir="/cvmfs/ilc.desy.de/sw/ILDConfig/v01-16-p05_500/StandardConfig/current/"
pandoraLikelihoodData=basedir+"PandoraLikelihoodData9EBin.xml"
bg_aver=basedir+"bg_aver.sv01-14-01-p00.mILD_o1_v05.E500-TDR_ws.PBeamstr-pairs.I230000.root"

simfile="LFN:/ilc/prod/ilc/mc-dbd/ild/sim/500-TDR_ws/6f_ttbar/ILD_o1_v05/v01-14-01-p00/sv01-14-01-p00.mILD_o1_v05.E500-TDR_ws.I37623.P6f_bbcyyc.eR.pL-00001.slcio"
gearfile=basedir+"GearOutput.xml"
outdst = "toto.dst.slcio" #% i
outrec = "toto.rec.slcio" #% i

d= DiracILC(True,"repo.rep")

### In case one wants a loop: uncomment the following.
#for i in range(2):
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("MarlinExample")#%i)



ma = Marlin()
ma.setDebug()
# ma.setLogLevel("verbose")
# ma.setILDConfig("v01-16-p05_500") 

ma.setVersion("v01-16-02")
ma.setSteeringFile("marlin_stdreco.xml")
ma.setGearFile(gearfile)
ma.setInputFile([simfile, pandoraLikelihoodData, bg_aver])
ma.setOutputDstFile(outdst)
Example #39
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

from DIRAC import exit as dexit

d = DiracILC(True,"repo.rep")


n_evts = 500
n_evts_per_job= 100
n_jobs = n_evts/n_evts_per_job

for i in range(n_jobs):
  j = UserJob()

  mo = Mokka()
  mo.setEnergy(3000)
  mo.setVersion("0706P08")
  mo.setSteeringFile("clic_ild_cdr.steer")
  mo.setMacFile("particlegun_electron.mac")
  mo.setOutputFile("MyFile.slcio")
  mo.setNbEvts(n_evts_per_job)
  res = j.append(mo)
  if not res['OK']:
    print res['Message']
    break
  ma = Marlin()
  ma.setVersion("v0111Prod")
  ma.setSteeringFile("clic_ild_cdr_steering.xml")
Example #40
def subOverlay():

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    nbevts = 0  # To analize all input events
    outputFilePrefix = "overlay_example" if _clip.outputFilePrefix == "" else _clip.outputFilePrefix
    outputDir = _clip.outputDir
    inputFile = _clip.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim does not given.")
        exit(-1)

    recfile = outputFilePrefix + ".rec.slcio"
    dstfile = outputFilePrefix + ".dst.slcio"
    detector_model = "ILD_l5_o1_v02"
    key = detector_model.split('_')
    sim_detectorModel = "_".join([key[0], key[1], key[3]])

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myoverlayjob")
    job.setName("myoverlay")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    # job.setBannedSites([])         # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    # Create Overlay application
    ovldata = [{
        "ProcessorName": "BgOverlayWW",
        "evttype": "aa_lowpt_WW",
        "ProdID": 10237,
        "expBG": 0.211,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayWB",
        "evttype": "aa_lowpt_WB",
        "ProdID": 10241,
        "expBG": 0.24605,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayBW",
        "evttype": "aa_lowpt_BW",
        "ProdID": 10239,
        "expBG": 0.243873,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayBB",
        "evttype": "aa_lowpt_BB",
        "ProdID": 10235,
        "expBG": 0.35063,
        "subdir": "000"
    }, {
        "ProcessorName": "PairBgOverlay",
        "evttype": "seeablepairs",
        "ProdID": 10233,
        "expBG": 1.0,
        "subdir": "100"
    }]

    BXOverlay = 1
    NbSigEvtsPerJob = 100
    numberOfSignalEvents = NbSigEvtsPerJob
    basebkgpath = "/ilc/prod/ilc/mc-opt-3/ild/sim/500-TDR_ws"
    energy = "500"

    for ovl in ovldata:
        print "### OverlayInput ... " + ovl["ProcessorName"]
        ovlapp = OverlayInput()
        ovlpath = "%s/%s/%s/v02-00-01/%8.8d/%s" % \
       ( basebkgpath, ovl["evttype"], sim_detectorModel, ovl["ProdID"] , ovl["subdir"] )
        print "    OverlayPath ... " + ovlpath
        ovlapp.setMachine("ilc_dbd")
        # ovlapp.setEnergy(energy)
        # ovlapp.setDetectorModel(sim_detectorModel)
        ovlapp.setProcessorName(ovl["ProcessorName"])
        ovlapp.setBkgEvtType(ovl["evttype"])
        ovlapp.setPathToFiles(ovlpath)
        ovlapp.setGGToHadInt(ovl["expBG"])
        ovlapp.setBXOverlay(BXOverlay)
        ovlapp.setNbSigEvtsPerJob(NbSigEvtsPerJob)
        ovlapp.setNumberOfSignalEventsPerJob(numberOfSignalEvents)
        res = job.append(ovlapp)
        if not res['OK']:
            print res['Message']
            exit(1)

    # Create Marlin application
    marlin = Marlin()
    marlin.setVersion("ILCSoft-02-00-02_gcc49")
    marlin.setDetectorModel(detector_model)
    marlin.setSteeringFile("MarlinStdReco.xml")
    marlin.setInputFile(inputFile)
    marlin.setNumberOfEvents(nbevts)
    marlin.setOutputDstFile(dstfile)
    marlin.setOutputRecFile(recfile)
    extraCLIArguments = " --constant.DetectorModel=%s " % detector_model
    extraCLIArguments += " --constant.RunOverlay=true --constant.CMSEnergy=%s " % str(
        energy)
    extraCLIArguments += " --global.Verbosity=MESSAGE "
    marlin.setExtraCLIArguments(extraCLIArguments)

    job.append(marlin)

    if outputDir != "":
        job.setOutputData([dstfile, recfile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Example #41
def subWhizard2():

    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin, Whizard2

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "E500.P2f_bB.GWhizard2.I100000.e0.p0.n001.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setInputSandbox(["pythia6-parameters.sin", "P2f_qqbar.sin"])
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setOutputData([outputFile])
    job.setJobGroup("mywhiz2")
    job.setName("mywhizard2")

    whiz = Whizard2()
    whiz.setVersion("2.7.0")
    whiz.setNumberOfEvents(nbevts)
    whiz.setEvtType("P2f_bB")
    whiz.setProcessVariables("P2f_bB")
    # whiz.setRandomSeed(15)
    whiz.setSinFile("P2f_qqbar.sin")
    whiz.setOutputFile(outputFile)
    job.append(whiz)

    if outputDir != "":
        print " outputDir = " + outputDir
        print " outputSE = " + outputSE
        job.setOutputData([outputFile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Example #42
print "found %s files" % len(lfns)

ovi = OverlayInput()
ovi.setEnergy(500.)
ovi.setBXOverlay(300)
ovi.setGGToHadInt(0.3)
ovi.setNbSigEvtsPerJob(10)
ovi.setBkgEvtType("gghad")
ovi.setDetectorModel("CLIC_ILD_CDR")

overlay = [True, False]

for ov in overlay:
    d = DiracILC(True, "repo_overlay_%s.rep" % ov)
    for lfn in lfns:
        j = UserJob()
        steeringf = "clic_ild_cdr_steering.xml"
        if ov:
            steeringf = "clic_ild_cdr_steering_overlay.xml"
            res = j.append(ovi)
            if not res['OK']:
                print res['Message']
                continue
        ma = Marlin()
        ma.setVersion("v0111Prod")
        ma.setGearFile("clic_ild_cdr.gear")
        ma.setSteeringFile(steeringf)
        ma.setInputFile("LFN:" + lfn)
        ma.setNbEvts(10)
        ma.setEnergy(500.)
        ma.setOutputRecFile("myrec_overlay_%s.slcio" % ov)
Example #43
def main(argv):
    # Input arguments
    ildconfig_version   = "$ILDCONFIGVER"
    ilcsoft_version     = "$ILCSOFTVER"

    evts_per_run    = $EVTSPERRUN
    detector_model  = "$DETECTOR"
    sim_input       = "$SIMINPUT"
    process_name    = "$PROCESS"

    index           = $IND

    sim_input = diracpath_from_pnfspath( sim_input )
    sim_detector_model = detector_model_wo_option( detector_model )

    job_group = ilcsoft_version + "_" + ildconfig_version + "_" + process_name + "_" + detector_model
    dirac = DiracILC(True,job_group+".rep")

    # outputs to be saved onto grid SE
    RECoutput = []

    # DDSim

    evtStart   = (index-1)*evts_per_run
    evtEnd     = index*evts_per_run - 1
    RandSeed = random.randrange(11623, 99999)

    lcinputSIM  = "LFN:" + sim_input
    lcoutputSIM = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_SIM.slcio"%(str(evtStart),(str)(evtEnd))

    sim = DDSim()
    sim.setVersion(ilcsoft_version)

    sim.setDetectorModel(sim_detector_model)
    sim.setInputFile(lcinputSIM)
    sim.setSteeringFile("ddsim_steer.py")
    sim.setNumberOfEvents(evts_per_run)
    sim.setRandomSeed(RandSeed)
    sim.setEnergy(1000)
    sim.setStartFrom(evtStart)
    sim.setOutputFile(lcoutputSIM)

    # Marlin
    lcoutputDST = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_DST.slcio"%(str(evtStart),(str)(evtEnd))

    ma = Marlin()
    ma.setVersion(ilcsoft_version)
    ma.setDetectorModel(detector_model)
    ma.setSteeringFile("MarlinStdReco.xml")
    ma.setExtraCLIArguments( "--constant.lcgeo_DIR=$lcgeo_DIR --constant.DetectorModel={} --global.MaxRecordNumber=0".format(detector_model) )
    ma.setLogFile("marlin.log")
    ma.getInputFromApp(sim)
    ma.setEnergy(1000)
    ma.setOutputDstFile(lcoutputDST)

    RECoutput.append(lcoutputDST)

    # ILCDirac user job
    job = UserJob()
    job.setName("user_sim_reco")

    job.setJobGroup(job_group)

    job.setILDConfig(ildconfig_version)
    job.setCPUTime(86400)

    tmp_file_name = process_name + "_sim_reco_job_tmp.py"
    job.setInputSandbox([tmp_file_name])
    job.setOutputSandbox(["*.log","MarlinStdRecoParsed.xml","marlin*.xml","*.py "])

    job.dontPromptMe()
    job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us'])

    # run simulation job
    simres = job.append(sim)
    if not simres['OK']:
            print 'Not ok appending ddsim to job:', simres['Message']
            quit()


    # run Marlin reco jobs
    mares = job.append(ma)
    if not mares['OK']:
            print 'Not ok appending Marlin to job:', mares['Message']
            quit()

    job.setOutputData(RECoutput,"ILDPerformance/WWZZSeparation/{}_ILDConfig_{}_{}".format(ilcsoft_version,ildconfig_version,detector_model),"DESY-SRM")
    print RECoutput

    submit_output = job.submit(dirac)
    print submit_output