Exemplo n.º 1
0
def subDDSim():
    """Build and submit a DDSim simulation job via DIRAC.

    Job parameters (number of events, input/output file names, output
    directory, local-vs-grid submission) are taken from the module-level
    ``_clip`` command-line options.  Exits with -1 when no input file is
    given.
    """

    # Decide parameters for a job.
    # NOTE(review): a dead assignment of "KEK-SRM" (immediately overwritten)
    # was removed; the effective storage element has always been KEK-DISK.
    outputSE = "KEK-DISK"

    isLocal = _clip.isLocal
    nbevts = 10 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "ddsim_example.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir
    inputFile = _clip.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission
    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"])  # sites not to submit the job to
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    # Configure the DDSim application itself.
    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v02")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer.py "
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)

    # ddsim.setRandomSeed(1234565)
    # ddsim.setStartFrom(20)        # Number of events to skip before starting ddsim

    job.append(ddsim)

    # Register output data only when an output directory was requested.
    if outputDir != "":
        job.setOutputData([outputFile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Exemplo n.º 2
0
    def __init__(self, clip, params):
        """Store CLI options and test parameters for later job creation.

        :param clip: parsed command-line options (submitMode, testOverlay, ...)
        :param dict params: per-application test configuration; ``mokkaVersion``
            and ``rootVersion`` are required keys, everything else is optional.
        """
        self.clip = clip
        self.ildConfig = params.get("ildConfig", None)
        self.alwaysOverlay = params.get("alwaysOverlay", False)
        # Overlay runs either when explicitly requested or when forced globally.
        self.runOverlay = self.clip.testOverlay or self.alwaysOverlay
        self.mokkaVersion = params["mokkaVersion"]
        self.mokkaSteeringFile = params.get("mokkaSteeringFile")
        self.detectorModel = params.get("detectorModel")
        self.marlinVersion = params.get("marlinVersion")
        self.marlinSteeringFile = params.get("marlinSteeringFile")
        self.ddsimVersion = params.get("ddsimVersion")
        self.ddsimDetectorModel = params.get("ddsimDetectorModel")
        self.ddsimInputFile = params.get("ddsimInputFile")
        self.marlinInputdata = params.get("marlinInputdata")
        self.gearFile = params.get("gearFile")
        self.lcsimVersion = params.get("lcsimVersion")
        self.steeringFileVersion = params.get("steeringFileVersion", None)
        self.rootVersion = params["rootVersion"]

        self.whizard2Version = params.get("whizard2Version")
        self.whizard2SinFile = params.get("whizard2SinFile")

        self.energy = params.get("energy")
        self.backgroundType = params.get("backgroundType")
        self.machine = params.get("machine")

        # NOTE(review): duplicate re-assignments of gearFile,
        # marlinSteeringFile and marlinVersion (identical to the ones above)
        # were removed here.

        self.lcsimPreSteeringFile = params.get("lcsimPreSteeringFile")
        self.lcsimPostSteeringFile = params.get("lcsimPostSteeringFile")

        self.fccSwPath = params.get("fccSwPath")
        self.fccSwSteeringFile = params.get("fccSwSteeringFile")

        self.fccAnalysisSteeringFile = params.get("fccAnalysisSteeringFile")

        ### other things needed to run tests
        self.log = gLogger.getSubLogger("JobCreater")

        from ILCDIRAC.Interfaces.API.DiracILC import DiracILC, __RCSID__ as drcsid
        from ILCDIRAC.Interfaces.API.NewInterface.UserJob import __RCSID__ as jrcsid
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import __RCSID__ as apprcsid

        # In local submission mode, print the RCS IDs of the interfaces used.
        if self.clip.submitMode == "local":
            self.log.notice("")
            self.log.notice("       DIRAC RCSID:", drcsid)
            self.log.notice("         Job RCSID:", jrcsid)
            self.log.notice("Applications RCSID:", apprcsid)
            self.log.notice("")

        self.diracInstance = DiracILC(False, 'tests.rep')
        self.jobList = {}
Exemplo n.º 3
0
def create_job(inputData, saveName, outputDir, dontPromptMe):
    """Create and submit a Marlin analysis job over *inputData*.

    :param inputData: list of input LFNs (possibly prefixed with "LFN:")
    :param str saveName: base name for the .slcio/.root output files
    :param str outputDir: output path registered with the storage element
    :param bool dontPromptMe: suppress interactive confirmation prompts
    :returns: False (the submission result is not propagated)
    """
    slcioFile = saveName + '.slcio'
    rootFile = saveName + '.root'

    # Remove stale outputs so the new job can register its files.
    if check_file_existence(outputDir, slcioFile, dontPromptMe):
        remove_file(outputDir, slcioFile, dontPromptMe)
    if check_file_existence(outputDir, rootFile, dontPromptMe):
        remove_file(outputDir, rootFile, dontPromptMe)

    dIlc = DiracILC()

    job = UserJob()
    job.setOutputSandbox(['*.out', '*.log', '*.sh', '*.py', '*.xml'])
    if SAVE_SLCIO:
        job.setOutputData([slcioFile, rootFile],
                          OutputPath=outputDir,
                          OutputSE=STORAGE_SE)
    else:
        job.setOutputData(rootFile, OutputPath=outputDir, OutputSE=STORAGE_SE)
    job.setJobGroup('myMarlinRun1')
    job.setName('MyMarlinJob1')
    # job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk'])
    job.setInputSandbox(['LFN:/ilc/user/a/amaier/mylibs.tar.gz'])

    marl = Marlin()
    marl.setVersion('ILCSoft-2017-11-15_gcc62')

    marl.setInputFile(inputData)
    # BUGFIX: str.lstrip('LFN:') strips any leading 'L', 'F', 'N' or ':'
    # characters (lstrip takes a character set, not a prefix), mangling
    # LFNs that start with those letters.  Strip the literal prefix instead.
    job.setInputData([x[len('LFN:'):] if x.startswith('LFN:') else x
                      for x in inputData])
    # marl.setInputFile(['LFN:/ilc/prod/clic/1.4tev/qq_ln/ILD/DST/00003249/010/qq_ln_dst_3249_10000.slcio'])
    marl.setSteeringFile('marlin/FullChain.xml')
    # marl.setSteeringFile('marlin/FullChainNewDetModel.xml')
    marl.setOutputFile(slcioFile)
    gearFile = '/afs/cern.ch/user/a/amaier/projects/CLIC_analysis/grid/marlin/clic_ild_cdr.gear'
    if not os.path.isfile(gearFile):
        print('Error: gear file', gearFile,
              'does not exist! Abort submission.')
        return
    marl.setGearFile(gearFile)
    marl.setExtraCLIArguments(
        "--MyNtupleMaker.OutputFileName={rootOutfile}".format(
            rootOutfile=rootFile))
    # marl.setNumberOfEvents(1000)

    job.append(marl)
    if dontPromptMe:
        job.dontPromptMe()
    job.submit(dIlc)

    return False
Exemplo n.º 4
0
    def submit(self, diracinstance=None, mode="wms"):
        """Add the fully configured job to DIRAC.

        Call this once the job is defined and all applications are appended.

        :param diracinstance: DiracILC instance
        :type diracinstance: ~ILCDIRAC.Interfaces.API.DiracILC.DiracILC
        :param str mode: "wms" (default), "agent", or "local"

        .. note ::
          The *local* mode runs the job on the submission machine; use it to test submission scripts.
        """
        # Perform job splitting first, if requested; abort on a failed split.
        if self.splittingOption:
            splitResult = self._split()
            if 'OK' in splitResult and not splitResult['OK']:
                return splitResult

        # Credential checks: a user proxy of the right group is mandatory.
        if not self.proxyinfo['OK']:
            badProxyMessage = "Not allowed to submit a job, you need a %s proxy." % self.usergroup
            self.log.error(badProxyMessage)
            return self._reportError(badProxyMessage, self.__class__.__name__)

        if 'group' not in self.proxyinfo['Value']:
            self.log.error(
                "Could not determine group, you do not have the right proxy.")
            return self._reportError(
                "Could not determine group, you do not have the right proxy.")

        proxyGroup = self.proxyinfo['Value']['group']
        if proxyGroup not in self.usergroup:
            self.log.error(
                "Not allowed to submit a job, you need a %s proxy." %
                self.usergroup)
            return self._reportError(
                "Not allowed to submit job, you need a %s proxy." %
                self.usergroup, self.__class__.__name__)

        # Finalise the workflow before handing the job to DIRAC.
        workflowResult = self._addToWorkflow()
        if not workflowResult['OK']:
            return workflowResult
        self.oktosubmit = True
        # Fall back to a fresh DiracILC instance when none was supplied.
        self.diracinstance = diracinstance if diracinstance else DiracILC()
        return self.diracinstance.submit(self, mode)
Exemplo n.º 5
0
def testAndProbeSites():
    """Submit a CheckWNs probe job to every computing element found."""
    clip = Params()
    clip.registerSwitches()
    Script.parseCommandLine()

    from DIRAC import gLogger, exit as dexit

    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import CheckWNs
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

    from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getQueues

    queueResult = getQueues(siteList=clip.site, ceList=clip.ce)
    if not queueResult['OK']:
        gLogger.error("Failed getting the queues", queueResult['Message'])
        dexit(1)

    # Flatten the site -> {CE: info} mapping into a plain list of CE names.
    computingElements = []
    for siteCEs in queueResult['Value'].values():
        computingElements.extend(siteCEs.keys())

    gLogger.notice("Found %s CEs to look at." % len(computingElements))

    dirac = DiracILC(True, "SiteProbe.rep")

    # One small probe job per CE; a failed append skips that CE,
    # a failed submission aborts the whole run.
    for targetCE in computingElements:
        probeJob = UserJob()
        probeJob.setDestinationCE(targetCE)
        appendResult = probeJob.append(CheckWNs())
        if not appendResult['OK']:
            gLogger.error(appendResult['Message'])
            continue
        probeJob.setOutputSandbox("*.log")
        probeJob.setCPUTime(30000)
        probeJob.dontPromptMe()
        submitResult = probeJob.submit(dirac)
        if not submitResult['OK']:
            gLogger.error("Failed to submit job, aborting")
            dexit(1)

    dexit(0)
Exemplo n.º 6
0
def subWhizard2():
    """Build and submit a Whizard2 event-generation job via DIRAC.

    Job parameters come from the module-level ``_clip`` command-line
    options; output goes to KEK-SRM when an output directory is given.
    """

    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin, Whizard2

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "E500.P2f_bB.GWhizard2.I100000.e0.p0.n001.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir

    # Create DIRAC objects for job submission
    dIlc = DiracILC()

    job = UserJob()
    job.setInputSandbox(["pythia6-parameters.sin", "P2f_qqbar.sin"])
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setOutputData([outputFile])
    job.setJobGroup("mywhiz2")
    job.setName("mywhizard2")

    whiz = Whizard2()
    whiz.setVersion("2.7.0")
    whiz.setNumberOfEvents(nbevts)
    whiz.setEvtType("P2f_bB")
    whiz.setProcessVariables("P2f_bB")
    # whiz.setRandomSeed(15)
    whiz.setSinFile("P2f_qqbar.sin")
    whiz.setOutputFile(outputFile)
    job.append(whiz)

    if outputDir != "":
        # Single-argument print() is valid under both Python 2 and 3;
        # the bare py2 print statements were replaced for consistency
        # with the rest of the file.
        print(" outputDir = " + outputDir)
        print(" outputSE = " + outputSE)
        job.setOutputData([outputFile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Exemplo n.º 7
0
def subDDSim(clip1):
    """Build a configured DDSim application from the options in *clip1*.

    Unlike the other submission helpers, this returns the configured
    ``DDSim`` application so the caller can append it to its own job.

    :param clip1: parameter object exposing isLocal, numberOfEvents,
        inputFile, outputFile and outputDir attributes
    :returns: configured ``DDSim`` application
    """

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = clip1.isLocal
    # The old '0 if x == 0 else x' / '"" if x == "" else x' conditionals
    # were identity no-ops and have been collapsed.
    nbevts = clip1.numberOfEvents
    outputFile = clip1.outputFile
    outputDir = clip1.outputDir
    inputFile = clip1.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission.
    # NOTE(review): this UserJob is configured but never submitted or
    # returned here — kept for behavioural compatibility; the caller
    # builds its own job around the returned DDSim application.
    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"])  # sites not to submit the job to

    # Configure the DDSim application itself.
    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v05")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer_July26.py"
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)

    return ddsim
Exemplo n.º 8
0
def _createLFNList():
    """Print the output-data LFNs recorded in the job repository file."""
    options = _Params()
    options.registerSwitches()
    Script.parseCommandLine(ignoreErrors=False)
    from DIRAC import gLogger

    repoFile = options.repo
    if not repoFile:
        Script.showHelp()
        dexit(2)
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    diracInstance = DiracILC(True, repoFile)

    diracInstance.monitorRepository(False)
    outputLFNs = diracInstance.retrieveRepositoryOutputDataLFNs()
    # Emit the list as a ready-to-paste Python literal.
    gLogger.notice("lfnlist=[")
    for outputLFN in outputLFNs:
        gLogger.notice('"LFN:%s",' % outputLFN)
    gLogger.notice("]")
    dexit(0)
Exemplo n.º 9
0
    def __init__(self):
        """Set up the DIRAC interface, a template FCC user job, and EOS access."""

        # DIRAC interface (no repository file).
        self.dIlc = DiracILC(False)

        # Template user job; applications are appended to it elsewhere.
        self.job = UserJob()
        self.job.setJobGroup("FCC")
        self.job.setName("FCC APP")
        self.job.setOutputSandbox(["*.log", '*.root'])
        #self.job.setDestination('LCG.DESY-HH.de')

        #EOS public location
        self.EOS_MGM_URL = 'root://eospublic.cern.ch'
        # Shell line exported before EOS commands to select the endpoint.
        self.setEOS = 'export EOS_MGM_URL=' + self.EOS_MGM_URL
        # XRootD client pointed at the EOS endpoint (port 1094).
        self.myclient = client.FileSystem(self.EOS_MGM_URL + ':1094')

        # Sandbox bookkeeping: what to upload and how it is filtered.
        self.InputSandbox = []
        self.folders_to_upload = []
        self.filtered_extensions = []
        self.excludes_or_includes = []
        # Local scratch directory used to stage files before upload.
        self.temp_cwd = os.path.join(os.getcwd(), 'fcc_temp_dirac')
Exemplo n.º 10
0
# Repository/bookkeeping names derived from the job group and tag
# (nameJobGroup and nameTag are defined earlier in the script).
nameDir = nameJobGroup+'_files'+nameTag
nameRepositoryFile = nameJobGroup+nameTag+'_repository.rep'


#####################################################################

#####################################################################
# set up the DIRAC environment
import os
import sys

from DIRAC.Core.Base import Script  # DIRAC environment
Script.parseCommandLine()  # DIRAC environment

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC  # job receiver class
dirac = DiracILC(True,nameRepositoryFile)  # repository file is needed to retrieve jobs later
#####################################################################

#####################################################################
# job definition
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
#####################################################################

#####################################################################
# file catalog
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
#####################################################################

#####################################################################
# ddsim application
from ILCDIRAC.Interfaces.API.NewInterface.Applications import DDSim
Exemplo n.º 11
0
def all_jobs(name):
    """Submit the full CAIN -> conversion -> DDSim chain for one CAIN input.

    :param str name: path of the form .../<dirname>/<subdir>/<cain_file>;
        its components select the input file and the output layout.

    Exits the process when appending an application fails; prints the
    DIRAC job ID on success.
    """
    d = DiracILC(True, "repo.rep")

    ################################################
    j = UserJob()
    j.setJobGroup("PM1")
    j.setName("Exec1")
    banned_sites = [
        "OSG.BNL.us", "LCG.UKI-NORTHGRID-LIV-HEP.uk", "OSG.UCSDT2.us",
        "LCG.SCOTGRIDDURHAM.uk", "LCG.NIKHEF.nl", "LCG.UKI-SOUTHGRID-RALPP.uk",
        "LCG.GRIF.fr", "LCG.Manchester.uk", "LCG.UKI-LT2-IC-HEP.uk",
        "LCG.Weizmann.il"
    ]

    j.setBannedSites(banned_sites)

    caindir = name
    # Everything the three applications need on the worker node.
    indata = [
        'LFN:/ilc/user/a/amustahid/cain.exe',
        str(caindir), 'LFN:/ilc/user/a/amustahid/runcain.sh',
        'LFN:/ilc/user/a/amustahid/convert_pairs_lcio.py',
        'LFN:/ilc/user/a/amustahid/pyLCIO.tar.gz',
        '/home/belle2/mustahid/useful/my.sh', './splitInput.py',
        './subddsim.py', './ddsim_steer_July26.py', './ILD_l5_v05.xml',
        './my2.sh', './dbd_500GeV.nung_1.xml',
        'LFN:/ilc/user/a/amustahid/myProcessors.tar.gz', './create_dir.py',
        './conf.py', './util.py', './testcain.sh', './beam_250.i'
    ]
    j.setInputSandbox(indata)

    ################################################
    # First application: run CAIN via the runcain.sh wrapper.
    appre = GenericApplication()
    # Decompose the path: .../<dirname>/<subdir>/<cain_name>
    name = name.split('/')
    cain_name = name[-1]
    subdir = name[-2]
    dirname = name[-3]

    appre.setScript("LFN:/ilc/user/a/amustahid/runcain.sh")
    # Keep only the first three dot-separated components of the file name.
    ifile = cain_name.split('.')
    ifile = ifile[0] + '.' + ifile[1] + '.' + ifile[2]

    appre.setArguments(ifile)
    logf = ifile + '_' + subdir + '.log'
    appre.setLogFile(logf)
    appre.setDebug(debug=True)
    appre.setName('CAIN')
    res = j.append(appre)
    if not res['OK']:
        # print() with one argument is valid under both Python 2 and 3.
        print(res['Message'])
        exit(1)

    ################################################
    # Second application: the my.sh conversion step.
    ap = GenericApplication()
    ap.setScript('my.sh')
    ap.setLogFile('my.log')
    ap.setDebug(debug=True)
    ap.setName('my')
    res = j.append(ap)
    if not res['OK']:
        print(res['Message'])
        exit(1)

    outfile = 'incoherent_pair.dat'
    appre.setOutputFile(outfile)

    ################################################
    # Third application: DDSim over the converted incoherent-pair file.
    direc = 'incoherent_pair'
    inputFile = direc + '/' + 'inco_pair_split.slcio'

    base = '.'
    outdir = base + '/' + dirname + '/Run_7'
    geant_name = ifile
    outputFile = geant_name + '_' + subdir + '.slcio'

    nbevents = 100
    clip = _Params(nbevents, inputFile, outputFile, outdir)
    ddsim = subDDSim(clip)
    ################################################

    res = j.append(ddsim)
    if not res['OK']:
        print(res['Message'])
        exit(1)

    j.setOutputData(outputFile, outdir, "KEK-SRM")
    j.setOutputSandbox(["*.log", "*.dat", "*.slcio"])
    j.dontPromptMe()
    res = j.submit(d)
    #res = j.submit(d, mode='local')
    if res['OK']:
        print(str(res["Value"]))
    else:
        print("Failed to submit Dirac job. return message was as follows.")
        pprint.pprint(res)
Exemplo n.º 12
0
    elif opt in ('v', 'version'):
        slicVersion = arg
    elif opt in ('w', 'workflow'):
        workflowName = arg
    elif opt in ('n', 'nocheck'):
        checkMeta = False

# Both a detector name and a production ID are mandatory.
# 'is None' replaces the non-idiomatic '== None' comparisons.
if detectorName is None or prodID is None:
    Script.showHelp()
    sys.exit(2)

from ILCDIRAC.Interfaces.API.NewInterface.ProductionJob import ProductionJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLIC, LCSIM, SLICPandora, OverlayInput
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
dirac = DiracILC()

# Build the metadata query for the file catalog from the CLI options.
meta = {}
meta['ProdID'] = prodID
if eventType:
    meta['EvtType'] = eventType
if energy:
    meta['Energy'] = energy
if dataType:
    meta['Datatype'] = dataType

fc = FileCatalogClient()
res = fc.getCompatibleMetadata(meta)
if not res['OK']:
    # Single-argument print() works under both Python 2 and 3.
    print("Error looking up the catalog for metadata")
    exit(2)
Exemplo n.º 13
0
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

from DIRAC import exit as dexit

# DiracILC with a repository file so submitted jobs can be tracked later.
d = DiracILC(True, "repo.rep")

# Split the total number of events across several identical jobs.
# NOTE(review): '/' is integer division under Python 2 (this script uses
# py2 print statements below); under Python 3 it would yield a float and
# break range() — confirm the intended interpreter.
n_evts = 500
n_evts_per_job = 100
n_jobs = n_evts / n_evts_per_job

for i in range(n_jobs):
    j = UserJob()

    # Mokka simulation step: 3 TeV, CLIC ILD CDR steering, electron gun.
    mo = Mokka()
    mo.setEnergy(3000)
    mo.setVersion("0706P08")
    mo.setSteeringFile("clic_ild_cdr.steer")
    mo.setMacFile("particlegun_electron.mac")
    mo.setOutputFile("MyFile.slcio")
    mo.setNbEvts(n_evts_per_job)
    res = j.append(mo)
    if not res['OK']:
        print res['Message']
        break
    # Marlin reconstruction step (its configuration continues below).
    ma = Marlin()
    ma.setVersion("v0111Prod")
Exemplo n.º 14
0
    def __init__(self, *args, **kwargs):
        AgentModule.__init__(self, *args, **kwargs)
        self.name = 'DataRecoveryAgent'
        self.enabled = False

        self.productionsToIgnore = self.am_getOption("ProductionsToIgnore", [])
        self.transformationTypes = self.am_getOption("TransformationTypes", [
            'MCReconstruction', 'MCSimulation', 'MCReconstruction_Overlay',
            'MCGeneration'
        ])
        self.transformationStatus = self.am_getOption("TransformationStatus",
                                                      ['Active', 'Completing'])
        self.shifterProxy = self.am_setOption('shifterProxy', 'DataManager')

        self.jobStatus = [
            'Failed', 'Done'
        ]  ##This needs to be both otherwise we cannot account for all cases

        self.jobMon = JobMonitoringClient()
        self.fcClient = FileCatalogClient()
        self.tClient = TransformationClient()
        self.reqClient = ReqClient()
        self.diracILC = DiracILC()
        self.inputFilesProcessed = set()
        self.todo = {'MCGeneration':
                     [ dict( Message="MCGeneration: OutputExists: Job 'Done'",
                             ShortMessage="MCGeneration: job 'Done' ",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and job.status=='Failed',
                             Actions=lambda job,tInfo: [ job.setJobDone(tInfo) ]
                           ),
                       dict( Message="MCGeneration: OutputMissing: Job 'Failed'",
                             ShortMessage="MCGeneration: job 'Failed' ",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and job.status=='Done',
                             Actions=lambda job,tInfo: [ job.setJobFailed(tInfo) ]
                           ),
                       # dict( Message="MCGeneration, job 'Done': OutputExists: Task 'Done'",
                       #       ShortMessage="MCGeneration: job already 'Done' ",
                       #       Counter=0,
                       #       Check=lambda job: job.allFilesExist() and job.status=='Done',
                       #       Actions=lambda job,tInfo: [ tInfo._TransformationInfo__setTaskStatus(job, 'Done') ]
                       #     ),
                     ],
                     'OtherProductions':
                     [ \
                   ## should always be first!

                       dict( Message="One of many Successful: clean others",
                             ShortMessage="Other Tasks --> Keep",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and job.otherTasks and job.inputFile not in self.inputFilesProcessed,
                             Actions=lambda job,tInfo: [ self.inputFilesProcessed.add(job.inputFile), job.setJobDone(tInfo), job.setInputProcessed(tInfo) ]
                           ),
                       dict( Message="Other Task processed Input, no Output: Fail",
                             ShortMessage="Other Tasks --> Fail",
                             Counter=0,
                             Check=lambda job: job.inputFile in self.inputFilesProcessed and job.allFilesMissing() and job.status!='Failed',
                             Actions=lambda job,tInfo: [ job.setJobFailed(tInfo) ]
                           ),
                       dict( Message="Other Task processed Input: Fail and clean",
                             ShortMessage="Other Tasks --> Cleanup",
                             Counter=0,
                             Check=lambda job: job.inputFile in self.inputFilesProcessed and not job.allFilesMissing(),
                             Actions=lambda job,tInfo: [ job.setJobFailed(tInfo), job.cleanOutputs(tInfo) ]
                           ),
                       dict( Message="InputFile missing: mark job 'Failed', mark input 'Deleted', clean",
                             ShortMessage="Input Missing --> Job 'Failed, Input 'Deleted', Cleanup",
                             Counter=0,
                             Check=lambda job: job.inputFile and not job.inputFileExists and job.fileStatus != "Deleted",
                             Actions=lambda job,tInfo: [ job.cleanOutputs(tInfo), job.setJobFailed(tInfo), job.setInputDeleted(tInfo) ]
                           ),
                       dict( Message="InputFile Deleted, output Exists: mark job 'Failed', clean",
                             ShortMessage="Input Deleted --> Job 'Failed, Cleanup",
                             Counter=0,
                             Check=lambda job: job.inputFile and not job.inputFileExists and job.fileStatus == "Deleted" and not job.allFilesMissing(),
                             Actions=lambda job,tInfo: [ job.cleanOutputs(tInfo), job.setJobFailed(tInfo) ]
                           ),
                       ## All Output Exists
                       dict( Message="Output Exists, job Failed, input not Processed --> Job Done, Input Processed",
                             ShortMessage="Output Exists --> Job Done, Input Processed",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus!="Processed" and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setJobDone(tInfo), job.setInputProcessed(tInfo) ]
                           ),
                       dict( Message="Output Exists, job Failed, input Processed --> Job Done",
                             ShortMessage="Output Exists --> Job Done",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus=="Processed" and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setJobDone(tInfo) ]
                           ),
                       dict( Message="Output Exists, job Done, input not Processed --> Input Processed",
                             ShortMessage="Output Exists --> Input Processed",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and \
                                               not job.otherTasks and \
                                               job.status=='Done' and \
                                               job.fileStatus!="Processed" and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setInputProcessed(tInfo) ]
                           ),
                       ## outputmissing
                       dict( Message="Output Missing, job Failed, input Assigned, MaxError --> Input MaxReset",
                             ShortMessage="Max ErrorCount --> Input MaxReset",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFile not in self.inputFilesProcessed and \
                                               job.inputFileExists and \
                                               job.errorCount > MAXRESET,
                             Actions=lambda job,tInfo: [ job.setInputMaxReset(tInfo) ]
                           ),
                       dict( Message="Output Missing, job Failed, input Assigned --> Input Unused",
                             ShortMessage="Output Missing --> Input Unused",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFile not in self.inputFilesProcessed and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setInputUnused(tInfo) ]
                           ),
                       dict( Message="Output Missing, job Done, input Assigned --> Job Failed, Input Unused",
                             ShortMessage="Output Missing --> Job Failed, Input Unused",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Done' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFile not in self.inputFilesProcessed and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setInputUnused(tInfo), job.setJobFailed(tInfo) ]
                           ),
                       ## some files missing, needing cleanup. Only checking for
                       ## assigned, because processed could mean an earlier job was
                       ## succesful and this one is just the duplicate that needed
                       ## to be removed! But we check for other tasks earlier, so
                       ## this should not happen
                       dict( Message="Some missing, job Failed, input Assigned --> cleanup, Input 'Unused'",
                             ShortMessage="Output Missing --> Cleanup, Input Unused",
                             Counter=0,
                             Check=lambda job: job.someFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [job.cleanOutputs(tInfo),job.setInputUnused(tInfo)]
                             #Actions=lambda job,tInfo: []
                           ),
                       dict( Message="Some missing, job Done, input Assigned --> cleanup, job Failed, Input 'Unused'",
                             ShortMessage="Output Missing --> Cleanup, Job Failed, Input Unused",
                             Counter=0,
                             Check=lambda job: job.someFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Done' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [job.cleanOutputs(tInfo),job.setInputUnused(tInfo),job.setJobFailed(tInfo)]
                             #Actions=lambda job,tInfo: []
                           ),
                       dict( Message="Some missing, job Done --> job Failed",
                             ShortMessage="Output Missing, Done --> Job Failed",
                             Counter=0,
                             Check=lambda job: not job.allFilesExist() and job.status=='Done',
                             Actions=lambda job,tInfo: [job.setJobFailed(tInfo)]
                           ),
                       dict ( Message="Something Strange",
                              ShortMessage="Strange",
                              Counter=0,
                              Check=lambda job: job.status not in ("Failed","Done"),
                              Actions=lambda job,tInfo: []
                            ),
                       ##should always be the last one!
                       dict ( Message="Failed Hard",
                              ShortMessage="Failed Hard",
                              Counter=0,
                              Check=lambda job: False, ## never
                              Actions=lambda job,tInfo: []
                            ),
                     ]
                    }
        self.jobCache = defaultdict(lambda: (0, 0))
        self.printEveryNJobs = self.am_getOption('PrintEvery', 200)
        ##Notification
        self.notesToSend = ""
        self.addressTo = self.am_getOption('MailTo',
                                           ["*****@*****.**"])
        self.addressFrom = self.am_getOption('MailFrom',
                                             "*****@*****.**")
        self.subject = "DataRecoveryAgent"
Exemplo n.º 15
0
    ma.setSteeringFile("clic_ild_cdr_steering.xml")
    ma.setGearFile("clic_ild_cdr.gear")
    result = j.append(ma)
    if not result['OK']:
        gLogger.error(result["Message"])
        dexit(1)
    j.setCPUTime(10000)
    j.setOutputSandbox("*.log")
    return j


if __name__ == '__main__':

    # Collect the list of input LFNs to be processed.
    input_lfns = getFiles()

    # DiracILC instance backed by a local job-repository file.
    dirac_instance = DiracILC(True, "paramjobtest.rep")

    # Any job definition can serve as the template for the parametric submission.
    job = getJob()

    # Parametric submission: each chunk of (at most) 200 input files becomes
    # one bulk submission, i.e. up to 200 jobs produced in one go.
    from DIRAC.Core.Utilities.List import breakListIntoChunks
    for chunk in breakListIntoChunks(input_lfns, 200):
        # Turn the file list into per-task parametric inputs.
        job.setParametricInputData(chunk)

        # From here on the submission is identical to a normal job.
        submission = job.submit(dirac_instance)
        if not submission["OK"]:
            gLogger.error("Failed to submit the job: ", submission["Message"])
Exemplo n.º 16
0
# Submission script: runs one UserJob per (macro, output, events) entry in
# jobParams.  The loop body continues beyond this excerpt.
#every single script has these four lines

from DIRAC.Core.Base import Script
Script.parseCommandLine()

#create ilcdirac instance
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
dirac = DiracILC(True, "some_job_repository.rep")

#job definition
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
#job = UserJob()
from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLIC
from ILCDIRAC.Interfaces.API.NewInterface.Applications import LCSIM

#jobParams = [('slicTest8_mu+_theta90.mac','diracTest8_mu+_theta90.slcio',50),('slicTest7_mu+_theta_5-175.mac','diracTest_mu+_theta_5-175.slcio',50),('slicTest3_e+.mac','diracTest3_e+.slcio',10),('slicTest2_pi+.mac','diractTest2_pi+.slcio',10)]
#jobParams = [('slicTest10_mu+_100gev_theta70_testNewGeom.mac','diracTest10_mu+_100gev_theta70_testNewGeom.slcio',10),('slicTest10_mu+_100gev_theta90_testNewGeom.mac','diracTest10_mu+_100gev_theta90_testNewGeom.slcio',10)]
# Each entry is (slic_macro_path, output_slcio_name, number_of_events).
jobParams = [
    ('/users/detector/ssetru/SiDSim/detectors/detector_vtx_matbudghalf_nonsensitivelayer/slicmacros/slicTest8_mu+_100gev_theta60.mac',
     'diracTest_100gev_theta60_vtx_matbudghalf_nonsensitivelayer.slcio', 100),
    ('/users/detector/ssetru/SiDSim/detectors/detector_vtx_matbudghalf_nonsensitivelayer/slicmacros/slicTest8_mu+_10gev_theta60.mac',
     'diracTest_10gev_theta60_vtx_matbudghalf_nonsensitivelayer.slcio', 100)
]
#slicMacros = ['slicTest8_mu+_theta90.mac','slicTest7_mu+_theta_5-175.mac','slicTest3_e+.mac','slicTest2_pi+.mac']
#fileOutputs = ['diracTest2Loop1.slcio','diracTest2Loop2.slcio','diracTest2Loop3.slcio','diractTest2Loop4.slcio']
#slicNumEvents = [100,100,10,10]

# One UserJob per parameter tuple.
for macro, output, nEvts in jobParams:
    job = UserJob()
    job.setName("ssetru_dirac_test1")
    job.setJobGroup("tests")
Exemplo n.º 17
0
def subOverlay():

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    nbevts = 0  # To analize all input events
    outputFilePrefix = "overlay_example" if _clip.outputFilePrefix == "" else _clip.outputFilePrefix
    outputDir = _clip.outputDir
    inputFile = _clip.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim does not given.")
        exit(-1)

    recfile = outputFilePrefix + ".rec.slcio"
    dstfile = outputFilePrefix + ".dst.slcio"
    detector_model = "ILD_l5_o1_v02"
    key = detector_model.split('_')
    sim_detectorModel = "_".join([key[0], key[1], key[3]])

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myoverlayjob")
    job.setName("myoverlay")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    # job.setBannedSites([])         # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    # Create Overlay application
    ovldata = [{
        "ProcessorName": "BgOverlayWW",
        "evttype": "aa_lowpt_WW",
        "ProdID": 10237,
        "expBG": 0.211,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayWB",
        "evttype": "aa_lowpt_WB",
        "ProdID": 10241,
        "expBG": 0.24605,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayBW",
        "evttype": "aa_lowpt_BW",
        "ProdID": 10239,
        "expBG": 0.243873,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayBB",
        "evttype": "aa_lowpt_BB",
        "ProdID": 10235,
        "expBG": 0.35063,
        "subdir": "000"
    }, {
        "ProcessorName": "PairBgOverlay",
        "evttype": "seeablepairs",
        "ProdID": 10233,
        "expBG": 1.0,
        "subdir": "100"
    }]

    BXOverlay = 1
    NbSigEvtsPerJob = 100
    numberOfSignalEvents = NbSigEvtsPerJob
    basebkgpath = "/ilc/prod/ilc/mc-opt-3/ild/sim/500-TDR_ws"
    energy = "500"

    for ovl in ovldata:
        print "### OverlayInput ... " + ovl["ProcessorName"]
        ovlapp = OverlayInput()
        ovlpath = "%s/%s/%s/v02-00-01/%8.8d/%s" % \
       ( basebkgpath, ovl["evttype"], sim_detectorModel, ovl["ProdID"] , ovl["subdir"] )
        print "    OverlayPath ... " + ovlpath
        ovlapp.setMachine("ilc_dbd")
        # ovlapp.setEnergy(energy)
        # ovlapp.setDetectorModel(sim_detectorModel)
        ovlapp.setProcessorName(ovl["ProcessorName"])
        ovlapp.setBkgEvtType(ovl["evttype"])
        ovlapp.setPathToFiles(ovlpath)
        ovlapp.setGGToHadInt(ovl["expBG"])
        ovlapp.setBXOverlay(BXOverlay)
        ovlapp.setNbSigEvtsPerJob(NbSigEvtsPerJob)
        ovlapp.setNumberOfSignalEventsPerJob(numberOfSignalEvents)
        res = job.append(ovlapp)
        if not res['OK']:
            print res['Message']
            exit(1)

    # Create Marlin application
    marlin = Marlin()
    marlin.setVersion("ILCSoft-02-00-02_gcc49")
    marlin.setDetectorModel(detector_model)
    marlin.setSteeringFile("MarlinStdReco.xml")
    marlin.setInputFile(inputFile)
    marlin.setNumberOfEvents(nbevts)
    marlin.setOutputDstFile(dstfile)
    marlin.setOutputRecFile(recfile)
    extraCLIArguments = " --constant.DetectorModel=%s " % detector_model
    extraCLIArguments += " --constant.RunOverlay=true --constant.CMSEnergy=%s " % str(
        energy)
    extraCLIArguments += " --global.Verbosity=MESSAGE "
    marlin.setExtraCLIArguments(extraCLIArguments)

    job.append(marlin)

    if outputDir != "":
        job.setOutputData([dstfile, recfile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Exemplo n.º 18
0
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import *
from ILCDIRAC.Interfaces.API.NewInterface.Applications import *

dirac = DiracILC(True, "CLICdet_repo.cfg")
job = UserJob()
job.setName("example")
job.setInputData([
    "/ilc/user/p/proloff/stdhep/2f_samples_23_04_2013/whizard_SM_bb_500_90deg.stdhep"
])
job.setCLICConfig("ILCSoft-2017-12-21")
job.setOutputSandbox(["*.root", "*.slcio", "*.log"])

ddsim = DDSim()
ddsim.setSteeringFile("CLICPerformance/examples/clic_steer.py")
ddsim.setVersion("ILCSoft-2017-12-21_gcc62")
ddsim.setDetectorModel("CLIC_o3_v14")
ddsim.setInputFile(
    "/ilc/user/p/proloff/stdhep/2f_samples_23_04_2013/whizard_SM_bb_500_90deg.stdhep"
)
ddsim.setOutputFile("ddsim_output.slcio")
ddsim.setStartFrom(0)
ddsim.setNumberOfEvents(10)
res1 = job.append(ddsim)
if not res1['OK']:
    print res1['Message']
    sys.exit(2)

# -------------------- Comment if gg_had overlay not wanted -----------------------------------------------------#
Exemplo n.º 19
0
def main():
    """Submit the full SiD simulation/reconstruction chain as grid jobs.

    Chain per job: SLIC simulation -> LCSIM (digitization/tracking) ->
    slicPandora -> Marlin (LCFIPlus vertexing) -> LCSIM (DST production) ->
    optional Marlin flavortag step.  When --split is given, the input stdhep
    file is processed in several jobs of ``args.split`` events each.
    Returns 0 on success; exits the process on invalid arguments or when an
    application cannot be appended to a job.
    """
    # Take the input arguments from the argument parser, and check they exist...
    args = parse_args()
    if not args:
        print 'Invalid Arguments'
        sys.exit(1)

#### Software Versions ####
    # Index meaning (from usage below): [0]=SLIC, [1]=LCSIM, [2]=SLICPandora,
    # [3]=Marlin.
    softVersions = ["v3r0p3", "HEAD", "ILC_DBD",
                    "0116"]  # Working (recommended)
    # softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working
    # softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working

    # Check the --runs and --split arguments to make sure they are compatible, if not exit...
    if not check_events_arguments(args.events, args.split):
        sys.exit(1)

    # Check the input LFN given by user, it needs to have .stdhep extension and should not have LFN: at the beginning...
    lfn_check, lfn = check_input_LFN(args.stdhepInput)
    if not lfn_check:
        sys.exit(1)

    # Call when you begin ILC-DIRAC jobs, the true indicates a repository file is included...
    dirac = DiracILC(True,
                     setup_repository_name(args.stdhepInput, args.detector))

    # Prepares the input and output sandboxes, if -f, then adds the files required for flavortagging,
    # into the input sandbox
    inputSandbox, outputSandbox = setup_sandboxes(args.macFile, args.flavortag)

    # Prepares values for the job loop...
    # NOTE(review): args.split == 0 would leave nInputEvents/nOutputEvents
    # undefined (NameError in the loop below); presumably
    # check_events_arguments() rules that case out -- confirm.
    if args.split < 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.events)
    if args.split > 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.split)

    # Loop that runs through the required number of jobs to be executed...
    for startEvent in range(0, nInputEvents, nOutputEvents):

        ################## Job Initialise ########################################
        job = UserJob()
        job.setName(path.basename(args.stdhepInput))
        job.setJobGroup('JobGroup')
        job.setInputSandbox(inputSandbox)
        # Python 2 integer division: sequential file index for this chunk.
        fileNumber = startEvent / nOutputEvents
        print "Job ", fileNumber

        outputFiles = setup_output_dict(args.stdhepInput, args.detector,
                                        fileNumber, args.outputPath,
                                        softVersions)
        slicOutput = outputFiles['slicOutput']
        prePandoraOutput = outputFiles['prePandoraOutput']
        pandoraOutput = outputFiles['pandoraOutput']
        vertexingOutput = outputFiles['vertexingOutput']
        lcsimRecOutput = outputFiles['lcsimRecOutput']
        lcsimDstOutput = outputFiles['lcsimDstOutput']
        flavortagOutput = outputFiles['flavortagOutput']
        diracOutput = outputFiles['diracOutput']

        ################## SLIC ##################################################
        slic = SLIC()
        slic.setVersion(softVersions[0])
        slic.setSteeringFile(args.macFile)
        # slic.setInputFile(lfn)
        slic.setOutputFile(slicOutput)
        slic.setDetectorModel(args.detector)
        slic.setNumberOfEvents(nOutputEvents)
        slic.setStartFrom(startEvent)
        #print slic.listAttributes()
        result = job.append(slic)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## lcsim (digitization and tracking) #####################
        lcsim = LCSIM()
        lcsim.setVersion(softVersions[1])
        lcsim.setSteeringFile(
            'steeringFiles/sid_dbd_prePandora_noOverlay_v22.xml'
        )  # Another version is included in /steeringFiles
        lcsim.getInputFromApp(slic)
        lcsim.setTrackingStrategy(
            'steeringFiles/sidloi3_trackingStrategies_default.xml')
        # lcsim.setAliasProperties('alias.properties')
        lcsim.setDetectorModel('geometryFiles/sidloi3.zip')
        lcsim.setOutputFile(prePandoraOutput)
        lcsim.setNumberOfEvents(nOutputEvents)
        #print lcsim.listAttributes()
        result = job.append(lcsim)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## slicPandora ###########################################
        slicPandora = SLICPandora()
        slicPandora.setVersion(softVersions[2])
        slicPandora.setDetectorModel(args.detector)
        slicPandora.getInputFromApp(lcsim)
        slicPandora.setOutputFile(pandoraOutput)
        slicPandora.setPandoraSettings('pandoraSettings.xml')
        slicPandora.setNumberOfEvents(nOutputEvents)
        #print slicPandora.listAttributes()
        result = job.append(slicPandora)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## Marlin, LCFIPlus Vertexing ############################
        vertexing = Marlin()
        vertexing.setVersion(softVersions[3])
        vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml')
        vertexing.setGearFile('steeringFiles/sidloi3.gear')
        vertexing.getInputFromApp(slicPandora)
        vertexing.setOutputFile(vertexingOutput)
        vertexing.setNumberOfEvents(nOutputEvents)
        #print vertexing.listAttributes()
        result = job.append(vertexing)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## lcsim (DST production) ################################
        lcsimDst = LCSIM()
        lcsimDst.setVersion(softVersions[1])
        lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml')
        lcsimDst.getInputFromApp(vertexing)
        lcsimDst.setNumberOfEvents(nOutputEvents)
        # lcsimDst.setAliasProperties('alias.properties')
        lcsimDst.setDetectorModel('geometryFiles/sidloi3.zip')
        lcsimDst.setOutputRecFile(lcsimRecOutput)
        lcsimDst.setOutputDstFile(lcsimDstOutput)
        #print lcsimDst.listAttributes()
        result = job.append(lcsimDst)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## Marlin, LCFIPlus flavortag ############################
        # Only run the flavortag Marlin step when -f was requested.
        if args.flavortag:
            flavortag = Marlin()
            flavortag.setVersion(softVersions[3])
            flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml')
            flavortag.setGearFile('steeringFiles/sidloi3.gear')
            flavortag.setInputFile(lcsimDstOutput)
            flavortag.setOutputFile(flavortagOutput)
            flavortag.setNumberOfEvents(nOutputEvents)
            #print flavortag.listAttributes()
            result = job.append(flavortag)
            if not result['OK']:
                print result['Message']
                sys.exit(2)


################## Job Finalise ##########################################

# List of banned sites that the job shall not be sent too. These are sites that jobs tend to fail on,
# This list is likely to change.
        job.setBannedSites([
            'LCG.IN2P3-CC.fr',
            'LCG.RAL-LCG2.uk',
            'LCG.DESY-HH.de',
            'LCG.DESYZN.de',
            'LCG.KEK.jp',
            'OSG.PNNL.us',
        ])

        job.setCPUTime(50000)
        job.setPlatform('x86_64-slc5-gcc43-opt')

        # Sets the output data file according to if -f is selcted, ships ouput to your /ilc/user/a/aPerson/
        # directory on the grid.
        if args.flavortag:
            job.setOutputData(flavortagOutput, diracOutput, args.SE)

        else:
            job.setOutputData(lcsimDstOutput, diracOutput, args.SE)

        job.setOutputSandbox(outputSandbox)
        job.setInputData(lfn)

        if args.dontPromptMe:
            job.dontPromptMe()
        # Submits Job!!!
        job.submit()

    return 0
Exemplo n.º 20
0
# Read the Marlin steering-file template once; it is reused for every event
# selection below.
base = open(steeringTemplateFile,'r')
steeringTemplateContent = base.read()
base.close()

# One submission pass per event selection defined elsewhere in this script.
for eventSelection in eventsToSimulate:
    eventType = eventSelection['EventType']
    detectorModel = eventSelection['DetectorModel']
    reconstructionVariant = eventSelection['ReconstructionVariant']
    energy = eventSelection['Energy']
    analysisTag = eventSelection['AnalysisTag']

    # Make local gear file
    os.system('cp ' + gearFile + ' .')
    gearFileLocal = os.path.basename(gearFile)

    diracInstance = DiracILC(withRepo=False)

    # Regex used to match DST slcio file names for this selection;
    # the three groups capture generator-number fields.
    slcioFormat = 'DetModel_' + detectorModel + '_RecoVar_' + reconstructionVariant + '_' + eventType + '_' + str(energy) + 'GeV_GenNumber_(.*?)_(.*?)_(.*?)_DST.slcio'

    slcioFilesToProcess = getDstSlcioFiles(jobDescription,detectorModel,reconstructionVariant,energy,eventType)

    # Abort the whole submission when no input files match.
    if not slcioFilesToProcess:
        print 'No slcio files found.  Exiting job submission.'
        sys.exit()

    # Per-file job preparation (loop body continues beyond this excerpt).
    for slcioFile in slcioFilesToProcess:
        print 'Checking ' + eventType + ' ' + str(energy) + 'GeV jobs.  Detector model ' + detectorModel + '.  Reconstruction stage ' + reconstructionVariant + '.  Slcio file ' + slcioFile + '.'
        slcioFileNoPath = os.path.basename(slcioFile)

        inputSandbox = ['LFN:/ilc/user/s/sgreen/SelectionProcessorTarBall/MarlinSelectionProcessor.tar.gz']
Exemplo n.º 21
0
def main():
    """Script main method: submit topasymmetry Marlin jobs in bunches.

    Groups the production input files into bunches of ``nrFilesPerJob``,
    skips bunches whose output ROOT file already exists on the grid, and
    submits one Marlin job per remaining bunch.  Exits via ``dexit`` on
    inconsistent CLI parameters or on errors while appending the Marlin
    application.
    """

    cliParams = Params()
    cliParams.registerSwitches()
    Script.parseCommandLine(ignoreErrors=True)

    consistent = cliParams.checkConsistency()
    if not consistent['OK']:
        gLogger.error("Error checking consistency:", consistent['Message'])
        Script.showHelp()
        dexit(2)

    ##Get prodID
    prodID = float(cliParams.prodID)

    ##Get all possible input files
    inputFiles = getInputFiles(prodID)

    ##Get suffix
    suffix = cliParams.suffix
    if suffix: suffix = "_" + suffix

    ##Jet clustering
    algorithm = "ValenciaPlugin {radius:.1f} {beta:.1f} {gamma:.1f}"
    jetRadius = float(cliParams.jetRadius)
    jetBeta = float(cliParams.jetBeta)
    jetGamma = float(cliParams.jetGamma)

    jetCluster = "ExclusiveNJets 2"
    jetRecomb = "E_scheme"

    ##Top tagger
    deltaR = float(cliParams.deltaR)
    deltaP = float(cliParams.deltaP)
    cos_theta_W_max = float(cliParams.cos_theta_W_max)

    ##Sixfermion sample
    sixFermionSample = cliParams.sixFermionSample

    ##Number of files per job
    nrFilesPerJob = int(cliParams.nrFilesPerJob)

    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    repDir = "/afs/cern.ch/user/l/lstroem/clicdp/analysis/steering/chain/topasymmetry_wflavourtag/submit/{prodID:04.0f}/rep/".format(
        prodID=prodID)
    subprocess.call("mkdir -p " + repDir, shell=True)
    dirac = DiracILC(
        False
    )  #, repDir+"topasymmetry_vlc{suffix}.rep".format(suffix = suffix))

    jetAlgo = algorithm.format(radius=jetRadius, beta=jetBeta, gamma=jetGamma)
    inputFileList = []
    i = 0
    j = 1
    # Accumulate input files; flush a job every nrFilesPerJob files (and for
    # the final, possibly shorter, bunch).
    for inputFile in inputFiles:
        inputFileList.append(inputFile)
        i += 1
        if (i >= nrFilesPerJob * j) or (i == len(inputFiles)):

            jobName = "topasymmetry_chain_{jetSettings}_dR{deltaR:.2f}_dP{deltaP:.2f}_cthetaWmax{cos_theta_W_max:.2f}_1Jun2017_part{index}_{prodID:04.0f}".format(
                jetSettings=getJetSettings(jetAlgo, jetCluster, jetRecomb),
                deltaR=deltaR,
                deltaP=deltaP,
                cos_theta_W_max=cos_theta_W_max,
                index=j,
                prodID=prodID)
            jobGroup = "topasymmetry_chain_{prodID:04.0f}".format(
                prodID=prodID)
            job, outputFile, rootFile = defGridJob(jobName, jobGroup,
                                                   inputFileList)

            ##Check if outputfile already exists
            pOutCheck = subprocess.Popen(
                "dirac-dms-lfn-replicas /ilc/user/r/rstrom/" + jobGroup + "/" +
                rootFile,
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            outCheck, errCheck = pOutCheck.communicate()
            #gLogger.notice(jobName)
            if "no such file" not in outCheck.lower():
                gLogger.notice('File exists! Skipping!')
                inputFileList = []
                j += 1
                continue  #continue #use break if only part1, use continue of run over all parts
            #gLogger.notice(jetAlgo)
            #doLogger(jobName, inputFileList, outputFile, rootFile)
            res = job.append(
                defMarlin(outputFile,
                          rootFile,
                          jetAlgo=jetAlgo,
                          jetCluster=jetCluster,
                          jetRecomb=jetRecomb,
                          deltaR=deltaR,
                          deltaP=deltaP,
                          cos_theta_W_max=cos_theta_W_max,
                          sixFermionSample=sixFermionSample))

            if not res['OK']:  #Catch if there is an error
                print res['Message']  #Print the error message
                dexit(1)  # BUGFIX: bare "dexit" was a no-op expression; actually exit on error

            ##Job submission
            print job.submit(dirac)
            inputFileList = []
            j += 1
            #break #add break of only part1

    gLogger.notice("All done!")
    dexit(0)
Exemplo n.º 22
0
# Input LFNs from the preceding file-catalog query (res defined earlier).
lfns = res['Value']
print "found %s files" % len(lfns)

# Single OverlayInput application, shared by all overlay-enabled jobs below.
ovi = OverlayInput()
ovi.setEnergy(500.)
ovi.setBXOverlay(300)
ovi.setGGToHadInt(0.3)
ovi.setNbSigEvtsPerJob(10)
ovi.setBkgEvtType("gghad")
ovi.setDetectorModel("CLIC_ILD_CDR")

# Submit every input file twice: once with overlay, once without.
overlay = [True, False]

for ov in overlay:
    # Separate job repository per overlay setting.
    d = DiracILC(True, "repo_overlay_%s.rep" % ov)
    for lfn in lfns:
        j = UserJob()
        steeringf = "clic_ild_cdr_steering.xml"
        if ov:
            # Overlay variant uses a dedicated steering file and the shared
            # OverlayInput application; on error, skip this file.
            steeringf = "clic_ild_cdr_steering_overlay.xml"
            res = j.append(ovi)
            if not res['OK']:
                print res['Message']
                continue
        ma = Marlin()
        ma.setVersion("v0111Prod")
        ma.setGearFile("clic_ild_cdr.gear")
        ma.setSteeringFile(steeringf)
        ma.setInputFile("LFN:" + lfn)
        ma.setNbEvts(10)
Exemplo n.º 23
0
# Submission script: one Marlin job per input slcio file aa_1..aa_300.
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
import time

dIlc = DiracILC()
# NOTE(review): the list initializer is immediately overwritten by a string
# inside the loop; the [] value is never used.
lcoutput = []
for i in range(1, 301):
    lcoutput = "aa_%d.root" % i
    job = UserJob()
    job.setDestination('LCG.CERN.ch')
    job.setInputSandbox([
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/PandoraLikelihoodData9EBin_CLIC_ILD.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/PandoraSettingsFast.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/MarlinRecoRootFiles/steering.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/clic_ild_cdr.gear",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/MarlinRecoRootFiles/lib.tar.gz"
    ])
    job.setInputData(
        "LFN:/ilc/user/k/kacarevic/hgamgam/Marlin/newPandora/aa/aa_%d.slcio" %
        i)
    # NOTE(review): "*.steer " ends with a space -- likely a typo for
    # "*.steer"; confirm the intended sandbox pattern.
    job.setOutputSandbox(
        ["*.log", "*.sh", "*.py", "*.out", "*.xml", "*.steer "])
    job.setJobGroup("myRoot")
    job.setName("root_aa_%d" % i)
    marl = Marlin()
    marl.setVersion('ILCSoft-2016-09-27_gcc48')
Exemplo n.º 24
0
 
# Query the file catalog (fc and meta defined earlier in the script).
# NOTE(review): on error only the message is printed; execution continues and
# res['Value'] below would raise -- probably an exit is missing here.
res = fc.findFilesByMetadata(meta)
if not res['OK']:
   print res['Message']

lfns = res['Value']
#print "Found %s files" % len(lfns)
filelist=[]
for lfn in lfns:
   filelist.append(lfn)
#print filelist
#filelist2=filelist[0]

#check if big radius in fastjetanalyzer indeed 0.7 or 1.0
jobGroup = "HZAnalyzer_190417_ee_qqqq_m_qqqq_2TeV_13696_VLC7PFOs"
dirac = DiracILC(True,jobGroup+".rep")
job = UserJob()
#job.setExecutionEnv({'ROOT_INCLUDE_PATH':'./'})
job.setJobGroup(jobGroup)
job.setOutputSandbox ( [ "*.log","*.out","*.py"] )
# NOTE(review): setBannedSites is called twice; the second call (below) lists
# 'LCG.INP3-CC.fr', which looks like a typo for 'LCG.IN2P3-CC.fr' used here.
# Confirm whether the second call replaces or extends this one.
job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.QMUL.uk'])
#pay attention that the Zuds200 here is NOT changed
job.setInputSandbox( ["LFN:/ilc/user/w/webermat/190606/HZAnalyzerlib.tar.gz", "LFN:/ilc/user/w/webermat/190412/vtxprob.tar.gz","LFN:/ilc/user/w/webermat/190412/flavourTagging04-01_ct_90deg/lcfiweights.tar.gz"] ) 
job.setBannedSites(['LCG.INP3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.QMUL.uk','LCG.Oxford.uk'])
# Split the whole file list into sub-jobs of 50 input files each.
job.setSplitInputData(filelist, numberOfFilesPerJob=50)
ma = Marlin()
ma.setVersion('ILCSoft-2019-04-17_gcc62')
#ma.setInputFile("LFN:/ilc/user/w/webermat/ddsimstdheptautau/ILC18-10-11_gcc62_CLIC_o3_v14/tautau200/ddsim_ILC181011_gcc62_tautau_200_CLIC_o3_v14_0%s.slcio"%(str(input_ind)))
ma.setSteeringFile("/eos/user/w/weberma2/steeringFiles/testHZAnalyzer.xml")
ma.setDetectorModel("CLIC_o3_v14")
HZrootfilename2="HZStudy_ee_qqqq_m_qqqq_2TeV_13696_polm80_3TeV_wO_CLIC_o3_v14_DR7.root"
def main(argv):
    """Submit one DDSim + Marlin (sim + reco) chain job to ILCDIRAC.

    NOTE(review): the ``$UPPERCASE`` tokens below are placeholders substituted
    by an external (presumably shell) templating step before this script is
    executed; as written, lines like ``evts_per_run = $EVTSPERRUN`` are not
    valid Python.  Confirm against the generating script.
    """
    # Input arguments
    ildconfig_version   = "$ILDCONFIGVER"
    ilcsoft_version     = "$ILCSOFTVER"

    evts_per_run    = $EVTSPERRUN
    detector_model  = "$DETECTOR"
    sim_input       = "$SIMINPUT"
    process_name    = "$PROCESS"

    index           = $IND

    sim_input = diracpath_from_pnfspath( sim_input )
    sim_detector_model = detector_model_wo_option( detector_model )

    job_group = ilcsoft_version + "_" + ildconfig_version + "_" + process_name + "_" + detector_model
    dirac = DiracILC(True,job_group+".rep")

    # outputs to be saved onto grid SE
    RECoutput = []

    # DDSim

    # Event window for this job index (1-based index).
    evtStart   = (index-1)*evts_per_run
    evtEnd     = index*evts_per_run - 1
    RandSeed = random.randrange(11623, 99999)

    lcinputSIM  = "LFN:" + sim_input
    lcoutputSIM = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_SIM.slcio"%(str(evtStart),(str)(evtEnd))

    sim = DDSim()
    sim.setVersion(ilcsoft_version)

    sim.setDetectorModel(sim_detector_model)
    sim.setInputFile(lcinputSIM)
    sim.setSteeringFile("ddsim_steer.py")
    sim.setNumberOfEvents(evts_per_run)
    sim.setRandomSeed(RandSeed)
    sim.setEnergy(1000)
    sim.setStartFrom(evtStart)
    sim.setOutputFile(lcoutputSIM)

    # Marlin
    lcoutputDST = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_DST.slcio"%(str(evtStart),(str)(evtEnd))

    ma = Marlin()
    ma.setVersion(ilcsoft_version)
    ma.setDetectorModel(detector_model)
    ma.setSteeringFile("MarlinStdReco.xml")
    ma.setExtraCLIArguments( "--constant.lcgeo_DIR=$lcgeo_DIR --constant.DetectorModel={} --global.MaxRecordNumber=0".format(detector_model) )
    ma.setLogFile("marlin.log")
    ma.getInputFromApp(sim)
    ma.setEnergy(1000)
    ma.setOutputDstFile(lcoutputDST)

    RECoutput.append(lcoutputDST)

    # ILCDirac user job
    job = UserJob()
    job.setName("user_sim_reco")

    job.setJobGroup(job_group)

    job.setILDConfig(ildconfig_version)
    job.setCPUTime(86400)

    tmp_file_name = process_name + "_sim_reco_job_tmp.py"
    job.setInputSandbox([tmp_file_name])
    job.setOutputSandbox(["*.log","MarlinStdRecoParsed.xml","marlin*.xml","*.py "])

    job.dontPromptMe()
    job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us'])

    # run simulation job
    # NOTE(review): indexing the result dict with the message string will
    # raise KeyError when 'OK' is False; probably simres['Message'] was meant.
    simres = job.append(sim)
    if not simres['OK']:
            print simres['Not ok appending ddsim to job']
            quit()


    # run Malrin reco jobs
    mares = job.append(ma)
    if not mares['OK']:
            print mares['Not ok appending Marlin to job']
            quit()

    job.setOutputData(RECoutput,"ILDPerformance/WWZZSeparation/{}_ILDConfig_{}_{}".format(ilcsoft_version,ildconfig_version,detector_model),"DESY-SRM")
    print RECoutput

    submit_output = job.submit(dirac)
    print submit_output