Exemplo n.º 1
0
    def __init__(self, clip, params):
        """Collect test configuration from the CLI ``clip`` object and the ``params`` dict.

        :param clip: parsed command-line options; ``clip.testOverlay`` and
            ``clip.submitMode`` are read here
        :param dict params: test parameters; ``mokkaVersion`` and
            ``rootVersion`` are mandatory, all other entries are optional
        """
        self.clip = clip
        self.ildConfig = params.get("ildConfig", None)
        self.alwaysOverlay = params.get("alwaysOverlay", False)
        self.runOverlay = self.clip.testOverlay or self.alwaysOverlay
        # Mandatory parameters: a missing key raises KeyError on purpose.
        self.mokkaVersion = params["mokkaVersion"]
        self.mokkaSteeringFile = params.get("mokkaSteeringFile")
        self.detectorModel = params.get("detectorModel")
        self.marlinVersion = params.get("marlinVersion")
        self.marlinSteeringFile = params.get("marlinSteeringFile")
        self.ddsimVersion = params.get("ddsimVersion")
        self.ddsimDetectorModel = params.get("ddsimDetectorModel")
        self.ddsimInputFile = params.get("ddsimInputFile")
        self.marlinInputdata = params.get("marlinInputdata")
        self.gearFile = params.get("gearFile")
        self.lcsimVersion = params.get("lcsimVersion")
        self.steeringFileVersion = params.get("steeringFileVersion", None)
        self.rootVersion = params["rootVersion"]

        self.whizard2Version = params.get("whizard2Version")
        self.whizard2SinFile = params.get("whizard2SinFile")

        self.energy = params.get("energy")
        self.backgroundType = params.get("backgroundType")
        self.machine = params.get("machine")

        # NOTE(review): gearFile, marlinSteeringFile and marlinVersion were
        # previously re-assigned here with identical values; duplicates removed.

        self.lcsimPreSteeringFile = params.get("lcsimPreSteeringFile")
        self.lcsimPostSteeringFile = params.get("lcsimPostSteeringFile")

        self.fccSwPath = params.get("fccSwPath")
        self.fccSwSteeringFile = params.get("fccSwSteeringFile")

        self.fccAnalysisSteeringFile = params.get("fccAnalysisSteeringFile")

        ### other things needed to run tests
        self.log = gLogger.getSubLogger("JobCreater")

        from ILCDIRAC.Interfaces.API.DiracILC import DiracILC, __RCSID__ as drcsid
        from ILCDIRAC.Interfaces.API.NewInterface.UserJob import __RCSID__ as jrcsid
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import __RCSID__ as apprcsid

        if self.clip.submitMode == "local":
            self.log.notice("")
            self.log.notice("       DIRAC RCSID:", drcsid)
            self.log.notice("         Job RCSID:", jrcsid)
            self.log.notice("Applications RCSID:", apprcsid)
            self.log.notice("")

        self.diracInstance = DiracILC(False, 'tests.rep')
        self.jobList = {}
Exemplo n.º 2
0
    def submit(self, diracinstance=None, mode="wms"):
        """ Submit call: when your job is defined, and all applications are set, you need to call this to
    add the job to DIRAC.

    :param diracinstance: DiracILC instance
    :type diracinstance: ~ILCDIRAC.Interfaces.API.DiracILC.DiracILC
    :param str mode: "wms" (default), "agent", or "local"

    .. note ::
      The *local* mode means that the job will be run on the submission machine. Use this mode for testing of submission scripts

    """
        # Optional splitting step runs first; abort on an explicit failure.
        if self.splittingOption:
            splitRes = self._split()
            if 'OK' in splitRes and not splitRes['OK']:
                return splitRes

        # Credential checks: a valid proxy of the right group is required.
        if not self.proxyinfo['OK']:
            message = "Not allowed to submit a job, you need a %s proxy." % self.usergroup
            self.log.error(message)
            return self._reportError(message, self.__class__.__name__)

        proxyValue = self.proxyinfo['Value']
        if 'group' not in proxyValue:
            self.log.error(
                "Could not determine group, you do not have the right proxy.")
            return self._reportError(
                "Could not determine group, you do not have the right proxy.")
        if proxyValue['group'] not in self.usergroup:
            self.log.error(
                "Not allowed to submit a job, you need a %s proxy." %
                self.usergroup)
            return self._reportError(
                "Not allowed to submit job, you need a %s proxy." %
                self.usergroup, self.__class__.__name__)

        workflowRes = self._addToWorkflow()
        if not workflowRes['OK']:
            return workflowRes
        self.oktosubmit = True
        # Fall back to a fresh DiracILC instance unless one was supplied.
        self.diracinstance = diracinstance if diracinstance else DiracILC()
        return self.diracinstance.submit(self, mode)
Exemplo n.º 3
0
  def submit(self, diracinstance = None, mode = "wms"):
    """ Submit call: when your job is defined, and all applications are set, you need to call this to
    add the job to DIRAC.

    :param diracinstance: :any:`DiracILC <ILCDIRAC.Interfaces.API.DiracILC.DiracILC>` instance
    :param string mode: "wms" (default), "agent", or "local"

    .. note ::
      The *local* mode means that the job will be run on the submission machine. Use this mode for testing of submission scripts

    """
    #Check the credentials. If no proxy or not user proxy, return an error
    if not self.proxyinfo['OK']:
      self.log.error("Not allowed to submit a job, you need a %s proxy." % self.usergroup)
      return self._reportError("Not allowed to submit a job, you need a %s proxy." % self.usergroup,
                               self.__class__.__name__)
    # dict.has_key() was removed in Python 3; use the 'in' operator instead.
    if 'group' in self.proxyinfo['Value']:
      group = self.proxyinfo['Value']['group']
      if group not in self.usergroup:
        self.log.error("Not allowed to submit a job, you need a %s proxy." % self.usergroup)
        return self._reportError("Not allowed to submit job, you need a %s proxy." % self.usergroup,
                                 self.__class__.__name__)
    else:
      self.log.error("Could not determine group, you do not have the right proxy.")
      return self._reportError("Could not determine group, you do not have the right proxy.")

    # Serialise all applications into the workflow before submitting.
    res = self._addToWorkflow()
    if not res['OK']:
      return res
    self.oktosubmit = True
    if not diracinstance:
      self.diracinstance = DiracILC()
    else:
      self.diracinstance = diracinstance
    return self.diracinstance.submit(self, mode)
Exemplo n.º 4
0
  def __init__( self,
                clip,
                params
              ):
    """Collect test configuration from the CLI ``clip`` object and the ``params`` dict.

    :param clip: parsed command-line options; ``clip.testOverlay`` and
      ``clip.submitMode`` are read here
    :param dict params: test parameters; ``mokkaVersion`` and ``rootVersion``
      are mandatory, all other entries are optional
    """
    self.clip = clip
    self.ildConfig = params.get( "ildConfig", None )
    self.alwaysOverlay = params.get( "alwaysOverlay", False )
    self.runOverlay = self.clip.testOverlay or self.alwaysOverlay
    # Mandatory parameters: a missing key raises KeyError on purpose.
    self.mokkaVersion = params["mokkaVersion"]
    self.mokkaSteeringFile = params.get( "mokkaSteeringFile" )
    self.detectorModel = params.get( "detectorModel" )
    self.marlinVersion = params.get( "marlinVersion" )
    self.marlinSteeringFile = params.get( "marlinSteeringFile" )
    self.ddsimVersion = params.get( "ddsimVersion" )
    self.ddsimDetectorModel = params.get( "ddsimDetectorModel" )
    self.ddsimInputFile = params.get( "ddsimInputFile" )
    self.marlinInputdata = params.get( "marlinInputdata" )
    self.gearFile = params.get( "gearFile" )
    self.lcsimVersion = params.get( "lcsimVersion" )
    self.steeringFileVersion = params.get( "steeringFileVersion", None )
    self.rootVersion = params["rootVersion"]

    self.whizard2Version = params.get( "whizard2Version" )
    self.whizard2SinFile = params.get( "whizard2SinFile" )

    self.energy = params.get("energy")
    self.backgroundType = params.get("backgroundType")
    self.machine = params.get("machine")

    # NOTE(review): duplicate re-assignments of gearFile, marlinSteeringFile
    # and marlinVersion (identical values) were removed here.

    self.lcsimPreSteeringFile  = params.get( "lcsimPreSteeringFile" )
    self.lcsimPostSteeringFile = params.get( "lcsimPostSteeringFile" )

    self.fccSwPath = params.get( "fccSwPath" )
    self.fccSwSteeringFile = params.get( "fccSwSteeringFile" )

    self.fccAnalysisSteeringFile = params.get( "fccAnalysisSteeringFile" )

    ### other things needed to run tests
    self.log = gLogger.getSubLogger("JobCreater")

    from ILCDIRAC.Interfaces.API.DiracILC                  import DiracILC, __RCSID__ as drcsid
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob      import __RCSID__ as jrcsid
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import __RCSID__ as apprcsid

    if self.clip.submitMode == "local":
      self.log.notice("")
      self.log.notice("       DIRAC RCSID:", drcsid )
      self.log.notice("         Job RCSID:", jrcsid )
      self.log.notice("Applications RCSID:", apprcsid )
      self.log.notice("")

    self.diracInstance = DiracILC(False, 'tests.rep')
    self.jobList = {}
 def initialize(self):
   """Agent initialisation: configure polling, monitoring and client objects.

   Called once by the agent framework before the execution loop starts.
   Returns S_OK() on success.
   """
   # Poll once per day by default (86400 seconds).
   self.pollingTime = self.am_getOption('PollingTime', 86400)
   gMonitor.registerActivity("Iteration", "Agent Loops", AGENT_NAME, "Loops/min", gMonitor.OP_SUM)
   # Clients used during the agent cycle.
   self.ppc = ProcessProdClient()
   self.dirac = DiracILC()
   self.diracadmin = DiracAdmin()
   # Operate with an 'Admin' shifter proxy.
   self.am_setOption( 'shifterProxy', 'Admin' )

   return S_OK()
Exemplo n.º 6
0
def subDDSim():
    """Configure and submit a single DDSim simulation job.

    Reads submission options from the module-level ``_clip`` object and
    terminates the process with status -1 when no input file was given.
    """
    # Decide parameters for a job.
    # NOTE(review): a dead "outputSE = \"KEK-SRM\"" assignment, immediately
    # overwritten, was removed here.
    outputSE = "KEK-DISK"

    isLocal = _clip.isLocal
    nbevts = 10 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "ddsim_example.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir
    inputFile = _clip.inputFile
    if inputFile == "":
        # Message wording fixed to match the sibling subDDSim variant.
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"
                        ])  # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v02")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer.py "
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)

    # ddsim.setRandomSeed(1234565)
    # ddsim.setStartFrom(20)        # Number of events to skip before starting ddsim

    job.append(ddsim)

    # Register output data only when an output directory was requested.
    if outputDir != "":
        job.setOutputData([outputFile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
def _createLFNList():
  """Print the output-data LFNs recorded in the job repository file.

  Exits with status 2 when no repository location was given, 0 otherwise.
  """
  cliparams = _Params()
  cliparams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )

  repoLocation = cliparams.repo
  if not repoLocation:
    Script.showHelp()
    dexit(2)
  from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
  dirac = DiracILC(True, repoLocation)

  dirac.monitorRepository(False)
  # retrieveRepositoryOutputDataLFNs returns the list directly; the redundant
  # "lfns = []" initialisation was removed.
  lfns = dirac.retrieveRepositoryOutputDataLFNs()
  LOG.notice("lfnlist=[")
  for lfn in lfns :
    LOG.notice('"LFN:%s",' % lfn)
  LOG.notice("]")
  dexit(0)
Exemplo n.º 8
0
def _createLFNList():
    """Print the output-data LFNs recorded in the job repository file.

    Exits with status 2 when no repository location was given, 0 otherwise.
    """
    cliparams = _Params()
    cliparams.registerSwitches()
    Script.parseCommandLine(ignoreErrors=False)
    from DIRAC import gLogger

    repoLocation = cliparams.repo
    if not repoLocation:
        Script.showHelp()
        dexit(2)
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    dirac = DiracILC(True, repoLocation)

    dirac.monitorRepository(False)
    # retrieveRepositoryOutputDataLFNs returns the list directly; the
    # redundant "lfns = []" initialisation was removed.
    lfns = dirac.retrieveRepositoryOutputDataLFNs()
    gLogger.notice("lfnlist=[")
    for lfn in lfns:
        gLogger.notice('"LFN:%s",' % lfn)
    gLogger.notice("]")
    dexit(0)
Exemplo n.º 9
0
 def submit(self, diracinstance = None, mode = "wms"):
   """ Submit call: when your job is defined, and all applications are set, you need to call this to
   add the job to DIRAC.

   :param diracinstance: DiracILC instance to submit through; a fresh one is
     created when None is given
   :param str mode: "wms" (default), "agent", or "local"
   """
   # Serialise the applications into the job workflow; abort on failure.
   res = self._addToWorkflow()
   if not res['OK']:
     return res
   self.oktosubmit = True
   # Reuse the caller-provided DiracILC instance when available.
   if not diracinstance:
     self.diracinstance = DiracILC()
   else:
     self.diracinstance = diracinstance
   return self.diracinstance.submit(self, mode)
Exemplo n.º 10
0
def create_job(inputData, saveName, outputDir, dontPromptMe):
    """Create and submit a Marlin analysis job over the given input data.

    :param list inputData: LFNs of the input slcio files (with 'LFN:' prefix)
    :param str saveName: base name for the slcio/root output files
    :param str outputDir: output path registered with the storage element
    :param bool dontPromptMe: skip interactive confirmation prompts when True
    :returns: False after submission; None when the gear file is missing
    """
    slcioFile = saveName + '.slcio'
    rootFile = saveName + '.root'

    # Clear any previous outputs of the same name.
    if check_file_existence(outputDir, slcioFile, dontPromptMe):
        remove_file(outputDir, slcioFile, dontPromptMe)
    if check_file_existence(outputDir, rootFile, dontPromptMe):
        remove_file(outputDir, rootFile, dontPromptMe)

    dIlc = DiracILC()

    job = UserJob()
    job.setOutputSandbox(['*.out', '*.log', '*.sh', '*.py', '*.xml'])
    if SAVE_SLCIO:
        job.setOutputData([slcioFile, rootFile],
                          OutputPath=outputDir,
                          OutputSE=STORAGE_SE)
    else:
        job.setOutputData(rootFile, OutputPath=outputDir, OutputSE=STORAGE_SE)
    job.setJobGroup('myMarlinRun1')
    job.setName('MyMarlinJob1')
    # job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk'])
    job.setInputSandbox(['LFN:/ilc/user/a/amaier/mylibs.tar.gz'])

    marl = Marlin()
    marl.setVersion('ILCSoft-2017-11-15_gcc62')

    marl.setInputFile(inputData)
    # BUGFIX: str.lstrip('LFN:') strips any of the characters {L, F, N, :}
    # from the left (it is a character-set strip, not prefix removal), which
    # corrupts LFNs whose path starts with those letters. Remove the literal
    # 'LFN:' prefix instead.
    job.setInputData([x[4:] if x.startswith('LFN:') else x for x in inputData])
    # marl.setInputFile(['LFN:/ilc/prod/clic/1.4tev/qq_ln/ILD/DST/00003249/010/qq_ln_dst_3249_10000.slcio'])
    marl.setSteeringFile('marlin/FullChain.xml')
    # marl.setSteeringFile('marlin/FullChainNewDetModel.xml')
    marl.setOutputFile(slcioFile)
    gearFile = '/afs/cern.ch/user/a/amaier/projects/CLIC_analysis/grid/marlin/clic_ild_cdr.gear'
    if not os.path.isfile(gearFile):
        print('Error: gear file', gearFile,
              'does not exist! Abort submission.')
        return  # implicitly None (falsy), mirrors the final "return False"
    marl.setGearFile(gearFile)
    marl.setExtraCLIArguments(
        "--MyNtupleMaker.OutputFileName={rootOutfile}".format(
            rootOutfile=rootFile))
    # marl.setNumberOfEvents(1000)

    job.append(marl)
    if dontPromptMe:
        job.dontPromptMe()
    job.submit(dIlc)

    return False
Exemplo n.º 11
0
def testAndProbeSites():
    """submits jobs to test sites"""
    clip = Params()
    clip.registerSwitches()
    Script.parseCommandLine()

    from DIRAC import gLogger, exit as dexit

    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import CheckWNs
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

    from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getQueues

    queueRes = getQueues(siteList=clip.site, ceList=clip.ce)
    if not queueRes['OK']:
        gLogger.error("Failed getting the queues", queueRes['Message'])
        dexit(1)

    # Flatten the {site: {ce: ...}} mapping into a flat list of CE names.
    computingElements = []
    for siteCEs in queueRes['Value'].values():
        computingElements.extend(siteCEs.keys())

    gLogger.notice("Found %s CEs to look at." % len(computingElements))

    dirac = DiracILC(True, "SiteProbe.rep")

    # One probe job per CE, pinned to that CE via setDestinationCE.
    for ce in computingElements:
        probeJob = UserJob()
        probeJob.setDestinationCE(ce)
        appendRes = probeJob.append(CheckWNs())
        if not appendRes['OK']:
            gLogger.error(appendRes['Message'])
            continue
        probeJob.setOutputSandbox("*.log")
        probeJob.setCPUTime(30000)
        probeJob.dontPromptMe()
        submitRes = probeJob.submit(dirac)
        if not submitRes['OK']:
            gLogger.error("Failed to submit job, aborting")
            dexit(1)

    dexit(0)
Exemplo n.º 12
0
def subWhizard2():
    """Configure and submit a Whizard2 generation job.

    Reads submission options from the module-level ``_clip`` object.
    """
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin, Whizard2

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "E500.P2f_bB.GWhizard2.I100000.e0.p0.n001.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setInputSandbox(["pythia6-parameters.sin", "P2f_qqbar.sin"])
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setOutputData([outputFile])
    job.setJobGroup("mywhiz2")
    job.setName("mywhizard2")

    # Whizard2 generator configuration.
    whiz = Whizard2()
    whiz.setVersion("2.7.0")
    whiz.setNumberOfEvents(nbevts)
    whiz.setEvtType("P2f_bB")
    whiz.setProcessVariables("P2f_bB")
    # whiz.setRandomSeed(15)
    whiz.setSinFile("P2f_qqbar.sin")
    whiz.setOutputFile(outputFile)
    job.append(whiz)

    if outputDir != "":
        # print converted from Python 2 statement to function form
        # (single-argument calls behave identically on both versions).
        print(" outputDir = " + outputDir)
        print(" outputSE = " + outputSE)
        job.setOutputData([outputFile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Exemplo n.º 13
0
def subDDSim(clip1):
    """Create and configure a DDSim application from the given parameters.

    :param clip1: parameter holder providing ``isLocal``, ``numberOfEvents``,
        ``outputFile``, ``outputDir`` and ``inputFile``
    :returns: the configured DDSim application; submission happens in the caller
    """
    isLocal = clip1.isLocal  # NOTE(review): unused here; kept for reference
    # The original conditionals ("0 if x == 0 else x") were identity no-ops.
    nbevts = clip1.numberOfEvents
    outputFile = clip1.outputFile
    outputDir = clip1.outputDir  # NOTE(review): unused here; kept for reference
    inputFile = clip1.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission
    # NOTE(review): this UserJob is configured but never submitted or
    # returned; only the DDSim application leaves this function.
    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"
                        ])  # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v05")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer_July26.py"
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)

    return ddsim
Exemplo n.º 14
0
  def setUp( self ):
    """set up the objects

    Patches the Operations helper module in sys.modules BEFORE importing
    DiracILC, so the import binds against the mock instead of the real
    configuration system.
    """
    ops_mock = Mock()
    mocked_modules = { 'DIRAC.ConfigurationSystem.Client.Helpers.Operations' : ops_mock }
    self.module_patcher = patch.dict( sys.modules, mocked_modules )
    self.module_patcher.start()
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    self.dilc = DiracILC()

    def setOptions(*args):
      # Scripted Operations.getValue: the answer depends on the option path.
      # Unmatched paths fall through and return None.
      if 'SingleReplicaSEs' in args[0]:
        return ['SE']
      if 'Minimum' in args[0]:
        return 1
      if args[0].endswith('PreferredSEs'):
        return ['Awesome-Tape-SE']

    # Replace the Operations helper on the instance with the scripted mock.
    ops_mock = Mock()
    ops_mock.getValue = Mock()
    ops_mock.getValue.side_effect = setOptions
    self.dilc.ops = ops_mock
Exemplo n.º 15
0
    def __init__(self):
        """Set up the DIRAC client, a template FCC user job and EOS access helpers."""
        self.dIlc = DiracILC(False)

        # Template user job reused for every FCC submission.
        job = UserJob()
        job.setJobGroup("FCC")
        job.setName("FCC APP")
        job.setOutputSandbox(["*.log", '*.root'])
        #job.setDestination('LCG.DESY-HH.de')
        self.job = job

        # EOS public endpoint, the shell export used by wrapper scripts,
        # and the xrootd client bound to that endpoint.
        eosUrl = 'root://eospublic.cern.ch'
        self.EOS_MGM_URL = eosUrl
        self.setEOS = 'export EOS_MGM_URL=' + eosUrl
        self.myclient = client.FileSystem(eosUrl + ':1094')

        # Sandbox bookkeeping containers.
        self.InputSandbox = []
        self.folders_to_upload = []
        self.filtered_extensions = []
        self.excludes_or_includes = []
        self.temp_cwd = os.path.join(os.getcwd(), 'fcc_temp_dirac')
Exemplo n.º 16
0
    ma.setSteeringFile("clic_ild_cdr_steering.xml")
    ma.setGearFile("clic_ild_cdr.gear")
    result = j.append(ma)
    if not result['OK']:
        gLogger.error(result["Message"])
        dexit(1)
    j.setCPUTime(10000)
    j.setOutputSandbox("*.log")
    return j


if __name__ == '__main__':

    lfns = getFiles()  #get a list of files

    d = DiracILC(True, "paramjobtest.rep")  #get your dirac instance

    job = getJob()  #get a job, any can do

    #here is where the interesting stuff happens: one parametric job per chunk
    from DIRAC.Core.Utilities.List import breakListIntoChunks
    for flist in breakListIntoChunks(lfns, 200):
        #200 is the number of files per chunk, and the max number of jobs produced in one go

        #This is the magical line
        job.setParametricInputData(flist)

        #The rest of the submission is the same
        res = job.submit(d)
        if not res["OK"]:
            gLogger.error("Failed to submit the job: ", res["Message"])
Exemplo n.º 17
0
lfns = res['Value']
print("found %s files" % len(lfns))


# Overlay configuration: gamma gamma -> hadrons background, 500 GeV,
# 300 bunch crossings overlaid per event.
ovi = OverlayInput()
ovi.setEnergy(500.)
ovi.setBXOverlay(300)
ovi.setGGToHadInt(0.3)
ovi.setNbSigEvtsPerJob(10)
ovi.setBkgEvtType("gghad")
ovi.setDetectorModel("CLIC_ILD_CDR")

# Process every LFN twice: once with overlay, once without.
overlay = [True,False]

for ov in overlay:
  # One repository file per overlay setting.
  d = DiracILC(True,"repo_overlay_%s.rep"%ov)
  for lfn in lfns:
    j = UserJob()
    steeringf = "clic_ild_cdr_steering.xml"
    if ov:
      # Overlay runs use a dedicated steering file and the OverlayInput step.
      steeringf = "clic_ild_cdr_steering_overlay.xml"
      res = j.append(ovi)
      if not res['OK']:
        print(res['Message'])
        continue
    ma = Marlin()
    ma.setVersion("v0111Prod")
    ma.setGearFile("clic_ild_cdr.gear")
    ma.setSteeringFile(steeringf)
    ma.setInputFile("LFN:"+lfn)
    ma.setNbEvts(10)
Exemplo n.º 18
0
# Derived names for the local working directory and the job repository file.
nameDir = nameJobGroup+'_files'+nameTag
nameRepositoryFile = nameJobGroup+nameTag+'_repository.rep'


#####################################################################

#####################################################################
#set environment
import os
import sys

from DIRAC.Core.Base import Script #DIRAC environment
Script.parseCommandLine() #DIRAC environment

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC #job receiver class
dirac = DiracILC(True,nameRepositoryFile) #repository file is needed to retrieve jobs later
#####################################################################

#####################################################################
#job definition
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
#####################################################################

#####################################################################
#file catalog
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
#####################################################################

#####################################################################
#ddsim
from ILCDIRAC.Interfaces.API.NewInterface.Applications import DDSim
Exemplo n.º 19
0
    elif opt in ('v', 'version'):
        slicVersion = arg
    elif opt in ('w', 'workflow'):
        workflowName = arg
    elif opt in ('n', 'nocheck'):
        checkMeta = False

# Both a detector name and a production ID are required.
# "is None" replaces the non-idiomatic "== None" comparisons.
if (detectorName is None) or (prodID is None):
    Script.showHelp()
    sys.exit(2)

from ILCDIRAC.Interfaces.API.NewInterface.ProductionJob import ProductionJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLIC, LCSIM, SLICPandora, OverlayInput
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
dirac = DiracILC()

# Build the metadata query used to locate the input files; optional
# fields are added only when the corresponding CLI value was given.
meta = {}
meta['ProdID'] = prodID
if eventType:
    meta['EvtType'] = eventType
if energy:
    meta['Energy'] = energy
if dataType:
    meta['Datatype'] = dataType

fc = FileCatalogClient()
res = fc.getCompatibleMetadata(meta)
if not res['OK']:
    # print converted from Python 2 statement to function form.
    print("Error looking up the catalog for metadata")
    exit(2)
Exemplo n.º 20
0
def all_jobs(name):
    """Build and submit one CAIN generation + DDSim simulation job chain.

    :param str name: path to the CAIN input file; its last three path
        components (run directory / subdirectory / file name) are used to
        derive log and output file names
    """
    d = DiracILC(True, "repo.rep")

    ################################################
    j = UserJob()
    j.setJobGroup("PM1")
    j.setName("Exec1")
    banned_sites = [
        "OSG.BNL.us", "LCG.UKI-NORTHGRID-LIV-HEP.uk", "OSG.UCSDT2.us",
        "LCG.SCOTGRIDDURHAM.uk", "LCG.NIKHEF.nl", "LCG.UKI-SOUTHGRID-RALPP.uk",
        "LCG.GRIF.fr", "LCG.Manchester.uk", "LCG.UKI-LT2-IC-HEP.uk",
        "LCG.Weizmann.il"
    ]

    j.setBannedSites(banned_sites)

    caindir = name
    # Input sandbox: CAIN executable, wrapper scripts, steering and config files.
    indata = [
        'LFN:/ilc/user/a/amustahid/cain.exe',
        str(caindir), 'LFN:/ilc/user/a/amustahid/runcain.sh',
        'LFN:/ilc/user/a/amustahid/convert_pairs_lcio.py',
        'LFN:/ilc/user/a/amustahid/pyLCIO.tar.gz',
        '/home/belle2/mustahid/useful/my.sh', './splitInput.py',
        './subddsim.py', './ddsim_steer_July26.py', './ILD_l5_v05.xml',
        './my2.sh', './dbd_500GeV.nung_1.xml',
        'LFN:/ilc/user/a/amustahid/myProcessors.tar.gz', './create_dir.py',
        './conf.py', './util.py', './testcain.sh', './beam_250.i'
    ]
    j.setInputSandbox(indata)

    ################################################

    #app = GenericApplication()
    #app.setScript("create_dir.py")
    #app.setInputFile("testcain.sh")
    #logf = 'create_dir.log'
    #app.setLogFile(logf)
    #app.setDebug(debug=True)
    #create_dirname = 'create_dir'
    #app.setName(create_dirname)
    #res=j.append(app)
    #if not res['OK']:
    #  print(res['Message'])
    #  exit(1)
    ################################################
    # CAIN step: run the beam-beam simulation via the runcain.sh wrapper.
    appre = GenericApplication()
    # Locals renamed so the 'name' parameter is no longer shadowed.
    pathParts = name.split('/')
    cain_name = pathParts[-1]
    subdir = pathParts[-2]
    dirname = pathParts[-3]

    appre.setScript("LFN:/ilc/user/a/amustahid/runcain.sh")
    #appre.setScript("testcain.sh")
    # Keep only the first three dot-separated components of the file name.
    ifileParts = cain_name.split('.')
    ifile = ifileParts[0] + '.' + ifileParts[1] + '.' + ifileParts[2]

    appre.setArguments(ifile)
    logf = ifile + '_' + subdir + '.log'
    appre.setLogFile(logf)
    appre.setDebug(debug=True)
    appre.setName('CAIN')
    res = j.append(appre)
    if not res['OK']:
        # print statements converted to Python 3 compatible function calls.
        print(res['Message'])
        exit(1)
    ################################################

    ################################################
    #appost = GenericApplication()
    #appost.setScript("myanal.sh")
    #appost.setArguments("This is my analysis step")
    #res=j.append(appost)
    #if not res['OK']:
    #  print(res['Message'])
    #  exit(1)

    # Helper step (my.sh).
    ap = GenericApplication()
    ap.setScript('my.sh')
    ap.setLogFile('my.log')
    ap.setDebug(debug=True)
    ap.setName('my')
    res = j.append(ap)
    if not res['OK']:
        print(res['Message'])
        exit(1)

    outfile = 'incoherent_pair.dat'
    appre.setOutputFile(outfile)

    ################################################
    # DDSim step: simulate the incoherent-pair background sample.
    direc = 'incoherent_pair'
    inputFile = direc + '/' + 'inco_pair_split.slcio'

    base = '.'
    #outdir=base+'/'+dirname+'/slcio_test_2ndrun'
    outdir = base + '/' + dirname + '/Run_7'
    geant_name = ifile
    outputFile = geant_name + '_' + subdir + '.slcio'

    nbevents = 100
    clip = _Params(nbevents, inputFile, outputFile, outdir)
    ddsim = subDDSim(clip)
    ################################################

    res = j.append(ddsim)
    if not res['OK']:
        print(res['Message'])
        exit(1)

    j.setOutputData(outputFile, outdir, "KEK-SRM")
    j.setOutputSandbox(["*.log", "*.dat", "*.slcio"])
    j.dontPromptMe()
    res = j.submit(d)
    #res = j.submit(d, mode='local')
    if res['OK']:
        print(str(res["Value"]))
    else:
        print("Failed to submit Dirac job. return message was as follows.")
        pprint.pprint(res)
0
from DIRAC.Core.Base import Script

Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Whizard, Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob

from DIRAC import exit as dexit

dirac = DiracILC()

# wh.setOutputFile("myfile.stdhep")

j = UserJob()

# Whizard generator: 1 event of ee_h_mumu at 3 TeV, Standard Model.
wh = Whizard(processlist=dirac.getProcessList())
wh.setEnergy(3000)
wh.setEvtType("ee_h_mumu")
wh.setNbEvts(1)
wh.setEnergy(3000)  # NOTE(review): duplicate of the setEnergy(3000) call above
params = {}
params["USERB1"] = "F"
wh.setParameterDict(params)
wh.setModel("sm")
res = j.append(wh)
if not res["OK"]:
    # print converted from Python 2 statement to function form.
    print(res["Message"])
    dexit(1)

Exemplo n.º 22
0
Created on Feb 8, 2012

:author: Stephane Poss
'''
__RCSID__ = "$Id$"
#pylint: skip-file
#pylint: disable=C0103
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.NewInterface.ProductionJob import ProductionJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Whizard, Mokka, Marlin, OverlayInput, StdhepCut, StdhepCutJava
from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLIC, LCSIM, SLICPandora, SLCIOSplit, StdHepSplit
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

dirac = DiracILC()

###As it's a full chain, we start at generation
##so we need to define the process and the energy
## The rest will be set later. We could also set the process 
##and the energy directly in the whizard def, but for clarity
## it's better to do it before, that way we know the very 
##essential


def getdicts(process):
  """ Create the proper structures to build all the prodcutions for the samples with ee_, ea_ aa_.
  """
  plist = []
  if process.count("ee_"):
    plist.append({'process':process,'pname1':'e1', 'pname2':'E1', "epa_b1":'F', "epa_b2":'F'})
Exemplo n.º 23
0
#every single script has these four lines

from DIRAC.Core.Base import Script
Script.parseCommandLine()

#create ilcdirac instance (True enables the job repository file)
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
dirac = DiracILC(True, "some_job_repository.rep")

#job definition
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
#job = UserJob()
from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLIC
from ILCDIRAC.Interfaces.API.NewInterface.Applications import LCSIM

#each entry is (slic macro file, output slcio file, number of events)
#jobParams = [('slicTest8_mu+_theta90.mac','diracTest8_mu+_theta90.slcio',50),('slicTest7_mu+_theta_5-175.mac','diracTest_mu+_theta_5-175.slcio',50),('slicTest3_e+.mac','diracTest3_e+.slcio',10),('slicTest2_pi+.mac','diractTest2_pi+.slcio',10)]
#jobParams = [('slicTest10_mu+_100gev_theta70_testNewGeom.mac','diracTest10_mu+_100gev_theta70_testNewGeom.slcio',10),('slicTest10_mu+_100gev_theta90_testNewGeom.mac','diracTest10_mu+_100gev_theta90_testNewGeom.slcio',10)]
jobParams = [
    ('/users/detector/ssetru/SiDSim/detectors/detector_vtx_matbudghalf_nonsensitivelayer/slicmacros/slicTest8_mu+_100gev_theta60.mac',
     'diracTest_100gev_theta60_vtx_matbudghalf_nonsensitivelayer.slcio', 100),
    ('/users/detector/ssetru/SiDSim/detectors/detector_vtx_matbudghalf_nonsensitivelayer/slicmacros/slicTest8_mu+_10gev_theta60.mac',
     'diracTest_10gev_theta60_vtx_matbudghalf_nonsensitivelayer.slcio', 100)
]
#slicMacros = ['slicTest8_mu+_theta90.mac','slicTest7_mu+_theta_5-175.mac','slicTest3_e+.mac','slicTest2_pi+.mac']
#fileOutputs = ['diracTest2Loop1.slcio','diracTest2Loop2.slcio','diracTest2Loop3.slcio','diractTest2Loop4.slcio']
#slicNumEvents = [100,100,10,10]

for macro, output, nEvts in jobParams:
    job = UserJob()
    job.setName("ssetru_dirac_test1")
    job.setJobGroup("tests")
Exemplo n.º 24
0
def main():
    # Take the input arguments from the argument parser, and check they exist...
    args = parse_args()
    if not args:
        print 'Invalid Arguments'
        sys.exit(1)

#### Software Versions ####
    softVersions = ["v3r0p3", "HEAD", "ILC_DBD",
                    "0116"]  # Working (recommended)
    # softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working
    # softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working

    # Check the --runs and --split arguments to make sure they are compatible, if not exit...
    if not check_events_arguments(args.events, args.split):
        sys.exit(1)

    # Check the input LFN given by user, it needs to have .stdhep extension and should not have LFN: at the beginning...
    lfn_check, lfn = check_input_LFN(args.stdhepInput)
    if not lfn_check:
        sys.exit(1)

    # Call when you begin ILC-DIRAC jobs, the true indicates a repository file is included...
    dirac = DiracILC(True,
                     setup_repository_name(args.stdhepInput, args.detector))

    # Prepares the input and output sandboxes, if -f, then adds the files required for flavortagging,
    # into the input sandbox
    inputSandbox, outputSandbox = setup_sandboxes(args.macFile, args.flavortag)

    # Prepares values for the job loop...
    if args.split < 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.events)
    if args.split > 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.split)

    # Loop that runs through the required number of jobs to be executed...
    for startEvent in range(0, nInputEvents, nOutputEvents):

        ################## Job Initialise ########################################
        job = UserJob()
        job.setName(path.basename(args.stdhepInput))
        job.setJobGroup('JobGroup')
        job.setInputSandbox(inputSandbox)
        fileNumber = startEvent / nOutputEvents
        print "Job ", fileNumber

        outputFiles = setup_output_dict(args.stdhepInput, args.detector,
                                        fileNumber, args.outputPath,
                                        softVersions)
        slicOutput = outputFiles['slicOutput']
        prePandoraOutput = outputFiles['prePandoraOutput']
        pandoraOutput = outputFiles['pandoraOutput']
        vertexingOutput = outputFiles['vertexingOutput']
        lcsimRecOutput = outputFiles['lcsimRecOutput']
        lcsimDstOutput = outputFiles['lcsimDstOutput']
        flavortagOutput = outputFiles['flavortagOutput']
        diracOutput = outputFiles['diracOutput']

        ################## SLIC ##################################################
        slic = SLIC()
        slic.setVersion(softVersions[0])
        slic.setSteeringFile(args.macFile)
        # slic.setInputFile(lfn)
        slic.setOutputFile(slicOutput)
        slic.setDetectorModel(args.detector)
        slic.setNumberOfEvents(nOutputEvents)
        slic.setStartFrom(startEvent)
        #print slic.listAttributes()
        result = job.append(slic)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## lcsim (digitization and tracking) #####################
        lcsim = LCSIM()
        lcsim.setVersion(softVersions[1])
        lcsim.setSteeringFile(
            'steeringFiles/sid_dbd_prePandora_noOverlay_v22.xml'
        )  # Another version is included in /steeringFiles
        lcsim.getInputFromApp(slic)
        lcsim.setTrackingStrategy(
            'steeringFiles/sidloi3_trackingStrategies_default.xml')
        # lcsim.setAliasProperties('alias.properties')
        lcsim.setDetectorModel('geometryFiles/sidloi3.zip')
        lcsim.setOutputFile(prePandoraOutput)
        lcsim.setNumberOfEvents(nOutputEvents)
        #print lcsim.listAttributes()
        result = job.append(lcsim)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## slicPandora ###########################################
        slicPandora = SLICPandora()
        slicPandora.setVersion(softVersions[2])
        slicPandora.setDetectorModel(args.detector)
        slicPandora.getInputFromApp(lcsim)
        slicPandora.setOutputFile(pandoraOutput)
        slicPandora.setPandoraSettings('pandoraSettings.xml')
        slicPandora.setNumberOfEvents(nOutputEvents)
        #print slicPandora.listAttributes()
        result = job.append(slicPandora)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## Marlin, LCFIPlus Vertexing ############################
        vertexing = Marlin()
        vertexing.setVersion(softVersions[3])
        vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml')
        vertexing.setGearFile('steeringFiles/sidloi3.gear')
        vertexing.getInputFromApp(slicPandora)
        vertexing.setOutputFile(vertexingOutput)
        vertexing.setNumberOfEvents(nOutputEvents)
        #print vertexing.listAttributes()
        result = job.append(vertexing)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## lcsim (DST production) ################################
        lcsimDst = LCSIM()
        lcsimDst.setVersion(softVersions[1])
        lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml')
        lcsimDst.getInputFromApp(vertexing)
        lcsimDst.setNumberOfEvents(nOutputEvents)
        # lcsimDst.setAliasProperties('alias.properties')
        lcsimDst.setDetectorModel('geometryFiles/sidloi3.zip')
        lcsimDst.setOutputRecFile(lcsimRecOutput)
        lcsimDst.setOutputDstFile(lcsimDstOutput)
        #print lcsimDst.listAttributes()
        result = job.append(lcsimDst)
        if not result['OK']:
            print result['Message']
            sys.exit(2)

################## Marlin, LCFIPlus flavortag ############################
        if args.flavortag:
            flavortag = Marlin()
            flavortag.setVersion(softVersions[3])
            flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml')
            flavortag.setGearFile('steeringFiles/sidloi3.gear')
            flavortag.setInputFile(lcsimDstOutput)
            flavortag.setOutputFile(flavortagOutput)
            flavortag.setNumberOfEvents(nOutputEvents)
            #print flavortag.listAttributes()
            result = job.append(flavortag)
            if not result['OK']:
                print result['Message']
                sys.exit(2)


################## Job Finalise ##########################################

# List of banned sites that the job shall not be sent too. These are sites that jobs tend to fail on,
# This list is likely to change.
        job.setBannedSites([
            'LCG.IN2P3-CC.fr',
            'LCG.RAL-LCG2.uk',
            'LCG.DESY-HH.de',
            'LCG.DESYZN.de',
            'LCG.KEK.jp',
            'OSG.PNNL.us',
        ])

        job.setCPUTime(50000)
        job.setPlatform('x86_64-slc5-gcc43-opt')

        # Sets the output data file according to if -f is selcted, ships ouput to your /ilc/user/a/aPerson/
        # directory on the grid.
        if args.flavortag:
            job.setOutputData(flavortagOutput, diracOutput, args.SE)

        else:
            job.setOutputData(lcsimDstOutput, diracOutput, args.SE)

        job.setOutputSandbox(outputSandbox)
        job.setInputData(lfn)

        if args.dontPromptMe:
            job.dontPromptMe()
        # Submits Job!!!
        job.submit()

    return 0
class SoftwareManagementAgent( AgentModule ):
  """ Agent to run software management things.

  Keeps the site list in sync with the DIRAC site mask, submits
  software-installation jobs for pending tasks, and mirrors the status
  of those jobs back into the ProcessProd service.
  """
  def initialize(self):
    """Set polling time, register monitoring, and create the RPC/Dirac clients."""
    self.pollingTime = self.am_getOption('PollingTime', 86400)
    gMonitor.registerActivity("Iteration", "Agent Loops", AGENT_NAME, "Loops/min", gMonitor.OP_SUM)
    self.ppc = ProcessProdClient()
    self.dirac = DiracILC()
    self.diracadmin = DiracAdmin()
    # Installation jobs are submitted with the shifter's Admin proxy.
    self.am_setOption( 'shifterProxy', 'Admin' )

    return S_OK()

  ##############################################################################
  def execute(self):
    """  First we update the site list and banned sites, then submit any
    pending installation jobs and refresh the status of known jobs.
    """
    res = getProxyInfo(False, False)
    if not res['OK']:
      self.log.error("submitTasks: Failed to determine credentials for submission", res['Message'])
      return res
    proxyInfo = res['Value']
    owner = proxyInfo['username']
    ownerGroup = proxyInfo['group']
    self.log.info("submitTasks: Jobs will be submitted with the credentials %s:%s" % (owner, ownerGroup))

    sites = self.diracadmin.getSiteMask()['Value']
    for site in sites:
      res = self.ppc.changeSiteStatus( {'SiteName' : site, 'Status' : 'OK'} )
      if not res['OK']:
        self.log.error('Cannot add or update site %s' % site)

    banned_sites = self.diracadmin.getBannedSites()['Value']
    for banned_site in banned_sites:
      # FIX: the result was previously discarded, so the stale 'res' from the
      # loop above was checked instead of this call's outcome.
      res = self.ppc.changeSiteStatus( {'SiteName' : banned_site, 'Status' : 'Banned'} )
      if not res['OK']:
        self.log.error('Cannot mark as banned site %s' % banned_site)

    ##Then we need to get new installation tasks
    res = self.ppc.getInstallSoftwareTask()
    if not res['OK']:
      self.log.error('Failed to obtain task')
      # FIX: on failure there is no 'Value' key; fall through with no tasks
      # instead of raising KeyError.
      task_dict = {}
    else:
      task_dict = res['Value']
    for softdict in task_dict.values():
      self.log.info('Will install %s %s at %s' % (softdict['AppName'], softdict['AppVersion'], softdict['Sites']))
      for site in softdict['Sites']:
        j = UserJob()
        j.setPlatform(softdict['Platform'])
        j.dontPromptMe()
        j.setDestination(site)
        j.setJobGroup("Installation")
        j.setName('install_%s' % site)
        j._addSoftware(softdict['AppName'], softdict['AppVersion'])
        #Add the application here somehow.
        res  = j.append(SoftwareInstall())
        if not res['OK']:
          self.log.error(res['Message'])
          continue
        res = j.submit(self.dirac)
        #res = self.dirac.submit(j)
        if not res['OK']:
          self.log.error('Could not create the job')
          continue
        jobdict = {}
        jobdict['AppName'] = softdict['AppName']
        jobdict['AppVersion'] = softdict['AppVersion']
        jobdict['Platform'] = softdict['Platform']
        jobdict['JobID'] = res['Value']
        jobdict['Status'] = 'Waiting'
        jobdict['Site'] = site
        res = self.ppc.addOrUpdateJob(jobdict)
        if not res['OK']:
          self.log.error('Could not add job %s: %s' % (jobdict['JobID'], res['Message']))

    ##Monitor jobs
    jobs = {}
    res = self.ppc.getJobs()
    if not res['OK']:
      self.log.error('Could not retrieve jobs')
    else:
      jobs = res['Value']
      for job in jobs:
        res = self.dirac.status(job['JobID'])
        if res['OK']:
          # dirac.status returns a dict keyed by job ID; the previous code
          # indexed with the literal string 'JobID' and could never match.
          jobstatuses = res['Value']
          job['Status'] = jobstatuses[job['JobID']]['Status']
          res = self.ppc.addOrUpdateJob(job)
          if not res['OK']:
            self.log.error("Failed to update job %s: %s" % (job['JobID'], res['Message']))
        else:
          self.log.error("Failed to update job %s status" % job['JobID'])

    return S_OK()
Exemplo n.º 26
0
# Read the Marlin steering template once; it is re-used for every event
# selection below.
base = open(steeringTemplateFile,'r')
steeringTemplateContent = base.read()
base.close()

for eventSelection in eventsToSimulate:
    eventType = eventSelection['EventType']
    detectorModel = eventSelection['DetectorModel']
    reconstructionVariant = eventSelection['ReconstructionVariant']
    energy = eventSelection['Energy']
    analysisTag = eventSelection['AnalysisTag']

    # Make local gear file
    os.system('cp ' + gearFile + ' .')
    gearFileLocal = os.path.basename(gearFile)

    # One DiracILC instance per selection; no repository file is kept.
    diracInstance = DiracILC(withRepo=False)

    # Regex matching the DST slcio files produced for this sample.
    slcioFormat = 'DetModel_' + detectorModel + '_RecoVar_' + reconstructionVariant + '_' + eventType + '_' + str(energy) + 'GeV_GenNumber_(.*?)_(.*?)_(.*?)_DST.slcio'

    slcioFilesToProcess = getDstSlcioFiles(jobDescription,detectorModel,reconstructionVariant,energy,eventType)

    if not slcioFilesToProcess:
        print 'No slcio files found.  Exiting job submission.'
        sys.exit()

    # NOTE(review): loop body is truncated in this chunk.
    for slcioFile in slcioFilesToProcess:
        print 'Checking ' + eventType + ' ' + str(energy) + 'GeV jobs.  Detector model ' + detectorModel + '.  Reconstruction stage ' + reconstructionVariant + '.  Slcio file ' + slcioFile + '.'
        slcioFileNoPath = os.path.basename(slcioFile)

        inputSandbox = ['LFN:/ilc/user/s/sgreen/SelectionProcessorTarBall/MarlinSelectionProcessor.tar.gz']
Exemplo n.º 27
0
# LFNs to reconstruct, obtained from a catalog query earlier in the file.
lfns = res['Value']
print "found %s files" % len(lfns)

# gamma-gamma -> hadrons overlay configuration, shared by all overlay jobs.
ovi = OverlayInput()
ovi.setEnergy(500.)
ovi.setBXOverlay(300)
ovi.setGGToHadInt(0.3)
ovi.setNbSigEvtsPerJob(10)
ovi.setBkgEvtType("gghad")
ovi.setDetectorModel("CLIC_ILD_CDR")

# Submit every file twice: once with overlay, once without.
overlay = [True, False]

for ov in overlay:
    # Separate job repository per overlay setting.
    d = DiracILC(True, "repo_overlay_%s.rep" % ov)
    for lfn in lfns:
        j = UserJob()
        steeringf = "clic_ild_cdr_steering.xml"
        if ov:
            # Overlay jobs need the dedicated steering file and the
            # OverlayInput step prepended to the job.
            steeringf = "clic_ild_cdr_steering_overlay.xml"
            res = j.append(ovi)
            if not res['OK']:
                print res['Message']
                continue
        # NOTE(review): loop body is truncated in this chunk.
        ma = Marlin()
        ma.setVersion("v0111Prod")
        ma.setGearFile("clic_ild_cdr.gear")
        ma.setSteeringFile(steeringf)
        ma.setInputFile("LFN:" + lfn)
        ma.setNbEvts(10)
Exemplo n.º 28
0
# Parse the DIRAC command line before any other DIRAC/ILCDIRAC import.
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import *
from ILCDIRAC.Interfaces.API.NewInterface.Applications import *

# True -> keep a local repository file for this submission.
dirac = DiracILC(True, "CLICdet_repo.cfg")
job = UserJob()
job.setName("example")
job.setInputData([
    "/ilc/user/p/proloff/stdhep/2f_samples_23_04_2013/whizard_SM_bb_500_90deg.stdhep"
])
job.setCLICConfig("ILCSoft-2017-12-21")
job.setOutputSandbox(["*.root", "*.slcio", "*.log"])

# DDSim simulation step over the same stdhep file.
ddsim = DDSim()
ddsim.setSteeringFile("CLICPerformance/examples/clic_steer.py")
ddsim.setVersion("ILCSoft-2017-12-21_gcc62")
ddsim.setDetectorModel("CLIC_o3_v14")
ddsim.setInputFile(
    "/ilc/user/p/proloff/stdhep/2f_samples_23_04_2013/whizard_SM_bb_500_90deg.stdhep"
)
ddsim.setOutputFile("ddsim_output.slcio")
ddsim.setStartFrom(0)
ddsim.setNumberOfEvents(10)
res1 = job.append(ddsim)
if not res1['OK']:
    print res1['Message']
    # NOTE(review): 'sys' is not imported in this visible chunk — confirm an
    # 'import sys' exists earlier in the full script.
    sys.exit(2)

# -------------------- Comment if gg_had overlay not wanted -----------------------------------------------------#
Exemplo n.º 29
0
class JobCreater(object):
    """contains all the versions and parameters to create all theses tests"""

    # pylint: disable=too-many-instance-attributes
    # Test parameters, necessary due to amount of tests in this class.
    def __init__(self, clip, params):
        """Store the CLI options and the test parameters.

        :param clip: parsed command-line options object; ``testOverlay``
            and ``submitMode`` are read here.
        :param params: dict of application versions / steering files.
            ``mokkaVersion`` and ``rootVersion`` are mandatory keys; all
            others default to ``None`` (or ``False`` for alwaysOverlay).
        """
        self.clip = clip
        self.ildConfig = params.get("ildConfig", None)
        self.alwaysOverlay = params.get("alwaysOverlay", False)
        self.runOverlay = self.clip.testOverlay or self.alwaysOverlay
        self.mokkaVersion = params["mokkaVersion"]
        self.mokkaSteeringFile = params.get("mokkaSteeringFile")
        self.detectorModel = params.get("detectorModel")
        self.marlinVersion = params.get("marlinVersion")
        self.marlinSteeringFile = params.get("marlinSteeringFile")
        self.ddsimVersion = params.get("ddsimVersion")
        self.ddsimDetectorModel = params.get("ddsimDetectorModel")
        self.ddsimInputFile = params.get("ddsimInputFile")
        self.marlinInputdata = params.get("marlinInputdata")
        self.gearFile = params.get("gearFile")
        self.lcsimVersion = params.get("lcsimVersion")
        self.steeringFileVersion = params.get("steeringFileVersion", None)
        self.rootVersion = params["rootVersion"]

        self.whizard2Version = params.get("whizard2Version")
        self.whizard2SinFile = params.get("whizard2SinFile")

        self.energy = params.get("energy")
        self.backgroundType = params.get("backgroundType")
        self.machine = params.get("machine")

        # NOTE: gearFile, marlinSteeringFile and marlinVersion were
        # previously assigned a second time here with identical values;
        # the redundant re-assignments have been removed.

        self.lcsimPreSteeringFile = params.get("lcsimPreSteeringFile")
        self.lcsimPostSteeringFile = params.get("lcsimPostSteeringFile")

        self.fccSwPath = params.get("fccSwPath")
        self.fccSwSteeringFile = params.get("fccSwSteeringFile")

        self.fccAnalysisSteeringFile = params.get("fccAnalysisSteeringFile")

        ### other things needed to run tests
        self.log = gLogger.getSubLogger("JobCreater")

        from ILCDIRAC.Interfaces.API.DiracILC import DiracILC, __RCSID__ as drcsid
        from ILCDIRAC.Interfaces.API.NewInterface.UserJob import __RCSID__ as jrcsid
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import __RCSID__ as apprcsid

        # When running locally, print the RCS IDs so the tested code
        # revisions are recorded in the log.
        if self.clip.submitMode == "local":
            self.log.notice("")
            self.log.notice("       DIRAC RCSID:", drcsid)
            self.log.notice("         Job RCSID:", jrcsid)
            self.log.notice("Applications RCSID:", apprcsid)
            self.log.notice("")

        self.diracInstance = DiracILC(False, 'tests.rep')
        # Maps test name -> prepared job, filled by the create* methods.
        self.jobList = {}

    def createDDSimTest(self, inputfile=None, detectorModel=None):
        """Build a test job running a single DDSim step.

        :param inputfile: generator file to simulate; defaults to
            ``self.ddsimInputFile``.
        :param detectorModel: custom detector model file; when given it is
            also shipped in the input sandbox, otherwise
            ``self.ddsimDetectorModel`` is used.
        :returns: S_OK(job) or S_ERROR
        """
        if inputfile is None:
            inputfile = self.ddsimInputFile
        sandboxFiles = [inputfile]
        if detectorModel is None:
            detectorModel = self.ddsimDetectorModel
        else:
            sandboxFiles.append(detectorModel)

        from ILCDIRAC.Interfaces.API.NewInterface.Applications.DDSim import DDSim
        simApp = DDSim()
        simApp.setVersion(self.ddsimVersion)
        simApp.setDetectorModel(detectorModel)
        simApp.setNumberOfEvents(1)
        simApp.setInputFile(inputfile)

        testJob = self.getJob()
        testJob.setInputSandbox(sandboxFiles)
        appendRes = testJob.append(simApp)
        if not appendRes['OK']:
            self.log.error("Failed adding DDSim:", appendRes['Message'])
            return S_ERROR("Failed adding DDSim to Job")

        return S_OK(testJob)

    def createWhizard2Test(self):
        """Build a test job running a single Whizard2 generation step.

        :returns: S_OK(job) or S_ERROR
        """
        from ILCDIRAC.Interfaces.API.NewInterface.Applications.Whizard2 import Whizard2
        generator = Whizard2()
        generator.setVersion(self.whizard2Version)
        generator.setNumberOfEvents(1)
        generator.setSinFile(self.whizard2SinFile)
        generator.setOutputFile("test.stdhep")

        testJob = self.getJob()
        appendRes = testJob.append(generator)
        if not appendRes['OK']:
            self.log.error("Failed adding Whizard2:", appendRes['Message'])
            return S_ERROR("Failed adding Whizard2 to Job")
        return S_OK(testJob)

    def createMokkaTest(self):
        """Build the Mokka test job, optionally chained after Whizard.

        Input comes from a fresh Whizard step (testChain), from a fixed
        single-muon stdhep LFN (testInputData), or fails otherwise.
        The job is stored under 'Mokka1' in ``self.jobList``.

        :returns: S_OK(job) or S_ERROR
        """
        self.log.notice("Creating jobs for Mokka")
        mokkaJob = self.getJob()
        generator = None
        if self.clip.testChain:
            generator = self.getWhizard(2)
            appendRes = mokkaJob.append(generator)
            if not appendRes['OK']:
                self.log.error("Failed adding Whizard:", appendRes['Message'])
                return S_ERROR("Failed adding Whizard")
        elif self.clip.testInputData:
            mokkaJob.setInputData(
                "/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep"
            )
        else:
            self.log.error("Mokka does not know where to get its input from")
            return S_ERROR("Mokka does not know where to gets its input from")

        mokkaApp = self.getMokka()
        if self.clip.testChain:
            mokkaApp.getInputFromApp(generator)
        else:
            mokkaApp.setNumberOfEvents(1)
        appendRes = mokkaJob.append(mokkaApp)
        if not appendRes['OK']:
            self.log.error("Failed adding Mokka:", appendRes['Message'])
            return S_ERROR("Failed adding Mokka to Job")
        mokkaJob.setOutputData("testsim.slcio", OutputSE="CERN-DIP-4")
        self.jobList['Mokka1'] = mokkaJob
        return S_OK(mokkaJob)

    def createRootScriptTest(self):
        """Build a job that runs a ROOT shell script over two input files.

        :returns: S_OK(job) or S_ERROR
        """
        self.log.notice("Creating jobs for Root")
        rootJob = self.getJob()
        rootJob.setInputSandbox(["root.sh", "input.root", "input2.root"])
        rootApp = self.getRoot()
        appendRes = rootJob.append(rootApp)
        if not appendRes['OK']:
            self.log.error("Failed adding Root:", appendRes['Message'])
            return S_ERROR("Failed adding Root to Job")
        self.jobList['Root'] = rootJob
        return S_OK(rootJob)

    def createRootHaddTest(self):
        """Build a job that runs ROOT's hadd over two input files.

        :returns: S_OK(job) or S_ERROR
        """
        self.log.notice("Creating jobs for Root")
        rootJob = self.getJob()
        rootJob.setInputSandbox(["input.root", "input2.root"])
        rootApp = self.getRoot()
        rootApp.setScript("hadd")
        appendRes = rootJob.append(rootApp)
        if not appendRes['OK']:
            self.log.error("Failed adding Root:", appendRes['Message'])
            return S_ERROR("Failed adding Root to Job")
        self.jobList['Root'] = rootJob
        return S_OK(rootJob)

    def createRootMacroTest(self):
        """Build a job that runs the func.C ROOT macro over two input files.

        :returns: S_OK(job) or S_ERROR
        """
        self.log.notice("Creating jobs for Root")
        rootJob = self.getJob()
        rootJob.setInputSandbox(["func.C", "input.root", "input2.root"])
        macroApp = self.getRootMacro()
        macroApp.setScript("func.C")
        appendRes = rootJob.append(macroApp)
        if not appendRes['OK']:
            self.log.error("Failed adding Root:", appendRes['Message'])
            return S_ERROR("Failed adding Root to Job")
        self.jobList['Root'] = rootJob
        return S_OK(rootJob)

    def getOverlay(self, nbevts):
        """Return an OverlayInput step for *nbevts* signal events per job.

        A known pre-staged path is used for the 350 GeV / ILD_o1_v05
        combination; otherwise machine/energy/detector are set so the
        overlay system resolves the files itself.
        """
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import OverlayInput
        overlayStep = OverlayInput()
        knownPath = None
        if self.energy == 350 and self.detectorModel == "ILD_o1_v05":
            knownPath = "/ilc/user/s/sailer/testFiles/overlay/ild_350/"
        if knownPath:
            overlayStep.setPathToFiles(knownPath)
        else:
            self.log.warn(
                "better define pathToFiles for this overlay: %s, %s, %s" %
                (self.energy, self.machine, self.backgroundType))
            overlayStep.setMachine(self.machine)
            overlayStep.setEnergy(self.energy)
            overlayStep.setDetectorModel(self.detectorModel)

        overlayStep.setBkgEvtType(self.backgroundType)
        overlayStep.setBXOverlay(60)
        overlayStep.setGGToHadInt(0.3)
        overlayStep.setNumberOfSignalEventsPerJob(nbevts)
        return overlayStep

    def getMokka(self):
        """Return a Mokka simulation app configured from the test params."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import Mokka
        simApp = Mokka()
        simApp.setVersion(self.mokkaVersion)
        simApp.setSteeringFile(self.mokkaSteeringFile)
        simApp.setOutputFile("testsim.slcio")
        simApp.setDetectorModel(self.detectorModel)
        # Only pin the steering-file version when one was requested.
        if self.steeringFileVersion:
            simApp.setSteeringFileVersion(self.steeringFileVersion)
        return simApp

    def getRoot(self):
        """Return a RootScript app running root.sh over the two test inputs."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import RootScript
        scriptApp = RootScript()
        scriptApp.setScript("root.sh")
        scriptApp.setArguments("output.root input.root input2.root")
        scriptApp.setVersion(self.rootVersion)
        scriptApp.setOutputFile("output.root")
        return scriptApp

    def getRootMacro(self):
        """Return a RootMacro app running func.C on input.root."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import RootMacro
        macroApp = RootMacro()
        macroApp.setMacro("func.C")
        # The macro argument is a quoted string at the ROOT prompt.
        macroApp.setArguments(r"\"input.root\"")
        macroApp.setVersion(self.rootVersion)
        return macroApp

    @staticmethod
    def getSLIC():
        """Return a SLIC simulation app with the CLIC SiD CDR defaults."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLIC
        simApp = SLIC()
        simApp.setVersion('v2r9p8')
        simApp.setSteeringFile('defaultClicCrossingAngle.mac')
        simApp.setDetectorModel('clic_sid_cdr')
        simApp.setOutputFile('testsim.slcio')
        return simApp

    @staticmethod
    def getSLICPandora():
        """Return a SLICPandora PFA app with the CLIC SiD CDR defaults."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLICPandora
        pfaApp = SLICPandora()
        pfaApp.setVersion('CLIC_CDR')
        pfaApp.setDetectorModel('clic_sid_cdr')
        pfaApp.setPandoraSettings("PandoraSettingsSlic.xml")
        pfaApp.setOutputFile('testpandora.slcio')
        return pfaApp

    def getMarlin(self):
        """Return a one-event Marlin step producing REC and DST files."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
        recoApp = Marlin()
        recoApp.setVersion(self.marlinVersion)
        recoApp.setSteeringFile(self.marlinSteeringFile)
        recoApp.setGearFile(self.gearFile)
        recoApp.setOutputDstFile("testmarlinDST.slcio")
        recoApp.setOutputRecFile("testmarlinREC.slcio")
        recoApp.setNumberOfEvents(1)
        return recoApp

    def getDD(self):
        """Return a two-event DDSim step configured from the test params."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications.DDSim import DDSim
        simApp = DDSim()
        simApp.setVersion(self.ddsimVersion)
        simApp.setDetectorModel(self.ddsimDetectorModel)
        simApp.setInputFile(self.ddsimInputFile)
        simApp.setNumberOfEvents(2)
        return simApp

    def getLCSIM(self, prepandora=True):
        """Return an LCSIM step.

        :param prepandora: True for the digitisation/tracking pass (single
            output file), False for the post-Pandora DST pass (REC + DST
            output files).
        """
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import LCSIM
        recoApp = LCSIM()
        recoApp.setVersion('CLIC_CDR')
        recoApp.setDetectorModel('clic_sid_cdr.zip')
        if prepandora:
            recoApp.setSteeringFile(self.lcsimPreSteeringFile)
            recoApp.setOutputFile("testlcsim.slcio")
        else:
            recoApp.setSteeringFile(self.lcsimPostSteeringFile)
            recoApp.setOutputDstFile("testlcsimDST.slcio")
            recoApp.setOutputRecFile("testlcsimREC.slcio")
        recoApp.setTrackingStrategy("defaultStrategies_clic_sid_cdr.xml")
        return recoApp

    def getFccSw(self):
        """Return an FccSw step using the configured path and steering file."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import FccSw
        fccApp = FccSw()
        fccApp.fccSwPath = self.fccSwPath
        fccApp.setSteeringFile(self.fccSwSteeringFile)
        return fccApp

    def getFccAnalysis(self):
        """Return an FccAnalysis step using the configured steering file."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import FccAnalysis
        analysisApp = FccAnalysis()
        analysisApp.setSteeringFile(self.fccAnalysisSteeringFile)
        return analysisApp

    @staticmethod
    def getStdhepcut(generated):
        """Return a StdhepCutJava step with an inline invariant-mass cut.

        :param generated: number of generated events declared to the cut.
        """
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import StdhepCutJava
        cutApp = StdhepCutJava()
        cutApp.setVersion('1.0')
        cutApp.setSelectionEfficiency(1.)
        cutApp.setInlineCuts("leptonInvMass_R 13 100 200")
        cutApp.setSteeringFileVersion("V18")
        cutApp.setMaxNbEvts(1)
        cutApp.setNumberOfEvents(generated)
        return cutApp

    @staticmethod
    def getStdhepSplit():
        """Return a StdHepSplit step (5 events per file, 10 events read)."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import StdHepSplit
        splitApp = StdHepSplit()
        splitApp.setVersion("V2")
        splitApp.setNumberOfEventsPerFile(5)
        splitApp.setOutputFile("teststdhepsplit.stdhep")
        splitApp.setMaxRead(10)
        return splitApp

    @staticmethod
    def getLCIOSplit(events_per_file):
        """Return an SLCIOSplit step writing *events_per_file* events per file."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLCIOSplit
        splitApp = SLCIOSplit()
        splitApp.setNumberOfEventsPerFile(events_per_file)
        splitApp.setOutputFile("testlciosplit.slcio")
        return splitApp

    @staticmethod
    def getLCIOConcat():
        """Return an SLCIOConcatenate step with the test output file name."""
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLCIOConcatenate
        concatApp = SLCIOConcatenate()
        concatApp.setOutputFile("testlcioconcat.slcio")
        return concatApp

    def getJob(self):
        """Return the generic UserJob every test starts from.

        Sets name, group, CPU time, platform, log level and the output
        sandbox, plus two workflow parameters used by the test harness.
        """
        from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
        testJob = UserJob()
        testJob.setName("Testing")
        testJob.setJobGroup("Tests")
        testJob.setCPUTime(30000)
        testJob.dontPromptMe()
        testJob.setLogLevel("VERBOSE")
        testJob.setPlatform("x86_64-slc5-gcc43-opt")
        testJob.setOutputSandbox(["*.log", "*.xml", "*.sh"])
        # Workflow parameters consumed by the failover/platform checks.
        testJob._addParameter(testJob.workflow, 'TestFailover', 'String', True,
                              'Test failoverRequest')
        testJob._addParameter(testJob.workflow, 'Platform', 'JDL',
                              "x86_64-slc5-gcc43-opt", 'OS Platform')
        if self.ildConfig:
            testJob.setILDConfig(self.ildConfig)
        return testJob

    def getWhizardModel(self, nbevts, energy, model):
        """ Create a default whizard application.

        :param nbevts: number of events to generate
        :param energy: centre-of-mass energy in GeV (drives the beam
            spectrum mode: 19 for 1400 GeV, 11 otherwise)
        :param model: physics model; anything other than "sm" switches to
            the SUSY process and SLHA input file
        """
        from ILCDIRAC.Interfaces.API.NewInterface.Applications import Whizard
        # Process id: mu pair for SM, smuon pair otherwise.
        proddict = "e2e2_o"
        if model != "sm":
            proddict = "se2se2_r"
        whiz = Whizard(processlist=self.diracInstance.getProcessList())
        whiz.setModel(model)
        # Full whizard steering is passed as a nested parameter dict.
        pdict = {}
        pdict['process_input'] = {}
        pdict['process_input']['process_id'] = proddict
        pdict['process_input']['sqrts'] = energy
        if model != 'sm':
            # SUSY spectra come from an external SLHA file.
            pdict['process_input']['input_file'] = "LesHouches.msugra_1"
            pdict['process_input']['input_slha_format'] = 'T'

        pdict['process_input']['beam_recoil'] = 'T'

        pdict['integration_input'] = {}
        pdict['integration_input']['calls'] = '1  50000 10  5000  1  15000'
        pdict['simulation_input'] = {}
        pdict['simulation_input']['normalize_weight'] = 'F'
        pdict['simulation_input']['n_events'] = nbevts
        pdict['simulation_input']['keep_initials'] = 'T'
        pdict['simulation_input']['events_per_file'] = 500000
        # Pythia tunes differ only in MSTJ(28)/MSTP/IMSS settings between
        # the SM and SUSY configurations.
        if model != 'sm':
            pdict['simulation_input'][
                'pythia_parameters'] = "PMAS(25,1)=125; PMAS(25,2)=0.3605E-02; MSTU(22)=20 ;PARJ(21)=0.40000;PARJ(41)=0.11000; PARJ(42)=0.52000; PARJ(81)=0.25000; PARJ(82)=1.90000; MSTJ(11)=3; PARJ(54)=-0.03100; PARJ(55)=-0.00200;PARJ(1)=0.08500; PARJ(3)=0.45000; PARJ(4)=0.02500; PARJ(2)=0.31000; PARJ(11)=0.60000; PARJ(12)=0.40000; PARJ(13)=0.72000;PARJ(14)=0.43000; PARJ(15)=0.08000; PARJ(16)=0.08000; PARJ(17)=0.17000; MSTP(3)=1;IMSS(1)=11; IMSS(21)=71; IMSS(22)=71"
        else:
            pdict['simulation_input'][
                'pythia_parameters'] = "PMAS(25,1)=125; PMAS(25,2)=0.3605E-02; MSTU(22)=20 ; MSTJ(28)=2 ;PARJ(21)=0.40000;PARJ(41)=0.11000; PARJ(42)=0.52000; PARJ(81)=0.25000; PARJ(82)=1.90000; MSTJ(11)=3; PARJ(54)=-0.03100; PARJ(55)=-0.00200;PARJ(1)=0.08500; PARJ(3)=0.45000; PARJ(4)=0.02500; PARJ(2)=0.31000; PARJ(11)=0.60000; PARJ(12)=0.40000; PARJ(13)=0.72000;PARJ(14)=0.43000; PARJ(15)=0.08000; PARJ(16)=0.08000; PARJ(17)=0.17000; MSTP(3)=1"
            pdict['parameter_input'] = {}
            #  pdict['parameter_input']['mmu']=mmu
            #  pdict['parameter_input']['mtau']=mtau
            #  pdict['parameter_input']['mb']=mb
            #  pdict['parameter_input']['mc']=mc
            pdict['parameter_input']['mH'] = 125
        # Electron beam: user spectrum, ISR on, EPA off.
        pdict['beam_input_1'] = {}
        pdict['beam_input_1']['particle_name'] = "e1"
        pdict['beam_input_1']['polarization'] = "0.0 0.0"
        pdict['beam_input_1']['USER_spectrum_on'] = 'T'
        pdict['beam_input_1'][
            'USER_spectrum_mode'] = 19 if energy == 1400 else 11
        pdict['beam_input_1']['ISR_on'] = 'T'
        pdict['beam_input_1']['EPA_on'] = "F"

        # Positron beam: mirror of the electron beam settings.
        pdict['beam_input_2'] = {}
        pdict['beam_input_2']['particle_name'] = "E1"
        pdict['beam_input_2']['polarization'] = "0.0 0.0"
        pdict['beam_input_2']['USER_spectrum_on'] = 'T'
        pdict['beam_input_2']['ISR_on'] = 'T'
        pdict['beam_input_2'][
            'USER_spectrum_mode'] = 19 if energy == 1400 else 11
        pdict['beam_input_2']['EPA_on'] = 'F'

        whiz.setFullParameterDict(pdict)
        whiz.setOutputFile("testgen.stdhep")
        return whiz

    def getWhizard(self, nbevts):
        """Return a default Standard-Model Whizard application at 1.4 TeV.

        :param int nbevts: number of events to generate
        :returns: configured Whizard application
        """
        defaultEnergy = 1400
        return self.getWhizardModel(nbevts, defaultEnergy, "sm")

    def getWhizardSUSY(self, nbevts):
        """Return a SUSY Whizard application (slsqhh model) at 3 TeV.

        :param int nbevts: number of events to generate
        :returns: configured Whizard application
        """
        susyEnergy = 3000
        return self.getWhizardModel(nbevts, susyEnergy, "slsqhh")

    def createWhizardTest(self):
        """Build the two Whizard test jobs: Standard Model and SUSY.

        Registers them in ``self.jobList`` under 'Whizard1' and 'WhizSusy'.

        :returns: S_OK((smJob, susyJob)) on success, S_ERROR() otherwise
        """
        self.log.notice("Creating jobs for Whizard")

        # Standard-Model Whizard job
        smJob = self.getJob()
        res = smJob.append(self.getWhizard(2))
        if not res['OK']:
            self.log.error("Failed adding Whizard:", res['Message'])
            return S_ERROR()
        self.jobList['Whizard1'] = smJob

        # SUSY Whizard job
        susyJob = self.getJob()
        res = susyJob.append(self.getWhizardSUSY(2))
        if not res['OK']:
            self.log.error("Failed adding Whizard:", res['Message'])
            return S_ERROR()
        self.jobList['WhizSusy'] = susyJob

        return S_OK((smJob, susyJob))

    def createSlicTest(self):
        """Create the SLIC test job.

        Either chains Whizard output into SLIC (``testChain``) or reads a
        fixed stdhep file from the grid (``testInputData``).

        :returns: S_OK(job) on success, S_ERROR() otherwise
        """
        self.log.notice("Creating jobs for SLIC")
        #run (Whizard +)SLIC
        jobslic = self.getJob()
        if self.clip.testChain:
            whslic = self.getWhizard(2)
            res = jobslic.append(whslic)
            if not res["OK"]:
                # bugfix: failed results carry the error text in 'Message',
                # not 'Value' (consistent with every other failure path)
                self.log.error("Failed adding Whizard:", res['Message'])
                return S_ERROR()
        elif self.clip.testInputData:
            jobslic.setInputData(
                "/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep"
            )
        else:
            self.log.error("SLIC does not know where to get its input from")
            return S_ERROR()
        myslic = self.getSLIC()
        if self.clip.testChain:
            myslic.getInputFromApp(whslic)
        else:
            myslic.setNumberOfEvents(2)
        res = jobslic.append(myslic)
        if not res['OK']:
            self.log.error("Failed adding slic: ", res["Message"])
            return S_ERROR()
        self.jobList['Slic1'] = jobslic
        return S_OK(jobslic)

    def createMarlinTest(self, setInputData=False):
        """Create the Marlin test job.

        Builds ((Whizard + Mokka +) Overlay +) Marlin depending on the CLI
        flags. ``setInputData`` is currently unused but kept for interface
        compatibility with callers.

        :returns: S_OK(job) on success, S_ERROR() otherwise
        """
        self.log.notice("Creating test for Marlin")
        #((Whizard + Mokka +)Overlay+) Marlin
        jobma = self.getJob()
        if self.clip.testChain:
            moma = self.getMokka()
            if not self.clip.testInputData:
                whma = self.getWhizard(2)
                res = jobma.append(whma)
                if not res['OK']:
                    self.log.error("Failed adding Whizard:", res['Message'])
                    return S_ERROR()
                moma.getInputFromApp(whma)
            else:
                jobma.setInputData(
                    "/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep"
                )
                moma.setNumberOfEvents(1)
            res = jobma.append(moma)
            if not res['OK']:
                self.log.error("Failed adding Mokka:", res['Message'])
                return S_ERROR()
        elif self.clip.testInputData:
            jobma.setInputData(self.marlinInputdata)
        else:
            self.log.error("Marlin does not know where to get its input from")
            return S_ERROR()
        if self.runOverlay:
            ov = self.getOverlay(2)
            res = jobma.append(ov)
            if not res["OK"]:
                self.log.error("Failed adding Overlay:", res['Message'])
                # bugfix: was `return S_ERROR` (no call), which returned the
                # function object itself and broke the caller's res['OK'] check
                return S_ERROR()
        ma = self.getMarlin()
        if self.clip.testChain:
            ma.getInputFromApp(moma)
        else:
            ma.setNumberOfEvents(2)

        res = jobma.append(ma)
        if not res['OK']:
            self.log.error("Failed adding Marlin:", res['Message'])
            return S_ERROR()
        self.jobList['Marlin1'] = jobma
        return S_OK(jobma)

    def createLCSimTest(self):
        """Create the LCSIM test job.

        Builds ((Whizard + SLIC +) Overlay +) LCSIM depending on the CLI
        flags.

        :returns: S_OK(job) on success, S_ERROR() otherwise
        """
        self.log.notice("Creating test for LCSIM")
        #run ((whiz+SLIC+)+Overlay+)LCSIM
        joblcsim = self.getJob()
        if self.clip.testChain:
            mysliclcsim = self.getSLIC()

            if not self.clip.testInputData:
                whlcsim = self.getWhizard(2)
                res = joblcsim.append(whlcsim)
                if not res["OK"]:
                    # bugfix: failed results carry the error text in
                    # 'Message', not 'Value'
                    self.log.error("Failed adding Whizard:", res['Message'])
                    return S_ERROR()
                mysliclcsim.getInputFromApp(whlcsim)
            else:
                joblcsim.setInputData(
                    "/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep"
                )
                mysliclcsim.setNumberOfEvents(2)

            res = joblcsim.append(mysliclcsim)
            if not res['OK']:
                self.log.error("Failed adding slic: ", res["Message"])
                return S_ERROR()
        elif self.clip.testInputData:
            #joblcsim.setInputData("/ilc/prod/clic/1.4tev/ee_qqaa/SID/SIM/00002308/000/ee_qqaa_sim_2308_222.slcio")
            joblcsim.setInputData(
                "/ilc/user/s/sailer/testFiles/clic_prod_sid_h_nunu_sim.slcio")
        else:
            self.log.error("LCSIM does not know where to get its input from")
            return S_ERROR()
        if self.runOverlay:
            ovlcsim = self.getOverlay(2)
            res = joblcsim.append(ovlcsim)
            if not res["OK"]:
                self.log.error("Failed adding Overlay:", res['Message'])
                return S_ERROR()
        mylcsim = self.getLCSIM(True)
        if self.clip.testChain:
            mylcsim.getInputFromApp(mysliclcsim)
        else:
            mylcsim.setNumberOfEvents(2)
        res = joblcsim.append(mylcsim)
        if not res['OK']:
            self.log.error("Failed adding LCSIM: ", res["Message"])
            return S_ERROR()
        self.jobList['lcsim1'] = joblcsim

        return S_OK(joblcsim)

    def createSlicPandoraTest(self):
        """Create the SLICPandora test job.

        Builds ((Whizard + SLIC) + (Overlay +) LCSIM +) SLICPandora + LCSIM;
        requires ``testChain`` since SLICPandora has no standalone input.

        :returns: S_OK(job) on success, S_ERROR() otherwise
        """
        self.log.notice("Creating tests for SLICPandora")
        #run ((whiz+SLIC) + (Overlay +) LCSIM +) SLICPandora + LCSIM
        joblcsimov = self.getJob()
        if not self.clip.testChain:
            self.log.error(
                "SLICPandora does not know where to get its input from")
            return S_ERROR()
        mylcsimov = self.getLCSIM(True)
        if not self.clip.testInputData:
            whlcsimov = self.getWhizard(2)
            res = joblcsimov.append(whlcsimov)
            if not res["OK"]:
                # bugfix: failed results carry the error text in 'Message',
                # not 'Value'
                self.log.error("Failed adding Whizard:", res['Message'])
                return S_ERROR()
            mysliclcsimov = self.getSLIC()
            mysliclcsimov.getInputFromApp(whlcsimov)
            res = joblcsimov.append(mysliclcsimov)
            if not res['OK']:
                self.log.error("Failed adding slic: ", res["Message"])
                return S_ERROR()
            mylcsimov.getInputFromApp(mysliclcsimov)
        else:
            #joblcsimov.setInputData("/ilc/prod/clic/1.4tev/ee_qqaa/SID/SIM/00002308/000/ee_qqaa_sim_2308_222.slcio")
            joblcsimov.setInputData(
                "/ilc/user/s/sailer/testFiles/clic_prod_sid_h_nunu_sim.slcio")
            mylcsimov.setNumberOfEvents(2)

        if self.runOverlay:
            ovslicp = self.getOverlay(2)
            res = joblcsimov.append(ovslicp)
            if not res["OK"]:
                self.log.error("Failed adding Overlay:", res['Message'])
                return S_ERROR()

        res = joblcsimov.append(mylcsimov)
        if not res['OK']:
            self.log.error("Failed adding LCSIM: ", res["Message"])
            return S_ERROR()

        myslicpov = self.getSLICPandora()
        myslicpov.getInputFromApp(mylcsimov)
        res = joblcsimov.append(myslicpov)
        if not res['OK']:
            self.log.error("Failed adding SLICPandora: ", res["Message"])
            return S_ERROR()
        # second LCSIM pass runs on the SLICPandora output
        mylcsimovp = self.getLCSIM(False)
        mylcsimovp.getInputFromApp(myslicpov)
        res = joblcsimov.append(mylcsimovp)
        if not res['OK']:
            self.log.error("Failed adding LCSIM: ", res["Message"])
            return S_ERROR()
        self.jobList['lcsimov1'] = joblcsimov
        return S_OK(joblcsimov)

    def createUtilityTests(self):
        """Create tests for the utility applications.

        Builds four jobs: Whizard+StdhepSplit, Whizard+StdhepCut, an LCIO
        split job and an LCIO concatenate job, registered in ``self.jobList``.

        :returns: S_OK((concatJob, lcioSplitJob, whizCutJob, whizSplitJob))
                  on success, S_ERROR() otherwise
        """
        self.log.notice("Creating tests for utility applications")
        ##### WhizardJob + stdhep split
        jobwsplit = self.getJob()
        whsplit = self.getWhizard(10)
        res = jobwsplit.append(whsplit)
        if not res['OK']:
            self.log.error("Failed adding Whizard:", res['Message'])
            return S_ERROR()
        # consistency fix: access via self like every sibling helper call,
        # instead of hard-coding the class name JobCreater
        mystdsplit = self.getStdhepSplit()
        mystdsplit.getInputFromApp(whsplit)
        res = jobwsplit.append(mystdsplit)
        if not res['OK']:
            self.log.error("Failed adding StdHepSplit:", res['Message'])
            return S_ERROR()
        self.jobList['whizSplit'] = jobwsplit

        ##### WhizardJob + split
        jobwcut = self.getJob()
        whcut = self.getWhizard(100)
        res = jobwcut.append(whcut)
        if not res['OK']:
            self.log.error("Failed adding Whizard:", res['Message'])
            return S_ERROR()
        mystdcut = self.getStdhepcut(100)
        mystdcut.getInputFromApp(whcut)
        res = jobwcut.append(mystdcut)
        if not res['OK']:
            self.log.error("Failed adding StdHepCut:", res['Message'])
            return S_ERROR()
        self.jobList['whizCut'] = jobwcut

        #LCIO split
        joblciosplit = self.getJob()
        # joblciosplit.setInputData("/ilc/prod/clic/1.4tev/e2e2_o/ILD/DST/00002215/000/e2e2_o_dst_2215_46.slcio")
        joblciosplit.setInputData(
            "/ilc/user/s/sailer/testFiles/prod_clic_ild_e2e2_o_sim_2214_26.slcio"
        )
        mylciosplit = self.getLCIOSplit(100)
        res = joblciosplit.append(mylciosplit)
        if not res['OK']:
            self.log.error("Failed adding SLCIOSplit:", res['Message'])
            return S_ERROR()
        self.jobList['lcioSplit'] = joblciosplit

        #LCIO concat
        jobconcat = self.getJob()
        # jobconcat.setInputData(["/ilc/prod/clic/1.4tev/e2e2_o/ILD/DST/00002215/000/e2e2_o_dst_2215_27.slcio",
        #                         "/ilc/prod/clic/1.4tev/e2e2_o/ILD/DST/00002215/000/e2e2_o_dst_2215_46.slcio"])

        jobconcat.setInputData([
            "/ilc/prod/clic/1.4tev/aa_qqll_all/ILD/DST/00004275/002/aa_qqll_all_dst_4275_2104.slcio",
            "/ilc/prod/clic/1.4tev/aa_qqll_all/ILD/DST/00004275/002/aa_qqll_all_dst_4275_2105.slcio"
        ])

        myconcat = self.getLCIOConcat()
        res = jobconcat.append(myconcat)
        if not res['OK']:
            self.log.error("Failed adding SLCIOConcatenate:", res['Message'])
            return S_ERROR()
        self.jobList['concat'] = jobconcat
        return S_OK((jobconcat, joblciosplit, jobwcut, jobwsplit))

    def createFccSwTest(self):
        """Create the FccSW test job and register it as 'FccSw1'.

        :returns: S_OK(job) on success, S_ERROR() otherwise
        """
        self.log.notice("Creating jobs for FccSW")
        #### FccSwJob
        fccswJob = self.getJob()
        appendResult = fccswJob.append(self.getFccSw())
        if not appendResult['OK']:
            self.log.error("Failed adding FccSw:", appendResult['Message'])
            return S_ERROR()
        self.jobList['FccSw1'] = fccswJob
        return S_OK(fccswJob)

    def createFccAnalysisTest(self):
        """Create the FccAnalysis test job and register it as 'FccAnalysis1'.

        :returns: S_OK(job) on success, S_ERROR() otherwise
        """
        self.log.notice("Creating jobs for FccAnalysis")
        #### FccAnalysisJob
        analysisJob = self.getJob()
        appendResult = analysisJob.append(self.getFccAnalysis())
        if not appendResult['OK']:
            self.log.error("Failed adding FccAnalysis:", appendResult['Message'])
            return S_ERROR()
        self.jobList['FccAnalysis1'] = analysisJob
        return S_OK(analysisJob)

    def runJobLocally(self, job, jobName="unknown"):
        """Run a single job on the local machine inside a fresh temp directory.

        Creates a temp dir under the current working directory, copies in the
        auxiliary input files some jobs need, runs the job via
        :func:`runJob`, then returns to the original directory and (unless
        ``--nocleanup``) removes the temp dir.

        :param job: the job object to run
        :param str jobName: name used to pick the required input files
        :returns: S_OK() on success, or the failing runJob result / S_ERROR()
        """
        self.log.notice("I will run the tests locally.")
        from DIRAC import gConfig
        # local software area must be configured and must not live on AFS
        localarea = gConfig.getValue("/LocalSite/LocalArea", "")
        if not localarea:
            self.log.error(
                "You need to have /LocalSite/LocalArea defined in your dirac.cfg"
            )
            return S_ERROR()
        if localarea.find("/afs") == 0:
            self.log.error(
                "Don't set /LocalSite/LocalArea set to /afs/... as you'll get to install there"
            )
            self.log.error(
                "check ${HOME}/.dirac.cfg and ${DIRAC}/etc/dirac.cfg")
            return S_ERROR()
        self.log.notice("To run locally, I will create a temp directory here.")
        curdir = os.getcwd()
        tmpdir = tempfile.mkdtemp("", dir="./")
        os.chdir(tmpdir)

        # Jobs that need separate input files
        specialJobs = ['root', 'ddsim']
        filesForJob = {
            'root': ['input2.root', 'input.root'],
            'ddsim': ['FCalTB.tar.gz', 'Muon_50GeV_Fixed_cosTheta0.7.stdhep']
        }
        for specialName in specialJobs:
            # root jobs additionally need a shell script and a ROOT macro
            if "root" in jobName.lower() and specialName == "root":
                with open("root.sh", "w") as rScript:
                    rScript.write("echo $ROOTSYS")
                with open("func.C", "w") as rMacro:
                    rMacro.write('''
                        void func( TString string ) {
                          std::cout << string << std::endl;
                          TFile* file = TFile::Open(string);
                        file->ls();
                        }
                        ''')
            testfiledir = 'Testfiles'
            # NOTE(review): these two files are copied on every iteration of
            # the specialJobs loop, regardless of jobName — looks redundant;
            # confirm whether this was meant to be inside the condition below
            for fileName in ['input.root', 'input2.root']:
                shutil.copy(os.path.join(curdir, testfiledir, fileName),
                            os.getcwd())
                print os.path.join(curdir, "input2.root"), os.getcwd()
            if specialName in jobName.lower():
                for fileName in filesForJob[specialName]:
                    shutil.copy(os.path.join(curdir, testfiledir, fileName),
                                os.getcwd())

        resJob = self.runJob(job, jobName)
        os.chdir(curdir)
        if not resJob['OK']:
            return resJob
        # NOTE(review): duplicate chdir — we are already back in curdir here
        os.chdir(curdir)
        if not self.clip.nocleanup:
            cleanup(tmpdir)
        return S_OK()

    def run(self):
        """Run or submit every job collected in ``self.jobList``.

        Jobs run locally when ``submitMode`` is 'local', otherwise they are
        submitted through :func:`runJob`.

        .. note:: only the result of the *last* processed job is returned;
           earlier failures are overwritten (review: confirm this is intended).

        :returns: S_OK/S_ERROR result of the last processed job
        """
        res = S_ERROR()  # default if jobList is empty
        for name, finjob in self.jobList.iteritems():  # Python-2 dict iteration
            if self.clip.submitMode == 'local':
                res = self.runJobLocally(finjob, name)
            else:
                res = self.runJob(finjob, name)
        return res

    def runJob(self, finjob, name):
        """Submit a single job (or run it, per the configured submit mode).

        :param finjob: fully assembled job object
        :param str name: human-readable job name, used only for logging
        :returns: S_OK() when submission succeeded, S_ERROR() otherwise
        """
        self.log.notice(
            "############################################################")
        self.log.notice(" Running or submitting job: %s " % name)
        self.log.notice("\n\n")
        submitResult = finjob.submit(self.diracInstance, mode=self.clip.submitMode)
        if not submitResult["OK"]:
            self.log.error("Failed job:", submitResult['Message'])
            return S_ERROR()
        return S_OK()

    def checkForTests(self):
        """Create all tests selected via the command-line flags.

        Iterates the flag/creator table in the original check order; the
        first failing creator aborts with S_ERROR().

        :returns: S_OK() when all requested tests were created
        """
        # (clip flag name, creator callables run for that flag), in order
        testMatrix = [
            ('testMokka', [self.createMokkaTest]),
            ('testWhizard', [self.createWhizardTest]),
            ('testSlic', [self.createSlicTest]),
            ('testMarlin', [self.createMarlinTest]),
            ('testLCSIM', [self.createLCSimTest]),
            ('testSlicPandora', [self.createSlicPandoraTest]),
            ('testUtilities', [self.createUtilityTests]),
            # root requests three separate test jobs
            ('testRoot', [self.createRootScriptTest,
                          self.createRootHaddTest,
                          self.createRootMacroTest]),
            ('testFccSw', [self.createFccSwTest]),
            ('testFccAnalysis', [self.createFccAnalysisTest]),
        ]

        for flagName, creators in testMatrix:
            if not getattr(self.clip, flagName):
                continue
            for creator in creators:
                res = creator()
                if not res['OK']:
                    return S_ERROR()

        return S_OK()
Exemplo n.º 30
0
def subOverlay():
    """Submit an ILD overlay + Marlin reconstruction example job.

    Reads submission options from the module-level ``_clip`` object,
    appends five OverlayInput applications (gamma-gamma and pair
    backgrounds) and a Marlin reconstruction step, then submits the job
    either locally or to the WMS.
    """

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    nbevts = 0  # overrides the above: 0 means analyze all input events
    outputFilePrefix = "overlay_example" if _clip.outputFilePrefix == "" else _clip.outputFilePrefix
    outputDir = _clip.outputDir
    inputFile = _clip.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim does not given.")
        exit(-1)

    recfile = outputFilePrefix + ".rec.slcio"
    dstfile = outputFilePrefix + ".dst.slcio"
    # simulation model name drops the option field, e.g.
    # ILD_l5_o1_v02 -> ILD_l5_v02
    detector_model = "ILD_l5_o1_v02"
    key = detector_model.split('_')
    sim_detectorModel = "_".join([key[0], key[1], key[3]])

    # Create DIRAC objects for job submission

    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myoverlayjob")
    job.setName("myoverlay")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    # job.setBannedSites([])         # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    # Create Overlay application
    # one entry per background process: production ID, expected background
    # events per bunch crossing (expBG), and the production sub-directory
    ovldata = [{
        "ProcessorName": "BgOverlayWW",
        "evttype": "aa_lowpt_WW",
        "ProdID": 10237,
        "expBG": 0.211,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayWB",
        "evttype": "aa_lowpt_WB",
        "ProdID": 10241,
        "expBG": 0.24605,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayBW",
        "evttype": "aa_lowpt_BW",
        "ProdID": 10239,
        "expBG": 0.243873,
        "subdir": "000"
    }, {
        "ProcessorName": "BgOverlayBB",
        "evttype": "aa_lowpt_BB",
        "ProdID": 10235,
        "expBG": 0.35063,
        "subdir": "000"
    }, {
        "ProcessorName": "PairBgOverlay",
        "evttype": "seeablepairs",
        "ProdID": 10233,
        "expBG": 1.0,
        "subdir": "100"
    }]

    BXOverlay = 1
    NbSigEvtsPerJob = 100
    numberOfSignalEvents = NbSigEvtsPerJob
    basebkgpath = "/ilc/prod/ilc/mc-opt-3/ild/sim/500-TDR_ws"
    energy = "500"

    # configure one OverlayInput application per background sample
    for ovl in ovldata:
        print "### OverlayInput ... " + ovl["ProcessorName"]
        ovlapp = OverlayInput()
        ovlpath = "%s/%s/%s/v02-00-01/%8.8d/%s" % \
       ( basebkgpath, ovl["evttype"], sim_detectorModel, ovl["ProdID"] , ovl["subdir"] )
        print "    OverlayPath ... " + ovlpath
        ovlapp.setMachine("ilc_dbd")
        # ovlapp.setEnergy(energy)
        # ovlapp.setDetectorModel(sim_detectorModel)
        ovlapp.setProcessorName(ovl["ProcessorName"])
        ovlapp.setBkgEvtType(ovl["evttype"])
        ovlapp.setPathToFiles(ovlpath)
        ovlapp.setGGToHadInt(ovl["expBG"])
        ovlapp.setBXOverlay(BXOverlay)
        ovlapp.setNbSigEvtsPerJob(NbSigEvtsPerJob)
        ovlapp.setNumberOfSignalEventsPerJob(numberOfSignalEvents)
        res = job.append(ovlapp)
        if not res['OK']:
            print res['Message']
            exit(1)

    # Create Marlin application
    marlin = Marlin()
    marlin.setVersion("ILCSoft-02-00-02_gcc49")
    marlin.setDetectorModel(detector_model)
    marlin.setSteeringFile("MarlinStdReco.xml")
    marlin.setInputFile(inputFile)
    marlin.setNumberOfEvents(nbevts)
    marlin.setOutputDstFile(dstfile)
    marlin.setOutputRecFile(recfile)
    extraCLIArguments = " --constant.DetectorModel=%s " % detector_model
    extraCLIArguments += " --constant.RunOverlay=true --constant.CMSEnergy=%s " % str(
        energy)
    extraCLIArguments += " --global.Verbosity=MESSAGE "
    marlin.setExtraCLIArguments(extraCLIArguments)

    job.append(marlin)

    # register output data only when a grid output directory was requested
    if outputDir != "":
        job.setOutputData([dstfile, recfile],
                          OutputPath=outputDir,
                          OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
Exemplo n.º 31
0
class UserJob(SplitMixin, Job):
  """User job class. To be used by users, not for production."""

  def __init__(self, script=None):
    """Initialize UserJob, including proxy and splitmixin."""
    super(UserJob, self).__init__(script)
    self.type = 'User'
    self.diracinstance = None
    # proxy groups that are allowed to submit user jobs
    self.usergroup = ['ilc_user', 'calice_user']
    self.proxyinfo = getProxyInfo()
    SplitMixin._initialize(self)

  def submit(self, diracinstance = None, mode = "wms"):
    """ Submit call: when your job is defined, and all applications are set, you need to call this to
    add the job to DIRAC.

    :param diracinstance: DiracILC instance
    :type diracinstance: ~ILCDIRAC.Interfaces.API.DiracILC.DiracILC
    :param str mode: "wms" (default), "agent", or "local"

    .. note ::
      The *local* mode means that the job will be run on the submission machine. Use this mode for testing of
      submission scripts

    """
    if self._splittingOption:
      result = self._split()
      if 'OK' in result and not result['OK']:
        return result

    # Check the credentials. If no proxy or not user proxy, return an error
    if not self.proxyinfo['OK']:
      LOG.error("Not allowed to submit a job, you need a %s proxy." % self.usergroup)
      return self._reportError("Not allowed to submit a job, you need a %s proxy." % self.usergroup,
                               self.__class__.__name__)
    if 'group' in self.proxyinfo['Value']:
      group = self.proxyinfo['Value']['group']
      if group not in self.usergroup:
        LOG.error("Not allowed to submit a job, you need a %s proxy." % self.usergroup)
        return self._reportError("Not allowed to submit job, you need a %s proxy." % self.usergroup,
                                 self.__class__.__name__)
    else:
      LOG.error("Could not determine group, you do not have the right proxy.")
      return self._reportError("Could not determine group, you do not have the right proxy.")

    res = self._addToWorkflow()
    if not res['OK']:
      return res
    self.oktosubmit = True
    if not diracinstance:
      self.diracinstance = DiracILC()
    else:
      self.diracinstance = diracinstance
    return self.diracinstance.submitJob(self, mode)

  #############################################################################
  def setInputData( self, lfns ):
    """Specify input data by Logical File Name (LFN).

    Input files specified via this function will be automatically staged if necessary.

    Example usage:

    >>> job = UserJob()
    >>> job.setInputData(['/ilc/prod/whizard/processlist.whiz'])

    :param lfns: Logical File Names
    :type lfns: Single LFN string or list of LFNs
    :returns: S_OK() on success, error report for invalid input types
    """
    if isinstance( lfns, list ) and lfns:
      for i, lfn in enumerate( lfns ):
        lfns[i] = lfn.replace( 'LFN:', '' )
      #inputData = map( lambda x: 'LFN:' + x, lfns )
      inputData = lfns #because we don't need the LFN: for inputData, and it breaks the 
      #resolution of the metadata in the InputFilesUtilities
      inputDataStr = ';'.join( inputData )
      description = 'List of input data specified by LFNs'
      self._addParameter( self.workflow, 'InputData', 'JDL', inputDataStr, description )
    elif isinstance( lfns, basestring ): #single LFN
      description = 'Input data specified by LFN'
      self._addParameter( self.workflow, 'InputData', 'JDL', lfns, description )
    else:
      kwargs = {'lfns':lfns}
      return self._reportError( 'Expected lfn string or list of lfns for input data', **kwargs )

    return S_OK()

  def setInputSandbox(self, flist):
    """ Add files to the input sandbox, can be on the local machine or on the grid

    >>> job = UserJob()
    >>> job.setInputSandbox( ['LFN:/ilc/user/u/username/libraries.tar.gz',
    >>>                       'mySteeringFile.xml'] )

    :param flist: Files for the inputsandbox
    :type flist: `python:list` or `str`
    :returns: S_OK() on success, error report for invalid input types
    """
    if isinstance( flist, basestring ):
      flist = [flist]
    if not isinstance( flist, list ):
      return self._reportError("File passed must be either single file or list of files.")
    self.inputsandbox.extend(flist)
    return S_OK()

  #############################################################################
  def setOutputData(self, lfns, OutputPath = '', OutputSE = ''):
    """For specifying output data to be registered in Grid storage.  If a list
    of OutputSEs are specified the job wrapper will try each in turn until
    successful.

    Example usage:

    >>> job = UserJob()
    >>> job.setOutputData(['Ntuple.root'])

    :param lfns: Output data file or files
    :type lfns: Single `str` or `python:list` of strings ['','']
    :param str OutputPath: Optional parameter to specify the Path in the Storage, postpended to /ilc/user/u/username/
    :param OutputSE: Optional parameter to specify the Storage Element to store data or files, e.g. CERN-SRM
    :type OutputSE: `python:list` or `str`
    :returns: S_OK() on success, error report for invalid input types
    """
    kwargs = {'lfns' : lfns, 'OutputSE' : OutputSE, 'OutputPath' : OutputPath}
    if isinstance( lfns, list ) and lfns:
      outputDataStr = ';'.join(lfns)
      description = 'List of output data files'
      self._addParameter(self.workflow, 'UserOutputData', 'JDL', outputDataStr, description)
    elif isinstance( lfns, basestring ):
      description = 'Output data file'
      self._addParameter(self.workflow, 'UserOutputData', 'JDL', lfns, description)
    else:
      return self._reportError('Expected file name string or list of file names for output data', **kwargs)

    if OutputSE:
      description = 'User specified Output SE'
      if isinstance( OutputSE, basestring ):
        OutputSE = [OutputSE]
      elif not isinstance( OutputSE, list ):
        return self._reportError('Expected string or list for OutputSE', **kwargs)
      OutputSE = ';'.join(OutputSE)
      self._addParameter(self.workflow, 'UserOutputSE', 'JDL', OutputSE, description)

    if OutputPath:
      description = 'User specified Output Path'
      if not isinstance( OutputPath, basestring ):
        return self._reportError('Expected string for OutputPath', **kwargs)
      # Remove leading "/" that might cause problems with os.path.join.
      # bugfix: lstrip also copes with OutputPath consisting only of slashes;
      # the old `while OutputPath[0] == '/'` loop raised IndexError once the
      # string became empty
      OutputPath = OutputPath.lstrip('/')
      if OutputPath.count("ilc/user"):
        return self._reportError('Output path contains /ilc/user/ which is not what you want', **kwargs)
      self._addParameter(self.workflow, 'UserOutputPath', 'JDL', OutputPath, description)

    return S_OK()

  #############################################################################
  def setOutputSandbox( self, files ):
    """Specify output sandbox files.  If specified files are over 10MB, these
    may be uploaded to Grid storage with a notification returned in the
    output sandbox.

    .. Note ::
       Sandbox files are removed after 2 weeks.

    Example usage:

    >>> job = UserJob()
    >>> job.setOutputSandbox(['*.log','*.sh', 'myfile.txt'])

    Use the output sandbox only for small files. Larger files should be stored
    on the grid and downloaded later if necessary. See :func:`setOutputData`

    :param files: Output sandbox files
    :type files: Single `str` or `python:list` of strings ['','']
    :returns: S_OK() on success, error report for invalid input types
    """
    if isinstance( files, list ) and files:
      fileList = ";".join( files )
      description = 'Output sandbox file list'
      self._addParameter( self.workflow, 'OutputSandbox', 'JDL', fileList, description )
    elif isinstance( files, basestring ):
      description = 'Output sandbox file'
      self._addParameter( self.workflow, 'OutputSandbox', 'JDL', files, description )
    else:
      kwargs = {'files' : files}
      return self._reportError( 'Expected file string or list of files for output sandbox contents', **kwargs )

    return S_OK()

  def setILDConfig(self, version):
    """Define the ILDConfig package version to obtain.

    :param str version: ILDConfig version string, e.g. 'v02-00-02'
    """
    appName = 'ILDConfig'
    self._addSoftware(appName.lower(), version)

    self._addParameter( self.workflow, 'ILDConfigPackage', 'JDL', appName+version, 'ILDConfig package' )
    return S_OK()


  def setCLICConfig(self, version):
    """Define the CLIC Configuration package to obtain, copies steering files
    from CLIC Configuration folder to working directory

    :param str version: version string, e.g.: 'ILCSoft-2017-07-27'
    """
    appName = 'ClicConfig'
    self._addSoftware(appName.lower(), version)

    self._addParameter( self.workflow, 'ClicConfigPackage', 'JDL', appName+version, 'CLIC Config package' )
    return S_OK()
Exemplo n.º 32
0
def main():
    '''
    Script entry point: bundle the production's input files into batches of
    ``nrFilesPerJob`` and submit one Marlin grid job per batch for the
    top-asymmetry analysis chain.

    Exits through DIRAC ``dexit`` with a non-zero code on configuration or
    job-append errors, and with 0 on success.
  '''

    cliParams = Params()
    cliParams.registerSwitches()
    Script.parseCommandLine(ignoreErrors=True)

    consistent = cliParams.checkConsistency()
    if not consistent['OK']:
        gLogger.error("Error checking consistency:", consistent['Message'])
        Script.showHelp()
        dexit(2)

    ##Get prodID (kept as float: it is rendered with the {:04.0f} format below)
    prodID = float(cliParams.prodID)

    ##Get all possible input files
    inputFiles = getInputFiles(prodID)

    ##Get suffix for the repository file name (optional)
    suffix = cliParams.suffix
    if suffix: suffix = "_" + suffix

    ##Jet clustering (VLC algorithm settings)
    algorithm = "ValenciaPlugin {radius:.1f} {beta:.1f} {gamma:.1f}"
    jetRadius = float(cliParams.jetRadius)
    jetBeta = float(cliParams.jetBeta)
    jetGamma = float(cliParams.jetGamma)

    jetCluster = "ExclusiveNJets 2"
    jetRecomb = "E_scheme"

    ##Top tagger parameters
    deltaR = float(cliParams.deltaR)
    deltaP = float(cliParams.deltaP)
    cos_theta_W_max = float(cliParams.cos_theta_W_max)

    ##Sixfermion sample flag
    sixFermionSample = cliParams.sixFermionSample

    ##Number of files per job
    nrFilesPerJob = int(cliParams.nrFilesPerJob)

    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    repDir = "/afs/cern.ch/user/l/lstroem/clicdp/analysis/steering/chain/topasymmetry_wflavourtag/submit/{prodID:04.0f}/rep/".format(
        prodID=prodID)
    subprocess.call("mkdir -p " + repDir, shell=True)
    dirac = DiracILC(
        False
    )  #, repDir+"topasymmetry_vlc{suffix}.rep".format(suffix = suffix))

    jetAlgo = algorithm.format(radius=jetRadius, beta=jetBeta, gamma=jetGamma)
    inputFileList = []
    i = 0
    j = 1
    for inputFile in inputFiles:
        inputFileList.append(inputFile)
        i += 1
        ##Submit once a full batch is collected, or on the very last file
        if (i >= nrFilesPerJob * j) or (i == len(inputFiles)):

            jobName = "topasymmetry_chain_{jetSettings}_dR{deltaR:.2f}_dP{deltaP:.2f}_cthetaWmax{cos_theta_W_max:.2f}_1Jun2017_part{index}_{prodID:04.0f}".format(
                jetSettings=getJetSettings(jetAlgo, jetCluster, jetRecomb),
                deltaR=deltaR,
                deltaP=deltaP,
                cos_theta_W_max=cos_theta_W_max,
                index=j,
                prodID=prodID)
            jobGroup = "topasymmetry_chain_{prodID:04.0f}".format(
                prodID=prodID)
            job, outputFile, rootFile = defGridJob(jobName, jobGroup,
                                                   inputFileList)

            ##Check if outputfile already exists on the grid; if so, skip batch
            pOutCheck = subprocess.Popen(
                "dirac-dms-lfn-replicas /ilc/user/r/rstrom/" + jobGroup + "/" +
                rootFile,
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            outCheck, errCheck = pOutCheck.communicate()
            if "no such file" not in outCheck.lower():
                gLogger.notice('File exists! Skipping!')
                inputFileList = []
                j += 1
                continue  #use break if only part1, use continue to run over all parts
            res = job.append(
                defMarlin(outputFile,
                          rootFile,
                          jetAlgo=jetAlgo,
                          jetCluster=jetCluster,
                          jetRecomb=jetRecomb,
                          deltaR=deltaR,
                          deltaP=deltaP,
                          cos_theta_W_max=cos_theta_W_max,
                          sixFermionSample=sixFermionSample))

            if not res['OK']:  #Catch if there is an error
                print res['Message']  #Print the error message
                dexit(1)  # BUGFIX: was a bare 'dexit' (a no-op expression); must call it to exit

            ##Job submission
            print job.submit(dirac)
            inputFileList = []
            j += 1
            #break #add break if only part1

    gLogger.notice("All done!")
    dexit(0)
Exemplo n.º 33
0
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Whizard, Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob

from DIRAC import exit as dexit

# DiracILC instance: provides the process list and handles job submission.
dirac =DiracILC()

#wh.setOutputFile("myfile.stdhep")

# User job that chains the applications defined below.
j = UserJob()

# Whizard generation step: one ee_h_mumu event at 3 TeV with the SM model.
wh = Whizard(processlist=dirac.getProcessList())
wh.setEnergy(3000)
wh.setEvtType("ee_h_mumu")
wh.setNbEvts(1)
wh.setEnergy(3000)
# NOTE(review): the setEnergy(3000) call above repeats the earlier one —
# presumably redundant; confirm and drop one of them.
params = {}
params['USERB1']='F'
wh.setParameterDict(params)
wh.setModel("sm")
res = j.append(wh)
if not res['OK']:
    print res['Message']
    dexit(1)


# Mokka simulation step (its configuration continues beyond this excerpt).
mo = Mokka()
Exemplo n.º 34
0
class DiracILCTestCase( unittest.TestCase ):
  """ Base class for the DiracILC test cases
  """
  def setUp( self ):
    """set up the objects"""
    # Patch the Operations helper module in sys.modules before importing
    # DiracILC, so the import picks up the mock instead of the real CS client.
    ops_mock = Mock()
    mocked_modules = { 'DIRAC.ConfigurationSystem.Client.Helpers.Operations' : ops_mock }
    self.module_patcher = patch.dict( sys.modules, mocked_modules )
    self.module_patcher.start()
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    self.dilc = DiracILC()

    def setOptions(*args):
      # Fake Operations.getValue: the answer depends on the option path queried.
      if 'SingleReplicaSEs' in args[0]:
        return ['SE']
      if 'Minimum' in args[0]:
        return 1
      if args[0].endswith('PreferredSEs'):
        return ['Awesome-Tape-SE']

    ops_mock = Mock()
    ops_mock.getValue = Mock()
    ops_mock.getValue.side_effect = setOptions
    self.dilc.ops = ops_mock

  def tearDown( self ):
    """Undo the sys.modules patch installed in setUp."""
    self.module_patcher.stop()

  def test_getprocesslist( self ):
    """getProcessList builds the ProcessList from the gConfig path."""
    with patch('%s.gConfig.getValue' % MODULE_NAME, new=Mock(return_value='some_gconf_testval')) as conf_mock, \
         patch('%s.ProcessList' % MODULE_NAME, new=Mock()) as pl_mock:
      res = self.dilc.getProcessList()
      pl_mock.assert_called_once_with( 'some_gconf_testval' )
      assertEqualsImproved( res, pl_mock(), self )
      conf_mock.assert_called_once_with( '/LocalSite/ProcessListPath', '' )

  def test_getprocesslist_nopath( self ):
    """getProcessList falls back to the Operations option when gConfig is empty."""
    ops_mock = Mock()
    ops_mock.getValue.return_value = ''
    self.dilc.ops = ops_mock
    with patch('%s.gConfig.getValue' % MODULE_NAME, new=Mock(return_value='')) as conf_mock, \
         patch('%s.ProcessList' % MODULE_NAME, new=Mock()) as pl_mock:
      res = self.dilc.getProcessList()
      pl_mock.assert_called_once_with( '' )
      assertEqualsImproved( res, pl_mock(), self )
      conf_mock.assert_called_once_with( '/LocalSite/ProcessListPath', '' )
      ops_mock.getValue.assert_called_once_with( '/ProcessList/Location', '' )

  def test_presubmissionchecks_notoktosubmit( self ):
    """preSubmissionChecks refuses a job that is not flagged oktosubmit."""
    job_mock = Mock()
    job_mock.oktosubmit = False
    assertDiracFailsWith( self.dilc.preSubmissionChecks( job_mock, None ),
                          'you should use job.submit(dirac)', self )

  def test_presubmissionchecks_checkfails( self ):
    """preSubmissionChecks propagates a failing _do_check result."""
    job_mock = Mock()
    job_mock.oktosubmit = True
    with patch('%s.DiracILC._do_check' % MODULE_NAME, new=Mock(return_value=S_ERROR('mytest_check_failed'))):
      assertDiracFailsWith( self.dilc.preSubmissionChecks( job_mock, None ),
                            'mytest_check_failed', self )

  def test_presubmissionchecks_askuser_fails( self ):
    """preSubmissionChecks fails when the user declines the confirmation."""
    job_mock = Mock()
    job_mock.oktosubmit = True
    job_mock._askUser.return_value = S_ERROR( 'user says no' )
    self.dilc.checked = False
    with patch('%s.DiracILC._do_check' % MODULE_NAME, new=Mock(return_value=S_OK())):
      assertDiracFailsWith( self.dilc.preSubmissionChecks( job_mock, None ),
                            'user says no', self )

  def test_checkparams( self ):
    """checkparams delegates to preSubmissionChecks when there are no errors."""
    job_mock = Mock()
    job_mock.errorDict = {}
    with patch('%s.DiracILC.preSubmissionChecks' % MODULE_NAME, new=Mock(return_value=S_OK('mytest'))) as check_mock:
      assertDiracSucceedsWith( self.dilc.checkparams( job_mock ), 'mytest', self )
      check_mock.assert_called_once_with( job_mock, mode = '' )

  def test_checkparams_fails( self ):
    """checkparams fails with the job's accumulated errorDict."""
    job_mock = Mock()
    job_mock.errorDict = { 'myerror1' : [ 'Terrible failure' ], 'last_error' : [ 'True' ] }
    assertDiracFailsWith_equals( self.dilc.checkparams( job_mock ),
                                 { 'myerror1' : [ 'Terrible failure' ], 'last_error' : [ 'True' ] }, self )

  def test_giveprocesslist( self ):
    """giveProcessList returns the stored process list verbatim."""
    self.dilc.processList = '13985u185r9135r'
    assertEqualsImproved( self.dilc.giveProcessList(), '13985u185r9135r', self )

  def test_giveprocesslist_empty( self ):
    """giveProcessList passes through an empty string."""
    self.dilc.processList = ''
    assertEqualsImproved( self.dilc.giveProcessList(), '', self )

  def test_giveprocesslist_false( self ):
    """giveProcessList passes through False."""
    self.dilc.processList = False
    assertEqualsImproved( self.dilc.giveProcessList(), False, self )

  def test_giveprocesslist_none( self ):
    """giveProcessList passes through None."""
    self.dilc.processList = None
    assertEqualsImproved( self.dilc.giveProcessList(), None, self )

  def test_retrievelfns_norepo( self ):
    """retrieveRepositoryOutputDataLFNs succeeds trivially without a repository."""
    self.dilc.jobRepo = None
    assertDiracSucceeds( self.dilc.retrieveRepositoryOutputDataLFNs(), self )

  def test_retrievelfns( self ):
    """retrieveRepositoryOutputDataLFNs collects LFNs for jobs in requested states."""
    repo_mock = Mock()
    # Repository entries exercising all combinations of State/UserOutputData.
    ret_dict = { '1' : { 'State' : 'Done', 'UserOutputData' : '1389' }, '2' : {},
                 '3' : { 'State' : 'secret_teststate' }, '4' : { 'State' : 'invalid_state' },
                 '5' : { 'State' : 'Done', 'UserOutputData' : 0 }, '6' : { 'ignore_me' : True },
                 '7' : { 'State' : 'secret_teststate', 'UserOutputData' : 0 },
                 '148' : { 'State' : 'Done', 1 : False, True : 941, 'values_' : 'keys' } }
    repo_mock.readRepository.return_value = S_OK( ret_dict )
    self.dilc.jobRepo = repo_mock
    with patch('%s.DiracILC.parameters' % MODULE_NAME, new=Mock(side_effect=[S_OK({'UploadedOutputData':'/my/test/lfn1'}),S_ERROR(),S_OK({}),S_OK({'some_entries':'some_values',1:True,'UploadedOutputData':'/more_lfns/append/testlfn.log'})])) as param_mock:
      assertEqualsImproved( self.dilc.retrieveRepositoryOutputDataLFNs( [ 'Done', 'secret_teststate' ] ),
                            [ '/my/test/lfn1', '/more_lfns/append/testlfn.log' ], self )
      assertMockCalls( param_mock, [ 3, 5, 7, 148 ], self )

  def test_docheck_checksandbox_fails( self ):
    """_do_check propagates a failing input-sandbox LFN check."""
    job_mock = Mock()
    job_mock.inputsandbox = [ 'mysandbox', 'other_value' ]
    with patch('%s.DiracILC.checkInputSandboxLFNs' % MODULE_NAME, new=Mock(return_value=S_ERROR('test_err_sandbox'))) as check_mock:
      assertDiracFailsWith( self.dilc._do_check( job_mock ), 'test_err_sandbox', self )
      check_mock.assert_called_once_with( job_mock )

  def test_docheck_too_many_lists( self ):
    """_do_check rejects nested lists inside the input sandbox."""
    job_mock = Mock()
    job_mock.inputsandbox = [ 'mysandbox', [ 'im_a_sandbox_file.stdhep', [ 'evil_list', 'deletethis'] ] ]
    assertDiracFailsWith( self.dilc._do_check( job_mock ), 'too many lists of lists in the input sandbox', self )

  def test_docheck_checkapps_fails( self ):
    """_do_check fails when one of the software packages cannot be checked."""
    platform_mock = Mock()
    platform_mock.getValue.return_value = 'pf14081'
    apps_mock = Mock()
    apps_mock.getValue.return_value = 'Myapp1v.9.2.1;other_Appv.91.3;more_Dependencies.1;LasT_APP.0'
    param_dict = { 'Platform' : platform_mock, 'SoftwarePackages' : apps_mock }
    job_mock = Mock()
    wf_mock = Mock()
    wf_mock.findParameter.side_effect = lambda param_name : param_dict[ param_name ]
    job_mock.inputsandbox = [ 'mysandbox', 'other_value', [ 'sandbox_file1.txt', 'sandbox_file2.log',
                                                            'last.file' ] ]
    job_mock._resolveInputSandbox.return_value = [ 'resolved_file.1.txt', 'other_resolved_file.txt' ]
    job_mock.workflow = wf_mock
    # The fourth _checkapp call fails; the earlier three succeed.
    with patch('%s.DiracILC.checkInputSandboxLFNs' % MODULE_NAME, new=Mock(return_value=S_OK())) as checksb_mock, \
         patch('%s.DiracILC._checkapp' % MODULE_NAME, new=Mock(side_effect=[S_OK()] * 3 + [S_ERROR('checkapp_failed_testme')])) as checkapp_mock:
      assertDiracFailsWith( self.dilc._do_check( job_mock ), 'checkapp_failed_testme', self )
      checksb_mock.assert_called_once_with( job_mock )
      job_mock._resolveInputSandbox.assert_called_once_with( [ 'mysandbox', 'other_value',
                                                               'sandbox_file1.txt', 'sandbox_file2.log',
                                                               'last.file' ] )
      job_mock._addParameter.assert_called_once_with(
        wf_mock, 'InputSandbox', 'JDL', 'resolved_file.1.txt;other_resolved_file.txt', 'Input sandbox file list' )
      assertMockCalls( checkapp_mock, [ ( 'pf14081', 'myapp1v', '9.2.1' ),
                                        ( 'pf14081', 'other_appv', '91.3' ),
                                        ( 'pf14081', 'more_dependencies', '1' ),
                                        ( 'pf14081', 'last_app', '0' ) ], self )

  def test_docheck_checkoutputpath_fails( self ):
    """_do_check fails when the user output path check fails."""
    platform_mock = Mock()
    platform_mock.getValue.return_value = 'pf14081'
    apps_mock = Mock()
    apps_mock.getValue.return_value = 'Myapp1v.9.2.1;other_Appv.91.3;more_Dependencies.1;LasT_APP.0'
    path_mock = Mock()
    path_mock.getValue.return_value = 'path1948512895'
    param_dict = { 'Platform' : platform_mock, 'SoftwarePackages' : apps_mock, 'UserOutputPath' : path_mock }
    job_mock = Mock()
    wf_mock = Mock()
    wf_mock.findParameter.side_effect = lambda param_name : param_dict[ param_name ]
    job_mock.inputsandbox = [ 'mysandbox', 'other_value', [ 'sandbox_file1.txt', 'sandbox_file2.log',
                                                            'last.file' ] ]
    job_mock._resolveInputSandbox.return_value = [ 'resolved_file.1.txt', 'other_resolved_file.txt' ]
    job_mock.workflow = wf_mock
    with patch('%s.DiracILC.checkInputSandboxLFNs' % MODULE_NAME, new=Mock(return_value=S_OK())) as checksb_mock, \
         patch('%s.DiracILC._checkapp' % MODULE_NAME, new=Mock(return_value=S_OK())) as checkapp_mock, \
         patch('%s.DiracILC._checkoutputpath' % MODULE_NAME, new=Mock(return_value=S_ERROR('outputpath_check_testerr'))) as checkpath_mock:
      assertDiracFailsWith( self.dilc._do_check( job_mock ), 'outputpath_check_testerr', self )
      checksb_mock.assert_called_once_with( job_mock )
      job_mock._resolveInputSandbox.assert_called_once_with( [ 'mysandbox', 'other_value',
                                                               'sandbox_file1.txt', 'sandbox_file2.log',
                                                               'last.file' ] )
      job_mock._addParameter.assert_called_once_with(
        wf_mock, 'InputSandbox', 'JDL', 'resolved_file.1.txt;other_resolved_file.txt', 'Input sandbox file list' )
      assertMockCalls( checkapp_mock, [ ( 'pf14081', 'myapp1v', '9.2.1' ),
                                        ( 'pf14081', 'other_appv', '91.3' ),
                                        ( 'pf14081', 'more_dependencies', '1' ),
                                        ( 'pf14081', 'last_app', '0' ) ], self )
      checkpath_mock.assert_called_once_with( 'path1948512895' )

  def test_docheck_checkconsistency_fails( self ):
    """_do_check fails when the output data consistency check fails."""
    platform_mock = Mock()
    platform_mock.getValue.return_value = 'pf14081'
    apps_mock = Mock()
    apps_mock.getValue.return_value = 'Myapp1v.9.2.1;other_Appv.91.3;more_Dependencies.1;LasT_APP.0'
    path_mock = Mock()
    path_mock.getValue.return_value = 'path1948512895'
    data_mock = Mock()
    data_mock.getValue.return_value = 'data1389518'
    param_dict = { 'Platform' : platform_mock, 'SoftwarePackages' : apps_mock,
                   'UserOutputPath' : path_mock, 'UserOutputData' : data_mock }
    job_mock = Mock()
    job_mock.addToOutputSandbox = 'job_sandbox13895'
    wf_mock = Mock()
    wf_mock.findParameter.side_effect = lambda param_name : param_dict[ param_name ]
    job_mock.inputsandbox = [ 'mysandbox', 'other_value', [ 'sandbox_file1.txt', 'sandbox_file2.log',
                                                            'last.file' ] ]
    job_mock._resolveInputSandbox.return_value = [ 'resolved_file.1.txt', 'other_resolved_file.txt' ]
    job_mock.workflow = wf_mock
    with patch('%s.DiracILC.checkInputSandboxLFNs' % MODULE_NAME, new=Mock(return_value=S_OK())) as checksb_mock, \
         patch('%s.DiracILC._checkapp' % MODULE_NAME, new=Mock(return_value=S_OK())) as checkapp_mock, \
         patch('%s.DiracILC._checkoutputpath' % MODULE_NAME, new=Mock(return_value=S_OK())) as checkpath_mock, \
         patch('%s.DiracILC._checkdataconsistency' % MODULE_NAME, new=Mock(return_value=S_ERROR('consistency_testerr'))) as checkconsistency_mock:
      assertDiracFailsWith( self.dilc._do_check( job_mock ), 'consistency_testerr', self )
      checksb_mock.assert_called_once_with( job_mock )
      job_mock._resolveInputSandbox.assert_called_once_with( [ 'mysandbox', 'other_value',
                                                               'sandbox_file1.txt', 'sandbox_file2.log',
                                                               'last.file' ] )
      job_mock._addParameter.assert_called_once_with(
        wf_mock, 'InputSandbox', 'JDL', 'resolved_file.1.txt;other_resolved_file.txt', 'Input sandbox file list' )
      assertMockCalls( checkapp_mock, [ ( 'pf14081', 'myapp1v', '9.2.1' ),
                                        ( 'pf14081', 'other_appv', '91.3' ),
                                        ( 'pf14081', 'more_dependencies', '1' ),
                                        ( 'pf14081', 'last_app', '0' ) ], self )
      checkpath_mock.assert_called_once_with( 'path1948512895' )
      checkconsistency_mock.assert_called_once_with( 'data1389518', 'job_sandbox13895' )

  def test_checkapp( self ):
    """_checkapp fails when neither TarBall nor CVMFSPath is configured."""
    ops_mock = Mock()
    ops_mock.getValue.return_value = ''
    self.dilc.ops = ops_mock
    assertDiracFailsWith( self.dilc._checkapp( 'test_platform_341', 'testapp', 'v13.2' ),
                          'could not find the specified software testapp_v13.2 for test_platform_341, check in CS',
                          self )
    assertMockCalls( ops_mock.getValue, [
      ( '/AvailableTarBalls/test_platform_341/testapp/v13.2/TarBall', '' ),
      ( '/AvailableTarBalls/test_platform_341/testapp/v13.2/CVMFSPath', '' ) ], self )

  def test_checkoutputpath_invalidchar_1( self ):
    """_checkoutputpath rejects a URL-like path."""
    assertDiracFailsWith( self.dilc._checkoutputpath( 'http://www.mysitedoesnotexist3h3.abc/some/file.txt' ),
                          'invalid path', self )

  def test_checkoutputpath_invalidchar_2( self ):
    """_checkoutputpath rejects a path containing '/./'."""
    assertDiracFailsWith( self.dilc._checkoutputpath( '/my/dir/./some/file.log' ),
                          'invalid path', self )

  def test_checkoutputpath_invalidchar_3( self ):
    """_checkoutputpath rejects a path containing '/../'."""
    assertDiracFailsWith( self.dilc._checkoutputpath( '/my/dir/../dir2/somefile.txt' ),
                          'invalid path', self )

  def test_checkoutputpath_trailing_slash( self ):
    """_checkoutputpath rejects a path with a trailing slash and whitespace."""
    assertDiracFailsWith( self.dilc._checkoutputpath( '/my/dir/myfile.txt/  ' ),
                          'invalid path', self )

  def test_checkdataconsistency_outputdata_sandbox_equal( self ):
    """_checkdataconsistency rejects an item present in both data and sandbox."""
    assertDiracFailsWith( self.dilc._checkdataconsistency( 'same_item;something_else',
                                                           [ 'distinct_item1', 'same_item' ] ),
                          'output data and sandbox should not contain the same thing', self )

  def test_checkdataconsistency_wildcardchar( self ):
    """_checkdataconsistency rejects wildcard characters in the output data."""
    assertDiracFailsWith( self.dilc._checkdataconsistency(
      '/test/dir/file.txt;/mydir/something/*;/file/dir/log.log',
      [ '/input/sandbox.pdf', '/other/sb/file.stdhep' ] ),
                          'wildcard character in outputdata definition', self )

  def test_checkinputsb_getreplicas_notok( self ):
    """checkInputSandboxLFNs fails when getReplicas itself errors."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value.getValue.return_value = '/some/file.txt;/other/some/file.stdhep;LFN:/my/dir/inputsandbox/in1.stdio;lfn:/my/dir/inputsandbox/in2.pdf'
    with patch('%s.DiracILC.getReplicas' % MODULE_NAME, new=Mock(return_value=S_ERROR('some_err'))) as replica_mock:
      assertDiracFailsWith( self.dilc.checkInputSandboxLFNs( job_mock ), 'could not get replicas', self )
      replica_mock.assert_called_once_with( [ '/my/dir/inputsandbox/in1.stdio', '/my/dir/inputsandbox/in2.pdf' ] )

  def test_checkinputsb_getreplicas_fails( self ):
    """checkInputSandboxLFNs fails when some LFNs have no replicas."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value.getValue.return_value = '/some/file.txt;/other/some/file.stdhep;LFN:/my/dir/inputsandbox/in1.stdio;lfn:/my/dir/inputsandbox/in2.pdf'
    ret_dict = { 'Failed' : [ '/failed/replica1', '/other/inval/replica' ], 'Successful' : {} }
    with patch('%s.DiracILC.getReplicas' % MODULE_NAME, new=Mock(return_value=S_OK(ret_dict))) as replica_mock:
      assertDiracFailsWith( self.dilc.checkInputSandboxLFNs( job_mock ), 'failed to find replicas', self )
      replica_mock.assert_called_once_with( [ '/my/dir/inputsandbox/in1.stdio', '/my/dir/inputsandbox/in2.pdf' ] )

  def test_checkinputsb( self ):
    """checkInputSandboxLFNs succeeds when all LFN replicas are found."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value.getValue.return_value = '/some/file.txt;/other/some/file.stdhep;LFN:/my/dir/inputsandbox/in1.stdio;lfn:/my/dir/inputsandbox/in2.pdf'
    ret_dict = {'Failed': [], 'Successful': {'/one/replica': {'SE': 'surl'}}}
    with patch('%s.DiracILC.getReplicas' % MODULE_NAME, new=Mock(return_value=S_OK(ret_dict))) as replica_mock:
      assertDiracSucceeds( self.dilc.checkInputSandboxLFNs( job_mock ), self )
      replica_mock.assert_called_once_with( [ '/my/dir/inputsandbox/in1.stdio', '/my/dir/inputsandbox/in2.pdf' ] )

  def test_checkinputsb_notInputSB(self):
    """checkInputSandboxLFNs succeeds when the job has no InputSandbox parameter."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value = None
    assertDiracSucceeds(self.dilc.checkInputSandboxLFNs(job_mock), self)

  def test_checkinputsb_notInputSB_Value(self):
    """checkInputSandboxLFNs succeeds when the InputSandbox value is empty."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value.getValue.return_value = ''
    assertDiracSucceeds(self.dilc.checkInputSandboxLFNs(job_mock), self)

  def test_checkinputsb_noLFNs(self):
    """checkInputSandboxLFNs succeeds when the sandbox contains no LFN entries."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value.getValue.return_value = '/some/file.txt;/other/some/file.stdhep'
    assertDiracSucceeds(self.dilc.checkInputSandboxLFNs(job_mock), self)

  def test_checkinputsb_noRepl(self):
    """checkInputSandboxLFNs fails when no acceptable replica exists."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value.getValue.return_value = 'LFN:/some/file.txt'
    ret_dict = {'Failed': [], 'Successful': {'/some/file.txt': {'Bad-SE': 'surl'}}}

    def setOptions(*args):
      # The replica is neither on a SingleReplica SE nor enough in number.
      if 'SingleReplicaSEs' in args[0]:
        return ['Awesome-Disk-SE']
      if 'Minimum' in args[0]:
        return 2
      if args[0].endswith('PreferredSEs'):
        return ['Awesome-Tape-SE']

    ops_mock = Mock()
    ops_mock.getValue = setOptions
    self.dilc.ops = ops_mock

    with patch('%s.DiracILC.getReplicas' % MODULE_NAME, new=Mock(return_value=S_OK(ret_dict))) as replica_mock:
      assertDiracFailsWith(self.dilc.checkInputSandboxLFNs(job_mock), 'Not enough replicas', self)
      replica_mock.assert_called_once_with(['/some/file.txt'])

  def test_checkinputsb_goodRepl(self):
    """checkInputSandboxLFNs succeeds with a replica on a SingleReplica SE."""
    job_mock = Mock()
    job_mock.workflow.findParameter.return_value.getValue.return_value = 'LFN:/some/file.txt'
    ret_dict = {'Failed': [], 'Successful': {'/some/file.txt': {'Awesome-Disk-SE': 'surl'}}}

    def setOptions(*args):
      # The single replica sits on an accepted SingleReplica SE.
      if 'SingleReplicaSEs' in args[0]:
        return ['Awesome-Disk-SE']
      if 'Minimum' in args[0]:
        return 2
      if args[0].endswith('PreferredSEs'):
        return ['Awesome-Tape-SE']

    ops_mock = Mock()
    ops_mock.getValue = setOptions
    self.dilc.ops = ops_mock

    with patch('%s.DiracILC.getReplicas' % MODULE_NAME, new=Mock(return_value=S_OK(ret_dict))) as replica_mock:
      assertDiracSucceeds(self.dilc.checkInputSandboxLFNs(job_mock), self)
      replica_mock.assert_called_once_with(['/some/file.txt'])
Exemplo n.º 35
0
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Mokka, Marlin, OverlayInput
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

from DIRAC import exit as dexit

# DiracILC with a job repository file ("repo.rep") to track submitted jobs.
d = DiracILC(True, "repo.rep")

# Split the total event count into jobs of n_evts_per_job events each.
n_evts = 500
n_evts_per_job = 100
n_jobs = n_evts / n_evts_per_job  # Python 2 integer division -> 5 jobs

for i in range(n_jobs):
    j = UserJob()

    # Mokka simulation: 3 TeV particle-gun electrons, CLIC ILD CDR detector.
    mo = Mokka()
    mo.setEnergy(3000)
    mo.setVersion("0706P08")
    mo.setSteeringFile("clic_ild_cdr.steer")
    mo.setMacFile("particlegun_electron.mac")
    mo.setOutputFile("MyFile.slcio")
    mo.setNbEvts(n_evts_per_job)
    res = j.append(mo)
    if not res['OK']:
        print res['Message']
        break
    # Marlin reconstruction step (configuration continues beyond this excerpt).
    ma = Marlin()
    ma.setVersion("v0111Prod")
Exemplo n.º 36
0
class JobCreater(object):
  """contains all the versions and parameters to create all theses tests"""

  # pylint: disable=too-many-instance-attributes
  # Test parameters, necessary due to amount of tests in this class.
  def __init__( self,
                clip,
                params
              ):
    """Store the test configuration and create the DiracILC instance.

    :param clip: parsed command-line options object (provides ``testOverlay``,
      ``submitMode``, ...)
    :param dict params: application versions, steering files and other test
      configuration values; ``mokkaVersion`` and ``rootVersion`` are required
    """
    self.clip = clip
    self.ildConfig = params.get( "ildConfig", None )
    self.alwaysOverlay = params.get( "alwaysOverlay", False )
    # Overlay runs when requested on the command line or forced by config.
    self.runOverlay = self.clip.testOverlay or self.alwaysOverlay
    self.mokkaVersion = params["mokkaVersion"]
    self.mokkaSteeringFile = params.get( "mokkaSteeringFile" )
    self.detectorModel = params.get( "detectorModel" )
    self.marlinVersion = params.get( "marlinVersion" )
    self.marlinSteeringFile = params.get( "marlinSteeringFile" )
    self.ddsimVersion = params.get( "ddsimVersion" )
    self.ddsimDetectorModel = params.get( "ddsimDetectorModel" )
    self.ddsimInputFile = params.get( "ddsimInputFile" )
    self.marlinInputdata = params.get( "marlinInputdata" )
    self.gearFile = params.get( "gearFile" )
    self.lcsimVersion = params.get( "lcsimVersion" )
    self.steeringFileVersion = params.get( "steeringFileVersion", None )
    self.rootVersion = params["rootVersion"]

    self.whizard2Version = params.get( "whizard2Version" )
    self.whizard2SinFile = params.get( "whizard2SinFile" )

    self.energy = params.get( "energy" )
    self.backgroundType = params.get( "backgroundType" )
    self.machine = params.get( "machine" )

    # NOTE: gearFile, marlinSteeringFile and marlinVersion used to be
    # assigned a second time from the same keys; the redundant duplicate
    # assignments were removed.

    self.lcsimPreSteeringFile  = params.get( "lcsimPreSteeringFile" )
    self.lcsimPostSteeringFile = params.get( "lcsimPostSteeringFile" )

    self.fccSwPath = params.get( "fccSwPath" )
    self.fccSwSteeringFile = params.get( "fccSwSteeringFile" )

    self.fccAnalysisSteeringFile = params.get( "fccAnalysisSteeringFile" )

    ### other things needed to run tests
    self.log = gLogger.getSubLogger("JobCreater")

    from ILCDIRAC.Interfaces.API.DiracILC                  import DiracILC, __RCSID__ as drcsid
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob      import __RCSID__ as jrcsid
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import __RCSID__ as apprcsid

    # In local submission mode print the RCS IDs of the involved modules.
    if self.clip.submitMode == "local":
      self.log.notice("")
      self.log.notice("       DIRAC RCSID:", drcsid )
      self.log.notice("         Job RCSID:", jrcsid )
      self.log.notice("Applications RCSID:", apprcsid )
      self.log.notice("")

    self.diracInstance = DiracILC(False, 'tests.rep')
    # Jobs created by the create* methods, keyed by a short label.
    self.jobList = {}

    
  def createDDSimTest( self, inputfile = None, detectorModel = None):
    """Create a job running a single DDSim step.

    :param inputfile: input file for the simulation; defaults to
      ``self.ddsimInputFile``
    :param detectorModel: custom detector model file; when given it is also
      shipped in the input sandbox, otherwise ``self.ddsimDetectorModel``
      is used
    :returns: S_OK(job) on success, S_ERROR otherwise
    """
    if inputfile is None:
      inputfile = self.ddsimInputFile
    if detectorModel is None:
      detectorModel = self.ddsimDetectorModel
      sandbox = [inputfile]
    else:
      # A custom model file must travel with the job.
      sandbox = [inputfile, detectorModel]
    from ILCDIRAC.Interfaces.API.NewInterface.Applications.DDSim import DDSim
    jobdd = self.getJob()
    ddsim = DDSim()
    ddsim.setVersion(self.ddsimVersion)
    ddsim.setDetectorModel(detectorModel)
    ddsim.setNumberOfEvents(1)
    ddsim.setInputFile(inputfile)
    jobdd.setInputSandbox(sandbox)
    appended = jobdd.append(ddsim)
    if not appended['OK']:
      self.log.error("Failed adding DDSim:", appended['Message'])
      return S_ERROR("Failed adding DDSim to Job")
    return S_OK(jobdd)

  def createWhizard2Test( self ):
    """Create a job running a single Whizard2 generation step.

    :returns: S_OK(job) on success, S_ERROR otherwise
    """
    from ILCDIRAC.Interfaces.API.NewInterface.Applications.Whizard2 import Whizard2
    job = self.getJob()
    generator = Whizard2()
    generator.setVersion(self.whizard2Version)
    generator.setNumberOfEvents(1)
    generator.setSinFile(self.whizard2SinFile)
    generator.setOutputFile("test.stdhep")
    appended = job.append(generator)
    if not appended['OK']:
      self.log.error("Failed adding Whizard2:", appended['Message'])
      return S_ERROR("Failed adding Whizard2 to Job")
    return S_OK(job)

  def createMokkaTest(self):
    """create a job running mokka, and maybe whizard before

    Input comes either from a preceding Whizard step (``testChain``) or from
    a fixed stdhep file on the grid (``testInputData``); fails if neither
    mode is selected.

    :returns: S_OK(job) on success, S_ERROR otherwise
    """
    self.log.notice("Creating jobs for Mokka")
    #(Whizard + )Mokka
    jobmo = self.getJob()
    if self.clip.testChain:
      whmo = self.getWhizard(2)
      res = jobmo.append(whmo)
      if not res['OK']:
        self.log.error("Failed adding Whizard:", res['Message'])
        return S_ERROR("Failed adding Whizard")
    elif self.clip.testInputData:
      jobmo.setInputData("/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep")
    else:
      self.log.error("Mokka does not know where to get its input from")
      return S_ERROR("Mokka does not know where to gets its input from")

    mo = self.getMokka()

    if self.clip.testChain:
      # Chain mode: Mokka consumes the events produced by the Whizard step.
      mo.getInputFromApp(whmo)
    else:
      mo.setNumberOfEvents(1)
    res = jobmo.append(mo)
    if not res['OK']:
      self.log.error("Failed adding Mokka:", res['Message'])
      return S_ERROR("Failed adding Mokka to Job")
    jobmo.setOutputData("testsim.slcio", OutputSE="CERN-DIP-4")
    self.jobList['Mokka1'] = jobmo
    return S_OK(jobmo)

  def createRootScriptTest(self):
    """Create a job running a Root script.

    :returns: S_OK(job) on success, S_ERROR otherwise
    """
    self.log.notice("Creating jobs for Root")
    job = self.getJob()
    # The script and its input files travel in the sandbox.
    job.setInputSandbox(["root.sh", "input.root","input2.root"])
    rootApp = self.getRoot()
    appended = job.append(rootApp)
    if not appended['OK']:
      self.log.error("Failed adding Root:", appended['Message'])
      return S_ERROR("Failed adding Root to Job")
    self.jobList['Root'] = job
    return S_OK(job)

  def createRootHaddTest(self):
    """Create a job running Root's ``hadd`` over two input files.

    :returns: S_OK(job) on success, S_ERROR otherwise
    """
    self.log.notice("Creating jobs for Root")
    job = self.getJob()
    job.setInputSandbox(["input.root","input2.root"])
    rootApp = self.getRoot()
    rootApp.setScript("hadd")
    appended = job.append(rootApp)
    if not appended['OK']:
      self.log.error("Failed adding Root:", appended['Message'])
      return S_ERROR("Failed adding Root to Job")
    self.jobList['Root'] = job
    return S_OK(job)

  def createRootMacroTest(self):
    """Create a job running a Root macro.

    :returns: S_OK(job) on success, S_ERROR otherwise
    """
    self.log.notice("Creating jobs for Root")
    job = self.getJob()
    # The macro and its input files travel in the sandbox.
    job.setInputSandbox(["func.C", "input.root","input2.root"])
    macroApp = self.getRootMacro()
    macroApp.setScript("func.C")
    appended = job.append(macroApp)
    if not appended['OK']:
      self.log.error("Failed adding Root:", appended['Message'])
      return S_ERROR("Failed adding Root to Job")
    self.jobList['Root'] = job
    return S_OK(job)

  def getOverlay(self, nbevts):
    """ Create an overlay step

    :param int nbevts: number of signal events per job
    :returns: configured OverlayInput application
    """
    pathToFiles = None
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import OverlayInput
    overlay = OverlayInput()
    if self.energy==350:
      if self.detectorModel=="ILD_o1_v05":
        # Known location of overlay files for 350 GeV with ILD_o1_v05.
        pathToFiles="/ilc/user/s/sailer/testFiles/overlay/ild_350/"
    if pathToFiles:
      overlay.setPathToFiles(pathToFiles)
    else:
      # No explicit path known: let OverlayInput resolve the files from
      # machine/energy/detector instead.
      self.log.warn("better define pathToFiles for this overlay: %s, %s, %s" %
                    (self.energy, self.machine, self.backgroundType) )
      overlay.setMachine(self.machine)
      overlay.setEnergy(self.energy)
      overlay.setDetectorModel(self.detectorModel)

    overlay.setBkgEvtType(self.backgroundType)
    overlay.setBXOverlay(60)
    overlay.setGGToHadInt(0.3)
    overlay.setNumberOfSignalEventsPerJob(nbevts)

    return overlay

  def getMokka(self):
    """Return a Mokka simulation application configured from the test parameters."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Mokka
    app = Mokka()
    app.setVersion(self.mokkaVersion)
    app.setSteeringFile(self.mokkaSteeringFile)
    app.setOutputFile("testsim.slcio")
    app.setDetectorModel(self.detectorModel)
    if self.steeringFileVersion:
      app.setSteeringFileVersion(self.steeringFileVersion)
    return app

  def getRoot(self):
    """Return a RootScript application running ``root.sh`` on the test inputs."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import RootScript
    app = RootScript()
    app.setScript("root.sh")
    app.setArguments("output.root input.root input2.root")
    app.setVersion(self.rootVersion)
    app.setOutputFile("output.root")
    return app

  def getRootMacro(self):
    """Return a RootMacro application running ``func.C`` on ``input.root``."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import RootMacro
    app = RootMacro()
    app.setMacro("func.C")
    # the escaped quotes are needed so root receives a quoted TString argument
    app.setArguments(r"\"input.root\"")
    app.setVersion(self.rootVersion)
    return app

  @staticmethod
  def getSLIC():
    """Return a SLIC simulation application with fixed CDR test settings."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLIC
    app = SLIC()
    app.setVersion('v2r9p8')
    app.setSteeringFile('defaultClicCrossingAngle.mac')
    app.setDetectorModel('clic_sid_cdr')
    app.setOutputFile('testsim.slcio')
    return app

  @staticmethod
  def getSLICPandora():
    """Return a SLICPandora reconstruction application with fixed CDR settings."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLICPandora
    app = SLICPandora()
    app.setVersion('CLIC_CDR')
    app.setDetectorModel('clic_sid_cdr')
    app.setPandoraSettings("PandoraSettingsSlic.xml")
    app.setOutputFile('testpandora.slcio')
    return app


  def getMarlin(self):
    """Return a Marlin reconstruction application configured from the test parameters."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
    app = Marlin()
    app.setVersion(self.marlinVersion)
    app.setSteeringFile(self.marlinSteeringFile)
    app.setGearFile(self.gearFile)
    app.setOutputDstFile("testmarlinDST.slcio")
    app.setOutputRecFile("testmarlinREC.slcio")
    app.setNumberOfEvents(1)
    return app

  def getDD(self):
    """Return a DDSim simulation application configured from the test parameters."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications.DDSim import DDSim
    app = DDSim()
    app.setVersion(self.ddsimVersion)
    app.setDetectorModel(self.ddsimDetectorModel)
    app.setInputFile(self.ddsimInputFile)
    app.setNumberOfEvents(2)
    return app
  


  def getLCSIM(self, prepandora = True):
    """Return an LCSIM application.

    :param bool prepandora: when True configure the pre-Pandora step with a
      single output file, otherwise the post-Pandora step with REC/DST outputs
    """
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import LCSIM
    app = LCSIM()
    app.setVersion('CLIC_CDR')
    app.setDetectorModel('clic_sid_cdr.zip')
    if prepandora:
      app.setSteeringFile(self.lcsimPreSteeringFile)
      app.setOutputFile("testlcsim.slcio")
    else:
      app.setSteeringFile(self.lcsimPostSteeringFile)
      app.setOutputDstFile("testlcsimDST.slcio")
      app.setOutputRecFile("testlcsimREC.slcio")
    app.setTrackingStrategy("defaultStrategies_clic_sid_cdr.xml")
    return app

  def getFccSw(self):
    """Return an FccSw application configured from the test parameters."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import FccSw
    app = FccSw()
    app.fccSwPath = self.fccSwPath
    app.setSteeringFile(self.fccSwSteeringFile)
    return app

  def getFccAnalysis(self):
    """Return an FccAnalysis application configured from the test parameters."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import FccAnalysis
    app = FccAnalysis()
    app.setSteeringFile(self.fccAnalysisSteeringFile)
    return app

  @staticmethod
  def getStdhepcut(generated):
    """Return a StdhepCutJava application with an inline invariant-mass cut.

    :param int generated: number of generated events the cut step should process
    """
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import StdhepCutJava
    app = StdhepCutJava()
    app.setVersion('1.0')
    app.setSelectionEfficiency(1.)
    app.setInlineCuts("leptonInvMass_R 13 100 200")
    app.setSteeringFileVersion("V18")
    app.setMaxNbEvts(1)
    app.setNumberOfEvents(generated)
    return app

  @staticmethod
  def getStdhepSplit():
    """Return a StdHepSplit application producing 5-event output files."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import StdHepSplit
    app = StdHepSplit()
    app.setVersion("V2")
    app.setNumberOfEventsPerFile(5)
    app.setOutputFile("teststdhepsplit.stdhep")
    app.setMaxRead(10)
    return app

  @staticmethod
  def getLCIOSplit(events_per_file):
    """Return a SLCIOSplit application.

    :param int events_per_file: number of events per output file
    """
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLCIOSplit
    app = SLCIOSplit()
    app.setNumberOfEventsPerFile(events_per_file)
    app.setOutputFile("testlciosplit.slcio")
    return app

  @staticmethod
  def getLCIOConcat():
    """Return a SLCIOConcatenate application writing a single merged file."""
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import SLCIOConcatenate
    app = SLCIOConcatenate()
    app.setOutputFile("testlcioconcat.slcio")
    return app

  def getJob(self):
    """Return a generic UserJob preconfigured identically for all tests."""
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    job = UserJob()
    job.setName("Testing")
    job.setJobGroup("Tests")
    job.setCPUTime(30000)
    job.dontPromptMe()
    job.setLogLevel("VERBOSE")
    job.setPlatform("x86_64-slc5-gcc43-opt")
    job.setOutputSandbox(["*.log","*.xml", "*.sh"])
    # extra workflow parameters exercised by the failover/platform tests
    job._addParameter( job.workflow, 'TestFailover', 'String', True, 'Test failoverRequest')
    job._addParameter( job.workflow, 'Platform', 'JDL', "x86_64-slc5-gcc43-opt", 'OS Platform')
    if self.ildConfig:
      job.setILDConfig(self.ildConfig)
    return job



  def getWhizardModel(self, nbevts, energy, model):
    """ Create a default whizard

    Builds the full whizard parameter dictionary for either the Standard
    Model ("sm") or a SUSY model and returns the configured application.

    :param int nbevts: number of events to generate
    :param int energy: centre-of-mass energy in GeV (1400 selects a
      dedicated beam-spectrum mode)
    :param str model: whizard physics model; anything other than "sm"
      switches to the SUSY process and SLHA input file
    :returns: configured Whizard application
    """
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Whizard
    # process id depends on the model: e+e- pair for SM, selectron pair otherwise
    proddict = "e2e2_o"
    if model != "sm":
      proddict = "se2se2_r"
    whiz = Whizard(processlist = self.diracInstance.getProcessList())
    whiz.setModel(model)
    pdict = {}
    pdict['process_input'] = {}
    pdict['process_input']['process_id'] = proddict
    pdict['process_input']['sqrts'] = energy
    if model != 'sm':
      # SUSY spectrum is read from an external SLHA file
      pdict['process_input']['input_file'] = "LesHouches.msugra_1"
      pdict['process_input']['input_slha_format'] = 'T'

    pdict['process_input']['beam_recoil'] = 'T'

    pdict['integration_input'] = {}
    pdict['integration_input']['calls'] = '1  50000 10  5000  1  15000'
    pdict['simulation_input'] = {}
    pdict['simulation_input']['normalize_weight'] = 'F'
    pdict['simulation_input']['n_events'] = nbevts
    pdict['simulation_input']['keep_initials'] = 'T'
    pdict['simulation_input']['events_per_file'] = 500000
    # opaque PYTHIA tunes: SUSY adds the IMSS(...) switches, SM adds MSTJ(28)=2
    if model != 'sm':
      pdict['simulation_input']['pythia_parameters'] = "PMAS(25,1)=125; PMAS(25,2)=0.3605E-02; MSTU(22)=20 ;PARJ(21)=0.40000;PARJ(41)=0.11000; PARJ(42)=0.52000; PARJ(81)=0.25000; PARJ(82)=1.90000; MSTJ(11)=3; PARJ(54)=-0.03100; PARJ(55)=-0.00200;PARJ(1)=0.08500; PARJ(3)=0.45000; PARJ(4)=0.02500; PARJ(2)=0.31000; PARJ(11)=0.60000; PARJ(12)=0.40000; PARJ(13)=0.72000;PARJ(14)=0.43000; PARJ(15)=0.08000; PARJ(16)=0.08000; PARJ(17)=0.17000; MSTP(3)=1;IMSS(1)=11; IMSS(21)=71; IMSS(22)=71"
    else:
      pdict['simulation_input']['pythia_parameters'] = "PMAS(25,1)=125; PMAS(25,2)=0.3605E-02; MSTU(22)=20 ; MSTJ(28)=2 ;PARJ(21)=0.40000;PARJ(41)=0.11000; PARJ(42)=0.52000; PARJ(81)=0.25000; PARJ(82)=1.90000; MSTJ(11)=3; PARJ(54)=-0.03100; PARJ(55)=-0.00200;PARJ(1)=0.08500; PARJ(3)=0.45000; PARJ(4)=0.02500; PARJ(2)=0.31000; PARJ(11)=0.60000; PARJ(12)=0.40000; PARJ(13)=0.72000;PARJ(14)=0.43000; PARJ(15)=0.08000; PARJ(16)=0.08000; PARJ(17)=0.17000; MSTP(3)=1"
      pdict['parameter_input'] = {}
      #  pdict['parameter_input']['mmu']=mmu
      #  pdict['parameter_input']['mtau']=mtau
      #  pdict['parameter_input']['mb']=mb
      #  pdict['parameter_input']['mc']=mc
      pdict['parameter_input']['mH'] = 125
    pdict['beam_input_1'] = {}
    pdict['beam_input_1']['particle_name'] = "e1"
    pdict['beam_input_1']['polarization'] = "0.0 0.0"
    pdict['beam_input_1']['USER_spectrum_on'] = 'T'
    # spectrum mode 19 is the 1.4 TeV machine, 11 otherwise
    pdict['beam_input_1']['USER_spectrum_mode'] = 19 if energy == 1400 else 11
    pdict['beam_input_1']['ISR_on'] = 'T'
    pdict['beam_input_1']['EPA_on'] = "F"

    pdict['beam_input_2'] = {}
    pdict['beam_input_2']['particle_name'] = "E1"
    pdict['beam_input_2']['polarization'] = "0.0 0.0"
    pdict['beam_input_2']['USER_spectrum_on'] = 'T'
    pdict['beam_input_2']['ISR_on'] = 'T'
    pdict['beam_input_2']['USER_spectrum_mode'] = 19 if energy == 1400 else 11
    pdict['beam_input_2']['EPA_on'] = 'F'


    whiz.setFullParameterDict(pdict)
    whiz.setOutputFile("testgen.stdhep")
    return whiz


  def getWhizard(self, nbevts):
    """Return a default Standard-Model whizard at 1.4 TeV."""
    return self.getWhizardModel(nbevts, 1400, "sm")

  def getWhizardSUSY(self, nbevts):
    """Return a SUSY whizard at 3 TeV."""
    return self.getWhizardModel(nbevts, 3000, "slsqhh")



  def createWhizardTest(self):
    """Create the test jobs for whizard (one SM and one SUSY job).

    :returns: S_OK with both jobs, or S_ERROR when an application cannot be added
    """
    self.log.notice("Creating jobs for Whizard")
    # Standard Model job
    smJob = self.getJob()
    smApp = self.getWhizard(2)
    result = smJob.append(smApp)
    if not result['OK']:
      self.log.error("Failed adding Whizard:", result['Message'])
      return S_ERROR()
    self.jobList['Whizard1'] = smJob

    # SUSY job
    susyJob = self.getJob()
    susyApp = self.getWhizardSUSY(2)
    result = susyJob.append(susyApp)
    if not result['OK']:
      self.log.error("Failed adding Whizard:", result['Message'])
      return S_ERROR()
    self.jobList['WhizSusy'] = susyJob
    return S_OK((smJob, susyJob))

  def createSlicTest(self):
    """Create the test job for SLIC: (Whizard +) SLIC.

    The input either comes from a Whizard step (testChain) or from a fixed
    LFN (testInputData).

    :returns: S_OK with the job, or S_ERROR when a step cannot be added
    """
    self.log.notice("Creating jobs for SLIC")
    jobslic = self.getJob()
    if self.clip.testChain:
      whslic = self.getWhizard(2)
      res = jobslic.append(whslic)
      if not res["OK"]:
        # BUG FIX: error dicts carry the text under 'Message', not 'Value'
        self.log.error("Failed adding Whizard:", res['Message'])
        return S_ERROR()
    elif self.clip.testInputData:
      jobslic.setInputData("/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep")
    else:
      self.log.error("SLIC does not know where to get its input from")
      return S_ERROR()
    myslic = self.getSLIC()
    if self.clip.testChain:
      myslic.getInputFromApp(whslic)
    else:
      myslic.setNumberOfEvents(2)
    res = jobslic.append(myslic)
    if not res['OK']:
      self.log.error("Failed adding slic: ", res["Message"])
      return S_ERROR()
    self.jobList['Slic1'] = jobslic
    return S_OK(jobslic)


  def createMarlinTest(self , setInputData = False):
    """Create the test job for Marlin: ((Whizard + Mokka +) Overlay +) Marlin.

    :param setInputData: unused, kept for interface compatibility
    :returns: S_OK with the job, or S_ERROR when a step cannot be added
    """
    self.log.notice( "Creating test for Marlin" )
    jobma = self.getJob()
    if self.clip.testChain:
      moma = self.getMokka()
      if not self.clip.testInputData:
        whma = self.getWhizard(2)
        res = jobma.append(whma)
        if not res['OK']:
          self.log.error("Failed adding Whizard:", res['Message'])
          return S_ERROR()
        moma.getInputFromApp(whma)
      else:
        jobma.setInputData("/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep")
        moma.setNumberOfEvents(1)
      res = jobma.append(moma)
      if not res['OK']:
        self.log.error("Failed adding Mokka:", res['Message'])
        return S_ERROR()
    elif self.clip.testInputData:
      jobma.setInputData(self.marlinInputdata)
    else:
      self.log.error("Marlin does not know where to get its input from")
      return S_ERROR()
    if self.runOverlay:
      ov = self.getOverlay(2)
      res = jobma.append(ov)
      if not res["OK"]:
        self.log.error("Failed adding Overlay:", res['Message'])
        # BUG FIX: previously returned the S_ERROR function object (missing
        # parentheses), which callers checking ['OK'] would then crash on
        return S_ERROR()
    ma = self.getMarlin()
    if self.clip.testChain:
      ma.getInputFromApp(moma)
    else:
      ma.setNumberOfEvents(2)

    res = jobma.append(ma)
    if not res['OK']:
      self.log.error("Failed adding Marlin:", res['Message'])
      return S_ERROR()
    self.jobList['Marlin1'] = jobma
    return S_OK(jobma)

  def createLCSimTest(self):
    """Create the test job for LCSIM: ((Whizard + SLIC +) Overlay +) LCSIM.

    :returns: S_OK with the job, or S_ERROR when a step cannot be added
    """
    self.log.notice( "Creating test for LCSIM" )
    joblcsim = self.getJob()
    if self.clip.testChain:
      mysliclcsim = self.getSLIC()

      if not self.clip.testInputData:
        whlcsim = self.getWhizard(2)
        res = joblcsim.append(whlcsim)
        if not res["OK"]:
          # BUG FIX: error dicts carry the text under 'Message', not 'Value'
          self.log.error("Failed adding Whizard:", res['Message'])
          return S_ERROR()
        mysliclcsim.getInputFromApp(whlcsim)
      else:
        joblcsim.setInputData("/ilc/prod/clic/SingleParticles/Muon/50GeV/Muon_50GeV_Fixed_cosTheta0.7.stdhep")
        mysliclcsim.setNumberOfEvents(2)

      res = joblcsim.append(mysliclcsim)
      if not res['OK']:
        self.log.error("Failed adding slic: ", res["Message"])
        return S_ERROR()
    elif self.clip.testInputData:
      joblcsim.setInputData("/ilc/user/s/sailer/testFiles/clic_prod_sid_h_nunu_sim.slcio")
    else:
      self.log.error("LCSIM does not know where to get its input from")
      return S_ERROR()
    if self.runOverlay:
      ovlcsim = self.getOverlay(2)
      res = joblcsim.append(ovlcsim)
      if not res["OK"]:
        self.log.error("Failed adding Overlay:", res['Message'])
        return S_ERROR()
    mylcsim = self.getLCSIM(True)
    if self.clip.testChain:
      mylcsim.getInputFromApp(mysliclcsim)
    else:
      mylcsim.setNumberOfEvents(2)
    res = joblcsim.append(mylcsim)
    if not res['OK']:
      self.log.error("Failed adding LCSIM: ", res["Message"])
      return S_ERROR()
    self.jobList['lcsim1'] = joblcsim

    return S_OK(joblcsim)

  def createSlicPandoraTest(self):
    """Create the test job for SLICPandora.

    Chain: ((Whizard + SLIC) + (Overlay +) LCSIM +) SLICPandora + LCSIM.
    Requires testChain; the simulation input either comes from Whizard+SLIC
    or from a fixed LFN.

    :returns: S_OK with the job, or S_ERROR when a step cannot be added
    """
    self.log.notice("Creating tests for SLICPandora")
    joblcsimov = self.getJob()
    if not self.clip.testChain:
      self.log.error("SLICPandora does not know where to get its input from")
      return S_ERROR()
    mylcsimov = self.getLCSIM(True)
    if not self.clip.testInputData:
      whlcsimov = self.getWhizard(2)
      res = joblcsimov.append(whlcsimov)
      if not res["OK"]:
        # BUG FIX: error dicts carry the text under 'Message', not 'Value'
        self.log.error("Failed adding Whizard:", res['Message'])
        return S_ERROR()
      mysliclcsimov = self.getSLIC()
      mysliclcsimov.getInputFromApp(whlcsimov)
      res = joblcsimov.append(mysliclcsimov)
      if not res['OK']:
        self.log.error("Failed adding slic: ", res["Message"])
        return S_ERROR()
      mylcsimov.getInputFromApp(mysliclcsimov)
    else:
      joblcsimov.setInputData("/ilc/user/s/sailer/testFiles/clic_prod_sid_h_nunu_sim.slcio")
      mylcsimov.setNumberOfEvents(2)

    if self.runOverlay:
      ovslicp = self.getOverlay(2)
      res = joblcsimov.append(ovslicp)
      if not res["OK"]:
        self.log.error("Failed adding Overlay:", res['Message'])
        return S_ERROR()

    res = joblcsimov.append(mylcsimov)
    if not res['OK']:
      self.log.error("Failed adding LCSIM: ", res["Message"])
      return S_ERROR()

    myslicpov = self.getSLICPandora()
    myslicpov.getInputFromApp(mylcsimov)
    res = joblcsimov.append(myslicpov)
    if not res['OK']:
      self.log.error("Failed adding SLICPandora: ", res["Message"])
      return S_ERROR()
    # post-Pandora LCSIM pass producing the REC/DST files
    mylcsimovp = self.getLCSIM(False)
    mylcsimovp.getInputFromApp(myslicpov)
    res = joblcsimov.append(mylcsimovp)
    if not res['OK']:
      self.log.error("Failed adding LCSIM: ", res["Message"])
      return S_ERROR()
    self.jobList['lcsimov1'] = joblcsimov
    return S_OK(joblcsimov)

  def createUtilityTests(self):
    """Create the test jobs for the utility applications.

    Covers stdhep split, stdhep cut, LCIO split and LCIO concatenation.

    :returns: S_OK with all four jobs, or S_ERROR when a step cannot be added
    """
    self.log.notice("Creating tests for utility applications")
    # Whizard + stdhep split
    jobwsplit = self.getJob()
    whsplit = self.getWhizard(10)
    result = jobwsplit.append(whsplit)
    if not result['OK']:
      self.log.error("Failed adding Whizard:", result['Message'])
      return S_ERROR()
    mystdsplit = self.getStdhepSplit()
    mystdsplit.getInputFromApp(whsplit)
    result = jobwsplit.append(mystdsplit)
    if not result['OK']:
      self.log.error("Failed adding StdHepSplit:", result['Message'])
      return S_ERROR()
    self.jobList['whizSplit'] = jobwsplit

    # Whizard + stdhep cut
    jobwcut = self.getJob()
    whcut = self.getWhizard(100)
    result = jobwcut.append(whcut)
    if not result['OK']:
      self.log.error("Failed adding Whizard:", result['Message'])
      return S_ERROR()
    mystdcut = self.getStdhepcut(100)
    mystdcut.getInputFromApp(whcut)
    result = jobwcut.append(mystdcut)
    if not result['OK']:
      self.log.error("Failed adding StdHepCut:", result['Message'])
      return S_ERROR()
    self.jobList['whizCut'] = jobwcut

    # LCIO split on a fixed input file
    joblciosplit = self.getJob()
    joblciosplit.setInputData("/ilc/user/s/sailer/testFiles/prod_clic_ild_e2e2_o_sim_2214_26.slcio")
    mylciosplit = self.getLCIOSplit(100)
    result = joblciosplit.append(mylciosplit)
    if not result['OK']:
      self.log.error("Failed adding SLCIOSplit:", result['Message'])
      return S_ERROR()
    self.jobList['lcioSplit'] = joblciosplit

    # LCIO concatenation of two fixed input files
    jobconcat = self.getJob()
    jobconcat.setInputData(["/ilc/prod/clic/1.4tev/aa_qqll_all/ILD/DST/00004275/002/aa_qqll_all_dst_4275_2104.slcio",
                            "/ilc/prod/clic/1.4tev/aa_qqll_all/ILD/DST/00004275/002/aa_qqll_all_dst_4275_2105.slcio"])
    myconcat = self.getLCIOConcat()
    result = jobconcat.append(myconcat)
    if not result['OK']:
      self.log.error("Failed adding SLCIOConcatenate:", result['Message'])
      return S_ERROR()
    self.jobList['concat'] = jobconcat
    return S_OK((jobconcat, joblciosplit,jobwcut,jobwsplit))

  def createFccSwTest(self):
    """Create the test job for FccSw.

    :returns: S_OK with the job, or S_ERROR if the application cannot be appended
    """
    self.log.notice("Creating jobs for FccSW")
    fccswJob = self.getJob()
    fccswApp = self.getFccSw()
    result = fccswJob.append(fccswApp)
    if not result['OK']:
      self.log.error("Failed adding FccSw:", result['Message'])
      return S_ERROR()
    self.jobList['FccSw1'] = fccswJob
    return S_OK(fccswJob)

  def createFccAnalysisTest(self):
    """Create the test job for FccAnalysis.

    :returns: S_OK with the job, or S_ERROR if the application cannot be appended
    """
    self.log.notice("Creating jobs for FccAnalysis")
    analysisJob = self.getJob()
    analysisApp = self.getFccAnalysis()
    result = analysisJob.append(analysisApp)
    if not result['OK']:
      self.log.error("Failed adding FccAnalysis:", result['Message'])
      return S_ERROR()
    self.jobList['FccAnalysis1'] = analysisJob
    return S_OK(analysisJob)

  def runJobLocally(self, job, jobName="unknown"):
    """Run *job* locally in a freshly created temporary directory.

    Stages the auxiliary input files the given test job needs into the temp
    dir, runs the job there, restores the working directory, and cleans up
    unless the CLI asked to keep the work area.

    :param job: the job object to run
    :param str jobName: name used to decide which extra files to stage
    :returns: S_OK, or S_ERROR when the local setup is unusable or the job fails
    """
    self.log.notice("I will run the tests locally.")
    from DIRAC import gConfig
    localarea = gConfig.getValue("/LocalSite/LocalArea", "")
    if not localarea:
      self.log.error("You need to have /LocalSite/LocalArea defined in your dirac.cfg")
      return S_ERROR()
    # Running from AFS would install the software into AFS, which is unwanted.
    if localarea.startswith("/afs"):
      self.log.error("Don't set /LocalSite/LocalArea set to /afs/... as you'll get to install there")
      self.log.error("check ${HOME}/.dirac.cfg and ${DIRAC}/etc/dirac.cfg")
      return S_ERROR()
    self.log.notice("To run locally, I will create a temp directory here.")
    curdir = os.getcwd()
    tmpdir = tempfile.mkdtemp("", dir = "./")
    os.chdir(tmpdir)
    try:
      # Jobs that need separate input files
      filesForJob = {
        'root' :  [ 'input2.root', 'input.root' ],
        'ddsim' : [ 'FCalTB.tar.gz', 'Muon_50GeV_Fixed_cosTheta0.7.stdhep' ]
      }
      testfiledir = 'Testfiles'
      if "root" in jobName.lower():
        # the root tests execute a script and a macro generated on the fly
        with open("root.sh", "w") as rScript:
          rScript.write( "echo $ROOTSYS" )
        with open("func.C", "w") as rMacro:
          rMacro.write( '''
                        void func( TString string ) {
                          std::cout << string << std::endl;
                          TFile* file = TFile::Open(string);
                        file->ls();
                        }
                        ''' )
      # stage the generic root input files once (the original code copied
      # them once per special job type, i.e. twice)
      for fileName in ['input.root', 'input2.root']:
        shutil.copy( os.path.join( curdir, testfiledir, fileName ), os.getcwd() )
        print(os.path.join(curdir, "input2.root"), os.getcwd())
      # stage the files specific to this job type, if any
      for specialName, neededFiles in filesForJob.items():
        if specialName in jobName.lower():
          for fileName in neededFiles:
            shutil.copy( os.path.join( curdir, testfiledir, fileName ), os.getcwd() )
      resJob = self.runJob(job, jobName)
    finally:
      # always restore the working directory (was previously done twice and
      # skipped on exceptions)
      os.chdir(curdir)
    if not resJob['OK']:
      return resJob
    if not self.clip.nocleanup:
      cleanup(tmpdir)
    return S_OK()

  def run(self):
    """submit and run all the tests in jobList

    Depending on the CLI submitMode each job is either run locally or
    submitted.

    :returns: the result of the *last* job processed only -- earlier
      failures are overwritten (S_ERROR if jobList is empty)
    """
    res = S_ERROR()
    for name, finjob in self.jobList.iteritems():
      if self.clip.submitMode == 'local':
        res = self.runJobLocally(finjob, name)
      else:
        res = self.runJob(finjob, name)
    return res

  def runJob(self, finjob, name):
    """Run or submit a single job and report the outcome.

    :param finjob: the job to submit
    :param str name: display name used in the log banner
    :returns: S_OK on success, S_ERROR otherwise
    """
    self.log.notice("############################################################")
    self.log.notice(" Running or submitting job: %s " % name)
    self.log.notice("\n\n")
    result = finjob.submit(self.diracInstance, mode=self.clip.submitMode)
    if not result["OK"]:
      self.log.error("Failed job:", result['Message'])
      return S_ERROR()
    return S_OK()


  def checkForTests(self):
    """Create the jobs for every test enabled on the command line.

    Test creators run in a fixed order and the first failure aborts.

    :returns: S_OK when all requested test jobs were created, S_ERROR otherwise
    """
    # (enabled flag, list of job creators to run for that flag)
    testPlan = [
        (self.clip.testMokka, [self.createMokkaTest]),
        (self.clip.testWhizard, [self.createWhizardTest]),
        (self.clip.testSlic, [self.createSlicTest]),
        (self.clip.testMarlin, [self.createMarlinTest]),
        (self.clip.testLCSIM, [self.createLCSimTest]),
        (self.clip.testSlicPandora, [self.createSlicPandoraTest]),
        (self.clip.testUtilities, [self.createUtilityTests]),
        (self.clip.testRoot, [self.createRootScriptTest,
                              self.createRootHaddTest,
                              self.createRootMacroTest]),
        (self.clip.testFccSw, [self.createFccSwTest]),
        (self.clip.testFccAnalysis, [self.createFccAnalysisTest]),
    ]
    for enabled, creators in testPlan:
      if not enabled:
        continue
      for creator in creators:
        if not creator()['OK']:
          return S_ERROR()
    return S_OK()
Exemplo n.º 37
0
class UserJob(Job):
  """ User job class. To be used by users, not for production.
  """
  def __init__(self, script = None):
    super(UserJob, self).__init__( script )
    self.type = 'User'
    # DiracILC instance used for submission; set in submit()
    self.diracinstance = None

  def submit(self, diracinstance = None, mode = "wms"):
    """ Submit call: when your job is defined, and all applications are set, you need to call this to
    add the job to DIRAC.

    :param diracinstance: optional DiracILC instance; a new one is created when omitted
    :param str mode: submission mode, e.g. "wms" or "local"
    :returns: result of the DiracILC submit call, or the S_ERROR from workflow assembly
    """
    res = self._addToWorkflow()
    if not res['OK']:
      return res
    self.oktosubmit = True
    if not diracinstance:
      self.diracinstance = DiracILC()
    else:
      self.diracinstance = diracinstance
    return self.diracinstance.submit(self, mode)

  #############################################################################
  def setInputData( self, lfns ):
    """Helper function.

       Specify input data by Logical File Name (LFN).

       Example usage:

       >>> job = Job()
       >>> job.setInputData(['/ilc/prod/whizard/processlist.whiz'])

       @param lfns: Logical File Names
       @type lfns: Single LFN string or list of LFNs
    """
    if type( lfns ) == list and len( lfns ):
      # normalize away any 'LFN:' prefix before re-adding it uniformly
      for i in xrange( len( lfns ) ):
        lfns[i] = lfns[i].replace( 'LFN:', '' )
      inputData = map( lambda x: 'LFN:' + x, lfns )
      inputDataStr = string.join( inputData, ';' )
      description = 'List of input data specified by LFNs'
      self._addParameter( self.workflow, 'InputData', 'JDL', inputDataStr, description )
    elif type( lfns ) == type( ' ' ):  #single LFN
      description = 'Input data specified by LFN'
      self._addParameter( self.workflow, 'InputData', 'JDL', lfns, description )
    else:
      kwargs = {'lfns':lfns}
      return self._reportError( 'Expected lfn string or list of lfns for input data', **kwargs )

    return S_OK()

  def setInputSandbox(self, flist):
    """ Mostly inherited from DIRAC.Job

    :param flist: single file name or list of file names for the input sandbox
    :returns: S_OK, or an error report for an unsupported type
    """
    if type(flist) == type(""):
      flist = [flist]
    if not type(flist) == type([]) :
      return self._reportError("File passed must be either single file or list of files.")
    self.inputsandbox.extend(flist)
    return S_OK()

  #############################################################################
  def setOutputData(self, lfns, OutputPath = '', OutputSE = ['']):
    """Helper function, used in preference to Job.setOutputData() for ILC.

       For specifying output data to be registered in Grid storage.  If a list
       of OutputSEs are specified the job wrapper will try each in turn until
       successful.

       Example usage:

       >>> job = Job()
       >>> job.setOutputData(['Ntuple.root'])

       @param lfns: Output data file or files
       @type lfns: Single string or list of strings ['','']
       @param OutputSE: Optional parameter to specify the Storage
       @param OutputPath: Optional parameter to specify the Path in the Storage, postpented to /ilc/user/u/username/
       Element to store data or files, e.g. CERN-tape
       @type OutputSE: string or list
       @type OutputPath: string
    """
    kwargs = {'lfns' : lfns, 'OutputSE' : OutputSE, 'OutputPath' : OutputPath}
    if type(lfns) == list and len(lfns):
      outputDataStr = string.join(lfns, ';')
      description = 'List of output data files'
      self._addParameter(self.workflow, 'UserOutputData', 'JDL', outputDataStr, description)
    elif type(lfns) == type(" "):
      description = 'Output data file'
      self._addParameter(self.workflow, 'UserOutputData', 'JDL', lfns, description)
    else:
      return self._reportError('Expected file name string or list of file names for output data', **kwargs)

    if OutputSE:
      description = 'User specified Output SE'
      if type(OutputSE) in types.StringTypes:
        OutputSE = [OutputSE]
      elif type(OutputSE) != types.ListType:
        return self._reportError('Expected string or list for OutputSE', **kwargs)
      OutputSE = ';'.join(OutputSE)
      self._addParameter(self.workflow, 'UserOutputSE', 'JDL', OutputSE, description)

    if OutputPath:
      description = 'User specified Output Path'
      if not type(OutputPath) in types.StringTypes:
        return self._reportError('Expected string for OutputPath', **kwargs)
      # Remove leading "/" that might cause problems with os.path.join.
      # BUG FIX: lstrip also copes with a path consisting only of slashes,
      # which made the old character-by-character while loop raise IndexError.
      OutputPath = OutputPath.lstrip('/')
      if OutputPath.count("ilc/user"):
        return self._reportError('Output path contains /ilc/user/ which is not what you want', **kwargs)
      self._addParameter(self.workflow, 'UserOutputPath', 'JDL', OutputPath, description)

    return S_OK()

  #############################################################################
  def setOutputSandbox( self, files ):
    """Helper function.

       Specify output sandbox files.  If specified files are over 10MB, these
       may be uploaded to Grid storage with a notification returned in the
       output sandbox.

       Example usage:

       >>> job = Job()
       >>> job.setOutputSandbox(['*.log','myfile.slcio'])

       @param files: Output sandbox files
       @type files: Single string or list of strings ['','']

    """
    if type( files ) == list and len( files ):
      fileList = string.join( files, ";" )
      description = 'Output sandbox file list'
      self._addParameter( self.workflow, 'OutputSandbox', 'JDL', fileList, description )
    elif type( files ) == type( " " ):
      description = 'Output sandbox file'
      self._addParameter( self.workflow, 'OutputSandbox', 'JDL', files, description )
    else:
      kwargs = {'files' : files}
      return self._reportError( 'Expected file string or list of files for output sandbox contents', **kwargs )

    return S_OK()

  def setILDConfig(self,Version):
    """ Define the Configuration package to obtain

    :param str Version: version string of the ILDConfig package
    :returns: S_OK
    """
    appName = 'ILDConfig'
    self._addSoftware(appName.lower(), Version)

    self._addParameter( self.workflow, 'ILDConfigPackage', 'JDL', appName+Version, 'ILDConfig package' )
    return S_OK()
Exemplo n.º 38
0
  def setRepo(self, optionVal):
    """Callback for the -r/--repository switch: store the repository file path."""
    self.repo = optionVal
    return S_OK()
  def registerSwitches(self):
    """Register the command-line switches and the usage message with Script."""
    Script.registerSwitch('r:', 'repository=', 'Path to repository file', self.setRepo)
    # the usage header is taken from the second line of the module docstring
    Script.setUsageMessage('\n'.join( [ __doc__.split( '\n' )[1],
                                        '\nUsage:',
                                        '  %s [option|cfgfile] ...\n' % Script.scriptName ] ) )
    
# BUG FIX: the guard compared against "__main" (missing trailing underscores),
# so this script body never executed when run directly.
if __name__ == "__main__":
  cliparams = Params()
  cliparams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )
  from DIRAC import gLogger

  repoLocation =  cliparams.repo
  if not repoLocation:
    Script.showHelp()
    dexit(2)
  from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
  dirac = DiracILC(True, repoLocation)

  # refresh the repository state, then print all output-data LFNs it knows
  dirac.monitorRepository(False)
  lfns = []
  lfns = dirac.retrieveRepositoryOutputDataLFNs()
  gLogger.notice("lfnlist=[")
  for lfn in lfns :
    gLogger.notice('"LFN:%s",' % lfn)
  gLogger.notice("]")
  dexit(0)
Exemplo n.º 39
0
# Submission script: runs Marlin over 300 pre-staged slcio files, one job each.
# NOTE(review): this fragment appears truncated by the scrape -- `marl` is
# configured but never appended to the job and the job is never submitted;
# presumably the original continued after this point.
from DIRAC.Core.Base import Script
Script.parseCommandLine()

from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
import time

dIlc = DiracILC()
lcoutput = []
for i in range(1, 301):
    # one output root file per input slcio file
    lcoutput = "aa_%d.root" % i
    job = UserJob()
    job.setDestination('LCG.CERN.ch')
    job.setInputSandbox([
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/PandoraLikelihoodData9EBin_CLIC_ILD.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/PandoraSettingsFast.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/MarlinRecoRootFiles/steering.xml",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/clic_ild_cdr.gear",
        "LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/MarlinRecoRootFiles/lib.tar.gz"
    ])
    job.setInputData(
        "LFN:/ilc/user/k/kacarevic/hgamgam/Marlin/newPandora/aa/aa_%d.slcio" %
        i)
    job.setOutputSandbox(
        ["*.log", "*.sh", "*.py", "*.out", "*.xml", "*.steer "])
    job.setJobGroup("myRoot")
    job.setName("root_aa_%d" % i)
    marl = Marlin()
    marl.setVersion('ILCSoft-2016-09-27_gcc48')
Exemplo n.º 40
0
class UserJob(Job):
    """User job class. To be used by users, not for production."""
    def __init__(self, script=None):
        """Initialise the user job.

        :param script: optional script, forwarded to the base :class:`Job`
        """
        super(UserJob, self).__init__(script)
        self.type = 'User'
        self.diracinstance = None  # DiracILC instance, set in submit()
        self.usergroup = ['ilc_user', 'calice_user']  # proxy groups allowed to submit
        self.proxyinfo = getProxyInfo()  # credentials, checked in submit()

        ########## SPLITTING STUFF: ATTRIBUTES ##########
        self._data = []  # input data LFNs used by the "byData" splitting
        self.splittingOption = None  # None, "byEvents" or "byData"
        self._switch = {}  # splittingOption -> split method, filled in _split()
        self.numberOfJobs = None
        self.totalNumberOfEvents = None
        self.eventsPerJob = None
        self.numberOfFilesPerJob = 1
    def submit(self, diracinstance=None, mode="wms"):
        """ Submit call: when your job is defined, and all applications are set, you need to call this to
    add the job to DIRAC.

    :param diracinstance: DiracILC instance
    :type diracinstance: ~ILCDIRAC.Interfaces.API.DiracILC.DiracILC
    :param str mode: "wms" (default), "agent", or "local"

    .. note ::
      The *local* mode means that the job will be run on the submission machine. Use this mode for testing of submission scripts

    """
        # Expand the job into several sub-jobs if splitting was requested.
        if self.splittingOption:
            result = self._split()
            if 'OK' in result and not result['OK']:
                return result

        #Check the credentials. If no proxy or not user proxy, return an error
        if not self.proxyinfo['OK']:
            self.log.error(
                "Not allowed to submit a job, you need a %s proxy." %
                self.usergroup)
            return self._reportError(
                "Not allowed to submit a job, you need a %s proxy." %
                self.usergroup, self.__class__.__name__)
        if 'group' in self.proxyinfo['Value']:
            group = self.proxyinfo['Value']['group']
            if group not in self.usergroup:
                self.log.error(
                    "Not allowed to submit a job, you need a %s proxy." %
                    self.usergroup)
                # NOTE(review): this message differs from the logged one above
                # ("submit job" vs "submit a job") -- presumably unintentional.
                return self._reportError(
                    "Not allowed to submit job, you need a %s proxy." %
                    self.usergroup, self.__class__.__name__)
        else:
            self.log.error(
                "Could not determine group, you do not have the right proxy.")
            return self._reportError(
                "Could not determine group, you do not have the right proxy.")

        res = self._addToWorkflow()
        if not res['OK']:
            return res
        self.oktosubmit = True
        # Use the caller-supplied DiracILC instance, or create a default one.
        if not diracinstance:
            self.diracinstance = DiracILC()
        else:
            self.diracinstance = diracinstance
        return self.diracinstance.submit(self, mode)

    #############################################################################
    def setInputData(self, lfns):
        """Specify input data by Logical File Name (LFN).

    Input files given here are automatically staged if necessary.

    Example usage:

    >>> job = UserJob()
    >>> job.setInputData(['/ilc/prod/whizard/processlist.whiz'])

    :param lfns: Logical File Names
    :type lfns: Single LFN string or list of LFNs
    """
        if isinstance(lfns, list) and lfns:
            # Strip any 'LFN:' prefix in place: the plain path is required for
            # the metadata resolution in the InputFilesUtilities.
            for position, lfn in enumerate(lfns):
                lfns[position] = lfn.replace('LFN:', '')
            self._addParameter(self.workflow, 'InputData', 'JDL',
                               ';'.join(lfns),
                               'List of input data specified by LFNs')
            return S_OK()
        if isinstance(lfns, basestring):  # a single LFN
            self._addParameter(self.workflow, 'InputData', 'JDL', lfns,
                               'Input data specified by LFN')
            return S_OK()
        return self._reportError(
            'Expected lfn string or list of lfns for input data',
            **{'lfns': lfns})

    def setInputSandbox(self, flist):
        """Add files to the input sandbox; they can be on the local machine or on the grid.

    >>> job = UserJob()
    >>> job.setInputSandbox( ['LFN:/ilc/user/u/username/libraries.tar.gz',
    >>>                       'mySteeringFile.xml'] )

    :param flist: Files for the inputsandbox
    :type flist: `python:list` or `str`
    """
        # A single file name is promoted to a one-element list.
        sandboxFiles = [flist] if isinstance(flist, basestring) else flist
        if not isinstance(sandboxFiles, list):
            return self._reportError(
                "File passed must be either single file or list of files.")
        self.inputsandbox.extend(sandboxFiles)
        return S_OK()

    #############################################################################
    def setOutputData(self, lfns, OutputPath='', OutputSE=''):
        """For specifying output data to be registered in Grid storage.  If a list
    of OutputSEs are specified the job wrapper will try each in turn until
    successful.

    Example usage:

    >>> job = UserJob()
    >>> job.setOutputData(['Ntuple.root'])

    :param lfns: Output data file or files
    :type lfns: Single `str` or `python:list` of strings ['','']
    :param str OutputPath: Optional parameter to specify the Path in the Storage, postpended to /ilc/user/u/username/
    :param OutputSE: Optional parameter to specify the Storage Element to store data or files, e.g. CERN-SRM
    :type OutputSE: `python:list` or `str`
    """
        kwargs = {'lfns': lfns, 'OutputSE': OutputSE, 'OutputPath': OutputPath}
        if isinstance(lfns, list) and lfns:
            outputDataStr = ';'.join(lfns)
            description = 'List of output data files'
            self._addParameter(self.workflow, 'UserOutputData', 'JDL',
                               outputDataStr, description)
        elif isinstance(lfns, basestring):
            description = 'Output data file'
            self._addParameter(self.workflow, 'UserOutputData', 'JDL', lfns,
                               description)
        else:
            return self._reportError(
                'Expected file name string or list of file names for output data',
                **kwargs)

        if OutputSE:
            description = 'User specified Output SE'
            if isinstance(OutputSE, basestring):
                OutputSE = [OutputSE]
            elif not isinstance(OutputSE, list):
                return self._reportError(
                    'Expected string or list for OutputSE', **kwargs)
            OutputSE = ';'.join(OutputSE)
            self._addParameter(self.workflow, 'UserOutputSE', 'JDL', OutputSE,
                               description)

        if OutputPath:
            description = 'User specified Output Path'
            if not isinstance(OutputPath, basestring):
                return self._reportError('Expected string for OutputPath',
                                         **kwargs)
            # Remove leading "/" that might cause problems with os.path.join.
            # BUGFIX: lstrip replaces the old `while OutputPath[0] == '/'` loop,
            # which raised IndexError for a path consisting only of slashes.
            OutputPath = OutputPath.lstrip('/')
            if "ilc/user" in OutputPath:
                return self._reportError(
                    'Output path contains /ilc/user/ which is not what you want',
                    **kwargs)
            self._addParameter(self.workflow, 'UserOutputPath', 'JDL',
                               OutputPath, description)

        return S_OK()

    #############################################################################
    def setOutputSandbox(self, files):
        """Specify output sandbox files.  If specified files are over 10MB, these
    may be uploaded to Grid storage with a notification returned in the
    output sandbox.

    .. Note ::
       Sandbox files are removed after 2 weeks.

    Example usage:

    >>> job = UserJob()
    >>> job.setOutputSandbox(['*.log','*.sh', 'myfile.txt'])

    Use the output sandbox only for small files; store larger files on the
    grid and download them later if necessary. See :func:`setOutputData`

    :param files: Output sandbox files
    :type files: Single `str` or `python:list` of strings ['','']

    """
        # A single name and a non-empty list are handled separately; anything
        # else is rejected.
        if isinstance(files, basestring):
            self._addParameter(self.workflow, 'OutputSandbox', 'JDL', files,
                               'Output sandbox file')
            return S_OK()
        if isinstance(files, list) and files:
            self._addParameter(self.workflow, 'OutputSandbox', 'JDL',
                               ";".join(files), 'Output sandbox file list')
            return S_OK()
        return self._reportError(
            'Expected file string or list of files for output sandbox contents',
            **{'files': files})

    def setILDConfig(self, version):
        """Request the ILDConfig software package for this job.

    :param str version: version of the ILDConfig package to obtain
    """
        packageName = 'ILDConfig'
        self._addSoftware(packageName.lower(), version)
        # Record the package on the workflow so the job wrapper can fetch it.
        self._addParameter(self.workflow, 'ILDConfigPackage', 'JDL',
                           packageName + version, 'ILDConfig package')
        return S_OK()

    def setCLICConfig(self, version):
        """Request the CLIC Configuration package; steering files from the CLIC
    Configuration folder are copied to the working directory.

    :param str version: version string, e.g.: 'ILCSoft-2017-07-27'
    """
        packageName = 'ClicConfig'
        self._addSoftware(packageName.lower(), version)
        # Record the package on the workflow so the job wrapper can fetch it.
        self._addParameter(self.workflow, 'ClicConfigPackage', 'JDL',
                           packageName + version, 'CLIC Config package')
        return S_OK()

    ##############################  SPLITTING STUFF: METHODS ################################
    # Some methods have been added :
    #
    # * _atomicSubmission
    # * _checkJobConsistency
    # * setSplitEvents
    # * setSplitInputData
    # * setSplitDoNotAlterOutputFilename
    # * _split
    # * _splitByData
    # * _splitByEvents
    # * _toInt
    #
    # Given the type of splitting (Events or Data), these methods compute
    # the right parameters of the method 'Job.setParameterSequence()'
    ##########################################################################################
    def setSplitEvents(self,
                       eventsPerJob=None,
                       numberOfJobs=None,
                       totalNumberOfEvents=None):
        """Configure splitting of the job over events.

    Exactly two of the parameters should be set.

    Example usage:

    >>> job = UserJob()
    >>> job.setSplitEvents( numberOfJobs=42, totalNumberOfEvents=126 )

    :param int eventsPerJob: The events processed by a single job
    :param int numberOfJobs: The number of jobs
    :param int totalNumberOfEvents: The total number of events processed by all jobs

    """
        self.eventsPerJob = eventsPerJob
        self.numberOfJobs = numberOfJobs
        self.totalNumberOfEvents = totalNumberOfEvents

        # Placeholder; the actual per-job value is filled in by the splitting.
        self._addParameter(self.workflow, 'NbOfEvts', 'JDL', -1,
                           'Number of Events')

        self.splittingOption = "byEvents"

    def setSplitInputData(self, lfns, numberOfFilesPerJob=1):
        """Configure splitting of the job over input data.

    Example usage:

    >>> job = UserJob()
    >>> job.setSplitInputData( listOfLFNs )

    :param lfns: Logical File Names
    :type lfns: list of LFNs
    :param int numberOfFilesPerJob: The number of input data processed by a single job

    """
        # A single LFN is promoted to a one-element list.
        self._data = [lfns] if not isinstance(lfns, list) else lfns
        self.numberOfFilesPerJob = numberOfFilesPerJob
        self.splittingOption = "byData"

    def setSplitDoNotAlterOutputFilename(self, value=True):
        """if this option is set the output data lfns will _not_ include the JobIndex

    :param bool value: when *True*, keep the OutputData LFNs untouched. When
        *False*, the JobIndex is appended at the end of the LFN before the
        extension, or replaces '%n' in the file name.
        See :func:`Core.Utilities.Splitting.addJobIndexToFilename`
    """
        self._addParameter(self.workflow, 'DoNotAlterOutputData', 'JDL', value,
                           'Do Not Change Output Data')

    def _split(self):
        """checks the consistency of the job and call the right split method.

    :return: The success or the failure of the consistency checking
    :rtype: DIRAC.S_OK, DIRAC.S_ERROR

    """

        # Normalise the (possibly string) arguments; both become False on bad input.
        self.eventsPerJob = self._toInt(self.eventsPerJob)
        self.numberOfJobs = self._toInt(self.numberOfJobs)

        if self.numberOfJobs is False or self.eventsPerJob is False:
            return self._reportError("Splitting: Invalid values for splitting")

        # FIXME: move somewhere more prominent
        # Dispatch table: splittingOption -> split implementation.
        self._switch = {
            "byEvents": self._splitByEvents,
            "byData": self._splitByData,
            None: self._atomicSubmission,
        }

        self.log.info("Job splitting...")

        if not self._checkJobConsistency():
            errorMessage = "Job._checkJobConsistency() failed"
            self.log.error(errorMessage)
            return self._reportError(errorMessage)

        sequence = self._switch[self.splittingOption]()

        # Split methods return False on failure, or a 3-element sequence.
        if not sequence:
            errorMessage = "Job._splitBySomething() failed"
            self.log.error(errorMessage)
            return self._reportError(errorMessage)

        sequenceType, sequenceList, addToWorkflow = sequence[0], sequence[
            1], sequence[2]

        if sequenceType != "Atomic":
            self.setParameterSequence(sequenceType, sequenceList,
                                      addToWorkflow)
            # Give every sub-job its own JobIndex, used for output file naming.
            self.setParameterSequence('JobIndexList',
                                      range(len(sequenceList)),
                                      addToWorkflow='JobIndex')
            self._addParameter(self.workflow, 'JobIndex', 'int', 0, 'JobIndex')

        self.log.info("Job splitting successful")

        return S_OK()

    #############################################################################
    def _atomicSubmission(self):
        """Fallback when no splitting was requested; the returned values are not
    valid parameters for setParameterSequence().

    :return: the "Atomic" marker with empty sequence values
    :rtype: tuple of (str, list, bool)
    """
        self.log.verbose(
            "Job splitting: No splitting to apply, 'atomic submission' will be used"
        )
        return "Atomic", [], False

    #############################################################################
    def _checkJobConsistency(self):
        """checks if Job parameters are valid.

    :return: The success or the failure of the consistency checking
    :rtype: bool

    :Example:

    >>> self._checkJobConsistency()

    """

        self.log.info("Job consistency: _checkJobConsistency()...")

        if self.splittingOption not in self._switch:
            # BUGFIX: the valid keys include None, which ",".join() cannot
            # handle directly -- the old join(self._switch.keys()) raised
            # TypeError instead of producing the error message.
            splitOptions = ",".join(str(option) for option in self._switch)
            errorMessage = "checkJobConsistency failed: Bad split value: possible values are %s" % splitOptions
            self.log.error(errorMessage)
            return False

        # All applications should have the same number of events;
        # we take the first application's value as the reference.
        sameNumberOfEvents = next(iter(self.applicationlist)).numberOfEvents

        if not all(app.numberOfEvents == sameNumberOfEvents
                   for app in self.applicationlist):
            self.log.warn(
                "Job: Applications should all have the same number of events")

        if (self.totalNumberOfEvents == -1
                or sameNumberOfEvents == -1) and not self._data:
            self.log.warn(
                "Job: Number of events is -1 without input data. Was that intentional?"
            )

        self.log.info("job._checkJobConsistency successful")

        return True

    #############################################################################
    def _splitByData(self):
        """a job is submitted per chunk of input data.

    :return: parameter name and parameter values for setParameterSequence()
    :rtype: tuple of (str, list, bool/str)

    """

        # reset split attribute to avoid infinite loop
        self.splittingOption = None

        self.log.info("Job splitting: Splitting 'byData' method...")

        # Data must have been provided via setInputData()/setSplitInputData().
        if not self._data:
            self.log.error("Job splitting: missing input data")
            return False

        if self.numberOfFilesPerJob > len(self._data):
            self.log.error(
                "Job splitting: 'numberOfFilesPerJob' must be less/equal than the number of input data"
            )
            return False

        # Each chunk of files becomes the input data of one sub-job.
        self._data = breakListIntoChunks(self._data, self.numberOfFilesPerJob)

        self.log.info("Job splitting: submission consists of %d job(s)" %
                      len(self._data))

        return ["InputData", self._data, 'ParametricInputData']

    #############################################################################
    def _splitByEvents(self):
        """a job is submitted per subset of events.

    :return: parameter name and parameter values for setParameterSequence()
    :rtype: tuple of (str, list, bool/str)

    """

        # reset split attribute to avoid infinite loop
        self.splittingOption = None

        self.log.info("Job splitting: splitting 'byEvents' method...")

        if self.eventsPerJob and self.numberOfJobs:
            # 1st case: (numberOfJobs=3, eventsPerJob=10)
            # trivial case => each job (total of 3) run applications of 10 events each
            self.log.debug("Job splitting: events per job and number of jobs")

            mapEventJob = [self.eventsPerJob] * self.numberOfJobs

        elif self.eventsPerJob and self.totalNumberOfEvents:
            # 2nd case: (split="byEvents", eventsPerJob=10, totalNumberOfEvents=10)
            # Given the number of events per job and total of number of event we want,
            # we can compute the unknown which is the number of jobs.

            self.log.debug(
                "Job splitting: Events per job and total number of events")

            if self.eventsPerJob > self.totalNumberOfEvents:
                self.log.error(
                    "Job splitting: The number of events per job has to be lower than or equal to the total number of events"
                )
                return False

            # NOTE: Python 2 integer division is intended here.
            numberOfJobsIntDiv = self.totalNumberOfEvents / self.eventsPerJob
            numberOfJobsRest = self.totalNumberOfEvents % self.eventsPerJob

            mapEventJob = [self.eventsPerJob] * numberOfJobsIntDiv

            # One extra, smaller job handles the remainder, if any.
            mapEventJob += [numberOfJobsRest] if numberOfJobsRest != 0 else []

        else:

            # 3rd case: (split='byEvents', njobs=10, totalNumberOfEvents=10)
            # Then compute the right number of events per job
            self.log.debug(
                "Job splitting: The number of jobs and the total number of events"
            )

            if (not self.totalNumberOfEvents) or (self.totalNumberOfEvents <
                                                  self.numberOfJobs):
                self.log.error(
                    "Job splitting: The number of events has to be greater than or equal to the number of jobs"
                )
                return False

            # NOTE: Python 2 integer division is intended here.
            eventPerJobIntDiv = self.totalNumberOfEvents / self.numberOfJobs
            eventPerJobRest = self.totalNumberOfEvents % self.numberOfJobs

            mapEventJob = [eventPerJobIntDiv] * self.numberOfJobs

            # Spread the remaining events one by one over the first jobs.
            if eventPerJobRest != 0:
                for suplement in xrange(eventPerJobRest):
                    mapEventJob[suplement] += 1

        self.log.debug("Job splitting: events over the jobs: %s" % mapEventJob)

        self.log.info("Job splitting: submission consists of %d job(s)" %
                      len(mapEventJob))

        return ['NumberOfEvents', mapEventJob, 'NbOfEvts']

    #############################################################################
    def _toInt(self, number):
        """casts the parameter to a positive integer.

    'string integers' are accepted as well.

    :param number: the number to cast (number of events, number of jobs)
    :type number: str or int

    :return: None if number is None, its positive int value, or False on failure
    :rtype: bool, int or None

    :Example:

    >>> number = self._toInt("1000")

    """

        if number is None:
            return None

        try:
            value = int(number)
        except ValueError:
            value = 0  # force the error path below
        if value <= 0:
            self.log.error(
                "Job splitting: arguments must be positive integers")
            return False
        return value
Exemplo n.º 41
0
    def __init__(self, *args, **kwargs):
        """Initialise the agent: base module, clients, options, and the table of
        checks and recovery actions applied per transformation type.
        """
        AgentModule.__init__(self, *args, **kwargs)
        self.name = 'DataRecoveryAgent'
        self.enabled = False  # agent is disabled unless configured otherwise

        self.productionsToIgnore = self.am_getOption("ProductionsToIgnore", [])
        self.transformationTypes = self.am_getOption("TransformationTypes", [
            'MCReconstruction', 'MCSimulation', 'MCReconstruction_Overlay',
            'MCGeneration'
        ])
        self.transformationStatus = self.am_getOption("TransformationStatus",
                                                      ['Active', 'Completing'])
        self.shifterProxy = self.am_setOption('shifterProxy', 'DataManager')

        self.jobStatus = [
            'Failed', 'Done'
        ]  ##This needs to be both otherwise we cannot account for all cases

        self.jobMon = JobMonitoringClient()
        self.fcClient = FileCatalogClient()
        self.tClient = TransformationClient()
        self.reqClient = ReqClient()
        self.diracILC = DiracILC()
        self.inputFilesProcessed = set()
        # Ordered table of Check (job -> bool) / Actions (job, tInfo -> list)
        # entries; for each job the matching checks trigger their actions.
        self.todo = {'MCGeneration':
                     [ dict( Message="MCGeneration: OutputExists: Job 'Done'",
                             ShortMessage="MCGeneration: job 'Done' ",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and job.status=='Failed',
                             Actions=lambda job,tInfo: [ job.setJobDone(tInfo) ]
                           ),
                       dict( Message="MCGeneration: OutputMissing: Job 'Failed'",
                             ShortMessage="MCGeneration: job 'Failed' ",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and job.status=='Done',
                             Actions=lambda job,tInfo: [ job.setJobFailed(tInfo) ]
                           ),
                       # dict( Message="MCGeneration, job 'Done': OutputExists: Task 'Done'",
                       #       ShortMessage="MCGeneration: job already 'Done' ",
                       #       Counter=0,
                       #       Check=lambda job: job.allFilesExist() and job.status=='Done',
                       #       Actions=lambda job,tInfo: [ tInfo._TransformationInfo__setTaskStatus(job, 'Done') ]
                       #     ),
                     ],
                     'OtherProductions':
                     [ \
                   ## should always be first!

                       dict( Message="One of many Successful: clean others",
                             ShortMessage="Other Tasks --> Keep",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and job.otherTasks and job.inputFile not in self.inputFilesProcessed,
                             Actions=lambda job,tInfo: [ self.inputFilesProcessed.add(job.inputFile), job.setJobDone(tInfo), job.setInputProcessed(tInfo) ]
                           ),
                       dict( Message="Other Task processed Input, no Output: Fail",
                             ShortMessage="Other Tasks --> Fail",
                             Counter=0,
                             Check=lambda job: job.inputFile in self.inputFilesProcessed and job.allFilesMissing() and job.status!='Failed',
                             Actions=lambda job,tInfo: [ job.setJobFailed(tInfo) ]
                           ),
                       dict( Message="Other Task processed Input: Fail and clean",
                             ShortMessage="Other Tasks --> Cleanup",
                             Counter=0,
                             Check=lambda job: job.inputFile in self.inputFilesProcessed and not job.allFilesMissing(),
                             Actions=lambda job,tInfo: [ job.setJobFailed(tInfo), job.cleanOutputs(tInfo) ]
                           ),
                       dict( Message="InputFile missing: mark job 'Failed', mark input 'Deleted', clean",
                             ShortMessage="Input Missing --> Job 'Failed, Input 'Deleted', Cleanup",
                             Counter=0,
                             Check=lambda job: job.inputFile and not job.inputFileExists and job.fileStatus != "Deleted",
                             Actions=lambda job,tInfo: [ job.cleanOutputs(tInfo), job.setJobFailed(tInfo), job.setInputDeleted(tInfo) ]
                           ),
                       dict( Message="InputFile Deleted, output Exists: mark job 'Failed', clean",
                             ShortMessage="Input Deleted --> Job 'Failed, Cleanup",
                             Counter=0,
                             Check=lambda job: job.inputFile and not job.inputFileExists and job.fileStatus == "Deleted" and not job.allFilesMissing(),
                             Actions=lambda job,tInfo: [ job.cleanOutputs(tInfo), job.setJobFailed(tInfo) ]
                           ),
                       ## All Output Exists
                       dict( Message="Output Exists, job Failed, input not Processed --> Job Done, Input Processed",
                             ShortMessage="Output Exists --> Job Done, Input Processed",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus!="Processed" and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setJobDone(tInfo), job.setInputProcessed(tInfo) ]
                           ),
                       dict( Message="Output Exists, job Failed, input Processed --> Job Done",
                             ShortMessage="Output Exists --> Job Done",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus=="Processed" and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setJobDone(tInfo) ]
                           ),
                       dict( Message="Output Exists, job Done, input not Processed --> Input Processed",
                             ShortMessage="Output Exists --> Input Processed",
                             Counter=0,
                             Check=lambda job: job.allFilesExist() and \
                                               not job.otherTasks and \
                                               job.status=='Done' and \
                                               job.fileStatus!="Processed" and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setInputProcessed(tInfo) ]
                           ),
                       ## outputmissing
                       dict( Message="Output Missing, job Failed, input Assigned, MaxError --> Input MaxReset",
                             ShortMessage="Max ErrorCount --> Input MaxReset",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFile not in self.inputFilesProcessed and \
                                               job.inputFileExists and \
                                               job.errorCount > MAXRESET,
                             Actions=lambda job,tInfo: [ job.setInputMaxReset(tInfo) ]
                           ),
                       dict( Message="Output Missing, job Failed, input Assigned --> Input Unused",
                             ShortMessage="Output Missing --> Input Unused",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFile not in self.inputFilesProcessed and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setInputUnused(tInfo) ]
                           ),
                       dict( Message="Output Missing, job Done, input Assigned --> Job Failed, Input Unused",
                             ShortMessage="Output Missing --> Job Failed, Input Unused",
                             Counter=0,
                             Check=lambda job: job.allFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Done' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFile not in self.inputFilesProcessed and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [ job.setInputUnused(tInfo), job.setJobFailed(tInfo) ]
                           ),
                       ## some files missing, needing cleanup. Only checking for
                       ## assigned, because processed could mean an earlier job was
                       ## succesful and this one is just the duplicate that needed
                       ## to be removed! But we check for other tasks earlier, so
                       ## this should not happen
                       dict( Message="Some missing, job Failed, input Assigned --> cleanup, Input 'Unused'",
                             ShortMessage="Output Missing --> Cleanup, Input Unused",
                             Counter=0,
                             Check=lambda job: job.someFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Failed' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [job.cleanOutputs(tInfo),job.setInputUnused(tInfo)]
                             #Actions=lambda job,tInfo: []
                           ),
                       dict( Message="Some missing, job Done, input Assigned --> cleanup, job Failed, Input 'Unused'",
                             ShortMessage="Output Missing --> Cleanup, Job Failed, Input Unused",
                             Counter=0,
                             Check=lambda job: job.someFilesMissing() and \
                                               not job.otherTasks and \
                                               job.status=='Done' and \
                                               job.fileStatus in ASSIGNEDSTATES and \
                                               job.inputFileExists,
                             Actions=lambda job,tInfo: [job.cleanOutputs(tInfo),job.setInputUnused(tInfo),job.setJobFailed(tInfo)]
                             #Actions=lambda job,tInfo: []
                           ),
                       dict( Message="Some missing, job Done --> job Failed",
                             ShortMessage="Output Missing, Done --> Job Failed",
                             Counter=0,
                             Check=lambda job: not job.allFilesExist() and job.status=='Done',
                             Actions=lambda job,tInfo: [job.setJobFailed(tInfo)]
                           ),
                       dict ( Message="Something Strange",
                              ShortMessage="Strange",
                              Counter=0,
                              Check=lambda job: job.status not in ("Failed","Done"),
                              Actions=lambda job,tInfo: []
                            ),
                       ##should always be the last one!
                       dict ( Message="Failed Hard",
                              ShortMessage="Failed Hard",
                              Counter=0,
                              Check=lambda job: False, ## never
                              Actions=lambda job,tInfo: []
                            ),
                     ]
                    }
        self.jobCache = defaultdict(lambda: (0, 0))  # per-transformation (jobID, counter) cache
        self.printEveryNJobs = self.am_getOption('PrintEvery', 200)
        ##Notification
        self.notesToSend = ""
        self.addressTo = self.am_getOption('MailTo',
                                           ["*****@*****.**"])
        self.addressFrom = self.am_getOption('MailFrom',
                                             "*****@*****.**")
        self.subject = "DataRecoveryAgent"
Exemplo n.º 42
0
 
# Query the file catalog for the input files matching the metadata selection.
# NOTE(review): `fc` and `meta` are defined outside this fragment.
res = fc.findFilesByMetadata(meta)
if not res['OK']:
   # NOTE(review): only prints the error; the script continues and will fail
   # below on res['Value'] -- presumably an exit was intended here.
   print res['Message']

lfns = res['Value']
#print "Found %s files" % len(lfns)
filelist=[]
for lfn in lfns:
   filelist.append(lfn)
#print filelist
#filelist2=filelist[0]

#check if big radius in fastjetanalyzer indeed 0.7 or 1.0
jobGroup = "HZAnalyzer_190417_ee_qqqq_m_qqqq_2TeV_13696_VLC7PFOs"
dirac = DiracILC(True,jobGroup+".rep")
job = UserJob()
#job.setExecutionEnv({'ROOT_INCLUDE_PATH':'./'})
job.setJobGroup(jobGroup)
job.setOutputSandbox ( [ "*.log","*.out","*.py"] )
job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.QMUL.uk'])
#pay attention that the Zuds200 here is NOT changed
job.setInputSandbox( ["LFN:/ilc/user/w/webermat/190606/HZAnalyzerlib.tar.gz", "LFN:/ilc/user/w/webermat/190412/vtxprob.tar.gz","LFN:/ilc/user/w/webermat/190412/flavourTagging04-01_ct_90deg/lcfiweights.tar.gz"] ) 
# NOTE(review): second setBannedSites call overrides the first one above
# (note the differing site lists, e.g. 'LCG.INP3-CC.fr' vs 'LCG.IN2P3-CC.fr').
job.setBannedSites(['LCG.INP3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.QMUL.uk','LCG.Oxford.uk'])
# One sub-job per chunk of 50 input files.
job.setSplitInputData(filelist, numberOfFilesPerJob=50)
ma = Marlin()
ma.setVersion('ILCSoft-2019-04-17_gcc62')
#ma.setInputFile("LFN:/ilc/user/w/webermat/ddsimstdheptautau/ILC18-10-11_gcc62_CLIC_o3_v14/tautau200/ddsim_ILC181011_gcc62_tautau_200_CLIC_o3_v14_0%s.slcio"%(str(input_ind)))
ma.setSteeringFile("/eos/user/w/weberma2/steeringFiles/testHZAnalyzer.xml")
ma.setDetectorModel("CLIC_o3_v14")
HZrootfilename2="HZStudy_ee_qqqq_m_qqqq_2TeV_13696_polm80_3TeV_wO_CLIC_o3_v14_DR7.root"
def main(argv):
    # Input arguments
    ildconfig_version   = "$ILDCONFIGVER"
    ilcsoft_version     = "$ILCSOFTVER"

    evts_per_run    = $EVTSPERRUN
    detector_model  = "$DETECTOR"
    sim_input       = "$SIMINPUT"
    process_name    = "$PROCESS"

    index           = $IND

    sim_input = diracpath_from_pnfspath( sim_input )
    sim_detector_model = detector_model_wo_option( detector_model )

    job_group = ilcsoft_version + "_" + ildconfig_version + "_" + process_name + "_" + detector_model
    dirac = DiracILC(True,job_group+".rep")

    # outputs to be saved onto grid SE
    RECoutput = []

    # DDSim

    evtStart   = (index-1)*evts_per_run
    evtEnd     = index*evts_per_run - 1
    RandSeed = random.randrange(11623, 99999)

    lcinputSIM  = "LFN:" + sim_input
    lcoutputSIM = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_SIM.slcio"%(str(evtStart),(str)(evtEnd))

    sim = DDSim()
    sim.setVersion(ilcsoft_version)

    sim.setDetectorModel(sim_detector_model)
    sim.setInputFile(lcinputSIM)
    sim.setSteeringFile("ddsim_steer.py")
    sim.setNumberOfEvents(evts_per_run)
    sim.setRandomSeed(RandSeed)
    sim.setEnergy(1000)
    sim.setStartFrom(evtStart)
    sim.setOutputFile(lcoutputSIM)

    # Marlin
    lcoutputDST = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_DST.slcio"%(str(evtStart),(str)(evtEnd))

    ma = Marlin()
    ma.setVersion(ilcsoft_version)
    ma.setDetectorModel(detector_model)
    ma.setSteeringFile("MarlinStdReco.xml")
    ma.setExtraCLIArguments( "--constant.lcgeo_DIR=$lcgeo_DIR --constant.DetectorModel={} --global.MaxRecordNumber=0".format(detector_model) )
    ma.setLogFile("marlin.log")
    ma.getInputFromApp(sim)
    ma.setEnergy(1000)
    ma.setOutputDstFile(lcoutputDST)

    RECoutput.append(lcoutputDST)

    # ILCDirac user job
    job = UserJob()
    job.setName("user_sim_reco")

    job.setJobGroup(job_group)

    job.setILDConfig(ildconfig_version)
    job.setCPUTime(86400)

    tmp_file_name = process_name + "_sim_reco_job_tmp.py"
    job.setInputSandbox([tmp_file_name])
    job.setOutputSandbox(["*.log","MarlinStdRecoParsed.xml","marlin*.xml","*.py "])

    job.dontPromptMe()
    job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us'])

    # run simulation job
    simres = job.append(sim)
    if not simres['OK']:
            print simres['Not ok appending ddsim to job']
            quit()


    # run Malrin reco jobs
    mares = job.append(ma)
    if not mares['OK']:
            print mares['Not ok appending Marlin to job']
            quit()

    job.setOutputData(RECoutput,"ILDPerformance/WWZZSeparation/{}_ILDConfig_{}_{}".format(ilcsoft_version,ildconfig_version,detector_model),"DESY-SRM")
    print RECoutput

    submit_output = job.submit(dirac)
    print submit_output