Example #1
def getTransform():
    executorSet = set()
    from EventIndexProducer.EITransformUtils import addEI_MRG_Substep, addEI_MRG_arguments
    executorSet.add(
        hybridPOOLMergeExecutor(
            name='AODMerge',
            skeletonFile='RecJobTransforms/skeleton.MergePool_tf.py',
            inData=['AOD'],
            outData=['AOD_MRG'],
            perfMonFile='ntuple_POOLMerge.pmon.gz'))
    executorSet.add(
        athenaExecutor(
            name='AODtoTAG',
            skeletonFile='RecJobTransforms/skeleton.AODtoTAG_tf.py',
            inData=['AOD_MRG'],
            outData=['TAG'],
        ))

    addEI_MRG_Substep(executorSet)
    trf = transform(executor=executorSet)

    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)
    addEI_MRG_arguments(trf.parser)
    return trf
Example #2
def getTransform():
    trf = transform(executor = athenaExecutor(name = 'FilterHitTf', substep="filthits", skeletonFile = 'SimuJobTransforms/skeleton.FilterHit.py',
                                              tryDropAndReload = False))
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addMyArgs(trf.parser)
    return trf
Example #3
def getTransform(RAWtoALL=False):
    executorSet = set()
    addRecoSubsteps(executorSet)

    trf = transform(executor = executorSet, description = 'General purpose ATLAS digitisation and reconstruction transform.'
                    ' Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DAODs.'
                    ' See https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTf for more details.')
    
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)
    addAllRecoArgs(trf, RAWtoALL)
    
    # For digi step - make sure we can add the digitisation/simulation arguments
    # before we add this substep; allows Reco_tf to work without AtlasSimulation
    try:
        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
        from SimuJobTransforms.SimTransformUtils import addDigitizationSubstep
        addBasicDigiArgs(trf.parser)
        addForwardDetTrfArgs(trf.parser)
        addPileUpTrfArgs(trf.parser)
        addCommonSimDigTrfArgs(trf.parser)
        simStepSet = set()
        addDigitizationSubstep(simStepSet)
        trf.appendToExecutorSet(list(simStepSet)[0])
    except ImportError as e:
        msg.warning('Failed to import digitisation arguments ({0}). Digitisation substep will not be available.'.format(e))

    return trf
Example #4
def getTransform():
    trf = transform(executor=athenaExecutor(
        name='athena',
        skeletonFile=None,
        skeletonCA="PyJobTransforms.HelloWorldSkeleton"))
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    return trf
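
These getTransform() factories are normally driven by a small wrapper in the corresponding *_tf.py script. The sketch below is a minimal, assumed driver for the HelloWorld example above: it relies on the standard PyJobTransforms entry-point methods (parseCmdLineArgs, execute, generateReport, exitCode) and is illustrative rather than a copy of any particular production script.

import sys

from PyJobTransforms.transform import transform
from PyJobTransforms.trfExe import athenaExecutor
from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments


def getTransform():
    # Same factory as Example #4: a single athena executor driven by a ComponentAccumulator skeleton.
    trf = transform(executor=athenaExecutor(
        name='athena',
        skeletonFile=None,
        skeletonCA="PyJobTransforms.HelloWorldSkeleton"))
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    return trf


if __name__ == '__main__':
    # Assumed driver structure; method names are the usual transform API.
    trf = getTransform()
    trf.parseCmdLineArgs(sys.argv[1:])  # parse command-line options such as --maxEvents
    trf.execute()                       # run the configured executor(s)
    trf.generateReport()                # write the job report (typically jobReport.json)
    sys.exit(trf.exitCode)              # propagate the transform exit code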
Example #5
def getTransform(RAWtoALL=False):
    executorSet = set()
    addRecoSubsteps(executorSet)

    trf = transform(
        executor=executorSet,
        description=
        'General purpose ATLAS digitisation and reconstruction transform.'
        ' Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DAODs.'
        ' See https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTf for more details.'
    )

    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)
    addAllRecoArgs(trf, RAWtoALL)

    # For digi step - make sure we can add the digitisation/simulation arguments
    # before we add this substep; allows Reco_tf to work without AtlasSimulation
    try:
        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
        from SimuJobTransforms.SimTransformUtils import addDigitizationSubstep
        addBasicDigiArgs(trf.parser)
        addForwardDetTrfArgs(trf.parser)
        addPileUpTrfArgs(trf.parser)
        addCommonSimDigTrfArgs(trf.parser)
        simStepSet = set()
        addDigitizationSubstep(simStepSet)
        trf.appendToExecutorSet(list(simStepSet)[0])
    except ImportError as e:
        msg.warning(
            'Failed to import digitisation arguments ({0}). Digitisation substep will not be available.'
            .format(e))

    # Again, protect core functionality from too tight a dependence on EventOverlay
    try:
        from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayPoolTrfArgs
        from EventOverlayJobTransforms.overlayTransformUtils import appendOverlay_PoolSubstep
        addOverlayTrfArgs(trf.parser)
        addOverlayPoolTrfArgs(trf.parser)
        appendOverlay_PoolSubstep(trf, True)
    except ImportError as e:
        msg.warning(
            'Failed to import overlay arguments ({0}). Event overlay substep will not be available.'
            .format(e))

    # Again, protect core functionality from too tight a dependence on PATJobTransforms
    try:
        from PATJobTransforms.PATTransformUtils import addPhysValidationFiles, addValidationArguments, appendPhysValidationSubstep
        addPhysValidationFiles(trf.parser)
        addValidationArguments(trf.parser)
        appendPhysValidationSubstep(trf)
    except ImportError:
        msg.warning(
            'Failed to import PAT arguments. Physics validation substep will not be available.'
        )

    return trf
Example #6
def getTransform():
    executorSet = set()
    from EventOverlayJobTransforms.overlayTransformUtils import addOverlay_PoolSubstep, addOverlay_PoolArguments
    addOverlay_PoolSubstep(executorSet)
    trf = transform(executor = executorSet, description = 'ATLAS Overlay transform. Inputs must be HITS. Outputs must be RDO.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser, addTrigFilter=False)
    addOverlay_PoolArguments(trf.parser)
    return trf
Example #7
def getTransform():
    trf = transform(executor=athenaExecutor(
        name='ESDtoAOD',
        skeletonFile='RecJobTransforms/skeleton.ESDtoAOD_tf.py',
        substep='e2a'))
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)
    return trf
Example #8
def getTransform():
    executorSet = set()
    addDAODMergerSubsteps(executorSet)

    trf = transform(executor=executorSet)

    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addDAODArguments(trf.parser)
    return trf
Example #9
def getTransform():
    executor_set = set()
    from OverlayConfiguration.OverlayTransformHelpers import addOverlayArguments, addOverlaySubstep
    addOverlaySubstep(executor_set)
    trf = transform(executor=executor_set,
                    description='ATLAS Overlay transform. Inputs must be HITS. Outputs must be RDO.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser, addTrigFilter=False)
    addOverlayArguments(trf.parser)
    return trf
Example #10
def getTransform():
    executorSet = set()

    addRecoSubsteps(executorSet)
    addDigitizationSubstep(executorSet)

    # Sim + Digi - factor these out into an importable function in time
    executorSet.add(
        athenaExecutor(
            name='TRtoHITS',
            skeletonFile='SimuJobTransforms/skeleton.EVGENtoHIT_ISF.py',
            substep='simTRIn',
            tryDropAndReload=False,
            perfMonFile='ntuple.pmon.gz',
            inData=['EVNT_TR'],
            outData=['HITS', 'NULL']))
    executorSet.add(
        athenaExecutor(
            name='EVNTtoHITS',
            skeletonFile='SimuJobTransforms/skeleton.EVGENtoHIT_ISF.py',
            substep='sim',
            tryDropAndReload=False,
            perfMonFile='ntuple.pmon.gz',
            inData=['NULL', 'EVNT'],
            outData=['EVNT_TR', 'HITS', 'NULL']))

    trf = transform(
        executor=executorSet,
        description=
        'Full chain ATLAS transform with ISF simulation, digitisation'
        ' and reconstruction. Inputs can be EVNT, EVNT_TR, HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DPDs.'
        ' See https://twiki.cern.ch/twiki/bin/viewauth/Atlas/FullChainTf for more details.'
    )

    # Common arguments
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)

    # Reconstruction arguments and outputs (use the factorised 'do it all' function)
    addAllRecoArgs(trf)

    # Simulation and digitisation options
    addCommonSimTrfArgs(trf.parser)
    addCommonSimDigTrfArgs(trf.parser)
    addCosmicsTrfArgs(trf.parser)
    addBasicDigiArgs(trf.parser)
    addSim_tfArgs(trf.parser)
    addForwardDetTrfArgs(trf.parser)
    addPileUpTrfArgs(trf.parser)
    addTrackRecordArgs(trf.parser)

    return trf
Example #11
def getTransform():
    executorSet = set()
    executorSet.add(hybridPOOLMergeExecutor(name = 'ESDMerge', skeletonFile = 'RecJobTransforms/skeleton.MergePool_tf.py',
                                   inData = ['ESD'], outData = ['ESD_MRG']))

    trf = transform(executor = executorSet)
    
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)
    return trf
Example #12
def getTransform():
    executorSet = set()
    from SimuJobTransforms.SimTransformUtils import addAtlasG4Substep, addAtlasG4Arguments
    addAtlasG4Substep(executorSet)
    trf = transform(
        executor=executorSet,
        description=
        'Legacy ATLAS Simulation transform. Inputs must be EVNT else a single particle Generator job options must be specified. Outputs must be HITS or TrackRecords.'
    )
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addAtlasG4Arguments(trf.parser)
    return trf
Example #13
def getTransform():
    executorSet = set()
    from SimuJobTransforms.SimTransformUtils import addStandardHITSMergeSubstep, addHITSMergeArguments
    addStandardHITSMergeSubstep(executorSet)
    trf = transform(
        executor=executorSet,
        description=
        'HITS Merging transform. Inputs must be HITS. Outputs must be HITS_MRG (i.e. HITS). '
    )
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addHITSMergeArguments(trf.parser)
    return trf
Example #14
def getTransform():
    executorSet = set()
    from SimuJobTransforms.SimTransformUtils import addDigitizationSubstep, addDigitizationArguments
    addDigitizationSubstep(executorSet)
    trf = transform(
        executor=executorSet,
        description=
        'ATLAS Digitization transform. Inputs must be HITS. Outputs must be RDO.'
    )
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser, addTrigFilter=False)
    addDigitizationArguments(trf.parser)
    return trf
Example #15
def getTransform():

    executorSet = set()
    executorSet.add(
        athenaExecutor(
            name='FTKSimulationMerge',
            skeletonFile='TrigFTKSim/skeleton.FTKStandaloneMerge.py',
            inData=['NTUP_FTKTMP'],
            disableMP=True,
            outData=['NTUP_FTK', 'RDO_FTK'],
        ))
    executorSet.add(
        athenaExecutor(
            name='FTKSimulationRDOMerge',
            skeletonFile='TrigFTKSim/skeleton.FTKStandaloneMerge.py',
            disableMP=True,
            inData=[('NTUP_FTKTMP', 'RDO')],
            outData=['RDO_FTK'],
        ))
    executorSet.add(
        athenaExecutor(
            name='FTKRecoRDOtoESD',
            skeletonFile='RecJobTransforms/skeleton.RAWtoESD.py',
            substep='r2e',
            inData=[('RDO', 'NTUP_FTK')],
            outData=['DESD_FTK'],
            perfMonFile='ntuple_RAWtoESD.pmon.gz',
        ))
    #                                   extraRunargs = {'preInclude': ['TrigFTKSim/FTKReco_jobOptions.py']}))
    executorSet.add(
        athenaExecutor(
            name='FTKRecoESDtoNTUP',
            skeletonFile='PyJobTransforms/skeleton.ESDtoDPD.py',
            substep='e2d',
            inData=['DESD_FTK'],
            outData=['NTUP_TRIG'],
            perfMonFile='ntuple_ESDtoDPD.pmon.gz',
        ))
    #                                   extraRunargs = {'preInclude': ['TrigFTKSim/FTKReco_jobOptions.py']}))

    trf = transform(executor=executorSet,
                    description='FTK full region merge and reco.')

    addAthenaArguments(trf.parser, maxEventsDefaultSubstep='all')
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addFTKSimulationArgs(trf.parser)
    addD3PDArguments(trf.parser, pick=['NTUP_TRIG'], transform=trf)
    addBasicDigiArgs(trf.parser)
    return trf
Example #16
def getTransform():
    executorSet = set()
    executorSet.add(
        athenaExecutor(name='RDOMerge',
                       skeletonFile='RecJobTransforms/skeleton.MergeRDO_tf.py',
                       inData=['RDO'],
                       outData=['RDO_MRG']))

    trf = transform(executor=executorSet)

    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addForwardDetTrfArgs(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)
    return trf
Example #17
def getTransform():

    executorSet = set()
    executorSet.add(athenaExecutor(name = 'FTKSimulationMergeDigi',
                                   skeletonFile = 'TrigFTKSim/skeleton.FTKStandaloneMergeDigi.py',
                                   inData = ['NTUP_FTKTMP'],disableMP=True,
                                   outData = ['RDO_FTK'],))
    trf = transform(executor = executorSet, description = 'FTK full region merge and reco.')

    addAthenaArguments(trf.parser, maxEventsDefaultSubstep='all')
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addFTKSimulationArgs(trf.parser)
    addD3PDArguments(trf.parser, pick=['NTUP_TRIG'], transform=trf)
    addBasicDigiArgs(trf.parser)
    return trf
Example #18
def getTransform():
    executorSet = set()
    addRecoSubsteps(executorSet)
    addTrigSimSubsteps(executorSet)
    
    trf = transform(executor = executorSet, description = 'General purpose ATLAS reconstruction transform, which also supports'
                    ' digitisation. Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DPDs.'
                    ' See https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTf for more details.')
    
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)
    addTrigSimArguments(trf.parser)
    addAllRecoArgs(trf)

    return trf
Example #19
def getTransform():
    trf = transform(
        executor=athenaExecutor(
            name='TestBeamTf',
            skeletonFile='SimuJobTransforms/skeleton.TestBeam.py',
            substep='TBsim',
            tryDropAndReload=False,
            perfMonFile='ntuple.pmon.gz',
            inData=['NULL', 'Evgen'],
            outData=['HITS', 'NULL'])
    )  #may have to add evgen to outData if cosmics/cavern background jobs don't work.
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonSimTrfArgs(trf.parser)
    addCommonSimDigTrfArgs(trf.parser)
    addTestBeamArgs(trf.parser)
    return trf
Example #20
def getTransform():
    executorSet = set()
    executorSet.add(hybridPOOLMergeExecutor(name = 'ESDMerge', skeletonFile = 'RecJobTransforms/skeleton.MergePool_tf.py',
                                   inData = ['ESD'], outData = ['ESD_MRG']))
    executorSet.add(hybridPOOLMergeExecutor(name = 'AODMerge', skeletonFile = 'RecJobTransforms/skeleton.MergePool_tf.py',
                                   inData = ['AOD'], outData = ['AOD_MRG']))
    executorSet.add(athenaExecutor(name = 'AODtoTAG', skeletonFile = 'RecJobTransforms/skeleton.AODtoTAG_tf.py',
                                   inData = ['AOD_MRG'], outData = ['TAG'],))
    executorSet.add(tagMergeExecutor(name = 'TAGFileMerge', exe = 'CollAppend', inData = set(['TAG']), outData = set(['TAG_MRG'])))
    executorSet.add(DQMergeExecutor(name = 'DQHistogramMerge', inData = [('HIST_ESD', 'HIST_AOD'), 'HIST'], outData = ['HIST_MRG']))
    executorSet.add(athenaExecutor(name = 'RDOMerge', skeletonFile = 'RecJobTransforms/skeleton.MergeRDO_tf.py',
                                   inData = ['RDO'], outData = ['RDO_MRG']))
    executorSet.add(bsMergeExecutor(name = 'RAWFileMerge', exe = 'file_merging', inData = set(['BS']), outData = set(['BS_MRG'])))
    executorSet.add(athenaExecutor(name = 'EVNTMerge', skeletonFile = 'PyJobTransforms/skeleton.EVNTMerge.py',inData = ['EVNT'], outData = ['EVNT_MRG']))

    addDAODMergerSubsteps(executorSet)
    addNTUPMergeSubsteps(executorSet)

    trf = transform(executor = executorSet)

    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)

    addDAODArguments(trf.parser)
    addPhysValidationMergeFiles(trf.parser)
    addD3PDArguments(trf.parser, transform=trf, addD3PDMRGtypes=True)
    addExtraDPDTypes(trf.parser, transform=trf, NTUPMergerArgs = True)

    # Add HITSMerge only if SimuJobTransforms is available
    try:
        from SimuJobTransforms.SimTransformUtils import addHITSMergeArguments
        addHITSMergeArguments(trf.parser)
        simStepSet = set()
        simStepSet.add(athenaExecutor(name = 'HITSMerge', substep="hitsmerge", skeletonFile = 'SimuJobTransforms/skeleton.HITSMerge.py',
                                   tryDropAndReload = False, inData = ['HITS'], outData = ['HITS_MRG']))
        trf.appendToExecutorSet(list(simStepSet)[0])
    except ImportError as e:
        msg.warning('Failed to import simulation arguments ({0}). HITSMerge will not be available.'.format(e))


    return trf
Example #21
def getTransform():
    executorSet = set()
    executorSet.add(
        athenaExecutor(
            name='MDTCalibNtuple',
            skeletonFile=
            'MuonCalibJobTransforms/skeleton.mdt_calib_ntuple_tf.py'))
    # Possibly (?) one should specify input/output formats so the transform can check them; however, adding this
    # info causes the transform to fail. Of course the transform does not know about calibstream bytestream, though
    # in principle it knows about calib ntuples.
    #', inData=['BS'], outData=['NTUP_MUONCALIB']    # specify input/output file formats
    trf = transform(executor=executorSet)

    # For maxEvents and skipEvents - skipEvents does not actually work with calibstream,
    # so do not use addAthenaArguments; instead define our own arguments for these.
    #  addAthenaArguments(trf.parser)
    # For runNumber, geometryVersion, DBRelease, conditionsTag
    addDetectorArguments(trf.parser)
    # for parameters specific to calib ntuple production
    addMyArgs(trf.parser)
    return trf
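
The addMyArgs helper used in the example above is not shown in this collection. Below is a hedged sketch of how such custom arguments (the event-count options and calib-ntuple parameters the comments describe) might be declared, following the trfArgClasses/argFactory pattern from the LArNoiseBursts script later on this page; the argument names, group name and help strings here are hypothetical, not the real MuonCalibJobTransforms code.

import PyJobTransforms.trfArgClasses as trfArgClasses


def addMyArgs(parser):
    # Hypothetical argument group for calib-ntuple options (naming is an assumption).
    parser.defineArgGroup('MDT calib ntuple', 'Options for MDT calibration ntuple production')
    # Own event-count options instead of addAthenaArguments, as explained in the comments above.
    parser.add_argument('--maxEvents',
                        type=trfArgClasses.argFactory(trfArgClasses.argInt),
                        help='Maximum number of events to process (illustrative)',
                        group='MDT calib ntuple')
    parser.add_argument('--skipEvents',
                        type=trfArgClasses.argFactory(trfArgClasses.argInt),
                        help='Number of events to skip (the comments above note this does not work with calibstream)',
                        group='MDT calib ntuple')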
Example #22
def getTransform():
    executorSet = set()
    from EventOverlayJobTransforms.overlayTransformUtils import addOverlayBSFilterSubstep, addOverlay_BSSubstep, addCommonOverlayArguments, addUniqueOverlayBSFilterArguments, addUniqueOverlay_BSArguments, addOverlayChainOverrideArguments
    from SimuJobTransforms.SimTransformUtils import addSimulationSubstep, addSimulationArguments
    addRecoSubsteps(executorSet)
    addOverlayBSFilterSubstep(executorSet)
    addSimulationSubstep(executorSet, overlayTransform=True)
    addOverlay_BSSubstep(executorSet)
    trf = transform(executor=executorSet, description='Full Overlay Chain')

    addOverlayChainOverrideArguments(trf.parser)
    addAthenaArguments(trf.parser, maxEventsDefaultSubstep='all')
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser, addTrigFilter=False)
    addCommonOverlayArguments(trf.parser)
    addUniqueOverlayBSFilterArguments(trf.parser)
    addSimulationArguments(trf.parser)
    addUniqueOverlay_BSArguments(trf.parser)

    # Reconstruction arguments and outputs
    addAllRecoArgs(trf)

    return trf
Example #23
def getTransform():
    trf = transform(executor=athenaExecutor(name='athena'))
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addMyArgs(trf.parser)
    return trf
Example #24
def getTransform():
    executorSet = set()

    addRecoSubsteps(executorSet)

    # Sim + Digi - factor these out into an importable function in time
    executorSet.add(
        athenaExecutor(
            name='EVNTtoRDO',
            skeletonFile='FullChainTransforms/FastChainSkeleton.EVGENtoRDO.py',
            substep='simdigi',
            tryDropAndReload=False,
            perfMonFile='ntuple.pmon.gz',
            inData=['NULL', 'EVNT'],
            outData=['RDO', 'NULL']))
    # Overlay
    from EventOverlayJobTransforms.overlayTransformUtils import addOverlay_PoolArguments
    executorSet.add(
        athenaExecutor(
            name='OverlayPool',
            skeletonFile='EventOverlayJobTransforms/skeleton.OverlayPool_tf.py',
            substep='overlayPOOL',
            tryDropAndReload=False,
            perfMonFile='ntuple.pmon.gz',
            inData=[('HITS', 'RDO_BKG')],
            outData=['RDO', 'RDO_SGNL']))

    trf = transform(
        executor=executorSet,
        description=
        'Fast chain ATLAS transform with ISF simulation, digitisation'
        ' and reconstruction. Inputs can be EVNT, with outputs of RDO, ESD, AOD or DPDs.'
        ' See https://twiki.cern.ch/twiki/bin/viewauth/AtlasComputing/FastChainTf for more details.'
    )

    # Common arguments
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)

    # Reconstruction arguments and outputs (use the factorised 'do it all' function)
    addAllRecoArgs(trf)

    # Simulation and digitisation options
    addCommonSimTrfArgs(trf.parser)
    addCommonSimDigTrfArgs(trf.parser)
    addBasicDigiArgs(trf.parser)
    addSim_tfArgs(trf.parser)
    # addForwardDetTrfArgs(trf.parser)
    addPileUpTrfArgs(trf.parser)
    addTrackRecordArgs(trf.parser)
    addFastChainTrfArgs(trf.parser)

    # Overlay
    addOverlay_PoolArguments(trf.parser)

    # Add PhysVal
    addPhysValidationFiles(trf.parser)
    addValidationArguments(trf.parser)
    appendPhysValidationSubstep(trf)

    return trf
Example #25
def getTransform():
    executorSet = set()
        
    # BSRDOtoRAW is a new option for the trigger transform;
    # it runs primarily using athenaHLT.
    # literalRunargs is used when running with athena.
    executorSet.add(trigRecoExecutor(name = 'BSRDOtoRAW', skeletonFile = 'TriggerJobOpts/runHLT_standalone.py',
                                     exe = 'setsid athenaHLT.py', 
                                     # setsid is needed to fix the process-group id of child processes to be the same as mother process; discussed in https://its.cern.ch/jira/browse/ATR-20513 
                                     substep = 'b2r', tryDropAndReload = False,
                                     inData = ['BS_RDO', 'RDO'], outData = ['BS', 'BS_TRIGCOST', 'HIST_HLTMON','HIST_DEBUGSTREAMMON'],
                                     perfMonFile = 'ntuple_BSRDOtoRAW.pmon.gz',
                                     literalRunargs = ['writeBS = runArgs.writeBS',
                                                       'from AthenaCommon.AthenaCommonFlags import jobproperties as jps',
                                                       'jps.AthenaCommonFlags.FilesInput.set_Value_and_Lock(runArgs.inputBS_RDOFile)',
                                                       'jps.AthenaCommonFlags.EvtMax.set_Value_and_Lock(runArgs.maxEvents)',
                                                       'if hasattr(runArgs,\'skipEvents\'):',
                                                       '   jps.AthenaCommonFlags.SkipEvents.set_Value_and_Lock(runArgs.skipEvents)',
                                                       'if hasattr(runArgs,\'outputBSFile\'):',
                                                       '   jps.AthenaCommonFlags.BSRDOOutput.set_Value_and_Lock( runArgs.outputBSFile )']))

    # RAWtoCOST is a new option for the trigger transform;
    # it runs in athena and will succeed if the input BS file has costmon enabled.
    executorSet.add(trigCostExecutor(name = 'RAWtoCOST', skeletonFile = 'TrigCostMonitor/readTrigCost.py',
                                     substep = 'r2c',
                                     inData = ['BS_TRIGCOST'], outData = ['NTUP_TRIGCOST','NTUP_TRIGRATE','NTUP_TRIGEBWGHT'],
                                     perfMonFile = 'ntuple_RAWtoCOST.pmon.gz',
                                     literalRunargs = ['BSRDOInput = runArgs.inputBSFile',
                                                       'EvtMax = runArgs.maxEvents']))

    # Add default reconstruction steps.
    # Eventually to be replaced by:
    #from RecJobTransforms.recTransformUtils import addRecoSubsteps
    #addRecoSubsteps(executorSet)

    # One difference to Reco_tf is that NTUP_TRIG has to be added - this needs a PyJobTransforms update before the above can be used.
    # NTUP_TRIG is added as it is not available in ATLASP1HLT, but is available in the reco release,
    # hence it can be used later in an ATLASP1HLT job if the release is switched.

    executorSet.add(athenaExecutor(name = 'RAWtoESD', skeletonFile = 'RecJobTransforms/skeleton.RAWtoESD_tf.py',
                                   substep = 'r2e', inData = ['BS', 'RDO'], outData = ['ESD', 'HIST_ESD_INT'], 
                                   perfMonFile = 'ntuple_RAWtoESD.pmon.gz'))
    executorSet.add(athenaExecutor(name = 'ESDtoAOD', skeletonFile = 'RecJobTransforms/skeleton.ESDtoAOD_tf.py',
                                   substep = 'e2a', inData = ['ESD'], outData = ['AOD', 'HIST_AOD_INT'], 
                                   perfMonFile = 'ntuple_ESDtoAOD.pmon.gz'))
    executorSet.add(DQMergeExecutor(name = 'DQHistogramMerge', inData = [('HIST_ESD_INT', 'HIST_AOD_INT')], outData = ['HIST']))
    executorSet.add(athenaExecutor(name = 'ESDtoDPD', skeletonFile = 'PATJobTransforms/skeleton.ESDtoDPD_tf.py',
                                   substep = 'e2d', inData = ['ESD'], outData = ['NTUP_TRIG'],
                                   perfMonFile = 'ntuple_ESDtoDPD.pmon.gz'))
    executorSet.add(athenaExecutor(name = 'AODtoDPD', skeletonFile = 'PATJobTransforms/skeleton.AODtoDPD_tf.py',
                                   substep = 'a2d', inData = ['AOD', 'EVNT'], outData = ['NTUP_TRIG'],
                                   perfMonFile = 'ntuple_AODtoDPD.pmon.gz'))
    executorSet.add(athenaExecutor(name = 'AODtoTAG', skeletonFile = 'RecJobTransforms/skeleton.AODtoTAG_tf.py',
                                   inData = ['AOD'], outData = ['TAG'],))
    executorSet.add(reductionFrameworkExecutor(name = 'AODtoRED', skeletonFile = 'PATJobTransforms/skeleton.AODtoRED_tf.py',
                                   substep = 'a2r', inData = ['AOD'], outData = ['DAOD_RED']))
    executorSet.add(reductionFrameworkExecutorNTUP(name = 'NTUPtoRED', skeletonFile = 'PATJobTransforms/skeleton.NTUPtoRED_tf.py',
                                   substep = 'n2n', inData = ['NTUP_COMMON'], outData = ['NTUP_RED']))
    
    trf = transform(executor = executorSet, description = 'Trigger transform to run HLT_standalone, followed by'
                    ' general purpose ATLAS reconstruction transform. Input to HLT_Standalone is inputBS_RDOFile'
                    ' with outputs of RDO, ESD, AOD or DPDs. For more details see:'
                    ' https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransform or for reco_tf, see:'
                    ' https://twiki.cern.ch/twiki/bin/viewauth/Atlas/RecoTf')
    
    # Add arguments as done in Reco_tf
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addStandardRecoFiles(trf.parser)
    addPrimaryDPDArguments(trf.parser, transform = trf)
    addD3PDArguments(trf.parser, transform = trf)
    addExtraDPDTypes(trf.parser, transform = trf)
    addReductionArguments(trf.parser, transform = trf)

    # Currently ignoring simulation arguments, as the package can't be found when the following is in cmt/requirements:
    #   use SimuJobTransforms SimuJobTransforms-* Simulation
    
    # For digi step - make sure we can add the digitisation/simulation arguments
    # before we add this substep
#    try:
#        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
#        addBasicDigiArgs(trf.parser)
#        addForwardDetTrfArgs(trf.parser)
#        addPileUpTrfArgs(trf.parser)
#        #addCommonSimDigTrfArgs(trf.parser)
#        executorSet.add(athenaExecutor(name = 'HITtoRDO', skeletonFile = 'SimuJobTransforms/skeleton.HITtoRDO.py',
#                                       substep = 'h2r', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
#                                       inData = ['Hits'], outData = ['RDO'],
#                                       runtimeRunargs = {'LowPtMinbiasHitsFile' : 'runArgs.inputLowPtMinbiasHitsFile',
#                                                         'HighPtMinbiasHitsFile' : 'runArgs.inputHighPtMinbiasHitsFile',}))
#        
#    except ImportError:
#        msg.warning('Failed to import simulation/digitisation arguments. These substeps will not be available.')

    # Now add the trigger-specific arguments;
    # putting these last makes them appear last in the help, so they are easier to find.
    addTriggerArgs(trf.parser)
    addTrigCostArgs(trf.parser)
    addTriggerDBArgs(trf.parser)
    addDebugArgs(trf.parser)

    return trf
Example #26
def getTransform():

    executorSet = set()
    executorSet.add(
        athenaExecutor(name='RAWtoESD',
                       skeletonFile='RecJobTransforms/skeleton.RAWtoESD_tf.py',
                       substep='r2e',
                       inData=['BS', 'RDO'],
                       outData=['NTUP_FTKIP'],
                       perfMonFile='ntuple_RAWtoESD.pmon.gz'))

    for tower in range(ntowers):
        for subregion in range(subregions):
            pos = tower * subregions + subregion
            executorSet.add(
                athenaExecutor(
                    name='FTKFullSimulationBank{0:02d}Sub{1}'.format(
                        tower, subregion),
                    skeletonFile='TrigFTKSim/skeleton.FTKStandaloneSim.py',
                    substep='FTKTwr{0:02d}Sub{1}'.format(tower, subregion),
                    inData=['NTUP_FTKIP', 'TXT_FTKIP'],
                    inputEventTest=False,
                    disableMP=True,
                    outData=[
                        'NTUP_FTKTMP_{0:02d}_{1}'.format(tower, subregion)
                    ],
                    extraRunargs={
                        'bankregion': [tower],
                        'banksubregion': [subregion]
                    },
                    # Need to ensure that the correct subregion is used
                    runtimeRunargs={
                        'bankregion': [tower],
                        'subregions': [subregion],
                        'patternbankpath':
                        '[runArgs.patternbankspath[{0}]]'.format(pos),
                        'fitconstantspath':
                        '[runArgs.fitconstantspath[{0}]]'.format(tower),
                        'fit711constantspath':
                        '[runArgs.fit711constantspath[{0}]]'.format(tower),
                        'sectorpath':
                        '[runArgs.sectorspath[{0}]]'.format(tower),
                        'outputNTUP_FTKTMPFile':
                        'runArgs.outputNTUP_FTKTMP_{0:02d}_{1}File'.format(
                            tower, subregion)
                    }))
        # TODO: the current scheme hijacks the input and output names using
        # extraRunargs and runtimeRunargs; this doesn't really allow control
        # (not really important) over what happens to the intermediate files.
        # To avoid this hack the skeleton should be more flexible and check
        # the number of regions/subregions within the skeleton file.
        executorSet.add(
            athenaExecutor(
                name='FTKSimulationMerge{0:02d}'.format(tower),
                skeletonFile='TrigFTKSim/skeleton.FTKStandaloneMerge.py',
                substep='FTKMTwr{0:02d}'.format(tower),
                inputEventTest=False,
                disableMP=True,
                inData=[
                    tuple([
                        'NTUP_FTKTMP_{0:02d}_{1}'.format(tower, subregion)
                        for subregion in range(subregions)
                    ])
                ],
                outData=['NTUP_FTKTMP_{0:02d}'.format(tower)],
                extraRunargs={
                    'inputNTUP_FTKTMPFile': [
                        'tmp.NTUP_FTKTMP_{0:02d}_{1}'.format(tower, subregion)
                        for subregion in range(subregions)
                    ]
                },
                runtimeRunargs={
                    'MergeRegion':
                    tower,
                    'FirstRegion':
                    tower,
                    'outputNTUP_FTKTMPFile':
                    'runArgs.outputNTUP_FTKTMP_{0:02d}File'.format(tower)
                }))

    # add final merge for all the tower, generating a NTUP_FTK file
    executorSet.add(
        athenaExecutor(
            name="FTKSimulationMergeFinal",
            skeletonFile='TrigFTKSim/skeleton.FTKStandaloneMerge.py',
            substep="FTKFinal",
            inputEventTest=False,
            disableMP=True,
            inData=[
                tuple([
                    'NTUP_FTKTMP_{0:02d}'.format(tower)
                    for tower in range(ntowers)
                ]) + ('NTUP_FTKIP', )
            ],
            outData=['NTUP_FTK'],
            runtimeRunargs={
                'MergeFromTowers': True,
                'FirstRegion': 0,
                'TruthTrackTreeName': "'truthtracks'",
                'EvtInfoTreeName': "'evtinfo'",
                'SaveTruthTree': '1'
            }))

    executorSet.add(
        athenaExecutor(name='RDOFTKCreator',
                       skeletonFile='TrigFTKSim/skeleton.RDO_FTK_Creator.py',
                       substep='r2eFTK',
                       inData=[('RDO', 'NTUP_FTK')],
                       outData=['RDO_FTK'],
                       inputEventTest=False,
                       perfMonFile='ntuple_RDOFTKCreator.pmon.gz'))

    executorSet.add(
        athenaExecutor(name='BSFTKCreator',
                       skeletonFile='TrigFTKSim/skeleton.BS_FTK_Creator.py',
                       substep='bs2bsFTK',
                       inData=[('BS', 'NTUP_FTK')],
                       outData=['BS_FTK'],
                       inputEventTest=False,
                       perfMonFile='ntuple_BSFTKCreator.pmon.gz'))

    trf = transform(executor=executorSet,
                    description='FTK simulation for {0} towers, with {1} '
                    'subregions merge and final merge.'.format(
                        ntowers, subregions))

    addTrigFTKAthenaOptions(trf.parser)
    addDetectorArguments(trf.parser)
    addTrigFTKSimOptions(trf.parser, nsubregions=subregions)
    addTrigFTKSimMergeOptions(trf.parser)
    addTrigFTKSimTFOptions(trf.parser)
    addTrigFTKSimRFOptions(trf.parser)
    addFTKSimulationArgs(trf.parser)

    return trf
Example #27
import sys
from PyJobTransforms.transform import transform
from PyJobTransforms.trfExe import athenaExecutor
from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments
import PyJobTransforms.trfArgClasses as trfArgClasses

if __name__ == '__main__':

    executorSet = set()
    executorSet.add(athenaExecutor(name = 'LArNoiseBursts_from_raw', skeletonFile = 'LArCafJobs/skeleton.LArNoise_fromraw.py',
                                   substep = 'r2e', inData = ['BS',], outData = ['NTUP_LARNOISE','NTUP_HECNOISE','HIST_LARNOISE']))
   
    trf = transform(executor = executorSet) 
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
   
    trf.parser.add_argument('--inputBSFile', nargs='+',
                            type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input'),
                            help='Input BS file', group='Reco Files')
   
    trf.parser.add_argument('--outputNTUP_LARNOISEFile', nargs='+',
                            type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, io='output'),
                            help='Output LAr Noise Burst file', group='Ntuple Files')

    trf.parser.add_argument('--outputNTUP_HECNOISEFile', nargs='+',
                            type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, io='output'),
                            help='Output HECNoise file', group='Ntuple Files')

    trf.parser.add_argument('--outputHIST_LARNOISEFile', nargs='+',
                            type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output'),
                            # help/group assumed by analogy with the calls above; the original snippet is truncated here
                            help='Output LAr noise histogram file', group='Ntuple Files')
Example #28
def getTransform():

    executorSet = set()
    executorSet.add(
        athenaExecutor(
            name='RAWtoESD',
            skeletonFile='RecJobTransforms/skeleton.RAWtoESD_tf.py',
            substep='r2e', inData=['BS', 'RDO'], outData=['NTUP_FTKIP'],
            perfMonFile = 'ntuple_RAWtoESD.pmon.gz'))

    for tower in range(ntowers) :
        for subregion in range(subregions):
            pos = tower * subregions + subregion
            executorSet.add(
                athenaExecutor(
                    name='FTKFullSimulationBank{0:02d}'.format(tower),
                    skeletonFile='TrigFTKSim/skeleton.FTKStandaloneSim.py',
                    substep='FTKTwr{0:02d}'.format(tower),
                    inData=['NTUP_FTKIP', 'TXT_FTKIP'],
                    inputEventTest = False,disableMP=True,
                    outData=['NTUP_FTKTMP_{0:02d}'.format(tower)],
                    extraRunargs={
                        'bankregion': [tower],
                        'banksubregion': [subregion]},
                    # Need to ensure that the correct subregion is used
                    runtimeRunargs={
                        'bankregion': [tower],
                        'subregions': [subregion],
                        'patternbankpath':
                            '[runArgs.patternbankspath[{0}]]'.format(pos),
                        'fitconstantspath':
                            '[runArgs.fitconstantspath[{0}]]'.format(tower),
                        'fit711constantspath':
                            '[runArgs.fit711constantspath[{0}]]'.format(tower),
                        'sectorpath':
                            '[runArgs.sectorspath[{0}]]'.format(tower),
                        'outputNTUP_FTKTMPFile':
                            'runArgs.outputNTUP_FTKTMP_{0:02d}File'.format(tower)}))

    # NOTE: WE DON'T DO MERGING HERE BECAUSE IT'S ONE SUBREGION! ONLY FINAL MERGE
    # add final merge for all the tower, generating a NTUP_FTK file
    executorSet.add(
        athenaExecutor(name="FTKSimulationMergeFinal",
            skeletonFile='TrigFTKSim/skeleton.FTKStandaloneMerge.py',
            substep = "FTKFinal",
            inputEventTest = False,disableMP=True,
            inData=[tuple([
                'NTUP_FTKTMP_{0:02d}'.format(tower)
                for tower in range(ntowers)]) + ('NTUP_FTKIP',)],
            outData=['NTUP_FTK'],
            runtimeRunargs={
                'MergeFromTowers': True,
                'FirstRegion': 0,
                'TruthTrackTreeName': "'truthtracks'",
                'EvtInfoTreeName': "'evtinfo'",
                'UnmergedRoadFormatName': "'FTKRoadsStream%u.'",
                'UnmergedFormatName': "'FTKTracksStream%u.'",
                'SaveTruthTree': '1'}))

    executorSet.add(
        athenaExecutor(
            name='RDOFTKCreator',
            skeletonFile='TrigFTKSim/skeleton.RDO_FTK_Creator.py',
            substep='r2eFTK', inData=[('RDO','NTUP_FTK')],
            outData=['RDO_FTK'], inputEventTest = False,
            perfMonFile='ntuple_RDOFTKCreator.pmon.gz'))

    executorSet.add(
         athenaExecutor(
            name='AODFTKCreator',
            skeletonFile='TrigFTKSim/skeleton.AOD_FTK_Creator.py',
            substep='rFTK2aFTK', inData=['RDO_FTK', 'BS_FTK'],
            outData=['AOD'], inputEventTest = False,
            perfMonFile='ntuple_AODFTKCreator.pmon.gz'))

    executorSet.add(
        athenaExecutor(
            name='BSFTKCreator',
            skeletonFile='TrigFTKSim/skeleton.BS_FTK_Creator.py',
            substep='bs2bsFTK', inData=[('BS','NTUP_FTK')],
            outData=['BS_FTK'], inputEventTest = False,
            perfMonFile='ntuple_BSFTKCreator.pmon.gz'))


    trf = transform(executor=executorSet,
                    description='FTK simulation for {0} towers, with {1} '
                                'subregions merge and final merge.'.format(
                                    ntowers, subregions))

    addTrigFTKAthenaOptions(trf.parser)
    addDetectorArguments(trf.parser)
    addTrigFTKSimOptions(trf.parser, nsubregions=subregions)
    addTrigFTKSimMergeOptions(trf.parser)
    addTrigFTKSimTFOptions(trf.parser)
    addTrigFTKSimRFOptions(trf.parser)
    addFTKSimulationArgs(trf.parser)
    return trf