def getTransform():
    """Build the EVGEN->Rivet transform with the standard argument set."""
    executors = {RivetExecutor("EvgenJobTransforms/skeleton.EVGENtoRivet.py")}
    trf = transform(executor=executors)
    addAthenaArguments(trf.parser)
    addStdEvgenArgs(trf.parser)
    return trf
def getTransform():
    """Construct the TAG file merging transform (CollAppend-based)."""
    merger = tagMergeExecutor(name='TAGFileMerge',
                              exe='CollAppend',
                              inData={'TAG'},
                              outData={'TAG_MRG'})
    trf = transform(executor=merger)
    addMyArgs(trf.parser)
    return trf
def main():
    """Entry point for the trigger transform: parse arguments, run, report."""
    msg.info('This is %s' % sys.argv[0])
    # NOTE: the inData/outData labels must match the '**' portion of the
    # corresponding input**File / output**File argument names, and the two
    # lists must not coincide.
    trf = transform(trfName='Trig_trf',
                    executor=trigExecutor(name='athena',
                                          exe='athenaMT.py',
                                          exeArgs=['athenaoptsMT'],
                                          inData=['BS'],
                                          outData=['BS_MT']))
    addAthenaArguments(trf.parser)
    addTriggerArgs(trf.parser)
    trf.parseCmdLineArgs(sys.argv[1:])
    # Debug output is active from this point if --verbose/--loglevel DEBUG was given.
    trigPreRun(trf)        # argument-name conversion and other pre-run operations
    trf.execute()          # run the transform itself
    trf.generateReport()   # produce the final job report
    trigPostRun(trf)       # post-run operations selected via options
    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))
    sys.exit(trf.exitCode)
def getTransform():
    """Build the AOD merge transform: POOL merge, AODtoTAG and EventIndex substeps."""
    from EventIndexProducer.EITransformUtils import addEI_MRG_Substep, addEI_MRG_arguments

    executors = set()
    executors.add(hybridPOOLMergeExecutor(
        name='AODMerge',
        skeletonFile='RecJobTransforms/skeleton.MergePool_tf.py',
        inData=['AOD'],
        outData=['AOD_MRG'],
        perfMonFile='ntuple_POOLMerge.pmon.gz'))
    executors.add(athenaExecutor(
        name='AODtoTAG',
        skeletonFile='RecJobTransforms/skeleton.AODtoTAG_tf.py',
        inData=['AOD_MRG'],
        outData=['TAG']))
    addEI_MRG_Substep(executors)

    trf = transform(executor=executors)
    # Register the common and transform-specific command-line arguments.
    for register in (addAthenaArguments, addDetectorArguments,
                     addCommonRecTrfArgs, addMyArgs, addEI_MRG_arguments):
        register(trf.parser)
    return trf
def getTransform():
    """Return the FTK pattern-bank generation transform with all arguments added."""
    generator = athenaExecutor(name='FTKPattGenRoot',
                               skeletonFile='TrigFTKBankGen/skeleton.FTKPattBankGenRoot.py')
    trf = transform(executor=generator)
    addAthenaArguments(trf.parser)
    addFTKPattGenArgs(trf.parser)
    return trf
def getTransform():
    """Build the hit-filtering transform (FilterHit skeleton, 'filthits' substep)."""
    hit_filter = athenaExecutor(name='FilterHitTf',
                                substep="filthits",
                                skeletonFile='SimuJobTransforms/skeleton.FilterHit.py',
                                tryDropAndReload=False)
    trf = transform(executor=hit_filter)
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addMyArgs(trf.parser)
    return trf
def getTransform(RAWtoALL=False):
    """Build the general-purpose reconstruction transform.

    Adds the standard reconstruction substeps and arguments, and — when the
    simulation packages are importable — the digitisation substep as well.

    :param RAWtoALL: forwarded to addAllRecoArgs to select the RAWtoALL workflow.
    :return: the configured transform instance.
    """
    executorSet = set()
    addRecoSubsteps(executorSet)
    trf = transform(executor=executorSet,
                    description='General purpose ATLAS digitisation and reconstruction transform'
                    ' Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DAODs.'
                    ' See https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTf for more details.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)
    addAllRecoArgs(trf, RAWtoALL)

    # For digi step - make sure we can add the digitisation/simulation arguments
    # before we add this substep; allows Reco_tf to work without AtlasSimulation
    try:
        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
        from SimuJobTransforms.SimTransformUtils import addDigitizationSubstep
        addBasicDigiArgs(trf.parser)
        addForwardDetTrfArgs(trf.parser)
        addPileUpTrfArgs(trf.parser)
        addCommonSimDigTrfArgs(trf.parser)
        simStepSet = set()
        addDigitizationSubstep(simStepSet)
        trf.appendToExecutorSet(list(simStepSet)[0])
    except ImportError as e:  # FIX: 'except ImportError, e' is Python-2-only syntax
        msg.warning('Failed to import digitisation arguments ({0}). Digitisation substep will not be available.'.format(e))
    # FIX: the original fell off the end and implicitly returned None;
    # every sibling getTransform() returns the configured transform.
    return trf
def getTransform():
    """Build the FTK per-subregion simulation + merge transform.

    One simulation executor is created per subregion, followed by a single
    merge executor that combines all per-subregion outputs.
    """
    executors = set()
    for sub in range(subregions):
        # Per-subregion runtime arguments select the matching bank/constant files.
        executors.add(athenaExecutor(
            name='FTKFullSimulationBank{0}'.format(sub),
            skeletonFile='TrigFTKSim/skeleton.FTKStandaloneSim.py',
            inData=['RDO', 'NTUP_FTKIP', 'TXT_FTKIP'],
            outData=['NTUP_FTKTMP_{0}'.format(sub)],
            extraRunargs={'banksubregion': [sub]},
            runtimeRunargs={
                'patternbankpath': 'runArgs.patternbank{0}path'.format(sub),
                'fitconstantspath': 'runArgs.fitconstants{0}path'.format(sub),
                'fit711constantspath': 'runArgs.fit711constants{0}path'.format(sub),
                'sectorpath': 'runArgs.sector{0}path'.format(sub),
                'outputNTUP_FTKTMPFile': 'runArgs.outputNTUP_FTKTMP_{0}File'.format(sub)}))
    executors.add(athenaExecutor(
        name='FTKSimulationMerge',
        skeletonFile='TrigFTKSim/skeleton.FTKStandaloneMerge.py',
        inData=[tuple('NTUP_FTKTMP_{0}'.format(sub) for sub in range(subregions))],
        outData=['NTUP_FTKTMP'],
        extraRunargs={'inputNTUP_FTKTMPFile':
                      ['tmp.NTUP_FTKTMP_{0}'.format(sub) for sub in range(subregions)]},
        runtimeRunargs={'MergeRegion': 'runArgs.bankregion[0]',
                        'FirstRegion': 'runArgs.bankregion[0]'}))
    trf = transform(executor=executors,
                    description='FTK Subregion simulate x {0} and merge.'.format(subregions))
    addAthenaArguments(trf.parser, maxEventsDefaultSubstep='all')
    addFTKSimulationArgs(trf.parser)
    return trf
def getTransform():
    """Build the FTK per-subregion simulation transform (no merge step)."""
    executors = set()
    for sub in range(subregions):
        # Per-subregion runtime arguments pick the matching bank/cache paths.
        executors.add(athenaExecutor(
            name='FTKFullSimulationBank{0}'.format(sub),
            skeletonFile='TrigFTKSim/skeleton.FTKStandaloneSim.py',
            inData=['NTUP_FTKIP', 'TXT_FTKIP'],
            outData=['NTUP_FTKTMP'],
            extraRunargs={'banksubregion': [sub]},
            runtimeRunargs={
                'patternbankpath': 'runArgs.patternbank{0}path'.format(sub),
                'outputNTUP_FTKTMPFile': 'runArgs.outputNTUP_FTKFile',
                'cachedbankpath': 'runArgs.cachedbank{0}path'.format(sub),
                'CachePath': 'runArgs.CachePath{0}'.format(sub)}))

    trf = transform(executor=executors,
                    description='FTK Subregion simulate x {0} .'.format(subregions))
    addFTKSimulationArgs(trf.parser)
    addTrigFTKSimOptions(trf.parser, nsubregions=subregions)
    addTrigFTKSimMergeOptions(trf.parser)
    addTrigFTKSimTFOptions(trf.parser)
    addTrigFTKSimRFOptions(trf.parser)
    return trf
def getTransform():
    """Build the RAWtoESD transform ('r2e' substep)."""
    raw_to_esd = athenaExecutor(name='RAWtoESD',
                                skeletonFile='RecJobTransforms/skeleton.RAWtoESD_tf.py',
                                substep='r2e')
    trf = transform(executor=raw_to_esd)
    addAthenaArguments(trf.parser)
    addMyArgs(trf.parser)
    return trf
def getTransform():
    """Build a transform that can archive ('zip') and unarchive data files."""
    executors = {
        archiveExecutor(name='Archiver', exe='zip',
                        inData=['Data'], outData=['Arch']),
        archiveExecutor(name='Unarchiver', exe='unarchive',
                        inData=['Arch'], outData=['outNULL']),
    }
    trf = transform(executor=executors)
    addMyArgs(trf.parser)
    return trf
def getTransform():
    """Build the bytestream (RAW) file merging transform."""
    merger = bsMergeExecutor(name='RAWFileMerge',
                             exe='file_merging',
                             inData={'BS'},
                             outData={'BS_MRG'})
    trf = transform(executor=merger)
    addMyArgs(trf.parser)
    return trf
def getTransform():
    """Build the HelloWorld transform, driven by a ComponentAccumulator skeleton."""
    hello = athenaExecutor(name='athena',
                           skeletonFile=None,
                           skeletonCA="PyJobTransforms.HelloWorldSkeleton")
    trf = transform(executor=hello)
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    return trf
def getTransform():
    """Build the simulation validation transform (HITS in, histograms out)."""
    from SimuJobTransforms.SimTransformUtils import addSimValidationSubstep, addHITSValidArguments

    executors = set()
    addSimValidationSubstep(executors)
    trf = transform(executor=executors,
                    description='ATLAS Validation transform. Inputs must be HITS. Outputs must be histogram files.')
    addAthenaArguments(trf.parser)
    addHITSValidArguments(trf.parser)
    return trf
def getTransform():
    """Build the NTUP merging transform with all known D3PD/DPD merge types."""
    executors = set()
    addNTUPMergeSubsteps(executors)
    trf = transform(executor=executors)
    addPhysValidationMergeFiles(trf.parser)
    addD3PDArguments(trf.parser, transform=trf, addD3PDMRGtypes=True)
    addExtraDPDTypes(trf.parser, transform=trf, NTUPMergerArgs=True)
    return trf
def getTransform():
    """Build the FTK simulation merge transform (AthenaMP disabled)."""
    merger = athenaExecutor(name='FTKSimulationMerge',
                            disableMP=True,
                            skeletonFile='TrigFTKSim/skeleton.FTKStandaloneMerge.py')
    trf = transform(executor=merger)
    addAthenaArguments(trf.parser)
    addFTKMergerArgs(trf.parser)
    return trf
def getTransform(RAWtoALL=False):
    """Build the general-purpose reconstruction transform.

    Adds the standard reconstruction substeps and arguments; the digitisation,
    event-overlay and physics-validation substeps are appended only when their
    packages are importable, so the core transform works without them.

    :param RAWtoALL: forwarded to addAllRecoArgs to select the RAWtoALL workflow.
    :return: the configured transform instance.
    """
    executors = set()
    addRecoSubsteps(executors)
    trf = transform(executor=executors,
                    description='General purpose ATLAS digitisation and reconstruction transform'
                    ' Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DAODs.'
                    ' See https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTf for more details.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)
    addAllRecoArgs(trf, RAWtoALL)

    # For digi step - make sure we can add the digitisation/simulation arguments
    # before we add this substep; allows Reco_tf to work without AtlasSimulation
    try:
        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
        from SimuJobTransforms.SimTransformUtils import addDigitizationSubstep
        addBasicDigiArgs(trf.parser)
        addForwardDetTrfArgs(trf.parser)
        addPileUpTrfArgs(trf.parser)
        addCommonSimDigTrfArgs(trf.parser)
        digiSteps = set()
        addDigitizationSubstep(digiSteps)
        trf.appendToExecutorSet(list(digiSteps)[0])
    except ImportError as e:
        msg.warning('Failed to import digitisation arguments ({0}). Digitisation substep will not be available.'.format(e))

    # Again, protect core functionality from too tight a dependence on EventOverlay
    try:
        from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayPoolTrfArgs
        from EventOverlayJobTransforms.overlayTransformUtils import appendOverlay_PoolSubstep
        addOverlayTrfArgs(trf.parser)
        addOverlayPoolTrfArgs(trf.parser)
        appendOverlay_PoolSubstep(trf, True)
    except ImportError as e:
        msg.warning('Failed to import overlay arguments ({0}). Event overlay substep will not be available.'.format(e))

    # Again, protect core functionality from too tight a dependence on PATJobTransforms
    try:
        from PATJobTransforms.PATTransformUtils import addPhysValidationFiles, addValidationArguments, appendPhysValidationSubstep
        addPhysValidationFiles(trf.parser)
        addValidationArguments(trf.parser)
        appendPhysValidationSubstep(trf)
    except ImportError:
        msg.warning('Failed to import PAT arguments. Physics validation substep will not be available.')

    return trf
def getTransform():
    """Build the EventIndex transform (POOL file input)."""
    from EventIndexProducer.EITransformUtils import addEI_Substep, addEI_arguments

    executors = set()
    addEI_Substep(executors)
    trf = transform(executor=executors,
                    description='EventIndex transform. Input must be a POOL file.')
    addAthenaArguments(trf.parser)
    addEI_arguments(trf.parser)
    return trf
def getTransform():
    """Build the HLT histogram merging transform (single DQ merge executor)."""
    merger = DQMergeExecutor(name='HLTHistogramMerge',
                             inData=['HIST'],
                             outData=['HIST_MRG'])
    trf = transform(executor=merger)
    addMyArgs(trf.parser)
    return trf
def getTransform():
    """Build the ESDtoAOD transform ('e2a' substep)."""
    esd_to_aod = athenaExecutor(name='ESDtoAOD',
                                skeletonFile='RecJobTransforms/skeleton.ESDtoAOD_tf.py',
                                substep='e2a')
    trf = transform(executor=esd_to_aod)
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)
    return trf
def getTransform():
    """Build the overlay bytestream filter transform (trigger-bit based)."""
    from EventOverlayJobTransforms.overlayTransformUtils import (
        addOverlayBSFilterSubstep, addOverlayBSFilterArguments,
        addOverlayBSTrigFilterSubstep, addOverlayHITARMakerSubstep)

    executors = set()
    addOverlayBSTrigFilterSubstep(executors)
    addOverlayBSFilterSubstep(executors)
    addOverlayHITARMakerSubstep(executors)

    trf = transform(executor=executors,
                    description='Filter BS data based on trigger bit')
    addAthenaArguments(trf.parser)
    addOverlayBSFilterArguments(trf.parser)
    return trf
def getTransform():
    """Build the legacy ATLAS overlay transform (HITS in, RDO out)."""
    from EventOverlayJobTransforms.overlayTransformUtils import addOverlay_PoolSubstep, addOverlay_PoolArguments

    executors = set()
    addOverlay_PoolSubstep(executors)
    trf = transform(executor=executors,
                    description='ATLAS Overlay transform. Inputs must be HITS. Outputs must be RDO.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    # The overlay workflow supplies its own filtering, so the standard
    # trigger filter arguments are not added.
    addTriggerArguments(trf.parser, addTrigFilter=False)
    addOverlay_PoolArguments(trf.parser)
    return trf
def getTransform():
    """Build the (new-style) ATLAS overlay transform (HITS in, RDO out)."""
    from OverlayConfiguration.OverlayTransformHelpers import addOverlayArguments, addOverlaySubstep

    executors = set()
    addOverlaySubstep(executors)
    trf = transform(executor=executors,
                    description='ATLAS Overlay transform. Inputs must be HITS. Outputs must be RDO.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser, addTrigFilter=False)
    addOverlayArguments(trf.parser)
    return trf
def getTransform():
    """Build the full-chain transform: ISF simulation, digitisation and reconstruction.

    Combines the reconstruction and digitisation substeps with simulation
    executors for both EVNT and EVNT_TR (track record) inputs, then registers
    the common, reconstruction and simulation/digitisation argument groups.

    :return: the configured transform instance.
    """
    executorSet = set()
    addRecoSubsteps(executorSet)
    addDigitizationSubstep(executorSet)

    # Sim + Digi - factor these out into an importable function in time
    executorSet.add(athenaExecutor(
        name='TRtoHITS',
        skeletonFile='SimuJobTransforms/skeleton.EVGENtoHIT_ISF.py',
        substep='simTRIn',
        tryDropAndReload=False,
        perfMonFile='ntuple.pmon.gz',
        inData=['EVNT_TR'],
        outData=['HITS', 'NULL']))
    executorSet.add(athenaExecutor(
        name='EVNTtoHITS',
        skeletonFile='SimuJobTransforms/skeleton.EVGENtoHIT_ISF.py',
        substep='sim',
        tryDropAndReload=False,
        perfMonFile='ntuple.pmon.gz',
        inData=['NULL', 'EVNT'],
        outData=['EVNT_TR', 'HITS', 'NULL']))

    trf = transform(
        executor=executorSet,
        description='Full chain ATLAS transform with ISF simulation, digitisation'
        ' and reconstruction. Inputs can be EVNT, EVNT_TR, HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DPDs.'
        ' See https://twiki.cern.ch/twiki/bin/viewauth/Atlas/FullChainTf for more details.')

    # Common arguments
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)

    # Reconstruction arguments and outputs (use the factorised 'do it all' function)
    addAllRecoArgs(trf)

    # Simulation and digitisation options
    addCommonSimTrfArgs(trf.parser)
    addCommonSimDigTrfArgs(trf.parser)
    addCosmicsTrfArgs(trf.parser)
    addBasicDigiArgs(trf.parser)
    addSim_tfArgs(trf.parser)
    addForwardDetTrfArgs(trf.parser)
    addPileUpTrfArgs(trf.parser)
    # FIX: removed a redundant second call to addCommonSimDigTrfArgs(trf.parser);
    # the same argument group was being registered twice.
    addTrackRecordArgs(trf.parser)
    return trf
def getTransform():
    """Build the log-scanning transform with D3PD and parallel-processor arguments."""
    trf = transform(executor=logscanExecutor())
    # Mostly reco types...
    addArgs(trf.parser)
    trfArgs.addParallelJobProcessorArguments(trf.parser)
    # Register every known D3PD output type.
    trfArgs.addD3PDArguments(trf.parser, transform=trf, multipleOK=True)
    return trf
def getTransform():
    """Build the DAOD merging transform."""
    executors = set()
    addDAODMergerSubsteps(executors)
    trf = transform(executor=executors)
    for register in (addAthenaArguments, addDetectorArguments,
                     addCommonRecTrfArgs, addDAODArguments):
        register(trf.parser)
    return trf
def getTransform():
    """Build the RAW skimming transform (acmd.py based)."""
    skimmer = skimRawExecutor(name='SkimRAW',
                              inData=['BS'],
                              outData=['BS_SKIM'],
                              exe='acmd.py')
    trf = transform(executor={skimmer})
    addMyArgs(trf.parser)
    return trf
def getTransform():
    """Build the ESD merging transform (hybrid POOL merge)."""
    merger = hybridPOOLMergeExecutor(name='ESDMerge',
                                     skeletonFile='RecJobTransforms/skeleton.MergePool_tf.py',
                                     inData=['ESD'],
                                     outData=['ESD_MRG'])
    trf = transform(executor={merger})
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)
    return trf
def getTransform():
    """Build the SCT calibration transform from its production skeleton."""
    # NOTE(review): the skeleton is referenced by an absolute AFS path —
    # presumably intentional for the production setup; verify it is reachable
    # in the deployment environment.
    calib = SCTCalibExecutor('/afs/cern.ch/user/s/sctcalib/testarea/latest/athena/InnerDetector/InDetCalibAlgs/SCT_CalibAlgs/share/skeleton.sct_calib.py')
    trf = transform(executor={calib})
    addAthenaArguments(trf.parser)
    addSCTCalibArgs(trf.parser)
    return trf
def getTransform():
    """Build the legacy AtlasG4 simulation transform (EVNT in, HITS/TrackRecords out)."""
    from SimuJobTransforms.SimTransformUtils import addAtlasG4Substep, addAtlasG4Arguments

    executors = set()
    addAtlasG4Substep(executors)
    trf = transform(
        executor=executors,
        description='Legacy ATLAS Simulation transform. Inputs must be EVNT else a single particle Generator job options must be specified. Outputs must be HITS or TrackRecords.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addAtlasG4Arguments(trf.parser)
    return trf