def addNTUPMergeSubsteps(executorSet):
    """Add merge executors for all known flat ntuple (NTUP) formats.

    Extends *executorSet* in place with one ``hadd``-based NTUPMergeExecutor
    per known D3PD type, one for the physics-validation ntuple, and one per
    extra Tier-0 NTUP type.

    :param executorSet: mutable set of executors to extend.
    :return: None (mutates *executorSet*).
    """
    # Ye olde NTUPs
    try:
        # 'Standard' D3PDs
        inDataList, outDataList = listKnownD3PDs()
        # was: iter(zip(...)) - the iter() wrapper is redundant, zip is iterable
        for inData, outData in zip(inDataList, outDataList):
            executorSet.add(
                NTUPMergeExecutor(name='NTUPLEMerge' + inData.replace('_', ''),
                                  exe='hadd',
                                  inData=[inData],
                                  outData=[outData],
                                  exeArgs=[]))
        # Physics Validation NTUP
        executorSet.add(
            NTUPMergeExecutor(name='NTUPLEMergePHYSVAL',
                              exe='hadd',
                              inData=['NTUP_PHYSVAL'],
                              outData=['NTUP_PHYSVAL_MRG'],
                              exeArgs=[]))
        # Extra Tier-0 NTUPs
        extraNTUPs = getExtraDPDList(NTUPOnly=True)
        for ntup in extraNTUPs:
            executorSet.add(
                NTUPMergeExecutor(name='NTUPLEMerge' + ntup.name.replace('_', ''),
                                  exe='hadd',
                                  inData=[ntup.name],
                                  outData=[ntup.name + '_MRG'],
                                  exeArgs=[]))
    # was "except ImportError, e" (Python-2-only syntax); "as e" is used
    # everywhere else in this source and works on Python 2.6+ and 3
    except ImportError as e:
        msg.warning(
            "Failed to get D3PD lists - probably D3PDs are broken in this release: {0}"
            .format(e))
def getTransform(RAWtoALL=False):
    """Assemble the general-purpose reconstruction transform.

    Builds the reco substep set, creates the transform, and registers the
    Athena/detector/trigger/reco argument groups.  Digitisation arguments and
    the digitisation substep are added only if SimuJobTransforms is importable,
    so the transform still works in releases without AtlasSimulation.

    :param RAWtoALL: passed through to addAllRecoArgs.
    """
    # NOTE(review): this chunk ends after the digitisation block without a
    # ``return trf`` - it looks truncated; confirm against the full original.
    executorSet = set()
    addRecoSubsteps(executorSet)
    trf = transform(executor=executorSet,
                    description='General purpose ATLAS digitisation and reconstruction transform'
                    ' Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DAODs.'
                    ' See https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTf for more details.')
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)
    addAllRecoArgs(trf, RAWtoALL)

    # For digi step - make sure we can add the digitisation/simulation arguments
    # before we add this substep; allows Reco_tf to work without AtlasSimulation
    try:
        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
        from SimuJobTransforms.SimTransformUtils import addDigitizationSubstep
        addBasicDigiArgs(trf.parser)
        addForwardDetTrfArgs(trf.parser)
        addPileUpTrfArgs(trf.parser)
        addCommonSimDigTrfArgs(trf.parser)
        simStepSet = set()
        addDigitizationSubstep(simStepSet)
        trf.appendToExecutorSet(list(simStepSet)[0])
    # was "except ImportError, e" (Python-2-only); use "as e" for consistency
    # with the other handlers in this source
    except ImportError as e:
        msg.warning('Failed to import digitisation arguments ({0}). Digitisation substep will not be available.'.format(e))
def knownDAODTypes():
    """Return the list of known DAOD subtype names.

    The names come from DerivationFrameworkProdFlags.listAODtoDPD with the
    leading "Stream" prefix removed.  Returns an empty list when the
    DerivationFramework is not available in the current release.

    :return: list of DAOD subtype name strings (possibly empty).
    """
    DAODTypes = []
    try:
        from DerivationFrameworkCore.DerivationFrameworkProdFlags import listAODtoDPD
        # Bug fix: the original used name.lstrip("Stream"), which strips any
        # run of the characters {S,t,r,e,a,m} from the left - not the literal
        # "Stream" prefix - and so can eat extra leading letters of the
        # subtype name.  Remove the exact prefix instead.
        DAODTypes = [name[len("Stream"):] if name.startswith("Stream") else name
                     for name in listAODtoDPD]
    except ImportError:
        msg.warning("Could not import DAOD subtypes from DerivationFramework.DerivationFrameworkCore")
    return DAODTypes
def getTransform():
    """Build and return the merging transform.

    Registers merge executors for every mergeable data type (POOL ESD/AOD,
    TAG, histograms, RDO, bytestream, EVNT, plus DAOD and NTUP substeps),
    then attaches all the relevant argument groups to the transform's parser.
    The HITSMerge substep is added only when SimuJobTransforms is importable.

    :return: configured transform instance.
    """
    executorSet = set()

    # POOL-file merges use the hybrid (fast/slow) merge executor.
    executorSet.add(hybridPOOLMergeExecutor(name='ESDMerge',
                                            skeletonFile='RecJobTransforms/skeleton.MergePool_tf.py',
                                            inData=['ESD'],
                                            outData=['ESD_MRG']))
    executorSet.add(hybridPOOLMergeExecutor(name='AODMerge',
                                            skeletonFile='RecJobTransforms/skeleton.MergePool_tf.py',
                                            inData=['AOD'],
                                            outData=['AOD_MRG']))
    # TAG production from the merged AOD, plus TAG file merging.
    executorSet.add(athenaExecutor(name='AODtoTAG',
                                   skeletonFile='RecJobTransforms/skeleton.AODtoTAG_tf.py',
                                   inData=['AOD_MRG'],
                                   outData=['TAG']))
    executorSet.add(tagMergeExecutor(name='TAGFileMerge',
                                     exe='CollAppend',
                                     inData=set(['TAG']),
                                     outData=set(['TAG_MRG'])))
    # Monitoring histograms.
    executorSet.add(DQMergeExecutor(name='DQHistogramMerge',
                                    inData=[('HIST_ESD', 'HIST_AOD'), 'HIST'],
                                    outData=['HIST_MRG']))
    # RDO, raw bytestream and EVNT merges.
    executorSet.add(athenaExecutor(name='RDOMerge',
                                   skeletonFile='RecJobTransforms/skeleton.MergeRDO_tf.py',
                                   inData=['RDO'],
                                   outData=['RDO_MRG']))
    executorSet.add(bsMergeExecutor(name='RAWFileMerge',
                                    exe='file_merging',
                                    inData=set(['BS']),
                                    outData=set(['BS_MRG'])))
    executorSet.add(athenaExecutor(name='EVNTMerge',
                                   skeletonFile='PyJobTransforms/skeleton.EVNTMerge.py',
                                   inData=['EVNT'],
                                   outData=['EVNT_MRG']))

    # Derived-format merge substeps.
    addDAODMergerSubsteps(executorSet)
    addNTUPMergeSubsteps(executorSet)

    trf = transform(executor=executorSet)

    # Wire up all argument groups on the transform's parser.
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addMyArgs(trf.parser)
    addDAODArguments(trf.parser)
    addPhysValidationMergeFiles(trf.parser)
    addD3PDArguments(trf.parser, transform=trf, addD3PDMRGtypes=True)
    addExtraDPDTypes(trf.parser, transform=trf, NTUPMergerArgs=True)

    # Add HITSMerge only if SimuJobTransforms is available
    try:
        from SimuJobTransforms.SimTransformUtils import addHITSMergeArguments
        addHITSMergeArguments(trf.parser)
        hitsSteps = set()
        hitsSteps.add(athenaExecutor(name='HITSMerge',
                                     substep="hitsmerge",
                                     skeletonFile='SimuJobTransforms/skeleton.HITSMerge.py',
                                     tryDropAndReload=False,
                                     inData=['HITS'],
                                     outData=['HITS_MRG']))
        trf.appendToExecutorSet(next(iter(hitsSteps)))
    except ImportError as e:
        msg.warning('Failed to import simulation arguments ({0}). HITSMerge will not be available.'.format(e))

    return trf
def getTransform(RAWtoALL=False):
    """Build and return the full reconstruction transform.

    Creates the reco substeps and transform, registers the core argument
    groups, then opportunistically adds the digitisation, event-overlay and
    physics-validation substeps - each guarded by a try/except ImportError so
    the transform still works in releases missing those packages.

    :param RAWtoALL: passed through to addAllRecoArgs.
    :return: configured transform instance.
    """
    executorSet = set()
    addRecoSubsteps(executorSet)
    trf = transform(
        executor=executorSet,
        description=
        'General purpose ATLAS digitisation and reconstruction transform'
        ' Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DAODs.'
        ' See https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTf for more details.'
    )

    # Core argument groups.
    for registerArgs in (addAthenaArguments, addDetectorArguments,
                         addTriggerArguments):
        registerArgs(trf.parser)
    addAllRecoArgs(trf, RAWtoALL)

    # For digi step - make sure we can add the digitisation/simulation arguments
    # before we add this substep; allows Reco_tf to work without AtlasSimulation
    try:
        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
        from SimuJobTransforms.SimTransformUtils import addDigitizationSubstep
        for registerArgs in (addBasicDigiArgs, addForwardDetTrfArgs,
                             addPileUpTrfArgs, addCommonSimDigTrfArgs):
            registerArgs(trf.parser)
        digiSteps = set()
        addDigitizationSubstep(digiSteps)
        trf.appendToExecutorSet(next(iter(digiSteps)))
    except ImportError as e:
        msg.warning(
            'Failed to import digitisation arguments ({0}). Digitisation substep will not be available.'
            .format(e))

    # Again, protect core functionality from too tight a dependence on EventOverlay
    try:
        from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayPoolTrfArgs
        from EventOverlayJobTransforms.overlayTransformUtils import appendOverlay_PoolSubstep
        addOverlayTrfArgs(trf.parser)
        addOverlayPoolTrfArgs(trf.parser)
        appendOverlay_PoolSubstep(trf, True)
    except ImportError as e:
        msg.warning(
            'Failed to import overlay arguments ({0}). Event overlay substep will not be available.'
            .format(e))

    # Again, protect core functionality from too tight a dependence on PATJobTransforms
    try:
        from PATJobTransforms.PATTransformUtils import addPhysValidationFiles, addValidationArguments, appendPhysValidationSubstep
        addPhysValidationFiles(trf.parser)
        addValidationArguments(trf.parser)
        appendPhysValidationSubstep(trf)
    except ImportError:
        msg.warning(
            'Failed to import PAT arguments. Physics validation substep will not be available.'
        )

    return trf
addCommonSimDigTrfArgs(trf.parser) simStepSet = set() addDigitizationSubstep(simStepSet) trf.appendToExecutorSet(list(simStepSet)[0]) except ImportError, e: msg.warning('Failed to import digitisation arguments ({0}). Digitisation substep will not be available.'.format(e)) # Again, protect core functionality from too tight a dependence on EventOverlay try: from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayPoolTrfArgs from EventOverlayJobTransforms.overlayTransformUtils import appendOverlay_PoolSubstep addOverlayTrfArgs(trf.parser) addOverlayPoolTrfArgs(trf.parser) appendOverlay_PoolSubstep(trf, True) except ImportError, e: msg.warning('Failed to import overlay arguments ({0}). Event overlay substep will not be available.'.format(e)) # Again, protect core functionality from too tight a dependence on PATJobTransforms try: from PATJobTransforms.PATTransformUtils import addPhysValidationFiles, addValidationArguments, appendPhysValidationSubstep addPhysValidationFiles(trf.parser) addValidationArguments(trf.parser) appendPhysValidationSubstep(trf) except ImportError: msg.warning('Failed to import PAT arguments. Physics validation substep will not be available.') return trf if __name__ == '__main__': main()
def main():
    """Generate signature files for substeps, dumped in JSON format.

    Parses --output/--mode/--transforms, imports each *_tf.py transform found
    (adding its directory to sys.path), extracts either the ProdSys parameter
    description or the substep list, and writes the result as JSON to the
    --output file.  Exits with status 0 on completion; transforms that fail to
    import or process are logged and skipped.
    """
    parser = argparse.ArgumentParser(
        description=
        "Generate signature files for substeps, dumped in JSON format.")
    parser.add_argument('--output', help='JSON output file', required=True)
    parser.add_argument('--mode',
                        help='mode (default = params)',
                        choices=['params', 'params-alt', 'substeps'],
                        default='params')
    parser.add_argument(
        '--transforms',
        help='List of transforms to process'
        ' (any path given is added to PYTHONPATH automatically).'
        ' If not specified then all executable *_tf.py files'
        ' found in PATH are added.',
        nargs='+',
        default=None)
    cliargs = vars(parser.parse_args())

    transforms_path_list = _getTransformsFromPATH(
    ) if cliargs['transforms'] is None else cliargs['transforms']

    # Make each transform's directory importable.
    for transform_path in transforms_path_list:
        trfpath = os.path.dirname(transform_path)
        if len(trfpath) > 1 and trfpath not in sys.path:
            sys.path.append(trfpath)

    def _prodsysDesc(transform):
        # getProdsysDesc is a plain dict attribute on some transforms and a
        # callable on others; normalise to a dict either way.  (Hoisted here
        # because 'params' and 'params-alt' duplicated this logic.)
        desc = transform.parser.getProdsysDesc
        if not isinstance(desc, dict):
            desc = transform.parser.getProdsysDesc()
        return desc

    result = {}
    treated = []
    for transform_path in transforms_path_list:
        # was: transform_path.endswith('_tf.py') == False - never compare
        # booleans with ==; use "not" instead
        if not transform_path.endswith('_tf.py'):
            continue
        transform_name = os.path.basename(transform_path)
        transform_module = os.path.splitext(transform_name)[0]

        msg.info('Processing transform {0}:'.format(transform_path))
        try:
            # level=-1 keeps the Python 2 implicit relative-import semantics
            # used by these transform modules.
            trfModule = __import__(transform_module, globals(), locals(),
                                   ['getTransform'], -1)
        except Exception as e:
            msg.warning('Failed to import transform {0} ({1}) - ignored'.format(
                transform_module, e))
            continue

        try:
            if 'getTransform' in dir(trfModule):
                transform = trfModule.getTransform()
                if cliargs['mode'] == 'params':
                    # was: result[...] = (((((((desc))))))) - redundant parens
                    result[transform_module] = _prodsysDesc(transform)
                elif cliargs['mode'] == 'params-alt':
                    result[transform_module] = _patchParams(
                        _prodsysDesc(transform))
                elif cliargs['mode'] == 'substeps':
                    result[transform_module] = [{
                        'name': executor.name,
                        'alias': executor.substep,
                    } for executor in transform._executors]
                treated.append(transform_module)
        except Exception as e:
            # message typo fixed: "treate" -> "treat"
            msg.warning('Failed to treat transform {0} ({1}) - ignored'.format(
                transform_module, e))
            continue

    with open(cliargs['output'], 'w') as fp:
        json.dump(result, fp, indent=2)

    msg.info(
        'Successfully generated signature file {0} for transforms {1}'.format(
            cliargs['output'], json.dumps(treated)))

    sys.exit(0)