Example #1
0
def makeBkgInputCol(initialList, nBkgEvtsPerCrossing, correctForEmptyBunchCrossings):
    """Expand `initialList` of background input files so the job has enough events.

    The number of background events required is estimated from the number of
    signal events, the requested background events per bunch crossing and the
    number of simulated bunch crossings; the input list is then repeated as
    many times as needed (plus one extra repetition as safety margin).

    @param initialList                    list of background input file names
    @param nBkgEvtsPerCrossing            background events to overlay per crossing
    @param correctForEmptyBunchCrossings  if True, scale the number of crossings
                                          by the digitization/beam bunch-spacing
                                          ratio (colliding bunches only)
    @return the (possibly repeated) list of input file names
    """
    import math  # used below; imported locally in case module scope lacks it
    uberList = []
    refreshrate = 1.0

    # Number of signal events: from EvtMax if set, otherwise by counting
    # the entries of each configured hits input file.
    nSignalEvts = 1000
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    if (athenaCommonFlags.EvtMax.get_Value()>0):
        nSignalEvts = int(athenaCommonFlags.EvtMax.get_Value())
        digilog.info('Number of signal events (from athenaCommonFlags.EvtMax) = %s.', nSignalEvts )
    else:
        nSignalEvts = 0
        import PyUtils.AthFile as athFile
        for inFile in athenaCommonFlags.PoolHitsInput.get_Value():
            try:
                inputFile = athFile.fopen(inFile)
                nSignalEvts += int(inputFile.nentries)
                del inputFile
            except Exception as err:  # was a bare except, which also left 'err' undefined below
                digilog.warning("Unable to open file [%s]"%inFile)
                digilog.warning('caught:\n%s',err)
                import traceback
                traceback.print_exc()
        digilog.info('Number of signal events (read from files) = %s.', nSignalEvts )

    # Events per background file, read from the first file; fall back to
    # 5000 if it cannot be opened.
    nBkgEventsPerFile = 5000
    try:
        import PyUtils.AthFile as athFile
        inputFile = athFile.fopen(initialList[0])
        nBkgEventsPerFile = int(inputFile.nentries)
        digilog.info('Number of background events per file (read from file) = %s.', nBkgEventsPerFile )
        del inputFile
    except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
        import traceback
        traceback.print_exc()
        digilog.warning('Failed to count the number of background events in %s. Assuming 5000 - if this is an overestimate the job may die.', initialList[0])

    from Digitization.DigitizationFlags import digitizationFlags
    from AthenaCommon.BeamFlags import jobproperties
    Nbunches = 1 + digitizationFlags.finalBunchCrossing.get_Value() - digitizationFlags.initialBunchCrossing.get_Value()
    nbunches = int(Nbunches)
    if correctForEmptyBunchCrossings:
        # Keep only colliding crossings: scale by the ratio of the
        # digitization bunch spacing to the beam bunch spacing.
        nbunches = int(math.ceil(float(nbunches) * float(digitizationFlags.bunchSpacing.get_Value())/float(jobproperties.Beam.bunchSpacing.get_Value())))
    digilog.info('Simulating a maximum of %s colliding-bunch crossings (%s colliding+non-colliding total) per signal event', nbunches, Nbunches)
    nBkgEventsForJob = pileUpCalc(float(nSignalEvts), 1.0, float(nBkgEvtsPerCrossing), nbunches)
    digilog.info('Number of background events required: %s. Number of background events in input files: %s', nBkgEventsForJob, (nBkgEventsPerFile*len(initialList)) )
    numberOfRepetitionsRequired =float(nBkgEventsForJob)/float(nBkgEventsPerFile*len(initialList))
    # +1 repetition on top of the ceiling as a safety margin.
    NumberOfRepetitionsRequired = 1 + int(math.ceil(numberOfRepetitionsRequired))
    for i in range(0, NumberOfRepetitionsRequired):
        uberList+=initialList
    digilog.info('Expanding input list from %s to %s', len(initialList), len(uberList))
    return uberList
Example #2
0
 def getHitFileRunNumber(self,hitfile):
     """Retrieve the Run Number from the HIT file.

     First tries the AthFile metadata (f.run_numbers); for old-format
     files it falls back to parsing the output of the external
     dumpRunNumber.py script.  Returns -1 only if neither source set it.
     """
     #--------------------------------------------------
     # Check for the Run Number in the first Input file
     #--------------------------------------------------
     simRunNumber = -1
     import PyUtils.AthFile as af
     # pre-load the AthFile metadata cache to avoid re-peeking files
     af.server.load_cache('digitization-afcache.ascii')
     f = af.fopen(hitfile)
     if len(f.run_numbers)>0 :
         simRunNumber = f.run_numbers[0]
     else :
         logDigitizationFlags.debug("Old file format detected - using dumpRunNumber.py instead")
         myCommand = 'dumpRunNumber.py '+ hitfile
         # NOTE(review): 'commands' is Python-2-only (removed in Python 3);
         # a migration would use subprocess.getstatusoutput instead.
         import commands, re
         sc,out = commands.getstatusoutput(myCommand)
         if sc != 0:
             logDigitizationFlags.error('ERR: problem:\n%s',str(out) )
             raise SystemExit(sc)
         myOutput = '0'
         # scan the script output for a 'run number: N' line
         for l in out.splitlines():
             if re.match('^run number: .', l):
                 tempout = re.split('^run number: .',l)
                 if len(tempout) > 1:
                     myOutput = tempout[1].strip()
                 del tempout
         if len(myOutput) > 0 :
             simRunNumber = int(myOutput)
         else :
             logDigitizationFlags.info("Failed to find Run Number in hits file metadata.")
         ######################
     logDigitizationFlags.info('Found Run Number %s in hits file metadata.', str(simRunNumber) )
     return simRunNumber
    def __init__(self):
        """Peek at the first configured input file and cache its metadata.

        Fills private flags (data/AF2/DAOD/TRUTH3), the MC run number,
        the MC channel number and the generators string.  If no input
        files are configured yet, the defaults below are kept.
        """
        # defaults used when metadata cannot be read
        self.__isData = False
        self.__isAF2 = False
        self.__isDAOD = False
        self.__isTruth3 = False
        self.__Generators = "Unknown"

        self.__mc_runNumber = -1
        self.__mcChannel = -1
        from AthenaCommon.AppMgr import ServiceMgr
        # NOTE(review): 'logging' is expected to be imported at module level
        recoLog = logging.getLogger('XAMPP I/O')
        if len(ServiceMgr.EventSelector.InputCollections) == 0:
            recoLog.warning("No infiles were configured thus far")
            return
        from PyUtils import AthFile
        af = AthFile.fopen(ServiceMgr.EventSelector.InputCollections[0])
        self.__isData = "data" in af.fileinfos['tag_info']['project_name']
        # AF2 if any tag_info key mentions AtlfastII/Fast (Python-2 iterkeys)
        self.__isAF2 = not self.isData() and 'tag_info' in af.fileinfos and len(
            [key for key in af.fileinfos['tag_info'].iterkeys() if 'AtlfastII' in key or 'Fast' in key]) > 0
        self.__mc_runNumber = af.fileinfos["run_number"][0] if len(af.fileinfos["run_number"]) > 0 else -1
        self.__mcChannel = af.fileinfos["mc_channel_number"][0] if not self.isData() and len(af.fileinfos["mc_channel_number"]) > 0 else -1

        # stream name looks like 'StreamDAOD_XXX' for derivations
        self.__isDAOD = "DAOD" in af.fileinfos['stream_names'][0]
        self.__isTruth3 = "TRUTH3" in af.fileinfos['stream_names'][0]
        try:
            self.__Generators = af.fileinfos['det_descr_tags']['generators']
        except (KeyError, AttributeError):
            recoLog.warning("Failed to read the 'generators' metadata field")
            self.__Generators = "Unknown"
Example #4
0
 def is_rdo(self, input_files=None):
     """Return True if any of the first self._nfiles input files looks like RDO.

     A file counts as RDO when it is a bytestream file, carries a stream
     named 'StreamRDO*', or is an old 'Stream1' file whose event data
     items end in RDO_Container/RawChannelContainer/RdoContainer.
     Falls through (returning None, which is falsy) when nothing matches;
     unreadable files are logged and skipped.
     """
     if input_files is None:
         input_files=self.input_files
     if isinstance(input_files, basestring):
         input_files=[input_files]
     for fname in input_files[:self._nfiles]:
         import PyUtils.AthFile as af
         try:
             infos = af.fopen(fname).infos
             if infos['file_type'] == 'bs':
                 return True
             for stream_name in infos['stream_names']:
                 if stream_name.startswith('StreamRDO'):
                     return True
                 if stream_name.startswith('Stream1'):
                     # group event data items by container type name
                     evtdata=collections.defaultdict(list)
                     for k,v in infos['eventdata_items']:
                         evtdata[k].append(v)
                     evtdata= dict(evtdata)
                     for k in evtdata.keys():
                         if k.endswith(('RDO_Container',
                                        'RawChannelContainer',
                                        'RdoContainer',)):
                             return True
         except Exception as err:  # 'except Exception, err' is Python-2-only syntax
             self.msg.info('caught:\n%s', err)
Example #5
0
def getSpecialConfiguration(flags):
    """Return a dict of Special configuration as parsed from flags.Input.Files.

    The 'specialConfiguration' tag_info field of the first input file is a
    semicolon-separated list of 'key=value' directives; a bare entry (no '=')
    is treated as 'preInclude=<entry>'.  Empty and 'NONE' entries are skipped.
    'preInclude' directives are collected separately (currently not applied,
    see the FIXME below); everything else goes into the returned dict.
    """
    if len(flags.Input.Files) > 1:
        log.info("Multiple input files. Using the first for Digitization special configuration.")
    log.info("Obtaining Digitization special configuration from %s", flags.Input.Files[0])
    File = AthFile.fopen(flags.Input.Files[0])
    # extract the special config list
    tag_info = File.infos.get("tag_info", {})
    SpecialCfg = tag_info.get("specialConfiguration", "").split(";")
    # fill containers
    preIncludes = []
    out = {}
    for KeyEqValue in SpecialCfg:
        # Ignore empty or "NONE" substrings, e.g. from consecutive or trailing semicolons
        if not KeyEqValue or KeyEqValue.upper() == "NONE":
            continue
        # If not in key=value format, treat as v, with k="preInclude"
        if "=" not in KeyEqValue:
            KeyEqValue = "preInclude=" + KeyEqValue
        # Handle k=v directives; split only on the FIRST '=' so that values
        # containing '=' no longer break the two-element unpack.
        key, value = KeyEqValue.split("=", 1)
        if key == "preInclude":
            preIncludes += value.split(",")
        else:
            out[key] = value
    # FIXME includes not migrated
    # from AthenaCommon.Include import include
    # for inc in preIncludes:
    #     include(inc)
    return out
Example #6
0
def extract_items(pool_file, verbose=True, items_type='eventdata'):
    """Helper function to read a POOL file and extract the item-list from the
    DataHeader content.
    @params
      `pool_file`  the name of the pool file to inspect
      `verbose`    self-explanatory
      `items_type` what kind of items one is interested in
                   allowed values: 'eventdata' 'metadata'
    Note: this function is actually executed in a forked sub-process
          if `fork` is True
    """
    _allowed_values = ('eventdata',
                       'metadata',)
    if not items_type in _allowed_values:
        err = "".join([
            "invalid argument for 'items_type'. ",
            "got: [%s] " % items_type,
            "(allowed values: %r)" % _allowed_values
            ])
        raise ValueError, err
    import PyUtils.AthFile as af
    f = af.fopen(pool_file)
    key = '%s_items' % items_type
    items = f.fileinfos[key]
    if items is None:
        items = []
    return items
Example #7
0
    def __init__(self):
        """Peek at the first configured input file and cache its metadata.

        Sets private flags (data/AF2/DAOD/TRUTH3), the MC run number and
        the MC channel number; the defaults below are kept when no input
        files are configured.
        """
        # defaults used when metadata cannot be read
        self.__isData = False
        self.__isAF2 = False
        self.__isDAOD = False
        self.__isTruth3 = False

        self.__mc_runNumber = -1
        self.__mcChannel = -1
        from AthenaCommon.AppMgr import ServiceMgr
        if len(ServiceMgr.EventSelector.InputCollections) == 0:
            print "WARNING: No infiles were configured thus far"
            return
        from PyUtils import AthFile
        af = AthFile.fopen(ServiceMgr.EventSelector.InputCollections[0])

        self.__isData = "data" in af.fileinfos['tag_info']['project_name']
        # AF2 if any tag_info key mentions AtlfastII/Fast (Python-2 iterkeys)
        self.__isAF2 = not self.isData(
        ) and 'tag_info' in af.fileinfos and len([
            key for key in af.fileinfos['tag_info'].iterkeys()
            if 'AtlfastII' in key or 'Fast' in key
        ]) > 0
        self.__mc_runNumber = af.fileinfos["run_number"][0] if len(
            af.fileinfos["run_number"]) > 0 else -1
        self.__mcChannel = af.fileinfos["mc_channel_number"][
            0] if not self.isData() and len(
                af.fileinfos["mc_channel_number"]) > 0 else -1

        # stream name looks like 'StreamDAOD_XXX' for derivations
        self.__isDAOD = "DAOD" in af.fileinfos['stream_names'][0]
        self.__isTruth3 = "TRUTH3" in af.fileinfos['stream_names'][0]
def extract_items(pool_file, verbose=True, items_type='eventdata'):
    """Helper function to read a POOL file and extract the item-list from the
    DataHeader content.
    @params
      `pool_file`  the name of the pool file to inspect
      `verbose`    self-explanatory
      `items_type` what kind of items one is interested in
                   allowed values: 'eventdata' 'metadata'
    Note: this function is actually executed in a forked sub-process
          if `fork` is True
    """
    _allowed_values = ('eventdata',
                       'metadata',)
    if not items_type in _allowed_values:
        err = "".join([
            "invalid argument for 'items_type'. ",
            "got: [%s] " % items_type,
            "(allowed values: %r)" % _allowed_values
            ])
        raise ValueError, err
    import PyUtils.AthFile as af
    f = af.fopen(pool_file)
    key = '%s_items' % items_type
    items = f.fileinfos[key]
    if items is None:
        items = []
    return items
Example #9
0
 def is_rdo(self, input_files=None):
     """Return True if any of the first self._nfiles input files looks like
     an RDO file: bytestream type, a 'StreamRDO*' stream, or an old
     'Stream1' file with *RDO_Container/*RawChannelContainer/*RdoContainer
     event data items.  Falls through (None, falsy) when nothing matches."""
     if input_files is None:
         input_files = self.input_files
     if isinstance(input_files, basestring):
         input_files = [input_files]
     for fname in input_files[:self._nfiles]:
         import PyUtils.AthFile as af
         try:
             infos = af.fopen(fname).infos
             if infos['file_type'] == 'bs':
                 return True
             for stream_name in infos['stream_names']:
                 if stream_name.startswith('StreamRDO'):
                     return True
                 if stream_name.startswith('Stream1'):
                     # group event data items by container type name
                     evtdata = collections.defaultdict(list)
                     for k, v in infos['eventdata_items']:
                         evtdata[k].append(v)
                     evtdata = dict(evtdata)
                     for k in evtdata.keys():
                         if k.endswith((
                                 'RDO_Container',
                                 'RawChannelContainer',
                                 'RdoContainer',
                         )):
                             return True
         except Exception, err:
             # unreadable files are logged and skipped
             self.msg.info('caught:\n%s', err)
def isData():
    """Return True if the first configured input file is a data (not MC) file.

    Decided from the 'project_name' tag_info of the first file in
    ServiceMgr.EventSelector.InputCollections; returns False (after a
    warning) when no input files have been configured yet.
    """
    from AthenaCommon.AppMgr import ServiceMgr
    if len(ServiceMgr.EventSelector.InputCollections) == 0:
        # print-as-function behaves identically here in Python 2 and 3,
        # unlike the original Python-2-only print statement
        print("WARNING: No infiles were configured thus far")
        return False
    from PyUtils import AthFile
    af = AthFile.fopen(ServiceMgr.EventSelector.InputCollections[0])
    return "data" in af.fileinfos['tag_info']['project_name']
def isOnDAOD():
    from AthenaCommon.AppMgr import ServiceMgr
    from PyUtils import AthFile
    if len(ServiceMgr.EventSelector.InputCollections) == 0:
        print "WARNING: No inputfiles given"
        return True
    af = AthFile.fopen(ServiceMgr.EventSelector.InputCollections[0]
                       )  # opens the first file from the InputCollections list
    # af.fileinfos #this is a dict of dicts, take a look at what's available (e.g. do af.fileinfos.keys() to see the main keys)! Below are some examples:
    streamName = af.fileinfos[
        'stream_names']  # will be something like 'StreamDAOD_XXX' if a derivation
    return "DAOD" in streamName[0]
Example #12
0
def main(args):
    """simple command-line utility wrapping PyUtils.AthFile.fopen

    For each file in args.files (tilde/env-var expanded) prints a summary
    of the AthFile metadata; returns a non-zero exitcode if any file
    failed to open.
    """
    exitcode = 0
    fnames = args.files
    if isinstance(fnames, basestring):
        fnames = [fnames]

    import sys
    import os
    import os.path as osp
    # expand '~' and environment variables in each file name
    for i, f in enumerate(fnames):
        fnames[i] = osp.expandvars(osp.expanduser(f))

    import PyUtils.AthFile as af
    msg = af.msg
    for fname in fnames:
        try:
            f = af.fopen(fname, evtmax=args.evtmax)
            msg.info(':' * 80)
            msg.info('::::: summary :::::')
            fmt = ' - %-15s: %s'
            print fmt % ('file md5', f.infos['file_md5sum'])
            print fmt % ('file name', f.infos['file_name'])
            print fmt % ('file type', f.infos['file_type'])
            print fmt % ('file guid', f.infos['file_guid'])
            print fmt % ('nentries', f.infos['nentries'])
            print fmt % ('run number', f.infos['run_number'])
            print fmt % ('run type', f.infos['run_type'])
            print fmt % ('evt number', f.infos['evt_number'])
            print fmt % ('evt type', f.infos['evt_type'])
            print fmt % ('lumi block', f.infos['lumi_block'])
            print fmt % ('beam energy', f.infos['beam_energy'])
            print fmt % ('beam type', f.infos['beam_type'])
            print fmt % ('stream tags', f.infos['stream_tags'])
            print fmt % ('stream names', f.infos['stream_names'])
            print fmt % ('geometry', f.infos['geometry'])
            print fmt % ('conditions tag', f.infos['conditions_tag'])
            # metadata may be a dict; only its keys are summarised
            _metadata = f.infos['metadata']
            _metadata = _metadata.keys() if isinstance(_metadata,
                                                       dict) else None
            print fmt % ('meta data', _metadata)

            msg.info(':' * 80)
        except Exception, e:
            # report the failure but keep processing the remaining files
            msg.error("Caught exception [%s] !!", str(e.__class__))
            msg.error("What:\n%s\n%s\n%s", e,
                      sys.exc_info()[0],
                      sys.exc_info()[1])
            exitcode = 1
            pass

        # NOTE(review): the snippet is truncated here in the scraped source
        except:
Example #13
0
def _setup():
    """Populate the module-global `inputFileSummary` dict from the first
    readable input file's AthFile metadata (minimal hard-coded summary
    when running online)."""

    global inputFileSummary
    import os
    from RecExConfig.RecFlags import rec
    import AthenaCommon.Logging as L
    from AthenaCommon.Resilience import treatException

    #define a logger
    msg = L.logging.getLogger('inputFilePeeker' )
    msg.info("Executing   inputFilePeeker.py")

    # special setup for online reconstruction so far
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    if athenaCommonFlags.isOnline():
        # set minimal items of inputFileSummary
        inputFileSummary={'file_type':'bs',
                          'evt_type':['IS_DATA','IS_ATLAS','IS_PHYSICS'],
                          'TagStreamsRef':''}
        return

    #get input file name
    from RecExConfig.RecoFunctions import InputFileNames
    inFiles=InputFileNames()
    if len(inFiles) < 1:
        msg.error("No input files specified yet! Cannot do anything.")

    #create and fill inputFileSummary (DC: looping through input files if necessary)
    import PyUtils.AthFile as athFile
    failed_trials = 0
    for inFile in inFiles:
        try:
            fi = athFile.fopen(inFile)
            inputFileSummary = fi.fileinfos
        except Exception,err:
            # unreadable file: report it and try the next one
            msg.warning("Unable to open file [%s]"%inFile)
            msg.warning('caught:\n%s',err)
            import traceback
            traceback.print_exc()
            continue

        ## Making sure that stream_names is always defined
        if not inputFileSummary.has_key('stream_names'):
            msg.warning("AthFile didn't find key 'stream_names'. Recovering it but that's unexpected.")
            inputFileSummary['stream_names']=[]

        #First try to catch the no entries case
        if inputFileSummary['stream_names'] == []:
            try:
                #print fi.infos['metadata_items'][0][1]
                inputFileSummary['stream_names'] = [fi.infos['metadata_items'][0][1]]
            except Exception, err:
                msg.info("Unable to find stream names in file metadata.")
def main(args):
    """simple command-line utility wrapping PyUtils.AthFile.fopen

    For each file in args.files (tilde/env-var expanded) prints a summary
    of the AthFile metadata; returns a non-zero exitcode if any file
    failed to open.
    """
    exitcode = 0
    fnames = args.files
    if isinstance(fnames, basestring):
        fnames = [fnames]

    import sys
    import os
    import os.path as osp
    # expand '~' and environment variables in each file name
    for i,f in enumerate(fnames):
        fnames[i] = osp.expandvars(osp.expanduser(f))

    import PyUtils.AthFile as af
    msg = af.msg
    for fname in fnames:
        try:
            f = af.fopen(fname, evtmax=args.evtmax)
            msg.info(':'*80)
            msg.info('::::: summary :::::')
            fmt = ' - %-15s: %s'
            print fmt % ('file md5',       f.infos['file_md5sum'])
            print fmt % ('file name',      f.infos['file_name'])
            print fmt % ('file type',      f.infos['file_type'])
            print fmt % ('file guid',      f.infos['file_guid'])
            print fmt % ('nentries',       f.infos['nentries'])
            print fmt % ('run number',     f.infos['run_number'])
            print fmt % ('run type',       f.infos['run_type'])
            print fmt % ('evt number',     f.infos['evt_number'])
            print fmt % ('evt type',       f.infos['evt_type'])
            print fmt % ('lumi block',     f.infos['lumi_block'])
            print fmt % ('beam energy',    f.infos['beam_energy'])
            print fmt % ('beam type',      f.infos['beam_type'])
            print fmt % ('stream tags',    f.infos['stream_tags'])
            print fmt % ('stream names',   f.infos['stream_names'])
            print fmt % ('geometry',       f.infos['geometry'])
            print fmt % ('conditions tag', f.infos['conditions_tag'])
            # metadata may be a dict; only its keys are summarised
            _metadata = f.infos['metadata']
            _metadata = _metadata.keys() if isinstance(_metadata,dict) else None
            print fmt % ('meta data',      _metadata)

            msg.info(':'*80)
        except Exception, e:
            # report the failure but keep processing the remaining files
            msg.error("Caught exception [%s] !!", str(e.__class__))
            msg.error("What:\n%s\n%s\n%s",e,
                      sys.exc_info()[0],
                      sys.exc_info()[1])
            exitcode = 1
            pass

        # NOTE(review): the snippet is truncated here in the scraped source
        except :
Example #15
0
 def is_usr(self, input_files=None):
     """Return True if any of the first self._nfiles input files carries a
     stream whose name starts with 'StreamUSR'; unreadable files are
     logged and skipped."""
     files = input_files if input_files is not None else self.input_files
     if isinstance(files, basestring):
         files = [files]
     for fname in files[:self._nfiles]:
         import PyUtils.AthFile as af
         try:
             file_infos = af.fopen(fname).infos
             if any(name.startswith(('StreamUSR', ))
                    for name in file_infos['stream_names']):
                 return True
         except Exception as err:
             self.msg.info('caught:\n%s', err)
Example #16
0
 def is_tag(self, input_files=None):
     """Return True if any of the first self._nfiles input files carries a
     stream whose name starts with 'StreamTAG' or 'TAG'; unreadable files
     are logged and skipped."""
     files = input_files if input_files is not None else self.input_files
     if isinstance(files, basestring):
         files = [files]
     for fname in files[:self._nfiles]:
         import PyUtils.AthFile as af
         try:
             file_infos = af.fopen(fname).infos
             if any(name.startswith(('StreamTAG', 'TAG'))
                    for name in file_infos['stream_names']):
                 return True
         except Exception as err:
             self.msg.info('caught:\n%s', err)
def isAF2():
    """Return True if the first input file was produced with fast simulation
    (AtlfastII), judged from its 'tag_info' metadata keys.

    Returns False for data, when no input files are configured, or when no
    fast-simulation marker is found in the metadata.
    """
    if isData(): return False
    # currently the best check we know about
    from AthenaCommon.AppMgr import ServiceMgr
    from PyUtils import AthFile
    if len(ServiceMgr.EventSelector.InputCollections) == 0:
        print("WARNING: No infiles were configured thus far")
        return False
    af = AthFile.fopen(ServiceMgr.EventSelector.InputCollections[0])
    if 'tag_info' in af.fileinfos:
        # iterating the dict directly replaces Python-2-only iterkeys()
        for key in af.fileinfos['tag_info']:
            if 'AtlfastII' in key or 'Fast' in key: return True
    else:
        # proper diagnostic instead of the previous joke/profane message
        print("WARNING: no 'tag_info' metadata found - cannot determine AF2 status")
    return False
Example #18
0
def extract_stream_names(fname):
    """Return the stream names ('StreamESD', 'StreamAOD', ...) recorded in
    the metadata of a given POOL file.

    @params:
     `fname`     the filename of the POOL file to inspect (can be LFN or PFN)

    example:
     >>> import PyUtils.PoolFile as pf
     >>> streams = pf.extract_stream_names ('ESD.pool.root')
     >>> print streams
     ['StreamESD']
    """
    import PyUtils.AthFile as af
    return af.fopen(fname).fileinfos['stream_names']
def extract_stream_names(fname):
    """find the stream names ('StreamESD', 'StreamAOD',...) contained in a
       given POOL file
    @params:
     `fname`     the filename of the POOL file to inspect (can be LFN or PFN)

    example:
     >>> import PyUtils.PoolFile as pf
     >>> streams = pf.extract_stream_names ('ESD.pool.root')
     >>> print streams
     ['StreamESD']
    """
    # delegate the metadata peek to AthFile and return the recorded names
    import PyUtils.AthFile as af
    f = af.fopen(fname)
    return f.fileinfos['stream_names']
Example #20
0
def getAthFile():
    """Open the first evgen input file with AthFile and return the handle.

    Returns None when peeking is disabled via the G4ATLAS_SKIPFILEPEEK
    environment variable, when no evgen input is configured, or when the
    file cannot be opened.  Skipping the peek should only be done in
    production jobs, to avoid spoiling performance on some systems.
    """
    inputAthFileObject = None
    import os
    # os.environ.get() collapses the previous 'in' + lookup pair; an unset
    # or empty variable keeps the peek enabled, exactly as before.
    if not os.environ.get('G4ATLAS_SKIPFILEPEEK'):
        from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
        if athenaCommonFlags.PoolEvgenInput.statusOn:
            try:
                import PyUtils.AthFile as af
                inputAthFileObject = af.fopen(
                    athenaCommonFlags.PoolEvgenInput()[0])
            except Exception:  # narrowed from bare except: don't swallow SystemExit
                simMDlog.warning("AthFile failed to open %s",
                                 athenaCommonFlags.PoolEvgenInput()[0])
    else:
        simMDlog.info(
            "G4ATLAS_SKIPFILEPEEK environment variable present, so skipping all input file peeking."
        )
    return inputAthFileObject
Example #21
0
def _read_file_impl(cfg, hints):
    """Configure `cfg` for reading based on the type of its first input file.

    Disables all extra/read/write steps, peeks at the first input file
    with AthFile and switches on the matching rec['readXXX'] flag.
    Raises RuntimeError for an unknown file type; returns `cfg`.
    """
    cfg.disable_extra_steps()
    cfg.disable_read_steps()
    cfg.disable_write_steps()

    fname = cfg.input_files[0]
    import PyUtils.AthFile as af
    f = af.fopen(fname)

    file_type = f.infos['file_type']
    if file_type == 'pool':
        # probe the pool content type in order; evgen needs no read flag
        if cfg.is_evgen():
            pass
        elif cfg.is_rdo():
            cfg.rec['readRDO'] = True
        elif cfg.is_esd():
            cfg.rec['readESD'] = True
        elif cfg.is_aod():
            cfg.rec['readAOD'] = True
        elif cfg.is_tag():
            cfg.rec['readTAG'] = True
    elif file_type == 'bs':
        # bytestream input is read as RDO
        cfg.rec['readRDO'] = True
    else:
        raise RuntimeError('unknown AthFile.file_type [%s]' % (file_type, ))

    return cfg
 def get_runs_from_tagfile(self, fname):
     """Return the set of run numbers stored in TAG file `fname`.

     Results are memoised in self.file_cache; non-TAG files yield [].
     The TAG tree itself is read directly with ROOT, since the AthFile
     metadata only carries the run number of the first TAG entry.
     """
     # check own cache for this file
     if fname in self.file_cache.keys():
         return self.file_cache[fname]
     # check file type with AthFile - this should avoid reopening files more times than necessary
     msg.debug("Checking file %s" % fname)
     import PyUtils.AthFile as athFile
     fileinfos = athFile.fopen(fname).fileinfos
     if not 'TAG' in fileinfos['stream_names']:
         return []
     # this is a TAG file, open it and read all run numbers
     # fileinfos have only the run number from the first TAG
     import PyUtils.Helpers as H
     with H.restricted_ldenviron(projects=['AtlasCore']):
         import re
         # silence known-harmless ROOT warnings while the file is open
         with H.ShutUp(filters=[
                 re.compile(
                     'TClass::TClass:0: RuntimeWarning: no dictionary for.*'
                 ),
                 re.compile('.*duplicate entry.*')
         ]):
             msg.debug("Opening TAG file %s" % fname)
             import PyUtils.RootUtils as ru
             f = ru.import_root().TFile.Open(fname, "read")
             if f is None or not f:
                 msg.warning("Failed to open TAG file %s" % fname)
                 return []
             coll_tree = f.Get('POOLCollectionTree')
             run_numbers = set()
             if coll_tree is not None:
                 # collect the run attribute of every TAG entry
                 for row in xrange(0, coll_tree.GetEntries()):
                     coll_tree.GetEntry(row)
                     run_numbers.add(getattr(coll_tree, self.run_attr_name))
                 del coll_tree
             f.Close()
             del f
             self.file_cache[fname] = run_numbers
             msg.info("TAG file: %s, found runs: %s" %
                      (fname, str(run_numbers)))
             return run_numbers
Example #23
0
 def is_evgen(self, input_files=None):
     """Return True if any of the first self._nfiles input files looks like
     an event-generator file: a 'StreamEvGen'/'StreamEVGEN' stream, or an
     old 'Stream1' file with a 'GEN_EVENT' McEventCollection.  Falls
     through (None, falsy) when nothing matches."""
     if input_files is None:
         input_files=self.input_files
     if isinstance(input_files, basestring):
         input_files=[input_files]
     for fname in input_files[:self._nfiles]:
         import PyUtils.AthFile as af
         try:
             infos = af.fopen(fname).infos
             for stream_name in infos['stream_names']:
                 if stream_name.startswith(('StreamEvGen', 'StreamEVGEN')):
                     return True
                 if stream_name.startswith('Stream1'):
                     # group event data items by container type name
                     evtdata=collections.defaultdict(list)
                     for k,v in infos['eventdata_items']:
                         evtdata[k].append(v)
                     evtdata = dict(evtdata)
                     genevt = evtdata.get('McEventCollection', [])
                     if 'GEN_EVENT' in genevt:
                         return True
         except Exception, err:
             # unreadable files are logged and skipped
             self.msg.info('caught:\n%s', err)
Example #24
0
 def is_evgen(self, input_files=None):
     """Return True if any of the first self._nfiles input files looks like
     an event-generator file: a 'StreamEvGen'/'StreamEVGEN' stream, or an
     old 'Stream1' file with a 'GEN_EVENT' McEventCollection.  Falls
     through (None, falsy) when nothing matches."""
     if input_files is None:
         input_files = self.input_files
     if isinstance(input_files, basestring):
         input_files = [input_files]
     for fname in input_files[:self._nfiles]:
         import PyUtils.AthFile as af
         try:
             infos = af.fopen(fname).infos
             for stream_name in infos['stream_names']:
                 if stream_name.startswith(('StreamEvGen', 'StreamEVGEN')):
                     return True
                 if stream_name.startswith('Stream1'):
                     # group event data items by container type name
                     evtdata = collections.defaultdict(list)
                     for k, v in infos['eventdata_items']:
                         evtdata[k].append(v)
                     evtdata = dict(evtdata)
                     genevt = evtdata.get('McEventCollection', [])
                     if 'GEN_EVENT' in genevt:
                         return True
         except Exception, err:
             # unreadable files are logged and skipped
             self.msg.info('caught:\n%s', err)
Example #25
0
def _read_file_impl(cfg, hints):
    """Set up `cfg` for reading, driven by the type of its first input file.

    All extra/read/write steps are disabled first; the first input file is
    then peeked with AthFile and the matching rec['readXXX'] flag enabled.
    Unknown file types raise RuntimeError.  Returns `cfg`.
    """
    cfg.disable_extra_steps()
    cfg.disable_read_steps()
    cfg.disable_write_steps()

    first_input = cfg.input_files[0]
    import PyUtils.AthFile as af
    peeked = af.fopen(first_input)

    ftype = peeked.infos['file_type']
    if ftype == 'pool':
        # probe the pool content type in order; evgen needs no read flag
        if cfg.is_evgen():
            pass
        elif cfg.is_rdo():
            cfg.rec['readRDO'] = True
        elif cfg.is_esd():
            cfg.rec['readESD'] = True
        elif cfg.is_aod():
            cfg.rec['readAOD'] = True
        elif cfg.is_tag():
            cfg.rec['readTAG'] = True
    elif ftype == 'bs':
        # bytestream input is read as RDO
        cfg.rec['readRDO'] = True
    else:
        raise RuntimeError('unknown AthFile.file_type [%s]' % (ftype,))

    return cfg
Example #26
0
    def getDSID(self, runArgs):
        """
            Finds the DSID of the dataset being used, storing it in
            self.curr_DSID.  Tries, in order: the run_numbers metadata of
            the first input file, runArgs.RunNumber, and finally the
            'mcNN.<DSID>' pattern in the first input file name.

            Note: It must be specified as a command line option for running on the grid
            For local running it is possible to configure it from the container name

            """
        #try getting the DSID from the RunNumber variable in the file
        import PyUtils.AthFile as af
        f = af.fopen(self.fList[0])
        if len(f.run_numbers) > 0:
            self.curr_DSID = f.run_numbers[0]
        else:
            import re
            try:
                #try to get the DSID from runArgs
                self.curr_DSID = runArgs.RunNumber
            except AttributeError:  # narrowed from bare except: only a missing attribute is expected
                #if cannot access runargs parse input file name for DSID
                if len(self.fList) != 0:
                    files = self.fList
                    firstFile = files[0].split(".")

                    # the DSID follows the 'mcNN' token in the dotted file name
                    for index, x in enumerate(firstFile):
                        if re.search('mc[1234567890]{2}', x) is not None:
                            self.curr_DSID = firstFile[index + 1]
                    try:
                        int(self.curr_DSID)
                    except (ValueError, TypeError, AttributeError):  # narrowed from bare except
                        self.hforLog.error(
                            "Could not find DSID from filename. The Hfor tool will not be correctly configured! Have you obeyed the naming convention?"
                        )
                        self.curr_DSID = 0

                else:
                    self.hforLog.error(
                        "No DSID found. Is the naming convention correct?")
Example #27
0
def GetCurrentStreamName(msg, athFile=None):
    """ Helper to decide where to get the input stream name from."""
    # First, try to get the info from the RecFlags
    try:
        from RecExConfig.RecFlags import rec
        msg.debug("Got the stream name from the RecFlags: %s" %
                  rec.mergingStreamName())
        streamName = rec.mergingStreamName()
        # an empty merging stream name maps to a placeholder
        if streamName == "": streamName = "unknownStream"
        return streamName
    except ImportError:
        msg.info(
            "Couldn't get input stream name from the RecFlags... trying AthFile directly."
        )
        pass
    # Import the reading of in-file metadata
    # (use the caller-supplied AthFile handle when available)
    if athFile:
        return GetInputStreamNameFromMetaDataItemList(
            athFile.fileinfos["metadata_items"])
    from PyUtils import AthFile
    # NOTE(review): relies on a module-level 'svcMgr' being in scope - confirm
    af = AthFile.fopen(svcMgr.EventSelector.InputCollections[0])
    return GetInputStreamNameFromMetaDataItemList(
        af.fileinfos["metadata_items"])
Example #28
0
  421156,
  421157,
  421158,
  700051,
  700052,
  700053,
  700054,
  ]

import PyUtils.AthFile as af
from AthenaCommon.AthenaCommonFlags import athenaCommonFlags

# Peek at the file -- this depends on what kind of file we have
from RecExConfig.ObjKeyStore import objKeyStore
if objKeyStore.isInInput( "McEventCollection", "GEN_EVENT" ):
    f = af.fopen(athenaCommonFlags.FilesInput()[0])
elif objKeyStore.isInInput( "McEventCollection", "TruthEvent"):
    f = af.fopen(athenaCommonFlags.FilesInput()[0])        
else:
    f = af.fopen(athenaCommonFlags.PoolAODInput()[0])
if len(f.mc_channel_number) > 0:
  if(int(f.mc_channel_number[0]) in DSIDList):
    from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__HadronOriginClassifier
    DFCommonhadronorigintool = DerivationFramework__HadronOriginClassifier(name="DFCommonHadronOriginClassifier",DSID=int(f.mc_channel_number[0]))
    ToolSvc += DFCommonhadronorigintool
    from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__HadronOriginDecorator
    DFCommonhadronorigindecorator = DerivationFramework__HadronOriginDecorator(name = "DFCommonHadronOriginDecorator")
    DFCommonhadronorigindecorator.ToolName = DFCommonhadronorigintool
    ToolSvc += DFCommonhadronorigindecorator
    DerivationFrameworkJob += CfgMgr.DerivationFramework__CommonAugmentation("HFHadronsCommonKernel",
                                                                                 AugmentationTools = [DFCommonhadronorigindecorator]
Exemple #29
0
def buildDict(inputtype, inputfile):
    """Build a dictionary of KEY:VALUE pairs from the Simulation MetaData of a file.

    Parameters:
      inputtype -- label for the kind of input (used in log messages only)
      inputfile -- path to the POOL file whose metadata is read

    Returns:
      (metadatadict, ok) -- ok is False when the file could not be opened,
      no IOVDbGlobalTag could be found, or the dictionary came out empty.

    Raises:
      SystemExit -- when the file's evt_type is not IS_SIMULATION.
    """
    import re
    import PyUtils.AthFile as af
    try:
        f = af.fopen(inputfile)
    except AssertionError:
        logDigitizationReadMetadata.error("Failed to open input file: %s",
                                          inputfile)
        return dict(), False
    # Digitization only makes sense on simulated HITS: refuse other inputs.
    # NOTE(review): re.match is called with the file's evt_type as the
    # *pattern* and 'IS_SIMULATION' as the string; kept as-is since the two
    # coincide for the expected value.
    if 'evt_type' in f.infos:
        if not re.match(str(f.infos['evt_type'][0]), 'IS_SIMULATION'):
            logDigitizationReadMetadata.error(
                'This input file has incorrect evt_type: %s',
                str(f.infos['evt_type']))
            logDigitizationReadMetadata.info(
                'Please make sure you have set input file metadata correctly.')
            logDigitizationReadMetadata.info(
                'Consider using the job transforms for earlier steps if you aren\'t already.'
            )
            # Exit gracefully rather than produce nonsense output.
            raise SystemExit(
                "Input file evt_type is incorrect, please check your g4sim and evgen jobs."
            )
    else:
        logDigitizationReadMetadata.warning(
            'Could not find \'evt_type\' key in athfile.infos. Unable to that check evt_type is correct.'
        )

    ## Not part of building the metadata dictionary, but this is the
    ## most convenient time to access this information.
    doSpecialConfiguration(f)

    metadatadict = dict()
    # Safety checks before trying to access the metadata payload.
    if 'metadata' in f.infos:
        if '/Simulation/Parameters' in f.infos['metadata']:
            metadatadict = f.infos['metadata']['/Simulation/Parameters']
            if isinstance(metadatadict, list):
                logDigitizationReadMetadata.warning(
                    "%s inputfile: %s contained %s sets of Simulation Metadata. Using the final set in the list.",
                    inputtype, inputfile, len(metadatadict))
                metadatadict = metadatadict[-1]
        ## Get the IOVDbGlobalTag, falling back from the /TagInfo folder to
        ## the file-level conditions_tag.
        if 'IOVDbGlobalTag' not in metadatadict:
            try:
                assert f.fileinfos['metadata']['/TagInfo'][
                    'IOVDbGlobalTag'] is not None
                metadatadict['IOVDbGlobalTag'] = f.fileinfos['metadata'][
                    '/TagInfo']['IOVDbGlobalTag']
            except Exception:  # missing key or failed assert: try next source
                try:
                    assert f.fileinfos['conditions_tag'] is not None
                    metadatadict['IOVDbGlobalTag'] = f.fileinfos[
                        'conditions_tag']
                except Exception:
                    logDigitizationReadMetadata.warning(
                        "Failed to find IOVDbGlobalTag.")
                    return metadatadict, False

    ### Hack to get HGTD into the list of simulated detectors, as it is
    ### apparently not in the MetaData of the HITS file.  Lookups are guarded
    ### because 'SimulatedDetectors' may legitimately be absent at this point
    ### (a default is only filled in by the older-hit-file patch below) --
    ### the previous unguarded access raised KeyError on such files.
    logDigitizationReadMetadata.info("Jose : SimulatedDetectors = %s",
                                     metadatadict.get('SimulatedDetectors'))
    logDigitizationReadMetadata.info("Jose : eventdata_items = %s",
                                     f.infos['eventdata_items'])
    if 'eventdata_items' in f.infos and 'SimulatedDetectors' in metadatadict:
        for entry in f.infos['eventdata_items']:
            if entry[1] == 'HGTDHits':
                if 'HGTD' not in metadatadict['SimulatedDetectors']:
                    metadatadict['SimulatedDetectors'] += ['HGTD']

    logDigitizationReadMetadata.info(
        "Jose : SimulatedDetectors after hack = %s",
        metadatadict.get('SimulatedDetectors'))

    Nkvp = len(metadatadict)
    ## Dictionary should not be empty
    if Nkvp == 0:
        logDigitizationReadMetadata.error(
            "Found %s KEY:VALUE pairs in %s Simulation MetaData.", Nkvp,
            inputtype)
        return metadatadict, False
    else:
        ## Patch for older hit files: supply defaults for keys that newer
        ## simulation jobs write out.
        if 'hitFileMagicNumber' not in metadatadict:
            metadatadict['hitFileMagicNumber'] = 0
            logDigitizationReadMetadata.debug(
                "hitFileMagicNumber key missing from %s Simulation MetaData Dictionary. Adding dummy entry.",
                inputtype)
        if 'SimulatedDetectors' not in metadatadict:
            if 'eventdata_items' in f.infos:
                metadatadict[
                    'SimulatedDetectors'] = hitColls2SimulatedDetectors(
                        f.infos['eventdata_items'])
            else:
                metadatadict['SimulatedDetectors'] = [
                    'pixel', 'SCT', 'TRT', 'BCM', 'HGTD', 'Lucid', 'LAr',
                    'Tile', 'MDT', 'CSC', 'TGC', 'RPC', 'Truth'
                ]
        ## Check whether we should use the old names for the Tile CaloCalibrationHit containers
        if 'eventdata_items' in f.infos:
            checkTileCalibrationHitFormat(f.infos['eventdata_items'])
        else:
            digitizationFlags.experimentalDigi += ['OldTileCalibHitContainers']
        ##End of Patch for older hit files
        logDigitizationReadMetadata.debug(
            "%s Simulation MetaData Dictionary Successfully Created.",
            inputtype)
        logDigitizationReadMetadata.debug(
            "Found %s KEY:VALUE pairs in %s Simulation MetaData.", Nkvp,
            inputtype)
        logDigitizationReadMetadata.debug("KEYS FOUND: %s",
                                          metadatadict.keys())
        return metadatadict, True
print "=== create an EVGEN file... [ok]"


app = accp.AthenaApp(cmdlineargs=['--nprocs=-1'])
app << """
EVTMAX=1000
INPUT=['%(input_file_name)s']
OUTPUT='%(output_file_name)s'
""" % globals()

app.include('McParticleTests/iotest_ReadGenEvent_jobOptions.py')

mp_logfile = open('mp.readback.logfile.txt', 'w+')
print "=== read the EVGEN file back (with athena-mp)... (logfile=%s)" % (mp_logfile.name,)
rc = app.run(stdout=mp_logfile)
if rc:
    raise RuntimeError(rc)
print "=== read the EVGEN file back (with athena-mp)... [ok]"

print ":"*80
print "::: results:"
input_file = af.fopen(input_file_name).infos
print "input_file: [%s]\n nentries: %s" % (input_file['file_name'],
                                           input_file['nentries'],)

output_file = af.fopen('reaccessed.mc.event.pool').infos
print "output_file: [%s]\n nentries: %s" % (output_file['file_name'],
                                            output_file['nentries'],)
print "::: bye."
print ":"*80
Exemple #31
0
    if type(BSRDOInput) == type(''):
        athenaCommonFlags.BSRDOInput = [BSRDOInput]
    else:
        athenaCommonFlags.BSRDOInput = BSRDOInput
elif PoolRDOInput != None:
    globalflags.InputFormat = 'pool'
    if type(PoolRDOInput) == type(''):
        athenaCommonFlags.PoolRDOInput = [PoolRDOInput]
    else:
        athenaCommonFlags.PoolRDOInput = PoolRDOInput

# Conditions and geometry tag
if globalflags.InputFormat.is_pool() and (setDetDescr == None
                                          or setGlobalTag == None):
    import PyUtils.AthFile as athFile
    af = athFile.fopen(athenaCommonFlags.PoolRDOInput()[0])
    if setDetDescr == None:
        setDetDescr = af.fileinfos.get('geometry', None)
        log.info(
            'Geometry tag not specified. Setting from file meta data: setDetDescr="%s"'
            % setDetDescr)
    if setGlobalTag == None:
        setGlobalTag = af.fileinfos.get('conditions_tag', None)
        log.info(
            'Global conditions tag not specified. Setting from file meta data: setGlobalTag="%s"'
            % setGlobalTag)

if setDetDescr == None:
    raise RuntimeError(
        'No geometry tag specified. Please use "setDetDescr" to set it.')
if setGlobalTag == None:
import glob
from AthenaCommon.AthenaCommonFlags import jobproperties as jp
# Run over all events of the input file(s).
jp.AthenaCommonFlags.EvtMax.set_Value_and_Lock(-1)

jp.AthenaCommonFlags.FilesInput = [
  "/afs/cern.ch/work/v/vdao//xAODs/data15_13TeV.00276330.physics_Main.merge.DAOD_FTAG1.f620_m1480_p2411_tid06320446_00/DAOD_FTAG1.06320446._000001.pool.root.1",
  ]
svcMgr += CfgMgr.THistSvc()
# One RECREATE'd output ROOT stream per jet collection, keyed by an
# abbreviated collection name (AntiKt->Akt, TopoJets->To, TrackJets->Tr).
for jet in JetCollections:
  shortJetName=jet.replace("AntiKt","Akt").replace("TopoJets","To").replace("TrackJets","Tr")
  svcMgr.THistSvc.Output += [ shortJetName+" DATAFILE='flav_"+shortJetName+".root' OPT='RECREATE'"]
#svcMgr.THistSvc.Output += ["BTAGSTREAM DATAFILE='flavntuple.root' OPT='RECREATE'"]

from PyUtils import AthFile
##print jp.AthenaCommonFlags.FilesInput
# Peek at the input file's metadata to work out which derivation format it
# is (e.g. "FTAG1" from "StreamDAOD_FTAG1").
af = AthFile.fopen( "/afs/cern.ch/work/v/vdao//xAODs/data15_13TeV.00276330.physics_Main.merge.DAOD_FTAG1.f620_m1480_p2411_tid06320446_00/DAOD_FTAG1.06320446._000001.pool.root.1",) #opens the first file from the InputCollections list
af.fileinfos #this is a dict of dicts, take a look at what's available! Below are some examples:

#print af.fileinfos
derivation=af.fileinfos['EventStreamInfo']
if "StreamDAOD_" in derivation: derivation=derivation.split("DAOD_")[1]
print "DERIVATION IS: "+derivation
#isMC = 'IS_SIMULATION' in af.fileinfos['evt_type']
#beam_energy = af.fileinfos['beam_energy']
#conditions_tag = af.fileinfos['conditions_tag'] #useful for figuring out which mc production this is
#isFullSim = af.fileinfos['metadata']['/Simulation/Parameters']['SimulationFlavour']=='default' #full sim or atlfast
# NOTE(review): unconditional exit(-1) -- nothing after this line ever runs;
# presumably a debugging stopper that was left in.
exit(-1)

##########################################################################################################################################################
##########################################################################################################################################################
### you should normally not need to touch this part
Exemple #33
0
    EtaSGEntry="DFCommonPhotons_eta",
    PhiSGEntry="DFCommonPhotons_phi",
    EtSGEntry="DFCommonPhotons_et"
    #ESGEntry = "DFCommonPhotons_e"
    #PhotonContainer = "Photons"
)
ToolSvc += DFCommonPhotonsDirection

#====================================================================
# SHOWER SHAPE FUDGING IN MC
# (PRESELECTION=16: FUDGE FACTORS GEO21->DATA12)
#====================================================================

from PyUtils import AthFile

# Peek at the first input file's metadata to classify the sample
# (data vs MC, beam energy, conditions tag, full vs fast simulation).
af = AthFile.fopen(svcMgr.EventSelector.InputCollections[0]
                   )  #opens the first file from the InputCollections list
af.fileinfos  #this is a dict of dicts, take a look at what's available! Below are some examples:

isMC = 'IS_SIMULATION' in af.fileinfos['evt_type']
beam_energy = af.fileinfos['beam_energy']
conditions_tag = af.fileinfos[
    'conditions_tag']  #useful for figuring out which mc production this is
# Only the simulation flavours listed below count as full simulation; any
# other flavour (e.g. atlfast) leaves isFullSim False.
isFullSim = False
if isMC:
    simulationFlavour = af.fileinfos['metadata']['/Simulation/Parameters'][
        'SimulationFlavour']
    isFullSim = simulationFlavour in ('default', 'MC12G4', 'FullG4')

print "EGammaCommon: isMC = ", isMC
if isMC:
    print "EGammaCommon: isFullSim = ", isFullSim
Exemple #34
0
from AthenaCommon.DetFlags import DetFlags
# Only the pixel detector is needed for this job; switch everything else off.
DetFlags.all_setOff()
DetFlags.pixel_setOn()
DetFlags.Print()

#------------------------------------------
# GlobalFlags
#------------------------------------------
from AthenaCommon.GlobalFlags import globalflags
globalflags.DetDescrVersion = "ATLAS-R2-2016-01-00-01"  # [SGS] how to know exact version (e.g. AMI) ?
globalflags.DetGeo = 'atlas'
globalflags.DataSource = 'data'

# set InputFormat
# Peek at the first input file to decide between bytestream and POOL input;
# 'collection' is defined earlier in the file.
import PyUtils.AthFile as AthFile
inputfile = AthFile.fopen(collection[0])
if inputfile.fileinfos['file_type'] == 'bs':
    globalflags.InputFormat = 'bytestream'
elif inputfile.fileinfos['file_type'] == 'pool':
    globalflags.InputFormat = 'pool'
else:
    raise RuntimeError, "Unable to read input file (format not supported)"

from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
athenaCommonFlags.FilesInput = collection

# show debug info [SGS]
print '## globalflags.InputFormat = %s' % globalflags.InputFormat
print '## printing inputfile.fileinfos...'
for k, v in inputfile.fileinfos.iteritems():
    print '  * %s = %s' % (k, v)
from AthenaCommon.AthenaCommonFlags import jobproperties as jp
# EVTMAX may be injected into the script's namespace; default: all events.
jp.AthenaCommonFlags.EvtMax.set_Value_and_Lock( vars().get('EVTMAX', -1) )

jp.AthenaCommonFlags.FilesInput = [ 
"/afs/cern.ch/user/g/ggonella/ggonella/public/ForValerio/mc15_13TeV.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.merge.AOD.e3698_s2608_s2183_r6630_r6264_tid05419191_00/AOD.05419191._000184.pool.root.1"
  ##"/afs/cern.ch/user/v/vdao/mc15_8TeV.110401.PowhegPythia_P2012_ttbar_nonallhad.recon.AOD.e3099_s2578_r7135_tid06628604_00/AOD.06628604._000221.pool.root.1",
  ##"/afs/cern.ch/user/v/vdao/valid1.110401.PowhegPythia_P2012_ttbar_nonallhad.recon.AOD.e3099_s2578_r7058_tid06432679_00/AOD.06432679._000066.pool.root.1",
  ##"/afs/cern.ch/work/v/vdao//xAODs/dataNtuple/Peter/mc15_13TeV.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.merge.AOD.e3698_a766_a767_r6264_tid05443510_00/AOD.05443510._000453.pool.root.1",
  ##"/afs/cern.ch/work/v/vdao//xAODs/dataNtuple/Peter/mc15_13TeV.410004.PowhegHerwigppEvtGen_UEEE5_ttbar_hdamp172p5_nonallhad.merge.AOD.e3836_a766_a767_r6264_tid05457978_00/AOD.05457978._000222.pool.root.1",
  ##"/afs/cern.ch/work/v/vdao//xAODs/dataNtuple/Peter/mc15_13TeV.410006.PowhegPythia8EvtGen_A14_ttbar_hdamp172p5_nonallhad.merge.AOD.e3836_a766_a767_r6264_tid05457982_00/AOD.05457982._000217.pool.root.1",
  ##"/afs/cern.ch/work/v/vdao//xAODs/dataNtuple/Peter/mc15_13TeV.410022.Sherpa_CT10_ttbar_SingleLeptonP_MEPS_NLO.merge.AOD.e3959_a766_a780_r6264_tid05960719_00/AOD.05960719._000038.pool.root.1"
  ]

##svcMgr.EventSelector.InputCollections = jp.AthenaCommonFlags.FilesInput()
from PyUtils import AthFile
# Peek at the metadata of the first input file.
af = AthFile.fopen( jp.AthenaCommonFlags.FilesInput()[0] )
##svcMgr.EventSelector.InputCollections[0] )

### AF2 or FS
# Decide fast simulation (AF2) vs full simulation (FS) from the
# /Simulation/Parameters SimulationFlavour metadata: 'default' means FS,
# 'atlfast' means AF2.
isAF2=False
if af.fileinfos.has_key("evt_type"):
  eventTypeList = af.fileinfos["evt_type"]
  if eventTypeList.__contains__("IS_SIMULATION") :
    simType = af.fileinfos['metadata']['/Simulation/Parameters']['SimulationFlavour']
    if simType  == 'default' :
      print "VALERIO SAYS: THIS IS FS"
    elif simType  == 'atlfast' : 
      print "VALERIO SAYS: THIS IS AF2"
      isAF2=True

evtPrintoutInterval = vars().get('EVTPRINT', 5000)
                elif len(item) == 2:
                    runnbr, evtnbr = long(item[0]), long(item[1])
                else:
                    raise RuntimeError(
                        'item [%s] has invalid arity (%s)' %
                        (item, len(item))
                        )
            else:
                runnbr, evtnbr = None, long(item)
            selection.append((runnbr, evtnbr))

    # put back the massaged selection into our workspace
    args.selection = selection[:]
    
    import PyUtils.AthFile as af
    fi = af.fopen(args.files[0]).infos
    af.save_cache()
    
    if fi['file_type'] == 'bs':
        # optimization: run directly 'AtlCopyBSEvent.exe
        import subprocess
        cmd = ' '.join([
            'AtlCopyBSEvent.exe',
            '-e %(evt-list)s',
            '%(run-list)s',
            '--out %(output)s',
            '%(files)s',
            ])
        evt_list = [str(i) for _,i in args.selection]
        run_list = [str(i) for i,_ in args.selection if not i is None]
        cmd = cmd % {
#Skeleton joboption for a simple analysis job

# theApp.EvtMax=10                                         #says how many events to run over. Set to -1 for all events

import AthenaPoolCnvSvc.ReadAthenaPool                   #sets up reading of POOL files (e.g. xAODs)
# svcMgr.EventSelector.InputCollections=["/data/atlas/atlasdata3/burr/xAOD/testFiles/mc15_13TeV.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.merge.AOD.e3698_s2608_s2183_r7380_r6282_tid07626244_00/AOD.07626244._000001.pool.root.1"]   #insert your list of input files here
svcMgr.EventSelector.InputCollections=["/data/atlas/atlasdata3/burr/xAOD/testFiles/mc15cttbar/mc15_13TeV.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.merge.AOD.e3698_s2608_s2183_r7725_r7676/AOD.07915933._001658.pool.root.1"]   #insert your list of input files here

# Peek at the first input file's metadata to classify the sample
# (data vs MC, beam energy, conditions tag, full vs fast simulation).
from PyUtils import AthFile
af = AthFile.fopen(svcMgr.EventSelector.InputCollections[0])
af.fileinfos    #this is a dict of dicts
isMC = 'IS_SIMULATION' in af.fileinfos['evt_type']
beam_energy = af.fileinfos['beam_energy']
conditions_tag = af.fileinfos['conditions_tag']
if isMC:
    # SimulationFlavour 'default' corresponds to full simulation.
    isFullSim = af.fileinfos['metadata']['/Simulation/Parameters']['SimulationFlavour']=='default'

# isData     = 0:  data in ApplySUSYTools

#            = 1:  MC in ApplySUSYTools

# dataSource = 0:  data in ST__SUSYObjDef_xAOD

# dataSource = 1:  fullsim in ST__SUSYObjDef_xAOD

# dataSource = 2:  AtlFastII in ST__SUSYObjDef_xAOD

if isMC:  
    isData = 0
    if isFullSim: 
Exemple #38
0
DataInputCollections = runArgs.pileupBSFile

import PyUtils.AthFile as af


def getHITSFile(runArgs):
    """Return the first HITS input file recorded on *runArgs*.

    Both the 'inputHITSFile' and the legacy 'inputHitsFile' attribute
    spellings are accepted; aborts the job when neither is present.
    """
    for attr in ("inputHITSFile", "inputHitsFile"):
        if hasattr(runArgs, attr):
            return getattr(runArgs, attr)[0]
    raise SystemExit("No HITS file in runArgs!!")


try:
    f = af.fopen(getHITSFile(runArgs))
except AssertionError:
    skeletonLog.error("Failed to open input file: %s", getHITSFile(runArgs))
metadatadict = dict()
if 'metadata' in f.infos.keys():
    if '/Simulation/Parameters' in f.infos['metadata'].keys():
        metadatadict = f.infos['metadata']['/Simulation/Parameters']
        if isinstance(metadatadict, list):
            skeletonLog.warning(
                "%s inputfile: %s contained %s sets of Simulation Metadata. Using the final set in the list.",
                inputtype, inputfile, len(metadatadict))
            metadatadict = metadatadict[-1]
        if 'RunNumber' in metadatadict.keys():
            year = metadatadict['RunNumber'] % 100
            print "Found Year = %s", year
            from RecExConfig.RecFlags import rec
def configurePRWtool(offset=0):
    """Collect the pileup-reweighting (PRW) config and lumicalc files.

    Parameters:
      offset -- shift added to each MC channel number before looking up the
                per-DSID PRW config file (default 0).

    Returns a (config_files, lumicalc) pair: config_files is a sorted,
    duplicate-free list of PRW ROOT files for the mc16a/d/e campaigns that
    were actually encountered, lumicalc comes from getLumiCalcConfig().
    """
    from AthenaCommon.AppMgr import ServiceMgr
    from PyUtils import AthFile
    from ClusterSubmission.Utils import ResolvePath, ClearFromDuplicates
    recoLog = logging.getLogger('XAMPP getPrwConfig')

    # For data jobs all three period flags start out True; for MC they are
    # switched on per campaign in the loop below.
    use1516Data = isData()
    use17Data = isData()
    use18Data = isData()

    ### The actual mu config file is needed to activate the actual mu reweighting recommended for mc16d & mc16e
    ### https://indico.cern.ch/event/712774/contributions/2928042/attachments/1614637/2565496/prw_mc16d.pdf
    prwConfig_mc16a = []
    prwConfig_mc16d = getGRL(17, flavour='actualMu')
    prwConfig_mc16e = getGRL(18, flavour='actualMu')
    # (run number, channel number) pairs to resolve; seeded from the global
    # helpers for MC, left empty for data.
    run_channel = [] if isData() else [(getRunNumbersMC(), getMCChannelNumber() + offset)]
    athArgs = getAthenaArgs()
    if not isData() and (len(ServiceMgr.EventSelector.InputCollections) > 1 and athArgs.parseFilesForPRW):
        recoLog.info("Run a local job. Try to find foreach job the prw-config file")
        for i, in_file in enumerate(ServiceMgr.EventSelector.InputCollections):
            recoLog.info("Look up the channel number for %s" % (in_file))
            ### That file is used to read the meta-data we do not need to open it twice
            if i == 0: continue
            af = AthFile.fopen(in_file)
            # Heuristic AFII detection: any 'AtlfastII'/'Fast' key in tag_info.
            afII = not isData() and 'tag_info' in af.fileinfos and len(
                [key for key in af.fileinfos['tag_info'].iterkeys() if 'AtlfastII' in key or 'Fast' in key]) > 0
            mc_runNumber = af.fileinfos["run_number"][0] if len(af.fileinfos["run_number"]) > 0 else -1
            mc_channel = af.fileinfos["mc_channel_number"][0] if not isData() and len(af.fileinfos["mc_channel_number"]) > 0 else -1
            ## If the user mixes AFII with fullsim calibration
            ## the resuls are likely to mismatch. We must prevent this and kill
            ## the job
            if afII != isAF2():
                recoLog.error("You are mixing AFII with Fullsim files. Scale-factors and jet calibration are largely affected. Please fix")
                exit(1)
            run_channel += [(mc_runNumber, mc_channel + offset)]
    ## Find the central repo
    # MC run number encodes the campaign: 284500 -> mc16a, 300000 -> mc16d,
    # 310000 -> mc16e.  Missing config files are skipped (continue), but the
    # campaign's data-period flag is still switched on first.
    for period_num, mc_channel in run_channel:
        if period_num == 284500:
            config_file = ResolvePath("dev/PileupReweighting/share/DSID{dsid_short}xxx/pileup_mc16a_dsid{dsid}_{sim}.root".format(
                dsid_short=str(mc_channel)[0:3], dsid=mc_channel, sim="AFII" if isAF2() else "FS"))
            use1516Data = True
            if not config_file: continue
            prwConfig_mc16a += [config_file]
        elif period_num == 300000:
            config_file = ResolvePath("dev/PileupReweighting/share/DSID{dsid_short}xxx/pileup_mc16d_dsid{dsid}_{sim}.root".format(
                dsid_short=str(mc_channel)[0:3], dsid=mc_channel, sim="AFII" if isAF2() else "FS"))
            use17Data = True
            if not config_file: continue
            prwConfig_mc16d += [config_file]
        elif period_num == 310000:
            config_file = ResolvePath("dev/PileupReweighting/share/DSID{dsid_short}xxx/pileup_mc16e_dsid{dsid}_{sim}.root".format(
                dsid_short=str(mc_channel)[0:3], dsid=mc_channel, sim="AFII" if isAF2() else "FS"))
            use18Data = True
            if not config_file: continue
            prwConfig_mc16e += [config_file]
        else:
            recoLog.warning("Nothing has been found for the sample %d in prw period %d" % (mc_channel, period_num))
            continue

    # Assemble the final list from the campaigns that were seen.
    ConfigFiles = []
    if use1516Data: ConfigFiles += prwConfig_mc16a
    if use17Data: ConfigFiles += prwConfig_mc16d
    if use18Data: ConfigFiles += prwConfig_mc16e
    return sorted(ClearFromDuplicates(ConfigFiles)), getLumiCalcConfig(use1516Data=use1516Data, use17Data=use17Data, use18Data=use18Data)
### Define input xAOD and output ntuple file name
import glob
from AthenaCommon.AthenaCommonFlags import jobproperties as jp
#jp.AthenaCommonFlags.SkipEvents.set_Value_and_Lock( MYSTART )
#jp.AthenaCommonFlags.EvtMax.set_Value_and_Lock( vars().get('EVTMAX', 2500) )
# EVTPRINT can be injected into the script's namespace; default every 5000.
evtPrintoutInterval = vars().get('EVTPRINT', 5000)
svcMgr += CfgMgr.AthenaEventLoopMgr(EventPrintoutInterval=evtPrintoutInterval)

jp.AthenaCommonFlags.FilesInput = [
    ###PLEASE USE a TTBAR xAOD file
    # Please give a mc16 sample file for running with rel21
    '/eos/user/l/losterzo/files/BJetTrigger/mc16_13TeV.410000.PowhegPythiaEvtGen_P2012_ttbar_hdamp172p5_nonallhad.merge.AOD.e3698_s2997_r8903_r8906/AOD.10226638._000255.pool.root.1'
]
from PyUtils import AthFile

# Peek at the first input file's metadata.
af = AthFile.fopen(jp.AthenaCommonFlags.FilesInput()[0])
# NOTE(review): the two lines below repeat the EventLoopMgr configuration
# already done above -- presumably a copy/paste leftover; confirm and drop.
evtPrintoutInterval = vars().get('EVTPRINT', 5000)
svcMgr += CfgMgr.AthenaEventLoopMgr(EventPrintoutInterval=evtPrintoutInterval)
# Two RECREATE'd output ROOT streams (placeholder file names).
svcMgr += CfgMgr.THistSvc()
svcMgr.THistSvc.Output += ["TriggerJets DATAFILE='MYFLAVFILE' OPT='RECREATE'"]
svcMgr.THistSvc.Output += ["RefFile DATAFILE='MYREFFILE' OPT='RECREATE'"]

from AthenaCommon.DetFlags import DetFlags

# Inner detector and B-field only; calorimeters and muon system off.
DetFlags.BField_setOn()
DetFlags.ID_setOn()
DetFlags.Calo_setOff()
DetFlags.Muon_setOff()
from RecExConfig.RecFlags import rec
app << """
EVTMAX=1000 #-1
INPUT=['%(input_file_name)s']
OUTPUT='%(output_file_name)s'
""" % globals()

app.include('AthExThinning/ReadNonThinnedData_jobOptions.py')

mp_logfile = open('mp.elephantino.readback.logfile.txt', 'w+')
print "=== read the elephantino file back (with athena-mp)... (logfile=%s)" % (mp_logfile.name,)
rc = app.run(stdout=mp_logfile)
if rc:
    raise RuntimeError(rc)
print "=== read the elephantino file back (with athena-mp)... [ok]"

input_file  = af.fopen(input_file_name).infos
output_file = af.fopen(output_file_name).infos

print ":"*80
print "::: results:"

print """\
input_file: [%s]
  nentries: %s""" % (
  input_file['file_name'],
  input_file['nentries'],
  )

print """\
output_file: [%s]
   nentries: %s""" % (
Exemple #42
0
    f.writelines('\n')
    f.writelines(str(BLINE2))
    f.writelines('\n')
    f.close()

    del ALINE1
    del ALINE2
    del BLINE1
    del BLINE2


# Decide where the "special configuration" dictionary comes from: when the
# input is a HITS stream we are digitizing (use the digitization flags),
# otherwise we are simulating (use the sim flags).
doG4SimConfig = True
from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
import PyUtils.AthFile as af
try:
    f = af.fopen(athenaCommonFlags.FilesInput()[0])

    if "StreamHITS" in f.infos["stream_names"]:
        from Digitization.DigitizationFlags import digitizationFlags
        simdict = digitizationFlags.specialConfiguration.get_Value()
        doG4SimConfig = False
    else:
        from G4AtlasApps.SimFlags import simFlags
        simdict = simFlags.specialConfiguration.get_Value()
except Exception:
    # Best-effort: if the input file cannot be inspected, fall back to the
    # simulation flags.  (Was a bare "except:"; narrowed so KeyboardInterrupt
    # and SystemExit still propagate.)
    from G4AtlasApps.SimFlags import simFlags
    simdict = simFlags.specialConfiguration.get_Value()

# The qball scenario requires both parameters to be present.
assert "MASS" in simdict
assert "CHARGE" in simdict
load_files_for_qball_scenario(simdict["MASS"], simdict["CHARGE"])
Exemple #43
0
def setup(ToolSvc):
    """Create and register the TOPQ truth augmentation tools.

    Every tool is instantiated, added to ToolSvc and collected in a list,
    which is returned so the caller can schedule the tools on a derivation
    kernel.  The tools cover: MC truth classification, dedicated truth
    collections (muons/electrons/photons/neutrinos/taus), truth dressing,
    truth isolation, background-electron classification, heavy-flavour
    decoration for selected ttbar samples, and tau truth matching.

    Parameters
    ----------
    ToolSvc : the Athena tool service the tools are registered with.

    Returns
    -------
    list of the augmentation tools that were created and registered.
    """

    augmentationTools = []

    #==============================================================================
    # Set up the MCTruthClassifier
    #==============================================================================
    from MCTruthClassifier.MCTruthClassifierConf import MCTruthClassifier
    TOPQClassifier = MCTruthClassifier(name="TOPQClassifier",
                                       ParticleCaloExtensionTool="")
    ToolSvc += TOPQClassifier
    print "TOPQClassifier: ", TOPQClassifier

    #===============================================================================
    # Add Decoration Tool to Dress the Main Truth Collection with the Classification
    #===============================================================================
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/trunk/src/TruthClassificationDecorator.cxx
    from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__TruthClassificationDecorator
    TOPQClassificationDecorator = DerivationFramework__TruthClassificationDecorator(
        name="TOPQClassificationDecorator",
        ParticlesKey="TruthParticles",
        MCTruthClassifier=TOPQClassifier)
    ToolSvc += TOPQClassificationDecorator
    augmentationTools.append(TOPQClassificationDecorator)
    print "TOPQClassificationDecorator: ", TOPQClassificationDecorator

    #==============================================================================
    # Schedule the tool for adding new truth collection
    #==============================================================================
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/trunk/src/TruthCollectionMaker.cxx
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/trunk/src/TruthCollectionMakerTau.cxx
    from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__TruthCollectionMaker
    from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__TruthCollectionMakerTau

    # NOTE(review): the selection strings below require status == 1 (stable)
    # particles; the barcode < 200000 cut presumably excludes particles
    # created during detector simulation -- confirm against the framework
    # conventions.

    #===========
    # TruthMuons
    #===========
    TOPQMuonTool = DerivationFramework__TruthCollectionMaker(
        name="TOPQMuonTool",
        NewCollectionName="TruthMuons",
        ParticleSelectionString=
        "(abs(TruthParticles.pdgId) == 13) && (TruthParticles.status == 1) && (TruthParticles.barcode < 200000) "
    )
    ToolSvc += TOPQMuonTool
    print "TOPQMuonTool: ", TOPQMuonTool
    augmentationTools.append(TOPQMuonTool)

    #===============
    # TruthElectrons
    #===============
    TOPQElectronTool = DerivationFramework__TruthCollectionMaker(
        name="TOPQElectronTool",
        NewCollectionName="TruthElectrons",
        ParticleSelectionString=
        "(abs(TruthParticles.pdgId) == 11) && (TruthParticles.status == 1) && (TruthParticles.barcode < 200000)"
    )
    ToolSvc += TOPQElectronTool
    augmentationTools.append(TOPQElectronTool)
    print "TOPQElectronTool: ", TOPQElectronTool

    #=============
    # TruthPhotons
    #=============
    TOPQPhotonTool = DerivationFramework__TruthCollectionMaker(
        name="TOPQPhotonTool",
        NewCollectionName="TruthPhotons",
        ParticleSelectionString=
        "(abs(TruthParticles.pdgId) == 22) && (TruthParticles.status == 1) && (TruthParticles.barcode < 200000)"
    )
    ToolSvc += TOPQPhotonTool
    augmentationTools.append(TOPQPhotonTool)
    print "TOPQPhotonTool: ", TOPQPhotonTool

    #===============
    # TruthNeutrinos
    #===============
    # All three neutrino flavours (pdgId 12, 14, 16) in one collection.
    TOPQneutrinoexpression = "(abs(TruthParticles.pdgId) == 12 || abs(TruthParticles.pdgId) == 14 || abs(TruthParticles.pdgId) == 16) && (TruthParticles.status == 1) && (TruthParticles.barcode < 200000)"
    TOPQNeutrinoTool = DerivationFramework__TruthCollectionMaker(
        name="TOPQNeutrinoTool",
        NewCollectionName="TruthNeutrinos",
        ParticleSelectionString=TOPQneutrinoexpression)
    ToolSvc += TOPQNeutrinoTool
    augmentationTools.append(TOPQNeutrinoTool)
    print "TOPQNeutrinoTool: ", TOPQNeutrinoTool

    #==========
    # TruthTaus
    #==========
    # Taus use a dedicated maker that runs the classifier configured above.
    TOPQTauTool = DerivationFramework__TruthCollectionMakerTau(
        name="TOPQTauTool",
        NewCollectionName="TruthTaus",
        MCTruthClassifier=TOPQClassifier,
        RunClassifier=True)
    ToolSvc += TOPQTauTool
    augmentationTools.append(TOPQTauTool)
    print "TOPQTauTool: ", TOPQTauTool

    #==============================================================================
    # TRUTH DRESSING
    #==============================================================================
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/trunk/src/TruthDressingTool.cxx
    # Dress the muon/electron collections built above with nearby photons
    # (cone 0.1, anti-kt, photons from hadrons excluded).
    from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__TruthDressingTool
    TOPQMuonDressingTool = DerivationFramework__TruthDressingTool(
        name="TOPQMuonDressingTool",
        dressParticlesKey="TruthMuons",
        usePhotonsFromHadrons=False,
        dressingConeSize=0.1,
        particleIDsToDress=[13],
        useAntiKt=True)
    ToolSvc += TOPQMuonDressingTool
    augmentationTools.append(TOPQMuonDressingTool)
    print "TOPQMuonDressingTool: ", TOPQMuonDressingTool

    TOPQElectronDressingTool = DerivationFramework__TruthDressingTool(
        name="TOPQElectronDressingTool",
        dressParticlesKey="TruthElectrons",
        usePhotonsFromHadrons=False,
        dressingConeSize=0.1,
        particleIDsToDress=[11],
        useAntiKt=True)
    ToolSvc += TOPQElectronDressingTool
    augmentationTools.append(TOPQElectronDressingTool)
    print "TOPQElectronDressingTool: ", TOPQElectronDressingTool

    #==============================================================================
    # TRUTH ISOLATION
    #==============================================================================
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/trunk/src/TruthIsolationTool.cxx
    # For each of electrons/muons/photons, two isolation variables are
    # computed: etcone (cone 0.2, all particles) and ptcone (cone 0.3,
    # charged particles only).
    from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__TruthIsolationTool
    TOPQElectronIsolationTool1 = DerivationFramework__TruthIsolationTool(
        name="TOPQElectronIsolationTool1",
        isoParticlesKey="TruthElectrons",
        allParticlesKey="TruthParticles",
        particleIDsToCalculate=[11],
        IsolationConeSizes=[0.2],
        IsolationVarNamePrefix='etcone',
        ChargedParticlesOnly=False)
    ToolSvc += TOPQElectronIsolationTool1
    augmentationTools.append(TOPQElectronIsolationTool1)
    print "TOPQElectronIsolationTool1: ", TOPQElectronIsolationTool1

    TOPQElectronIsolationTool2 = DerivationFramework__TruthIsolationTool(
        name="TOPQElectronIsolationTool2",
        isoParticlesKey="TruthElectrons",
        allParticlesKey="TruthParticles",
        particleIDsToCalculate=[11],
        IsolationConeSizes=[0.3],
        IsolationVarNamePrefix='ptcone',
        ChargedParticlesOnly=True)
    ToolSvc += TOPQElectronIsolationTool2
    augmentationTools.append(TOPQElectronIsolationTool2)
    print "TOPQElectronIsolationTool2: ", TOPQElectronIsolationTool2

    TOPQMuonIsolationTool1 = DerivationFramework__TruthIsolationTool(
        name="TOPQMuonIsolationTool1",
        isoParticlesKey="TruthMuons",
        allParticlesKey="TruthParticles",
        particleIDsToCalculate=[13],
        IsolationConeSizes=[0.2],
        IsolationVarNamePrefix='etcone',
        ChargedParticlesOnly=False)
    ToolSvc += TOPQMuonIsolationTool1
    augmentationTools.append(TOPQMuonIsolationTool1)
    print "TOPQMuonIsolationTool1: ", TOPQMuonIsolationTool1

    TOPQMuonIsolationTool2 = DerivationFramework__TruthIsolationTool(
        name="TOPQMuonIsolationTool2",
        isoParticlesKey="TruthMuons",
        allParticlesKey="TruthParticles",
        particleIDsToCalculate=[13],
        IsolationConeSizes=[0.3],
        IsolationVarNamePrefix='ptcone',
        ChargedParticlesOnly=True)
    ToolSvc += TOPQMuonIsolationTool2
    augmentationTools.append(TOPQMuonIsolationTool2)
    print "TOPQMuonIsolationTool2: ", TOPQMuonIsolationTool2

    TOPQPhotonIsolationTool1 = DerivationFramework__TruthIsolationTool(
        name="TOPQPhotonIsolationTool1",
        isoParticlesKey="TruthPhotons",
        allParticlesKey="TruthParticles",
        particleIDsToCalculate=[22],
        IsolationConeSizes=[0.2],
        IsolationVarNamePrefix='etcone',
        ChargedParticlesOnly=False)
    ToolSvc += TOPQPhotonIsolationTool1
    augmentationTools.append(TOPQPhotonIsolationTool1)
    print "TOPQPhotonIsolationTool1: ", TOPQPhotonIsolationTool1

    TOPQPhotonIsolationTool2 = DerivationFramework__TruthIsolationTool(
        name="TOPQPhotonIsolationTool2",
        isoParticlesKey="TruthPhotons",
        allParticlesKey="TruthParticles",
        particleIDsToCalculate=[22],
        IsolationConeSizes=[0.3],
        IsolationVarNamePrefix='ptcone',
        ChargedParticlesOnly=True)
    ToolSvc += TOPQPhotonIsolationTool2
    augmentationTools.append(TOPQPhotonIsolationTool2)
    print "TOPQPhotonIsolationTool2: ", TOPQPhotonIsolationTool2

    #==============================================================================
    # BACKGROUND ELECTRON DECORATION TYPE/ORIGIN
    #==============================================================================
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/trunk/src/BkgElectronClassification.cxx
    # Uses the shared MCTruthClassifier instance from MCTruthClassifierBase,
    # not the TOPQClassifier configured above.
    from MCTruthClassifier.MCTruthClassifierBase import MCTruthClassifier as BkgElectronMCTruthClassifier
    from DerivationFrameworkEGamma.DerivationFrameworkEGammaConf import DerivationFramework__BkgElectronClassification
    BkgElectronClassificationTool = DerivationFramework__BkgElectronClassification(
        name="BkgElectronClassificationTool",
        MCTruthClassifierTool=BkgElectronMCTruthClassifier)
    ToolSvc += BkgElectronClassificationTool
    augmentationTools.append(BkgElectronClassificationTool)
    print "BkgElectronClassificationTool: ", BkgElectronClassificationTool

    #==============================================================================
    # BOOSTED TOP PAIR DECORATION
    #==============================================================================
    # /PhysicsAnalysis/DerivationFramework/DerivationFrameworkTop/trunk/src/BoostedHadTopAndTopPairFilterTool.cxx
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkTop/trunk/src/BoostedHadTopAndTopPairAugmentation.cxx
    #from DerivationFrameworkTop.DerivationFrameworkTopConf import DerivationFramework__BoostedHadTopAndTopPairFilterTool
    #TOPQboostedtopfiltertool = DerivationFramework__BoostedHadTopAndTopPairFilterTool(
    #                             name = "TOPQBoostedHadTopAndTopPairFilterTool",
    #                             tHadPtCut  = 2000000.0, #cut on hadronic tops in MeV
    #                             tPairPtCut = 3500000.0) #cut on ttbar system in MeV
    #ToolSvc += TOPQboostedtopfiltertool

    #from DerivationFrameworkTop.DerivationFrameworkTopConf import DerivationFramework__BoostedHadTopAndTopPairFilterAugmentation
    #TOPQBoostedHadTopAndTopPairFilterAugmentation = DerivationFramework__BoostedHadTopAndTopPairFilterAugmentation(name = "TOPQBoostedHadTopAndTopPairFilterAugmentation")
    #TOPQBoostedHadTopAndTopPairFilterAugmentation.FilterTool = TOPQboostedtopfiltertool
    #ToolSvc += TOPQBoostedHadTopAndTopPairFilterAugmentation
    #augmentationTools.append(TOPQBoostedHadTopAndTopPairFilterAugmentation)
    #print "TOPQBoostedHadTopAndTopPairFilterAugmentationTool: ", TOPQBoostedHadTopAndTopPairFilterAugmentation

    #==============================================================================
    # HEAVY FLAVOR DECORATION
    #==============================================================================
    # /PhysicsAnalysis/DerivationFramework/DerivationFrameworkTop/trunk/src/TTbarPlusHeavyFlavorFilterTool.cxx
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkTop/trunk/src/TopHeavyFlavorFilterAugmentation.cxx
    from DerivationFrameworkTop.DerivationFrameworkTopConf import DerivationFramework__TTbarPlusHeavyFlavorFilterTool
    TOPQtthffiltertool = DerivationFramework__TTbarPlusHeavyFlavorFilterTool(
        "TOPQTTbarPlusHeavyFlavorFilterTool")
    ToolSvc += TOPQtthffiltertool

    from DerivationFrameworkTop.DerivationFrameworkTopConf import DerivationFramework__TopHeavyFlavorFilterAugmentation
    TOPQTopHFFilterAugmentation = DerivationFramework__TopHeavyFlavorFilterAugmentation(
        name="TOPQTopHFFilterAugmentation")
    TOPQTopHFFilterAugmentation.FilterTool = TOPQtthffiltertool
    ToolSvc += TOPQTopHFFilterAugmentation
    augmentationTools.append(TOPQTopHFFilterAugmentation)
    print "TOPQTopHFFilterAugmentationTool: ", TOPQTopHFFilterAugmentation

    #==============================================================================
    # HEAVY FLAVOR DECORATIONS (ttbar)
    #==============================================================================
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/trunk/src/HadronOriginClassifier.cxx
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/trunk/src/HadronOriginDecorator.cxx
    # list of ttbar samples by mc_channel_number
    TOPQDSIDList = [
        410000,
        410001,
        410002,
        410003,
        410004,
        410007,
        410008,
        410009,
        301528,
        301529,
        301530,
        301531,
        301532,
        303722,
        303723,
        303724,
        303725,
        303726,
        407009,
        407010,
        407011,
        407012,
        410120,
        410121,
        426090,
        426091,
        426092,
        426093,
        426094,
        426095,
        426096,
        426097,
        429007,
    ]

    # Only schedule the hadron-origin decoration when the input sample's
    # mc_channel_number (read from the first AOD input file) is one of the
    # ttbar DSIDs listed above.
    import PyUtils.AthFile as af
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    f = af.fopen(athenaCommonFlags.PoolAODInput()[0])
    if len(f.mc_channel_number) > 0:
        if (int(f.mc_channel_number[0]) in TOPQDSIDList):
            from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__HadronOriginClassifier
            TOPQhadronorigintool = DerivationFramework__HadronOriginClassifier(
                "TOPQHadronOriginClassifier", DSID=int(f.mc_channel_number[0]))
            ToolSvc += TOPQhadronorigintool
            print "TOPQhadronorigintool: ", TOPQhadronorigintool
            from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__HadronOriginDecorator
            TOPQhadronorigindecorator = DerivationFramework__HadronOriginDecorator(
                name="TOPQHadronOriginDecorator")
            TOPQhadronorigindecorator.ToolName = TOPQhadronorigintool
            ToolSvc += TOPQhadronorigindecorator
            print "TOPQhadronorigindecorator: ", TOPQhadronorigindecorator
            augmentationTools.append(TOPQhadronorigindecorator)

    #==============================================================================
    # TAU TRUTH MATCHING
    #==============================================================================
    # PhysicsAnalysis/DerivationFramework/DerivationFrameworkTau/trunk/src/TauTruthMatchingWrapper.cxx
    # PhysicsAnalysis/TauID/TauAnalysisTools/trunk/Root/TauTruthMatchingTool.cxx
    from DerivationFrameworkTau.DerivationFrameworkTauConf import DerivationFramework__TauTruthMatchingWrapper
    from TauAnalysisTools.TauAnalysisToolsConf import TauAnalysisTools__TauTruthMatchingTool

    # Tau truth matching is only configured when a TauJets container is
    # actually present in the input.
    from RecExConfig.ObjKeyStore import objKeyStore
    if objKeyStore.isInInput("xAOD::TauJetContainer", "TauJets"):
        TOPQTauTruthMatchingTool = TauAnalysisTools__TauTruthMatchingTool(
            name="TOPQTauTruthMatchingTool")
        ToolSvc += TOPQTauTruthMatchingTool
        print "TOPQTauTruthMatchingTool: ", TOPQTauTruthMatchingTool
        TOPQTauTruthMatchingWrapper = DerivationFramework__TauTruthMatchingWrapper(
            name="TOPQTauTruthMatchingWrapper",
            TauTruthMatchingTool=TOPQTauTruthMatchingTool,
            TauContainerName="TauJets")
        ToolSvc += TOPQTauTruthMatchingWrapper
        augmentationTools.append(TOPQTauTruthMatchingWrapper)
        print "TOPQTauTruthMatchingWrapperTool: ", TOPQTauTruthMatchingWrapper

    #=============
    # RETURN TOOLS
    #=============
    return augmentationTools