Exemplo n.º 1
0
# Standard RecEx reconstruction setup.
from AthenaCommon.Include import include

include("RecExCommon/RecExCommon_topOptions.py")

# Set every registered service to INFO verbosity.
for svc in ServiceMgr:
    svc.OutputLevel = INFO

# ----------------------------------------------------------------------
# Output file for the HLT MET monitoring histograms
# ----------------------------------------------------------------------
OutputFile = 'Output_HLT_MetMon.root'

from GaudiSvc.GaudiSvcConf import THistSvc

svcMgr += THistSvc()
svcMgr.THistSvc.Output += [
    "MonitorHLTmet DATAFILE='%s' OPT='RECREATE'" % OutputFile
]

# ----------------------------------------------------------------------
# Top-level algorithm sequence
# ----------------------------------------------------------------------
from AthenaCommon.AlgSequence import AlgSequence

topSequence = AlgSequence()

# ----------------------------------------------------------------------
# Configure the job
# ----------------------------------------------------------------------
Exemplo n.º 2
0
def AssembleIO():
    """Configure the job's basic I/O: event-loop print interval, THistSvc
    output file, input collections and event-count limits.

    Behaviour is steered by optionally-defined module globals:
      outFile   -- output ROOT file name (default "AnalysisOutput.root")
      inputFile -- ROOT file, root:// URL, text file list, or directory
      nevents   -- maximum number of events to process
      nskip     -- number of events to skip at the start

    NOTE(review): relies on `os` and `theApp` being available in the
    calling job-option scope -- confirm they are defined before use.
    """
    #--------------------------------------------------------------
    # Reduce the event loop spam a bit
    #--------------------------------------------------------------
    from AthenaCommon.Logging import logging
    recoLog = logging.getLogger('MuonAnalysis I/O')
    recoLog.info('****************** STARTING the job *****************')

    # A stale AthFile cache from a previous run confuses athena; remove it.
    if os.path.exists("%s/athfile-cache.ascii.gz" % (os.getcwd())):
        recoLog.info(
            "Old athfile-cache found. Will delete it otherwise athena just freaks out. This little boy."
        )
        os.system("rm %s/athfile-cache.ascii.gz" % (os.getcwd()))
    from GaudiSvc.GaudiSvcConf import THistSvc
    from AthenaCommon.JobProperties import jobproperties
    import AthenaPoolCnvSvc.ReadAthenaPool
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags as acf
    from AthenaServices.AthenaServicesConf import AthenaEventLoopMgr
    from AthenaCommon.AppMgr import ServiceMgr
    from ClusterSubmission.Utils import ReadListFromFile, ResolvePath, IsROOTFile
    from MuonAnalysis.Utils import IsTextFile
    # Print event-loop progress only once per million events.
    ServiceMgr += AthenaEventLoopMgr(EventPrintoutInterval=1000000)

    ServiceMgr += THistSvc()
    OutFileName = "AnalysisOutput.root" if not "outFile" in globals(
    ) else outFile
    ServiceMgr.THistSvc.Output += [
        "MuonAnalysis DATAFILE='{}' OPT='RECREATE'".format(OutFileName)
    ]
    recoLog.info("Will save the job's output to " + OutFileName)
    ROOTFiles = []

    if "inputFile" in globals():
        recoLog.info("Use the following %s as input" % (inputFile))
        ROOTFiles = []
        ResolvedInFile = ResolvePath(inputFile)

        # Remote (xrootd) inputs are passed through untouched.
        if inputFile.startswith('root://'):
            ROOTFiles.append(inputFile)

        # A local file: either a text list of inputs or a single ROOT file.
        elif ResolvedInFile and os.path.isfile(ResolvedInFile):
            if IsTextFile(ResolvedInFile):
                ROOTFiles = ReadListFromFile(ResolvedInFile)
            else:
                ROOTFiles.append(ResolvedInFile)

        # A directory: pick up every ROOT file inside it.
        elif ResolvedInFile and os.path.isdir(ResolvedInFile):
            for DirEnt in os.listdir(ResolvedInFile):
                if IsROOTFile(DirEnt):
                    if DirEnt.find(ResolvedInFile) != -1:
                        ROOTFiles.append(DirEnt)
                    else:
                        ROOTFiles.append("%s/%s" % (ResolvedInFile, DirEnt))
        else:
            raise RuntimeError("Invalid input " + inputFile)
        if len(ROOTFiles) == 0:
            raise RuntimeError("No ROOT files could be loaded as input")
        ServiceMgr.EventSelector.InputCollections = ROOTFiles
        acf.FilesInput = ROOTFiles

    if "nevents" in globals():
        recoLog.info("Only run on %i events" % (int(nevents)))
        theApp.EvtMax = int(nevents)
    if "nskip" in globals():
        recoLog.info("Skip the first %i events" % (int(nskip)))
        ServiceMgr.EventSelector.SkipEvents = int(nskip)
    # NOTE(review): the line below opens a triple-quoted string that is never
    # closed (scraping artifact); everything after it parses as string content.
    """if isData(): recoLog.info("We're running over data today")
# Read LAr raw channels from the thinned digit container.
topSequence.LArRawChannelBuilder.DataLocation = "LArDigitContainer_Thinned"

#--------------------------------------------------------------
# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
#-------------------------------------------------------------
svcMgr.MessageSvc.OutputLevel = INFO
#increase the number of letter reserved to the alg/tool name from 18 to 30
svcMgr.MessageSvc.Format = "% F%50W%S%7W%R%T %0W%M"
svcMgr.MessageSvc.defaultLimit = 9999999  # all messages
svcMgr.MessageSvc.useColors = False
# NOTE(review): defaultLimit is assigned twice; this later value (1000000)
# overrides the 9999999 set above.
svcMgr.MessageSvc.defaultLimit = 1000000

# Book the AANT ntuple output stream, creating THistSvc if needed.
if not hasattr(ServiceMgr, 'THistSvc'):
    from GaudiSvc.GaudiSvcConf import THistSvc
    ServiceMgr += THistSvc()

ServiceMgr.THistSvc.Output += [
    "AANT DATAFILE='%s' OPT='RECREATE'" % NtupleName
]

#---------------------------------------------------------------------
# Perfmon
#--------------------------------------------------------------------

# Performance monitoring is switched off for this job.
from PerfMonComps.PerfMonFlags import jobproperties
jobproperties.PerfMonFlags.doMonitoring = False
#from PerfMonComps.JobOptCfg import PerfMonSvc
#svcMgr += PerfMonSvc()
#jobproperties.PerfMonFlags.doDetailedMonitoring = True
Exemplo n.º 4
0
            histoPathBase="/Tile/MBTS",
            LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc",
            doOnline=athenaCommonFlags.isOnline(),
            readTrigger=doTrigger)

        ManagedAthenaMBTSMon.AthenaMonTools += [TileMBTSMon]

        print(ManagedAthenaMBTSMon)

print(ManagedAthenaTileMon)

import os

# Choose the histogramming backend: the native-ROOT THistSvc when running
# offline, stateless-online, or when histogram storage was requested;
# otherwise the online TrigMonTHistSvc.
useOfflineHistSvc = (not athenaCommonFlags.isOnline()
                     or storeHisto
                     or athenaCommonFlags.isOnlineStateless())

if useOfflineHistSvc:
    if not hasattr(svcMgr, "THistSvc"):
        from GaudiSvc.GaudiSvcConf import THistSvc
        svcMgr += THistSvc("THistSvc")
    # Recreate the output file from scratch.
    if os.path.exists(RootHistOutputFileName):
        os.remove(RootHistOutputFileName)
    svcMgr.THistSvc.Output = [
        "%s DATAFILE='%s' OPT='RECREATE'" % (MonitorOutput,
                                             RootHistOutputFileName)
    ]
else:
    # Online: histograms are published via the trigger monitoring service.
    from TrigServices.TrigServicesConf import TrigMonTHistSvc
    trigmonTHistSvc = TrigMonTHistSvc("THistSvc")
    svcMgr += trigmonTHistSvc
# (0.) THistStream: book the output stream for the solenoid-test histograms.
from AthenaCommon.AppMgr import ServiceMgr
from GaudiSvc.GaudiSvcConf import THistSvc

if not hasattr(ServiceMgr, 'THistSvc'):
    ServiceMgr += THistSvc("THistSvc")
ServiceMgr.THistSvc.Output = [
    "SolenoidTest DATAFILE='hurz.root' OPT='RECREATE'"
]

# Field service configuration: point AtlasFieldSvc at fixed field maps and
# run with constant currents rather than reading them from DCS.
#from MagFieldServices.MagFieldServicesConf import MagField__AtlasFieldSvc
#ServiceMgr += MagField__AtlasFieldSvc("AtlasFieldSvc");
_bfield_dir = "/afs/cern.ch/user/m/masahiro/public/BFieldMap"
ServiceMgr.AtlasFieldSvc.FullMapFile = _bfield_dir + "/bfieldmap_7730_20400.root"
ServiceMgr.AtlasFieldSvc.SoleMapFile = _bfield_dir + "/bfieldmap_7730_0.root"
ServiceMgr.AtlasFieldSvc.ToroMapFile = _bfield_dir + "/bfieldmap_0_20400.root"
ServiceMgr.AtlasFieldSvc.UseDCS = False
ServiceMgr.AtlasFieldSvc.UseSoleCurrent = 7730
ServiceMgr.AtlasFieldSvc.UseToroCurrent = 0  # 20400

# (1.) SolenoidTest algorithm setup. Each counter gives the number of test
# calls for that mode; 0 disables the corresponding algorithm instance.
from AthenaCommon.AlgSequence import AlgSequence
from MagFieldUtils.MagFieldUtilsConf import MagField__SolenoidTest

topSequence = AlgSequence()
testFull = 1000  # =1000 to measure the speed of the full 3d field
testFast = 1000  # =1000 to measure the speed of the fast 2d field
testOld = 0  # =1000 to measure the speed of the old field
testHist = 100  # =100 to produce ROOT file to compare full vs. fast

if testFull:
    solenoidTestFull = MagField__SolenoidTest('SolenoidTestFull')
    solenoidTestFull.UseFullField = True
    solenoidTestFull.UseFastField = False
    topSequence += solenoidTestFull
Exemplo n.º 6
0
def MuonTPConfig(outputFilename, doPlots=False, doEff=False):
    """Set up the muon tag-and-probe (T&P) analyses.

    Arguments:
      outputFilename -- name of the ROOT file THistSvc writes to
      doPlots        -- steers creation of probe/match histograms
      doEff          -- steers additional creation of efficiency histograms
                        (only effective when doPlots is also True)
    """
    from AthenaCommon.AppMgr import ServiceMgr
    from AthenaCommon.AlgSequence import AlgSequence
    from MuonPerformanceAlgs import ZmumuTPRecoAnalysis
    from MuonPerformanceAlgs import ZmumuTPMuonAnalysis
    from MuonPerformanceAlgs import JPsiTPRecoAnalysis
    from MuonPerformanceAlgs import JPsiTPMuonAnalysis
    from MuonPerformanceAlgs import CommonMuonTPConfig

    # Full job is a list of algorithms
    job = AlgSequence()

    # Z->mumu tag-and-probe analyses to run; add your own in a similar way.
    do_Zmumu_RecoEff_TagProbe = True
    do_Zmumu_IsolationEff_TagProbe = True
    do_Zmumu_TriggerEff_TagProbe = True

    # J/psi tag-and-probe analyses to run.
    do_JPsi_RecoEff_TagProbe = True
    do_JPsi_IsolationEff_TagProbe = True
    do_JPsi_TriggerEff_TagProbe = True

    # No GRL applied by default.
    GRL_to_use = []
    #GRL_to_use  = ["/afs/cern.ch/user/a/atlasdqm/grlgen/All_Good/data15_13TeV.periodA_DetStatus-v62-pro18_DQDefects-00-01-02_PHYS_StandardGRL_All_Good.xml"]

    # Helper tools shared by all analyses.
    CommonMuonTPConfig.AddTrigDecisionTool()
    CommonMuonTPConfig.AddTrigMatchingTool()
    CommonMuonTPConfig.AddMuonSelectionTool()
    CommonMuonTPConfig.AddGoodRunsListSelectionTool(GRL_to_use)

    # Reconstruction-efficiency analyses.
    if do_Zmumu_RecoEff_TagProbe:
        ZmumuTPRecoAnalysis.AddZmumuTPRecoAnalysis(
            doEtaSlices=False,
            writeNtuple=True,
            doClosure=False,
            doDRSys=True,
            doValid=False,
            DoProbeMatchPlots=doPlots,
            ProduceEfficiencies=doPlots and doEff)
    if do_JPsi_RecoEff_TagProbe:
        JPsiTPRecoAnalysis.AddJPsiTPRecoAnalysis(
            writeNtuple=True,
            doValid=False,
            doDRSys=True,
            DoProbeMatchPlots=doPlots,
            ProduceEfficiencies=doPlots and doEff)

    # The Zmumu and JPsi trigger/isolation analyses share the same settings
    # apart from which efficiency measurements are switched on.
    muon_analysis_kwargs = dict(
        doTrigEtaSlices=True,
        doIsolEtaSlices=False,
        doIsolPlots=False,
        doTriggerPlots=True,
        writeNtuple=True,
        doClosure=False,
        doDRSys=True,
        doDPhiTPSys=True,
        doProbeCharSys=True,
        doLooseProbes=True,
        doMediumProbes=True,
        doTightProbes=True,
        doTruthProbes=True,
        doHighPtProbes=True,
        doL1=True,
        doL2=False,
        doEF=False,
        doHLT=True,
        DoProbeMatchPlots=False,
        ProduceEfficiencies=doPlots and doEff)

    if do_Zmumu_TriggerEff_TagProbe or do_Zmumu_IsolationEff_TagProbe:
        ZmumuTPMuonAnalysis.AddZmumuTPMuonAnalysis(
            doIso=do_Zmumu_IsolationEff_TagProbe,
            doTrig=do_Zmumu_TriggerEff_TagProbe,
            **muon_analysis_kwargs)

    if do_JPsi_TriggerEff_TagProbe or do_JPsi_IsolationEff_TagProbe:
        JPsiTPMuonAnalysis.AddJPsiTPMuonAnalysis(
            doIso=do_JPsi_IsolationEff_TagProbe,
            doTrig=do_JPsi_TriggerEff_TagProbe,
            **muon_analysis_kwargs)

    # Register the output stream with THistSvc.
    from GaudiSvc.GaudiSvcConf import THistSvc
    ServiceMgr += THistSvc()
    ServiceMgr.THistSvc.Output += [
        "MUONTP DATAFILE='{}' OPT='RECREATE'".format(outputFilename)
    ]
Exemplo n.º 7
0
                                                       , FillPedestalDifference = False
                                                       , CheckDCS           = TileUseDCS
                                                       , SummaryUpdateFrequency = TileSummaryUpdateFrequency );
        
        topSequence.TileTBMonManager.AthenaMonTools += [ TileDigiNoiseMon ];

    print(topSequence.TileTBMonManager)


import os

# Pick the histogramming backend. The online TrigMonTHistSvc is used only
# for a genuine (stateful) online run without explicit histogram storage;
# in every other case use the native-ROOT THistSvc.
if athenaCommonFlags.isOnline() and not storeHisto and not athenaCommonFlags.isOnlineStateless():
    from TrigServices.TrigServicesConf import TrigMonTHistSvc
    trigmonTHistSvc = TrigMonTHistSvc("THistSvc")
    svcMgr += trigmonTHistSvc
else:
    if not hasattr(svcMgr, 'THistSvc'):
        from GaudiSvc.GaudiSvcConf import THistSvc
        svcMgr += THistSvc('THistSvc')
    # Recreate the output file from scratch.
    if os.path.exists(RootHistOutputFileName):
        os.remove(RootHistOutputFileName)
    svcMgr.THistSvc.Output = [
        "%s DATAFILE='%s' OPT='RECREATE'" % (MonitorOutput, RootHistOutputFileName)
    ]

# Needed to read CTP results and DSP raw channels from ByteStream.
if not hasattr(svcMgr, "ByteStreamAddressProviderSvc"):
    from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc
    svcMgr += ByteStreamAddressProviderSvc()
Exemplo n.º 8
0
def monitoringTools(steering):
    """Prune the monitoring tools of *steering* and its child algorithms
    down to the targets enabled via TriggerFlags.enableMonitoring(), and
    apply the common per-tool/per-algorithm monitoring settings.

    Fixes: list comprehensions previously used purely for side effects are
    now plain loops, and the repeated TriggerFlags.enableMonitoring() calls
    inside the per-algorithm loop are hoisted out (the flag does not change
    while this function runs).
    """
    log = logging.getLogger("monitoringTools")

    # EF steering keeps a deeper lumi-block history than L2.
    LBNdepth = 6 if 'EF' in steering.name() else 4

    def assign_prop_value(a):
        # Not every tool exposes LBNHistoryDepth; report success/failure.
        try:
            a.LBNHistoryDepth = LBNdepth
        except Exception:
            return False
        return True

    # Snapshot of the requested monitoring targets (loop-invariant).
    enabled = TriggerFlags.enableMonitoring()
    setOfEnabled = set(enabled)  # this is set of enabled monitoring targets
    doTiming = "Time" in enabled or "TimeTree" in enabled
    doLog = "Log" in enabled

    log.info("requested targets: " + str(setOfEnabled) +
             " pruning other tools")
    # prune subalgorithms
    for alg in steering.getChildren():
        tokeep = [
            x for x in alg.AthenaMonTools
            if len(set(x.target()) & setOfEnabled) != 0
        ]
        log.debug("will keep " + str([x.getFullName() for x in tokeep]))
        toscratch = set(alg.AthenaMonTools) - set(tokeep)
        log.debug("will scratch " + str([x.getFullName() for x in toscratch]))

        for x in tokeep:
            assign_prop_value(x)
        alg.AthenaMonTools = tokeep

        if doTiming:
            alg.doTiming = True
        if doLog:
            alg.OutputLevel = getHLTOutputLevel()  # noqa: ATL900

    # prune steering monitoring tools and configure the survivors
    steering.pruneSteeringMonTools(enabled)
    for x in steering.MonTools:
        assign_prop_value(x)

    from AthenaCommon.AppMgr import ServiceMgr
    # Enable performance monitoring
    if 'PerfMon' in enabled:
        from PerfMonComps.JobOptCfg import PerfMonSvc
        jobproperties.PerfMonFlags.doMonitoring = True
        ServiceMgr += PerfMonSvc()

    log.debug("Setting up offline THistSvc")
    if not hasattr(ServiceMgr, 'THistSvc'):
        from GaudiSvc.GaudiSvcConf import THistSvc
        ServiceMgr += THistSvc()
    # Only the offline THistSvc exposes "Output"; set up its files there.
    if hasattr(ServiceMgr.THistSvc, "Output"):
        setTHistSvcOutput(ServiceMgr.THistSvc.Output)
Exemplo n.º 9
0
def setupCommonServices():
    """Configure the common HLT services for an online (multi-threaded)
    athena job: messaging, hive/event stores, scheduler, THistSvc,
    ByteStream input/output and the HLT event loop manager.

    The statement order matters: services are registered in the sequence
    later configuration steps expect to find them.
    """
    from AthenaCommon import CfgMgr
    from AthenaCommon.Logging import logging
    from AthenaCommon.Constants import INFO
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr, theApp
    from AthenaCommon.ConcurrencyFlags import jobproperties as jps

    # Setup messaging for Python and C++
    from AthenaCommon.Logging import log
    log.setFormat("%(asctime)s  Py:%(name)-31s %(levelname)7s %(message)s")

    # Create our own logger
    log = logging.getLogger('TriggerUnixStandardSetup::setupCommonServices:')

    from TrigServices.TrigServicesConfig import setupMessageSvc
    setupMessageSvc()

    # Do the default Atlas job configuration first
    import AthenaCommon.AtlasUnixStandardJob  # noqa: F401

    # Now do HLT/thread specific configuration (see e.g. AtlasThreadedJob.py)
    from StoreGate.StoreGateConf import SG__HiveMgrSvc
    svcMgr += SG__HiveMgrSvc("EventDataSvc",
                             NSlots=jps.ConcurrencyFlags.NumConcurrentEvents())

    import StoreGate.StoreGateConf as StoreGateConf
    svcMgr += StoreGateConf.StoreGateSvc("ConditionStore")

    # Configure the CoreDumpSvc
    if not hasattr(svcMgr, "CoreDumpSvc"):
        from AthenaServices.Configurables import CoreDumpSvc
        svcMgr += CoreDumpSvc()

    # ThreadPoolService thread local initialization
    from GaudiHive.GaudiHiveConf import ThreadPoolSvc
    svcMgr += ThreadPoolSvc("ThreadPoolSvc")
    svcMgr.ThreadPoolSvc.ThreadInitTools = ["ThreadInitTool"]

    from GaudiHive.GaudiHiveConf import AlgResourcePool
    svcMgr += AlgResourcePool(OutputLevel=INFO,
                              TopAlg=["AthSequencer/AthMasterSeq"])

    from AthenaCommon.AlgSequence import AlgSequence
    from SGComps.SGCompsConf import SGInputLoader
    topSequence = AlgSequence()
    topSequence += SGInputLoader(
        FailIfNoProxy=False)  # change to True eventually

    from AthenaCommon.AlgScheduler import AlgScheduler
    AlgScheduler.ShowDataDependencies(False)
    AlgScheduler.ShowControlFlow(False)
    AlgScheduler.setDataLoaderAlg('SGInputLoader')

    # Setup SGCommitAuditor to sweep new DataObjects at end of Alg execute
    theApp.AuditAlgorithms = True
    from SGComps.SGCompsConf import SGCommitAuditor
    svcMgr.AuditorSvc += SGCommitAuditor()

    # setup ROOT6
    from PyUtils.Helpers import ROOT6Setup
    ROOT6Setup()

    # Setup online THistSvc unless specifically configured otherwise
    #    setup the THistSvc early and force the creation of the THistSvc
    #    so that it can be used by infrastructure services to book histograms
    #    (to avoid problems e.g. with histograms in ROBDataProviderSvc)
    # NOTE(review): _Conf is expected to come from the enclosing module
    # (not visible here) -- confirm it is defined before this is called.
    if _Conf.useOnlineTHistSvc:
        if hasattr(svcMgr, 'THistSvc'):
            log.fatal(
                "The offline histogramming THistSvc is already in place.")
            raise RuntimeError(
                "Cannot setup online histogramming TrigMonTHistSvc")
        log.debug("Using online histogramming service (TrigMonTHistSvc)")
        from TrigServices.TrigServicesConf import TrigMonTHistSvc
        svcMgr += TrigMonTHistSvc("THistSvc")
    else:
        log.debug("Using offline histogramming service (THistSvc)")
        from GaudiSvc.GaudiSvcConf import THistSvc
        svcMgr += THistSvc()

    # StoreGateSvc
    svcMgr.StoreGateSvc.ActivateHistory = False

    # ProxyProviderSvc services configuration
    svcMgr += CfgMgr.ProxyProviderSvc()

    # --- ByteStreamAddressProviderSvc configuration
    svcMgr += CfgMgr.ByteStreamAddressProviderSvc()
    svcMgr.ProxyProviderSvc.ProviderNames += ["ByteStreamAddressProviderSvc"]
    theApp.CreateSvc += [svcMgr.ByteStreamAddressProviderSvc.getFullName()]

    # Initialization of DetDescrCnvSvc
    svcMgr += CfgMgr.DetDescrCnvSvc(
        # specify primary Identifier dictionary to be used
        IdDictName="IdDictParser/ATLAS_IDS.xml")

    theApp.CreateSvc += [svcMgr.DetDescrCnvSvc.getFullName()]
    svcMgr.EventPersistencySvc.CnvServices += ["DetDescrCnvSvc"]

    # Online services for ByteStream input/output
    from TrigByteStreamCnvSvc.TrigByteStreamCnvSvcConf import TrigEventSelectorByteStream
    from TrigByteStreamCnvSvc.TrigByteStreamCnvSvcConfig import TrigByteStreamInputSvc, TrigByteStreamCnvSvc
    svcMgr += TrigByteStreamCnvSvc(
        "ByteStreamCnvSvc")  # this name is hard-coded in some converters
    svcMgr.EventPersistencySvc.CnvServices += ["ByteStreamCnvSvc"]
    svcMgr += TrigByteStreamInputSvc("ByteStreamInputSvc")
    svcMgr += TrigEventSelectorByteStream(
        "EventSelector", ByteStreamInputSvc=svcMgr.ByteStreamInputSvc)
    theApp.EvtSel = "EventSelector"

    # Online event loop manager
    from TrigServices.TrigServicesConfig import HltEventLoopMgr
    loopMgr = HltEventLoopMgr("HltEventLoopMgr")
    loopMgr.WhiteboardSvc = "EventDataSvc"
    loopMgr.SchedulerSvc = AlgScheduler.getScheduler().getName()
    loopMgr.EvtSel = svcMgr.EventSelector
    loopMgr.OutputCnvSvc = svcMgr.ByteStreamCnvSvc
    svcMgr += loopMgr
    theApp.EventLoop = loopMgr.name()

    from TrigOutputHandling.TrigOutputHandlingConfig import HLTResultMTMakerCfg
    svcMgr.HltEventLoopMgr.ResultMaker = HLTResultMTMakerCfg()

    # Configuration of Interval of Validity Service
    svcMgr += CfgMgr.IOVSvc()

    # Configure COOL update helper tool
    from TrigServices.TrigServicesConfig import TrigCOOLUpdateHelper
    svcMgr.HltEventLoopMgr.CoolUpdateTool = TrigCOOLUpdateHelper()

    # Configure the online ROB data provider service
    from TrigServices.TrigServicesConfig import HltROBDataProviderSvc
    svcMgr += HltROBDataProviderSvc()

    # Explicitly set a few OutputLevels (needed because some services are created in
    # different order when running with the PSC)
    svcMgr.IncidentSvc.OutputLevel = theApp.OutputLevel
    svcMgr.ProxyProviderSvc.OutputLevel = theApp.OutputLevel
    svcMgr.StoreGateSvc.OutputLevel = theApp.OutputLevel

    return
Exemplo n.º 10
0
def prepareCostRun(name, option='hlt'):
    """Create and configure a TrigCostRun algorithm for reading trigger
    cost-monitoring data from ByteStream.

    Arguments:
      name   -- instance name for the TrigCostRun algorithm
      option -- option string; substrings 'l2'/'ef'/'hlt' select the trigger
                levels to process, 'useSaveTools' enables the old-style
                ROOT-file output tools, 'emon' configures online (IS)
                publication.

    Returns the configured TrigCostRun instance.

    Fixes: the bare `except:` around the optional TrigCost2IS import now
    catches `Exception` only, so SystemExit/KeyboardInterrupt propagate.
    """
    log = logging.getLogger('prepareCostRun')

    log.info('Setup for BS reading...')
    prepareTrigSerialize(log)

    from TrigEDMConfig.TriggerEDM import EDMLibraries
    from TrigNavigation.TrigNavigationConfig import HLTNavigationOnline

    run = TrigCostRun(name)

    run.printEvent = True
    run.keyStream = ''

    # StoreGate keys of the HLT results for each trigger level.
    run.keyResultL2 = 'HLTResult_L2'
    run.keyResultEF = 'HLTResult_EF'
    run.keyResultHLT = 'HLTResult_HLT'

    # Keys of the cost-monitoring configuration collections.
    run.keyConfigL2 = 'HLT_TrigMonConfigCollection_OPI_L2_monitoring_config'
    run.keyConfigEF = 'HLT_TrigMonConfigCollection_OPI_EF_monitoring_config'
    run.keyConfigHLT = 'HLT_TrigMonConfigCollection_OPI_HLT_monitoring_config'

    # Keys of the cost-monitoring event collections.
    run.keyEventL2 = 'HLT_TrigMonEventCollection_OPI_L2_monitoring_event'
    run.keyEventEF = 'HLT_TrigMonEventCollection_OPI_EF_monitoring_event'
    run.keyEventHLT = 'HLT_TrigMonEventCollection_OPI_HLT_monitoring_event'

    # Enable only the trigger levels requested via the option string.
    run.doL2 = 'l2' in option
    run.doEF = 'ef' in option
    run.doHLT = 'hlt' in option

    run.navigation = HLTNavigationOnline()
    run.navigation.ReferenceAllClasses = False
    run.navigation.Dlls = EDMLibraries

    tool_conf = Trig__TrigNtConfTool('RunCostConf')
    tool_post = Trig__TrigNtPostTool('RunCostPost')

    tool_conf.printConfig = False
    tool_conf.useDB = True

    #
    # Configure old style output
    #
    if 'useSaveTools' in option:
        from AthenaCommon.AppMgr import ServiceMgr as svcMgr
        if not hasattr(svcMgr, 'THistSvc'):
            from GaudiSvc.GaudiSvcConf import THistSvc
            svcMgr += THistSvc()

        svcMgr.THistSvc.Output += [
            "TrigCostReadBS DATAFILE='TrigCostReadBS.root' OPT='RECREATE'"
        ]

        # Full cost ntuple.
        save_cost = Trig__TrigNtSaveTool('RunCostSave_full')
        save_cost.writeFile = False
        save_cost.fileName = ''
        save_cost.streamConfig = 'TrigCostReadBS'
        save_cost.streamEvent = 'TrigCostReadBS'
        save_cost.writeRateOnly = False

        # Rate-only ntuple in a separate tree.
        save_rate = Trig__TrigNtSaveTool('RunCostSave_rate')
        save_rate.writeFile = False
        save_rate.fileName = ''
        save_rate.streamConfig = ''
        save_rate.streamEvent = 'TrigCostReadBS'
        save_rate.writeRateOnly = True
        save_rate.treeNameEvent = 'event_rate_only'
        save_rate.treeNameConfig = ''
        save_rate.printPostSummary = False

        run.tools = [tool_conf, tool_post]
        run.toolsSave = [save_cost, save_rate]

    if 'emon' in option:
        run.tools = []
        try:
            from TrigCost2IS.TrigCost2ISConf import Trig__TrigNtCost2IS
            tool_2is = Trig__TrigNtCost2IS('RunCost2IS')

            log.info('Configuring emon algorithm')
            log.info('Picked up HLT tool: RunCost2IS')

            run.tools = [tool_2is]
        except Exception:
            log.info('HLT tools are not available... continue without them')

    log.info('Prepared TrigCostRun algorithm instance: ' + run.getName())
    return run
Exemplo n.º 11
0
    def __init__(self, StreamName, FileName, TreeName=None, asAlg=False):
        """Constructor for the D3PD stream object.

           Arguments:
              StreamName: Logical name of the D3PD stream. Note that beside
                          using it to define the stream in THistSvc, this
                          name is also used as the name of the TTree in the
                          output file in case one is not specified explicitly.
              FileName: Name of the file to write the D3PD TTree into.
              TreeName: Name of the TTree in the output file. If it's not
                        specified, the stream name is used as the tree name.
              asAlg: If set to True, the D3PD::MakerAlg algorithm is added
                     to the job as a regular algorithm. When set to False
                     (default), the D3PD algorithm is added to the application
                     manager as an output stream.
        """
        # Initialize the base class:
        AugmentedStreamBase.__init__(self, StreamName)

        # Check if the user specified a tree name or not:
        if TreeName is None:
            TreeName = StreamName

        # Remember the file and tree names just for bookkeeping:
        self.fileName = FileName
        self.treeName = TreeName

        # We need to add some stuff to the main algorithm sequence:
        from AthenaCommon.AlgSequence import AlgSequence
        topSequence = AlgSequence()

        # Create a sequence where the pre-D3PD-making algorithms are run:
        from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags
        preseq = AlgSequence(D3PDMakerFlags.PreD3PDAlgSeqName(),
                             StopOverride=True)
        if not hasattr(topSequence, D3PDMakerFlags.PreD3PDAlgSeqName()):
            topSequence += [preseq]

        # Add the AANT algorithm for making it possible to back navigate
        # from D3PD events. A "parent:child" stream name is split here:
        ParentStreamName = StreamName.split(':')[0]
        if StreamName.count(':') != 0:
            if StreamName.count(':') == 1:
                StreamName = StreamName.split(':')[1]
            else:
                raise AttributeError("Stream name '%s' can't be used!" % StreamName)
        if not hasattr(topSequence, ParentStreamName + "AANTStream"):
            try:
                from AnalysisTools.AnalysisToolsConf import AANTupleStream
                topSequence += AANTupleStream(ParentStreamName + "AANTStream",
                                              ExtraRefNames=['StreamRDO',
                                                             'StreamRAW',
                                                             'StreamESD',
                                                             'StreamAOD'],
                                              OutputName=FileName,
                                              WriteInputDataHeader=True,
                                              StreamName=ParentStreamName)
            except ImportError:
                # Allow running in releases that don't provide AnalysisTools.
                print(self.Name, ": INFO didn't find AnalysisTools.AnalysisToolsConf in release.")
                import traceback
                print(traceback.format_exc())

        # Make sure that THistSvc exists.
        from AthenaCommon.AppMgr import ServiceMgr
        if not hasattr(ServiceMgr, 'THistSvc'):
            from GaudiSvc.GaudiSvcConf import THistSvc
            ServiceMgr += THistSvc()

        # Check if the requested stream is already defined in THistSvc:
        streamExists = False
        for s in ServiceMgr.THistSvc.Output:
            stream = s.split()[0]
            if stream == StreamName:
                streamExists = True
                break

        # Add the stream if it's not defined yet:
        if not streamExists:
            ServiceMgr.THistSvc.Output += ["%s DATAFILE='%s' OPT='RECREATE' CL='%i'" %
                                           (StreamName, FileName,
                                            D3PDMakerFlags.CompressionLevel())]

        # Finally, create the D3PD::MakerAlg algorithm and add it to the job.
        # Note that here we're specifying that the D3PDMaker code should use
        # ROOT output.
        #
        # If we're adding as an algorithm directly, then pass the parent sequence
        # into MakerAlg(...). MakerAlg(...) will then add itself to the sequence
        # and also set up the accompanying filter sequence. Otherwise, we add it
        # as a stream; in that case we set up backwards compatibility for
        # 'filterSeq'.
        try:
            import D3PDMakerCoreComps
            if asAlg:
                theseq = topSequence
            else:
                theseq = None
            self.Stream = D3PDMakerCoreComps.MakerAlg(StreamName + "D3PDMaker", seq=theseq,
                                                      file=FileName, stream=ParentStreamName,
                                                      tuplename=TreeName,
                                                      D3PDSvc="D3PD::RootD3PDSvc")

            if not asAlg:
                from AthenaCommon.AppMgr import theApp
                theApp.addOutputStream(self.Stream)
                # Backwards compatibility for the filter algorithm:
                self.filterSeq = _RootStreamFilterHelper(self, topSequence)
        except ImportError:
            # Allow running in releases that don't provide D3PDMakerCoreComps.
            print(self.Name, ": INFO didn't find D3PDMakerCoreComps in release.")

        return
Exemplo n.º 12
0
# Enable filling of the forward-transport ROOT tree.
forwardTransportSvc.FillRootTree = True

from GaudiSvc.GaudiSvcConf import THistSvc
# NOTE(review): this rebinds the name THistSvc from the imported class to an
# instance, shadowing the class for any later code in this job option --
# consider renaming the instance variable.
THistSvc = THistSvc()
THistSvc.Output = ["AANT DATAFILE='fwdTransport.root' OPT='RECREATE'"]
svcMgr += THistSvc
Exemplo n.º 13
0
# Geometry tag for the FTK RDO reading job.
runArgs.geometryVersion = 'ATLAS-R2-2016-01-00-01'
# Post-exec: disable the reader's track/vertex retrieval and attach the
# FTK_RDO_MonitorAlgo configured for tower 40.
runArgs.postExec = [
    'topSequence.FTK_RDO_ReaderAlgo.GetTracks=False;topSequence.FTK_RDO_ReaderAlgo.GetOfflineVertex_Offline=False;topSequence.FTK_RDO_ReaderAlgo.GetRefitTracks=False;topSequence.FTK_RDO_ReaderAlgo.GetTrackParticles=False;topSequence.FTK_RDO_ReaderAlgo.GetRefitTrackParticles=False;topSequence.FTK_RDO_ReaderAlgo.GetVertex=False;topSequence.FTK_RDO_ReaderAlgo.GetRefitVertex=False;from TrigFTK_RawDataAlgs.TrigFTK_RawDataAlgsConf import FTK_RDO_MonitorAlgo;FTK_RDO_Monitor = FTK_RDO_MonitorAlgo( "FTK_RDO_MonitorAlgo");FTK_RDO_Monitor.RDO_CollectionName="FTK_RDO_Tracks";FTK_RDO_Monitor.offlineTracksName="Tracks";FTK_RDO_Monitor.FTK_DataProvider=theFTK_DataProviderSvc;alg+= FTK_RDO_Monitor;topSequence.FTK_RDO_MonitorAlgo.GetHashFromTrack=False;topSequence.FTK_RDO_MonitorAlgo.GetHashFromConstants=True;topSequence.FTK_RDO_MonitorAlgo.Nlayers=8;topSequence.FTK_RDO_MonitorAlgo.PatternsVersion=\"DataAlignment_xm05_ym05_Reb64_v2\";topSequence.FTK_RDO_MonitorAlgo.TowerID=40;topSequence.FTK_RDO_MonitorAlgo.OutputLevel=VERBOSE'
]  # set TowerID here

#FTK_RDO_Monitor.mineta=-0.1;FTK_RDO_Monitor.maxeta=1.6;FTK_RDO_Monitor.minphi=2.3;FTK_RDO_Monitor.maxphi=2.9;

# Pre-exec: run inner-detector-only reconstruction with FTK enabled and the
# FTK ByteStream tool in Aux data format.
runArgs.preExec = [
    'rec.doWriteAOD=False;rec.doWriteESD=False;rec.doTrigger=False;rec.doFTK=True;rec.doCalo=False;rec.doInDet=True;rec.doMuon=False;rec.doJetMissingETTag=False;rec.doEgamma=False;rec.doMuonCombined=False;rec.doTau=False;from TrigFTKByteStream.TrigFTKByteStreamConf import FTK__TrigFTKByteStreamTool as TrigFTKByteStreamTool;ftkbstool=TrigFTKByteStreamTool("FTK::TrigFTKByteStreamTool");ftkbstool.FTKAuxDataFormat=True;ToolSvc+=ftkbstool'
]

# Input data
runArgs.inputBSFile = [
    'data17_5TeV.00341184.physics_Main.merge.DRAW_ZMUMU.f903_m1831._0001.1'
]
runArgs.inputBSFileType = 'BS'

#runArgs.inputBSFileNentries = 240
runArgs.BSFileIO = 'input'

# Output data (disabled)
#runArgs.outputESDFile = 'ESD.root'
#runArgs.outputESDFileType = 'ESD'
#runArgs.outputHIST_ESD_INTFile = 'tmp.HIST_ESD_INT'
#runArgs.outputHIST_ESD_INTFileType = 'hist_esd_int'

include("RecJobTransforms/skeleton.RAWtoESD_tf.py")

from GaudiSvc.GaudiSvcConf import THistSvc
# Fix: THistSvc stream specifications are space-separated; the stray comma
# after DATAFILE='ftk.root' made the OPT option part of the file spec
# (every other spec in this configuration uses "DATAFILE='...' OPT='...'").
svcMgr += THistSvc(Output=["TRACKS DATAFILE='ftk.root' OPT='RECREATE'"])
Exemplo n.º 14
0
def Initiate(ConfInstance=None):
  """Sets up the basic global tools required for B-Tagging. This function is idempotent; it will not run again if it has run once. It is
  typically called by other functions in this file to make sure the basic global tools exist.

  The function returns True if B-Tagging is initialized or has been before. It returns False if it B-Tagging has been switched off for various
  reasons (these are checked in the checkFlagsUsingBTaggingFlags function).

  If the B-tagging calibration broker has been registered we assume initiation has already been performed; this allows users to setup their own initiation code."""

  # Fall back to the default global configuration instance when the caller
  # did not supply one.
  if ConfInstance is None:
    from BTagging.BTaggingConfiguration import getConfiguration
    ConfInstance = getConfiguration()

  # Idempotency guard: a second call on an already-initialized instance is a no-op.
  if ConfInstance._Initialized:
    return True

  from AtlasGeoModel.CommonGMJobProperties import CommonGeometryFlags as commonGeoFlags
  from AtlasGeoModel.InDetGMJobProperties import InDetGeometryFlags as geoFlags
  from IOVDbSvc.CondDB import conddb
  # Declare the COOL folder to the CondInputLoader
  # Decide between Run-1 and Run-2 tagger configuration: COMP200 conditions
  # data implies Run 1; for MC the geometry flags are consulted instead.
  btagrun1=False
  if conddb.dbdata == 'COMP200':
    btagrun1=True
  elif conddb.isMC:
    # The Run() parameter only exists for ATLAS-R1(...) and ATLAS-R2(...) geo tags,
    # not for ATLAS-GEO(...) and ATLAS-IBL(...) ones. Hence if Run() is undefined,
    # presence of IBL is used to switch between Run1/Run2
    btagrun1 = (commonGeoFlags.Run() == "RUN1" or (commonGeoFlags.Run() == "UNDEFINED" and geoFlags.isIBL() == False))
  if (btagrun1):
    print (ConfInstance.BTagTag()+' - INFO - Setting up Run 1 configuration')
    # Run-1 setup: enable the legacy taggers, switch off the Run-2-only ones.
    BTaggingFlags.JetFitterNN=True
    BTaggingFlags.SV2    =True
    BTaggingFlags.JetVertexCharge=False
    BTaggingFlags.SoftMu=False
    BTaggingFlags.MV2c10mu=False
    BTaggingFlags.MV2c10rnn=False
    BTaggingFlags.MV2cl100=False
    BTaggingFlags.RNNIP=False
    BTaggingFlags.DL1=False
    BTaggingFlags.DL1mu=False
    BTaggingFlags.DL1rnn=False
  else:
    print (ConfInstance.BTagTag()+' - INFO - Setting up Run 2 configuration')

  # The trigger configuration additionally enables MV2c20.
  if ConfInstance._name == "Trig":
    BTaggingFlags.MV2c20=True

  print (ConfInstance.BTagTag()+' - INFO - Initializing default basic tools')

  if ConfInstance.checkFlagsUsingBTaggingFlags():

    #Print the flags
    BTaggingFlags.Print()

    #If debugging do a check of the tool collection structure
    if(BTaggingFlags.OutputLevel < 3):
      from BTagging.BTaggingConfiguration import checkToolCollectionStructure
      checkToolCollectionStructure()

    #Get TheTruthCollectionKey from input
    TheTruthCollectionKey = 'TruthEvents'
    BTaggingFlags.RetagJets = BTaggingFlags.Jets
    # Optionally peek into the input POOL file to restrict re-tagging to the
    # jet collections actually present there.
    if BTaggingFlags.AutoInspectInputFile:
      from AthenaCommon.GlobalFlags import globalflags
      if globalflags.InputFormat == 'pool':
        try:
          from RecExConfig.InputFilePeeker import inputFileSummary
          BTaggingFlags.RetagJets = []
          for i in inputFileSummary['eventdata_items']:
            if i[0] == 'McEventCollection':
              # TheTruthCollectionKey = i[1] # disable taking the name from the input file?
              pass
            elif i[0] == 'JetCollection':
              # Strip the 'AODJets'/'Jets' suffixes to recover the base
              # collection name used by BTaggingFlags.Jets.
              jetC1 = (i[1]).replace('AODJets','')
              jetC = jetC1.replace('Jets','')
              if jetC in BTaggingFlags.Jets:
                BTaggingFlags.RetagJets += [ jetC ]
        except Exception:
          # Best effort only: fall back to re-tagging everything if peeking fails.
          print (ConfInstance.BTagTag()+' - WARNING - Automatic inspection of input file failed (file too old?)')
          import traceback
          traceback.print_exc()

    print (ConfInstance.BTagTag()+' - Using ', TheTruthCollectionKey, ' as truth key')
#    print (ConfInstance.BTagTag()+' - Re-tagging these jet collections: ', BTaggingFlags.RetagJets)

    #
    # ============ Setup basic services
    #
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    if not hasattr(svcMgr, 'THistSvc'):
      from GaudiSvc.GaudiSvcConf import THistSvc
      svcMgr += THistSvc()
    if not 'topSequence' in dir():
      from AthenaCommon.AlgSequence import AlgSequence
      topSequence = AlgSequence()


    #Create and add our condition algorithm to the Condition Sequencer

    SetupConditionAlgorithm(ConfInstance)

    #
    # ========== Add tools now
    #

    # -------------- Calibration Broker --------------
    from AthenaCommon.AppMgr import ToolSvc
    from AthenaCommon.Resilience import treatException,protectedInclude
    if ConfInstance._name == "" or ConfInstance._name == "Trig":
      # No calibration broker setup - The condition algorithm is used
      pass
    elif ConfInstance._name == "AODFix":
      protectedInclude("BTagging/BTagCalibBroker_AODFix_jobOptions.py")
      BTagCalibrationBrokerTool = ConfInstance.getTool("BTagCalibrationBrokerTool")
    else:
      # Unknown configuration instance: refuse to continue without a broker.
      print (ConfInstance.BTagTag()+' - ERROR - Configuration instance "'+ConfInstance._name+'" has no calibration broker setup specified!')
      raise RuntimeError
    # -------------- \Calibration Broker --------------

    # -- for reference mode:
    if BTaggingFlags.Runmodus == 'reference':

      svcMgr.THistSvc.Output += ["RefFile DATAFILE='BTagCalibALL.root' OPT='RECREATE'"]

    ConfInstance._Initialized = True
    return True
  else:
    print (ConfInstance.BTagTag()+' - WARNING - Tool initialization requested but B-Tagging is not possible for the current dataset.')
    return False
Exemplo n.º 15
0
# RequireAlgs = logical AND of filters
# Accept events only if the loose-electron selector algorithm passed.
ElLoose18Stream.AcceptAlgs( ["ElectronLooseSelectorInElLoose18Stream"] )

# Record the listed algorithms in the stream's cut-flow bookkeeping.
ElLoose18Stream.AddOtherAlgsToBookkeep( algsToBookkeep )

#---------------------------------------------------
# Add the containers to the output stream
#---------------------------------------------------
from PrimaryDPDMaker import PrimaryDPD_OutputDefinitions as dpdOutput

# Take all items from the input, except for the ones listed in the excludeList
# If the excludeList is empty, all containers from the input file (e.g. AOD)
# are copied to the output file.
excludeList = [ "TrigMuonEFContainer#HLT_MuonEF" ]
dpdOutput.addAllItemsFromInputExceptExcludeList( streamName, excludeList )

# You need to add your newly created output containers from above to the output stream
#ElLoose18Stream.AddItem( ['CompositeParticleContainer#*'] )
#ElLoose18Stream.AddItem( ['INav4MomLinkContainer#*'] )



#====================================================================
# Define the THistSvc
#====================================================================
# Dedicated THistSvc instance for this stream; OPT='SHARE' lets several
# streams write into the same output file.
from GaudiSvc.GaudiSvcConf import THistSvc
ServiceMgr += THistSvc( "ElLoose18StreamTHistSvc",
                        Output = [ "%s DATAFILE='%s' OPT='SHARE'" % ( streamName, fileName ) ]
                        )


Exemplo n.º 16
0
def _setupCommonServices():
    """Configure the common Athena services needed for HLT/online running.

    Performs the default ATLAS job configuration, then adds the HLT-specific
    services (proxy providers, byte-stream conversion, detector description,
    IOV, core-dump handling, COOL update helper and histogramming) to the
    ServiceMgr. Mutates global job configuration as a side effect; returns None.
    """
    from AthenaCommon.Constants import VERBOSE, DEBUG, INFO, ERROR

    # Add timestamp to python logger
    from AthenaCommon.Logging import log
    log.setFormat("%(asctime)s  Py:%(name)-31s %(levelname)7s %(message)s")

    from AthenaCommon.Logging import logging
    log = logging.getLogger( 'TriggerUnixStandardSetup::setupCommonServices:' )

    # Do the default Atlas job configuration first
    import AthenaCommon.AtlasUnixStandardJob

    # Now do HLT specific configuration
    from AthenaCommon import CfgMgr
    from AthenaCommon.AppMgr import theApp
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    from AthenaCommon.AppMgr import ToolSvc

    # Check whether we are running in athenaXT
    # Only a minimal set of properties should depend on this
    import sys
    # BUGFIX: dict.has_key() was removed in Python 3; the 'in' membership
    # test is equivalent and works on both Python 2 and 3.
    if 'HLTTestApps' in sys.modules:
        _Conf.athenaXT = True
        log.debug("Configuration for athenaXT running")
    else:
        _Conf.athenaXT = False
        log.debug("Configuration for online running")

    # setup ROOT6 if needed
    _setupRoot6IfNeeded()

    # StoreGateSvc: history tracking is disabled for online running
    svcMgr.StoreGateSvc.ActivateHistory = False

    # ProxyProviderSvc services configuration
    svcMgr += CfgMgr.ProxyProviderSvc()

    # --- ByteStreamAddressProviderSvc configuration
    svcMgr += CfgMgr.ByteStreamAddressProviderSvc()
    svcMgr.ProxyProviderSvc.ProviderNames += [ "ByteStreamAddressProviderSvc" ]
    theApp.CreateSvc += [ svcMgr.ByteStreamAddressProviderSvc.getFullName() ]

    # Initialization of DetDescrCnvSvc
    svcMgr += CfgMgr.DetDescrCnvSvc(
        # specify primary Identifier dictionary to be used
        IdDictName = "IdDictParser/ATLAS_IDS.xml"
        )
    theApp.CreateSvc += [ svcMgr.DetDescrCnvSvc.getFullName() ]
    svcMgr.EventPersistencySvc.CnvServices += [ "DetDescrCnvSvc" ]

    # --- ByteStreamCnvSvc configuration
    svcMgr += CfgMgr.ByteStreamCnvSvc("ByteStreamCnvSvc")
    svcMgr.EventPersistencySvc.CnvServices += [ "ByteStreamCnvSvc" ]

    # Disable history
    svcMgr += CfgMgr.HistorySvc()
    svcMgr.HistorySvc.Activate = False

    # Configuration of Interval of Validity Service
    svcMgr += CfgMgr.IOVSvc()

    # Configure TrigISHelper
    from TrigServices.TrigServicesConf import TrigISHelper
    ToolSvc += TrigISHelper("TrigISHelper")

    # Configure TrigPreFlightCheck
    from TrigServices.TrigServicesConf import TrigPreFlightCheck
    ToolSvc += TrigPreFlightCheck("TrigPreFlightCheck",
                                  ReleaseDirs = ["AtlasP1HLT","AtlasHLT"]
                                  )

    # Configure CoreDumpSvc (only if not already present)
    if not hasattr(svcMgr,"CoreDumpSvc"):
        from AthenaServices.Configurables import CoreDumpSvc
        svcMgr += CoreDumpSvc()

    # Configure COOL update helper tool
    from TrigServices.TrigServicesConfig import TrigCOOLUpdateHelper
    _eventLoopMgr(svcMgr).CoolUpdateTool = TrigCOOLUpdateHelper()

    # Setup online THistSvc unless specifically configured otherwise;
    # the online and offline histogramming services are mutually exclusive.
    if _Conf.useOnlineTHistSvc:
        if hasattr(svcMgr, 'THistSvc'):
            log.fatal("The offline histogramming THistSvc is already in place.")
            raise RuntimeError("Cannot setup online histogramming TrigMonTHistSvc")
        log.debug("Using online histogramming service (TrigMonTHistSvc)")
        from TrigServices.TrigServicesConf import TrigMonTHistSvc
        svcMgr += TrigMonTHistSvc("THistSvc")
    else:
        log.debug("Using offline histogramming service (THistSvc)")
        from GaudiSvc.GaudiSvcConf import THistSvc
        svcMgr += THistSvc()

    # Explicitly set a few OutputLevels (needed because some services are created in
    # different order when running with the PSC)
    svcMgr.StatusCodeSvc.OutputLevel = theApp.OutputLevel
    svcMgr.IncidentSvc.OutputLevel = theApp.OutputLevel
    svcMgr.ProxyProviderSvc.OutputLevel = theApp.OutputLevel
    svcMgr.StoreGateSvc.OutputLevel = theApp.OutputLevel

    return