# --- Hybrid MP+MT job setup: split EvtMax across AthenaMP workers, then wire
# --- the MT-side services and the L1Topo simulation test algorithm.
# NOTE(review): this chunk begins inside an 'if' whose condition lies before
# this view; reconstructed as an EvtMax validity check from the fatal message
# below — confirm against the full file.
if theApp.EvtMax <= 0:
    msg.fatal('EvtMax must be >0 for hybrid configuration')
    sys.exit(AthenaCommon.ExitCodes.CONFIGURATION_ERROR)

# Events that don't divide evenly across workers are silently dropped: warn.
if theApp.EvtMax % nProc != 0:
    msg.warning(
        'EvtMax[%s] is not divisible by nProcs[%s]: MP Workers will not process all requested events',
        theApp.EvtMax, nProc)

# Each MP worker processes an equal-sized chunk of the requested events.
chunkSize = int(theApp.EvtMax / nProc)
from AthenaMP.AthenaMPFlags import jobproperties as jps
jps.AthenaMPFlags.ChunkSize = chunkSize
msg.info('AthenaMP workers will process %s events each', chunkSize)

## force loading of data. make sure this alg is at the front of the
## AlgSequence
from SGComps.SGCompsConf import SGInputLoader
topSequence += SGInputLoader(OutputLevel=DEBUG, ShowEventDump=False)

# ThreadPoolService thread local initialization
from GaudiHive.GaudiHiveConf import ThreadPoolSvc
svcMgr += ThreadPoolSvc("ThreadPoolSvc")
svcMgr.ThreadPoolSvc.ThreadInitTools = ["ThreadInitTool"]

# MT-specific code
#---------------------------------------------------------------------------------#

# L1Topo simulation test algorithm, fed from ASCII TOBs and an XML menu file.
from L1TopoSimulation.L1TopoSimulationTestConfig import L1TopoSimulationTest
topSequence += L1TopoSimulationTest()
topSequence.L1TopoSimulationTest.InputASCIIFile = fTOBs
topSequence.L1TopoSimulationTest.InputXMLFile = fmenu

# Histogram service for any monitoring output.
from GaudiSvc.GaudiSvcConf import THistSvc
svcMgr += THistSvc()
# --- AthViews "manual views" demo job: schedule two event views by hand and
# --- run a small data-flow chain inside the first one.
svcMgr += ForwardSchedulerSvc()
svcMgr.ForwardSchedulerSvc.CheckDependencies = True

# Use McEventSelector so we can run with AthenaMP
import AthenaCommon.AtlasUnixGeneratorJob

# Full job is a list of algorithms
from AthenaCommon.AlgSequence import AlgSequence
job = AlgSequence()

# Names of the two views created below; view 1 hosts the data-flow algs.
manualViewName1 = "view1"
manualViewName2 = "view2"

# Retrieve MC event info
from SGComps.SGCompsConf import SGInputLoader
job += SGInputLoader(OutputLevel=INFO, ShowEventDump=False)
job.SGInputLoader.Load = [('EventInfo', 'McEventInfo')]

# Make views
job += CfgMgr.AthViews__ViewMakeAlg("make_alg")
job.make_alg.ViewNames = [manualViewName1, manualViewName2]

# Producer: writes an int into view 1.
job += CfgMgr.AthViews__DFlowAlg1_manualViews("dflow_alg1")
job.dflow_alg1.EvtInfo = "McEventInfo"
job.dflow_alg1.ViewName = manualViewName1
job.dflow_alg1.ExtraOutputs = [('int', manualViewName1 + '_dflow_int')]

# Consumer: reads the int back out of the same view.
job += CfgMgr.AthViews__DFlowAlg2_manualViews("dflow_alg2")
job.dflow_alg2.ViewName = manualViewName1
job.dflow_alg2.ExtraInputs = [('int', manualViewName1 + '_dflow_int')]
def setupCommonServices():
    """Configure the core services shared by all online (HLT) jobs.

    Sets up messaging, the Hive whiteboard and scheduler, histogramming,
    ByteStream input/output conversion and the online event loop manager.
    Interface and side effects are unchanged; only layout and comments differ.
    """
    from AthenaCommon import CfgMgr
    from AthenaCommon.Logging import logging
    from AthenaCommon.Constants import INFO
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr, theApp
    from AthenaCommon.ConcurrencyFlags import jobproperties as jps

    # Setup messaging for Python and C++
    from AthenaCommon.Logging import log
    log.setFormat("%(asctime)s Py:%(name)-31s %(levelname)7s %(message)s")

    # Create our own logger (rebinds 'log' for the rest of this function)
    log = logging.getLogger('TriggerUnixStandardSetup::setupCommonServices:')

    from TrigServices.TrigServicesConfig import setupMessageSvc
    setupMessageSvc()

    # Do the default Atlas job configuration first
    import AthenaCommon.AtlasUnixStandardJob  # noqa: F401

    # Now do HLT/thread specific configuration (see e.g. AtlasThreadedJob.py)
    from StoreGate.StoreGateConf import SG__HiveMgrSvc
    svcMgr += SG__HiveMgrSvc("EventDataSvc",
                             NSlots=jps.ConcurrencyFlags.NumConcurrentEvents())

    import StoreGate.StoreGateConf as StoreGateConf
    svcMgr += StoreGateConf.StoreGateSvc("ConditionStore")

    # Configure the CoreDumpSvc unless one is already present
    if not hasattr(svcMgr, "CoreDumpSvc"):
        from AthenaServices.Configurables import CoreDumpSvc
        svcMgr += CoreDumpSvc()

    # ThreadPoolService thread local initialization
    from GaudiHive.GaudiHiveConf import ThreadPoolSvc
    svcMgr += ThreadPoolSvc("ThreadPoolSvc")
    svcMgr.ThreadPoolSvc.ThreadInitTools = ["ThreadInitTool"]

    from GaudiHive.GaudiHiveConf import AlgResourcePool
    svcMgr += AlgResourcePool(OutputLevel=INFO,
                              TopAlg=["AthSequencer/AthMasterSeq"])

    from AthenaCommon.AlgSequence import AlgSequence
    from SGComps.SGCompsConf import SGInputLoader
    topSequence = AlgSequence()
    topSequence += SGInputLoader(FailIfNoProxy=False)  # change to True eventually

    from AthenaCommon.AlgScheduler import AlgScheduler
    AlgScheduler.ShowDataDependencies(False)
    AlgScheduler.ShowControlFlow(False)
    AlgScheduler.setDataLoaderAlg('SGInputLoader')

    # Setup SGCommitAuditor to sweep new DataObjects at end of Alg execute
    theApp.AuditAlgorithms = True
    from SGComps.SGCompsConf import SGCommitAuditor
    svcMgr.AuditorSvc += SGCommitAuditor()

    # setup ROOT6
    from PyUtils.Helpers import ROOT6Setup
    ROOT6Setup()

    # Setup online THistSvc unless specifically configured otherwise.
    # Created early and forced into existence so infrastructure services can
    # book histograms (avoids e.g. problems with ROBDataProviderSvc).
    if _Conf.useOnlineTHistSvc:
        if hasattr(svcMgr, 'THistSvc'):
            log.fatal(
                "The offline histogramming THistSvc is already in place.")
            raise RuntimeError(
                "Cannot setup online histogramming TrigMonTHistSvc")
        log.debug("Using online histogramming service (TrigMonTHistSvc)")
        from TrigServices.TrigServicesConf import TrigMonTHistSvc
        svcMgr += TrigMonTHistSvc("THistSvc")
    else:
        log.debug("Using offline histogramming service (THistSvc)")
        from GaudiSvc.GaudiSvcConf import THistSvc
        svcMgr += THistSvc()

    # StoreGateSvc
    svcMgr.StoreGateSvc.ActivateHistory = False

    # ProxyProviderSvc services configuration
    svcMgr += CfgMgr.ProxyProviderSvc()

    # --- ByteStreamAddressProviderSvc configuration
    svcMgr += CfgMgr.ByteStreamAddressProviderSvc()
    svcMgr.ProxyProviderSvc.ProviderNames += ["ByteStreamAddressProviderSvc"]
    theApp.CreateSvc += [svcMgr.ByteStreamAddressProviderSvc.getFullName()]

    # Initialization of DetDescrCnvSvc
    svcMgr += CfgMgr.DetDescrCnvSvc(
        # specify primary Identifier dictionary to be used
        IdDictName="IdDictParser/ATLAS_IDS.xml")
    theApp.CreateSvc += [svcMgr.DetDescrCnvSvc.getFullName()]
    svcMgr.EventPersistencySvc.CnvServices += ["DetDescrCnvSvc"]

    # Online services for ByteStream input/output
    from TrigByteStreamCnvSvc.TrigByteStreamCnvSvcConf import TrigEventSelectorByteStream
    from TrigByteStreamCnvSvc.TrigByteStreamCnvSvcConfig import TrigByteStreamInputSvc, TrigByteStreamCnvSvc
    svcMgr += TrigByteStreamCnvSvc(
        "ByteStreamCnvSvc")  # this name is hard-coded in some converters
    svcMgr.EventPersistencySvc.CnvServices += ["ByteStreamCnvSvc"]
    svcMgr += TrigByteStreamInputSvc("ByteStreamInputSvc")
    svcMgr += TrigEventSelectorByteStream(
        "EventSelector", ByteStreamInputSvc=svcMgr.ByteStreamInputSvc)
    theApp.EvtSel = "EventSelector"

    # Online event loop manager
    from TrigServices.TrigServicesConfig import HltEventLoopMgr
    loopMgr = HltEventLoopMgr("HltEventLoopMgr")
    loopMgr.WhiteboardSvc = "EventDataSvc"
    loopMgr.SchedulerSvc = AlgScheduler.getScheduler().getName()
    loopMgr.EvtSel = svcMgr.EventSelector
    loopMgr.OutputCnvSvc = svcMgr.ByteStreamCnvSvc
    svcMgr += loopMgr
    theApp.EventLoop = loopMgr.name()

    from TrigOutputHandling.TrigOutputHandlingConfig import HLTResultMTMakerCfg
    svcMgr.HltEventLoopMgr.ResultMaker = HLTResultMTMakerCfg()

    # Configuration of Interval of Validity Service
    svcMgr += CfgMgr.IOVSvc()

    # Configure COOL update helper tool
    from TrigServices.TrigServicesConfig import TrigCOOLUpdateHelper
    svcMgr.HltEventLoopMgr.CoolUpdateTool = TrigCOOLUpdateHelper()

    # Configure the online ROB data provider service
    from TrigServices.TrigServicesConfig import HltROBDataProviderSvc
    svcMgr += HltROBDataProviderSvc()

    # Explicitly set a few OutputLevels (needed because some services are
    # created in different order when running with the PSC)
    svcMgr.IncidentSvc.OutputLevel = theApp.OutputLevel
    svcMgr.ProxyProviderSvc.OutputLevel = theApp.OutputLevel
    svcMgr.StoreGateSvc.OutputLevel = theApp.OutputLevel

    return
def _setupAtlasThreadedJob():
    """Configure an AthenaMT (Hive) job.

    Sets the message service type, creates the event-data whiteboard with one
    slot per concurrent event, configures the algorithm resource pool and
    scheduler, selects the event loop manager (Event Service or plain Hive),
    enables timeline recording and registers the SGCommitAuditor.
    """
    from AthenaCommon.AppMgr import theApp
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    from AthenaCommon import Constants
    from AthenaCommon.ConcurrencyFlags import jobproperties as jps

    if (jps.ConcurrencyFlags.NumProcs() == 0):
        theApp.MessageSvcType = "InertMessageSvc"
    else:
        # InertMessageSvc doesn't play nice with MP
        theApp.MessageSvcType = "MessageSvc"
    svcMgr.MessageSvc.defaultLimit = 0
    svcMgr.MessageSvc.Format = "% F%40W%S%4W%R%e%s%8W%R%T %0W%M"

    # One whiteboard slot per concurrent event.
    numStores = jps.ConcurrencyFlags.NumConcurrentEvents()

    from StoreGate.StoreGateConf import SG__HiveMgrSvc
    svcMgr += SG__HiveMgrSvc("EventDataSvc")
    svcMgr.EventDataSvc.NSlots = numStores

    from GaudiHive.GaudiHiveConf import AlgResourcePool
    arp = AlgResourcePool(OutputLevel=Constants.INFO)
    arp.TopAlg = ["AthMasterSeq"]  # this should enable control flow
    svcMgr += arp

    from AthenaCommon.AlgScheduler import AlgScheduler
    AlgScheduler.ShowDataDependencies(False)
    AlgScheduler.ShowControlFlow(False)

    from AthenaCommon.AlgSequence import AlgSequence
    topSequence = AlgSequence()

    from SGComps.SGCompsConf import SGInputLoader
    # FailIfNoProxy=False makes it a warning, not an error, if unmet data
    # dependencies are not found in the store. It should probably be changed
    # to True eventually.
    topSequence += SGInputLoader(FailIfNoProxy=False)
    AlgScheduler.setDataLoaderAlg('SGInputLoader')

    if theApp._opts.mtes:
        # Multi-threaded Event Service
        from AthenaServices.AthenaServicesConf import AthenaMtesEventLoopMgr
        svcMgr += AthenaMtesEventLoopMgr()
        svcMgr.AthenaMtesEventLoopMgr.WhiteboardSvc = "EventDataSvc"
        svcMgr.AthenaMtesEventLoopMgr.SchedulerSvc = AlgScheduler.getScheduler(
        ).getName()
        svcMgr.AthenaMtesEventLoopMgr.EventRangeChannel = theApp._opts.mtes_channel
        theApp.EventLoop = "AthenaMtesEventLoopMgr"
    else:
        from AthenaServices.AthenaServicesConf import AthenaHiveEventLoopMgr
        svcMgr += AthenaHiveEventLoopMgr()
        svcMgr.AthenaHiveEventLoopMgr.WhiteboardSvc = "EventDataSvc"
        svcMgr.AthenaHiveEventLoopMgr.SchedulerSvc = AlgScheduler.getScheduler(
        ).getName()
        theApp.EventLoop = "AthenaHiveEventLoopMgr"

    # enable timeline recording
    from GaudiHive.GaudiHiveConf import TimelineSvc
    svcMgr += TimelineSvc(RecordTimeline=True, Partial=False)

    # Setup SGCommitAuditor to sweep new DataObjects at end of Alg execute.
    # BUGFIX: the original did 'theAuditorSvc += SGCommitAuditor()' while the
    # binding 'theAuditorSvc = svcMgr.AuditorSvc' was commented out, raising
    # NameError at the end of setup; append to the auditor service directly
    # (matches setupCommonServices elsewhere in this file).
    theApp.AuditAlgorithms = True
    from SGComps.SGCompsConf import SGCommitAuditor
    svcMgr.AuditorSvc += SGCommitAuditor()