def getSpecialConfiguration(flags):
    """Return a dict of Special configuration as parsed from flags.Input.Files"""
    if len(flags.Input.Files) > 1:
        log.info("Multiple input files. Using the first for Digitization special configuration.")
    log.info("Obtaining Digitization special configuration from %s", flags.Input.Files[0])
    File = AthFile.fopen(flags.Input.Files[0])
    # extract the special config list
    tag_info = File.infos.get("tag_info", {})
    SpecialCfg = tag_info.get("specialConfiguration", "").split(";")
    # fill containers
    preIncludes = []
    out = {}
    for KeyEqValue in SpecialCfg:
        # Ignore empty or "NONE" substrings, e.g. from consecutive or trailing semicolons
        if not KeyEqValue or KeyEqValue.upper() == "NONE":
            continue
        # If not in key=value format, treat as v, with k="preInclude"
        if "=" not in KeyEqValue:
            KeyEqValue = "preInclude=" + KeyEqValue
        # Handle k=v directives
        key, value = KeyEqValue.split("=")
        if key == "preInclude":
            preIncludes += value.split(",")
        else:
            out[key] = value
    # FIXME includes not migrated
    # from AthenaCommon.Include import include
    # for inc in preIncludes:
    #     include(inc)
    return out
def HLTPrescaleCondAlgCfg(flags):
    log = logging.getLogger('TrigConfigSvcCfg')
    log.info("Setting up HLTPrescaleCondAlg")
    from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
    acc = ComponentAccumulator()

    TrigConf__HLTPrescaleCondAlg = CompFactory.getComp("TrigConf::HLTPrescaleCondAlg")
    hltPrescaleCondAlg = TrigConf__HLTPrescaleCondAlg("HLTPrescaleCondAlg")

    tc = getTrigConfigFromFlag(flags)
    hltPrescaleCondAlg.Source = tc["source"]
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    if athenaCommonFlags.isOnline():
        from IOVDbSvc.IOVDbSvcConfig import addFolders
        acc.merge(addFolders(flags, getHLTPrescaleFolderName(),
                             "TRIGGER_ONL", className="AthenaAttributeList"))
        log.info("Adding folder %s to CompAcc", getHLTPrescaleFolderName())
    if tc["source"] == "COOL":
        hltPrescaleCondAlg.TriggerDB = tc["dbconn"]
    elif tc["source"] == "DB":
        hltPrescaleCondAlg.TriggerDB = tc["dbconn"]
        hltPrescaleCondAlg.HLTPsk = tc["hltpsk"]
    elif tc["source"] == "FILE":
        hltPrescaleCondAlg.Filename = getHLTPrescalesSetFileName(flags)
    else:
        raise RuntimeError("trigger configuration flag 'trigConfig' starts with %s, which is not understood" % tc["source"])

    acc.addCondAlgo(hltPrescaleCondAlg)
    return acc
def main():
    l1items_xml = None
    l1items_json = None
    hltinput = None
    if len(sys.argv) < 3:
        log.info("Please specify at least two menu files (L1Menu.xml, L1Menu.json, HLTMenu.json)")
    for fileName in sys.argv[1:]:
        if fileName.endswith(".json"):
            if fileName.startswith("L1"):
                l1menu = L1MenuAccess(fileName)
                l1items_json = l1menu.items()
            else:
                hltmenu = HLTMenuAccess(fileName)
                hltinput = hltmenu.chains()
        elif fileName.endswith(".xml"):
            l1menu = L1MenuXMLReader(fileName)
            l1items_xml = l1menu.getL1Items()
    if l1items_xml and l1items_json:
        compareBothL1Menus(l1items_xml, l1items_json)
    if l1items_json and hltinput:
        pass  # check not yet implemented
    return 0
def ROOT6Setup():
    log.info('executing ROOT6Setup')
    if six.PY3:
        import builtins as builtin_mod
    else:
        import __builtin__ as builtin_mod
    oldimporthook = builtin_mod.__import__
    autoload_var_name = 'ROOT6_NamespaceAutoloadHook'

    def root6_importhook(name, globals={}, locals={}, fromlist=[], level=-1):
        if six.PY3 and level < 0:
            level = 0
        m = oldimporthook(name, globals, locals, fromlist, level)
        if m and (m.__name__ == 'ROOT' or name[0:4] == 'ROOT') \
                and (name != 'ROOT' or fromlist is not None):
            # prevent triggering on just 'import ROOT'; see ATEAM-597
            log.debug('Python import module=%s fromlist=%s', name, str(fromlist))
            if fromlist:
                # MN: in this case 'm' is the final nested module already, don't walk the full 'name'
                vars = ['.'.join(['', fl, autoload_var_name]) for fl in fromlist]
            else:
                vars = ['.'.join([name, autoload_var_name])]
            for v in vars:
                try:
                    mm = m
                    # MN: walk the module chain and try to touch 'autoload_var_name'
                    #     to trigger ROOT autoloading of namespaces
                    for comp in v.split('.')[1:]:
                        mm = getattr(mm, comp)
                except Exception:
                    pass
        return m

    builtin_mod.__import__ = root6_importhook
def mem_status(msg):
    """memory usage information: shared/private"""
    for line in open('/proc/self/status'):
        if line.startswith('Vm'):
            msg.debug(line.strip())
    private, shared = _get_mem_stats()
    msg.info("===> private: %s MB | shared: %s MB", private / 1024., shared / 1024.)
def grow_vmem(targetvmem_mb):
    v = vmem_mb()
    global l_extra_vmem_holder
    l_extra_vmem_holder = []
    while vmem_mb() < targetvmem_mb:
        l_extra_vmem_holder += [" " * 1024]
    v = vmem_mb() - v
    log.info("Acquired %f mb of extra vmem", v)
def setupCommonServicesEnd():
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    from AthenaCommon.Logging import logging
    from AthenaCommon.AlgSequence import AlgSequence

    log = logging.getLogger('TriggerUnixStandardSetup::setupCommonServicesEnd:')
    topSequence = AlgSequence()

    # --- create the ByteStreamCnvSvc after the Detector Description otherwise
    # --- the initialization of converters fails
    #from AthenaCommon.AppMgr import theApp
    #theApp.CreateSvc += [ svcMgr.ByteStreamCnvSvc.getFullName() ]

    # Make sure no THistSvc output/input stream is defined for online running
    if _Conf.useOnlineTHistSvc:
        svcMgr.THistSvc.Output = []
        if len(svcMgr.THistSvc.Input) > 0:
            log.error('THistSvc.Input = %s. Input not allowed for online running. Disabling input.',
                      svcMgr.THistSvc.Input)
            svcMgr.THistSvc.Input = []
    # For offline running make sure at least the EXPERT stream is defined
    else:
        if 1 not in [o.count('EXPERT') for o in svcMgr.THistSvc.Output]:
            svcMgr.THistSvc.Output += ["EXPERT DATAFILE='expert-monitoring.root' OPT='RECREATE'"]

    # Basic operational monitoring
    from TrigOnlineMonitor.TrigOnlineMonitorConfig import TrigOpMonitor
    topSequence += TrigOpMonitor()

    # Set default properties for some important services after all user job options
    log.info('Configure core services for online running')

    svcMgr.CoreDumpSvc.CoreDumpStream = "stdout"
    svcMgr.CoreDumpSvc.CallOldHandler = False
    svcMgr.CoreDumpSvc.StackTrace = True
    svcMgr.CoreDumpSvc.FatalHandler = 0   # no extra fatal handler
    svcMgr.CoreDumpSvc.TimeOut = 60000000000  # timeout for stack trace generation changed to 60s (ATR-17112)

    svcMgr.IOVSvc.updateInterval = "RUN"
    svcMgr.IOVSvc.preLoadData = True
    svcMgr.IOVSvc.preLoadExtensibleFolders = False  # ATR-19392
    svcMgr.IOVSvc.forceResetAtBeginRun = False

    if hasattr(svcMgr, 'IOVDbSvc'):
        svcMgr.IOVDbSvc.CacheAlign = 0  # VERY IMPORTANT to get unique queries for folder updates (see Savannah #81092)
        svcMgr.IOVDbSvc.CacheRun = 0
        svcMgr.IOVDbSvc.CacheTime = 0

    return
def _setupCommonServicesEnd():
    from AthenaCommon.AppMgr import theApp
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    from AthenaCommon.Logging import logging
    from TriggerJobOpts.TriggerFlags import TriggerFlags

    log = logging.getLogger('TriggerUnixStandardSetup::setupCommonServicesEnd:')

    # --- create the ByteStreamCnvSvc after the Detector Description otherwise
    # --- the initialization of converters fails
    theApp.CreateSvc += [svcMgr.ByteStreamCnvSvc.getFullName()]

    # Make sure no THistSvc output/input stream is defined for online running
    if _Conf.useOnlineTHistSvc:
        svcMgr.THistSvc.Output = []
        if len(svcMgr.THistSvc.Input) > 0:
            log.error('THistSvc.Input = %s. Input not allowed for online running. Disabling input.' % svcMgr.THistSvc.Input)
            svcMgr.THistSvc.Input = []
    # For offline running make sure at least the EXPERT stream is defined
    else:
        if 1 not in [o.count('EXPERT') for o in svcMgr.THistSvc.Output]:
            svcMgr.THistSvc.Output += ["EXPERT DATAFILE='expert-monitoring.root' OPT='RECREATE'"]

    # Set default properties for some important services after all user job options
    log.info('Configure core services for online running')

    svcMgr.CoreDumpSvc.CoreDumpStream = "stdout"
    svcMgr.CoreDumpSvc.CallOldHandler = True
    svcMgr.CoreDumpSvc.FatalHandler = 0   # no extra fatal handler
    svcMgr.CoreDumpSvc.TimeOut = 60000000000  # no timeout for stack trace generation -> changed to 60s (ATR-17112)

    # Disable StatusCodeSvc (causes problems with shutting down children at stop in HLTPU)
    svcMgr.StatusCodeSvc.SuppressCheck = True
    svcMgr.StatusCodeSvc.AbortOnError = False

    svcMgr.IOVSvc.updateInterval = "RUN"
    svcMgr.IOVSvc.preLoadData = True
    svcMgr.IOVSvc.forceResetAtBeginRun = False

    if hasattr(svcMgr, 'IOVDbSvc'):
        svcMgr.IOVDbSvc.CacheAlign = 0  # VERY IMPORTANT to get unique queries for folder updates (see Savannah #81092)
        svcMgr.IOVDbSvc.CacheRun = 0
        svcMgr.IOVDbSvc.CacheTime = 0

    # Flag to extract trigger configuration
    if TriggerFlags.Online.doDBConfig():
        from TrigConfigSvc import DoDBConfig
        # --- print out configuration details
        _printConfiguration(log.name)

    return
def enable_seeking(silent=False):
    """ try to install seek-stuff on the EventSelector side.
    if `silent` is True, only an attempt at installing the seeking is performed.
    otherwise an exception is raised if the seeking could not be installed.
    """
    import sys
    from AthenaCommon.Logging import log as msg
    if 'AthenaPoolCnvSvc.ReadAthenaPool' not in sys.modules:
        if silent:
            _msg = msg.debug
        else:
            _msg = msg.info
        # user did not import that module so we give up
        _msg("Cannot enable 'seeking' b/c module "
             "[AthenaPoolCnvSvc.ReadAthenaPool] hasn't been imported...")
        _msg("Modify your jobOptions to import that module "
             "(or just ignore this message)")
        if not silent:
            raise RuntimeError("configuration-logic error")
        return

    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    from AthenaCommon.Configurable import Configurable
    collectionType = svcMgr.EventSelector.properties()["CollectionType"]
    if collectionType in ("ImplicitROOT", Configurable.propertyNoValue,):
        svcMgr.EventSelector.CollectionType = "SeekableROOT"
        msg.info("=> Seeking enabled.")
    elif collectionType in ("SeekableROOT",):
        msg.verbose("=> Seeking already enabled.")
    else:
        msg.warning("Input seeking is not compatible with collection type of %s",
                    svcMgr.EventSelector.properties()["CollectionType"])
        msg.warning("=> Seeking disabled.")
        if not silent:
            raise RuntimeError("could not install seeking")

    from AthenaCommon.AppMgr import theApp
    if theApp.state() != theApp.State.OFFLINE:
        # do not bring up the whole C++ kaboodle too early in the game
        svcMgr.EventSelector.setup()
    return
def print_fds(msg):
    """print all file descriptors of current process"""
    import os, fcntl
    _realpath = os.path.realpath
    _join = os.path.join
    # print out file descriptors
    procfd = '/proc/self/fd'
    fds = os.listdir(procfd)
    for fd in fds:
        i = int(fd)
        realname = _realpath(_join(procfd, fd))
        msg.info("fd=[%i], realname=[%s] exists=[%s]",
                 i, realname, os.path.exists(realname))
    return
def watch(msg=None, message=""):
    """Timer (elap, user, system, child) with time-interval-reports into msg stream"""
    import time
    global time_list, time_list2
    time_list.append(os.times())
    time_list2.append(time.time())
    if msg is not None:
        (utime, stime, cutime, cstime, etime) = dt()
        elap_time = "%s_ELAP_TIME=%.4f seconds" % (message, etime)
        user_time = "%s_USER_TIME=%.2f" % (message, utime)
        system_time = "%s_SYSTEM_TIME=%.2f" % (message, stime)
        child_utime = "%s_CHILD_UTIME=%.2f" % (message, cutime)
        child_stime = "%s_CHILD_STIME=%.2f" % (message, cstime)
        msg.info(elap_time)
        msg.debug("%s %s" % (user_time, system_time))
        msg.debug("%s %s" % (child_utime, child_stime))
    return len(time_list)
def getHLTConfigSvc(flags):
    log = logging.getLogger('TrigConfigSvcCfg')
    TrigConf__HLTConfigSvc = CompFactory.getComp("TrigConf::HLTConfigSvc")
    hltConfigSvc = TrigConf__HLTConfigSvc("HLTConfigSvc")
    hltXMLFile = "None"
    hltConfigSvc.ConfigSource = "None"
    hltConfigSvc.XMLMenuFile = hltXMLFile
    hltConfigSvc.InputType = "file"
    hltJsonFileName = getHLTMenuFileName(flags)
    hltConfigSvc.JsonFileName = hltJsonFileName
    # TODO revisit if needed
    from AthenaCommon.AppMgr import theApp
    theApp.CreateSvc += ["TrigConf::HLTConfigSvc/HLTConfigSvc"]
    log.info("Configured HLTConfigSvc with run 2 style input file : %s", hltXMLFile)
    log.info("Configured HLTConfigSvc with InputType='file' and JsonFileName=%s", hltJsonFileName)
    return hltConfigSvc
def muonRdoDecodeTestMC():
    from AthenaCommon.Configurable import Configurable
    Configurable.configurableRun3Behavior = 1

    from AthenaConfiguration.AllConfigFlags import ConfigFlags
    ConfigFlags.Input.Files = [
        "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TriggerTest/valid1.110401.PowhegPythia_P2012_ttbar_nonallhad.recon.RDO.e3099_s2578_r7572_tid07644622_00/RDO.07644622._000001.pool.root.1"
    ]

    ConfigFlags.lock()
    ConfigFlags.dump()

    from AthenaCommon.Logging import log
    log.setLevel(DEBUG)
    log.info('About to setup Rpc RDO data decoding')

    cfg = ComponentAccumulator()

    # We are reading a pool file for this test
    from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg
    cfg.merge(PoolReadCfg(ConfigFlags))

    # Schedule RDO conversion
    # RPC decoding
    rpcdecodingAcc = RpcRDODecodeCfg(ConfigFlags)
    cfg.merge(rpcdecodingAcc)

    # TGC decoding
    tgcdecodingAcc = TgcRDODecodeCfg(ConfigFlags)
    cfg.merge(tgcdecodingAcc)

    # MDT decoding
    mdtdecodingAcc = MdtRDODecodeCfg(ConfigFlags)
    cfg.merge(mdtdecodingAcc)

    # CSC decoding
    cscdecodingAcc = CscRDODecodeCfg(ConfigFlags)
    cfg.merge(cscdecodingAcc)

    cscbuildingAcc = CscClusterBuildCfg(ConfigFlags)
    cfg.merge(cscbuildingAcc)

    log.info('Print Config')
    cfg.printConfig(withDetails=True)

    # Store config as pickle
    log.info('Save Config')
    with open('MuonRdoDecode.pkl', 'wb') as f:
        cfg.store(f)
        f.close()

    return cfg
def getL1ConfigSvc(flags):
    log = logging.getLogger('TrigConfigSvcCfg')
    from AthenaCommon.Logging import log

    # generate menu file
    generatedFile = generateL1Menu(flags)

    # configure config svc
    TrigConf__LVL1ConfigSvc = CompFactory.getComp("TrigConf::LVL1ConfigSvc")
    l1ConfigSvc = TrigConf__LVL1ConfigSvc("LVL1ConfigSvc")

    l1ConfigSvc.ConfigSource = "XML"
    from TriggerJobOpts.TriggerFlags import TriggerFlags
    l1XMLFile = TriggerFlags.inputLVL1configFile() if flags is None else flags.Trigger.LVL1ConfigFile
    # check if file exists in this directory otherwise add the package to aid path resolution
    # also a '/' in the file name indicates that no package needs to be added
    import os.path
    if not ("/" in l1XMLFile or os.path.isfile(l1XMLFile)):
        l1XMLFile = "TriggerMenuMT/" + l1XMLFile
    l1ConfigSvc.XMLMenuFile = l1XMLFile
    log.info("For run 2 style menu access configured LVL1ConfigSvc with input file : %s", l1XMLFile)

    if generatedFile is None:
        l1ConfigSvc.InputType = "none"
        l1ConfigSvc.JsonFileName = ""
        log.info("For run 3 style menu access configured LVL1ConfigSvc with InputType='none'")
    else:
        l1ConfigSvc.InputType = "file"
        l1JsonFileName = generatedFile
        l1ConfigSvc.JsonFileName = l1JsonFileName
        log.info("For run 3 style menu access configured LVL1ConfigSvc with InputType='file' and JsonFileName=%s", l1JsonFileName)

    from AthenaCommon.AppMgr import theApp
    theApp.CreateSvc += ["TrigConf::LVL1ConfigSvc/LVL1ConfigSvc"]
    return l1ConfigSvc
                              Key_DB2="Y2")

condDbFile = "condDb.txt"
import os
import string

found = 0
for dir in (".:" + os.environ.get('DATAPATH')).split(':'):
    cdb = os.path.join(dir, condDbFile)
    if (os.path.isfile(cdb)):
        found = 1
        break

if (found == 0):
    msg.fatal('ASCII condDb file \"' + condDbFile + '\" not found')
    sys.exit(AthenaCommon.ExitCodes.CONFIGURATION_ERROR)
else:
    msg.info("using ASCIICondDb file from " + cdb)

svcMgr += ASCIICondDbSvc(OutputLevel=DEBUG, CondFile=cdb)

#--------------------------------------------------------------
# Event related parameters
#--------------------------------------------------------------
theApp.EvtMax = 20

nProc = jp.ConcurrencyFlags.NumProcs()
if (nProc > 0):
    #
    ## Basic setup for MP/Hive
    #
# FIXME hack to match to buggy behaviour in old style configuration
OutputStreamHITS = cfg.getEventAlgo("OutputStreamHITS")
OutputStreamHITS.ItemList.remove("xAOD::EventInfo#EventInfo")
OutputStreamHITS.ItemList.remove("xAOD::EventAuxInfo#EventInfoAux.")

# FIXME hack because deduplication is broken
PoolAttributes = ["TREE_BRANCH_OFFSETTAB_LEN = '100'"]
PoolAttributes += ["DatabaseName = '" + ConfigFlags.Output.HITSFileName + "'; ContainerName = 'TTree=CollectionTree'; TREE_AUTO_FLUSH = '1'"]
cfg.getService("AthenaPoolCnvSvc").PoolAttributes += PoolAttributes

# Dump config
cfg.addEventAlgo(CompFactory.JobOptsDumperAlg(FileName="G4AtlasTestConfig.txt"))
cfg.getService("StoreGateSvc").Dump = True
cfg.getService("ConditionStore").Dump = True
cfg.printConfig(withDetails=True, summariseProps=True)

ConfigFlags.dump()

# Execute and finish
#sc = cfg.run(maxEvents=evtMax)

b = time.time()
log.info("Run G4AtlasAlg in " + str(b - a) + " seconds")

# Success should be 0
#os.sys.exit(not sc.isSuccess())

f = open("test.pkl", "wb")
cfg.store(f)
f.close()
def configureStrategy(self, strategy, pileup, events_before_fork):
    from .AthenaMPFlags import jobproperties as jp
    from AthenaCommon.ConcurrencyFlags import jobproperties as jp
    event_range_channel = jp.AthenaMPFlags.EventRangeChannel()
    if (jp.AthenaMPFlags.ChunkSize() > 0):
        chunk_size = jp.AthenaMPFlags.ChunkSize()
        msg.info('Chunk size set to %i', chunk_size)
    # Use auto flush only if file is compressed with LZMA, else use default chunk_size
    elif (jp.AthenaMPFlags.ChunkSize() == -1):
        from PyUtils.MetaReaderPeeker import metadata
        if (metadata['file_comp_alg'] == 2):
            chunk_size = metadata['auto_flush']
            msg.info('Chunk size set to auto flush (%i)', chunk_size)
        else:
            chunk_size = jp.AthenaMPFlags.ChunkSize.__class__.StoredValue
            msg.info('LZMA algorithm not in use, chunk_size set to default (%i)', chunk_size)
    # Use auto flush only if file is compressed with LZMA or ZLIB, else use default chunk_size
    elif (jp.AthenaMPFlags.ChunkSize() == -2):
        from PyUtils.MetaReaderPeeker import metadata
        if (metadata['file_comp_alg'] == 1 or metadata['file_comp_alg'] == 2):
            chunk_size = metadata['auto_flush']
            msg.info('Chunk size set to auto flush (%i)', chunk_size)
        else:
            chunk_size = jp.AthenaMPFlags.ChunkSize.__class__.StoredValue
            msg.info('LZMA nor ZLIB in use, chunk_size set to default (%i)', chunk_size)
    # Use auto flush only if file is compressed with LZMA, ZLIB or LZ4, else use default chunk_size
    elif (jp.AthenaMPFlags.ChunkSize() == -3):
        from PyUtils.MetaReaderPeeker import metadata
        if (metadata['file_comp_alg'] == 1 or metadata['file_comp_alg'] == 2
                or metadata['file_comp_alg'] == 4):
            chunk_size = metadata['auto_flush']
            msg.info('Chunk size set to auto flush (%i)', chunk_size)
        else:
            chunk_size = jp.AthenaMPFlags.ChunkSize.__class__.StoredValue
            msg.info('LZMA, ZLIB nor LZ4 in use, chunk_size set to default (%i)', chunk_size)
    # Use auto flush value for chunk_size, regardless of compression algorithm
    elif (jp.AthenaMPFlags.ChunkSize() <= -4):
        from PyUtils.MetaReaderPeeker import metadata
        chunk_size = metadata['auto_flush']
        msg.info('Chunk size set to auto flush (%i)', chunk_size)
    else:
        chunk_size = jp.AthenaMPFlags.ChunkSize.__class__.StoredValue
        msg.warning('Invalid ChunkSize, Chunk Size set to default (%i)', chunk_size)

    debug_worker = jp.ConcurrencyFlags.DebugWorkers()
    use_shared_reader = jp.AthenaMPFlags.UseSharedReader()
    use_shared_writer = jp.AthenaMPFlags.UseSharedWriter()

    if strategy == 'SharedQueue' or strategy == 'RoundRobin':
        if use_shared_reader:
            from AthenaCommon.AppMgr import ServiceMgr as svcMgr
            from AthenaIPCTools.AthenaIPCToolsConf import AthenaSharedMemoryTool
            svcMgr.EventSelector.SharedMemoryTool = AthenaSharedMemoryTool(
                "EventStreamingTool",
                SharedMemoryName="EventStream" + str(os.getpid()))
            if 'AthenaPoolCnvSvc.ReadAthenaPool' in sys.modules:
                svcMgr.AthenaPoolCnvSvc.InputStreamingTool = AthenaSharedMemoryTool(
                    "InputStreamingTool",
                    SharedMemoryName="InputStream" + str(os.getpid()))
        if use_shared_writer:
            if 'AthenaPoolCnvSvc.WriteAthenaPool' in sys.modules:
                from AthenaCommon.AppMgr import ServiceMgr as svcMgr
                from AthenaIPCTools.AthenaIPCToolsConf import AthenaSharedMemoryTool
                svcMgr.AthenaPoolCnvSvc.OutputStreamingTool += [
                    AthenaSharedMemoryTool(
                        "OutputStreamingTool_0",
                        SharedMemoryName="OutputStream" + str(os.getpid()))
                ]

        from AthenaMPTools.AthenaMPToolsConf import SharedEvtQueueProvider
        self.Tools += [
            SharedEvtQueueProvider(UseSharedReader=use_shared_reader,
                                   IsPileup=pileup,
                                   EventsBeforeFork=events_before_fork,
                                   ChunkSize=chunk_size)
        ]

        if (self.nThreads >= 1):
            from AthenaMPTools.AthenaMPToolsConf import SharedHiveEvtQueueConsumer
            self.Tools += [
                SharedHiveEvtQueueConsumer(UseSharedReader=use_shared_reader,
                                           IsPileup=pileup,
                                           IsRoundRobin=(strategy == 'RoundRobin'),
                                           EventsBeforeFork=events_before_fork,
                                           Debug=debug_worker)
            ]
        else:
            from AthenaMPTools.AthenaMPToolsConf import SharedEvtQueueConsumer
            self.Tools += [
                SharedEvtQueueConsumer(UseSharedReader=use_shared_reader,
                                       UseSharedWriter=use_shared_writer,
                                       IsPileup=pileup,
                                       IsRoundRobin=(strategy == 'RoundRobin'),
                                       EventsBeforeFork=events_before_fork,
                                       ReadEventOrders=jp.AthenaMPFlags.ReadEventOrders(),
                                       EventOrdersFile=jp.AthenaMPFlags.EventOrdersFile(),
                                       Debug=debug_worker)
            ]
        if use_shared_writer:
            from AthenaMPTools.AthenaMPToolsConf import SharedWriterTool
            self.Tools += [SharedWriterTool()]

        # Enable seeking
        if not use_shared_reader:
            setupEvtSelForSeekOps()

    elif strategy == 'FileScheduling':
        from AthenaMPTools.AthenaMPToolsConf import FileSchedulingTool
        self.Tools += [FileSchedulingTool(IsPileup=pileup, Debug=debug_worker)]

    elif strategy == 'EventService':
        channelScatterer2Processor = "AthenaMP_Scatterer2Processor"
        channelProcessor2EvtSel = "AthenaMP_Processor2EvtSel"

        from AthenaMPTools.AthenaMPToolsConf import EvtRangeScatterer
        self.Tools += [
            EvtRangeScatterer(ProcessorChannel=channelScatterer2Processor,
                              EventRangeChannel=event_range_channel,
                              DoCaching=jp.AthenaMPFlags.EvtRangeScattererCaching())
        ]

        from AthenaMPTools.AthenaMPToolsConf import EvtRangeProcessor
        self.Tools += [
            EvtRangeProcessor(IsPileup=pileup,
                              Channel2Scatterer=channelScatterer2Processor,
                              Channel2EvtSel=channelProcessor2EvtSel,
                              Debug=debug_worker)
        ]
        # Enable seeking
        setupEvtSelForSeekOps()

    else:
        msg.warning("Unknown strategy. No MP tools will be configured")
if __name__ == "__main__":
    # To run this, do e.g.
    # python -m MuonConfig.MuonTrackBuildingConfig --run --threads=
    from MuonConfig.MuonConfigUtils import SetupMuonStandaloneArguments, SetupMuonStandaloneConfigFlags, SetupMuonStandaloneOutput, SetupMuonStandaloneCA

    args = SetupMuonStandaloneArguments()
    ConfigFlags = SetupMuonStandaloneConfigFlags(args)
    cfg = SetupMuonStandaloneCA(args, ConfigFlags)

    # Run the actual test.
    acc = MuonTrackBuildingCfg(ConfigFlags)
    cfg.merge(acc)

    if args.threads > 1 and args.forceclone:
        from AthenaCommon.Logging import log
        log.info('Forcing track building cardinality to be equal to ' + str(args.threads))
        # We want to force the algorithms to run in parallel (eventually the algorithm will be marked as cloneable in the source code)
        AlgResourcePool = CompFactory.AlgResourcePool
        cfg.addService(AlgResourcePool(OverrideUnClonable=True))
        track_builder = acc.getPrimary()
        track_builder.Cardinality = args.threads

    # This is a temporary fix - it should go someplace central as it replaces the functionality of addInputRename from here:
    # https://gitlab.cern.ch/atlas/athena/blob/master/Control/SGComps/python/AddressRemappingSvc.py
    AddressRemappingSvc, ProxyProviderSvc = CompFactory.getComps("AddressRemappingSvc", "ProxyProviderSvc",)
    pps = ProxyProviderSvc()
    ars = AddressRemappingSvc()
    pps.ProviderNames += ['AddressRemappingSvc']
def setupEvtSelForSeekOps():
    """ try to install seek-stuff on the EventSelector side """
    #import sys
    #from AthenaCommon.Logging import log as msg
    msg.debug("setupEvtSelForSeekOps:")
    if 'AthenaRootComps.ReadAthenaRoot' in sys.modules:
        # athenarootcomps has seeking enabled by default
        msg.info('=> Seeking enabled.')
        return

    if 'AthenaPoolCnvSvc.ReadAthenaPool' not in sys.modules:
        ## user did not import that module so we give up
        msg.info("Cannot enable 'seeking' b/c module "
                 "[AthenaPoolCnvSvc.ReadAthenaPool] hasn't been imported...")
        msg.info("Modify your jobOptions to import that module "
                 "(or just ignore this message)")
        return

    from AthenaCommon.AppMgr import theApp, AthAppMgr
    if theApp.state() != AthAppMgr.State.OFFLINE:
        msg.info("C++ ApplicationMgr already instantiated, probably seeking "
                 "will be ill-configured...")
        msg.info("EventSelector writers should implement updateHandlers")

    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    from AthenaCommon.Configurable import Configurable
    collectionType = svcMgr.EventSelector.properties()["CollectionType"]
    if collectionType in ("ImplicitROOT", Configurable.propertyNoValue,):
        svcMgr.EventSelector.CollectionType = "SeekableROOT"
        msg.info("=> Seeking enabled.")
    elif collectionType in ("SeekableROOT",):
        msg.verbose("=> Seeking already enabled.")
    else:
        msg.warning("Input seeking is not compatible with collection type of %s",
                    svcMgr.EventSelector.properties()["CollectionType"])
        msg.warning("=> Seeking disabled.")
    return
ConfigFlags.fillFromArgs(parser=parser)
# override Input.Files with result from our own arguments
# if --filesInput was specified as well (!) this will override
if args.inputFiles is not None:
    ConfigFlags.Input.Files = args.inputFiles.split(',')
# if --evtMax was specified as well this will override
if args.maxEvents is not None:
    ConfigFlags.Exec.MaxEvents = args.maxEvents

isReadingRaw = (GetFileMD(ConfigFlags.Input.Files).get('file_type', 'POOL') == 'BS')
if isReadingRaw:
    if ConfigFlags.DQ.Environment not in ('tier0', 'tier0Raw', 'online'):
        log.warning('Reading RAW file, but DQ.Environment set to %s',
                    ConfigFlags.DQ.Environment)
        log.warning('Will proceed but best guess is this is an error')
    log.info('Will schedule reconstruction, as best we know')
else:
    if ConfigFlags.DQ.Environment in ('tier0', 'tier0Raw', 'online'):
        log.warning('Reading POOL file, but DQ.Environment set to %s',
                    ConfigFlags.DQ.Environment)
        log.warning('Will proceed but best guess is this is an error')

if args.preExec:
    # bring things into scope
    from AthenaMonitoring.DQConfigFlags import allSteeringFlagsOff
    log.info('Executing preExec: %s', args.preExec)
    exec(args.preExec)

if hasattr(ConfigFlags, "DQ") and hasattr(ConfigFlags.DQ, "Steering") and hasattr(ConfigFlags, "Detector"):
    if hasattr(ConfigFlags.DQ.Steering, "InDet"):
def muonRdoDecodeTestData(forTrigger=False):
    # Add a flag, forTrigger, which will initially put the ByteStreamDecodeCfg code into "Cached Container" mode
    from AthenaCommon.Configurable import Configurable
    Configurable.configurableRun3Behavior = 1

    from AthenaConfiguration.AllConfigFlags import ConfigFlags
    from AthenaConfiguration.TestDefaults import defaultTestFiles
    ConfigFlags.Input.Files = defaultTestFiles.RAW

    # Set global tag by hand for now
    ConfigFlags.IOVDb.GlobalTag = "CONDBR2-BLKPA-2018-13"  #"CONDBR2-BLKPA-2015-17"
    ConfigFlags.GeoModel.AtlasVersion = "ATLAS-R2-2016-01-00-01"  #"ATLAS-R2-2015-03-01-00"

    ConfigFlags.lock()
    ConfigFlags.dump()

    from AthenaCommon.Logging import log
    log.setLevel(INFO)
    log.info('About to setup Raw data decoding')

    cfg = ComponentAccumulator()

    # Seem to need this to read BS properly
    from ByteStreamCnvSvc.ByteStreamConfig import ByteStreamReadCfg
    cfg.merge(ByteStreamReadCfg(ConfigFlags))

    # Add the MuonCache to ComponentAccumulator for trigger/RoI testing mode
    if forTrigger:
        # cache creators loaded independently
        from MuonConfig.MuonBytestreamDecodeConfig import MuonCacheCfg
        cfg.merge(MuonCacheCfg())

    # Schedule Rpc bytestream data decoding
    from MuonConfig.MuonBytestreamDecodeConfig import RpcBytestreamDecodeCfg
    rpcdecodingAcc = RpcBytestreamDecodeCfg(ConfigFlags, forTrigger)
    cfg.merge(rpcdecodingAcc)

    # Schedule Tgc bytestream data decoding
    from MuonConfig.MuonBytestreamDecodeConfig import TgcBytestreamDecodeCfg
    tgcdecodingAcc = TgcBytestreamDecodeCfg(ConfigFlags, forTrigger)
    cfg.merge(tgcdecodingAcc)

    # Schedule Mdt bytestream data decoding
    from MuonConfig.MuonBytestreamDecodeConfig import MdtBytestreamDecodeCfg
    mdtdecodingAcc = MdtBytestreamDecodeCfg(ConfigFlags, forTrigger)
    cfg.merge(mdtdecodingAcc)

    # Schedule Csc bytestream data decoding
    from MuonConfig.MuonBytestreamDecodeConfig import CscBytestreamDecodeCfg
    cscdecodingAcc = CscBytestreamDecodeCfg(ConfigFlags, forTrigger)
    cfg.merge(cscdecodingAcc)

    # Schedule RDO conversion
    rpcdecodingAcc = RpcRDODecodeCfg(ConfigFlags)
    cfg.merge(rpcdecodingAcc)

    tgcdecodingAcc = TgcRDODecodeCfg(ConfigFlags)
    cfg.merge(tgcdecodingAcc)

    mdtdecodingAcc = MdtRDODecodeCfg(ConfigFlags)
    cfg.merge(mdtdecodingAcc)

    cscdecodingAcc = CscRDODecodeCfg(ConfigFlags)
    cfg.merge(cscdecodingAcc)

    cscbuildingAcc = CscClusterBuildCfg(ConfigFlags)
    cfg.merge(cscbuildingAcc)

    # Need to add POOL converter - may be a better way of doing this?
    cfg.addService(CompFactory.AthenaPoolCnvSvc())
    cfg.getService("EventPersistencySvc").CnvServices += ["AthenaPoolCnvSvc"]

    log.info('Print Config')
    cfg.printConfig(withDetails=True)

    if forTrigger:
        pklName = 'MuonRdoDecode_Cache.pkl'
    else:
        pklName = 'MuonRdoDecode.pkl'

    # Store config as pickle
    log.info('Save Config')
    with open(pklName, 'wb') as f:
        cfg.store(f)
        f.close()

    return cfg
from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
from AthenaConfiguration.AllConfigFlags import ConfigFlags
from AthenaConfiguration.TestDefaults import defaultTestFiles
from AthenaCommon.Logging import log
from AthenaCommon.Constants import DEBUG
from AthenaCommon.Configurable import Configurable
from SCT_ConditionsTools.SCT_DCSConditionsConfig import SCT_DCSConditionsCfg
from SCT_ConditionsTools.SCT_SiliconConditionsConfig import SCT_SiliconConditionsToolCfg, SCT_SiliconConditionsCfg
from SCT_ConditionsTools.SCT_ReadCalibChipDataConfig import SCT_ReadCalibChipDataCfg

# test setup
log.setLevel(DEBUG)
Configurable.configurableRun3Behavior = True
ConfigFlags.Input.Files = defaultTestFiles.HITS

# call tests
tool = SCT_SiliconConditionsToolCfg(ConfigFlags, name="SiliconTestTool")
dcs_acc = SCT_DCSConditionsCfg(ConfigFlags, name="DCSTest")
dcs_acc.popPrivateTools()
acc1 = SCT_SiliconConditionsCfg(ConfigFlags, name="SiliconTest")
log.info(acc1.popPrivateTools())
acc2 = SCT_SiliconConditionsCfg(ConfigFlags, name="SiliconTest")
log.info(acc2.popPrivateTools())
acc3 = SCT_ReadCalibChipDataCfg(ConfigFlags, name="ReadTest")
log.info(acc3.popPrivateTools())
dcs_acc.wasMerged()
acc1.wasMerged()
acc2.wasMerged()
acc3.wasMerged()
ConfigFlags.TrackingGeometry.MaterialSource = "Input"
ConfigFlags.Concurrency.NumThreads = 10
ConfigFlags.Concurrency.NumConcurrentEvents = 10
ConfigFlags.lock()
ConfigFlags.dump()

cfg = MainServicesCfg(ConfigFlags)

from BeamPipeGeoModel.BeamPipeGMConfig import BeamPipeGeometryCfg
cfg.merge(BeamPipeGeometryCfg(ConfigFlags))

alignCondAlgCfg = ActsAlignmentCondAlgCfg(ConfigFlags)
cfg.merge(alignCondAlgCfg)

alg = ActsExtrapolationAlgCfg(ConfigFlags,
                              OutputLevel=VERBOSE,
                              NParticlesPerEvent=int(10),
                              EtaRange=[-0.5, 0.5],
                              PtRange=[20, 100])
cfg.merge(alg)

cfg.printConfig()

log.info("CONFIG DONE")

cfg.run(1)
ConfigFlags.Input.Files = [
    "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/TrigP1Test/data17_13TeV.00327265.physics_EnhancedBias.merge.RAW._lb0100._SFO-1._0001.1"
]
#from AthenaConfiguration.TestDefaults import defaultTestFiles
#ConfigFlags.Input.Files = defaultTestFiles.RAW

# Set global tag by hand for now
ConfigFlags.IOVDb.GlobalTag = "CONDBR2-BLKPA-2018-13"  #"CONDBR2-BLKPA-2015-17"
ConfigFlags.GeoModel.AtlasVersion = "ATLAS-R2-2016-01-00-01"  #"ATLAS-R2-2015-03-01-00"

ConfigFlags.lock()
ConfigFlags.dump()

from AthenaCommon.Logging import log
log.setLevel(DEBUG)
log.info('About to setup Rpc Raw data decoding')

cfg = ComponentAccumulator()

# Seem to need this to read BS properly
from ByteStreamCnvSvc.ByteStreamConfig import ByteStreamReadCfg
cfg.merge(ByteStreamReadCfg(ConfigFlags))

# Schedule Rpc data decoding
rpcdecodingAcc = RpcBytestreamDecodeCfg(ConfigFlags)
cfg.merge(rpcdecodingAcc)

# Schedule Tgc data decoding
tgcdecodingAcc = TgcBytestreamDecodeCfg(ConfigFlags)
cfg.merge(tgcdecodingAcc)
    import traceback
    traceback.print_exception(sys.exc_type,
                              '%s, ROOT version or setup problem?' % str(e),
                              sys.exc_traceback)
    sys.exit(1)

try:
    # test if we have Cintex (ROOT5)
    cppyy.Cintex.Debug
    # if the previous line did not throw exception, then we have ROOT5 with Cintex and Reflex
    cppyy.hasCintex = True
except AttributeError, e:
    # no Cintex! do ROOT6 stuff
    # but don't initialize more than once
    if not hasattr(cppyy, 'hasFakeCintex'):
        log.info('executing ROOT6Setup')
        cppyyFakeCintex()
        addROOTIncludePaths()
        install_root6_importhook()

import re
from tempfile import NamedTemporaryFile


class ShutUp(object):
    """
    A little helper class to keep ROOT silent...
    """
    DefaultFilter = [
        re.compile("Warning in <TClass::TClass>: no dictionary for class."),
def compareBothL1Menus(l1items_xml, l1items_json):
    # legacyCaloItems = [ k for k,v in l1items_json.items() if 'legacy' in v ]
    itemNames_xml = [x['name'] for x in l1items_xml]
    itemNames_json = l1items_json.keys()
    ids_xml = dict([(x['name'], int(x['ctpid'])) for x in l1items_xml])
    ids_json = dict([(x['name'], x['ctpid']) for x in l1items_json.values()])

    if l1items_xml and l1items_json:
        itemsOnlyInJson = list(set(itemNames_json) - set(itemNames_xml))
        itemsOnlyInXML = list(set(itemNames_xml) - set(itemNames_json))
        log.info("These %i items are new in the json version", len(itemsOnlyInJson))
        log.info(itemsOnlyInJson)
        log.info("\nThese %i items have disappeared in the json version", len(itemsOnlyInXML))
        log.info(itemsOnlyInXML)

        inboth = set(itemNames_json).intersection(set(itemNames_xml))
        noMatchId = []
        for name in sorted(inboth):
            if ids_xml[name] != ids_json[name]:
                noMatchId += [(name, ids_xml[name], ids_json[name])]
        log.info("\nFrom %i items that are in both, these %i have non-matching CTP id's",
                 len(inboth), len(noMatchId))
        log.info("Name, CTPID in xml, CTPID in json")
        for x in noMatchId:
            log.info(x)
if (nProc > 0):
    from AthenaCommon.Logging import log as msg
    if (theApp.EvtMax == -1):
        msg.fatal('EvtMax must be >0 for hybrid configuration')
        sys.exit(AthenaCommon.ExitCodes.CONFIGURATION_ERROR)

    if (theApp.EvtMax % nProc != 0):
        msg.warning('EvtMax[%s] is not divisible by nProcs[%s]: MP Workers will not process all requested events',
                    theApp.EvtMax, nProc)

    chunkSize = int(theApp.EvtMax / nProc)

    from AthenaMP.AthenaMPFlags import jobproperties as jps
    jps.AthenaMPFlags.ChunkSize = chunkSize

    msg.info('AthenaMP workers will process %s events each', chunkSize)

# MT-specific code
#---------------------------------------------------------------------------------#
theApp.EvtMax = 5

from xAODEventInfoCnv.xAODEventInfoCreator import xAODMaker__EventInfoCnvAlg
topSequence += xAODMaker__EventInfoCnvAlg()

#---------------------------------------------------------------------------------#
# NEW Conditions access infrastructure
#
from IOVSvc.IOVSvcConf import CondInputLoader
topSequence += CondInputLoader("CondInputLoader", OutputLevel=DEBUG)
if __name__ == "__main__":
    # To run this, do e.g.
    # python -m MuonConfig.MuonSegmentFindingConfig --run --threads=1
    from MuonConfig.MuonConfigUtils import SetupMuonStandaloneArguments, SetupMuonStandaloneConfigFlags, SetupMuonStandaloneOutput, SetupMuonStandaloneCA

    args = SetupMuonStandaloneArguments()
    ConfigFlags = SetupMuonStandaloneConfigFlags(args)
    cfg = SetupMuonStandaloneCA(args, ConfigFlags)

    # Run the actual test.
    acc = MuonSegmentFindingCfg(ConfigFlags, cardinality=args.threads)
    cfg.merge(acc)

    if args.threads > 1 and args.forceclone:
        from AthenaCommon.Logging import log
        log.info('Forcing segment finding cardinality to be equal to ' + str(args.threads))
        # We want to force the algorithms to run in parallel (eventually the algorithm will be marked as cloneable in the source code)
        AlgResourcePool = CompFactory.AlgResourcePool
        cfg.addService(AlgResourcePool(OverrideUnClonable=True))
        segment_finder = acc.getPrimary()
        segment_finder.Cardinality = args.threads

    # This is a temporary fix - it should go someplace central as it replaces the functionality of addInputRename from here:
    # https://gitlab.cern.ch/atlas/athena/blob/master/Control/SGComps/python/AddressRemappingSvc.py
    AddressRemappingSvc, ProxyProviderSvc = CompFactory.getComps("AddressRemappingSvc", "ProxyProviderSvc",)
    pps = ProxyProviderSvc()
    ars = AddressRemappingSvc()
    pps.ProviderNames += ['AddressRemappingSvc']