def _setup():
    from PyUtils.MetaReader import read_metadata
    from AthenaCommon.Logging import logging
    msg = logging.getLogger('MetaReader')

    global metadata
    global metadata_all_files

    # get input file name
    from RecExConfig.RecoFunctions import InputFileNames
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    if athenaCommonFlags.isOnline() and (not InputFileNames() or
                                         all(f.strip() == '' for f in InputFileNames())):
        # set minimal items of inputFileSummary
        metadata = {
            'file_type': 'BS',
            'eventTypes': ['IS_DATA', 'IS_ATLAS', 'IS_PHYSICS'],
            'TagStreamsRef': ''
        }
    else:
        inFiles = InputFileNames()
        if len(inFiles) < 1:
            msg.warning("No input files specified yet! Cannot do anything.")
            return
        metadata_all_files = read_metadata(inFiles, mode='peeker', promote=True)
        first_filename = inFiles[0]
        metadata = metadata_all_files[first_filename]
        metadata['file_name'] = first_filename
def _getFileMD(filenames):
    if isinstance(filenames, list):
        filename = filenames[0]
    else:
        filename = filenames

    if filename not in _fileMetadata:
        logger.info("Obtaining full metadata of %s", filename)
        thisFileMD = read_metadata(filename, None, 'full')
        _fileMetadata.update(thisFileMD)

    return _fileMetadata[filename]
def GetFileMD(filenames):
    from AthenaCommon.Logging import logging
    msg = logging.getLogger('AutoConfigFlags')

    filename = filenames[0]
    if filename not in _fileMetaData:
        if len(filenames) > 1:
            msg.info("Multiple input files. Use the first one for auto-configuration")
        msg.info("Obtaining metadata of auto-configuration by peeking into %s", filename)

        thisFileMD = read_metadata(filename, None, 'peeker')
        _fileMetaData.update(thisFileMD)

    return _fileMetaData[filename]
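# Hedged usage sketch (an illustration, not part of the framework code above): a minimal
# helper that captures the MetaReader pattern used throughout this file -- read one file
# in 'peeker' mode and promote the per-file dictionary one level up. The helper name and
# the example file name are hypothetical.
def peek_metadata(filename):
    from PyUtils.MetaReader import read_metadata
    metadata = read_metadata(filename, None, 'peeker')
    return metadata[filename]  # promote keys stored under the filename one level up

# Example (hypothetical path):
#     md = peek_metadata('myAOD.pool.root')
#     print(md.get('nentries'), md.get('eventTypes'))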
def get_metadata(mode='lite'):
    # Allow the input check to be skipped. This should only be done in production
    # jobs, in order to avoid peeking and spoiling performance on some systems.
    import os
    if not ('G4ATLAS_SKIPFILEPEEK' in os.environ and os.environ['G4ATLAS_SKIPFILEPEEK']):
        from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
        if athenaCommonFlags.PoolEvgenInput.statusOn:
            try:
                from PyUtils.MetaReader import read_metadata
                input_file = athenaCommonFlags.PoolEvgenInput()[0]
                metadata = read_metadata(input_file, mode=mode)
                metadata = metadata[input_file]  # promote all keys one level up
                return metadata
            except Exception:
                simMDlog.warning("MetaReader failed to open %s",
                                 athenaCommonFlags.PoolEvgenInput()[0])
    else:
        simMDlog.info("G4ATLAS_SKIPFILEPEEK environment variable present, "
                      "so skipping all input file peeking.")
    return None
def GetCurrentStreamName(msg):
    """Helper to decide where to get the input stream name from."""
    # First, try to get the info from the RecFlags
    try:
        from RecExConfig.RecFlags import rec
        streamName = rec.mergingStreamName()
        msg.debug("Got the stream name from the RecFlags: %s", streamName)
        if streamName == "":
            streamName = "unknownStream"
        return streamName
    except ImportError:
        msg.info("Couldn't get input stream name from the RecFlags... trying AthFile directly.")
        from PyUtils.MetaReader import read_metadata
        from AthenaCommon.AppMgr import ServiceMgr as svcMgr
        input_file = svcMgr.EventSelector.InputCollections[0]
        metadata = read_metadata(input_file)
        metadata = metadata[input_file]  # promote all keys one level up
        for class_name, name in metadata['metadata_items'].items():
            if name == 'EventStreamInfo':
                return class_name
        return 'unknownStream'
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration

from __future__ import print_function

from JetMonitoring.JetHistoTools import jhm, selectionAndHistos
from JetMonitoring.JetMonitoringConf import JetAttributeHisto, HistoDefinitionTool, JetMonitoringTool, JetKinematicHistos, JetContainerHistoFiller
from AthenaCommon.AppMgr import ToolSvc
from AthenaCommon.AppMgr import ServiceMgr as svcMgr
from PyUtils.MetaReader import read_metadata

input_file = svcMgr.EventSelector.InputCollections[0]
metadata = read_metadata(input_file)  # opens the first file from the InputCollections list
metadata = metadata[input_file]  # promote keys stored under the input_file key one level up to access them directly

# this is a dict of dicts, take a look at what's available! Below are some examples:
isMC = 'IS_SIMULATION' in metadata['eventTypes']
beam_energy = metadata['beam_energy']
conditions_tag = metadata['IOVDbGlobalTag']  # useful for figuring out which MC production this is

print("PhysicsValidationHistos: isMC=", isMC, " beam=", beam_energy, " conditions_tag=", conditions_tag)


def commonPhysValTool(container, refcontainer="", onlyKinematics=False, globalSelection=""):
    containerfiller = JetContainerHistoFiller(container + "HistoFiller", JetContainer=container)
    filler = containerfiller
def _main():
    # Parse the arguments provided by the user
    parser = argparse.ArgumentParser(description='This script reads metadata from a given file')
    parser.add_argument('filenames', nargs='+',
                        help='The filenames to read. User can provide a single file or a list of files.')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='print detailed output on screen')
    parser.add_argument('-o', '--output', metavar='FILE', default=None,
                        help='Saves the output in a file. By default, the output is written on the screen (stdout) '
                             'in a prettier format for better readability.')
    parser.add_argument('--json', action='store_true',
                        help='Sets the output file format as json.')
    parser.add_argument('--indent', metavar='N', type=int, default=2,
                        help='Sets the indent spaces in the output either on screen (without -o flag) '
                             'or in a file (with -o flag). By default, uses two spaces as indent.')
    parser.add_argument('-m', '--mode', default='lite', metavar='MODE', type=str,
                        choices=['tiny', 'lite', 'full', 'peeker'],
                        help='This flag provides the user capability to select the amount of metadata retrieved. '
                             'There are three options: '
                             'tiny (only those values used in PyJobTransforms), '
                             'lite (same output as dump-athfile) '
                             'and full (all available data found)')
    parser.add_argument('-t', '--type', default=None, metavar='TYPE', type=str, choices=['POOL', 'BS'],
                        help='The file type of the input filename. By default, it tries to determine itself '
                             'the file type of the input.')
    parser.add_argument('-f', '--filter', default=[], metavar='FILTER', nargs='+', type=str,
                        help='The metadata keys to filter.')
    parser.add_argument('--promote', default=None, type=bool,
                        help='Force promotion or not of the metadata keys')
    args = parser.parse_args()

    verbose = args.verbose
    filenames = args.filenames
    output = args.output
    is_json = args.json
    indent = args.indent
    mode = args.mode
    file_type = args.type
    meta_key_filter = args.filter

    msg.setLevel(logging.INFO if verbose else logging.WARNING)
    # create a stream handler
    handler = logging.StreamHandler()
    handler.setLevel(logging.INFO if verbose else logging.WARNING)
    # create a logging format
    formatter = logging.Formatter('%(name)s %(levelname)s %(message)s')
    handler.setFormatter(formatter)
    # add the handler to the logger
    msg.addHandler(handler)

    startTime = time.time()
    msg.info('Imported headers in: {0} milliseconds'.format((time.time() - startTime) * 1e3))
    msg.info('The output file is: {0}'.format(output))

    metadata = read_metadata(filenames, file_type, mode=mode,
                             meta_key_filter=meta_key_filter, promote=args.promote)

    if output is None:
        if is_json:
            print(json.dumps(metadata, indent=indent))
        else:
            enc = sys.stdout.encoding
            ascii = not sys.stdout.isatty() or not enc or enc.lower().find('ansi') >= 0 or enc.lower().find('ascii') >= 0
            print(_tree_print(metadata, indent=indent, pad=18, dict_sort='key',
                              list_max_items=8, ascii=ascii))
    else:
        if is_json:
            with open(output, 'w') as fd:
                print(json.dumps(metadata, indent=indent), file=fd)
        else:
            with open(output, 'w') as fd:
                print(_tree_print(metadata, indent=indent, pad=18, dict_sort='key',
                                  list_max_items=8, ascii=True), file=fd)

    msg.info('Done!')
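# Hedged sketch of what the CLI above effectively does for '--json' output, assuming a
# placeholder file name; the wrapper function name is hypothetical, and the read_metadata
# call mirrors the one made in _main().
def dump_metadata_as_json(filenames, mode='lite', meta_key_filter=None, indent=2):
    import json
    from PyUtils.MetaReader import read_metadata

    metadata = read_metadata(filenames, None, mode=mode,
                             meta_key_filter=meta_key_filter or [], promote=None)
    return json.dumps(metadata, indent=indent)

# Example (hypothetical file name):
#     print(dump_metadata_as_json(['myAOD.pool.root'], mode='lite', meta_key_filter=['nentries']))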
def HitsFilePeeker(runArgs, skeletonLog):
    from PyUtils.MetaReader import read_metadata
    try:
        input_file = getHITSFile(runArgs)
        metadata_peeker = read_metadata(input_file, mode='peeker')
        metadata_peeker = metadata_peeker[input_file]  # promote all keys one level up
        metadata_full = read_metadata(input_file, mode='full')
        metadata_full = metadata_full[input_file]  # promote all keys one level up
    except AssertionError:
        skeletonLog.error("Failed to open input file: %s", getHITSFile(runArgs))

    # check eventTypes of input file
    if 'eventTypes' in metadata_peeker:
        if 'IS_SIMULATION' not in metadata_peeker['eventTypes']:
            skeletonLog.error('This input file has incorrect evt_type: %s', metadata_peeker['eventTypes'])
            skeletonLog.info('Please make sure you have set input file metadata correctly.')
            skeletonLog.info('Consider using the job transforms for earlier steps if you aren\'t already.')
            # then exit gracefully
            raise SystemExit("Input file eventTypes is incorrect, please check your g4sim and evgen jobs.")
    else:
        skeletonLog.warning('Could not find \'eventTypes\' key in metadata. Unable to check that eventTypes is correct.')

    metadatadict = dict()
    if metadata_full:
        if '/Simulation/Parameters' in metadata_full:
            metadatadict = metadata_full['/Simulation/Parameters']
        ## Get IOVDbGlobalTag
        if 'IOVDbGlobalTag' not in metadatadict:
            try:
                assert metadata_full['/TagInfo']['IOVDbGlobalTag'] is not None
                metadatadict['IOVDbGlobalTag'] = metadata_full['/TagInfo']['IOVDbGlobalTag']
            except Exception:
                try:
                    assert metadata_full['/Digitization/Parameters']['IOVDbGlobalTag'] is not None
                    metadatadict['IOVDbGlobalTag'] = metadata_full['/Digitization/Parameters']['IOVDbGlobalTag']
                except Exception:
                    skeletonLog.warning("Failed to find IOVDbGlobalTag.")
    else:
        ## Patch for older hit files
        if 'SimulatedDetectors' not in metadatadict:
            if 'itemList' in metadata_peeker:
                metadatadict['SimulatedDetectors'] = hitColls2SimulatedDetectors(metadata_peeker['itemList'])
            else:
                metadatadict['SimulatedDetectors'] = ['pixel', 'SCT', 'TRT', 'BCM', 'Lucid', 'LAr', 'Tile',
                                                      'MDT', 'CSC', 'TGC', 'RPC', 'Truth']

    import re
    from AthenaCommon.GlobalFlags import globalflags
    globalflags.DataSource = "geant4"

    ## Configure DetDescrVersion
    if hasattr(runArgs, "geometryVersion"):
        inputGeometryVersion = runArgs.geometryVersion
        if isinstance(inputGeometryVersion, str) and inputGeometryVersion.endswith("_VALIDATION"):
            inputGeometryVersion = inputGeometryVersion.replace("_VALIDATION", "")
        if 'SimLayout' in metadatadict:
            if not re.match(metadatadict['SimLayout'], inputGeometryVersion):
                skeletonLog.warning("command-line geometryVersion (%s) does not match the value "
                                    "used in the Simulation step (%s) !",
                                    inputGeometryVersion, metadatadict['SimLayout'])
        globalflags.DetDescrVersion.set_Value_and_Lock(inputGeometryVersion)
        skeletonLog.info("Using geometryVersion from command-line: %s", globalflags.DetDescrVersion.get_Value())
    elif 'SimLayout' in metadatadict:
        globalflags.DetDescrVersion.set_Value_and_Lock(metadatadict['SimLayout'])
        skeletonLog.info("Using geometryVersion from HITS file metadata %s", globalflags.DetDescrVersion.get_Value())
    else:
        raise SystemExit("geometryVersion not found in HITS file metadata or on transform command-line!")

    ## Configure ConditionsTag
    if hasattr(runArgs, "conditionsTag"):
        if 'IOVDbGlobalTag' in metadatadict:
            if not re.match(metadatadict['IOVDbGlobalTag'], runArgs.conditionsTag):
                skeletonLog.warning("command-line conditionsTag (%s) does not match the value "
                                    "used in the Simulation step (%s) !",
                                    runArgs.conditionsTag, metadatadict['IOVDbGlobalTag'])
        if not globalflags.ConditionsTag.is_locked():
            globalflags.ConditionsTag.set_Value_and_Lock(runArgs.conditionsTag)
            skeletonLog.info("Using conditionsTag from command-line: %s", globalflags.ConditionsTag.get_Value())
        else:
            skeletonLog.info("globalflags.ConditionsTag already locked to %s - will not alter it.",
                             globalflags.ConditionsTag.get_Value())
    elif 'IOVDbGlobalTag' in metadatadict:
        globalflags.ConditionsTag.set_Value_and_Lock(metadatadict['IOVDbGlobalTag'])
        skeletonLog.info("Using conditionsTag from HITS file metadata %s", globalflags.ConditionsTag.get_Value())
    else:
        skeletonLog.fatal("conditionsTag not found in HITS file metadata or on transform command-line!")
        raise SystemExit("conditionsTag not found in HITS file metadata or on transform command-line!")

    ## Configure DetFlags
    if 'SimulatedDetectors' in metadatadict:
        from AthenaCommon.DetFlags import DetFlags
        # by default everything is off
        DetFlags.all_setOff()
        skeletonLog.debug("Switching on DetFlags for subdetectors which were simulated")
        simulatedDetectors = eval(metadatadict['SimulatedDetectors'])
        for subdet in simulatedDetectors:
            cmd = 'DetFlags.%s_setOn()' % subdet
            skeletonLog.debug(cmd)
            try:
                exec(cmd)
            except Exception:
                skeletonLog.warning('Failed to switch on subdetector %s', subdet)
        DetFlags.simulateLVL1.all_setOff()
        DetFlags.digitize.all_setOff()
        if hasattr(DetFlags, 'overlay'):
            DetFlags.overlay.all_setOff()
        DetFlags.pileup.all_setOff()
        DetFlags.readRDOBS.all_setOff()
        DetFlags.readRDOPool.all_setOff()
        DetFlags.readRIOBS.all_setOff()
        DetFlags.readRIOPool.all_setOff()
        DetFlags.makeRIO.all_setOff()
        DetFlags.writeBS.all_setOff()
        DetFlags.writeRDOPool.all_setOff()
        DetFlags.writeRIOPool.all_setOff()
    return
def makeBkgInputCol(initialList, nBkgEvtsPerCrossing, correctForEmptyBunchCrossings, logger):
    import math
    uberList = []
    refreshrate = 1.0

    nSignalEvts = 1000
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    if (athenaCommonFlags.EvtMax.get_Value() > 0):
        nSignalEvts = int(athenaCommonFlags.EvtMax.get_Value())
        logger.info('Number of signal events (from athenaCommonFlags.EvtMax) = %s.', nSignalEvts)
    else:
        nSignalEvts = 0
        from PyUtils.MetaReader import read_metadata
        for inFile in athenaCommonFlags.PoolHitsInput.get_Value():
            try:
                metadata = read_metadata(inFile)
                metadata = metadata[inFile]  # promote all keys one level up
                nSignalEvts += int(metadata['nentries'])
            except Exception as err:
                logger.warning("Unable to open file [%s]", inFile)
                logger.warning('caught:\n%s', err)
                import traceback
                traceback.print_exc()
        logger.info('Number of signal events (read from files) = %s.', nSignalEvts)

    nBkgEventsPerFile = 5000
    try:
        from PyUtils.MetaReader import read_metadata
        metadata = read_metadata(initialList[0])
        metadata = metadata[initialList[0]]  # promote all keys one level up
        nBkgEventsPerFile = int(metadata['nentries'])
        logger.info('Number of background events per file (read from file) = %s.', nBkgEventsPerFile)
    except Exception:
        import traceback
        traceback.print_exc()
        logger.warning('Failed to count the number of background events in %s. '
                       'Assuming 5000 - if this is an overestimate the job may die.', initialList[0])

    from Digitization.DigitizationFlags import digitizationFlags
    from AthenaCommon.BeamFlags import jobproperties
    Nbunches = 1 + digitizationFlags.finalBunchCrossing.get_Value() - digitizationFlags.initialBunchCrossing.get_Value()
    nbunches = int(Nbunches)
    if correctForEmptyBunchCrossings:
        nbunches = int(math.ceil(float(nbunches) * float(digitizationFlags.bunchSpacing.get_Value())
                                 / float(jobproperties.Beam.bunchSpacing.get_Value())))
    logger.info('Simulating a maximum of %s colliding-bunch crossings (%s colliding+non-colliding total) per signal event',
                nbunches, Nbunches)

    nBkgEventsForJob = pileUpCalc(float(nSignalEvts), 1.0, float(nBkgEvtsPerCrossing), nbunches)
    logger.info('Number of background events required: %s. Number of background events in input files: %s',
                nBkgEventsForJob, (nBkgEventsPerFile * len(initialList)))
    numberOfRepetitionsRequired = float(nBkgEventsForJob) / float(nBkgEventsPerFile * len(initialList))
    NumberOfRepetitionsRequired = 1 + int(math.ceil(numberOfRepetitionsRequired))
    for i in range(0, NumberOfRepetitionsRequired):
        uberList += initialList
    logger.info('Expanding input list from %s to %s', len(initialList), len(uberList))
    return uberList
def meta_diff(
    files,
    verbose=False,
    ordered=False,
    drop=None,
    mode="lite",
    meta_key_filter=None,
    file_type=None,
    promote=False,
    diff_format="simple",
):
    """
    Compare the in-file metadata in two given files. Uses PyUtils.MetaReader
    to obtain file content. Generates a list of strings that show the
    differences. Returns an empty list if no difference is found.

    Keyword arguments:
    files           -- Names of two files to compare
    verbose         -- toggle to get debug information
    ordered         -- whether to check order of lists in the metadata
    drop            -- keys to drop from metadata retrieved by MetaReader
    mode            -- MetaReader argument setting amount of content (default 'lite').
                       Allowed values are: tiny, lite, peeker, and full
    meta_key_filter -- MetaReader argument selecting keys to retrieve (default get all)
    file_type       -- Type of files, POOL or BS (default: auto-configure)
    promote         -- MetaReader argument (default: False)
    diff_format     -- Return 'simple' or 'diff' style string (default: 'simple')
    """
    if len(files) != 2:
        raise ValueError("Wrong number of files passed, need two")

    reader_msg = logging.getLogger("MetaReader")
    reader_msg.setLevel(logging.INFO if verbose else logging.WARNING)

    msg = logging.getLogger("MetaDiff")
    msg.setLevel(logging.DEBUG if verbose else logging.INFO)

    msg.debug("Reading from %s and %s", files[0], files[1])

    metadata = read_metadata(
        files,
        file_type,
        mode=mode,
        meta_key_filter=meta_key_filter,
        promote=promote,
    )

    try:
        for key in drop:
            for _, value in metadata.items():
                value.pop(key, None)
    except TypeError:
        pass

    result = compare_dicts(
        metadata[files[0]],
        metadata[files[1]],
        ordered=ordered,
        diff_format=diff_format,
    )

    if not result:
        msg.info("No differences found")

    return result
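# Hedged usage sketch for meta_diff above; the file names and the dropped key are
# hypothetical placeholders, and only arguments documented in the signature are used.
#
#     differences = meta_diff(["old.AOD.pool.root", "new.AOD.pool.root"],
#                             mode="lite", drop=["file_guid"], diff_format="diff")
#     for line in differences:
#         print(line)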
import os

collection = []
if os.path.isfile("inputfilelist"):
    for line in open("inputfilelist"):
        collection.append(line.strip())
else:
    raise RuntimeError("Unable to open inputfilelist")

## GlobalFlags
from AthenaCommon.GlobalFlags import globalflags
globalflags.DetGeo = 'atlas'
globalflags.DataSource = 'data'

## input file parameters
from PyUtils.MetaReader import read_metadata
inputfile = read_metadata(collection[0])
inputfile = inputfile[collection[0]]  # promote keys stored under the input filename key one level up to access them directly

if inputfile['file_type'] == 'BS':
    globalflags.InputFormat = 'bytestream'
elif inputfile['file_type'] == 'POOL':
    globalflags.InputFormat = 'pool'
else:
    raise RuntimeError("Unable to read input file (format not supported)")

if inputfile['file_type'] == 'POOL':
    globalflags.DetDescrVersion = inputfile['GeoAtlas']
else:
    globalflags.ConditionsTag = 'CONDBR2-BLKPA-2016-07'  # yosuke
    # globalflags.ConditionsTag = 'CONDBR2-BLKPA-2015-10'  # steffen
def RDOFilePeeker(runArgs, skeletonLog):
    from PyUtils.MetaReader import read_metadata
    try:
        input_file = runArgs.inputRDOFile[0]
        # use 'lite' mode only to read the 'eventTypes' key, which is promoted in 'lite' mode
        metadata_lite = read_metadata(input_file)
        # promote keys stored under the input filename key one level up to access them directly
        metadata_lite = metadata_lite[input_file]
        # use 'full' mode to access all metadata (needed for '/Digitization/Parameters')
        metadata = read_metadata(input_file, mode='full')
        # promote keys stored under the input filename key one level up to access them directly
        metadata = metadata[input_file]
    except AssertionError:
        skeletonLog.error("Failed to open input file: %s", runArgs.inputRDOFile[0])

    # check eventTypes of input file
    if 'eventTypes' in metadata_lite:
        if 'IS_SIMULATION' not in metadata_lite['eventTypes']:
            skeletonLog.error('This input file has incorrect eventTypes: %s', metadata_lite['eventTypes'])
            skeletonLog.info('Please make sure you have set input file metadata correctly.')
            skeletonLog.info('Consider using the job transforms for earlier steps if you aren\'t already.')
            # then exit gracefully
            raise SystemExit("Input file eventTypes is incorrect, please check your digi, g4sim and evgen jobs.")
    else:
        skeletonLog.warning('Could not find \'eventTypes\' key in MetaReader -> metadata. Unable to check that eventTypes is correct.')

    metadatadict = {}
    if '/Digitization/Parameters' in metadata:
        metadatadict = metadata['/Digitization/Parameters']
        if isinstance(metadatadict, list):
            skeletonLog.warning("inputfile %s contained %s sets of Digitization Metadata. Using the final set in the list.",
                                input_file, len(metadatadict))
            metadatadict = metadatadict[-1]
        ## Get IOVDbGlobalTag
        if 'IOVDbGlobalTag' not in metadatadict:
            try:
                if metadata['/TagInfo']['IOVDbGlobalTag'] is not None:
                    metadatadict['IOVDbGlobalTag'] = metadata['/TagInfo']['IOVDbGlobalTag']
            except Exception:
                skeletonLog.warning("Failed to find IOVDbGlobalTag.")
    else:
        ## Patch for older hit files
        if 'DigitizedDetectors' not in metadatadict:
            metadatadict['DigitizedDetectors'] = ['pixel', 'SCT', 'TRT', 'BCM', 'Lucid', 'LAr', 'Tile',
                                                  'MDT', 'CSC', 'TGC', 'RPC', 'Truth']

    import re
    from AthenaCommon.GlobalFlags import globalflags

    ## Configure DetDescrVersion
    if hasattr(runArgs, "geometryVersion"):
        inputGeometryVersion = runArgs.geometryVersion
        if isinstance(inputGeometryVersion, str) and inputGeometryVersion.endswith("_VALIDATION"):
            inputGeometryVersion = inputGeometryVersion.replace("_VALIDATION", "")
        if 'DetDescrVersion' in metadatadict:
            if not re.match(metadatadict['DetDescrVersion'], inputGeometryVersion):
                skeletonLog.warning("command-line geometryVersion (%s) does not match the value "
                                    "used in the Simulation step (%s) !",
                                    inputGeometryVersion, metadatadict['DetDescrVersion'])
        globalflags.DetDescrVersion.set_Value_and_Lock(inputGeometryVersion)
        skeletonLog.info("Using geometryVersion from command-line: %s", globalflags.DetDescrVersion.get_Value())
    elif 'DetDescrVersion' in metadatadict:
        globalflags.DetDescrVersion.set_Value_and_Lock(metadatadict['DetDescrVersion'])
        skeletonLog.info("Using geometryVersion from RDO file metadata %s", globalflags.DetDescrVersion.get_Value())
    else:
        raise SystemExit("geometryVersion not found in RDO file metadata or on transform command-line!")

    ## Configure ConditionsTag
    if hasattr(runArgs, "conditionsTag"):
        if 'IOVDbGlobalTag' in metadatadict:
            if not re.match(metadatadict['IOVDbGlobalTag'], runArgs.conditionsTag):
                skeletonLog.warning("command-line conditionsTag (%s) does not match the value "
                                    "used in the Simulation step (%s) !",
                                    runArgs.conditionsTag, metadatadict['IOVDbGlobalTag'])
        # globalflags.ConditionsTag.set_Value_and_Lock(runArgs.conditionsTag)  ## already done in CommonSkeletonJobOptions.py
        skeletonLog.info("Using conditionsTag from command-line: %s", globalflags.ConditionsTag.get_Value())
    elif 'IOVDbGlobalTag' in metadatadict:
        globalflags.ConditionsTag.set_Value_and_Lock(metadatadict['IOVDbGlobalTag'])
        skeletonLog.info("Using conditionsTag from RDO file metadata %s", globalflags.ConditionsTag.get_Value())
    else:
        raise SystemExit("conditionsTag not found in RDO file metadata or on transform command-line!")

    ## Configure DetFlags
    if 'DigitizedDetectors' in metadatadict:
        from AthenaCommon.DetFlags import DetFlags
        # by default everything is off
        DetFlags.all_setOff()
        skeletonLog.debug("Switching on DetFlags for subdetectors which were digitized")
        for subdet in metadatadict['DigitizedDetectors']:
            cmd = 'DetFlags.%s_setOn()' % subdet
            skeletonLog.debug(cmd)
            try:
                exec(cmd)
            except Exception:
                skeletonLog.warning('Failed to switch on subdetector %s', subdet)
        # hacks to reproduce the sub-set of DetFlags left on by RecExCond/AllDet_detDescr.py
        DetFlags.simulate.all_setOff()
        DetFlags.simulateLVL1.all_setOff()
        DetFlags.digitize.all_setOff()
        DetFlags.pileup.all_setOff()
        DetFlags.readRDOBS.all_setOff()
        DetFlags.readRDOPool.all_setOff()
        DetFlags.readRIOBS.all_setOff()
        DetFlags.readRIOPool.all_setOff()
        DetFlags.makeRIO.all_setOff()
        DetFlags.writeBS.all_setOff()
        DetFlags.writeRDOPool.all_setOff()
        DetFlags.writeRIOPool.all_setOff()
    return