def make_file(filename, events, verbose=False):
    """Generates an EventStorage file with as many (dummy) events as you want.

    This method will generate a number of dummy events and store them in
    the file you designate.

    Parameters:

    filename -- This is the name of the output file you want to have

    events -- This is the number of random events to be generated. Level-1
    identifiers will be sequential starting from zero. Bunch Crossing
    identifier will be set to zero in all events. The same is valid for the
    RunNumber.

    verbose -- If this flag is set to True, this function will print a '.'
    (dot) for every generated fragment.
    """
    import os, sys
    # eformat.ostream picks its own temporary file name; we rename at the end.
    f = eformat.ostream()
    if verbose:
        sys.stdout.write('Generating %d events => \'%s\'\n' % (events, filename))
        sys.stdout.flush()
    for k in range(events):
        # make_fe builds one dummy full-event fragment with lvl1_id == k
        f.write(make_fe(lvl1_id=k))
        if verbose:
            sys.stdout.write('.')
            sys.stdout.flush()
    if verbose:
        sys.stdout.write('\n')
        sys.stdout.flush()
    tmp = f.last_filename()
    # deleting the stream closes the file and flushes all buffers;
    # only then is it safe to rename the finished file into place
    del f
    os.rename(tmp, filename)
def split_data(files, robset, output_dir):
    """Split the events of an input stream into one output file per ROB id.

    Parameters:

    files -- input passed straight to eformat.istream

    robset -- a set of ROB source identifiers; one output stream (and file)
    is created for each of them under output_dir

    Returns a dictionary mapping each ROB id to the file it generated.
    """
    events = eformat.istream(files)
    logging.info("Reading %d event(s) from file '%s' and dumping to %d files." %
                 (len(events), os.path.basename(files), len(robset)))
    # out will contain a stream to all possible ROB ids.
    out = {}
    for r in robset:
        out[r] = eformat.ostream(directory=output_dir, core_name='0x%08X' % r)
    for ife in events:
        # table of contents: source id -> ROB data for this event
        toc = {}
        for k in ife.toc():
            toc[k.key()] = k.data()
        # BUGFIX: removed the unused local 'model' (it indexed toc.keys()[0]
        # but was never read afterwards -- dead code).
        # ROBs requested but absent from this event get a None placeholder
        diff = robset.difference(toc.keys())
        for source_id in diff:
            toc[source_id] = None
        for source_id, ostream in out.iteritems():
            ostream.write(surround_with_fullevent(toc[source_id], ife))
        sys.stdout.write('.')
        sys.stdout.flush()
    sys.stdout.write('\n')
    sys.stdout.flush()
    # we return a map with the list of files generated.
    retval = {}
    for rob_id, ostr in out.iteritems():
        retval[rob_id] = ostr.last_filename()
    return retval
def main():
    """Entry point: read events from infile, pass each through modify()
    and write the results to outfile.

    Returns 1 when the command line does not carry exactly two arguments.
    """
    from optparse import OptionParser
    import os
    log.basicConfig(level=log.DEBUG, format='%(levelname)s %(message)s')
    parser = OptionParser(description=__doc__, usage='%prog infile outfile')
    (opt, args) = parser.parse_args()
    if len(args) != 2:
        parser.print_help()
        return 1
    istr = eformat.istream([args[0]])
    ostr = eformat.ostream()
    for event in istr:
        # re-serialize the (read-only) modified event as a writable fragment
        ostr.write(eformat.write.FullEventFragment(modify(event)))
    if ostr:
        tmp_name = ostr.last_filename()
        del ostr  # makes sure we flush all buffers
        os.rename(tmp_name, args[1])
    return
def test02_WriteReadMany(self):
    """Write the same event three times with different L1 identifiers,
    then read the file back and verify ids and checksums."""
    lvl1_ids = [0x53, 0x7, 0x22]
    out = eformat.ostream()
    event = eformat.write.FullEventFragment()
    for lid in lvl1_ids:
        event.lvl1_id(lid)
        out.write(event)
    outfile = out.last_filename()
    del out  # close the output file
    istr = eformat.istream(outfile)
    self.assertEqual(3, len(istr))
    read = [e for e in istr]
    for expected, e in zip(lvl1_ids, read):
        self.assertEqual(e.lvl1_id(), expected)
        self.assertEqual(e.checksum(), True)
    os.unlink(outfile)
def form_rod():
    """Builds virtual summary ROBs (one per ROS) out of the LAr FEB info
    of the ROBs mapped to that ROS, and appends them to each event.

    Reads a hard-coded input file, writes an output stream with the
    augmented events.  Uses module-level helpers roses(), print_roses()
    and extract_febinfo().
    """
    # m maps a ROS id to the list of ROB source ids it contains
    m=roses(0)
    print_roses(m)
    counter = 1
    # alternative input files kept for reference:
    #istr = eformat.istream('data10_7TeV.00167661.physics_Egamma.daq.RAW._lb0000._SFO-1._0001.data')
    #istr = eformat.istream('data11_cos.00178514.physics_CosmicCalo.daq.RAW._lb0026._SFO-5._0001.data')
    #istr = eformat.istream('data11_7TeV.00177682.physics_EnhancedBias.daq.RAW._lb0550._SFO-4._0001.data')
    istr = eformat.istream([
        'data11_7TeV.00177682.physics_Egamma.daq.RAW._lb0566._SFO-11._0001.data',
    ])
    output = eformat.ostream()
    for e in istr:
        # take one rob to help build virtual robs
        newevent= eformat.write.FullEventFragment(e)
        for ros_id,list_robs_ids in m.iteritems():
            if True:
                # payload[0] is a header word filled in after the ROB loop
                payload=[0]
                DSPmode=0
                count_rob=0
                list_robs = e.children()
                arob = eformat.write.ROBFragment()
                found_any=False
                for rob in list_robs:
                    if rob.source_id() in list_robs_ids:
                        # remember the last matching ROB as template for the
                        # virtual ROB built below
                        arob=rob
                        found_any=True
                        source_id = int(rob.source_id())
                        # per-FEB energy sums; DSPfirmware > 0 marks a valid DSP
                        [feb1_ff,ex1,ey1,ez1,feb2_ff,ex2,ey2,ez2,DSPfirmware] = extract_febinfo(rob)
                        if ( DSPfirmware > 0 ) :
                            DSPmode=DSPfirmware
                        sume1=ex1+ey1;
                        sume2=ex2+ey2;
                        payload.append(feb1_ff)
                        payload.append(ex1)
                        payload.append(ey1)
                        payload.append(ez1)
                        payload.append(sume1)
                        payload.append(feb2_ff)
                        payload.append(ex2)
                        payload.append(ey2)
                        payload.append(ez2)
                        payload.append(sume2)
                        count_rob=count_rob+1
                # header word: ROB count in the low bits, 0xa marker shifted up
                payload[0] = (count_rob | 0xa<<16)
                if found_any:
                    # clone the template ROB, relabel it with the ROS id and
                    # replace its ROD payload with the summary just built
                    newrob=eformat.write.ROBFragment(arob)
                    newrob.source_id(eformat.helper.SourceIdentifier(ros_id) )
                    newrob.minor_version(12)
                    newrob.rod_minor_version(12)
                    newrob.rod_data(payload)
                    newrob.status([])
                    newevent.append(newrob)
        output.write(newevent)
        print 'show counter = ', counter
        # set to a positive value to stop after that many events; -1 disables
        if ( counter == -1 ):
            break
        counter = counter+1
def test03_WriteReadBroken(self):
    """Write an event containing one deliberately truncated ROD fragment
    (appended unchecked) plus one good ROB; read it back and verify the
    damage is detected by check()/check_rod() but not by check_rob()."""
    rodData = [1,2,3,4,5,6]
    rodStatus = [7,8,9,10,11,12,13,14,15]
    sid_rob = eformat.helper.SourceIdentifier(eformat.helper.SubDetector.TDAQ_LVL2, 0x1)
    rob = eformat.write.ROBFragment()
    rob.source_id(sid_rob)
    rob.rod_status(rodStatus)
    rob.rod_data(rodData)
    # now we do some screw-up on these data: zero the last three words of
    # the serialized ROB, truncating the ROD payload
    serial_rob = rob.__raw__()
    serial_rob[-1] = 0
    serial_rob[-2] = 0
    serial_rob[-3] = 0
    test_this = eformat.ROBFragment(serial_rob)
    # if it pass this point, we have a damaged ROB
    self.assertRaises(RuntimeError, test_this.check)
    self.assertRaises(RuntimeError, test_this.check_rod)
    self.assertEqual(test_this.check_rob(), True)
    event = eformat.write.FullEventFragment()
    event.append_unchecked(test_this) #appends unchecked ROB
    event.append(rob) #appends checked ROB
    event.checksum_type(eformat.helper.CheckSum.ADLER32)
    out = eformat.ostream()
    out.write(event)
    outfile = out.last_filename()
    del out #so we close the file
    input = eformat.istream(outfile)
    self.assertEqual(1, len(input))
    read = input[0] #there should be only one
    self.assertEqual(read.checksum(), True)
    self.assertEqual(event, read)
    #check_tree should not fail for a truncated ROD fragment
    self.assertEqual(read.check_tree(), True)
    self.assertEqual(read.check_tree_noex(), True)
    # make sure the ROB is still bad
    self.assertRaises(RuntimeError, read[1].check) # bad ROB
    self.assertEqual(read[1].check_noex(), False) # bad ROB
    self.assertRaises(RuntimeError, read[1].check_rod) # bad ROB
    self.assertEqual(read[1].check_rod_noex(), False) # bad ROB
    self.assertEqual(read[1].check_rob_noex(), True) # bad ROB
    # compare the damaged ROB and the one serialized word after word
    self.assertEqual(serial_rob, read[1].__raw__())
    os.unlink(outfile)
def test04_WriteReadBrokenNormally(self):
    """Like test03, but additionally wipes the ROD start-of-header marker
    and appends the broken ROB through the normal (checked) path; verifies
    the specific FragmentProblem codes are reported on read-back."""
    rodData = [1,2,3,4,5,6]
    rodStatus = [7,8,9,10,11,12,13,14,15]
    sid_rob = eformat.helper.SourceIdentifier(eformat.helper.SubDetector.TDAQ_LVL2, 0x1)
    rob = eformat.write.ROBFragment()
    rob.source_id(sid_rob)
    rob.rod_status(rodStatus)
    rob.rod_data(rodData)
    # now we do some screw-up on these data: truncate the tail and erase
    # every word equal to the ROD header marker
    serial_rob = rob.__raw__()
    serial_rob[-1] = 0
    serial_rob[-2] = 0
    serial_rob[-3] = 0
    for k in range(len(serial_rob)):
        if serial_rob[k] == eformat.helper.HeaderMarker.ROD:
            serial_rob[k] = 0
    test_this = eformat.write.ROBFragment(eformat.ROBFragment(serial_rob))
    event = eformat.write.FullEventFragment()
    event.append(test_this) #appends unchecked ROB
    event.append(rob) #appends checked ROB
    event.checksum_type(eformat.helper.CheckSum.ADLER32)
    out = eformat.ostream()
    out.write(event)
    outfile = out.last_filename()
    del out #so we close the file
    input = eformat.istream(outfile)
    self.assertEqual(1, len(input))
    read = input[0] #there should be only one
    self.assertEqual(read.nchildren(), 2)
    self.assertEqual(len(read), len(event))
    self.assertEqual(read.checksum(), True)
    self.assertEqual(event, read)
    # truncated ROD shall not cause a failure
    self.assertEqual(read.check_tree_noex(), True)
    # make sure the ROB is still bad
    self.assertEqual(read[0].check_noex(), False) # bad ROB
    self.assertEqual(read[0].check_rod_noex(), False) # bad ROB
    self.assertEqual(read[0].check_rob_noex(), True) # bad ROB
    # retrieve ROB problems
    self.assertEqual(read[0].problems(),
                     [eformat.helper.FragmentProblem.WRONG_ROD_MARKER,
                      eformat.helper.FragmentProblem.WRONG_ROD_FRAGMENT_SIZE])
    # compare the damaged ROB and the one serialized word after word
    self.assertEqual(serial_rob, read[0].__raw__())
    os.unlink(outfile)
def ReplaceMUCTPI(input_file, output_file):
    """Copy input_file to output_file, passing every event through modify().

    The output stream inherits run number, trigger type, detector mask,
    beam type and beam energy from the input file's metadata.
    """
    istr = eformat.istream([input_file])
    reader = EventStorage.pickDataReader(input_file)
    ostr = eformat.ostream(core_name="subset",
                           run_number=reader.runNumber(),
                           trigger_type=reader.triggerType(),
                           detector_mask=reader.detectorMask(),
                           beam_type=reader.beamType(),
                           beam_energy=reader.beamEnergy())
    for event in istr:
        ostr.write(modify(event))
    written = ostr.last_filename()
    del ostr  # close the stream so the file is complete on disk
    os.rename(written, output_file)
def datawriter(self, directory, core_name, compression=0):
    """Create and return a new eformat.ostream carrying the same metadata
    as the current input stream, but written into *directory* under
    *core_name*.

    compression -- a zlib level in 1..5 enables compressed output; any
    other value (including the default 0) writes uncompressed.
    """
    extra = {}
    if compression in (1, 2, 3, 4, 5):
        extra['compression'] = EventStorage.CompressionType.ZLIB
        extra['complevel'] = compression
    return eformat.ostream(directory,
                           core_name,
                           self.dr.runNumber(),
                           self.dr.triggerType(),
                           self.dr.detectorMask(),
                           self.dr.beamType(),
                           self.dr.beamEnergy(),
                           **extra)
def my_conf(argv):
    """Runs the merging routines"""
    import eformat, logging
    import EventApps.myopt as myopt

    option = {}
    option['output'] = {'short': 'o', 'arg': True,
                        'default': '',
                        'description': 'Filename of the output file'}
    # NOTE(review): the literal below contains 'messag es' (split word) --
    # looks like a typo in the help text; confirm against the original file
    # before changing, since it is a runtime string.
    option['verbosity'] = {'short': 'V', 'arg': True,
                           'default': logging.INFO,
                           'description': 'From which level to print system messag es [%d, %d]. For details please consult the documentation of python\'s "logging" module' % (logging.NOTSET, logging.CRITICAL)}

    parser = myopt.Parser(extra_args=True)
    for (k,v) in option.items():
        parser.add_option(k, v['short'], v['description'], v['arg'], v['default'])

    if len(sys.argv) == 1:
        print parser.usage('global "%s" options:' % sys.argv[0])
        sys.exit(1)

    #process the global options
    (kwargs, extra) = parser.parse(sys.argv[1:], prefix='global "%s" options:' % sys.argv[0])

    #now the things which require global defaults
    logging.getLogger('').setLevel(kwargs['verbosity'])

    # merge: read all events from the extra (positional) input files and
    # write them into a single output stream
    stream = eformat.istream(extra)
    ostream = eformat.ostream()
    total = 0
    for e in stream:
        ostream.write(e)
        sys.stdout.write('.')
        sys.stdout.flush()
        total += 1
    sys.stdout.write('\n')
    sys.stdout.flush()
    oname = ostream.last_filename()
    # only rename to the user-chosen name after the stream is closed
    if len(kwargs['output']) != 0:
        del ostream
        os.rename(oname, kwargs['output'])
        oname = kwargs['output']
    logging.info('Wrote %d events in %s\n' % (total, oname))
    sys.exit(0)
def test01_WriteReadOne(self):
    """Write one fully-populated event (ROB, trigger info, status words,
    stream tags, ADLER32 checksum) and verify it reads back identically."""
    rodData = [1,2,3,4,5,6]
    rodStatus = [7,8,9,10,11,12,13,14,15]
    lvl2Info = [16, 17]
    efInfo = [18]
    status = [19,20,21]
    stream = []
    for i in range(5):
        a = eformat.helper.StreamTag()
        a.name = 'Name-%d' % i
        a.type = 'calibration'
        a.obeys_lumiblock = bool(i % 3)
        if i==3:
            a.robs.append(0xff)
        # NOTE(review): this branch is unreachable -- range(5) yields 0..4,
        # so i is never 5; range(6) was probably intended. Confirm before
        # changing, as it would add a sixth stream tag.
        if i==5:
            a.dets.append(eformat.helper.SubDetector.TDAQ_BEAM_CRATE)
        stream.append(a)
    sid_rob = eformat.helper.SourceIdentifier(eformat.helper.SubDetector.TDAQ_LVL2, 0x1)
    rob = eformat.write.ROBFragment()
    rob.source_id(sid_rob)
    rob.rod_status(rodStatus)
    rob.rod_data(rodData)
    event = eformat.write.FullEventFragment()
    event.append(rob)
    event.lvl2_trigger_info(lvl2Info)
    event.event_filter_info(efInfo)
    event.status(status)
    event.stream_tag(stream)
    event.checksum_type(eformat.helper.CheckSum.ADLER32)
    out = eformat.ostream()
    out.write(event)
    outfile = out.last_filename()
    del out #so we close the file
    input = eformat.istream(outfile)
    self.assertEqual(1, len(input))
    read = input[0] #there should be only one
    self.assertEqual(event, read)
    self.assertEqual(read.checksum(), True)
    os.unlink(outfile)
def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-f', '--file', metavar='FILE', nargs='*', default=[], help='local file or castor path') parser.add_argument('-g', '--globalid', type=int, action='store', nargs='*', help='Global event ID') parser.add_argument('-l', '--lvl1id', type=int, action='store', nargs='*', help='LVL1 ID') parser.add_argument('-t', '--time', action=StoreTime, nargs='*', help='Nanosecond time stamp (seconds:nanoseconds)') parser.add_argument('-d', '--debug', type=int, action='store', metavar='RUN', help='Find event in debug streams of RUN') parser.add_argument('-s', '--save', metavar='OUTFILE', nargs='?', action='store', const='trigbs_findevent', help='Save selected events in OUTFILE') parser.add_argument('--debugPath', action='store', default='/castor/cern.ch/grid/atlas/DAQ/2012', help='Path to debug stream events %(default)s') parser.add_argument('-v', '--verbose', action='store_true', help='Be verbose') args = parser.parse_args() files = [] for f in args.file: if f.find('castor') != -1: files += nsls(f) else: files += [f] if args.debug != None: # Set/reset castor environment for debug stream access stage_host = os.environ.get('STAGE_HOST', None) stage_svcclass = os.environ.get('STAGE_SVCCLASS', None) os.environ['STAGE_HOST'] = 'castoratlas' os.environ['STAGE_SVCCLASS'] = 'atlcal' debug_dirs = nsls( os.path.join(args.debugPath, '%08d' % args.debug, 'debug_*')) for d in debug_dirs: files += nsls(os.path.join(d, '*.data')) if stage_host: os.environ['STAGE_HOST'] = stage_host if stage_svcclass: os.environ['STAGE_SVCCLASS'] = stage_svcclass ofs = None if args.save != None: ofs = eformat.ostream(core_name=args.save) for f in files: ifs = eformat.istream(f) if args.verbose == True: print '==%s' % f for e in ifs: found = True if ofs: ofs.write(e) if args.globalid != None and e.global_id() not in args.globalid: found = False if args.lvl1id != None and e.lvl1_id() not in args.lvl1id: found = False if args.time != 
None and ( e.bc_time_seconds(), e.bc_time_nanoseconds()) not in args.time: found = False if found: print f, fmtEvent(e, args.time != None)
import eformat
# Opening an ostream and writing nothing still produces an output file:
# this one-liner creates an empty EventStorage file with core name 'empty'.
eformat.ostream(core_name='empty')
# Script body: copy up to --events events from FILE into a new file whose
# metadata (run number, trigger type, masks, beam info, free metadata
# strings, compression) is cloned from the input, applying modify() to each.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('file', metavar='FILE', nargs=1, help='input file')
parser.add_argument('-n', '--events', type=int, default=-1,
                    help='number of events to process')
parser.add_argument('-o', '--output', type=str, help='core output file name')
args = parser.parse_args()

# the data reader exposes the input file's EventStorage metadata
dr = EventStorage.pickDataReader(args.file[0])
output = eformat.ostream(core_name=args.output or dr.fileNameCore(),
                         run_number=dr.runNumber(),
                         trigger_type=dr.triggerType(),
                         detector_mask=dr.detectorMask(),
                         beam_type=dr.beamType(),
                         beam_energy=dr.beamEnergy(),
                         meta_data_strings=dr.freeMetaDataStrings(),
                         compression=dr.compression())
i = 0
for event in eformat.istream(args.file[0]):
    i += 1
    # --events <= 0 means process everything
    if args.events > 0 and i > args.events:
        break
    newevt = modify(event)
    output.write(newevt)
def main(filelist, EventsList): ####### Input-Output Info follows input_file = filelist max = len(EventsList) tmpdir = commands.getoutput("echo $TMPDIR") if (os.path.exists(tmpdir)): print '..', tmpdir, "already exists" else: print ".. Generating", tmpdir os.system("mkdir $TMPDIR") currentTime = datetime.now().strftime("%Y-%m-%d_%H%M%S") os.system("mkdir $TMPDIR/" + currentTime) output_dir = tmpdir + "/" + currentTime print print '****** Output dir is:', output_dir, '******' print flag_written = (0, 0) write_counter = 0 print ".. Opening file: %s" % (input_file) file = open(input_file, 'r') line_counter = 0 for line in file: # print line.strip(), flag_written if (flag_written[0] == 1): break command_cp_from_CAF = 'xrdcp root://castoratlas/' + line.strip( ) + " " + tmpdir + '/Data.data' print '.... making local copy: ', command_cp_from_CAF copyOutput = commands.getoutput(command_cp_from_CAF) if "Permission denied" in copyOutput: print ". Permission denied, continue..." continue if "No such file or directory" in copyOutput: print ". No such file or directory, continue..." continue try: # no_try = 1 # if (no_try==1): file_to_read = tmpdir + '/Data.data' print ".. Opening local copy of input file: %s" % line.strip() line_counter += 1 input = eformat.istream(file_to_read.strip()) ## Updated from Brian's script - this info needs to be attached in all output files. dr = EventStorage.pickDataReader(file_to_read.strip()) output = eformat.ostream(core_name="subset", directory=output_dir, run_number=dr.runNumber(), trigger_type=dr.triggerType(), detector_mask=dr.detectorMask(), beam_type=dr.beamType(), beam_energy=dr.beamEnergy()) (flag_written, write_counter) = event_analysis(input, output, write_counter, EventsList) print '... Processed File #', line_counter print '... Events written out so far', write_counter print command_delete = 'rm -rf ' + tmpdir + '/Data.data' print '.... 
cleaning up: ', command_delete os.system(command_delete) print print if (flag_written[1] == 1): print "*** Wrote", write_counter, "events" tmp_file_name = output.last_filename() del output output_name = output_dir + "/" + opt.run + "_" + str( write_counter ) + "_" + opt.strtype + "_" + opt.strname + "_" + str( line_counter) print "Writting output file: ", output_name, "with", write_counter, "events" os.rename(tmp_file_name, output_name) else: tmp_file_name = output.last_filename() del output os.remove(tmp_file_name) print print except NameError, IOError: print "OOPS! Input Data File Not Found - or a Bug..! (Please report it!)",
def peb_writer(argv):
    """Runs the splitting routines.

    Reads events from the input files given on the command line, selects
    those carrying a stream tag whose name matches --stream-name, and
    writes them (fully, or as a partial event built from the tag's
    ROB/subdetector lists) to a RAW file named after the input metadata.
    """
    import eformat, logging
    import EventApps.myopt as myopt

    option = {}
    # run mode options
    option['start-event'] = {'short': 'a', 'arg': True,
                             'default': 0,
                             'group': 'Run mode',
                             'description': 'Number of events which should be skippped from the begin'}
    option['max-events'] = {'short': 'n', 'arg': True,
                            'default': 0,
                            'group': 'Run mode',
                            'description': 'Maximum number of events in the output file. 0 means, all useful events from the input.'}
    option['verbosity'] = {'short': 'v', 'arg': True,
                           'default': logging.INFO,
                           'group': 'Run mode',
                           'description': 'Log verbosity'}
    option['progress-bar'] = {'short': 'P', 'arg': False,
                              'default': None,
                              'group': 'Run mode',
                              'description': 'Show progress bar when running interactively'}
    option['output-dir'] = {'short': 'd', 'arg': True,
                            'default': '.',
                            'group': 'Run mode',
                            'description': 'Directory in which the output file should be written'}
    # stream tag options
    option['stream-name'] = {'short': 's', 'arg': True,
                             'default': 'DataScouting_05_Jets',
                             'group': 'Stream Tag',
                             'description': 'Name of stream which should be written out'}
    # NOTE(review): '13Tev' below looks like a typo for '13TeV' -- runtime
    # default string, left untouched; confirm with the owners.
    option['project-tag'] = {'short': 'p', 'arg': True,
                             'default': 'data18_13Tev',
                             'group': 'Stream Tag',
                             'description': 'Project tag which should be used for the output file'}
    option['lumi-block'] = {'short': 'l', 'arg': True,
                            'default': 0,
                            'group': 'Stream Tag',
                            'description': 'Lumiblock number used for the output file. Use 0 if multiple LB in file.'}

    parser = myopt.Parser(extra_args=True)
    for (k, v) in option.items():
        parser.add_option(k, v['short'], v['description'], v['arg'], v['default'], v['group'])

    if len(sys.argv) == 1:
        print parser.usage('global "%s" options:' % sys.argv[0])
        sys.exit(1)

    # process the global options
    (kwargs, extra) = parser.parse(sys.argv[1:], prefix='global "%s" options:' % sys.argv[0])

    # global defaults
    logging.getLogger('').name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
    logging.getLogger('').setLevel(kwargs['verbosity'])

    # input data stream
    stream = eformat.istream(extra)

    # input event counter
    totalEvents_in = 0

    # get metadata from inputfile
    dr = eformat.EventStorage.pickDataReader(extra[0])

    # parameters for building the output file name
    runNumber = dr.runNumber()
    outputDirectory = kwargs['output-dir']
    streamName = kwargs['stream-name']
    projectTag = kwargs['project-tag']
    lumiBlockNumber = kwargs['lumi-block'] # if output file can have multiple lumi blocks, use 0
    applicationName = 'athenaHLT'
    productionStep = 'merge' # output file with multiple lumi blocks
    streamType = 'unknown' # the real stream type will be extracted from the matching stream tag

    # check the output directory if it exists
    if (not os.path.exists(outputDirectory)) or (not os.path.isdir(outputDirectory)):
        logging.fatal(' Output directory %s does not exist ' % outputDirectory)
        sys.exit(1)

    # output event counter
    totalEvents_out = 0

    # counter of skipped events
    totalEvents_skipped = 0

    # Loop over events
    for e in stream:
        totalEvents_in += 1

        # select events
        if kwargs['start-event'] > 0:
            kwargs['start-event'] -= 1
            totalEvents_skipped += 1
            continue

        # NOTE(review): this compares against the INPUT event count
        # (including skipped events), while the option text promises a cap
        # on the number of events in the OUTPUT file -- verify intent.
        if kwargs['max-events'] > 0 and totalEvents_in >= kwargs['max-events']:
            logging.info(' Maximum number of events reached : %d' % kwargs['max-events'])
            break

        # find StreamTags and see if there is a match
        streamTags = e.stream_tag()
        logging.debug(' === New Event nr = %s (Run,Global ID) = (%d,%d) === ' %
                      (totalEvents_in, e.run_no(), e.global_id()))

        for tag in streamTags:
            if tag.name == streamName:
                # the event should be written out
                logging.debug(' Matching event found for stream tag = %s' % tag)
                logging.debug(' Stream Tag:Robs = %s' % [hex(r) for r in tag.robs])
                logging.debug(' Stream Tag:Dets = %s' % [hex(d) for d in tag.dets])

                # check the lumi block number from the event against the lumi block number defined for the file
                # this check is only done if the lumi block number for the file is different from 0
                if lumiBlockNumber > 0:
                    if e.lumi_block() != lumiBlockNumber:
                        logging.error(' Event (Run,Global ID) = (%d,%d) has a lumi block number %d,'
                                      ' which is different from LB = %d for the output file. Event skipped.'
                                      % (e.run_no(), e.global_id(), e.lumi_block(), lumiBlockNumber))
                        continue

                # check that all events have the same run number as the output file indicates otherwise skip event
                if e.run_no() != runNumber:
                    logging.error(' Event (Run,Global ID) = (%d,%d) has a run number,'
                                  ' which is different from the run number = %d for the output file. Event skipped.'
                                  % (e.run_no(), e.global_id(), runNumber))
                    continue

                # set the overall tag type for the first match
                if streamType != tag.type:
                    streamType = tag.type
                    logging.debug(' streamType set to = %s' % streamType)

                # create the RAW output file name
                outRawFile = eformat.EventStorage.RawFileName(projectTag,
                                                              runNumber,
                                                              streamType,
                                                              streamName,
                                                              lumiBlockNumber,
                                                              applicationName,
                                                              productionStep)
                logging.debug(' set output file name = %s' % outRawFile.fileNameCore())

                # create the output stream
                # NOTE(review): this re-creates the ostream for every
                # matching event; whether eformat.ostream then appends to
                # the same file or opens a new one cannot be told from
                # here -- confirm against the eformat API.
                ostream = eformat.ostream(directory=outputDirectory,
                                          core_name=outRawFile.fileNameCore(),
                                          run_number=dr.runNumber(),
                                          trigger_type=dr.triggerType(),
                                          detector_mask=dr.detectorMask(),
                                          beam_type=dr.beamType(),
                                          beam_energy=dr.beamEnergy())

                # decide what to write out
                if streamType == 'physics' or streamType == 'express' or (len(tag.robs) == 0 and len(tag.dets) == 0):
                    # write out the full event fragment
                    pbev = eformat.write.FullEventFragment(e)
                    logging.debug(' Write full event fragment ')
                else:
                    # select ROBs to write out
                    rob_output_list = []
                    logging.debug(' Write partial event fragment ')
                    for rob in e:
                        if rob.source_id().code() in tag.robs:
                            rob_output_list.append(rob)
                        if rob.source_id().subdetector_id() in tag.dets:
                            rob_output_list.append(rob)
                    # write out the partial event fragment
                    pbev = eformat.write.FullEventFragment()
                    pbev.copy_header(e)
                    for out_rob in rob_output_list:
                        pbev.append_unchecked(out_rob)

                # put the event onto the output stream
                ostream.write(pbev)
                if (logging.getLogger('').getEffectiveLevel() > logging.DEBUG) and kwargs['progress-bar']:
                    sys.stdout.write('.')
                    sys.stdout.flush()

                # increase output event counter
                totalEvents_out += 1

    # print final statistics
    logging.info('Total number of events processed = %d ' % totalEvents_in)
    logging.info('Number of events skipped at the beginning = %d ' % totalEvents_skipped)
    logging.info('Number of events written to output file = %d ' % totalEvents_out)
    if totalEvents_out > 0:
        logging.info('Output file = %s ' % ostream.last_filename())
    sys.exit(0)
def main():
    """Rewrite a bytestream file with (optionally) modified EventStorage
    metadata: events come from --file, metadata from --copyFrom (or the
    same file), and individual fields can be overridden on the command
    line before writing the new file."""
    args = get_parser().parse_args()
    logging.basicConfig(stream=sys.stdout,
                        format='%(levelname)-8s %(message)s',
                        level=logging.DEBUG if args.verbose else logging.INFO)
    if args.copyFrom:
        logging.info('Reading events from %s and metadata from %s', args.file, args.copyFrom)
    else:
        logging.info('Reading events and metadata from %s', args.file)
    meta_input = args.copyFrom if args.copyFrom else args.file
    reader = EventStorage.pickDataReader(meta_input)
    input_stream = eformat.istream(args.file)
    # Read metadata from input file
    metadata_basic = {} # arguments for eformat.ostream
    metadata_extra = {} # metadata passed as dictionary
    metadata_basic['runNumber'] = reader.runNumber()
    metadata_basic['triggerType'] = reader.triggerType()
    metadata_basic['detectorMask'] = reader.detectorMask()
    metadata_basic['beamType'] = reader.beamType()
    metadata_basic['beamEnergy'] = reader.beamEnergy()
    metadata_extra['Stream'] = reader.stream()
    metadata_extra['Project'] = reader.projectTag()
    metadata_extra['LumiBlock'] = reader.lumiblockNumber()
    logging.debug('Input metadata_basic = %s', metadata_basic)
    logging.debug('Input metadata_extra = %s', metadata_extra)
    # Change metadata: each truthy command-line value overrides the input
    # (a value of 0 therefore cannot be forced from the command line)
    if args.runNumber:
        metadata_basic['runNumber'] = args.runNumber
    if args.triggerType:
        metadata_basic['triggerType'] = args.triggerType
    if args.detectorMask:
        metadata_basic['detectorMask'] = args.detectorMask
    if args.beamType:
        metadata_basic['beamType'] = beam_type_dict[args.beamType]
    if args.beamEnergy:
        metadata_basic['beamEnergy'] = args.beamEnergy
    if args.stream:
        metadata_extra['Stream'] = args.stream
    if args.projectTag:
        metadata_extra['Project'] = args.projectTag
    if args.lumiBlock:
        metadata_extra['LumiBlock'] = args.lumiBlock
    logging.debug('Updated metadata_basic = %s', metadata_basic)
    logging.debug('Updated metadata_extra = %s', metadata_extra)
    # Create new file name: project.runno.stream.lbNNNN unless --outputName
    file_name_base = args.outputName
    if not file_name_base:
        # Get the name elements
        ptag = metadata_extra['Project']
        runno = metadata_basic['runNumber']
        stream = metadata_extra['Stream']
        lbn = metadata_extra['LumiBlock']
        # Build the name
        file_name_list = []
        file_name_list.append(ptag if ptag else 'data')
        file_name_list.append('{:08d}'.format(runno if runno else 0))
        file_name_list.append(stream if stream else 'unknown_stream')
        file_name_list.append('lb{:04d}'.format(lbn if lbn else 0))
        file_name_base = '.'.join(file_name_list)
    # Write the new file; extra metadata travels as 'key=value' strings
    metadata_extra_strings = ['{:s}={:s}'.format(k, str(v)) for k, v in six.iteritems(metadata_extra)]
    output_stream = eformat.ostream(core_name = file_name_base,
                                    run_number = metadata_basic['runNumber'],
                                    trigger_type = metadata_basic['triggerType'],
                                    detector_mask = metadata_basic['detectorMask'],
                                    beam_type = metadata_basic['beamType'],
                                    beam_energy = metadata_basic['beamEnergy'],
                                    meta_data_strings = metadata_extra_strings)
    logging.info('Writing file %s', output_stream.current_filename().replace('.writing', '.data'))
    ievt = 0
    nmax = args.numEvents or -1
    for event in input_stream:
        ievt+=1
        if nmax >= 0 and ievt > nmax:
            break
        logging.debug('Writing event %d', ievt)
        output_stream.write(event)
data = rob.rod_data() newdata=reducedLARFEB(data) if len(data)>data[0]: newdata+=[data[data[0]+ii] for ii in range(7)] #middle "ROD" header data=data[data[0]+7:] newdata+=reducedLARFEB(data) newrob.rod_data(newdata) new_event.append(newrob) return new_event.readonly() if __name__ == "__main__": if len(sys.argv)!=3: print('usage: %s <infile> <outfile>' % sys.argv[0]) sys.exit(1) input_file = sys.argv[1] output_file = sys.argv[2] input = eformat.istream([input_file]) dr=EventStorage.pickDataReader(input_file) output = eformat.ostream(core_name="subset", run_number=dr.runNumber(), trigger_type=dr.triggerType(), detector_mask=dr.detectorMask(), beam_type=dr.beamType(), beam_energy=dr.beamEnergy()) for event in input: output.write(modify(event)) tmp_file_name = output.last_filename() del output os.rename(tmp_file_name,output_file)
import os version = eformat.helper.Version().human_major() if len(sys.argv) == 1: print "Converts files from any older supported format to v%s" % version print "Files are stored in the current working directory" print "usage: %s <data-file> [+data-file]" % sys.argv[0] sys.exit(1) logging.info("Legend: 'x' => invalid fragments; '.' => converted correctly") for f in sys.argv[1:]: istr = eformat.istream(f) logging.info("Working at file %s (%d fragments)" % (f, len(istr))) ostr = eformat.ostream() curr = istr.__iter__() try: while True: try: e = curr.next() e.check_tree() except RuntimeError, ex: sys.stdout.write('x') sys.stdout.write('\n') sys.stdout.write(str(ex)) sys.stdout.write('\n => Fragment Ignored!\n') sys.stdout.flush() else: sys.stdout.write('.') sys.stdout.flush()
if __name__ == "__main__": if len(sys.argv) <= 1: print "Syntax: trigbs_prescaleL1.py FILE" sys.exit(1) log.setLevel(logging.DEBUG) kwargs = { 'configuration': { 'db-server': 'TRIGGERDB_RUN1', 'db-extra': { 'lvl1key': 300 } } } kwargs = { 'configuration': { 'db-server': 'TRIGGERDBREPR', 'db-extra': { 'lvl1key': 30 } } } os = eformat.ostream() for e in eformat.istream(sys.argv[1]): kwargs['event'] = e new_event = modify_general(**kwargs) os.write(new_event)
def main():
    """Copy a bytestream file while rewriting per-event fields (run number,
    lumiblock progression, timestamp) via modify(), optionally dropping
    ROBs whose source id matches --removeRobs."""
    import argparse
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('file', metavar='FILE', nargs=1, help='input file')
    parser.add_argument('-o', '--output', type=str, help='output base file name')
    parser.add_argument('-n', '--events', type=int, default=-1,
                        help='number of events to process')
    parser.add_argument('-r', '--runNumber', type=int, help='set run number')
    parser.add_argument('-l', '--eventsPerLB', type=int,
                        help='increment lumiblock number in steps [%(default)s]')
    parser.add_argument('--firstLB', type=int, default=1,
                        help='first lumiblock number [%(default)s]')
    parser.add_argument('--incLB', type=int, default=1,
                        help='increment steps for lumiblock number [%(default)s]')
    parser.add_argument('-t', '--timestamp', type=int,
                        help='set timestamp in seconds [%(default)s]')
    parser.add_argument('--removeRobs', metavar='PATTERN', type=str,
                        help='regex for removing specific ROB IDs')
    args = parser.parse_args()

    # publish the options through the module-level Config/Store objects
    # that modify() reads
    Config.firstLB = Store.currentLB = args.firstLB
    Config.incLB = args.incLB
    Config.eventsPerLB = args.eventsPerLB
    Config.runNumber = args.runNumber
    Config.bc_sec = args.timestamp

    log.info('Opening file %s', args.file[0])
    dr = eformat.EventStorage.pickDataReader(args.file[0])
    ostr = eformat.ostream(core_name=args.output or dr.fileNameCore(),
                           run_number=Config.runNumber or dr.runNumber(),
                           trigger_type=dr.triggerType(),
                           detector_mask=dr.detectorMask(),
                           beam_type=dr.beamType(),
                           beam_energy=dr.beamEnergy())
    bsfile = eformat.istream([args.file[0]])
    i = 0
    for event in bsfile:
        i += 1
        # --events <= 0 means process everything
        if args.events > 0 and i > args.events:
            break
        ro_event = modify(event)
        if args.removeRobs:
            # the regex filters out ROBs while the writable copy is built
            rw_event = eformat.write.FullEventFragment(ro_event, re.compile(args.removeRobs))
        else:
            rw_event = eformat.write.FullEventFragment(ro_event)
        ostr.write(rw_event)
    return
def main():
    """Find events by global id, LVL1 id and/or timestamp in the given
    files, print matches and optionally save them to an output file.

    BUGFIX: events are now written to the --save output only when they pass
    the selection; previously ofs.write(e) ran before the filters, so every
    event was saved although the help text promises 'selected events'.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-f', '--file', metavar='FILE', nargs='*',
                        default=[], help='file name')
    parser.add_argument('-g', '--globalid', type=int, action='store',
                        nargs='*', help='Global event ID')
    parser.add_argument('-l', '--lvl1id', type=int, action='store',
                        nargs='*', help='LVL1 ID')
    parser.add_argument('-t', '--time', action=StoreTime, nargs='*',
                        help='Nanosecond time stamp (seconds:nanoseconds)')
    parser.add_argument('-s', '--save', metavar='OUTFILE', nargs='?',
                        action='store', const='trigbs_findevent',
                        help='Save selected events in OUTFILE')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Be verbose')
    args = parser.parse_args()

    ofs = None
    if args.save is not None:
        ofs = eformat.ostream(core_name=args.save)

    for f in args.file:
        ifs = eformat.istream(f)
        if args.verbose:
            print('==%s' % f)
        for e in ifs:
            found = True
            if args.globalid is not None and e.global_id() not in args.globalid:
                found = False
            if args.lvl1id is not None and e.lvl1_id() not in args.lvl1id:
                found = False
            if args.time is not None and (e.bc_time_seconds(), e.bc_time_nanoseconds()) not in args.time:
                found = False
            if found:
                print('%s %s' % (f, fmtEvent(e, args.time is not None)))
                # only save events that passed all selections
                if ofs:
                    ofs.write(e)
def main(filelist, chain_to_write, max, run_number): ####### Input-Output Info follows input_file = filelist tmpdir =commands.getoutput("echo $TMPDIR") if (os.path.exists(tmpdir)): print tmpdir,"already exists" else: print "Generating",tmpdir os.system("mkdir $TMPDIR") currentTime = datetime.now().strftime("%Y-%m-%d_%H%M%S") os.system("mkdir $TMPDIR/"+currentTime) output_dir = tmpdir+"/"+currentTime print print '****** Output dir is:',output_dir,'******' print flag_written=(0,0) write_counter = 0 print "Opening file: %s" % (input_file) print "Will write to file chain: ", chain_to_write file = open(input_file,'r') line_counter = 0 for line in file: # print line.strip(), flag_written if (flag_written[0]==1): break command_cp_from_CAF = 'rfcp '+line.strip()+" "+tmpdir+'/Data.data' print command_cp_from_CAF os.system(command_cp_from_CAF) try: file_to_read = tmpdir+'/Data.data' print "Opening file of input file: %s" % line.strip() line_counter+=1 input = eformat.istream(file_to_read.strip()) ## Updated from Brian's script - this info needs to be attached in all output files. dr=EventStorage.pickDataReader(file_to_read.strip()) output = eformat.ostream(core_name="subset", directory=output_dir, run_number=dr.runNumber(), trigger_type=dr.triggerType(), detector_mask=dr.detectorMask(), beam_type=dr.beamType(), beam_energy=dr.beamEnergy()) (flag_written,write_counter) = event_analysis(input, output, chain_to_write, max, run_number, write_counter) print '... Processed File #',line_counter print '... 
Events written out so far',write_counter print command_delete = 'rm -rf '+tmpdir+'/Data.data' print command_delete os.system(command_delete) print print if (flag_written[1]==1): if (int(max)==-1): print "*** Wrote all available events", write_counter tmp_file_name = output.last_filename() del output output_name = output_dir+"/"+run_number+"_"+chain_to_write+"_"+max+"_"+stream_name+"_"+str(line_counter) print "Writting output file: ", output_name,"with",write_counter,"events" os.rename(tmp_file_name,output_name) print print os.system("rm -rf "+filelist) except NameError, IOError: print "OOPS! Input Data File Not Found - or a Bug..!"
def my_split(argv):
    """Runs the splitting routines"""
    import eformat, logging
    import EventApps.myopt as myopt

    option = {}
    option["start-event"] = {
        "short": "a",
        "arg": True,
        "default": 0,
        "description": "The number of events I should skip from the begin",
    }
    option["max-events"] = {
        "short": "n",
        "arg": True,
        "default": 0,
        "description": "The total maximum number of events you want in the output file. The special value 0 means up to the end.",
    }
    option["output"] = {"short": "o", "arg": True, "default": "",
                        "description": "Filename of the output file"}
    option["verbosity"] = {
        "short": "d",
        "arg": True,
        "default": logging.INFO,
        "description": "How much I should say about the execution",
    }

    parser = myopt.Parser(extra_args=True)
    for (k, v) in option.items():
        parser.add_option(k, v["short"], v["description"], v["arg"], v["default"])

    if len(sys.argv) == 1:
        print parser.usage('global "%s" options:' % sys.argv[0])
        sys.exit(1)

    # process the global options
    (kwargs, extra) = parser.parse(sys.argv[1:], prefix='global "%s" options:' % sys.argv[0])

    # now the things which require global defaults
    logging.getLogger("").setLevel(kwargs["verbosity"])

    # copy events [start-event, start-event + max-events) from the
    # positional input files into a single output stream
    stream = eformat.istream(extra)
    ostream = eformat.ostream()
    total = 0
    for e in stream:
        # consume the skip budget first, without writing
        if kwargs["start-event"] > 0:
            kwargs["start-event"] -= 1
            continue
        if kwargs["max-events"] > 0 and total >= kwargs["max-events"]:
            break
        ostream.write(e)
        sys.stdout.write(".")
        sys.stdout.flush()
        total += 1
    sys.stdout.write("\n")
    sys.stdout.flush()
    oname = ostream.last_filename()
    # only rename to the user-chosen name after the stream is closed
    if len(kwargs["output"]) != 0:
        del ostream
        os.rename(oname, kwargs["output"])
        oname = kwargs["output"]
    sys.stdout.write("Wrote %d events in %s\n" % (total, oname))
    sys.exit(0)