def __init__(self, f):
    """Open a single-file event stream.

    What counts as a "valid file" is decided entirely by the data reader.
    Raises IOError when the reader refuses the file.
    """
    self.total_events = 0
    reader = EventStorage.pickDataReader(f)
    if reader is None:
        raise IOError("Invalid file or format at '{}'".format(f))
    self.total_events = reader.eventsInFile()
    self.file = f

def __len__(self):
    """Returns the number of events available in this stream"""
    return self.total_events
def ReplaceMUCTPI(input_file, output_file):
    """Pass every event of input_file through modify() and write the result
    to output_file, preserving the run metadata of the input file."""
    events = eformat.istream([input_file])
    reader = EventStorage.pickDataReader(input_file)
    # Propagate the input file's run metadata onto the output stream.
    writer = eformat.ostream(core_name="subset",
                             run_number=reader.runNumber(),
                             trigger_type=reader.triggerType(),
                             detector_mask=reader.detectorMask(),
                             beam_type=reader.beamType(),
                             beam_energy=reader.beamEnergy())
    for event in events:
        writer.write(modify(event))
    # Closing the stream (via del) finalises the file; then give it its name.
    final_name = writer.last_filename()
    del writer
    os.rename(final_name, output_file)
def __iter__(self):
    """Yield one FullEventFragment per event across all files in self.filelist.

    Old-version blobs are converted on the fly. Raises SyntaxError when a
    blob does not start with the full-event header marker.
    """
    for f in self.filelist:
        dr = EventStorage.pickDataReader(f)
        for k in range(dr.eventsInFile()):
            blob = dr.getData()
            # check for people trying old versions and convert it on the spot
            fragment_version = helper.Version(blob[3])
            if fragment_version.major_version() != helper.MAJOR_DEFAULT_VERSION:
                current_version = helper.Version()
                logging.debug("Converting from version %s to %s" % \
                    (fragment_version.human_major(), current_version.human_major()))
                blob = convert_old(blob)
            if blob[0] == helper.HeaderMarker.FULL_EVENT:
                yield FullEventFragment(blob)
            else:
                # BUG FIX: 'raise Exc, "msg"' is Python-2-only syntax; the
                # parenthesised form is valid under both Python 2 and 3.
                raise SyntaxError("Expecting event marker, not 0x%08x" % blob[0])
def __init__(self, l):
    """Initializes an event stream with a file or a list of files.

    Raises TypeError for anything that is not a string or a list, and
    IOError when the data reader rejects one of the files.
    """
    # make sure we are always dealing with lists
    if type(l) is str:
        l = [l]
    if type(l) is not list:
        # BUG FIX: 'raise Exc, "msg"' is Python-2-only syntax; the
        # parenthesised form is valid under both Python 2 and 3.
        raise TypeError(
            "stream class accepts only a string or a list of strings as parameters")
    # we leave completely to the data reader to defined "valid files".
    self.total_events = 0
    for f in l:
        dr = EventStorage.pickDataReader(f)
        if dr is None:
            raise IOError("Invalid file or format at '%s'" % f)
        self.total_events += dr.eventsInFile()
    self.filelist = list(l)  # deep copy
def __iter__(self):
    """Yield (file offset, FullEventFragment) pairs for every event in self.file.

    Old-version blobs are converted on the fly. Raises SyntaxError when a
    blob does not start with the full-event header marker.
    """
    dr = EventStorage.pickDataReader(self.file)
    for k in range(dr.eventsInFile()):
        # getPosition() is taken before getData() so the offset refers to
        # the event about to be read.
        offset = dr.getPosition()
        blob = dr.getData()
        # check for people trying old versions and convert it on the spot
        fragment_version = helper.Version(blob[3])
        if fragment_version.major_version() != helper.MAJOR_DEFAULT_VERSION:
            current_version = helper.Version()
            logging.debug("Converting from version {} to {}".format(
                fragment_version.human_major(), current_version.human_major()))
            blob = convert_old(blob)
        if blob[0] == helper.HeaderMarker.FULL_EVENT:
            yield (offset, FullEventFragment(blob))
        else:
            # BUG FIX: 'raise Exc, "msg"' is Python-2-only syntax; the
            # parenthesised form is valid under both Python 2 and 3.
            raise SyntaxError("Expecting event marker, not 0x{:08x}".format(blob[0]))
def main(filelist, chain_to_write, max, run_number): ####### Input-Output Info follows input_file = filelist tmpdir =commands.getoutput("echo $TMPDIR") if (os.path.exists(tmpdir)): print tmpdir,"already exists" else: print "Generating",tmpdir os.system("mkdir $TMPDIR") currentTime = datetime.now().strftime("%Y-%m-%d_%H%M%S") os.system("mkdir $TMPDIR/"+currentTime) output_dir = tmpdir+"/"+currentTime print print '****** Output dir is:',output_dir,'******' print flag_written=(0,0) write_counter = 0 print "Opening file: %s" % (input_file) print "Will write to file chain: ", chain_to_write file = open(input_file,'r') line_counter = 0 for line in file: # print line.strip(), flag_written if (flag_written[0]==1): break command_cp_from_CAF = 'rfcp '+line.strip()+" "+tmpdir+'/Data.data' print command_cp_from_CAF os.system(command_cp_from_CAF) try: file_to_read = tmpdir+'/Data.data' print "Opening file of input file: %s" % line.strip() line_counter+=1 input = eformat.istream(file_to_read.strip()) ## Updated from Brian's script - this info needs to be attached in all output files. dr=EventStorage.pickDataReader(file_to_read.strip()) output = eformat.ostream(core_name="subset", directory=output_dir, run_number=dr.runNumber(), trigger_type=dr.triggerType(), detector_mask=dr.detectorMask(), beam_type=dr.beamType(), beam_energy=dr.beamEnergy()) (flag_written,write_counter) = event_analysis(input, output, chain_to_write, max, run_number, write_counter) print '... Processed File #',line_counter print '... 
Events written out so far',write_counter print command_delete = 'rm -rf '+tmpdir+'/Data.data' print command_delete os.system(command_delete) print print if (flag_written[1]==1): if (int(max)==-1): print "*** Wrote all available events", write_counter tmp_file_name = output.last_filename() del output output_name = output_dir+"/"+run_number+"_"+chain_to_write+"_"+max+"_"+stream_name+"_"+str(line_counter) print "Writting output file: ", output_name,"with",write_counter,"events" os.rename(tmp_file_name,output_name) print print os.system("rm -rf "+filelist) except NameError, IOError: print "OOPS! Input Data File Not Found - or a Bug..!"
def main():
    """Copy events from a RAW input file into a new file, optionally
    overriding the file-level metadata from command-line arguments."""
    args = get_parser().parse_args()
    logging.basicConfig(stream=sys.stdout,
                        format='%(levelname)-8s %(message)s',
                        level=logging.DEBUG if args.verbose else logging.INFO)
    if args.copyFrom:
        logging.info('Reading events from %s and metadata from %s', args.file, args.copyFrom)
    else:
        logging.info('Reading events and metadata from %s', args.file)
    # Metadata may come from a different file than the events themselves.
    meta_input = args.copyFrom if args.copyFrom else args.file
    reader = EventStorage.pickDataReader(meta_input)
    input_stream = eformat.istream(args.file)
    # Read metadata from input file
    metadata_basic = {}  # arguments for eformat.ostream
    metadata_extra = {}  # metadata passed as dictionary
    metadata_basic['runNumber'] = reader.runNumber()
    metadata_basic['triggerType'] = reader.triggerType()
    metadata_basic['detectorMask'] = reader.detectorMask()
    metadata_basic['beamType'] = reader.beamType()
    metadata_basic['beamEnergy'] = reader.beamEnergy()
    metadata_extra['Stream'] = reader.stream()
    metadata_extra['Project'] = reader.projectTag()
    metadata_extra['LumiBlock'] = reader.lumiblockNumber()
    logging.debug('Input metadata_basic = %s', metadata_basic)
    logging.debug('Input metadata_extra = %s', metadata_extra)
    # Change metadata: each command-line option, when given, overrides the
    # corresponding value read from the input file.
    if args.runNumber:
        metadata_basic['runNumber'] = args.runNumber
    if args.triggerType:
        metadata_basic['triggerType'] = args.triggerType
    if args.detectorMask:
        metadata_basic['detectorMask'] = args.detectorMask
    if args.beamType:
        # beam_type_dict maps the option string to the eformat value
        # (presumably module-level -- confirm).
        metadata_basic['beamType'] = beam_type_dict[args.beamType]
    if args.beamEnergy:
        metadata_basic['beamEnergy'] = args.beamEnergy
    if args.stream:
        metadata_extra['Stream'] = args.stream
    if args.projectTag:
        metadata_extra['Project'] = args.projectTag
    if args.lumiBlock:
        metadata_extra['LumiBlock'] = args.lumiBlock
    logging.debug('Updated metadata_basic = %s', metadata_basic)
    logging.debug('Updated metadata_extra = %s', metadata_extra)
    # Create new file name
    file_name_base = args.outputName
    if not file_name_base:
        # Get the name elements
        ptag = metadata_extra['Project']
        runno = metadata_basic['runNumber']
        stream = metadata_extra['Stream']
        lbn = metadata_extra['LumiBlock']
        # Build the name: project.runnumber.stream.lbNNNN with fallbacks
        file_name_list = []
        file_name_list.append(ptag if ptag else 'data')
        file_name_list.append('{:08d}'.format(runno if runno else 0))
        file_name_list.append(stream if stream else 'unknown_stream')
        file_name_list.append('lb{:04d}'.format(lbn if lbn else 0))
        file_name_base = '.'.join(file_name_list)
    # Write the new file; extra metadata is passed as "key=value" strings.
    metadata_extra_strings = ['{:s}={:s}'.format(k, str(v)) for k, v in six.iteritems(metadata_extra)]
    output_stream = eformat.ostream(
        core_name = file_name_base,
        run_number = metadata_basic['runNumber'],
        trigger_type = metadata_basic['triggerType'],
        detector_mask = metadata_basic['detectorMask'],
        beam_type = metadata_basic['beamType'],
        beam_energy = metadata_basic['beamEnergy'],
        meta_data_strings = metadata_extra_strings)
    logging.info('Writing file %s', output_stream.current_filename().replace('.writing', '.data'))
    ievt = 0
    # nmax == -1 means "copy all events"
    nmax = args.numEvents or -1
    for event in input_stream:
        ievt += 1
        if nmax >= 0 and ievt > nmax:
            break
        logging.debug('Writing event %d', ievt)
        output_stream.write(event)
data = rob.rod_data() newdata=reducedLARFEB(data) if len(data)>data[0]: newdata+=[data[data[0]+ii] for ii in range(7)] #middle "ROD" header data=data[data[0]+7:] newdata+=reducedLARFEB(data) newrob.rod_data(newdata) new_event.append(newrob) return new_event.readonly() if __name__ == "__main__": if len(sys.argv)!=3: print('usage: %s <infile> <outfile>' % sys.argv[0]) sys.exit(1) input_file = sys.argv[1] output_file = sys.argv[2] input = eformat.istream([input_file]) dr=EventStorage.pickDataReader(input_file) output = eformat.ostream(core_name="subset", run_number=dr.runNumber(), trigger_type=dr.triggerType(), detector_mask=dr.detectorMask(), beam_type=dr.beamType(), beam_energy=dr.beamEnergy()) for event in input: output.write(modify(event)) tmp_file_name = output.last_filename() del output os.rename(tmp_file_name,output_file)
except getopt: usage() for opt, arg in opts: if opt == '-n': eventsPerFile = int(arg) if opt == '-v': verbose = True basename = args[0] files = args[1:] ifile = 0 id = 1 outnum = 1 dr = EventStorage.pickDataReader(files[0]) numFiles = 0 output = eformat.ostream(core_name=basename, run_number=dr.runNumber(), trigger_type=dr.triggerType(), detector_mask=dr.detectorMask(), beam_type=dr.beamType(), beam_energy=dr.beamEnergy()) output.max_events_per_file(eventsPerFile) for input_file in files: ifile += 1 print "Opening file %d: %s" % (ifile, input_file) input = eformat.istream([input_file])
def __iter__(self):
    """Yield (file offset, raw event blob) pairs for each event in self.file."""
    reader = EventStorage.pickDataReader(self.file)
    total = reader.eventsInFile()
    for _ in range(total):
        # Take the position first so it refers to the event being read.
        position = reader.getPosition()
        yield (position, reader.getData())
def processRAW(input_file, eif, nfile, nfirst, evtmax):
    """Index one RAW bytestream file, filling *eif* with one EIrecord per event.

    Parameters:
        input_file -- path of the RAW file to read
        eif        -- dict-like event-index output; keys are suffixed with the
                      file ordinal or the running entry number
        nfile      -- ordinal of this input file (used in eif key names)
        nfirst     -- entry number of the first record written to eif
        evtmax     -- stop once the entry number reaches this value
                      (<= 0 means no limit)

    Returns the number of events processed from this file.
    Raises IOError if the data reader rejects the file.
    """
    log.info("Opening data file: {}".format(input_file))
    eif['StartProcTime_{:d}'.format(nfile)] = int(time.time() * 1000)
    dr = EventStorage.pickDataReader(input_file)
    if dr is None:
        raise IOError("Invalid file or format at '{}'".format(input_file))
    log.info("total_events: {}".format(dr.eventsInFile()))
    log.debug("dataMB: {}".format(dr.dataMB_InFile()))
    log.debug("LumiBlock: {}".format(dr.lumiblockNumber()))
    log.debug("Stream: {}".format(dr.stream()))
    log.debug("App Name: {}".format(dr.appName()))
    log.debug("beamEnergy: {}".format(dr.beamEnergy()))
    log.debug("beamType: {}".format(dr.beamType()))
    log.debug("detectorMask: {}".format(dr.detectorMask()))
    log.debug("Core Name: {}".format(dr.fileNameCore()))
    log.debug("projectTag: {}".format(dr.projectTag()))
    log.debug("GUID: {}".format(dr.GUID()))
    log.debug("runNumber: {}".format(dr.runNumber()))
    log.debug("stream: {}".format(dr.stream()))
    log.debug("triggerType: {}".format(dr.triggerType()))
    GUID = dr.GUID()
    eif['ProjName_{:d}'.format(nfile)] = dr.projectTag()
    eif['TrigStream_{:d}'.format(nfile)] = dr.stream()
    eif['AMITag_{:d}'.format(nfile)] = ""  # no tag for RAW data
    eif['GUID_{:d}'.format(nfile)] = GUID
    #input = eformat.istream([input_file])
    # istream2 yields (offset, event) pairs instead of bare events.
    input = istream2(input_file)
    ###################################################################
    cntEvt = 0            # events processed from this file
    cntEvtEI = nfirst     # running entry number in the event index
    for (offset, event) in input:
        if evtmax > 0 and cntEvtEI >= evtmax:
            break
        eirec = EIrecord()
        # The 24-word L1 trigger info splits into three 8-word blocks:
        # before prescale, after prescale, after veto.
        L1TBP = event.lvl1_trigger_info()[0:8]
        L1TAP = event.lvl1_trigger_info()[8:16]
        L1TAV = event.lvl1_trigger_info()[16:24]
        log.debug("--------------------------------")
        log.debug('Event: {}'.format(cntEvt))
        log.debug('Offset: {}'.format(offset))
        log.debug('RunNumber: {}'.format(event.run_no()))
        log.debug("EventNumber: {}".format(event.global_id()))
        log.debug('L1ID: {}'.format(event.lvl1_id()))
        log.debug("EventTime: {}".format(event.bc_time_seconds()))
        log.debug("EventTimeNanoSec: {}".format(event.bc_time_nanoseconds()))
        log.debug("LumiBlockN: {}".format(event.lumi_block()))
        log.debug("BunchId: {}".format(event.bc_id()))
        log.debug('L1 type: 0x{:02x}'.format(event.lvl1_trigger_type()))
        log.debug('L1 Before Prescale: {}'.format(L1TBP))
        log.debug('L1 After Prescale: {}'.format(L1TAP))
        log.debug('L1 After Veto: {}'.format(L1TAV))
        log.debug("RunType: {}".format(event.run_type()))
        eirec['RunNumber'] = event.run_no()
        eirec['EventNumber'] = event.global_id()
        eirec['LumiBlockN'] = event.lumi_block()
        eirec["BunchId"] = event.bc_id()
        eirec['EventTime'] = event.bc_time_seconds()
        eirec['EventTimeNanoSec'] = event.bc_time_nanoseconds()
        eirec['EventWeight'] = 1.0
        eirec['McChannelNumber'] = 0
        eirec['Lvl1ID'] = event.lvl1_id()
        # Run Type Value
        # Physics       0x00000000
        # Calibration   0x00000001
        # Cosmics       0x00000002
        # Test          0x0000000f
        # Simulation    0x80000000
        runtype = event.run_type().__str__()
        eirec['IsSimulation'] = 1
        eirec['IsTestBeam'] = 0
        eirec['IsCalibration'] = 0
        if "PHYSICS" in runtype:
            eirec['IsSimulation'] = 0
        if "CALIBRATION" in runtype:
            eirec['IsCalibration'] = 1
        if "TEST" in runtype:
            eirec['IsTestBeam'] = 1
        eirec['SMK'] = 0
        eirec['L1PSK'] = 0
        eirec['HLTPSK'] = 0
        eirec['Snam0'] = "StreamRAW"
        # Encode the 64-bit file offset as two 8-hex-digit halves for the
        # POOL-style token template below.
        offset_str = "{0:016X}".format(offset)
        offset_str1 = offset_str[:8]
        offset_str2 = offset_str[8:]
        tk_tmpl = "[DB={}][CNT=00000000][CLID=00000000-0000-0000-0000-000000000000][TECH=00001000][OID={}-{}]"
        eirec['Sref0'] = tk_tmpl.format(GUID, offset_str1, offset_str2)
        # Build trigger-bit strings: each 32-bit word is rendered as binary
        # and reversed so bit 0 comes first, then compressed to base64.
        L1 = event.lvl1_trigger_info()
        trigL1 = ""
        for l in L1:
            trigL1 += "{0:032b}".format(l)[::-1]
        L2 = event.lvl2_trigger_info()
        trigL2 = ""
        for l in L2:
            trigL2 += "{0:032b}".format(l)[::-1]
        EF = event.event_filter_info()
        trigEF = ""
        for l in EF:
            trigEF += "{0:032b}".format(l)[::-1]
        trigL1 = compressB64(trigL1)
        trigL2 = compressB64(trigL2)
        trigEF = compressB64(trigEF)
        eirec['L1PassedTrigMask'] = trigL1
        eirec['L2PassedTrigMask'] = trigL2
        eirec['EFPassedTrigMask'] = trigEF
        # write to db
        eif['Entry_{:d}'.format(cntEvtEI)] = eirec.getRec()
        cntEvt += 1
        cntEvtEI += 1
    eif['Nentries_{:d}'.format(nfile)] = cntEvt
    eif['EndProcTime_{:d}'.format(nfile)] = int(time.time() * 1000)
    return cntEvt
def __iter__(self):
    """Yield the raw data blob of every event in every file of self.filelist."""
    for fname in self.filelist:
        reader = EventStorage.pickDataReader(fname)
        count = reader.eventsInFile()
        for _ in range(count):
            yield reader.getData()
def main(filelist, EventsList): ####### Input-Output Info follows input_file = filelist max = len(EventsList) tmpdir = commands.getoutput("echo $TMPDIR") if (os.path.exists(tmpdir)): print '..', tmpdir, "already exists" else: print ".. Generating", tmpdir os.system("mkdir $TMPDIR") currentTime = datetime.now().strftime("%Y-%m-%d_%H%M%S") os.system("mkdir $TMPDIR/" + currentTime) output_dir = tmpdir + "/" + currentTime print print '****** Output dir is:', output_dir, '******' print flag_written = (0, 0) write_counter = 0 print ".. Opening file: %s" % (input_file) file = open(input_file, 'r') line_counter = 0 for line in file: # print line.strip(), flag_written if (flag_written[0] == 1): break command_cp_from_CAF = 'xrdcp root://castoratlas/' + line.strip( ) + " " + tmpdir + '/Data.data' print '.... making local copy: ', command_cp_from_CAF copyOutput = commands.getoutput(command_cp_from_CAF) if "Permission denied" in copyOutput: print ". Permission denied, continue..." continue if "No such file or directory" in copyOutput: print ". No such file or directory, continue..." continue try: # no_try = 1 # if (no_try==1): file_to_read = tmpdir + '/Data.data' print ".. Opening local copy of input file: %s" % line.strip() line_counter += 1 input = eformat.istream(file_to_read.strip()) ## Updated from Brian's script - this info needs to be attached in all output files. dr = EventStorage.pickDataReader(file_to_read.strip()) output = eformat.ostream(core_name="subset", directory=output_dir, run_number=dr.runNumber(), trigger_type=dr.triggerType(), detector_mask=dr.detectorMask(), beam_type=dr.beamType(), beam_energy=dr.beamEnergy()) (flag_written, write_counter) = event_analysis(input, output, write_counter, EventsList) print '... Processed File #', line_counter print '... Events written out so far', write_counter print command_delete = 'rm -rf ' + tmpdir + '/Data.data' print '.... 
cleaning up: ', command_delete os.system(command_delete) print print if (flag_written[1] == 1): print "*** Wrote", write_counter, "events" tmp_file_name = output.last_filename() del output output_name = output_dir + "/" + opt.run + "_" + str( write_counter ) + "_" + opt.strtype + "_" + opt.strname + "_" + str( line_counter) print "Writting output file: ", output_name, "with", write_counter, "events" os.rename(tmp_file_name, output_name) else: tmp_file_name = output.last_filename() del output os.remove(tmp_file_name) print print except NameError, IOError: print "OOPS! Input Data File Not Found - or a Bug..! (Please report it!)",