def convert(self, _file_in, _file_out=None, max_entries=None):
    # If there is no output file, the output is the input with a new file extension:
    if _file_out is None:
        directory = os.path.dirname(_file_in)
        file_root = os.path.basename(_file_in)
        # Use os.path.join so the directory and file name get a separator:
        _file_out = os.path.join(
            directory, os.path.splitext(file_root)[0] + '_larcv.root')
        # print _file_out

    self._input_file = _file_in
    self._output_file = _file_out

    if not self._initialized:
        self.initialize_geometry()
        self._initialized = True

    # Create the instances of IO managers:
    self._next_io = IOManager()
    self._next_io.set_file(self._input_file)

    # larcv io:
    self._larcv_io = larcv.IOManager(larcv.IOManager.kWRITE)
    self._larcv_io.set_out_file(self._output_file)
    self._larcv_io.initialize()

    self.event_loop(max_entries=max_entries)
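# A minimal usage sketch for the converter above; the class name `NextToLArCV`
# and the input filename are hypothetical, not from the source.
if __name__ == '__main__':
    converter = NextToLArCV()
    converter.convert('next_events.root', max_entries=100)
    # With no _file_out given, this writes next_events_larcv.root
    # alongside the input file.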
def main(input_file_name):
    # path = os.path.basename(input_file_name)
    basename = os.path.basename(input_file_name)
    print basename

    # Open this file with larcv and see what is inside:
    larcv_io = larcv.IOManager()
    larcv_io.add_in_file(input_file_name)
    larcv_io.initialize()
    # larcv_io.read_entry(0)

    # str.rstrip strips a *set of characters*, not a suffix, so use
    # os.path.splitext to swap the .root extension safely:
    output_name = os.path.splitext(basename)[0] + '.h5'
    print output_name

    if os.path.exists(output_name):
        os.remove(output_name)
    _h5_out = h5py.File(output_name, 'a')

    products = larcv_io.product_list()
    n_entries = larcv_io.get_n_entries()

    for i in range(n_entries):
        print("Processing entry {}".format(i))
        larcv_io.read_entry(i)
        for product in products:
            producers = larcv_io.producer_list(product)
            for producer in producers:
                if product == 'image2d':
                    convert_image2d(larcv_io, _h5_out, producer)
                if product == 'particle':
                    convert_particle(larcv_io, _h5_out, producer)
        convert_eventid(larcv_io, _h5_out)
        _h5_out.flush()

    _h5_out.close()
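# Hedged sketch of one of the helpers referenced above. The real
# convert_eventid is not shown in this file, so the dataset name and
# layout here are assumptions, not the actual implementation.
import numpy as np

def convert_eventid(larcv_io, h5_out):
    # Append (run, subrun, event) for the current entry to a growable dataset.
    eid = larcv_io.event_id()
    row = np.array([[eid.run(), eid.subrun(), eid.event()]], dtype=np.uint32)
    if 'eventid' not in h5_out:
        h5_out.create_dataset('eventid', data=row, maxshape=(None, 3))
    else:
        dset = h5_out['eventid']
        dset.resize(dset.shape[0] + 1, axis=0)
        dset[-1] = row[0]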
def GenerateImages(inputRootFile, inputEventList, boundLeft, boundRight):
    outName = 'empty.root'
    eventList = FromSsv(inputEventList, dtype='str')
    shortList = SelectListEvents(eventList, boundLeft, boundRight)
    nListEvents = len(shortList)
    Warning("%i events selected from %i (in selection [%.2f,%.2f])" %
            (nListEvents, len(eventList[0]), boundLeft, boundRight))
    evProcessed = 0

    # Initialization
    iom = larcv.IOManager(2)
    iom.add_in_file(inputRootFile)
    iom.set_out_file(outName)
    iom.initialize()

    # Execution
    for k in xrange(iom.get_n_entries()):
        if k % 100 == 0:
            PrintFlush('%i of %i entries analyzed.' % (k, iom.get_n_entries()))
        iom.read_entry(k)
        imData = iom.get_data(larcv.kProductImage2D, 'tpc')
        eventNumber = int(iom.event_id().event())
        if eventNumber in shortList:
            evProcessed += 1
            print "\nFound event %i from list... [%i more to find]" % (
                eventNumber, nListEvents - evProcessed)
            imageVector = []
            ev = int(iom.event_id().event())
            nRows = int(imData.Image2DArray().at(0).meta().rows())
            nCols = int(imData.Image2DArray().at(0).meta().cols())
            imageVector.append(imData.Image2DArray().at(0).as_vector())
            imageVector.append(imData.Image2DArray().at(1).as_vector())
            imageVector.append(imData.Image2DArray().at(2).as_vector())
            SaveEventDisplays(ev, nRows, nCols, imageVector)
        iom.clear_entry()

    # Finalization
    iom.finalize()
    iom.reset()
    os.remove(outName)
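# Hedged sketch of the SaveEventDisplays helper called above; the real
# implementation is not shown here, so the reshaping convention and the
# output file naming are assumptions.
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

def SaveEventDisplays(event, nRows, nCols, imageVector):
    # One PNG per plane; pixel values come from the Image2D as_vector() output.
    for plane, vec in enumerate(imageVector):
        pixels = np.array(vec, dtype=np.float32).reshape(nRows, nCols)
        plt.imshow(pixels, origin='lower', interpolation='none')
        plt.title('Event %i, plane %i' % (event, plane))
        plt.savefig('event_%i_plane_%i.png' % (event, plane))
        plt.close()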
import ROOT
#ROOT.gSystem.Load("libLArCV")
#ROOT.gSystem.Load("libLArCVData")
from ROOT import larcv

o = larcv.IOManager(larcv.IOManager.kREAD)
o.reset()
o.set_verbosity(0)
o.add_in_file("aho.root")
o.initialize()
#o.get_data("boke")

o.read_entry(0)
print o.get_data(larcv.kProductImage2D, "aho").Image2DArray().size()
print o.get_data(larcv.kProductImage2D, "aho").event_key()

o.read_entry(1)
print o.get_data(larcv.kProductImage2D, "aho").Image2DArray().size()
print o.get_data(larcv.kProductImage2D, "aho").event_key()

o.read_entry(2)
print o.get_data(larcv.kProductImage2D, "aho").Image2DArray().size()
print o.get_data(larcv.kProductImage2D, "aho").event_key()

#o.save_entry()
#o.save_entry()
#o.save_entry()
#o.save_entry()
o.finalize()
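# The three hard-coded reads above generalize to a loop over all entries;
# a minimal sketch, assuming the same "aho.root" input and producer label:
o2 = larcv.IOManager(larcv.IOManager.kREAD)
o2.add_in_file("aho.root")
o2.initialize()
for entry in xrange(o2.get_n_entries()):
    o2.read_entry(entry)
    ev_img = o2.get_data(larcv.kProductImage2D, "aho")
    print ev_img.Image2DArray().size(), ev_img.event_key()
o2.finalize()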
def run(self):
    # Check if configuration is set
    if not self._ready_():
        print '\033[93mAborting\033[00m'
        return

    # Check which GPU to use
    gpu = self._pick_gpu_()
    if gpu < 0:
        print '\033[93mNo GPU available...\033[00m'
        return
    caffe.set_device(gpu)

    # Load a list of already-processed events if the output file exists,
    # and prepare the output file descriptor.
    done_list = None
    fout = None
    if os.path.isfile(self._output):
        df = pandas.read_csv(self._output)
        done_list = [int(x) for x in df.entry.values.astype(np.uint32)]
        fout = open(self._output, 'a')
    else:
        fout = open(self._output, 'w')
        line = ''
        for v in CSV_VARS:
            line += '%s,' % v
        fout.write(line.rstrip(',') + '\n')

    # Construct a net; this also configures the internal larcv IO processor
    net = caffe.Net(self._proto, self._weight, caffe.TEST)

    # Check that the larcv IO processor does in fact exist and is registered in the factory
    if not larcv.ThreadFillerFactory.exist_filler(self._filler_name):
        print '\033[93mFiller', self._filler_name, 'does not exist...\033[00m'
        return

    # Get the IO instance (a ThreadDatumFiller instance) from the factory
    filler = larcv.ThreadFillerFactory.get_filler(self._filler_name)

    # Get the number of events to be processed
    num_events = filler.get_n_entries()

    # Force random access off for inference
    filler.set_random_access(False)

    # Construct our own IO to fetch the ROI object for physics analysis;
    # use READ mode with the same input files
    myio = larcv.IOManager(0, "AnaIO")
    for f in filler.pd().io().file_list():
        myio.add_in_file(f)
    myio.initialize()

    print
    print '\033[95mTotal number of events\033[00m:', num_events
    print '\033[95mBatch size\033[00m:', self._batch_size
    print

    event_counter = 0    # denotes which TTree entry we are at in the loop below
    stop_counter = 1e10  # unused, but one can set a break condition by configuring this parameter

    # Continue the loop until the end of the input file (event list)
    while 1:
        # If a previous result is loaded, check whether the current entry should be processed
        if done_list and (event_counter in done_list):
            event_counter += 1
            continue

        # Force the filler to move the next event-to-read pointer to the entry of interest
        filler.set_next_index(event_counter)

        # Number of entries we expect to process in this mini-batch
        num_entries = num_events - event_counter
        if num_entries > self._batch_size:
            num_entries = self._batch_size

        # Run the network for a mini-batch; sleep while the filler thread is running
        net.forward()
        while filler.thread_running():
            time.sleep(0.001)

        # Retrieve the ROI producer name from the filler so we can read ROI products through myio
        roi_producer = filler.producer(1)

        # Get a vector of integers recording the TTree entry numbers processed in this mini-batch
        entries = filler.processed_entries()
        if entries.size() != self._batch_size:
            print "\033[93mBatch counter mis-match!\033[00m"
            raise Exception

        # Retrieve data already read and stored in memory from the caffe blobs
        adcimgs = net.blobs["data"].data    # image
        labels = net.blobs["label"].data    # label
        scores = net.blobs["softmax"].data  # final output softmax vector

        # Loop over entries of the mini-batch outcome
        for index in xrange(num_entries):
            if not entries[index] == event_counter:
                print '\033[93mLogic error... inconsistency found in expected entry (%d) vs. processing entry (%d)\033[00m' % (event_counter, entries[index])
                self.__class__._terminate = True
                break

            # Skip if this entry is already recorded
            if done_list and (event_counter in done_list):
                event_counter += 1
                continue

            # Tell the user which entry we are processing
            sys.stdout.write('Processing entry %d\r' % event_counter)

            # Declare the csv_vals dictionary and fill the necessary key-value pairs.
            # Later there is an explicit check that all keys are filled. This helps
            # avoid mistakes if someone later updates the script to include/exclude
            # variables in the CSV_VARS definition and forgets to update this
            # portion of the code.
            csv_vals = {}
            adcimg = adcimgs[index]  # ADC raw image
            label = labels[index]    # label
            score = scores[index]    # results

            # Fill what can be filled from the caffe blobs
            csv_vals['entry'] = entries[index]
            csv_vals['npx'] = (adcimg > 0).sum()
            csv_vals['label'] = int(label)
            csv_vals['prediction'] = score.argmax()
            csv_vals['eminus'] = score[0]
            csv_vals['gamma'] = score[1]
            csv_vals['muminus'] = score[2]
            csv_vals['piminus'] = score[3]
            csv_vals['proton'] = score[4]

            # Get ROI data from myio, our separate IO handle, to record physics parameters
            myio.read_entry(entries[index])
            event_roi = myio.get_data(1, roi_producer)
            csv_vals['nparticle'] = 0
            csv_vals['ndecay'] = 0
            csv_vals['energy_dep'] = 0.

            # Loop over ROIs
            for roi in event_roi.ROIArray():
                if roi.MCSTIndex() == larcv.kINVALID_USHORT:
                    # ROI from simb::MCTruth
                    csv_vals['energy_start'] = roi.EnergyInit()
                    csv_vals['mass'] = larcv.ParticleMass(roi.PdgCode())
                    px, py, pz = (roi.Px(), roi.Py(), roi.Pz())
                    ptot = np.sqrt(np.power(px, 2) + np.power(py, 2) + np.power(pz, 2))
                    csv_vals['mom_start'] = ptot
                    csv_vals['dcosx_start'] = px / ptot
                    csv_vals['dcosy_start'] = py / ptot
                    csv_vals['dcosz_start'] = pz / ptot
                else:
                    # ROI from sim::MCShower and sim::MCTrack
                    csv_vals['nparticle'] += 1
                    if roi.ParentTrackID() == roi.TrackID():
                        csv_vals['energy_dep'] = roi.EnergyDeposit()
                    elif np.abs(roi.PdgCode()) == 13 and np.abs(roi.ParentPdgCode()) == 211:
                        csv_vals['ndecay'] += 1
                    elif np.abs(roi.PdgCode()) == 11 and np.abs(roi.ParentPdgCode()) == 13:
                        csv_vals['ndecay'] += 1

            # Record in CSV format
            line = ''
            for v in CSV_VARS:
                try:
                    line += '%s,' % str(csv_vals[v])
                except KeyError:
                    print '\033[93mCould not locate field\033[00m:', v
                    self.__class__._terminate = True
                    break
            line = line.rstrip(',')
            line += '\n'
            fout.write(line)

            # Break if the stop counter is met
            event_counter += 1
            if event_counter >= stop_counter:
                break
            # Break if termination is called
            if self.__class__._terminate:
                break

        # Break if all entries are processed
        if num_entries < self._batch_size:
            break
        # Break if the stop counter is met
        if event_counter >= stop_counter:
            break
        # Break if termination is called
        if self.__class__._terminate:
            print
            print '\033[93mAborting upon kernel kill signal...\033[00m'
            break

    print
    # Close outputs and the input IO
    fout.close()
    myio.finalize()
    # Destroy the thread filler via the factory, its owner
    larcv.ThreadFillerFactory.destroy_filler(self._filler_name)
from ROOT import larcv

iom = larcv.IOManager(larcv.IOManager.kWRITE)
iom.set_verbosity(0)
iom.set_out_file("butthole.root")
iom.initialize()

evi = iom.get_data(0, "event_image")
im1 = larcv.Image2D(10, 10)
im2 = larcv.Image2D(10, 10)
im3 = larcv.Image2D(10, 10)
evi.Append(im1)
evi.Append(im2)
evi.Append(im3)
iom.set_id(1, 0, 0)
iom.save_entry()

evi = iom.get_data(0, "event_image")
im1 = larcv.Image2D(20, 20)
im2 = larcv.Image2D(20, 20)
im3 = larcv.Image2D(20, 20)
evi.Append(im1)
# The snippet was truncated here; presumably the second entry is completed
# the same way as the first before closing the file:
evi.Append(im2)
evi.Append(im3)
iom.set_id(1, 0, 1)
iom.save_entry()
iom.finalize()
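# A hedged read-back check for the file written above; assumes the two saved
# entries and the "event_image" producer label from the writer snippet.
chk = larcv.IOManager(larcv.IOManager.kREAD)
chk.add_in_file("butthole.root")
chk.initialize()
for entry in xrange(chk.get_n_entries()):
    chk.read_entry(entry)
    print chk.get_data(0, "event_image").Image2DArray().size()
chk.finalize()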
for plane, (pos, size) in enumerate([(upos, usize), (vpos, vsize), (ypos, ysize)]):
    #width,height,row_count,col_count,origin_x,origin_y = roi2imgcord( img, size, pos, img_coordinates=True )
    #bbox_meta = larcv.ImageMeta(width,height,row_count,col_count,origin_x,origin_y,plane)
    bbox_meta = larcv.ImageMeta(size[0], size[1], 0, 0, pos[0], pos[1], plane)
    roi.AppendBB(bbox_meta)

roi_dict[rse].Append(roi)

print "Number of entries: ", len(roi_dict)

# Now, open the proton file and an output file.
input = larcv.IOManager(larcv.IOManager.kREAD)
output = larcv.IOManager(larcv.IOManager.kWRITE)
input.add_in_file(larcvPath + "/larcv_" + larcvName + ".root")
input.initialize()
output.set_out_file(larcvPath + "/roi_files/roi_" + larcvName + ".root")
output.initialize()

for entry in xrange(input.get_n_entries()):
#for entry in xrange( 10 ):  # for debug
    input.read_entry(entry)
    in_imgs = input.get_data(larcv.kProductImage2D, "tpc")
    in_rse = (in_imgs.run(), in_imgs.subrun(), in_imgs.event())
t0 = t

# Load LArCV sample events
print 'Step2. Loading events.'
caffe.set_mode_gpu()
caffe.set_device(0)
if not larcv.ThreadFillerFactory.exist_filler("DataFiller"):
    print '\033[93mFiller DataFiller does not exist...\033[00m'
    exit(1)
filler = larcv.ThreadFillerFactory.get_filler("DataFiller")
n_evts = filler.get_n_entries()
filler.set_random_access(False)
myio = larcv.IOManager(0)
for f in filler.pd().io().file_list():
    myio.add_in_file(f)
myio.initialize()

n_evts = 5  # debug override: process only the first five events
for i in xrange(n_evts):
    myio.read_entry(i)
    d = myio.get_data(0)

t = time()
print "Checkpoint 2: Done loading events. Step time: %.1f" % (t - t0)
t0 = t

# Extract data
print 'Step3. Loading data from each event.'
import ROOT
#ROOT.gSystem.Load("libLArCV")
#ROOT.gSystem.Load("libLArCVData")
from ROOT import larcv

o = larcv.IOManager(larcv.IOManager.kBOTH)
o.reset()
o.set_verbosity(0)
o.add_in_file("aho.root")
o.set_out_file("baka.root")
o.initialize()

o.read_entry(0)
o.get_data(larcv.kProductImage2D, "aho")
o.get_data(larcv.kProductImage2D, "boke")
o.save_entry()

o.read_entry(1)
o.save_entry()

o.read_entry(2)
o.save_entry()

o.read_entry(3)
o.save_entry()

o.finalize()
MODEL = None
ANA_OUTPUT_CFG = "ana_out.cfg"

debug = False
for argv in sys.argv:
    if argv == 'debug':
        debug = True
    if argv.find('.caffemodel') >= 0:
        MODEL = argv

proc = larcv.ProcessDriver('OutputProcessDriver')
proc.configure(ANA_OUTPUT_CFG)
proc.override_output_file(ROOTNAME)
proc.initialize()
py_image_maker = proc.process_ptr(proc.process_id("PyImageMaker"))

outman = larcv.IOManager(larcv.IOManager.kWRITE)

net = caffe.Net(proto_cfg.name, MODEL, caffe.TEST)

filler = larcv.ThreadFillerFactory.get_filler("DataFiller")
num_events = filler.get_n_entries()

print
print 'Total number of events:', num_events
print

event_counter = 0
BATCH_CTR = None
current_index = 0
filler.set_next_index(current_index)

while event_counter < num_events: