def event(self):
    """Fill the histograms with the values of the MCParticle collection.

    For every generated tau lepton (PDG code 15) the 4-momentum is boosted
    from the lab to the CMS frame and histogrammed.  The daughters' CMS
    energies and all pairwise daughter invariant masses are filled as well.
    """
    T = Belle2.PCmsLabTransform()
    mcParticles = Belle2.PyStoreArray('MCParticles')
    nMCParticles = mcParticles.getEntries()
    for i in range(nMCParticles):
        mc = mcParticles[i]
        if (mc.getPDG() == 15):
            plab = mc.get4Vector()
            pcms = T.rotateLabToCms() * plab
            h_E.Fill(pcms.E())
            h_px.Fill(pcms.Px())
            h_py.Fill(pcms.Py())
            h_pz.Fill(pcms.Pz())
            # BUG FIX: ``ndau`` was used below but never defined (NameError);
            # it is the number of tau daughters.
            ndau = mc.getNDaughters()
            vec_p4 = []
            for idau in range(int(ndau)):
                dau = mc.getDaughters()[idau]
                p4lab = dau.get4Vector()
                p4cms = T.rotateLabToCms() * p4lab
                vec_p4.append(p4cms)
                h_E_i[idau].Fill(p4cms.E())
            # Fill one histogram per (i, j) daughter pair, i > j.
            ij = 0
            for idau in range(int(ndau)):
                for jdau in range(idau):
                    mij = (vec_p4[idau] + vec_p4[jdau]).M()
                    h_m_ij[ij].Fill(mij)
                    ij = ij + 1
def event(self):
    """Write one tree entry per PXDCluster.

    Each entry records the event identification, the sensor identification,
    the cluster position in local and global coordinates, and the cluster
    size and charge information.
    """
    geo = Belle2.VXD.GeoCache.getInstance()

    # Relation lookups retained from the original implementation; their
    # results are not used below.
    for part in Belle2.PyStoreArray('MCParticles'):
        part.getRelationsTo('MCParticleTrajectorys')

    for cls in Belle2.PyStoreArray('PXDClusters'):
        cls.getRelationsTo('MCParticles')

        # Event identification
        meta = Belle2.PyStoreObj('EventMetaData').obj()
        self.data.exp = meta.getExperiment()
        self.data.run = meta.getRun()
        self.data.evt = meta.getEvent()

        # Sensor identification
        sensor_id = cls.getSensorID()
        self.data.vxd_id = sensor_id.getID()
        self.data.layer = sensor_id.getLayerNumber()
        self.data.ladder = sensor_id.getLadderNumber()
        self.data.sensor = sensor_id.getSensorNumber()

        # More relation lookups retained from the original; unused here.
        cls.getRelationsTo('PXDDigits')
        cls.getRelationsTo('MCParticles')
        cls.getRelationsTo('PXDTrueHits')
        cls.getRelationsTo('PXDSimHits')

        # Transform the local cluster position to global coordinates.
        sensor_info = geo.get(sensor_id)
        local_pos = ROOT.TVector3(cls.getU(), cls.getV(), 0)
        global_pos = sensor_info.pointToGlobal(local_pos)
        self.data.cls_u = local_pos.X()
        self.data.cls_v = local_pos.Y()
        self.data.cls_w = local_pos.Z()
        self.data.cls_x = global_pos.X()
        self.data.cls_y = global_pos.Y()
        self.data.cls_z = global_pos.Z()

        # Cluster size and charge
        self.data.cls_uSize = cls.getUSize()
        self.data.cls_vSize = cls.getVSize()
        self.data.charge = cls.getCharge()
        self.data.seed_charge = cls.getSeedCharge()

        # Fill tree
        self.file.cd()
        self.tree.Fill()
def event(self):
    """ Return True if event is fine, False otherwise """
    # NOTE(review): ``is_ok`` is never set to True anywhere in this method,
    # so the module always reports False -- confirm that this is intended.
    is_ok = False
    meta = Belle2.PyStoreObj('EventMetaData')
    meta.getEvent()  # event number fetched but unused, as in the original
    eklm_digits = Belle2.PyStoreArray('EKLMDigits')
    eklm_hit2ds = Belle2.PyStoreArray('EKLMHit2ds')
    # Fill the multiplicity histograms.
    self.hist_nDigit.Fill(len(eklm_digits))
    self.hist_nHit2d.Fill(len(eklm_hit2ds))
    super(EventInspectorEKLM, self).return_value(is_ok)
def initialize(self):
    """ Initialise module before any events are processed"""
    # Register the EventExtraInfo object that will hold the predictions.
    extra_info = Belle2.PyStoreObj('EventExtraInfo')
    extra_info.registerInDataStore()
    self.e_e_info = extra_info
    # The model is loaded lazily later; deferring the load here is
    # apparently needed to make the module thread safe.
    self.model = None
def event(self):
    """Set the module return value: 1 for a non-random trigger type, else 0."""
    ftsw = Belle2.PyStoreArray('RawFTSWs')
    if not ftsw.isValid():
        b2.B2ERROR('No RawFTSW available - event ignored')
        self.return_value(0)
        return
    # Meaning of this argument is unknown (note carried over from the
    # original author).
    entry = 0
    is_random_trigger = ftsw[0].GetTRGType(entry) == Belle2.TRGSummary.TTYP_RAND
    self.return_value(0 if is_random_trigger else 1)
def store_content(self):
    """
    Store the current content of the store array into the internal list.
    """
    array_names = Belle2.PyStoreArray.list()
    object_names = Belle2.PyStoreObj.list()
    # Arrays are recorded with their entry count; store objects are always
    # recorded with a size of 0.
    entries = [StoreContent(name, len(Belle2.PyStoreArray(name)))
               for name in array_names]
    entries += [StoreContent(name, 0) for name in object_names]
    snapshot = StoreContentList(content=entries,
                                event_number=self.event_number)
    self.store_content_list.append(snapshot)
def initialize(self):
    '''Create a member to access event info StoreArray'''
    self.eventinfo = Belle2.PyStoreObj('EventMetaData')
    # Index levels used for the per-event particle DataFrames.
    self.index_names = ['label', 'evtNum', 'arrayIndex']
    # Per-particle features collected for training, format:
    # (label, DataFrame of event particles, event level vars)
    self.columns = ['PDG', 'mass', 'charge', 'energy', 'prodTime',
                    'x', 'y', 'z', 'px', 'py', 'pz',
                    'nDaughters', 'status', 'motherPDG', 'motherIndex']
    self.events_list = []
    self.decay_str_list = []
    # Try to load the list of event numbers; if the file is absent we will
    # create it ourselves during processing.
    self.create_evtNum_file = False
    try:
        # Eventually want to have series of LFN: [eventNums]
        self.evtNum_arr = np.load(self.evt_num_file).tolist()
    except IOError:  # also covers FileNotFoundError in Python 3
        b2.B2INFO('Event file {} not found, creating'.format(
            self.evt_num_file))
        self.create_evtNum_file = True
        self.evtNum_arr = []
    else:
        b2.B2INFO('Event file {} loaded'.format(self.evt_num_file))
def event(self):
    """Fill the histograms with the values of the MCParticle collection"""
    mc_particles = Belle2.PyStoreArray('MCParticles')
    h_nMCParticles.Fill(mc_particles.getEntries())
    for mc in mc_particles:
        # Only primary generator particles are histogrammed.
        if not mc.hasStatus(Belle2.MCParticle.c_PrimaryParticle):
            continue
        mom = mc.getMomentum()
        four_vec = mc.get4Vector()
        h_momentum.Fill(mom.Mag())
        h_px.Fill(mom.Px())
        h_py.Fill(mom.Py())
        h_pz.Fill(mom.Pz())
        h_E.Fill(four_vec.E())
        h_pt.Fill(mom.Perp())
        # Angles are converted from radians to degrees.
        h_phi.Fill(mom.Phi() / math.pi * 180)
        h_theta.Fill(mom.Theta() / math.pi * 180)
        h_costheta.Fill(math.cos(mom.Theta()))
        h_pdg.Fill(mc.getPDG())
        vtx = mc.getProductionVertex()
        h_vertex.Fill(vtx.X(), vtx.Y())
def event(self):
    '''Return match of event number to input list'''
    mcplist = Belle2.PyStoreArray("MCParticles")
    # Get event number, need for DF index
    evtNum = self.eventinfo.getEvent()
    # parentLFN = self.eventinfo.getParentLFN()  # for pd series
    # Get training label, need for DF index
    useful = True
    # If we already have list of event numbers then only keep requested events
    if not self.create_evtNum_file:
        useful = evtNum in self.evtNum_arr
        # keep_only == -1 keeps everything; otherwise drop events whose
        # label does not match the requested one.
        if self.keep_only != -1 and self.keep_only != useful:
            return
    else:
        # We are building the event-number file: record every event.
        self.evtNum_arr.append(evtNum)
    # Maps (label, event number, array index) -> dict of particle features.
    event_dict = {}
    # Create particle vars for each primary MC particle that passes the
    # status-bit selection.
    for mcp in mcplist:
        if mcp.isPrimaryParticle() and self._check_status_bit(
                mcp.getStatus()):
            # Load particle's data
            arrayIndex = mcp.getArrayIndex()
            four_vec = mcp.get4Vector()
            prod_vec = mcp.getProductionVertex()
            mother = mcp.getMother()
            # Default mother info for particles without a mother.
            motherPDG = 0
            motherArrayIndex = 0
            if mother:
                motherPDG = mother.getPDG()
                motherArrayIndex = mother.getArrayIndex()
            # Append to dict for making dataframe later
            event_dict[(useful, evtNum, arrayIndex)] = {
                'PDG': mcp.getPDG(),
                'mass': mcp.getMass(),
                'charge': mcp.getCharge(),
                'energy': mcp.getEnergy(),
                'prodTime': mcp.getProductionTime(),
                'x': prod_vec.x(),
                'y': prod_vec.y(),
                'z': prod_vec.z(),
                'px': four_vec.Px(),
                'py': four_vec.Py(),
                'pz': four_vec.Pz(),
                'nDaughters': mcp.getNDaughters(),
                'status': mcp.getStatus(),
                'motherPDG': motherPDG,
                'motherIndex': motherArrayIndex,
            }
    # Create event wide feedforward vars
    # First particle is always the top of the decay chain
    if len(event_dict) > 0:
        MCdecay_string = self._build_decay_string(mcplist[0])
        # data_dict['decay_input'] = self.cap.preproc_single_decay_string(MCdecay_string, self.LSTM_flag)
        decay_str_df = pd.DataFrame(
            data=[{
                'label': useful,
                'decay_str': MCdecay_string
            }],
            index=[evtNum],
        )
        self.decay_str_list.append(decay_str_df)
    # If I tokenize the decay string here then root_pandas should work
    # NOTE(review): the lines below run unconditionally; with an empty
    # event_dict the index.names assignment would fail -- presumably every
    # event has at least one selected particle.  TODO confirm.
    event_df = pd.DataFrame.from_dict(event_dict, orient='index')
    event_df.index.names = self.index_names
    self.events_list.append(event_df)
import ROOT
from ROOT import Belle2

# Build a BeamSpot payload with a nominal interaction point, its
# uncertainty, and the luminous-region size, then store it in the
# conditions database for all runs.
payload = Belle2.BeamSpot()

# Nominal IP position (cm) and its covariance.
ip_position = ROOT.TVector3(-490e-4, 170e-4, -250e-4)
ip_covariance = ROOT.TMatrixDSym(3)
ip_covariance[0, 0] = (0.3e-4)**2
ip_covariance[1, 1] = (0.3e-4)**2
ip_covariance[2, 2] = (4e-4)**2

# Beam-spot size covariance (diagonal, cm^2).
size_covariance = ROOT.TMatrixDSym(3)
size_covariance[0, 0] = (10e-4)**2
size_covariance[1, 1] = (2e-4)**2
size_covariance[2, 2] = (250e-4)**2

payload.setIP(ip_position, ip_covariance)
payload.setSizeCovMatrix(size_covariance)

# Valid for every experiment and run.
validity = Belle2.IntervalOfValidity(0, 0, -1, -1)
Belle2.Database.Instance().storeData("BeamSpot", payload, validity)
def event(self):
    """ Return True if event is fine, False otherwise """
    # NOTE(review): someOK is never set to True in this method, so the
    # module always returns False -- confirm this is intended.
    someOK = False
    EventMetaData = Belle2.PyStoreObj('EventMetaData')
    event = EventMetaData.getEvent()
    rawklms = Belle2.PyStoreArray('RawKLMs')
    digits = Belle2.PyStoreArray('EKLMDigits')
    hit2ds = Belle2.PyStoreArray('EKLMHit2ds')
    #klmdigi = Belle2.PyStoreArray('KLMDigitEventInfo')
    #eklmids = Belle2.PyStoreArray('EKLMHitBases')
    # Multiplicity histograms.
    self.hist_nDigit.Fill(len(digits))
    self.hist_nHit2d.Fill(len(hit2ds))
    # Map each raw-KLM copper to a node identifier and histogram it.
    for copper in range(0, len(rawklms)):
        rawklm = rawklms[copper]
        if rawklm.GetNumEntries() != 1:
            print('##0 Event', event, 'copper', copper, ' getNumEntries=',
                  rawklm.GetNumEntries())
            continue
        # Node IDs are offset by BKLM_ID; EKLM nodes are remapped past 4.
        nodeID = rawklm.GetNodeID(0) - self.BKLM_ID
        if nodeID >= self.EKLM_ID - self.BKLM_ID:
            nodeID = nodeID - (self.EKLM_ID - self.BKLM_ID) + 4
        self.hist_rawKLMnodeID.Fill(nodeID, copper)
        if (nodeID < 0) or (nodeID > 4):  # skip EKLM nodes
            continue
    # Per-digit timing and sector-occupancy histograms.
    for digit in digits:
        sector = digit.getSector()
        endcap = digit.getEndcap()
        time = digit.getTime()
        ctime = digit.getCTime()
        tdc = digit.getTDC()
        #klmdigi = digit.getRelatedTo('KLMDigitEventInfo')
        #triggtime = digit.getRelativeCTime()
        #print (ctime, tdc)#, triggtime)
        #print(time)
        self.hist_time.Fill(time)
        self.hist_ctime.Fill(ctime)
        self.hist_tdc.Fill(tdc)
        # endcap == 1 is treated as backward, anything else as forward.
        if (endcap == 1):
            self.hist_BackwardSectorOccupancy.Fill(sector)
            self.hist_BackwardSectorbyctime.Fill(sector, ctime)
        else:
            self.hist_ForwardSectorOccupancy.Fill(sector)
            self.hist_ForwardSectorbyctime.Fill(sector, ctime)
        self.hist_EndcapOccupancy.Fill(endcap)
    # Per-2D-hit occupancy histograms, split by endcap and layer.
    for hit2d in hit2ds:
        sector = hit2d.getSector()
        endcap = hit2d.getEndcap()
        layer = hit2d.getLayer()
        gx = hit2d.getPositionX()
        gy = hit2d.getPositionY()
        gz = hit2d.getPositionZ()
        if (endcap == 1):
            self.hist_occupancyBackwardXY.Fill(gx, gy)
            self.hist_LayeroccupancyBackwardRZ.Fill(layer, gz)
            self.hist_LayeroccupancyBackward.Fill(layer)
            self.hist_occupancyBackwardXYPerLayer[layer - 1].Fill(gx, gy)
        else:
            self.hist_occupancyForwardXY.Fill(gx, gy)
            self.hist_LayeroccupancyForwardRZ.Fill(layer, gz)
            self.hist_LayeroccupancyForward.Fill(layer)
            self.hist_occupancyForwardXYPerLayer[layer - 1].Fill(gx, gy)
    super(EventInspectorEKLM, self).return_value(someOK)
def endRun(self):
    """Print the current run number when the run ends."""
    meta = Belle2.PyStoreObj('EventMetaData')
    print('endRun', meta.getRun())
def event(self):
    """Return match of event number to input list"""
    # Lazily load the keras model on the first event.
    if self.model is None:
        from keras.models import load_model
        self.model = load_model(self.model_file)
        # Required to be initialised before multithreading
        self.model._make_predict_function()
        # And if needed infer the name of the extra info var
        if self.extra_info_var is None:
            self.extra_info_var = self.model.name
            b2.B2INFO('EventExtraInfo variable set to: {}'.format(
                self.extra_info_var))
        b2.B2INFO('Initialised model')
    # Need to create the eventExtraInfo entry for each event
    if self.extra_info_var:
        self.e_e_info.create()
    mcplist = Belle2.PyStoreArray("MCParticles")
    # Can get away with list because we don't use arrayIndex
    event_list = []
    # Inputs to model.predict, keyed by model input name.
    data_dict = {}
    # Particle-level inputs are only needed for these model types.
    if self.model_type in [
            'combined-LSTM', 'combined-wideCNN', 'particles'
    ]:
        # Create particle vars
        for mcp in mcplist:
            if mcp.isPrimaryParticle():
                # Check mc particle is useable
                if not self.cap.check_status_bit(mcp.getStatus()):
                    continue
                four_vec = mcp.get4Vector()
                prod_vec = mcp.getProductionVertex()
                mother = mcp.getMother()
                # 0 marks particles without a mother.
                motherPDG = 0
                if mother:
                    motherPDG = mother.getPDG()
                event_list.append({
                    'PDG': mcp.getPDG(),
                    # 'mass': mcp.getMass(),
                    'charge': mcp.getCharge(),
                    'energy': mcp.getEnergy(),
                    'prodTime': mcp.getProductionTime(),
                    'x': prod_vec.x(),
                    'y': prod_vec.y(),
                    'z': prod_vec.z(),
                    'px': four_vec.Px(),
                    'py': four_vec.Py(),
                    'pz': four_vec.Pz(),
                    'motherPDG': motherPDG,
                })
        # Convert to a dataframe for preprocessing
        event_df = pd.DataFrame(event_list)
        # Perform event preprocessing and get back the numpy array of
        # particles
        data_dict['particle_input'], data_dict['pdg_input'], data_dict[
            'mother_pdg_input'] = self.cap.preproc_single_whole_decay(
                event_df)
        # Need to do reshaping here I think
        # x_arr = np.reshape(x_arr, (1, x_arr.shape[0], x_arr.shape[1]))
        # pdg_arr = np.reshape(pdg_arr, (1, pdg_arr.shape[0]))
        # mother_pdg_arr = np.reshape(mother_pdg_arr, (1, mother_pdg_arr.shape[0]))
    # Build decay string for model types that consume it.
    if self.model_type in [
            'combined-LSTM', 'combined-wideCNN', 'decstr-LSTM',
            'decstr-wideCNN'
    ]:
        # First particle is always the top of the decay chain
        if len(mcplist) > 0:
            MCdecay_string = self.cap.build_decay_string(mcplist[0])
            data_dict['decay_input'] = self.cap.preproc_single_decay_string(
                MCdecay_string, self.LSTM_flag)
    # Outputs pass probability
    pred = self.model.predict(data_dict)
    # Need to set this to some debug mode
    # b2.B2INFO('Pass probability:\t{}'.format(pred[0][0]))
    # b2.B2INFO('Passes threshold:\t{}'.format(int(pred[0][0] >= self.threshold)))
    # Save the pass probability to EventExtraInfo
    if self.extra_info_var:
        self.e_e_info.addExtraInfo(self.extra_info_var, pred[0][0])
    # Module returns bool of whether prediciton passes threshold for use in
    # basf2 path flow control
    self.return_value(int(pred[0][0] >= self.threshold))
def event(self):
    """Fill the cluster tree for triggered events.

    Only events where GDL PSNM bit 25 or 26 of word 0 fired are processed;
    for those, one tree entry is written per PXDCluster with event/sensor
    identification, local and global cluster positions, and cluster
    size/charge information.
    """
    geoCache = Belle2.VXD.GeoCache.getInstance()
    # BUG FIX: the original read the trigger summary into ``trg`` but then
    # used the undefined name ``trgSum`` (NameError) and the misspelled
    # ``gld_26``.  TRGSummary is a single datastore object, so PyStoreObj is
    # used here.  NOTE(review): the original compared the masked word with
    # 25/26, which is equivalent to a nonzero test since the masks only
    # cover bits 25 and 26 -- confirm these are the intended trigger bits.
    trgSum = Belle2.PyStoreObj('TRGSummary')
    psnm_word = trgSum.getPsnmBits(0)
    gdl_25 = (psnm_word & 0x6000000) != 0
    gdl_26 = (psnm_word & 0x4000000) != 0
    if gdl_25 or gdl_26:
        pxd_clusters = Belle2.PyStoreArray('PXDClusters')
        for cluster in pxd_clusters:
            # Event identification
            meta = Belle2.PyStoreObj('EventMetaData').obj()
            self.data.exp = meta.getExperiment()
            self.data.run = meta.getRun()
            self.data.evt = meta.getEvent()
            # Sensor identification
            vxd_id = cluster.getSensorID()
            self.data.vxd_id = vxd_id.getID()
            self.data.layer = vxd_id.getLayerNumber()
            self.data.ladder = vxd_id.getLadderNumber()
            self.data.sensor = vxd_id.getSensorNumber()
            # Relation lookups to digits / MC particles / true hits / sim
            # hits from the original were unused and have been removed.
            # Transform the local cluster position to global coordinates.
            info = geoCache.get(vxd_id)
            r_local = ROOT.TVector3(cluster.getU(), cluster.getV(), 0)
            r_global = info.pointToGlobal(r_local)
            self.data.cls_u = r_local.X()
            self.data.cls_v = r_local.Y()
            self.data.cls_w = r_local.Z()
            self.data.cls_x = r_global.X()
            self.data.cls_y = r_global.Y()
            self.data.cls_z = r_global.Z()
            # Cluster size and charge
            self.data.cls_uSize = cluster.getUSize()
            self.data.cls_vSize = cluster.getVSize()
            self.data.charge = cluster.getCharge()
            self.data.seed_charge = cluster.getSeedCharge()
            # Fill tree
            self.file.cd()
            self.tree.Fill()