def record_aviv_reco_jets(is_signal, input_list, events_to_read, event_data_dump):
    tree_name = 'Nominal'
    branches = [
        'eventWeight',
        ('truth_particles', ['tpartpdgID', 'tpartstatus', 'tpartpT', 'tparteta', 'tpartphi', 'tpartm']),
        ('reco_jets', ['tj0pT', 'j0_isPU', 'j0_QGTagger', 'j0_JVT', 'j0_fJVT_Tight',
                       'j0pT', 'j0eta', 'j0phi', 'j0m'])
    ]

    for event in event_iterator(input_list, tree_name, branches, events_to_read):
        # Loop over reco jets and convert them into generic acorn_jet objects
        #truth_particles = [ tp.copy() for tp in event['truth_particles'] ]
        particle_list = []
        for truth_particle in event['truth_particles']:
            if truth_particle['tpartpdgID'] == autils.PDGID['higgs']: continue
            v = TLorentzVector.from_ptetaphim(truth_particle['tpartpT'], truth_particle['tparteta'],
                                              truth_particle['tpartphi'], truth_particle['tpartm'])
            particle_list.append((v, truth_particle['tpartpdgID'], truth_particle['tpartstatus']))

        recorded_jets = []
        for rj in event['reco_jets']:
            v = TLorentzVector.from_ptetaphim(rj['j0pT'], rj['j0eta'], rj['j0phi'], rj['j0m'])
            pdgid = match_jet(v, particle_list)
            #is_pileup = rj['j0_isPU']
            is_pileup = rj['tj0pT'] < 0  # negative matched truth-jet pT marks the jet as pileup (used in place of the commented-out j0_isPU flag)
            new_jet = acorn_jet(v, pdgid, is_pileup,
                                rj['j0_JVT'], rj['j0_fJVT_Tight'], rj['j0_QGTagger'], None, [])
            recorded_jets.append(new_jet)

        # Categorize event, and then either discard the event or perform tagging on it
        for category in event_data_dump.values():
            category.add_event(recorded_jets, is_signal, event['eventWeight'])
def event_fails_photon_cut(truth_particles):
    photon_pts = []
    photon_4vector = TLorentzVector.from_ptetaphim(0, 0, 0, 0)
    for tp in truth_particles:
        if tp['tpartpdgID'] != autils.PDG['photon']: continue
        if tp['tpartstatus'] != autils.Status['photon_out']: continue
        photon_pts.append(tp['tpartpT'])
        v = TLorentzVector.from_ptetaphim(tp['tpartpT'], tp['tparteta'], tp['tpartphi'], tp['tpartm'])
        photon_4vector += v

    if len(photon_pts) != 2: return True

    mgg = photon_4vector.mass
    photon_pts.sort(reverse=True)
    if photon_pts[0] < 0.35*mgg or photon_pts[1] < 0.25*mgg: return True

    return False
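# Hedged usage sketch (not part of the original source): event_fails_photon_cut keeps only
# events with exactly two outgoing photons whose leading and subleading pT exceed 0.35*mgg
# and 0.25*mgg respectively. The dictionaries below are invented for illustration and reuse
# the same branch names as above; the concrete PDG/status values are whatever autils defines.
example_photons = [
    {'tpartpdgID': autils.PDG['photon'], 'tpartstatus': autils.Status['photon_out'],
     'tpartpT': 70.0, 'tparteta': 0.3, 'tpartphi': 1.0, 'tpartm': 0.0},
    {'tpartpdgID': autils.PDG['photon'], 'tpartstatus': autils.Status['photon_out'],
     'tpartpT': 45.0, 'tparteta': -0.8, 'tpartphi': -2.0, 'tpartm': 0.0},
]
# mgg is roughly 130 here, so both relative-pT thresholds are satisfied
print(event_fails_photon_cut(example_photons))  # expected: False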
def record_cmilkeV1_truth_jets(is_signal, input_list, events_to_read, event_data_dump):
    tree_name = 'ntuple'
    branches = [
        'EventWeight',
        ('truth_jets', ['TruthJetPt', 'TruthJetEta', 'TruthJetPhi', 'TruthJetM', 'TruthJetID'])
    ]

    for event in event_iterator(input_list, tree_name, branches, events_to_read):
        # Loop over truth jets and convert them into generic acorn_jet objects
        recorded_jets = []
        for truth_jet in event['truth_jets']:
            v = TLorentzVector.from_ptetaphim(
                truth_jet['TruthJetPt'], truth_jet['TruthJetEta'],
                truth_jet['TruthJetPhi'], truth_jet['TruthJetM'])
            pdgid = truth_jet['TruthJetID']
            # Create jet object storing the essential aspects of the ntuple truth jet,
            # faking some of the data normally associated with reco jets
            new_jet = acorn_jet(v, pdgid, False, True, True, -1)
            recorded_jets.append(new_jet)

        # Categorize event, and then either discard the event or perform tagging on it
        for category in event_data_dump.values():
            category.add_event(recorded_jets, is_signal, event['EventWeight'])
def __init__(self, event, selections):
    jet_list = event.jets
    total_4vector = TLorentzVector(0, 0, 0, 0)
    for jet in jet_list:
        total_4vector += jet.vector
    mjjj = total_4vector.mass
    self.discriminant = mjjj
def load_cmilke(event_generator, input_type, validation_data):
    for event in event_generator:
        validation_data['EventWeight'][input_type].append(event['EventWeight'])
        init_jet_vectors = []
        for reco_jet in event['reco_jets']:
            if not reco_jet['JetJVT']: continue
            if reco_jet['JetPt_calib'] < 20: continue
            init_jet_vectors.append(TLorentzVector.from_ptetaphim(
                reco_jet['JetPt_calib'], reco_jet['JetEta_calib'],
                reco_jet['JetPhi_calib'], reco_jet['JetM_calib']))

        #jet_vectors = [ v for v in init_jet_vectors if v.pt > 30 and abs(v.eta) < 4 ]
        jet_vectors = init_jet_vectors
        if len(jet_vectors) < 1: continue
        if jet_vectors[0].pt < 60: continue
        #if len(jet_vectors) > 1 and jet_vectors[1].pt < 60: continue
        #if len(jet_vectors) == 3:
        for v in jet_vectors:
            validation_data['JetPt_calib'][input_type].append(v.pt)
            validation_data['JetEta_calib'][input_type].append(v.eta)
            validation_data['JetPhi_calib'][input_type].append(v.phi)
            validation_data['JetM_calib'][input_type].append(v.mass)
def load_aviv(event_generator, input_type, validation_data):
    for event in event_generator:
        validation_data['eventWeight'][input_type].append(event['eventWeight'])
        truth_particles = [tp.copy() for tp in event['truth_particles']]
        init_jet_vectors = []
        for truth_jet in event['truth_jets']:
            v = TLorentzVector.from_ptetaphim(truth_jet['truthjpT'], truth_jet['truthjeta'],
                                              truth_jet['truthjphi'], truth_jet['truthjm'])
            pdgid = match_aviv_reco_jet3(v, truth_particles)
            if pdgid == autils.PDGID['photon']: continue
            init_jet_vectors.append(v)

        jet_vectors = [v for v in init_jet_vectors if v.pt > 20 and abs(v.eta) < 4]
        if len(jet_vectors) == 3:
            for v in jet_vectors:
                validation_data['truthjpT'][input_type].append(v.pt)
                validation_data['truthjeta'][input_type].append(v.eta)
                validation_data['truthjphi'][input_type].append(v.phi)
                validation_data['truthjm'][input_type].append(v.mass)
            load_extra(jet_vectors, input_type, validation_data)
def record_aviv_truth_jets(is_signal, input_list, events_to_read, event_data_dump):
    tree_name = 'Nominal'
    branches = [
        'eventWeight',
        ('truth_particles', ['tpartpdgID', 'tpartstatus', 'tpartpT', 'tparteta', 'tpartphi', 'tpartm']),
        ('truth_jets', ['truthjpT', 'truthjeta', 'truthjphi', 'truthjm'])
    ]

    for event in event_iterator(input_list, tree_name, branches, events_to_read):
        # Loop over truth jets and convert them into generic acorn_jet objects
        truth_particles = [tp.copy() for tp in event['truth_particles']]
        recorded_jets = []
        for tj in event['truth_jets']:
            v = TLorentzVector.from_ptetaphim(tj['truthjpT'], tj['truthjeta'], tj['truthjphi'], tj['truthjm'])
            pdgid = match_aviv_reco_jet(v, truth_particles)
            # Create jet object storing the essential aspects of the ntuple truth jet,
            # faking some of the data normally associated with reco jets
            new_jet = acorn_jet(v, pdgid, False, True, True, -1, None, [])
            recorded_jets.append(new_jet)

        # Categorize event, and then either discard the event or perform tagging on it
        for category in event_data_dump.values():
            category.add_event(recorded_jets, is_signal, event['eventWeight'])
def smart_total_invariant_mass(vectors):
    num_candidates = len(vectors)
    mjNs = []
    for num_jets in range(2, num_candidates + 1):
        for vec_collection in itertools.combinations(vectors, num_jets):
            total_vector = TLorentzVector(0, 0, 0, 0)
            for vec in vec_collection:
                total_vector += vec
            mjNs.append(total_vector.mass)
    return max(mjNs)
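# Hedged usage sketch (not part of the original source): smart_total_invariant_mass scans every
# 2-jet, 3-jet, ..., N-jet combination and returns the largest invariant mass found. The vectors
# below are invented placeholders; note that with fewer than two input vectors max() would be
# called on an empty list and raise a ValueError.
example_vectors = [
    TLorentzVector.from_ptetaphim(120.0, 0.5, 0.1, 10.0),
    TLorentzVector.from_ptetaphim(80.0, -1.2, 2.8, 8.0),
    TLorentzVector.from_ptetaphim(45.0, 2.1, -1.5, 6.0),
]
print(smart_total_invariant_mass(example_vectors))  # largest mjj/mjjj over all combinations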
def get_pdgID(vector_to_match, truth_particles):
    # Return the pdgID of the first truth particle (in list order) within dR < 0.3 of the jet,
    # or -1 if nothing matches. Photons must carry the 'photon_out' status, everything else
    # the generic 'outgoing' status.
    for tp in truth_particles:
        if tp['tpartpdgID'] == autils.PDG['photon']:
            if tp['tpartstatus'] != autils.Status['photon_out']: continue
        elif tp['tpartstatus'] != autils.Status['outgoing']: continue

        truth_vec = TLorentzVector.from_ptetaphim(tp['tpartpT'], tp['tparteta'], tp['tpartphi'], tp['tpartm'])
        deltaR = vector_to_match.delta_r(truth_vec)
        if deltaR < 0.3: return tp['tpartpdgID']
    return -1
def get_pdgID_raw(vector_to_match, truth_particles):
    for tp in truth_particles:
        pt, eta, phi, m, pdgid, status = tp
        if pdgid == autils.PDG['photon']:
            if status != autils.Status['photon_out']: continue
        elif status != autils.Status['outgoing']: continue

        truth_vec = TLorentzVector.from_ptetaphim(pt, eta, phi, m)
        deltaR = vector_to_match.delta_r(truth_vec)
        if deltaR < 0.3: return pdgid
    return -1
def make_data_tuple(event):
    vector_list = []
    for jet in event['jets']:
        vec = LV.from_ptetaphie(
            jet['vbf_candidates_pT'], jet['vbf_candidates_eta'],
            jet['vbf_candidates_phi'], jet['vbf_candidates_E'])
        vector_list.append(vec)
    return prepare_tuple(vector_list)
def count_tjets_with_truthj(event, truth_particles):
    num_tjets = 0
    num_tquarks = 0
    for tj in autils.jet_iterator(event['truthj']):
        v = TLorentzVector.from_ptetaphim(tj['truthjpT'], tj['truthjeta'], tj['truthjphi'], tj['truthjm'])
        pdg = match_jet(v, truth_particles)
        if pdg == autils.PDG['photon']: continue
        if pdg < 0: continue
        if tj['truthjpT'] < _pt_cut: continue
        num_tjets += 1
        if pdg in autils.PDG['quarks']:
            num_tquarks += 1
    return num_tjets, num_tquarks
def _rec_build(nid, node):
    branches = [node.harder, node.softer] if DGLGraphDatasetLund.fill_secondary else [node.harder]
    for branch in branches:
        if branch is None or branch.lundCoord is None:
            # stop when reaching the leaf nodes
            # we do not add the leaf nodes to the graph/tree as they do not have Lund coordinates
            continue
        cid = g.number_of_nodes()
        if DGLGraphDatasetLund.node_coordinates == 'lund':
            spatialCoord = branch.lundCoord.state()[:2]
        else:
            node_p4 = TLorentzVector(*branch.node)
            spatialCoord = np.array([delta_eta_reflect(node_p4, jet_p4), node_p4.delta_phi(jet_p4)],
                                    dtype='float32')
        g.add_node(cid, coordinates=spatialCoord, features=branch.lundCoord.state())
        g.add_edge(cid, nid)
        _rec_build(cid, branch)
def count_rjets(event, truth_particles):
    num_rjets = 0
    num_rquarks = 0
    for rj in autils.jet_iterator(event['j0']):
        v = TLorentzVector.from_ptetaphim(rj['j0pT'], rj['j0eta'], rj['j0phi'], rj['j0m'])
        pdg = match_jet(v, truth_particles)
        if pdg == autils.PDG['photon']: continue
        #if rj['j0_isTightPhoton']: continue
        if rj['j0pT'] < _pt_cut: continue
        if not (rj['j0_JVT'] and rj['j0_fJVT_Tight']): continue
        num_rjets += 1
        if pdg in autils.PDG['quarks']:
            num_rquarks += 1
    return num_rjets, num_rquarks
def _build_tree(self, root):
    g = nx.Graph()
    jet_p4 = TLorentzVector(*root.node)

    def _rec_build(nid, node):
        branches = [node.harder, node.softer] if DGLGraphDatasetLund.fill_secondary else [node.harder]
        for branch in branches:
            if branch is None or branch.lundCoord is None:
                # stop when reaching the leaf nodes
                # we do not add the leaf nodes to the graph/tree as they do not have Lund coordinates
                continue
            cid = g.number_of_nodes()
            if DGLGraphDatasetLund.node_coordinates == 'lund':
                spatialCoord = branch.lundCoord.state()[:2]
            else:
                node_p4 = TLorentzVector(*branch.node)
                spatialCoord = np.array([delta_eta_reflect(node_p4, jet_p4), node_p4.delta_phi(jet_p4)],
                                        dtype='float32')
            g.add_node(cid, coordinates=spatialCoord, features=branch.lundCoord.state())
            g.add_edge(cid, nid)
            _rec_build(cid, branch)

    # add root
    if root.lundCoord is not None:
        if DGLGraphDatasetLund.node_coordinates == 'lund':
            spatialCoord = root.lundCoord.state()[:2]
        else:
            spatialCoord = np.zeros(2, dtype='float32')
        g.add_node(0, coordinates=spatialCoord, features=root.lundCoord.state())
        _rec_build(0, root)
    else:
        # when a jet has only one particle (?)
        g.add_node(0, coordinates=np.zeros(2, dtype='float32'),
                   features=np.zeros(LundCoordinates.dimension, dtype='float32'))
    ret = dgl.from_networkx(g, node_attrs=['coordinates', 'features'])
    # print(ret.number_of_nodes())
    return ret
def prepare_events(cls, event_selections_tuple_list, label_list):
    organized_data = {key: [] for key in cls.input_keys}
    for event, selections in event_selections_tuple_list:
        E_list = []
        pt_list = []
        eta_list = []
        phi_list = []
        Deta_list = []
        mjj_list = []
        total_vector = TLorentzVector(0, 0, 0, 0)
        for jet_index, jet in enumerate(event.jets):
            E_list.append(jet.vector.energy)
            pt_list.append(jet.vector.pt)
            eta_list.append(jet.vector.eta)
            phi_list.append(jet.vector.phi)
            for other_jet in event.jets[jet_index + 1:]:
                Deta = abs(jet.vector.eta - other_jet.vector.eta)
                mjj = (jet.vector + other_jet.vector).mass
                Deta_list.append(Deta)
                mjj_list.append(mjj)
            total_vector += jet.vector
        mjj_list.append(total_vector.mass)

        organized_data['E'].append(E_list)
        organized_data['pt'].append(pt_list)
        organized_data['eta'].append(eta_list)
        organized_data['phi'].append(phi_list)
        organized_data['Deta'].append(Deta_list)
        organized_data['mjj'].append(mjj_list)
        if label_list is not None:
            label_list.append(cls.get_label(event))

    prepared_data = {key: numpy.array(val) for key, val in organized_data.items()}
    return prepared_data
def vector(self):
    return TLorentzVector.from_ptetaphim(*self.floats[:4])
def draw(input_type):
    correct_pt = 0
    correct_mjj = 0
    contains_leading_quark_pt = 0
    contains_leading_quark_mjj = 0
    tagged_pt = 0
    tagged_mjj = 0
    events_with_3_jets = 0
    pt_based_jets = 0
    mjj_based_jets = 0

    ntuple_type = 'aviv'  #sys.argv[1]
    branch_list = _branch_options[ntuple_type]
    tree_name = _tree_options[ntuple_type]
    input_list = _input_type_options[ntuple_type][input_type]

    for event in event_iterator(input_list, tree_name, branch_list, _Nevents):
        particle_list = []
        for truth_particle in event['truth_particles']:
            if truth_particle['tpartpdgID'] == autils.PDGID['higgs']: continue
            v = TLorentzVector.from_ptetaphim(truth_particle['tpartpT'], truth_particle['tparteta'],
                                              truth_particle['tpartphi'], truth_particle['tpartm'])
            particle_list.append((v, truth_particle['tpartpdgID'], truth_particle['tpartstatus']))
            #if truth_particle['tpartpdgID'] == 22 and truth_particle['tpartstatus'] not in (1,23): print(truth_particle['tpartstatus'])

        jet_list = []
        #for jet in event['truth_jets']:
        num_dual_matched = 0
        for jet in event['reco_jets']:
            if not (jet['j0_JVT'] and jet['j0_fJVT_Tight']): continue
            #v = TLorentzVector.from_ptetaphim(jet['truthjpT'], jet['truthjeta'], jet['truthjphi'], jet['truthjm'])
            v = TLorentzVector.from_ptetaphim(jet['j0pT'], jet['j0eta'], jet['j0phi'], jet['j0m'])
            if v.pt < 30 or abs(v.eta) > 4: continue
            pdgid, dual_matched = match_jet(v, particle_list)
            if pdgid == autils.PDGID['photon']: continue
            jet_list.append((v, pdgid in autils.PDGID['quarks']))

        if not passes_cuts(input_type, jet_list): continue
        quark_jets = [jet for jet in jet_list if jet[1]]
        #print(jet_list)
        events_with_3_jets += 1

        # Leading Pt
        pt_chosen_jets = jet_list[:2]
        if input_type == 'sig':
            correct_jets, has_leading_pt = check_if_signature_jets(pt_chosen_jets, quark_jets[0])
            correct_pt += int(correct_jets)
            contains_leading_quark_pt += int(has_leading_pt)
        if (pt_chosen_jets[0][0] + pt_chosen_jets[1][0]).mass > 500:
            pt_based_jets += 1
            #tagged_pt += int( (pt_chosen_jets[0][0] + pt_chosen_jets[1][0]).mass > 252 )
            tagged_pt += int((pt_chosen_jets[0][0] + pt_chosen_jets[1][0]).mass > 700)

        # Maximized Mjj
        mass_pairs = sorted([((i[0] + j[0]).mass, [i, j]) for i, j in combinations(jet_list, 2)],
                            reverse=True, key=lambda t: t[0])
        mjj_chosen_jets = mass_pairs[0][1]
        if input_type == 'sig':
            correct_jets, has_leading_pt = check_if_signature_jets(mjj_chosen_jets, quark_jets[0])
            correct_mjj += int(correct_jets)
            contains_leading_quark_mjj += int(has_leading_pt)
        if (mjj_chosen_jets[0][0] + mjj_chosen_jets[1][0]).mass > 500:
            mjj_based_jets += 1
            #tagged_mjj += int( (mjj_chosen_jets[0][0] + mjj_chosen_jets[1][0]).mass > 310 )
            tagged_mjj += int((mjj_chosen_jets[0][0] + mjj_chosen_jets[1][0]).mass > 700)

    num_jets = events_with_3_jets
    print()
    print(num_jets)
    if input_type == 'sig':
        print('{}, {}, {:.02}, {:.02} | {:.02}, {:.02}'.format(
            correct_pt, correct_mjj, correct_pt / num_jets, correct_mjj / num_jets,
            contains_leading_quark_pt / num_jets, contains_leading_quark_mjj / num_jets))
    print('{}, {} | {}, {} | {:.02}, {:.02}'.format(
        tagged_pt, pt_based_jets, tagged_mjj, mjj_based_jets,
        tagged_pt / pt_based_jets, tagged_mjj / mjj_based_jets))
                                 marker='.', color='green')
#_plots['rocs'].add_marker('mjjmax', 1000, annotation='1000 GeV', marker='.', color='blue')
#_plots['rocs_2jet'].add_marker('mjjSL', 735, annotation='735 GeV', marker='*', color='red')
#_plots['rocs_3jet'].add_marker('mjjSL', 735, annotation='735 GeV', marker='*', color='red')

_output_branches = [
    'run_number', 'event_number', 'mc_sf', 'ntag', 'njets', 'n_vbf_candidates',
    ('jets', ['vbf_candidates_E', 'vbf_candidates_pT', 'vbf_candidates_eta', 'vbf_candidates_phi'])
]
_output_branches += [f'FoxWolfram{i}' for i in _fw_moments]

make_reco_vector = lambda jet: LV.from_ptetaphie(
    jet['resolvedJets_pt'], jet['resolvedJets_eta'], jet['resolvedJets_phi'], jet['resolvedJets_E'])
make_nano_vector = lambda jet: LV.from_ptetaphie(
    jet['vbf_candidates_pT'], jet['vbf_candidates_eta'], jet['vbf_candidates_phi'], jet['vbf_candidates_E'])


def process_events(events, skip_num=0, bgd=False, cvv_value=-1):
    basic_efficiency_count = [0, 0, 0]
    num_jets = [0] * 20
    num_shared = 0
    num_not_shared = 0
    num_pt_matched = 0
    num_pt_not_matched = 0
    num_negative_weighted = 0
    for event_index, event in enumerate(events):
        if event_index < skip_num: continue
        weight = event['mc_sf'][0]
def total_invariant_mass(event):
    total_vector = TLorentzVector(0, 0, 0, 0)
    for jet in event.jets:
        total_vector += jet.vector()
    return total_vector.mass
    'nresolvedJets',
    ('resolved_jets', [
        'resolvedJets_pt', 'resolvedJets_phi', 'resolvedJets_eta', 'resolvedJets_E',  #resolved pt in GeV
        'resolvedJets_HadronConeExclTruthLabelID', 'resolvedJets_is_DL1r_FixedCutBEff_77'
    ])
]

_output_branches = ['event_number']

make_reco_vector = lambda jet: LV.from_ptetaphie(
    jet['resolvedJets_pt'], jet['resolvedJets_eta'], jet['resolvedJets_phi'], jet['resolvedJets_E'])


def get_vbf_jet_info(non_b_tagged_jets):
    vbf_jet_info = {'x': [], 'y': [], 'c': []}
    if len(non_b_tagged_jets) > 1:
        mjj_pair = max([((pair[0] + pair[1]).mass, pair)
                        for pair in itertools.combinations(non_b_tagged_jets, 2)])[1]
        color = 'white' if abs(mjj_pair[0].eta - mjj_pair[1].eta) > 3 else 'black'
        vbf_jet_info['x'] = [mjj_pair[0].eta, mjj_pair[1].eta]
        vbf_jet_info['y'] = [mjj_pair[0].phi, mjj_pair[1].phi]
        vbf_jet_info['c'] = [color, color]
    return vbf_jet_info
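# Hedged usage sketch (not part of the original source): get_vbf_jet_info picks the
# non-b-tagged pair with the largest mjj and reports its (eta, phi) coordinates, coloured
# white when the pair is well separated (|Delta eta| > 3) and black otherwise. The two jets
# below are invented placeholders built with the same LV.from_ptetaphie call used above.
example_jets = [
    LV.from_ptetaphie(110.0, 2.6, 0.4, 750.0),
    LV.from_ptetaphie(70.0, -2.2, 2.8, 330.0),
]
print(get_vbf_jet_info(example_jets))
# -> {'x': [2.6, -2.2], 'y': [0.4, 2.8], 'c': ['white', 'white']} since |Delta eta| = 4.8 > 3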
def total_invariant_mass(vectors):
    total_vector = TLorentzVector(0, 0, 0, 0)
    for vec in vectors:
        total_vector += vec
    return total_vector.mass
def draw(input_type):
    correct_pt = 0
    correct_mjj = 0
    tagged_pt = 0
    tagged_mjj = 0
    events_with_3_jets = 0

    ntuple_type = 'aviv'  #sys.argv[1]
    branch_list = _branch_options[ntuple_type]
    tree_name = _tree_options[ntuple_type]
    input_list = _input_type_options[ntuple_type][input_type]

    total_events_read_in = 0
    for basket in basket_generator(input_list, tree_name, branch_list):
        for event_index in range(len(basket[b'eventWeight'])):
            total_events_read_in += 1
            if total_events_read_in > _Nevents: break

            particle_list = []
            for particle_index in range(len(basket[b'tpartpdgID'][event_index])):
                if basket[b'tpartpdgID'][event_index][particle_index] == autils.PDGID['higgs']: continue
                v = TLorentzVector.from_ptetaphim(
                    basket[b'tpartpT'][event_index][particle_index],
                    basket[b'tparteta'][event_index][particle_index],
                    basket[b'tpartphi'][event_index][particle_index],
                    basket[b'tpartm'][event_index][particle_index])
                particle_list.append((v,
                                      basket[b'tpartpdgID'][event_index][particle_index],
                                      basket[b'tpartstatus'][event_index][particle_index]))

            jet_list = []
            #for jet in event['truth_jets']:
            num_dual_matched = 0
            for jet_index in range(len(basket[b'j0truthid'][event_index])):
                if not (basket[b'j0_JVT'][event_index][jet_index] and
                        basket[b'j0_fJVT_Tight'][event_index][jet_index]): continue
                #v = TLorentzVector.from_ptetaphim(basket[b'truthjpT'][event_index][jet_index], basket[b'truthjeta'][event_index][jet_index], basket[b'truthjphi'][event_index][jet_index], basket[b'truthjm'][event_index][jet_index])
                v = TLorentzVector.from_ptetaphim(
                    basket[b'j0pT'][event_index][jet_index],
                    basket[b'j0eta'][event_index][jet_index],
                    basket[b'j0phi'][event_index][jet_index],
                    basket[b'j0m'][event_index][jet_index])
                if v.pt < 30 or abs(v.eta) > 4: continue
                pdgid = match_jet(v, particle_list)
                if pdgid == autils.PDGID['photon']: continue
                jet_list.append((v, pdgid in autils.PDGID['quarks']))

            #pt_ordered_jets = sorted(jet_list, key=lambda j: j[0].pt, reverse=True)
            if not passes_cuts(input_type, jet_list): continue
            events_with_3_jets += 1

            # Leading Pt
            #pt_chosen_jets = pt_ordered_jets[:2]
            pt_chosen_jets = jet_list[:2]
            if input_type == 'sig':
                correct_jets = check_if_signature_jets(pt_chosen_jets)
                correct_pt += int(correct_jets)
            tagged_pt += int((pt_chosen_jets[0][0] + pt_chosen_jets[1][0]).mass > 252)

            # Maximized Mjj
            mass_pairs = sorted([((i[0] + j[0]).mass, [i, j]) for i, j in combinations(jet_list, 2)],
                                reverse=True, key=lambda t: t[0])
            mjj_chosen_jets = mass_pairs[0][1]
            if input_type == 'sig':
                correct_jets = check_if_signature_jets(mjj_chosen_jets)
                correct_mjj += int(correct_jets)
            tagged_mjj += int((mjj_chosen_jets[0][0] + mjj_chosen_jets[1][0]).mass > 310)

        if total_events_read_in > _Nevents: break

    num_jets = events_with_3_jets
    print()
    print(num_jets)
    if input_type == 'sig':
        print('{}, {}, {:.02}, {:.02}'.format(correct_pt, correct_mjj,
                                              correct_pt / num_jets, correct_mjj / num_jets))
    print('{}, {}, {:.02}, {:.02}'.format(tagged_pt, tagged_mjj,
                                          tagged_pt / num_jets, tagged_mjj / num_jets))
def test(input_type):
    input_list = _input_type_options[input_type]

    final = []
    for ntuple_file in input_list:
        print('\nntuple file: ' + ntuple_file)
        tree = uproot.rootio.open(ntuple_file)['Nominal']
        tree_iterator = tree.iterate(branches=branch_list, entrysteps=10000)
        for basket_number, basket in enumerate(tree_iterator):
            for i in range(Nevents):
                tps = []
                for j in range(len(basket[b'tpartpT'][i])):
                    Tpt = basket[b'tpartpT'][i][j]
                    Teta = basket[b'tparteta'][i][j]
                    Tphi = basket[b'tpartphi'][i][j]
                    Tm = basket[b'tpartm'][i][j]
                    TpdgID = basket[b'tpartpdgID'][i][j]
                    Tstatus = basket[b'tpartstatus'][i][j]
                    tps.append((Tpt, Teta, Tphi, Tm, TpdgID, Tstatus))

                vs = []
                tmp = []
                for j in range(len(basket[b'j0pT'][i])):
                    if not (basket[b'j0pT'][i][j] > 30 and abs(basket[b'j0eta'][i][j]) < 4): continue
                    if not (basket[b'j0_JVT'][i][j] and basket[b'j0_fJVT_Tight'][i][j]): continue
                    v = TLorentzVector.from_ptetaphim(basket[b'j0pT'][i][j], basket[b'j0eta'][i][j],
                                                      basket[b'j0phi'][i][j], basket[b'j0m'][i][j])
                    ID = get_pdgID_raw(v, tps)
                    tmp.append(ID)
                    #ID = rj['j0truthid']
                    if ID != 22:
                    #if not basket[b'j0_isTightPhoton'][i][j]:
                        vs.append(ID in range(1, 7))
                if len(vs) == 3 and vs.count(1) == 2:
                    final.append(vs[0] and vs[1])
            break
        break
    print()
    print(final.count(1) / len(final))

    final = []
    for event in event_iterator(input_list, 'Nominal', _branch_list, Nevents):
        #tp = list(event['truth_particles'])
        tps = [tp.copy() for tp in event['truth_particles']]
        #for tp in event['truth_particles']: pass
        vs = []
        tmp = []
        for rj in event['reco_jets']:
            if not (rj['j0pT'] > 30 and abs(rj['j0eta']) < 4): continue
            if not (rj['j0_JVT'] and rj['j0_fJVT_Tight']): continue
            v = TLorentzVector.from_ptetaphim(rj['j0pT'], rj['j0eta'], rj['j0phi'], rj['j0m'])
            ID = get_pdgID(v, tps)
            #print()
            tmp.append(ID)
            if ID != 22:
            #if not rj['j0_isTightPhoton']:
                vs.append(ID in range(1, 7))
        if len(vs) == 3 and vs.count(1) == 2:
            final.append(vs[0] and vs[1])
    print(final.count(1) / len(final))
def lifetime_weight(pf, fake=True):
    print("Adding lifetime weight branch...")
    if fake:
        ctau_weight_central = np.ones(len(pf))
        ctau_weight_up = np.ones(len(pf))
        ctau_weight_down = np.ones(len(pf))
        pf['ctau_weight_central'] = ctau_weight_central
        pf['ctau_weight_up'] = ctau_weight_up
        pf['ctau_weight_down'] = ctau_weight_down
        return pf
    else:
        Bc_mass = 6.274
        ctau_pdg = 0.510e-12 * speed_of_light * 1000.  # in mm
        ctau_actual = 0.1358
        ctau_up = (0.510 + 0.009) * 1e-12 * speed_of_light * 1000.  # in mm
        ctau_down = (0.510 - 0.009) * 1e-12 * speed_of_light * 1000.  # in mm
        ctau_weight_central = []
        ctau_weight_up = []
        ctau_weight_down = []

        for i in range(len(pf)):
            flag = 0
            #jpsi vertex
            if abs(pf.mu1_mother_pdgId[i]) == 443:
                jpsi_vertex = TVector3(pf.mu1_mother_vx[i], pf.mu1_mother_vy[i], pf.mu1_mother_vz[i])
            elif abs(pf.mu2_mother_pdgId[i]) == 443:
                jpsi_vertex = TVector3(pf.mu2_mother_vx[i], pf.mu2_mother_vy[i], pf.mu2_mother_vz[i])
            else:
                flag = 1

            #Bc vertex
            if abs(pf.mu1_grandmother_pdgId[i]) == 541:
                Bc_vertex = TVector3(pf.mu1_grandmother_vx[i], pf.mu1_grandmother_vy[i], pf.mu1_grandmother_vz[i])
                Bc_p4 = TLorentzVector.from_ptetaphim(pf.mu1_grandmother_pt[i], pf.mu1_grandmother_eta[i],
                                                      pf.mu1_grandmother_phi[i], Bc_mass)
            elif abs(pf.mu2_grandmother_pdgId[i]) == 541:
                Bc_vertex = TVector3(pf.mu2_grandmother_vx[i], pf.mu2_grandmother_vy[i], pf.mu2_grandmother_vz[i])
                Bc_p4 = TLorentzVector.from_ptetaphim(pf.mu2_grandmother_pt[i], pf.mu2_grandmother_eta[i],
                                                      pf.mu2_grandmother_phi[i], Bc_mass)
            else:
                flag = 1

            if flag == 1:
                ctau_weight_central.append(1)
                ctau_weight_up.append(1)
                ctau_weight_down.append(1)
            else:
                # distance
                lxyz = (jpsi_vertex - Bc_vertex).mag
                beta = Bc_p4.beta
                gamma = Bc_p4.gamma
                ct = lxyz / (beta * gamma)
                #print(lxyz,beta,gamma,ct)
                ctau_weight_central.append(weight_to_new_ctau(ctau_actual, ctau_pdg, ct * 10.))
                ctau_weight_up.append(weight_to_new_ctau(ctau_actual, ctau_up, ct * 10.))
                ctau_weight_down.append(weight_to_new_ctau(ctau_actual, ctau_down, ct * 10.))

        pf['ctau_weight_central'] = ctau_weight_central
        pf['ctau_weight_up'] = ctau_weight_up
        pf['ctau_weight_down'] = ctau_weight_down
        return pf
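# Hedged sketch (assumption, not from the original source): weight_to_new_ctau is not defined
# in this snippet. For an exponential decay-length distribution p(ct; ctau) = exp(-ct/ctau)/ctau,
# the per-event weight that moves a sample generated with old_ctau onto a target new_ctau is
# typically the density ratio below. The name, argument order (old_ctau, new_ctau, ct) and body
# are only illustrative; they mirror how the helper is called above but are not its actual code.
import math

def weight_to_new_ctau_sketch(old_ctau, new_ctau, ct):
    # ratio of the target to the generated probability density at this decay length
    return (old_ctau / new_ctau) * math.exp(ct / old_ctau - ct / new_ctau)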
import itertools
from uproot_methods import TLorentzVector as LV

_fourvec_names = [f'vbf_candidates_{v}' for v in ['pT', 'eta', 'phi', 'E']]

make_vector_list = lambda datarow: [
    LV.from_ptetaphie(*vec) for vec in zip(*datarow[_fourvec_names])
]


def valid_vbf(datarow):
    vector_list = make_vector_list(datarow)
    Deta = max([((i + j).mass, abs(i.eta - j.eta))
                for i, j in itertools.combinations(vector_list, 2)])[1]
    return Deta > 3


def get_features_mjj_deta(datarow):
    vector_list = make_vector_list(datarow)
    pair_list = [(i, j) for i, j in itertools.combinations(vector_list, 2)]
    if len(pair_list) > 0:
        mjj_deta_pair_list = [((i + j).mass, abs(i.eta - j.eta)) for i, j in pair_list]
        mjj_deta_pair_list.sort(reverse=True)
        prepared_features = [mjj_deta_pair_list[0][0], mjj_deta_pair_list[0][1]]
    else:
        prepared_features = [-1, -1]
    return prepared_features
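# Hedged usage sketch (not part of the original source): this assumes datarow behaves like a
# pandas Series indexed by branch name, with each four-vector branch holding the per-jet values
# for one event (that is what the list indexing and zip above require). The two VBF candidate
# jets below are invented purely for illustration.
import pandas as pd

example_row = pd.Series({
    'vbf_candidates_pT':  [95.0, 60.0],
    'vbf_candidates_eta': [2.4, -1.9],
    'vbf_candidates_phi': [0.3, 2.9],
    'vbf_candidates_E':   [540.0, 210.0],
})
print(get_features_mjj_deta(example_row))  # -> [mjj, |Delta eta|] of the highest-mjj pair
print(valid_vbf(example_row))              # -> True here, since the pair has |Delta eta| = 4.3 > 3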