def configure(self):
    # use this to count through the single events
    self.event_idx = 0
    # get the original file to compare to
    filename = data_path(
        "offline/mcv6.0.gsg_muon_highE-CC_50-500GeV.km3sim.jterbr00008357.jorcarec.aanet.905.root"
    )
    self.f = km3io.OfflineReader(filename)
def test_apply_to_hits_from_km3io_iterator(self):
    calib = Calibration(filename=data_path("detx/km3net_offline.detx"))
    f = km3io.OfflineReader(data_path("offline/km3net_offline.root"))

    for event in f:
        chits = calib.apply(event.hits)
        assert 176 == len(chits.t0)
        assert np.allclose(
            [207747.825, 207745.656, 207743.836], chits.t0.tolist()[:3]
        )
        break
def test_apply_to_hits_from_km3io(self):
    calib = Calibration(filename=data_path("detx/km3net_offline.detx"))
    hits = km3io.OfflineReader(data_path("offline/km3net_offline.root"))[0].hits

    chits = calib.apply(hits)
    assert 176 == len(chits.t0)
    assert np.allclose([207747.825, 207745.656, 207743.836], chits.t0.tolist()[:3])

    chits = calib.apply(hits[:3])
    assert 3 == len(chits.t0)
    assert np.allclose([207747.825, 207745.656, 207743.836], chits.t0.tolist()[:3])
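# For reference, the pattern exercised by the two tests above reduces to the
# following standalone sketch. The file paths are placeholders and the import
# path of Calibration is an assumption based on km3pipe's module layout.
#
#     import km3io
#     from km3pipe.calib import Calibration
#
#     calib = Calibration(filename="detector.detx")      # hypothetical DETX file
#     hits = km3io.OfflineReader("events.root")[0].hits  # hits of the first event
#     chits = calib.apply(hits)  # calibrated hits carrying t0, pos_*, dir_* fields
#     print(chits.t0[:3])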
def configure(self):
    self._filename = self.get("filename")
    step_size = self.get("step_size", default=2000)
    self._reader = km3io.OfflineReader(self._filename, step_size=step_size)
    self.header = self._reader.header
    self.blobs = self._blob_generator()
    Provenance().record_input(
        self._filename, uuid=str(self._reader.uuid), comment="OfflinePump input"
    )
    self.expose(self.header, "offline_header")
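# A minimal sketch of attaching this pump to a pipeline. The module name
# OfflinePump and the input filename are assumptions; step_size defaults to
# 2000 as configured above.
#
#     import km3pipe as kp
#
#     pipe = kp.Pipeline()
#     pipe.attach(OfflinePump, filename="events.root")  # hypothetical ROOT file
#     pipe.drain(5)  # pull the first five blobs through the pipeline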
def test_apply_to_hits_with_pmt_id_aka_mc_hits_from_km3io(self):
    calib = Calibration(filename=data_path("detx/KM3NeT_-00000001_20171212.detx"))
    f = km3io.OfflineReader(
        data_path(
            "offline/mcv6.gsg_nue-CCHEDIS_1e4-1e6GeV.sirene.jte.jchain.aanet.1.root"
        )
    )

    for event in f:
        chits = calib.apply(event.mc_hits)
        assert 840 == len(chits.t0)
        assert np.allclose([3, 26, 24, 4, 23, 25], chits.channel_id[:6])
        assert np.allclose([3401, 3401, 3406, 3411, 5501, 5501], chits.dom_id[:6])
        assert np.allclose([1, 1, 6, 11, 1, 1], chits.floor[:6])
        assert np.allclose([34, 34, 34, 34, 55, 55], chits.du[:6])
        assert np.allclose(
            [
                1679.18706571,
                1827.14262054,
                1926.71722628,
                2433.83097585,
                1408.35942832,
                1296.51397496,
            ],
            chits.time[:6],
        )
        assert np.allclose(
            [2.034, 1.847, 1.938, 2.082, -54.96, -55.034], chits.pos_x[:6]
        )
        assert np.allclose(
            [-233.415, -233.303, -233.355, -233.333, -341.346, -341.303],
            chits.pos_y[:6],
        )
        assert np.allclose(
            [65.059, 64.83, 244.83, 425.111, 64.941, 64.83], chits.pos_z[:6]
        )
        assert np.allclose([4, 4, 4, 26, 4, 4], f.mc_hits.origin[0][:6].tolist())
        assert np.allclose(
            [36835, 36881, 37187, 37457, 60311, 60315],
            f.mc_hits.pmt_id[0][:6].tolist(),
        )
        break
def test_conversion_of_km3io_header(self):
    header = km3io.OfflineReader(data_path("offline/numucc.root")).header
    tab = header2table(header)
    print(tab)
    for p in [
        b"DAQ",
        b"PDF",
        b"can",
        b"can_user",
        b"coord_origin",
        b"cut_in",
        b"cut_nu",
        b"cut_primary",
        b"cut_seamuon",
        b"decay",
        b"detector",
        b"drawing",
        b"genhencut",
        b"genvol",
        b"kcut",
        b"livetime",
        b"model",
        b"ngen",
        b"norma",
        b"nuflux",
        b"physics",
        b"seed",
        b"simul",
        b"sourcemode",
        b"spectrum",
        b"start_run",
        b"target",
        b"usedetfile",
        b"xlat_user",
        b"xparam",
        b"zed_user",
    ]:
        assert p in tab.parameter
    h5header = HDF5Header.from_table(tab)
    assert h5header.can.zmin == header.can.zmin
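# The resulting table can also be written straight to HDF5, which is exactly
# what h5extractf below does for its raw_header dataset. A minimal sketch,
# assuming an open km3io.OfflineReader r with a non-empty header and a
# hypothetical output path:
#
#     import h5py
#
#     with h5py.File("header_only.h5", "w") as f:
#         f.create_dataset("raw_header", data=header2table(r.header))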
def h5extractf(
    root_file, outfile=None, without_full_reco=False, without_calibration=False
):
    """Extract the contents of an offline ROOT file into an HDF5 file.

    `outfile` defaults to `root_file + ".h5"`, `without_full_reco` skips
    the full reconstruction information when extracting tracks and
    `without_calibration` drops the calibration fields from the hits.
    """
    if without_calibration:
        calibration_fields = []
    else:
        calibration_fields = [
            "pos_x",
            "pos_y",
            "pos_z",
            "dir_x",
            "dir_y",
            "dir_z",
            "tdc",
        ]
    # mapping of output dataset names to the branches to extract;
    # a tuple means the branch gets renamed on the way out
    fields = {
        "event_info": [
            ("id", "event_id"),  # id gets renamed to event_id
            "run_id",
            ("t_sec", "timestamp"),
            ("t_ns", "nanoseconds"),
            ("mc_t", "mc_time"),
            "trigger_mask",
            "trigger_counter",
            "overlays",
            "det_id",
            "frame_index",
            "mc_run_id",
        ],
        # weights get put into event_info as well
        "event_info_weights": [
            "weight_w1",
            "weight_w2",
            "weight_w3",
            "weight_w4",
        ],
        "hits": [
            "channel_id",
            "dom_id",
            ("t", "time"),
            "tot",
            ("trig", "triggered"),
            *calibration_fields,
        ],
        "mc_hits": [
            "a",
            "origin",
            "pmt_id",
            ("t", "time"),
        ],
        "tracks": [
            "pos_x",
            "pos_y",
            "pos_z",
            "dir_x",
            "dir_y",
            "dir_z",
            "E",
            "t",
            ("len", "length"),
            "rec_type",
            ("lik", "likelihood"),
            "id",
        ],
        "mc_tracks": [
            "pos_x",
            "pos_y",
            "pos_z",
            "dir_x",
            "dir_y",
            "dir_z",
            ("E", "energy"),
            ("t", "time"),
            ("len", "length"),
            "pdgid",
            "id",
        ],
    }
    if outfile is None:
        outfile = root_file + ".h5"

    start_time = time.time()
    with h5py.File(outfile, "w") as f:
        _uuid = str(uuid4())
        Provenance().record_output(outfile, uuid=_uuid, comment="Converted HDF5 file")
        with km3io.OfflineReader(root_file) as r:
            Provenance().record_input(
                root_file, uuid=str(r.uuid), comment="Input ROOT file"
            )
            if r.header is not None:
                print("Processing header")
                f.create_dataset(
                    "raw_header",
                    data=kp.io.hdf5.header2table(r.header),
                )

            print("Processing event_info")
            np_event_info = _branch_to_numpy(r, fields["event_info"])
            np_weights = _ak_to_numpy(r.w, fields["event_info_weights"])
            np_event_info[0].update(np_weights[0])
            np_w2 = _parse_w2list(r)
            if np_w2 is not None:
                np_event_info[0].update(np_w2[0])
            _to_hdf(f, "event_info", np_event_info)

            # TODO remove group_info once km3pipe does not require it anymore
            group_info = np.core.records.fromarrays(
                [np.arange(len(np_event_info[1]))], names=["group_id"]
            )
            f.create_dataset("group_info", data=group_info)

            print("Processing tracks")
            reco = f.create_group("reco")
            for branch_data in _yield_tracks(
                r.tracks, fields["tracks"], without_full_reco=without_full_reco
            ):
                _to_hdf(reco, *branch_data)

            for field_name in ("hits", "mc_hits", "mc_tracks"):
                if r[field_name] is None:
                    continue
                print("Processing", field_name)
                np_branch = _branch_to_numpy(r[field_name], fields[field_name])
                if np_branch[1].sum() == 0:
                    # empty branch, e.g. mc_hits for data files
                    continue
                _to_hdf(f, field_name, np_branch)

        f.attrs.create("format_version", FORMAT_VERSION)
        f.attrs.create("km3pipe", kp.__version__)
        f.attrs.create("origin", root_file)
        f.attrs.create("kid", _uuid)
    print("Completed in {:.1f} s".format(time.time() - start_time))
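# Converting a file then reduces to a single call; a usage sketch with
# hypothetical paths:
#
#     h5extractf("events.root")  # writes events.root.h5
#     h5extractf("events.root", outfile="slim.h5", without_full_reco=True)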