Example #1
def write_particles(tempfile, rand_num_events, particles_per_event=-1):

    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(tempfile)
    io_manager.initialize()

    for i in range(rand_num_events):
        io_manager.set_id(1001, 0, i)

        # Get a piece of data, particle:
        ev_particle = io_manager.get_data("particle", "test")

        if particles_per_event == -1:
            n_particles = i + 1
        else:
            n_particles = particles_per_event

        for j in range(n_particles):
            part = larcv.Particle()
            part.energy_deposit(j)
            part.pdg_code(0)
            ev_particle.append(part)

        io_manager.save_entry()

    assert io_manager.get_n_entries_out() == rand_num_events

    io_manager.finalize()

    return
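
# A minimal usage sketch, assuming larcv is importable and the path below
# (a placeholder) is writable; entry i then holds i + 1 particles.
write_particles("particles_test.h5", rand_num_events=5)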
Example #2
def read_sparse_tensors(file_name, dimension):

    io_manager = larcv.IOManager(larcv.IOManager.kREAD)
    io_manager.add_in_file(file_name)
    io_manager.initialize()

    voxel_set_list = []

    for i in range(io_manager.get_n_entries()):
        voxel_set_list.append([])

        io_manager.read_entry(i)

        # Get a piece of data, sparse tensor:
        if dimension == 2:
            ev_sparse = io_manager.get_data("sparse2d", "test")
        else:
            ev_sparse = io_manager.get_data("sparse3d", "test")

        for projection in range(ev_sparse.size()):
            sparse_tensor = ev_sparse.sparse_tensor(projection)
            voxel_set_list[i].append({
                "indexes": [],
                "values": [],
                "n_voxels": sparse_tensor.size()
            })
            for j in range(sparse_tensor.size()):
                voxel = sparse_tensor.as_vector()[j]
                voxel_set_list[i][projection]['indexes'].append(voxel.id())
                voxel_set_list[i][projection]['values'].append(voxel.value())

    return voxel_set_list
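
# Hypothetical helper, not part of the original test: convert one entry and
# projection of the nested output into numpy arrays for downstream use.
def voxels_as_arrays(voxel_set_list, entry, projection):
    import numpy
    d = voxel_set_list[entry][projection]
    return numpy.asarray(d["indexes"]), numpy.asarray(d["values"])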
Example #3
def read_bboxes(tempfile, dimension, use_core_driver=False):

    io_manager = larcv.IOManager(larcv.IOManager.kREAD)
    if use_core_driver:
        io_manager.set_core_driver()
    io_manager.add_in_file(tempfile)
    io_manager.initialize()

    if dimension == 2:
        product = "bbox2d"
    elif dimension == 3:
        product = "bbox3d"
    else:
        raise Exception("Can't read bbox of dimension ", dimension)

    bbox_list = []
    for i in range(io_manager.get_n_entries()):
        io_manager.read_entry(i)
        # print(io_manager.current_entry())
        event_id = io_manager.event_id()
        data = io_manager.get_data(product, 'test')
        collection = [data.at(j) for j in range(data.size())]

        bbox_list.append(collection)

    return bbox_list
Example #4
def count_2d(file_name, product, producer):
    import numpy  # assumed module-level in the original; added so the snippet stands alone

    io = larcv.IOManager()
    io.add_in_file(file_name)
    io.initialize()

    energy = numpy.zeros((io.get_n_entries(), ))
    flavor = numpy.zeros((io.get_n_entries(), ))
    cc_nc = numpy.zeros((io.get_n_entries(), ))

    for i in range(io.get_n_entries()):
        io.read_entry(i)
        particles = io.get_data(product, producer)
        energy[i] = particles.at(0).energy_init()
        flavor[i] = particles.at(0).pdg_code()
        cc_nc[i] = particles.at(0).nu_current_type()

        if i % 100 == 0:
            print("On entry ", i, " of ", io.get_n_entries())

    return energy, flavor, cc_nc
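
# Downstream sketch (hypothetical file and producer names): the three parallel
# arrays line up per entry, so selections reduce to boolean masks. Treating
# nu_current_type() == 0 as charged-current is an assumption about this
# dataset's convention.
energy, flavor, cc_nc = count_2d("sample.h5", "particle", "generator")
cc_mask = cc_nc == 0  # assumed CC convention
print("Mean CC energy:", energy[cc_mask].mean())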
Example #5
def read_tensor(file_name, dimensions):

    from copy import copy

    io_manager = larcv.IOManager(larcv.IOManager.kREAD)
    io_manager.add_in_file(file_name)
    io_manager.initialize()

    event_image_list = []

    for i in range(io_manager.get_n_entries()):
        event_image_list.append([])

        io_manager.read_entry(i)

        # Get a piece of data, dense tensor, keyed by dimension:
        products = {1: "tensor1d", 2: "image2d", 3: "tensor3d", 4: "tensor4d"}
        if dimensions not in products:
            raise ValueError(f"Unsupported dimension: {dimensions}")
        ev_tensor = io_manager.get_data(products[dimensions], "test")

        print("Number of images read: ", ev_tensor.size())
        for projection in range(ev_tensor.size()):
            image = ev_tensor.tensor(projection).as_array()
            event_image_list[i].append(copy(image))

    return event_image_list
Example #6
def write_tensor(file_name, event_image_list, dimension):

    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(file_name)
    io_manager.initialize()

    for event in range(len(event_image_list)):
        io_manager.set_id(1001, 0, event)
        images = event_image_list[event]

        # Product name and tensor constructor, keyed by dimension:
        products = {1: ("tensor1d", larcv.Tensor1D),
                    2: ("image2d", larcv.Tensor2D),
                    3: ("tensor3d", larcv.Tensor3D),
                    4: ("tensor4d", larcv.Tensor4D)}
        if dimension not in products:
            raise ValueError(f"Unsupported dimension: {dimension}")
        product, constructor = products[dimension]
        ev_tensor = io_manager.get_data(product, "test")

        for projection in range(len(images)):
            ev_tensor.append(constructor(images[projection]))

        io_manager.save_entry()

    io_manager.finalize()

    return
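
# Round-trip sketch combining write_tensor with read_tensor (Example #5),
# assuming larcv tensors preserve float32 arrays through as_array(); the
# file name is a placeholder.
import numpy
events = [[numpy.random.rand(8, 8).astype("float32")] for _ in range(3)]
write_tensor("tensor_test.h5", events, dimension=2)
read_back = read_tensor("tensor_test.h5", dimensions=2)
assert numpy.allclose(read_back[0][0], events[0][0])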
Example #7
def read_temp_file(tempfile, rand_num_events):

    # This function is purely to test the IO read write capabilities.
    io_manager = larcv.IOManager(larcv.IOManager.kREAD)
    io_manager.add_in_file(tempfile)
    io_manager.initialize()

    return io_manager.get_n_entries()
Example #8
def test_augment_config():
    io_manager = larcv.IOManager()
    # Static default config:
    default_config = larcv.IOManager.default_config()

    default_config["Input"]["InputFiles"] = ['temp.h5']
    io_manager.configure(default_config)

    print(io_manager.get_config())
Example #9
def write_temp_file(tempfile, rand_num_events):

    # This function is purely to test the IO read write capabilities.
    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(tempfile)
    io_manager.initialize()
    for i in range(rand_num_events):
        io_manager.set_id(0, 0, i)
        io_manager.save_entry()
    io_manager.finalize()
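
# Minimal round trip pairing this writer with read_temp_file (Example #7):
write_temp_file("io_test.h5", rand_num_events=10)
assert read_temp_file("io_test.h5", 10) == 10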
Example #10
def main():
    # Assumes pandas (as pd), os, and larcv are imported at module level, and
    # that output_path and convert_files are defined elsewhere in this script.


    next_new_meta = larcv.ImageMeta3D()
    next_new_meta.set_dimension(0, 510, 51, -205)
    next_new_meta.set_dimension(1, 510, 51, -205)
    next_new_meta.set_dimension(2, 540, 108, 0)
    #next_new_meta.set_dimension(0, 2600, 260, -1300)
    #next_new_meta.set_dimension(1, 2600, 260, -1300)
    #next_new_meta.set_dimension(2, 2600, 260, -1300)

    next_new_meta_mc = larcv.ImageMeta3D()
    next_new_meta_mc.set_dimension(0, 510, 510, -205)
    next_new_meta_mc.set_dimension(1, 510, 510, -205)
    next_new_meta_mc.set_dimension(2, 540, 540, 0)
    # This code loops over training set files:
    file_list = "/Users/corey.adams/data/NEXT/mmkekic_second_production/all_labels.cvs"

    # read in list of events
    df = pd.read_csv(file_list)


    groups = list(df.groupby('filename'))

    # Prune off the directory:
    # groups = [ os.path.basename(g) for g in groups]


    # split between test and train files
    nfiles = len(groups)
    ntrain = int(nfiles*0.8)
    # train_list = groups[:ntrain]
    # test_list = groups[ntrain:]

    # print('Found %s input training files'%len(train_list))
    # print('Found %s input testing files'%len(test_list))


    for i, f in enumerate(groups):

        file_name = os.path.basename(f[0])
        output_file = output_path + file_name.replace(".root.h5", "_larcv.h5")

        if os.path.exists(output_file):
            continue

        # output_trn = os.path.basename('NextNEW_Tl208_10mm_larcv_noshf_train_200k.h5')
        io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
        io_manager.set_out_file(output_file)
        io_manager.initialize()
        # convert train files
        print(f'Converting file {i}: {file_name}')
        convert_files(io_manager, [next_new_meta, next_new_meta_mc], (i, f))
        io_manager.finalize()
Example #11
def read_sparse_clusters(file_name, dimension):

    io_manager = larcv.IOManager(larcv.IOManager.kREAD)
    io_manager.add_in_file(file_name)
    io_manager.initialize()

    voxel_set_array_list = []

    for event in range(io_manager.get_n_entries()):
        # append a list of projections for this event:
        voxel_set_array_list.append([])

        io_manager.read_entry(event)

        # Get a piece of data, sparse cluster:
        if dimension == 2:
            ev_cluster = io_manager.get_data("cluster2d", "test")
        else:
            ev_cluster = io_manager.get_data("cluster3d", "test")

        for projection in range(ev_cluster.size()):
            # Append a list of clusters for this projection:
            voxel_set_array_list[event].append([])
            sparse_cluster = ev_cluster.sparse_cluster(projection)
            print("Number of clusters: ", sparse_cluster.size())
            print("Current voxel_set_array_list length: ",
                  len(voxel_set_array_list))
            print("Current voxel_set_array_list[event] length: ",
                  len(voxel_set_array_list[event]))
            for cluster in range(sparse_cluster.size()):
                voxel_set = sparse_cluster.voxel_set(cluster)
                # Append a dict of values for this cluster
                voxel_set_array_list[event][projection].append({
                    "indexes": [],
                    "values": [],
                    "n_voxels": voxel_set.size()
                })
                this_cluster = voxel_set_array_list[event][projection][cluster]
                for j in range(voxel_set.size()):
                    voxel = voxel_set.as_vector()[j]
                    this_cluster['indexes'].append(voxel.id())
                    this_cluster['values'].append(voxel.value())

    return voxel_set_array_list
Example #12
def write_sparse_tensors(file_name, voxel_set_list, dimension, n_projections):

    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(file_name)
    io_manager.initialize()

    # For this test, the meta is pretty irrelevant as long as it is consistent
    meta_list = []
    for projection in range(n_projections):
        if dimension == 2:
            meta_list.append(larcv.ImageMeta2D())
        else:
            meta_list.append(larcv.ImageMeta3D())

        for dim in range(dimension):
            L = 10.
            N = 128
            meta_list[-1].set_dimension(dim, L, N)

        meta_list[-1].set_projection_id(projection)

    for i in range(len(voxel_set_list)):
        io_manager.set_id(1001, 0, i)

        # Get a piece of data, sparse tensor:
        if dimension == 2:
            ev_sparse = io_manager.get_data("sparse2d", "test")
        else:
            ev_sparse = io_manager.get_data("sparse3d", "test")

        for projection in range(n_projections):
            vs = larcv.VoxelSet()
            indexes = voxel_set_list[i][projection]['indexes']
            values = voxel_set_list[i][projection]['values']
            for j in range(voxel_set_list[i][projection]['n_voxels']):
                vs.emplace(indexes[j], values[j], False)

            ev_sparse.set(vs, meta_list[projection])
        io_manager.save_entry()

    print("Finished event loop")

    # assert(io_manager.get_n_entries_out() == rand_num_events)

    io_manager.finalize()

    return
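
# Round-trip sketch with read_sparse_tensors (Example #2), on synthetic input:
# one event, two projections, three voxels each. The file name is a
# placeholder; indexes must fit inside the 128-per-dimension meta above.
voxels = [[{"indexes": [1, 5, 9], "values": [0.1, 0.2, 0.3], "n_voxels": 3}
           for _ in range(2)]]
write_sparse_tensors("sparse_test.h5", voxels, dimension=2, n_projections=2)
back = read_sparse_tensors("sparse_test.h5", dimension=2)
assert back[0][0]["n_voxels"] == 3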
Example #13
def write_bboxes(tempfile, bbox_list, dimension, n_projections):

    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(tempfile)
    io_manager.initialize()

    if dimension == 2:
        box_constructor = larcv.BBox2D
        collection_constructor = larcv.BBoxCollection2D
        datatype = "bbox2d"
    elif dimension == 3:
        box_constructor = larcv.BBox3D
        collection_constructor = larcv.BBoxCollection3D
        datatype = "bbox3d"
    else:
        raise Exception("Can't get bbox of dimension ", dimension)

    n_events = len(bbox_list)

    for i_evt in range(n_events):
        io_manager.set_id(1001, 0, i_evt)

        ev_bbox = io_manager.get_data(datatype, "test")

        for i_projection in range(n_projections):
            n_bboxes = len(bbox_list[i_evt][i_projection])

            bbox_collection = collection_constructor()

            for j in range(n_bboxes):
                this_box = bbox_list[i_evt][i_projection][j]
                bbox = box_constructor(centroid=this_box["centroid"],
                                       half_length=this_box["half_length"])

                bbox_collection.append(bbox)

            # Add the whole collection:
            ev_bbox.append(bbox_collection)

        io_manager.save_entry()

    assert io_manager.get_n_entries_out() == n_events

    io_manager.finalize()

    return
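
# Round-trip sketch with read_bboxes (Example #3). Passing plain tuples for
# centroid/half_length is an assumption; the exact types the larcv BBox
# constructors expect are not shown in these snippets.
boxes = [[[{"centroid": (0.0, 0.0), "half_length": (1.0, 1.0)}]
          for _ in range(2)]]
write_bboxes("bbox_test.h5", boxes, dimension=2, n_projections=2)
collections = read_bboxes("bbox_test.h5", dimension=2)
print("Box collections in entry 0:", len(collections[0]))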
Example #14
def read_particles(tempfile, use_core_driver=False):

    io_manager = larcv.IOManager(larcv.IOManager.kREAD)
    if use_core_driver:
        io_manager.set_core_driver()
    io_manager.add_in_file(tempfile)
    io_manager.initialize()

    read_events = 0
    for i in range(io_manager.get_n_entries()):
        io_manager.read_entry(i)
        # print(io_manager.current_entry())
        event_id = io_manager.event_id()

        ev_particles = io_manager.get_data('particle', 'test')
        read_events += 1

    return read_events
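
# Pairing with write_particles (Example #1): the reader should visit every
# entry the writer produced.
write_particles("particles_test.h5", rand_num_events=4)
assert read_particles("particles_test.h5") == 4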
Example #15
def count_2d(file_name, product, producer):
    import numpy  # assumed module-level in the original; added so the snippet stands alone

    io = larcv.IOManager()
    io.add_in_file(file_name)
    io.initialize()
    n_classes = 3
    voxel_counts = {
        plane: numpy.zeros((io.get_n_entries(), n_classes))
        for plane in [0, 1, 2]
    }

    for i in range(io.get_n_entries()):
        io.read_entry(i)
        image = io.get_data(product, producer)
        for plane in [0, 1, 2]:
            sparse_tensor = image.at(plane)
            labels, counts = numpy.unique(sparse_tensor.values(),
                                          return_counts=True)
            for i_l, l in enumerate(labels):
                voxel_counts[plane][i][int(l)] = counts[i_l]
            # for label in range(n_classes):
            #     if label in labels:
            #         voxel_counts[plane][i][label] = counts[label]
            #     else:
            #         voxel_counts[plane][i][label] = 0

        if i % 100 == 0:
            print("On entry ", i, " of ", io.get_n_entries())

        # if i > 100:
        #     break

    print(voxel_counts)

    # print ("Average Voxel Occupation: ")
    # for p in [0,1,2]:
    #     print("  {p}: {av:.2f} +/- {rms:.2f} ({max} max)".format(
    #         p   = p,
    #         av  = numpy.mean(voxel_counts[:,p]),
    #         rms = numpy.std(voxel_counts[:,p]),
    #         max = numpy.max(voxel_counts[:,p])
    #         )
    #     )

    return voxel_counts
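
# Summary sketch over the returned per-plane counts; the file and producer
# names are placeholders.
counts = count_2d("sample.h5", "sparse2d", "segment")
for plane in [0, 1, 2]:
    print(f"Plane {plane} mean voxels per class:", counts[plane].mean(axis=0))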
Example #16
def main(args):

    # Create the meta object:
    next_new_meta = larcv.ImageMeta3D()
    next_new_meta.set_dimension(0, 510, 51, -205)
    next_new_meta.set_dimension(1, 510, 51, -205)
    next_new_meta.set_dimension(2, 540, 108, 0)

    # Next, make sure the input file exists:
    args.input_file = pathlib.Path(args.input_file)
    args.output_file = pathlib.Path(args.output_file)

    if not args.input_file.exists():
        raise Exception("Input file doesn't exist!")

    # Make sure the output directory exists:
    args.output_file.parent.mkdir(exist_ok=True)


    # Now, begin conversion:

    file_name = args.input_file.name

    # Strip the optional production tag, then parse run/subrun from the name:
    _, run, subrun = file_name.replace("_v1.2.0_trigger2_bg",
                                       "").replace(".h5", "").split("_")

    # output_trn = os.path.basename('NextNEW_Tl208_10mm_larcv_noshf_train_200k.h5')
    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(str(args.output_file))
    io_manager.initialize()
    # convert train files
    print(
        f'Converting file: {file_name} from {args.start_entry} to {args.end_entry}'
    )
    convert_file(io_manager, next_new_meta, args.input_file, run, subrun,
                 args.start_entry, args.end_entry)
    io_manager.finalize()
Example #17
    def __init__(self, io_config, output_file=None):
        '''larcv_writer accesses an IOManager instance directly to manage writing.

        In the init function, the IOManager is created and prepared for writing.

        Arguments:
            io_config -- configuration handed directly to larcv.IOManager
            output_file {str} -- optional output path; if given, it overrides
                the one in io_config (default: {None})
        '''

        self._io = larcv.IOManager(io_config)

        if output_file is not None:
            self._io.set_out_file(output_file)

        self._io.initialize()

        self._write_workers = {
            'sparse2d': self._write_sparse2d,
            'image2d': self._write_image2d,
            'tensor1d': self._write_tensor1d
        }

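
    # A hypothetical companion method (not part of the original snippet),
    # showing how the _write_workers table above would dispatch; the worker
    # signature (data, producer) is an assumption.
    def write(self, data, producer, product):
        if product not in self._write_workers:
            raise KeyError(f"No writer registered for product '{product}'")
        self._write_workers[product](data, producer)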
Example #18
def test_import_IOManager_h():
    io = larcv.IOManager()
Example #19
def write_sparse_clusters(file_name,
                          voxel_set_array_list,
                          dimension=2,
                          n_projections=3):

    import copy

    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(file_name)
    io_manager.initialize()

    # For this test, the meta is pretty irrelevant as long as it is consistent
    meta_list = []
    for projection in range(n_projections):
        if dimension == 2:
            meta_list.append(larcv.ImageMeta2D())
        else:
            meta_list.append(larcv.ImageMeta3D())

        for dim in range(dimension):
            L = 10.
            N = 128
            meta_list[-1].set_dimension(dim, L, N)

        meta_list[-1].set_projection_id(projection)

    for i in range(len(voxel_set_array_list)):
        io_manager.set_id(1001, 0, i)
        # Get a piece of data, sparse tensor:
        if dimension == 2:
            ev_cluster = io_manager.get_data("cluster2d", "test")
        else:
            ev_cluster = io_manager.get_data("cluster3d", "test")

        # Holder for the voxels to store:

        for projection in range(n_projections):
            clusters = voxel_set_array_list[i][projection]
            if dimension == 2:
                vsa = larcv.SparseCluster2D()
            else:
                vsa = larcv.SparseCluster3D()
            for cluster in range(len(clusters)):
                vs = larcv.VoxelSet()

                vs.id(cluster)
                indexes = clusters[cluster]['indexes']
                values = clusters[cluster]['values']
                for j in range(clusters[cluster]['n_voxels']):
                    vs.emplace(indexes[j], values[j], False)
                vsa.insert(vs)
            # sparse_cluster.set(vsa, )
            vsa.meta(meta_list[projection])
            # ev_cluster.emplace(vsa, copy.copy(meta_list[projection]))
            ev_cluster.set(vsa)

        io_manager.save_entry()

    # assert(io_manager.get_n_entries_out() == rand_num_events)

    io_manager.finalize()

    return
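
# Round-trip sketch with read_sparse_clusters (Example #11), on synthetic
# input: 2 events x 3 projections x 1 cluster of 4 voxels each. The file
# name is a placeholder.
synthetic = [
    [[{"indexes": [0, 1, 2, 3], "values": [0.5] * 4, "n_voxels": 4}]
     for _ in range(3)]
    for _ in range(2)
]
write_sparse_clusters("clusters_test.h5", synthetic)
back = read_sparse_clusters("clusters_test.h5", dimension=2)
assert back[0][0][0]["n_voxels"] == 4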
Example #20
def convert_file(input_file, output_directory):
    # Assumes tables (PyTables), numpy, pathlib, and larcv are imported at
    # module level, along with the helpers used below (get_NEW_meta,
    # basic_event_pass, store_lr_hits, store_mc_info, energy_corrected).

    # First, open and validate the input file:
    evtfile = tables.open_file(str(input_file), 'r')

    # No DECO table?  No good, just skip this file
    if not hasattr(evtfile.root, "DECO"):
        evtfile.close()
        return

    # Read whether we have MC or not:
    is_mc = hasattr(evtfile.root, "MC")

    # Format output name:
    output_name = input_file.name.replace(".h5", "_larcv.h5")
    output = output_directory / pathlib.Path(output_name)

    # Create an output larcv file:
    io_manager = larcv.IOManager(larcv.IOManager.kWRITE)
    io_manager.set_out_file(str(output))
    io_manager.initialize()

    # Now, ready to go.  Read in a couple tables:

    # - Summary table.  gives event number, ntrks, min and max of all coords.
    #  - Use this to reject multi track events and events near the walls.
    #  - use this to get the event number.
    # - Run has just run info.
    #  - read this and use it to get the run number.
    # - DECO contains the deconvolved hits.  They are stored by event number, contain x/y/z/E
    #  - read this and get the hits from each event.
    # - (ONLY MC): MC contains mc truth information.
    #  - read this for whole-event labels, but also gather out

    if is_mc:
        mc_extents = evtfile.root.MC.extents.read()
        mc_hits = evtfile.root.MC.hits.read()
        mc_particles = evtfile.root.MC.particles.read()

    events = evtfile.root.Run.events.read()
    run = evtfile.root.Run.runInfo.read()
    summary = evtfile.root.SUMMARY.Events.read()
    # event no is events[i_evt][0]
    # run no is run[i_evt][0]
    # We'll set all subrun info to 0, it doesn't matter.

    this_run = run[0][0]

    event_numbers = events['evt_number']
    # event_energy  = summary['evt_energy']

    lr_hits = evtfile.root.DECO.Events.read()

    next_new_meta = get_NEW_meta()

    mask = basic_event_pass(summary)

    passed_events = summary['event'][mask]

    # print(numpy.unique(lr_hits['event'], return_counts=True))

    for i_evt, event_no in enumerate(event_numbers):

        # Did this event pass the basic event cuts?
        if event_no not in passed_events: continue

        io_manager.set_id(this_run, 0, event_no)

        # Slice off this summary object:
        this_summary = summary[summary['event'] == event_no]

        # Parse out the deconv hits:
        this_lr_hits = lr_hits[lr_hits['event'] == event_no]
        store_lr_hits(io_manager, this_lr_hits)

        # We store the measured, corrected energy in 'energy_deposit',
        # and the MC energy, if we have it, in 'energy_init'.
        particle = larcv.Particle()

        if is_mc:
            # Store the MC information.  Extract this event's hits, particles, etc.

            # Slice this extents:
            mc_mask = mc_extents['evt_number'] == event_no
            this_index = numpy.argwhere(mc_mask)[0][0]

            this_mc_extents = mc_extents[this_index]
            # The stored 'last' indices are inclusive, so add one to get
            # Python's exclusive slice stop:
            particle_stop = int(this_mc_extents['last_particle'] + 1)
            hit_stop = int(this_mc_extents['last_hit'] + 1)

            if this_index != 0:
                previous_mc_extents = mc_extents[this_index - 1]
                particle_start = int(previous_mc_extents['last_particle'] + 1)
                hit_start = int(previous_mc_extents['last_hit'] + 1)
            else:
                particle_start = 0
                hit_start = 0

            this_particles = mc_particles[particle_start:particle_stop]
            this_hits = mc_hits[hit_start:hit_stop]

            positron = store_mc_info(io_manager, this_hits, this_particles)

            # First, we figure out the extents for this event.

            if positron:
                particle.pdg_code(0)
            else:
                particle.pdg_code(1)

            # Calculate the true energy of the event:
            true_e = numpy.sum(this_hits['hit_energy'])
            particle.energy_init(true_e)

        # Calculate the reconstructed energy of the event:
        energy = numpy.sum(this_lr_hits['E'])
        energy = energy_corrected(energy, this_summary['z_min'][0],
                                  this_summary['z_max'][0])
        particle.energy_deposit(energy)
        # Store the whole measured energy of the event
        event_part = io_manager.get_data("particle", "event")
        event_part.append(particle)

        # for event in numpy.unique(df.event):
        #     sub_df = df.query("event == {}".format(event))
        #     Run=sub_df.Run.iloc[0]
        #     if is_mc:
        #         sub_run = sub_df.file_int.iloc[0]
        #     else:
        #         sub_run = 0

        #     io_manager.set_id(int(Run), int(sub_run), int(event))

        #     ################################################################################
        #     # Store the particle information:
        #     if is_mc:
        #         larcv_particle = larcv.EventParticle.to_particle(io_manager.get_data("particle", "label"))
        #         particle = larcv.Particle()
        #         particle.energy_init(sub_df.true_energy.iloc[0])
        #         particle.pdg_code(int(sub_df.label.iloc[0]))
        #         larcv_particle.emplace_back(particle)
        #     ################################################################################

        io_manager.save_entry()
        # if i_evt > 50:
        #     break

    # Close Larcv:
    io_manager.finalize()

    # Close tables:
    evtfile.close()

    return
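
# Hypothetical driver, assuming pathlib is imported and the placeholder
# directories exist: convert every input .h5 file in turn.
if __name__ == "__main__":
    input_dir = pathlib.Path("input_dir")
    output_dir = pathlib.Path("output_dir")
    output_dir.mkdir(exist_ok=True)
    for f in sorted(input_dir.glob("*.h5")):
        convert_file(f, output_dir)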