Example #1
def assign_energy_weights_multiprocess(
    ids,
    db,
    interpolator_quadratic,
    true_key=('true_primary_energy',),
    debug=False,
):
    
    # Split the ids into chunks of roughly AVAILABLE_CORES events each and
    # pack each chunk with the arguments shared by all workers
    n_chunks = max(1, len(ids) // AVAILABLE_CORES)
    chunks = np.array_split(ids, n_chunks)
    packed = make_multiprocess_pack(
        chunks, interpolator_quadratic, true_key, db
    )

    # Multiprocess and recombine
    with Pool(AVAILABLE_CORES) as p:
        weights = p.map(calc_weights_multiprocess, packed)
    weights_combined = flatten_list_of_lists(weights)
    
    return weights_combined
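
The helpers used throughout these examples (make_multiprocess_pack, flatten_list_of_lists, AVAILABLE_CORES) are not shown, and the snippets also assume `import numpy as np` and `from multiprocessing import Pool` at module level. Below is a minimal sketch of what the helpers might look like, inferred purely from the call sites; the real project code may differ.

import itertools
import multiprocessing

# Assumed module-level constant: one worker per available CPU core.
AVAILABLE_CORES = multiprocessing.cpu_count()

def flatten_list_of_lists(lists):
    # Concatenate the per-chunk result lists back into one flat list.
    return list(itertools.chain.from_iterable(lists))

def make_multiprocess_pack(chunks, *shared_args, enumerate_processes=False):
    # Pair every chunk with the arguments shared by all workers, so each
    # element can be handed to Pool.map as a single pack. With
    # enumerate_processes=True, a worker index is appended to each pack.
    if enumerate_processes:
        return [(chunk,) + shared_args + (i,) for i, chunk in enumerate(chunks)]
    return [(chunk,) + shared_args for chunk in chunks]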
Example #2
def make_dom_interval_mask(db,
                           ids,
                           min_doms,
                           max_doms,
                           multiprocess=True,
                           dom_mask='SplitInIcePulses'):

    # Split the candidates into chunks for multiprocessing
    if multiprocess:
        ids_chunked = np.array_split(ids, AVAILABLE_CORES)
        packed = make_multiprocess_pack(ids_chunked, db, min_doms, max_doms,
                                        dom_mask)

        with Pool(AVAILABLE_CORES) as p:
            accepted_lists = p.map(find_dom_interval_passed_cands, packed)

        # Recombine the per-worker results into one sorted mask
        mask = sorted(flatten_list_of_lists(accepted_lists))

    else:
        raise NotImplementedError(
            'make_dom_interval_mask: only the multiprocessing path is implemented'
        )

    # Build a descriptive name for the mask
    mask_name = 'dom_interval_%s_min%d_max%d' % (dom_mask, min_doms, max_doms)

    return mask, mask_name
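
A hypothetical call, assuming db and ids come from the project's own loaders (the DOM bounds here are purely illustrative):

# Hypothetical usage: keep events with between 8 and 60 hit DOMs.
mask, mask_name = make_dom_interval_mask(db, ids, min_doms=8, max_doms=60)
print('%s kept %d of %d events' % (mask_name, len(mask), len(ids)))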
Example #3
def make_particle_mask(db, ids, particle, multiprocess=True):

    # Look up the particle code to build the mask for
    particle_code = get_particle_code(particle)

    # Split the candidates into chunks for multiprocessing
    if multiprocess:
        ids_chunked = np.array_split(ids, AVAILABLE_CORES)
        packed = make_multiprocess_pack(ids_chunked, db, particle_code)
        with Pool(AVAILABLE_CORES) as p:
            accepted_lists = p.map(find_particles, packed)

        # Recombine the per-worker results into one sorted mask
        mask = sorted(flatten_list_of_lists(accepted_lists))
    else:
        raise NotImplementedError(
            'make_particle_mask: only the multiprocessing path is implemented')

    mask_name = particle

    return mask, mask_name
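
get_particle_code is not shown either. One plausible implementation maps particle names to their PDG Monte Carlo codes; the name strings and the exact mapping the project uses are assumptions here.

def get_particle_code(particle):
    # Map a particle name to its PDG Monte Carlo code (sketch; the project's
    # actual naming convention may differ).
    pdg_codes = {
        'nu_e': 12,
        'nu_mu': 14,
        'nu_tau': 16,
        'muon': 13,
    }
    try:
        return pdg_codes[particle]
    except KeyError:
        raise ValueError('get_particle_code: unknown particle %r' % particle)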
Example #4
def make_energy_interval_mask(db,
                              ids,
                              min_energy,
                              max_energy,
                              multiprocess=True):

    # Split the candidates into chunks for multiprocessing
    if multiprocess:
        ids_chunked = np.array_split(ids, AVAILABLE_CORES)
        packed = make_multiprocess_pack(ids_chunked, db, min_energy,
                                        max_energy)

        with Pool(AVAILABLE_CORES) as p:
            accepted_lists = p.map(find_energy_interval_passed_cands, packed)

        # Recombine the per-worker results into one sorted mask
        mask = sorted(flatten_list_of_lists(accepted_lists))

    else:
        raise NotImplementedError(
            'make_energy_interval_mask: only the multiprocessing path is implemented'
        )
    print('Fraction of events passing the energy cut: %.3f' %
          (len(mask) / len(ids)))

    # Build a descriptive name for the mask
    mask_name = 'energy_interval_min%.1f_max%.1f' % (min_energy, max_energy)

    return mask, mask_name
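
The worker find_energy_interval_passed_cands is not shown. A sketch of one plausible implementation, assuming packs are laid out as in the make_multiprocess_pack sketch above; fetch_true_energy is a hypothetical accessor for the project's database.

def find_energy_interval_passed_cands(pack):
    # Worker: unpack one chunk plus the shared arguments and keep only the
    # events whose true energy falls inside [min_energy, max_energy].
    ids_chunk, db, min_energy, max_energy = pack
    accepted = []
    for event_id in ids_chunk:
        energy = fetch_true_energy(db, event_id)  # hypothetical helper
        if min_energy <= energy <= max_energy:
            accepted.append(event_id)
    return accepted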
Example #5
def assign_energy_balanced_weights_multiprocess(
    ids,
    db,
    interpolator_quadratic,
    true_key=('true_primary_energy',),
    debug=False,
):
    
    # Split the ids into one chunk per core and pack each chunk with the
    # arguments shared by all workers
    chunks = np.array_split(ids, AVAILABLE_CORES)
    packed = make_multiprocess_pack(
        chunks, interpolator_quadratic, true_key, db
    )

    # Multiprocess and recombine
    with Pool(AVAILABLE_CORES) as p:
        weights = p.map(calc_weights_multiprocess, packed)
    weights_combined = flatten_list_of_lists(weights)
    
    return weights_combined
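
calc_weights_multiprocess (used here and in Example #1) is also not shown. A plausible sketch, assuming the interpolator maps true energy to a per-event weight and fetch_scalar is a hypothetical database accessor:

def calc_weights_multiprocess(pack):
    # Worker: evaluate the fitted interpolator at each event's true energy.
    ids_chunk, interpolator_quadratic, true_key, db = pack
    weights = []
    for event_id in ids_chunk:
        true_energy = fetch_scalar(db, event_id, true_key[0])  # hypothetical
        weights.append(float(interpolator_quadratic(true_energy)))
    return weights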
Example #6
# Split the validation event numbers into one chunk per core
event_no_val_chunks = np.array_split(data_val, AVAILABLE_CORES)

# Fetch from meta and prep for multiprocess
wanted = [
    'true_primary_direction_x', 'true_primary_direction_y',
    'true_primary_direction_z', 'event_no', 'interaction_type',
    'cascade_energy', 'length'
]
query = 'SELECT {features} FROM features'.format(features=', '.join(wanted))

start = time.time()
data_meta_tupled = exec_query(PATH_META_DB, query)
data_meta = np.array(data_meta_tupled)
end = time.time()
print(get_time(), 'Time spent fetching from meta: %d seconds' % (end - start))
# Restart the timer so the final print reports matching time only
start = time.time()
packed = make_multiprocess_pack(event_no_val_chunks,
                                data_meta,
                                enumerate_processes=True)

with Pool(AVAILABLE_CORES) as p:
    matches_list = p.map(multiprocess_match_indices, packed)
# Merge the per-worker match dicts into one mapping
matches = {}
for d in matches_list:
    matches.update(d)

# Persist the matches for later reuse
with open(PATH_DATA_OSCNEXT + '/matched_val.pickle', 'wb') as f:
    pickle.dump(matches, f)

end = time.time()
print(get_time(), 'Time spent matching: %d seconds' % (end - start))
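
multiprocess_match_indices is not shown. A sketch of one plausible implementation: each worker maps the event numbers in its chunk to their row indices in data_meta ('event_no' is the fourth column of the wanted selection above). The real matching logic may differ.

def multiprocess_match_indices(pack):
    # Worker: map each validation event number to its row index in the meta
    # array; event numbers without a match are silently dropped.
    event_nos, data_meta, process_idx = pack
    index_by_event_no = {
        int(event_no): row for row, event_no in enumerate(data_meta[:, 3])
    }
    return {
        int(event_no): index_by_event_no[int(event_no)]
        for event_no in event_nos
        if int(event_no) in index_by_event_no
    }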