Example #1
def test_saving_loading_and_copying_process_for_Attribute_dict():
    """
    Checks the saving, loading and copying functionality of an AttributeDict.

    Raises
    ------
    AssertionError
        If an exception is thrown during saving, loading or copying.
    """

    try:
        ad = AttributeDict(test_p, read_only=False)
        ad[1]["b"] = 2
        assert "b" in ad[1]
        for i in range(100):
            ad[i] = {"glia_probability": np.ones((10, 2)).astype(np.uint8)}
        start = time.time()
        ad.push()
        logging.debug("Saving AttributeDict took %0.4f." %
                      (time.time() - start))
        logging.debug("AttributeDict file size:\t%0.2f kB" %
                      (os.path.getsize(test_p) / 1.e3))
        del ad
        logging.info('PASSED: test_saving_process_for_Attribute_dict')
    except Exception as e:
        logging.warning('FAILED: test_saving_process_for_Attribute_dict. ' +
                        str(e))
        raise AssertionError
    try:
        start = time.time()
        ad = AttributeDict(test_p, read_only=True)
        logging.debug("Loading AttributeDict took %0.4f." %
                      (time.time() - start))
        assert len(list(ad.keys())) == 100
        assert np.all(
            ad[0]["glia_probability"] == np.ones((10, 2)).astype(np.uint8))
        ad.update({100: "b"})
        assert 100 in ad
        start = time.time()
        dc_constr = ad.copy_intern()
        logging.debug("Copying dict from AttributeDict took %0.4f." %
                      (time.time() - start))
        assert len(list(dc_constr.keys())) == 101
        del ad
        os.remove(test_p)
        logging.info(
            'PASSED: test_loading_and_copying_process_for_Attribute_dict')
    except Exception as e:
        logging.warning(
            'FAILED: test_loading_and_copying_process_for_Attribute_dict. ' +
            str(e))
        raise AssertionError
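
A minimal round-trip sketch of the save/load pattern exercised above; the import path syconn.backend.storage and the scratch file name are assumptions, not taken from the snippet.

import numpy as np
from syconn.backend.storage import AttributeDict  # assumed import path

scratch_p = "/tmp/attr_dict_demo.pkl"  # hypothetical scratch location

# write: missing keys behave like nested dicts and are persisted on push()
ad = AttributeDict(scratch_p, read_only=False)
ad[0]["glia_probability"] = np.ones((10, 2), dtype=np.uint8)
ad.push()

# read back in read-only mode and copy a plain dict out of the storage
ad = AttributeDict(scratch_p, read_only=True)
plain = ad.copy_intern()
assert "glia_probability" in plain[0]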
Example #2
def helper_func(paths):
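    """Collect the number of sample locations stored in the attribute dict of each path."""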
    num_locs = []
    for ad_p in paths:
        ad = AttributeDict(ad_p + 'attr_dict.pkl',
                           read_only=True,
                           disable_locking=True)
        sample_locs = np.concatenate(ad['sample_locations'])
        num_locs.append(len(sample_locs))
    return num_locs
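
A hedged usage sketch for the helper above, reusing helper_func (and its AttributeDict/numpy imports) from the snippet; the chunk directories and the worker split are hypothetical.

from multiprocessing import Pool

# hypothetical storage chunk directories, each containing an attr_dict.pkl
chunk_paths = ["/data/ssv_storage/chunk_0/", "/data/ssv_storage/chunk_1/"]

# sequential call: one location count per chunk directory
num_locs = helper_func(chunk_paths)

# or spread sub-lists of chunks over worker processes
with Pool(2) as pool:
    per_worker_counts = pool.map(helper_func, [chunk_paths[:1], chunk_paths[1:]])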
Example #3
# `f` is assumed to be the already opened file containing the pickled job arguments
args = []
while True:
    try:
        args.append(pkl.load(f))
    except EOFError:
        break

so_chunk_paths = args[0]
so_kwargs = args[1]

working_dir = so_kwargs['working_dir']
global_params.wd = working_dir
# TODO: preprocess meshes in case they don't exist and then load the mesh dict next to the attribute dict
for p in so_chunk_paths:
    # get SV IDs stored in this storage
    attr_dc_p = p + "/attr_dict.pkl"
    mesh_dc_p = p + "/mesh.pkl"
    ad = AttributeDict(attr_dc_p, disable_locking=True)
    md = MeshStorage(mesh_dc_p, disable_locking=True)
    svixs = list(ad.keys())
    # replace SO objects by using rep_coord and IDs stored in the attribute dict
    sd = sos_dict_fact(svixs, **so_kwargs)
    sos = init_sos(sd)
    # compute locations and use already processed meshes
    loc_dc_p = p + "/locations.pkl"
    loc_dc = CompressedStorage(loc_dc_p, disable_locking=True)
    for so in sos:
        try:
            ix = so.id
            if ix not in md.keys():
                verts = so.mesh[1].reshape(-1, 3)
            else:
                verts = md[ix][1].reshape(-1, 3)
del pred_kwargs["woglia"]
pred_key = pred_kwargs["pred_key"]
raw_only = pred_kwargs.pop('raw_only', False)

model = NeuralNetworkInterface(**model_kwargs)
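# predict each storage chunk and write the per-SV probabilities back to its attribute dict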
for p in so_chunk_paths:
    view_dc_p = p + "/views_woglia.pkl" if woglia else p + "/views.pkl"
    view_dc = CompressedStorage(view_dc_p, disable_locking=True)
    svixs = list(view_dc.keys())
    if len(svixs) == 0:
        continue
    views = list(view_dc.values())
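    # for raw-only models, keep a single channel of the multi-channel views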
    if raw_only and views[0].shape[1] != 1:
        for ii in range(len(views)):
            views[ii] = views[ii][:, 1]
    sd = sos_dict_fact(svixs, **so_kwargs)
    sos = init_sos(sd)
    probas = predict_views(model, views, sos, return_proba=True, **pred_kwargs)
    attr_dc_p = p + "/attr_dict.pkl"
    ad = AttributeDict(attr_dc_p, disable_locking=True)
    for ii in range(len(sos)):
        ad[sos[ii].id][pred_key] = probas[ii]
    ad.push()

with open(path_out_file, "wb") as f:
    pkl.dump("0", f)