Example #1
def test_saving_loading_and_copying_process_for_Attribute_dict():
    """Check the save/load/copy round-trip of an ``AttributeDict``.

    Writes 100 entries to the storage at ``test_p``, pushes them to disk,
    reloads the storage read-only, verifies the contents, copies the
    internal dict, and finally removes the backing file.

    Raises
    ------
    AssertionError
        If any step of the round-trip fails; the original exception is
        chained as the cause.
    """
    try:
        ad = AttributeDict(test_p, read_only=False)
        ad[1]["b"] = 2
        assert "b" in ad[1]
        for i in range(100):
            ad[i] = {"glia_probability": np.ones((10, 2)).astype(np.uint8)}
        start = time.time()
        ad.push()
        # lazy %-args: the message is only formatted if DEBUG is enabled
        logging.debug("Saving AttributeDict took %0.4f.", time.time() - start)
        logging.debug("AttributeDict file size:\t%0.2f kB",
                      os.path.getsize(test_p) / 1.e3)
        del ad
        logging.info('PASSED: test_saving_process_for_Attribute_dict')
    except Exception as e:
        logging.warning('FAILED: test_saving_process_for_Attribute_dict. %s', e)
        # chain the original exception so the real failure cause survives
        raise AssertionError from e
    try:
        start = time.time()
        ad = AttributeDict(test_p, read_only=True)
        logging.debug("Loading AttributeDict took %0.4f.", time.time() - start)
        assert len(list(ad.keys())) == 100
        assert np.all(
            ad[0]["glia_probability"] == np.ones((10, 2)).astype(np.uint8))
        ad.update({100: "b"})
        assert 100 in ad
        start = time.time()
        dc_constr = ad.copy_intern()
        logging.debug("Copying dict from AttributeDict took %0.4f.",
                      time.time() - start)
        assert len(list(dc_constr.keys())) == 101
        del ad
        # clean up the temporary storage file created by the first phase
        os.remove(test_p)
        logging.info(
            'PASSED: test_loading_and_copying_process_for_Attribute_dict')
    except Exception as e:
        logging.warning(
            'FAILED: test_loading_and_copying_process_for_Attribute_dict. %s',
            e)
        raise AssertionError from e
Example #2
        except EOFError:
            break

# Unpack the worker arguments; `args` is presumably supplied by the batch-job
# framework that invokes this script — TODO confirm against the caller.
so_chunk_paths = args[0]  # storage directories processed by the loop below
so_kwargs = args[1]  # kwargs forwarded to sos_dict_fact() for SO construction

working_dir = so_kwargs['working_dir']
# Point the process-global working directory at this job's dataset.
global_params.wd = working_dir
# TODO: preprocess meshes in case they dont exist and then load mesh dict next to the attribute dict
for p in so_chunk_paths:
    # get SV IDs stored in this storage
    attr_dc_p = p + "/attr_dict.pkl"
    mesh_dc_p = p + "/mesh.pkl"
    ad = AttributeDict(attr_dc_p, disable_locking=True)
    md = MeshStorage(mesh_dc_p, disable_locking=True)
    svixs = list(ad.keys())
    # replace SO objects by using rep_coord and IDs stored in the attribute dict
    sd = sos_dict_fact(svixs, **so_kwargs)
    sos = init_sos(sd)
    # compute locations and use already processed meshes
    loc_dc_p = p + "/locations.pkl"
    loc_dc = CompressedStorage(loc_dc_p, disable_locking=True)
    for so in sos:
        try:
            ix = so.id
            if not ix in md.keys():
                verts = so.mesh[1].reshape(-1, 3)
            else:
                verts = md[ix][1].reshape(-1, 3)
            if len(verts) == 0:
                coords = np.array([