Example #1
    def forward_pass(self, inputs, param_vector):
        """
        Mean-pools fine-mesh vertex features onto the next coarser mesh level.
        `inputs` has shape (batch, channels, n_vertices); the pooling maps and
        the per-level coordinates/faces (coords_0, faces_0, ...) are module-level data.
        """
        if inputs.shape[2] == 32492:      # level-0 mesh (32k_fs_LR surface)
            pool_map = genfromtxt('../mesh/neighs_L1.csv', delimiter=',')
            coords_old = coords_0
            faces_old = faces_0
            coords = coords_1
            faces = faces_1
        elif inputs.shape[2] == 5356:     # level-1 mesh
            pool_map = genfromtxt('../mesh/neighs_L2.csv', delimiter=',')
            coords_old = coords_1
            faces_old = faces_1
            coords = coords_2
            faces = faces_2

        adj_mtx_old, _, _ = mesh_traversal.create_adj_mtx(
            coords_old, faces_old)
        adj_mtx, _, _ = mesh_traversal.create_adj_mtx(coords, faces)
        # genfromtxt returns floats; cast the vertex indices to int.
        pool_map = list(map(int, pool_map))

        # Mean-pool each coarse vertex over the 1-ring neighborhood of its
        # corresponding fine-mesh vertex.
        patches = []
        for i in range(coords.shape[0]):
            org_vert = int(pool_map[i])
            neighs = mesh_traversal.get_neighs(adj_mtx_old, coords_old,
                                               org_vert, 1)
            patch = inputs[:, :, neighs]
            patch = np.mean(patch, axis=2)
            patches.append(patch)

        # (coarse_vertices, batch, channels) -> (batch, channels, coarse_vertices)
        out = np.array(patches)
        out = np.swapaxes(out, 0, 1)
        out = np.swapaxes(out, 1, 2)

        return out
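
This layer is effectively a pooling step: each vertex of the coarser mesh averages the features of a 1-ring neighborhood on the finer mesh. Below is a minimal self-contained sketch of the same idea with toy data; the shapes and the neighborhood dictionary are hypothetical, not taken from the repo.

import numpy as np

# Toy setup: 4 fine-mesh vertices, 2 coarse-mesh vertices.
inputs = np.random.rand(8, 3, 4)           # (batch, channels, fine_vertices)
neighborhoods = {0: [0, 1], 2: [2, 3]}     # hypothetical 1-rings on the fine mesh
pool_map = [0, 2]                          # coarse vertex i pools around fine vertex pool_map[i]

patches = []
for org_vert in pool_map:
    patch = inputs[:, :, neighborhoods[org_vert]]  # gather neighborhood features
    patches.append(patch.mean(axis=2))             # average over the neighborhood

out = np.stack(patches, axis=2)                    # (batch, channels, coarse_vertices)
print(out.shape)                                   # (8, 3, 2)

The two swapaxes calls in the original produce the same (batch, channels, coarse_vertices) layout after collecting the patches into an array.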
Example #2
def load():
    """
    Loads 3D surface data and creates its adjacency matrix.
    :return: training & test data, adjacency matrix of the mesh, the list of mesh coordinates and faces
    """
    try:
        # Use the cached CSV copies of the mesh if they exist.
        coords_gii = genfromtxt('coords.csv', delimiter=',')
        faces_gii = genfromtxt('faces.csv', delimiter=',')
    except OSError:
        # Otherwise read the GIFTI surface directly and cache it as CSV below.
        file_gii = '/Users/semo/PycharmProjects/Conv_CNN/fmri_convnet/load_data/601127.L.inflated.32k_fs_LR.surf.gii'
        img = nib.load(file_gii)

        # these are the spatial coordinates
        coords_gii = img.darrays[0].data

        # these are the mesh connections
        faces_gii = img.darrays[1].data

        np.savetxt("coords.csv", coords_gii, delimiter=",")
        np.savetxt("faces.csv", faces_gii, delimiter=",")

    faces_gii = faces_gii.astype(int)
    adj_mtx, _, _ = mesh_traversal.create_adj_mtx(coords_gii, faces_gii, is_sparse=True)

    ###############

    train_data, train_labels, test_data, test_labels = load_all_examples()

    return train_data, train_labels, test_data, test_labels, adj_mtx, coords_gii, faces_gii
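
A minimal usage sketch, assuming the module-level imports and load_all_examples are available; the vertex count 32492 comes from the 32k_fs_LR surface referenced above (and matches the shape check in Example #1), so the commented shapes are expectations rather than verified output.

train_data, train_labels, test_data, test_labels, adj_mtx, coords, faces = load()

print(adj_mtx.shape)   # expected (32492, 32492): sparse adjacency of the cortical mesh
print(coords.shape)    # expected (32492, 3): vertex coordinates
print(faces.shape)     # (n_faces, 3): triangle vertex indices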
Example #3
    def forward_pass(self, inputs, param_vector):
        # Input dimensions:  [data, color_in, vertices]
        # Params dimensions: [color_in, color_out, y, x]
        # Output dimensions: [data, color_out, vertices]
        params = self.parser.get(param_vector, 'params')  # filters
        biases = self.parser.get(param_vector, 'biases')
        # Trailing axis so the per-channel bias broadcasts over the vertex axis.
        biases = biases.reshape(biases.shape[0], biases.shape[1], 1)
        # coords_*, faces_*, r and stride are module-level globals; the vertex
        # count selects which mesh level this layer operates on.
        if inputs.shape[2] == 5356:
            coords = coords_1
            faces = faces_1
            adj_mtx, _, _ = mesh_traversal.create_adj_mtx(coords_1, faces_1)
        elif inputs.shape[2] == 914:
            coords = coords_2
            faces = faces_2
            adj_mtx, _, _ = mesh_traversal.create_adj_mtx(coords_2, faces_2)

        conv = mesh_traversal.tensorize_and_convolve_fmri(
            params, adj_mtx, inputs, coords, r, stride)
        conv = np.squeeze(conv)

        return conv + biases
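
The bias reshape above adds a trailing axis so that one bias per output channel broadcasts across every vertex of the convolution output. A small standalone illustration with hypothetical shapes:

import numpy as np

conv = np.zeros((8, 16, 5356))    # (batch, channels_out, vertices), hypothetical
biases = np.ones((1, 16))         # one bias per output channel
biases = biases.reshape(biases.shape[0], biases.shape[1], 1)  # -> (1, 16, 1)

out = conv + biases               # bias broadcasts over the vertex axis
print(out.shape)                  # (8, 16, 5356)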
Example #4
def load(subdiv=2):
    """
    generates an sphere mesh representing a smaller brain mesh, and embeds random patterns
    drawn from a distribution, creating data for a basic discrimination task.
    :param subdiv: # subdivisions for the recursive generation of mesh. More subdivisions lead to a more complex mesh.
    :return: vertices, faces and adjacency matrix for the mesh.
    """
    # -----------------------------------------------------------------------------
    # Settings

    scale = 1

    # -----------------------------------------------------------------------------
    # Functions

    middle_point_cache = {}

    def vertex(x, y, z):
        """ Return vertex coordinates fixed to the unit sphere """

        length = sqrt(x**2 + y**2 + z**2)

        return [(i * scale) / length for i in (x, y, z)]

    def middle_point(point_1, point_2):
        """ Find a middle point and project to the unit sphere """

        # We check if we have already cut this edge first
        # to avoid duplicated verts
        smaller_index = min(point_1, point_2)
        greater_index = max(point_1, point_2)

        key = '{0}-{1}'.format(smaller_index, greater_index)

        if key in middle_point_cache:
            return middle_point_cache[key]

        # If it's not in cache, then we can cut it
        vert_1 = verts[point_1]
        vert_2 = verts[point_2]
        middle = [sum(i) / 2 for i in zip(vert_1, vert_2)]

        verts.append(vertex(*middle))

        index = len(verts) - 1
        middle_point_cache[key] = index

        return index

    # -----------------------------------------------------------------------------
    # Make the base icosahedron

    # Golden ratio
    PHI = (1 + sqrt(5)) / 2

    verts = [
        vertex(-1, PHI, 0),
        vertex(1, PHI, 0),
        vertex(-1, -PHI, 0),
        vertex(1, -PHI, 0),
        vertex(0, -1, PHI),
        vertex(0, 1, PHI),
        vertex(0, -1, -PHI),
        vertex(0, 1, -PHI),
        vertex(PHI, 0, -1),
        vertex(PHI, 0, 1),
        vertex(-PHI, 0, -1),
        vertex(-PHI, 0, 1),
    ]

    faces = [
        # 5 faces around point 0
        [0, 11, 5],
        [0, 5, 1],
        [0, 1, 7],
        [0, 7, 10],
        [0, 10, 11],

        # Adjacent faces
        [1, 5, 9],
        [5, 11, 4],
        [11, 10, 2],
        [10, 7, 6],
        [7, 1, 8],

        # 5 faces around 3
        [3, 9, 4],
        [3, 4, 2],
        [3, 2, 6],
        [3, 6, 8],
        [3, 8, 9],

        # Adjacent faces
        [4, 9, 5],
        [2, 4, 11],
        [6, 2, 10],
        [8, 6, 7],
        [9, 8, 1],
    ]

    # -----------------------------------------------------------------------------
    # Subdivisions

    for i in range(subdiv):
        faces_subdiv = []
        for tri in faces:
            v1 = middle_point(tri[0], tri[1])
            v2 = middle_point(tri[1], tri[2])
            v3 = middle_point(tri[2], tri[0])

            faces_subdiv.append([tri[0], v1, v3])
            faces_subdiv.append([tri[1], v2, v1])
            faces_subdiv.append([tri[2], v3, v2])
            faces_subdiv.append([v1, v2, v3])

        faces = faces_subdiv

    mesh = om.TriMesh()

    vlist = []
    for i in verts:
        vlist.append(mesh.add_vertex(i))

    flist = []
    for i in faces:
        flist.append(mesh.add_face(vlist[i[0]], vlist[i[1]], vlist[i[2]]))

    om.write_mesh('../data/small_sphere.off', mesh)

    adj_mtx, _, _ = mesh_traversal.create_adj_mtx(np.array(verts),
                                                  np.array(faces),
                                                  is_sparse=True)

    return verts, faces, adj_mtx
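
As a sanity check on the subdivision loop: each pass replaces every triangle with four, and the edge-midpoint cache keeps shared vertices unique, so a standard icosphere with n subdivisions has 20 * 4**n faces and 10 * 4**n + 2 vertices (the default subdiv=2 gives 320 faces and 162 vertices). A short sketch, independent of the mesh libraries used above:

# Expected sizes of the icosphere produced by load(subdiv=n).
for n in range(4):
    print(n, 20 * 4 ** n, 10 * 4 ** n + 2)
# 0 20 12
# 1 80 42
# 2 320 162
# 3 1280 642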
Example #5
from numpy import linalg as LA

"""
This client script creates an adjacency matrix from existing data, strides the mesh and convolves the result
with arbitrary data. These arbitrary filters (as well as the convolution values which are vertex IDs for now)
will be replaced in actual usages.
"""

__author__ = "Semih Cantürk"
__credits__ = "Cassiano Becker"

v, f = load_sphere.load()
v = np.array(v)
f = np.array(f)

adj_mtx, coords, faces = mesh_traversal.create_adj_mtx(v, f)

#traversal_list = mesh_traversal_debug.traverse_mesh(coords, faces, 28105, verbose=True, is_sparse=True)
#pickle.dump(traversal_list, open("var.pickle", "wb"))
#print(len(traversal_list))


# the rest is the actual convolution

center = 93
radius = 1
stride = 1
# strides = mesh_traversal_debug.traverse_mesh(coords, faces, center)
# print("Strides: ")
# print(strides)
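
The snippet ends before the convolution itself. A hedged sketch of how that step might look, reusing the tensorize_and_convolve_fmri call from Example #3; the signal and filter arrays and their shapes are arbitrary placeholders, not the repo's actual configuration.

# Hypothetical continuation: one arbitrary filter applied to a random per-vertex signal.
signal = np.random.rand(1, 1, v.shape[0])   # (batch, channels, n_vertices)
filters = np.random.rand(1, 1, 7)           # placeholder filter over a 7-vertex patch
conv = mesh_traversal.tensorize_and_convolve_fmri(
    filters, adj_mtx, signal, coords, radius, stride)
print(np.squeeze(conv).shape)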