Example #1
def test_multiplicity_stats():
    stats1 = csr.summarize(csr.Skeleton(skeleton0))
    stats2 = csr.summarize(csr.Skeleton(skeleton0, spacing=2))
    assert_almost_equal(2 * stats1['branch-distance'].values,
                        stats2['branch-distance'].values)
    assert_almost_equal(2 * stats1['euclidean-distance'].values,
                        stats2['euclidean-distance'].values)
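The fixtures skeleton0, skeleton2, skeleton4 and topograph1d used throughout these tests come from skan's test data and are not shown here. A minimal, hypothetical stand-in (together with the imports the tests assume) that exercises the same spacing behaviour could be:

import numpy as np
from numpy.testing import assert_almost_equal, assert_equal
from skan import csr

# Hypothetical branched fixture standing in for skan's skeleton0 test image:
# a small Y-shaped binary skeleton with one junction and three tips.
skeleton0 = np.array([[0, 1, 0, 0, 0],
                      [0, 1, 0, 0, 0],
                      [0, 0, 1, 1, 1],
                      [0, 1, 0, 0, 0],
                      [1, 0, 0, 0, 0]], dtype=bool)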
Example #2
def bench_suite():
    times = OrderedDict()
    skeleton = np.load(os.path.join(rundir, 'infected3.npz'))['skeleton']
    with timer() as t_build_graph:
        g, indices = csr.skeleton_to_csgraph(skeleton, spacing=2.24826)
    times['build graph'] = t_build_graph[0]
    with timer() as t_build_graph2:
        g, indices = csr.skeleton_to_csgraph(skeleton, spacing=2.24826)
    times['build graph again'] = t_build_graph2[0]
    with timer() as t_stats:
        stats = csr.branch_statistics(g)
    times['compute statistics'] = t_stats[0]
    with timer() as t_stats2:
        stats = csr.branch_statistics(g)
    times['compute statistics again'] = t_stats2[0]
    with timer() as t_skeleton:
        skel_obj = csr.Skeleton(skeleton)
    times['skeleton object'] = t_skeleton[0]
    with timer() as t_skeleton2:
        skel_obj = csr.Skeleton(skeleton)
    times['skeleton object again'] = t_skeleton2[0]
    with timer() as t_summary:
        summary = csr.summarize(skel_obj)
    times['compute per-skeleton statistics'] = t_summary[0]
    return times
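The benchmark above relies on a timer context manager and a rundir data directory defined elsewhere in the benchmark module; the infected3.npz data file itself is not included here. A minimal sketch of a compatible helper, under the assumption that t[0] should hold the elapsed time after each with block, is:

import os
import time
from collections import OrderedDict
from contextlib import contextmanager

rundir = os.path.dirname(os.path.abspath(__file__))  # assumed data location

@contextmanager
def timer():
    # Yield a list and append the elapsed wall-clock time to it on exit,
    # so that e.g. t_build_graph[0] holds the measured duration.
    result = []
    t0 = time.perf_counter()
    yield result
    result.append(time.perf_counter() - t0)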
Example #3
def test_summarize_spacing():
    df = csr.summarize(csr.Skeleton(skeleton2))
    df2 = csr.summarize(csr.Skeleton(skeleton2, spacing=2))
    assert_equal(np.array(df['node-id-src']), np.array(df2['node-id-src']))
    assert_almost_equal(np.array(df2['euclidean-distance']),
                        np.array(2 * df['euclidean-distance']))
    assert_almost_equal(np.array(df2['branch-distance']),
                        np.array(2 * df['branch-distance']))
Example #4
def test_fast_graph_center_idx():
    s = csr.Skeleton(skeleton0)
    i = csr._fast_graph_center_idx(s)
    assert i == 6

    s = csr.Skeleton(skeleton4)
    i = csr._fast_graph_center_idx(s)
    assert i == 1
Example #5
def test_topograph_summary():
    stats = csr.summarize(
        csr.Skeleton(topograph1d, spacing=2.5, value_is_height=True),
        value_is_height=True,
    )
    assert stats.loc[0, 'euclidean-distance'] == 5.0
    columns = ['coord-src-0', 'coord-src-1', 'coord-dst-0', 'coord-dst-1']
    assert_almost_equal(sorted(stats.loc[0, columns]), [0, 3, 3, 5])
Example #6
def test_diagonal():
    s = csr.Skeleton(skeleton4)
    # We choose the shells so that we catch all three, then two, then one arm
    # of the skeleton, while not triggering the "shell spacing too small"
    # warning
    c, r, counts = csr.sholl_analysis(s,
                                      center=[1, 1],
                                      shells=np.arange(0.09, 5, 1.45))
    np.testing.assert_equal(counts, [3, 2, 1, 0])
Example #7
def _old_branch_statistics(skeleton_image,
                           *,
                           spacing=1,
                           value_is_height=False):
    skel = csr.Skeleton(skeleton_image,
                        spacing=spacing,
                        value_is_height=value_is_height)
    summary = csr.summarize(skel, value_is_height=value_is_height)
    columns = ['node-id-src', 'node-id-dst', 'branch-distance', 'branch-type']
    return summary[columns].to_numpy()
Example #8
def test_transpose_image():
    image = np.zeros((10, 10))

    # `line` is skimage.draw.line: draw four straight segments into the image
    rr, cc = line(4, 0, 4, 2)
    image[rr, cc] = 1
    rr, cc = line(3, 2, 3, 5)
    image[rr, cc] = 1
    rr, cc = line(1, 2, 8, 2)
    image[rr, cc] = 1
    rr, cc = line(1, 0, 1, 8)
    image[rr, cc] = 1

    skeleton1 = csr.Skeleton(image)
    skeleton2 = csr.Skeleton(image.T)

    assert (skeleton1.n_paths == skeleton2.n_paths)
    np.testing.assert_allclose(
        np.sort(skeleton1.path_lengths()),
        np.sort(skeleton2.path_lengths()),
    )
Example #9
def test_sholl_spacing():
    s = csr.Skeleton(skeleton0, spacing=(1, 5))
    with pytest.warns(UserWarning):
        c, r, counts = csr.sholl_analysis(s,
                                          center=[3, 15],
                                          shells=np.arange(17))
        for i in range(4):
            assert np.isin(i, counts)
    c, r, counts = csr.sholl_analysis(s,
                                      center=[3, 15],
                                      shells=np.arange(1, 20, 6))
    np.testing.assert_equal(counts, [3, 2, 2, 0])
Example #10
    def skeletonize(self, threshold):
        # Threshold the z-score map and thin the mask to a one-pixel-wide
        # skeleton (the bare `skeletonize` call resolves to the module-level
        # function, assumed to be skimage.morphology.skeletonize).
        fin_mask = (self.z_scores > threshold).astype(int)
        skel = skeletonize(fin_mask).astype(int)
        sk_obj = csr.Skeleton(skel, spacing=1, keep_images=True)

        # Keep only the longest path in the skeleton...
        path_lengths = sk_obj.path_lengths()
        this_one = np.argmax(path_lengths)
        path_coordinates = sk_obj.path_coordinates(this_one).astype(int)

        # ...and rasterize it back into an image of the original shape.
        pruned_skel = np.zeros(fin_mask.shape)
        for pair in path_coordinates:
            pruned_skel[pair[0], pair[1]] = 1
        return pruned_skel, path_coordinates[::-1, :]
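The same longest-path pruning can be expressed outside the class; a standalone sketch (hypothetical helper name, same skan calls as above) that keeps only the longest path of a 2-D binary skeleton:

def keep_longest_path(binary_skeleton):
    # Build the Skeleton object, pick the longest path, and rasterize it back
    # into an image of the same shape.
    skel = csr.Skeleton(binary_skeleton.astype(int), keep_images=True)
    longest = int(np.argmax(skel.path_lengths()))
    coords = skel.path_coordinates(longest).astype(int)
    pruned = np.zeros(binary_skeleton.shape)
    pruned[coords[:, 0], coords[:, 1]] = 1
    return pruned, coords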
Example #11
def mask_2_swc(TIFFileName, SWCFileName, smp=4, ZRatio=1):

    # Read binary mask
    Skl_ImFile = tiff.TiffFile(TIFFileName)
    Skl_np = Skl_ImFile.asarray()
    # csr.summarise(image) is the legacy skan API; newer releases use
    # csr.summarize(csr.Skeleton(image)) instead.
    branch_data = csr.summarise(Skl_np)
    skel_obj = csr.Skeleton(Skl_np)
    Brch_vox = skel_obj.path_coordinates  # bound method: Brch_vox(i) gives the voxels of branch i
    NSkeletons = np.unique(branch_data['skeleton-id']).shape[0]

    # Check that the mask only holds one skeleton
    if NSkeletons > 1:
        exit("Error: more than one skeleton found in the mask!")

    # Extract relevant data from skeleton branches
    NBranches = branch_data.shape[0]
    id_0 = np.zeros(NBranches, dtype=int)
    id_1 = np.zeros(NBranches, dtype=int)
    path_list = skel_obj.paths_list()
    for i in range(NBranches):
        id_0[i] = path_list[i][0]
        id_1[i] = path_list[i][-1]

    # Count number of unique vertices and build renumbering LUT
    AllNodes = (np.unique([id_0, id_1]))
    MaxVertexIdx = (np.amax(AllNodes))
    NVertices = AllNodes.size
    LUTVertices = np.zeros(MaxVertexIdx + 1, dtype=int)
    VertExist = np.zeros(MaxVertexIdx + 1, dtype=int)

    # Estimate total number of segments
    TotSegments = 1
    for i in range(NBranches):
        TotSegments = TotSegments + (
            1 + np.floor(Brch_vox(i).shape[0] / smp)).astype(int)
    print("Number of branches: %i" % NBranches)
    print("Number of nodes: %i" % NVertices)
    print("Estimated number of segments: %i" % TotSegments)

    # Fill SWC array
    global SWC_data
    SWC_data = np.ones((TotSegments, 7), dtype=int)
    BranchOrphaned = np.zeros(NBranches, dtype=int)
    cntVert = 0

    # Insert first branch
    Idx0 = id_0[0]
    Idx1 = id_1[0]
    SWC_data[cntVert, 0] = cntVert
    LUTVertices[Idx0] = cntVert
    VertExist[Idx0] = 1
    SWC_data[cntVert, 6] = -1
    SWC_data[cntVert, 2] = Brch_vox(0)[0, 2]
    SWC_data[cntVert, 3] = Brch_vox(0)[0, 1]
    SWC_data[cntVert, 4] = Brch_vox(0)[0, 0] * ZRatio
    Vox = Brch_vox(0)
    (SWC_data, cntVert) = insertNodes(cntVert, LUTVertices[Idx0], Vox, smp,
                                      ZRatio)
    LUTVertices[Idx1] = cntVert
    VertExist[Idx1] = 1

    # Main loop
    BrchToBeAdded = np.arange(NBranches, dtype=int)
    cntIt = 0
    for it in range(10):
        if np.sum(BrchToBeAdded) == 0:
            break
        cntIt = cntIt + 1
        for j in range(1, NBranches):
            i = BrchToBeAdded[j]
            if i > 0:
                Idx0 = id_0[i]
                Idx1 = id_1[i]
                if VertExist[Idx0]:
                    # First node exists, it is then an ancestor
                    Vox = Brch_vox(i)
                    (SWC_data, cntVert) = insertNodes(cntVert,
                                                      LUTVertices[Idx0], Vox,
                                                      smp, ZRatio)
                    LUTVertices[Idx1] = cntVert
                    VertExist[Idx1] = 1
                    BrchToBeAdded[j] = 0
                else:
                    if VertExist[Idx1]:
                        # Second node exists, it is then an ancestor
                        Vox = np.flip(Brch_vox(i), 0)
                        (SWC_data,
                         cntVert) = insertNodes(cntVert, LUTVertices[Idx1],
                                                Vox, smp, ZRatio)
                        LUTVertices[Idx0] = cntVert
                        VertExist[Idx0] = 1
                        BrchToBeAdded[j] = 0

    # Truncate SWC array and add 1 to all IDs (SWC convention)
    SWC_data = SWC_data[0:cntVert + 1, :]
    for i in range(SWC_data.shape[0]):
        SWC_data[i, 0] = SWC_data[i, 0] + 1
        if SWC_data[i, 6] > -1:
            SWC_data[i, 6] = SWC_data[i, 6] + 1

    # Check for duplicated nodes
    unique_rows = np.unique(SWC_data[:, 2:4], axis=0)
    if unique_rows.shape[0] != SWC_data.shape[0]:
        #print("Warning: the skeleton holds loop(s), this is incompatible with SWC format and it will be encoded with duplicated nodes!")
        exit(
            "Error: the skeleton holds loop(s), this is incompatible with SWC format!"
        )

    # Display status
    print("Performed %i iterations" % cntIt)
    print("Remaining branches: %i " % np.count_nonzero(BrchToBeAdded))
    print("Number of segments: %i" % SWC_data.shape[0])

    # Write SWC file
    with open(SWCFileName, "w") as f:
        f.write("# ORIGINAL_SOURCE Mask2SWC 1.0\n# SCALE 1.0 1.0 1.0\n\n")
        np.savetxt(f, SWC_data, fmt='%i', delimiter=" ")
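insertNodes is a helper defined elsewhere in the same script. Judging from how it is called, it writes the subsampled points of one branch into the global SWC_data array, chaining each node to the previous one through the parent column, and returns the updated array together with the index of the last node written (the branch end point). A hypothetical reconstruction, not the author's code, might look like:

def insertNodes(cntVert, ParentIdx, Vox, smp, ZRatio):
    # Append every smp-th voxel of the branch Vox to SWC_data, starting from
    # the existing node ParentIdx and finishing with the branch end point,
    # so that the caller can register the final index as a skeleton vertex.
    global SWC_data
    prev = ParentIdx
    samples = list(range(smp, Vox.shape[0] - 1, smp)) + [Vox.shape[0] - 1]
    for k in samples:
        cntVert = cntVert + 1
        SWC_data[cntVert, 0] = cntVert
        SWC_data[cntVert, 2] = Vox[k, 2]
        SWC_data[cntVert, 3] = Vox[k, 1]
        SWC_data[cntVert, 4] = Vox[k, 0] * ZRatio
        SWC_data[cntVert, 6] = prev
        prev = cntVert
    return SWC_data, cntVert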
Example #12
def mask_2_obj(TIFFileName, OBJFileName, smp=4, ZRatio=1):

    # Read skeleton image
    Skl_ImFile = tiff.TiffFile(TIFFileName)
    Skl_np = Skl_ImFile.asarray()

    # Analyze skeleton
    branch_data = csr.summarise(Skl_np)
    NBranches = branch_data.shape[0]
    skel_obj = csr.Skeleton(Skl_np)
    Brch_vox = skel_obj.path_coordinates
    NSkeletons = np.unique(branch_data['skeleton-id']).shape[0]

    # Extract relevant data from skeleton branches
    id_0 = np.zeros(NBranches, dtype=int)
    id_1 = np.zeros(NBranches, dtype=int)
    path_list = skel_obj.paths_list()
    for i in range(NBranches):
        id_0[i] = path_list[i][0]
        id_1[i] = path_list[i][-1]

    # Parse all nodes to find unique vertices
    AllNodes = (np.unique([id_0, id_1]))
    MaxVertexIdx = (np.amax(AllNodes))
    NVertices = AllNodes.size

    # Estimate total number of segments
    TotSegments = 1
    for i in range(NBranches):
        TotSegments = TotSegments + (
            1 + np.floor(Brch_vox(i).shape[0] / smp)).astype(int)

    # Display model information
    print("Number of skeletons: %i" % NSkeletons)
    print("Number of branches: %i" % NBranches)
    print("Number of nodes: %i" % NVertices)
    print("Estimated number of segments: %i" % TotSegments)

    # Build re-indexing LUT
    LUTVertices = np.zeros([MaxVertexIdx + 1, 1], dtype=int)
    for i in range(NVertices):
        LUTVertices[AllNodes[i]] = i

    # Fill OBJ v-data
    OBJ_Vdata = np.zeros([TotSegments, 3], dtype=int)
    for i in range(NBranches):
        OBJ_Vdata[LUTVertices[id_0[i]], 0] = Brch_vox(i)[0, 2]
        OBJ_Vdata[LUTVertices[id_0[i]], 1] = Brch_vox(i)[0, 1]
        OBJ_Vdata[LUTVertices[id_0[i]], 2] = Brch_vox(i)[0, 0] * ZRatio
        OBJ_Vdata[LUTVertices[id_1[i]], 0] = Brch_vox(i)[-1, 2]
        OBJ_Vdata[LUTVertices[id_1[i]], 1] = Brch_vox(i)[-1, 1]
        OBJ_Vdata[LUTVertices[id_1[i]], 2] = Brch_vox(i)[-1, 0] * ZRatio
    cntVertices = NVertices

    # Fill OBJ l-data
    OBJ_Ldata = np.ones((TotSegments, 2), dtype=int)
    cntSegments = 0
    for i in range(NBranches):
        Vox = Brch_vox(i)
        L = Vox.shape[0]
        nNodes = (1 + np.floor(L / smp)).astype(int)
        PrevNode = LUTVertices[id_0[i]] + 1
        for s in range(1, nNodes - 1):
            OBJ_Vdata[cntVertices, 0] = Vox[np.round(s * (L - 1) /
                                                     (nNodes - 1)).astype(int),
                                            2]
            OBJ_Vdata[cntVertices, 1] = Vox[np.round(s * (L - 1) /
                                                     (nNodes - 1)).astype(int),
                                            1]
            OBJ_Vdata[cntVertices,
                      2] = Vox[np.round(s * (L - 1) /
                                        (nNodes - 1)).astype(int), 0] * ZRatio
            OBJ_Ldata[cntSegments, 0] = PrevNode
            OBJ_Ldata[cntSegments, 1] = cntVertices + 1
            PrevNode = cntVertices + 1
            cntVertices = cntVertices + 1
            cntSegments = cntSegments + 1
        OBJ_Ldata[cntSegments, 0] = PrevNode
        OBJ_Ldata[cntSegments, 1] = LUTVertices[id_1[i]] + 1
        cntSegments = cntSegments + 1

    # Display status
    print("Actual number of vertices: %i" % cntVertices)
    print("Actual number of segments: %i" % cntSegments)

    # Export OBJ file
    with open(OBJFileName, "w") as f:
        for i in range(cntVertices):
            f.write("v %i %i %i\n" %
                    (OBJ_Vdata[i, 0], OBJ_Vdata[i, 1], OBJ_Vdata[i, 2]))
        for i in range(cntSegments):
            f.write("l %i %i\n" % (OBJ_Ldata[i, 0], OBJ_Ldata[i, 1]))
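A hypothetical invocation (the file names are placeholders), assuming the usual imports import numpy as np, import tifffile as tiff and from skan import csr at the top of the script:

# Convert a binary skeleton TIFF into a Wavefront OBJ polyline model,
# keeping roughly every 4th voxel and doubling the spacing along z.
mask_2_obj("skeleton_mask.tif", "skeleton.obj", smp=4, ZRatio=2)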
Example #13
def test_2skeletons():
    df = csr.summarize(csr.Skeleton(skeleton2))
    assert_almost_equal(np.unique(df['euclidean-distance']), np.sqrt([5, 10]))
    assert_equal(np.unique(df['skeleton-id']), [0, 1])
    assert_equal(np.bincount(df['branch-type']), [0, 4, 4])
Example #14
def test_sholl():
    s = csr.Skeleton(skeleton0)
    c, r, counts = csr.sholl_analysis(s, shells=np.arange(0, 5, 1.5))
    np.testing.assert_equal(c, [3, 3])
    np.testing.assert_equal(counts, [0, 3, 3, 0])
Example #15
def test_tip_junction_edges():
    stats1 = csr.summarize(csr.Skeleton(skeleton4))
    assert stats1.shape[0] == 3  # ensure all three branches are counted
Example #16
def test_pixel_values():
    image = np.random.random((45, ))
    expected = np.mean(image)
    stats = csr.summarize(csr.Skeleton(image))
    assert_almost_equal(stats.loc[0, 'mean-pixel-value'], expected)
Example #17
def test_stats(test_skeleton):
    stats = csr.summarize(csr.Skeleton(test_skeleton))
    return stats
Example #18
def skl2obj(Skl_np, smp, ZRatio, OBJToExport):

    # Analyze skeleton
    branch_data = csr.summarise(Skl_np)
    NBranches = branch_data.shape[0]
    skel_obj = csr.Skeleton(Skl_np)
    Brch_vox = skel_obj.path_coordinates
    NSkeletons = np.unique(branch_data['skeleton-id']).shape[0]

    # Extract required information on skeleton branches
    #NBranches = branch_data.shape[0]
    brclist = skel_obj.paths_list()
    NBranches = len(brclist)
    id_0 = np.zeros(NBranches, dtype=int)
    id_1 = np.zeros(NBranches, dtype=int)
    for i in range(NBranches):
        id_0[i] = brclist[i][0]
        id_1[i] = brclist[i][-1]

    # Parse all nodes to find unique vertices
    AllNodes = (np.unique([id_0, id_1]))
    MaxVertexIdx = (np.amax(AllNodes))
    NVertices = AllNodes.size

    # Over-estimate of total number of segments after skeleton sampling (node to node links)
    TotSegments = 1
    for i in range(NBranches):
        TotSegments = TotSegments + (
            1 + np.floor(Brch_vox(i).shape[0] / smp)).astype(int)

    # Display model information
    #print("Number of skeletons: %i"%NSkeletons)
    #print("Number of branches: %i"%NBranches)
    #print("Number of nodes: %i"%NVertices)
    #print("Estimated number of segments: %i"%TotSegments)

    # Build re-indexing LUT
    LUTVertices = np.zeros([MaxVertexIdx + 1, 1], dtype=int)
    for i in range(NVertices):
        LUTVertices[AllNodes[i]] = i

    # Fill OBJ v-data
    OBJ_Vdata = np.zeros([TotSegments, 3], dtype=int)
    for i in range(NBranches):
        OBJ_Vdata[LUTVertices[id_0[i]], 0] = Brch_vox(i)[0, 2]
        OBJ_Vdata[LUTVertices[id_0[i]], 1] = Brch_vox(i)[0, 1]
        OBJ_Vdata[LUTVertices[id_0[i]], 2] = Brch_vox(i)[0, 0] * ZRatio
        OBJ_Vdata[LUTVertices[id_1[i]], 0] = Brch_vox(i)[-1, 2]
        OBJ_Vdata[LUTVertices[id_1[i]], 1] = Brch_vox(i)[-1, 1]
        OBJ_Vdata[LUTVertices[id_1[i]], 2] = Brch_vox(i)[-1, 0] * ZRatio
    cntVertices = NVertices

    # Fill OBJ l-data
    OBJ_Ldata = np.ones((TotSegments, 2), dtype=int)
    cntSegments = 0
    for i in range(NBranches):
        Vox = Brch_vox(i)
        L = Vox.shape[0]
        nNodes = (1 + np.floor(L / smp)).astype(int)
        PrevNode = LUTVertices[id_0[i]] + 1
        for s in range(1, nNodes - 1):
            OBJ_Vdata[cntVertices, 0] = Vox[np.round(s * (L - 1) /
                                                     (nNodes - 1)).astype(int),
                                            2]
            OBJ_Vdata[cntVertices, 1] = Vox[np.round(s * (L - 1) /
                                                     (nNodes - 1)).astype(int),
                                            1]
            OBJ_Vdata[cntVertices,
                      2] = Vox[np.round(s * (L - 1) /
                                        (nNodes - 1)).astype(int), 0] * ZRatio
            OBJ_Ldata[cntSegments, 0] = PrevNode
            OBJ_Ldata[cntSegments, 1] = cntVertices + 1
            PrevNode = cntVertices + 1
            cntVertices = cntVertices + 1
            cntSegments = cntSegments + 1
        OBJ_Ldata[cntSegments, 0] = PrevNode
        OBJ_Ldata[cntSegments, 1] = LUTVertices[id_1[i]] + 1
        cntSegments = cntSegments + 1

    # Display model statistics
    #print("Actual number of vertices: %i" %cntVertices)
    #print("Actual number of segments: %i" %cntSegments)

    with open(OBJToExport, "w") as f:
        for i in range(cntVertices):
            f.write("v %i %i %i\n" %
                    (OBJ_Vdata[i, 0], OBJ_Vdata[i, 1], OBJ_Vdata[i, 2]))
        for i in range(cntSegments):
            f.write("l %i %i\n" % (OBJ_Ldata[i, 0], OBJ_Ldata[i, 1]))
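A hypothetical end-to-end usage starting from a 3-D binary mask (names are placeholders; recent scikit-image versions accept 3-D input in skeletonize):

import numpy as np
from skimage.morphology import skeletonize
from skan import csr  # used inside skl2obj

volume_mask = np.load("cell_mask.npy") > 0       # placeholder input volume
skeleton = skeletonize(volume_mask).astype(np.uint8)
skl2obj(skeleton, smp=4, ZRatio=2, OBJToExport="cell_skeleton.obj")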