def EvaluateWidthsSequentially(meta_filename):
    data = ReadMetaData(meta_filename)

    # iterate over all labels and generate width statistics
    for label in range(1, data.NLabels()):
        EvaluateWidths(data, label)

    CombineEvaluatedWidths(data)
def EvaluateGeodesicDistancesSequentially(meta_filename):
    data = ReadMetaData(meta_filename)

    # iterate over all labels and generate geodesic statistics
    for label in range(1, data.NLabels()):
        EvaluateGeodesicDistances(data, label)

    CombineGeodesicDistances(data)
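
# A minimal usage sketch (hypothetical meta file path); each sequential driver
# only needs the path to a metadata file describing the volume and its blocks:
#
#   EvaluateWidthsSequentially('meta/volume.meta')
#   EvaluateGeodesicDistancesSequentially('meta/volume.meta')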
Example #3
def CollectSurfacesSequentially(meta_filename):
    # read in the data for this block
    data = ReadMetaData(meta_filename)

    assert data.SurfacesDirectory() is not None

    # first step: generate the surface voxels for each block
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                GenerateSurfacesPerBlock(data, iz, iy, ix)

    CombineSurfaceVoxels(data)
Example #4
def CalculateBlockStatisticsSequentially(meta_filename):
    data = ReadMetaData(meta_filename)

    # iterate over all blocks
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                CalculatePerBlockStatistics(data, iz, iy, ix)

    CombineStatistics(data)
Example #5
def CalculateSomataStatistics(meta_filename):
    data = ReadMetaData(meta_filename)

    somata_statistics = {}

    # iterate over all blocks
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                print('{} {:04d}z-{:04d}y-{:04d}x'.format(
                    meta_filename, iz, iy, ix))
                # some datasets have no somata (default value)
                upsampled_non_zero_voxels = 0

                if data.SomataDownsampleRate():
                    somata = data.ReadSomataBlock(iz, iy, ix)

                    # get the number of non zero voxels
                    non_zero_voxels = np.count_nonzero(somata)

                    # the upsample factor is the number of voxels at full resolution
                    # that correspond to one voxel at the downsampled resolution
                    upsample_factor = data.SomataDownsampleRate()**3
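                    # illustrative: a downsample rate of 8 means each somata voxel
                    # covers 8**3 = 512 voxels at full resolution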

                    # the number of masked voxels at full resolution
                    upsampled_non_zero_voxels = upsample_factor * non_zero_voxels

                somata_statistics[(iz, iy, ix)] = upsampled_non_zero_voxels

    statistics_directory = '{}/statistics'.format(data.TempDirectory())
    if not os.path.exists(statistics_directory):
        os.makedirs(statistics_directory, exist_ok=True)

    statistics_filename = '{}/somata-statistics.pickle'.format(
        statistics_directory)
    PickleData(somata_statistics, statistics_filename)


if __name__ == "__main__":

    # read meta filepath
    meta_fp, mf_param_fp, prefix = readArgv(sys.argv)

    # import parameters from file
    import_name = "blockbased_synapseaware.makeflow_example." + mf_param_fp[:-3].replace(
        "/", ".")
    mf_param = importlib.import_module(import_name)

    # read in the data for this block
    data = ReadMetaData(meta_fp)

    # create working directory and directory for temporary makeflow files
    working_dir = data.MFCodeDirectory() + "/working_directories/{}_working_dir_{:04d}x{:04d}x{:04d}/".format(
        prefix,
        data.BlockSize()[OR_X],
        data.BlockSize()[OR_Y],
        data.BlockSize()[OR_Z])
    temp_file_dir = working_dir + "temp_files/"

    if not os.path.exists(working_dir):
        os.mkdir(working_dir)
    if not os.path.exists(temp_file_dir):
        os.mkdir(temp_file_dir)
def EvaluateNeuralReconstructionIntegrity(meta_filename):
    data = ReadMetaData(meta_filename)

    synapses_per_label = {}

    # read in all of the synapses from all of the blocks
    synapse_directory = data.SynapseDirectory()
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                synapses_filename = '{}/{:04d}z-{:04d}y-{:04d}x.pts'.format(synapse_directory, iz, iy, ix)

                # ignore the local coordinates
                block_synapses, _ = ReadPtsFile(data, synapses_filename)

                # add all synapses for each label in this block to the global synapses
                for label in block_synapses.keys():
                    if label not in synapses_per_label:
                        synapses_per_label[label] = []

                    synapses_per_label[label] += block_synapses[label]

    # get the output filename
    evaluation_directory = data.EvaluationDirectory()
    if not os.path.exists(evaluation_directory):
        os.makedirs(evaluation_directory, exist_ok=True)

    output_filename = '{}/nri-results.txt'.format(evaluation_directory)
    fd = open(output_filename, 'w')

    # keep track of global statistics
    total_true_positives = 0
    total_false_positives = 0
    total_false_negatives = 0

    # for each label, find if synapses correspond to endpoints
    for label in range(1, data.NLabels()):
        # read the refined skeleton for this label
        skeleton_directory = '{}/skeletons'.format(data.SkeletonOutputDirectory())
        skeleton_filename = '{}/{:016d}.pts'.format(skeleton_directory, label)

        # skip over labels not processed
        if not os.path.exists(skeleton_filename): continue

        # get the synapses only for this one label (may be empty)
        synapses = synapses_per_label.get(label, [])

        # ignore the local coordinates
        skeletons, _ = ReadPtsFile(data, skeleton_filename)
        skeleton = skeletons[label]

        # read in the somata surfaces (points on the surface should not count as endpoints)
        somata_directory = '{}/somata_surfaces'.format(data.TempDirectory())
        somata_filename = '{}/{:016d}.pts'.format(somata_directory, label)

        # path may not exist if soma not found
        if os.path.exists(somata_filename):
            somata_surfaces, _ = ReadPtsFile(data, somata_filename)
            somata_surface = set(somata_surfaces[label])
        else:
            somata_surface = set()

        # get the endpoints in this skeleton for this label
        endpoints = FindEndpoints(data, skeleton, somata_surface)

        true_positives, false_positives, false_negatives = CalculateNeuralReconstructionIntegrityScore(data, synapses, endpoints)


        # if there are no true positives the NRI score is 0
        if true_positives == 0:
            nri_score = 0
        else:
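            # the NRI score is the F1 score: the harmonic mean of precision and recall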
            precision = true_positives / float(true_positives + false_positives)
            recall = true_positives / float(true_positives + false_negatives)

            nri_score = 2 * (precision * recall) / (precision + recall)

        print ('Label: {}'.format(label))
        print ('  True Positives:  {:10d}'.format(true_positives))
        print ('  False Positives: {:10d}'.format(false_positives))
        print ('  False Negatives: {:10d}'.format(false_negatives))
        print ('  NRI Score:           {:0.4f}'.format(nri_score))

        fd.write ('Label: {}\n'.format(label))
        fd.write ('  True Positives:  {:10d}\n'.format(true_positives))
        fd.write ('  False Positives: {:10d}\n'.format(false_positives))
        fd.write ('  False Negatives: {:10d}\n'.format(false_negatives))
        fd.write ('  NRI Score:           {:0.4f}\n'.format(nri_score))

        # update the global stats
        total_true_positives += true_positives
        total_false_positives += false_positives
        total_false_negatives += false_negatives

    # avoid division by zero when there are no true positives in the volume
    if total_true_positives == 0:
        nri_score = 0
    else:
        precision = total_true_positives / float(total_true_positives + total_false_positives)
        recall = total_true_positives / float(total_true_positives + total_false_negatives)

        nri_score = 2 * (precision * recall) / (precision + recall)

    print ('Total Volume')
    print ('  True Positives:  {:10d}'.format(total_true_positives))
    print ('  False Positives: {:10d}'.format(total_false_positives))
    print ('  False Negatives: {:10d}'.format(total_false_negatives))
    print ('  NRI Score:           {:0.4f}'.format(nri_score))

    fd.write ('Total Volume\n')
    fd.write ('  True Positives:  {:10d}\n'.format(total_true_positives))
    fd.write ('  False Positives: {:10d}\n'.format(total_false_positives))
    fd.write ('  False Negatives: {:10d}\n'.format(total_false_negatives))
    fd.write ('  NRI Score:           {:0.4f}\n'.format(nri_score))

    # close the output file
    fd.close()
def EvaluateHoleFilling(meta_filename):
    data = ReadMetaData(meta_filename)

    # make sure a results folder is specified
    assert data.EvaluationDirectory() is not None

    hole_sizes = {}

    neighbor_label_dicts = {}

    # read in the hole sizes from each block
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                tmp_block_directory = data.TempBlockDirectory(iz, iy, ix)

                # read the saved hole sizes for this block
                hole_sizes_filename = '{}/hole-sizes.pickle'.format(tmp_block_directory)

                hole_sizes_per_block = ReadPickledData(hole_sizes_filename)

                for label in hole_sizes_per_block:
                    hole_sizes[label] = hole_sizes_per_block[label]

                # any value already determined in the local step must have no neighbors
                associated_label_dict = ReadPickledData('{}/associated-label-set-local.pickle'.format(tmp_block_directory))
                for label in associated_label_dict:
                    neighbor_label_dicts[label] = []

    # read in the neighbor label dictionary that maps each hole label to its neighbors
    tmp_directory = data.TempDirectory()
    neighbor_label_filename = '{}/hole-filling-neighbor-label-dict-global.pickle'.format(tmp_directory)
    neighbor_label_dict_global = ReadPickledData(neighbor_label_filename)
    associated_label_filename = '{}/hole-filling-associated-labels.pickle'.format(tmp_directory)
    associated_label_dict = ReadPickledData(associated_label_filename)

    # make sure that the keys are identical for hole sizes and associated labels (sanity check)
    assert (sorted(hole_sizes.keys()) == sorted(associated_label_dict.keys()))

    # make sure no query component in the global dictionary occurs in the local dictionary
    for label in neighbor_label_dict_global.keys():
        assert (not label in neighbor_label_dicts)

    # create a unified neighbor labels dictionary that combines local and global information
    neighbor_label_dicts.update(neighbor_label_dict_global)

    # make sure that the keys are identical for hole sizes and the neighbor label dicts
    assert (sorted(hole_sizes.keys()) == sorted(neighbor_label_dicts.keys()))

    # union find data structure to link together holes across blocks
    class UnionFindElement:
        def __init__(self, label):
            self.label = label
            self.parent = self
            self.rank = 0

    def Find(element):
        if element.parent is not element:
            element.parent = Find(element.parent)
        return element.parent

    def Union(element_one, element_two):
        root_one = Find(element_one)
        root_two = Find(element_two)

        if root_one == root_two: return

        if root_one.rank < root_two.rank:
            root_one.parent = root_two
        elif root_one.rank > root_two.rank:
            root_two.parent = root_one
        else:
            root_two.parent = root_one
            root_one.rank = root_one.rank + 1
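
    # illustration (hypothetical hole labels): after
    #   Union(union_find_elements[-3], union_find_elements[-7])
    # Find() returns the same root for both elements, so the two block-local
    # holes are merged and counted as a single volume-wide hole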

    union_find_elements = {}
    for label in neighbor_label_dicts.keys():
        # skip over elements that remain background
        if not associated_label_dict[label]: continue

        union_find_elements[label] = UnionFindElement(label)

    for label in neighbor_label_dicts.keys():
        # skip over elements that remain background
        if not associated_label_dict[label]: continue

        for neighbor_label in neighbor_label_dicts[label]:
            # skip over the actual neuron label
            if neighbor_label > 0: continue

            # merge these two labels together
            Union(union_find_elements[label], union_find_elements[neighbor_label])

    root_holes_sizes = {}

    # go through all labels in the union find data structure and update the hole size for the parent
    for label in union_find_elements.keys():
        root_label = Find(union_find_elements[label])

        # create this hole if it does not already exist
        if root_label.label not in root_holes_sizes:
            root_holes_sizes[root_label.label] = 0

        root_holes_sizes[root_label.label] += hole_sizes[label]

    # read in the statistics data to find total volume size
    statistics_directory = '{}/statistics'.format(data.TempDirectory())
    statistics_filename = '{}/combined-statistics.pickle'.format(statistics_directory)
    volume_statistics = ReadPickledData(statistics_filename)
    total_volume = volume_statistics['neuronal_volume']

    holes = []

    small_holes = 0

    for root_label in root_holes_sizes.keys():
        if root_holes_sizes[root_label] < 5:
            small_holes += 1

        holes.append(root_holes_sizes[root_label])

    # get statistics on the number of holes
    nholes = len(holes)
    total_hole_volume = sum(holes)

    print ('Percent Small: {}'.format(100.0 * small_holes / nholes))
    print ('No. Holes: {}'.format(nholes))
    print ('Total Volume: {} ({:0.2f}%)'.format(total_hole_volume, 100.0 * total_hole_volume / total_volume))
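
# A usage sketch (hypothetical meta path):
#   EvaluateHoleFilling('meta/volume.meta')
# prints the number of holes, the fraction smaller than five voxels, and the
# total hole volume as a percentage of the neuronal volume.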
def EvaluateSkeletons(meta_filename):
    data = ReadMetaData(meta_filename)

    # make sure a results folder is specified
    assert data.EvaluationDirectory() is not None

    # read in statistics about this data set
    statistics_directory = '{}/statistics'.format(data.TempDirectory())
    statistics_filename = '{}/combined-statistics.pickle'.format(statistics_directory)
    statistics = ReadPickledData(statistics_filename)

    label_volumes = statistics['label_volumes']

    total_volume = 0
    total_thinned_skeleton_length = 0
    total_refined_skeleton_length = 0
    nlabels = 0

    # get the output filename
    evaluation_directory = data.EvaluationDirectory()
    if not os.path.exists(evaluation_directory):
        os.makedirs(evaluation_directory, exist_ok=True)

    output_filename = '{}/skeleton-results.txt'.format(evaluation_directory)
    fd = open(output_filename, 'w')

    for label in sorted(label_volumes):
        # read pre-refinement skeleton
        thinning_filename = '{}/skeletons/{:016d}.pts'.format(data.TempDirectory(), label)

        # skip files that do not exist (no synapses, e.g.)
        if not os.path.exists(thinning_filename): continue

        thinned_skeletons_global_pts, _ = ReadPtsFile(data, thinning_filename)
        thinned_skeletons = thinned_skeletons_global_pts[label]

        refined_filename = '{}/skeletons/{:016d}.pts'.format(data.SkeletonOutputDirectory(), label)

        refined_skeletons_global_pts, _ = ReadPtsFile(data, refined_filename)
        refined_skeletons = refined_skeletons_global_pts[label]

        # get the volume and total remaining voxels
        volume = label_volumes[label]
        thinned_skeleton_length = len(thinned_skeletons)
        refined_skeleton_length = len(refined_skeletons)

        # update the variables that aggregate all labels
        total_volume += volume
        total_thinned_skeleton_length += thinned_skeleton_length
        total_refined_skeleton_length += refined_skeleton_length

        # calculate the percent and reduction of total voxels remaining
        thinned_remaining_percent = 100 * thinned_skeleton_length / volume
        thinning_reduction_factor = volume / thinned_skeleton_length
        refined_remaining_percent = 100 * refined_skeleton_length / thinned_skeleton_length
        refinement_reduction_factor = thinned_skeleton_length / refined_skeleton_length

        # calculate the total percent/reduction after all steps
        total_skeleton_percent = 100 * refined_skeleton_length / volume
        total_skeleton_reduction = volume / refined_skeleton_length
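        # e.g. (illustrative numbers): 1000000 input voxels thinned to 10000
        # skeleton points is 1.00% remaining, a 100.00x reduction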

        print ('Label: {}'.format(label))
        print ('  Input Volume:           {:10d}'.format(volume))
        print ('  Topological Thinning:   {:10d}    ({:05.2f}%)  {:10.2f}x'.format(thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
        print ('  Skeleton Refinement:    {:10d}    ({:05.2f}%)  {:10.2f}x'.format(refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
        print ('  Total:                                ({:05.2f}%)  {:10.2f}x'.format(total_skeleton_percent, total_skeleton_reduction))

        fd.write ('Label: {}\n'.format(label))
        fd.write ('  Input Volume:           {:10d}\n'.format(volume))
        fd.write ('  Topological Thinning:   {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
        fd.write ('  Skeleton Refinement:    {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
        fd.write ('  Total:                                ({:05.2f}%)  {:10.2f}x\n'.format(total_skeleton_percent, total_skeleton_reduction))

        nlabels += 1

    # calculate the percent and reduction of total voxels remaining
    thinned_remaining_percent = 100 * total_thinned_skeleton_length / total_volume
    thinning_reduction_factor = total_volume / total_thinned_skeleton_length
    refined_remaining_percent = 100 * total_refined_skeleton_length / total_thinned_skeleton_length
    refinement_reduction_factor = total_thinned_skeleton_length / total_refined_skeleton_length

    # calculate the total percent/reduction after all steps
    total_skeleton_percent = 100 * total_refined_skeleton_length / total_volume
    total_skeleton_reduction = total_volume / total_refined_skeleton_length

    print ('Input Volume:             {:10d}'.format(total_volume))
    print ('Topological Thinning:     {:10d}    ({:05.2f}%)  {:10.2f}x'.format(total_thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
    print ('Skeleton Refinement:      {:10d}    ({:05.2f}%)  {:10.2f}x'.format(total_refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
    print ('Total:                                  ({:05.2f}%)  {:10.2f}x'.format(total_skeleton_percent, total_skeleton_reduction))
    print ('Average Skeleton: {:0.0f}'.format(total_refined_skeleton_length / nlabels))

    fd.write ('Input Volume:             {:10d}\n'.format(total_volume))
    fd.write ('Topological Thinning:     {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(total_thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
    fd.write ('Skeleton Refinement:      {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(total_refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
    fd.write ('Total:                                  ({:05.2f}%)  {:10.2f}x\n'.format(total_skeleton_percent, total_skeleton_reduction))
    fd.write ('Average Skeleton: {:0.0f}\n'.format(total_refined_skeleton_length / nlabels))

    # close the file
    fd.close()
Example #10
def MapSynapsesAndSurfaces(input_meta_filename, output_meta_filename):
    # read in the meta data files
    input_data = ReadMetaData(input_meta_filename)
    output_data = ReadMetaData(output_meta_filename)

    input_synapse_directory = input_data.SynapseDirectory()
    output_synapse_directory = output_data.SynapseDirectory()

    # create the output synapse directory if it does not exist
    if not os.path.exists(output_synapse_directory):
        os.makedirs(output_synapse_directory, exist_ok=True)

    # read in all of the input synapses
    input_synapses = {}

    for iz in range(input_data.StartZ(), input_data.EndZ()):
        for iy in range(input_data.StartY(), input_data.EndY()):
            for ix in range(input_data.StartX(), input_data.EndX()):
                input_synapse_filename = '{}/{:04d}z-{:04d}y-{:04d}x.pts'.format(input_synapse_directory, iz, iy, ix)

                # ignore the local points
                global_pts, _ = ReadPtsFile(input_data, input_synapse_filename)

                for label in global_pts:
                    if label not in input_synapses:
                        input_synapses[label] = []

                    # add the array from this block to the input synapses
                    input_synapses[label] += global_pts[label]

    # create an output dictionary of synapses per block
    output_synapses_per_block = {}

    # iterate over all output blocks
    for iz in range(output_data.StartZ(), output_data.EndZ()):
        for iy in range(output_data.StartY(), output_data.EndY()):
            for ix in range(output_data.StartX(), output_data.EndX()):
                # create empty synapses_per_block dictionaries whose keys will be labels
                output_synapses_per_block[(iz, iy, ix)] = {}

    # for every label, go through the discovered synapses from the input data
    for label in input_synapses.keys():
        input_synapses_per_label = input_synapses[label]

        for input_global_index in input_synapses_per_label:
            # the global iz, iy, ix coordinates remain the same across blocks
            global_iz, global_iy, global_ix = input_data.GlobalIndexToIndices(input_global_index)

            # get the new block from the global coordinates
            output_block_iz = global_iz // output_data.BlockZLength()
            output_block_iy = global_iy // output_data.BlockYLength()
            output_block_ix = global_ix // output_data.BlockXLength()
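            # e.g. (illustrative): global_iz = 1536 with a block z-length of 512
            # falls in output block iz = 3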

            if label not in output_synapses_per_block[(output_block_iz, output_block_iy, output_block_ix)]:
                output_synapses_per_block[(output_block_iz, output_block_iy, output_block_ix)][label] = []

            # get the new global index
            output_global_index = output_data.GlobalIndicesToIndex(global_iz, global_iy, global_ix)

            output_synapses_per_block[(output_block_iz, output_block_iy, output_block_ix)][label].append(output_global_index)

    # write all of the synapse block files
    for iz in range(output_data.StartZ(), output_data.EndZ()):
        for iy in range(output_data.StartY(), output_data.EndY()):
            for ix in range(output_data.StartX(), output_data.EndX()):
                output_synapse_filename = '{}/{:04d}z-{:04d}y-{:04d}x.pts'.format(output_synapse_directory, iz, iy, ix)

                # write the pts file (use global indices)
                WritePtsFile(output_data, output_synapse_filename, output_synapses_per_block[(iz, iy, ix)], input_local_indices = False)

    # get the input/output directories for the surfaces
    input_surfaces_directory = input_data.SurfacesDirectory()
    output_surfaces_directory = output_data.SurfacesDirectory()

    # create the output surfaces directory if it does not exist
    if not os.path.exists(output_surfaces_directory):
        os.makedirs(output_surfaces_directory, exist_ok=True)

    # iterate over all labels
    for label in range(1, input_data.NLabels()):
        start_time = time.time()

        # skip over labels that do not exist
        input_surface_filename = '{}/{:016d}.pts'.format(input_surfaces_directory, label)
        if not os.path.exists(input_surface_filename): continue

        # read in the input global points
        input_global_points, _ = ReadPtsFile(input_data, input_surface_filename)

        # create an empty dictionary for the output points
        output_global_points = {}
        output_global_points[label] = []

        for input_global_index in input_global_points[label]:
            # the global iz, iy, ix coordinates remain the same across blocks
            global_iz, global_iy, global_ix = input_data.GlobalIndexToIndices(input_global_index)

            # get the new global index
            output_global_index = output_data.GlobalIndicesToIndex(global_iz, global_iy, global_ix)

            output_global_points[label].append(output_global_index)

        # write the new surface filename
        output_surface_filename = '{}/{:016d}.pts'.format(output_surfaces_directory, label)
        WritePtsFile(output_data, output_surface_filename, output_global_points, input_local_indices = False)

        print ('Completed label {} in {:0.2f} seconds'.format(label, time.time() - start_time))
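
# A usage sketch (hypothetical meta paths) to remap synapses and surfaces from
# one blocking scheme to another:
#   MapSynapsesAndSurfaces('meta/input-blocks.meta', 'meta/output-blocks.meta')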
Example #11
import os
import sys

from blockbased_synapseaware.utilities.constants import *
from blockbased_synapseaware.utilities.dataIO import ReadMetaData
from blockbased_synapseaware.makeflow_example.makeflow_helperfunctions import *

from blockbased_synapseaware.hole_filling.mapping import RemoveHoles

# read passed arguments
meta_fp, iz, iy = ReadArguments_Short(sys.argv)

# read in the data for this block
data = ReadMetaData(meta_fp)

# iterate over x blocks, preventing very short jobs on the cluster
for ix in range(data.StartX(), data.EndX()):

    # Redirect stdout and stderr
    RedirectOutStreams(data, "HF", 4, iz, iy, ix)

    # check that beforehand step has executed successfully
    CheckSuccessFile(data, "HF", 3, "all", "all", "all")

    # users must provide an output directory
    assert data.HoleFillingOutputDirectory() is not None
    os.makedirs(data.HoleFillingOutputDirectory(), exist_ok=True)

    RemoveHoles(data, iz, iy, ix)

    # Create and Write Success File
    WriteSuccessFile(data, "HF", 4, iz, iy, ix)
Example #12
def ConvertSynapsesAndProject(meta_filename, input_synapse_directory, xyz,
                              conversion_rate):
    data = ReadMetaData(meta_filename)

    resolution = data.Resolution()

    # create an empty set of synapses
    synapses = {}

    # iterate over all labels
    for label in range(1, data.NLabels()):
        # read the surfaces for this label
        surface_filename = '{}/{:016d}.pts'.format(data.SurfacesDirectory(),
                                                   label)
        # some surfaces (i.e., labels) will not exist in the volume
        if not os.path.exists(surface_filename): continue

        # read in the surface points, ignore the local coordinates
        surfaces, _ = ReadPtsFile(data, surface_filename)
        surface = surfaces[label]

        npts = len(surface)
        surface_point_cloud = np.zeros((npts, 3), dtype=np.int32)

        for index, iv in enumerate(surface):
            iz, iy, ix = data.GlobalIndexToIndices(iv)

            surface_point_cloud[index, :] = (iz * resolution[OR_Z],
                                             iy * resolution[OR_Y],
                                             ix * resolution[OR_X])

        # create an empty array for the synapses
        synapses[label] = []

        projected = 0
        missed = 0

        # read in the original synapses
        input_synapse_filename = '{}/syn_{:04}.txt'.format(
            input_synapse_directory, label)
        if os.path.exists(input_synapse_filename):
            with open(input_synapse_filename, 'r') as fd:
                for line in fd:
                    # separate the line into coordinates
                    coordinates = line.strip().split()

                    if xyz:
                        ix = round(int(coordinates[0]) / conversion_rate[OR_X])
                        iy = round(int(coordinates[1]) / conversion_rate[OR_Y])
                        iz = round(int(coordinates[2]) / conversion_rate[OR_Z])
                    else:
                        iz = round(int(coordinates[0]) / conversion_rate[OR_Z])
                        iy = round(int(coordinates[1]) / conversion_rate[OR_Y])
                        ix = round(int(coordinates[2]) / conversion_rate[OR_X])

                    # create a (1, 3) array for this point in nanometer coordinates
                    vec = np.zeros((1, 3), dtype=np.int32)
                    vec[0, :] = (iz * resolution[OR_Z], iy * resolution[OR_Y],
                                 ix * resolution[OR_X])

                    closest_point = surface[scipy.spatial.distance.cdist(
                        surface_point_cloud, vec).argmin()]
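                    # cdist computes the distances between every surface point
                    # (npts x 3) and this synapse (1 x 3) in nanometers;
                    # argmin selects the nearest surface voxel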

                    closest_iz, closest_iy, closest_ix = data.GlobalIndexToIndices(
                        closest_point)

                    deltaz = resolution[OR_Z] * (iz - closest_iz)
                    deltay = resolution[OR_Y] * (iy - closest_iy)
                    deltax = resolution[OR_X] * (ix - closest_ix)

                    distance = math.sqrt(deltaz * deltaz + deltay * deltay +
                                         deltax * deltax)

                    # skip synapses that project too far from the surface
                    # (beyond max_deviation nanometers)
                    max_deviation = 800
                    if distance < max_deviation:
                        # add to the list of valid synapses
                        synapses[label].append(closest_point)
                        projected += 1
                    else:
                        missed += 1

                print('Synapses within {} nanometers from surface: {}'.format(
                    max_deviation, projected))
                print('Synapses over {} nanometers from surface: {}'.format(
                    max_deviation, missed))

    # divide all synapses into blocks
    synapses_per_block = {}

    # iterate over all blocks
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                # create empty synapses_per_block dictionaries whose keys will be labels
                synapses_per_block[(iz, iy, ix)] = {}

    # for every label, iterate over the discovered synapses
    for label in synapses.keys():
        synapses_per_label = synapses[label]

        # iterate over all of the projected synapses
        for global_index in synapses_per_label:
            global_iz, global_iy, global_ix = data.GlobalIndexToIndices(
                global_index)

            block_iz = global_iz // data.BlockZLength()
            block_iy = global_iy // data.BlockYLength()
            block_ix = global_ix // data.BlockXLength()

            # create the array for this label per block if it does not exist
            if label not in synapses_per_block[(block_iz, block_iy, block_ix)]:
                synapses_per_block[(block_iz, block_iy, block_ix)][label] = []

            synapses_per_block[(block_iz, block_iy,
                                block_ix)][label].append(global_index)

    # write all of the synapse block files
    synapse_directory = data.SynapseDirectory()
    if not os.path.exists(synapse_directory):
        os.makedirs(synapse_directory, exist_ok=True)

    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                synapse_filename = '{}/{:04d}z-{:04d}y-{:04d}x.pts'.format(
                    synapse_directory, iz, iy, ix)

                # write the pts file (use global indices)
                WritePtsFile(data,
                             synapse_filename,
                             synapses_per_block[(iz, iy, ix)],
                             input_local_indices=False)
Example #13
def SkeletonizeSequentially(meta_filename):
    # read in the data for this block
    data = ReadMetaData(meta_filename)

    # users must provide an output directory
    assert data.SkeletonOutputDirectory() is not None
    os.makedirs(data.SkeletonOutputDirectory(), exist_ok=True)

    # compute the first step to save the walls of each file
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                SaveAnchorWalls(data, iz, iy, ix)

    # compute the second step to find the anchors between blocks
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                ComputeAnchorPoints(data, iz, iy, ix)

    # compute the third step to thin each block independently
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                TopologicalThinning(data, iz, iy, ix)

    # compute the fourth step to refine the skeleton
    for label in range(1, data.NLabels()):
        RefineSkeleton(data, label)
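
# A usage sketch (hypothetical meta path); all four steps run in sequence over
# every block before per-label refinement:
#   SkeletonizeSequentially('meta/volume.meta')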

import os
import sys

from blockbased_synapseaware.utilities.constants import *
from blockbased_synapseaware.utilities.dataIO import ReadMetaData
from blockbased_synapseaware.makeflow_example.makeflow_helperfunctions import *

from blockbased_synapseaware.benchmark.kimimaro.connectskeletons import ConnectSkeletons

# read passed arguments
if len(sys.argv) != 2:
    raise ValueError("Script needs exactly 1 input argument (meta file path)")
else:
    meta_fp = sys.argv[1]

# read in the data for this block
data = ReadMetaData(meta_fp)

# Redirect stdout and stderr
RedirectOutStreams(data, "KM", 2, "all", "all", "all")

# check that beforehand step has executed successfully
for iz in range(data.StartZ(), data.EndZ()):
    for iy in range(data.StartY(), data.EndY()):
        for ix in range(data.StartX(), data.EndX()):
            CheckSuccessFile(data, "KM", 1, iz, iy, ix)

# users must provide an output directory
assert data.SkeletonOutputDirectory() is not None
os.makedirs(data.SkeletonOutputDirectory(), exist_ok=True)

# compute the second step to connect the skeletons across block borders
# (assumed call, mirroring the other per-volume pipeline steps)
ConnectSkeletons(data)
Example #15
import os
import sys

from blockbased_synapseaware.utilities.constants import *
from blockbased_synapseaware.utilities.dataIO import ReadMetaData
from blockbased_synapseaware.makeflow_example.makeflow_helperfunctions import *

from blockbased_synapseaware.evaluate.statistics import CombineStatistics

if len(sys.argv) != 2:
    raise ValueError("Script needs exactly 1 input argument (meta file path)")
else:
    meta_fp = sys.argv[1]

# read in the data for this block
data = ReadMetaData(meta_fp)

# Redirect stdout and stderr
RedirectOutStreams(data, "ST", 2, "all", "all", "all")

# check that beforehand step has executed successfully
for iz in range(data.StartZ(), data.EndZ()):
    for iy in range(data.StartY(), data.EndY()):
        for ix in range(data.StartX(), data.EndX()):
            CheckSuccessFile(data, "ST", 1, iz, iy, ix)

# combine the per-block statistics into volume-wide statistics
CombineStatistics(data)

# Create and Write Success File
WriteSuccessFile(data, "ST", 2, "all", "all", "all")
import os
import sys

from blockbased_synapseaware.utilities.dataIO import ReadMetaData
from blockbased_synapseaware.makeflow_example.makeflow_helperfunctions import *

from blockbased_synapseaware.skeletonize.thinning import TopologicalThinning

# read passed arguments
meta_fp, iz, iy, ix = ReadArguments(sys.argv)

# read in the data for this block
data = ReadMetaData(meta_fp)

# Redirect stdout and stderr
RedirectOutStreams(data, "SK", 3, iz, iy, ix)

# check that beforehand step has executed successfully
CheckSuccessFile(data, "SK", 2, iz, iy, ix)

# users must provide an output directory
assert data.SkeletonOutputDirectory() is not None
os.makedirs(data.SkeletonOutputDirectory(), exist_ok=True)

TopologicalThinning(data, iz, iy, ix)

# Create and Write Success File
WriteSuccessFile(data, "SK", 3, iz, iy, ix)
Example #17
import os
import sys

from blockbased_synapseaware.utilities.dataIO import ReadMetaData
from blockbased_synapseaware.makeflow_example.makeflow_helperfunctions import *

from blockbased_synapseaware.skeletonize.refinement import RefineSkeleton



# read passed arguments
if len(sys.argv) != 3:
    raise ValueError("Script needs exactly 2 input arguments (meta file path, label)")
else:
    meta_fp = sys.argv[1]
    label = int(sys.argv[2])

# read in the data for this block
data = ReadMetaData(meta_fp)

# Redirect stdout and stderr
RedirectOutStreams_SK_4(data, "SK", 4, label)

for iz in range(data.StartZ(), data.EndZ()):
    for iy in range(data.StartY(), data.EndY()):
        for ix in range(data.StartX(), data.EndX()):

            # check that beforehand step has executed successfully
            CheckSuccessFile(data, "SK", 3, iz, iy, ix)

# users must provide an output directory
assert data.SkeletonOutputDirectory() is not None
os.makedirs(data.SkeletonOutputDirectory(), exist_ok=True)

RefineSkeleton(data, label)