Example 1
import os

from blockbased_synapseaware.utilities.dataIO import ReadMetaData
from blockbased_synapseaware.skeletonize.anchors import ComputeAnchorPoints
# SaveAnchorWalls, TopologicalThinning, and RefineSkeleton come from the package's other
# skeletonize modules; their exact import paths are not shown in these examples

def SkeletonizeSequentially(meta_filename):
    # read in the data for this block
    data = ReadMetaData(meta_filename)

    # users must provide an output directory
    assert data.SkeletonOutputDirectory() is not None
    os.makedirs(data.SkeletonOutputDirectory(), exist_ok=True)

    # compute the first step to save the walls of each file
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                SaveAnchorWalls(data, iz, iy, ix)

    # compute the second step to find the anchors between blocks
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                ComputeAnchorPoints(data, iz, iy, ix)

    # compute the third step to thin each block independently
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                TopologicalThinning(data, iz, iy, ix)

    # compute the fourth step to refine the skeleton
    for label in range(1, data.NLabels()):
        RefineSkeleton(data, label)
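
A minimal driver for this sequential path might look like the following; the entry-point handling is an illustration, not part of the original module:

import sys

# hypothetical entry point: the meta filename is passed as the first command-line argument
if __name__ == '__main__':
    SkeletonizeSequentially(sys.argv[1])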
Example 2
import os

# ReadPtsFile is assumed to live in dataIO alongside ReadMetaData; FindEndpoints and
# CalculateNeuralReconstructionIntegrityScore come from elsewhere in the package and
# their import paths are not shown in these examples
from blockbased_synapseaware.utilities.dataIO import ReadMetaData, ReadPtsFile

def EvaluateNeuralReconstructionIntegrity(meta_filename):
    data = ReadMetaData(meta_filename)

    synapses_per_label = {}

    # read in all of the synapses from all of the blocks
    synapse_directory = data.SynapseDirectory()
    for iz in range(data.StartZ(), data.EndZ()):
        for iy in range(data.StartY(), data.EndY()):
            for ix in range(data.StartX(), data.EndX()):
                synapses_filename = '{}/{:04d}z-{:04d}y-{:04d}x.pts'.format(synapse_directory, iz, iy, ix)

                # ignore the local coordinates
                block_synapses, _ = ReadPtsFile(data, synapses_filename)

                # add all synapses for each label in this block to the global synapses
                for label in block_synapses:
                    if label not in synapses_per_label:
                        synapses_per_label[label] = []

                    synapses_per_label[label] += block_synapses[label]

    # get the output filename
    evaluation_directory = data.EvaluationDirectory()
    os.makedirs(evaluation_directory, exist_ok=True)

    output_filename = '{}/nri-results.txt'.format(evaluation_directory)
    fd = open(output_filename, 'w')

    # keep track of global statistics
    total_true_positives = 0
    total_false_positives = 0
    total_false_negatives = 0

    # for each label, find if synapses correspond to endpoints
    for label in range(1, data.NLabels()):
        # read the refined skeleton for this label
        skeleton_directory = '{}/skeletons'.format(data.SkeletonOutputDirectory())
        skeleton_filename = '{}/{:016d}.pts'.format(skeleton_directory, label)

        # skip over labels not processed
        if not os.path.exists(skeleton_filename): continue

        # get the synapses only for this one label
        synapses = synapses_per_label[label]

        # ignore the local coordinates
        skeletons, _ = ReadPtsFile(data, skeleton_filename)
        skeleton = skeletons[label]

        # read in the somata surfaces (points on the surface should not count as endpoints)
        somata_directory = '{}/somata_surfaces'.format(data.TempDirectory())
        somata_filename = '{}/{:016d}.pts'.format(somata_directory, label)

        # path may not exist if soma not found
        if os.path.exists(somata_filename):
            somata_surfaces, _ = ReadPtsFile(data, somata_filename)
            somata_surface = set(somata_surfaces[label])
        else:
            somata_surface = set()

        # get the endpoints in this skeleton for this label
        endpoints = FindEndpoints(data, skeleton, somata_surface)

        true_positives, false_positives, false_negatives = CalculateNeuralReconstructionIntegrityScore(data, synapses, endpoints)

        # if there are no true positives the NRI score is 0
        if true_positives == 0:
            nri_score = 0
        else:
            precision = true_positives / float(true_positives + false_positives)
            recall = true_positives / float(true_positives + false_negatives)

            nri_score = 2 * (precision * recall) / (precision + recall)

        print ('Label: {}'.format(label))
        print ('  True Positives:  {:10d}'.format(true_positives))
        print ('  False Positives: {:10d}'.format(false_positives))
        print ('  False Negatives: {:10d}'.format(false_negatives))
        print ('  NRI Score:           {:0.4f}'.format(nri_score))

        fd.write ('Label: {}\n'.format(label))
        fd.write ('  True Positives:  {:10d}\n'.format(true_positives))
        fd.write ('  False Positives: {:10d}\n'.format(false_positives))
        fd.write ('  False Negatives: {:10d}\n'.format(false_negatives))
        fd.write ('  NRI Score:           {:0.4f}\n'.format(nri_score))

        # update the global stats
        total_true_positives += true_positives
        total_false_positives += false_positives
        total_false_negatives += false_negatives

    # as with the per-label scores, the NRI score is 0 if there are no true positives
    if total_true_positives == 0:
        nri_score = 0
    else:
        precision = total_true_positives / float(total_true_positives + total_false_positives)
        recall = total_true_positives / float(total_true_positives + total_false_negatives)

        nri_score = 2 * (precision * recall) / (precision + recall)

    print ('Total Volume')
    print ('  True Positives:  {:10d}'.format(total_true_positives))
    print ('  False Positives: {:10d}'.format(total_false_positives))
    print ('  False Negatives: {:10d}'.format(total_false_negatives))
    print ('  NRI Score:           {:0.4f}'.format(nri_score))

    fd.write ('Total Volume\n')
    fd.write ('  True Positives:  {:10d}\n'.format(total_true_positives))
    fd.write ('  False Positives: {:10d}\n'.format(total_false_positives))
    fd.write ('  False Negatives: {:10d}\n'.format(total_false_negatives))
    fd.write ('  NRI Score:           {:0.4f}\n'.format(nri_score))

    # close the file
    fd.close()
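
The NRI score computed above is the F1 score, i.e. the harmonic mean of precision and recall over matched synapses and skeleton endpoints. A standalone sketch of that arithmetic (the helper name NRIScore is ours, not part of the package):

def NRIScore(true_positives, false_positives, false_negatives):
    # by the convention used above, no true positives means a score of 0
    if true_positives == 0:
        return 0.0

    precision = true_positives / float(true_positives + false_positives)
    recall = true_positives / float(true_positives + false_negatives)

    # harmonic mean of precision and recall (the F1 score)
    return 2 * (precision * recall) / (precision + recall)

# example: NRIScore(90, 10, 30) -> precision 0.90, recall 0.75, NRI ~ 0.8182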
Example 3
import os

# ReadPtsFile and ReadPickledData are assumed to live in dataIO alongside ReadMetaData
from blockbased_synapseaware.utilities.dataIO import ReadMetaData, ReadPtsFile, ReadPickledData

def EvaluateSkeletons(meta_filename):
    data = ReadMetaData(meta_filename)

    # make sure a results folder is specified
    assert data.EvaluationDirectory() is not None

    # read in statistics about this data set
    statistics_directory = '{}/statistics'.format(data.TempDirectory())
    statistics_filename = '{}/combined-statistics.pickle'.format(statistics_directory)
    statistics = ReadPickledData(statistics_filename)

    label_volumes = statistics['label_volumes']

    total_volume = 0
    total_thinned_skeleton_length = 0
    total_refined_skeleton_length = 0
    nlabels = 0

    # get the output filename
    evaluation_directory = data.EvaluationDirectory()
    os.makedirs(evaluation_directory, exist_ok=True)

    output_filename = '{}/skeleton-results.txt'.format(evaluation_directory)
    fd = open(output_filename, 'w')

    for label in sorted(label_volumes):
        # read pre-refinement skeleton
        thinning_filename = '{}/skeletons/{:016d}.pts'.format(data.TempDirectory(), label)

        # skip files that do not exist (no synapses, e.g.)
        if not os.path.exists(thinning_filename): continue

        thinned_skeletons_global_pts, _ = ReadPtsFile(data, thinning_filename)
        thinned_skeletons = thinned_skeletons_global_pts[label]

        refined_filename = '{}/skeletons/{:016d}.pts'.format(data.SkeletonOutputDirectory(), label)

        refined_skeletons_global_pts, _ = ReadPtsFile(data, refined_filename)
        refined_skeletons = refined_skeletons_global_pts[label]

        # get the volume and total remaining voxels
        volume = label_volumes[label]
        thinned_skeleton_length = len(thinned_skeletons)
        refined_skeleton_length = len(refined_skeletons)

        # update the variables that aggregate all labels
        total_volume += volume
        total_thinned_skeleton_length += thinned_skeleton_length
        total_refined_skeleton_length += refined_skeleton_length

        # calculate the percent and reduction of total voxels remaining
        thinned_remaining_percent = 100 * thinned_skeleton_length / volume
        thinning_reduction_factor = volume / thinned_skeleton_length
        refined_remaining_percent = 100 * refined_skeleton_length / thinned_skeleton_length
        refinement_reduction_factor = thinned_skeleton_length / refined_skeleton_length

        # calculate the total percent/reduction after all steps
        total_skeleton_percent = 100 * refined_skeleton_length / volume
        total_skeleton_reduction = volume / refined_skeleton_length

        print ('Label: {}'.format(label))
        print ('  Input Volume:           {:10d}'.format(volume))
        print ('  Topological Thinning:   {:10d}    ({:05.2f}%)  {:10.2f}x'.format(thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
        print ('  Skeleton Refinement:    {:10d}    ({:05.2f}%)  {:10.2f}x'.format(refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
        print ('  Total:                                ({:05.2f}%)  {:10.2f}x'.format(total_skeleton_percent, total_skeleton_reduction))

        fd.write ('Label: {}\n'.format(label))
        fd.write ('  Input Volume:           {:10d}\n'.format(volume))
        fd.write ('  Topological Thinning:   {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
        fd.write ('  Skeleton Refinement:    {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
        fd.write ('  Total:                                ({:05.2f}%)  {:10.2f}x\n'.format(total_skeleton_percent, total_skeleton_reduction))

        nlabels += 1

    # calculate the percent and reduction of total voxels remaining
    thinned_remaining_percent = 100 * total_thinned_skeleton_length / total_volume
    thinning_reduction_factor = total_volume / total_thinned_skeleton_length
    refined_remaining_percent = 100 * total_refined_skeleton_length / total_thinned_skeleton_length
    refinement_reduction_factor = total_thinned_skeleton_length / total_refined_skeleton_length

    # calculate the total percent/reduction after all steps
    total_skeleton_percent = 100 * total_refined_skeleton_length / total_volume
    total_skeleton_reduction = total_volume / total_refined_skeleton_length

    print ('Input Volume:             {:10d}'.format(total_volume))
    print ('Topological Thinning:     {:10d}    ({:05.2f}%)  {:10.2f}x'.format(total_thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
    print ('Skeleton Refinement:      {:10d}    ({:05.2f}%)  {:10.2f}x'.format(total_refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
    print ('Total:                                  ({:05.2f}%)  {:10.2f}x'.format(total_skeleton_percent, total_skeleton_reduction))
    print ('Average Skeleton: {:0.0f}'.format(total_refined_skeleton_length / nlabels))

    fd.write ('Input Volume:             {:10d}\n'.format(total_volume))
    fd.write ('Topological Thinning:     {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(total_thinned_skeleton_length, thinned_remaining_percent, thinning_reduction_factor))
    fd.write ('Skeleton Refinement:      {:10d}    ({:05.2f}%)  {:10.2f}x\n'.format(total_refined_skeleton_length, refined_remaining_percent, refinement_reduction_factor))
    fd.write ('Total:                                  ({:05.2f}%)  {:10.2f}x\n'.format(total_skeleton_percent, total_skeleton_reduction))
    fd.write ('Average Skeleton: {:0.0f}\n'.format(total_refined_skeleton_length / nlabels))

    # close the file
    fd.close()
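
The percent-remaining and reduction statistics above are plain ratios of voxel counts before and after each step. A worked example with made-up counts (Python 3 division):

volume = 1000000    # input voxels for one label (hypothetical)
thinned = 5000      # voxels remaining after topological thinning
refined = 1250      # voxels remaining after skeleton refinement

print (100 * thinned / volume)     # 0.5   -> thinning keeps 00.50% of the volume
print (volume / thinned)           # 200.0 -> a 200.00x reduction
print (100 * refined / thinned)    # 25.0  -> refinement keeps 25.00% of the thinned skeleton
print (thinned / refined)          # 4.0   -> a further 4.00x reduction
print (100 * refined / volume)     # 0.125 -> 00.13% of the input volume remains in total
print (volume / refined)           # 800.0 -> an 800.00x total reduction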
Example 4
import os
import sys

from blockbased_synapseaware.utilities.dataIO import ReadMetaData
from blockbased_synapseaware.makeflow_example.makeflow_helperfunctions import *

from blockbased_synapseaware.skeletonize.anchors import ComputeAnchorPoints

# read passed arguments
meta_fp, iz, iy = ReadArguments_Short(sys.argv)

# read in the data for this block
data = ReadMetaData(meta_fp)

# iterate over x blocks, preventing very short jobs on the cluster
for ix in range(data.StartX(), data.EndX()):

    # Redirect stdout and stderr
    RedirectOutStreams(data, "SK", 2, iz, iy, ix)

    # check that the previous step executed successfully
    CheckSuccessFile(data, "SK", 1, iz, iy, ix)

    # users must provide an output directory
    assert data.SkeletonOutputDirectory() is not None
    os.makedirs(data.SkeletonOutputDirectory(), exist_ok=True)

    ComputeAnchorPoints(data, iz, iy, ix)

    # Create and Write Success File
    WriteSuccessFile(data, "SK", 2, iz, iy, ix)
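
ReadArguments_Short comes in through the wildcard import of makeflow_helperfunctions. A plausible sketch of what it does, assuming the job receives a meta filename plus the iz and iy block indices (the package's actual implementation may differ):

def ReadArguments_Short(argv):
    # assumed reconstruction: argv[0] is the script name, followed by three arguments
    assert len(argv) == 4, 'usage: python <script>.py <meta_filename> <iz> <iy>'

    meta_filename = argv[1]
    iz = int(argv[2])
    iy = int(argv[3])

    return meta_filename, iz, iy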