    def __call__(self, prefix: str):
        logging.info(f'aggregate skeletons with prefix of {prefix}')

        id2filenames = defaultdict(list)
        for filename in self.fragments_storage.list_files(prefix=prefix):
            filename = os.path.basename(filename)
            # `re.match` only matches at the start of the string (implicit ^);
            # `re.search` scans the whole filename for the pattern.
            matches = re.search(r'(\d+):', filename)

            if not matches:
                continue

            # skeleton ID: the digits captured before the colon
            skl_id = int(matches.group(1))
            id2filenames[skl_id].append(filename)

        for skl_id, filenames in id2filenames.items():
            logging.info(f'skeleton id: {skl_id}')
            frags = self.fragments_storage.get(filenames)
            frags = [
                PrecomputedSkeleton.from_precomputed(x['content'])
                for x in frags
            ]
            skel = PrecomputedSkeleton.simple_merge(frags).consolidate()
            skel = kimimaro.postprocess(skel,
                                        dust_threshold=1000,
                                        tick_threshold=3500)
            self.output_storage.put(
                file_path=str(skl_id),
                content=skel.to_precomputed(),
            )
            # the last few hundred files will not be uploaded without sleeping!
            sleep(0.01)
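A quick, self-contained check of the filename convention the regex above relies on (a hypothetical fragment name of the form "<id>:<suffix>"; only the leading digits-plus-colon are assumed):

import re

filename = '12345:0.frag'  # made-up fragment filename
m = re.search(r'(\d+):', filename)
print(int(m.group(1)))  # 12345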
Example No. 2
    def process_skeletons(self, unfused_skeletons, in_place=False):
        skeletons = {}
        if in_place:
            skeletons = unfused_skeletons

        for label in tqdm(unfused_skeletons.keys(),
                          desc="Postprocessing",
                          disable=(not self.progress)):
            skels = unfused_skeletons[label]
            skel = PrecomputedSkeleton.simple_merge(skels)
            skel.id = label
            skel.extra_attributes = [
                attr for attr in skel.extra_attributes
                if attr['data_type'] == 'float32'
            ]
            if (self.max_cable_length is not None
                    and skel.cable_length() > self.max_cable_length):
                # over the cable-length cap: store the simple merge unpostprocessed
                skeletons[label] = skel.to_precomputed()
            else:
                skeletons[label] = kimimaro.postprocess(
                    skel,
                    dust_threshold=self.dust_threshold,  # voxels
                    tick_threshold=self.tick_threshold,  # nm
                ).to_precomputed()

        return skeletons
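For context, a minimal self-contained sketch of the merge-then-serialize round trip these tasks perform. `PrecomputedSkeleton` is the older name for what current cloud-volume releases export as `Skeleton`; the toy coordinates are made up:

import numpy as np
from cloudvolume import Skeleton  # a.k.a. PrecomputedSkeleton in older releases

a = Skeleton(vertices=np.array([[0, 0, 0], [100, 0, 0]], dtype=np.float32),
             edges=np.array([[0, 1]], dtype=np.uint32))
b = Skeleton(vertices=np.array([[100, 0, 0], [200, 0, 0]], dtype=np.float32),
             edges=np.array([[0, 1]], dtype=np.uint32))

merged = Skeleton.simple_merge([a, b]).consolidate()  # dedupes the shared vertex
blob = merged.to_precomputed()                        # bytes, ready for storage
print(Skeleton.from_precomputed(blob).cable_length())  # 200.0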
Example No. 3
def execute(self):
    corgie_logger.info(f"Merging skeletons at {self.dst_path}")
    fragment_filenames = self.cf.list(prefix=self.prefix, flat=True)
    skeleton_files = self.cf.get(fragment_filenames)
    skeletons = defaultdict(list)
    for skeleton_file in skeleton_files:
        try:
            colon_index = skeleton_file["path"].index(":")
        except ValueError:
            # File is a full skeleton, not a fragment
            continue
        seg_id = skeleton_file["path"][0:colon_index]
        skeleton_fragment = pickle.loads(skeleton_file["content"])
        if not skeleton_fragment.empty():
            skeletons[seg_id].append(skeleton_fragment)
    for seg_id, skeleton_fragments in skeletons.items():
        skeleton = PrecomputedSkeleton.simple_merge(
            skeleton_fragments).consolidate()
        skeleton = kimimaro.postprocess(skeleton, self.dust_threshold,
                                        self.tick_threshold)
        skeleton.id = int(seg_id)
        self.cf.put(path=seg_id,
                    content=skeleton.to_precomputed(),
                    compress="gzip")
        corgie_logger.info(f"Finished skeleton {seg_id}")
Example No. 4
  def process_skeletons(self, locations, cv):
    skeletons = {}
    for label, locs in locations.items():
      skel = PrecomputedSkeleton.simple_merge(
        self.get_unfused(label, locs, cv)
      )
      skel.id = label
      skel.extra_attributes = [
        attr for attr in skel.extra_attributes
        if attr['data_type'] == 'float32'
      ]
      skeletons[label] = kimimaro.postprocess(
        skel, 
        dust_threshold=self.dust_threshold, # voxels 
        tick_threshold=self.tick_threshold, # nm
      ).to_precomputed()

    return skeletons
Example No. 5
    def execute(self):
        self.vol = CloudVolume(self.cloudpath, mip=self.mip, cdn_cache=False)

        fragment_filenames = self.get_filenames()
        skels = self.get_skeletons_by_segid(fragment_filenames)

        skeletons = []
        for segid, frags in skels.items():
            skeleton = self.fuse_skeletons(frags)
            skeleton = kimimaro.postprocess(skeleton, self.dust_threshold,
                                            self.tick_threshold)
            skeleton.id = segid
            skeletons.append(skeleton)

        self.vol.skeleton.upload(skeletons)

        if self.delete_fragments:
            with Storage(self.cloudpath, progress=True) as stor:
                stor.delete_files(fragment_filenames)
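For reference, `Storage` is the older cloud-volume I/O class; the equivalent cleanup with the newer `CloudFiles` API (which a later variant in this listing uses) is roughly:

from cloudfiles import CloudFiles

cf = CloudFiles(self.cloudpath, progress=True)
cf.delete(fragment_filenames)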
Example No. 6
    def process_skeletons(self, locations, cv):
        filenames = set(itertools.chain(*locations.values()))
        labels = set(locations.keys())
        unfused_skeletons = self.get_unfused(labels, filenames, cv)

        skeletons = {}
        for label, skels in tqdm(unfused_skeletons.items(),
                                 desc="Postprocessing",
                                 disable=(not self.progress)):
            skel = PrecomputedSkeleton.simple_merge(skels)
            skel.id = label
            skel.extra_attributes = [
                attr for attr in skel.extra_attributes
                if attr['data_type'] == 'float32'
            ]
            skeletons[label] = kimimaro.postprocess(
                skel,
                dust_threshold=self.dust_threshold,  # voxels 
                tick_threshold=self.tick_threshold,  # nm
            ).to_precomputed()

        return skeletons
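For clarity, the `itertools.chain(*...)` line above simply flattens the per-label filename lists into one deduplicated set. A toy example with made-up filenames:

import itertools

locations = {1: ['a:0', 'a:1'], 2: ['a:1', 'b:0']}
filenames = set(itertools.chain(*locations.values()))
print(sorted(filenames))  # ['a:0', 'a:1', 'b:0']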
Example No. 7
    def execute(self):
        self.vol = CloudVolume(self.cloudpath, cdn_cache=False)
        self.vol.mip = self.vol.skeleton.meta.mip

        fragment_filenames = self.get_filenames()
        skels = self.get_skeletons_by_segid(fragment_filenames)

        skeletons = []
        for segid, frags in skels.items():
            skeleton = self.fuse_skeletons(frags)
            if (self.max_cable_length is None
                    or skeleton.cable_length() <= self.max_cable_length):
                skeleton = kimimaro.postprocess(skeleton, self.dust_threshold,
                                                self.tick_threshold)
            skeleton.id = segid
            skeletons.append(skeleton)

        self.vol.skeleton.upload(skeletons)

        if self.delete_fragments:
            cf = CloudFiles(self.cloudpath, progress=True)
            cf.delete(fragment_filenames)
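The read-side counterpart of the upload above, using the standard CloudVolume skeleton API (`cloudpath` and `segid` are placeholders):

from cloudvolume import CloudVolume

vol = CloudVolume(cloudpath)
skel = vol.skeleton.get(segid)  # fetch the fused skeleton back
print(skel.cable_length())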
Example No. 8
def complex_merge(skel):
    return kimimaro.postprocess(
        skel,
        dust_threshold=1000,  # voxels
        tick_threshold=1300,  # nm
    )
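A hypothetical call site for the helper above, mirroring the merge pattern in the earlier examples (`fragments` is an assumed list of skeleton pieces):

skel = PrecomputedSkeleton.simple_merge(fragments).consolidate()
skel = complex_merge(skel)  # drop dust under 1000 voxels, prune ticks under 1300 nm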
Example No. 9
    for label in all_ids:
        print("----------------------------------------------------")
        print("Combining skeletons for label " + str(label))
        all_skels = []

        for bz in bz_range:
            for by in by_range:
                for bx in bx_range:

                    fname = output_folder + "skel_out-label{:09d}-{:04d}z-{:04d}y-{:04d}x.swc".format(label,bz,by,bx)
                    if os.path.exists(fname):
                        skel_read = readSkelFromFile(fname, bz, by, bx, bsize, anisotropy)
                        print("Adding part from block " + str((bz,by,bx)) + " with " + str(len(skel_read.vertices)) + " vertices")
                        all_skels.insert(0, skel_read)

        skel_joined = kimimaro.join_close_components(all_skels, radius=1500) # 1500 units threshold
        print("Skeleton joined with " + str(len(skel_joined.vertices)) + " vertices")
        skel_final = kimimaro.postprocess(skel_joined, dust_threshold=1000, tick_threshold=3500)
        print("Skeleton final with " + str(len(skel_final.vertices)) + " vertices")
        writeFinalSkeletonToFile(skel_final, label, output_folder)

        try:
            print("xmin, xmax: " + str(np.min(skel_final.vertices[:,0])) + ", " + str(np.max(skel_final.vertices[:,0])))
            print("ymin, ymax: " + str(np.min(skel_final.vertices[:,1])) + ", " + str(np.max(skel_final.vertices[:,1])))
            print("zmin, zmax: " + str(np.min(skel_final.vertices[:,2])) + ", " + str(np.max(skel_final.vertices[:,2])))
        except ValueError:
            # np.min/np.max raise on an empty vertex array
            print("not able to print")

    print("total time combine: " + str(time.time()-start_time_combine))
    print("total time thinning: " + str(total_time_thinning))
Example No. 10
def ConnectSkeletons(data):

    for label in range(0, data.NLabels()):

        # print("--------------------- \n processing label {}".format(label))

        # start timing statistics
        total_time = time.time()

        read_time = time.time()
        # list to store skeletons of different blocks
        all_skels = []
        # iterate over all blocks and read in skeletons for respective label
        for iz in range(data.StartZ(), data.EndZ()):
            for iy in range(data.StartY(), data.EndY()):
                for ix in range(data.StartX(), data.EndX()):

                    # get the location for the temporary directory
                    tmp_directory = data.TempBlockDirectory(iz, iy, ix)

                    fname = tmp_directory + "/skel_out-label{:09d}-{:04d}z-{:04d}y-{:04d}x.swc".format(
                        label, iz, iy, ix)
                    if os.path.exists(fname):
                        skel_read = readSkelFromFile(fname, iz, iy, ix,
                                                     data.BlockSize(),
                                                     data.Resolution())
                        all_skels.insert(0, skel_read)
                        # print("Adding part from block " + str((iz,iy,ix)) + " with " + str(len(skel_read.vertices)) + " vertices")

        read_time = time.time() - read_time

        # if label not present, skip
        if len(all_skels) == 0: continue

        # output directory for final skeletons
        out_dir = data.SkeletonOutputDirectory()

        # join skeleton components to one skeleton
        join_time = time.time()
        skel_joined = kimimaro.join_close_components(
            all_skels, radius=1500)  # 1500 units threshold
        join_time = time.time() - join_time
        # writeSkeletonToFile(skel_joined, label, out_dir, "joined")

        # postprocess and connect skeleton parts
        postprocess_time = time.time()
        skel_final = kimimaro.postprocess(skel_joined,
                                          dust_threshold=1000,
                                          tick_threshold=0)
        postprocess_time = time.time() - postprocess_time

        # skip if the joined skeleton has no vertices (vertices is an (N, 3) array)
        if skel_final.vertices.shape[0] == 0: continue

        # write final skeleton to file
        write_time = time.time()
        writeSkeletonToFile(skel_final, label, out_dir, "final")
        write_time = time.time() - write_time

        total_time = time.time() - total_time

        print('Read Time: {:0.2f} seconds.'.format(read_time))
        print('Join Time: {:0.2f} seconds.'.format(join_time))
        print('Postprocess Time: {:0.2f} seconds.'.format(postprocess_time))
        print('Write Time: {:0.2f} seconds.'.format(write_time))
        print('Total Time: {:0.2f} seconds.'.format(total_time))

        # output timing statistics
        timing_directory = '{}/skeletons-combine'.format(
            data.TimingDirectory())
        os.makedirs(timing_directory, exist_ok=True)
        timing_filename = '{}/{:016d}.txt'.format(timing_directory, label)
        with open(timing_filename, 'w') as fd:
            fd.write('Read Time: {:0.2f} seconds.\n'.format(read_time))
            fd.write('Join Time: {:0.2f} seconds.\n'.format(join_time))
            fd.write('Postprocess Time: {:0.2f} seconds.\n'.format(
                postprocess_time))
            fd.write('Write Time: {:0.2f} seconds.\n'.format(write_time))
            fd.write('Total Time: {:0.2f} seconds.\n'.format(total_time))
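A note on the emptiness check above: `vertices` is an (N, 3) array, so the vertex count lives in `shape[0]`. Toy demonstration:

import numpy as np

vertices = np.zeros((0, 3), dtype=np.float32)  # an empty skeleton's vertices
print(vertices.shape[0] == 0)  # True: no vertices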