Example no. 1
0
 def save_color_data(self, u_info, dir):
     """Export the color map (loaded from the HDF5 color file) to a CSV in *dir*."""
     rows = m.load_hdf5(u_info.color_map_file, u_info.hdf_color_name)
     out_path = dir + os.sep + u_info.export_col_name + '.csv'
     print(out_path)
     with open(out_path, 'w') as csv_file:
         csv.writer(csv_file, lineterminator='\n').writerows(rows)
Example no. 2
0
    def run(self, u_info):
        """Commit the current editing session to the tile store.

        Two phases:
        1. Move edited tiles from the temporary area into the main tile
           directory (per w level, per z slice).
        2. Apply the pending merge table to every tile file, then clear the
           merge table and the undo/redo flags, and update the database.
        """
        ## Load DB
        db = DB(u_info)
        self.u_info = u_info

        ##
        ## Update split and adjust
        ##
        for iz in range(db.num_tiles_z):

            print('Saving: ', iz, '/', db.num_tiles_z)
            # Check temporary data; skip slices that were never edited.
            data_path = u_info.tmp_tile_ids_path + u_info.tile_path_wz.format(0, iz)
            if not os.path.isdir(data_path):
                continue

            for iw in range(db.num_tiles_w):
                source_dir = u_info.tmp_tile_ids_path \
                                 + u_info.tile_path_wz.format(iw, iz)
                destination_dir = u_info.tile_ids_path \
                                      + u_info.tile_path_wz.format(iw, iz)
                # BUG FIX: rmtree raised FileNotFoundError when the
                # destination did not exist yet; guard before removing.
                if os.path.isdir(destination_dir):
                    shutil.rmtree(destination_dir)
                shutil.move(source_dir, destination_dir)

        ##
        ## Update merges
        ##
        # Skip the (expensive) full tile scan when there is nothing to merge;
        # the original loaded every tile even with an empty merge table and
        # never wrote anything back.
        if u_info.merge_table:
            for iw in range(db.num_tiles_w):
                for iz, iy, ix in itertools.product(range(db.num_tiles_z), range(db.num_tiles_y_at_w[iw]), range(db.num_tiles_x_at_w[iw])):

                    ### Load tile file
                    tile_ids_filename = u_info.tile_ids_path + u_info.tile_ids_filename_wzyx.format( iw, iz, iy, ix )
                    tile_ids = m.load_hdf5( tile_ids_filename, u_info.tile_var_name )

                    ## Remap every merged label to its representative id.
                    for mm in u_info.merge_table.keys():
                        mm_id = self.lookup_label(mm, u_info.merge_table)
                        tile_ids[ tile_ids == int(mm) ] = mm_id

                    ### Save tile file
                    # BUG FIX: the save was mis-indented inside the merge loop,
                    # rewriting the same tile once per merge-table entry.
                    m.save_hdf5(tile_ids_filename, u_info.tile_var_name, tile_ids)

        u_info.merge_table = {}
        u_info.flag_undo = 0
        u_info.flag_redo = 0

        ## Update
        print('Updating database.')
        db.Update()
        print('Successfully saved.')
Example no. 3
0
    def ObtainUpdateIdsInfo(self):
        """Scan every tile file and collect per-segment-id statistics.

        Returns:
            id_tile_list: np.uint32 array of (id, iw, iz, iy, ix) rows,
                sorted so that rows with the same id are adjacent.
            id_max: the largest segment id seen across all tiles.
            id_counts: np.int64 array where id_counts[i] is the pixel count
                of id i, accumulated at the base resolution (iw == 0) only.
        """
        ###
        id_tile_list = []
        id_max = 0
        # Grown in place (resize) as larger ids are discovered.
        id_counts = np.zeros(0, dtype=np.int64)
        for iw in range(self.num_tiles_w):
            for iz, iy, ix in itertools.product(
                    range(self.num_tiles_z), range(self.num_tiles_y_at_w[iw]),
                    range(self.num_tiles_x_at_w[iw])):

                ### Load tile file
                ### tile_ids( ( tile_num_pixels_y, tile_num_pixels_x ), np.uint32 )

                tile_ids_filename = self.u_info.tile_ids_path + self.u_info.tile_ids_filename_wzyx.format(
                    iw, iz, iy, ix)
                tile_ids = m.load_hdf5(tile_ids_filename,
                                       self.u_info.tile_var_name)
                unique_tile_ids = np.unique(tile_ids)

                ## Update database

                # Max id: grow the counts array whenever a larger id appears
                # (resize zero-fills the newly added tail).
                current_max = np.max(unique_tile_ids)
                if id_max < current_max:
                    id_max = current_max
                    id_counts.resize(id_max + 1)
                    # print id_max

                # id list: record every (id, tile-coordinate) pairing.
                for unique_tile_id in unique_tile_ids:
                    id_tile_list.append((unique_tile_id, iw, iz, iy, ix))

                # Pixel number of each id; counted only at the base mip level
                # (iw == 0) so downsampled copies are not double-counted.
                if iw == 0:
                    current_ids_counts = np.bincount(tile_ids.ravel())
                    current_ids_counts_ids = np.nonzero(current_ids_counts)[0]
                    id_counts[current_ids_counts_ids] = \
                        id_counts[current_ids_counts_ids] + np.int64(current_ids_counts[current_ids_counts_ids])

        ## Sort the tile list so that the same id appears together
        id_tile_list = np.array(sorted(id_tile_list), np.uint32)

        ## Max color number check
        if (id_max >= self.u_info.ncolors):
            print('Number of panels exceeds max_number')

        return id_tile_list, id_max, id_counts
Example no. 4
0
    def __init__(self, u_info):
        """Build segmentInfo.json for the annotator.

        Reads segment records from the sqlite database, attaches an RGB color
        (from the HDF5 color map, gray fallback for out-of-range ids) to each
        record, and writes the result to data_annotator_path/segmentInfo.json.
        """
        ## User info
        self.u_info = u_info

        ## Ensure the annotator output directory exists.
        if not os.path.isdir(self.u_info.data_annotator_path):
            os.makedirs(self.u_info.data_annotator_path)

        ## Load color file (row index == segment id, columns == R, G, B).
        colordata = m.load_hdf5(self.u_info.color_map_file,
                                self.u_info.hdf_color_name)
        colnum = colordata.shape[0]

        ## Load database file.
        # BUG FIX: close the connection even if the query raises.
        con = sqlite3.connect(self.u_info.segment_info_db_file)
        try:
            cur = con.cursor()
            cur.execute("select * from segmentInfo;")
            data = cur.fetchall()
        finally:
            con.close()

        keys = ['id', 'name', 'size', 'confidence']
        data_dict = [dict(zip(keys, valuerecord)) for valuerecord in data]

        for datum_dict in data_dict:
            # Renamed from 'id', which shadowed the builtin.
            seg_id = datum_dict['id']
            if seg_id >= colnum:
                # Ids beyond the color table fall back to mid-gray.
                col = {'r': 128, 'g': 128, 'b': 128, 'act': 0}
            else:
                col = {
                    'r': int(colordata[seg_id][0]),
                    'g': int(colordata[seg_id][1]),
                    'b': int(colordata[seg_id][2]),
                    'act': 0,
                }
            datum_dict.update(col)

        ##
        ## Save
        ##
        with open(
                os.path.join(self.u_info.data_annotator_path,
                             "segmentInfo.json"), 'w') as f:
            json.dump(data_dict, f, indent=2, ensure_ascii=False)
Example no. 5
0
    def run(self, u_info, dir, fname, ftype, startid, numdigit, flag):
        """Export images ('images') or segmentation ids ('ids') to *dir*.

        Per-slice formats (PNG*/TIF*) write one file per z slice named
        fname + zero-padded(iz + startid). Volume formats (MTIF*/NUMPY*/HDF64)
        accumulate all slices and write a single file named fname.

        Returns False if *flag* is neither 'images' nor 'ids'.
        """
        ## Load DB
        db = DB(u_info)
        print("Tile Num: z {0}, y {1}, x {2}".format(db.num_tiles_z, db.num_tiles_y, db.num_tiles_x))

        ## Makedir
        #self.mkdir_safe(dir)

        ## Save ColorInfo & SegmentationInfo
        if flag == 'ids':
            self.save_color_data(u_info, dir)
            self.save_segmentInfo(u_info, dir)

        ## Volume storage for the whole-stack output formats.
        VOLUME_FORMAT = ["MTIF16G", "MTIF8G", "MTIF8C", "NUMPY32", "NUMPY32C", "HDF64"]
        if ftype in VOLUME_FORMAT:
            volume_images_ids = np.zeros((db.num_voxels_z, db.num_voxels_y, db.num_voxels_x), np.uint32)

        # BUG FIX: load the color map once; it was reloaded from HDF5 for
        # every z slice in the PNG8C/TIF8C branches.
        colordata = None
        if ftype in ("PNG8C", "TIF8C", "MTIF8C"):
            colordata = m.load_hdf5(u_info.color_map_file, u_info.hdf_color_name)

        ##
        ## Export image/segmentation files
        ##
        print("Tile Num: z {0}, y {1}, x {2}".format(db.num_tiles_z, db.num_tiles_y, db.num_tiles_x))
        iw = 0
        for iz in range(db.num_tiles_z):

            print("iz ", iz)
            merged_images_ids = np.zeros((db.canvas_size_y, db.canvas_size_x), np.uint32)
            for iy, ix in itertools.product(range(db.num_tiles_y), range(db.num_tiles_x)):

                if flag == 'images':
                    filename = u_info.tile_images_path + u_info.tile_images_filename_wzyx.format(iw, iz, iy, ix)
                    print(filename)
                    # BUG FIX: close the PIL handle instead of leaking it.
                    with PIL.Image.open(filename) as img:
                        tile_image = np.asarray(img)
                elif flag == 'ids':
                    filename = u_info.tile_ids_path + u_info.tile_ids_filename_wzyx.format(iw, iz, iy, ix)
                    print(filename)
                    tile_image = m.load_hdf5(filename, u_info.tile_var_name)
                else:
                    return False

                # Paste the tile at its voxel offset on the canvas.
                y = iy * db.num_voxels_per_tile_y
                x = ix * db.num_voxels_per_tile_x
                merged_images_ids[y: y + db.num_voxels_per_tile_y, x: x + db.num_voxels_per_tile_x] = tile_image

            # Crop by original image size (canvas may be padded to tile size).
            merged_images_ids = merged_images_ids[0:db.num_voxels_y, 0:db.num_voxels_x]

            # Filename: fname + zero-padded slice number (typo 'frefix' fixed).
            current_prefix = dir + os.sep + fname + str(int(iz + startid)).zfill(numdigit)
            print(current_prefix)

            if ftype in VOLUME_FORMAT:
                volume_images_ids[iz, :, :] = merged_images_ids
            elif ftype == "PNG16G":
                m.save_png16(merged_images_ids, current_prefix + ".png")
            elif ftype == "PNG8G":
                m.save_png8(merged_images_ids, current_prefix + ".png")
            elif ftype == "PNG8C":
                m.save_pngc(merged_images_ids, current_prefix + ".png", colordata)
            elif ftype == "TIF16G":
                m.save_tif16(merged_images_ids, current_prefix + ".tif")
            elif ftype == "TIF8G":
                m.save_tif8(merged_images_ids, current_prefix + ".tif")
            elif ftype == "TIF8C":
                m.save_tifc(merged_images_ids, current_prefix + ".tif", colordata)
            else:
                print("Export filetype error (Internal Error).")

        ##
        ## Whole-volume save for MTIF*/NUMPY*/HDF64 formats.
        ##
        current_prefix = dir + os.sep + fname
        print('Save file to ', current_prefix)
        if ftype == "MTIF16G":
            volume_images_ids = volume_images_ids.astype(np.uint16)
            tifffile.imsave(current_prefix + ".tif", volume_images_ids)
        elif ftype == "MTIF8G":
            volume_images_ids = volume_images_ids.astype(np.uint8)
            tifffile.imsave(current_prefix + ".tif", volume_images_ids)
        elif ftype == "MTIF8C":
            print('Multi-tiff 8 color, save.')
            volume_images_ids = self.gen_col_multi(volume_images_ids, colordata)
            tifffile.imsave(current_prefix + ".tif", volume_images_ids)
        elif ftype == "NUMPY32":
            volume_images_ids = volume_images_ids.astype(np.uint32)
            np.save(current_prefix + ".npy", volume_images_ids)
        elif ftype == "NUMPY32C":
            volume_images_ids = volume_images_ids.astype(np.uint32)
            np.savez(current_prefix + ".npz", stack=volume_images_ids)
        elif ftype == "HDF64":
            volume_images_ids = volume_images_ids.astype(np.int64)
            m.save_hdf5(current_prefix + ".h5", "stack", volume_images_ids)

        print('Images/segmentations were Exported.')