Example #1
def plot_image_(image_file, compression_factor=2., test_image_name='test.png'):
    """Plots entire SVS/other image.

    Parameters
    ----------
    image_file : str
        Image file.
    compression_factor : float
        Amount to shrink each dimension of the image.
    test_image_name : str
        Output image file.

    """
    import cv2
    import numpy as np
    import dask.array as da
    from pathflowai.utils import svs2dask_array, npy2da
    from pathflowai.visualize import to_pil  # array -> PIL image helper
    if image_file.endswith('.zarr'):
        arr = da.from_zarr(image_file)
    elif image_file.endswith('.npy'):
        arr = npy2da(image_file)
    else:
        arr = svs2dask_array(
            image_file,
            tile_size=1000,
            overlap=0,
            remove_last=True,
            allow_unknown_chunksizes=False)
    # cv2.resize takes dsize as (width, height), so reverse the (rows, cols) shape
    new_size = tuple((np.array(arr.shape[:2])[::-1] /
                      compression_factor).astype(int).tolist())
    arr2 = to_pil(cv2.resize(arr.compute(), dsize=new_size,
                             interpolation=cv2.INTER_CUBIC))
    arr2.save(test_image_name)
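
A minimal call sketch for the function above (the path is illustrative; .svs, .npy, and .zarr inputs are all handled):

plot_image_('inputs/slide1.zarr', compression_factor=4.,
            test_image_name='slide1_thumb.png')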
Example #2
    # Excerpt from a class __init__ (module-level imports assumed:
    # numpy as np, pandas as pd, seaborn as sns, sqlite3,
    # from os.path import join, and npy2da from pathflowai.utils).
    def __init__(self,
                 dask_arr_dict,
                 patch_info_db,
                 compression_factor=3,
                 alpha=0.5,
                 patch_size=224,
                 no_db=False,
                 plot_annotation=False,
                 segmentation=False,
                 n_segmentation_classes=4,
                 input_dir='',
                 annotation_col='annotation',
                 scaling_factor=1.):

        self.segmentation = segmentation
        self.scaling_factor = scaling_factor
        self.segmentation_maps = None
        self.n_segmentation_classes = float(n_segmentation_classes)
        self.pred_palette = sns.cubehelix_palette(start=0, as_cmap=True)
        if not no_db:
            self.compression_factor = compression_factor
            self.alpha = alpha
            self.patch_size = patch_size
            conn = sqlite3.connect(patch_info_db)
            patch_info = pd.read_sql('select * from "{}";'.format(patch_size),
                                     con=conn)
            conn.close()
            self.annotations = {
                str(a): i
                for i, a in enumerate(
                    patch_info['annotation'].unique().tolist())
            }
            self.plot_annotation = plot_annotation
            self.palette = sns.color_palette(
                n_colors=len(list(self.annotations.keys())))
            if 'y_pred' not in patch_info.columns:
                patch_info['y_pred'] = 0.
            self.patch_info = patch_info[[
                'ID', 'x', 'y', 'patch_size', 'annotation', annotation_col
            ]]  # note: the 'y_pred' column is dropped here
            # Disabled: per-slide predictions could be written into 'y_pred' here,
            # e.g. patch_info.loc[patch_info["ID"] == ID, 'y_pred'] = predictions[ID]
            self.patch_info = self.patch_info[np.isin(
                self.patch_info['ID'], np.array(list(dask_arr_dict.keys())))]
        if self.segmentation:
            self.segmentation_maps = {
                slide: npy2da(join(input_dir, '{}_mask.npy'.format(slide)))
                for slide in dask_arr_dict.keys()
            }
        #self.patch_info[['x','y','patch_size']]/=self.compression_factor
        self.dask_arr_dict = {k: v[..., :3] for k, v in dask_arr_dict.items()}
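
A minimal usage sketch for this constructor (assuming the enclosing class is pathflowai.visualize's PredictionPlotter; paths and slide IDs are illustrative):

import dask.array as da
from pathflowai.visualize import PredictionPlotter  # assumed enclosing class

# zarr arrays produced by pathflowai preprocessing, keyed by slide ID
dask_arr_dict = {'slide1': da.from_zarr('inputs/slide1.zarr')}

plotter = PredictionPlotter(dask_arr_dict,
                            patch_info_db='patch_info.db',  # SQLite DB, one table per patch size
                            compression_factor=3,
                            alpha=0.5,
                            patch_size=224,
                            plot_annotation=True)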
Example #3
def alter_masks(mask_dir, output_dir, from_annotations, to_annotations):
    """Map list of values to other values in mask."""
    import os
    import glob
    import numpy as np
    from os.path import join
    from pathflowai.utils import npy2da
    from dask.distributed import Client
    assert len(from_annotations) == len(to_annotations)
    c = Client()  # local dask.distributed scheduler for the compute() calls below
    from_annotations = list(map(int, from_annotations))
    to_annotations = list(map(int, to_annotations))
    os.makedirs(output_dir, exist_ok=True)
    masks = glob.glob(join(mask_dir, '*_mask.npy'))
    from_to = list(zip(from_annotations, to_annotations))
    for mask in masks:
        output_mask = join(output_dir, os.path.basename(mask))
        arr = npy2da(mask)
        original = arr.copy()  # compare against original values so remappings don't chain
        for fr, to in from_to:
            arr[original == fr] = to
        np.save(output_mask, arr.compute())
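
For example, remapping annotation value 3 to 1 and 4 to 2 across every mask in a directory (paths are illustrative):

alter_masks(mask_dir='inputs', output_dir='inputs_remapped',
            from_annotations=[3, 4], to_annotations=[1, 2])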
Example #4
    reference_mask = args.reference_mask
    if not os.path.exists('widths.pkl'):
        m = np.load(reference_mask)
        bbox_df = get_boxes(m)
        # per-class width cutoff: mean plus two standard deviations
        width_stats = bbox_df.groupby('class_label')['width']
        official_widths = dict(width_stats.mean() + 2 * width_stats.std())
        with open('widths.pkl', 'wb') as f:
            pickle.dump(official_widths, f)
    else:
        with open('widths.pkl', 'rb') as f:
            official_widths = pickle.load(f)

    patch_info = load_sql_df(patch_info_file, patch_size)
    IDs = patch_info['ID'].unique()
    #slides = {slide:da.from_zarr(join(input_dir,'{}.zarr'.format(slide))) for slide in IDs}
    masks = {
        mask: npy2da(join(input_dir, '{}_mask.npy'.format(mask)))
        for mask in IDs
    }

    if p_sample < 1.:
        patch_info = patch_info.sample(frac=p_sample)

    # `bb` is assumed to be the brambox bounding-box library (import brambox as bb):
    # start an empty annotation frame if no annotation file exists, else load it.
    if not os.path.exists(annotation_file):
        bbox_df = bb.util.new('annotation').drop(
            columns=['difficult', 'ignore', 'lost', 'occluded', 'truncated'])[[
                'image', 'class_label', 'x_top_left', 'y_top_left', 'width',
                'height'
            ]]
    else:
        bbox_df = bb.io.load('pandas', annotation_file)
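
get_boxes is not shown in this excerpt; the following is a minimal sketch of what such a helper could look like, using skimage.measure to pull per-class bounding boxes out of an integer mask (the function name and column names are chosen to match the usage above, not taken from the source):

import numpy as np
import pandas as pd
from skimage.measure import label, regionprops

def get_boxes(mask):
    """One row per connected component: class label plus its bounding box."""
    rows = []
    for class_label in np.unique(mask):
        if class_label == 0:  # skip background
            continue
        labeled = label(mask == class_label)
        for region in regionprops(labeled):
            min_row, min_col, max_row, max_col = region.bbox
            rows.append(dict(class_label=int(class_label),
                             x_top_left=min_col,
                             y_top_left=min_row,
                             width=max_col - min_col,
                             height=max_row - min_row))
    return pd.DataFrame(rows)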