def gen_raw_data(n: int, **kwargs):
    """
    Generate a csbdeep RawData object with random images generated by
    :func:`dispim.neural.datagen.gen_training_data`.

    :param n: number of image pairs to generate
    :param use_noise: whether to add noise to the degraded images (forwarded to gen_training_data)
    :param use_psf: whether to apply a PSF to the degraded images (forwarded to gen_training_data)
    :param use_subsampling: whether to subsample the degraded images (forwarded to gen_training_data)
    :param shape: shape of the generated images (forwarded to gen_training_data)
    :return: a ``RawData`` instance plus the generated PSFs (``psf_as``, ``psf_bs``)
    """
    images_degr, images, psf_as, psf_bs = gen_training_data(n, **kwargs)
    return RawData.from_arrays(images_degr, images, axes='XYZC'), psf_as, psf_bs
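# A minimal usage sketch for gen_raw_data: the keyword arguments and the
# patch size below are illustrative placeholders, not values from the
# original code.
from csbdeep.data import create_patches

raw_data, psf_as, psf_bs = gen_raw_data(8, use_noise=True, use_psf=True)
X, Y, XY_axes = create_patches(
    raw_data=raw_data,
    patch_size=(32, 32, 16, 1),  # illustrative patch shape in XYZC order
    n_patches_per_image=4,
)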
def _create(img_size, img_axes, patch_size, patch_axes):
    U, V = (rng.uniform(size=(n_images,) + img_size) for _ in range(2))
    X, Y, XYaxes = create_patches(
        raw_data            = RawData.from_arrays(U, V, img_axes),
        patch_size          = patch_size,
        patch_axes          = patch_axes,
        n_patches_per_image = n_patches_per_image,
        save_file           = save_file,
    )
    (_X, _Y), val_data, _XYaxes = load_training_data(save_file, verbose=True)
    assert val_data is None
    assert _XYaxes[-1 if backend_channels_last else 1] == 'C'
    _X, _Y = (move_image_axes(u, fr=_XYaxes, to=XYaxes) for u in (_X, _Y))
    assert np.allclose(X, _X, atol=1e-6)
    assert np.allclose(Y, _Y, atol=1e-6)
    assert set(XYaxes) == set(_XYaxes)
    assert load_training_data(save_file, validation_split=0.5)[2] is not None
    assert all(len(x) == 3 for x in load_training_data(save_file, n_images=3)[0])
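# The test helper above closes over several module-level names (rng, n_images,
# n_patches_per_image, save_file, backend_channels_last). A minimal driver
# sketch with placeholder fixture values and illustrative size/axes
# combinations (not the original test parameters):
import numpy as np
from csbdeep.data import RawData, create_patches
from csbdeep.io import load_training_data
from csbdeep.utils import move_image_axes

rng = np.random.RandomState(42)
n_images, n_patches_per_image = 2, 4
save_file = 'data_test.npz'
backend_channels_last = True  # assume TensorFlow's default 'channels_last' layout

_create((64, 64), 'YX', (16, 16), None)
_create((16, 64, 64), 'ZYX', (8, 16, 16), None)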
# Sort both listings so image and label files pair up deterministically
# (os.listdir returns entries in arbitrary order).
imgList = sorted(os.listdir(train_dir))
labelList = sorted(os.listdir(label_dir))

imgArray = []
for image in tqdm(imgList, 'Reading img'):
    imgArray.append(imread(os.path.join(train_dir, image)))

labelArray = []
for label in tqdm(labelList, 'Reading label'):
    labelArray.append(imread(os.path.join(label_dir, label)))

print(imgArray[0].shape)
print(labelArray[0].shape)

raw_data = RawData.from_arrays(imgArray, labelArray, axes='YX')

X, Y, XY_axes = create_patches(
    raw_data=raw_data,
    patch_size=(128, 128, 1),
    patch_axes='YXC',
    n_patches_per_image=25,
    save_file='/mnt/AE3205C73205958D/Data/3dliver_local/pc_adult/2d_slices/imagesXY/image_full/mydata_128x128patch.npz'
)

(X, Y), (X_val, Y_val), axes = load_training_data(
    '/mnt/AE3205C73205958D/Data/3dliver_local/pc_adult/2d_slices/imagesXY/image_full/mydata_128x128patch.npz',
    validation_split=0.1, verbose=True)
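# With the patch file written and reloaded, a typical next step is training a
# CARE network on it. A hedged sketch using the standard csbdeep training API;
# the model name, basedir, and epoch count are placeholders.
from csbdeep.models import Config, CARE
from csbdeep.utils import axes_dict

c = axes_dict(axes)['C']
n_channel_in, n_channel_out = X.shape[c], Y.shape[c]

config = Config(axes, n_channel_in, n_channel_out, train_epochs=50)
model = CARE(config, 'liver_2d_care', basedir='models')  # placeholder name/basedir
history = model.train(X, Y, validation_data=(X_val, Y_val))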
# print('image size =', x.shape)
# print('Z subsample factor =', subsample)
# plt.figure(figsize=(389/100, 389/100))
# plt.imshow(x, cmap='gray')
# plt.show()

# raw_data = RawData.from_folder(
#     basepath    = 'data',
#     source_dirs = ['simulator_data'],
#     target_dir  = 'simulator_data',
#     axes        = 'CYX',
# )
raw_data = RawData.from_arrays(X=[x], Y=[x], axes='CYX')

anisotropic_transform = anisotropic_distortions(
    subsample=1,
    psf=np.ones(3) / 9,  # use the actual PSF here
    psf_axes='Y',
    # poisson_noise=True,
    # gauss_sigma=0.1,
)

X, Y, XY_axes = create_patches(
    raw_data=raw_data,
    patch_size=(x.shape[0], x.shape[1], x.shape[2]),
    n_patches_per_image=1,
    transforms=[anisotropic_transform],
)
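# To see what anisotropic_distortions actually did, one can display the
# degraded/target pair. A hedged sketch: it assumes the patch arrays keep the
# channel axis right after the sample axis, so [0, 0] selects the first
# patch's first channel.
import matplotlib.pyplot as plt

plt.figure(figsize=(8, 4))
plt.subplot(1, 2, 1); plt.imshow(X[0, 0], cmap='gray'); plt.title('degraded (X)')
plt.subplot(1, 2, 2); plt.imshow(Y[0, 0], cmap='gray'); plt.title('target (Y)')
plt.show()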
def get_dataset(pd_scribbles, n_patches_per_image_train=30, n_patches_per_image_val=8,
                patch_size=(128, 128), p_label=0.6, val_perc=0.3, verbose=True, border=False):
    X_train = None
    X_val = None

    for i in range(len(pd_scribbles)):
        ## read image and label
        npz_read = np.load(pd_scribbles['input_dir'][i] + pd_scribbles['input_file'][i])
        image = npz_read['image']
        label = npz_read['label']
        nuclei = np.zeros_like(label)
        nuclei[label > 0] = 1

        ## read scribbles
        npz_read = np.load(pd_scribbles['input_dir'][i] + pd_scribbles['scribble_file'][i])
        scribble = npz_read['scribble']

        raw_image_in = image + 0  # normalize(image, pmin=pmin, pmax=pmax, clip=False)

        ## Sample validation mask
        patch_val_size = [int(image.shape[0] * val_perc), int(image.shape[1] * val_perc)]
        all_back = True
        while all_back:
            val_mask = np.zeros([raw_image_in.shape[0], raw_image_in.shape[1]])
            ix_x = np.random.randint(0, raw_image_in.shape[0] - patch_val_size[0])
            ix_y = np.random.randint(0, raw_image_in.shape[1] - patch_val_size[1])  # y coordinate uses shape[1]
            val_mask[ix_x:ix_x + patch_val_size[0], ix_y:ix_y + patch_val_size[1]] = 1
            # keep the validation window only if it overlaps enough scribble annotation
            if np.sum(val_mask * np.sum(scribble[...], axis=-1)) > 10:
                all_back = False

        ## Generate patches
        raw_data = RawData.from_arrays(raw_image_in[np.newaxis, ...], scribble[np.newaxis, ...])

        ## for plot ##
        if verbose:
            aux = np.zeros([raw_image_in.shape[0], raw_image_in.shape[1], 3])
            if len(raw_image_in.shape) > 2:
                aux[..., 1] = np.sum(raw_image_in, axis=-1) * 0.8
            else:
                aux[..., 1] = raw_image_in * 0.8
            aux[..., 0] = scribble[..., 0]
            aux[..., 2] = np.sum(scribble[..., 1:], axis=2)
        ###

        for group in ['val', 'train']:
            if group == 'val':
                fov_mask = np.array(val_mask)
                n_patches_per_image = n_patches_per_image_val + 0
                if verbose:
                    plt.figure(figsize=(10, 5))
                    plt.subplot(1, 2, 1)
                    plt.title('Validation FOV')
                    plt.imshow(fov_mask[..., np.newaxis] * aux)
            else:
                fov_mask = 1 - np.array(val_mask)
                n_patches_per_image = n_patches_per_image_train + 0
                if verbose:
                    plt.subplot(1, 2, 2)
                    plt.title('Train FOV')
                    plt.imshow(fov_mask[..., np.newaxis] * aux)
                    plt.show()

            # random (mostly unlabeled) patches within the field-of-view mask
            X_aux, Y_aux, axes = generate_patches_syxc(raw_data, patch_size,
                                                       int(n_patches_per_image * (1 - p_label)),
                                                       normalization=None,
                                                       patch_filter=None,
                                                       fov_mask=fov_mask)

            # top up with patches that are guaranteed to contain scribble labels
            n_patches_add = int(n_patches_per_image - X_aux.shape[0])
            if n_patches_add > 0:
                X_labeled_aux, Y_labeled_aux, axes = generate_patches_syxc(raw_data, patch_size, n_patches_add,
                                                                           normalization=None,
                                                                           mask_filter_index=np.arange(scribble.shape[-1]),
                                                                           fov_mask=fov_mask)
                if X_labeled_aux is not None:
                    X_aux = np.concatenate([X_aux, X_labeled_aux], axis=0)
                    Y_aux = np.concatenate([Y_aux, Y_labeled_aux], axis=0)

            if group == 'val':
                if X_val is None:
                    X_val = np.array(X_aux)
                    Y_val = np.array(Y_aux)
                else:
                    X_val = np.concatenate([X_val, X_aux], axis=0)
                    Y_val = np.concatenate([Y_val, Y_aux], axis=0)
            else:
                if X_train is None:
                    X_train = np.array(X_aux)
                    Y_train = np.array(Y_aux)
                else:
                    X_train = np.concatenate([X_train, X_aux], axis=0)
                    Y_train = np.concatenate([Y_train, Y_aux], axis=0)

    print(Y_train.shape, Y_val.shape)

    if border:
        return X_train, Y_train, X_val, Y_val
    else:
        # Merge border and background into a single background channel per output class.
        out_channels = int(Y_train.shape[-1] / 3)
        Y_train_aux = np.zeros([Y_train.shape[0], Y_train.shape[1], Y_train.shape[2], out_channels * 2])
        Y_val_aux = np.zeros([Y_val.shape[0], Y_val.shape[1], Y_val.shape[2], out_channels * 2])
        # print(out_channels, Y_train.shape[2])
        for j in np.arange(out_channels):
            # print(j*2, j*out_channels)
            Y_train_aux[..., 2*j] = np.array(Y_train[..., out_channels*j])  # foreground
            Y_train_aux[..., 2*j+1] = Y_train[..., out_channels*j+1] + Y_train[..., out_channels*j+2]  # border + background are background
            Y_val_aux[..., 2*j] = np.array(Y_val[..., out_channels*j])  # foreground
            Y_val_aux[..., 2*j+1] = Y_val[..., out_channels*j+1] + Y_val[..., out_channels*j+2]  # border + background are background
        return X_train, Y_train_aux, X_val, Y_val_aux
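# A hedged usage sketch for get_dataset, assuming pd_scribbles is a pandas
# DataFrame with 'input_dir', 'input_file', and 'scribble_file' columns that
# point at .npz files containing 'image', 'label', and 'scribble' arrays.
# All paths and file names below are placeholders.
import pandas as pd

pd_scribbles = pd.DataFrame({
    'input_dir':     ['./scribbles/', './scribbles/'],
    'input_file':    ['img_000.npz', 'img_001.npz'],
    'scribble_file': ['scribble_000.npz', 'scribble_001.npz'],
})

X_train, Y_train, X_val, Y_val = get_dataset(
    pd_scribbles,
    n_patches_per_image_train=30,
    n_patches_per_image_val=8,
    patch_size=(128, 128),
    verbose=False,
)
print(X_train.shape, Y_train.shape, X_val.shape, Y_val.shape)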