def load_all_data(self, split, split_type, modality='MR', normalise=True, value_crop=True, segmentation_option='-1'):
    """
    Load all images, unlabelled and labelled, i.e. images from all cardiac phases.
    :param split:       Cross validation split: can be 0, 1, 2.
    :param split_type:  Cross validation type: can be training, validation, test, all.
    :param modality:    Data modality. Unused here.
    :param normalise:   Use normalised data: can be True/False.
    :param value_crop:  Crop extreme values: can be True/False.
    :return:            a Data object.
    """
    images, index, slice = self.load_unlabelled_images('liverct', split, split_type, True, normalise,
                                                       value_crop, modality=modality)
    masks = np.zeros(shape=(images.shape[:-1]) + (1,))
    scanner = np.array([modality] * index.shape[0])
    return Data(images, masks, '-1', index, slice, scanner)
def load_unlabelled_data(self, split, split_type, modality='LGE', normalise=True, value_crop=True):
    """
    Load unlabelled data. In ACDC, this contains images from the cardiac phases between ES and ED.
    :param split:       Cross validation split: can be 0, 1, 2.
    :param split_type:  Cross validation type: can be training, validation, test, all.
    :param modality:    Data modality. Unused here.
    :param normalise:   Use normalised data: can be True/False.
    :param value_crop:  Crop extreme values: can be True/False.
    :return:            a Data object.
    """
    images, index, slice = self.load_unlabelled_images('liverct', split, split_type, False, normalise,
                                                       value_crop, modality=modality)
    masks = np.zeros(shape=(images.shape[:-1]) + (1,))
    scanner = np.array([modality] * index.shape[0])
    return Data(images, masks, '-1', index, slice, scanner)
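# Usage sketch for the two loaders above (hedged: the loader class name below is hypothetical;
# only the method signatures are taken from this file):
#
#     loader = LiverCTLoader()                                    # hypothetical class name
#     all_data = loader.load_all_data(split=0, split_type='training')
#     unlab    = loader.load_unlabelled_data(split=0, split_type='training')
#     print(all_data.images.shape, unlab.images.shape)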
def load_unlabelled_data(self, split, split_type, modality='MR', normalise=True, value_crop=True):
    """
    Load unlabelled ACDC images and assign each slice the scanner type of its volume.
    :return: a Data object with empty masks and a per-slice scanner array.
    """
    images, index = self.base_load_unlabelled_images('acdc', split, split_type, False, normalise, value_crop)
    masks = np.zeros(shape=(images.shape[:-1]) + (1,))

    # Map each volume id in the index to its scanner type.
    vol_scanner = self.load_scanner_type()
    scanner = index.copy()
    for v in self.volumes:
        scanner[index == v] = vol_scanner[v]

    return Data(images, masks, index, scanner)
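# Minimal standalone sketch of the per-volume scanner mapping used above (hedged: the values
# below are made up for illustration; `index` holds one volume id per slice and `vol_scanner`
# maps volume id -> scanner label):
#
#     import numpy as np
#     index = np.array([0, 0, 1, 1, 2])
#     vol_scanner = {0: 'scanner_A', 1: 'scanner_B', 2: 'scanner_A'}
#     scanner = index.copy().astype(object)   # object dtype so string labels fit
#     for v in np.unique(index):
#         scanner[index == v] = vol_scanner[v]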
def load_labelled_data(self, split, split_type, modality, normalise=True, downsample=1, root_folder=None):
    data = self.load_all_modalities_concatenated(split, split_type, downsample)
    images_t1 = data.get_images_modi(0)
    images_t2 = data.get_images_modi(1)
    labels_t1 = data.get_masks_modi(0)
    labels_t2 = data.get_masks_modi(1)

    if modality == 'all':
        all_images = np.concatenate([images_t1, images_t2], axis=0)
        all_labels = np.concatenate([labels_t1, labels_t2], axis=0)
        all_index = np.concatenate([data.index, data.index.copy()], axis=0)
    elif modality == 't1':
        all_images = images_t1
        all_labels = labels_t1
        all_index = data.index
    elif modality == 't2':
        all_images = images_t2
        all_labels = labels_t2
        all_index = data.index
    else:
        raise Exception('Unknown modality: %s' % modality)

    assert split_type in ['training', 'validation', 'test', 'all'], split_type
    assert all_images.max() - 1 < 0.01 and all_images.min() + 1 < 0.01, \
        'max: %.3f, min: %.3f' % (all_images.max(), all_images.min())
    self.log.debug('Loaded compressed data of shape: ' + str(all_images.shape) + ' ' + str(all_index.shape))

    if split_type == 'all':
        return Data(all_images, all_labels, all_index, 1)

    # Select images belonging to the volumes of the split_type (training, validation, test)
    volumes = self.splits()[split][split_type]
    all_images = np.concatenate([all_images[all_index == v] for v in volumes])

    assert all_labels.max() == 1 and all_labels.min() == 0, \
        'max: %d - min: %d' % (all_labels.max(), all_labels.min())
    all_masks = np.concatenate([all_labels[all_index == v] for v in volumes])
    assert all_images.shape[0] == all_masks.shape[0]

    all_index = np.concatenate([all_index[all_index == v] for v in volumes])
    assert all_images.shape[0] == all_index.shape[0]

    self.log.debug(split_type + ' set: ' + str(all_images.shape))
    return Data(all_images, all_masks, all_index, 1)
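# Usage sketch (hedged: the class name is hypothetical; 'all', 't1' and 't2' are the modality
# values accepted by the method above):
#
#     loader = MultiModalCardiacLoader()       # hypothetical class name
#     t1_data = loader.load_labelled_data(split=0, split_type='training', modality='t1')
#     both    = loader.load_labelled_data(split=0, split_type='training', modality='all')
#     print(t1_data.images.shape, both.images.shape)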
def load_labelled_data(self, split, split_type, modality='MR', normalise=True, value_crop=True, downsample=1):
    """
    Load labelled data, and return a Data object. In ACDC there are ES and ED annotations.
    Preprocessed data are saved in .npz files. If they don't exist, load the original images and preprocess.

    :param split:       Cross validation split: can be 0, 1, 2.
    :param split_type:  Cross validation type: can be training, validation, test, all.
    :param modality:    Data modality. Unused here.
    :param normalise:   Use normalised data: can be True/False.
    :param value_crop:  Crop extreme values: can be True/False.
    :param downsample:  Downsample data to smaller size. Only used for testing.
    :return:            a Data object.
    """
    if split < 0 or split > 4:
        raise ValueError('Invalid value for split: %d. Allowed values are 0, 1, 2.' % split)
    if split_type not in ['training', 'validation', 'test', 'all']:
        raise ValueError('Invalid value for split_type: %s. Allowed values are training, validation, test, all'
                         % split_type)

    npz_prefix = 'norm_' if normalise else 'unnorm_'

    # If numpy arrays are not saved, load and process raw data
    if not os.path.exists(os.path.join(self.data_folder, npz_prefix + 'acdc_images.npz')):
        images, masks_lv, masks_rv, masks_myo, index = self.load_raw_labelled_data(normalise, value_crop)

        # save numpy arrays
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_images'), images)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_lv'), masks_lv)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_rv'), masks_rv)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_myo'), masks_myo)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_index'), index)
    # Load data from saved numpy arrays
    else:
        images = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_images.npz'))['arr_0']
        masks_lv = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_lv.npz'))['arr_0']
        masks_rv = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_rv.npz'))['arr_0']
        masks_myo = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_myo.npz'))['arr_0']
        index = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_index.npz'))['arr_0']

    assert images is not None and masks_myo is not None and masks_lv is not None and masks_rv is not None \
        and index is not None, 'Could not find saved data'

    assert images.max() == 1 and images.min() == -1, \
        'Images max=%.3f, min=%.3f' % (images.max(), images.min())

    self.log.debug('Loaded compressed acdc data of shape: ' + str(images.shape) + ' ' + str(index.shape))

    scanner = np.array([modality] * index.shape[0])

    # Case to load data from all splits.
    if split_type == 'all':
        masks = np.concatenate([masks_myo, masks_lv, masks_rv], axis=-1)
        return Data(images, masks, index, scanner, downsample)

    # Select images belonging to the volumes of the split_type (training, validation, test)
    volumes = self.splits()[split][split_type]
    images = np.concatenate([images[index == v] for v in volumes])

    masks = np.concatenate([masks_myo, masks_lv, masks_rv], axis=-1)
    assert masks.max() == 1 and masks.min() == 0, 'Masks max=%.3f, min=%.3f' % (masks.max(), masks.min())
    masks = np.concatenate([masks[index == v] for v in volumes])
    assert images.shape[0] == masks.shape[0]

    # create a volume index
    index = np.concatenate([index[index == v] for v in volumes])
    scanner = np.array([modality] * index.shape[0])
    assert images.shape[0] == index.shape[0]

    self.log.debug(split_type + ' set: ' + str(images.shape))
    return Data(images, masks, index, scanner, downsample)
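# Standalone sketch of the .npz caching pattern used above (hedged: the path and array
# contents are illustrative placeholders, not the exact files this loader writes):
#
#     import os
#     import numpy as np
#     cache = os.path.join('/tmp', 'norm_acdc_images.npz')        # illustrative path
#     if not os.path.exists(cache):
#         images = np.zeros((4, 224, 224, 1))                     # placeholder preprocessed data
#         np.savez_compressed(cache[:-len('.npz')], images)       # writes /tmp/norm_acdc_images.npz
#     else:
#         images = np.load(cache)['arr_0']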
def load_labelled_data(self, split, split_type, modality='LGE', normalise=True, value_crop=True, downsample=1,
                       segmentation_option=-1):
    """
    Load labelled data, and return a Data object. Annotations contain liver (anatomy) and
    tumour (pathology) masks. Preprocessed data are saved in .npz files. If they don't exist,
    load the original images and preprocess.

    :param split:       Cross validation split: can be 0, 1, 2.
    :param split_type:  Cross validation type: can be training, validation, test, all.
    :param modality:    Data modality. Unused here.
    :param normalise:   Use normalised data: can be True/False.
    :param value_crop:  Crop extreme values: can be True/False.
    :param downsample:  Downsample data to smaller size. Only used for testing.
    :return:            a Data object.
    """
    # if segmentation_option == 0:
    #     input("Segmentation 0")

    if split < 0 or split > 4:
        raise ValueError('Invalid value for split: %d. Allowed values are 0, 1, 2.' % split)
    if split_type not in ['training', 'validation', 'test', 'all']:
        raise ValueError('Invalid value for split_type: %s. Allowed values are training, validation, test, all'
                         % split_type)

    npz_prefix = 'norm_' if normalise else 'unnorm_'

    def _only_get_pathology_data():
        # Keep only the slices that contain pathology (non-empty tumour mask).
        data_num = masks_tumour.shape[0]
        new_images, new_anato_masks, new_patho_masks, new_index, news_slice = [], [], [], [], []
        for ii in range(data_num):
            if np.sum(patho_masks[ii, :, :, :]) == 0:
                continue
            new_images.append(np.expand_dims(images[ii, :, :, :], axis=0))
            new_anato_masks.append(np.expand_dims(anato_masks[ii, :, :, :], axis=0))
            new_patho_masks.append(np.expand_dims(patho_masks[ii, :, :, :], axis=0))
            new_index.append(index[ii])
            news_slice.append(slice[ii])
        new_images = np.concatenate(new_images)
        new_anato_masks = np.concatenate(new_anato_masks)
        new_patho_masks = np.concatenate(new_patho_masks)
        new_index = np.concatenate(np.expand_dims(new_index, axis=0))
        news_slice = np.concatenate(np.expand_dims(news_slice, axis=0))
        return new_images, new_anato_masks, new_patho_masks, new_index, news_slice

    # If numpy arrays are not saved, load and process raw data
    if not os.path.exists(os.path.join(self.data_folder, npz_prefix + 'liverct_image.npz')):
        if modality == 'LGE':
            value_crop = False
        images, masks_liver, masks_tumour, patient_index, index, slice = \
            self.load_raw_labelled_data(normalise, value_crop)

        # save numpy arrays
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'liverct_image'), images)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'liverct_liver_mask'), masks_liver)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'liverct_tumour_mask'), masks_tumour)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'liverct_patienet_index'), patient_index)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'liverct_index'), index)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'liverct_slice'), slice)
    # Load data from saved numpy arrays
    else:
        images = np.load(os.path.join(self.data_folder, npz_prefix + 'liverct_image.npz'))['arr_0']
        masks_liver = np.load(os.path.join(self.data_folder, npz_prefix + 'liverct_liver_mask.npz'))['arr_0']
        masks_tumour = np.load(os.path.join(self.data_folder, npz_prefix + 'liverct_tumour_mask.npz'))['arr_0']
        # loaded from the same file name used when saving above
        patient_index = np.load(os.path.join(self.data_folder, npz_prefix + 'liverct_patienet_index.npz'))['arr_0']
        index = np.load(os.path.join(self.data_folder, npz_prefix + 'liverct_index.npz'))['arr_0']
        slice = np.load(os.path.join(self.data_folder, npz_prefix + 'liverct_slice.npz'))['arr_0']

    assert images is not None and masks_liver is not None and masks_tumour is not None \
        and index is not None, 'Could not find saved data'

    assert images.max() == 1 and images.min() == -1, \
        'Images max=%.3f, min=%.3f' % (images.max(), images.min())

    self.log.debug('Loaded compressed liverct data of shape: ' + str(images.shape) + ' ' + str(index.shape))

    anato_masks = masks_liver
    patho_masks = masks_tumour
    anato_mask_names = ['liver']
    patho_mask_names = ['tumour']

    images, anato_masks, patho_masks, index, slice = _only_get_pathology_data()

    assert anato_masks.max() == 1 and anato_masks.min() == 0, \
        'Anatomy Masks max=%.3f, min=%.3f' % (anato_masks.max(), anato_masks.min())
    assert patho_masks.max() == 1 and patho_masks.min() == 0, \
        'Pathology Masks max=%.3f, min=%.3f' % (patho_masks.max(), patho_masks.min())

    scanner = np.array([modality] * index.shape[0])

    # Select images belonging to the volumes of the split_type (training, validation, test)
    volumes = self.splits()[split][split_type]
    images = np.concatenate([images[index == v] for v in volumes])
    anato_masks = np.concatenate([anato_masks[index == v] for v in volumes])
    patho_masks = np.concatenate([patho_masks[index == v] for v in volumes])
    assert images.shape[0] == anato_masks.shape[0] == patho_masks.shape[0], "Num of Images inconsistent"

    # create a volume index
    slice = np.concatenate([slice[index == v] for v in volumes])
    index = np.concatenate([index[index == v] for v in volumes])
    scanner = np.array([modality] * index.shape[0])
    assert images.shape[0] == index.shape[0]

    self.log.debug(split_type + ' set: ' + str(images.shape))
    return Data(images, [anato_masks, patho_masks], [anato_mask_names, patho_mask_names], index, slice, scanner,
                downsample)
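# Usage sketch (hedged: the class name is hypothetical, and the attribute access is an
# assumption about the Data object; the method above returns anatomy and pathology masks
# together with their names):
#
#     loader = LiverCTLoader()                                    # hypothetical class name
#     data = loader.load_labelled_data(split=0, split_type='training', modality='LGE')
#     anato_masks, patho_masks = data.masks                       # assumption: Data exposes the mask list
#     print(anato_masks.shape, patho_masks.shape)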
def load_labelled_data(self, split, split_type, modality='MR', normalise=False, value_crop=True, downsample=1):
    """
    Load labelled ACDC data and return a Data object. Preprocessed data are saved in .npz files;
    if they don't exist, the original images are loaded and preprocessed first. Each slice is
    assigned the scanner type of its volume.
    """
    if split < 0 or split > 4:
        raise ValueError('Invalid value for split: %d. Allowed values are 0, 1, 2.' % split)
    if split_type not in ['training', 'validation', 'test', 'all']:
        raise ValueError('Invalid value for split_type: %s. Allowed values are training, validation, test, all'
                         % split_type)

    npz_prefix = 'norm_' if normalise else 'unnorm_'

    # If numpy arrays are not saved, load and process raw data
    if not os.path.exists(os.path.join(self.data_folder, npz_prefix + 'acdc_images.npz')):
        images, masks_lv, masks_rv, masks_myo, index = self.load_raw_labelled_data(normalise, value_crop)

        # save numpy arrays
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_images'), images)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_lv'), masks_lv)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_rv'), masks_rv)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_myo'), masks_myo)
        np.savez_compressed(os.path.join(self.data_folder, npz_prefix + 'acdc_index'), index)
    # Load data from saved numpy arrays
    else:
        images = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_images.npz'))['arr_0']
        masks_lv = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_lv.npz'))['arr_0']
        masks_rv = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_rv.npz'))['arr_0']
        masks_myo = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_masks_myo.npz'))['arr_0']
        index = np.load(os.path.join(self.data_folder, npz_prefix + 'acdc_index.npz'))['arr_0']

    assert images is not None and masks_myo is not None and masks_lv is not None and masks_rv is not None \
        and index is not None, 'Could not find saved data'

    assert images.max() == 1 and images.min() == -1, \
        'Images max=%.3f, min=%.3f' % (images.max(), images.min())

    self.log.debug('Loaded compressed acdc data of shape: ' + str(images.shape) + ' ' + str(index.shape))

    vol_scanner = self.load_scanner_type()

    # Case to load data from all splits.
    if split_type == 'all':
        masks = np.concatenate([masks_myo, masks_lv, masks_rv], axis=-1)
        scanner = index.copy()
        for v in self.volumes:
            scanner[index == v] = vol_scanner[v]
        return Data(images, masks, index, scanner, downsample)

    # Select images belonging to the volumes of the split_type (training, validation, test)
    volumes = self.splits()[split][split_type]
    images = np.concatenate([images[index == v] for v in volumes])

    masks = np.concatenate([masks_myo, masks_lv, masks_rv], axis=-3)
    assert masks.max() == 1 and masks.min() == 0, 'Masks max=%.3f, min=%.3f' % (masks.max(), masks.min())
    masks = np.concatenate([masks[index == v] for v in volumes])
    assert images.shape[0] == masks.shape[0]

    # create a volume index
    index = np.concatenate([index[index == v] for v in volumes])
    assert images.shape[0] == index.shape[0]

    scanner = index.copy()
    for v in volumes:
        scanner[index == v] = vol_scanner[v]

    self.log.debug(split_type + ' set: ' + str(images.shape))

    # spthermo insertion for resizing acdc data
    # images_resized = np.zeros((images.shape[0], 1, 64, 64))
    # masks_resized = np.zeros((masks.shape[0], 3, 64, 64))
    # for idx in range(images.shape[0]):
    #     resized = cv2.resize(images[idx].squeeze(), dsize=(64, 64), interpolation=cv2.INTER_AREA)
    #     resized = -1 + (resized - np.min(resized)*2)/(np.max(resized) - np.min(resized))
    #     images_resized[idx] = np.expand_dims(resized, axis=0)
    #     m_resized = cv2.resize(masks[idx][0], dsize=(64, 64), interpolation=cv2.INTER_AREA)
    #     m_resized = -1 + (m_resized - np.min(m_resized)*2)/(np.max(m_resized) - np.min(m_resized))
    #     masks_resized[idx][0] = np.expand_dims(m_resized, axis=0)
    #     masks_resized[idx][1] = np.expand_dims(m_resized, axis=0)
    #     masks_resized[idx][2] = np.expand_dims(m_resized, axis=0)

    return Data(images, masks, index, scanner, downsample)
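# Standalone sketch of the volume-based split selection used above (hedged: values are made
# up for illustration; in the real loader, `splits()` returns the cross-validation assignment
# of volume ids to training/validation/test):
#
#     import numpy as np
#     index = np.array([10, 10, 11, 12, 12, 12])    # volume id per slice
#     images = np.random.rand(6, 1, 224, 224)       # illustrative image stack
#     volumes = [10, 12]                            # e.g. the 'training' volumes of one split
#     images_split = np.concatenate([images[index == v] for v in volumes])
#     index_split = np.concatenate([index[index == v] for v in volumes])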