def main(model, metrics):
    """Clean every predicted segmentation for `model`, optionally with Dice.

    Reads each prediction under data/predict/<model>/*/, removes artifacts,
    and writes the cleaned volume to data/predict_cleaned/<model>/<sample>/.
    When `metrics` is truthy, Dice coefficients against the frame-0 labels
    are printed before and after cleaning.
    """
    if metrics:
        old_metrics = {}
        new_metrics = {}
    # BUG FIX: removed a dead assignment — the original first set
    # files = f'data/predict/{model}/*_0000.nii.gz' and immediately
    # overwrote it with the pattern below.
    files = f'data/predict/{model}/*/*.nii.gz'
    for file in glob.glob(files):
        # Filenames look like <sample>_<frame>.nii.gz; keep the 4-digit frame.
        sample, i = os.path.basename(file).split('_')
        i = i[:4]
        header = util.header(file)
        volume = util.read_vol(file)
        if metrics:
            label = util.read_vol(
                glob.glob(f'data/labels/{sample}/{sample}_0_all_brains.nii.gz')
                [0])
            old_metrics[sample] = util.dice_coef(label, volume)
        # n presumably the number of expected brains (2 for twin scans).
        n = 2 if sample in constants.TWINS else 1
        volume = remove_artifacts(volume, n)
        if metrics:
            new_metrics[sample] = util.dice_coef(label, volume)
        os.makedirs(f'data/predict_cleaned/{model}/{sample}', exist_ok=True)
        util.save_vol(
            volume,
            f'data/predict_cleaned/{model}/{sample}/{sample}_{i}.nii.gz',
            header)
    if metrics:
        print('before: ', old_metrics)
        print('after: ', new_metrics)
def voxel_wise(sample, num_frames):
    """Build per-voxel, z-scored brain time courses for `sample`.

    Returns a (voxels, frames-per-segment) array where each row is one
    brain voxel's signal normalized to zero mean / unit std.
    """
    # Frames per oxygenation segment — assumes num_frames divides evenly
    # across len(O2_TIME[sample]) segments; TODO confirm.
    resting = int(num_frames / len(O2_TIME[sample]))
    # 847000 is a hard-coded upper bound on the flattened voxel index space;
    # NOTE(review): assumes every volume has at most 847000 voxels.
    courses = np.zeros((847000, resting))
    # NOTE(review): the loop starts at 1, so column 0 is never filled.
    for i in range(1, resting):
        # Frame i registered onto this sample's reference frame 0180.
        registered = util.read_vol(
            '{}/{}/{}_{}.nii.gz_to_{}_0180.nii.gz'.format(
                REG_DIR, sample, sample, str(i).zfill(4), sample))
        # Brain segmentation mask for the same frame.
        mask = util.read_vol('{}/{}/{}_{}.nii.gz'.format(
            SEG_DIR, sample, sample, str(i).zfill(4)))
        #print(registered.shape)
        # print(mask.shape)
        # Flat indices of every voxel inside the brain mask.
        brain = np.ravel_multi_index(np.argwhere(mask == 1).T, mask.shape)
        for voxel in brain:
            # print(courses[voxel, i].shape)
            # print(registered[voxel].shape)
            # NOTE(review): indices were raveled with mask.shape but are
            # unraveled with registered.shape — assumes both shapes match.
            courses[voxel, i] = registered[np.unravel_index(
                voxel, registered.shape)]
    # Keep only rows with a nonzero entry.
    # NOTE(review): np.nonzero yields one row index per nonzero element, so
    # rows are duplicated here — np.unique(np.nonzero(...)[0]) was likely
    # intended; confirm before relying on row count.
    courses = courses[np.nonzero(courses)[0], :]
    means = np.mean(courses, axis=1)
    std = np.std(courses, axis=1)
    print(std)
    print(courses.T - means)
    # Z-score each row (transpose so the per-row stats broadcast).
    normalized = ((courses.T - means) / std).T
    return normalized
def preprocess(file, resize=False, rescale=False):
    """Load a volume from `file`, optionally cropping and rescaling it."""
    volume = read_vol(file)
    volume = crop(volume) if resize else volume
    volume = scale(volume) if rescale else volume
    return volume
def register(sample):
    """Run the external registration tool forward and backward from the
    labelled ground-truth frame of `sample`.

    Relies on module-level configuration: `labelled_frames`, `data_dir`,
    `seg_dir`, `registration_dir`, and the executable path `command`.
    """
    g_truth_frame = labelled_frames[sample]
    all_frames = sorted(os.listdir(os.path.join(data_dir, sample)))
    # Expand bare filenames into full paths.
    for i in range(len(all_frames)):
        all_frames[i] = os.path.join(os.path.join(data_dir, sample),
                                     all_frames[i])
    #mask1 = util.read_vol('{}/{}/{}_{}_all_brains.nii.gz'.format(g_truth_dir, sample, sample, g_truth_frame))
    mask2 = util.read_vol('{}/{}/{}_{}.nii.gz'.format(seg_dir, sample, sample,
                                                      g_truth_frame))
    # mask1 = util.read_vol('../data/labels/021218L/021218L_0180_all_brains.nii.gz')
    # mask2 = util.read_vol('../data/data/021218L_0180.nii.gz')
    #u = union(mask1, mask2)
    # NOTE(review): `dil` is never used below — presumably dilate() writes
    # './registered/mask/<sample>.nii.gz' as a side effect; confirm.
    dil = dilate(mask2, sample)
    # Argument layout: [exe, output dir, frame count, mask path, '1', '0'],
    # then the frame paths to register are appended.
    commands = [
        command, registration_dir + sample + '/',
        len(all_frames), './registered/mask/{}.nii.gz'.format(sample), '1',
        '0'
    ]
    back_commands = commands.copy()
    # Backward pass: frames up to and including the labelled frame, reversed.
    prev_frames = all_frames[:int(g_truth_frame) + 1]
    prev_frames.reverse()
    back_commands.extend(prev_frames)
    # Forward pass: labelled frame onward.
    commands.extend(all_frames[int(g_truth_frame):])
    # Patch the per-direction frame counts into slot 2.
    # NOTE(review): slot 2 was initialized as int (len(all_frames)) and is
    # replaced with str here — subprocess.run requires all-str args, so the
    # initial int would fail if it ever survived to the call.
    back_commands[2] = str(int(g_truth_frame))
    commands[2] = str(len(all_frames) - int(g_truth_frame))
    # print(commands)
    # print(back_commands)
    subprocess.run(back_commands)
    subprocess.run(commands)
def preprocess(
        files,
        resize=None,  # tuple: desired output shape, zero-padded to fit
        top_clip_percent=None,  # upper clip percentile, applied before rescale
        bot_clip_percent=None,  # lower clip percentile, applied before rescale
        rescale_percentile=None,  # percentile whose value becomes 1.0
):
    """Load one volume (or concatenate several), clip, rescale, and resize.

    NOTE(review): the original comments said these are floats in [0, 1],
    but they are passed straight to np.percentile, whose scale is [0, 100]
    (and the defaults 100/0 match that) — confirm callers' convention.
    """
    if isinstance(files, str):
        vol = read_vol(files)
    else:
        vol = np.concatenate([read_vol(f) for f in files], axis=-1)
    # Defaults are no-ops: clip to [p0, p100] and divide by the p100 value.
    percentiles = [
        100 if top_clip_percent is None else top_clip_percent,
        0 if bot_clip_percent is None else bot_clip_percent,
        100 if rescale_percentile is None else rescale_percentile,
    ]
    top, bot, rescale_val = np.percentile(vol.flat, percentiles)
    vol = np.clip(vol, a_min=bot, a_max=top)
    vol = vol / rescale_val
    if resize is not None:
        vol = resize_zero_padding_nd(vol, resize)
    return vol
def timecourse_interpolated(sample):
    """Plot the mean brain signal per frame, filling bad frames by averaging
    the previous value with the next available good value.
    """
    good_frames = GOOD_FRAMES[sample]
    frames = os.listdir('../data/raw/{}'.format(sample))
    # Ratio of frame count to total O2 time — computed but unused below.
    fpm = len(frames) / sum(O2_TIME[sample])
    # NaN marks frames whose signal has not been measured.
    signal_good = np.empty(len(frames))
    signal_good[:] = np.nan
    for frame in good_frames:
        # Predicted brain mask and raw scan for this frame.
        predict = util.read_vol('../data/data/{}_{}.nii.gz'.format(
            sample, get_frame_num(frame)))
        scan = util.read_vol('../data/raw/{}/{}_{}.nii.gz'.format(
            sample, sample, get_frame_num(frame)))
        # Mean raw intensity over the predicted brain voxels.
        brain = scan[np.nonzero(predict)]
        avg_signal = np.mean(brain)
        # NOTE(review): assumes GOOD_FRAMES entries are integer indices into
        # signal_good — confirm against get_frame_num().
        signal_good[frame] = avg_signal
    # Fill each missing frame with the mean of its previous value and the
    # next non-NaN value; filled values chain into later gaps.
    points = np.argwhere(np.isnan(signal_good))
    interpolate = np.zeros(points.shape)
    for i in range(len(points)):
        p = points[i]
        try:
            next_val = signal_good[p + 1]
            j = 1
            while np.isnan(next_val) and p + j < len(signal_good):
                j += 1
                next_val = signal_good[p + j]
            avg_endpoints = np.mean([signal_good[p - 1], next_val])
        except IndexError:
            # Ran off the end of the series: carry the last known value.
            avg_endpoints = signal_good[p - 1]
        interpolate[i] = avg_endpoints
        # Write the fill back so subsequent gaps see it as known.
        signal_good[p] = avg_endpoints
    # Overlay O2 protocol boundaries as alternating colored vertical lines.
    O2 = get_O2_frames(sample, len(frames))
    plt.figure()
    plt.plot(signal_good)
    plt.plot(points, interpolate, 'o')
    colors = ['r', 'g']
    for i in range(len(O2)):
        color = colors[i % 2]
        plt.axvline(x=O2[i], color=color)
    # savefile = './interpolated/greater95/{}.png'.format(sample)
    # plt.savefig(savefile)
    plt.show()
    plt.close()
def preprocess(files, resize=False, tile=False):
    """Load one volume (or concatenate several), normalize, and crop/tile.

    Raises ValueError when any spatial dimension falls outside
    [SHAPE[d], 2 * SHAPE[d]].
    """
    if isinstance(files, str):
        vol = read_vol(files)
    else:
        vol = np.concatenate([read_vol(f) for f in files], axis=-1)
    # Normalize each channel by its own spatial maximum.
    vol = vol / np.max(vol, axis=(0, 1, 2))
    supported = all(
        constants.SHAPE[d] <= vol.shape[d] <= 2 * constants.SHAPE[d]
        for d in range(3))
    if not supported:
        raise ValueError(f'The input shape {vol.shape} is not supported.')
    if tile:
        return split(vol)
    if resize:
        return crop(vol)
    return vol
def timecourse_avg(sample):
    """Save a plot of the mean brain signal per frame, all vs. good frames."""
    good_frames = GOOD_FRAMES[sample]
    frames = os.listdir('../data/raw/raw/{}'.format(sample))
    signal_all = np.zeros(len(frames))
    signal_good = np.zeros(len(frames))
    for i, frame in enumerate(frames):
        # Predicted brain mask and raw scan for this frame.
        predict = util.read_vol('../data/data/{}'.format(frame))
        scan = util.read_vol('../data/raw/raw/{}/{}'.format(sample, frame))
        # Mean raw intensity over the predicted brain voxels.
        avg_signal = np.mean(scan[np.nonzero(predict)])
        signal_all[i] = avg_signal
        if i in good_frames:
            signal_good[i] = avg_signal
    fig = plt.figure()
    plt.plot(signal_good)
    plt.plot(signal_all)
    plt.legend(['good frames', 'all frames'])
    savefile = './avg_plots/less90/{}.png'.format(sample)
    plt.savefig(savefile)
    plt.close(fig)
def register_by_frame(sample):
    """Frame-by-frame forward registration, chaining each frame's transform.

    NOTE(review): this function appears unfinished — see the FIXMEs below;
    as written it cannot run to completion.
    """
    g_truth_frame = labelled_frames[sample]
    all_frames = os.listdir(os.path.join(data_dir, sample))
    # for i in range(len(all_frames)):
    #     all_frames[i] = os.path.join(os.path.join(data_dir, sample), all_frames[i])
    # Ground-truth label path; overwritten inside the loop below.
    mask = '{}/{}/{}_{}_all_brains.nii.gz'.format(g_truth_dir, sample, sample,
                                                 g_truth_frame)
    commands = [command, registration_dir + '/byframe', '2', 'mask', '1', '0']
    # NOTE(review): assumes labelled_frames values are ints — if
    # g_truth_frame is a string (e.g. '0180'), the `i < len(...)`
    # comparison below raises TypeError.
    i = g_truth_frame
    # Initial (identity-ish) transform parameter vector.
    q = np.ones(6)
    # forward pass
    while i < len(all_frames):
        frame = util.read_vol(data_dir + sample + '/' + all_frames[i])
        transformed = transform(frame, q)
        mask = util.read_vol(seg_dir + all_frames[i])
        mask_trans = transform(mask, q)
        commands[3] = mask_trans
        # FIXME: `commands` has only six elements (indices 0-5); both
        # assignments below raise IndexError. The list needs two extra
        # slots (or .append calls) before this can run.
        commands[6] = transformed
        commands[7] = data_dir + sample + '/' + all_frames[i + 1]
        # NOTE(review): subprocess args must be str/paths; `transformed`
        # and `mask_trans` are in-memory volumes here — likely these were
        # meant to be written to disk first.
        output = subprocess.run(commands, capture_output=True)
        # FIXME: stdout is raw bytes; feeding it back into transform() as
        # the parameter vector q needs parsing into floats.
        q = output.stdout
        i += 1
def preprocess(file, resize=False, tile=False):
    """Load a single volume, normalize to [0, 1], and crop or tile it.

    Raises ValueError when any spatial dimension falls outside
    [SHAPE[d], 2 * SHAPE[d]].
    """
    vol = read_vol(file)
    vol = vol / np.max(vol)
    for dim in range(3):
        if not constants.SHAPE[dim] <= vol.shape[dim] <= 2 * constants.SHAPE[dim]:
            raise ValueError(f'The input shape {vol.shape} is not supported.')
    if tile:
        return split(vol)
    if resize:
        return crop(vol)
    return vol
def main(model):
    """Remove artifacts from every prediction of `model` and save results.

    NOTE: reads the module-level `options` for the optional --sample filter.
    """
    folder = options.sample if options.sample else '*'
    pattern = f'data/predict/{model}/{folder}/*.nii.gz'
    for path in glob.glob(pattern):
        # Filenames look like <sample>_<frame>.nii.gz; keep the 4-digit frame.
        sample, i = os.path.basename(path).split('_')
        i = i[:4]
        header = util.header(path)
        volume = util.read_vol(path)
        # Twin scans are expected to contain two brains.
        num_brains = 2 if sample in constants.TWINS else 1
        volume = remove_artifacts(volume, num_brains)
        os.makedirs(f'data/predict_cleaned/{model}/{sample}', exist_ok=True)
        util.save_vol(
            volume,
            f'data/predict_cleaned/{model}/{sample}/{sample}_{i}.nii.gz',
            header)
def main(options):
    """Train, predict, and evaluate a segmentation model.

    Two modes:
      * temporal (``options.temporal``): pairs of consecutive good frames,
        labelled with cleaned unet3000 predictions;
      * static: frame 0 of each sample, labelled with manual labels.

    Fixes over the previous revision (interface unchanged):
      * the temporal predict-all branch had an empty ``if`` body holding
        only a TODO comment — a SyntaxError; now an explicit no-op;
      * the per-sample Dice dict was computed but ``metrics`` was logged
        twice in its place — the Dice results are now logged;
      * ``np.random.permutation`` was fed a dict key view, which it cannot
        index — the keys are materialized into a list first.
    """
    start = time.time()
    np.random.seed(123454321)  # fixed seed -> reproducible split
    organ = 'all_brains' if options.organ == 'brains' else options.organ

    def _temporal_files(split_samples):
        """Forward/reverse raw-frame paths plus their cleaned-label paths."""
        fwd, rev, label_fwd, label_rev = [], [], [], []
        for s in split_samples:
            for i in constants.GOOD_FRAMES[s]:
                fwd.append(f'data/raw/{s}/{s}_{str(i).zfill(4)}.nii.gz')
                rev.append(f'data/raw/{s}/{s}_{str(i-1).zfill(4)}.nii.gz')
                label_fwd.append(
                    f'data/predict_cleaned/unet3000/{s}/{s}_{str(i).zfill(4)}.nii.gz')
                label_rev.append(
                    f'data/predict_cleaned/unet3000/{s}/{s}_{str(i-1).zfill(4)}.nii.gz')
        return fwd, rev, label_fwd, label_rev

    if options.temporal:
        logging.info('Splitting data.')
        # list() so np.random.permutation can index it (a dict key view fails).
        samples = list(constants.GOOD_FRAMES.keys())
        n = len(samples)
        shuffled = np.random.permutation(samples)
        # 2/3 train, 1/6 val, 1/6 test.
        train = shuffled[:(2 * n) // 3]
        val = shuffled[(2 * n) // 3:(5 * n) // 6]
        test = shuffled[(5 * n) // 6:]

        logging.info('Creating data generators.')
        label_types = LABELS[options.model]
        train_for, train_rev, train_label_for, train_label_rev = \
            _temporal_files(train)
        train_gen = AugmentGenerator(
            train_for + train_rev,
            label_files=train_label_for + train_label_rev,
            concat_files=[[train_rev + train_for],
                          [train_label_rev + train_label_for]],
            label_types=label_types)
        weights = util.get_weights(train_gen.labels)

        if not options.skip_training:
            val_for, val_rev, val_label_for, val_label_rev = \
                _temporal_files(val)
            val_gen = VolumeGenerator(
                val_for + val_rev,
                label_files=val_label_for + val_label_rev,
                concat_files=[[val_rev + val_for],
                              [val_label_rev + val_label_for]],
                label_types=label_types)

        if options.predict_all:
            pass  # per-sample generators are built in the loop below
        else:
            test_for, test_rev, test_label_for, test_label_rev = \
                _temporal_files(test)
            pred_gen = VolumeGenerator(test_for + test_rev, tile_inputs=True)
            test_gen = VolumeGenerator(
                test_for + test_rev,
                label_files=test_label_for + test_label_rev,
                concat_files=[[test_rev + test_for],
                              [test_label_rev + test_label_for]],
                label_types=label_types)

        logging.info('Creating model.')
        # Paired frames + label give a 3-channel input.
        shape = constants.SHAPE[:-1] + (3,)
        model = MODELS[options.model](shape,
                                      name=options.name,
                                      filename=options.model_file,
                                      weights=weights)
    else:
        logging.info('Splitting data.')
        n = len(constants.SAMPLES)
        shuffled = np.random.permutation(constants.SAMPLES)
        train = shuffled[:(2 * n) // 3]
        val = shuffled[(2 * n) // 3:(5 * n) // 6]
        test = shuffled[(5 * n) // 6:]

        logging.info('Creating data generators.')
        label_types = LABELS[options.model]
        train_files = [
            f'data/raw/{sample}/{sample}_0000.nii.gz' for sample in train
        ]
        train_label_files = [
            f'data/labels/{sample}/{sample}_0_{organ}.nii.gz'
            for sample in train
        ]
        train_gen = AugmentGenerator(train_files,
                                     label_files=train_label_files,
                                     label_types=label_types)
        weights = util.get_weights(train_gen.labels)

        if not options.skip_training:
            val_files = [
                f'data/raw/{sample}/{sample}_0000.nii.gz' for sample in val
            ]
            val_label_files = [
                f'data/labels/{sample}/{sample}_0_{organ}.nii.gz'
                for sample in val
            ]
            val_gen = VolumeGenerator(val_files,
                                      label_files=val_label_files,
                                      label_types=label_types)

        if options.predict_all:
            pass  # per-sample generators are built in the loop below
        else:
            test_files = [
                f'data/raw/{sample}/{sample}_0000.nii.gz' for sample in test
            ]
            test_label_files = [
                f'data/labels/{sample}/{sample}_0_{organ}.nii.gz'
                for sample in test
            ]
            pred_gen = VolumeGenerator(test_files, tile_inputs=True)
            test_gen = VolumeGenerator(test_files,
                                       label_files=test_label_files,
                                       label_types=label_types)

        logging.info('Creating model.')
        shape = constants.SHAPE
        model = MODELS[options.model](shape,
                                      name=options.name,
                                      filename=options.model_file,
                                      weights=weights)

    if not options.skip_training:
        logging.info('Training model.')
        model.train(train_gen, val_gen, options.epochs)

    # FIXME: predict-all is unfinished for the temporal mode.
    if options.predict_all:
        for folder in glob.glob('data/raw/*'):
            sample = folder.split('/')[-1]
            try:
                logging.info(f'{sample}..............................')
                if options.temporal:
                    # TODO: build the paired-frame generator for temporal
                    # prediction; no-op until implemented.
                    pass
                else:
                    pred_files = glob.glob(
                        f'data/raw/{sample}/{sample}_*.nii.gz')
                    pred_gen = VolumeGenerator(pred_files, tile_inputs=True)
                    model.predict(pred_gen,
                                  f'data/predict/{options.name}/{sample}/')
            except Exception as e:
                # Best-effort: log and continue with the next sample.
                logging.error(f'ERROR during {sample}: {e}')
    else:
        logging.info('Making predictions.')
        model.predict(pred_gen, f'data/predict/{options.name}/')

        logging.info('Testing model.')
        metrics = model.test(test_gen)
        logging.info(metrics)

        # Per-sample Dice against the held-out labels.
        # NOTE(review): test_label_files only exists in the static branch —
        # this loop cannot work for temporal mode as written; confirm.
        dice = {}
        for i, sample in enumerate(test):
            dice[sample] = util.dice_coef(
                util.read_vol(test_label_files[i]),
                util.read_vol(
                    f'data/predict/{options.name}/{sample}_0000.nii.gz'))
        logging.info(dice)
        logging.info(np.mean(list(dice.values())))

    end = time.time()
    logging.info(f'total time: {datetime.timedelta(seconds=(end - start))}')
from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('model', type=str)
parser.add_argument('--sample', type=str)
options = parser.parse_args()

# Restrict to one sample when --sample is given, otherwise process all.
folder = options.sample if options.sample else '*'
samples = [
    path.split('/')[-1]
    for path in glob.glob(f'data/predict_cleaned/{options.model}/{folder}')
]
os.makedirs(f'data/volumes/{options.model}', exist_ok=True)

# Standard deviation of each brain's segmented volume across frames.
var = {}
for s in sorted(samples):
    print(s)
    segs = np.array([
        util.read_vol(f) for f in sorted(
            glob.glob(f'data/predict_cleaned/{options.model}/{s}/{s}_*.nii.gz'))
    ])
    if s in constants.TWINS:
        # Twin scans: connected-component label each frame and keep the two
        # brains' labels consistent across frames.
        brains = [measurements.label(seg)[0] for seg in segs]
        vols = [measurements.sum(segs[0], brains[0], [1, 2])]
        for i in range(1, len(brains)):
            intersect = brains[i - 1] * brains[i]
            # No same-label overlap (products 1 or 4 absent) presumably
            # means the component labels swapped — flip 1 <-> 2 via -x + 3.
            if 1 not in intersect and 4 not in intersect:
                brains[i] = -brains[i] + 3
            vols.append(measurements.sum(segs[i], brains[i], [1, 2]))
        vols = np.array(vols).T
    else:
        vols = [np.sum(segs, axis=(1, 2, 3, 4))]
    for i in range(len(vols)):
        # Simplified: np.sqrt(np.var(x)) is exactly np.std(x).
        var[f'{s}_{i}'] = np.std(vols[i])
# Training-split sample IDs (not referenced in the visible code below).
train = [
    '031317T', '031616', '013018S', '041318S', '050318S', '032318c',
    '032818', '022318S', '013018L', '021218S', '040218', '013118L',
    '022618', '031615', '031317L', '012115', '032318d', '031516', '050917',
    '021218L', '040716', '032318b', '021015', '040417', '041818', '022318L',
    '041017'
]

samples = [
    path.split('/')[-1]
    for path in glob.glob('data/predict_cleaned/unet3000/*')
]
os.makedirs(f'data/volumes/', exist_ok=True)

# Variance of each brain's segmented volume across frames.
var = {}
for s in sorted(samples):
    print(s)
    seg_files = sorted(
        glob.glob(f'data/predict_cleaned/unet3000/{s}/{s}_*.nii.gz'))
    segs = np.array([util.read_vol(f) for f in seg_files])
    if s in constants.TWINS:
        # Twin scans: label components per frame and keep the two brains'
        # labels consistent frame to frame.
        labelled = [measurements.label(seg)[0] for seg in segs]
        vols = [measurements.sum(segs[0], labelled[0], [1, 2])]
        for i in range(1, len(labelled)):
            overlap = labelled[i - 1] * labelled[i]
            # Flip the component labels (1 <-> 2 via -x + 3) when no
            # same-label overlap (products 1 or 4) is present.
            if 1 not in overlap and 4 not in overlap:
                labelled[i] = -labelled[i] + 3
            vols.append(measurements.sum(segs[i], labelled[i], [1, 2]))
        vols = np.array(vols).T
    else:
        vols = [np.sum(segs, axis=(1, 2, 3, 4))]
    for i in range(len(vols)):
        var[f'{s}_{i}'] = np.var(vols[i])
# Frame-quality thresholds — none are referenced in the visible portion of
# this script; presumably applied further down (chunk appears truncated).
OVERALL_VOL_DIF = 0.1
OVERALL_DICE = 0.8
SEQ_VOL_DIF = 0.05
SEQ_DICE = 0.9
PERCENT_GOOD = 0.6

samples = [
    i.split('/')[-1] for i in glob.glob('data/predict_cleaned/unet3000/*')
]
good_frames = {}
for s in sorted(samples):
    print(s)
    # All cleaned predicted segmentations for this sample, in frame order.
    segs = np.array([
        util.read_vol(f) for f in sorted(
            glob.glob(f'data/predict_cleaned/unet3000/{s}/{s}_*.nii.gz'))
    ])
    # Manually labelled reference, if one exists for this sample.
    label = glob.glob(f'data/labels/{s}/{s}_*_all_brains.nii.gz')
    frames = []
    if label:
        # Frame number encoded in the label filename: <s>_<n>_all_brains.
        n = label[0].split('/')[-1].split('_')[1]
        vol = util.read_vol(
            f'data/predict_cleaned/unet3000/{s}/{s}_{str(n).zfill(4)}.nii.gz')
        # Reference brain volume (summed voxels) at the labelled frame.
        volume = np.sum(vol)
        prev = None
        prev_vol = None
        for i in range(len(segs)):
            seg = segs[i]
            curr_vol = np.sum(seg)
            # Absolute volume difference vs. the labelled reference frame.
            dif = abs(volume - curr_vol)
            # NOTE(review): the loop body ends here in this chunk — the
            # threshold checks that should populate good_frames are missing.
import glob

import numpy as np

import util

# Merge the per-brain frame-0 label files of each sample into a single
# <sample>_0_all_brains.nii.gz volume.
samples = [
    '010918L', '010918S', '012115', '013018L', '013018S', '013118L',
    '013118S', '021015', '021218L', '021218S', '022318L', '022318S',
    '022415', '022618', '030217', '030315', '031317L', '031317T', '031516',
    '031615', '031616', '031716', '032217', '032318a', '032318b', '032318c',
    '032318d', '032818', '040218', '040417'
]
for sample in samples:
    brain_files = glob.glob('data/labels/{}/{}_0_*_brain.nii.gz'.format(
        sample, sample))
    # Accumulator sized like the raw frame-0 scan.
    combined = np.zeros(
        util.shape('data/raw/{}/{}_0.nii.gz'.format(sample, sample)))
    header = util.header(brain_files[0])
    for brain_file in brain_files:
        combined += util.read_vol(brain_file)
    util.save_vol(
        combined,
        'data/labels/{}/{}_0_all_brains.nii.gz'.format(sample, sample),
        header)
def main(folder, volume):
    """Interactively de-interlace a slice-interleaved NIfTI series.

    Even and odd slices were acquired in alternating temporal order; this
    interpolates each parity across all slice positions, asks the user which
    parity comes first in time, interleaves them along the last axis, and
    writes one file per time point to data/originals/<sample>/.

    Args:
        folder: path prefix globbed for '*.nii.gz'; the sample name is the
            third path component ('data/<x>/<sample>/...' — TODO confirm).
        volume: if truthy, save 3-D preview volumes to data/temp/ instead of
            showing a 2-D matplotlib preview.
    """
    files = glob.glob(folder + '*.nii.gz')
    vols = np.concatenate([util.read_vol(file) for file in files], axis=-1)
    # Choose the slice axis: the odd-one-out spatial dimension, or ask the
    # user when the shape is ambiguous.
    if vols.shape[0] == vols.shape[1] == vols.shape[2]:
        axis = int(input('shape: {}\n> '.format(vols.shape)))
    elif vols.shape[0] == vols.shape[1]:
        axis = 2
    elif vols.shape[0] == vols.shape[2]:
        axis = 1
    elif vols.shape[1] == vols.shape[2]:
        axis = 0
    else:
        axis = int(input('shape: {}\n> '.format(vols.shape)))
    vols = np.moveaxis(vols, axis, 0)
    shape = vols.shape
    # Linearly interpolate each parity's slices over all slice positions.
    even_i = np.arange(0, shape[0], 2)
    even_slices = vols[::2, ...]
    even_interpolator = interp1d(even_i, even_slices, kind='linear', axis=0)
    odd_i = np.arange(1, shape[0], 2)
    odd_slices = vols[1::2, ...]
    odd_interpolator = interp1d(odd_i, odd_slices, kind='linear', axis=0)
    # interp1d cannot extrapolate, so edge positions are padded by
    # repeating the nearest interpolated slice.
    if shape[0] % 2 == 0:
        evens = even_interpolator(np.arange(0, shape[0] - 1))
        odds = odd_interpolator(np.arange(1, shape[0]))
        evens = np.concatenate((evens, evens[np.newaxis, -1, ...]))
    else:
        evens = even_interpolator(np.arange(0, shape[0]))
        odds = odd_interpolator(np.arange(1, shape[0] - 1))
        odds = np.concatenate((odds, odds[np.newaxis, -1, ...]))
    # NOTE(review): the odd stack is front-padded with a zero slice here,
    # while the newer variant of this script repeats the first slice —
    # confirm which is intended.
    odds = np.concatenate((np.zeros([
        1,
    ] + list(shape[1:])), odds))
    if volume:
        # 3-D preview: save two interleaved volumes per parity to data/temp/.
        even_1 = evens[shape[0] // 3, ...]
        even_2 = evens[shape[0] * 2 // 3, ...]
        odd_1 = odds[shape[0] // 3, ...]
        odd_2 = odds[shape[0] * 2 // 3, ...]
        even_img_1 = np.zeros((shape[1], shape[2], shape[3] * 2))
        even_img_1[..., ::2] = even_1
        even_img_1[..., 1::2] = odd_1
        even_img_2 = np.zeros((shape[1], shape[2], shape[3] * 2))
        even_img_2[..., ::2] = even_2
        even_img_2[..., 1::2] = odd_2
        odd_img_1 = np.zeros((shape[1], shape[2], shape[3] * 2))
        odd_img_1[..., ::2] = odd_1
        odd_img_1[..., 1::2] = even_1
        odd_img_2 = np.zeros((shape[1], shape[2], shape[3] * 2))
        odd_img_2[..., ::2] = odd_2
        odd_img_2[..., 1::2] = even_2
        temp_folder = 'data/temp/'
        os.makedirs(temp_folder, exist_ok=True)
        util.save_vol(even_img_1, temp_folder + 'even_1.nii.gz')
        util.save_vol(even_img_2, temp_folder + 'even_2.nii.gz')
        util.save_vol(odd_img_1, temp_folder + 'odd_1.nii.gz')
        util.save_vol(odd_img_2, temp_folder + 'odd_2.nii.gz')
    else:
        # 2-D preview: show one mid-plane image per parity at two depths.
        even_1 = evens[shape[0] // 3, shape[1] // 2, ...]
        even_2 = evens[shape[0] * 2 // 3, shape[1] // 2, ...]
        odd_1 = odds[shape[0] // 3, shape[1] // 2, ...]
        odd_2 = odds[shape[0] * 2 // 3, shape[1] // 2, ...]
        even_img_1 = np.zeros((shape[2], shape[3] * 2))
        even_img_1[:, ::2] = even_1
        even_img_1[:, 1::2] = odd_1
        even_img_2 = np.zeros((shape[2], shape[3] * 2))
        even_img_2[:, ::2] = even_2
        even_img_2[:, 1::2] = odd_2
        odd_img_1 = np.zeros((shape[2], shape[3] * 2))
        odd_img_1[:, ::2] = odd_1
        odd_img_1[:, 1::2] = even_1
        odd_img_2 = np.zeros((shape[2], shape[3] * 2))
        odd_img_2[:, ::2] = odd_2
        odd_img_2[:, 1::2] = even_2
        fig = plt.figure(figsize=(16, 8))
        fig.add_subplot(2, 2, 1)
        plt.imshow(odd_img_1)
        plt.axis('off')
        plt.title('odd')
        fig.add_subplot(2, 2, 2)
        plt.imshow(even_img_1)
        plt.axis('off')
        plt.title('even')
        fig.add_subplot(2, 2, 3)
        plt.imshow(odd_img_2)
        plt.axis('off')
        plt.title('odd')
        fig.add_subplot(2, 2, 4)
        plt.imshow(even_img_2)
        plt.axis('off')
        plt.title('even')
        plt.show(block=False)
    # Ask which parity is the first temporal frame.
    order = input('1. odd\n2. even\n> ')
    plt.close()
    # Interleave the chosen parity first along the (doubled) time axis.
    new_shape = list(shape)
    new_shape[-1] *= 2
    series = np.zeros(new_shape)
    if order == '1':
        series[..., ::2] = odds
        series[..., 1::2] = evens
    elif order == '2':
        series[..., ::2] = evens
        series[..., 1::2] = odds
    else:
        raise ValueError('Must be even or odd slice.')
    series = np.moveaxis(series, 0, axis)
    sample = folder.split('/')[2]
    new_folder = 'data/originals/{}/'.format(folder.split('/')[2])
    os.makedirs(new_folder, exist_ok=True)
    # One output file per time point.
    for i in range(new_shape[-1]):
        util.save_vol(series[..., i],
                      new_folder + sample + '_{}.nii.gz'.format(i))
def preprocess(file, funcs=('rescale', 'resize')):
    """Load `file` and apply the named preprocessing steps in order.

    Args:
        file: path to a volume readable by `read_vol`.
        funcs: iterable of keys into PRE_FUNCTIONS, applied in order.
            The default was a mutable list — replaced with a tuple to avoid
            the shared-mutable-default pitfall; call sites are unaffected.

    Returns:
        The preprocessed volume.
    """
    vol = read_vol(file)
    for f in funcs:
        vol = PRE_FUNCTIONS[f](vol)
    return vol
def main(sample, order):
    """De-interlace a slice-interleaved NIfTI series for `sample`.

    Interpolates even/odd slice parities to every slice position, determines
    which parity comes first in time (interactively when `order` is None),
    interleaves them along the last axis, and writes one file per time point
    to data/raw/<sample>/.

    Args:
        sample: sample ID; inputs are read from data/nifti/<sample>/.
        order: '1'/'o' for odd-first, '2'/'e' for even-first, or None to
            choose interactively (with an optional 3-D preview).

    Raises:
        ValueError: if the chosen order is not one of the accepted values.
    """
    files = sorted(glob.glob(f'data/nifti/{sample}/*.nii.gz'))
    vols = np.concatenate([util.read_vol(file) for file in files], axis=-1)
    # Choose the slice axis: the odd-one-out spatial dimension, or ask the
    # user when the shape is ambiguous.
    if vols.shape[0] == vols.shape[1] == vols.shape[2]:
        axis = int(input(f'shape: {vols.shape}\n> '))
    elif vols.shape[0] == vols.shape[1]:
        axis = 2
    elif vols.shape[0] == vols.shape[2]:
        axis = 1
    elif vols.shape[1] == vols.shape[2]:
        axis = 0
    else:
        axis = int(input(f'shape: {vols.shape}\n> '))
    vols = np.moveaxis(vols, axis, 0)
    shape = vols.shape
    # Linearly interpolate each parity's slices over all slice positions.
    even_i = np.arange(0, shape[0], 2)
    even_slices = vols[::2, ...]
    even_interpolator = interp1d(even_i, even_slices, kind='linear', axis=0)
    odd_i = np.arange(1, shape[0], 2)
    odd_slices = vols[1::2, ...]
    odd_interpolator = interp1d(odd_i, odd_slices, kind='linear', axis=0)
    # interp1d cannot extrapolate, so edges are padded by repeating the
    # nearest interpolated slice.
    if shape[0] % 2 == 0:
        evens = even_interpolator(np.arange(0, shape[0] - 1))
        odds = odd_interpolator(np.arange(1, shape[0]))
        evens = np.concatenate((evens, evens[-1:, ...]))
    else:
        evens = even_interpolator(np.arange(0, shape[0]))
        odds = odd_interpolator(np.arange(1, shape[0] - 1))
        odds = np.concatenate((odds, odds[-1:, ...]))
    odds = np.concatenate((odds[:1, ...], odds))
    new_shape = list(shape)
    new_shape[-1] *= 2
    series = np.zeros(new_shape)
    if order is None:
        # Interactive mode: show a 2-D preview of both orderings and ask.
        import matplotlib.pyplot as plt
        even_1 = evens[shape[0] // 3, shape[1] // 2, ...]
        even_2 = evens[shape[0] * 2 // 3, shape[1] // 2, ...]
        odd_1 = odds[shape[0] // 3, shape[1] // 2, ...]
        odd_2 = odds[shape[0] * 2 // 3, shape[1] // 2, ...]
        even_img_1 = np.zeros((shape[2], shape[3] * 2))
        even_img_1[:, ::2] = even_1
        even_img_1[:, 1::2] = odd_1
        even_img_2 = np.zeros((shape[2], shape[3] * 2))
        even_img_2[:, ::2] = even_2
        even_img_2[:, 1::2] = odd_2
        odd_img_1 = np.zeros((shape[2], shape[3] * 2))
        odd_img_1[:, ::2] = odd_1
        odd_img_1[:, 1::2] = even_1
        odd_img_2 = np.zeros((shape[2], shape[3] * 2))
        odd_img_2[:, ::2] = odd_2
        odd_img_2[:, 1::2] = even_2
        img_1 = np.concatenate((odd_img_1, even_img_1), axis=0)
        img_2 = np.concatenate((odd_img_2, even_img_2), axis=0)
        fig = plt.figure(figsize=(9, 9))
        fig.add_subplot(2, 1, 1)
        plt.imshow(img_1)
        plt.axis('off')
        fig.add_subplot(2, 1, 2)
        plt.imshow(img_2)
        plt.axis('off')
        plt.suptitle(sample)
        plt.show(block=False)
        order = input('0. 3D\n1. odd\n2. even\n> ')
        if order == '0':
            # Undecided: write full 3-D previews of both orderings and ask
            # again after the user inspects them.
            even = evens[shape[0] // 2, ...]
            odd = odds[shape[0] // 2, ...]
            even_img = np.zeros((shape[1], shape[2], shape[3] * 2))
            even_img[..., ::2] = even
            even_img[..., 1::2] = odd
            odd_img = np.zeros((shape[1], shape[2], shape[3] * 2))
            odd_img[..., ::2] = odd
            odd_img[..., 1::2] = even
            temp_folder = 'data/temp'
            os.makedirs(temp_folder, exist_ok=True)
            # BUG FIX: previews were saved to 'data/tempeven.nii.gz' /
            # 'data/tempodd.nii.gz' (missing path separator) while the
            # `open` commands below looked inside data/temp/.
            util.save_vol(even_img, f'{temp_folder}/even.nii.gz')
            util.save_vol(odd_img, f'{temp_folder}/odd.nii.gz')
            os.system(f'open {temp_folder}/even.nii.gz')
            os.system(f'open {temp_folder}/odd.nii.gz')
            order = input('1. odd\n2. even\n> ')
        plt.close()
    # Interleave the chosen parity first along the doubled time axis.
    if order == '1' or order == 'o':
        series[..., ::2] = odds
        series[..., 1::2] = evens
    elif order == '2' or order == 'e':
        series[..., ::2] = evens
        series[..., 1::2] = odds
    else:
        raise ValueError('Must be even or odd slice.')
    series = np.moveaxis(series, 0, axis)
    new_folder = f'data/raw/{sample}/'
    os.makedirs(new_folder, exist_ok=True)
    # One output file per time point, 4-digit zero-padded frame numbers.
    for i in range(new_shape[-1]):
        util.save_vol(series[..., i],
                      new_folder + sample + f'_{str(i).zfill(4)}.nii.gz')
parser = ArgumentParser()
parser.add_argument('model', type=str)
parser.add_argument('--sample', type=str)
options = parser.parse_args()

# Restrict to one sample when --sample is given, otherwise process all.
folder = options.sample if options.sample else '*'
samples = [
    i.split('/')[-1]
    for i in glob.glob(f'data/predict_cleaned/{options.model}/{folder}')
]
os.makedirs(f'data/gifs/{options.model}', exist_ok=True)
for s in sorted(samples):
    print(s)
    # Raw frames for this sample, in frame order.
    vols = np.asarray([
        util.read_vol(f)
        for f in sorted(glob.glob(f'data/raw/{s}/{s}_*.nii.gz'))
    ])
    # Log-compress intensities relative to the 95th percentile, clip to [0, 1].
    vols = np.clip(np.log(1 + vols / np.percentile(vols, 95)), 0, 1)
    # Matching cleaned segmentations.
    segs = np.asarray([
        util.read_vol(f) for f in sorted(
            glob.glob(
                f'data/predict_cleaned/{options.model}/{s}/{s}_*.nii.gz'))
    ])
    if s in constants.TWINS:
        # Twin scans: connected-component label each frame, then keep the
        # two brains' labels consistent across consecutive frames.
        brains = [label(seg)[0] for seg in segs]
        for i in range(1, len(brains)):
            intersect = brains[i - 1] * brains[i]
            # No same-label overlap (products 1 or 4 absent) presumably
            # means the labels swapped — flip 1 <-> 2 via -x + 3.
            if 1 not in intersect and 4 not in intersect:
                brains[i] = - brains[i] + 3
# NOTE(review): this chunk appears truncated — the gif output implied by the
# makedirs('data/gifs/...') above is not visible here.