def get_img(imgloc, segloc=None, hu_lb=-100, hu_ub=400):
    """Load, reorient and normalize an image/segmentation pair for the network.

    Args:
        imgloc: path of the CT volume.
        segloc: optional path of the ground-truth segmentation.
        hu_lb, hu_ub: HU window bounds; defaults preserve the previous
            hard-coded (-100, 400) behavior.

    Returns:
        (image, segmentation) float32/uint8 arrays, each with a trailing
        channel axis added.

    Raises:
        ValueError: if image and segmentation shapes disagree after resizing.
    """
    npimg, _, npseg = preprocess.reorient(imgloc, segloc)
    npimg = preprocess.resize_to_nn(npimg).astype(np.float32)
    # clip to the HU window, then map that window onto the network's input range
    npimg = preprocess.window(npimg, hu_lb, hu_ub)
    npimg = preprocess.rescale(npimg, hu_lb, hu_ub)
    npseg = preprocess.resize_to_nn(npseg).astype(np.uint8)
    # explicit raise instead of assert: asserts vanish under `python -O`
    if npimg.shape != npseg.shape:
        raise ValueError('image/segmentation shape mismatch: %s vs %s' %
                         (npimg.shape, npseg.shape))
    return npimg[..., np.newaxis], npseg[..., np.newaxis]
def generate_physical_noise_distribution_table(dbfile_mda, rootloc_mda):
    """Build the physical-noise distribution table for the MDA image set.

    Uses the module-level window bounds (hu_lb, hu_ub, std_lb, std_ub) to
    window the volumes and to size/range the 2-D (HU, stdev) histogram.
    """
    volumes, _ = get_mda_imgs(dbfile=dbfile_mda, rootloc=rootloc_mda)
    volumes = preprocess.window(volumes, hu_lb, hu_ub)
    # per-voxel noise estimate from a k=5 neighborhood
    noise_std = get_noise_dist_3d(volumes, k=5)
    bin_counts = (hu_ub - hu_lb, std_ub - std_lb)
    bin_ranges = [[hu_lb, hu_ub], [std_lb, std_ub]]
    return compile_noise_dist(volumes, noise_std, b=bin_counts, r=bin_ranges)
def get_img(imgloc, segloc=None):
    """Load, reorient and normalize an image/segmentation pair.

    Window bounds come from the module-level hu_lb/hu_ub globals.
    Returns (image, segmentation) without a channel axis, unlike the
    channel-appending variants of this helper elsewhere in the project.
    """
    npimg, _, npseg = preprocess.reorient(imgloc, segloc)
    npimg = preprocess.resize_to_nn(npimg).astype(np.float32)
    npimg = preprocess.window(npimg, hu_lb, hu_ub)
    npimg = preprocess.rescale(npimg, hu_lb, hu_ub)
    npseg = preprocess.resize_to_nn(npseg).astype(np.uint8)
    # FIX: removed stray debug print(npimg.shape) left over from development
    assert npimg.shape == npseg.shape
    return npimg, npseg
def get_img(imgloc):
    """Load a volume and return its middle axial slice, batch/channel padded.

    Returns an array of shape (1, H, W, 1) suitable for a single-sample
    model.predict call.
    """
    npimg, _, _ = preprocess.reorient(imgloc)
    # NOTE(review): casting to int16 *before* rescale looks suspicious — if
    # preprocess.rescale maps into [0, 1] in-place dtype, integer truncation
    # would zero the data. Behavior kept as-is; confirm rescale returns float.
    npimg = preprocess.resize_to_nn(npimg, transpose=True).astype(np.int16)
    npimg = preprocess.window(npimg, -100, 300)
    npimg = preprocess.rescale(npimg, -100, 300)
    # FIX: removed stray debug print(npimg.shape)
    midslice = npimg[int(npimg.shape[0] / 2), :, :]
    return midslice[np.newaxis, :, :, np.newaxis]
def PredictModel(model=settings.options.predictmodel, image=settings.options.predictimage, imageheader=None, outdir=settings.options.segmentation):
    """Run single-volume inference with freshly built weights and save results.

    Loads the CT at `image`, preprocesses it (resize, HU window, rescale),
    builds a new U-net, loads `model` weights, predicts, and writes three
    NIfTI files next to `outdir`: the windowed input, the float prediction,
    and the thresholded integer mask.

    NOTE: defaults are bound to settings.options at import time; pass values
    explicitly if options are parsed later.

    Returns:
        (segout_float_resize, segout_int_resize), or None implicitly when any
        of model/image/outdir is None (the whole body is skipped).
    """
    if (model != None and image != None and outdir != None):
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"  # see issue #152
        numpypredict, origheader, _ = preprocess.reorient(image)
        # in-plane resolution must match the global expectation before resize
        assert numpypredict.shape[0:2] == (settings._globalexpectedpixel, settings._globalexpectedpixel)
        resizepredict = preprocess.resize_to_nn(numpypredict)
        resizepredict = preprocess.window(resizepredict, settings.options.hu_lb, settings.options.hu_ub)
        resizepredict = preprocess.rescale(resizepredict, settings.options.hu_lb, settings.options.hu_ub)
        # rebuild the network and load weights (weights file, not a full model)
        opt = GetOptimizer()
        lss, met = GetLoss()
        loaded_model = get_unet()
        loaded_model.compile(loss=lss, metrics=met, optimizer=opt)
        loaded_model.load_weights(model)
        # add channel axis for predict; keep only channel 0 of the output
        segout_float = loaded_model.predict(resizepredict[..., np.newaxis])[..., 0]
        segout_int = (segout_float >= settings.options.segthreshold).astype(
            settings.SEG_DTYPE)
        # save the windowed input volume for visual sanity checking
        segin_windowed = preprocess.resize_to_original(resizepredict)
        segin_windowed_img = nib.Nifti1Image(segin_windowed, None, header=origheader)
        segin_windowed_img.to_filename(
            outdir.replace('.nii', '-imgin-windowed.nii'))
        # save the float-valued prediction
        segout_float_resize = preprocess.resize_to_original(segout_float)
        segout_float_img = nib.Nifti1Image(segout_float_resize, None, header=origheader)
        segout_float_img.to_filename(outdir.replace('.nii', '-pred-float.nii'))
        # save the thresholded binary mask
        segout_int_resize = preprocess.resize_to_original(segout_int)
        segout_int_img = nib.Nifti1Image(segout_int_resize, None, header=origheader)
        segout_int_img.to_filename(outdir.replace('.nii', '-pred-seg.nii'))
        return segout_float_resize, segout_int_resize
def setup_training_from_file():
    """Build the on-disk training database from the CSV listed in settings.

    Reads settings.options.dbfile rows (dataid,image,label), preprocesses each
    volume/truth pair, extracts the ROI, saves substacks under
    <outdir>/data/{img,seg}, and writes an index CSV of every saved substack.

    Returns:
        Path of the written index CSV (datalocations.txt).
    """
    datacsv = settings.options.dbfile
    # create custom data frame database type
    os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
    logfileoutputdir = settings.options.outdir
    imgdir = logfileoutputdir + '/data/img'
    segdir = logfileoutputdir + '/data/seg'
    # FIX: os.makedirs replaces os.system('mkdir -p ...') — portable
    # (works on Windows) and not vulnerable to shell interpretation of the path.
    os.makedirs(imgdir, exist_ok=True)
    os.makedirs(segdir, exist_ok=True)
    print("Output to\t", logfileoutputdir)
    imglist = []
    seglist = []
    dataidlist = []
    with open(datacsv, 'r') as csvfile:
        myreader = csv.DictReader(csvfile, delimiter=',')
        for row in myreader:
            dataid = int(row['dataid'])
            imagelocation = '%s/%s' % (settings.options.rootlocation, row['image'])
            truthlocation = '%s/%s' % (settings.options.rootlocation, row['label'])
            print(imagelocation, truthlocation)
            numpyimage, orig_header, numpytruth = preprocess.reorient(
                imagelocation, segloc=truthlocation)
            resimage = preprocess.resize_to_nn(
                numpyimage, transpose=False).astype(settings.IMG_DTYPE)
            resimage = preprocess.window(resimage, settings.options.hu_lb,
                                         settings.options.hu_ub)
            resimage = preprocess.rescale(resimage, settings.options.hu_lb,
                                          settings.options.hu_ub)
            restruth = preprocess.resize_to_nn(
                numpytruth, transpose=False).astype(settings.SEG_DTYPE)
            # crop both volumes to the region of interest, then save substacks
            imgROI, segROI = isolate_ROI(resimage, restruth)
            Xloc = imgdir + '/volume-' + str(dataid)
            Yloc = segdir + '/segmentation-' + str(dataid)
            Xlist, Ylist = save_img_into_substacks(imgROI, segROI, Xloc, Yloc)
            imglist += Xlist
            seglist += Ylist
            dataidlist += [dataid] * len(Xlist)
    savelistsloc = logfileoutputdir + '/data/datalocations.txt'
    # FIX: newline='' is required by the csv module to avoid blank rows on Windows
    with open(savelistsloc, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',')
        writer.writerow(['dataid', 'imgloc', 'segloc'])
        for row in zip(dataidlist, imglist, seglist):
            writer.writerow(row)
    return savelistsloc
def get_img(imgloc, segloc=None):
    """Load, reorient and normalize an image/segmentation pair, slices first.

    Windows to HU [-100, 200], rescales, moves the slice axis to the front
    (z, H, W) and appends a channel axis to both outputs.
    """
    npimg, _, npseg = preprocess.reorient(imgloc, segloc)
    npimg = preprocess.resize_to_nn(npimg).astype(np.float32)
    npimg = preprocess.window(npimg, -100, 200)
    npimg = preprocess.rescale(npimg, -100, 200)
    npseg = preprocess.resize_to_nn(npseg).astype(np.uint8)
    # FIX: removed two stray debug shape prints left over from development
    assert npimg.shape == npseg.shape
    # slice axis first so each axial slice is one sample
    npimg = np.transpose(npimg, (2, 0, 1))
    npseg = np.transpose(npseg, (2, 0, 1))
    return npimg[..., np.newaxis], npseg[..., np.newaxis]
def PredictNifti(model, saveloc, imageloc, segloc=None):
    """Segment one NIfTI volume with `model` and write all artifacts to disk.

    Args:
        model: loaded Keras model (its second-to-last layer is reused for TTDO).
        saveloc: path prefix for every file written here.
        imageloc: path of the input CT volume.
        segloc: optional path of the ground-truth segmentation.

    Returns:
        (predseg, names, scores) — integer mask plus metric names/values;
        names/scores are empty lists when no ground truth was supplied.
    """
    print('loading data: ', imageloc)
    image, origheader, trueseg = preprocess.reorient(imageloc, segloc=segloc)
    image = preprocess.resize_to_nn(image, transpose=False).astype(
        settings.FLOAT_DTYPE)
    image = preprocess.window(image, settings.options.hu_lb,
                              settings.options.hu_ub)
    image = preprocess.rescale(image, settings.options.hu_lb,
                               settings.options.hu_ub)
    image_img = nib.Nifti1Image(image, None, header=origheader)
    image_img.to_filename(saveloc + '-img.nii')
    predseg_float, predseg = PredictNpy(model, image)
    # FIX: names/scores were unbound when segloc was None, so the final
    # return raised NameError; initialize them up front.
    names = []
    scores = []
    if segloc:
        trueseg = preprocess.resize_to_nn(trueseg, transpose=False).astype(
            settings.SEG_DTYPE)
        names = ['DSC']
        metrics = [dsc_l2_3D_npy]
        if settings.options.liver:
            tseg = (trueseg >= 1).astype(np.int32)[..., np.newaxis]
        elif settings.options.tumor:
            tseg = (trueseg > 1).astype(np.int32)[..., np.newaxis]
        else:
            # FIX: previously fell through and hit NameError on `tseg` below
            raise ValueError('PredictNifti scoring requires the liver or tumor option')
        print('pred ', predseg.shape, predseg.dtype, '\ttrue ', tseg.shape,
              tseg.dtype)
        scores = [
            met(tseg.astype(np.int32), predseg.astype(np.float32))
            for met in metrics
        ]
        print('DSC:\t', end='')
        for idx, s in enumerate(scores):
            print(names[idx], '\t', 1.0 - s, end='\t')
        print()
    print('saving data: ', saveloc)
    print(predseg_float.shape)
    for i in range(predseg_float.shape[-1]):
        segout_float_img = nib.Nifti1Image(predseg_float[..., i], None,
                                           header=origheader)
        segout_float_img.to_filename(saveloc + '-float-' + str(i) + '.nii')
        if segloc:
            # FIX: the '-truth-<i>' file previously re-saved the *prediction*
            # (copy-paste bug); it now saves the per-class truth mask, and is
            # only written when a ground truth exists.
            trueseg_i = (trueseg == i).astype(settings.SEG_DTYPE)
            trueseg_i_img = nib.Nifti1Image(trueseg_i, None, header=origheader)
            trueseg_i_img.to_filename(saveloc + '-truth-' + str(i) + '.nii')
    if segloc:
        trueseg_img = nib.Nifti1Image(trueseg, None, header=origheader)
        trueseg_img.to_filename(saveloc + '-truth.nii')
    predseg_img = nib.Nifti1Image(predseg, None, header=origheader)
    predseg_img.to_filename(saveloc + '-int.nii')
    if settings.options.ttdo and segloc:
        # Test-time dropout: run ntrials stochastic forward passes and derive
        # mean / variance / entropy maps.
        print('starting TTDO...')
        loaded_model = model.layers[-2]
        imagedata = np.transpose(image, (2, 0, 1))[..., np.newaxis]
        f = K.function([loaded_model.layers[0].input, K.learning_phase()],
                       [loaded_model.layers[-1].output])
        print('\tgenerating trials...')
        results = np.zeros(trueseg.shape + (1, settings.options.ntrials,))
        nvalidslices = imagedata.shape[0]
        stride = 128  # slices per forward pass, to bound GPU memory
        for jj in range(settings.options.ntrials):
            zidx = 0
            while zidx < nvalidslices:
                maxz = min(zidx + stride, nvalidslices)
                # learning_phase=1 keeps dropout active at inference time
                results[..., zidx:maxz, 0, jj] = np.transpose(
                    f([imagedata[zidx:maxz, ...], 1])[0], (1, 2, 0, 3))[..., 0]
                zidx += stride
        print('\tcalculating statistics...')
        pred_avg = results.mean(axis=-1)
        pred_var = results.var(axis=-1)
        # binary entropy, defined only where 0 < p < 1
        pred_ent = np.zeros(pred_avg.shape)
        ent_idx0 = pred_avg > 0
        ent_idx1 = pred_avg < 1
        ent_idx = np.logical_and(ent_idx0, ent_idx1)
        pred_ent[ent_idx] = -1*np.multiply(pred_avg[ent_idx], np.log(pred_avg[ent_idx])) \
                            -1*np.multiply(1.0 - pred_avg[ent_idx], np.log(1.0 - pred_avg[ent_idx]))
        print('\tsaving statistics...')
        # save pred_avg
        for i in range(pred_avg.shape[-1]):
            segout_float_img = nib.Nifti1Image(pred_avg[..., i], None,
                                               header=origheader)
            segout_float_img.to_filename(saveloc + '-avg-float-' + str(i) + '.nii')
        pred_avg_int = np.argmax(pred_avg, axis=-1)
        predseg_img = nib.Nifti1Image(pred_avg_int, None, header=origheader)
        predseg_img.to_filename(saveloc + '-avg-int.nii')
        # save pred_var
        for i in range(pred_var.shape[-1]):
            segout_float_img = nib.Nifti1Image(pred_var[..., i], None,
                                               header=origheader)
            segout_float_img.to_filename(saveloc + '-var-float-' + str(i) + '.nii')
        # save pred_ent
        for i in range(pred_ent.shape[-1]):
            segout_float_img = nib.Nifti1Image(pred_ent[..., i], None,
                                               header=origheader)
            segout_float_img.to_filename(saveloc + '-ent-float-' + str(i) + '.nii')
    return predseg, names, scores
# NOTE(review): the next two statements are the tail of a function whose
# header lies outside this chunk (presumably generate_noise_at_val, given the
# lambda below); indentation reconstructed — confirm against the full file.
    noisyval = imgval
    return noisyval


def generate_noise_vec(dlist):
    """Return a np.vectorize'd sampler applying generate_noise_at_val(x, dlist)."""
    f = lambda x: generate_noise_at_val(x, dlist)
    return np.vectorize(f)


def generate_noise_2(img, dlist):
    """Apply the per-value noise model to every pixel of `img`.

    NOTE: np.vectorize is a Python-level loop, so this is slow on volumes.
    """
    noisy = generate_noise_vec(dlist)(img)
    return noisy


# --- script-level driver: build the noise distribution from the MDA set ---
imgs, segs = get_imgs(dbfile=dbfile_mda, rootloc=rootloc_mda)
imgs = window(imgs, hu_lb, hu_ub)
#liver_idx = (segs > 0) * (segs < 5)
#tumor_idx = (segs >= 2) * (segs <= 3)
#only_liver_idx = liver_idx * (1.0 - tumor_idx)
#all_idx = np.ones_like(segs, dtype=bool)
# one histogram bin per HU unit across the window
s, dlist = plot_histogram(imgs, b=hu_ub - hu_lb, r=(hu_lb, hu_ub))
#this_img = imgs[10,...]
#plt.imshow(this_img)
#plt.show()
#noisy_img = window(generate_noise_2(this_img, dlist), hu_lb, hu_ub)
#plt.imshow(noisy_img)
#plt.show()
#plt.imshow(this_img - noisy_img)
#plt.show()
def PredictModel(model=settings.options.predictmodel, image=settings.options.predictimage, imageheader=None, outdir=settings.options.segmentation, seg=None):
    """Single-volume inference supporting 2-D, 2.5-D (D25) and 3-D (D3) modes.

    Loads a saved model file (not just weights), optionally builds thick-slice
    inputs, predicts, and writes windowed input / float prediction / integer
    mask NIfTIs under `outdir`. When `seg` is given, also saves the liver mask
    of the truth and prints a DSC score.

    NOTE: defaults are bound to settings.options at import time.

    Returns:
        (segout_float_resize, segout_int_resize), or None implicitly when any
        of model/image/outdir is None.
    """
    if (model != None and image != None and outdir != None):
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"  # see issue #152
        numpypredict, origheader, origseg = preprocess.reorient(image, segloc=seg)
        assert numpypredict.shape[0:2] == (settings._globalexpectedpixel, settings._globalexpectedpixel)
        resizepredict = preprocess.resize_to_nn(numpypredict)
        resizepredict = preprocess.window(resizepredict, settings.options.hu_lb, settings.options.hu_ub)
        resizepredict = preprocess.rescale(resizepredict, settings.options.hu_lb, settings.options.hu_ub)
        if settings.options.D3 or settings.options.D25:
            # single-volume case: every slice shares one dataid
            dataid = np.ones((resizepredict.shape[0]))
            idx = np.array([1])
            resizepredict2 = thick_slices(resizepredict, settings.options.thickness, dataid, idx)
        else:
            resizepredict2 = resizepredict
        if seg:
            origseg = preprocess.resize_to_nn(origseg)
            origseg = preprocess.livermask(origseg)
            if settings.options.D25:
                dataid_origseg = np.ones((origseg.shape[0]))
                # thickness 1 so the truth stays per-slice; drop the thick axis
                origseg = thick_slices(origseg, 1, dataid_origseg, idx)
                origseg = origseg[..., 0]
            # if not settings.options.D25 and not settings.options.D3:
            #     origseg = origseg.transpose((0,2,1)).astype(settings.FLOAT_DTYPE)
            origseg_img = nib.Nifti1Image(preprocess.resize_to_original(origseg), None)
            origseg_img.to_filename(outdir.replace('.nii', '-trueseg.nii'))
        ###
        ### set up model
        ###
        loaded_model = load_model(model,
                                  custom_objects={
                                      'dsc_l2': dsc_l2,
                                      'l1': l1,
                                      'dsc': dsc,
                                      'dsc_int': dsc,
                                      'ISTA': ISTA
                                  },
                                  compile=False)
        #loaded_model.summary()
        if settings.options.D25:
            # D25 input already carries the thickness axis as channels
            segout_float = loaded_model.predict(resizepredict2)[..., 0]
        else:
            segout_float = loaded_model.predict(resizepredict2[..., np.newaxis])[..., 0]
        segout_int = (segout_float >= settings.options.segthreshold).astype(settings.SEG_DTYPE)
        if settings.options.D3:
            segout_float = unthick_slices(segout_float, settings.options.thickness, dataid, idx)
            # NOTE(review): this call passes only 2 args while the one above
            # passes 4 — likely a missing `dataid, idx`; verify the
            # unthick_slices signature before relying on the D3 path.
            segout_int = unthick_slices(segout_int, settings.options.thickness)
        elif settings.options.D25:
            resizepredict = resizepredict.transpose((0, 2, 1))
        #segout_int = preprocess.largest_connected_component(segout_int).astype(settings.SEG_DTYPE)
        segin_windowed = preprocess.resize_to_original(resizepredict)
        segin_windowed_img = nib.Nifti1Image(segin_windowed, None, header=origheader)
        segin_windowed_img.to_filename(outdir.replace('.nii', '-imgin-windowed.nii'))
        segout_float_resize = preprocess.resize_to_original(segout_float)
        segout_float_img = nib.Nifti1Image(segout_float_resize, None, header=origheader)
        segout_float_img.to_filename(outdir.replace('.nii', '-pred-float.nii'))
        segout_int_resize = preprocess.resize_to_original(segout_int)
        segout_int_img = nib.Nifti1Image(segout_int_resize, None, header=origheader)
        segout_int_img.to_filename(outdir.replace('.nii', '-pred-seg.nii'))
        if seg:
            #score = dsc_l2_3D(origseg, segout_int)
            # score against the float prediction (soft DSC)
            score = dsc_l2_3D(origseg.astype(settings.FLOAT_DTYPE), segout_float)
            print('dsc:\t', 1.0 - score)
        return segout_float_resize, segout_int_resize
def PredictDropout(model=settings.options.predictmodel, image=settings.options.predictimage, outdir=settings.options.segmentation, seg=None):
    """Predict one volume and quantify uncertainty via test-time dropout.

    Writes the raw and windowed inputs, optional truth liver mask, baseline
    float/int predictions, and per-voxel mean/variance/entropy over
    settings.options.ntrials stochastic forward passes.

    Returns:
        (segout_int, segout_float), or None when any of model/image/outdir
        is None.
    """
    if not (model != None and image != None and outdir != None):
        return
    # FIX: removed dead `if model is None` / `if outdir is None` re-assignments;
    # the guard above already returned if either was None.
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"  # see issue #152
    numpypredict, origheader, origseg = preprocess.reorient(image, segloc=seg)
    assert numpypredict.shape[0:2] == (settings._globalexpectedpixel,
                                       settings._globalexpectedpixel)
    resizepredict = preprocess.resize_to_nn(numpypredict)
    resizepredict = preprocess.window(resizepredict, settings.options.hu_lb,
                                      settings.options.hu_ub)
    resizepredict = preprocess.rescale(resizepredict, settings.options.hu_lb,
                                       settings.options.hu_ub)
    if seg:
        origseg = preprocess.resize_to_nn(origseg)
        origseg = preprocess.livermask(origseg)
    # save unprocessed image_in
    # FIX: previously passed `image` (the file *path* string) to Nifti1Image,
    # which requires an array; save the loaded volume instead.
    img_in_nii = nib.Nifti1Image(numpypredict, None, header=origheader)
    img_in_nii.to_filename(outdir.replace('.nii', '-imgin.nii'))
    # save preprocessed image_in
    segin_windowed_img = nib.Nifti1Image(resizepredict, None)
    segin_windowed_img.to_filename(outdir.replace('.nii', '-imgin-windowed.nii'))
    # save true segmentation
    if seg:
        origseg_img = nib.Nifti1Image(origseg, None)
        origseg_img.to_filename(outdir.replace('.nii', '-seg.nii'))
    ###
    ### set up model
    ###
    loaded_model = load_model(model,
                              custom_objects={
                                  'dsc_l2': dsc_l2,
                                  'l1': l1,
                                  'dsc': dsc,
                                  'dsc_int': dsc,
                                  'ISTA': ISTA
                              })
    ###
    ### making baseline prediction and saving to file
    ###
    print('\tmaking baseline predictions...')
    segout_float = loaded_model.predict(resizepredict[..., np.newaxis])[..., 0]
    segout_int = (segout_float >= settings.options.segthreshold).astype(
        settings.SEG_DTYPE)
    # keep only the largest connected component to suppress spurious islands
    segout_int = preprocess.largest_connected_component(segout_int).astype(
        settings.SEG_DTYPE)
    segout_float_img = nib.Nifti1Image(segout_float, None)
    segout_float_img.to_filename(outdir.replace('.nii', '-pred-float.nii'))
    segout_int_img = nib.Nifti1Image(segout_int, None)
    segout_int_img.to_filename(outdir.replace('.nii', '-pred-seg.nii'))
    if seg:
        score = dsc_l2_3D(origseg, segout_int)
        print('\t\t\tdsc:\t', 1.0 - score)
    ###
    ### making predictions using different Bernoulli draws for dropout
    ###
    print('\tmaking predictions with different dropouts trials...')
    # learning_phase=1 keeps dropout active at inference time
    f = K.function([loaded_model.layers[0].input, K.learning_phase()],
                   [loaded_model.layers[-1].output])
    results = np.zeros(resizepredict.shape + (settings.options.ntrials, ))
    for jj in range(settings.options.ntrials):
        results[..., jj] = f([resizepredict[..., np.newaxis], 1])[0][..., 0]
    print('\tcalculating statistics...')
    pred_avg = results.mean(axis=-1)
    pred_var = results.var(axis=-1)
    # binary entropy, defined only where 0 < p < 1
    pred_ent = np.zeros(pred_avg.shape)
    ent_idx0 = pred_avg > 0
    ent_idx1 = pred_avg < 1
    ent_idx = np.logical_and(ent_idx0, ent_idx1)
    pred_ent[ent_idx] = -1*np.multiply(pred_avg[ent_idx], np.log(pred_avg[ent_idx])) \
                        -1*np.multiply(1.0 - pred_avg[ent_idx], np.log(1.0 - pred_avg[ent_idx]))
    print('\tsaving trial statistics...')
    # save pred_avg
    pred_avg_img = nib.Nifti1Image(pred_avg, None)
    pred_avg_img.to_filename(outdir.replace('.nii', '-pred-avg.nii'))
    # save pred_var
    pred_var_img = nib.Nifti1Image(pred_var, None)
    pred_var_img.to_filename(outdir.replace('.nii', '-pred-var.nii'))
    # save pred_ent
    pred_ent_img = nib.Nifti1Image(pred_ent, None)
    pred_ent_img.to_filename(outdir.replace('.nii', '-pred-ent.nii'))
    print('\n')
    return segout_int, segout_float
def PredictNifti(model, saveloc, imageloc, segloc=None):
    """Thick-slice variant: segment one NIfTI volume and write all artifacts.

    NOTE(review): this variant has several latent defects, flagged inline —
    `names`/`scores` are unbound at the return when segloc is None; `tseg` is
    unbound when neither liver nor tumor option is set; the '-truth-<i>' save
    re-saves the prediction; and the TTDO branch references an undefined
    `loaded_model`. See the sibling PredictNifti for the corrected shape.
    """
    print('loading data: ', imageloc)
    image, origheader, trueseg = preprocess.reorient(imageloc, segloc=segloc)
    image = preprocess.resize_to_nn(image, transpose=False).astype(settings.FLOAT_DTYPE)
    image = preprocess.window(image, settings.options.hu_lb, settings.options.hu_ub)
    image = preprocess.rescale(image, settings.options.hu_lb, settings.options.hu_ub)
    image_img = nib.Nifti1Image(image, None, header=origheader)
    image_img.to_filename(saveloc + '-img.nii')
    predseg_float, predseg = PredictNpy(model, image)
    if segloc:
        trueseg = preprocess.resize_to_nn(trueseg, transpose=False).astype(settings.SEG_DTYPE)
        # names = ['Background','Liver','Tumor']
        # metrics = [dsc_l2_background_npy, dsc_l2_liver_npy, dsc_l2_tumor_npy]
        names = ['DSC']
        metrics = [dsc_l2_npy]
        if settings.options.liver:
            tseg = (trueseg >= 1).astype(np.int32)[..., np.newaxis]
        elif settings.options.tumor:
            tseg = (trueseg > 1).astype(np.int32)[..., np.newaxis]
        # NOTE(review): no else branch — NameError on `tseg` if neither option set
        print('pred ', predseg.shape, predseg.dtype, '\ttrue ', tseg.shape, tseg.dtype)
        scores = [met(tseg.astype(np.int32), predseg.astype(np.float32)) for met in metrics]
        print('DSC:\t', end='')
        for idx, s in enumerate(scores):
            print(names[idx], '\t', 1.0 - s, end='\t')
        print()
    print('saving data: ', saveloc)
    print(predseg_float.shape)
    for i in range(predseg_float.shape[-1]):
        segout_float_img = nib.Nifti1Image(predseg_float[..., i], None, header=origheader)
        segout_float_img.to_filename(saveloc + '-float-' + str(i) + '.nii')
        trueseg_i = (trueseg == i).astype(settings.SEG_DTYPE)
        # NOTE(review): saves predseg_float again under the '-truth-' name;
        # trueseg_i computed above is never used — probable copy-paste bug.
        segout_float_img = nib.Nifti1Image(predseg_float[..., i], None, header=origheader)
        segout_float_img.to_filename(saveloc + '-truth-' + str(i) + '.nii')
    trueseg_img = nib.Nifti1Image(trueseg, None, header=origheader)
    trueseg_img.to_filename(saveloc + '-truth.nii')
    predseg_img = nib.Nifti1Image(predseg, None, header=origheader)
    predseg_img.to_filename(saveloc + '-int.nii')
    if settings.options.ttdo and segloc:
        # Test-time dropout over sliding thick-slice windows.
        print('starting TTDO...')
        # NOTE(review): `loaded_model` is never defined in this variant
        # (the sibling uses model.layers[-2]) — this branch raises NameError.
        f = K.function([loaded_model.layers[0].input, K.learning_phase()],
                       [loaded_model.layers[-1].output])
        print('\tgenerating trials...')
        results = np.zeros(trueseg.shape + (3, settings.options.ntrials,))
        for jj in range(settings.options.ntrials):
            segdata = np.zeros((*image.shape, 3))
            nvalidslices = image.shape[2] - settings.options.thickness + 1
            for z in range(nvalidslices):
                indata = image[np.newaxis, :, :, z:z + settings.options.thickness, np.newaxis]
                # NOTE(review): f(...) returns a list — `[0, ...]` tuple-indexes
                # the list and raises TypeError; likely meant f(...)[0][...]
                segdata[:, :, z:z + settings.options.thickness, :] += f([indata, 1])[0, ...]
            # boundary slices are covered by fewer windows; rescale their votes
            for i in range(settings.options.thickness):
                segdata[:, :, i, :] *= (settings.options.thickness) / (i + 1)
            for i in range(settings.options.thickness):
                segdata[:, :, -1 - i, :] *= (settings.options.thickness) / (i + 1)
            results[..., jj] = segdata / settings.options.thickness
        print('\tcalculating statistics...')
        pred_avg = results.mean(axis=-1)
        pred_var = results.var(axis=-1)
        # binary entropy, defined only where 0 < p < 1
        pred_ent = np.zeros(pred_avg.shape)
        ent_idx0 = pred_avg > 0
        ent_idx1 = pred_avg < 1
        ent_idx = np.logical_and(ent_idx0, ent_idx1)
        pred_ent[ent_idx] = -1*np.multiply(pred_avg[ent_idx], np.log(pred_avg[ent_idx])) \
                            -1*np.multiply(1.0 - pred_avg[ent_idx], np.log(1.0 - pred_avg[ent_idx]))
        print('\tsaving statistics...')
        # save pred_avg
        for i in range(pred_avg.shape[-1]):
            segout_float_img = nib.Nifti1Image(pred_avg[..., i], None, header=origheader)
            segout_float_img.to_filename(saveloc + '-avg-float-' + str(i) + '.nii')
        pred_avg_int = np.argmax(pred_avg, axis=-1)
        predseg_img = nib.Nifti1Image(pred_avg_int, None, header=origheader)
        predseg_img.to_filename(saveloc + '-avg-int.nii')
        # save pred_var
        for i in range(pred_var.shape[-1]):
            segout_float_img = nib.Nifti1Image(pred_var[..., i], None, header=origheader)
            segout_float_img.to_filename(saveloc + '-var-float-' + str(i) + '.nii')
        # save pred_ent
        for i in range(pred_ent.shape[-1]):
            segout_float_img = nib.Nifti1Image(pred_ent[..., i], None, header=origheader)
            segout_float_img.to_filename(saveloc + '-ent-float-' + str(i) + '.nii')
    return predseg, names, scores
def TrainModel(idfold=0):
    """Train the liver-mask U-net on one k-fold split.

    Loads the global slice database, selects the train/valid/test slices for
    fold `idfold`, windows/rescales the images, builds liver masks, trains
    with (optionally augmented) ImageDataGenerators, and saves validation
    predictions as NIfTI files.

    Args:
        idfold: index of the k-fold split to train.

    Returns:
        Path of the saved model checkpoint (from GetCallbacks).

    Raises:
        ValueError: if the selected slice counts are inconsistent.
    """
    from setupmodel import GetSetupKfolds, GetCallbacks, GetOptimizer, GetLoss
    from buildmodel import get_unet
    ###
    ### set up output, logging, and callbacks
    ###
    kfolds = settings.options.kfolds
    logfileoutputdir = '%s/%03d/%03d' % (settings.options.outdir, kfolds, idfold)
    # FIX: os.makedirs replaces os.system('mkdir -p ...') — portable and safe
    os.makedirs(logfileoutputdir + '/nii', exist_ok=True)
    os.makedirs(logfileoutputdir + '/liver', exist_ok=True)
    print("Output to\t", logfileoutputdir)
    ###
    ### load data
    ###
    print('loading memory map db for large dataset')
    numpydatabase = np.load(settings._globalnpfile)
    (train_index, test_index, valid_index) = GetSetupKfolds(settings.options.dbfile, kfolds, idfold)
    print('copy data subsets into memory...')
    axialbounds = numpydatabase['axialliverbounds']
    dataidarray = numpydatabase['dataid']
    dbtrainindex = np.isin(dataidarray, train_index)
    dbtestindex = np.isin(dataidarray, test_index)
    dbvalidindex = np.isin(dataidarray, valid_index)
    # keep only slices inside the liver bounds AND in the right fold subset
    subsetidx_train = np.all(np.vstack((axialbounds, dbtrainindex)), axis=0)
    subsetidx_test = np.all(np.vstack((axialbounds, dbtestindex)), axis=0)
    subsetidx_valid = np.all(np.vstack((axialbounds, dbvalidindex)), axis=0)
    if np.sum(subsetidx_train) + np.sum(subsetidx_test) + np.sum(
            subsetidx_valid) != min(np.sum(axialbounds), np.sum(dbtrainindex)):
        # FIX: `raise ("...")` raised a bare string, which is a TypeError in
        # Python 3; raise a proper exception instead.
        raise ValueError("data error: slice numbers dont match")
    print('copy memory map from disk to RAM...')
    trainingsubset = numpydatabase[subsetidx_train]
    validsubset = numpydatabase[subsetidx_valid]
    testsubset = numpydatabase[subsetidx_test]
    # (removed a large block of commented-out debug NIfTI dumps here)
    np.random.seed(seed=0)
    np.random.shuffle(trainingsubset)
    ntrainslices = len(trainingsubset)
    nvalidslices = len(validsubset)
    x_train = trainingsubset['imagedata']
    y_train = trainingsubset['truthdata']
    x_valid = validsubset['imagedata']
    y_valid = validsubset['truthdata']
    print("\nkfolds : ", kfolds)
    print("idfold : ", idfold)
    print("slices training : ", ntrainslices)
    print("slices validation : ", nvalidslices)
    ###
    ### data preprocessing : applying liver mask
    ###
    y_train_typed = y_train.astype(settings.SEG_DTYPE)
    y_train_liver = preprocess.livermask(y_train_typed)
    x_train_typed = x_train
    x_train_typed = preprocess.window(x_train_typed, settings.options.hu_lb,
                                      settings.options.hu_ub)
    x_train_typed = preprocess.rescale(x_train_typed, settings.options.hu_lb,
                                       settings.options.hu_ub)
    y_valid_typed = y_valid.astype(settings.SEG_DTYPE)
    y_valid_liver = preprocess.livermask(y_valid_typed)
    x_valid_typed = x_valid
    x_valid_typed = preprocess.window(x_valid_typed, settings.options.hu_lb,
                                      settings.options.hu_ub)
    x_valid_typed = preprocess.rescale(x_valid_typed, settings.options.hu_lb,
                                       settings.options.hu_ub)
    ###
    ### create and run model
    ###
    opt = GetOptimizer()
    callbacks, modelloc = GetCallbacks(logfileoutputdir, "liver")
    lss, met = GetLoss()
    model = get_unet()
    model.compile(loss=lss, metrics=met, optimizer=opt)
    print("\n\n\tlivermask training...\tModel parameters: {0:,}".format(
        model.count_params()))
    if settings.options.augment:
        train_datagen = ImageDataGenerator(
            brightness_range=[0.9, 1.1],
            preprocessing_function=preprocess.post_augment,
        )
        train_maskgen = ImageDataGenerator()
    else:
        train_datagen = ImageDataGenerator()
        train_maskgen = ImageDataGenerator()
    sd = 2  # arbitrary but fixed seed for ImageDataGenerators()
    # image flow and mask flow share the seed so augmentations stay paired
    dataflow = train_datagen.flow(x_train_typed[..., np.newaxis],
                                  batch_size=settings.options.trainingbatch,
                                  seed=sd,
                                  shuffle=True)
    maskflow = train_maskgen.flow(y_train_liver[..., np.newaxis],
                                  batch_size=settings.options.trainingbatch,
                                  seed=sd,
                                  shuffle=True)
    train_generator = zip(dataflow, maskflow)
    valid_datagen = ImageDataGenerator()
    valid_maskgen = ImageDataGenerator()
    validdataflow = valid_datagen.flow(
        x_valid_typed[..., np.newaxis],
        batch_size=settings.options.validationbatch,
        seed=sd,
        shuffle=True)
    validmaskflow = valid_maskgen.flow(
        y_valid_liver[..., np.newaxis],
        batch_size=settings.options.validationbatch,
        seed=sd,
        shuffle=True)
    valid_generator = zip(validdataflow, validmaskflow)
    history_liver = model.fit_generator(
        train_generator,
        steps_per_epoch=ntrainslices / settings.options.trainingbatch,
        epochs=settings.options.numepochs,
        validation_data=valid_generator,
        callbacks=callbacks,
        shuffle=True,
        validation_steps=nvalidslices / settings.options.validationbatch,
    )
    ###
    ### make predicions on validation set
    ###
    print("\n\n\tapplying models...")
    y_pred_float = model.predict(x_valid_typed[..., np.newaxis])
    y_pred_seg = (y_pred_float[..., 0] >=
                  settings.options.segthreshold).astype(settings.SEG_DTYPE)
    print("\tsaving to file...")
    trueinnii = nib.Nifti1Image(x_valid, None)
    truesegnii = nib.Nifti1Image(y_valid, None)
    truelivernii = nib.Nifti1Image(y_valid_liver, None)
    predsegnii = nib.Nifti1Image(y_pred_seg, None)
    predfloatnii = nib.Nifti1Image(y_pred_float, None)
    trueinnii.to_filename(logfileoutputdir + '/nii/trueimg.nii.gz')
    truesegnii.to_filename(logfileoutputdir + '/nii/trueseg.nii.gz')
    truelivernii.to_filename(logfileoutputdir + '/nii/trueliver.nii.gz')
    predsegnii.to_filename(logfileoutputdir + '/nii/predtumorseg.nii.gz')
    predfloatnii.to_filename(logfileoutputdir + '/nii/predtumorfloat.nii.gz')
    # FIX: "\done saving." contained an invalid escape; "\n" was intended
    print("\ndone saving.")
    return modelloc
def TrainModel(idfold=0):
    """Train the tumor-mask U-net on one k-fold split.

    Same pipeline as the liver trainer, but slices come from
    'axialtumorbounds', the network input is masked to the liver region, and
    the target is the tumor mask.

    Args:
        idfold: index of the k-fold split to train.

    Returns:
        Path of the saved model checkpoint (from GetCallbacks).

    Raises:
        ValueError: if the selected slice counts are inconsistent.
    """
    from setupmodel import GetSetupKfolds, GetCallbacks, GetOptimizer, GetLoss
    from buildmodel import get_unet
    os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
    ###
    ### load data
    ###
    kfolds = settings.options.kfolds
    logfileoutputdir = '%s/%03d/%03d' % (settings.options.outdir, kfolds, idfold)
    # FIX: os.makedirs replaces os.system('mkdir -p ...') — portable and safe
    os.makedirs(logfileoutputdir + '/nii', exist_ok=True)
    os.makedirs(logfileoutputdir + '/tumor', exist_ok=True)
    print("Output to\t", logfileoutputdir)
    print('loading memory map db for large dataset')
    numpydatabase = np.load(settings._globalnpfile)
    (train_index, test_index, valid_index) = GetSetupKfolds(settings.options.dbfile, kfolds, idfold)
    print('copy data subsets into memory...')
    axialbounds = numpydatabase['axialtumorbounds']
    dataidarray = numpydatabase['dataid']
    dbtrainindex = np.isin(dataidarray, train_index)
    dbtestindex = np.isin(dataidarray, test_index)
    dbvalidindex = np.isin(dataidarray, valid_index)
    # keep only slices inside the tumor bounds AND in the right fold subset
    subsetidx_train = np.all(np.vstack((axialbounds, dbtrainindex)), axis=0)
    subsetidx_test = np.all(np.vstack((axialbounds, dbtestindex)), axis=0)
    subsetidx_valid = np.all(np.vstack((axialbounds, dbvalidindex)), axis=0)
    if np.sum(subsetidx_train) + np.sum(subsetidx_test) + np.sum(
            subsetidx_valid) != min(np.sum(axialbounds), np.sum(dbtrainindex)):
        # FIX: `raise ("...")` raised a bare string, which is a TypeError in
        # Python 3; raise a proper exception instead.
        raise ValueError("data error: slice numbers dont match")
    print('copy memory map from disk to RAM...')
    trainingsubset = numpydatabase[subsetidx_train]
    validsubset = numpydatabase[subsetidx_valid]
    testsubset = numpydatabase[subsetidx_test]
    # free the large intermediates before materializing the training arrays
    del numpydatabase
    del axialbounds
    del dataidarray
    del dbtrainindex
    del dbtestindex
    del dbvalidindex
    del subsetidx_train
    del subsetidx_test
    del subsetidx_valid
    np.random.seed(seed=0)
    np.random.shuffle(trainingsubset)
    ntrainslices = len(trainingsubset)
    nvalidslices = len(validsubset)
    x_train = trainingsubset['imagedata']
    y_train = trainingsubset['truthdata']
    x_valid = validsubset['imagedata']
    y_valid = validsubset['truthdata']
    print('\nkfolds : ', kfolds)
    print("idfold : ", idfold)
    print("slices training : ", ntrainslices)
    print("slices validation : ", nvalidslices)
    ###
    ### data preprocessing : applying liver mask
    ###
    y_train_typed = y_train.astype(settings.SEG_DTYPE)
    y_train_liver = preprocess.livermask(y_train_typed)
    y_train_tumor = preprocess.tumormask(y_train_typed)
    x_train_typed = x_train
    x_train_typed = preprocess.window(x_train_typed, settings.options.hu_lb,
                                      settings.options.hu_ub)
    x_train_typed = preprocess.rescale(x_train_typed, settings.options.hu_lb,
                                       settings.options.hu_ub)
    # keep image values inside the liver, set everything outside to -1
    x_train_masked = x_train_typed * y_train_liver.astype(
        settings.IMG_DTYPE) - (1.0 - y_train_liver.astype(settings.IMG_DTYPE))
    y_valid_typed = y_valid.astype(settings.SEG_DTYPE)
    y_valid_liver = preprocess.livermask(y_valid_typed)
    y_valid_tumor = preprocess.tumormask(y_valid_typed)
    x_valid_typed = x_valid
    x_valid_typed = preprocess.window(x_valid_typed, settings.options.hu_lb,
                                      settings.options.hu_ub)
    x_valid_typed = preprocess.rescale(x_valid_typed, settings.options.hu_lb,
                                       settings.options.hu_ub)
    x_valid_masked = x_valid_typed * y_valid_liver.astype(
        settings.IMG_DTYPE) - (1.0 - y_valid_liver.astype(settings.IMG_DTYPE))
    ###
    ### create and run model
    ###
    opt = GetOptimizer()
    callbacks, modelloc = GetCallbacks(logfileoutputdir, "tumor")
    lss, met = GetLoss()
    model = get_unet()
    model.compile(loss=lss, metrics=met, optimizer=opt)
    # FIX: log message said "livermask training" in the tumor trainer
    print("\n\n\ttumormask training...\tModel parameters: {0:,}".format(
        model.count_params()))
    if settings.options.augment:
        train_datagen = ImageDataGenerator(
            brightness_range=[0.9, 1.1],
            fill_mode='nearest',
            preprocessing_function=preprocess.post_augment)
        train_maskgen = ImageDataGenerator()
    else:
        train_datagen = ImageDataGenerator()
        train_maskgen = ImageDataGenerator()
    # FIX: removed unused `test_datagen` local
    sd = 2  # shared seed keeps image and mask augmentations paired
    dataflow = train_datagen.flow(x_train_masked[..., np.newaxis],
                                  batch_size=settings.options.trainingbatch,
                                  seed=sd,
                                  shuffle=True)
    maskflow = train_maskgen.flow(y_train_tumor[..., np.newaxis],
                                  batch_size=settings.options.trainingbatch,
                                  seed=sd,
                                  shuffle=True)
    train_generator = zip(dataflow, maskflow)
    valid_datagen = ImageDataGenerator()
    valid_maskgen = ImageDataGenerator()
    valid_dataflow = valid_datagen.flow(
        x_valid_masked[..., np.newaxis],
        batch_size=settings.options.validationbatch,
        seed=sd,
        shuffle=True)
    valid_maskflow = valid_maskgen.flow(
        y_valid_tumor[..., np.newaxis],
        batch_size=settings.options.validationbatch,
        seed=sd,
        shuffle=True)
    valid_generator = zip(valid_dataflow, valid_maskflow)
    history_tumor = model.fit_generator(
        train_generator,
        steps_per_epoch=ntrainslices / settings.options.trainingbatch,
        epochs=settings.options.numepochs,
        validation_data=valid_generator,
        callbacks=callbacks,
        shuffle=True,
        validation_steps=nvalidslices / settings.options.validationbatch,
    )
    # free training arrays before prediction to reduce peak memory
    del x_train
    del y_train
    del x_train_typed
    del y_train_typed
    del y_train_liver
    del y_train_tumor
    del x_train_masked
    ###
    ### make predicions on validation set
    ###
    print("\n\n\tapplying models...")
    y_pred_float = model.predict(x_valid_masked[..., np.newaxis])
    y_pred_seg = (y_pred_float[..., 0] >=
                  settings.options.segthreshold).astype(settings.SEG_DTYPE)
    print("\tsaving to file...")
    trueinnii = nib.Nifti1Image(x_valid, None)
    truesegnii = nib.Nifti1Image(y_valid, None)
    truelivernii = nib.Nifti1Image(y_valid_liver, None)
    truetumornii = nib.Nifti1Image(y_valid_tumor, None)
    windownii = nib.Nifti1Image(x_valid_typed, None)
    maskednii = nib.Nifti1Image(x_valid_masked, None)
    predsegnii = nib.Nifti1Image(y_pred_seg, None)
    predfloatnii = nib.Nifti1Image(y_pred_float, None)
    trueinnii.to_filename(logfileoutputdir + '/nii/trueimg.nii.gz')
    truesegnii.to_filename(logfileoutputdir + '/nii/trueseg.nii.gz')
    truetumornii.to_filename(logfileoutputdir + '/nii/truetumor.nii.gz')
    truelivernii.to_filename(logfileoutputdir + '/nii/trueliver.nii.gz')
    windownii.to_filename(logfileoutputdir + '/nii/windowedimg.nii.gz')
    maskednii.to_filename(logfileoutputdir + '/nii/masked.nii.gz')
    predsegnii.to_filename(logfileoutputdir + '/nii/predtumorseg.nii.gz')
    predfloatnii.to_filename(logfileoutputdir + '/nii/predtumorfloat.nii.gz')
    del x_valid
    del y_valid
    del x_valid_typed
    del y_valid_typed
    del y_valid_liver
    del y_valid_tumor
    del x_valid_masked
    # FIX: "\done saving." contained an invalid escape; "\n" was intended
    print("\ndone saving.")
    return modelloc
# Driver: visualize model activations for one LiTS volume.
imgloc = 'C:/Users/sofia/OneDrive/Documents/GitHub/unlinked_livermask/data/LiTS/TrainingBatch2/volume-130.nii'
segloc = 'C:/Users/sofia/OneDrive/Documents/GitHub/unlinked_livermask/data/LiTS/TrainingBatch2/segmentation-130.nii'
#img, seg = get_img(imgloc, segloc)
modelloclist = ['C:/Users/sofia/OneDrive/Documents/GitHub/convswap_shallow.h5']
outloclist = [
    'C:/Users/sofia/OneDrive/Documents/GitHub/convswap_images/D2_shallow_test_imgs2/'
]

# preprocess image for prediction
img, origheader, origseg = preprocess.reorient(imgloc, segloc=segloc)
assert img.shape[0:2] == (settings._globalexpectedpixel,
                          settings._globalexpectedpixel)
img = preprocess.resize_to_nn(img)
img = preprocess.window(img, settings.options.hu_lb, settings.options.hu_ub)
img = preprocess.rescale(img, settings.options.hu_lb, settings.options.hu_ub)
img = img[..., np.newaxis]

# run the visualization for each (model, output directory) pair
for modelloc, outloc in zip(modelloclist, outloclist):
    vzm, names, mdict = make_viz_model(modelloc)
    predict_viz_model(vzm, img, names, mdict, outloc)

## histogram of image pixel values
#def plot_histogram(data, b=100, r=(-110,410)):
#    counts, bin_edges = np.histogram(data, bins=b, range=r)
#    plt.bar(bin_edges[:-1], counts, width=[0.8*(bin_edges[i+1]-bin_edges[i]) for i in range(len(bin_edges)-1)])
#    plt.show()
def TrainModel(idfold=0):
    """Train the liver-segmentation U-Net on one k-fold split.

    Loads the slice database named by ``settings._globalnpfile``, selects the
    train/validation/test slices belonging to fold ``idfold`` (of
    ``settings.options.kfolds`` folds), windows/rescales the CT intensities,
    trains ``get_unet()`` with optional augmentation, writes the
    validation-set predictions as NIfTI files under
    ``settings.options.outdir``, and returns the model checkpoint location.

    Parameters
    ----------
    idfold : int
        Index of the k-fold split to train (default 0).

    Returns
    -------
    str
        Path of the saved model weights, as produced by ``GetCallbacks``.

    Raises
    ------
    ValueError
        If the selected slice counts are inconsistent with the database.
    """
    from setupmodel import GetSetupKfolds, GetCallbacks, GetOptimizer, GetLoss
    from buildmodel import get_unet, thick_slices, unthick_slices, unthick

    ###
    ### set up output, logging and callbacks
    ###
    kfolds = settings.options.kfolds
    logfileoutputdir = '%s/%03d/%03d' % (settings.options.outdir, kfolds, idfold)
    # BUG FIX: os.system('mkdir -p ...') is shell/Unix-only (this project is
    # also run on Windows paths); os.makedirs is the portable equivalent.
    os.makedirs(logfileoutputdir, exist_ok=True)
    os.makedirs(logfileoutputdir + '/nii', exist_ok=True)
    os.makedirs(logfileoutputdir + '/liver', exist_ok=True)
    print("Output to\t", logfileoutputdir)

    ###
    ### load data
    ###
    print('loading memory map db for large dataset')
    numpydatabase = np.load(settings._globalnpfile)
    (train_index, test_index, valid_index) = GetSetupKfolds(settings.options.dbfile, kfolds, idfold)

    print('copy data subsets into memory...')
    axialbounds = numpydatabase['axialliverbounds']
    dataidarray = numpydatabase['dataid']
    dbtrainindex = np.isin(dataidarray, train_index)
    dbtestindex = np.isin(dataidarray, test_index)
    dbvalidindex = np.isin(dataidarray, valid_index)
    # keep only slices that are both inside the liver bounds AND in the fold
    subsetidx_train = np.all(np.vstack((axialbounds, dbtrainindex)), axis=0)
    subsetidx_test = np.all(np.vstack((axialbounds, dbtestindex)), axis=0)
    subsetidx_valid = np.all(np.vstack((axialbounds, dbvalidindex)), axis=0)
    print(np.sum(subsetidx_train) + np.sum(subsetidx_test) + np.sum(subsetidx_valid))
    print(min(np.sum(axialbounds), np.sum(dbtrainindex)))
    if np.sum(subsetidx_train) + np.sum(subsetidx_test) + np.sum(subsetidx_valid) != min(np.sum(axialbounds), np.sum(dbtrainindex)):
        # BUG FIX: `raise("...")` raises a str, which is a TypeError in
        # Python 3 and loses the message; raise a real exception type.
        raise ValueError("data error: slice numbers dont match")

    print('copy memory map from disk to RAM...')
    trainingsubset = numpydatabase[subsetidx_train]
    validsubset = numpydatabase[subsetidx_valid]
    testsubset = numpydatabase[subsetidx_test]

    # np.random.seed(seed=0)
    # np.random.shuffle(trainingsubset)

    ntrainslices = len(trainingsubset)
    nvalidslices = len(validsubset)

    if settings.options.D3:
        # 3D: both images and truth are stacked into thick slabs
        x_data = trainingsubset['imagedata']
        y_data = trainingsubset['truthdata']
        x_valid = validsubset['imagedata']
        y_valid = validsubset['truthdata']
        x_train = thick_slices(x_data, settings.options.thickness, trainingsubset['dataid'], train_index)
        y_train = thick_slices(y_data, settings.options.thickness, trainingsubset['dataid'], train_index)
        x_valid = thick_slices(x_valid, settings.options.thickness, validsubset['dataid'], valid_index)
        y_valid = thick_slices(y_valid, settings.options.thickness, validsubset['dataid'], valid_index)
        np.random.seed(seed=0)
        train_shuffle = np.random.permutation(x_train.shape[0])
        valid_shuffle = np.random.permutation(x_valid.shape[0])
        x_train = x_train[train_shuffle, ...]
        y_train = y_train[train_shuffle, ...]
        x_valid = x_valid[valid_shuffle, ...]
        y_valid = y_valid[valid_shuffle, ...]
    elif settings.options.D25:
        # 2.5D: thick image slabs predict the single (thickness=1) truth slice
        x_data = trainingsubset['imagedata']
        y_data = trainingsubset['truthdata']
        x_valid = validsubset['imagedata']
        y_valid = validsubset['truthdata']
        x_train = thick_slices(x_data, settings.options.thickness, trainingsubset['dataid'], train_index)
        x_valid = thick_slices(x_valid, settings.options.thickness, validsubset['dataid'], valid_index)
        y_train = thick_slices(y_data, 1, trainingsubset['dataid'], train_index)
        y_valid = thick_slices(y_valid, 1, validsubset['dataid'], valid_index)
        np.random.seed(seed=0)
        train_shuffle = np.random.permutation(x_train.shape[0])
        valid_shuffle = np.random.permutation(x_valid.shape[0])
        x_train = x_train[train_shuffle, ...]
        y_train = y_train[train_shuffle, ...]
        x_valid = x_valid[valid_shuffle, ...]
        y_valid = y_valid[valid_shuffle, ...]
    else:
        # plain 2D slices
        np.random.seed(seed=0)
        np.random.shuffle(trainingsubset)
        x_train = trainingsubset['imagedata']
        y_train = trainingsubset['truthdata']
        x_valid = validsubset['imagedata']
        y_valid = validsubset['truthdata']

    print("\nkfolds : ", kfolds)
    print("idfold : ", idfold)
    print("slices training : ", ntrainslices)
    print("slices validation : ", nvalidslices)
    try:
        print("slices testing : ", len(testsubset))
    except Exception:  # narrowed from bare `except:` so ^C/SystemExit escape
        print("slices testing : 0")

    ###
    ### data preprocessing : applying liver mask
    ###
    y_train_typed = y_train.astype(settings.SEG_DTYPE)
    y_train_liver = preprocess.livermask(y_train_typed)

    x_train_typed = x_train
    x_train_typed = preprocess.window(x_train_typed, settings.options.hu_lb, settings.options.hu_ub)
    x_train_typed = preprocess.rescale(x_train_typed, settings.options.hu_lb, settings.options.hu_ub)

    y_valid_typed = y_valid.astype(settings.SEG_DTYPE)
    y_valid_liver = preprocess.livermask(y_valid_typed)

    x_valid_typed = x_valid
    x_valid_typed = preprocess.window(x_valid_typed, settings.options.hu_lb, settings.options.hu_ub)
    x_valid_typed = preprocess.rescale(x_valid_typed, settings.options.hu_lb, settings.options.hu_ub)

    ###
    ### create and run model
    ###
    opt = GetOptimizer()
    callbacks, modelloc = GetCallbacks(logfileoutputdir, "liver")
    lss, met = GetLoss()
    model = get_unet()
    model.compile(loss=lss, metrics=met, optimizer=opt)

    print("\n\n\tlivermask training...\tModel parameters: {0:,}".format(model.count_params()))

    if settings.options.D3:
        if settings.options.augment:
            train_datagen = ImageDataGenerator3D(
                brightness_range=[0.9, 1.1],
                width_shift_range=[-0.1, 0.1],
                height_shift_range=[-0.1, 0.1],
                horizontal_flip=True,
                vertical_flip=True,
                zoom_range=0.1,
                fill_mode='nearest',
                preprocessing_function=preprocess.post_augment)
            train_maskgen = ImageDataGenerator3D()
        else:
            train_datagen = ImageDataGenerator3D()
            train_maskgen = ImageDataGenerator3D()
        valid_datagen = ImageDataGenerator3D()
        valid_maskgen = ImageDataGenerator3D()
    else:
        if settings.options.augment:
            train_datagen = ImageDataGenerator2D(
                brightness_range=[0.9, 1.1],
                width_shift_range=[-0.1, 0.1],
                height_shift_range=[-0.1, 0.1],
                horizontal_flip=True,
                vertical_flip=True,
                zoom_range=0.1,
                fill_mode='nearest',
                preprocessing_function=preprocess.post_augment)
            train_maskgen = ImageDataGenerator2D()
        else:
            train_datagen = ImageDataGenerator2D()
            train_maskgen = ImageDataGenerator2D()
        valid_datagen = ImageDataGenerator2D()
        valid_maskgen = ImageDataGenerator2D()

    sd = 2  # arbitrary but fixed seed so image and mask flows stay in sync

    if settings.options.D25:
        # 2.5D slabs already carry a channel dimension
        dataflow = train_datagen.flow(x_train_typed,
                                      batch_size=settings.options.trainingbatch,
                                      seed=sd,
                                      shuffle=True)
        maskflow = train_maskgen.flow(y_train_liver,
                                      batch_size=settings.options.trainingbatch,
                                      seed=sd,
                                      shuffle=True)
        validdataflow = valid_datagen.flow(x_valid_typed,
                                           batch_size=settings.options.validationbatch,
                                           seed=sd,
                                           shuffle=True)
        validmaskflow = valid_maskgen.flow(y_valid_liver,
                                           batch_size=settings.options.validationbatch,
                                           seed=sd,
                                           shuffle=True)
    else:
        dataflow = train_datagen.flow(x_train_typed[..., np.newaxis],
                                      batch_size=settings.options.trainingbatch,
                                      seed=sd,
                                      shuffle=True)
        maskflow = train_maskgen.flow(y_train_liver[..., np.newaxis],
                                      batch_size=settings.options.trainingbatch,
                                      seed=sd,
                                      shuffle=True)
        validdataflow = valid_datagen.flow(x_valid_typed[..., np.newaxis],
                                           batch_size=settings.options.validationbatch,
                                           seed=sd,
                                           shuffle=True)
        validmaskflow = valid_maskgen.flow(y_valid_liver[..., np.newaxis],
                                           batch_size=settings.options.validationbatch,
                                           seed=sd,
                                           shuffle=True)

    train_generator = zip(dataflow, maskflow)
    valid_generator = zip(validdataflow, validmaskflow)

    history_liver = model.fit_generator(
        train_generator,
        steps_per_epoch=ntrainslices // settings.options.trainingbatch,
        validation_steps=nvalidslices // settings.options.validationbatch,
        epochs=settings.options.numepochs,
        validation_data=valid_generator,
        callbacks=callbacks,
        shuffle=True)

    ###
    ### make predictions on validation set
    ###
    print("\n\n\tapplying models...")
    if settings.options.D25:
        y_pred_float = model.predict(x_valid_typed)[..., 0]
    else:
        y_pred_float = model.predict(x_valid_typed[..., np.newaxis])[..., 0]
    y_pred_seg = (y_pred_float >= settings.options.segthreshold).astype(settings.SEG_DTYPE)

    if settings.options.D3:
        # undo the thick-slab stacking so saved volumes match the originals
        x_valid = unthick(x_valid, settings.options.thickness, validsubset['dataid'], valid_index)
        y_valid = unthick(y_valid, settings.options.thickness, validsubset['dataid'], valid_index)
        y_valid_liver = unthick(y_valid_liver, settings.options.thickness, validsubset['dataid'], valid_index)
        y_pred_float = unthick(y_pred_float, settings.options.thickness, validsubset['dataid'], valid_index)
        y_pred_seg = unthick(y_pred_seg, settings.options.thickness, validsubset['dataid'], valid_index)

    print("\tsaving to file...")
    trueinnii = nib.Nifti1Image(x_valid, None)
    truesegnii = nib.Nifti1Image(y_valid, None)
    truelivernii = nib.Nifti1Image(y_valid_liver, None)
    predsegnii = nib.Nifti1Image(y_pred_seg, None)
    predfloatnii = nib.Nifti1Image(y_pred_float, None)
    trueinnii.to_filename(logfileoutputdir + '/nii/trueimg.nii.gz')
    # FIX: was 'truseg.nii.gz' — typo, inconsistent with the tumor-training
    # output names used elsewhere in this file.
    truesegnii.to_filename(logfileoutputdir + '/nii/trueseg.nii.gz')
    truelivernii.to_filename(logfileoutputdir + '/nii/trueliver.nii.gz')
    predsegnii.to_filename(logfileoutputdir + '/nii/predtumorseg.nii.gz')
    predfloatnii.to_filename(logfileoutputdir + '/nii/predtumorfloat.nii.gz')

    # BUG FIX: was print("t\done saving.") — mangled tab escape
    print("\tdone saving.")
    return modelloc