def add_stump(self, name=None, aliases=None, isin_id=None, yahoo_id=None, type=None):
    """Register a partially-known entry, substituting placeholders for any
    missing field before delegating to add().

    Any falsy argument counts as missing: ``aliases`` falls back to an empty
    list, every other field to one shared ``'unknown<random>'`` placeholder
    string generated for this call.
    """
    placeholder = 'unknown' + Helper.rand_str()
    name = name or placeholder
    aliases = aliases or []
    isin_id = isin_id or placeholder
    yahoo_id = yahoo_id or placeholder
    type = type or placeholder
    self.add(name, aliases, isin_id, yahoo_id, type)
def target(param_names, param_values, model_nr):
    """Train and evaluate one model with the given settings overrides.

    param_names: iterable of Settings attribute names to override.
    param_values: matching iterable of values to assign.
    model_nr: integer used to derive the model name under MAIN_FOLDER.

    Returns (mean, sd) of the 'Dice' metric for the trained model.
    Raises AttributeError if a name in param_names is not a Settings attribute.
    """
    s = Settings()
    param_values = list(param_values)
    for attr, value in zip(param_names, param_values):
        # Fail fast on unknown attribute names, as the original eval() did,
        # then assign directly — setattr needs no string quoting and is safe
        # for values that contain quote characters (the old exec() was not).
        getattr(s, attr)
        setattr(s, attr, value)
    s.MODEL_NAME = MAIN_FOLDER + str(model_nr)
    s.VALTEST_MODEL_NAMES = [s.MODEL_NAME]
    h = Helper(s)
    # Model numbers listed here skip training and only re-run the test phase.
    not_model_nrs = []
    if model_nr not in not_model_nrs:
        t = Train(s, h)
        t.train()
        del t  # release the trainer (and its model) before testing
        metric_means, metric_sds = Test(s, h).test()
    else:
        # Re-test an existing model without recomputing probability maps.
        s.CALC_PROBS = False
        metric_means, metric_sds = Test(s, h).test()
        s.CALC_PROBS = True
    return metric_means[s.MODEL_NAME]['Dice'], metric_sds[s.MODEL_NAME]['Dice']
# NOTE(review): fragment — the statements below are the tail of a LogInspector
# plotting method whose `def` header lies outside this chunk; they reference
# self.h, log, w, orig_lw and smooth_lw defined earlier in that method.
plt.figure()
# One subplot column per logged metric; row 1 = training, row 2 = validation.
m = len(log['training'].keys())
cnt = 1
for i in log['training']:
    plt.subplot(2, m, cnt)
    # Raw curve drawn faintly, smoothed curve drawn on top of it.
    plt.plot(log['training'][i], lw=orig_lw, alpha=.3)
    plt.plot(self.h.smooth(log['training'][i], w), lw=smooth_lw)
    # plt.plot(np.log10(log['training'][i]), lw=orig_lw, alpha=.3)
    # plt.plot(np.log10(self.h.smooth(log['training'][i], w)), lw=smooth_lw)
    plt.title('train: ' + i, fontsize=8)
    plt.subplot(2, m, m + cnt)
    plt.plot(log['validation'][i], lw=orig_lw, alpha=.3)
    plt.plot(self.h.smooth(log['validation'][i], w), lw=smooth_lw)
    # plt.plot(np.log10(log['validation'][i]), lw=orig_lw, alpha=.3)
    # plt.plot(np.log10(self.h.smooth(log['validation'][i], w)), lw=smooth_lw)
    plt.title('val: ' + i, fontsize=8)
    cnt += 1
plt.show()


if __name__ == "__main__":
    s = Settings()
    h = Helper(s)
    log_inspector = LogInspector(s, h)
    log_inspector.inspect_log()
# NOTE(review): fragment — the first statements belong to a method (its `def`
# header lies outside this chunk) that validates a new element's image shape
# before appending it to the private __elements list.
            raise Exception('Image shapes of all elements should be equal. Found {}, while expected {}.'
                            .format(self.__elements[0].get_image().shape, el.get_image().shape))
        self.__elements.append(el)

    def get_result(self):
        # Accessor for the generated composite volume.
        return self.__result

    def show_result(self):
        # Print the result's shape, then display the 3-D volume.
        print(self.get_result().shape)
        imshow3D(self.get_result())


if __name__ == '__main__':
    s = Settings()
    h = Helper(s)

    # Get a mask which represents a left atrium
    _, _, la_path = h.getImagePaths([24], True)
    la_image = h.loadImages(["../data/4chamber/GlassHeartNonIsotropicCT_seg.gipl"])
    imshow3D(la_image[0])

    # Make left atrium object with mask image
    # (label value 2 in the segmentation is presumably the left atrium — TODO confirm)
    la = LeftAtrium()
    la.set_mask_image((la_image[0] == 2).astype(int))
    la.init_generation()

    # Add scar and fibrosis to the image
    la.add_adjustment(Wall(2))
    la.add_adjustment(ScarFibrosis(2))
    la.apply_adjustments()
def get_grid():
    """Assemble a 5x5 RGB grid of qualitative segmentation results.

    For 25 fixed case numbers, loads the input image, ground-truth annotation
    and thresholded prediction from the model's predict folder, overlays
    ground truth (red) and prediction (green) on one cropped slice per case,
    and concatenates the tiles into a single image, which is returned.
    """
    s = Settings()
    h = Helper(s)
    ip = []  # input image volumes
    gt = []  # ground-truth annotation volumes
    an = []  # predicted (thresholded probability) volumes
    # Hard-coded case numbers and their corresponding Dice scores.
    nrs = np.array([
        70, 21, 95, 73, 78, 26, 38, 82, 47, 40, 66, 59, 13, 89, 71, 88, 37,
        22, 84, 10, 97, 68, 65, 48, 45
    ])
    d = np.array([
        0.9009270902037788, 0.9104197765530493, 0.9128334854875481,
        0.8607061285160114, 0.726180976928685, 0.7618735476244846,
        0.8426088283800738, 0.9227242238885163, 0.9267448462842333,
        0.8202146853529186, 0.9124323842524247, 0.8758631939535643,
        0.8686964143471794, 0.9156216299184503, 0.9226021312080136,
        0.8982460315886207, 0.9316061013262126, 0.8248859357030646,
        0.8955985800466059, 0.7870071142712975, 0.6458948916498899,
        0.9089561365052262, 0.9061868164772646, 0.8842184960264304,
        0.8842468629005924
    ])
    # Order the cases by Dice score, best first.
    nrs = np.flip(nrs[np.argsort(d)], 0)
    for nr in nrs:
        path = '{}input_image_{}_-1.nii'.format(
            h.getModelPredictPath(s.MODEL_NAME), nr)
        im = sitk.GetArrayFromImage(sitk.ReadImage(path))
        ip.append(im)
        path = '{}anno_image_{}_-1.nii'.format(
            h.getModelPredictPath(s.MODEL_NAME), nr)
        im = sitk.GetArrayFromImage(sitk.ReadImage(path))
        gt.append(im)
        path = '{}prob_thresh_image_{}_-1.nii'.format(
            h.getModelPredictPath(s.MODEL_NAME), nr)
        im = sitk.GetArrayFromImage(sitk.ReadImage(path))
        an.append(im)
    grid_all = []
    yx_size = (200, 200)  # crop size of each tile before padding
    for i in range(len(nrs)):
        # nz = np.argwhere(np.sum(gt[i], axis=(1, 2)) > 0)
        # nz = list(np.reshape(nz, nz.shape[:1]))
        # print(nz)
        # s = nz[int(round(len(nz) * perc))]
        # print(s)
        # NOTE(review): fixed slice index; this also shadows the Settings
        # instance `s` (no longer needed at this point in the function).
        s = 44
        # NOTE(review): shadows the builtin `filter` for the rest of the loop body.
        filter = sitk.LabelShapeStatisticsImageFilter()
        filter.Execute(sitk.GetImageFromArray(gt[i][s]))
        # Centroid of ground-truth label 1; reversed — presumably to go from
        # SimpleITK's (x, y) ordering to NumPy's (row, col) — TODO confirm.
        center = list(reversed(filter.GetCentroid(1)))
        # print(np.min(n))
        # print(np.max(n))
        ip_rgb = grey2rgb(ip[i][s])
        cropped = crop_around(ip_rgb, yx_size, center)
        cropped = normalize(cropped)
        gt_cropped = crop_around(gt[i][s], yx_size, center)
        an_cropped = crop_around(an[i][s], yx_size, center)
        # Ground truth overlaid in red, prediction in green, both half-opaque.
        gt_masked = get_mask_overlay(cropped, gt_cropped, [1, 0, 0], 0.5)
        gt_an_masked = get_mask_overlay(gt_masked, an_cropped, [0, 1, 0], 0.5)
        # White 10-pixel border around each tile (value 255 on all channels).
        gt_an_masked = np.pad(gt_an_masked, ((10, 10), (10, 10), (0, 0)),
                              mode='constant', constant_values=255)
        grid_all.append(gt_an_masked)
        # grid_all.append(crop_around(ip_rgb, yx_size, center))
        # grid_all.append(crop_around(get_mask_overlay(ip_rgb, gt[i][s]), yx_size, center))
        # grid_all.append(crop_around(get_mask_overlay(ip_rgb, an[i][s]), yx_size, center))
        print(grid_all[-1].shape)
    grid_size = [5, 5]  # tiles per row, rows
    print(len(grid_all))
    rows = []
    for y in range(grid_size[1]):
        print(y)
        # Concatenate one row of tiles horizontally.
        rows.append(
            np.concatenate(grid_all[y * grid_size[0]:y * grid_size[0] +
                                    grid_size[0]], axis=1))
    # Stack the rows vertically into the final grid image.
    img_out = np.concatenate(rows, axis=0)
    return img_out
@contextmanager
def suppress_stdout():
    """Context manager that silences stdout within its block, restoring the
    original stream afterwards even if an exception is raised."""
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            sys.stdout = old_stdout


# Folder under which this grid search stores its models and optimizer state.
# MAIN_FOLDER = 'la_2018_challenge_1/'
MAIN_FOLDER = 'sf_grid_search_05July2018/'

h = Helper(Settings())
bo_path = h.getBOPath(MAIN_FOLDER)  # path for pickled optimizer state
# nr_steps_path = h.getNrStepsPath(MAIN_FOLDER)


def target(param_names, param_values, model_nr):
    # Train/evaluate one model with the given Settings overrides.
    # model_nr = pickle.load(open(nr_steps_path, "rb")) + 1
    # pickle.dump(model_nr, open(nr_steps_path, "wb"))
    s = Settings()
    param_values = list(param_values)
    for i in range(len(param_names)):
        # Raises AttributeError if the settings attribute does not exist.
        eval('s.{}'.format(param_names[i]))
        if isinstance(param_values[i], str):
            # NOTE(review): chunk truncated here — the function body continues
            # outside this view.
# NOTE(review): fragment — the statements below are the tail of a Predict
# method whose `def` header lies outside this chunk; they reference patches,
# model, im, patch_corners, did_rescale, old_input_shape and sess defined
# earlier in that method.
        prob_patches = self.probPatches(patches, model)
        prob_image = self.fullImageFromPatches(im.shape, prob_patches, patch_corners)
        if did_rescale:
            print(old_input_shape)
            # Undo the earlier rescaling so the output matches the input size.
            prob_image = self.h.rescaleImage(prob_image, old_input_shape[1:])
        print(prob_image.shape)
        sess.close()
        return prob_image


if __name__ == '__main__':
    s = Settings()
    h = Helper(s)
    p = Predict(s, h)
    for i in [1]:  # set(range(18, 26)) - set([11, 17, 23]):
        t0 = time.time()
        # Make the custom loss resolvable when deserializing the model.
        keras.losses.custom_loss = h.custom_loss
        model_path = h.getModelPath(s.MODEL_NAME)
        model = load_model(model_path)
        # Tk().withdraw()
        # im_path = askopenfilename(title='Select LGE image')
        # Tk().withdraw()
        # output_file = asksaveasfilename(title='Select output folder')
        # folder = '{}extra/Dataset_case{}/'.format(s.PATH_TO_DATA, i)
        # NOTE(review): chunk truncated here — the loop body continues outside
        # this view.
# mapper.SetInputData(v_a) # mapper.SetOrientationToX() # mapper.SetSliceNumber(v_a.GetDimensions()[0] // 2) # # actor = vtk.vtkImageActor() # actor.SetMapper(mapper) # actor.GetProperty().SetOpacity(0.5) # # renderer.AddActor(actor) # # display(vtk_show(renderer, 800, 800)) if __name__ == '__main__': s = Settings() h = Helper(s) predict_path = h.getModelPredictPath(s.MODEL_NAME) gt = [] pred = [] for i in s.VALTEST_SET[:1]: gt_path = '{}anno_image_{}_{}.nii.gz'.format(predict_path, i, -1) pred_path = '{}prob_thresh_image_{}_{}.nii.gz'.format( predict_path, i, -1) gt_i = sitk.ReadImage(gt_path) pred_i = sitk.ReadImage(pred_path) # gt_i = sitk.GetArrayFromImage(gt_i)
def mean_dilated_mask(img, msk):
    """Mean image intensity over the dilated mask region (value 1)."""
    return np.mean(img[dilated_mask(msk) == 1])


def std_dilated_mask(img, msk):
    """Standard deviation of image intensity over the dilated mask region."""
    return np.std(img[dilated_mask(msk) == 1])


if __name__ == '__main__':
    interest_nr = 22  # case number singled out for closer inspection below
    s = Settings()
    s.GROUND_TRUTH = 'left_atrium'
    h = Helper(s)
    explore_set = s.TESTING_SET
    print('explore_set == {}'.format(explore_set))
    img_paths, msk_paths = h.getImagePaths(explore_set, False)
    imgs = h.loadImages(img_paths)
    msks = h.loadImages(msk_paths)
    # Position of the case of interest within explore_set.
    interest_idx = int(np.argwhere(np.array(explore_set) == interest_nr))
    # Named intensity statistics, each computed from an (image, mask) pair.
    props = {
        'Mean intensity': lambda img, msk: mean_intensity(img, msk),
        'Std intensity': lambda img, msk: std_intensity(img, msk),
        'Mean intensity mask': lambda img, msk: mean_intensity_mask(img, msk),
        'Std intensity mask': lambda img, msk: std_intensity_mask(img, msk),
        # NOTE(review): chunk truncated here — the dict continues outside this
        # view.
def target(learning_rate_power, dropout, loss_function):
    """Bayesian-optimization objective over three hyper-parameters.

    Each argument is a coordinate in [0, 1] (or the listed domain) supplied by
    the optimizer; it is rescaled into its real domain, applied to a fresh
    Settings object, and one model is trained and tested silently.

    Returns the mean 'Dice' metric of the trained model.
    Side effects: persists the optimizer state (bo) and the step counter
    (model_nr) to disk via pickle.
    """
    global bo
    # Persist the optimizer state from the previous step, if any
    # (bo == -1 is the "no state yet" sentinel set at module level).
    if bo != -1:
        with open(bo_path, "wb") as f:
            pickle.dump(bo, f)

    # Rescale each unit-interval input into its real search domain.
    domains = {
        'learning_rate_power': (-5, -3),
        'dropout': (0, 1),
        'loss_function': (0, 1),
    }
    raw = {
        'learning_rate_power': learning_rate_power,
        'dropout': dropout,
        'loss_function': loss_function,
    }
    hp = {}
    for k, (mn, mx) in domains.items():
        # Direct dict lookup replaces the original eval(k) variable lookup.
        hp[k] = mn + (mx - mn) * raw[k]
    print(' '.join(list(hp.keys())))
    print(' '.join([str(i) for i in list(hp.values())]))

    s = Settings()
    # Next model number = previously persisted step count + 1.
    with open(nr_steps_path, "rb") as f:
        model_nr = pickle.load(f) + 1
    s.MODEL_NAME = MAIN_FOLDER + str(model_nr)
    s.VALTEST_MODEL_NAMES = [s.MODEL_NAME]
    s.DROPOUT = hp['dropout']
    s.LEARNING_RATE = math.pow(10, hp['learning_rate_power'])
    # Below 0.5 selects Dice loss, otherwise weighted binary cross-entropy.
    s.LOSS_FUNCTION = 'dice' if hp['loss_function'] < .5 else 'weighted_binary_cross_entropy'
    with suppress_stdout():
        h = Helper(s)
        Train(s, h).train()
        metric_means, metric_sds = Test(s, h).test()
    # Record the completed step only after training/testing succeeded.
    with open(nr_steps_path, "wb") as f:
        pickle.dump(model_nr, f)
    return metric_means[s.MODEL_NAME]['Dice']
from tabulate import tabulate


@contextmanager
def suppress_stdout():
    """Context manager that silences stdout within its block, restoring the
    original stream afterwards even if an exception is raised."""
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            sys.stdout = old_stdout


# Folder under which this hyper-parameter search stores its models and state.
MAIN_FOLDER = 'hp_la_1/'

h = Helper(Settings())
bo_path = h.getBOPath(MAIN_FOLDER)  # path for the pickled optimizer state
nr_steps_path = h.getNrStepsPath(MAIN_FOLDER)  # path for the pickled step counter

bo = -1  # sentinel: no optimizer state yet


def target(learning_rate_power, dropout, loss_function):
    # Bayesian-optimization objective; arguments are unit-interval coordinates.
    global bo
    # Persist the optimizer state from the previous step, if any.
    if bo != -1:
        pickle.dump(bo, open(bo_path, "wb"))
    # return (1 - (learning_rate_power - .6) ** 2) * (1 - (dropout - .2) ** 2) * (1 - (art_fraction - .2) ** 2)
    # Unit-interval inputs are rescaled into these search domains.
    domains = {
        # 'unet_depth': (3, 5),
        'learning_rate_power': (-5, -3),
        # NOTE(review): chunk truncated here — the function continues outside
        # this view.