# ---------------------------------------------------------------------------
# Training/validation bookkeeping, validation data pipeline and network
# construction.  Relies on names defined elsewhere in this file:
# patient_ids, validation_patients, memmap_data, memmap_gt, BATCH_SIZE,
# PATCH_SIZE, the generator helpers and build_UNet.
# (Dead commented-out plotting / checkpoint-loading code removed; the
# commented open(...) call there was also missing a closing parenthesis.)
# ---------------------------------------------------------------------------

# Estimate how many memmap rows belong to training vs. validation patients,
# assuming samples are spread evenly across patients — TODO confirm that
# assumption against how the memmap was built.
n_training_samples = int(float(len(patient_ids) - len(validation_patients)) /
                         float(len(patient_ids)) * memmap_data.shape[0])
n_val_samples = int(float(len(validation_patients)) /
                    float(len(patient_ids)) * memmap_data.shape[0])

# Validation pipeline: sample batches (patient id 40 excluded) -> center-crop
# to the patch size -> keep only segmentation channel 2 -> prefetch with
# 4 worker threads, caching up to 10 batches.
data_gen_validation = memmapGenerator_allInOne_segmentation_lossSampling(
    memmap_data, memmap_gt, BATCH_SIZE, validation_patients, mode="test",
    ignore=[40])
data_gen_validation = center_crop_generator(data_gen_validation,
                                            (PATCH_SIZE, PATCH_SIZE))
data_gen_validation = seg_channel_selection_generator(data_gen_validation, [2])
data_gen_validation = multi_threaded_generator(data_gen_validation,
                                               num_threads=4, num_cached=10)

# U-Net with 20 input channels and 5 output classes; the loss is computed on
# the flattened output layer.
net = build_UNet(20, BATCH_SIZE, num_output_classes=5, base_n_filters=16,
                 input_dim=(PATCH_SIZE, PATCH_SIZE))
output_layer_for_loss = net["output_flattened"]

# Fixed number of batches per epoch; the commented alternative derived it
# from the number of training samples instead.
n_batches_per_epoch = 500
# n_batches_per_epoch = np.floor(n_training_samples/float(BATCH_SIZE))
# ---------------------------------------------------------------------------
# Same setup as above, but resumes training from the epoch-26 checkpoint and
# uses 250 batches per epoch.  Relies on names defined elsewhere in this
# file: patient_ids, validation_patients, memmap_data, memmap_gt, BATCH_SIZE,
# PATCH_SIZE, results_dir, EXPERIMENT_NAME, the generator helpers,
# build_UNet, cPickle and lasagne.
# ---------------------------------------------------------------------------

# Estimated per-split sample counts (assumes samples are spread evenly
# across patients — TODO confirm).
n_training_samples = int(float(len(patient_ids) - len(validation_patients)) /
                         float(len(patient_ids)) * memmap_data.shape[0])
n_val_samples = int(float(len(validation_patients)) /
                    float(len(patient_ids)) * memmap_data.shape[0])

# Validation pipeline: sample batches (patient id 40 excluded) -> center-crop
# -> keep segmentation channel 2 -> multi-threaded prefetch.
data_gen_validation = memmapGenerator_allInOne_segmentation_lossSampling(
    memmap_data, memmap_gt, BATCH_SIZE, validation_patients, mode="test",
    ignore=[40])
data_gen_validation = center_crop_generator(data_gen_validation,
                                            (PATCH_SIZE, PATCH_SIZE))
data_gen_validation = seg_channel_selection_generator(data_gen_validation, [2])
data_gen_validation = multi_threaded_generator(data_gen_validation,
                                               num_threads=4, num_cached=10)

net = build_UNet(20, BATCH_SIZE, num_output_classes=5, base_n_filters=16,
                 input_dim=(PATCH_SIZE, PATCH_SIZE))
output_layer_for_loss = net["output_flattened"]

# Restore network weights and training history from the epoch-26 checkpoint.
# BUGFIX: pickle files must be opened in binary mode ('rb'); text mode
# corrupts protocol >= 1 pickles on Windows and fails on Python 3.
with open(os.path.join(results_dir,
                       "%s_Params_ep26.pkl" % EXPERIMENT_NAME), 'rb') as f:
    params = cPickle.load(f)
lasagne.layers.set_all_param_values(output_layer_for_loss, params)

with open(os.path.join(results_dir,
                       "%s_allLossesNAccur_ep26.pkl" % EXPERIMENT_NAME),
          'rb') as f:
    # Older checkpoints stored one element less:
    # [all_training_losses, all_training_accuracies, all_validation_losses, all_validation_accuracies, auc_all] = cPickle.load(f)
    [all_training_losses, all_training_accuracies, all_validation_losses,
     all_validation_accuracies, auc_all, losses] = cPickle.load(f)

n_batches_per_epoch = 250
# n_batches_per_epoch = np.floor(n_training_samples/float(BATCH_SIZE))
memmap_name = "patchSegmentation_allInOne_ws_t1km_flair_adc_cbv_resized" BATCH_SIZE = 10 PATCH_SIZE = 15 with open(dataset_folder + "%s_properties.pkl" % (memmap_name), 'r') as f: my_dict = cPickle.load(f) data_ctr = my_dict['n_data'] train_shape = my_dict['train_neg_shape'] info_memmap_shape = my_dict['info_shape'] memmap_data = memmap(dataset_folder + "%s.memmap" % (memmap_name), dtype=np.float32, mode="r", shape=train_shape) memmap_gt = memmap(dataset_folder + "%s_info.memmap" % (memmap_name), dtype=np.float32, mode="r", shape=info_memmap_shape) data_gen = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, 1, [0, 1], num_batches=10) for data, seg, ids in data_gen: print ids[0] for _ in range(5): data_gen = memmapGenerator_allInOne_segmentation_lossSampling(memmap_data, memmap_gt, 1, [0, 1]) data_gen_mt = Multithreaded_Generator(data_gen, 8, 30) ctr = 0 for data, seg, ids in data_gen_mt: print ids[0] ctr += 1 if ctr > 10: break data_gen_mt._finish()