def get_model():
    """Build the U-Net configured by the module-level hyperparameters.

    All settings (class count, patch size, band count, upconv flag, and
    class weights) come from module-level constants.
    """
    model = unet_model(
        N_CLASSES,
        PATCH_SZ,
        n_channels=N_BANDS,
        upconv=UPCONV,
        class_weights=CLASS_WEIGHTS,
    )
    return model
def run_model(learning_rate):
    """Train a (512, 512, 1) U-Net at the given learning rate.

    Builds the model, compiles it with Adam + binary cross-entropy,
    trains for 50 epochs with TensorBoard logging under ``logs/<run>``,
    and saves the trained model under ``saved_models/<run>/``.

    Args:
        learning_rate: Adam learning rate (Keras default is 0.001).
    """
    print("Training with learning rate: " + str(learning_rate))
    # BUGFIX: the original used split('.')[1], which raises IndexError when
    # the rate renders without a '.' (e.g. scientific notation "1e-05" or an
    # integer).  [-1] keeps the same tag for ordinary decimals ("0.001" ->
    # "001") and degrades gracefully otherwise.
    lr_tag = str(learning_rate).split('.')[-1]
    train_name = datetime.now().strftime("%Y%m%d-%H%M%S") + "_lr" + lr_tag
    logdir = "logs/" + train_name
    tensorboard_callback = keras.callbacks.TensorBoard(log_dir=logdir)

    model = unet_model((512, 512, 1))
    opt = keras.optimizers.Adam(learning_rate=learning_rate)  # default is 0.001
    model.compile(optimizer=opt,
                  loss="binary_crossentropy",
                  metrics=["accuracy"])

    # create_generators
    seed = 1
    batchsize = 2
    train_generator, val_generator = create_generators(seed=seed,
                                                       batchsize=batchsize)
    # train_generator, val_generator = create_generators("tranformed_val", seed=seed, batchsize=batchsize)

    epochs = 50
    model.fit(
        train_generator,
        # Integer division: Keras expects integer step counts, not floats.
        steps_per_epoch=24 // batchsize,  # TODO: derive from dataset size
        # TODO: check that mult by 10 reduces number of epochs to get to same thing
        validation_data=val_generator,
        validation_steps=6 // batchsize,  # TODO: derive from dataset size
        epochs=epochs,
        verbose=1,
        callbacks=[tensorboard_callback])

    # exist_ok=True: don't crash when two runs land in the same
    # second-resolution timestamp (or when rerunning after a failed save).
    os.makedirs("saved_models/" + train_name + "/", exist_ok=True)
    model.save("saved_models/" + train_name + "/")
    print()
# --- CLI setup and per-patient inference loop (Python 2 print syntax) ---
# NOTE(review): `parser`, `read_labels`, `unet_model`, `os`, `time`, and
# `args.input_folder` are defined earlier in the file (outside this excerpt).
parser.add_argument('--labels', help='labels file', default='../stage1_labels.csv')
parser.add_argument('--model', help='model', required=True)
# --augment stores False into `deterministic`; by default preprocessing is
# deterministic (no augmentation).
parser.add_argument('--augment', help='augment images', dest='deterministic', action='store_false')
parser.set_defaults(deterministic=True)
args = parser.parse_args()
opts = vars(args)
lmap = read_labels(args.labels)  # label lookup keyed by patient id, presumably — verify against read_labels
model = unet_model(True, False)
print "loading existing weights into model from ", args.model
model.load_weights(args.model)
patients = os.listdir(args.input_folder)
patients.sort()  # stable, reproducible processing order
print "deterministic -> ", args.deterministic
t1 = int(round(time.time() * 1000))  # start timestamp in milliseconds
fnum = 0
for filename in patients:
    fnum += 1
    print "processing file ", filename, fnum, "/", len(patients)
    key = filename.split(".")[0]  # patient id = filename stem
    # (loop body continues past this excerpt)
def get_model():
    """Build the segmentation network selected by the module-level MODEL flag.

    Returns:
        The U-Net ('U') or W-Net ('W') built with the module-level
        N_CLASSES / PATCH_SZ / N_BANDS hyperparameters.

    Raises:
        ValueError: if MODEL is neither 'U' nor 'W'.  (The original fell
        through to ``return model`` with ``model`` unbound, raising an
        opaque UnboundLocalError.)
    """
    if MODEL == 'U':
        return unet_model(N_CLASSES, PATCH_SZ, n_channels=N_BANDS)
    if MODEL == 'W':
        return wnet_model(N_CLASSES, PATCH_SZ, n_channels=N_BANDS)
    raise ValueError("Unknown MODEL %r; expected 'U' or 'W'" % (MODEL,))
# --- CLI flags and LUNA16 train/test file discovery (Python 2 print syntax) ---
# NOTE(review): `parser`, `unet_model`, `glob`, `shuffle`, and
# `args.luna16_folder` / `args.checkpoint` come from earlier in the file
# (outside this excerpt).
parser.add_argument('--embedding', help='train embedding', dest='embed', action='store_true')
parser.set_defaults(embed=False)
args = parser.parse_args()
opts = vars(args)
# embedding params
# "subset[0-8]" is a glob character class: subsets 0-8 are training data,
# subset9 is held out for testing.
train_folder = args.luna16_folder + "/1_1_1mm_slices_lung/subset[0-8]"
test_folder = args.luna16_folder + "/1_1_1mm_slices_lung/subset9"
if args.embed:
    print "Training an embedding model"
    model = unet_model(False, True)
else:
    print "Training a unet model from scratch"
    model = unet_model(True, False)
if not args.checkpoint is None:
    print "loading existing weights into model from ", args.checkpoint
    # by_name=True: only layers whose names match the checkpoint get weights,
    # so a partially-compatible checkpoint can still be loaded.
    model.load_weights(args.checkpoint, by_name=True)
file_glob = train_folder + "/*.pkl.gz"
print file_glob
train_filenames = glob.glob(file_glob)
shuffle(train_filenames)  # randomize training order
file_glob = test_folder + "/*.pkl.gz"
print file_glob
def get_model(class_weights=None):
    """Build the U-Net with the module-level hyperparameters.

    Args:
        class_weights: optional per-class loss weights.  Defaults to the
            previously hard-coded ``[0.5, 0.1, 0.4]`` when omitted, so
            existing callers are unaffected.

    Returns:
        The compiled U-Net model from ``unet_model``.
    """
    if class_weights is None:
        # None sentinel instead of a mutable default argument.
        class_weights = [0.5, 0.1, 0.4]
    return unet_model(N_CLASSES, PATCH_SZ, n_channels=N_BANDS,
                      upconv=UPCONV, class_weights=class_weights)
def get_model():
    """Construct the U-Net from the module-level configuration values."""
    model = unet_model(
        outputClasses,
        dimpatchsz,
        n_channels=numbands,
        upconv=True,
        class_weights=outputClassWeight,
    )
    return model