def test_unet(setup, params):
    """Smoke-test the UNet forward pass.

    The prediction must keep the input's batch and spatial dimensions and
    produce one channel per class from ``params["n_classes"]``.
    """
    dummy_batch = np.random.random((1, 224, 224, 3))
    net = model.UNet(params)
    prediction = net.predict(dummy_batch)
    assert prediction.shape == (1, 224, 224, params["n_classes"])
def test_unet_regularizer(setup, params):
    """Check that every regularized layer carries the L2 factor from ``params["l2"]``.

    Uses ``getattr`` instead of peeking at ``layer.__dict__``: the original
    check would select a layer whose ``kernel_regularizer`` is explicitly
    ``None`` and then crash with AttributeError on ``.l2``. Layers without a
    regularizer are skipped.
    """
    unet = model.UNet(params)
    for layer in unet.layers:
        regularizer = getattr(layer, "kernel_regularizer", None)
        if regularizer is not None:
            # Float comparison with an absolute tolerance, as in the original.
            assert math.isclose(float(regularizer.l2), params["l2"], abs_tol=1e-6)
def train(args: dict):
    """Build, compile, and train a UNet from the settings in *args*.

    Required keys: ``learning_rate``, ``n_classes``, ``train_data``,
    ``segmented_data``, ``batch_size``. Optional keys (defaults preserve the
    previous hard-coded behavior): ``steps_per_epoch`` (30), ``epochs`` (100).

    Side effects: prints a model summary and writes the best weights (by
    training loss) to ``./UNet/params/model.h5``.
    """
    lr: float = args["learning_rate"]
    n_classes: int = args["n_classes"]

    unet = model.UNet(args)
    unet.compile(
        optimizer=tf.keras.optimizers.Adam(lr),
        loss=dice_coef_loss,
        metrics=["accuracy"],
    )
    unet.summary()

    # Keep only the best weights (monitored on training loss) to limit disk use.
    ckpt = tf.keras.callbacks.ModelCheckpoint(
        filepath="./UNet/params/model.h5",
        monitor="loss",
        save_best_only=True,
        save_weights_only=True,
        verbose=1,
    )

    segmented_data = args["segmented_data"]
    generator = data_gen(
        args["train_data"], segmented_data, args["batch_size"], n_classes
    )

    # Model.fit accepts generators directly; fit_generator is deprecated and
    # removed in modern tf.keras. Schedule values are now overridable via args.
    unet.fit(
        generator,
        steps_per_epoch=args.get("steps_per_epoch", 30),
        epochs=args.get("epochs", 100),
        callbacks=[ckpt],
    )