Example #1
    "validate_test_split": args.validate_test_split,
    "augment": False,
    "shuffle": False,
    "seed": args.random_seed
}
validation_generator = DataGenerator("validate", args.data_path,
                                     **validation_data_params)

# Only rank 0 prints, to avoid duplicate output across workers.
if hvd.rank() == 0:
    validation_generator.print_info()

# Fit the model
# Do at least 3 steps for training and validation
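# args.bz is the per-worker batch size, so one step consumes
# args.bz * hvd.size() samples across all Horovod workers.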
steps_per_epoch = max(
    3,
    training_generator.get_length() // (args.bz * hvd.size()))
validation_steps = max(
    3, validation_generator.get_length() // (args.bz * hvd.size()))

unet_model.model.fit_generator(
    training_generator,
    steps_per_epoch=steps_per_epoch,
    epochs=args.epochs,
    verbose=verbose,
    validation_data=validation_generator,
    # validation_steps=validation_steps,
    callbacks=callbacks,
    max_queue_size=1,  # args.num_prefetched_batches
    workers=1,  # args.num_data_loaders
    use_multiprocessing=True)
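
Both examples presuppose Horovod plumbing that is not shown: hvd initialized, the optimizer wrapped for allreduce, and a callbacks list that keeps workers in sync. Below is a minimal sketch of that surrounding setup, assuming tf.keras; the SGD optimizer, the scaled learning rate, and the checkpoint filename are illustrative assumptions, not taken from the original code.

import horovod.tensorflow.keras as hvd
from tensorflow import keras

hvd.init()  # one process per worker; rank 0 acts as the chief

# Wrap the optimizer so gradients are averaged across workers.
# The optimizer choice and scaled learning rate are assumptions.
opt = hvd.DistributedOptimizer(keras.optimizers.SGD(lr=0.01 * hvd.size()))

callbacks = [
    # Broadcast rank 0's initial weights so every worker starts identically.
    hvd.callbacks.BroadcastGlobalVariablesCallback(0),
    # Average metrics (e.g. validation loss) across workers each epoch.
    hvd.callbacks.MetricAverageCallback(),
]
if hvd.rank() == 0:
    # Only rank 0 writes checkpoints, to avoid clobbering files.
    callbacks.append(keras.callbacks.ModelCheckpoint("checkpoint-{epoch}.h5"))

verbose = 1 if hvd.rank() == 0 else 0  # silence duplicate progress bars
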
Example #2
    "validate_test_split": args.validate_test_split,
    "augment": False,
    "shuffle": False,
    "seed": args.random_seed
}
validation_generator = DataGenerator("validate", args.data_path,
                                     **validation_data_params)

if hvd.rank() == 0:
    validation_generator.print_info()

# Fit the model
# Do at least 3 steps for training and validation
steps_per_epoch = max(
    3,
    training_generator.get_length() // (args.bz * hvd.size()))
validation_steps = max(
    3,
    validation_generator.get_length() // (args.bz * hvd.size()))

unet_model.model.fit_generator(training_generator,
                               steps_per_epoch=steps_per_epoch,
                               epochs=args.epochs,
                               verbose=verbose,
                               validation_data=validation_generator,
                               validation_steps=validation_steps,
                               callbacks=callbacks,
                               max_queue_size=args.num_prefetched_batches,
                               workers=args.num_data_loaders,
                               use_multiprocessing=False)
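
The DataGenerator class itself is not shown. The sketch below is a hypothetical reconstruction of only the interface the examples rely on (the constructor signature, print_info(), and get_length()), written as a keras.utils.Sequence so it is safe to consume with multiple workers or use_multiprocessing=True. The sample count, batch shape, and loading logic are placeholders.

import numpy as np
from tensorflow.keras.utils import Sequence

class DataGenerator(Sequence):
    """Hypothetical sketch of the generator interface used above."""

    def __init__(self, split, data_path, batch_size=8,
                 validate_test_split=0.5, augment=False,
                 shuffle=False, seed=42, **kwargs):
        self.split = split            # "train", "validate", or "test"
        self.batch_size = batch_size
        self.rng = np.random.RandomState(seed)
        # Assumption: samples would be indexed from data_path; a fixed
        # placeholder count stands in for the real dataset size.
        self.num_samples = 100
        self.indices = np.arange(self.num_samples)
        if shuffle:
            self.rng.shuffle(self.indices)

    def get_length(self):
        # Total sample count, used in the steps_per_epoch math above.
        return self.num_samples

    def print_info(self):
        print(f"{self.split} generator: {self.num_samples} samples")

    def __len__(self):
        # Batches per epoch.
        return max(1, self.num_samples // self.batch_size)

    def __getitem__(self, idx):
        batch = self.indices[idx * self.batch_size:(idx + 1) * self.batch_size]
        # Assumption: 128x128 single-channel images and masks.
        x = np.zeros((len(batch), 128, 128, 1), dtype=np.float32)
        y = np.zeros((len(batch), 128, 128, 1), dtype=np.float32)
        return x, y

Because a Sequence exposes __len__, Keras can infer the number of validation batches on its own, which is consistent with Example #1 passing validation_data while leaving validation_steps commented out.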