Example #1
#                        dataset_csv_file=test_csv,
#                        output_size=cnn_image_size)

# dataloader_test = DataLoader(dataset_test, batch_size=args.batch_size,
#                         shuffle=True, num_workers=4)

# build datasets
training_set = StrongLossDataset(file=args.training_file,
                                 image_path=args.image_path,
                                 transforms=transformer)
validation_set = StrongLossDataset(file=args.validation_file,
                                   image_path=args.image_path,
                                   transforms=transformer)

# build dataloader
training_loader = DataLoader(training_set,
                             batch_size=args.batch_size,
                             num_workers=4,
                             shuffle=True)
validation_loader = DataLoader(validation_set,
                               batch_size=args.batch_size,
                               num_workers=4,
                               shuffle=True)  # shuffling has no effect on validation metrics

# Define checkpoint name (the ':' in the timestamp is fine on POSIX
# filesystems but is not a legal Windows filename character)
checkpoint_name = os.path.join(
    args.result_model_dir,
    datetime.datetime.now().strftime("%Y-%m-%d_%H:%M") + '_' +
    args.result_model_fn + '.pth.tar')

print('Checkpoint name: ' + checkpoint_name)

# Train
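
The excerpt ends where the training loop would begin. A minimal sketch of what could follow, assuming model, optimizer, criterion, and args.epochs are defined elsewhere in the script (none of them appear in the excerpt above):

# Sketch only: model, optimizer, criterion, and args.epochs are assumptions,
# and torch is assumed to be imported at the top of the original script.
for epoch in range(args.epochs):
    model.train()
    for images, targets in training_loader:
        optimizer.zero_grad()                     # clear stale gradients
        loss = criterion(model(images), targets)  # forward pass + loss
        loss.backward()                           # backpropagate
        optimizer.step()                          # update parameters
    # save the latest weights under the timestamped checkpoint name
    torch.save({'epoch': epoch, 'state_dict': model.state_dict()},
               checkpoint_name)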
Example #2
    args = parser.parse_args()
    return args.spec, args.test_dataset


if __name__ == "__main__":
    spec, dataset = parse_spec()

    if not os.path.exists(spec):
        raise FileNotFoundError("Path to spec file not found")

    with open(spec, "r") as file:
        # experiment name = spec filename without its ".json" extension
        name = os.path.splitext(os.path.basename(spec))[0]
        spec_data = json.load(file)

    model_path = os.path.join(SAVED_MODEL_DIR, f'{name}.hp5')
    if not os.path.exists(model_path):
        raise UnsavedModelError("Run the experiment before evaluating")

    datasets = spec_data['data']['datasets']
    data_params = spec_data['data'].get('data_params', {})

    # DataLoader here is the project's own class, not torch.utils.data.DataLoader
    loader = DataLoader(datasets, **data_params)
    test_data = loader.generate_test_dataset(dataset)

    # TODO: This could easily be done to exhaustion. Alter DataLoader API to handle test sets better

    model = tf.keras.models.load_model(model_path)
    results = model.evaluate(test_data)

    print(f"Model Results: {results}")
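
For reference, the keys these spec-driven scripts actually read (architecture, builder_args, trainer_args, data.datasets, data.data_params; see also Example #4 below) imply a spec shaped roughly like the following. Every value here is an illustrative placeholder, not taken from the source:

# Illustrative spec shape, reconstructed only from the keys the scripts access.
example_spec = {
    "architecture": "vgg16",                 # "simple" | "baseline" | "vgg16"
    "builder_args": {"weights": "imagenet"},
    "trainer_args": {},
    "data": {
        "datasets": [],                      # passed positionally to DataLoader
        "data_params": {},                   # unpacked as DataLoader kwargs
    },
}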
Example #3
    normalization_tnf = normalize_image_dict_caffe
else:
    normalization_tnf = NormalizeImageDict(['source_image', 'target_image'])
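
NormalizeImageDict is the project's own transform; the excerpt shows only that it takes a list of sample-dict keys. A hypothetical reimplementation, using the standard ImageNet statistics, might look like the sketch below (this is not the project's actual code):

import torchvision.transforms as T

class NormalizeImageDictSketch:
    """Hypothetical stand-in for NormalizeImageDict: normalizes the image
    tensors stored under the given keys of a sample dict."""
    def __init__(self, keys):
        self.keys = keys
        self.norm = T.Normalize(mean=[0.485, 0.456, 0.406],  # ImageNet mean
                                std=[0.229, 0.224, 0.225])   # ImageNet std

    def __call__(self, sample):
        for key in self.keys:
            sample[key] = self.norm(sample[key])
        return sample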

batch_preprocessing_fn = BatchTensorToVars(use_cuda=use_cuda)

# Dataset and dataloader
dataset = Dataset(transform=normalization_tnf,
                  dataset_image_path=args.dataset_image_path,
                  dataset_csv_path=args.dataset_csv_path,
                  dataset_csv_file=train_csv,
                  output_size=cnn_image_size,
                  random_affine=bool(args.random_affine))

dataloader = DataLoader(dataset,
                        batch_size=args.batch_size,
                        shuffle=True,
                        num_workers=0)  # 0 = load batches in the main process

dataset_test = Dataset(transform=normalization_tnf,
                       dataset_image_path=args.dataset_image_path,
                       dataset_csv_path=args.dataset_csv_path,
                       dataset_csv_file=test_csv,
                       output_size=cnn_image_size)

dataloader_test = DataLoader(dataset_test,
                             batch_size=args.batch_size,
                             shuffle=True,
                             num_workers=4)

# Define checkpoint name
checkpoint_name = os.path.join(
Example #4
        spec_data = json.load(file)

    # Validate the json file
    validate_spec(spec_data)

    if spec_data["architecture"] == "simple":
        builder = partial(build_simple_cnn1, **spec_data["builder_args"])
        trainer = partial(train_from_scratch, **spec_data["trainer_args"])
    elif spec_data["architecture"] == "baseline":
        builder = partial(build_baseline)  # the baseline builder takes no args
        trainer = partial(train_from_scratch, **spec_data["trainer_args"])
    elif spec_data["architecture"] == "vgg16":
        builder = partial(build_vgg_model, **spec_data["builder_args"])
        if spec_data["builder_args"].get("weights") == "imagenet":
            trainer = partial(train_from_pretrained,
                              **spec_data["trainer_args"])
        else:
            trainer = partial(train_from_scratch, **spec_data["trainer_args"])
    else:
        # Without this guard, an architecture that slips past validate_spec
        # would leave builder/trainer unbound and crash at the main() call.
        raise ValueError(f"Unknown architecture: {spec_data['architecture']}")

    save_dir = "/home/matt/Projects/CV/final/saved_models/"
    results_dir = "/home/matt/Projects/CV/final/experiments/results/"

    datasets = spec_data['data']['datasets']

    data_params = spec_data['data'].get('data_params', {})
    loader = DataLoader(datasets, **data_params)

    save_path = os.path.join(save_dir, f'{name}.hp5')
    results_path = os.path.join(results_dir, name)

    main(builder, trainer, loader, save_path, results_path)
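
The functools.partial calls bind every architecture-specific argument up front, so main can stay architecture-agnostic. The real main is not shown in the excerpt; a sketch of a compatible signature, in which every call is an assumption:

def main(builder, trainer, loader, save_path, results_path):
    # Hypothetical sketch only; the real main() is not part of the excerpt.
    model = builder()               # builder_args were bound via partial
    model = trainer(model, loader)  # trainer_args were bound via partial
    model.save(save_path)           # Keras save, matching the .hp5 filename
    # evaluation results would presumably be written under results_path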