Example #1
# Runs inside enclosing loops over `loss` (a loss-function instance) and
# `tp_noise` (a label-noise type); `data_dir`, `batch_size` and `lr` come
# from the surrounding scope.
loss_name = loss.__class__.__name__
print(f"Loss: {loss_name}\n")

for noise_value in noise_values:
    # run one experiment per noise probability
    name = f'CNN_{loss_name}_{tp_noise}_{noise_value}'
    print(f"Training {name} with noise of type {tp_noise} and probability {noise_value}...")

    # data preparation: noisy FashionMNIST loaders for this noise setting
    dataset = FashionMnistHandler(data_dir, False)
    dataset.load()
    train_loader, val_loader, test_loader = dataset.get_noisy_loaders(
        p_noise=noise_value,
        type_noise=tp_noise,
        val_size=1 / 6,
        train_batch_size=batch_size,
        val_batch_size=128,
        test_batch_size=128,
    )

    # model, optimizer, summary
    model = CNNModel()
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)
    summ = Summary(name, type_noise=tp_noise, noise_rate=noise_value)

    # solver: pre-train, then train with the selected loss
    solver = Solver(name, PROJECT_DIR, batch_model_dir, batch_summaries_dir, model,
                    optimizer, loss, summ, train_loader, val_loader, test_loader)
    solver.pretrain()
    solver.train(loss)

    print("Completed training...")
Example #2
    # Assumes an argparse parser, a parse() helper, osp (os.path), shutil,
    # SummaryWriter, DIV2K and Solver are imported/defined earlier in the script.
    args = parser.parse_args()
    args, lg = parse(args)

    # Tensorboard save directory
    resume = args['solver']['resume']
    tensorboard_path = 'Tensorboard/{}'.format(args['name'])

    if not resume:
        if osp.exists(tensorboard_path):
            shutil.rmtree(tensorboard_path, ignore_errors=True)
            lg.info('Remove dir: [{}]'.format(tensorboard_path))
    writer = SummaryWriter(tensorboard_path)

    # create dataset
    train_data = DIV2K(args['datasets']['train'])
    lg.info('Create train dataset successfully!')
    lg.info('Training: [{}] iterations for each epoch'.format(len(train_data)))

    val_data = DIV2K(args['datasets']['val'])
    lg.info('Create val dataset successfully!')
    lg.info('Validating: [{}] iterations for each epoch'.format(len(val_data)))

    # create solver
    lg.info('Preparing for experiment: [{}]'.format(args['name']))
    solver = Solver(args, train_data, val_data, writer)

    # train
    lg.info('Start training...')
    solver.train()
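
The snippet only reads a few keys from the parsed options. A hypothetical minimal `args` dictionary covering just those keys is sketched below; the experiment name and the empty dataset options are placeholders, and the real config presumably carries many more fields consumed by DIV2K and Solver.

# Hypothetical minimal `args` as returned by parse(); only the keys the
# snippet above actually reads are shown, with placeholder values.
args = {
    'name': 'div2k_baseline',        # experiment name, also the Tensorboard subdirectory
    'solver': {'resume': False},     # False -> any stale Tensorboard logs are removed
    'datasets': {
        'train': {},                 # options forwarded to DIV2K() for the training split
        'val': {},                   # options forwarded to DIV2K() for the validation split
    },
}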