Example #1
0
                                           batch_size=args.batch_size,
                                           shuffle=True)

# Evaluation loader: same batch size as training, but no shuffling so that
# test metrics are reproducible across runs.
test_loader = torch.utils.data.DataLoader(dataset=test_dataset,
                                          batch_size=args.batch_size,
                                          shuffle=False)

###############   Model   ##################
# Instantiate the network chosen on the command line.  `cnn` is a module-level
# global used by train()/test() further down in this script.
if args.network == 'CNN':
    cnn = CNN()
elif args.network == 'NIN':
    cnn = NIN()
elif args.network == 'ResNet':
    # Four stages of two residual blocks each (ResNet-18-style layout —
    # depends on the ResidualBlock implementation; confirm against its file).
    cnn = ResNet(ResidualBlock, [2, 2, 2, 2])
else:
    # Fail fast with a clear message instead of a NameError on `cnn` below.
    raise ValueError('Unknown network: {}'.format(args.network))
if not args.no_cuda:
    cnn.cuda()
print(cnn)

################   Loss   #################
# Cross-entropy over class logits; Adam with the CLI-provided learning rate.
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(cnn.parameters(), lr=args.lr)
# 10-class confusion-matrix accumulator (torchnet meter) — presumably a
# 10-way classification dataset (CIFAR-10 style); confirm against the data.
mtr = meter.ConfusionMeter(k=10)


################   Training   #############
def train(epoch):
    # Run one training epoch over train_loader.
    # NOTE(review): this function appears truncated in this chunk — the body
    # ends right after moving images to the GPU; the labels transfer,
    # forward/backward pass and optimizer step are not visible here.
    cnn.train()

    for i, (images, labels) in enumerate(train_loader):
        if not args.no_cuda:
            images = images.cuda()
def run(config, kwargs):
    """Train and evaluate the model on one cross-validation fold.

    Mutates ``config`` in place (sets ``'model_signature'`` and the derived
    ``'loc_info'`` / ``'self_att'`` flags), loads the labels/patches/features
    for the class given by ``config['curr_class']``, trains on the fold given
    by ``config['curr_fold']``, and appends results to the per-operator log
    and result files.

    Args:
        config: experiment configuration dict (hyper-parameters, file paths,
            fold/class selection, device, ...).
        kwargs: extra arguments forwarded verbatim to ``experiment()``.

    Returns:
        Tuple ``(mean_train_err, std_train_err, mean_test_err, std_test_err)``
        over the folds processed here (a single fold, so the stds are 0).
    """
    # Timestamp 'YYYY-MM-DD HH:MM:SS' that makes snapshot/log dirs unique.
    config['model_signature'] = str(datetime.datetime.now())[0:19]

    model_name = config['model_name']

    # Any location-encoding variant implies the generic location flag.
    if config['loc_gauss'] or config['loc_inv_q'] or config['loc_att']:
        config['loc_info'] = True

    # Any attention variant implies the generic self-attention flag.
    if config['att_gauss_abnormal'] or config['att_inv_q_abnormal'] or config['att_gauss_spatial'] or \
            config['att_inv_q_spatial'] or config['att_module']:
        config['self_att'] = True

    print(config)

    with open('experiment_log_' + config['operator'] + '.txt', 'a') as f:
        print(config, file=f)

    print('\nSTART KFOLDS CROSS VALIDATION\n')

    train_error_folds = []
    test_error_folds = []

    # Per-patch labels/metadata (CSV) and precomputed feature tensors.
    labels = pd.read_csv(config['labels_filename'], index_col=0)
    patches = pd.read_csv(config['patches_filename'], index_col=0)
    features = torch.load(config['features_filename'])

    curr_class = config['curr_class']
    curr_fold = config['curr_fold']
    # Only the single fold selected in the config is processed here;
    # presumably separate jobs each run one fold of the k-fold CV.
    for current_fold in [curr_fold]:

        print(
            '################ Train-Test fold: {}/{} #################'.format(
                current_fold + 1, config['kfold']))

        # Avoid shadowing the builtin `dir`; create the dir race-free.
        snapshot_dir = 'snapshots/' + model_name + '_' + config[
            'model_signature'] + '/'
        os.makedirs(snapshot_dir, exist_ok=True)

        sw = SummaryWriter(
            f"tensorboard/{model_name}_{config['model_signature']}_fold_{current_fold}"
        )

        train_set, val_set, test_set = load_bacteria_cv(labels,
                                                        patches,
                                                        features,
                                                        config['split'],
                                                        curr_class,
                                                        shuffle=True)

        # Inverse-frequency sample weights so the sampler rebalances classes:
        # weight = 1 - count/total, i.e. rarer classes get larger weights.
        clss, counts = np.unique(train_set.label_list, return_counts=True)
        counts = 1 - counts / np.sum(counts)
        class_counts = {int(clss[c]): counts[c] for c in range(len(clss))}
        train_sampleweights = [
            class_counts[int(y_bi)] for y_bi in train_set.label_list
        ]
        sampler = WeightedRandomSampler(
            weights=train_sampleweights,
            num_samples=len(train_sampleweights),
        )

        print('\tcreate models')
        args = munchify(config)
        args.activation = nn.ReLU()
        model = Model(args)
        model.cuda(config['device'])

        print('\tinit optimizer')
        if config['optimizer'] == 'Adam':
            optimizer = optim.Adam(model.parameters(),
                                   lr=config['lr'],
                                   betas=(0.9, 0.999),
                                   weight_decay=config['reg'])
        elif config['optimizer'] == 'SGD':
            optimizer = optim.SGD(model.parameters(),
                                  lr=config['lr'],
                                  weight_decay=config['reg'],
                                  momentum=0.9)
        else:
            # ValueError subclasses Exception, so existing broad handlers
            # (if any) still catch it.
            raise ValueError('Wrong name of the optimizer!')

        # Decay the learning rate by 10x every 45 epochs.
        scheduler = optim.lr_scheduler.StepLR(optimizer,
                                              step_size=45,
                                              gamma=0.1)

        print('\tperform experiment\n')

        try:
            train_error, test_error = experiment(
                args,
                kwargs,
                current_fold,
                train_set,
                val_set,
                test_set,
                sampler,
                model,
                optimizer,
                scheduler,
                snapshot_dir,
                sw,
            )
        finally:
            # Flush and close the TensorBoard writer even if training fails,
            # so partial event files are not lost.
            sw.close()

        train_error_folds.append(train_error)
        test_error_folds.append(test_error)

        with open('final_results_' + config['operator'] + '.txt', 'a') as f:
            print('Class: {}\n'
                  'RESULT FOR A SINGLE FOLD\n'
                  'SEED: {}\n'
                  'OPERATOR: {}\n'
                  'FOLD: {}\n'
                  'ERROR (TRAIN): {}\n'
                  'ERROR (TEST): {}\n\n'.format(curr_class, config['seed'],
                                                config['operator'],
                                                current_fold, train_error,
                                                test_error),
                  file=f)
    # ==================================================================================================================
    print(
        '-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-'
    )
    with open('experiment_log_' + config['operator'] + '.txt', 'a') as f:
        print(
            '-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-\n',
            file=f)

    return np.mean(train_error_folds), np.std(train_error_folds), np.mean(
        test_error_folds), np.std(test_error_folds)