# Example 1
def fit_fn(ds_train, ds_val, verbose):
    """Train a fresh LeNet-5 on ``ds_train`` for 5 epochs, validating on ``ds_val``.

    Parameters
    ----------
    ds_train : Dataset
        Training dataset; wrapped in a shuffling, multi-worker DataLoader.
    ds_val : Dataset
        Validation dataset; evaluated after each epoch.
    verbose : bool
        Forwarded to the trainer's verbosity flag.

    Returns
    -------
    tuple
        ``(last_acc, best_acc)`` — validation accuracy after the final
        epoch and the best validation accuracy seen during training.
    """
    net = LeNet5()
    criterion = nn.CrossEntropyLoss()
    optimizer = SGD(net.parameters(),
                    lr=0.01,
                    momentum=0.9,
                    weight_decay=1e-4,
                    nesterov=True)
    # Cosine annealing with a 5-epoch warmup. NOTE(review): `warmup` /
    # `warmup_eta_min` are not torch's stock CosineAnnealingLR API — this is
    # presumably a project-local scheduler variant; confirm its signature.
    lr_scheduler = CosineAnnealingLR(optimizer,
                                     T_max=30,
                                     eta_min=0.001,
                                     warmup=5,
                                     warmup_eta_min=0.01)

    # Metrics reported on the training set.
    metrics = {
        'loss': TrainLoss(),
        'acc': Accuracy(),
    }

    # Metrics reported on the validation set.
    test_metrics = {
        'loss': Loss(criterion),
        'acc': Accuracy(),
    }

    trainer = Trainer(net,
                      criterion,
                      optimizer,
                      lr_scheduler,
                      metrics=metrics,
                      test_metrics=test_metrics,
                      work_dir="./checkpoints/MNIST-LeNet5")
    # Fix: honor the caller's `verbose` argument; the original hard-coded
    # False here, silently ignoring the parameter.
    trainer._verbose = verbose

    train_loader = DataLoader(ds_train,
                              batch_size=128,
                              shuffle=True,
                              num_workers=2,
                              pin_memory=True)
    val_loader = DataLoader(ds_val, batch_size=128)

    accs = trainer.fit(train_loader, 5, val_loader=val_loader)['acc']
    return accs[-1], max(accs)
                                  ('sep_conv_5x5', 0), ('sep_conv_3x3', 1),
                                  ('avg_pool_3x3', 0), ('dil_conv_3x3', 1)],
                          normal_concat=[2, 3, 4, 5],
                          reduce=[('sep_conv_5x5', 1), ('max_pool_3x3', 0),
                                  ('sep_conv_5x5', 1), ('sep_conv_5x5', 2),
                                  ('sep_conv_3x3', 0), ('sep_conv_3x3', 3),
                                  ('sep_conv_3x3', 1), ('sep_conv_3x3', 2)],
                          reduce_concat=[2, 3, 4, 5])

# PC-DARTS CIFAR-10 training setup. (Fragment: the Genotype literal that
# defines `PC_DARTS_cifar` is truncated above this block.)
drop_path = 0.3  # drop-path (stochastic-depth-style) rate passed to NASNet
epochs = 600
# net = NASNet(36, 20, True, drop_path, 10, PC_DARTS_cifar)
# Much smaller configuration than the commented-out full-size model above —
# presumably a quick/debug run; verify against NASNet's signature.
net = NASNet(4, 5, True, drop_path, 10, PC_DARTS_cifar)
# Project-local CrossEntropyLoss; `auxiliary_weight=0.4` suggests it folds in
# an auxiliary-head loss term — confirm against its definition.
criterion = CrossEntropyLoss(auxiliary_weight=0.4)
optimizer = SGD(net.parameters(), lr=0.025, momentum=0.9, weight_decay=3e-4)
lr_scheduler = CosineAnnealingLR(optimizer, epochs, min_lr=0)

# Metrics reported on the training set.
train_metrics = {
    'loss': TrainLoss(),
    'acc': Accuracy(),
}

# Metrics reported on the evaluation set; wraps a fresh CrossEntropyLoss
# constructed without the auxiliary weighting.
eval_metrics = {
    'loss': Loss(CrossEntropyLoss()),
    'acc': Accuracy(),
}

trainer = CNNLearner(net,
                     criterion,
                     optimizer,
                     lr_scheduler,
# Example 3
# LeNet-5 training setup with an LR range scaled by a multiplier.
batch_size = 128
steps_per_epoch = math.ceil(len(ds_train) / batch_size)
# NOTE(review): `mul` is not defined anywhere in this fragment — it must come
# from earlier (truncated) context; cf. `mul = 1` in a sibling example.
min_lr = 0.01 * mul
max_lr = 0.1 * mul

net = LeNet5()
criterion = nn.CrossEntropyLoss()
optimizer = SGD(net.parameters(),
                lr=min_lr,
                momentum=0.9,
                weight_decay=1e-4,
                nesterov=True)
# lr_scheduler = MultiStepLR(optimizer, [10, 20], gamma=0.1)
# Cosine annealing over 30 epochs with a 5-epoch warmup. NOTE(review):
# `warmup` / `warmup_eta_min` are not torch's stock CosineAnnealingLR API —
# presumably a project-local scheduler variant; confirm its signature.
lr_scheduler = CosineAnnealingLR(optimizer,
                                 T_max=30,
                                 eta_min=0.001,
                                 warmup=5,
                                 warmup_eta_min=0.001)

# Metrics reported on the training set.
metrics = {
    'loss': TrainLoss(),
    'acc': Accuracy(),
}

# Metrics reported on the test/validation set.
test_metrics = {
    'loss': Loss(criterion),
    'acc': Accuracy(),
}

trainer = Trainer(net,
                  criterion,
# Example 4
# CIFAR-10 test split, downloaded on demand into `data_home`.
ds_test = CIFAR10(data_home,
                  train=False,
                  download=True,
                  transform=test_transform)

net = LeNet5()
# net = efficientnet_b0(num_classes=10, dropout=0.3, drop_connect=0.2)
criterion = nn.CrossEntropyLoss()
optimizer = SGD(net.parameters(),
                lr=0.01,
                momentum=0.9,
                weight_decay=1e-4,
                nesterov=True)
# Cosine annealing over 100 epochs with a 5-epoch warmup. NOTE(review):
# `warmup` / `warmup_eta_min` are not torch's stock CosineAnnealingLR API —
# presumably a project-local scheduler variant; confirm its signature.
lr_scheduler = CosineAnnealingLR(optimizer,
                                 100,
                                 eta_min=1e-3,
                                 warmup=5,
                                 warmup_eta_min=1e-3)

# Metrics reported on the training set.
metrics = {
    'loss': TrainLoss(),
    'acc': Accuracy(),
}

trainer = Trainer(net,
                  criterion,
                  optimizer,
                  lr_scheduler,
                  metrics=metrics,
                  save_path="./checkpoints",
                  # NOTE(review): checkpoint name says EfficientNet but the
                  # active model is LeNet5 (the efficientnet_b0 line is
                  # commented out) — likely a leftover; confirm before
                  # relying on the checkpoint path.
                  name="CIFAR10-EfficientNet")
# Example 5
# LeNet-5 training setup; `mul` scales the base learning rate.
mul = 1
batch_size = 128
steps_per_epoch = math.ceil(len(ds_train) / batch_size)
min_lr = 0.01 * mul

net = LeNet5()
criterion = nn.CrossEntropyLoss()
optimizer = SGD(net.parameters(),
                lr=min_lr,
                momentum=0.9,
                weight_decay=1e-4,
                nesterov=True)
# Cosine annealing over 30 epochs with a 5-epoch warmup. NOTE(review): the
# keyword names here (`epochs`, `min_lr`, `warmup_epoch`, `warmup_min_lr`)
# differ from the `T_max`/`eta_min`/`warmup`/`warmup_eta_min` spelling used
# elsewhere in this file — confirm which signature the project's scheduler
# actually accepts.
lr_scheduler = CosineAnnealingLR(optimizer,
                                 epochs=30,
                                 min_lr=0.001,
                                 warmup_epoch=5,
                                 warmup_min_lr=0.001)

# Metrics reported on the training set.
train_metrics = {
    'loss': TrainLoss(),
    'acc': Accuracy(),
}

# Metrics reported on the evaluation set.
eval_metrics = {
    'loss': Loss(criterion),
    'acc': Accuracy(),
}

trainer = CNNLearner(net,
                     criterion,