Example #1
def fit_fn(ds_train, ds_val, verbose):

    net = LeNet5()
    criterion = nn.CrossEntropyLoss()
    optimizer = SGD(net.parameters(),
                    lr=0.01,
                    momentum=0.9,
                    weight_decay=1e-4,
                    nesterov=True)
    # lr_scheduler = MultiStepLR(optimizer, [10, 20], gamma=0.1)
    # Cosine annealing with warmup; warmup and warmup_eta_min are extensions
    # of this library's CosineAnnealingLR, not torch.optim's.
    lr_scheduler = CosineAnnealingLR(optimizer,
                                     T_max=30,
                                     eta_min=0.001,
                                     warmup=5,
                                     warmup_eta_min=0.01)

    metrics = {
        'loss': TrainLoss(),
        'acc': Accuracy(),
    }

    test_metrics = {
        'loss': Loss(criterion),
        'acc': Accuracy(),
    }

    trainer = Trainer(net,
                      criterion,
                      optimizer,
                      lr_scheduler,
                      metrics=metrics,
                      test_metrics=test_metrics,
                      work_dir="./checkpoints/MNIST-LeNet5")
    trainer._verbose = verbose  # forward the caller's verbosity flag instead of hardcoding False
    # summary(net, (1, 32, 32))

    train_loader = DataLoader(ds_train,
                              batch_size=128,
                              shuffle=True,
                              num_workers=2,
                              pin_memory=True)
    val_loader = DataLoader(ds_val, batch_size=128)

    accs = trainer.fit(train_loader, 5, val_loader=val_loader)['acc']
    return accs[-1], max(accs)
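
A minimal sketch of how fit_fn might be driven, assuming torchvision MNIST resized to the 32x32 input LeNet-5 expects (the dataset and transform setup here are assumptions, not part of the original snippet).

from torchvision.datasets import MNIST
from torchvision.transforms import Compose, Resize, ToTensor

transform = Compose([Resize(32), ToTensor()])
ds_train = MNIST('./data', train=True, download=True, transform=transform)
ds_val = MNIST('./data', train=False, download=True, transform=transform)

final_acc, best_acc = fit_fn(ds_train, ds_val, verbose=False)
print(f"final acc: {final_acc:.4f}, best acc: {best_acc:.4f}")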
Example #2
    def __init__(self,
                 model,
                 criterion,
                 optimizer_model,
                 optimizer_arch,
                 lr_scheduler,
                 metrics=None,
                 test_metrics=None,
                 save_path="checkpoints",
                 device=None):
        self.device = device or ('cuda' if CUDA else 'cpu')
        model.to(self.device)

        self.model = model
        self.criterion = criterion
        self.optimizer_model = optimizer_model
        self.optimizer_arch = optimizer_arch
        self.lr_scheduler = lr_scheduler
        self._output_transform = get(["y_pred", "y"])
        self.metrics = metrics or {
            "loss": TrainLoss(),
            "acc": Accuracy(self._output_transform),
        }
        self.test_metrics = test_metrics or {
            "loss": Loss(self.criterion, self._output_transform),
            "acc": Accuracy(self._output_transform),
        }
        self.save_path = save_path
        self._log_path = os.path.join(self.save_path, "runs")

        current_time = datetime.now().strftime('%b%d_%H-%M-%S')
        log_dir = os.path.join(self._log_path, current_time)
        self.writer = SummaryWriter(log_dir)

        self.train_engine = self._create_train_engine()
        self.eval_engine = self._create_eval_engine()
        self.checkpoint_handler = Checkpoint(
            self.to_save(),
            DiskSaver(self.save_path, create_dir=True, require_empty=False))
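
For reference, get(["y_pred", "y"]) builds the output transform that the ignite-style metrics above consume; a hypothetical equivalent is sketched below (the real helper lives in the library, so treat this as an illustration only).

def get(keys):
    # Return a transform that plucks the given keys from the engine's
    # output dict, e.g. {"y_pred": ..., "y": ...} -> (y_pred, y).
    def transform(output):
        return tuple(output[k] for k in keys)
    return transform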
Example #3
criterion = nn.CrossEntropyLoss()
tau_max, tau_min = 10, 0.1
model = Network(8, 8, steps=4, multiplier=4, stem_multiplier=1, tau=tau_max)

optimizer_arch = Adam(model.arch_parameters(),
                      lr=3e-4,
                      betas=(0.5, 0.999),
                      weight_decay=1e-3)
optimizer_model = SGD(model.parameters(),
                      0.025,
                      momentum=0.9,
                      weight_decay=3e-4)
lr_scheduler = CosineAnnealingLR(optimizer_model, T_max=50, eta_min=0.001)

metrics = {
    "loss": TrainLoss(),
    "acc": Accuracy(),
}

test_metrics = {
    "loss": Loss(criterion),
    "acc": Accuracy(),
}

trainer = DARTSTrainer(model,
                       criterion, [optimizer_arch, optimizer_model],
                       lr_scheduler,
                       metrics,
                       test_metrics,
                       save_path='checkpoints/DARTS')
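
tau_max and tau_min suggest a Gumbel-softmax temperature annealed over the search; a plausible per-epoch schedule is sketched below (the linear form and the direct model.tau update are assumptions, not part of the original snippet).

epochs = 50
for epoch in range(epochs):
    # Anneal the Gumbel temperature linearly from tau_max down to tau_min.
    model.tau = tau_max - (tau_max - tau_min) * epoch / (epochs - 1)
    # ... run one epoch of architecture search with the trainer here ...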
Example #4
    train_loader = get_data_loader(cfg.Dataset.Train, ds_train)
    val_loader = get_data_loader(cfg.Dataset.Val, ds_val)
    test_loader = get_data_loader(cfg.Dataset.Test, ds_test)

    cfg.Model.num_classes = num_classes
    model = get_model(cfg.Model, horch.models.cifar)

    criterion = CrossEntropyLoss(non_sparse=use_mix,
                                 label_smoothing=cfg.get("label_smooth"))

    epochs = cfg.epochs
    optimizer = get_optimizer(cfg.Optimizer, model.parameters())
    lr_scheduler = get_lr_scheduler(cfg.LRScheduler, optimizer, epochs)

    train_metrics = {'loss': TrainLoss()}
    if not use_mix:
        train_metrics['acc'] = Accuracy()

    test_metrics = {
        'loss': Loss(CrossEntropyLoss()),
        'acc': Accuracy(),
    }

    work_dir = fmt_path(cfg.get("work_dir"))
    trainer = Trainer(model,
                      criterion,
                      optimizer,
                      lr_scheduler,
                      train_metrics,
                      test_metrics,
                      work_dir=work_dir)
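
    # Hedged continuation (not in the original snippet): drive training with
    # the same fit signature used in Example #1.
    hist = trainer.fit(train_loader, epochs, val_loader=val_loader)
    print('best val acc:', max(hist['acc']))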
Example #5

# Genotype released by PC-DARTS for CIFAR-10; only the reduce cell survived
# in the original snippet, so the normal cell is restored from the published
# genotype.
PC_DARTS_cifar = Genotype(normal=[('sep_conv_3x3', 1), ('skip_connect', 0),
                                  ('sep_conv_3x3', 0), ('dil_conv_3x3', 1),
                                  ('sep_conv_5x5', 0), ('sep_conv_3x3', 1),
                                  ('avg_pool_3x3', 0), ('dil_conv_3x3', 1)],
                          normal_concat=[2, 3, 4, 5],
                          reduce=[('sep_conv_5x5', 1), ('max_pool_3x3', 0),
                                  ('sep_conv_5x5', 1), ('sep_conv_5x5', 2),
                                  ('sep_conv_3x3', 0), ('sep_conv_3x3', 3),
                                  ('sep_conv_3x3', 1), ('sep_conv_3x3', 2)],
                          reduce_concat=[2, 3, 4, 5])

drop_path = 0.3
epochs = 600
# net = NASNet(36, 20, True, drop_path, 10, PC_DARTS_cifar)  # full-size evaluation model
net = NASNet(4, 5, True, drop_path, 10, PC_DARTS_cifar)  # scaled-down model for quick runs
criterion = CrossEntropyLoss(auxiliary_weight=0.4)
optimizer = SGD(net.parameters(), lr=0.025, momentum=0.9, weight_decay=3e-4)
lr_scheduler = CosineAnnealingLR(optimizer, epochs, min_lr=0)

train_metrics = {
    'loss': TrainLoss(),
    'acc': Accuracy(),
}

eval_metrics = {
    'loss': Loss(CrossEntropyLoss()),
    'acc': Accuracy(),
}

trainer = CNNLearner(net,
                     criterion,
                     optimizer,
                     lr_scheduler,
                     train_metrics=train_metrics,
                     eval_metrics=eval_metrics,
                     work_dir="../train/v3/models")
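
DARTS-style evaluation runs typically ramp the drop-path rate linearly over training; a sketch in that spirit follows (the drop_path_prob attribute mirrors the original DARTS script and is an assumption for this library).

for epoch in range(epochs):
    # Ramp drop path linearly from 0 up to the target rate.
    net.drop_path_prob = drop_path * epoch / epochs
    # ... train one epoch with the learner here ...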
Example #6
def main():
    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.benchmark = True
    torch.backends.cudnn.deterministic = False
    manual_seed(args.seed)

    train_transform = Compose([
        RandomCrop(32, padding=4),
        RandomHorizontalFlip(),
        ToTensor(),
        Normalize([0.491, 0.482, 0.447], [0.247, 0.243, 0.262]),
    ])

    ds = CIFAR10(root=args.data, train=True, download=True)

    ds_train, ds_search = train_test_split(
        ds, test_ratio=0.5, shuffle=True, random_state=args.seed,
        transform=train_transform, test_transform=train_transform)

    train_queue = DataLoader(
        ds_train, batch_size=args.batch_size, pin_memory=True, shuffle=True, num_workers=2)

    valid_queue = DataLoader(
        ds_search, batch_size=args.batch_size, pin_memory=True, shuffle=True, num_workers=2)

    set_defaults({
        'relu': {
            'inplace': False,
        },
        'bn': {
            'affine': False,
        }
    })
    model = Network(args.init_channels, args.layers, num_classes=CIFAR_CLASSES)
    criterion = nn.CrossEntropyLoss()

    optimizer_arch = Adam(
        model.arch_parameters(),
        lr=args.arch_learning_rate,
        betas=(0.5, 0.999),
        weight_decay=args.arch_weight_decay)
    optimizer_model = SGD(
        model.model_parameters(),
        args.learning_rate,
        momentum=args.momentum,
        weight_decay=args.weight_decay)

    scheduler = CosineLR(
        optimizer_model, float(args.epochs), min_lr=args.learning_rate_min)

    train_metrics = {
        "loss": TrainLoss(),
        "acc": Accuracy(),
    }

    eval_metrics = {
        "loss": Loss(criterion),
        "acc": Accuracy(),
    }

    learner = DARTSLearner(model, criterion, optimizer_arch, optimizer_model, scheduler,
                           train_metrics=train_metrics, eval_metrics=eval_metrics,
                           search_loader=valid_queue, grad_clip_norm=5.0, work_dir='models')

    for epoch in range(args.epochs):
        scheduler.step()  # LR schedule is stepped once per epoch, before training
        lr = scheduler.get_lr()[0]
        logging.info('epoch %d lr %e', epoch, lr)

        genotype = model.genotype()
        logging.info('genotype = %s', genotype)

        print(F.softmax(model.alphas_normal, dim=-1))
        print(F.softmax(model.alphas_reduce, dim=-1))
        print(F.softmax(model.betas_normal[2:5], dim=-1))
        # training

        train_acc, train_obj = train(learner, train_queue, epoch)
        logging.info('train_acc %f', train_acc)

        utils.save(model, os.path.join(args.save, 'weights.pt'))
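
The train helper called in the loop is not shown; a hypothetical shape consistent with the learner API in the other examples is sketched below (the fit call and return values are assumptions).

def train(learner, train_queue, epoch):
    # Run a single epoch through the learner and report (acc, loss).
    hist = learner.fit(train_queue, 1)
    return hist['acc'][-1], hist['loss'][-1]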