# Example #1
        x = x.view(-1, 4 * 4 * 50)
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return F.log_softmax(x, dim=1)


if __name__ == '__main__':
    # Base model plus a standard image-classification trainer on MNIST.
    base_model = Net(128)
    trainer = PyTorchImageClassificationTrainer(
        base_model,
        dataset_cls="MNIST",
        dataset_kwargs={"root": "data/mnist", "download": True},
        dataloader_kwargs={"batch_size": 32},
        optimizer_kwargs={"lr": 1e-3},
        trainer_kwargs={"max_epochs": 1})

    # Random sampling of the search space; no explicit mutators are applied.
    search_strategy = RandomStrategy()
    experiment = RetiariiExperiment(base_model, trainer, [], search_strategy)

    # Run locally: two trials at a time, ten trials in total, CPU only.
    config = RetiariiExeConfig('local')
    config.experiment_name = 'mnist_search'
    config.trial_concurrency = 2
    config.max_trial_number = 10
    config.training_service.use_active_gpu = False

    # Randomize the web-UI port within [8081, 8181] to dodge collisions.
    port = 8081 + random.randint(0, 100)
    experiment.run(config, port)
from nni.retiarii.trainer import PyTorchImageClassificationTrainer

from darts_model import CNN

if __name__ == '__main__':
    # DARTS-style CNN search space trained on CIFAR-10.
    base_model = CNN(32, 3, 16, 10, 8)
    trainer = PyTorchImageClassificationTrainer(
        base_model,
        dataset_cls="CIFAR10",
        dataset_kwargs={"root": "data/cifar10", "download": True},
        dataloader_kwargs={"batch_size": 32},
        optimizer_kwargs={"lr": 1e-3},
        trainer_kwargs={"max_epochs": 1})

    # TPE-guided exploration; the model carries its own mutation space,
    # so no explicit mutators are passed.
    search_strategy = TPEStrategy()
    experiment = RetiariiExperiment(base_model, trainer, [], search_strategy)

    config = RetiariiExeConfig('local')
    config.experiment_name = 'darts_search'
    config.trial_concurrency = 2
    config.max_trial_number = 10
    config.trial_gpu_number = 1
    # Schedule trials on GPUs 1 and 2 even when those GPUs are busy.
    config.training_service.use_active_gpu = True
    config.training_service.gpu_indices = [1, 2]

    experiment.run(config, 8081, debug=True)
# Example #3
                         _DEFAULT_KERNEL_SIZES, _DEFAULT_NUM_LAYERS,
                         _DEFAULT_SKIPS)
    # One epoch of CIFAR-10 classification per sampled architecture.
    trainer = PyTorchImageClassificationTrainer(
        base_model,
        dataset_cls="CIFAR10",
        dataset_kwargs={"root": "data/cifar10", "download": True},
        dataloader_kwargs={"batch_size": 32},
        optimizer_kwargs={"lr": 1e-3},
        trainer_kwargs={"max_epochs": 1})

    # New interface: list one mutator per mutable block in the model.
    applied_mutators = [
        BlockMutator('mutable_0'),
        BlockMutator('mutable_1'),
    ]

    search_strategy = TPEStrategy()
    experiment = RetiariiExperiment(base_model, trainer, applied_mutators,
                                    search_strategy)

    # Local run: two concurrent trials, ten trials total, CPU only.
    config = RetiariiExeConfig('local')
    config.experiment_name = 'mnasnet_search'
    config.trial_concurrency = 2
    config.max_trial_number = 10
    config.training_service.use_active_gpu = False

    experiment.run(config, 8081)
# Example #4
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = dict()
    for k in topk:
        correct_k = correct[:k].view(-1).float().sum(0)
        res["acc{}".format(k)] = correct_k.mul_(1.0 / batch_size).item()
    return res

if __name__ == '__main__':
    base_model = CNN(32, 3, 16, 10, 8)

    dataset_train, dataset_valid = get_dataset("cifar10")

    # Classic DARTS training setup: SGD with momentum, weight decay, and
    # a cosine-annealed learning rate over the 50 search epochs.
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(base_model.parameters(), 0.025,
                                momentum=0.9, weight_decay=3.0E-4)
    lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
        optimizer, 50, eta_min=0.001)

    trainer = DartsTrainer(
        model=base_model,
        loss=criterion,
        # Report top-1 accuracy as the only metric.
        metrics=lambda output, target: accuracy(output, target, topk=(1,)),
        optimizer=optimizer,
        num_epochs=50,
        dataset=dataset_train,
        batch_size=32,
        log_frequency=10,
        unrolled=False,
    )

    # One-shot search: the DARTS trainer performs the exploration itself,
    # so no strategy or mutators are passed to the experiment.
    experiment = RetiariiExperiment(base_model, trainer)
    experiment.run()