Code example #1: a train() helper that wires a ClassificationTask with logging, progress-bar, optional TensorBoard, and checkpoint hooks, then runs it with LocalTrainer
# Imports assume the standard Classy Vision package layout.
import os
import time

from classy_vision.hooks import (
    CheckpointHook,
    LossLrMeterLoggingHook,
    ProgressBarHook,
    TensorboardPlotHook,
)
from classy_vision.tasks import ClassificationTask
from classy_vision.trainer import LocalTrainer


def train(datasets, model, loss, optimizer, meters, args):
    task = (ClassificationTask()
            .set_num_epochs(args.num_epochs)
            .set_loss(loss)
            .set_model(model)
            .set_optimizer(optimizer)
            .set_meters(meters))
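    # register the train and test dataset splits on the task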
    for phase in ["train", "test"]:
        task.set_dataset(datasets[phase], phase)

    hooks = [LossLrMeterLoggingHook(log_freq=args.print_freq)]
    # show progress
    hooks.append(ProgressBarHook())
    if not args.skip_tensorboard:
        try:
            from tensorboardX import SummaryWriter
            tb_writer = SummaryWriter(log_dir=args.video_dir + "/tensorboard")
            hooks.append(TensorboardPlotHook(tb_writer))
        except ImportError:
            print("tensorboardX not installed, skipping tensorboard hooks")

    # create a fresh checkpoint directory; makedirs handles the nested path
    checkpoint_dir = f"{args.video_dir}/checkpoint/classy_checkpoint_{time.time()}"
    os.makedirs(checkpoint_dir)
    hooks.append(CheckpointHook(checkpoint_dir, input_args={}))

    task = task.set_hooks(hooks)
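    # run single-machine training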
    trainer = LocalTrainer(use_gpu=args.cuda, num_dataloader_workers=args.num_workers)
    trainer.train(task)
Code example #2: an end-to-end test that builds train/test datasets, a model, a loss, and an SGD optimizer, then trains for one epoch
    def test_one(self):
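        """Trains for one epoch on hand-built train/test datasets with a model, loss, and SGD optimizer."""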
        train_dataset = MyDataset(
            batchsize_per_replica=32,
            shuffle=False,
            transform=GenericImageTransform(
                transform=transforms.Compose(
                    [
                        transforms.RandomResizedCrop(224),
                        transforms.RandomHorizontalFlip(),
                        transforms.ToTensor(),
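                        # standard ImageNet channel means and stds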
                        transforms.Normalize(
                            mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
                        ),
                    ]
                )
            ),
            num_samples=100,
            crop_size=224,
            class_ratio=0.5,
            seed=0,
        )

        test_dataset = MyDataset(
            batchsize_per_replica=32,
            shuffle=False,
            transform=GenericImageTransform(
                transform=transforms.Compose(
                    [
                        transforms.Resize(256),
                        transforms.CenterCrop(224),
                        transforms.ToTensor(),
                        transforms.Normalize(
                            mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
                        ),
                    ]
                )
            ),
            num_samples=100,
            crop_size=224,
            class_ratio=0.5,
            seed=0,
        )

        model = MyModel()
        loss = MyLoss()

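        # SGD driven by a constant learning rate of 0.01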
        optimizer = SGD(lr_scheduler=ConstantParamScheduler(0.01))

        task = (
            ClassificationTask()
            .set_model(model)
            .set_dataset(train_dataset, "train")
            .set_dataset(test_dataset, "test")
            .set_loss(loss)
            .set_optimizer(optimizer)
            .set_num_epochs(1)
        )

        trainer = LocalTrainer()
        trainer.train(task)
Code example #3: a test helper that builds every task component from a config with the build_* factories
    def _build_task(self, num_epochs, skip_param_schedulers=False):
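        # build each component from the test config, then attach the train/test datasets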
        config = self._get_config(skip_param_schedulers)
        config["optimizer"]["num_epochs"] = num_epochs
        task = (
            ClassificationTask()
            .set_num_epochs(num_epochs)
            .set_loss(build_loss(config["loss"]))
            .set_model(build_model(config["model"]))
            .set_optimizer(build_optimizer(config["optimizer"]))
        )
        for phase_type in ["train", "test"]:
            dataset = build_dataset(config["dataset"][phase_type])
            task.set_dataset(dataset, phase_type)

        self.assertIsNotNone(task)
        return task
Code example #4: building a task manually and via build_task from the same config, then calling prepare() on each
    def test_get_state(self):
        config = get_test_task_config()
        loss = build_loss(config["loss"])
        task = (
            ClassificationTask()
            .set_num_epochs(1)
            .set_loss(loss)
            .set_model(build_model(config["model"]))
            .set_optimizer(build_optimizer(config["optimizer"]))
        )
        for phase_type in ["train", "test"]:
            dataset = build_dataset(config["dataset"][phase_type])
            task.set_dataset(dataset, phase_type)

        task.prepare()

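        # the same task can also be built directly from the config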
        task = build_task(config)
        task.prepare()
Code example #5: training a small MLP with an accuracy meter and asserting perfect top-1 accuracy
    def test_training(self):
        """Checks we can train a small MLP model."""
        config = get_test_mlp_task_config()
        task = (
            ClassificationTask()
            .set_num_epochs(10)
            .set_loss(build_loss(config["loss"]))
            .set_model(build_model(config["model"]))
            .set_optimizer(build_optimizer(config["optimizer"]))
            .set_meters([AccuracyMeter(topk=[1])])
            .set_hooks([LossLrMeterLoggingHook()])
        )
        for split in ["train", "test"]:
            dataset = build_dataset(config["dataset"][split])
            task.set_dataset(dataset, split)

        self.assertIsNotNone(task)

        trainer = LocalTrainer()
        trainer.train(task)
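        # after 10 epochs the test expects perfect top-1 accuracy on this small MLP task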
        accuracy = task.meters[0].value["top_1"]
        self.assertAlmostEqual(accuracy, 1.0)