def __init__(self,
             system_config: configuration.SystemConfig = configuration.SystemConfig(),
             dataset_config: configuration.DatasetConfig = configuration.DatasetConfig(),
             dataloader_config: configuration.DataloaderConfig = configuration.DataloaderConfig(),
             optimizer_config: configuration.OptimizerConfig = configuration.OptimizerConfig()):
        """Set up the LeNet-5 classification experiment.

        Builds train/test dataloaders, the LeNet-5 model, cross-entropy
        loss, a top-1 accuracy metric, an SGD optimizer, a MultiStep LR
        scheduler and a TensorBoard visualizer.
        """
        # BUGFIX: run() calls setup_system(self.system_config), but this
        # constructor previously never stored the config on the instance,
        # which would raise AttributeError at run time. Store it here.
        self.system_config = system_config

        self.loader_train, self.loader_test = get_data(
            batch_size=dataloader_config.batch_size,
            num_workers=dataloader_config.num_workers,
            data_root=dataset_config.root_dir)

        # Apply the system configuration before model creation
        # (presumably seeds RNGs — behavior defined in setup_system).
        setup_system(system_config)

        self.model = LeNet5()
        self.loss_fn = nn.CrossEntropyLoss()
        self.metric_fn = AccuracyEstimator(topk=(1, ))
        self.optimizer = optim.SGD(self.model.parameters(),
                                   lr=optimizer_config.learning_rate,
                                   weight_decay=optimizer_config.weight_decay,
                                   momentum=optimizer_config.momentum)
        self.lr_scheduler = MultiStepLR(
            self.optimizer,
            milestones=optimizer_config.lr_step_milestones,
            gamma=optimizer_config.lr_gamma)
        self.visualizer = TensorBoardVisualizer()
# Exemplo n.º 2  (scraper artifact: snippet separator — commented out to keep the file valid Python)
# 0
    def run(self, trainer_config: configuration.TrainerConfig):
        """Train and evaluate the model per *trainer_config*.

        Returns the metrics object produced by ``Trainer.fit``; it is also
        stored on ``self.metrics``.
        """
        # Re-apply the system configuration so every run starts from the
        # same state (semantics defined in setup_system).
        setup_system(self.system_config)

        run_device = torch.device(trainer_config.device)
        self.model = self.model.to(run_device)
        self.loss_fn = self.loss_fn.to(run_device)

        trainer = Trainer(model=self.model,
                          loader_train=self.loader_train,
                          loader_test=self.loader_test,
                          loss_fn=self.loss_fn,
                          metric_fn=self.metric_fn,
                          optimizer=self.optimizer,
                          lr_scheduler=self.lr_scheduler,
                          device=run_device,
                          data_getter=itemgetter("image"),
                          target_getter=itemgetter("target"),
                          stage_progress=trainer_config.progress_bar,
                          get_key_metric=itemgetter("mAP"),
                          visualizer=self.visualizer,
                          model_save_best=trainer_config.model_save_best,
                          model_saving_frequency=trainer_config.model_saving_frequency,
                          save_dir=trainer_config.model_dir)

        # Detection-specific hooks drive the train / test / end-of-epoch stages.
        for stage, hook in (("train", hooks.train_hook_detection),
                            ("test", hooks.test_hook_detection),
                            ("end_epoch", hooks.end_epoch_hook_detection)):
            trainer.register_hook(stage, hook)

        self.metrics = trainer.fit(trainer_config.epoch_num)
        return self.metrics
# Exemplo n.º 3  (scraper artifact: snippet separator — commented out to keep the file valid Python)
# 0
    def __init__(
        self,
        system_config: configuration.SystemConfig = configuration.SystemConfig(),
        dataset_config: configuration.DatasetConfig = configuration.DatasetConfig(),  # pylint: disable=redefined-outer-name
        dataloader_config: configuration.DataloaderConfig = configuration.DataloaderConfig(),  # pylint: disable=redefined-outer-name
        optimizer_config: configuration.OptimizerConfig = configuration.OptimizerConfig(),  # pylint: disable=redefined-outer-name
    ):
        """Assemble the person-detection experiment.

        Sets up train/validation datasets and loaders, the detector model,
        detection loss, AP metric, SGD optimizer, MultiStep LR scheduler
        and a matplotlib visualizer.
        """
        self.system_config = system_config
        setup_system(system_config)

        # Training data: augmented via the configured transforms, shuffled.
        self.dataset_train = ListDataset(root_dir=dataset_config.root_dir,
                                         list_file='../train_anno.txt',
                                         classes=["__background__", "person"],
                                         mode='train',
                                         transform=Compose(dataset_config.train_transforms),
                                         input_size=300)
        self.loader_train = DataLoader(dataset=self.dataset_train,
                                       batch_size=dataloader_config.batch_size,
                                       shuffle=True,
                                       collate_fn=self.dataset_train.collate_fn,
                                       num_workers=dataloader_config.num_workers,
                                       pin_memory=True)

        # Validation data: deterministic normalize + tensor conversion only.
        self.dataset_test = ListDataset(root_dir=dataset_config.root_dir,
                                        list_file='../test_anno.txt',
                                        classes=["__background__", "person"],
                                        mode='val',
                                        transform=Compose([Normalize(), ToTensorV2()]),
                                        input_size=300)
        self.loader_test = DataLoader(dataset=self.dataset_test,
                                      batch_size=dataloader_config.batch_size,
                                      shuffle=False,
                                      collate_fn=self.dataset_test.collate_fn,
                                      num_workers=dataloader_config.num_workers,
                                      pin_memory=True)

        # Model / loss share the class count (background + person).
        num_classes = len(self.dataset_train.classes)
        self.model = Detector(num_classes)
        self.loss_fn = DetectionLoss(num_classes)
        self.metric_fn = APEstimator(classes=self.dataset_test.classes)

        self.optimizer = optim.SGD(self.model.parameters(),
                                   lr=optimizer_config.learning_rate,
                                   weight_decay=optimizer_config.weight_decay,
                                   momentum=optimizer_config.momentum)
        self.lr_scheduler = MultiStepLR(self.optimizer,
                                        milestones=optimizer_config.lr_step_milestones,
                                        gamma=optimizer_config.lr_gamma)
        self.visualizer = MatplotlibVisualizer()
    def __init__(self,
                 system_config: configuration.SystemConfig = configuration.SystemConfig(),
                 dataset_config: configuration.DatasetConfig = configuration.DatasetConfig(),
                 dataloader_config: configuration.DataloaderConfig = configuration.DataloaderConfig(),
                 optimizer_config: configuration.OptimizerConfig = configuration.OptimizerConfig()):
        """Set up the KenyanFood13 classification experiment.

        Builds train/validation dataloaders, a partially fine-tuned
        ResNeXt-50 model, a class-weighted cross-entropy loss, an Adam
        optimizer and a ReduceLROnPlateau scheduler.

        NOTE(review): ``optimizer_config`` is currently unused — the SGD /
        MultiStepLR setup was replaced by Adam / ReduceLROnPlateau; the
        parameter is kept for interface compatibility.
        """
        # Store the config so run() can access self.system_config
        # (consistent with the detection experiment's constructor).
        self.system_config = system_config

        # Train dataloader (flag=0 presumably selects the training split —
        # TODO confirm against KenyanFood13Dataset).
        train_dataset = KenyanFood13Dataset(
            dataset_config.root_dir,
            flag=0,
            split=dataset_config.split,
            transform=dataset_config.train_transforms,
            random_state=system_config.seed)
        # Per-class weights feed the loss to counter class imbalance.
        class_weight = train_dataset.get_class_weight()
        self.loader_train = torch.utils.data.DataLoader(
            train_dataset,
            batch_size=dataloader_config.batch_size,
            shuffle=True,
            num_workers=dataloader_config.num_workers)

        # Validation dataloader (flag=1, test-time transforms, no shuffle).
        val_dataset = KenyanFood13Dataset(
            dataset_config.root_dir,
            flag=1,
            split=dataset_config.split,
            transform=dataset_config.test_transforms,
            random_state=system_config.seed)
        self.loader_test = torch.utils.data.DataLoader(
            val_dataset,
            batch_size=dataloader_config.batch_size,
            shuffle=False,
            num_workers=dataloader_config.num_workers)

        setup_system(system_config)
        self.model = pretrained_resnext50(pretrained=True, fine_tune_start=4)

        self.loss_fn = nn.CrossEntropyLoss(
            weight=torch.FloatTensor(class_weight))
        self.metric_fn = AccuracyEstimator(topk=(1, ))
        # Adam with default hyper-parameters; the scheduler reduces the LR
        # when the monitored metric plateaus. (The previous SGD/MultiStepLR
        # variant was dead commented-out code and has been removed.)
        self.optimizer = optim.Adam(self.model.parameters())
        self.lr_scheduler = lr_scheduler.ReduceLROnPlateau(self.optimizer)
        self.visualizer = TensorBoardVisualizer()
    def __init__(self,
                 system_config: configuration.SystemConfig = configuration.SystemConfig(),
                 dataset_config: configuration.DatasetConfig = configuration.DatasetConfig(),
                 dataloader_config: configuration.DataloaderConfig = configuration.DataloaderConfig(),
                 optimizer_config: configuration.OptimizerConfig = configuration.OptimizerConfig()):
        """Set up the KenyanFood13 fine-tuning experiment on the full dataset.

        Trains on the entire dataset (split=1.0, no validation loader),
        restores model weights from a saved checkpoint, and continues
        training with AdamW + ReduceLROnPlateau.

        NOTE(review): ``optimizer_config`` is currently unused — the SGD /
        MultiStepLR setup was replaced by AdamW / ReduceLROnPlateau; the
        parameter is kept for interface compatibility.
        """
        # Store the config so run() can access self.system_config
        # (consistent with the detection experiment's constructor).
        self.system_config = system_config

        # Train dataloader over the full dataset (split=1.0: no hold-out).
        train_dataset = KenyanFood13Dataset(
            dataset_config.root_dir,
            flag=0,
            split=1.0,
            transform=dataset_config.train_transforms,
            random_state=system_config.seed)
        # Per-class weights feed the loss to counter class imbalance.
        class_weight = train_dataset.get_class_weight()
        self.loader_train = torch.utils.data.DataLoader(
            train_dataset,
            batch_size=dataloader_config.batch_size,
            shuffle=True,
            num_workers=dataloader_config.num_workers)

        setup_system(system_config)
        self.model = pretrained_resnext50(pretrained=True, fine_tune_start=4)
        # NOTE(review): hard-coded checkpoint path — consider moving it into
        # a config object so the experiment is reusable.
        self.model.load_state_dict(torch.load('test/model_39_0.917'))

        self.loss_fn = nn.CrossEntropyLoss(
            weight=torch.FloatTensor(class_weight))
        self.metric_fn = AccuracyEstimator(topk=(1, ))
        # AdamW with default hyper-parameters. (The previous SGD/MultiStepLR
        # variant was dead commented-out code and has been removed.)
        self.optimizer = optim.AdamW(self.model.parameters())
        # Every argument previously spelled out (mode='min', factor=0.1,
        # patience=10, threshold=1e-4, threshold_mode='rel', cooldown=0,
        # min_lr=0, eps=1e-8) is the documented default, so rely on the
        # defaults; this also drops the deprecated `verbose` kwarg.
        self.lr_scheduler = lr_scheduler.ReduceLROnPlateau(self.optimizer)
        self.visualizer = TensorBoardVisualizer()