Example #1
def __run_from_config(config):
    # Unpack the experiment configuration.
    dataset_config = config['dataset']
    model_config = config['model']
    test_model_config = config['test_model']
    optimizer_config = config['optimizer']
    output_path = config['output_path']
    resume_path = config['resume_path']
    batch_size = config['batch_size']
    epoch = config['epoch']
    train = config['train']
    # Dataset.
    dataset = get_clf_data(use_memory=dataset_config['use_memory'],
                           img_size=dataset_config['img_size'],
                           img_type=dataset_config['img_type'])
    # Model: the training and test models can be configured separately.
    if train:
        model = get_model(model_config['type'], model_config['path'])
    else:
        model = get_model(test_model_config['type'], test_model_config['path'])

    # Optimizer: a distinct config name avoids shadowing the optimizer itself.
    optimizer = get_optimizer(optimizer_config['type'], model)
    # Run training or evaluation.
    __run(dataset,
          model,
          optimizer,
          output_path=output_path,
          resume_path=resume_path,
          batch_size=batch_size,
          epoch=epoch,
          execute_train=train)
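
For reference, a minimal sketch of the config dict this runner expects, reconstructed only from the keys read above; every concrete value and path below is a placeholder.

config = {
    'dataset': {'use_memory': True, 'img_size': 224, 'img_type': 'rgb'},
    'model': {'type': 'resnet50', 'path': None},
    'test_model': {'type': 'resnet50', 'path': 'snapshots/model_final'},  # placeholder path
    'optimizer': {'type': 'MomentumSGD'},  # placeholder optimizer name
    'output_path': 'result/',
    'resume_path': None,
    'batch_size': 32,
    'epoch': 100,
    'train': True,
}
__run_from_config(config)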
Example #2
import os

def __train(root_path):
    # Class ids grouped into five coarse categories; one model is trained per group.
    category = {
        'category_1': [14, 11, 12, 13, 5, 4, 17, 15],
        'category_2': [18, 7],
        'category_3': [9, 8, 6, 10],
        'category_4': [23, 22, 21, 16, 0, 24, 19],
        'category_5': [20, 1, 3, 2]
    }
    dataset = __get_dataset(root_path)
    for category_name, category_ids in category.items():
        output_path = os.path.join(
            root_path, 'result/resnet50_pretrain_warp/%s' % category_name)
        # Restrict the dataset to this category's classes.
        train, val = __filter_class(dataset, category_ids)
        # The classifier head is sized to the number of classes in the group.
        model = get_model(
            'MLP2-cls',
            output=len(category_ids)
        )
        optimizer = get_optimizer('MomentumSGD', model)
        train_network(
            (train, val),
            model,
            optimizer,
            output_path=output_path,
            save_epoch=10
        )
        val_network(val, model, output_path=output_path)
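
This example relies on a __filter_class helper whose body is not shown. A speculative sketch of what it plausibly does, assuming the dataset is a flat list of (image, label) pairs: keep only the listed class ids and remap labels to a dense 0..len(category_ids)-1 range, which is what output=len(category_ids) in get_model suggests. The real signature, data layout and train/val split may differ.

def __filter_class(dataset, category_ids):
    # Illustrative only: remap original class ids to 0..N-1 so labels
    # match the model head sized with output=len(category_ids).
    id_map = {orig: new for new, orig in enumerate(category_ids)}
    filtered = [(img, id_map[label]) for img, label in dataset
                if label in id_map]
    split = int(len(filtered) * 0.9)  # hypothetical 90/10 train/val split
    return filtered[:split], filtered[split:]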
Example #3
    def configure_optimizers(self):
        # REQUIRED by PyTorch Lightning.
        loss_class = self.hparams["training"]["loss"]["class_name"]
        if loss_class == 'losses.loss.AdaptiveLossFunction':
            # The adaptive loss has learnable parameters of its own, so they
            # are optimized jointly with the model's parameters.
            optimizer = get_optimizer(
                list(self.model.parameters()) +
                list(self.criteria.adaptive.parameters()), self.hparams)
        else:
            optimizer = get_optimizer(self.model.parameters(), self.hparams)

        scheduler = get_scheduler(optimizer, self.hparams)
        # Lightning accepts an (optimizers, scheduler configs) tuple.
        return (
            [optimizer],
            [{
                "scheduler": scheduler,
                "monitor": "avg_val_loss",
                "interval": self.hparams["training"]["scheduler"]["interval"],
            }],
        )
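
A minimal sketch of the hparams structure this method reads, assembled only from the keys accessed above; anything get_optimizer and get_scheduler additionally expect is unknown and omitted.

hparams = {
    "training": {
        "loss": {"class_name": "losses.loss.AdaptiveLossFunction"},
        "scheduler": {"interval": "epoch"},  # Lightning accepts "epoch" or "step"
    },
}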
Example #4
    def prepare(self, loss_op):
        from optimizer.optimizer import get_optimizer
        # Build the training op over the 'train_net' variables
        # (TF1 graph API; assumes `import tensorflow as tf` at module level).
        self.train_step = get_optimizer(self.cfg_parser,
                                        loss_op,
                                        tf.get_collection(
                                            tf.GraphKeys.GLOBAL_VARIABLES,
                                            scope='train_net'),
                                        global_step=self.num_updates)

        # Initialize all variables, then sync the target network via the copy op.
        init = tf.global_variables_initializer()
        self.sess.run(init)
        self.sess.run(self.copy_op)

        from action_policy.action_policy import Policy
        self.policy = Policy(self.cfg_parser, self)
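
This is TF1-style graph code: get_optimizer returns a training op, and a step is taken by running that op in the session. A hedged sketch of the call site; the batch iterator and feed helper below are purely hypothetical, since the agent's input placeholders are not shown.

for batch in batches:  # hypothetical batch iterator
    # make_feed (hypothetical) maps the graph's input placeholders to batch data.
    self.sess.run(self.train_step, feed_dict=make_feed(batch))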
Example #5
    base_model.eval()
    if diff_attention_model is not None:
        diff_attention_model = diff_attention_model.to(device)
        diff_attention_model.eval()
    logger.info('Got model.')

    # Get data loaders.
    train_loader, query_loader, gallery_loader = loader.get_data_loaders(config, base_model=base_model, device=device)
    logger.info('Got data loaders.')

    # Get loss. Unpack into a new name so the `loss` module is not shadowed.
    loss_fn, center_loss = loss.get_loss(config, device)
    logger.info('Got loss.')

    # Get optimizers and schedulers for the base model, the attention model
    # and the center loss in one call.
    base_optimizer, base_scheduler, diff_optimizer, diff_scheduler, center_optimizer, center_scheduler = optimizer.get_optimizer(
        config, base_model, diff_attention_model, center_loss=center_loss)
    logger.info('Got optimizer.')

    # Get trainer. Again, a distinct name keeps the `trainer` module accessible.
    trainer_engine = trainer.get_trainer(config, base_model, diff_attention_model, loss_fn, device, logger, query_loader,
                                         gallery_loader, base_optimizer=base_optimizer, base_scheduler=base_scheduler,
                                         diff_optimizer=diff_optimizer, diff_scheduler=diff_scheduler,
                                         center_optimizer=center_optimizer, center_scheduler=center_scheduler)
    logger.info('Got trainer.')

    # Train.
    logger.info('Start training.')
    trainer_engine.run(train_loader, max_epochs=config['trainer'].getint('epochs'))
    logger.info('Finished training.')
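
config['trainer'].getint('epochs') is the configparser API, so the config here is presumably loaded from an INI-style file. A minimal, self-contained sketch with a hypothetical section value:

import configparser

config = configparser.ConfigParser()
config.read_string("""
[trainer]
epochs = 120
""")
print(config['trainer'].getint('epochs'))  # 120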