Example #1
    def get_model_size(self):
        return count_parameters_in_MB(self.graph)
Example #2
    def get_model_size(self):
        return count_parameters_in_MB(self.history)
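Examples #1 and #2 differ only in which module they measure (self.graph vs. self.history). The count_parameters_in_MB helper itself is not shown here; the sketch below is a typical way such a utility is written, given as an assumption about the project's utils rather than its actual implementation:

def count_parameters_in_MB(model):
    # Parameter count of a torch.nn.Module, reported in millions ("MB" in the
    # naming convention used by the calling code above).
    return sum(p.numel() for p in model.parameters()) / 1e6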
Example #3
    def __init__(self, graph, parser, *args, **kwargs):
        self.graph = graph
        self.parser = parser
        # Resolve the configuration: prefer an explicit kwargs['config'], else fall back to graph.config.
        try:
            self.config = kwargs.get('config', graph.config)
        except AttributeError:
            raise AttributeError('No configuration specified in graph or kwargs')
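        # Seed Python and NumPy; torch/CUDA seeding and deterministic cuDNN are set up only when a GPU is available.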
        np.random.seed(self.config.seed)
        random.seed(self.config.seed)
        if torch.cuda.is_available():
            torch.manual_seed(self.config.seed)
            torch.cuda.set_device(self.config.gpu)
            cudnn.benchmark = False
            cudnn.enabled = True
            cudnn.deterministic = True
            torch.cuda.manual_seed_all(self.config.seed)

        self.device = torch.device(
            "cuda:0" if torch.cuda.is_available() else "cpu")

        # dataloaders
        (train_queue, valid_queue, test_queue,
         train_transform, valid_transform) = parser.get_train_val_loaders()
        self.train_queue = train_queue
        self.valid_queue = valid_queue
        self.test_queue = test_queue
        self.train_transform = train_transform
        self.valid_transform = valid_transform

        # Look up the loss criterion by name, e.g. 'CrossEntropyLoss' -> nn.CrossEntropyLoss(),
        # and move it to the selected device.
        criterion = getattr(nn, self.config.criterion)()
        self.criterion = criterion.to(self.device)

        self.model = self.graph.to(self.device)

        n_parameters = utils.count_parameters_in_MB(self.model)
        logging.info("param size = %fMB", n_parameters)

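        # SGD optimizer; the learning rate is cosine-annealed to learning_rate_min over config.epochs (see the scheduler below).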
        optimizer = torch.optim.SGD(self.model.parameters(),
                                    self.config.learning_rate,
                                    momentum=self.config.momentum,
                                    weight_decay=self.config.weight_decay)
        self.optimizer = optimizer

        self.scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
            optimizer,
            float(self.config.epochs),
            eta_min=self.config.learning_rate_min)

        logging.info('Args: {}'.format(self.config))
        self.run_kwargs = {}

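        # Per-epoch metric history plus the model's parameter count in MB.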
        self.errors_dict = utils.AttrDict({
            'train_acc': [],
            'train_loss': [],
            'valid_acc': [],
            'valid_loss': [],
            'test_acc': [],
            'test_loss': [],
            'runtime': [],
            'params': n_parameters
        })
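The constructor assumes the usual imports (numpy as np, random, logging, torch, torch.nn as nn, torch.backends.cudnn as cudnn, and a project-level utils module) and reads a fixed set of fields from the config. Below is a minimal, hypothetical config stub covering exactly those fields; all values are placeholders, not the project's defaults:

from types import SimpleNamespace

config = SimpleNamespace(
    seed=0,                        # RNG seed
    gpu=0,                         # CUDA device index
    criterion='CrossEntropyLoss',  # resolved to nn.CrossEntropyLoss
    learning_rate=0.025,           # initial SGD learning rate
    learning_rate_min=0.0,         # eta_min of the cosine schedule
    momentum=0.9,                  # SGD momentum
    weight_decay=3e-4,             # L2 penalty
    epochs=50,                     # length of the cosine schedule
)

# trainer = Trainer(graph, parser, config=config)  # 'Trainer', graph and parser are placeholders here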