Example #1
0
 def __init__(self, **kwargs):
     """Set up the trainer from keyword arguments.

     Expected kwargs:
         train_iter / dev_iter / test_iter : batch data iterators
         model : the nn model to train
         config : configuration object
     """
     print("Training Start......")
     config = kwargs["config"]
     self.train_iter = kwargs["train_iter"]
     self.dev_iter = kwargs["dev_iter"]
     self.test_iter = kwargs["test_iter"]
     self.model = kwargs["model"]
     self.config = config
     self.early_max_patience = config.early_max_patience
     # Optimizer and loss are both driven by the configured learning algorithm.
     self.optimizer = Optimizer(
         name=config.learning_algorithm,
         model=self.model,
         lr=config.learning_rate,
         weight_decay=config.weight_decay,
         grad_clip=config.clip_max_norm)
     self.loss_function = self._loss(
         learning_algorithm=config.learning_algorithm)
     print(self.optimizer)
     print(self.loss_function)
     self.best_score = Best_Result()
     self.train_iter_len = len(self.train_iter)
Example #2
0
 def __init__(self, **kwargs):
     """Set up the trainer: data iterators, model, optimizer, loss and
     segmentation/POS evaluation trackers.

     :param kwargs:
     Args of data:
         train_iter : train batch data iterator
         dev_iter : dev batch data iterator
         test_iter : test batch data iterator
     Args of train:
         model : nn model
         config : config
     """
     print("Training Start......")
     self.train_iter = kwargs["train_iter"]
     self.dev_iter = kwargs["dev_iter"]
     self.test_iter = kwargs["test_iter"]
     self.model = kwargs["model"]
     self.config = kwargs["config"]
     self.early_max_patience = self.config.early_max_patience
     self.optimizer = Optimizer(name=self.config.learning_algorithm,
                                model=self.model,
                                lr=self.config.learning_rate,
                                weight_decay=self.config.weight_decay,
                                grad_clip=self.config.clip_max_norm)
     # `size_average` was deprecated and later removed from PyTorch;
     # `reduction="sum"` == size_average=False, `reduction="mean"` ==
     # size_average=True. SGD sums per-sample losses, others average them.
     if self.config.learning_algorithm == "SGD":
         self.loss_function = nn.CrossEntropyLoss(reduction="sum")
     else:
         self.loss_function = nn.CrossEntropyLoss(reduction="mean")
     print(self.optimizer)
     self.best_score = Best_Result()
     # One evaluator for training plus seg/POS evaluators per dev/test split.
     self.train_eval = Eval()
     self.dev_eval_seg = Eval()
     self.dev_eval_pos = Eval()
     self.test_eval_seg = Eval()
     self.test_eval_pos = Eval()
     self.train_iter_len = len(self.train_iter)
Example #3
0
    def __init__(self, **kwargs):
        """Initialize the trainer from keyword arguments.

        Expected kwargs:
            train_iter / dev_iter / test_iter : batch data iterators
            model : the nn model to train
            config : configuration object (provides the logger, CRF flag, ...)
        """
        config = kwargs["config"]
        self.config = config
        config.logger.info("Training Start......")
        self.train_iter = kwargs["train_iter"]
        self.dev_iter = kwargs["dev_iter"]
        self.test_iter = kwargs["test_iter"]
        self.model = kwargs["model"]
        self.use_crf = config.use_crf
        self.average_batch = config.average_batch
        self.early_max_patience = config.early_max_patience
        # Optimizer and loss are both derived from the configured algorithm.
        self.optimizer = Optimizer(
            name=config.learning_algorithm,
            model=self.model,
            lr=config.learning_rate,
            weight_decay=config.weight_decay,
            grad_clip=config.clip_max_norm)
        self.loss_function = self._loss(
            learning_algorithm=config.learning_algorithm,
            label_paddingId=config.label_paddingId,
            use_crf=self.use_crf)
        config.logger.info(self.optimizer)
        config.logger.info(self.loss_function)
        self.best_score = Best_Result()
        # One evaluator per data split.
        self.train_eval = Eval()
        self.dev_eval = Eval()
        self.test_eval = Eval()
        self.train_iter_len = len(self.train_iter)
    def __init__(self, **kwargs):
        """Set up the trainer: data iterators, model, optimizer, loss and
        per-task (accusation / law-article) evaluation trackers.

        :param kwargs:
        Args of data:
            train_iter : train batch data iterator
            dev_iter : dev batch data iterator
            test_iter : test batch data iterator
        Args of train:
            model : nn model
            config : config
        """
        print("Training Start......")
        self.train_iter = kwargs["train_iter"]
        self.dev_iter = kwargs["dev_iter"]
        self.test_iter = kwargs["test_iter"]
        self.model = kwargs["model"]
        self.config = kwargs["config"]
        self.device = self.config.device
        # CUDA is enabled for any configured device other than the CPU.
        self.cuda = self.device != cpu_device
        self.early_max_patience = self.config.early_max_patience
        self.optimizer = Optimizer(name=self.config.learning_algorithm,
                                   model=self.model,
                                   lr=self.config.learning_rate,
                                   weight_decay=self.config.weight_decay,
                                   grad_clip=self.config.clip_max_norm)
        # SGD sums per-sample losses; other optimizers average them.
        if self.config.learning_algorithm == "SGD":
            self.loss_function = nn.CrossEntropyLoss(reduction="sum")
        else:
            self.loss_function = nn.CrossEntropyLoss(reduction="mean")
        print(self.optimizer)
        self.best_score = Best_Result()
        self.train_iter_len = len(self.train_iter)

        # Accusation evaluation: one micro tracker per split, plus one
        # macro tracker per accusation class.
        self.accu_train_eval_micro = Eval()
        self.accu_dev_eval_micro = Eval()
        self.accu_test_eval_micro = Eval()
        accu_n = self.config.accu_class_num
        self.accu_train_eval_macro = [Eval() for _ in range(accu_n)]
        self.accu_dev_eval_macro = [Eval() for _ in range(accu_n)]
        self.accu_test_eval_macro = [Eval() for _ in range(accu_n)]

        # Law-article evaluation: same layout as the accusation trackers.
        self.law_train_eval_micro = Eval()
        self.law_dev_eval_micro = Eval()
        self.law_test_eval_micro = Eval()
        law_n = self.config.law_class_num
        self.law_train_eval_macro = [Eval() for _ in range(law_n)]
        self.law_dev_eval_macro = [Eval() for _ in range(law_n)]
        self.law_test_eval_macro = [Eval() for _ in range(law_n)]
Example #5
0
 def __init__(self, **kwargs):
     """Build the parser trainer from keyword arguments.

     Expected kwargs:
         train_iter / dev_iter / test_iter : batch data iterators
         model : the parser (wraps the underlying nn model)
         config : configuration object
     """
     print("Training Start......")
     config = kwargs["config"]
     self.train_iter = kwargs["train_iter"]
     self.dev_iter = kwargs["dev_iter"]
     self.test_iter = kwargs["test_iter"]
     self.parser = kwargs["model"]
     self.config = config
     self.device = config.device
     # CUDA is enabled for any configured device other than the CPU.
     self.cuda = False
     if self.device != cpu_device:
         self.cuda = True
     self.early_max_patience = config.early_max_patience
     # NOTE(review): grad_clip is the literal string "None" — presumably
     # this disables clipping inside Optimizer; confirm against its API.
     self.optimizer = Optimizer(
         name=config.learning_algorithm,
         model=self.parser.model,
         lr=config.learning_rate,
         weight_decay=config.weight_decay,
         grad_clip="None",
         betas=(0.9, 0.9),
         eps=1.0e-12)
     # SGD sums per-sample losses; other optimizers average them.
     if config.learning_algorithm == "SGD":
         self.loss_function = nn.CrossEntropyLoss(reduction="sum")
     else:
         self.loss_function = nn.CrossEntropyLoss(reduction="mean")
     print(self.optimizer)
     self.best_score = Best_Result()
     self.train_iter_len = len(self.train_iter)