Example #1
 def __init__(self,
              args=None,
              vocab=None,
              emb_matrix=None,
              model_file=None,
              use_cuda=False):
     self.use_cuda = use_cuda
     if model_file is not None:
         # load from file
         self.load(model_file, use_cuda)
     else:
         # build model from scratch
         self.args = args
         self.model = None if args['dict_only'] else Seq2SeqModel(
             args, emb_matrix=emb_matrix)
         self.vocab = vocab
         self.expansion_dict = dict()
     if not self.args['dict_only']:
         self.crit = loss.SequenceLoss(self.vocab.size)
         self.parameters = [
             p for p in self.model.parameters() if p.requires_grad
         ]
         if use_cuda:
             self.model.cuda()
             self.crit.cuda()
         else:
             self.model.cpu()
             self.crit.cpu()
         self.optimizer = utils.get_optimizer(self.args['optim'],
                                              self.parameters,
                                              self.args['lr'])
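
All of these constructors end with the same pattern: collect only the parameters that still require gradients and hand them to an optimizer factory. Below is a minimal self-contained sketch of that pattern, using plain torch.optim.Adam in place of utils.get_optimizer (whose implementation is not shown in these examples):

    import torch
    import torch.nn as nn

    model = nn.Linear(8, 2)             # stand-in for Seq2SeqModel
    model.weight.requires_grad = False  # pretend part of the model is frozen

    # keep only parameters that will actually receive gradients
    parameters = [p for p in model.parameters() if p.requires_grad]

    # plain Adam in place of utils.get_optimizer(args['optim'], ...)
    optimizer = torch.optim.Adam(parameters, lr=1e-3)
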
Example #2
 def __init__(self,
              args=None,
              vocab=None,
              pretrain=None,
              model_file=None,
              use_cuda=False):
     self.use_cuda = use_cuda
     if model_file is not None:
         # load everything from file
         self.load(model_file, pretrain)
     else:
         # build model from scratch
         self.args = args
         self.vocab = vocab
         self.model = Parser(
             args,
             vocab,
             emb_matrix=pretrain.emb if pretrain is not None else None)
     self.parameters = [
         p for p in self.model.parameters() if p.requires_grad
     ]
     if self.use_cuda:
         self.model.cuda()
     else:
         self.model.cpu()
     self.optimizer = utils.get_optimizer(self.args['optim'],
                                          self.parameters,
                                          self.args['lr'],
                                          betas=(0.9, self.args['beta2']),
                                          eps=1e-6)
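
Example #2 forwards Adam-style keyword arguments (betas, eps) through the factory. utils.get_optimizer itself is not shown in these snippets; the sketch below is one plausible shape for such a helper, written as an assumption rather than the actual implementation:

    import torch

    def get_optimizer(name, parameters, lr, betas=(0.9, 0.999), eps=1e-8, momentum=0.0):
        # map an optimizer name such as args['optim'] onto a torch.optim class
        if name == 'sgd':
            return torch.optim.SGD(parameters, lr=lr, momentum=momentum)
        if name == 'adam':
            return torch.optim.Adam(parameters, lr=lr, betas=betas, eps=eps)
        if name == 'adamax':
            return torch.optim.Adamax(parameters, lr=lr, betas=betas, eps=eps)
        raise ValueError('Unsupported optimizer: %s' % name)
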
Example #3
 def __init__(self, args=None, vocab=None, emb_matrix=None, model_file=None, use_cuda=False, teacher_model_file=None):
     self.use_cuda = use_cuda
     self.vocab = vocab
     if model_file is not None:
         # load everything from file
         self.load(model_file, use_cuda, args=args, vocab=vocab)
     else:
         # build model from scratch
         self.args = args
         self.model = Seq2SeqModel(args, emb_matrix=emb_matrix, use_cuda=use_cuda)
         # dict-based components
         self.word_dict = dict()
         self.composite_dict = dict()
     self.crit = torch.nn.NLLLoss(ignore_index=constant.PAD_ID)
     self.nce_parameters = []
     if self.args['lambda2'] != 0:
         self.init_teacher(teacher_model_file, use_cuda, emb_matrix=emb_matrix)
     self.teacher_update_count = 0
     self.parameters = [p for p in self.model.parameters() if p.requires_grad]
     if use_cuda:
         self.model.cuda()
         self.crit.cuda()
     else:
         self.model.cpu()
         self.crit.cpu()
     self.optimizer = utils.get_optimizer(self.args['optim'], self.parameters, self.args['lr'])
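
The criterion in Example #3 is torch.nn.NLLLoss with ignore_index=constant.PAD_ID, so padded positions contribute nothing to the loss. A small runnable demonstration; PAD_ID = 0 is an assumed value standing in for constant.PAD_ID:

    import torch
    import torch.nn as nn

    PAD_ID = 0  # assumption; the real value comes from the constant module
    crit = nn.NLLLoss(ignore_index=PAD_ID)

    log_probs = torch.log_softmax(torch.randn(4, 10), dim=-1)  # 4 tokens, 10-way vocab
    target = torch.tensor([3, 7, PAD_ID, PAD_ID])              # last two are padding

    loss = crit(log_probs, target)  # averaged over the two non-pad targets only
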
Example #4
 def __init__(self,
              args=None,
              vocab=None,
              pretrain=None,
              model_file=None,
              use_cuda=False,
              train_classifier_only=False):
     self.use_cuda = use_cuda
     if model_file is not None:
         # load everything from file
         self.load(model_file, args)
     else:
         assert all(var is not None for var in [args, vocab, pretrain])
         # build model from scratch
         self.args = args
         self.vocab = vocab
         self.model = NERTagger(args, vocab, emb_matrix=pretrain.emb)

     if train_classifier_only:
         logger.info('Disabling gradient for non-classifier layers')
         exclude = ['tag_clf', 'crit']
         for pname, p in self.model.named_parameters():
             if pname.split('.')[0] not in exclude:
                 p.requires_grad = False
     self.parameters = [
         p for p in self.model.parameters() if p.requires_grad
     ]
     if self.use_cuda:
         self.model.cuda()
     else:
         self.model.cpu()
     self.optimizer = utils.get_optimizer(self.args['optim'],
                                          self.parameters,
                                          self.args['lr'],
                                          momentum=self.args['momentum'])
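
The train_classifier_only branch freezes every submodule whose top-level name is outside an allow-list, so the frozen parameters drop out of self.parameters. The same trick works on any nn.Module, as in this sketch (tag_clf mirrors the example; the encoder is a hypothetical stand-in for the rest of NERTagger):

    import torch.nn as nn

    model = nn.ModuleDict({
        'encoder': nn.LSTM(16, 16),
        'tag_clf': nn.Linear(16, 5),
    })

    exclude = ['tag_clf']  # keep only the classifier trainable
    for pname, p in model.named_parameters():
        if pname.split('.')[0] not in exclude:
            p.requires_grad = False

    trainable = [n for n, p in model.named_parameters() if p.requires_grad]
    # trainable == ['tag_clf.weight', 'tag_clf.bias']
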
Example #5
 def __init__(self, args=None, vocab=None, emb_matrix=None, model_file=None, use_cuda=False):
     self.use_cuda = use_cuda
     if model_file is not None:
         # load everything from file
         self.load(model_file, use_cuda)
     else:
         # build model from scratch
         self.args = args
         self.model = None if args['dict_only'] else Seq2SeqModel(args, emb_matrix=emb_matrix, use_cuda=use_cuda)
         self.vocab = vocab
         # dict-based components
         self.word_dict = dict()
         self.composite_dict = dict()
     if not self.args['dict_only']:
         if self.args.get('edit', False):
             self.crit = loss.MixLoss(self.vocab['char'].size, self.args['alpha'])
             logger.debug("Running seq2seq lemmatizer with edit classifier...")
         else:
             self.crit = loss.SequenceLoss(self.vocab['char'].size)
         self.parameters = [p for p in self.model.parameters() if p.requires_grad]
         if use_cuda:
             self.model.cuda()
             self.crit.cuda()
         else:
             self.model.cpu()
             self.crit.cpu()
         self.optimizer = utils.get_optimizer(self.args['optim'], self.parameters, self.args['lr'])
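
Every constructor follows the same load-or-build split: given model_file, self.load restores args, vocab, and weights; otherwise everything is built from the passed-in arguments. None of the load methods are shown here; the pair below is a hedged sketch of what saving and restoring such a bundle might look like (the checkpoint keys are assumptions, not the actual on-disk format):

    import torch

    def save(trainer, filename):
        # bundle config, vocab, and weights into a single checkpoint
        torch.save({
            'args': trainer.args,
            'vocab': trainer.vocab,
            'model': None if trainer.model is None else trainer.model.state_dict(),
        }, filename)

    def load(filename):
        # restore config, vocab, and raw weights; the caller rebuilds the
        # model from args and then calls model.load_state_dict(...)
        checkpoint = torch.load(filename, map_location='cpu')
        return checkpoint['args'], checkpoint['vocab'], checkpoint['model']
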
Example #6
 def __init__(self,
              args=None,
              vocab=None,
              pretrain=None,
              model_file=None,
              use_cuda=False):
     self.use_cuda = use_cuda
     if model_file is not None:
         # load everything from file
         self.load(model_file, args)
     else:
         assert all(var is not None for var in [args, vocab, pretrain])
         # build model from scratch
         self.args = args
         self.vocab = vocab
         self.model = NERTagger(args, vocab, emb_matrix=pretrain.emb)
     self.parameters = [
         p for p in self.model.parameters() if p.requires_grad
     ]
     if self.use_cuda:
         self.model.cuda()
     else:
         self.model.cpu()
     self.optimizer = utils.get_optimizer(self.args['optim'],
                                          self.parameters,
                                          self.args['lr'],
                                          momentum=self.args['momentum'])
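
Finally, each example toggles the whole model with model.cuda() or model.cpu(). The same intent is usually written today with a single torch.device, as in this short sketch:

    import torch
    import torch.nn as nn

    use_cuda = True          # stands in for the use_cuda flag above
    model = nn.Linear(4, 4)  # stands in for the task model

    device = torch.device('cuda' if use_cuda and torch.cuda.is_available() else 'cpu')
    model = model.to(device)  # one call replaces the cuda()/cpu() branches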