Code example #1
def __init__(self,
             args=None,
             vocab=None,
             pretrain=None,
             model_file=None,
             use_cuda=False):
    self.use_cuda = use_cuda
    if model_file is not None:
        # load everything from file
        self.load(pretrain, model_file)
    else:
        assert all(var is not None for var in [args, vocab, pretrain])
        # build model from scratch
        self.args = args
        self.vocab = vocab
        self.model = Tagger(args,
                            vocab,
                            emb_matrix=pretrain.emb,
                            share_hid=args['share_hid'])
    # keep only parameters the optimizer should update
    self.parameters = [
        p for p in self.model.parameters() if p.requires_grad
    ]
    # place the model on the requested device
    if self.use_cuda:
        self.model.cuda()
    else:
        self.model.cpu()
    self.optimizer = utils.get_optimizer(self.args['optim'],
                                         self.parameters,
                                         self.args['lr'],
                                         betas=(0.9, self.args['beta2']),
                                         eps=1e-6)
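All the constructors collected on this page share the same load-or-build shape: restore everything from model_file when one is given, otherwise assemble the model from args and vocab, then filter out frozen parameters, pick a device, and construct the optimizer. Below is a minimal, self-contained sketch of that pattern in plain PyTorch; ToyTrainer and its Linear stand-in model are hypothetical illustrations, not stanza/classla classes.

import torch
import torch.nn as nn


class ToyTrainer:
    """Hypothetical stand-in illustrating the load-or-build pattern above."""

    def __init__(self, args=None, model_file=None, use_cuda=False):
        self.use_cuda = use_cuda
        if model_file is not None:
            # load everything from file
            checkpoint = torch.load(model_file, map_location='cpu')
            self.args = checkpoint['config']
            self.model = nn.Linear(self.args['in_dim'], self.args['out_dim'])
            self.model.load_state_dict(checkpoint['model'])
        else:
            # build model from scratch
            assert args is not None
            self.args = args
            self.model = nn.Linear(args['in_dim'], args['out_dim'])
        # keep only parameters the optimizer should update
        self.parameters = [
            p for p in self.model.parameters() if p.requires_grad
        ]
        # place the model on the requested device
        if self.use_cuda:
            self.model.cuda()
        else:
            self.model.cpu()
        self.optimizer = torch.optim.Adam(self.parameters,
                                          lr=self.args['lr'],
                                          betas=(0.9, self.args['beta2']),
                                          eps=1e-6)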
Code example #2
def __init__(self,
             args=None,
             vocab=None,
             emb_matrix=None,
             model_file=None,
             use_cuda=False):
    self.use_cuda = use_cuda
    if model_file is not None:
        # load everything from file
        self.load(model_file, use_cuda)
    else:
        # build model from scratch; skip the seq2seq model in dict-only mode
        self.args = args
        self.model = None if args['dict_only'] else Seq2SeqModel(
            args, emb_matrix=emb_matrix, use_cuda=use_cuda)
        self.vocab = vocab
        # dict-based components
        self.fallback_dict = dict()
        self.composite_dict = dict()
    if not self.args['dict_only']:
        if self.args.get('edit', False):
            # joint loss over the character sequence and edit-type classes
            self.crit = loss.MixLoss(self.vocab['char'].size,
                                     self.args['alpha'])
            print("[Running seq2seq lemmatizer with edit classifier]")
        else:
            self.crit = loss.SequenceLoss(self.vocab['char'].size)
        self.parameters = [
            p for p in self.model.parameters() if p.requires_grad
        ]
        # place both the model and the loss on the requested device
        if use_cuda:
            self.model.cuda()
            self.crit.cuda()
        else:
            self.model.cpu()
            self.crit.cpu()
        self.optimizer = utils.get_optimizer(self.args['optim'],
                                             self.parameters,
                                             self.args['lr'])
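This constructor gives two mutually exclusive entry points: pass model_file to restore a saved lemmatizer, or pass args, vocab and emb_matrix to build one for training. A hedged usage sketch, assuming the enclosing class is named Trainer (as in stanza's lemmatizer module) and that args, vocab and emb_matrix have already been prepared:

import torch

# reload a previously saved model ('lemmatizer.pt' is a placeholder path)
trainer = Trainer(model_file='lemmatizer.pt', use_cuda=False)

# or build a fresh model for training
trainer = Trainer(args=args, vocab=vocab, emb_matrix=emb_matrix,
                  use_cuda=torch.cuda.is_available())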
Code example #3
def __init__(self,
             args=None,
             vocab=None,
             emb_matrix=None,
             model_file=None,
             use_cuda=False):
    self.use_cuda = use_cuda
    if model_file is not None:
        # load from file
        self.load(model_file, use_cuda)
    else:
        # build model from scratch; skip the seq2seq model in dict-only mode
        self.args = args
        self.model = None if args['dict_only'] else Seq2SeqModel(
            args, emb_matrix=emb_matrix)
        self.vocab = vocab
        self.expansion_dict = dict()
    if not self.args['dict_only']:
        self.crit = loss.SequenceLoss(self.vocab.size)
        self.parameters = [
            p for p in self.model.parameters() if p.requires_grad
        ]
        if use_cuda:
            self.model.cuda()
            self.crit.cuda()
        else:
            self.model.cpu()
            self.crit.cpu()
        self.optimizer = utils.get_optimizer(self.args['optim'],
                                             self.parameters,
                                             self.args['lr'])
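This constructor mirrors code example #2 almost line for line. The visible differences: it keeps a single expansion_dict instead of the lemmatizer's fallback_dict/composite_dict pair, it always uses SequenceLoss over a flat vocabulary (self.vocab.size rather than self.vocab['char'].size), it has no edit-classifier branch, and it builds Seq2SeqModel without passing use_cuda.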
Code example #4
File: trainer.py  Project: IgorTavcar/classla
def __init__(self,
             args=None,
             vocab=None,
             pretrain=None,
             model_file=None,
             use_cuda=False):
    self.use_cuda = use_cuda
    if model_file is not None:
        # load everything from file
        self.load(model_file, args)
    else:
        assert all(var is not None for var in [args, vocab, pretrain])
        # build model from scratch
        self.args = args
        self.vocab = vocab
        self.model = NERTagger(args, vocab, emb_matrix=pretrain.emb)
    self.parameters = [
        p for p in self.model.parameters() if p.requires_grad
    ]
    if self.use_cuda:
        self.model.cuda()
    else:
        self.model.cpu()
    # unlike the tagger above, this trainer configures momentum
    # rather than Adam-style betas
    self.optimizer = utils.get_optimizer(self.args['optim'],
                                         self.parameters,
                                         self.args['lr'],
                                         momentum=self.args['momentum'])
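Note how utils.get_optimizer is called with three different keyword patterns across these examples: betas and eps in code example #1, no extra keywords in #2 and #3, and momentum here. A minimal dispatcher consistent with those call sites might look like the sketch below; this is illustrative only, not the actual stanza/classla implementation:

import torch


def get_optimizer(name, parameters, lr,
                  betas=(0.9, 0.999), eps=1e-8, momentum=0.0):
    # illustrative sketch; the real utils.get_optimizer may support more
    # optimizers and different defaults
    if name == 'sgd':
        return torch.optim.SGD(parameters, lr=lr, momentum=momentum)
    if name == 'adagrad':
        return torch.optim.Adagrad(parameters, lr=lr)
    if name == 'adam':
        return torch.optim.Adam(parameters, lr=lr, betas=betas, eps=eps)
    raise ValueError("Unsupported optimizer: {}".format(name))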
Code example #5
def __init__(self,
             args=None,
             vocab=None,
             pretrain=None,
             model_file=None,
             use_cuda=False):
    self.use_cuda = use_cuda
    if model_file is not None:
        # load everything from file
        self.load(model_file, pretrain)
    else:
        # build model from scratch
        self.args = args
        self.vocab = vocab
        self.model = Tagger(
            args,
            vocab,
            emb_matrix=pretrain.emb if pretrain is not None else None,
            share_hid=args['share_hid'])

    # optionally constrain tagging through an inflectional lexicon derived
    # from a trained lemmatizer
    self.constrain_via_lexicon = (
        args['constrain_via_lexicon']
        if args is not None and 'constrain_via_lexicon' in args else None)
    self.inflectional_lexicon = None
    if self.constrain_via_lexicon:
        # reuse the lemmatizer's composite dictionary as the lexicon source
        inflectional_lexicon = LemmaTrainer(
            model_file=self.constrain_via_lexicon).composite_dict
        args['shorthand'] = (args['shorthand'] if 'shorthand' in args
                             else self.args['shorthand'])
        self.inflectional_lexicon = InflectionalLexicon(
            inflectional_lexicon, args['shorthand'], self.vocab, pretrain)
    self.parameters = [
        p for p in self.model.parameters() if p.requires_grad
    ]
    if self.use_cuda:
        self.model.cuda()
    else:
        self.model.cpu()
    self.optimizer = utils.get_optimizer(self.args['optim'],
                                         self.parameters,
                                         self.args['lr'],
                                         betas=(0.9, self.args['beta2']),
                                         eps=1e-6)
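To exercise the lexicon-constrained path, constrain_via_lexicon must point at a saved lemmatizer model whose composite_dict supplies the inflectional lexicon. A hypothetical construction, assuming the enclosing class is named Trainer and that vocab and pretrain are already built; every literal below is a placeholder, and only the keys actually read by the constructor above are shown:

args = {
    'optim': 'adam',
    'lr': 3e-3,
    'beta2': 0.95,
    'share_hid': False,
    'shorthand': 'sl_ssj',                     # placeholder treebank shorthand
    'constrain_via_lexicon': 'lemmatizer.pt',  # placeholder lemmatizer path
}
trainer = Trainer(args=args, vocab=vocab, pretrain=pretrain, use_cuda=False)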