def create(cls,
           train_path,
           valid_path,
           vocab_file,
           batch_size=16,
           cuda=True,
           is_cls=False,
           data_type="bert_cased",
           max_seq_len=424,
           is_meta=False):
    """Build a data container from train/valid files using BERT data loaders.

    Args:
        train_path: path to the training data file.
        valid_path: path to the validation data file.
        vocab_file: path to the BERT vocabulary file.
        batch_size: number of samples per batch.
        cuda: place tensors on GPU when True.
        is_cls: build classification-style loaders when True.
        data_type: "bert_cased" or "bert_uncased"; selects lower-casing.
        max_seq_len: maximum number of tokens per sequence.
        is_meta: include meta features when True.

    Returns:
        A new ``cls`` instance wrapping the created data loaders.

    Raises:
        NotImplementedError: if ``data_type`` is not a supported mode.
    """
    # Inside IPython/Jupyter, swap in the notebook-friendly progress bar.
    if ipython_info():
        global tqdm_notebook
        tqdm_notebook = tqdm
    # Both supported modes share the same loader factory; they differ
    # only in whether the tokenizer lower-cases its input.
    lower_case_by_type = {"bert_cased": False, "bert_uncased": True}
    if data_type not in lower_case_by_type:
        # Name the offending value instead of a generic message.
        raise NotImplementedError(
            "Unsupported data_type {!r}: expected one of {}.".format(
                data_type, sorted(lower_case_by_type)))
    do_lower_case = lower_case_by_type[data_type]
    fn = get_bert_data_loaders
    return cls(*fn(train_path, valid_path, vocab_file, batch_size, cuda,
                   is_cls, do_lower_case, max_seq_len, is_meta),
               batch_size=batch_size,
               cuda=cuda,
               is_meta=is_meta)
# Example #2
 def create(cls,
            train_path,
            valid_path,
            model_dir,
            config_name,
            batch_size=16,
            cuda=True,
            is_cls=False,
            oov='<oov>',
            pad='<pad>'):
     """Build a data container backed by ELMo data loaders.

     Args:
         train_path: path to the training data file.
         valid_path: path to the validation data file.
         model_dir: directory holding the ELMo model files.
         config_name: name of the ELMo configuration to load.
         batch_size: number of samples per batch.
         cuda: place tensors on GPU when True.
         is_cls: build classification-style loaders when True.
         oov: token used for out-of-vocabulary words.
         pad: token used for sequence padding.

     Returns:
         A new ``cls`` instance wrapping the created data loaders.
     """
     # Inside IPython/Jupyter, swap in the notebook-friendly progress bar.
     if ipython_info():
         global tqdm_notebook
         tqdm_notebook = tqdm
     loaders = get_elmo_data_loaders(train_path, valid_path, model_dir,
                                     config_name, batch_size, cuda,
                                     is_cls, oov, pad)
     return cls(*loaders, batch_size=batch_size, cuda=cuda)
# Example #3
 def __init__(self,
              model,
              data,
              best_model_path,
              lr=0.001,
              betas=(0.8, 0.9),
              clip=5,
              verbose=True,
              sup_labels=None,
              t_total=-1,
              warmup=0.1,
              weight_decay=0.01):
     """Set up the trainer: optimizer, data, and bookkeeping state.

     Args:
         model: the model (or its parameters) to optimize with BertAdam.
         data: data container; ``data.id2label`` supplies default labels.
         best_model_path: where the best checkpoint will be saved.
         lr: learning rate.
         betas: Adam (b1, b2) coefficients. Default is an immutable tuple
             rather than a list — a mutable default would be shared
             across calls.
         clip: max gradient norm for clipping.
         verbose: print progress information when True.
         sup_labels: labels to evaluate on; defaults to all labels except
             the first entry of ``data.id2label``.
         t_total: total optimization steps for the warmup schedule
             (-1 disables it).
         warmup: warmup proportion for the linear schedule.
         weight_decay: L2 weight decay passed to the optimizer defaults.
     """
     # Inside IPython/Jupyter, swap in the notebook-friendly progress bar.
     if ipython_info():
         global tqdm_notebook
         tqdm_notebook = tqdm
     self.model = model
     self.optimizer = BertAdam(model,
                               lr,
                               t_total=t_total,
                               b1=betas[0],
                               b2=betas[1],
                               max_grad_norm=clip)
     # Kept so the optimizer can be re-created later with the same settings.
     self.optimizer_defaults = dict(model=model,
                                    lr=lr,
                                    warmup=warmup,
                                    t_total=t_total,
                                    schedule='warmup_linear',
                                    b1=betas[0],
                                    b2=betas[1],
                                    e=1e-6,
                                    # Bug fix: was hard-coded to 0.01,
                                    # silently ignoring the parameter.
                                    weight_decay=weight_decay,
                                    max_grad_norm=clip)
     self.data = data
     if sup_labels is None:
         # Skip the first label (presumably the padding/"O" label —
         # TODO confirm against data.id2label layout).
         sup_labels = data.id2label[1:]
     self.sup_labels = sup_labels
     self.best_model_path = best_model_path
     self.verbose = verbose
     self.history = []
     self.cls_history = []
     self.epoch = 0
     self.clip = clip
     self.best_target_metric = 0.
     self.lr_scheduler = None