Example #1
 def __init__(self, pc: BK.ParamCollection, conf: MaskLMNodeConf,
              vpack: VocabPackage):
     super().__init__(pc, None, None)
     self.conf = conf
     # vocab and padder
     self.word_vocab = vpack.get_voc("word")
     self.padder = DataPadder(
         2, pad_vals=self.word_vocab.pad,
         mask_range=2)  # todo(note): <pad>-id is very large
     # models
     self.hid_layer = self.add_sub_node(
         "hid", Affine(pc, conf._input_dim, conf.hid_dim, act=conf.hid_act))
     self.pred_layer = self.add_sub_node(
         "pred",
         Affine(pc,
                conf.hid_dim,
                conf.max_pred_rank + 1,
                init_rop=NoDropRop()))
     if conf.init_pred_from_pretrain:
         npvec = vpack.get_emb("word")
         if npvec is None:
             zwarn(
                 "Pretrained vector not provided, skip init pred embeddings!!"
             )
         else:
             with BK.no_grad_env():
                 self.pred_layer.ws[0].copy_(
                     BK.input_real(npvec[:conf.max_pred_rank + 1].T))
             zlog(
                 f"Init pred embeddings from pretrained vectors (size={conf.max_pred_rank+1})."
             )
Example #2
 def __init__(self, pc: BK.ParamCollection, comp_name: str, ec_conf: EmbedderCompConf,
              conf: EmbedderNodeConf, vpack: VocabPackage):
     super().__init__(pc, comp_name, ec_conf, conf, vpack)
     # -----
     # get embeddings
     npvec = None
     if self.ec_conf.comp_init_from_pretrain:
         npvec = vpack.get_emb(comp_name)
         zlog(f"Try to init InputEmbedNode {comp_name} with npvec.shape={npvec.shape if (npvec is not None) else None}")
         if npvec is None:
             zwarn("Warn: cannot get pre-trained embeddings to init!!")
     # get rare unk range
     # - get freq vals, make sure special ones will not be pruned; todo(note): directly use that field
     voc_rare_mask = [float(z is not None and z<=ec_conf.comp_rare_thr) for z in self.voc.final_vals]
     self.rare_mask = BK.input_real(voc_rare_mask)
     self.use_rare_unk = (ec_conf.comp_rare_unk>0. and ec_conf.comp_rare_thr>0)
     # --
     # dropout outside explicitly
     self.E = self.add_sub_node(f"E{self.comp_name}", Embedding(
         pc, len(self.voc), self.comp_dim, fix_row0=conf.embed_fix_row0, npvec=npvec, name=comp_name,
         init_rop=NoDropRop(), init_scale=self.comp_init_scale))
     self.create_dropout_node()
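The key trick in this example is the rare-word mask: vocabulary entries whose training-set frequency is at or below `comp_rare_thr` get a 1 so they can later be randomly replaced by UNK, while special tokens (frequency `None`) are never marked. A small self-contained sketch of just that mask construction, with made-up `freqs` and `rare_thr` values:

    # Minimal sketch: build a 0/1 rare-word mask from vocabulary frequencies
    import torch

    freqs = [None, None, 120, 3, 1, 57, 2]   # None = special token, never treated as rare
    rare_thr = 5                             # illustrative threshold

    rare_mask = torch.tensor(
        [float(f is not None and f <= rare_thr) for f in freqs])
    # -> tensor([0., 0., 0., 1., 1., 0., 1.])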
Example #3
 def __init__(self, pc: BK.ParamCollection, econf: EmbedConf,
              vpack: VocabPackage):
     super().__init__(pc, None, None)
     self.conf = econf
     #
     repr_sizes = []
     # word
     self.has_word = (econf.dim_word > 0)
     if self.has_word:
         npvec = vpack.get_emb(
             "word") if econf.init_words_from_pretrain else None
         self.word_embed = self.add_sub_node(
             "ew",
             Embedding(self.pc,
                       len(vpack.get_voc("word")),
                       econf.dim_word,
                       npvec=npvec,
                       name="word",
                       freeze=econf.word_freeze))
         repr_sizes.append(econf.dim_word)
     # char
     self.has_char = (econf.dim_char > 0)
     if self.has_char:
         # todo(warn): cnns will also use emb's drop?
         self.char_embed = self.add_sub_node(
             "ec",
             Embedding(self.pc,
                       len(vpack.get_voc("char")),
                       econf.dim_char,
                       name="char"))
         per_cnn_size = econf.char_cnn_hidden // len(econf.char_cnn_windows)
         self.char_cnns = [
             self.add_sub_node(
                 "cnnc",
                 CnnLayer(self.pc,
                          econf.dim_char,
                          per_cnn_size,
                          z,
                          pooling="max",
                          act="tanh")) for z in econf.char_cnn_windows
         ]
         repr_sizes.append(econf.char_cnn_hidden)
     # posi: absolute positional embeddings
     self.has_posi = (econf.dim_posi > 0)
     if self.has_posi:
         self.posi_embed = self.add_sub_node(
             "ep",
             PosiEmbedding(self.pc, econf.dim_posi, econf.posi_clip,
                           econf.posi_fix_sincos, econf.posi_freeze))
         repr_sizes.append(econf.dim_posi)
     # extras: like POS, ...
     self.dim_extras = econf.dim_extras
     self.extra_names = econf.extra_names
     zcheck(
         len(self.dim_extras) == len(self.extra_names),
         "Unmatched dims and names!")
     self.extra_embeds = []
     for one_extra_dim, one_name in zip(self.dim_extras, self.extra_names):
         self.extra_embeds.append(
             self.add_sub_node(
                 "ext",
                 Embedding(self.pc,
                           len(vpack.get_voc(one_name)),
                           one_extra_dim,
                           npvec=vpack.get_emb(one_name, None),
                           name="extra:" + one_name)))
         repr_sizes.append(one_extra_dim)
     # auxes
     self.dim_auxes = econf.dim_auxes
     self.fold_auxes = econf.fold_auxes
     self.aux_overall_gammas = []
     self.aux_fold_lambdas = []
     for one_aux_dim, one_aux_fold in zip(self.dim_auxes, self.fold_auxes):
         repr_sizes.append(one_aux_dim)
         # aux gamma and fold trainable lambdas
         self.aux_overall_gammas.append(self.add_param("AG", (),
                                                       1.))  # scalar
         self.aux_fold_lambdas.append(
             self.add_param(
                 "AL", (), [1. / one_aux_fold
                            for _ in range(one_aux_fold)]))  # [#fold]
     # =====
     # another projection layer? & set final dim
     if len(repr_sizes) <= 0:
         zwarn("No inputs??")
     # zcheck(len(repr_sizes)>0, "No inputs?")
     self.repr_sizes = repr_sizes
     self.has_proj = (econf.emb_proj_dim > 0)
     if self.has_proj:
         proj_layer = Affine(self.pc, sum(repr_sizes), econf.emb_proj_dim)
         if econf.emb_proj_norm:
             norm_layer = LayerNorm(self.pc, econf.emb_proj_dim)
             self.final_layer = self.add_sub_node(
                 "fl", Sequential(self.pc, [proj_layer, norm_layer]))
         else:
             self.final_layer = self.add_sub_node("fl", proj_layer)
         self.output_dim = econf.emb_proj_dim
     else:
         self.final_layer = None
         self.output_dim = sum(repr_sizes)
     # =====
     # special MdDropout: dropout the entire last dim (for word, char, extras, but not posi)
     self.dropmd_word = self.add_sub_node("md", DropoutLastN(pc, lastn=1))
     self.dropmd_char = self.add_sub_node("md", DropoutLastN(pc, lastn=1))
     self.dropmd_extras = [
         self.add_sub_node("md", DropoutLastN(pc, lastn=1))
         for _ in self.extra_names
     ]
     # dropouts for aux
     self.drop_auxes = [
         self.add_sub_node("aux", Dropout(pc, (one_aux_dim, )))
         for one_aux_dim in self.dim_auxes
     ]