def __init__(self, ntoken, ninp, nhid, nhidlast,
             dropout=0.5, dropouth=0.5, dropoutx=0.5, dropouti=0.5, dropoute=0.1,
             cell_cls=DARTSCell, genotype=None):
    super(RNNModel, self).__init__()
    self.lockdrop = LockedDropout()
    self.encoder = nn.Embedding(ntoken, ninp)

    assert ninp == nhid == nhidlast
    if cell_cls == DARTSCell:
        assert genotype is not None
        self.rnns = [cell_cls(ninp, nhid, dropouth, dropoutx, genotype)]
    else:
        assert genotype is None
        self.rnns = [cell_cls(ninp, nhid, dropouth, dropoutx)]

    self.rnns = torch.nn.ModuleList(self.rnns)
    self.decoder = nn.Linear(ninp, ntoken)
    self.decoder.weight = self.encoder.weight  # weight tying
    self.init_weights()

    self.ninp = ninp
    self.nhid = nhid
    self.nhidlast = nhidlast
    self.dropout = dropout
    self.dropouti = dropouti
    self.dropoute = dropoute
    self.ntoken = ntoken
    self.cell_cls = cell_cls
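# Usage sketch for the variant above (assumption: RNNModel, DARTSCell, and a concrete
# `genotype` are importable from the surrounding DARTS-style codebase; all hyperparameter
# values are illustrative, not the repository's defaults):
#
#     model = RNNModel(ntoken=10000, ninp=300, nhid=300, nhidlast=300,
#                      dropout=0.4, dropouth=0.25, dropoutx=0.75,
#                      dropouti=0.2, dropoute=0.1,
#                      cell_cls=DARTSCell, genotype=genotype)
#
# ninp, nhid, and nhidlast must be equal (enforced by the assert), which is also what
# makes the encoder/decoder weight tying shape-compatible.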
def __init__(self, n_token, n_inp, n_hid, n_hid_last,
             dropout=0.5, dropout_h=0.5, dropout_x=0.5, dropout_i=0.5, dropout_e=0.1,
             cell_cls=DARTSCell):
    super().__init__()
    self.lockdrop = LockedDropout()
    # n_inp is the embedding size; the embedding maps input token indices to vectors
    self.encoder = nn.Embedding(n_token, n_inp)
    self.rnn = cell_cls(n_inp, n_hid, dropout_h, dropout_x)
    self.decoder = nn.Linear(n_inp, n_token)
    self.decoder.weight = self.encoder.weight  # weight tying
    self.init_weights()

    self.n_inp = n_inp
    self.n_hid = n_hid
    self.n_hid_last = n_hid_last
    self.dropout = dropout
    self.dropout_i = dropout_i
    self.dropout_e = dropout_e
    self.n_token = n_token
    self.cell_cls = cell_cls
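# Usage sketch for the single-cell variant above (assumption: it is paired with a cell
# class whose constructor takes no genotype, e.g. a search cell such as DARTSCellSearch;
# names and sizes below are illustrative):
#
#     model = RNNModel(n_token=10000, n_inp=256, n_hid=256, n_hid_last=256,
#                      cell_cls=DARTSCellSearch)
#
# Unlike the other variants, this constructor builds a single self.rnn rather than a
# ModuleList of cells.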
def __init__(self, ntoken, args, cell_cls=DARTSCell, genotype=None):
    super(RNNModel, self).__init__()
    self.lockdrop = LockedDropout()
    self.encoder = nn.Embedding(ntoken, args.emsize)
    self.nlayers = args.nlayers
    self.rnns = []

    assert args.emsize == args.nhid == args.nhidlast
    for layer in range(args.nlayers):
        if cell_cls == DARTSCell:
            assert genotype is not None
            self.rnns.append(
                cell_cls(args.emsize, args.nhid, args.dropouth, args.dropoutx,
                         genotype, args.use_matrices_on_edge, args.use_glorot,
                         args.num_intermediate_nodes, args.handle_hidden_mode))
        else:
            assert genotype is None
            # TODO: DARTSCell.__init__ has no default value for genotype,
            # so in theory the next line would raise an error
            self.rnns.append(
                cell_cls(args.emsize, args.nhid, args.dropouth, args.dropoutx))

    self.rnns = torch.nn.ModuleList(self.rnns)
    self.decoder = nn.Linear(args.emsize, ntoken)
    self.decoder.weight = self.encoder.weight  # weight tying
    self.init_weights()

    self.ninp = args.emsize
    self.nhid = args.nhid
    self.nhidlast = args.nhidlast
    self.dropout = args.dropout
    self.dropouti = args.dropouti
    self.dropoute = args.dropoute
    self.ntoken = ntoken
    self.cell_cls = cell_cls
    model_logger.info("MODEL INITIALIZED")
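# Usage sketch for the args-driven variant above (assumption: `args` is an
# argparse.Namespace carrying exactly the fields read in the constructor; the values
# below are illustrative placeholders, not the project's defaults):
#
#     args = argparse.Namespace(emsize=300, nhid=300, nhidlast=300, nlayers=1,
#                               dropout=0.4, dropouth=0.25, dropoutx=0.75,
#                               dropouti=0.2, dropoute=0.1,
#                               use_matrices_on_edge=False, use_glorot=True,
#                               num_intermediate_nodes=8, handle_hidden_mode=None)
#     model = RNNModel(ntoken=10000, args=args, cell_cls=DARTSCell, genotype=genotype)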
def __init__(self, ntoken, ninp, nhid, nhidlast,
             dropout=0.5, dropouth=0.5, dropoutx=0.5, dropouti=0.5, dropoute=0.1,
             nner=None, npos=None, token_emb_path=None, nclasses=None, nloc=None,
             cell_cls=DARTSCell, genotype=None):
    super(RNNModel, self).__init__()

    # TACRED attributes
    self.nner = nner
    self.npos = npos
    self.nhid = nhid
    self.nloc = nloc
    self.token_emb_path = token_emb_path
    self.nclasses = nclasses

    # Original attributes
    self.ninp = ninp
    self.nhidlast = nhidlast
    self.dropout = dropout
    self.dropouti = dropouti
    self.dropoute = dropoute
    self.ntoken = ntoken
    self.cell_cls = cell_cls

    self.lockdrop = LockedDropout()
    self.encoder = nn.Embedding(ntoken, ninp, padding_idx=constant.PAD_ID)

    self.peripheral_emb_dim = 0
    if self.nner is not None:
        self.ner_encoder = nn.Embedding(len(constant.NER_TO_ID), self.nner,
                                        padding_idx=constant.PAD_ID)
        self.peripheral_emb_dim += self.nner
    if self.npos is not None:
        self.npos_encoder = nn.Embedding(len(constant.POS_TO_ID), self.npos,
                                         padding_idx=constant.PAD_ID)
        self.peripheral_emb_dim += self.npos
    if self.nloc is not None:
        self.nloc_encoder = nn.Embedding(constant.MAX_LEN * 2 + 1, self.nloc)
        # nloc is counted twice: two position embeddings are concatenated per token,
        # presumably one for each entity offset
        self.peripheral_emb_dim += self.nloc + self.nloc

    # If using additional token attributes, project the concatenation back down to ninp
    if self.peripheral_emb_dim > 0:
        input_dim = self.ninp + self.peripheral_emb_dim
        self.input_aggregator = nn.Linear(in_features=input_dim,
                                          out_features=self.ninp)

    assert ninp == nhid == nhidlast
    if cell_cls == DARTSCell:
        assert genotype is not None
        self.rnns = [cell_cls(ninp, nhid, dropouth, dropoutx, genotype)]
    else:
        assert genotype is None
        self.rnns = [cell_cls(ninp, nhid, dropouth, dropoutx)]

    self.rnns = torch.nn.ModuleList(self.rnns)
    self.decoder = nn.Linear(ninp, nclasses)
    # self.decoder.weight = self.encoder.weight  # weight tying disabled: decoder outputs classes, not tokens
    self.init_weights()
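# Usage sketch for the TACRED relation-classification variant above (assumption: the
# `constant` module provides PAD_ID, NER_TO_ID, POS_TO_ID, and MAX_LEN as in the TACRED
# reference code; the embedding dimensions are illustrative, and nclasses=42 reflects
# TACRED's 41 relations plus no_relation):
#
#     model = RNNModel(ntoken=vocab_size, ninp=300, nhid=300, nhidlast=300,
#                      nner=30, npos=30, nloc=30, nclasses=42, token_emb_path=None,
#                      cell_cls=DARTSCell, genotype=genotype)
#
# With nner/npos/nloc set, token embeddings are concatenated with the peripheral
# embeddings and projected back to ninp by self.input_aggregator before entering the cells.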