Code Example #1
    def __init__(self,
        input_dim,
        emb_dim,
        enc_hid_dim,
        dec_hid_dim,
        attn_dim,
        num_layers,
        dropout,
        attention,
        dec_method,
        emb_freeze,
        pad_idx,
        embeddings=None
    ):
        super().__init__()

        self.emb_dim = emb_dim
        self.enc_hid_dim = enc_hid_dim
        self.dec_hid_dim = dec_hid_dim
        dec_input_dim = self.calc_dec_input_dim()
        self.output_dim = input_dim  # predictions are over the input vocabulary
        self.num_layers = num_layers
        self.dec_method = dec_method

        # Embedding lookup, optionally initialized from pretrained vectors.
        self.emb = utils.embedding(input_dim, emb_dim, embeddings, emb_freeze, pad_idx)
        # Unidirectional GRU decoder.
        self.decoder = nn.GRU(dec_input_dim, dec_hid_dim,
                num_layers=num_layers, bidirectional=False)
        # Project [context; hidden; embedding] to vocabulary logits.
        self.out = nn.Linear(enc_hid_dim + dec_hid_dim + emb_dim, self.output_dim)
        self.bias_out = nn.Linear(enc_hid_dim + dec_hid_dim + emb_dim, self.output_dim)
        self.Vo = nn.Parameter(torch.Tensor(dec_hid_dim, 1))
        self.attend = attention
        self.dropout = nn.Dropout(dropout)

        # Same initialization scheme nn.Linear uses for its weight matrix.
        nn.init.kaiming_uniform_(self.Vo, a=math.sqrt(5))
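These snippets call a project-level helper, utils.embedding, that is not shown on this page (calc_dec_input_dim is likewise a method of the surrounding class, not shown). A minimal sketch of what such a helper plausibly looks like, assuming it wraps nn.Embedding and nn.Embedding.from_pretrained; the actual lightcome/persona implementation may differ:

import torch.nn as nn

def embedding(input_dim, emb_dim, embeddings=None, freeze=False, pad_idx=None):
    # Hypothetical sketch: build an embedding layer, optionally initialized
    # from a pretrained [input_dim, emb_dim] weight tensor.
    if embeddings is not None:
        return nn.Embedding.from_pretrained(embeddings, freeze=freeze, padding_idx=pad_idx)
    return nn.Embedding(input_dim, emb_dim, padding_idx=pad_idx)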
Code Example #2
File: modules.py  Project: lightcome/persona
    def __init__(
        self,
        input_dim,
        emb_dim,
        enc_hid_dim,
        n_profile,
        attention,
        dropout,
        emb_freeze,
        pad_idx,
        embeddings=None
    ):
        """TODO: use a better classifier; see:
         https://github.com/brightmart/text_classification
         https://github.com/kk7nc/Text_Classification
         https://github.com/nadbordrozd/text-top-model
         https://github.com/prakashpandey9/Text-Classification-Pytorch
         https://github.com/dennybritz/cnn-text-classification-tf
        """
        super().__init__()
        self.emb_dim = emb_dim

        # Embedding lookup, optionally initialized from pretrained vectors.
        self.emb = utils.embedding(input_dim, emb_dim, embeddings, emb_freeze, pad_idx)
        self.attention = attention
        self.dropout = nn.Dropout(dropout)
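This module only builds an embedding, an attention module, and dropout, so its forward pass presumably attention-pools the embedded sequence. A hedged sketch of such a step; the method body and the attention call's API are assumptions, not the project's actual code:

def forward(self, src):
    # src: [seq_len, batch] token indices
    embedded = self.dropout(self.emb(src))   # [seq_len, batch, emb_dim]
    pooled = self.attention(embedded)        # assumed API: pools over the time axis
    return pooled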
Code Example #3
File: modules.py  Project: lightcome/persona
    def __init__(self,
        input_dim,
        emb_dim,
        enc_hid_dim,
        dec_hid_dim,
        attn_dim,
        num_layers,
        dropout,
        attention,
        emb_freeze,
        pad_idx,
        embeddings=None
    ):
        super().__init__()

        self.emb_dim = emb_dim
        self.enc_hid_dim = enc_hid_dim
        self.dec_hid_dim = dec_hid_dim
        dec_input_dim = self.calc_dec_input_dim()
        self.output_dim = input_dim  # predictions are over the input vocabulary
        self.num_layers = num_layers

        # Embedding lookup, optionally initialized from pretrained vectors.
        self.emb = utils.embedding(input_dim, emb_dim, embeddings, emb_freeze, pad_idx)
        # Unidirectional GRU decoder.
        self.decoder = nn.GRU(dec_input_dim, dec_hid_dim,
                num_layers=num_layers, bidirectional=False)
        # Project [context; hidden; embedding] to vocabulary logits.
        self.out = nn.Linear(enc_hid_dim + dec_hid_dim + emb_dim, self.output_dim)
        self.attend = attention
        self.dropout = nn.Dropout(dropout)
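The input size of self.out, enc_hid_dim + dec_hid_dim + emb_dim, indicates that each decoding step concatenates the attention context, the GRU output, and the current token embedding before projecting to vocabulary logits. An illustrative sketch of that step; the tensor names are assumptions:

# context:  [batch, enc_hid_dim]  attention-weighted encoder states
# dec_out:  [batch, dec_hid_dim]  GRU output for this step
# embedded: [batch, emb_dim]      current input token embedding
logits = self.out(torch.cat((context, dec_out, embedded), dim=1))  # [batch, output_dim]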
Code Example #4
    def __init__(
        self,
        input_dim,
        emb_dim,
        dropout,
        emb_freeze,
        pad_idx,
        embeddings=None
    ):
        super().__init__()

        # Embedding lookup with dropout; no recurrent layer in this module.
        self.emb = utils.embedding(input_dim, emb_dim, embeddings, emb_freeze, pad_idx)
        self.dropout = nn.Dropout(dropout)
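A hypothetical instantiation of this embedding-only module; the class name Emb and all argument values are made up for illustration:

import torch

enc = Emb(input_dim=10000, emb_dim=300, dropout=0.1,
          emb_freeze=False, pad_idx=0)
tokens = torch.randint(0, 10000, (20, 32))    # [seq_len, batch]
vectors = enc.dropout(enc.emb(tokens))        # [20, 32, 300]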
Code Example #5
    def __init__(self,
        input_dim,
        emb_dim,
        enc_hid_dim,
        dec_hid_dim,
        num_layers,
        dropout,
        enc_bidi,
        emb_freeze,
        pad_idx,
        embeddings=None
    ):
        super().__init__()
        self.enc_bidi = enc_bidi
        self.num_layers = num_layers
        self.num_directions = _num_dir(enc_bidi)  # 2 if bidirectional, else 1
        self.enc_hid_dim = enc_hid_dim

        # Embedding lookup, optionally initialized from pretrained vectors.
        self.emb = utils.embedding(input_dim, emb_dim, embeddings, emb_freeze, pad_idx)
        # GRU encoder, optionally bidirectional.
        self.encoder = nn.GRU(emb_dim, enc_hid_dim,
                num_layers=num_layers, bidirectional=enc_bidi)
        # Map the (possibly concatenated) final encoder state to the decoder's hidden size.
        self.out = nn.Linear(enc_hid_dim * self.num_directions, dec_hid_dim)
        self.dropout = nn.Dropout(dropout)
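_num_dir is a module-level helper that does not appear in these snippets; from its use it evidently returns the number of RNN directions. A minimal sketch under that assumption:

def _num_dir(bidirectional):
    # A bidirectional RNN produces two states per layer (forward and backward).
    return 2 if bidirectional else 1

With enc_bidi=True, the final forward and backward encoder states are concatenated to width enc_hid_dim * 2, which self.out then maps down to dec_hid_dim to initialize the decoder.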