Example #1
    def embedding_layer(self):
        self.input_layer()
        # Randomly initialized embeddings
        self.positions_up_embed = load_random_embedding(len(self.all_word_index[position_index]), self.embedding_size, self.positons_up)
        self.positions_down_embed = load_random_embedding(len(self.all_word_index[position_index]), self.embedding_size, self.positons_down)
        self.poses_up_embed = load_random_embedding(len(self.all_word_index[poses_index]), self.embedding_size, self.poses_up)
        self.poses_down_embed = load_random_embedding(len(self.all_word_index[poses_index]), self.embedding_size, self.poses_down)
        self.sts_up_embed = load_random_embedding(len(self.all_word_index[poses_index]), self.embedding_size, self.sts_up)
        self.sts_down_embed = load_random_embedding(len(self.all_word_index[poses_index]), self.embedding_size, self.sts_down)

        # GloVe word embeddings
        embedding_matrix = load_glove_embedding(
            word_index=self.all_word_index[word_index],
            file='',
            trimmed_filename=self.embedding_path,
            load=True,
            dim=self.embedding_size
        )
        glove_w2v = tf.Variable(embedding_matrix, dtype=tf.float32, name='glove_w2v')
        self.word_up_embed = tf.nn.embedding_lookup(glove_w2v, self.word_up)
        self.word_down_embed = tf.nn.embedding_lookup(glove_w2v, self.word_down)

        # dropout [b_s, max_len, 300]
        self.word_up_embed = tf.nn.dropout(self.word_up_embed, self.dropout_keep_prob)
        self.word_down_embed = tf.nn.dropout(self.word_down_embed, self.dropout_keep_prob)
        self.positions_up_embed = tf.nn.dropout(self.positions_up_embed, self.dropout_keep_prob)
        self.positions_down_embed = tf.nn.dropout(self.positions_down_embed, self.dropout_keep_prob)
        self.poses_up_embed = tf.nn.dropout(self.poses_up_embed, self.dropout_keep_prob)
        self.poses_down_embed = tf.nn.dropout(self.poses_down_embed, self.dropout_keep_prob)
        self.sts_up_embed = tf.nn.dropout(self.sts_up_embed, self.dropout_keep_prob)
        self.sts_down_embed = tf.nn.dropout(self.sts_down_embed, self.dropout_keep_prob)

        # Compose (element-wise sum) [b_s, max_len, 300]
        self.up_compose = tf.add(
            tf.add(self.word_up_embed, self.positions_up_embed),
            tf.add(self.sts_up_embed, self.poses_up_embed)
        )
        self.down_compose = tf.add(
            tf.add(self.word_down_embed, self.positions_down_embed),
            tf.add(self.sts_down_embed, self.poses_down_embed)
        )
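
The `load_random_embedding` helper called above is project-specific and not shown in this example. Below is a minimal sketch of what it might look like, assuming it builds a trainable, uniformly initialized table of shape [vocab_size, dim] and returns the lookup for the given id tensor; the function name and argument order come from the call sites, while the initializer range and any padding handling are assumptions, not the project's actual implementation.

import tensorflow as tf  # TF 1.x-style API, as used in the examples

def load_random_embedding(vocab_size, dim, ids, name='random_embed'):
    # Hypothetical helper: trainable embedding table with uniform
    # initialization, followed by a lookup. The real project helper
    # may differ (scoping, initializer, padding row, etc.).
    table = tf.Variable(
        tf.random_uniform([vocab_size, dim], -0.01, 0.01),
        dtype=tf.float32, name=name)
    return tf.nn.embedding_lookup(table, ids)  # [batch_size, seq_len, dim]

Under this shape contract, each of the positions_*, poses_* and sts_* lookups above yields a [b_s, max_len, embedding_size] tensor that can be summed with the GloVe word embeddings in the compose step.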
Example #2
    def embedding_layer(self):
        self.input_layer()
        # Randomly initialized embeddings
        self.positions_up_embed = load_random_embedding(
            len(self.all_word_index[position_index]), self.embedding_size,
            self.positons_up)
        self.positions_down_embed = load_random_embedding(
            len(self.all_word_index[position_index]), self.embedding_size,
            self.positons_down)
        self.poses_up_embed = load_random_embedding(
            len(self.all_word_index[poses_index]), self.embedding_size,
            self.poses_up)
        self.poses_down_embed = load_random_embedding(
            len(self.all_word_index[poses_index]), self.embedding_size,
            self.poses_down)
        self.sts_up_embed = load_random_embedding(
            len(self.all_word_index[poses_index]), self.embedding_size,
            self.sts_up)
        self.sts_down_embed = load_random_embedding(
            len(self.all_word_index[poses_index]), self.embedding_size,
            self.sts_down)

        # GloVe word embeddings
        embedding_matrix = load_glove_embedding(
            word_index=self.all_word_index[word_index],
            file='',
            trimmed_filename=self.embedding_path,
            load=True,
            dim=self.embedding_size)
        glove_w2v = tf.Variable(embedding_matrix,
                                dtype=tf.float32,
                                name='glove_w2v')
        self.word_up_embed = tf.nn.embedding_lookup(glove_w2v, self.word_up)
        self.word_down_embed = tf.nn.embedding_lookup(glove_w2v,
                                                      self.word_down)

        # Average the aspect word embeddings and expand them over the sequence
        self.asp_embed = tf.nn.embedding_lookup(
            glove_w2v, self.aspects)  # [b_s, asp_max_len, 300]
        self.asp_embed = tf.reduce_mean(
            self.asp_embed, axis=1,
            keepdims=True) / self.asp_max_len  # [b_s, 1, 300]
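        # Multiplying by an all-ones [b_s, max_len, 300] tensor below
        # broadcasts the averaged [b_s, 1, 300] aspect vector across all
        # max_len time steps; tf.tile(self.asp_embed, [1, self.max_len, 1])
        # would produce the same shape.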
        self.asp_embed = tf.multiply(
            tf.constant(1, tf.float32,
                        [self.batch_size, self.max_len, self.embedding_size]),
            self.asp_embed / self.asp_max_len)

        # dropout [b_s, max_len, 300]
        self.word_up_embed = tf.nn.dropout(self.word_up_embed,
                                           self.dropout_keep_prob)
        self.word_down_embed = tf.nn.dropout(self.word_down_embed,
                                             self.dropout_keep_prob)
        self.positions_up_embed = tf.nn.dropout(self.positions_up_embed,
                                                self.dropout_keep_prob)
        self.positions_down_embed = tf.nn.dropout(self.positions_down_embed,
                                                  self.dropout_keep_prob)
        self.poses_up_embed = tf.nn.dropout(self.poses_up_embed,
                                            self.dropout_keep_prob)
        self.poses_down_embed = tf.nn.dropout(self.poses_down_embed,
                                              self.dropout_keep_prob)
        self.sts_up_embed = tf.nn.dropout(self.sts_up_embed,
                                          self.dropout_keep_prob)
        self.sts_down_embed = tf.nn.dropout(self.sts_down_embed,
                                            self.dropout_keep_prob)

        # Compose (element-wise sum) [b_s, max_len, 300]
        self.up_compose = tf.add(
            tf.add(self.word_up_embed, self.positions_up_embed),
            tf.add(self.sts_up_embed, self.poses_up_embed))
        self.down_compose = tf.add(
            tf.add(self.word_down_embed, self.positions_down_embed),
            tf.add(self.sts_down_embed, self.poses_down_embed))
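
`load_glove_embedding` is likewise a project-level helper. The sketch below is one plausible implementation, assuming that `trimmed_filename` points at an .npz cache of the trimmed GloVe matrix, that `load=True` means "reuse the cache", and that `word_index` maps words to row indices; none of this is confirmed by the original code.

import numpy as np

def load_glove_embedding(word_index, file, trimmed_filename, load, dim):
    # Hypothetical helper matching the keyword arguments used above.
    if load:
        # Reuse a previously trimmed matrix (assumes an .npz archive).
        return np.load(trimmed_filename)['embeddings']
    # Otherwise parse the raw GloVe text file and keep only known words;
    # words without a pretrained vector stay at a small random init.
    embeddings = np.random.uniform(-0.01, 0.01, (len(word_index) + 1, dim))
    with open(file, encoding='utf-8') as f:
        for line in f:
            parts = line.rstrip().split(' ')
            word, vector = parts[0], parts[1:]
            if word in word_index and len(vector) == dim:
                embeddings[word_index[word]] = np.asarray(vector,
                                                          dtype=np.float32)
    np.savez_compressed(trimmed_filename, embeddings=embeddings)
    return embeddings

Caching a trimmed matrix this way avoids re-reading the full GloVe file on every run, which is presumably why both examples pass `load=True` together with `self.embedding_path`.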