def char_lm(key, sentence, labels):
    # Character-level language model: delegate to ar_lm, sharing one character
    # embedding table whose size is controlled by the qnd flag.
    return ar_lm(key,
                 sentence,
                 labels,
                 char_embeddings=ex.embeddings(
                     id_space_size=len(get_chars()),
                     embedding_size=qnd.FLAGS.char_embedding_size,
                     name='char_embeddings'))
def word2sent2doc(document,
                  *,
                  word_space_size,
                  word_embedding_size,
                  **rd2sent2doc_hyperparams):
    # `document` is a rank-3 tensor of word IDs:
    # (#examples, #sentences per document, #words per sentence).
    assert ex.static_rank(document) == 3

    # Look up an embedding vector for every word ID in the document tensor.
    with tf.variable_scope("word_embeddings"):
        word_embeddings = tf.gather(
            ex.embeddings(id_space_size=word_space_size,
                          embedding_size=word_embedding_size,
                          name="word_embeddings"),
            ex.flatten(document))

    # Compose sentence and document embeddings from the word embeddings.
    return rd2sent2doc(document,
                       word_embeddings,
                       save_memory=True,
                       **rd2sent2doc_hyperparams)
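# A minimal usage sketch of word2sent2doc (not from the original source): the
# placeholder shape and the vocabulary/embedding sizes below are hypothetical,
# and any hyperparameters required by rd2sent2doc would be passed through the
# trailing keyword arguments.
def _word2sent2doc_example():
    # (documents, sentences per document, words per sentence) word IDs
    document = tf.placeholder(tf.int32, [None, None, None])
    return word2sent2doc(document,
                         word_space_size=50000,
                         word_embedding_size=200)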
def char2word2sent2doc(document,
                       *,
                       words,
                       char_space_size,
                       char_embedding_size,
                       **ar2word2sent2doc_hyperparams):
    """The argument `document` is in the shape of
    (#examples, #sentences per document, #words per sentence).
    """
    assert ex.static_rank(document) == 3
    assert ex.static_rank(words) == 2

    with tf.variable_scope("char_embeddings"):
        char_embeddings = ex.embeddings(id_space_size=char_space_size,
                                        embedding_size=char_embedding_size,
                                        name="char_embeddings")

    return ar2word2sent2doc(document,
                            words=words,
                            char_embeddings=char_embeddings,
                            **ar2word2sent2doc_hyperparams)
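# A minimal usage sketch of char2word2sent2doc (not from the original source):
# shapes and sizes are hypothetical. `document` holds word IDs, and `words` is
# assumed to map each word ID to its character ID sequence so that word vectors
# can be built from character embeddings; extra hyperparameters would be
# forwarded to ar2word2sent2doc.
def _char2word2sent2doc_example():
    # (documents, sentences per document, words per sentence) word IDs
    document = tf.placeholder(tf.int32, [None, None, None])
    # (words in the vocabulary, characters per word) character IDs
    words = tf.placeholder(tf.int32, [None, None])
    return char2word2sent2doc(document,
                              words=words,
                              char_space_size=128,
                              char_embedding_size=50)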