def load_model(self, file_path):
    """Restore a saved model from *file_path* and rebuild the encoder.

    Hyper-parameters (embedding size, max length, latent/encoding sizes)
    are recovered from the loaded model's layer shapes, and the word
    transformers are re-created to match.
    """
    self.model = self.load_customed_model(file_path)
    # The encoder is the sub-network up to the picked layer-2 activation.
    encoded_output = Pick()(self.model.get_layer(index=2).output)
    self.encoder = Model(self.model.input, encoded_output)
    # Recover hyper-parameters from the restored model's shapes.
    input_shape = self.model.input_shape
    self.word_embedding_size = input_shape[2]
    self.max_length = input_shape[1]
    self.latent_size = (
        self.model.get_layer(index=2).layer.recurrent_layer.units
    )
    self.encoding_size = self.model.get_layer(index=3).input_shape[2]
    # Rebuild transformers so raw sequences can be fed to the model again.
    self.input_transformer = WordEmbeddingTransformer(
        self.word2vec_model, self.max_length)
    self.output_transformer = WordEmbeddingTransformer(
        self.word2vec_model, self.max_length)
def __init__(self, word2vec_model, max_length=10, latent_size=300,
             learning_rate=0.0001, conv_size=5, channel_size=10, **kwargs):
    """Build a conv-encoder / RNN-decoder word seq2vec wrapper.

    The input side runs words through a 3D-conv embedding transform;
    the output side uses the plain word-embedding transform.
    """
    self.word2vec_model = word2vec_model
    self.input_transformer = WordEmbeddingConv3DTransformer(
        word2vec_model,
        max_length,
    )
    self.output_transformer = WordEmbeddingTransformer(
        word2vec_model, max_length)
    self.word_embedding_size = word2vec_model.get_size()
    self.conv_size = conv_size
    self.channel_size = channel_size
    # Encoding width produced by the conv/pool stack:
    # (embedding_size // conv_size) positions, channel_size channels each.
    self.encoding_size = (
        self.word_embedding_size // self.conv_size * self.channel_size
    )
    super(Seq2VecC2RWord, self).__init__(
        max_length, latent_size, learning_rate)
    # Register every custom Keras layer so a saved model can be reloaded.
    self.custom_objects.update({
        'RNNDecoder': RNNDecoder,
        'MaskPooling': MaskPooling,
        'MaskToSeq': MaskToSeq,
        'MaskConv': MaskConv,
        'MaskConvNet': MaskConvNet,
        'ConvEncoder': ConvEncoder,
        'RNNCell': RNNCell,
        'Pick': Pick,
    })
def __init__(self, word2vec_model, max_length=10, latent_size=20,
             encoding_size=100, learning_rate=0.0001, **kwargs):
    """Build an RNN-encoder / RNN-decoder word seq2vec wrapper.

    Both the input and output sides share the same word-embedding
    transform built from *word2vec_model*.
    """
    self.word2vec_model = word2vec_model
    self.input_transformer = WordEmbeddingTransformer(
        word2vec_model, max_length)
    self.output_transformer = WordEmbeddingTransformer(
        word2vec_model, max_length)
    self.word_embedding_size = word2vec_model.get_size()
    self.encoding_size = encoding_size
    super(Seq2VecR2RWord, self).__init__(
        max_length=max_length,
        latent_size=latent_size,
        learning_rate=learning_rate,
    )
    # Register custom Keras layers needed to deserialize a saved model.
    self.custom_objects.update({
        'BidirectionalRNNEncoder': BidirectionalRNNEncoder,
        'RNNDecoder': RNNDecoder,
        'RNNCell': RNNCell,
        'Pick': Pick,
    })
class TestWordEmbeddingTransformerClass(WordEmbeddingTransformerBase,
                                        TestCase):
    """Tests for WordEmbeddingTransformer sequence transformation."""

    def setUp(self):
        super(TestWordEmbeddingTransformerClass, self).setUp()
        self.transformer = WordEmbeddingTransformer(
            word2vec_model=self.word2vec,
            max_length=self.max_length,
        )

    def test_seq_transform_shape(self):
        # Every transformed token vector has the word2vec embedding size.
        result = self.transformer.seq_transform(self.seqs[0])
        self.assertEqual(len(result[0]), self.word2vec.get_size())

    def test_call_zero_padding(self):
        # The last timestep must be zero padding while earlier timesteps
        # still carry non-zero embedding values.
        arr = self.transformer(self.seqs)
        np.testing.assert_array_almost_equal(
            np.zeros((len(self.seqs), self.word2vec.get_size())),
            arr[:, -1, :],
        )
        self.assertTrue(np.sum(arr[:, :-1, :], dtype=bool))

    def test_call_shape(self):
        expected_shape = (
            len(self.seqs), self.max_length, self.word2vec.get_size(),
        )
        self.assertEqual(self.transformer(self.seqs).shape, expected_shape)
def load_model(self, file_path):
    """Restore a saved char-conv model and rebuild encoder/transformers.

    Hyper-parameters (char embedding size, max length/index, conv and
    channel sizes, latent/encoding sizes) are recovered from the loaded
    model's layer shapes.
    """
    self.model = self.load_customed_model(file_path)
    # Encoder is the sub-network up to the picked layer-7 activation.
    encoded_output = Pick()(self.model.get_layer(index=7).output)
    self.encoder = Model(self.model.input, encoded_output)
    # Recover hyper-parameters from the restored model's layer shapes.
    self.char_embedding_size = (
        self.model.get_layer(index=1).output_shape[2]
    )
    self.max_length = self.model.get_layer(index=0).output_shape[1]
    self.max_index = self.model.input_shape[2]
    # Layer 4 output: (batch, time, embedding // conv_size, channels).
    conv_output_shape = self.model.get_layer(index=4).output_shape
    self.conv_size = self.char_embedding_size // conv_output_shape[2]
    self.channel_size = conv_output_shape[3]
    self.encoding_size = self.encoder.output_shape[1]
    self.latent_size = (
        self.model.get_layer(index=8).layer.recurrent_layer.units
    )
    # Rebuild transformers: one-hot chars in, word embeddings out.
    self.input_transformer = CharEmbeddingOneHotTransformer(
        self.max_index,
        self.max_length,
    )
    self.output_transformer = WordEmbeddingTransformer(
        self.word2vec,
        self.max_length,
    )
def setUp(self):
    """Create a fresh WordEmbeddingTransformer before each test."""
    super(TestWordEmbeddingTransformerClass, self).setUp()
    self.transformer = WordEmbeddingTransformer(
        word2vec_model=self.word2vec,
        max_length=self.max_length,
    )