Example #1
def _build_block(self, hp, output_node, block_type):
    max_tokens = self.max_tokens or hp.Choice(
        MAX_TOKENS, [500, 5000, 20000], default=5000)
    if block_type == NGRAM:
        # N-gram path: vectorize the text into a fixed-length bag-of-n-grams,
        # then apply dense layers and return early.
        output_node = preprocessing.TextToNgramVector(
            max_tokens=max_tokens).build(hp, output_node)
        return basic.DenseBlock().build(hp, output_node)
    if block_type == BERT:
        # BERT path: BertBlock handles its own tokenization and encoding.
        output_node = basic.BertBlock().build(hp, output_node)
    else:
        # Sequence paths: convert the text to integer token ids first.
        output_node = preprocessing.TextToIntSequence(
            max_tokens=max_tokens).build(hp, output_node)
        if block_type == TRANSFORMER:
            output_node = basic.Transformer(
                max_features=max_tokens + 1,
                pretraining=self.pretraining,
            ).build(hp, output_node)
        else:
            # Vanilla path: embedding followed by a convolutional block.
            output_node = basic.Embedding(
                max_features=max_tokens + 1,
                pretraining=self.pretraining,
            ).build(hp, output_node)
            output_node = basic.ConvBlock().build(hp, output_node)
        # Collapse the sequence dimension, then apply dense layers.
        output_node = reduction.SpatialReduction().build(hp, output_node)
        output_node = basic.DenseBlock().build(hp, output_node)
    return output_node
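
This example looks like the _build_block helper of AutoKeras's TextBlock: depending on the block_type hyperparameter it routes the text through an n-gram vector, a BERT encoder, a Transformer, or an embedding-plus-convolution pipeline. Below is a minimal usage sketch of how such a block is typically wired into an AutoKeras pipeline; it is an assumption about the public AutoKeras 1.x API, not code taken from this example, and x_train/y_train are placeholders.

import autokeras as ak

# Sketch: let the tuner choose block_type and max_tokens inside TextBlock.
input_node = ak.TextInput()
output_node = ak.TextBlock()(input_node)    # or ak.TextBlock(block_type="ngram") to fix the path
output_node = ak.ClassificationHead()(output_node)
clf = ak.AutoModel(inputs=input_node, outputs=output_node, max_trials=2)
# clf.fit(x_train, y_train)  # x_train: array/Dataset of strings, y_train: labels (placeholders)
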
Example #2
def build(self, hp, inputs=None):
    input_node = nest.flatten(inputs)[0]
    output_node = input_node
    block_type = self.block_type or hp.Choice(
        'block_type', ['vanilla', 'transformer', 'ngram'],
        default='vanilla')
    max_tokens = self.max_tokens or hp.Choice(
        'max_tokens', [500, 5000, 20000], default=5000)
    if block_type == 'ngram':
        output_node = preprocessing.TextToNgramVector(
            max_tokens=max_tokens).build(hp, output_node)
        output_node = basic.DenseBlock().build(hp, output_node)
    else:
        output_node = preprocessing.TextToIntSequence(
            max_tokens=max_tokens).build(hp, output_node)
        if block_type == 'transformer':
            output_node = basic.Transformer(
                max_features=max_tokens + 1,
                pretraining=self.pretraining).build(hp, output_node)
        else:
            output_node = basic.Embedding(
                max_features=max_tokens + 1,
                pretraining=self.pretraining).build(hp, output_node)
            output_node = basic.ConvBlock().build(hp, output_node)
        output_node = reduction.SpatialReduction().build(hp, output_node)
        output_node = basic.DenseBlock().build(hp, output_node)
    return output_node
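
Example #2 is an earlier variant of the same search space (no BERT branch, string literals instead of module constants). Both versions rely on the "self.attr or hp.Choice(...)" idiom so that a value fixed in the constructor bypasses tuning. The sketch below shows how that idiom resolves, assuming the KerasTuner HyperParameters API (kerastuner in older AutoKeras releases, keras_tuner in newer ones); it is illustrative only and not part of the example.

import keras_tuner  # older AutoKeras versions use `import kerastuner` instead

hp = keras_tuner.HyperParameters()

# Value fixed by the user: `or` short-circuits, no hyperparameter is registered.
fixed = 'ngram' or hp.Choice('block_type', ['vanilla', 'transformer', 'ngram'],
                             default='vanilla')

# Value left as None: the choice is registered and its default is returned
# when the HyperParameters object is used outside an active search.
tuned = None or hp.Choice('block_type', ['vanilla', 'transformer', 'ngram'],
                          default='vanilla')

print(fixed, tuned)  # ngram vanilla
print(hp.values)     # {'block_type': 'vanilla'}
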
Example #3
def test_text_to_ngram_vector():
    utils.block_basic_exam(
        preprocessing.TextToNgramVector(),
        tf.keras.Input(shape=(1, ), dtype=tf.string),
        ['ngrams'],
    )
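
Example #3 tests the TextToNgramVector preprocessing block; block_basic_exam presumably builds the block on a fresh HyperParameters object and verifies that the listed hyperparameter names (here 'ngrams') were registered. The sketch below re-creates that check under the same assumption; the helper's real implementation may differ, and the import paths are assumptions about the AutoKeras layout.

import keras_tuner  # or `kerastuner`, depending on the AutoKeras version
import tensorflow as tf
from autokeras.blocks import preprocessing

hp = keras_tuner.HyperParameters()
block = preprocessing.TextToNgramVector()
block.build(hp, tf.keras.Input(shape=(1,), dtype=tf.string))

# The block should have registered an 'ngrams' choice (possibly name-scoped).
assert any(name.split('/')[-1] == 'ngrams' for name in hp.values)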