def _build_block(self, hp, output_node, block_type):
    """Assemble the text sub-network selected by *block_type*.

    NGRAM vectorizes to bag-of-ngrams and runs dense layers; BERT delegates
    entirely to the BERT block; otherwise the text is tokenized to an int
    sequence and fed through either a Transformer or an Embedding+Conv
    stack, then reduced and densified.
    """
    # Vocabulary size: fixed when configured, otherwise a tuned choice.
    max_tokens = self.max_tokens or hp.Choice(
        MAX_TOKENS, [500, 5000, 20000], default=5000)

    if block_type == NGRAM:
        # Bag-of-ngrams path: vectorize, then dense layers only.
        vector = preprocessing.TextToNgramVector(
            max_tokens=max_tokens).build(hp, output_node)
        return basic.DenseBlock().build(hp, vector)

    if block_type == BERT:
        # BERT handles its own preprocessing and encoding.
        return basic.BertBlock().build(hp, output_node)

    # Sequence path shared by the transformer and embedding variants.
    node = preprocessing.TextToIntSequence(
        max_tokens=max_tokens).build(hp, output_node)
    if block_type == TRANSFORMER:
        node = basic.Transformer(
            # +1 reserves an index for out-of-vocabulary tokens.
            max_features=max_tokens + 1,
            pretraining=self.pretraining,
        ).build(hp, node)
    else:
        node = basic.Embedding(
            max_features=max_tokens + 1,
            pretraining=self.pretraining,
        ).build(hp, node)
        node = basic.ConvBlock().build(hp, node)
    node = reduction.SpatialReduction().build(hp, node)
    node = basic.DenseBlock().build(hp, node)
    return node
def build(self, hp, inputs=None):
    """Build the classification head on top of the single input node.

    Validates the data's last dimension against ``num_classes`` (binary
    targets are expected to be a single column), reduces any higher-rank
    tensor to a vector, applies optional dropout, and finishes with a
    dense projection plus sigmoid (binary cross-entropy loss) or softmax.
    """
    if self.num_classes:
        # Binary classification uses one output column, not two.
        expected = 1 if self.num_classes <= 2 else self.num_classes
        if self.output_shape[-1] != expected:
            raise ValueError('The data doesn\'t match the expected shape. '
                             'Expecting {} but got {}'.format(
                                 expected, self.output_shape[-1]))
    inputs = nest.flatten(inputs)
    utils.validate_num_inputs(inputs, 1)
    output_node = inputs[0]
    # Collapse spatial/temporal dimensions so Dense sees a vector.
    if len(output_node.shape) > 2:
        output_node = reduction.SpatialReduction().build(hp, output_node)
    # Dropout rate: fixed when configured, otherwise a tuned choice.
    if self.dropout_rate is None:
        dropout_rate = hp.Choice('dropout_rate',
                                 [0.0, 0.25, 0.5],
                                 default=0)
    else:
        dropout_rate = self.dropout_rate
    if dropout_rate > 0:
        output_node = layers.Dropout(dropout_rate)(output_node)
    output_node = layers.Dense(self.output_shape[-1])(output_node)
    if self.loss == 'binary_crossentropy':
        return layers.Activation(activations.sigmoid,
                                 name=self.name)(output_node)
    return layers.Softmax(name=self.name)(output_node)
def build(self, hp, inputs=None):
    """Build the head graph: reduce, optional dropout, dense, activation.

    The final activation is sigmoid when the configured loss is binary
    cross-entropy, softmax otherwise.
    """
    inputs = nest.flatten(inputs)
    utils.validate_num_inputs(inputs, 1)
    output_node = inputs[0]
    # Collapse spatial/temporal dimensions so Dense sees a vector.
    if len(output_node.shape) > 2:
        output_node = reduction.SpatialReduction().build(hp, output_node)
    # Dropout: fixed when configured, otherwise a tuned choice.
    dropout = (
        self.dropout
        if self.dropout is not None
        else hp.Choice("dropout", [0.0, 0.25, 0.5], default=0)
    )
    if dropout > 0:
        output_node = layers.Dropout(dropout)(output_node)
    output_node = layers.Dense(self.shape[-1])(output_node)
    if isinstance(self.loss, keras.losses.BinaryCrossentropy):
        return layers.Activation(activations.sigmoid, name=self.name)(
            output_node
        )
    return layers.Softmax(name=self.name)(output_node)
def build(self, hp, inputs=None):
    """Build the text pipeline for the tuned or configured block type.

    'ngram' vectorizes to bag-of-ngrams followed by dense layers; the
    other types tokenize to an int sequence and use a Transformer
    ('transformer') or an Embedding+Conv stack ('vanilla'), then reduce
    and densify.
    """
    node = nest.flatten(inputs)[0]
    # Both hyperparameters fall back to tuned choices when unset.
    block_type = self.block_type or hp.Choice(
        'block_type',
        ['vanilla', 'transformer', 'ngram'],
        default='vanilla')
    max_tokens = self.max_tokens or hp.Choice(
        'max_tokens', [500, 5000, 20000], default=5000)

    if block_type == 'ngram':
        node = preprocessing.TextToNgramVector(
            max_tokens=max_tokens).build(hp, node)
        return basic.DenseBlock().build(hp, node)

    node = preprocessing.TextToIntSequence(
        max_tokens=max_tokens).build(hp, node)
    if block_type == 'transformer':
        node = basic.Transformer(
            # +1 reserves an index for out-of-vocabulary tokens.
            max_features=max_tokens + 1,
            pretraining=self.pretraining).build(hp, node)
    else:
        node = basic.Embedding(
            max_features=max_tokens + 1,
            pretraining=self.pretraining).build(hp, node)
        node = basic.ConvBlock().build(hp, node)
    node = reduction.SpatialReduction().build(hp, node)
    node = basic.DenseBlock().build(hp, node)
    return node
def test_spatial_reduction():
    # Run the standard block exam on an image-shaped input and verify
    # that 'reduction_type' is the hyperparameter the block registers.
    block = reduction.SpatialReduction()
    image_input = tf.keras.Input(shape=(32, 32, 3), dtype=tf.float32)
    utils.block_basic_exam(block, image_input, ['reduction_type'])