Example #1
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        shape = input_node.shape.as_list()
        if len(shape) != 3:
            raise ValueError('Expect the input tensor to have '
                             'exactly 3 dimensions for rnn models, '
                             'but got {shape}'.format(shape=input_node.shape))

        feature_size = shape[-1]
        output_node = input_node

        bidirectional = self.bidirectional
        if bidirectional is None:
            bidirectional = hp.Boolean('bidirectional', default=True)
        layer_type = self.layer_type or hp.Choice(
            'layer_type', ['gru', 'lstm'], default='lstm')
        num_layers = self.num_layers or hp.Choice('num_layers', [1, 2, 3],
                                                  default=2)
        rnn_layers = {'gru': layers.GRU, 'lstm': layers.LSTM}
        in_layer = rnn_layers[layer_type]
        for i in range(num_layers):
            return_sequences = True
            if i == num_layers - 1:
                return_sequences = self.return_sequences
            if bidirectional:
                output_node = layers.Bidirectional(
                    in_layer(feature_size,
                             return_sequences=return_sequences))(output_node)
            else:
                output_node = in_layer(
                    feature_size,
                    return_sequences=return_sequences)(output_node)
        return output_node
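
The hp.* calls above do double duty: they register a hyperparameter in the search space and return a concrete value for this build. A minimal sketch of that mechanic, assuming the modern keras_tuner package (these examples may predate its rename from kerastuner):

import keras_tuner

# On a fresh HyperParameters object each call returns its default;
# during a search, the tuner substitutes the sampled value instead.
hp = keras_tuner.HyperParameters()
print(hp.Boolean('bidirectional', default=True))                 # True
print(hp.Choice('layer_type', ['gru', 'lstm'], default='lstm'))  # lstm
print(hp.Choice('num_layers', [1, 2, 3], default=2))             # 2
print(hp.values)  # all three choices recorded by name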
Example #2
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node
        output_node = Flatten().build(hp, output_node)
        layer_stack = hp.Choice('layer_stack', ['dense-bn-act', 'dense-act'],
                                default='dense-bn-act')
        dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5], default=0.5)
        for i in range(hp.Choice('num_layers', [1, 2, 3], default=2)):
            units = hp.Choice('units_{i}'.format(i=i),
                              [16, 32, 64, 128, 256, 512, 1024],
                              default=32)
            if layer_stack == 'dense-bn-act':
                output_node = tf.keras.layers.Dense(units)(output_node)
                output_node = tf.keras.layers.BatchNormalization()(output_node)
                output_node = tf.keras.layers.ReLU()(output_node)
                output_node = tf.keras.layers.Dropout(dropout_rate)(
                    output_node)
            elif layer_stack == 'dense-act':
                output_node = tf.keras.layers.Dense(units)(output_node)
                output_node = tf.keras.layers.ReLU()(output_node)
                output_node = tf.keras.layers.Dropout(dropout_rate)(
                    output_node)
        return output_node
Example #3
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        kernel_size = hp.Choice('kernel_size', [3, 5, 7], default=3)
        for i in range(hp.Choice('num_blocks', [1, 2, 3], default=2)):
            output_node = tf.keras.layers.Conv2D(
                hp.Choice('filters_{i}_1'.format(i=i), [16, 32, 64],
                          default=32),
                kernel_size,
                padding=self._get_padding(kernel_size,
                                          output_node))(output_node)

            output_node = tf.keras.layers.Conv2D(
                hp.Choice('filters_{i}_2'.format(i=i), [16, 32, 64],
                          default=32),
                kernel_size,
                padding=self._get_padding(kernel_size,
                                          output_node))(output_node)

            output_node = tf.keras.layers.MaxPool2D(
                kernel_size - 1,
                padding=self._get_padding(kernel_size - 1,
                                          output_node))(output_node)
        return output_node
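
The example relies on a self._get_padding helper that is not shown here. A plausible sketch of it, offered as an assumption rather than the library's actual code: keep 'valid' padding while every spatial dimension can still absorb the kernel, and fall back to 'same' once the feature map gets small, so stacked blocks never shrink the tensor away.

def get_padding(kernel_size, output_node):
    # Hypothetical stand-in for self._get_padding: 'valid' while every
    # spatial axis is at least twice the kernel size, otherwise 'same'.
    if all(kernel_size * 2 <= length
           for length in output_node.shape[1:-1]):
        return 'valid'
    return 'same'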
Example #4
    def build(self, hp, inputs=None):
        if self.identity:
            return IdentityLayer(name=self.name)(inputs)
        if self.num_classes:
            expected = self.num_classes if self.num_classes > 2 else 1
            if self.output_shape[-1] != expected:
                raise ValueError('The data doesn\'t match the expected shape. '
                                 'Expecting {} but got {}'.format(
                                     expected, self.output_shape[-1]))
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        if self.dropout_rate is not None:
            dropout_rate = self.dropout_rate
        else:
            dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5],
                                     default=0.0)

        if dropout_rate > 0:
            output_node = tf.keras.layers.Dropout(dropout_rate)(output_node)
        output_node = block_module.Flatten().build(hp, output_node)
        output_node = tf.keras.layers.Dense(self.output_shape[-1])(output_node)
        if self.loss == 'binary_crossentropy':
            output_node = Sigmoid(name=self.name)(output_node)
        else:
            output_node = tf.keras.layers.Softmax(name=self.name)(output_node)
        return output_node
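
The expected computation at the top encodes the usual head-sizing rule: binary problems get a single sigmoid unit, anything with more than two classes gets one softmax unit per class. In isolation:

# 2 classes -> 1 unit (sigmoid); N > 2 classes -> N units (softmax).
for num_classes in (2, 3, 10):
    expected = num_classes if num_classes > 2 else 1
    print(num_classes, '->', expected)  # 2 -> 1, 3 -> 3, 10 -> 10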
Example #5
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node
        output_node = reduction.Flatten().build(hp, output_node)

        num_layers = self.num_layers or hp.Choice('num_layers', [1, 2, 3],
                                                  default=2)
        use_batchnorm = self.use_batchnorm
        if use_batchnorm is None:
            use_batchnorm = hp.Boolean('use_batchnorm', default=False)
        if self.dropout_rate is not None:
            dropout_rate = self.dropout_rate
        else:
            dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5],
                                     default=0)

        for i in range(num_layers):
            units = hp.Choice('units_{i}'.format(i=i),
                              [16, 32, 64, 128, 256, 512, 1024],
                              default=32)
            output_node = layers.Dense(units)(output_node)
            if use_batchnorm:
                output_node = layers.BatchNormalization()(output_node)
            output_node = layers.ReLU()(output_node)
            if dropout_rate > 0:
                output_node = layers.Dropout(dropout_rate)(output_node)
        return output_node
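
Because every tunable above consults the corresponding self attribute first, a caller can pin any of them at construction time. The same effect is available from the tuner side by fixing the value before build runs; a sketch, again assuming keras_tuner:

import keras_tuner

hp = keras_tuner.HyperParameters()
hp.Fixed('num_layers', 1)  # pin the value before build() asks for it
# A later hp.Choice('num_layers', [1, 2, 3], default=2) in build()
# now returns the fixed value instead of sampling.
print(hp.Choice('num_layers', [1, 2, 3], default=2))  # 1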
Example #6
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        # No need to reduce.
        if len(output_node.shape) <= 2:
            return output_node

        reduction_type = self.reduction_type or hp.Choice('reduction_type',
                                                          ['flatten',
                                                           'global_max',
                                                           'global_avg'],
                                                          default='global_avg')

        if reduction_type == 'flatten':
            output_node = Flatten().build(hp, output_node)
        elif reduction_type == 'global_max':
            output_node = tf.math.reduce_max(output_node, axis=-2)
        elif reduction_type == 'global_avg':
            output_node = tf.math.reduce_mean(output_node, axis=-2)
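        # Note: 'global_min' is not offered by the hp.Choice above, so this
        # branch is reachable only when self.reduction_type is set to it.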
        elif reduction_type == 'global_min':
            output_node = tf.math.reduce_min(output_node, axis=-2)

        return output_node
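
The reductions here use raw tf.math ops over axis -2 rather than pooling layers; for a rank-3 (batch, steps, features) tensor that is exactly what the corresponding global pooling layer does. A quick check:

import tensorflow as tf

x = tf.random.normal((2, 10, 8))                # (batch, steps, features)
a = tf.math.reduce_mean(x, axis=-2)             # as in the branch above
b = tf.keras.layers.GlobalAveragePooling1D()(x)
print(a.shape, b.shape)                         # (2, 8) (2, 8)
print(float(tf.reduce_max(tf.abs(a - b))))      # ~0.0

Note that for rank-4 inputs axis=-2 collapses only one spatial axis, unlike GlobalAveragePooling2D; Example #19 below uses rank-aware pooling layers instead.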
Example #7
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        if len(input_node.shape) > 2:
            return tf.keras.layers.Flatten()(input_node)
        return input_node
Example #8
    def build(self, hp, inputs=None):
        if self.num_classes:
            expected = self.num_classes if self.num_classes > 2 else 1
            if self.output_shape[-1] != expected:
                raise ValueError('The data doesn\'t match the expected shape. '
                                 'Expecting {} but got {}'.format(
                                     expected, self.output_shape[-1]))
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        # Reduce the tensor to a vector.
        if len(output_node.shape) > 2:
            output_node = reduction.SpatialReduction().build(hp, output_node)

        if self.dropout_rate is not None:
            dropout_rate = self.dropout_rate
        else:
            dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5],
                                     default=0)

        if dropout_rate > 0:
            output_node = layers.Dropout(dropout_rate)(output_node)
        output_node = layers.Dense(self.output_shape[-1])(output_node)
        if self.loss == 'binary_crossentropy':
            output_node = keras_layers.Sigmoid(name=self.name)(output_node)
        else:
            output_node = layers.Softmax(name=self.name)(output_node)
        return output_node
Example #9
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node
        output_node = hyper_block.Flatten().build(hp, output_node)
        output_node = tf.keras.layers.Dense(self.output_shape[-1])(output_node)
        return output_node
Example #10
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node
        output_node = hyper_block.Flatten().build(hp, output_node)
        output_node = tf.keras.layers.Dense(self.output_shape[-1])(output_node)
        if self.binary:
            output_node = tf.keras.activations.sigmoid(output_node)
        else:
            output_node = tf.keras.layers.Softmax()(output_node)
        return output_node
Example #11
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        kernel_size = self.kernel_size or hp.Choice('kernel_size', [3, 5, 7],
                                                    default=3)
        num_blocks = self.num_blocks or hp.Choice('num_blocks', [1, 2, 3],
                                                  default=2)
        num_layers = self.num_layers or hp.Choice('num_layers', [1, 2],
                                                  default=2)
        separable = self.separable
        if separable is None:
            separable = hp.Boolean('separable', default=False)

        if separable:
            conv = utils.get_sep_conv(input_node.shape)
        else:
            conv = utils.get_conv(input_node.shape)

        max_pooling = self.max_pooling
        if max_pooling is None:
            max_pooling = hp.Boolean('max_pooling', default=True)
        pool = utils.get_max_pooling(input_node.shape)

        if self.dropout_rate is not None:
            dropout_rate = self.dropout_rate
        else:
            dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5],
                                     default=0)

        for i in range(num_blocks):
            for j in range(num_layers):
                output_node = conv(hp.Choice('filters_{i}_{j}'.format(i=i,
                                                                      j=j),
                                             [16, 32, 64, 128, 256, 512],
                                             default=32),
                                   kernel_size,
                                   padding=self._get_padding(
                                       kernel_size, output_node),
                                   activation='relu')(output_node)
            if max_pooling:
                output_node = pool(kernel_size - 1,
                                   padding=self._get_padding(
                                       kernel_size - 1,
                                       output_node))(output_node)
            if dropout_rate > 0:
                output_node = layers.Dropout(dropout_rate)(output_node)
        return output_node
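
This variant picks its layer classes through utils helpers that dispatch on input rank. They are not shown in the source; a sketch of what they presumably look like (the names come from the calls above, the bodies are an assumption):

from tensorflow.keras import layers

def get_conv(shape):
    # rank 3 -> Conv1D, rank 4 -> Conv2D, rank 5 -> Conv3D
    return [layers.Conv1D, layers.Conv2D, layers.Conv3D][len(shape) - 3]

def get_sep_conv(shape):
    # Keras has no SeparableConv3D, so only the 1D/2D cases exist.
    return [layers.SeparableConv1D,
            layers.SeparableConv2D][len(shape) - 3]

def get_max_pooling(shape):
    return [layers.MaxPool1D, layers.MaxPool2D,
            layers.MaxPool3D][len(shape) - 3]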
Example #12
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        block_type = hp.Choice('block_type', ['resnet', 'xception', 'vanilla'],
                               default='resnet')

        if block_type == 'resnet':
            output_node = ResNetBlock().build(hp, output_node)
        elif block_type == 'xception':
            output_node = XceptionBlock().build(hp, output_node)
        elif block_type == 'vanilla':
            output_node = ConvBlock().build(hp, output_node)
        return output_node
Example #13
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        shape = input_node.shape.as_list()
        if len(shape) < 3:
            raise ValueError("Expect the input tensor to have "
                             "at least 3 dimensions for rnn models, "
                             "but got {shape}".format(shape=input_node.shape))

        # Flatten feature_list to a single dimension.
        # Final shape is 3-D: (num_samples, time_steps, features).
        feature_size = np.prod(shape[2:])
        input_node = tf.reshape(input_node, [-1, shape[1], feature_size])
        output_node = input_node

        in_layer = const.Constant.RNN_LAYERS[hp.Choice('rnn_type',
                                                       ['gru', 'lstm'],
                                                       default='lstm')]
        choice_of_layers = hp.Choice('num_layers', [1, 2, 3], default=2)

        bidirectional = self.bidirectional
        if bidirectional is None:
            bidirectional = hp.Choice('bidirectional', [True, False],
                                      default=True)

        return_sequences = self.return_sequences
        if return_sequences is None:
            return_sequences = hp.Choice('return_sequences', [True, False],
                                         default=True)

        for i in range(choice_of_layers):
            temp_return_sequences = True
            if i == choice_of_layers - 1:
                temp_return_sequences = return_sequences
            if bidirectional:
                output_node = tf.keras.layers.Bidirectional(
                    in_layer(
                        feature_size,
                        return_sequences=temp_return_sequences))(output_node)
            else:
                output_node = in_layer(
                    feature_size,
                    return_sequences=temp_return_sequences)(output_node)

        return output_node
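
The reshape at the top is what distinguishes this variant from Example #1: any trailing feature axes are merged so the RNN always receives a rank-3 tensor. The step in isolation:

import numpy as np
import tensorflow as tf

x = tf.zeros((4, 10, 3, 8))                    # (batch, time, h, w)
feature_size = np.prod(x.shape.as_list()[2:])  # 3 * 8 = 24
y = tf.reshape(x, [-1, x.shape[1], feature_size])
print(y.shape)                                 # (4, 10, 24)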
Example #14
    def build(self, hp, inputs=None):
        if self.output_dim and self.output_shape[-1] != self.output_dim:
            raise ValueError('The data doesn\'t match the output_dim. '
                             'Expecting {} but got {}'.format(
                                 self.output_dim, self.output_shape[-1]))
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        if self.dropout_rate is not None:
            dropout_rate = self.dropout_rate
        else:
            dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5],
                                     default=0.0)

        if dropout_rate > 0:
            output_node = tf.keras.layers.Dropout(dropout_rate)(output_node)
        output_node = block.Flatten().build(hp, output_node)
        output_node = tf.keras.layers.Dense(self.output_shape[-1])(output_node)
        return output_node
Example #15
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node
        channel_axis = 1 \
            if tf.keras.backend.image_data_format() == 'channels_first' else -1

        # Parameters
        # [general]
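        # hp.Range is the pre-1.0 Keras Tuner spelling; later releases
        # call this hp.Int.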
        kernel_size = hp.Range("kernel_size", 3, 5)
        initial_strides = (2, 2)
        activation = hp.Choice("activation", ["relu", "selu"])
        # [Entry Flow]
        conv2d_filters = hp.Choice("conv2d_num_filters", [32, 64, 128])
        sep_filters = hp.Range("sep_num_filters", 128, 768)
        # [Middle Flow]
        residual_blocks = hp.Range("num_residual_blocks", 2, 8)
        # [Exit Flow]

        # Model
        # Initial conv2d
        dims = conv2d_filters
        output_node = self._conv(dims,
                                 kernel_size=(kernel_size, kernel_size),
                                 activation=activation,
                                 strides=initial_strides)(output_node)
        # Separable convs
        dims = sep_filters
        for _ in range(residual_blocks):
            output_node = self._residual(
                dims,
                activation=activation,
                max_pooling=False,
                channel_axis=channel_axis)(output_node)
        # Exit
        dims *= 2
        output_node = self._residual(dims,
                                     activation=activation,
                                     max_pooling=True,
                                     channel_axis=channel_axis)(output_node)

        return output_node
Example #16
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        kernel_size = self.kernel_size or hp.Choice('kernel_size',
                                                    [3, 5, 7],
                                                    default=3)
        num_blocks = self.num_blocks or hp.Choice('num_blocks',
                                                  [1, 2, 3],
                                                  default=2)
        separable = self.separable
        if separable is None:
            separable = hp.Choice('separable', [True, False], default=False)

        if separable:
            conv = utils.get_sep_conv(input_node.shape)
        else:
            conv = utils.get_conv(input_node.shape)
        pool = utils.get_max_pooling(input_node.shape)

        for i in range(num_blocks):
            output_node = conv(
                hp.Choice('filters_{i}_1'.format(i=i),
                          [16, 32, 64],
                          default=32),
                kernel_size,
                padding=self._get_padding(kernel_size, output_node),
                activation='relu')(output_node)
            output_node = conv(
                hp.Choice('filters_{i}_2'.format(i=i),
                          [16, 32, 64],
                          default=32),
                kernel_size,
                padding=self._get_padding(kernel_size, output_node),
                activation='relu')(output_node)
            output_node = pool(
                kernel_size - 1,
                padding=self._get_padding(kernel_size - 1,
                                          output_node))(output_node)
        return output_node
Example #17
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        separable = self.separable
        if separable is None:
            separable = hp.Choice('separable', [True, False], default=False)
        if separable:
            conv = utils.get_sep_conv(input_node.shape)
        else:
            conv = utils.get_conv(input_node.shape)
        pool = utils.get_max_pooling(input_node.shape)
        dropout = utils.get_dropout(input_node.shape)
        kernel_size = hp.Choice('kernel_size', [3, 5, 7], default=3)
        dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5], default=0.5)

        for i in range(hp.Choice('num_blocks', [1, 2, 3], default=2)):
            if dropout_rate > 0:
                output_node = dropout(dropout_rate)(output_node)
            output_node = conv(hp.Choice('filters_{i}_1'.format(i=i),
                                         [16, 32, 64],
                                         default=32),
                               kernel_size,
                               padding=self._get_padding(
                                   kernel_size, output_node))(output_node)

            output_node = conv(hp.Choice('filters_{i}_2'.format(i=i),
                                         [16, 32, 64],
                                         default=32),
                               kernel_size,
                               padding=self._get_padding(
                                   kernel_size, output_node))(output_node)

            output_node = pool(kernel_size - 1,
                               padding=self._get_padding(
                                   kernel_size - 1, output_node))(output_node)
        return output_node
Example #18
    def build(self, hp, inputs=None):
        if self.num_classes and self.output_shape[-1] != self.num_classes:
            raise ValueError('The data doesn\'t match the num_classes. '
                             'Expecting {} but got {}'.format(
                                 self.num_classes, self.output_shape[-1]))
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        if self.dropout_rate is not None:
            dropout_rate = self.dropout_rate
        else:
            dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5],
                                     default=0.0)

        if dropout_rate > 0:
            output_node = tf.keras.layers.Dropout(dropout_rate)(output_node)
        output_node = block.Flatten().build(hp, output_node)
        output_node = tf.keras.layers.Dense(self.output_shape[-1])(output_node)
        if self.loss == 'binary_crossentropy':
            output_node = tf.keras.activations.sigmoid(output_node)
        else:
            output_node = tf.keras.layers.Softmax()(output_node)
        return output_node
Example #19
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node

        # No need to reduce.
        if len(output_node.shape) <= 2:
            return output_node

        reduction_type = self.reduction_type or hp.Choice(
            'reduction_type', ['flatten', 'global_max', 'global_avg'],
            default='global_avg')
        if reduction_type == 'flatten':
            output_node = Flatten().build(hp, output_node)
        elif reduction_type == 'global_max':
            output_node = utils.get_global_max_pooling(
                output_node.shape)()(output_node)
        elif reduction_type == 'global_avg':
            output_node = utils.get_global_average_pooling(
                output_node.shape)()(output_node)
        return output_node
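
Unlike Example #6, this version reduces through global pooling layers selected by rank via utils helpers. A sketch of what they presumably look like, mirroring the conv dispatch in Example #11 (an assumption, not the library's code):

from tensorflow.keras import layers

def get_global_max_pooling(shape):
    return [layers.GlobalMaxPool1D, layers.GlobalMaxPool2D,
            layers.GlobalMaxPool3D][len(shape) - 3]

def get_global_average_pooling(shape):
    return [layers.GlobalAveragePooling1D, layers.GlobalAveragePooling2D,
            layers.GlobalAveragePooling3D][len(shape) - 3]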
Example #20
    def build(self, hp, inputs=None):
        inputs = nest.flatten(inputs)
        utils.validate_num_inputs(inputs, 1)
        input_node = inputs[0]
        output_node = input_node
        output_node = Flatten().build(hp, output_node)

        num_layers = self.num_layers or hp.Choice('num_layers', [1, 2, 3],
                                                  default=2)
        use_bn = self.use_batchnorm
        if use_bn is None:
            use_bn = hp.Choice('use_batchnorm', [True, False], default=False)
        if self.dropout_rate is not None:
            dropout_rate = self.dropout_rate
        else:
            dropout_rate = hp.Choice('dropout_rate', [0.0, 0.25, 0.5],
                                     default=0.0)

        for i in range(num_layers):
            units = hp.Choice('units_{i}'.format(i=i),
                              [16, 32, 64, 128, 256, 512, 1024],
                              default=32)
            output_node = tf.keras.layers.Dense(units)(output_node)
            if use_bn:
                output_node = tf.keras.layers.BatchNormalization()(output_node)
            output_node = tf.keras.layers.ReLU()(output_node)
            if dropout_rate > 0:
                output_node = tf.keras.layers.Dropout(dropout_rate)(
                    output_node)
        return output_node