Example #1
0
    def build_subnetwork(self, features, logits_dimension, training,
                         iteration_step, summary, previous_ensemble):
        """See `adanet.subnetwork.Builder`."""

        # Record the current name scope so weight decay can be scoped later.
        self._name_scope = tf.get_default_graph().get_name_scope()

        # This builder only accepts a single image feature and multi-class
        # heads; reject anything else up front.
        if len(features) != 1:
            raise ValueError(
                "Features dict must only contain a single image tensor; got {}"
                .format(features))
        if logits_dimension == 1:
            raise ValueError("Only multi-class classification is supported")

        image_tensor = tf.to_float(list(features.values())[0])

        network_fn = nets_factory.get_network_fn(
            self._model_name,
            num_classes=logits_dimension,
            weight_decay=self._weight_decay,
            is_training=training)
        logits, endpoints = network_fn(image_tensor,
                                       config=self._config,
                                       current_step=iteration_step)

        # Expose the auxiliary logits (when the network produces them) to
        # subnetworks built in later iterations.
        if "AuxLogits" in endpoints:
            persisted = {"aux_logits": endpoints["AuxLogits"]}
        else:
            persisted = {}
        return adanet.Subnetwork(last_layer=logits,
                                 logits=logits,
                                 complexity=1,
                                 persisted_tensors=persisted)
Example #2
0
    def build_subnetwork(self,
                         features,
                         logits_dimension,
                         training,
                         iteration_step,
                         summary,
                         previous_ensemble=None):
        """See `adanet.subnetwork.Builder`.

        Builds a small convnet: `self._n_convs` conv/max-pool stages followed
        by a dense hidden layer, and returns an `adanet.Subnetwork`.
        """

        images = list(features.values())[0]
        x = images

        for i in range(self._n_convs):
            x = Conv2D(32, kernel_size=7, activation='relu')(x)
            x = MaxPooling2D(strides=2)(x)

        x = Flatten()(x)
        x = Dense(120, activation='relu')(x)

        # BUG FIX: the logits layer must emit `logits_dimension` units so the
        # subnetwork output matches the head; it previously hard-coded 120
        # (copy-pasted from the hidden layer above).
        logits = Dense(logits_dimension)(x)

        # Every candidate shares this architecture, so a constant complexity
        # measure suffices.
        complexity = tf.constant(1)

        # Persist the conv depth so later iterations can read it back.
        persisted_tensors = {'n_convs': tf.constant(self._n_convs)}

        return adanet.Subnetwork(last_layer=x,
                                 logits=logits,
                                 complexity=complexity,
                                 persisted_tensors=persisted_tensors)
Example #3
0
    def build_subnetwork(self,
                         features,
                         logits_dimension,
                         training,
                         iteration_step,
                         summary,
                         previous_ensemble=None):
        """See `adanet.subnetwork.Builder`."""

        net = tf.to_float(features[FEATURES_KEY])
        init = tf.glorot_uniform_initializer(seed=self._seed)

        # Fully-connected tower: `self._num_layers` ReLU layers of equal width.
        for _ in range(self._num_layers):
            net = tf.layers.dense(net,
                                  units=self._layer_size,
                                  activation=tf.nn.relu,
                                  kernel_initializer=init)

        # Logits stay linear; the head applies the activation downstream.
        logits = tf.layers.dense(net,
                                 units=logits_dimension,
                                 kernel_initializer=init)

        # Persisted tensors become available to subnetworks built in later
        # iterations.
        persisted = {_NUM_LAYERS_KEY: tf.constant(self._num_layers)}

        return adanet.Subnetwork(last_layer=net,
                                 logits=logits,
                                 complexity=self._measure_complexity(),
                                 persisted_tensors=persisted)
Example #4
0
    def build_subnetwork(self,
                         features,
                         logits_dimension,
                         training,
                         iteration_step,
                         summary,
                         previous_ensemble=None):
        """See `adanet.subnetwork.Builder`.

        Flattens the input image and stacks `self._num_layers` dense ReLU
        layers before a 10-way linear logits layer.
        """

        images = list(features.values())[0]
        kernel_initializer = tf.glorot_uniform_initializer(seed=self._seed)

        # BUG FIX: the hidden layers' outputs were previously discarded --
        # `last_layer` was frozen at the flattened input while the loop wrote
        # to an unused variable, so the dense stack had no effect on the
        # logits. Thread `last_layer` through the loop instead (matching the
        # sibling builders in this file).
        last_layer = tf.keras.layers.Flatten()(images)
        for _ in range(self._num_layers):
            last_layer = tf.layers.dense(last_layer,
                                         units=self._layer_size,
                                         activation=tf.nn.relu,
                                         kernel_initializer=kernel_initializer)
        logits = tf.layers.dense(last_layer,
                                 units=10,
                                 kernel_initializer=kernel_initializer)

        # Make the depth visible to subnetworks built in later iterations.
        persisted_tensors = {_NUM_LAYERS_KEY: tf.constant(self._num_layers)}
        return adanet.Subnetwork(last_layer=last_layer,
                                 logits=logits,
                                 complexity=self._measure_complexity(),
                                 persisted_tensors=persisted_tensors)
Example #5
0
    def build_subnetwork(self,
                         features,
                         logits_dimension,
                         training,
                         iteration_step,
                         summary,
                         previous_ensemble=None):
        """See `adanet.subnetwork.Builder`."""

        net = tf.to_float(features[utils.FEATURES_KEY])
        init = tf.glorot_uniform_initializer(seed=self._seed)
        # Stack `self._num_layers` equal-width ReLU layers.
        for _ in range(self._num_layers):
            net = tf.layers.dense(net,
                                  units=self._layer_size,
                                  activation=tf.nn.relu,
                                  kernel_initializer=init)
        # Linear logits; the head applies the activation.
        logits = tf.layers.dense(net,
                                 units=logits_dimension,
                                 kernel_initializer=init)

        # Expose the depth to subnetworks built in later iterations.
        persisted = {_NUM_LAYERS_KEY: tf.constant(self._num_layers)}
        return adanet.Subnetwork(last_layer=net,
                                 logits=logits,
                                 complexity=self._measure_complexity(),
                                 persisted_tensors=persisted)
Example #6
0
  def build_subnetwork(self,
                       features,
                       logits_dimension,
                       training,
                       iteration_step,
                       summary,
                       previous_ensemble=None):
    """See `adanet.subnetwork.Builder`.

    Builds a 1-D convnet (three conv/pool stages, one dense layer) over the
    `images` feature and returns an `adanet.Subnetwork`.
    """
    images = features['images']
    kernel_initializer = tf.keras.initializers.he_normal(seed=self._seed)

    # BUG FIX: the second and third conv layers previously read from `images`
    # instead of `x`, silently discarding the earlier conv/pool stages. Each
    # stage now consumes the previous stage's output. (A stray debug `print`
    # of the input tensor was also removed.)
    x = images
    for filters in (16, 32, 64):
      x = tf.layers.conv1d(
          x,
          filters=filters,
          kernel_size=32,
          padding="same",
          activation="relu",
          kernel_initializer=kernel_initializer)
      # NOTE(review): pool_size=1, strides=1 is an identity op; confirm
      # whether real downsampling was intended here.
      x = tf.layers.max_pooling1d(x, pool_size=1, strides=1)

    x = tf.layers.flatten(x)

    x = tf.layers.dense(
        x, units=512, activation="relu", kernel_initializer=kernel_initializer)

    # The `Head` passed to adanet.Estimator will apply the softmax activation.
    logits = tf.layers.dense(
        x, units=3, activation=None, kernel_initializer=kernel_initializer)

    # Use a constant complexity measure, since all subnetworks have the same
    # architecture and hyperparameters.
    complexity = tf.constant(1)

    return adanet.Subnetwork(
        last_layer=x,
        logits=logits,
        complexity=complexity,
        persisted_tensors={})
Example #7
0
 def __init__(self, num_layers):
     # Expose the layer count to later iterations via the `shared` field.
     shared_tensors = {"num_layers": tf.constant(num_layers)}
     subnetwork = adanet.Subnetwork(last_layer=[1],
                                    logits=[1],
                                    complexity=1,
                                    shared=shared_tensors)
     self._weighted_subnetworks = [
         adanet.WeightedSubnetwork(name=None,
                                   iteration_number=None,
                                   weight=None,
                                   logits=None,
                                   subnetwork=subnetwork)
     ]
 def __init__(self, num_layers):
   # Record the layer count so later iterations can read it back.
   subnetwork = adanet.Subnetwork(
       last_layer=[1],
       logits=[1],
       complexity=1,
       persisted_tensors={"num_layers": tf.constant(num_layers)})
   self._weighted_subnetworks = [
       adanet.WeightedSubnetwork(
           name=None,
           weight=None,
           logits=None,
           subnetwork=subnetwork)
   ]
Example #9
0
    def build_subnetwork(self,
                         features,
                         logits_dimension,
                         training,
                         iteration_step,
                         summary,
                         previous_ensemble=None):
        """See `adanet.subnetwork.Builder`.

        Builds a NASNet-CIFAR candidate under a slim arg_scope and shares
        its cell/filter counts with future iterations via `shared`.
        """

        # Prepare the input.
        assert len(
            self._feature_columns) == 1, "Got feature columns: {}".format(
                self._feature_columns)
        images = tf.to_float(features[self._feature_columns[0].name])
        # Record the current scope so the subnetwork's weights can be located
        # later (e.g. for weight decay).
        self._name_scope = tf.get_default_graph().get_name_scope()

        seed = self._seed
        if seed is not None and previous_ensemble:
            # Deterministically change the seed for different iterations so that
            # subnetworks are not correlated.
            seed += len(previous_ensemble.weighted_subnetworks)
        # NOTE(review): `seed` is computed above but never passed to the model
        # construction below, so it currently has no effect -- confirm intent.

        arg_scope = nasnet.nasnet_cifar_arg_scope(
            weight_decay=self._weight_decay)

        with tf.contrib.slim.arg_scope(arg_scope):
            build_fn = nasnet.build_nasnet_cifar
            logits, end_points = build_fn(images,
                                          num_classes=logits_dimension,
                                          is_training=training,
                                          config=self._hparams)
        # Use the global-average-pooled features as the subnetwork's
        # last layer.
        last_layer = end_points["global_pool"]

        # Architecture hyperparameters made visible to the next iteration's
        # candidate builders.
        subnetwork_shared_data = {
            _PREVIOUS_NUM_CELLS: tf.constant(self._num_cells),
            _PREVIOUS_CONV_FILTERS: tf.constant(self._num_conv_filters)
        }

        return adanet.Subnetwork(last_layer=last_layer,
                                 logits=logits,
                                 complexity=1,
                                 shared=subnetwork_shared_data)
Example #10
0
    def build_subnetwork(self,
                         features,
                         logits_dimension,
                         training,
                         iteration_step,
                         summary,
                         previous_ensemble=None):
        """See `adanet.subnetwork.Builder`."""

        def _glorot():
            # Fresh Glorot initializer seeded with the builder's fixed seed.
            return tf.compat.v1.glorot_uniform_initializer(seed=self._seed)

        net = tf.compat.v1.feature_column.input_layer(
            features=features, feature_columns=self._feature_columns)
        # Alternate dense + dropout blocks, `self._num_layers` times.
        for _ in range(self._num_layers):
            net = tf.compat.v1.layers.dense(net,
                                            units=self._layer_size,
                                            activation=tf.nn.relu,
                                            kernel_initializer=_glorot())
            net = tf.compat.v1.layers.dropout(net,
                                              rate=self._dropout,
                                              seed=self._seed,
                                              training=training)
        logits = tf.compat.v1.layers.dense(net,
                                           units=logits_dimension,
                                           kernel_initializer=_glorot())

        # Approximate the Rademacher complexity of this subnetwork as the square-
        # root of its depth.
        complexity = tf.sqrt(tf.cast(self._num_layers, dtype=tf.float32))

        # Emit the summaries at the top-level scope.
        with tf.name_scope(""):
            summary.scalar("complexity", complexity)
            summary.scalar("num_layers", self._num_layers)

        # Share the depth with subnetworks built in later iterations.
        shared = {_NUM_LAYERS_KEY: self._num_layers}
        return adanet.Subnetwork(last_layer=net,
                                 logits=logits,
                                 complexity=complexity,
                                 shared=shared)
Example #11
0
    def build_subnetwork(self,
                         features,
                         logits_dimension,
                         training,
                         iteration_step,
                         summary,
                         previous_ensemble=None):
        """See `adanet.subnetwork.Builder`."""
        images = list(features.values())[0]

        # Visualize some of the input images in TensorBoard.
        summary.image("images", images)

        init = tf.keras.initializers.he_normal(seed=self._seed)
        net = tf.keras.layers.Conv2D(filters=16,
                                     kernel_size=3,
                                     padding="same",
                                     activation="relu",
                                     kernel_initializer=init)(images)
        net = tf.keras.layers.MaxPool2D(pool_size=2, strides=2)(net)
        net = tf.keras.layers.Flatten()(net)
        net = tf.keras.layers.Dense(units=64,
                                    activation="relu",
                                    kernel_initializer=init)(net)

        # The `Head` passed to adanet.Estimator will apply the softmax activation.
        logits = tf.keras.layers.Dense(units=10,
                                       activation=None,
                                       kernel_initializer=init)(net)

        # Use a constant complexity measure, since all subnetworks have the same
        # architecture and hyperparameters.
        complexity = tf.constant(1)

        return adanet.Subnetwork(last_layer=net,
                                 logits=logits,
                                 complexity=complexity,
                                 persisted_tensors={})