Example #1
0
def _build_keras_model(hparams: kerastuner.HyperParameters) -> tf.keras.Model:
    """Builds a DNN Keras classifier for the iris dataset.

    Args:
      hparams: Holds HyperParameters for tuning.

    Returns:
      A compiled Keras Model.
    """
    # Built with the Functional API; see
    # https://www.tensorflow.org/guide/keras/overview for all API options.
    feature_inputs = [
        keras.layers.Input(shape=(1,), name=key) for key in _FEATURE_KEYS
    ]
    net = keras.layers.concatenate(feature_inputs)
    num_hidden_layers = int(hparams.get('num_layers'))
    for _ in range(num_hidden_layers):
        net = keras.layers.Dense(8, activation='relu')(net)
    probabilities = keras.layers.Dense(3, activation='softmax')(net)

    model = keras.Model(inputs=feature_inputs, outputs=probabilities)
    model.compile(
        optimizer=keras.optimizers.Adam(hparams.get('learning_rate')),
        loss='sparse_categorical_crossentropy',
        metrics=[keras.metrics.SparseCategoricalAccuracy()])
    model.summary(print_fn=absl.logging.info)
    return model
Example #2
0
def _build_keras_model(hparams: kerastuner.HyperParameters,
                       tf_transform_output: tft.TFTransformOutput) -> tf.keras.Model:
  """Creates a Keras WideDeep Classifier model.

  Args:
    hparams: Holds HyperParameters for tuning.
    tf_transform_output: A TFTransformOutput.

  Returns:
    A compiled keras Model.
  """
  # Defines deep (numeric) feature columns and their input layers.
  deep_columns = [
      tf.feature_column.numeric_column(
          key=features.transformed_name(key),
          shape=())
      for key in features.NUMERIC_FEATURE_KEYS
  ]

  input_layers = {
      column.key: tf.keras.layers.Input(name=column.key, shape=(), dtype=tf.float32)
      for column in deep_columns
  }

  # Defines wide (categorical) feature columns; bucket counts are read from
  # the Transform graph so they stay consistent with preprocessing.
  categorical_columns = [
      tf.feature_column.categorical_column_with_identity(
          key=features.transformed_name(key),
          num_buckets=tf_transform_output.num_buckets_for_transformed_feature(
              features.transformed_name(key)),
          default_value=0)
      for key in features.CATEGORICAL_FEATURE_KEYS
  ]

  wide_columns = [
      tf.feature_column.indicator_column(categorical_column)
      for categorical_column in categorical_columns
  ]

  input_layers.update({
      column.categorical_column.key: tf.keras.layers.Input(
          name=column.categorical_column.key, shape=(), dtype=tf.int32)
      for column in wide_columns
  })

  # Build the deep tower; layer count and per-layer widths come from hparams.
  deep = tf.keras.layers.DenseFeatures(deep_columns)(input_layers)
  for n in range(int(hparams.get('n_layers'))):
    deep = tf.keras.layers.Dense(units=hparams.get('n_units_' + str(n + 1)))(deep)

  wide = tf.keras.layers.DenseFeatures(wide_columns)(input_layers)

  output = tf.keras.layers.Dense(features.NUM_CLASSES, activation='softmax')(
      tf.keras.layers.concatenate([deep, wide]))

  model = tf.keras.Model(input_layers, output)
  # 'learning_rate' is the supported keyword; 'lr' is deprecated in tf.keras.
  model.compile(
      loss='sparse_categorical_crossentropy',
      optimizer=tf.keras.optimizers.Adam(
          learning_rate=hparams.get('learning_rate')),
      metrics=[tf.keras.metrics.SparseCategoricalAccuracy()])
  model.summary(print_fn=absl.logging.info)

  return model
Example #3
0
def _build_keras_model(hparams: kerastuner.HyperParameters) -> tf.keras.Model:
    """Builds a binary classifier over the transformed dense/binary features.

    Args:
        hparams: Holds HyperParameters for tuning (hidden layer size).

    Returns:
        A compiled Keras Model.
    """
    # Transformed feature names carry the '_xf' suffix; dense first, then binary.
    transformed_names = [
        f'{name}_xf' for name in (*DENSE_FEATURES, *BINARY_FEATURES)
    ]
    input_layers = {
        name: tf.keras.layers.Input(name=name, shape=(1, ), dtype=tf.float32)
        for name in transformed_names
    }

    net = tf.keras.layers.Concatenate(axis=-1)(list(input_layers.values()))

    hidden_units = int(hparams.get(H_SIZE))
    net = tf.keras.layers.Dense(units=hidden_units, activation='relu')(net)

    probability = tf.keras.layers.Dense(units=1, activation='sigmoid')(net)

    model = tf.keras.Model(input_layers, probability)
    model.compile(loss='binary_crossentropy',
                  optimizer='adam',
                  metrics=[tf.keras.metrics.BinaryAccuracy()])
    model.summary(print_fn=logging.info)
    return model
Example #4
0
def _build_keras_model(data_provider: KerasDataProvider,
                       hparams: kerastuner.HyperParameters) -> tf.keras.Model:
    """Returns a Keras Model for the given data adapter.

    Args:
      data_provider: Data adaptor used to get the task information.
      hparams: Hyperparameters of the model.

    Returns:
      A compiled keras model for the given adapter and hyperparams.

    Raises:
      ValueError: If hparams requests an unsupported optimizer.
    """
    feature_columns = data_provider.get_numeric_feature_columns(
    ) + data_provider.get_embedding_feature_columns()
    input_layers = data_provider.get_input_layers()

    # All input_layers must be consumed for the Keras Model to work.
    assert len(feature_columns) >= len(input_layers)

    x = tf.keras.layers.DenseFeatures(feature_columns)(input_layers)

    hparam_nodes = hparams.get('num_nodes')
    for numnodes in [hparam_nodes] * hparams.get('num_layers'):
        x = tf.keras.layers.Dense(numnodes)(x)
    output = tf.keras.layers.Dense(data_provider.head_size,
                                   activation=data_provider.head_activation,
                                   name='output')(x)

    model = tf.keras.Model(input_layers, output)

    # 'learning_rate' is the supported keyword; 'lr' is deprecated in tf.keras.
    lr = float(hparams.get('learning_rate'))
    optimizer_str = hparams.get('optimizer')
    if optimizer_str == 'Adam':
        optimizer = tf.keras.optimizers.Adam(learning_rate=lr)
    elif optimizer_str == 'Adagrad':
        optimizer = tf.keras.optimizers.Adagrad(learning_rate=lr)
    elif optimizer_str == 'RMSprop':
        optimizer = tf.keras.optimizers.RMSprop(learning_rate=lr)
    elif optimizer_str == 'SGD':
        optimizer = tf.keras.optimizers.SGD(learning_rate=lr)
    else:
        # Previously an unknown value fell through and raised NameError at
        # model.compile; fail fast with a clear message instead.
        raise ValueError(f'Unsupported optimizer: {optimizer_str!r}')

    model.compile(loss=data_provider.loss,
                  optimizer=optimizer,
                  metrics=data_provider.metrics)
    model.summary()

    return model
Example #5
0
def _build_keras_model(hparams: kerastuner.HyperParameters) -> tf.keras.Model:
    """Creates Keras model for testing.

    Args:
      hparams: Holds HyperParameters for tuning.

    Returns:
      A compiled Keras Model.
    """
    model = keras.Sequential()
    model.add(keras.layers.Dense(64, activation='relu', input_shape=(32, )))
    for _ in range(hparams.get('num_layers')):  # pytype: disable=wrong-arg-types
        model.add(keras.layers.Dense(64, activation='relu'))
    model.add(keras.layers.Dense(10, activation='softmax'))
    # CategoricalAccuracy matches the one-hot labels implied by
    # 'categorical_crossentropy'; plain Accuracy compares raw probabilities to
    # labels elementwise and reports ~0 for a softmax head.
    model.compile(
        optimizer=keras.optimizers.Adam(hparams.get('learning_rate')),
        loss='categorical_crossentropy',
        metrics=[tf.keras.metrics.CategoricalAccuracy(name='accuracy')])
    return model
Example #6
0
def _build_keras_model(hparams: kerastuner.HyperParameters) -> tf.keras.Model:
  """Creates a DNN Keras model for classifying iris data.

  Args:
    hparams: Holds HyperParameters for tuning.

  Returns:
    A compiled Keras Model.
  """
  absl.logging.info('HyperParameters config: %s' % hparams.get_config())
  inputs = [keras.layers.Input(shape=(1,), name=f) for f in _FEATURE_KEYS]
  d = keras.layers.concatenate(inputs)
  for _ in range(hparams.get('num_layers')):  # pytype: disable=wrong-arg-types
    d = keras.layers.Dense(8, activation='relu')(d)
  output = keras.layers.Dense(3, activation='softmax')(d)
  model = keras.Model(inputs=inputs, outputs=output)
  # SparseCategoricalAccuracy matches the integer labels implied by
  # 'sparse_categorical_crossentropy'; CategoricalAccuracy expects one-hot.
  model.compile(
      optimizer=keras.optimizers.Adam(hparams.get('learning_rate')),
      loss='sparse_categorical_crossentropy',
      metrics=[keras.metrics.SparseCategoricalAccuracy(name='accuracy')])
  # model.summary() returns None; route the summary text to the log instead
  # of logging the None return value.
  model.summary(print_fn=absl.logging.info)
  return model
Example #7
0
def _build_keras_model(hparams: kerastuner.HyperParameters) -> tf.keras.Model:
    """Creates a DNN Keras model for classifying iris data.

    Args:
      hparams: Holds HyperParameters for tuning.

    Returns:
      A compiled Keras Model.
    """
    model = keras.Sequential()
    model.add(
        keras.layers.Dense(8,
                           activation='relu',
                           input_shape=(len(_FEATURE_KEYS), )))
    for _ in range(hparams.get('num_layers')):  # pytype: disable=wrong-arg-types
        model.add(keras.layers.Dense(8, activation='relu'))
    model.add(keras.layers.Dense(3, activation='softmax'))
    # CategoricalAccuracy matches the 3-class softmax head with
    # 'categorical_crossentropy'; BinaryAccuracy thresholds each output at 0.5
    # and is wrong for multi-class classification.
    model.compile(
        optimizer=keras.optimizers.Adam(hparams.get('learning_rate')),
        loss='categorical_crossentropy',
        metrics=[tf.keras.metrics.CategoricalAccuracy(name='accuracy')])
    # model.summary() returns None; route the summary text to the log instead
    # of logging the None return value.
    model.summary(print_fn=absl.logging.info)
    return model
Example #8
0
def _build_keras_model(
        hparams: kerastuner.HyperParameters,
        autodata_adapter: kma.KerasModelAdapter,
        sequence_length: Optional[int] = None) -> tf.keras.Model:
    """Returns a Keras Model for the given data adapter.

    Args:
      hparams: Hyperparameters of the model.
      autodata_adapter: Data adaptor used to get the task information.
      sequence_length: The length of the sequence to predict when not-None.

    Returns:
      A compiled keras model for the given adapter and hyperparams.

    Raises:
      ValueError: If hparams requests an unsupported optimizer.
    """
    feature_columns = autodata_adapter.get_dense_feature_columns()
    input_layers = autodata_adapter.get_input_layers()

    # All input_layers must be consumed for the Keras Model to work.
    assert len(feature_columns) >= len(input_layers)

    x = tf.keras.layers.DenseFeatures(feature_columns)(input_layers)

    num_nodes = hparams.get('num_nodes')
    if sequence_length:
        logging.info(
            'Creating an LSTM model with prediction sequence length: %s.',
            sequence_length)

        # Encoder: add a time axis, then encode the features into one vector.
        x = tf.expand_dims(x, axis=1)
        x = tf.keras.layers.LSTM(num_nodes,
                                 activation='relu',
                                 input_shape=(1, None))(x)
        # Repeat the encoding once per predicted output step.
        x = tf.keras.layers.RepeatVector(sequence_length)(x)
        # Decoder layer.
        x = tf.keras.layers.LSTM(num_nodes,
                                 activation='relu',
                                 return_sequences=True)(x)
        output = tf.keras.layers.TimeDistributed(tf.keras.layers.Dense(
            autodata_adapter.head_size,
            activation=autodata_adapter.head_activation),
                                                 name='output')(x)
    else:
        logging.info('Creating an densely-connected DNN model.')

        for numnodes in [num_nodes] * hparams.get('num_layers'):
            x = tf.keras.layers.Dense(numnodes)(x)
        output = tf.keras.layers.Dense(
            autodata_adapter.head_size,
            activation=autodata_adapter.head_activation,
            name='output')(x)

    model = tf.keras.Model(input_layers, output)

    # 'learning_rate' is the supported keyword; 'lr' is deprecated in tf.keras.
    lr = float(hparams.get('learning_rate'))
    optimizer_str = hparams.get('optimizer')
    if optimizer_str == 'Adam':
        optimizer = tf.keras.optimizers.Adam(learning_rate=lr)
    elif optimizer_str == 'Adagrad':
        optimizer = tf.keras.optimizers.Adagrad(learning_rate=lr)
    elif optimizer_str == 'RMSprop':
        optimizer = tf.keras.optimizers.RMSprop(learning_rate=lr)
    elif optimizer_str == 'SGD':
        optimizer = tf.keras.optimizers.SGD(learning_rate=lr)
    else:
        # Previously an unknown value fell through and raised NameError at
        # model.compile; fail fast with a clear message instead.
        raise ValueError(f'Unsupported optimizer: {optimizer_str!r}')

    model.compile(loss=autodata_adapter.loss,
                  optimizer=optimizer,
                  metrics=autodata_adapter.metrics)
    model.summary()

    return model
Example #9
0
def _build_keras_model(
        hparams: kerastuner.HyperParameters,
        tf_transform_output: tft.TFTransformOutput) -> tf.keras.Model:
    """Creates a Keras WideDeep binary classifier model.

    Args:
      hparams: Holds HyperParameters for tuning.
      tf_transform_output: A TFTransformOutput.

    Returns:
      A compiled keras Model.
    """
    # Defines deep (numeric) feature columns and their input layers.
    deep_columns = [
        tf.feature_column.numeric_column(key=features.transformed_name(key),
                                         shape=())
        for key in features.NUMERIC_FEATURE_KEYS
    ]

    input_layers = {
        column.key: tf.keras.layers.Input(name=column.key,
                                          shape=(),
                                          dtype=tf.float32)
        for column in deep_columns
    }

    # Defines wide feature columns: categorical identity columns whose bucket
    # counts are read from the Transform graph so they stay consistent with
    # preprocessing.
    categorical_columns = [
        tf.feature_column.categorical_column_with_identity(
            key=features.transformed_name(key),
            num_buckets=tf_transform_output.
            num_buckets_for_transformed_feature(
                features.transformed_name(key)),
            default_value=0) for key in features.CATEGORICAL_FEATURE_KEYS
    ]

    categorical_columns += [
        tf.feature_column.categorical_column_with_identity(  # pylint: disable=g-complex-comprehension
            key,
            num_buckets=features.VOCAB_SIZE + features.OOV_SIZE,
            default_value=0)
        for key in features.transformed_names(features.VOCAB_FEATURE_KEYS)
    ]

    categorical_columns += [
        tf.feature_column.categorical_column_with_identity(  # pylint: disable=g-complex-comprehension
            key,
            num_buckets=num_buckets,
            default_value=0) for key, num_buckets in zip(
                features.transformed_names(features.BUCKET_FEATURE_KEYS),
                features.BUCKET_FEATURE_BUCKET_COUNT)
    ]

    wide_columns = [
        tf.feature_column.indicator_column(categorical_column)
        for categorical_column in categorical_columns
    ]

    input_layers.update({
        column.categorical_column.key:
        tf.keras.layers.Input(name=column.categorical_column.key,
                              shape=(),
                              dtype=tf.int32)
        for column in wide_columns
    })

    # Build Keras model using hparams.
    deep = tf.keras.layers.DenseFeatures(deep_columns)(input_layers)
    for n in range(int(hparams.get('n_layers'))):
        deep = tf.keras.layers.Dense(units=hparams.get('n_units_' +
                                                       str(n + 1)))(deep)

    wide = tf.keras.layers.DenseFeatures(wide_columns)(input_layers)

    # Single sigmoid unit for binary classification; squeeze the trailing
    # dimension so predictions match scalar labels.
    output = tf.keras.layers.Dense(1, activation='sigmoid')(
        tf.keras.layers.concatenate([deep, wide]))
    output = tf.squeeze(output, -1)

    model = tf.keras.Model(input_layers, output)
    # 'learning_rate' is the supported keyword; 'lr' is deprecated in tf.keras.
    model.compile(
        loss='binary_crossentropy',
        optimizer=tf.keras.optimizers.Adam(
            learning_rate=hparams.get('learning_rate')),
        metrics=[
            tf.keras.metrics.TruePositives(name='tp'),
            tf.keras.metrics.FalsePositives(name='fp'),
            tf.keras.metrics.TrueNegatives(name='tn'),
            tf.keras.metrics.FalseNegatives(name='fn'),
            tf.keras.metrics.BinaryAccuracy(name='binary_accuracy'),
            tf.keras.metrics.Precision(name='precision'),
            tf.keras.metrics.Recall(name='recall'),
            tf.keras.metrics.AUC(name='auc'),
        ])
    model.summary(print_fn=absl.logging.info)

    return model