def build_model(opts, as_components=False):
  """Builds a Keras model for Criteo data.

  Args:
    opts: Options object providing `method`, `dropout_rate`, and
      `layer_sizes` attributes.
    as_components: If True, return the model split into a feature trunk and
      a prediction head instead of a single end-to-end model.

  Returns:
    A `keras.Model` mapping raw input features to sigmoid probabilities, or
    a `(trunk, head)` pair of `keras.Model`s when `as_components` is True.
  """
  builders = uq_utils.get_layer_builders(opts.method, opts.dropout_rate,
                                         data_lib.NUM_TRAIN_EXAMPLES)
  _, make_dense, make_last_dense, apply_dropout, apply_last_dropout = builders

  int_columns, cat_columns = make_feature_columns(opts)
  input_layer = make_input_layers()

  # Densify integer and categorical feature columns, then fuse them.
  int_features = keras.layers.DenseFeatures(int_columns)(input_layer)
  cat_features = keras.layers.DenseFeatures(cat_columns)(input_layer)
  net = tf.concat([int_features, cat_features], axis=-1)
  logging.info('Dense layer shape: %s', net.shape)

  # TODO(yovadia): Consider explicit normalization according to data stats.
  net = keras.layers.BatchNormalization()(net)
  for width in opts.layer_sizes:
    net = apply_dropout(net)
    net = make_dense(width, activation='relu')(net)
  prelogits = apply_last_dropout(net)

  # Sigmoid output necessary to get useful AUC metric outputs.
  output_layer = make_last_dense(1, activation='sigmoid')
  probs = output_layer(prelogits)

  if as_components:
    trunk = keras.Model(inputs=input_layer, outputs=prelogits)
    embedding_inputs = keras.layers.Input(shape=prelogits.shape[1:])
    head = keras.Model(inputs=embedding_inputs,
                       outputs=output_layer(embedding_inputs))
    return trunk, head
  return keras.Model(inputs=input_layer, outputs=probs)
def _build_mlp(opts):
  """Builds a multi-layer perceptron Keras model.

  Args:
    opts: Options object providing `method`, `dropout_rate`,
      `num_train_examples`, and `mlp_layer_sizes` attributes.

  Returns:
    A `keras.Model` mapping MNIST-shaped inputs to class logits
    (no output activation).
  """
  builders = uq_utils.get_layer_builders(opts.method, opts.dropout_rate,
                                         opts.num_train_examples)
  _, make_dense, make_last_dense, apply_dropout, apply_last_dropout = builders

  inputs = keras.layers.Input(_MNIST_SHAPE)
  hidden = keras.layers.Flatten(input_shape=_MNIST_SHAPE)(inputs)
  for width in opts.mlp_layer_sizes:
    hidden = apply_dropout(hidden)
    hidden = make_dense(width, activation='relu')(hidden)
  hidden = apply_last_dropout(hidden)

  # Final layer emits raw logits; losses are expected to apply
  # softmax/from_logits themselves.
  logits = make_last_dense(_NUM_CLASSES)(hidden)
  return keras.Model(inputs=inputs, outputs=logits)
def _build_lenet(opts):
  """Builds a LeNet Keras model.

  Args:
    opts: Options object providing `method`, `dropout_rate`, and
      `num_train_examples` attributes.

  Returns:
    A `keras.Model` mapping MNIST-shaped inputs to class logits
    (no output activation).
  """
  layer_builders = uq_utils.get_layer_builders(opts.method, opts.dropout_rate,
                                               opts.num_train_examples)
  conv2d, dense_layer, dense_last, dropout_fn, dropout_fn_last = layer_builders

  inputs = keras.layers.Input(_MNIST_SHAPE)
  net = inputs
  net = conv2d(32, kernel_size=(3, 3), activation='relu',
               input_shape=_MNIST_SHAPE)(net)
  net = conv2d(64, (3, 3), activation='relu')(net)
  net = keras.layers.MaxPooling2D(pool_size=(2, 2))(net)
  net = dropout_fn(net)
  net = keras.layers.Flatten()(net)
  net = dense_layer(128, activation='relu')(net)
  net = dropout_fn_last(net)
  # Bug fix: the final layer previously used activation='relu', which clips
  # negative logits to zero and corrupts softmax/cross-entropy training.
  # Emit raw logits with no activation, consistent with _build_mlp.
  logits = dense_last(_NUM_CLASSES)(net)
  return keras.Model(inputs=inputs, outputs=logits)