Example #1
def get_cross_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('cross'):
        _check_cross_args(extra_options)
        use_shared_embedding = extra_options['cross_use_shared_embedding']
        use_project = extra_options['cross_use_project']
        project_size = extra_options['cross_project_size']
        num_layers = extra_options['cross_num_layers']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        x = tf.concat(feature_vectors, axis=1)  # [B, T]
        y = _cross_net(x, num_layers)
        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
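
The _cross_net helper is defined elsewhere in the repo. As a rough guide, here is a minimal sketch of the DCN-style cross network such a helper typically implements; the initializers and TF 2.x eager-style ops are assumptions made for brevity:

import tensorflow as tf

def cross_net_sketch(x0, num_layers):
    # DCN cross layer: x_{l+1} = x0 * (x_l . w_l) + b_l + x_l,
    # so a stack of depth l captures interactions of order l + 1.
    dim = int(x0.shape[-1])
    x = x0
    for i in range(num_layers):
        w = tf.Variable(tf.random.normal([dim, 1], stddev=0.01), name='w%d' % i)
        b = tf.Variable(tf.zeros([dim]), name='b%d' % i)
        x = x0 * tf.matmul(x, w) + b + x  # [B, T]
    return x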
Example #2
def get_ifm_logits(features, feature_columns, shared_feature_vectors, units,
                   is_training, extra_options):

    with tf.variable_scope('ifm'):
        _check_ifm_args(extra_options)
        use_shared_embedding = extra_options['ifm_use_shared_embedding']
        use_project = extra_options['ifm_use_project']
        project_size = extra_options['ifm_project_size']
        hidden_unit = extra_options['ifm_hidden_unit']
        field_dim = extra_options['ifm_field_dim']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _ifm(feature_vectors, hidden_unit, field_dim, reduce_sum=True)  # [B, 1]

        with tf.variable_scope('logits') as logits_scope:
            logits = y
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
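
The _ifm helper is not shown. A hypothetical sketch of the input-aware FM idea (Yu et al., IJCAI 2019) that the name suggests: a factor-estimating MLP predicts one weight per field, the embeddings are rescaled by those weights, and plain FM follows. The layer sizes are guesses, and the role of ifm_field_dim is not reproduced here:

import tensorflow as tf

def ifm_sketch(feature_vectors, hidden_unit):
    # Hypothetical: factor-estimating network -> per-field weights m,
    # input-aware rescaling of the embeddings, then standard FM.
    n = len(feature_vectors)
    h = tf.concat(feature_vectors, axis=1)                    # [B, N*D]
    h = tf.keras.layers.Dense(hidden_unit, activation='relu')(h)
    m = tf.keras.layers.Dense(n)(h)                           # [B, N]
    x = tf.stack(feature_vectors, axis=1) * m[:, :, None]     # [B, N, D]
    sum_sq = tf.square(tf.reduce_sum(x, axis=1))              # [B, D]
    sq_sum = tf.reduce_sum(tf.square(x), axis=1)              # [B, D]
    return 0.5 * tf.reduce_sum(sum_sq - sq_sum, axis=1, keepdims=True)  # [B, 1]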
Example #3
def get_cin_logits(features, feature_columns, shared_feature_vectors, units,
                   is_training, extra_options):

    with tf.variable_scope('cin'):
        _check_cin_args(extra_options)
        use_shared_embedding = extra_options['cin_use_shared_embedding']
        use_project = extra_options['cin_use_project']
        project_size = extra_options['cin_project_size']
        hidden_feature_maps = extra_options['cin_hidden_feature_maps']
        split_half = extra_options['cin_split_half']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        check_feature_dims(feature_vectors)
        x = tf.stack(feature_vectors, axis=1)  # [B, N, D]
        y = _cin_layer(x, hidden_feature_maps, split_half,
                       reduce_sum=False)  # [B, F]

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
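
For reference, a compact sketch of the xDeepFM CIN layer that _cin_layer presumably wraps; the sum pooling over the embedding axis is what yields the [B, F] shape noted above. The weight initialization is an assumption, split_half needs an even map count, and the real implementation may treat the last layer specially:

import tensorflow as tf

def cin_sketch(x, hidden_feature_maps, split_half=True):
    # x: [B, N, D]. Each layer takes outer products between the current
    # map and the original fields per embedding slot, then compresses
    # them with a 1x1 convolution (xDeepFM's Compressed Interaction Net).
    n, d = int(x.shape[1]), int(x.shape[2])
    xs, outputs = [x], []
    for num_maps in hidden_feature_maps:
        hk = int(xs[-1].shape[1])
        z = tf.einsum('bhd,bnd->bdhn', xs[-1], x)             # [B, D, H_k, N]
        z = tf.reshape(z, [-1, d, hk * n])                    # [B, D, H_k*N]
        w = tf.Variable(tf.random.normal([1, hk * n, num_maps], stddev=0.01))
        out = tf.transpose(tf.nn.conv1d(z, w, stride=1, padding='VALID'),
                           [0, 2, 1])                         # [B, maps, D]
        if split_half:  # half feeds the next layer, half goes to output
            nxt, direct = tf.split(out, 2, axis=1)
        else:
            nxt, direct = out, out
        xs.append(nxt)
        outputs.append(direct)
    return tf.reduce_sum(tf.concat(outputs, axis=1), axis=-1)  # [B, F]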
Example #4
def get_fm_logits(features, feature_columns, shared_feature_vectors, units,
                  is_training, extra_options):

    assert units == 1, "FM units must be 1"

    with tf.variable_scope('fm'):
        _check_fm_args(extra_options)

        use_shared_embedding = extra_options['fm_use_shared_embedding']
        use_project = extra_options['fm_use_project']
        project_size = extra_options['fm_project_size']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _fm(feature_vectors, reduce_sum=True)  # [B, 1]
        with tf.variable_scope('logits') as logits_scope:
            logits = y
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
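
The _fm helper is shared by several models here. It presumably uses Rendle's standard identity, which turns the O(N^2) pairwise sum into two O(N) terms; a minimal sketch:

import tensorflow as tf

def fm_pairwise(feature_vectors, reduce_sum=True):
    # sum_{i<j} <v_i, v_j> = 0.5 * ((sum_i v_i)^2 - sum_i v_i^2),
    # computed per embedding dimension.
    x = tf.stack(feature_vectors, axis=1)            # [B, N, D]
    sum_sq = tf.square(tf.reduce_sum(x, axis=1))     # [B, D]
    sq_sum = tf.reduce_sum(tf.square(x), axis=1)     # [B, D]
    y = 0.5 * (sum_sq - sq_sum)                      # [B, D]
    if reduce_sum:
        y = tf.reduce_sum(y, axis=1, keepdims=True)  # [B, 1]
    return y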
Example #5
def get_fgcnn_feature_vectors(features,
                              feature_columns,
                              feature_vectors,
                              options,
                              name='fgcnn'):
    """
    """

    with tf.variable_scope(name):
        _check_fgcnn_args(options)
        use_shared_embedding = options['fgcnn_use_shared_embedding']
        use_project = options['fgcnn_use_project']
        project_dim = options['fgcnn_project_dim']
        filter_nums = options['fgcnn_filter_nums']
        kernel_sizes = options['fgcnn_kernel_sizes']
        pooling_sizes = options['fgcnn_pooling_sizes']
        new_map_sizes = options['fgcnn_new_map_sizes']

        x = feature_vectors
        if not use_shared_embedding:
            x = get_feature_vectors(features, feature_columns,
                                    name + '_feature_vectors')
        if use_project:
            x = project(x, project_dim)
        new_feature_vectors = _fgcnn(x,
                                     filter_nums=filter_nums,
                                     kernel_sizes=kernel_sizes,
                                     pooling_sizes=pooling_sizes,
                                     new_map_sizes=new_map_sizes)

        return new_feature_vectors
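
A rough sketch of what _fgcnn does, following the FGCNN paper: per stage, convolve over the field axis, max-pool, then a dense "recombination" layer generates new features. The tanh activations, 'same' padding, and exact recombination shapes are assumptions, and the field count must be divisible by each pooling size:

import tensorflow as tf

def fgcnn_sketch(feature_vectors, filter_nums, kernel_sizes,
                 pooling_sizes, new_map_sizes):
    # Per stage: conv over fields, pool, recombine into m*n new features.
    x = tf.stack(feature_vectors, axis=1)[..., None]          # [B, N, D, 1]
    d = int(x.shape[2])
    new_vectors = []
    for f, k, p, m in zip(filter_nums, kernel_sizes,
                          pooling_sizes, new_map_sizes):
        x = tf.keras.layers.Conv2D(f, (k, 1), padding='same',
                                   activation='tanh')(x)      # [B, N, D, f]
        x = tf.keras.layers.MaxPool2D((p, 1))(x)              # [B, N/p, D, f]
        n = int(x.shape[1])
        rec = tf.keras.layers.Dense(m * n * d, activation='tanh')(
            tf.reshape(x, [-1, n * d * f]))
        new_vectors.extend(tf.unstack(
            tf.reshape(rec, [-1, m * n, d]), axis=1))         # [B, D] each
    return new_vectors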
Example #6
def get_wkfm_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('wkfm'):
        _check_wkfm_args(extra_options)
        use_shared_embedding = extra_options['wkfm_use_shared_embedding']
        use_project = extra_options['wkfm_use_project']
        project_size = extra_options['wkfm_project_size']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _wkfm(feature_vectors, reduce_sum=True)  # [B, 1]
        with tf.variable_scope('logits') as logits_scope:
            # fc just for adding a bias
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
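
The _wkfm helper is not shown and the name is ambiguous. One plausible reading of a weighted/kernel FM replaces the plain inner product <v_i, v_j> with a learned bilinear form v_i^T W v_j; a sketch under that assumption, with a single shared kernel:

import tensorflow as tf

def kernel_fm_sketch(feature_vectors):
    # Bilinear-kernel FM: sum over unordered pairs of v_i^T W v_j.
    d = int(feature_vectors[0].shape[-1])
    w = tf.Variable(tf.random.normal([d, d], stddev=0.01))
    x = tf.stack(feature_vectors, axis=1)                     # [B, N, D]
    inter = tf.einsum('bnd,de,bme->bnm', x, w, x)             # all pairs
    ones = tf.ones_like(inter)
    upper = tf.linalg.band_part(ones, 0, -1) - tf.linalg.band_part(ones, 0, 0)
    return tf.reduce_sum(inter * upper, axis=[1, 2])[:, None]  # [B, 1]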
Example #7
def get_ipnn_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('ipnn'):
        _check_ipnn_args(extra_options)
        use_shared_embedding = extra_options['ipnn_use_shared_embedding']
        use_project = extra_options['ipnn_use_project']
        project_size = extra_options['ipnn_project_size']
        hidden_units = extra_options['ipnn_hidden_units']
        activation_fn = extra_options['ipnn_activation_fn']
        dropout = extra_options['ipnn_dropout']
        batch_norm = extra_options['ipnn_batch_norm']
        layer_norm = extra_options['ipnn_layer_norm']
        use_resnet = extra_options['ipnn_use_resnet']
        use_densenet = extra_options['ipnn_use_densenet']
        unordered_inner_product = extra_options['ipnn_unordered_inner_product']
        concat_project = extra_options['ipnn_concat_project']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)
        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        project_feature_vectors = None
        if use_project:
            project_feature_vectors = project(feature_vectors, project_size)

        y = _ipnn(feature_vectors=feature_vectors,
                  project_feature_vectors=project_feature_vectors,
                  use_project=use_project,
                  units=units,
                  hidden_units=hidden_units,
                  activation_fn=activation_fn,
                  dropout=dropout,
                  batch_norm=batch_norm,
                  layer_norm=layer_norm,
                  use_resnet=use_resnet,
                  use_densenet=use_densenet,
                  is_training=is_training,
                  unordered_inner_product=unordered_inner_product,
                  concat_project=concat_project)

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
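
A bare-bones sketch of the inner-product PNN core that _ipnn builds on, ignoring the projection, normalization, resnet/densenet, and concat options handled above; the layer choices here are assumptions:

import tensorflow as tf

def ipnn_sketch(feature_vectors, hidden_units):
    # Pairwise inner products between field embeddings, concatenated
    # with the raw embeddings and passed through an MLP (PNN, 2016).
    x = tf.stack(feature_vectors, axis=1)                # [B, N, D]
    n = int(x.shape[1])
    gram = tf.matmul(x, x, transpose_b=True)             # [B, N, N]
    pairs = tf.stack([gram[:, i, j]
                      for i in range(n)
                      for j in range(i + 1, n)], axis=1)  # [B, N*(N-1)/2]
    y = tf.concat([tf.concat(feature_vectors, axis=1), pairs], axis=1)
    for units in hidden_units:
        y = tf.keras.layers.Dense(units, activation='relu')(y)
    return y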
Example #8
def get_ccpm_logits(features, feature_columns, shared_feature_vectors, units,
                    is_training, extra_options):

    with tf.variable_scope('ccpm'):
        _check_ccpm_args(extra_options)
        use_shared_embedding = extra_options['ccpm_use_shared_embedding']
        use_project = extra_options['ccpm_use_project']
        project_size = extra_options['ccpm_project_size']
        hidden_units = extra_options['ccpm_hidden_units']
        activation_fn = extra_options['ccpm_activation_fn']
        dropout = extra_options['ccpm_dropout']
        batch_norm = extra_options['ccpm_batch_norm']
        layer_norm = extra_options['ccpm_layer_norm']
        use_resnet = extra_options['ccpm_use_resnet']
        use_densenet = extra_options['ccpm_use_densenet']
        kernel_sizes = extra_options['ccpm_kernel_sizes']
        filter_nums = extra_options['ccpm_filter_nums']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _build_ccpm_model(feature_vectors=feature_vectors,
                              kernel_sizes=kernel_sizes,
                              filter_nums=filter_nums,
                              hidden_units=hidden_units,
                              activation_fn=activation_fn,
                              dropout=dropout,
                              is_training=is_training,
                              batch_norm=batch_norm,
                              layer_norm=layer_norm,
                              use_resnet=use_resnet,
                              use_densenet=use_densenet)

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
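
_build_ccpm_model is defined elsewhere; a simplified sketch of the CCPM convolution stack it presumably contains: 1-D convolutions over the field axis with k-max pooling. A fixed k is used here, whereas the paper schedules a "flexible" p, and note that top_k also reorders fields by activation, a simplification:

import tensorflow as tf

def ccpm_conv_sketch(feature_vectors, kernel_sizes, filter_nums, k=3):
    # Convolution over the field axis, then keep the k largest
    # activations per filter map (k-max pooling).
    x = tf.stack(feature_vectors, axis=1)                 # [B, N, D]
    for ksize, f in zip(kernel_sizes, filter_nums):
        x = tf.keras.layers.Conv1D(f, ksize, padding='same',
                                   activation='tanh')(x)  # [B, N', f]
        x = tf.transpose(x, [0, 2, 1])                    # [B, f, N']
        x = tf.math.top_k(x, k=min(k, int(x.shape[-1]))).values
        x = tf.transpose(x, [0, 2, 1])                    # [B, k, f]
    return tf.keras.layers.Flatten()(x)                   # feeds the MLP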
Example #9
def get_nfm_logits(features, feature_columns, shared_feature_vectors, units,
                   is_training, extra_options):

    with tf.variable_scope('nfm'):
        _check_nfm_args(extra_options)
        use_shared_embedding = extra_options['nfm_use_shared_embedding']
        use_project = extra_options['nfm_use_project']
        project_size = extra_options['nfm_project_size']
        hidden_units = extra_options['nfm_hidden_units']
        activation_fn = extra_options['nfm_activation_fn']
        dropout = extra_options['nfm_dropout']
        batch_norm = extra_options['nfm_batch_norm']
        layer_norm = extra_options['nfm_layer_norm']
        use_resnet = extra_options['nfm_use_resnet']
        use_densenet = extra_options['nfm_use_densenet']

        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']

        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        # Neural FM
        y = _fm(feature_vectors, reduce_sum=False)
        y = add_hidden_layers(y,
                              hidden_units=hidden_units,
                              activation_fn=activation_fn,
                              dropout=dropout,
                              is_training=is_training,
                              batch_norm=batch_norm,
                              layer_norm=layer_norm,
                              use_resnet=use_resnet,
                              use_densenet=use_densenet,
                              scope='hidden_layers')
        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
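
The only difference from the plain FM example above is reduce_sum=False: the pairwise sum is kept per embedding dimension (NFM's bi-interaction pooling), so the hidden layers receive a [B, D] vector rather than a scalar. Reusing the fm_pairwise sketch from Example #4:

# NFM's bi-interaction pooling: a [B, D] vector instead of a scalar,
# which then feeds the hidden layers above.
y = fm_pairwise(feature_vectors, reduce_sum=False)  # [B, D]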
Example #10
def get_autoint_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('autoint'):
        _check_autoint_args(extra_options)

        use_shared_embedding = extra_options['autoint_use_shared_embedding']
        use_project = extra_options['autoint_use_project']
        project_size = extra_options['autoint_project_size']
        size_per_head = extra_options['autoint_size_per_head']
        num_heads = extra_options['autoint_num_heads']
        num_blocks = extra_options['autoint_num_blocks']
        dropout = extra_options['autoint_dropout']
        has_residual = extra_options['autoint_has_residual']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        check_feature_dims(feature_vectors)
        x = tf.stack(feature_vectors, axis=1)  # [B, N, D]
        y = _autoint(x,
                     num_blocks=num_blocks,
                     num_units=size_per_head * num_heads,
                     num_heads=num_heads,
                     dropout=dropout,
                     is_training=is_training,
                     has_residual=has_residual)

        tf.logging.info("autoint output = {}".format(y))
        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
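
A condensed sketch of the AutoInt interacting layers behind _autoint: multi-head self-attention over the field axis with a residual connection. The Keras attention layer and the ReLU placement are assumptions; the original uses explicit query/key/value projections:

import tensorflow as tf

def autoint_sketch(x, num_blocks, num_units, num_heads, has_residual=True):
    # x: [B, N, D]. Each block lets every field attend to all others.
    for _ in range(num_blocks):
        y = tf.keras.layers.MultiHeadAttention(
            num_heads=num_heads,
            key_dim=num_units // num_heads,
            output_shape=num_units)(x, x)               # [B, N, num_units]
        if has_residual:
            y += tf.keras.layers.Dense(num_units)(x)    # width-matching skip
        x = tf.nn.relu(y)
    return tf.reshape(x, [-1, int(x.shape[1]) * num_units])  # flatten fields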
Example #11
def shallow_fibinet(features,
                    feature_columns,
                    shared_feature_vectors,
                    se_use_shared_embedding,
                    use_project,
                    project_size,
                    interaction_type,
                    se_interaction_type,
                    use_se,
                    name='shallow_fibinet'):
    """Shallow part of FiBiNET
     feature_vectors: list of 2-D tensors of shape [B, D], size N.

    Return:
      Tensor of shape [B, -1]
    """

    with tf.variable_scope(name):
        check_feature_dims(shared_feature_vectors)

        y = bilinear(shared_feature_vectors, interaction_type)  # [B, -1]
        if use_se:
            if se_use_shared_embedding:
                se_feature_vectors = shared_feature_vectors
            else:
                se_feature_vectors = get_feature_vectors(features, feature_columns)
                if use_project:
                    se_feature_vectors = project(se_feature_vectors, project_size)
                check_feature_dims(se_feature_vectors)

            x = tf.stack(se_feature_vectors, axis=1)  # [B, N, D]
            se_x = selayer(x)  # [B, N, D]
            new_se_feature_vectors = tf.unstack(se_x, axis=1)  # N tensors of shape [B, D]
            se_y = bilinear(new_se_feature_vectors,
                            se_interaction_type,
                            name='se_bilinear')   # [B, -1]
            y = tf.concat([y, se_y], axis=1)  # [B, -1]

        return y
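
selayer is defined elsewhere; a minimal sketch of the SENET block FiBiNET describes, with mean pooling as the squeeze and a reduction ratio of 2 (both assumptions here):

import tensorflow as tf

def selayer_sketch(x, reduction=2):
    # Squeeze each field's embedding to a scalar, excite through a
    # two-layer bottleneck, and rescale the fields by the result.
    n = int(x.shape[1])
    z = tf.reduce_mean(x, axis=-1)                            # [B, N]
    a = tf.keras.layers.Dense(max(1, n // reduction),
                              activation='relu')(z)
    a = tf.keras.layers.Dense(n, activation='relu')(a)        # [B, N]
    return x * a[:, :, None]                                  # [B, N, D]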
Example #12
def get_fibinet_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):
    with tf.variable_scope('fibinet'):
        _check_fibinet_args(extra_options)
        use_shared_embedding = extra_options['fibinet_use_shared_embedding']
        use_project = extra_options['fibinet_use_project']
        project_size = extra_options['fibinet_project_size']
        hidden_units = extra_options['fibinet_hidden_units']
        activation_fn = extra_options['fibinet_activation_fn']
        dropout = extra_options['fibinet_dropout']
        batch_norm = extra_options['fibinet_batch_norm']
        layer_norm = extra_options['fibinet_layer_norm']
        use_resnet = extra_options['fibinet_use_resnet']
        use_densenet = extra_options['fibinet_use_densenet']
        use_se = extra_options['fibinet_use_se']
        use_deep = extra_options['fibinet_use_deep']
        interaction_type = extra_options['fibinet_interaction_type']
        se_interaction_type = extra_options['fibinet_se_interaction_type']
        se_use_shared_embedding = extra_options['fibinet_se_use_shared_embedding']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = shallow_fibinet(features=features,
                            feature_columns=feature_columns,
                            shared_feature_vectors=feature_vectors,
                            se_use_shared_embedding=se_use_shared_embedding,
                            use_project=use_project,
                            project_size=project_size,
                            interaction_type=interaction_type,
                            se_interaction_type=se_interaction_type,
                            use_se=use_se)  # [B, -1]
        if use_deep:
            y = add_hidden_layers(y,
                                  hidden_units=hidden_units,
                                  activation_fn=activation_fn,
                                  dropout=dropout,
                                  is_training=is_training,
                                  batch_norm=batch_norm,
                                  layer_norm=layer_norm,
                                  use_resnet=use_resnet,
                                  use_densenet=use_densenet,
                                  scope='hidden_layers')
            with tf.variable_scope('logits') as logits_scope:
                logits = fc(y, units, name=logits_scope)
                add_hidden_layer_summary(logits, logits_scope.name)
        else:
            assert units == 1, "shallow_fibinet's units must be 1"
            with tf.variable_scope('logits') as logits_scope:
                logits = tf.reduce_sum(y, axis=-1, keepdims=True)  # [B, 1]
                add_hidden_layer_summary(logits, logits_scope.name)

        return logits
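
The bilinear helper is also defined elsewhere. FiBiNET's bilinear interaction combines each field pair as the element-wise product of (v_i W) and v_j; a sketch of the "all" variant, where a single kernel W is shared across pairs (the "each"/"interaction" variants learn one kernel per field or per pair):

import tensorflow as tf

def bilinear_all_sketch(feature_vectors):
    # (v_i W) element-wise multiplied with v_j, for every unordered pair.
    d = int(feature_vectors[0].shape[-1])
    w = tf.Variable(tf.random.normal([d, d], stddev=0.01))
    n = len(feature_vectors)
    out = [tf.matmul(feature_vectors[i], w) * feature_vectors[j]
           for i in range(n) for j in range(i + 1, n)]
    return tf.concat(out, axis=1)          # [B, D * N*(N-1)/2]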