Example #1
File: ifm.py Project: kiminh/EasyCTR
def get_ifm_logits(features, feature_columns, shared_feature_vectors, units,
                   is_training, extra_options):

    with tf.variable_scope('ifm'):
        _check_ifm_args(extra_options)
        use_shared_embedding = extra_options['ifm_use_shared_embedding']
        use_project = extra_options['ifm_use_project']
        project_size = extra_options['ifm_project_size']
        hidden_unit = extra_options['ifm_hidden_unit']
        field_dim = extra_options['ifm_field_dim']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _ifm(feature_vectors, hidden_unit, field_dim, reduce_sum=True)

        with tf.variable_scope('logits') as logits_scope:
            logits = y
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
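
These wrappers lean on a few small helpers that the listing never shows. As a point of reference, here is a minimal sketch of what project() and fc() could look like, inferred purely from their call sites (a list of [B, D_i] embeddings in, a list of [B, project_size] embeddings out; fc as a thin dense-layer wrapper). This is a guess, not the EasyCTR implementation.

import tensorflow.compat.v1 as tf  # the snippets use the TF 1.x API


def project(feature_vectors, project_size):
    """Project each [B, D_i] feature embedding to a common [B, project_size]."""
    projected = []
    for i, vec in enumerate(feature_vectors):
        projected.append(
            tf.layers.dense(vec, units=project_size, activation=None,
                            name='project_{}'.format(i)))
    return projected


def fc(inputs, units, name=None):
    """Assumed thin dense-layer wrapper; callers pass a variable scope as name."""
    if hasattr(name, 'name'):  # a tf.VariableScope: the scope already names it
        name = None
    return tf.layers.dense(inputs, units=units, activation=None, name=name)
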
Example #2
        def _model_fn(features, labels, mode, config):

            head = head_lib._binary_logistic_or_multi_class_head(  # pylint: disable=protected-access
                n_classes, weight_column, label_vocabulary, loss_reduction,
                loss_fn)

            is_training = (mode == tf.estimator.ModeKeys.TRAIN)
            net = tf.feature_column.input_layer(features, feature_columns)
            inputs = _attention_layer(features, attention_columns, is_training)
            tf.logging.info('attention outputs = {}'.format(inputs))

            inputs.append(net)
            net = tf.concat(inputs, axis=-1)
            tf.logging.info("inputs: {}".format(net))

            if batch_norm:
                net = tf.layers.batch_normalization(net, training=is_training)

            net = add_hidden_layers(net, hidden_units, activation_fn, dropout,
                                    is_training, batch_norm, 'DNN')
            with tf.variable_scope('logits') as logits_scope:
                logits = fc(net, head.logits_dimension, name=logits_scope)
                add_hidden_layer_summary(logits, logits_scope.name)

            return head.create_estimator_spec(
                features=features,
                mode=mode,
                labels=labels,
                optimizer=optimizers.get_optimizer_instance(
                    optimizer, learning_rate=_LEARNING_RATE),
                logits=logits)
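
Both this model_fn and most wrappers below delegate their DNN tower to add_hidden_layers. A minimal sketch, assuming it stacks dense layers with optional batch norm and dropout (the layer_norm, use_resnet and use_densenet options seen in later calls are omitted here):

import tensorflow.compat.v1 as tf


def add_hidden_layers(inputs, hidden_units, activation_fn, dropout,
                      is_training, batch_norm, scope):
    net = inputs
    with tf.variable_scope(scope):
        for i, units in enumerate(hidden_units):
            net = tf.layers.dense(net, units=units, activation=activation_fn,
                                  name='hidden_{}'.format(i))
            if batch_norm:
                net = tf.layers.batch_normalization(net, training=is_training)
            if dropout:
                net = tf.layers.dropout(net, rate=dropout,
                                        training=is_training)
    return net
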
Example #3
def get_cin_logits(features, feature_columns, shared_feature_vectors, units,
                   is_training, extra_options):

    with tf.variable_scope('cin'):
        _check_cin_args(extra_options)
        use_shared_embedding = extra_options['cin_use_shared_embedding']
        use_project = extra_options['cin_use_project']
        project_size = extra_options['cin_project_size']
        hidden_feature_maps = extra_options['cin_hidden_feature_maps']
        split_half = extra_options['cin_split_half']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        check_feature_dims(feature_vectors)
        x = tf.stack(feature_vectors, axis=1)  # [B, N, D]
        y = _cin_layer(x, hidden_feature_maps, split_half,
                       reduce_sum=False)  # [B, F]

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
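
_cin_layer is the Compressed Interaction Network from xDeepFM (Lian et al., 2018). A sketch of the core recurrence, matching the [B, N, D] input and [B, F] output above; split_half (which routes half of each layer's feature maps straight to the output) is accepted but ignored here for brevity:

import tensorflow.compat.v1 as tf


def _cin_layer(x0, hidden_feature_maps, split_half, reduce_sum=False):
    """x0: [B, N, D] -> [B, F], F = sum(hidden_feature_maps)."""
    n, d = int(x0.shape[1]), int(x0.shape[2])
    xk, pooled = x0, []
    for layer, num_maps in enumerate(hidden_feature_maps):
        hk = int(xk.shape[1])
        # Pairwise products of current maps with the raw fields, per dim d.
        z = tf.einsum('bhd,bnd->bhnd', xk, x0)      # [B, H_prev, N, D]
        z = tf.reshape(z, [-1, hk * n, d])          # [B, H_prev*N, D]
        w = tf.get_variable('cin_w_{}'.format(layer), [hk * n, num_maps])
        xk = tf.einsum('bzd,zm->bmd', z, w)         # [B, num_maps, D]
        pooled.append(tf.reduce_sum(xk, axis=2))    # sum-pool over D
    y = tf.concat(pooled, axis=1)                   # [B, F]
    if reduce_sum:
        y = tf.reduce_sum(y, axis=1, keepdims=True)
    return y
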
Example #4
File: cross.py Project: kiminh/EasyCTR
def get_cross_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('cross'):
        _check_cross_args(extra_options)
        use_shared_embedding = extra_options['cross_use_shared_embedding']
        use_project = extra_options['cross_use_project']
        project_size = extra_options['cross_project_size']
        num_layers = extra_options['cross_num_layers']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        x = tf.concat(feature_vectors, axis=1)  # [B, T]
        y = _cross_net(x, num_layers)
        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
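
_cross_net is presumably the cross network from Deep & Cross (Wang et al., 2017). A sketch consistent with the [B, T] input above; each layer computes x_{l+1} = x_0 (x_l . w_l) + b_l + x_l, so the interaction order grows with depth at linear cost:

import tensorflow.compat.v1 as tf


def _cross_net(x0, num_layers):
    """x0: [B, T] -> [B, T] after num_layers cross layers."""
    dim = int(x0.shape[-1])
    xl = x0
    for layer in range(num_layers):
        w = tf.get_variable('cross_w_{}'.format(layer), [dim, 1])
        b = tf.get_variable('cross_b_{}'.format(layer), [dim])
        xl_w = tf.matmul(xl, w)     # scalar per example: [B, 1]
        xl = x0 * xl_w + b + xl     # broadcasts back to [B, T]
    return xl
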
Example #5
File: fm.py Project: kiminh/EasyCTR
def get_fm_logits(features, feature_columns, shared_feature_vectors, units,
                  is_training, extra_options):

    assert units == 1, "FM units must be 1"

    with tf.variable_scope('fm'):
        _check_fm_args(extra_options)

        use_shared_embedding = extra_options['fm_use_shared_embedding']
        use_project = extra_options['fm_use_project']
        project_size = extra_options['fm_project_size']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _fm(feature_vectors, reduce_sum=True)  # [B, 1]
        with tf.variable_scope('logits') as logits_scope:
            logits = y
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
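
_fm is the classic factorization-machine second-order interaction (Rendle, 2010), computed with the usual 0.5 * ((sum v)^2 - sum v^2) identity. With reduce_sum=True it collapses to the [B, 1] logit used here; with reduce_sum=False it keeps the [B, D] bi-interaction vector that NFM feeds to its DNN in example #10. A sketch:

import tensorflow.compat.v1 as tf


def _fm(feature_vectors, reduce_sum=True):
    """feature_vectors: list of N tensors [B, D] sharing the same D."""
    x = tf.stack(feature_vectors, axis=1)             # [B, N, D]
    sum_sq = tf.square(tf.reduce_sum(x, axis=1))      # (sum_i v_i)^2
    sq_sum = tf.reduce_sum(tf.square(x), axis=1)      # sum_i v_i^2
    y = 0.5 * (sum_sq - sq_sum)                       # [B, D]
    if reduce_sum:
        y = tf.reduce_sum(y, axis=1, keepdims=True)   # [B, 1]
    return y
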
Example #6
File: wkfm.py Project: kiminh/EasyCTR
def get_wkfm_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('wkfm'):
        _check_wkfm_args(extra_options)
        use_shared_embedding = extra_options['wkfm_use_shared_embedding']
        use_project = extra_options['wkfm_use_project']
        project_size = extra_options['wkfm_project_size']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _wkfm(feature_vectors, reduce_sum=True)  # [B, 1]
        with tf.variable_scope('logits') as logits_scope:
            # fc just for adding a bias
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
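
_wkfm is not shown and "weighted kernel FM" is not a standard layer name, so the following is illustrative only: a kernel-FM-style interaction, sum over i<j of v_i^T W_ij v_j, as one plausible reading of the name. The EasyCTR version may well differ.

import tensorflow.compat.v1 as tf


def kernel_fm(feature_vectors):
    """feature_vectors: list of N tensors [B, D]. Returns [B, 1]."""
    dim = int(feature_vectors[0].shape[-1])
    n = len(feature_vectors)
    terms = []
    for i in range(n):
        for j in range(i + 1, n):
            w = tf.get_variable('kernel_{}_{}'.format(i, j), [dim, dim])
            vi_w = tf.matmul(feature_vectors[i], w)   # [B, D]
            terms.append(tf.reduce_sum(vi_w * feature_vectors[j],
                                       axis=1, keepdims=True))
    return tf.add_n(terms)  # [B, 1]
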
Example #7
File: ipnn.py Project: kiminh/EasyCTR
def get_ipnn_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('ipnn'):
        _check_ipnn_args(extra_options)
        use_shared_embedding = extra_options['ipnn_use_shared_embedding']
        use_project = extra_options['ipnn_use_project']
        project_size = extra_options['ipnn_project_size']
        hidden_units = extra_options['ipnn_hidden_units']
        activation_fn = extra_options['ipnn_activation_fn']
        dropout = extra_options['ipnn_dropout']
        batch_norm = extra_options['ipnn_batch_norm']
        layer_norm = extra_options['ipnn_layer_norm']
        use_resnet = extra_options['ipnn_use_resnet']
        use_densenet = extra_options['ipnn_use_densenet']
        unordered_inner_product = extra_options['ipnn_unordered_inner_product']
        concat_project = extra_options['ipnn_concat_project']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)
        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        project_feature_vectors = None
        if use_project:
            project_feature_vectors = project(feature_vectors, project_size)

        y = _ipnn(feature_vectors=feature_vectors,
                  project_feature_vectors=project_feature_vectors,
                  use_project=use_project,
                  units=units,
                  hidden_units=hidden_units,
                  activation_fn=activation_fn,
                  dropout=dropout,
                  batch_norm=batch_norm,
                  layer_norm=layer_norm,
                  use_resnet=use_resnet,
                  use_densenet=use_densenet,
                  is_training=is_training,
                  unordered_inner_product=unordered_inner_product,
                  concat_project=concat_project)

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
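
_ipnn presumably implements the inner-product variant of PNN (Qu et al., 2016). The real helper also threads through projection, resnet/densenet and the unordered_inner_product / concat_project switches; this sketch shows only the core product layer, pairwise <v_i, v_j> concatenated with the flat embeddings before the DNN:

import tensorflow.compat.v1 as tf


def ipnn_product_layer(feature_vectors):
    """list of N tensors [B, D] -> [B, N*(N-1)/2 + N*D]."""
    n = len(feature_vectors)
    products = []
    for i in range(n):
        for j in range(i + 1, n):
            products.append(
                tf.reduce_sum(feature_vectors[i] * feature_vectors[j],
                              axis=1, keepdims=True))   # [B, 1]
    inner = tf.concat(products, axis=1)        # [B, N*(N-1)/2]
    flat = tf.concat(feature_vectors, axis=1)  # [B, N*D]
    return tf.concat([inner, flat], axis=1)
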
Example #8
File: linear.py Project: kiminh/EasyCTR
def get_linear_logits(features,
                      linear_feature_columns,
                      units,
                      linear_sparse_combiner,
                      scope):
    logit_fn = linear.linear_logit_fn_builder(
            units=units,
            feature_columns=linear_feature_columns,
            sparse_combiner=linear_sparse_combiner)
    logits = logit_fn(features=features)
    add_hidden_layer_summary(logits, scope.name)

    return logits
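
Here `linear` is assumed to be TensorFlow's canned-estimator module (tensorflow.python.estimator.canned.linear in the TF 1.x layout). A hedged usage sketch, since the caller owns the variable scope and passes it in for the summary name; the toy feature column is hypothetical:

import tensorflow.compat.v1 as tf

price = tf.feature_column.numeric_column('price')
features = {'price': tf.constant([[1.0], [2.0]])}

with tf.variable_scope('linear') as scope:
    linear_logits = get_linear_logits(
        features=features,
        linear_feature_columns=[price],
        units=1,
        linear_sparse_combiner='sum',  # 'sum', 'mean' or 'sqrtn'
        scope=scope)
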
Example #9
def get_ccpm_logits(features, feature_columns, shared_feature_vectors, units,
                    is_training, extra_options):

    with tf.variable_scope('ccpm'):
        _check_ccpm_args(extra_options)
        use_shared_embedding = extra_options['ccpm_use_shared_embedding']
        use_project = extra_options['ccpm_use_project']
        project_size = extra_options['ccpm_project_size']
        hidden_units = extra_options['ccpm_hidden_units']
        activation_fn = extra_options['ccpm_activation_fn']
        dropout = extra_options['ccpm_dropout']
        batch_norm = extra_options['ccpm_batch_norm']
        layer_norm = extra_options['ccpm_layer_norm']
        use_resnet = extra_options['ccpm_use_resnet']
        use_densenet = extra_options['ccpm_use_densenet']
        kernel_sizes = extra_options['ccpm_kernel_sizes']
        filter_nums = extra_options['ccpm_filter_nums']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _build_ccpm_model(feature_vectors=feature_vectors,
                              kernel_sizes=kernel_sizes,
                              filter_nums=filter_nums,
                              hidden_units=hidden_units,
                              activation_fn=activation_fn,
                              dropout=dropout,
                              is_training=is_training,
                              batch_norm=batch_norm,
                              layer_norm=layer_norm,
                              use_resnet=use_resnet,
                              use_densenet=use_densenet)

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
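
_build_ccpm_model presumably wraps the convolutional core of CCPM (Liu et al., 2015): treat the stacked embeddings [B, N, D] as a 1-D sequence over fields and alternate convolution with pooling before the DNN. A simplified sketch (plain max pooling stands in for the paper's flexible p-max pooling):

import tensorflow.compat.v1 as tf


def ccpm_conv_core(feature_vectors, kernel_sizes, filter_nums):
    x = tf.stack(feature_vectors, axis=1)  # [B, N, D]
    for i, (k, f) in enumerate(zip(kernel_sizes, filter_nums)):
        x = tf.layers.conv1d(x, filters=f, kernel_size=k, padding='same',
                             activation=tf.nn.relu,
                             name='ccpm_conv_{}'.format(i))  # [B, N', f]
        x = tf.layers.max_pooling1d(x, pool_size=2, strides=2,
                                    padding='same')          # halve the fields
    return tf.layers.flatten(x)
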
Example #10
def get_nfm_logits(features, feature_columns, shared_feature_vectors, units,
                   is_training, extra_options):

    with tf.variable_scope('nfm'):
        _check_nfm_args(extra_options)
        use_shared_embedding = extra_options['nfm_use_shared_embedding']
        use_project = extra_options['nfm_use_project']
        project_size = extra_options['nfm_project_size']
        hidden_units = extra_options['nfm_hidden_units']
        activation_fn = extra_options['nfm_activation_fn']
        dropout = extra_options['nfm_dropout']
        batch_norm = extra_options['nfm_batch_norm']
        layer_norm = extra_options['nfm_layer_norm']
        use_resnet = extra_options['nfm_use_resnet']
        use_densenet = extra_options['nfm_use_densenet']

        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']

        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        # Neural FM
        y = _fm(feature_vectors, reduce_sum=False)
        y = add_hidden_layers(y,
                              hidden_units=hidden_units,
                              activation_fn=activation_fn,
                              dropout=dropout,
                              is_training=is_training,
                              batch_norm=batch_norm,
                              layer_norm=layer_norm,
                              use_resnet=use_resnet,
                              use_densenet=use_densenet,
                              scope='hidden_layers')
        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
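
get_activation_fn shows up in several wrappers above. A minimal sketch, assuming it maps a string name to a callable and threads through the leaky_relu_alpha / swish_beta hyperparameters:

import functools

import tensorflow.compat.v1 as tf


def get_activation_fn(activation_fn, leaky_relu_alpha, swish_beta):
    if callable(activation_fn):
        return activation_fn
    table = {
        'relu': tf.nn.relu,
        'tanh': tf.nn.tanh,
        'sigmoid': tf.nn.sigmoid,
        'leaky_relu': functools.partial(tf.nn.leaky_relu,
                                        alpha=leaky_relu_alpha),
        'swish': lambda x: x * tf.nn.sigmoid(swish_beta * x),  # scaled swish
    }
    return table[activation_fn]
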
Example #11
def get_autoint_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('autoint'):
        _check_autoint_args(extra_options)

        use_shared_embedding = extra_options['autoint_use_shared_embedding']
        use_project = extra_options['autoint_use_project']
        project_size = extra_options['autoint_project_size']
        size_per_head = extra_options['autoint_size_per_head']
        num_heads = extra_options['autoint_num_heads']
        num_blocks = extra_options['autoint_num_blocks']
        dropout = extra_options['autoint_dropout']
        has_residual = extra_options['autoint_has_residual']

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        check_feature_dims(feature_vectors)
        x = tf.stack(feature_vectors, axis=1)  # [B, N, D]
        y = _autoint(x,
                     num_blocks=num_blocks,
                     num_units=size_per_head*num_heads,
                     num_heads=num_heads,
                     dropout=dropout,
                     is_training=is_training,
                     has_residual=has_residual)

        tf.logging.info("autoint output = {}".format(y))
        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
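
_autoint stacks num_blocks interacting layers from AutoInt (Song et al., 2019): multi-head self-attention over the field axis with an optional residual projection. A sketch of one block:

import tensorflow.compat.v1 as tf


def autoint_block(x, num_units, num_heads, has_residual):
    """x: [B, N, D] -> [B, N, num_units]; num_units = size_per_head * num_heads."""
    q = tf.layers.dense(x, num_units, use_bias=False, name='q')
    k = tf.layers.dense(x, num_units, use_bias=False, name='k')
    v = tf.layers.dense(x, num_units, use_bias=False, name='v')
    # Split heads: [B*H, N, num_units / H]
    q_ = tf.concat(tf.split(q, num_heads, axis=2), axis=0)
    k_ = tf.concat(tf.split(k, num_heads, axis=2), axis=0)
    v_ = tf.concat(tf.split(v, num_heads, axis=2), axis=0)
    weights = tf.nn.softmax(tf.matmul(q_, k_, transpose_b=True))  # [B*H, N, N]
    out = tf.matmul(weights, v_)                               # [B*H, N, units/H]
    out = tf.concat(tf.split(out, num_heads, axis=0), axis=2)  # [B, N, units]
    if has_residual:
        out += tf.layers.dense(x, num_units, use_bias=False, name='res')
    return tf.nn.relu(out)
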
Example #12
File: fibinet.py Project: kiminh/EasyCTR
def get_fibinet_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):
    with tf.variable_scope('fibinet'):
        _check_fibinet_args(extra_options)
        use_shared_embedding = extra_options['fibinet_use_shared_embedding']
        use_project = extra_options['fibinet_use_project']
        project_size = extra_options['fibinet_project_size']
        hidden_units = extra_options['fibinet_hidden_units']
        activation_fn = extra_options['fibinet_activation_fn']
        dropout = extra_options['fibinet_dropout']
        batch_norm = extra_options['fibinet_batch_norm']
        layer_norm = extra_options['fibinet_layer_norm']
        use_resnet = extra_options['fibinet_use_resnet']
        use_densenet = extra_options['fibinet_use_densenet']
        use_se = extra_options['fibinet_use_se']
        use_deep = extra_options['fibinet_use_deep']
        interaction_type = extra_options['fibinet_interaction_type']
        se_interaction_type = extra_options['fibinet_se_interaction_type']
        se_use_shared_embedding = extra_options['fibinet_se_use_shared_embedding']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = shallow_fibinet(features=features,
                            feature_columns=feature_columns,
                            shared_feature_vectors=feature_vectors,
                            se_use_shared_embedding=se_use_shared_embedding,
                            use_project=use_project,
                            project_size=project_size,
                            interaction_type=interaction_type,
                            se_interaction_type=se_interaction_type,
                            use_se=use_se)  # [B, -1]
        if use_deep:
            y = add_hidden_layers(y,
                                  hidden_units=hidden_units,
                                  activation_fn=activation_fn,
                                  dropout=dropout,
                                  is_training=is_training,
                                  batch_norm=batch_norm,
                                  layer_norm=layer_norm,
                                  use_resnet=use_resnet,
                                  use_densenet=use_densenet,
                                  scope='hidden_layers')
            with tf.variable_scope('logits') as logits_scope:
                logits = fc(y, units, name=logits_scope)
                add_hidden_layer_summary(logits, logits_scope.name)
        else:
            assert units == 1, "shallow_fibinet's units must be 1"
            with tf.variable_scope('logits') as logits_scope:
                logits = tf.reduce_sum(y, axis=-1, keepdims=True)  # [B, 1]
                add_hidden_layer_summary(logits, logits_scope.name)

        return logits
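
The use_se flag presumably toggles the SENET reweighting from FiBiNET (Huang et al., 2019): squeeze each field embedding to a scalar, excite through a small bottleneck MLP, and rescale the fields before the bilinear interaction. A sketch (reduction_ratio is a hypothetical default):

import tensorflow.compat.v1 as tf


def senet_reweight(feature_vectors, reduction_ratio=3):
    """feature_vectors: list of N tensors [B, D] -> reweighted list."""
    n = len(feature_vectors)
    x = tf.stack(feature_vectors, axis=1)                 # [B, N, D]
    z = tf.reduce_mean(x, axis=2)                         # squeeze -> [B, N]
    a = tf.layers.dense(z, max(1, n // reduction_ratio),
                        activation=tf.nn.relu, name='se_reduce')
    a = tf.layers.dense(a, n, activation=tf.nn.relu,
                        name='se_expand')                 # excite -> [B, N]
    return tf.unstack(x * tf.expand_dims(a, axis=2), axis=1)
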
Example #13
File: dssm.py Project: kiminh/EasyCTR
        def _model_fn(features, labels, mode, config):
            head = head_lib._binary_logistic_or_multi_class_head(  # pylint: disable=protected-access
                n_classes, weight_column, label_vocabulary, loss_reduction,
                loss_fn)
            is_training = (mode == tf.estimator.ModeKeys.TRAIN)
            net_dssm1 = tf.feature_column.input_layer(features, dssm1_columns)
            net_dssm2 = tf.feature_column.input_layer(features, dssm2_columns)
            tf.logging.info("net_dssm1: {}".format(net_dssm1))
            tf.logging.info("net_dssm2: {}".format(net_dssm2))
            real_activation_fn = get_activation_fn(
                activation_fn=activation_fn,
                leaky_relu_alpha=leaky_relu_alpha,
                swish_beta=swish_beta)

            net_dssm1 = add_hidden_layers(inputs=net_dssm1,
                                          hidden_units=hidden_units,
                                          activation_fn=real_activation_fn,
                                          dropout=dropout,
                                          is_training=is_training,
                                          batch_norm=batch_norm,
                                          layer_norm=layer_norm,
                                          use_resnet=use_resnet,
                                          use_densenet=use_densenet,
                                          scope='dssm1')
            net_dssm2 = add_hidden_layers(inputs=net_dssm2,
                                          hidden_units=hidden_units,
                                          activation_fn=real_activation_fn,
                                          dropout=dropout,
                                          is_training=is_training,
                                          batch_norm=batch_norm,
                                          layer_norm=layer_norm,
                                          use_resnet=use_resnet,
                                          use_densenet=use_densenet,
                                          scope='dssm2')

            with tf.variable_scope('logits') as logits_scope:
                if dssm_mode == 'dot':
                    logits = tf.reduce_sum(net_dssm1 * net_dssm2,
                                           -1,
                                           keepdims=True)
                elif dssm_mode == 'concat':
                    logits = tf.concat([net_dssm1, net_dssm2], axis=1)
                    logits = tf.layers.dense(logits, units=1, activation=None)
                elif dssm_mode == 'cosine':
                    logits = tf.reduce_sum(net_dssm1 * net_dssm2,
                                           -1,
                                           keepdims=True)
                    norm1 = tf.norm(net_dssm1, axis=1, keepdims=True)
                    norm2 = tf.norm(net_dssm2, axis=1, keepdims=True)
                    logits = logits / (norm1 * norm2)
                else:
                    raise ValueError(
                        "unknown dssm mode '{}'".format(dssm_mode))
                add_hidden_layer_summary(logits, logits_scope.name)

            tf.logging.info("logits = {}".format(logits))

            return head.create_estimator_spec(
                features=features,
                mode=mode,
                labels=labels,
                optimizer=optimizers.get_optimizer_instance(
                    optimizer, learning_rate=_LEARNING_RATE),
                logits=logits)
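
One design note on the 'cosine' branch: it divides by norm1 * norm2, which produces NaNs if either tower can emit an all-zero vector. If that ever matters in practice, an equivalent formulation with built-in epsilon protection (a possible hardening, not what the snippet above does) is:

normalized1 = tf.nn.l2_normalize(net_dssm1, axis=1)
normalized2 = tf.nn.l2_normalize(net_dssm2, axis=1)
logits = tf.reduce_sum(normalized1 * normalized2, -1, keepdims=True)
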