Example #1
File: ipnn.py Project: kiminh/EasyCTR
def get_ipnn_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):

    with tf.variable_scope('ipnn'):
        _check_ipnn_args(extra_options)
        use_shared_embedding = extra_options['ipnn_use_shared_embedding']
        use_project = extra_options['ipnn_use_project']
        project_size = extra_options['ipnn_project_size']
        hidden_units = extra_options['ipnn_hidden_units']
        activation_fn = extra_options['ipnn_activation_fn']
        dropout = extra_options['ipnn_dropout']
        batch_norm = extra_options['ipnn_batch_norm']
        layer_norm = extra_options['ipnn_layer_norm']
        use_resnet = extra_options['ipnn_use_resnet']
        use_densenet = extra_options['ipnn_use_densenet']
        unordered_inner_product = extra_options['ipnn_unordered_inner_product']
        concat_project = extra_options['ipnn_concat_project']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)
        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        project_feature_vectors = None
        if use_project:
            project_feature_vectors = project(feature_vectors, project_size)

        y = _ipnn(feature_vectors=feature_vectors,
                  project_feature_vectors=project_feature_vectors,
                  use_project=use_project,
                  units=units,
                  hidden_units=hidden_units,
                  activation_fn=activation_fn,
                  dropout=dropout,
                  batch_norm=batch_norm,
                  layer_norm=layer_norm,
                  use_resnet=use_resnet,
                  use_densenet=use_densenet,
                  is_training=is_training,
                  unordered_inner_product=unordered_inner_product,
                  concat_project=concat_project)

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
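This entry point, like the other get_*_logits functions below, follows a fixed pattern: pull model-specific keys out of extra_options, resolve the activation via get_activation_fn, build fresh embedding vectors or reuse the shared ones, optionally project them, run the model body, and finish with a single fc logits layer. As a minimal sketch of the calling convention, the snippet below assembles an extra_options dict for get_ipnn_logits; the key names are taken from the function body above, but the values and the features/feature_columns inputs are illustrative assumptions, not EasyCTR defaults.

# Sketch only: key names come from get_ipnn_logits above; the values,
# features and feature_columns are illustrative assumptions.
extra_options = {
    'ipnn_use_shared_embedding': False,
    'ipnn_use_project': False,
    'ipnn_project_size': 32,            # ignored when use_project is False
    'ipnn_hidden_units': [256, 128],
    'ipnn_activation_fn': 'relu',
    'ipnn_dropout': 0.0,
    'ipnn_batch_norm': False,
    'ipnn_layer_norm': False,
    'ipnn_use_resnet': False,
    'ipnn_use_densenet': False,
    'ipnn_unordered_inner_product': False,
    'ipnn_concat_project': False,
    'leaky_relu_alpha': 0.2,
    'swish_beta': 1.0,
}
logits = get_ipnn_logits(
    features=features,                  # dict of input tensors
    feature_columns=feature_columns,    # tf.feature_column definitions
    shared_feature_vectors=None,        # unused when use_shared_embedding is False
    units=1,                            # one logit for binary CTR
    is_training=True,
    extra_options=extra_options)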
Example #2
def get_ccpm_logits(features, feature_columns, shared_feature_vectors, units,
                    is_training, extra_options):

    with tf.variable_scope('ccpm'):
        _check_ccpm_args(extra_options)
        use_shared_embedding = extra_options['ccpm_use_shared_embedding']
        use_project = extra_options['ccpm_use_project']
        project_size = extra_options['ccpm_project_size']
        hidden_units = extra_options['ccpm_hidden_units']
        activation_fn = extra_options['ccpm_activation_fn']
        dropout = extra_options['ccpm_dropout']
        batch_norm = extra_options['ccpm_batch_norm']
        layer_norm = extra_options['ccpm_layer_norm']
        use_resnet = extra_options['ccpm_use_resnet']
        use_densenet = extra_options['ccpm_use_densenet']
        kernel_sizes = extra_options['ccpm_kernel_sizes']
        filter_nums = extra_options['ccpm_filter_nums']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = _build_ccpm_model(feature_vectors=feature_vectors,
                              kernel_sizes=kernel_sizes,
                              filter_nums=filter_nums,
                              hidden_units=hidden_units,
                              activation_fn=activation_fn,
                              dropout=dropout,
                              is_training=is_training,
                              batch_norm=batch_norm,
                              layer_norm=layer_norm,
                              use_resnet=use_resnet,
                              use_densenet=use_densenet)

        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units=units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
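Here kernel_sizes and filter_nums read as parallel lists, one entry per convolutional layer that _build_ccpm_model stacks over the field dimension. The snippet below is a rough sketch of that kind of conv stack under this reading, not EasyCTR's actual _build_ccpm_model (the CCPM paper additionally interleaves flexible p-max pooling between convolutions, which this sketch omits).

import tensorflow as tf  # TF 1.x

# Illustrative conv stack over the field axis (not EasyCTR's internals).
# feature_vectors: list of [B, dim] field embeddings, as in the examples above.
x = tf.stack(feature_vectors, axis=1)        # [B, num_fields, dim]
for i, (k, f) in enumerate(zip(kernel_sizes, filter_nums)):
    x = tf.layers.conv1d(x, filters=f, kernel_size=k, padding='same',
                         activation=tf.nn.relu, name='conv%d' % i)
y = tf.layers.flatten(x)                     # flattened, then fed to the MLP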
Example #3
def get_nfm_logits(features, feature_columns, shared_feature_vectors, units,
                   is_training, extra_options):

    with tf.variable_scope('nfm'):
        _check_nfm_args(extra_options)
        use_shared_embedding = extra_options['nfm_use_shared_embedding']
        use_project = extra_options['nfm_use_project']
        project_size = extra_options['nfm_project_size']
        hidden_units = extra_options['nfm_hidden_units']
        activation_fn = extra_options['nfm_activation_fn']
        dropout = extra_options['nfm_dropout']
        batch_norm = extra_options['nfm_batch_norm']
        layer_norm = extra_options['nfm_layer_norm']
        use_resnet = extra_options['nfm_use_resnet']
        use_densenet = extra_options['nfm_use_densenet']

        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']

        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        # Neural FM
        y = _fm(feature_vectors, reduce_sum=False)
        y = add_hidden_layers(y,
                              hidden_units=hidden_units,
                              activation_fn=activation_fn,
                              dropout=dropout,
                              is_training=is_training,
                              batch_norm=batch_norm,
                              layer_norm=layer_norm,
                              use_resnet=use_resnet,
                              use_densenet=use_densenet,
                              scope='hidden_layers')
        with tf.variable_scope('logits') as logits_scope:
            logits = fc(y, units, name=logits_scope)
            add_hidden_layer_summary(logits, logits_scope.name)

        return logits
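The _fm(feature_vectors, reduce_sum=False) call is NFM's bi-interaction pooling: it keeps the element-wise pairwise interactions as a [B, dim] vector instead of summing them down to a scalar, and feeds that vector into the MLP. A sketch of the standard identity this relies on (not EasyCTR's actual _fm):

import tensorflow as tf  # TF 1.x

# Bi-interaction pooling: sum_{i<j} v_i * v_j = 0.5 * ((sum v)^2 - sum v^2),
# computed element-wise. feature_vectors: list of [B, dim] field embeddings.
stacked = tf.stack(feature_vectors, axis=1)             # [B, F, dim]
sum_square = tf.square(tf.reduce_sum(stacked, axis=1))  # (sum_i v_i)^2
square_sum = tf.reduce_sum(tf.square(stacked), axis=1)  # sum_i v_i^2
y = 0.5 * (sum_square - square_sum)                     # [B, dim]; reduce_sum=True
                                                        # would also sum over dim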
Example #4
File: fibinet.py Project: kiminh/EasyCTR
def get_fibinet_logits(
        features,
        feature_columns,
        shared_feature_vectors,
        units,
        is_training,
        extra_options):
    with tf.variable_scope('fibinet'):
        _check_fibinet_args(extra_options)
        use_shared_embedding = extra_options['fibinet_use_shared_embedding']
        use_project = extra_options['fibinet_use_project']
        project_size = extra_options['fibinet_project_size']
        hidden_units = extra_options['fibinet_hidden_units']
        activation_fn = extra_options['fibinet_activation_fn']
        dropout = extra_options['fibinet_dropout']
        batch_norm = extra_options['fibinet_batch_norm']
        layer_norm = extra_options['fibinet_layer_norm']
        use_resnet = extra_options['fibinet_use_resnet']
        use_densenet = extra_options['fibinet_use_densenet']
        use_se = extra_options['fibinet_use_se']
        use_deep = extra_options['fibinet_use_deep']
        interaction_type = extra_options['fibinet_interaction_type']
        se_interaction_type = extra_options['fibinet_se_interaction_type']
        se_use_shared_embedding = extra_options['fibinet_se_use_shared_embedding']
        leaky_relu_alpha = extra_options['leaky_relu_alpha']
        swish_beta = extra_options['swish_beta']
        activation_fn = get_activation_fn(activation_fn=activation_fn,
                                          leaky_relu_alpha=leaky_relu_alpha,
                                          swish_beta=swish_beta)

        if not use_shared_embedding:
            feature_vectors = get_feature_vectors(features, feature_columns)
        else:
            feature_vectors = shared_feature_vectors

        if use_project:
            feature_vectors = project(feature_vectors, project_size)

        y = shallow_fibinet(features=features,
                            feature_columns=feature_columns,
                            shared_feature_vectors=feature_vectors,
                            se_use_shared_embedding=se_use_shared_embedding,
                            use_project=use_project,
                            project_size=project_size,
                            interaction_type=interaction_type,
                            se_interaction_type=se_interaction_type,
                            use_se=use_se)  # [B, -1]
        if use_deep:
            y = add_hidden_layers(y,
                                  hidden_units=hidden_units,
                                  activation_fn=activation_fn,
                                  dropout=dropout,
                                  is_training=is_training,
                                  batch_norm=batch_norm,
                                  layer_norm=layer_norm,
                                  use_resnet=use_resnet,
                                  use_densenet=use_densenet,
                                  scope='hidden_layers')
            with tf.variable_scope('logits') as logits_scope:
                logits = fc(y, units, name=logits_scope)
                add_hidden_layer_summary(logits, logits_scope.name)
        else:
            assert units == 1, "shallow_fibinet's units must be 1"
            with tf.variable_scope('logits') as logits_scope:
                logits = tf.reduce_sum(y, axis=-1, keepdims=True)  # [B, 1]
                add_hidden_layer_summary(logits, logits_scope.name)

        return logits
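use_se switches on FiBiNET's SENET-style recalibration of the field embeddings before the bilinear interaction. The sketch below shows the squeeze-and-excitation idea over fields; it is illustrative only, since shallow_fibinet's internals are not shown above, and reduction_ratio as well as the ReLU activations are assumptions.

import tensorflow as tf  # TF 1.x

# SENET-style field reweighting (illustrative sketch, not shallow_fibinet).
# feature_vectors: list of [B, dim] field embeddings; reduction_ratio is
# a hypothetical hyperparameter.
stacked = tf.stack(feature_vectors, axis=1)               # [B, F, dim]
num_fields = stacked.shape[1].value
z = tf.reduce_mean(stacked, axis=-1)                      # squeeze: [B, F]
a = tf.layers.dense(z, units=max(1, num_fields // reduction_ratio),
                    activation=tf.nn.relu)                # excitation, step 1
a = tf.layers.dense(a, units=num_fields,
                    activation=tf.nn.relu)                # field weights: [B, F]
reweighted = stacked * tf.expand_dims(a, -1)              # rescaled embeddings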
Example #5
File: dssm.py Project: kiminh/EasyCTR
        def _model_fn(features, labels, mode, config):
            head = head_lib._binary_logistic_or_multi_class_head(  # pylint: disable=protected-access
                n_classes, weight_column, label_vocabulary, loss_reduction,
                loss_fn)
            is_training = (mode == tf.estimator.ModeKeys.TRAIN)
            net_dssm1 = tf.feature_column.input_layer(features, dssm1_columns)
            net_dssm2 = tf.feature_column.input_layer(features, dssm2_columns)
            tf.logging.info("net_dssm1: {}".format(net_dssm1))
            tf.logging.info("net_dssm2: {}".format(net_dssm2))
            real_activation_fn = get_activation_fn(
                activation_fn=activation_fn,
                leaky_relu_alpha=leaky_relu_alpha,
                swish_beta=swish_beta)

            net_dssm1 = add_hidden_layers(inputs=net_dssm1,
                                          hidden_units=hidden_units,
                                          activation_fn=real_activation_fn,
                                          dropout=dropout,
                                          is_training=is_training,
                                          batch_norm=batch_norm,
                                          layer_norm=layer_norm,
                                          use_resnet=use_resnet,
                                          use_densenet=use_densenet,
                                          scope='dssm1')
            net_dssm2 = add_hidden_layers(inputs=net_dssm2,
                                          hidden_units=hidden_units,
                                          activation_fn=real_activation_fn,
                                          dropout=dropout,
                                          is_training=is_training,
                                          batch_norm=batch_norm,
                                          layer_norm=layer_norm,
                                          use_resnet=use_resnet,
                                          use_densenet=use_densenet,
                                          scope='dssm2')

            with tf.variable_scope('logits') as logits_scope:
                if dssm_mode == 'dot':
                    logits = tf.reduce_sum(net_dssm1 * net_dssm2,
                                           -1,
                                           keepdims=True)
                elif dssm_mode == 'concat':
                    logits = tf.concat([net_dssm1, net_dssm2], axis=1)
                    logits = tf.layers.dense(logits, units=1, activation=None)
                elif dssm_mode == 'cosine':
                    logits = tf.reduce_sum(net_dssm1 * net_dssm2,
                                           -1,
                                           keepdims=True)
                    norm1 = tf.norm(net_dssm1, axis=1, keepdims=True)
                    norm2 = tf.norm(net_dssm2, axis=1, keepdims=True)
                    logits = logits / (norm1 * norm2)
                else:
                    raise ValueError(
                        "unknown dssm mode '{}'".format(dssm_mode))
                add_hidden_layer_summary(logits, logits_scope.name)

            tf.logging.info("logits = {}".format(logits))

            return head.create_estimator_spec(
                features=features,
                mode=mode,
                labels=labels,
                optimizer=optimizers.get_optimizer_instance(
                    optimizer, learning_rate=_LEARNING_RATE),
                logits=logits)
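One caveat on the 'cosine' branch: it divides by norm1 * norm2 directly, which yields NaNs when either tower output is all zeros. A common guard, shown here as an assumption rather than what dssm.py actually does, is to clamp the denominator:

# Guarded cosine similarity (sketch; the code above divides directly).
logits = tf.reduce_sum(net_dssm1 * net_dssm2, -1, keepdims=True)
norm1 = tf.norm(net_dssm1, axis=1, keepdims=True)
norm2 = tf.norm(net_dssm2, axis=1, keepdims=True)
logits = logits / tf.maximum(norm1 * norm2, 1e-12)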