# Example 1
def NFFM(
        feature_metas,
        biinteraction_mode='all',
        embedding_initializer='glorot_uniform',
        embedding_regularizer=tf.keras.regularizers.l2(1e-5),
        fm_fixed_embedding_dim=None,
        linear_use_bias=True,
        linear_kernel_initializer=tf.keras.initializers.RandomNormal(stddev=1e-4, seed=1024),
        linear_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
        dnn_hidden_units=(128, 64, 1),
        dnn_activations=('relu', 'relu', None),
        dnn_use_bias=True,
        dnn_use_bn=False,
        dnn_dropout=0,
        dnn_kernel_initializers='glorot_uniform',
        dnn_bias_initializers='zeros',
        dnn_kernel_regularizers=tf.keras.regularizers.l2(1e-5),
        dnn_bias_regularizers=None,
        name='NFFM'):
    """Build an NFFM-style Keras model.

    Combines a first-order linear logit with a DNN applied to
    bi-interaction-pooled field embeddings, then applies a sigmoid.

    Args:
        feature_metas: a `FeatureMetas` instance describing all input slots.
        biinteraction_mode: mode passed through to `BiInteraction`.
        fm_fixed_embedding_dim: if set, forces one embedding dim for all slots.
        Remaining arguments configure the linear kernel and the DNN tower.

    Returns:
        A compiled-ready `tf.keras.Model` producing a sigmoid output.
    """
    assert isinstance(feature_metas, FeatureMetas)

    with tf.name_scope(name):

        feats = Features(metas=feature_metas)

        # First-order (linear) logit over every slot.
        with tf.name_scope('Linear'):
            linear_logit = feats.get_linear_logit(
                use_bias=linear_use_bias,
                kernel_initializer=linear_kernel_initializer,
                kernel_regularizer=linear_kernel_regularizer,
                embedding_group='dot_embedding',
                slots_filter=None)

        # Second-order part: bi-interaction pooling per embedding-dimension
        # group, concatenated and fed through the DNN tower.
        with tf.name_scope('Interaction'):
            embedded = feats.get_embedded_dict(
                group_name='embedding',
                fixed_embedding_dim=fm_fixed_embedding_dim,
                embedding_initializer=embedding_initializer,
                embedding_regularizer=embedding_regularizer,
                slots_filter=None
            )
            dim_groups = group_embedded_by_dim(embedded)
            pooled = [BiInteraction(mode=biinteraction_mode)(group)
                      for group in dim_groups.values()]

            dnn_logit = DNN(
                units=dnn_hidden_units,
                use_bias=dnn_use_bias,
                activations=dnn_activations,
                use_bn=dnn_use_bn,
                dropout=dnn_dropout,
                kernel_initializers=dnn_kernel_initializers,
                bias_initializers=dnn_bias_initializers,
                kernel_regularizers=dnn_kernel_regularizers,
                bias_regularizers=dnn_bias_regularizers
            )(tf.concat(pooled, axis=1))

        # Sum both logits and squash to a probability.
        prediction = tf.keras.activations.sigmoid(
            tf.add_n([linear_logit, dnn_logit]))

        return tf.keras.Model(inputs=feats.get_inputs_list(),
                              outputs=prediction)
# Example 2
def AFM(feature_metas,
        linear_slots,
        fm_slots,
        embedding_initializer='glorot_uniform',
        embedding_regularizer=tf.keras.regularizers.l2(1e-5),
        fm_fixed_embedding_dim=None,
        linear_use_bias=True,
        linear_kernel_initializer=tf.keras.initializers.RandomNormal(
            stddev=1e-4, seed=1024),
        linear_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
        dnn_hidden_units=(128, 64, 1),
        dnn_activations=('relu', 'relu', None),
        dnn_use_bias=True,
        dnn_use_bn=False,
        dnn_dropout=0,
        dnn_kernel_initializers='glorot_uniform',
        dnn_bias_initializers='zeros',
        dnn_kernel_regularizers=tf.keras.regularizers.l2(1e-5),
        dnn_bias_regularizers=None,
        name='AFM'):
    """Build an Attentional Factorization Machine (AFM) Keras model.

    A first-order linear logit over `linear_slots` is summed with a DNN
    applied to attention-pooled pairwise interactions over `fm_slots`,
    and the sum is passed through a sigmoid.

    Args:
        feature_metas: a `FeatureMetas` instance describing all input slots.
        linear_slots: slots used by the linear part.
        fm_slots: slots used by the attention-based interaction part.
        fm_fixed_embedding_dim: if set, forces one embedding dim for all slots.
        Remaining arguments configure the linear kernel and the DNN tower.

    Returns:
        A `tf.keras.Model` producing a sigmoid output.
    """
    assert isinstance(feature_metas, FeatureMetas)

    with tf.name_scope(name):

        features = Features(metas=feature_metas)

        # Linear Part
        with tf.name_scope('Linear'):
            linear_output = features.get_linear_logit(
                use_bias=linear_use_bias,
                kernel_initializer=linear_kernel_initializer,
                kernel_regularizer=linear_kernel_regularizer,
                embedding_group='dot_embedding',
                slots_filter=linear_slots)

        # Interaction
        with tf.name_scope('Interaction'):
            fm_embedded_dict = features.get_embedded_dict(
                group_name='embedding',
                fixed_embedding_dim=fm_fixed_embedding_dim,
                embedding_initializer=embedding_initializer,
                embedding_regularizer=embedding_regularizer,
                slots_filter=fm_slots)
            fm_dim_groups = group_embedded_by_dim(fm_embedded_dict)

            # Attention pooling needs at least two fields in a group to
            # form a pairwise interaction; skip singleton groups.
            fms = [
                AttentionBasedPoolingLayer()(group)
                for group in fm_dim_groups.values() if len(group) > 1
            ]
            dnn_inputs = tf.concat(fms, axis=1)
            dnn_output = DNN(
                units=dnn_hidden_units,
                use_bias=dnn_use_bias,
                activations=dnn_activations,
                use_bn=dnn_use_bn,
                dropout=dnn_dropout,
                kernel_initializers=dnn_kernel_initializers,
                bias_initializers=dnn_bias_initializers,
                kernel_regularizers=dnn_kernel_regularizers,
                bias_regularizers=dnn_bias_regularizers)(dnn_inputs)

        # Output: sum the logits and squash to a probability.
        output = tf.add_n([linear_output, dnn_output])
        output = tf.keras.activations.sigmoid(output)

        model = tf.keras.Model(inputs=features.get_inputs_list(),
                               outputs=output)

        return model
# Example 3
def WideAndDeep(
        feature_metas,
        wide_slots,
        deep_slots,
        embedding_initializer=tf.keras.initializers.RandomNormal(mean=0.0, stddev=1e-4),
        embedding_regularizer=tf.keras.regularizers.l2(1e-5),
        wide_use_bias=True,
        wide_kernel_initializer=tf.keras.initializers.RandomNormal(stddev=1e-4, seed=1024),
        wide_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
        deep_fixed_embedding_dim=None,
        deep_hidden_units=(128, 64, 1),
        deep_activations=('relu', 'relu', None),
        deep_use_bias=True,
        deep_use_bn=False,
        deep_dropout=0,
        deep_kernel_initializers='glorot_uniform',
        deep_bias_initializers='zeros',
        deep_kernel_regularizers=tf.keras.regularizers.l2(1e-5),
        deep_bias_regularizers=None,
        name='Wide&Deep'):
    """Build a Wide & Deep Keras model.

    The wide side is a linear logit over `wide_slots`; the deep side is a
    DNN over concatenated embeddings of `deep_slots`. The two logits are
    summed and passed through a sigmoid.

    Args:
        feature_metas: a `FeatureMetas` instance describing all input slots.
        wide_slots: slots fed to the wide (linear) part.
        deep_slots: slots fed to the deep (DNN) part.
        deep_fixed_embedding_dim: if set, forces one embedding dim for all slots.
        Remaining arguments configure the wide kernel and the DNN tower.

    Returns:
        A `tf.keras.Model` producing a sigmoid output.
    """
    assert isinstance(feature_metas, FeatureMetas)

    with tf.name_scope(name):

        feats = Features(metas=feature_metas)

        # Wide side: memorization via a first-order linear logit.
        with tf.name_scope('Wide'):
            wide_logit = feats.get_linear_logit(
                embedding_group='dot_embedding',
                use_bias=wide_use_bias,
                kernel_initializer=wide_kernel_initializer,
                kernel_regularizer=wide_kernel_regularizer,
                slots_filter=wide_slots)

        # Deep side: generalization via a DNN over concatenated embeddings.
        with tf.name_scope('Deep'):
            deep_features = feats.gen_concated_feature(
                embedding_group='embedding',
                fixed_embedding_dim=deep_fixed_embedding_dim,
                embedding_initializer=embedding_initializer,
                embedding_regularizer=embedding_regularizer,
                slots_filter=deep_slots)
            deep_logit = DNN(
                units=deep_hidden_units,
                use_bias=deep_use_bias,
                activations=deep_activations,
                use_bn=deep_use_bn,
                dropout=deep_dropout,
                kernel_initializers=deep_kernel_initializers,
                bias_initializers=deep_bias_initializers,
                kernel_regularizers=deep_kernel_regularizers,
                bias_regularizers=deep_bias_regularizers
            )(deep_features)

        # Sum both logits and squash to a probability.
        prediction = tf.keras.activations.sigmoid(
            tf.add_n([wide_logit, deep_logit]))

        return tf.keras.Model(inputs=feats.get_inputs_list(),
                              outputs=prediction)
# Example 4
def DeepFM(
        feature_metas,
        linear_slots,
        fm_slots,
        dnn_slots,
        embedding_initializer='glorot_uniform',
        embedding_regularizer=tf.keras.regularizers.l2(1e-5),
        fm_fixed_embedding_dim=None,
        linear_use_bias=True,
        # Kernel weights drawn from a normal distribution (fixed seed).
        linear_kernel_initializer=tf.keras.initializers.RandomNormal(
            stddev=1e-4, seed=1024),
        linear_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
        dnn_hidden_units=(128, 64, 1),
        dnn_activations=('relu', 'relu', None),
        dnn_use_bias=True,
        dnn_use_bn=False,
        dnn_dropout=0,
        dnn_kernel_initializers='glorot_uniform',
        dnn_bias_initializers='zeros',
        dnn_kernel_regularizers=tf.keras.regularizers.l2(1e-5),
        dnn_bias_regularizers=None,
        name='DeepFM'):
    """Build a DeepFM Keras model.

    Sums three logits — a linear part over `linear_slots`, an FM part
    over `fm_slots`, and a DNN part over `dnn_slots` — then applies a
    sigmoid.

    Args:
        feature_metas: a `FeatureMetas` instance describing all input slots.
        linear_slots: slots used by the linear part.
        fm_slots: slots used by the FM (second-order) part.
        dnn_slots: slots used by the DNN part.
        fm_fixed_embedding_dim: if set, forces one embedding dim for all slots.
        Remaining arguments configure the linear kernel and the DNN tower.

    Returns:
        A `tf.keras.Model` producing a sigmoid output.
    """
    # Reject anything that is not a FeatureMetas up front.
    assert isinstance(feature_metas, FeatureMetas)

    with tf.name_scope(name):

        feats = Features(metas=feature_metas)

        # First-order (linear) logit.
        with tf.name_scope('Linear'):
            linear_logit = feats.get_linear_logit(
                use_bias=linear_use_bias,
                kernel_initializer=linear_kernel_initializer,
                kernel_regularizer=linear_kernel_regularizer,
                embedding_group='dot_embedding',
                slots_filter=linear_slots)

        # Second-order FM logit, computed per embedding-dimension group.
        with tf.name_scope('FM'):
            embedded = feats.get_embedded_dict(
                group_name='embedding',
                fixed_embedding_dim=fm_fixed_embedding_dim,
                embedding_initializer=embedding_initializer,
                embedding_regularizer=embedding_regularizer,
                slots_filter=fm_slots)
            dim_groups = group_embedded_by_dim(embedded)
            # A pairwise interaction needs at least two fields in a group.
            group_logits = [FM()(group) for group in dim_groups.values()
                            if len(group) > 1]
            fm_logit = tf.add_n(group_logits)

        # Deep logit over the concatenated embeddings of dnn_slots.
        with tf.name_scope('DNN'):
            dnn_features = feats.gen_concated_feature(
                embedding_group='embedding',
                fixed_embedding_dim=fm_fixed_embedding_dim,
                embedding_initializer=embedding_initializer,
                embedding_regularizer=embedding_regularizer,
                slots_filter=dnn_slots)
            dnn_logit = DNN(
                units=dnn_hidden_units,
                use_bias=dnn_use_bias,
                activations=dnn_activations,
                use_bn=dnn_use_bn,
                dropout=dnn_dropout,
                kernel_initializers=dnn_kernel_initializers,
                bias_initializers=dnn_bias_initializers,
                kernel_regularizers=dnn_kernel_regularizers,
                bias_regularizers=dnn_bias_regularizers)(dnn_features)

        # Sum all three logits and squash to a probability.
        prediction = tf.keras.activations.sigmoid(
            tf.add_n([linear_logit, fm_logit, dnn_logit]))

        return tf.keras.Model(inputs=feats.get_inputs_list(),
                              outputs=prediction)