Example #1
 def _replace_attr_with_edl_embedding(model):
     """Replace the keras embedding attributes in the model with
     `elasticdl.layers.Embedding` layers.
     """
     for name, value in model.__dict__.items():
         if type(value) == tf.keras.layers.Embedding:
             logger.info("Replace {} layer with "
                         "elasticdl.layers.Embedding".format(value))
             initializer_name = tf.keras.initializers.serialize(
                 value.embeddings_initializer)["class_name"]
             embedding_layer = Embedding(
                 output_dim=value.output_dim,
                 input_dim=value.input_dim,
                 embeddings_initializer=initializer_name,
                 mask_zero=value.mask_zero,
                 input_length=value.input_length,
             )
             setattr(model, name, embedding_layer)
         elif type(value) == SparseEmbedding:
             logger.info("Replace {} layer with "
                         "elasticdl.layers.Embedding".format(value))
             # Serialize this layer's own initializer; otherwise the name
             # from the previous branch would be reused (or be undefined).
             initializer_name = tf.keras.initializers.serialize(
                 value.embeddings_initializer)["class_name"]
             embedding_layer = Embedding(
                 output_dim=value.output_dim,
                 input_dim=value.input_dim,
                 embeddings_initializer=initializer_name,
                 combiner=value.combiner,
             )
             setattr(model, name, embedding_layer)
     return model
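
A minimal usage sketch for this helper, assuming `Embedding` is the ElasticDL layer the log messages reference; `MyModel` is a hypothetical subclassed model introduced only for illustration:

import tensorflow as tf

class MyModel(tf.keras.Model):  # hypothetical model, not from the source
    def __init__(self):
        super(MyModel, self).__init__()
        self.embedding = tf.keras.layers.Embedding(input_dim=100, output_dim=8)
        self.dense = tf.keras.layers.Dense(1)

    def call(self, inputs):
        return self.dense(tf.reduce_mean(self.embedding(inputs), axis=1))

model = _replace_attr_with_edl_embedding(MyModel())
# model.embedding is now an elasticdl.layers.Embedding instance.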
Example #2
        def _clone_function(layer):
            if type(layer) in [tf.keras.layers.Embedding, SparseEmbedding]:
                logger.debug("Replace {} with {}".format(
                    layer.name, Embedding))
                # The ElasticDL embedding layer only accepts a
                # string-type initializer.
                init = tf.keras.initializers.serialize(
                    layer.embeddings_initializer)["class_name"]

                if type(layer) == tf.keras.layers.Embedding:
                    embedding_layer = Embedding(
                        output_dim=layer.output_dim,
                        input_dim=layer.input_dim,
                        embeddings_initializer=init,
                        mask_zero=layer.mask_zero,
                        input_length=layer.input_length,
                        name=layer.name,
                    )
                else:
                    embedding_layer = Embedding(
                        output_dim=layer.output_dim,
                        input_dim=layer.input_dim,
                        embeddings_initializer=init,
                        name=layer.name,
                        combiner=layer.combiner,
                    )
                return embedding_layer
            return layer
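
This clone function is evidently meant to be handed to `tf.keras.models.clone_model`, which rebuilds a functional model layer by layer. A sketch of that call, with `keras_model` standing in as a hypothetical functional model that contains `tf.keras.layers.Embedding` layers:

edl_model = tf.keras.models.clone_model(
    keras_model, clone_function=_clone_function
)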
Example #3
        def _clone_function(layer):
            if type(layer) in [
                    tf.keras.layers.Embedding,
                    SparseEmbedding,
            ] and _need_partition_embedding(layer):
                logger.debug("Replace {} with {}".format(
                    layer.name, Embedding))
                # The ElasticDL embedding layer only accepts a
                # string-type initializer.
                init = tf.keras.initializers.serialize(
                    layer.embeddings_initializer)["class_name"]

                if type(layer) == tf.keras.layers.Embedding:
                    embedding_layer = Embedding(
                        output_dim=layer.output_dim,
                        input_dim=layer.input_dim,
                        embeddings_initializer=init,
                        mask_zero=layer.mask_zero,
                        input_length=layer.input_length,
                        name=layer.name,
                    )
                else:
                    embedding_layer = Embedding(
                        output_dim=layer.output_dim,
                        input_dim=layer.input_dim,
                        embeddings_initializer=init,
                        name=layer.name,
                        combiner=layer.combiner,
                    )
                embedding_layer.set_embedding_weight_name(
                    layer.trainable_weights[0].name)
                return embedding_layer
            elif type(layer) == tf.keras.layers.DenseFeatures:
                return _replace_tf_embedding_column_with_edl(layer)
            return layer
Example #4
def custom_model(
    input_dim=5383, embedding_dim=64, input_length=10, fc_unit=64
):
    inputs = tf.keras.Input(shape=(input_length,))
    embed_layer = Embedding(
        output_dim=embedding_dim, mask_zero=True, input_length=input_length
    )
    embeddings = embed_layer(inputs)
    embeddings = ApplyMask()(embeddings)

    emb_sum = K.sum(embeddings, axis=1)
    emb_sum_square = K.square(emb_sum)
    emb_square = K.square(embeddings)
    emb_square_sum = K.sum(emb_square, axis=1)
    second_order = K.sum(
        0.5 * Subtract()([emb_sum_square, emb_square_sum]), axis=1
    )

    id_bias = Embedding(output_dim=1, mask_zero=True)(inputs)
    id_bias = ApplyMask()(id_bias)
    first_order = K.sum(id_bias, axis=(1, 2))
    fm_output = tf.keras.layers.Add()([first_order, second_order])

    nn_input = Flatten()(embeddings)
    nn_h = Dense(fc_unit)(nn_input)
    deep_output = Dense(1)(nn_h)
    deep_output = tf.reshape(deep_output, shape=(-1,))
    logits = tf.keras.layers.Add()([fm_output, deep_output])
    probs = tf.reshape(tf.sigmoid(logits), shape=(-1, 1))

    m = tf.keras.Model(
        inputs=inputs, outputs={"logits": logits, "probs": probs}
    )
    return m
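
The `second_order` term uses the standard factorization-machine identity: the sum of pairwise inner products equals half of the squared sum minus the sum of squares. A quick numpy check of that identity, with assumed shapes (batch 2, 3 fields, embedding dim 4):

import numpy as np

emb = np.random.rand(2, 3, 4)  # (batch, fields, embedding_dim)
# Pairwise interactions computed directly: sum_{i<j} <v_i, v_j>.
direct = sum(
    (emb[:, i] * emb[:, j]).sum(axis=1)
    for i in range(3)
    for j in range(i + 1, 3)
)
# The trick used above: 0.5 * ((sum v)^2 - sum v^2), summed over dims.
trick = 0.5 * ((emb.sum(axis=1) ** 2) - (emb ** 2).sum(axis=1)).sum(axis=1)
np.testing.assert_allclose(direct, trick)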
Example #5
 def __init__(self, output_dim=16):
     super(CustomModel, self).__init__(name="embedding_test_model")
     self.output_dim = output_dim
     self.embedding_1 = Embedding(output_dim)
     self.embedding_2 = Embedding(output_dim)
     self.concat = Concatenate()
     self.dense = Dense(1, input_shape=(output_dim * 3,))
     self.flatten = Flatten()
Example #6
 def _replace_attr_with_edl_embedding(model):
     """Replace the keras embedding attributes in the model with
     `elasticdl.layers.Embedding` layers.
     """
     for name, value in model.__dict__.items():
         if type(
             value
         ) == tf.keras.layers.Embedding and _need_partition_embedding(
             value
         ):
             logger.info(
                 "Replace {} layer with "
                 "elasticdl.layers.Embedding".format(value)
             )
             initializer_name = tf.keras.initializers.serialize(
                 value.embeddings_initializer
             )["class_name"]
             embedding_layer = Embedding(
                 output_dim=value.output_dim,
                 input_dim=value.input_dim,
                 embeddings_initializer=initializer_name,
                 mask_zero=value.mask_zero,
                 input_length=value.input_length,
                 name=value.name,
             )
             # The weights of a subclassed model are None before it is
             # built, so we construct the weight name, which is
             # "{layer_name}/embeddings:0" in tf.keras.layers.Embedding.
             embedding_layer.set_embedding_weight_name(
                 value.name + "/embeddings:0"
             )
             setattr(model, name, embedding_layer)
         elif type(value) == SparseEmbedding and _need_partition_embedding(
             value
         ):
             logger.info(
                 "Replace {} layer with "
                 "elasticdl.layers.Embedding".format(value)
             )
             # Serialize this layer's own initializer; the name computed in
             # the branch above may be undefined or stale here.
             initializer_name = tf.keras.initializers.serialize(
                 value.embeddings_initializer
             )["class_name"]
             embedding_layer = Embedding(
                 output_dim=value.output_dim,
                 input_dim=value.input_dim,
                 embeddings_initializer=initializer_name,
                 combiner=value.combiner,
                 name=value.name,
             )
             embedding_layer.set_embedding_weight_name(
                 value.name + "/embeddings:0"
             )
             setattr(model, name, embedding_layer)
         elif type(value) == tf.keras.layers.DenseFeatures:
             feature_layer = _replace_tf_embedding_column_with_edl(value)
             setattr(model, name, feature_layer)
     return model
Example #7
    def __init__(self, output_dim=16, weights=None):
        """
        Arguments:
            output_dim: An integer, the output dimension of the embedding
                layers in `EdlEmbeddingModel`.
            weights: A list of numpy ndarrays. If `weights` is not None,
                the dense layer initializes its weights from `weights`.
        """
        super(EdlEmbeddingModel, self).__init__(name="EdlEmbeddingModel")
        self.output_dim = output_dim
        if weights:
            if len(weights) != 2:
                raise ValueError(
                    "EdlEmbeddingModel constructor receives weights with "
                    "length %d, expected %d" % (len(weights), 2))

        self.embedding_1 = Embedding(output_dim)
        self.embedding_2 = Embedding(output_dim)
        self.concat = Concatenate()
        self.dense = Dense(1, weights=weights)
        self.flatten = Flatten()
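
A usage sketch for the `weights` argument. The kernel shape below assumes the dense layer consumes the concatenation of three `output_dim`-wide tensors, as the `input_shape` in Example #5 suggests; that shape is an assumption, not taken from this code:

import numpy as np

output_dim = 16
kernel = np.random.rand(output_dim * 3, 1)  # assumed Dense kernel shape
bias = np.zeros((1,))
model = EdlEmbeddingModel(output_dim=output_dim, weights=[kernel, bias])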
Example #8
def create_embedding_layer(
    embedding_size,
    output_dim,
    input_length=None,
    combiner=None,
    mask_zero=False,
):
    layer = Embedding(
        output_dim,
        input_length=input_length,
        combiner=combiner,
        mask_zero=mask_zero,
    )
    worker = mock_worker(embedding_size, output_dim)
    layer.set_lookup_func(worker.lookup_embedding)
    return layer
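
`mock_worker` is referenced but not defined in this excerpt. A hypothetical stand-in that serves lookups from a single random table might look like the following; the `lookup_embedding` signature here is an assumption for illustration, not ElasticDL's actual worker API:

import numpy as np

def mock_worker(embedding_size, output_dim):
    class _MockWorker:  # hypothetical test double
        def __init__(self):
            self.table = np.random.rand(
                embedding_size, output_dim
            ).astype(np.float32)

        def lookup_embedding(self, ids):  # signature assumed
            return self.table[ids]

    return _MockWorker()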