Example #1
    # All snippets on this page assume imports along the lines of:
    # from tensorflow.keras import activations, constraints, initializers, regularizers
    # (or the equivalent standalone keras modules).
    def __init__(self,
                 units,
                 num_head,
                 activation=None,
                 use_bias=False,
                 attention_dropout=0.0,
                 kernel_initializer='glorot_normal',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 **kwargs):
        super(RelativePartialMultiHeadSelfAttention, self).__init__(**kwargs)
        self.supports_masking = True
        self.units = units
        self.num_head = num_head
        self.units_head = units // num_head
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.attention_dropout = attention_dropout
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)

        self.kernel, self.bias = None, None
        self.att_drop_layer = None
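
For context, a minimal construction sketch for the layer above. The import path is an assumption (the class matches the attention layer in CyberZHG's keras-transformer-xl code) and the hyperparameters are illustrative, not canonical.

# Hypothetical import path; only the constructor signature above is guaranteed.
# from keras_transformer_xl import RelativePartialMultiHeadSelfAttention

attention = RelativePartialMultiHeadSelfAttention(
    units=512,              # total feature size, split evenly across heads
    num_head=8,             # units_head = 512 // 8 = 64 per head
    use_bias=True,
    attention_dropout=0.1,  # dropout applied to the attention weights
)

Note that units must be divisible by num_head, since the constructor computes units // num_head.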
Example #2
    def __init__(self, input_dim, output_dim, embed_dim=None,
                 cutoffs=None, div_val=1, use_bias=True,
                 force_projection=None,
                 embeddings_initializer='uniform',
                 embeddings_regularizer=None,
                 embeddings_constraint=None,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 kernel_constraint=None,
                 bias_initializer='zeros',
                 bias_regularizer=None,
                 bias_constraint=None,
                 bind_embeddings=False,
                 bind_projections=False,
                 **kwargs):
        super(AdaptiveSoftmax, self).__init__(**kwargs)

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.embed_dim = embed_dim
        if embed_dim is None:
            self.embed_dim = input_dim
        self.cutoffs = cutoffs
        if cutoffs is not None:
            # Normalize the cutoff list so it starts at 0 and ends at output_dim;
            # consecutive entries then bound the vocabulary clusters.
            if self.cutoffs[0] != 0:
                self.cutoffs = [0] + self.cutoffs
            if self.cutoffs[-1] != output_dim:
                self.cutoffs.append(output_dim)
        self.div_val = div_val
        self.use_bias = use_bias
        self.force_projection = force_projection
        if force_projection is None:
            # Projection is only needed by default when div_val != 1,
            # i.e. when tail clusters use smaller embedding sizes.
            self.force_projection = div_val != 1
        self.cluster_num = 0
        if self.cutoffs is not None:
            # Number of tail clusters beyond the head; the normalized
            # cutoff list has cluster_num + 2 entries.
            self.cluster_num = len(self.cutoffs) - 2

        self.embeddings_initializer = initializers.get(embeddings_initializer)
        self.embeddings_regularizer = regularizers.get(embeddings_regularizer)
        self.embeddings_constraint = constraints.get(embeddings_constraint)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_initializer = initializers.get(bias_initializer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.bias_constraint = constraints.get(bias_constraint)

        self.bind_embeddings = bind_embeddings
        if not isinstance(bind_embeddings, list):
            # Broadcast a single flag to one flag per cluster (head + cluster_num tails).
            self.bind_embeddings = [bind_embeddings] * (self.cluster_num + 1)
        self.bind_projections = bind_projections
        if not isinstance(bind_projections, list):
            self.bind_projections = [bind_projections] * (self.cluster_num + 1)

        self.embeddings, self.projections, self.biases = (None,) * 3
        self.kernel_cluster, self.bias_cluster = None, None
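
A minimal usage sketch for AdaptiveSoftmax. The import follows the keras-adaptive-softmax README but should be treated as an assumption; the numbers are illustrative, and the comments trace what this __init__ computes for them.

# Per the keras-adaptive-softmax README (treat the path as an assumption):
from keras_adaptive_softmax import AdaptiveSoftmax

softmax = AdaptiveSoftmax(
    input_dim=512,          # hidden size fed into the layer
    output_dim=30000,       # vocabulary size
    cutoffs=[2000, 10000],  # normalized internally to [0, 2000, 10000, 30000]
    div_val=4,              # tail clusters use smaller embedding sizes
)
# For these values: cluster_num = len([0, 2000, 10000, 30000]) - 2 = 2 tail clusters,
# and force_projection defaults to True because div_val != 1.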
Example #3
    def __init__(self,
                 input_dim,
                 output_dim,
                 embed_dim=None,
                 cutoffs=None,
                 div_val=1,
                 force_projection=None,
                 embeddings_initializer='uniform',
                 embeddings_regularizer=None,
                 embeddings_constraint=None,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 kernel_constraint=None,
                 mask_zero=False,
                 return_embeddings=False,
                 return_projections=False,
                 **kwargs):
        super(AdaptiveEmbedding, self).__init__(**kwargs)

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.embed_dim = embed_dim
        if embed_dim is None:
            self.embed_dim = output_dim
        self.cutoffs = cutoffs
        if cutoffs is not None:
            # Same normalization as in AdaptiveSoftmax, but the bounds cover the
            # input vocabulary: pad with 0 in front and input_dim at the end.
            if self.cutoffs[0] != 0:
                self.cutoffs = [0] + self.cutoffs
            if self.cutoffs[-1] != input_dim:
                self.cutoffs.append(input_dim)
        self.div_val = div_val
        self.force_projection = force_projection
        if force_projection is None:
            # Same default rule as AdaptiveSoftmax: project only when div_val != 1.
            self.force_projection = div_val != 1

        self.embeddings_initializer = initializers.get(embeddings_initializer)
        self.embeddings_regularizer = regularizers.get(embeddings_regularizer)
        self.embeddings_constraint = constraints.get(embeddings_constraint)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)

        self.mask_zero = mask_zero
        self.supports_masking = mask_zero
        self.return_embeddings = return_embeddings
        self.return_projections = return_projections

        self.embeddings = None
        self.projections = None
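
A matching sketch for AdaptiveEmbedding. Pairing it with AdaptiveSoftmax through the return_*/bind_* flags is the pattern the keras-adaptive-softmax README suggests; it is assumed here rather than guaranteed, and the numbers are illustrative.

# Assumed import, mirroring the AdaptiveSoftmax example:
from keras_adaptive_softmax import AdaptiveEmbedding

embedding = AdaptiveEmbedding(
    input_dim=30000,         # vocabulary size; cutoffs are normalized to end at it
    output_dim=512,          # embedding size after projection
    cutoffs=[2000, 10000],   # becomes [0, 2000, 10000, 30000] internally
    div_val=4,
    mask_zero=True,          # also flips supports_masking on
    return_embeddings=True,  # expose the weights so a softmax layer can bind to them
    return_projections=True,
)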
Example #4
    def __init__(self,
                 units,
                 bias_initializer='zeros',
                 bias_regularizer=None,
                 bias_constraint=None,
                 **kwargs):
        super(SegmentBias, self).__init__(**kwargs)
        self.supports_masking = True
        self.units = units
        self.bias_initializer = initializers.get(bias_initializer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.bias_constraint = constraints.get(bias_constraint)

        self.bias_context, self.bias_relative = None, None
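
A short sketch for SegmentBias. Only the constructor signature above is given; the import path is hypothetical, and the two bias weights start as None, presumably to be created in build().

# Hypothetical import; SegmentBias appears in Transformer-XL/XLNet-style Keras code.
# from keras_xlnet import SegmentBias

seg_bias = SegmentBias(units=512)  # bias_context / bias_relative are built later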
Example #5
    def __init__(self,
                 units,
                 initializer='uniform',
                 regularizer=None,
                 constraint=None,
                 **kwargs):
        super(MaskEmbedding, self).__init__(**kwargs)
        self.supports_masking = True
        self.units = units
        self.initializer = initializers.get(initializer)
        self.regularizer = regularizers.get(regularizer)
        self.constraint = constraints.get(constraint)

        self.embeddings = None
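
And one for MaskEmbedding, under the same caveats: assumed import path, illustrative units.

# Hypothetical import path:
# from keras_xlnet import MaskEmbedding

mask_embed = MaskEmbedding(units=512)  # self.embeddings is created later in build()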