Ejemplo n.º 1
0
    def __init__(
        self,
        input_size,
        output_size,
        use_bias=True,
        weights_initializer="xavier_uniform",
        bias_initializer="zeros",
        activation=None,
        clip=None,
        **kwargs,
    ):
        """Projection decoder: a Dense layer, an optional activation and optional output clipping.

        :param input_size: size of the input feature vector.
        :param output_size: size of the projected output vector.
        :param use_bias: whether the Dense layer adds a bias term.
        :param weights_initializer: initializer name for the Dense weights.
        :param bias_initializer: initializer name for the Dense bias.
        :param activation: activation spec resolved via ``get_activation``;
            ``None`` means no activation.
        :param clip: optional ``(min, max)`` pair; when given, outputs are
            clipped with ``torch.clip``.
        :raises ValueError: if ``clip`` is given but is not a list/tuple of
            length 2.
        """
        super().__init__()
        # NOTE(review): self.name / self.feature_name are presumably provided
        # by the superclass — not visible in this snippet; confirm.
        logger.debug(f" {self.name}")

        logger.debug("  Dense")
        self.dense = Dense(
            input_size=input_size,
            output_size=output_size,
            use_bias=use_bias,
            weights_initializer=weights_initializer,
            bias_initializer=bias_initializer,
        )

        self.activation = get_activation(activation)

        if clip is not None:
            if isinstance(clip, (list, tuple)) and len(clip) == 2:
                # Pre-bind the clip bounds so forward passes only call self.clip(x).
                self.clip = partial(torch.clip, min=clip[0], max=clip[1])
            else:
                # Bug fix: report the offending `clip` argument. The original
                # formatted `self.clip`, which is never assigned on this error
                # path, so the code raised AttributeError instead of the
                # intended ValueError.
                raise ValueError(
                    "The clip parameter of {} is {}. "
                    "It must be a list or a tuple of length 2.".format(
                        self.feature_name, clip))
        else:
            self.clip = None
Ejemplo n.º 2
0
    def __init__(
        self,
        input_size,
        num_classes,
        use_bias=True,
        weights_initializer="xavier_uniform",
        bias_initializer="zeros",
        **kwargs,
    ):
        """Classifier decoder: projects the input vector to ``num_classes`` logits.

        :param input_size: size of the input feature vector.
        :param num_classes: number of output classes (Dense output size).
        :param use_bias: whether the Dense layer adds a bias term.
        :param weights_initializer: initializer name for the Dense weights.
        :param bias_initializer: initializer name for the Dense bias.
        :param kwargs: may carry a loss config under ``kwargs[LOSS]`` whose
            ``TYPE`` selects sampled-loss mode.
        """
        super().__init__()
        logger.debug(f" {self.name}")

        logger.debug("  Dense")
        self.num_classes = num_classes
        self.dense = Dense(
            input_size=input_size,
            output_size=num_classes,
            use_bias=use_bias,
            weights_initializer=weights_initializer,
            bias_initializer=bias_initializer,
        )

        # A loss type starting with "sampled" switches this decoder into
        # sampled-loss mode; anything else (or no loss config) leaves it off.
        self.sampled_loss = False
        loss_params = kwargs[LOSS] if LOSS in kwargs else None
        if loss_params is not None and TYPE in loss_params:
            loss_type = loss_params[TYPE]
            if loss_type is not None:
                self.sampled_loss = loss_type.startswith("sampled")

        # TF2 initializes weights lazily on the first call, so the first
        # forward pass must compute the full dense projection; otherwise the
        # Dense layer's weights would never be initialized.
        self.first_call = True
Ejemplo n.º 3
0
 def __init__(
     self,
     input_size: int,
     vocab_size: int,
     max_sequence_length: int,
     use_attention: bool = False,
     use_bias: bool = True,
     attention_embedding_size: int = 256,
     attention_num_heads: int = 8,
     **kwargs,
 ):
     """Sequence decoder head: optional multi-head self-attention followed by a
     projection onto the vocabulary.

     :param input_size: size of each input feature vector.
     :param vocab_size: output size of the projection layer.
     :param max_sequence_length: maximum decoded sequence length (stored only).
     :param use_attention: when True, run MultiHeadSelfAttention before the
         projection.
     :param use_bias: whether the projection Dense layer adds a bias term.
     :param attention_embedding_size: hidden size of the attention block.
     :param attention_num_heads: number of attention heads.
     """
     super().__init__()
     self.input_size = input_size
     self.vocab_size = vocab_size
     self.max_sequence_length = max_sequence_length
     self.use_attention = use_attention

     # The attention block may change the feature size fed to the projection.
     projection_input_size = input_size
     if use_attention:
         logger.debug("  MultiHeadSelfAttention")
         self.self_attention = MultiHeadSelfAttention(
             input_size=input_size,
             hidden_size=attention_embedding_size,
             num_heads=attention_num_heads)
         projection_input_size = self.self_attention.output_shape[0]

     self.projection_layer = Dense(
         input_size=projection_input_size,
         output_size=vocab_size,
         use_bias=use_bias)
Ejemplo n.º 4
0
class Regressor(Decoder):
    def __init__(
        self,
        input_size,
        use_bias=True,
        weights_initializer="xavier_uniform",
        bias_initializer="zeros",
        activation=None,
        **kwargs,
    ):
        """Regression decoder: projects the input vector to a single scalar output.

        Fix: the original block contained unresolved git merge conflict markers
        (``<<<<<<< HEAD`` / ``=======`` / ``>>>>>>> upstream/master``) and was
        not valid Python. Resolved in favor of the upstream/master side, which
        drops the removed ``*_regularizer`` parameters and matches the style of
        the sibling decoders in this file.

        :param input_size: size of the input feature vector.
        :param use_bias: whether the Dense layer adds a bias term.
        :param weights_initializer: initializer name for the Dense weights.
        :param bias_initializer: initializer name for the Dense bias.
        :param activation: accepted for interface compatibility; not used in
            this constructor — presumably applied elsewhere; confirm.
        """
        super().__init__()
        logger.debug(f" {self.name}")

        logger.debug("  Dense")

        self.dense = Dense(
            input_size=input_size,
            output_size=1,
            use_bias=use_bias,
            weights_initializer=weights_initializer,
            bias_initializer=bias_initializer,
        )
Ejemplo n.º 5
0
    def __init__(
        self,
        input_size,
        use_bias=True,
        weights_initializer="xavier_uniform",
        bias_initializer="zeros",
        activation=None,
        **kwargs,
    ):
        """Scalar-output decoder: a single Dense projection from ``input_size`` to 1.

        :param input_size: size of the input feature vector.
        :param use_bias: whether the Dense layer adds a bias term.
        :param weights_initializer: initializer name for the Dense weights.
        :param bias_initializer: initializer name for the Dense bias.
        :param activation: accepted but not used in this constructor —
            presumably handled by a caller or superclass; confirm.
        """
        super().__init__()
        logger.debug(f" {self.name}")

        logger.debug("  Dense")

        # Collect the Dense configuration once, then build the layer.
        dense_config = dict(
            input_size=input_size,
            output_size=1,
            use_bias=use_bias,
            weights_initializer=weights_initializer,
            bias_initializer=bias_initializer,
        )
        self.dense = Dense(**dense_config)