Example #1
def __init__(
    self,
    hidden_size,
    num_intents,
    num_slots,
    dropout=0.0,
    use_transformer_pretrained=True,
    **kwargs,
):
    super().__init__(**kwargs)
    self.dropout = nn.Dropout(dropout)
    # Token-level head: projects each token's hidden state to slot logits.
    self.slot_mlp = MultiLayerPerceptron(
        hidden_size,
        num_classes=num_slots,
        device=self._device,
        num_layers=2,
        activation='relu',
        log_softmax=False,
    )
    # Utterance-level head: projects the pooled representation to intent logits.
    self.intent_mlp = MultiLayerPerceptron(
        hidden_size,
        num_classes=num_intents,
        device=self._device,
        num_layers=2,
        activation='relu',
        log_softmax=False,
    )
    # Optionally re-initialize all submodules in the style used for
    # pretrained transformer weights.
    if use_transformer_pretrained:
        self.apply(
            lambda module: transformer_weights_init(module, xavier=False))
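All four snippets build their output layers with a MultiLayerPerceptron helper whose definition is not shown. Judging only from the call sites (positional hidden_size, num_classes and device, plus num_layers, activation and log_softmax), a minimal self-contained sketch of such a module could look like the following. This is an illustrative reconstruction, not the library's actual implementation, and the ACT2FN lookup table is assumed here.

import torch
from torch import nn

# Assumed lookup table mapping activation names to callables.
ACT2FN = {'relu': nn.functional.relu}


class MultiLayerPerceptron(nn.Module):
    """Illustrative sketch: a stack of Linear layers ending in num_classes logits."""

    def __init__(self, hidden_size, num_classes, device=None,
                 num_layers=2, activation='relu', log_softmax=True):
        super().__init__()
        # Hidden layers keep the width at hidden_size; the last layer
        # projects down to num_classes.
        self.hidden_layers = nn.ModuleList(
            nn.Linear(hidden_size, hidden_size) for _ in range(num_layers - 1)
        )
        self.out = nn.Linear(hidden_size, num_classes)
        # The examples pass either a name ('relu') or a callable (nn.functional.relu).
        self.act = ACT2FN[activation] if isinstance(activation, str) else activation
        self.log_softmax = log_softmax
        if device is not None:
            self.to(device)

    def forward(self, hidden_states):
        for layer in self.hidden_layers:
            hidden_states = self.act(layer(hidden_states))
        logits = self.out(hidden_states)
        return torch.log_softmax(logits, dim=-1) if self.log_softmax else logits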
Example #2
def __init__(
    self,
    hidden_size,
    num_classes,
    activation='relu',
    log_softmax=True,
    dropout=0.0,
    use_transformer_pretrained=True,
):
    super().__init__()
    if activation not in ACT2FN:
        raise ValueError(f'activation "{activation}" not found')
    # BERT-style transform block: Linear -> activation -> LayerNorm.
    self.dense = nn.Linear(hidden_size, hidden_size)
    self.act = ACT2FN[activation]
    self.norm = nn.LayerNorm(hidden_size, eps=1e-12)
    # Final projection from hidden_size to the number of target classes.
    self.mlp = MultiLayerPerceptron(
        hidden_size,
        num_classes,
        self._device,
        num_layers=1,
        activation=activation,
        log_softmax=log_softmax,
    )
    self.dropout = nn.Dropout(dropout)
    # Optionally re-initialize weights in the pretrained-transformer style.
    if use_transformer_pretrained:
        self.apply(
            lambda module: transformer_weights_init(module, xavier=False))
    self.to(self._device)
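Only the constructors appear in these snippets. For a head built like Example #2 (dense, activation, LayerNorm, then MLP), one plausible wiring of the forward pass, assuming BERT-style hidden states of shape [batch, seq_len, hidden_size], is sketched below. Treat it as an assumption about how the pieces fit together, not as the class's actual forward() method.

def forward(self, hidden_states):
    # Illustrative sketch only; the snippet above does not show forward().
    hidden_states = self.dropout(hidden_states)
    transformed = self.norm(self.act(self.dense(hidden_states)))
    # Per-token logits, or log-probabilities when log_softmax=True.
    return self.mlp(transformed)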
Example #3
def __init__(
    self,
    hidden_size,
    num_classes,
    name=None,
    num_layers=2,
    activation='relu',
    log_softmax=True,
    dropout=0.0,
    use_transformer_pretrained=True,
):
    super().__init__()

    self.name = name
    # Classification head: num_layers feed-forward layers ending in num_classes logits.
    self.mlp = MultiLayerPerceptron(
        hidden_size,
        num_classes,
        self._device,
        num_layers,
        activation,
        log_softmax,
    )
    self.dropout = nn.Dropout(dropout)
    # Optionally re-initialize weights in the pretrained-transformer style.
    if use_transformer_pretrained:
        self.apply(
            lambda module: transformer_weights_init(module, xavier=False))
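Every example optionally calls self.apply(lambda module: transformer_weights_init(module, xavier=False)), but the initializer itself is not part of the snippets. The name and the xavier flag suggest a transformer-style weight initializer; a hedged sketch of what such a helper might do is shown below. The std_init_range default of 0.02 is an assumption, not taken from the source.

from torch import nn


def transformer_weights_init(module, std_init_range=0.02, xavier=True):
    # Illustrative sketch of a transformer-style initializer; the real helper may differ.
    if isinstance(module, (nn.Linear, nn.Embedding)):
        if xavier:
            nn.init.xavier_uniform_(module.weight)
        else:
            nn.init.normal_(module.weight, mean=0.0, std=std_init_range)
        if isinstance(module, nn.Linear) and module.bias is not None:
            nn.init.zeros_(module.bias)
    elif isinstance(module, nn.LayerNorm):
        # Reset LayerNorm to the identity transform.
        nn.init.ones_(module.weight)
        nn.init.zeros_(module.bias)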
Example #4
def __init__(self,
             hidden_size,
             num_classes,
             activation=nn.functional.relu,
             log_softmax=True,
             dropout=0.0,
             use_transformer_pretrained=True):
    super().__init__()
    # BERT-style transform block; note that here `activation` is a callable,
    # not a string name as in the other examples.
    self.dense = nn.Linear(hidden_size, hidden_size)
    self.act = activation
    self.norm = nn.LayerNorm(hidden_size, eps=1e-12)
    self.mlp = MultiLayerPerceptron(hidden_size,
                                    num_classes,
                                    self._device,
                                    num_layers=1,
                                    activation=activation,
                                    log_softmax=log_softmax)
    self.dropout = nn.Dropout(dropout)
    # Optionally re-initialize weights in the pretrained-transformer style.
    if use_transformer_pretrained:
        self.apply(
            lambda module: transformer_weights_init(module, xavier=False))
    self.to(self._device)
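As a quick smoke test, the MultiLayerPerceptron sketch from the note under Example #1 can be exercised with dummy hidden states; the shapes below are hypothetical and should be matched to the actual encoder.

import torch

hidden_size, num_slots, batch, seq_len = 768, 12, 4, 16
slot_head = MultiLayerPerceptron(hidden_size, num_slots, device='cpu',
                                 num_layers=2, activation='relu', log_softmax=False)
hidden_states = torch.randn(batch, seq_len, hidden_size)
slot_logits = slot_head(hidden_states)
print(slot_logits.shape)  # torch.Size([4, 16, 12])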