Example #1
    def __init__(self, params, vocab):
        super(SLUTagger, self).__init__()

        self.lstm = Lstm(params, vocab)   # (bi)LSTM sentence encoder
        self.num_slot = params.num_slot
        self.hidden_dim = params.hidden_dim * 2 if params.bidirection else params.hidden_dim
        self.linear = nn.Linear(self.hidden_dim, self.num_slot)   # per-token emission scores
        self.crf_layer = CRF(self.num_slot)   # CRF over slot-label sequences
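
The constructor above only wires the layers together: an encoder, a linear layer producing per-token emission scores, and a CRF over those scores. The forward and decoding methods are not shown; as a minimal sketch, assuming the `CRF` class follows the pytorch-crf interface (a custom CRF with different method names is equally possible):

    def forward(self, X):
        # encode tokens with the (bi)LSTM, then map to emission scores over slot labels
        lstm_hidden = self.lstm(X)
        return self.linear(lstm_hidden)

    def crf_decode(self, emissions):
        # Viterbi decoding of the best tag sequence (pytorch-crf style)
        return self.crf_layer.decode(emissions)

    def crf_loss(self, emissions, tags):
        # negative log-likelihood of the gold tags under the CRF
        return -self.crf_layer(emissions, tags)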
Example #2
    def __init__(self, params, vocab):
        super(BiLSTMCRFTagger, self).__init__()
        self.lstm = Lstm(params, vocab)
        self.num_entity_label = params.num_entity_label
        self.hidden_dim = params.hidden_dim * 2 if params.bidirection else params.hidden_dim
        self.linear = nn.Linear(self.hidden_dim, self.num_entity_label)

        self.crf_layer = CRF(self.num_entity_label)
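
The recurring `params.hidden_dim * 2 if params.bidirection else params.hidden_dim` line accounts for PyTorch concatenating the forward and backward states of a bidirectional LSTM; a self-contained check with plain nn.LSTM:

    import torch
    import torch.nn as nn

    lstm = nn.LSTM(input_size=300, hidden_size=200, bidirectional=True, batch_first=True)
    x = torch.randn(4, 10, 300)   # (batch, seq_len, emb_dim)
    out, _ = lstm(x)
    print(out.shape)              # torch.Size([4, 10, 400]), i.e. hidden_size * 2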
Example #3
    def __init__(self, params, vocab):
        super(SentRepreGenerator, self).__init__()

        self.hidden_size = params.hidden_dim * 2 if params.bidirection else params.hidden_dim

        self.template_encoder = Lstm(params, vocab)

        self.input_atten_layer = Attention(attention_size=self.hidden_size)
        self.template_attn_layer = Attention(attention_size=self.hidden_size)
Example #4
    def __init__(self, params, vocab):
        super(SentRepreGenerator, self).__init__()
        self.hidden_size = params.hidden_dim * 2 if params.bidirection else params.hidden_dim
        
        # LSTM Encoder for template
        self.template_encoder = Lstm(params, vocab)

        # attention layers for templates and input sequences
        self.input_atten_layer = Attention(attention_size=self.hidden_size)
        self.template_attn_layer = Attention(attention_size=self.hidden_size)
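
The `Attention` class itself is not shown in either example. Purely as an illustration of what an `Attention(attention_size=...)` layer of this kind often computes (a learned scoring vector, a softmax over time steps, a weighted sum), here is a hypothetical sketch; the actual class in the source repository may differ:

    import torch
    import torch.nn as nn

    class Attention(nn.Module):
        def __init__(self, attention_size):
            super().__init__()
            # learned scoring vector over the hidden dimension (illustrative init)
            self.v = nn.Parameter(torch.empty(attention_size).uniform_(-0.01, 0.01))

        def forward(self, hidden):
            # hidden: (bsz, seq_len, attention_size)
            scores = torch.matmul(hidden, self.v)                  # (bsz, seq_len)
            weights = torch.softmax(scores, dim=-1)                # attention over time steps
            return torch.sum(hidden * weights.unsqueeze(-1), 1)    # (bsz, attention_size)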
Example #5
    def __init__(self, params, vocab):
        super(CoarseSLUTagger, self).__init__()

        self.lstm = Lstm(params, vocab)
        self.num_binslot = params.num_binslot
        self.hidden_dim = params.hidden_dim * 2 if params.bidirection else params.hidden_dim
        self.linear = nn.Linear(self.hidden_dim, self.num_binslot)   # coarse slot emissions
        self.linear_chunking = nn.Linear(self.hidden_dim, 3)   # 3-way chunking head (e.g. B/I/O)
        self.crf_layer_chunking = CRF(3)
        self.crf_layer = CRF(self.num_binslot)
        self.domain_coarse_mask = self.gen_emission_mask()   # mask of labels valid per domain
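
`gen_emission_mask` is not shown; a common way to restrict a tagger to the labels valid in the current domain is to add a large negative constant to the emission scores of all other labels before CRF decoding. A hedged sketch of that idea (function and variable names are illustrative, not from the source):

    import torch

    def mask_emissions(emissions, valid_label_ids, num_labels):
        # emissions: (..., num_labels); labels outside the domain get -1e9
        mask = torch.full((num_labels,), -1e9)
        mask[valid_label_ids] = 0.0
        return emissions + mask   # broadcasts over batch and time dimensions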
Example #6
    def __init__(self, params, vocab_en, vocab_trans):
        super(ModelSLU, self).__init__()
        self.label_reg = params.la_reg        # label regularization flag
        self.adversarial = params.adv         # adversarial training flag
        self.intent_adv = params.intent_adv   # adversarial training on intents
        self.zeroshot = params.zs             # zero-shot transfer flag
        if self.label_reg:
            self.label_encoder = LabelEncoder(params)
        self.lstm = Lstm(params, vocab_en, vocab_trans)
        self.intent_predictor = IntentPredictor(params)
        self.slot_predictor = SlotPredictor(params)
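
Here `params` behaves like a parsed command-line namespace with boolean switches (`la_reg`, `adv`, `intent_adv`, `zs`). A minimal sketch of how such a namespace is commonly built with argparse (flag names taken from the snippet; help texts are assumptions):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--la_reg", action="store_true", help="enable label regularization")
    parser.add_argument("--adv", action="store_true", help="enable adversarial training")
    parser.add_argument("--intent_adv", action="store_true", help="adversarial training on intents")
    parser.add_argument("--zs", action="store_true", help="zero-shot transfer setting")
    params = parser.parse_args([])   # empty argv for illustration; all flags default to False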
Example #7
    def __init__(self, params, vocab):
        super(ConceptTagger, self).__init__()

        self.use_example = params.use_example
        if self.use_example:
            hidden_dim = params.hidden_dim * 2 if params.bidirection else params.hidden_dim
            self.w_a = nn.Parameter(torch.FloatTensor(hidden_dim))
            torch.nn.init.uniform_(self.w_a.data, -0.01, 0.01)
            self.softmax = nn.Softmax(dim=-1)

        self.lstm_encoder = Lstm(params, vocab)
        self.lstm_predictor = LstmBasedSlotPredictor(params)
        self.slot_embs = load_embedding_from_pkl(params.slot_emb_file)
        self.example_embs = load_embedding_from_pkl(params.example_emb_file)
Example #8
    def __init__(self, params, vocab):
        super(ConceptTagger, self).__init__()

        self.use_example = params.use_example
        if self.use_example:
            hidden_dim = params.hidden_dim * 2 if params.bidirection else params.hidden_dim
            self.w_a = nn.Parameter(torch.FloatTensor(hidden_dim))
            torch.nn.init.uniform_(self.w_a.data, -0.01, 0.01)
            self.softmax = nn.Softmax(dim=-1)

            self.example_embs = torch.cuda.FloatTensor(
                load_embedding_from_npy(
                    params.ner_example_emb_file))  # (num_entity, emb_dim, 2)

        self.lstm_encoder = Lstm(params, vocab)
        self.lstm_predictor = LstmBasedSlotPredictor(params)
        self.entity_embs = torch.cuda.FloatTensor(
            load_embedding_from_npy(
                params.ner_entity_type_emb_file))  # (num_entity, emb_dim)
        self.entity_embs_size = self.entity_embs.size()
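
Neither ConceptTagger example shows how `w_a` and the softmax are used downstream. One plausible reading, given the shapes, is attention between encoder states and the loaded entity/example embeddings; the sketch below is written under that assumption, and everything beyond the snippet's own names is illustrative:

    import torch
    import torch.nn.functional as F

    def attend_examples(hidden, example_embs, w_a):
        # hidden:       (bsz, seq_len, hidden_dim)   encoder output
        # example_embs: (num_examples, hidden_dim)   pretrained example vectors
        # w_a:          (hidden_dim,)                learned scoring vector
        scores = torch.matmul(hidden * w_a, example_embs.t())   # (bsz, seq_len, num_examples)
        weights = F.softmax(scores, dim=-1)
        return torch.matmul(weights, example_embs)              # (bsz, seq_len, hidden_dim)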