def __init__(self, concept2idx, tag2idx, word2concept, lstm_dim=50, hidden_dim=50):
    """Build and compile a per-token binary classifier over char + word inputs.

    A char-id sequence and a word-id sequence (both of length
    ``dataset.MAX_SEQ_LEN``) are embedded, concatenated, run through a
    bidirectional LSTM, and mapped to one sigmoid score per timestep.

    Args:
        concept2idx: concept -> integer-id mapping (stored for later use).
        tag2idx: tag -> integer-id mapping (stored for later use).
        word2concept: word -> concept mapping (stored for later use).
        lstm_dim: hidden units per LSTM direction.
        hidden_dim: units of the per-timestep dense layer.
    """
    # NOTE: these were comma-chained into one statement in the original,
    # which does not assign the three mappings as intended; they must be
    # separate assignments.
    self.concept2idx = concept2idx
    self.tag2idx = tag2idx
    self.word2concept = word2concept

    # Integer-id input sequences, fixed length dataset.MAX_SEQ_LEN.
    c_i = Input(shape=(dataset.MAX_SEQ_LEN,), name="char_input")
    w_i = Input(shape=(dataset.MAX_SEQ_LEN,), name="word_input")

    # Embedding layers are supplied by the dataset module.
    c_emb = dataset.get_char_emb_layer()(c_i)
    w_emb = dataset.get_word_emb_layer()(w_i)

    # BiLSTM over the concatenated char+word embeddings; return_sequences
    # keeps one vector per timestep for the TimeDistributed heads below.
    x = Bidirectional(LSTM(lstm_dim, return_sequences=True, name="lstm"))(
        concatenate([c_emb, w_emb]))
    x = TimeDistributed(Dense(hidden_dim, activation="sigmoid"))(x)
    # One sigmoid score per timestep (binary decision per token).
    o = TimeDistributed(Dense(1, activation="sigmoid", name="output"))(x)

    self.model = Model(inputs=[c_i, w_i], outputs=o)
    self.model.compile(loss=losses.binary_crossentropy,
                       optimizer="adam",
                       metrics=[seq_accuracy, "accuracy"])
def __init__(self, concept2idx, tag2idx, word2concept, lstm_dim=50, hidden_dim=50):
    """Build and compile a per-token binary classifier over char + concept + tag inputs.

    Char ids are embedded; concept ids and tag ids are one-hot encoded via
    ``Lambda(K.one_hot, ...)``. The three representations are concatenated,
    run through a bidirectional LSTM, and mapped to one sigmoid score per
    timestep.

    Args:
        concept2idx: concept -> integer-id mapping (also sizes the one-hot).
        tag2idx: tag -> integer-id mapping (also sizes the one-hot).
        word2concept: word -> concept mapping (stored for later use).
        lstm_dim: hidden units per LSTM direction.
        hidden_dim: units of the per-timestep dense layer.
    """
    # NOTE: these were comma-chained into one statement in the original,
    # which does not assign the three mappings as intended; they must be
    # separate assignments.
    self.concept2idx = concept2idx
    self.tag2idx = tag2idx
    self.word2concept = word2concept

    # Integer-id input sequences, fixed length dataset.MAX_SEQ_LEN.
    # K.one_hot requires integer indices, hence dtype="int32".
    c_i = Input(shape=(dataset.MAX_SEQ_LEN,), name="char_input")
    con_i = Input(shape=(dataset.MAX_SEQ_LEN,), dtype="int32")
    t_i = Input(shape=(dataset.MAX_SEQ_LEN,), dtype="int32")

    c_emb = dataset.get_char_emb_layer()(c_i)
    # One-hot widths are vocab size + 2 — presumably reserving ids for
    # padding and unknown; TODO confirm against the dataset encoding.
    con_emb = Lambda(K.one_hot,
                     arguments={'num_classes': len(concept2idx) + 2},
                     output_shape=(dataset.MAX_SEQ_LEN, len(concept2idx) + 2))(con_i)
    t_emb = Lambda(K.one_hot,
                   arguments={'num_classes': len(tag2idx) + 2},
                   output_shape=(dataset.MAX_SEQ_LEN, len(tag2idx) + 2))(t_i)

    # BiLSTM over the concatenated char/concept/tag features; return_sequences
    # keeps one vector per timestep for the TimeDistributed heads below.
    x = Bidirectional(LSTM(lstm_dim, return_sequences=True, name="lstm"))(
        concatenate([c_emb, con_emb, t_emb]))
    x = TimeDistributed(Dense(hidden_dim, activation="sigmoid"))(x)
    # One sigmoid score per timestep (binary decision per token).
    o = TimeDistributed(Dense(1, activation="sigmoid", name="output"))(x)

    self.model = Model(inputs=[c_i, con_i, t_i], outputs=o)
    self.model.compile(loss=losses.binary_crossentropy,
                       optimizer="adam",
                       metrics=[seq_accuracy, "accuracy"])