Example #1
# Assumed imports and generator head; the original listing starts mid-function.
# The reconstruction follows the batch pattern of lm_batch_generator in Example #3;
# sentences, word_dict, LM_MODEL_PATH, batch_size and epoch_num come from earlier
# in the source script.
import os

import keras

from keras_bi_lm import BiLM


def train_lm_generator(batch_size):
    while True:
        for i in range(len(sentences) // batch_size):
            batch_sentences = sentences[batch_size * i:batch_size * (i + 1)]
            inputs, outputs = BiLM.get_batch(sentences=batch_sentences,
                                             token_dict=word_dict,
                                             ignore_case=True)
            yield inputs, outputs


print('Fit LM...')
if os.path.exists(LM_MODEL_PATH):
    bi_lm = BiLM(model_path=LM_MODEL_PATH)
else:
    bi_lm = BiLM(token_num=len(word_dict))
    bi_lm.model.fit_generator(
        generator=train_lm_generator(batch_size=batch_size),
        steps_per_epoch=len(sentences) // batch_size,
        epochs=epoch_num,
        verbose=True,
    )
    bi_lm.save_model(LM_MODEL_PATH)

# Build model for classification
input_layer, feature_layer = bi_lm.get_feature_layers()
lstm_layer = keras.layers.Bidirectional(
    keras.layers.LSTM(units=50),
    name='Bi-LSTM',
)(feature_layer)
dense_layer = keras.layers.Dense(
    units=2,
    activation='softmax',
    name='Dense',
)(lstm_layer)
model = keras.models.Model(inputs=input_layer, outputs=dense_layer)
model.compile(
    optimizer='adam',
    # Loss and metrics are assumed; the original listing is truncated here.
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
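
A minimal sketch of training the classifier built above, assuming x_train holds token-index sequences in the format expected by the bi_lm input layer and y_train holds integer class labels (both names are hypothetical):

model.fit(
    x_train,   # hypothetical: token-index sequences for the classifier input
    y_train,   # hypothetical: integer labels in {0, 1}
    batch_size=batch_size,
    epochs=epoch_num,
)
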
Example #2
def test_save_load(self):
    with tempfile.TemporaryDirectory() as temp_path:
        model_path = os.path.join(temp_path, 'save_load.h5')
        model = BiLM(token_num=101)
        model.save_model(model_path)
        model.load_model(model_path)  # round-trip should succeed without raising
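
Calling load_model reloads a saved model into an existing BiLM instance; passing model_path to the constructor instead, as in Examples #1 and #4, restores the same saved .h5 file at construction time.
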
Example #3
    # The original listing starts mid-call; this head is reconstructed, with
    # train_sentences and train_steps as hypothetical names for the training
    # split and its step count.
    bi_lm_model.model.fit_generator(
        generator=lm_batch_generator(sentences=train_sentences,
                                     steps=train_steps),
        steps_per_epoch=train_steps,
        epochs=EPOCHS,
        validation_data=lm_batch_generator(sentences=valid_sentences,
                                           steps=valid_steps),
        validation_steps=valid_steps,
        callbacks=[
            keras.callbacks.EarlyStopping(monitor='val_loss', patience=2),
            keras.callbacks.EarlyStopping(
                monitor='val_Bi-LM-Dense-Forward_sparse_categorical_accuracy',
                patience=2),
            keras.callbacks.EarlyStopping(
                monitor='val_Bi-LM-Dense-Backward_sparse_categorical_accuracy',
                patience=2),
        ],
        verbose=True,
    )
    bi_lm_model.save_model(MODEL_LM_PATH)
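
Each EarlyStopping callback can halt training independently, so the run above ends as soon as the validation loss or either direction's LM accuracy fails to improve for two consecutive epochs.
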


def lm_batch_generator(sentences, steps):
    global word_dict, char_dict, max_word_len
    while True:
        for i in range(steps):
            batch_sentences = sentences[BATCH_SIZE *
                                        i:min(BATCH_SIZE *
                                              (i + 1), len(sentences))]
            inputs, outputs = BiLM.get_batch(
                sentences=batch_sentences,
                token_dict=word_dict,
                ignore_case=True,
                unk_index=word_dict['<UNK>'],
                eos_index=word_dict['<EOS>'],
            )
            yield inputs, outputs
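
The generator above assumes word_dict maps tokens to integer indices and already contains <UNK> and <EOS> entries. A minimal sketch of building such a dictionary (an assumption for illustration, not part of the original example):

def build_word_dict(sentences):
    # Index 0 is reserved for padding; the special tokens match the
    # unk_index/eos_index lookups used by lm_batch_generator above.
    word_dict = {'': 0, '<UNK>': 1, '<EOS>': 2}
    for sentence in sentences:
        for word in sentence:
            word = word.lower()  # consistent with ignore_case=True
            if word not in word_dict:
                word_dict[word] = len(word_dict)
    return word_dict
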
Example #4
def test_init_load(self):
    model_path = os.path.join(self.tmp_path, 'save_load.h5')
    model = BiLM(token_num=101)
    model.save_model(model_path)
    BiLM(model_path=model_path)  # loading at construction time should succeed
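
Examples #2 and #4 are methods excerpted from a unittest-style test class. A minimal harness that would give them the context they need (the class name and the setUp body are assumptions):

import tempfile
import unittest

from keras_bi_lm import BiLM


class TestBiLMSaveLoad(unittest.TestCase):

    def setUp(self):
        # Hypothetical setup providing the tmp_path attribute used by test_init_load.
        self._tmp_dir = tempfile.TemporaryDirectory()
        self.tmp_path = self._tmp_dir.name

    def tearDown(self):
        self._tmp_dir.cleanup()

    # test_save_load (Example #2) and test_init_load (Example #4) go here.


if __name__ == '__main__':
    unittest.main()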