Example #1
# Fit the model (the training inputs `padded_train_docs` / `y_train` are
# assumed names, inferred from the validation_data argument).
history = model.fit(padded_train_docs, y_train,
                    epochs=5,
                    verbose=1,
                    batch_size=32,
                    validation_data=(padded_test_docs, y_test))

# In[81]:

class_names = [
    'Computer Science', 'Physics', 'Mathematics', 'Statistics',
    'Quantitative Biology', 'Quantitative Finance'
]
predsTest = pd.DataFrame(model.predict(padded_test_docs), columns=class_names)
predsTest['pred'] = predsTest.idxmax(axis=1)
predsTest.head(3)

# In[83]:

roundedPredsTest = predsTest.pred
print('Confusion Matrix:')
cf_matrix = confusion_matrix(val[val.index.isin(test_index)]['LABEL'],
                             roundedPredsTest,
                             labels=class_names)
print(cf_matrix)
meplot.cf_matrix_heatmap(cf_matrix, class_names, 13)

# In[84]:

print(
    classification_report(val[val.index.isin(test_index)]['LABEL'],
                          roundedPredsTest))
Example #2
# Plot training vs. validation loss; `epochs`, `loss` and `val_loss` are
# assumed to come from the History object (e.g. history.history['loss']).
import matplotlib.pyplot as plt
plt.plot(epochs, loss, color='r', label='Training loss')
plt.plot(epochs, val_loss, color='b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()

# Evaluate the model on the test dataset

# In[74]:

predsTest = model.predict(padded_test_docs)
roundedPredsTest = np.round(predsTest)  # threshold sigmoid outputs at 0.5
print('Confusion Matrix: Positive is class 1 and Negative is class 0')
cf_matrix = confusion_matrix(y_test, roundedPredsTest, labels=[1, 0])
print(cf_matrix)
meplot.cf_matrix_heatmap(cf_matrix)

# In[75]:

print(classification_report(y_test, roundedPredsTest))

# ### Explainability

# #### Lime

# In[77]:

from lime import lime_text
from lime.lime_text import LimeTextExplainer
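
# A minimal LIME usage sketch (not from the original notebook): it assumes
# a fitted Keras `tokenizer`, the padding length `maxlen` used to build
# `padded_test_docs`, and a list of raw test strings `test_docs` -- all
# hypothetical names. LIME perturbs the input text and needs a function
# that maps raw strings to one probability column per class.

# In[ ]:

import numpy as np
from tensorflow.keras.preprocessing.sequence import pad_sequences

def predict_proba(texts):
    # Convert the perturbed raw strings into the padded sequences the
    # model expects, then return probabilities for both classes.
    seqs = tokenizer.texts_to_sequences(texts)
    padded = pad_sequences(seqs, maxlen=maxlen)
    p = model.predict(padded)       # shape (n, 1): P(class 1)
    return np.hstack([1 - p, p])    # columns: [P(class 0), P(class 1)]

explainer = LimeTextExplainer(class_names=['class 0', 'class 1'])
exp = explainer.explain_instance(test_docs[0], predict_proba, num_features=10)
exp.show_in_notebook(text=True)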