# Imports assumed by this example; VADER ships both in the standalone
# `vaderSentiment` package and in `nltk`, the standalone package is assumed here.
import operator

import matplotlib.pyplot as plt
from sklearn.metrics import (accuracy_score, confusion_matrix,
                             ConfusionMatrixDisplay)
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer


def compare_sentiment(x_test, y_test, custom_model_score):
    """
    Compare VADER sentiment to the custom model's sentiment. VADER does not
    require training data.
    :param x_test: Tweets whose sentiment should be predicted
    :param y_test: Actual sentiment labels
    :param custom_model_score: Accuracy score of the custom model
    :return: VADER's predictions and accuracy score
    """
    vader_map = {'neg': 'negative', 'pos': 'positive', 'neu': 'neutral'}
    vader_predictions = []
    vader = SentimentIntensityAnalyzer()

    for text in x_test:
        vader_prediction_map = vader.polarity_scores(text)
        vader_prediction_map.pop('compound')
        vader_predictions.append(vader_map[max(vader_prediction_map.items(),
                                               key=operator.itemgetter(1))[0]])

    vader_score = accuracy_score(y_test, vader_predictions)
    print(f'Vader scored: {vader_score}')
    print(f'Custom model scored: {custom_model_score}')

    cm = confusion_matrix(y_test,
                          vader_predictions,
                          labels=['positive', 'neutral', 'negative'],
                          normalize='true')
    plot = ConfusionMatrixDisplay(cm, display_labels=['positive', 'neutral', 'negative'])
    plot.plot(xticks_rotation=45)
    plot.ax_.set_title(f'Vader Score: {vader_score}')
    # set_window_title lives on the canvas manager in Matplotlib >= 3.4
    plot.figure_.canvas.manager.set_window_title('Vader Confusion Matrix')
    plt.subplots_adjust(bottom=0.25, right=0.80, top=0.75)
    return vader_predictions, vader_score
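
# Usage sketch with toy data; a real call would pass held-out tweets and the
# accuracy of the custom model being compared.
sample_texts = ['I love this phone!', 'This update is terrible.', 'It ships on Tuesday.']
sample_labels = ['positive', 'negative', 'neutral']
compare_sentiment(sample_texts, sample_labels, custom_model_score=0.8)
plt.show()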


def confusionMatrixFromPrediction(y_true, y_pred, display_labels, title,
                                  normalize='true', cmap=plt.cm.Blues,
                                  save_path=None):
    """Plot a (normalized) confusion matrix and optionally save it to disk."""
    cm = confusion_matrix(y_true, y_pred, normalize=normalize)
    disp = ConfusionMatrixDisplay(confusion_matrix=cm,
                                  display_labels=display_labels)
    plot = disp.plot(cmap=cmap, xticks_rotation='horizontal')
    plt.title(title)
    if save_path:
        plt.savefig(save_path)
    return plot
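
# Usage sketch; the class names and file path are illustrative.
confusionMatrixFromPrediction(y_true=['cat', 'dog', 'dog', 'cat'],
                              y_pred=['cat', 'dog', 'cat', 'cat'],
                              display_labels=['cat', 'dog'],
                              title='Toy confusion matrix',
                              save_path='toy_cm.png')
plt.show()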
Example #3
    def cm(self, path):
        """Plot the test-set confusion matrix and save it to `path`."""
        y_pred = self.predict(self.ds.X_test)
        cm = confusion_matrix(self.ds.y_test, y_pred)
        disp = ConfusionMatrixDisplay(cm)

        cm_plot = disp.plot(cmap=plt.get_cmap('Blues'),
                            ax=None,
                            xticks_rotation=90,
                            values_format='d')

        cm_plot.figure_.savefig(path, bbox_inches='tight')
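
# Usage sketch: calling the method with a stub `self`; the model and the
# dataset holder below are assumptions about the surrounding class.
from types import SimpleNamespace

from sklearn.tree import DecisionTreeClassifier

X, y = [[0], [1], [2], [3]], [0, 0, 1, 1]
toy_model = DecisionTreeClassifier().fit(X, y)
stub = SimpleNamespace(predict=toy_model.predict,
                       ds=SimpleNamespace(X_test=X, y_test=y))
cm(stub, 'toy_confusion_matrix.png')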
Example #4
def train_graph_matrix(H, parameter, y_true, y_pred, noise=False, pitch=False, shift=False):
    """Plot the confusion matrix for the fitted classifier `H`, labelled with `H.classes_`."""
    print("[^-] Plotting confusion matrix...")
    cm = confusion_matrix(y_true, y_pred)
    display_labels = H.classes_

    disp = ConfusionMatrixDisplay(confusion_matrix=cm,
                                  display_labels=display_labels)
    disp1 = disp.plot(include_values=True, xticks_rotation='horizontal', cmap=plt.cm.magma)
    if noise or pitch or shift:
        disp1.ax_.set_title("Confusion " + parameter + " matrix rate with sound tunes")
    else:
        disp1.ax_.set_title("Confusion " + parameter + " matrix")
    plt.show()
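
# Usage sketch with a toy fitted classifier; names are illustrative.
from sklearn.linear_model import LogisticRegression

X_toy, y_toy = [[0.0], [1.0], [2.0], [3.0]], ['a', 'a', 'b', 'b']
H_toy = LogisticRegression().fit(X_toy, y_toy)
train_graph_matrix(H_toy, 'toy', y_true=y_toy, y_pred=H_toy.predict(X_toy))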
Example #5

from typing import List

import numpy as np


def plot_confusion_matrix(y_pred: np.ndarray,
                          y_true: np.ndarray,
                          display_labels: List[str],
                          title: str,
                          normalize=None,
                          fontsize=30) -> ConfusionMatrixDisplay:
    """Plot Confusion Matrix.

    Parameters
    ----------
    y_pred : array-like of shape (n_samples,)
        Predicted values.
    y_true : array-like of shape (n_samples,)
        Target values.
    normalize : {'true', 'pred', 'all'}, default=None
        Normalizes confusion matrix over the true (rows), predicted (columns)
        conditions or all the population. If None, confusion matrix will not be
        normalized.
    display_labels : array-like of shape (n_classes,)
        Target names used for plotting.
        Rotation of xtick labels.

    Returns
    -------
    display: `sklearn.metrics.ConfusionMatrixDisplay`
    """

    cm = confusion_matrix(y_true, y_pred, normalize=normalize)

    disp = ConfusionMatrixDisplay(confusion_matrix=cm,
                                  display_labels=display_labels)

    disp = disp.plot(include_values=True,
                     cmap=plt.cm.Blues,
                     ax=None,
                     xticks_rotation="vertical",
                     values_format=None)

    disp.ax_.set_title(title, fontsize=fontsize)
    return disp
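
# Usage sketch; assumes string class labels held in numpy arrays.
disp = plot_confusion_matrix(y_pred=np.array(['spam', 'ham', 'ham', 'spam']),
                             y_true=np.array(['spam', 'ham', 'spam', 'spam']),
                             display_labels=['ham', 'spam'],
                             title='Toy spam matrix',
                             fontsize=12)
plt.show()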
Example #6
    def _plot_confusion_matrix(self,
                               predicted_y,
                               expected_y,
                               labels,
                               largesize=False,
                               title='Confusion matrix',
                               filename='confusion_matrix.png'):
        cm = confusion_matrix(expected_y, predicted_y)
        self.logger.info(cm)

        disp = ConfusionMatrixDisplay(cm, display_labels=labels)
        if largesize:
            # figure.figsize must be set before plotting; changing rcParams
            # afterwards does not resize an already-created figure
            plt.rcParams["figure.figsize"] = (18.5, 10.5)
        disp.plot(cmap=plt.cm.Blues, values_format='')
        plt.xlabel('Predicted result')
        plt.ylabel('Expected result')
        plt.title(title)
        plt.savefig(os.path.join("output", filename), dpi=200)
        plt.close()

        self.logger.info(
            f'\n{classification_report(expected_y, predicted_y, digits=4)}')
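
# Usage sketch: the method only reads self.logger, so a stub object is enough
# to exercise it outside its class; everything below is illustrative.
import logging
import os
from types import SimpleNamespace

os.makedirs('output', exist_ok=True)  # savefig target directory
stub = SimpleNamespace(logger=logging.getLogger('evaluator'))
_plot_confusion_matrix(stub,
                       predicted_y=[0, 1, 1, 0],
                       expected_y=[0, 1, 0, 0],
                       labels=['neg', 'pos'],
                       title='Toy matrix')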
Example #7
def apply_classifier(classifier, x_train, y_train, x_test, y_test):
    """
    Applies the provided classifier.
    :param classifier: Target classifier
    :param x_train: Input training data
    :param y_train: Output classes
    :param x_test: Input testing data
    :param y_test: Output classes to test against
    :return: Result summary string and accuracy score
    """

    groups = [
        'alt.atheism', 'comp.graphics', 'comp.os.ms-windows.misc',
        'comp.sys.ibm.pc.hardware', 'comp.sys.mac.hardware', 'comp.windows.x',
        'misc.forsale', 'rec.autos', 'rec.motorcycles', 'rec.sport.baseball'
    ]

    classifier.fit(x_train, y_train)
    prediction = classifier.predict(x_test)

    score = metrics.accuracy_score(y_test, prediction)
    print(f'Prediction score: {score}')
    # Convert group number to actual text: 0 ->  'alt.atheism'
    y_test_group = [groups[index] for index in y_test]
    prediction_group = [groups[index] for index in prediction]

    result_string = f'{classifier}: Score: {round(score, 3)}'

    cm = metrics.confusion_matrix(y_test_group,
                                  prediction_group,
                                  labels=groups,
                                  normalize='true')
    plot = ConfusionMatrixDisplay(cm, display_labels=groups)
    plot.plot(xticks_rotation=90)
    plot.ax_.set_title(result_string)
    plt.subplots_adjust(bottom=0.25, right=0.80, top=0.75)
    return result_string, score
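
# Usage sketch, assuming the 20 Newsgroups data limited to the same ten
# categories hard-coded above, in a TF-IDF representation; the Naive Bayes
# classifier is an arbitrary choice.
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.naive_bayes import MultinomialNB

categories = [
    'alt.atheism', 'comp.graphics', 'comp.os.ms-windows.misc',
    'comp.sys.ibm.pc.hardware', 'comp.sys.mac.hardware', 'comp.windows.x',
    'misc.forsale', 'rec.autos', 'rec.motorcycles', 'rec.sport.baseball'
]
train = fetch_20newsgroups(subset='train', categories=categories)
test = fetch_20newsgroups(subset='test', categories=categories)
vectorizer = TfidfVectorizer()
apply_classifier(MultinomialNB(),
                 vectorizer.fit_transform(train.data), train.target,
                 vectorizer.transform(test.data), test.target)
plt.show()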
Example #8
def plot_confusion_matrix(y_pred, y_true, labels, display_labels):
    """
    Plots the confusion matrix for given data
    :param y_pred: Predicted labels
    :param y_true: True labels
    :param labels: Class labels integer
    :param display_labels: Class labels to display
    :return: The matplotlib.pyplot module, so the caller can show or save the plot
    """

    cm = confusion_matrix(y_pred=y_pred, y_true=y_true, labels=labels)

    ConfusionMatrixDisplay(confusion_matrix=cm,
                           display_labels=display_labels).plot(
                               cmap=plt.cm.Blues, values_format="d")

    plt.grid(False)

    return plt
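
# Usage sketch: integer labels paired with readable display names; returning
# plt lets the caller chain show() or savefig().
plot_confusion_matrix(y_pred=[0, 1, 1, 0],
                      y_true=[0, 1, 0, 0],
                      labels=[0, 1],
                      display_labels=['negative', 'positive']).show()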
Example #9
                      np.concatenate((stdconf[:, 0, 0], stdconf[:, 1, 1]))),
                  columns=["Artifact Type", "Type", "Average Rate", "std"])
grouped_barplot(df,
                "Artifact Type",
                "Type",
                "Average Rate",
                "std",
                colors=plotcolours,
                title="LDA with shrinkage - Classification Accuracy")
#%%
# Multiclass
clf = LinearDiscriminantAnalysis(solver='eigen', shrinkage='auto')
meanconf, stdconf = classifyTargetVsArtifacts(clf, mrk_class, epo, ivals,
                                              epo_t_o)

# Create the Axes explicitly and pass it in: ConfusionMatrixDisplay.plot
# opens its own figure when ax is None, which would ignore the figsize below.
fig, ax = plt.subplots(figsize=(10, 10))
disp = ConfusionMatrixDisplay(
    meanconf, display_labels=[abbrDict[x] for x in np.unique(mrk_class)])
disp.plot(values_format='.2f', cmap='Blues', ax=ax)
#%%
# Extract relevant data (T = target, NT = NonTarget)
chans = ['Cz', 'Pz']
time = 300
plt.figure(figsize=(10, 10))
for mrk_c in np.unique(mrk_class):
    first = epo[np.where(epo_t_o == time), clab == chans[0],
                mrk_class == mrk_c]
    second = epo[np.where(epo_t_o == time), clab == chans[1],
                 mrk_class == mrk_c]
    plt.scatter(first, second, s=2, label=artifactDict[mrk_c])
plt.legend()
plt.xlim((-10, 10))
Example #10
    model = MLPClassifier(hidden_layer_sizes=(150, 100, 50),
                          max_iter=500,
                          random_state=7)
    model.fit(x_train_vector, y_train)

    predictions = model.predict(x_test_vector)
    score = accuracy_score(y_test, predictions)
    result_string = f'{model}: Score: {round(score, 3)}'

    # Plot confusion matrix of custom model
    cm = confusion_matrix(y_test,
                          predictions,
                          labels=['positive', 'neutral', 'negative'],
                          normalize='true')
    plot = ConfusionMatrixDisplay(cm, display_labels=['positive', 'neutral', 'negative'])
    plot.plot(xticks_rotation=45)
    plot.ax_.set_title(result_string)
    plot.figure_.canvas.manager.set_window_title('Custom Model Confusion Matrix')
    plt.subplots_adjust(bottom=0.25, right=0.80, top=0.75)

    compare_sentiment(x_test, y_test, score)

    # Use model on unseen topic: Football
    other_topic = process_tweet_text(load_tweets(db, 'other_topic'))
    other_topic_df = pd.DataFrame(columns=['text', 'sentiment'])
    for tweet in other_topic:
        other_topic_df.loc[len(other_topic_df)] = [
            tweet['text'], tweet['sentiment']
        ]
Example #11
# torch.save(model.state_dict(), "spine_model.pth")
# print("Saved PyTorch Model State to model.pth")

model = binaryClassification()
model.load_state_dict(torch.load("spine_model.pth"))
model.to(device)
y_pred_list = []
model.eval()
with torch.no_grad():
    for X_batch in test_loader:
        X_batch = X_batch.to(device)
        y_test_pred = model(X_batch)
        y_test_pred = torch.sigmoid(y_test_pred)
        y_pred_tag = torch.round(y_test_pred)
        y_pred_list.append(y_pred_tag.cpu().numpy())

# Assumes test_loader yields batches of size 1, so each squeeze() gives a scalar
y_pred_list = [a.squeeze().tolist() for a in y_pred_list]

print(y_pred_list)

# labels=[1.0, 0] puts the positive class in the first row/column
cm = confusion_matrix(y_test, y_pred_list, labels=[1.0, 0])

print(cm)

disp = ConfusionMatrixDisplay(cm, display_labels=[1, 0])

disp.plot()
plt.show()
print(classification_report(y_test, y_pred_list))