def plot_confusion_matrix(cls_pred, cls_true):
    # This is called from print_test_accuracy() below.
    # cls_pred is an array of the predicted class-number for
    # all images in the test-set.
    # cls_true is the corresponding array of true class-numbers.

    # Get the confusion matrix using sklearn.
    cm = confusion_matrix(y_true=cls_true, y_pred=cls_pred)

    # Print the confusion matrix as text.
    print(cm)

    # Plot the confusion matrix as an image.
    plt.matshow(cm)

    # Make various adjustments to the plot.
    plt.colorbar()
    tick_marks = np.arange(labels_type)
    plt.xticks(tick_marks, range(labels_type))
    plt.yticks(tick_marks, range(labels_type))
    plt.xlabel('Predicted')
    plt.ylabel('True')

    # Ensure the plot is shown correctly with multiple plots
    # in a single Notebook cell.
    plt.show()

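# A minimal usage sketch for plot_confusion_matrix() with synthetic labels.
# The data values below are hypothetical, and labels_type is assumed to be the
# module-level class count referenced by the function above.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix

labels_type = 3
cls_true = np.array([0, 1, 2, 2, 1, 0])
cls_pred = np.array([0, 2, 2, 2, 1, 0])
plot_confusion_matrix(cls_pred=cls_pred, cls_true=cls_true)
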
def cm_plot(original_label, predict_label, kunm, pic=None):
    prec_score = precision_score(original_label, predict_label, average=None)
    recall = recall_score(original_label, predict_label, average=None)
    f1 = f1_score(original_label, predict_label, average=None)
    cm = confusion_matrix(original_label, predict_label)

    # Convert each row of the confusion matrix to percentages.
    cm_new = np.empty(shape=[5, 5])
    for x in range(5):
        t = cm.sum(axis=1)[x]
        for y in range(5):
            cm_new[x][y] = round(cm[x][y] / t * 100, 2)

    plt.figure()
    plt.matshow(cm_new, cmap=plt.cm.Blues)
    plt.colorbar()

    x_numbers = []
    y_numbers = []
    cm_percent = []
    for x in range(5):
        y_numbers.append(cm.sum(axis=1)[x])
        x_numbers.append(cm.sum(axis=0)[x])
        for y in range(5):
            percent = format(cm_new[x, y] * 100 / cm_new.sum(axis=1)[x], ".2f")
            cm_percent.append(str(percent))
            plt.annotate(format(cm_new[x, y] * 100 / cm_new.sum(axis=1)[x], ".2f"), xy=(y, x),
                         horizontalalignment='center', verticalalignment='center', fontsize=10)

    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.title('confusion matrix')

    y_stage = [
        "W\n(" + str(y_numbers[0]) + ")",
        "N1\n(" + str(y_numbers[1]) + ")",
        "N2\n(" + str(y_numbers[2]) + ")",
        "N3\n(" + str(y_numbers[3]) + ")",
        "REM\n(" + str(y_numbers[4]) + ")"
    ]
    x_stage = [
        "W\n(" + str(x_numbers[0]) + ")",
        "N1\n(" + str(x_numbers[1]) + ")",
        "N2\n(" + str(x_numbers[2]) + ")",
        "N3\n(" + str(x_numbers[3]) + ")",
        "REM\n(" + str(x_numbers[4]) + ")"
    ]
    y = [0, 1, 2, 3, 4]
    plt.xticks(y, x_stage)
    plt.yticks(y, y_stage)

    # sns.heatmap(cm_percent, fmt='g', cmap="Blues", annot=True, cbar=False,
    #             xticklabels=x_stage, yticklabels=y_stage)
    # Heatmap: annot=True shows the value in each cell, fmt controls the number
    # format, and cbar=False hides the colorbar.

    plt.savefig(savepath + "matrix" + str(kunm) + ".svg")
    plt.show()
    plt.close()
    # plt.savefig("/home/data_new/zhangyongqing/flx/pythoncode/"+str(knum)+"matrix.jpg")
    return kappa(cm), classification_report(original_label, predict_label)

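# The kappa() helper used by cm_plot() above is not defined in this snippet.
# A minimal sketch of Cohen's kappa computed directly from a confusion matrix,
# assuming rows are true labels and columns are predictions:
import numpy as np

def kappa(cm):
    cm = np.asarray(cm, dtype=float)
    n = cm.sum()
    # Observed agreement: fraction of samples on the diagonal.
    p_observed = np.trace(cm) / n
    # Expected agreement under chance, from the row and column marginals.
    p_expected = np.dot(cm.sum(axis=1), cm.sum(axis=0)) / (n * n)
    return (p_observed - p_expected) / (1.0 - p_expected)
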
def test_sender():
    """
    Purpose: this function's only goal is to check whether the sender()
    function works correctly.
    """
    Map_hauteur = np.zeros(shape=(5, 5))
    Map_hauteur[1, 0] = 2
    a = sender(Map_hauteur, 1, 0, 100000, 2, 0.5*10**-1, 0.4*10**-1, 20, 1)
    plt.matshow(a)
    plt.show()

def cm_plot(original_label, predict_label, kunm, savepath):
    prec_score = precision_score(original_label, predict_label, average=None)
    recall = recall_score(original_label, predict_label, average=None)
    f1 = f1_score(original_label, predict_label, average=None)
    cm = confusion_matrix(original_label, predict_label)
    cm_new = np.empty(shape=[5, 5])
    for x in range(5):
        t = cm.sum(axis=1)[x]
        for y in range(5):
            cm_new[x][y] = round(cm[x][y] / t * 100, 2)
    plt.figure()
    plt.matshow(cm_new, cmap=plt.cm.Blues)
    plt.colorbar()
    x_numbers = []
    y_numbers = []
    cm_percent = []
    for x in range(5):
        y_numbers.append(cm.sum(axis=1)[x])
        x_numbers.append(cm.sum(axis=0)[x])
        for y in range(5):
            percent = format(cm_new[x, y] * 100 / cm_new.sum(axis=1)[x], ".2f")
            cm_percent.append(str(percent))
            plt.annotate(format(cm_new[x, y] * 100 / cm_new.sum(axis=1)[x], ".2f"), xy=(y, x),
                         horizontalalignment='center', verticalalignment='center', fontsize=10)
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.title('confusion matrix')
    y_stage = [
        "W\n(" + str(y_numbers[0]) + ")",
        "N1\n(" + str(y_numbers[1]) + ")",
        "N2\n(" + str(y_numbers[2]) + ")",
        "N3\n(" + str(y_numbers[3]) + ")",
        "REM\n(" + str(y_numbers[4]) + ")"
    ]
    x_stage = [
        "W\n(" + str(x_numbers[0]) + ")",
        "N1\n(" + str(x_numbers[1]) + ")",
        "N2\n(" + str(x_numbers[2]) + ")",
        "N3\n(" + str(x_numbers[3]) + ")",
        "REM\n(" + str(x_numbers[4]) + ")"
    ]
    y = [0, 1, 2, 3, 4]
    plt.xticks(y, x_stage)
    plt.yticks(y, y_stage)
    plt.savefig(savepath + "matrix" + str(kunm) + ".svg")
    plt.show()
    plt.close()
    return kappa(cm), classification_report(original_label, predict_label)

def plot_confusion_matrix(y_true, y_predicted, M0, M1, success_rate, main_title, show):
    cm = confusion_matrix(y_true, y_predicted)
    # print(cm)
    if show == 'yes':
        plt.matshow(cm, cmap='Reds')
        plt.colorbar()
        plt.ylabel('Actual')
        plt.xlabel('Predicted')
        plt.suptitle(main_title)
        plt.show()

    # Count the correctly classified samples on the diagonal (52 classes assumed).
    true_positives = 0
    for i in range(0, 52):
        for j in range(0, 52):
            if i == j:
                true_positives += cm[i, j]

    print('\n')
    print('M0 =', M0, ' , M1 =', M1, 'Number of true positives = ', true_positives, '\n')
    plt.close()

def plot_confusion_matrix(y_true, y_predicted, Mpca, Mlda, success_rate, show):
    cm = confusion_matrix(y_true, y_predicted)
    # print(cm)
    if show == 'yes':
        plt.matshow(cm, cmap='Reds')
        title = ('PCA-LDA Confusion Matrix with Mpca=' + str(Mpca) + ' and Mlda=' + str(Mlda) +
                 ' [Success Rate=' + str(success_rate) + '%]')
        plt.colorbar()
        plt.ylabel('Actual')
        plt.xlabel('Predicted')
        plt.suptitle(title)
        plt.show()

    # Count the correctly classified samples on the diagonal (52 classes assumed).
    true_positives = 0
    for i in range(0, 52):
        for j in range(0, 52):
            if i == j:
                true_positives += cm[i, j]

    print('\n')
    print('Mpca =', Mpca, ' , Mlda =', Mlda, 'Number of true positives = ', true_positives, '\n')
    plt.close()

def cm_plot(original_label, predict_label, kunm, pic=None):
    cm = confusion_matrix(original_label, predict_label)
    print('kappa:', kappa(cm))
    plt.figure()
    plt.matshow(cm, cmap=plt.cm.Blues)
    plt.colorbar()
    for x in range(len(cm)):
        for y in range(len(cm)):
            # xy is (column, row), so the count cm[x, y] belongs at (y, x).
            plt.annotate(cm[x, y], xy=(y, x),
                         horizontalalignment='center', verticalalignment='center')
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.title('confusion matrix')
    if pic is not None:
        plt.savefig(str(pic) + '.svg')
    # plt.xticks(('Wake','N1','N2','N3','REM'))
    # plt.yticks(('Wake','N1','N2','N3','REM'))
    # savepath is assumed to be a module-level output directory.
    plt.savefig(savepath + "cnnmatrix" + str(kunm) + ".svg")
    plt.show()

def plotCochleagram(self, batch_no, clip):
    with h5py.File(self.stimulus_files[batch_no], 'r') as f_in:
        plt.matshow(f_in[self.keyword][clip, 0:self.coch_size_flattened].reshape(self.coch_size),
                    origin='lower')
        plt.set_cmap('Blues')
        plt.title(self.stimulus_name)
        return f_in[self.keyword][clip, 0:self.coch_size_flattened].reshape(self.coch_size)

# model is assumed to be a VGG16 network with its classifier top (e.g. loaded
# earlier as VGG16(weights='imagenet')), and img_path a path defined earlier.
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras.preprocessing import image
from tensorflow.keras.applications.vgg16 import preprocess_input, decode_predictions

img = image.load_img(img_path, target_size=(224, 224))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
x = preprocess_input(x)

preds = model.predict(x)
print(decode_predictions(preds, top=3)[0])
print(np.argmax(preds[0]))

# Grad-CAM: gradient of the "African elephant" class score (ImageNet index 386)
# with respect to the feature maps of the last convolutional layer. A model that
# also exposes that layer's output lets the gradient be taken inside the tape.
last_conv_layer = model.get_layer('block5_conv3')
grad_model = tf.keras.models.Model(model.inputs, [last_conv_layer.output, model.output])

with tf.GradientTape() as gtape:
    conv_output, predictions = grad_model(x)
    african_elephant_output = predictions[:, 386]
grads = gtape.gradient(african_elephant_output, conv_output)

pooled_grads = tf.reduce_mean(grads, axis=(0, 1, 2))
pooled_grads_value = pooled_grads.numpy()
conv_layer_output_value = conv_output.numpy()[0]

# Weight each of the 512 channels by the pooled gradient for that channel.
for i in range(512):
    conv_layer_output_value[:, :, i] *= pooled_grads_value[i]

heatmap = np.mean(conv_layer_output_value, axis=-1)
heatmap = np.maximum(heatmap, 0)
heatmap /= np.max(heatmap)
plt.matshow(heatmap)
plt.show()

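# A common follow-up (not part of the snippet above) is to overlay the Grad-CAM
# heatmap on the original image; a minimal sketch using OpenCV, assuming
# img_path and heatmap from above, with the output file name chosen here only
# for illustration:
import cv2

img_bgr = cv2.imread(img_path)
heatmap_resized = cv2.resize(heatmap, (img_bgr.shape[1], img_bgr.shape[0]))
heatmap_uint8 = np.uint8(255 * heatmap_resized)
heatmap_color = cv2.applyColorMap(heatmap_uint8, cv2.COLORMAP_JET)
superimposed = np.clip(heatmap_color * 0.4 + img_bgr, 0, 255).astype('uint8')
cv2.imwrite('elephant_cam.jpg', superimposed)
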
# showing the image on a plot
plt.figure()
plt.imshow(final_image_array[0])
plt.show()

# using a Model to map one input to multiple outputs
from keras.models import Model
layer_outputs = [layer.output for layer in model.layers[:8]]
activation_model = Model(inputs=model.input, outputs=layer_outputs)
activations = activation_model.predict(img_tensor)
print(activations[0].shape)
first_layer_activation = activations[0]

# visualizing the fourth channel in the first layer (note that there are 32 channels)
plt.matshow(first_layer_activation[0, :, :, 4], cmap='viridis')

# visualizing the seventh channel of the first layer
plt.matshow(first_layer_activation[0, :, :, 7], cmap='viridis')

# unfinished
# Visualizing convnet filters
# deriving the loss of the filter
from keras.applications import VGG16
from keras import backend as K

model = VGG16(weights='imagenet', include_top=False)
filter_index = 0
layer_name = 'block3_conv1'
layer_output = model.get_layer(layer_name).output

# 64 features for each digit; the targets are in digits.target
np.bincount(digits.target)

# Cool method: each flat 64-vector can be reshaped back into an 8x8 image.
digits.data[0].shape
digits.data[0].reshape(8, 8).shape

# Commented out IPython magic to ensure Python compatibility.
import matplotlib.pyplot as plt
# %matplotlib inline
# %matplotlib notebook <- interactive interface

# plt.matshow: Display an array as a matrix in a new figure window.
plt.matshow(digits.data[0].reshape(8, 8), cmap=plt.cm.Greys)  # Color Map Gray
plt.matshow(digits.data[0].reshape(8, 8), cmap=plt.cm.Blues)  # Color Map Blue
plt.imshow(digits.data[0].reshape(8, 8), cmap=plt.cm.Greys)
plt.imshow(digits.data[1].reshape(8, 8), cmap=plt.cm.OrRd)

digits.target[0]

'''
plt.subplots() is a function that returns a tuple containing a figure and axes
object(s). Thus when using fig, ax = plt.subplots() you unpack this tuple into
the variables fig and ax. Having fig is useful if you want to change
figure-level attributes or save the figure as an image file later
(e.g. with fig.savefig('yourfilename.png')).
'''
fig, axes = plt.subplots(4, 4)
for x, y, ax in zip(digits.data, digits.target, axes.ravel()):
    # Plausible completion of the truncated loop: draw each digit image
    # with its target value as the title.
    ax.matshow(x.reshape(8, 8), cmap=plt.cm.Greys)
    ax.set_title(y)
    ax.set_xticks(())
    ax.set_yticks(())

predictions = [labels2[k] for k in predicted_class_indices]
print(predicted_class_indices)
print(labels)
print(predictions)


# In[86]:


# confusion_matrix expects (y_true, y_pred), so pass the true labels first;
# rows are then the actual classes and columns the predicted classes.
cf = confusion_matrix(label, predicted_class_indices)
cf


# In[89]:


exp_series = pd.Series(label)
pred_series = pd.Series(predicted_class_indices)
pd.crosstab(exp_series, pred_series, rownames=['Actual'], colnames=['Predicted'], margins=True)


# In[92]:


plt.matshow(cf)
plt.title('Confusion Matrix Plot')
plt.colorbar()
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()
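

# Optional alternative (not in the snippet above): scikit-learn's
# ConfusionMatrixDisplay renders the same matrix with axis labels handled for
# you; a minimal sketch assuming cf from the cell above:
from sklearn.metrics import ConfusionMatrixDisplay

disp = ConfusionMatrixDisplay(confusion_matrix=cf)
disp.plot(cmap='Blues')
plt.show()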