import itertools

import numpy as np
import matplotlib.pyplot as plt


def plot_confusion_matrix(cm, classes_types, ofname, normalize=False,
                          title='Confusion matrix', cmap=plt.cm.RdPu, show=True):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
        cm = cm.astype('int')
    print(cm)

    plt.figure(figsize=(9, 8))
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title, fontsize=16)
    cb = plt.colorbar(fraction=0.046, pad=0.04)
    cb.ax.tick_params(labelsize=16)

    tick_marks = np.arange(len(classes_types))
    plt.xticks(tick_marks, classes_types, rotation=45)
    plt.yticks(tick_marks, classes_types)
    plt.tick_params(axis='x', labelsize=16)
    plt.tick_params(axis='y', labelsize=16)

    # Annotate every cell; switch to white text only at the extremes of the colormap.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        if normalize:
            plt.text(j, i, "{:0.2f}".format(cm[i, j]),
                     horizontalalignment="center",
                     color="white" if (cm[i, j] < 0.01) or (cm[i, j] >= 0.75) else "black",
                     fontsize=18)
        else:
            plt.text(j, i, "{:0}".format(cm[i, j]),
                     horizontalalignment="center",
                     color="white" if (cm[i, j] < 3) or (cm[i, j] >= 100) else "black",
                     fontsize=18)

    plt.ylabel('True label', fontsize=16)
    plt.xlabel('Predicted label', fontsize=16)
    plt.tight_layout()

    ensure_dir(ofname)  # external helper: creates the output directory if needed
    plt.savefig(ofname, bbox_inches='tight', pad_inches=0.1)
    if show:
        plt.show()
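# Hedged usage sketch (not from the original): one way this variant might be called.
# The labels, predictions, class names and output path below are made up; sklearn's
# confusion_matrix is assumed to be available, and ensure_dir is the external helper
# the function above already relies on.
from sklearn.metrics import confusion_matrix

y_true = [0, 2, 1, 2, 0, 1, 2, 2]
y_pred = [0, 2, 1, 1, 0, 1, 2, 0]
class_names = ['cat', 'dog', 'bird']

cm_demo = confusion_matrix(y_true, y_pred, labels=[0, 1, 2])
plot_confusion_matrix(cm_demo, class_names, 'plots/confusion.png',
                      normalize=True, title='Normalized confusion matrix', show=False)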
def plot_confusion_matrix(cm, classes, normalize=False,
                          title='Confusion matrix', cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    # Normalize first so the image, the threshold and the cell labels all use the same values.
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)

    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.suptitle(title, fontsize=14, horizontalalignment="right")
    plt.colorbar()

    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45, horizontalalignment="right")
    plt.yticks(tick_marks, classes)

    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, "{:0.2f}".format(cm[i, j]),
                 horizontalalignment="center", size=8,
                 color="white" if cm[i, j] > thresh else "black")

    plt.tight_layout()
    plt.ylabel('True label')
def plot_confusion_matrix(self, true_values: list, predicted_values: list,
                          labels: List[str] = None, normalize: bool = False,
                          title: str = None, title_padding: float = None,
                          save_path: str = None, filename: str = None,
                          ax=None, show_plot: bool = True, hide_axis: bool = False):
    if ax is None:
        ax = self.create_plot()

    cm = confusion_matrix(true_values, predicted_values, labels=labels)
    vmin = cm.min()
    vmax = cm.max()
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        vmin = 0
        vmax = 1

    sns_heatmap = sns.heatmap(cm, ax=ax, vmin=vmin, vmax=vmax,
                              cmap='RdYlGn_r', square=True)

    ax.set_xlabel('Predicted values')  # , labelpad=20)
    ax.set_ylabel('True values')

    if labels is not None:
        ax.set_ylim(0, len(labels) + 0.5)
        sns_heatmap.set_yticklabels(labels, rotation=0)
        sns_heatmap.set_xticklabels(labels, rotation=45, horizontalalignment='right')

    self._add_properties(ax, title, title_padding, save_path, filename, hide_axis)

    if show_plot and (save_path is None or filename is None):
        plt.show()

    if show_plot or (save_path is not None and filename is not None):
        plt.clf()

    return ax
def plot_confusion(yhat, data, model_name):
    '''
    Args:
        yhat: numpy array of dim [n_ev, n_classes] with the net predictions
              on the test data
        data: an OrderedDict containing all X, y, w ndarrays for all particles
              (both train and test), plus a fitted "LabelEncoder", e.g.:
              data = {
                  "X_jet_train": X_jet_train, "X_jet_test": X_jet_test,
                  "X_photon_train": X_photon_train, "X_photon_test": X_photon_test,
                  "y_train": y_train, "y_test": y_test,
                  "w_train": w_train, "w_test": w_test
              }
    Returns:
        Saves confusion<model_name>.pdf with the normalized confusion matrix
    '''
    y_test = data['y_test']
    le = data['LabelEncoder']
    plt.clf()

    def _plot_confusion_matrix(cm, title='Confusion matrix', cmap=plt.cm.Blues):
        plt.imshow(cm, interpolation='nearest', cmap=cmap)
        plt.title(title)
        plt.colorbar()
        tick_marks = np.arange(len(np.unique(y_test)))
        class_names = le.inverse_transform(tick_marks)
        plt.xticks(tick_marks, class_names)
        plt.yticks(tick_marks, class_names)
        plt.tight_layout()
        plt.ylabel('True label')
        plt.xlabel('Predicted label')

    cm = confusion_matrix(y_test, np.argmax(yhat, axis=1))
    # Normalize the confusion matrix by row (i.e. by the number of samples
    # in each class)
    cm_normalized = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    _plot_confusion_matrix(cm_normalized, title='Normalized confusion matrix')
    plt.savefig('confusion' + model_name + '.pdf')
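# Hedged usage sketch (not part of the original): plot_confusion only reads
# data['y_test'] and data['LabelEncoder'], so a minimal call can look like the
# following. The class names and the random yhat are purely illustrative stand-ins.
import numpy as np
from sklearn.preprocessing import LabelEncoder

le_demo = LabelEncoder()
y_test_demo = le_demo.fit_transform(['ttbar', 'wjets', 'ttbar', 'zjets', 'wjets'])

rng = np.random.RandomState(0)
yhat_demo = rng.rand(len(y_test_demo), len(le_demo.classes_))  # stand-in for net predictions
yhat_demo /= yhat_demo.sum(axis=1, keepdims=True)              # row-normalize like softmax output

demo_data = {'y_test': y_test_demo, 'LabelEncoder': le_demo}
plot_confusion(yhat_demo, demo_data, model_name='_demo')       # writes confusion_demo.pdf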
# True values of testing dataset
R_true = test_R["true"].tolist()

# Predicted values of testing dataset
R_predicted = test_R.apply(prediction_region, axis=1)
R_predicted = [row[0] for row in R_predicted]

# Set labels
labels = Regions

# Calculation of Confusion Matrix
cm = confusion_matrix(R_true, R_predicted)

# Normalize Confusion Matrix by row (true-label counts)
cm = cm / cm.sum(axis=1, keepdims=True).astype(float)

# Plot Confusion Matrix
fig = plt.figure(figsize=(numRegions, numRegions))
ax = fig.add_subplot(111)
plt.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)

thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
    plt.text(j, i, "{0:.2f}".format(cm[i, j]),
             horizontalalignment="center",
             color="white" if cm[i, j] > thresh else "black")

tick_marks = range(len(labels))
ax.set(xticks=tick_marks, xticklabels=labels, yticks=tick_marks, yticklabels=labels)
import json

import numpy as np
import torch
import torch.nn.functional as FN
import matplotlib.pyplot as plt
from matplotlib import cm
from scipy.stats import binned_statistic


def sigmoid(x):
    # Standard logistic function.
    return 1. / (1. + np.exp(-x))


def plot_binned_stat(allIouVsCls, val_to_plot, bins=10, pltAll=0, linestyle=':',
                     plttype='stat', applysigx=True, applysigy=False, plt_unitline=False):
    color = cm.rainbow(np.linspace(0, 1, len(allIouVsCls.keys())))
    legendK = []
    if pltAll:
        # Per-class curves.
        legendK = list(allIouVsCls.keys())
        for i, cls in enumerate(allIouVsCls):
            xval = FN.sigmoid(torch.FloatTensor(allIouVsCls[cls][val_to_plot[0]])).numpy() if applysigx else allIouVsCls[cls][val_to_plot[0]]
            yval = FN.sigmoid(torch.FloatTensor(allIouVsCls[cls][val_to_plot[1]])).numpy() if applysigy else allIouVsCls[cls][val_to_plot[1]]
            if plttype == 'stat':
                aClsVsRec = binned_statistic(xval, yval, statistic='mean', bins=bins)
                aClsVsRec_std = binned_statistic(xval, yval, statistic=np.std, bins=bins)
                plt.errorbar((aClsVsRec[1][:-1] + aClsVsRec[1][1:]) / 2, aClsVsRec[0],
                             yerr=aClsVsRec_std[0], color=color[i], marker='o', linestyle=linestyle)
            else:
                plt.scatter(xval, yval, alpha=0.5, color=color[i], s=20)
    if pltAll < 2:
        # Aggregate curve over all classes.
        legendK = legendK + ['all']
        allX = np.concatenate([allIouVsCls[cls][val_to_plot[0]] for cls in allIouVsCls])
        allY = np.concatenate([allIouVsCls[cls][val_to_plot[1]] for cls in allIouVsCls])
        xval = FN.sigmoid(torch.FloatTensor(allX)).numpy() if applysigx else allX
        yval = FN.sigmoid(torch.FloatTensor(allY)).numpy() if applysigy else allY
        if plttype == 'stat':
            aClsVsRec = binned_statistic(xval, yval, statistic='mean', bins=bins)
            aClsVsRec_std = binned_statistic(xval, yval, statistic=np.std, bins=bins)
            plt.errorbar((aClsVsRec[1][:-1] + aClsVsRec[1][1:]) / 2, aClsVsRec[0],
                         yerr=aClsVsRec_std[0], color=color[-1], marker='o', linestyle='-', linewidth=2)
        else:
            plt.scatter(xval, yval, alpha=0.4, color=color[-1], s=20)
    plt.xlabel(val_to_plot[0])
    plt.ylabel(val_to_plot[1])
    plt.legend(legendK)
    if plt_unitline:
        plt.plot(xval, xval, 'k-')
    plt.show()


fname = 'removeEvalResults/fullres/train_checkpoint_stargan_coco_fulleditor_LowResMask_pascal_RandDiscrWdecay_wgan_30pcUnion_noGT_reg_biasM_randRot_fixedD_randDisc_smM_fixInp_imnet_IN_maxPool_V2_180_1227'
tr_res = json.load(open(fname, 'r'))

selected_attrs = ['person', 'bird', 'cat', 'cow', 'dog', 'horse', 'sheep', 'airplane',
                  'bicycle', 'boat', 'bus', 'car', 'motorcycle', 'train', 'bottle',
                  'couch', 'dining table', 'potted plant', 'chair', 'tv']
attToIdx = {att: i for i, att in enumerate(selected_attrs)}

res = tr_res
allIouVsCls = {}
for key, img in res['images'].items():
    for cls in img['perclass']:
        if cls not in allIouVsCls:
            allIouVsCls[cls] = {'iou': [], 'recall': [], 'precision': [], 'ocls': [], 'acls': [],
                                'rSucc': [], 'gtsize': [], 'predsize': [], 'false_damage': [],
                                'n_obj': [], 'diff': []}
        allIouVsCls[cls]['iou'].append(img['perclass'][cls]['iou'])
        allIouVsCls[cls]['recall'].append(img['perclass'][cls]['rec'])
        allIouVsCls[cls]['precision'].append(img['perclass'][cls]['prec'])
        allIouVsCls[cls]['ocls'].append(img['real_scores'][attToIdx[cls]])
        allIouVsCls[cls]['acls'].append(img['perclass'][cls]['remove_scores'][attToIdx[cls]])
        allIouVsCls[cls]['rSucc'].append(float(img['perclass'][cls]['remove_scores'][attToIdx[cls]] < 0.))
        allIouVsCls[cls]['diff'].append(img['real_scores'][attToIdx[cls]] - img['perclass'][cls]['remove_scores'][attToIdx[cls]])
        allIouVsCls[cls]['gtsize'].append(img['perclass'][cls]['gtSize'])
        allIouVsCls[cls]['predsize'].append(img['perclass'][cls]['predSize'])
        # Older variant that scaled the maximum damage by the number of other labels:
        # allIouVsCls[cls]['false_damage'].append(np.max([img['real_scores'][oclsId] - img['perclass'][cls]['remove_scores'][oclsId]
        #     for oclsId in img['real_label'] if selected_attrs[oclsId] != cls]) / (len(img['real_label']) - 1 + 1e-6))
        allIouVsCls[cls]['false_damage'].append(
            np.max([img['real_scores'][oclsId] - img['perclass'][cls]['remove_scores'][oclsId]
                    for oclsId in img['real_label'] if selected_attrs[oclsId] != cls])
            if len(img['real_label']) > 1 else np.nan)
        allIouVsCls[cls]['n_obj'].append(len(img['real_label']))

val_to_plot = ['ocls', 'recall']

# ----- Build a detection-style annotation dict for the flickr_logos_27 dataset -----
cat2id = {}
data = {}
data['images'] = {}
for ann in train_ann:
    annSp = ann.split()
    imgid = int(annSp[0].split('.')[0])
    cls = annSp[1].lower()
    if imgid not in data['images']:
        # `file` reports the image dimensions; parse the "WxH" token from its output.
        finfo = subprocess.check_output(['file', 'flickr_logos_27_dataset_images/' + annSp[0]]).decode()
        data['images'][imgid] = {'bboxAnn': [], 'id': imgid, 'filename': annSp[0], 'split': 'train',
                                 'imgSize': list(map(int, finfo.split(',')[-2].split('x')))}
    if cls not in cat2id:
        cat2id[cls] = len(cat2id)
    bbox = list(map(int, annSp[-4:]))
    img_w, img_h = data['images'][imgid]['imgSize']
    # Convert [x1, y1, x2, y2] in pixels to normalized [x, y, w, h].
    bbox = [float(bbox[0]) / float(img_w), float(bbox[1]) / float(img_h),
            float(bbox[2] - bbox[0]) / float(img_w), float(bbox[3] - bbox[1]) / float(img_h)]
    data['images'][imgid]['bboxAnn'].append({'bbox': bbox, 'cid': cat2id[cls]})

data['categories'] = [{'id': cat2id[cat], 'name': cat} for cat in cat2id]

for ann in val_ann:
    annSp = ann.split()
    imgid = int(annSp[0].split('.')[0])
    cls = annSp[1].lower()
    if imgid not in data['images']:
        finfo = subprocess.check_output(['file', 'flickr_logos_27_dataset_images/' + annSp[0]]).decode()
        data['images'][imgid] = {'bboxAnn': [], 'id': imgid, 'filename': annSp[0], 'split': 'train',
                                 'imgSize': list(map(int, finfo.split(',')[-2].split('x')))}
    if cls not in cat2id:
        cat2id[cls] = len(cat2id)
    # Validation annotations carry no box here, so use the full image.
    bbox = [0., 0., 1., 1.]
    data['images'][imgid]['bboxAnn'].append({'bbox': bbox, 'cid': cat2id[cls]})

# Mark the validation images; imid2index (defined elsewhere) maps image ids to keys of data['images'].
for ann in val_ann:
    annSp = ann.split()
    imgid = int(annSp[0].split('.')[0])
    cls = annSp[1].lower()
    data['images'][imid2index[imgid]]['split'] = 'val'

# ----- Same construction for a second dataset whose annotations reference the images/ directory -----
cat2id = {}
data = {}
data['images'] = {}
for ann in tqdm(train_ann):
    annSp = ann.split()
    if annSp[4]:
        imgid = int(annSp[2].split('.')[0])
        cls = annSp[1].lower()
        if imgid not in data['images']:
            finfo = subprocess.check_output(['file', 'images/' + annSp[2]]).decode()
            data['images'][imgid] = {'bboxAnn': [], 'id': imgid, 'filename': annSp[2], 'split': 'train',
                                     'imgSize': list(map(int, finfo.split(',')[-2].split('x')))}
        if cls not in cat2id:
            cat2id[cls] = len(cat2id)
        bbox = list(map(int, annSp[-4:]))
        img_w, img_h = data['images'][imgid]['imgSize']
        bbox = [float(bbox[0]) / float(img_w), float(bbox[1]) / float(img_h),
                float(bbox[2] - bbox[0]) / float(img_w), float(bbox[3] - bbox[1]) / float(img_h)]
        data['images'][imgid]['bboxAnn'].append({'bbox': bbox, 'cid': cat2id[cls]})

data['categories'] = [{'id': cat2id[cat], 'name': cat} for cat in cat2id]

# Add images that have no annotations at all.
for fname in tqdm(notPresentImgs):
    finfo = subprocess.check_output(['file', 'images/' + fname]).decode()
    imgid = int(fname.split('.')[0])
    data['images'][imgid] = {'bboxAnn': [], 'id': imgid, 'filename': fname, 'split': 'train',
                             'imgSize': list(map(int, finfo.split(',')[-2].split('x')))}

# ----- Zoomed-inset visualization of an image patch -----
import matplotlib.pyplot as plt
import numpy as np
import seaborn
from PIL import Image
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset

fig, ax = plt.subplots()
ax.imshow(img, origin='upper', extent=[0, 128, 128, 0])
axins = zoomed_inset_axes(ax, zoom=3, loc=7)
extent = [50, 60, 70, 60]
axins.imshow(img, interpolation="nearest", origin='upper', extent=[0, 128, 0, 128])
axins.set_xlim(*extent[:2])
axins.set_ylim(*extent[2:])
axins.yaxis.get_major_locator().set_params(nbins=7)
axins.xaxis.get_major_locator().set_params(nbins=7)
plt.xticks(visible=False)
plt.yticks(visible=False)
mark_inset(ax, axins, loc1=1, loc2=3, fc="none", ec="0.5")
ax.set_axis_off()
plt.draw()
plt.show()

fig, ax = plt.subplots(frameon=False)
ax.imshow(img, origin='lower')
axins = zoomed_inset_axes(ax, zoom=3, loc=7)
extent = [55, 65, 44, 54]
axins.imshow(img, interpolation="nearest", origin='lower')
axins.set_xlim(*extent[:2])
axins.set_ylim(*extent[2:])
axins.yaxis.get_major_locator().set_params(nbins=7)
axins.xaxis.get_major_locator().set_params(nbins=7)
# axins.set_axis_off()
plt.xticks(visible=False)
plt.yticks(visible=False)
mark_inset(ax, axins, loc1=2, loc2=3, fc="none", ec="r")
ax.set_axis_off()
plt.draw()
plt.show()

import numpy as np
import json
from scipy.special import expit
import matplotlib.pyplot as plt


def plot_confusion_matrix(cm, classes, normalize=False,
                          title='Confusion matrix', cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)

    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=90)
    plt.yticks(tick_marks, classes)

    fmt = '.1f'
    thresh = cm.max() / 2.
    # Per-cell annotations are disabled for this matrix:
    # for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
    #     plt.text(j, i, format(cm[i, j], fmt),
    #              horizontalalignment="center",
    #              color="white" if cm[i, j] > thresh else "black")

    plt.tight_layout()
    plt.ylabel('Removed Object')
    plt.xlabel('Change in Classifier Scores after removal')
# ================================================================================================
# ============================= Co-occurrence computations ======================================
# ================================================================================================
def plot_cooccur_matrix(cm, classes, normalize=False, title='Co-occurrence matrix',
                        cmap=plt.cm.Blues, vmin=None, vmax=None):
    """
    This function prints and plots the co-occurrence matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    if normalize:
        # Normalize each row by its diagonal entry, i.e. by the primary class count.
        cm = cm.astype('float') / cm.diagonal()[:, np.newaxis]
        print("Normalized co-occurrence matrix (w.r.t. primary class counts)")
    else:
        print('Co-occurrence matrix, without normalization')
    print(cm)

    if vmin is None:
        vmin = cm.min()
    if vmax is None:
        vmax = cm.max()

    plt.imshow(cm, interpolation='nearest', cmap=cmap, vmin=vmin, vmax=vmax)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
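# Hedged usage sketch (not from the original): one plausible way to assemble a
# co-occurrence matrix from per-image label-index lists before handing it to
# plot_cooccur_matrix above. The class names and per-image label lists are made up.
import itertools
import numpy as np

demo_classes = ['person', 'dog', 'car']
demo_image_labels = [[0, 1], [0, 2], [0], [1, 2], [0, 1, 2]]

cooccur = np.zeros((len(demo_classes), len(demo_classes)), dtype=int)
for labs in demo_image_labels:
    for i, j in itertools.product(labs, labs):
        cooccur[i, j] += 1  # the diagonal then holds per-class image counts

plot_cooccur_matrix(cooccur, demo_classes, normalize=True,
                    title='Normalized co-occurrence matrix')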