def plot_heat_temperature(self):
    """Plot the measurement temperature as a heat map in (H, Hr) or (Hc, Hb) space."""
    self.send_latest_data.emit()
    plotting.heat_map(ax=self.p_map.axes,
                      forc=self.data_queue.get(),
                      data_str='temperature',
                      mask=self.f_2d_mask.currentText(),
                      coordinates=self.coordinates(),
                      cmap=self.f_2d_cmap.text())
    self.tabWidget.setCurrentIndex(1)
    return
def plot_heat_rho_uncertainty(self):
    """Plot the FORC distribution uncertainty as a heat map in (H, Hr) or (Hc, Hb) space."""
    self.send_latest_data.emit()
    plotting.heat_map(ax=self.p_map.axes,
                      forc=self.data_queue.get(),
                      data_str='rho_uncertainty',
                      mask=self.f_2d_mask.currentText(),
                      coordinates=self.coordinates(),
                      cmap=self.f_2d_cmap.text())
    self.tabWidget.setCurrentIndex(1)
    return
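# The two slots above differ only in the `data_str` they pass to
# `plotting.heat_map`. A possible shared helper is sketched below; the name
# `_plot_heat_quantity` is hypothetical and assumes the same widget attributes
# (`send_latest_data`, `data_queue`, `p_map`, `f_2d_mask`, `f_2d_cmap`,
# `tabWidget`) and the module-level `plotting` import used by the slots above.
def _plot_heat_quantity(self, data_str):
    """Draw `data_str` from the latest dataset as a heat map and switch to the map tab."""
    self.send_latest_data.emit()                  # ask the data producer for the latest dataset
    plotting.heat_map(ax=self.p_map.axes,
                      forc=self.data_queue.get(),  # blocks until data arrives on the queue
                      data_str=data_str,           # e.g. 'temperature' or 'rho_uncertainty'
                      mask=self.f_2d_mask.currentText(),
                      coordinates=self.coordinates(),
                      cmap=self.f_2d_cmap.text())
    self.tabWidget.setCurrentIndex(1)
    return

# e.g. plot_heat_temperature() could then reduce to:
#     return self._plot_heat_quantity('temperature')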
def post_processing(clf, basename):
    """Given a PairwiseClassifier object and a basename, perform post processing"""

    # Average classification accuracy brain map
    mc.make_mask_map(clf, basename + "avg_class.nii.gz",
                     mc.get_mask_averages(clf))
    print "Made average accuracy map"

    print "Making consistency brain maps..."
    mc.make_mask_map(clf, basename + "imps_shannons_0.nii.gz",
                     mc.importance_stats(clf, method='shannons'))
    mc.make_mask_map(clf, basename + "imps_var_0.nii.gz",
                     mc.importance_stats(clf, method='var'))
    # mc.make_mask_map(basename + "imps_cor_0.nii.gz", mc.importance_stats(method='cor'))
    mc.make_mask_map(clf, basename + "acc_shannons_0.nii.gz",
                     mc.accuracy_stats(clf, method='shannons'))
    mc.make_mask_map(clf, basename + "clf_var_0.nii.gz",
                     mc.accuracy_stats(clf, method='var'))

    print "Making sparsity brain maps..."
    mc.make_mask_map(clf, basename + "imps_shannons_1.nii.gz",
                     mc.importance_stats(clf, method='shannons', axis=1))
    mc.make_mask_map(clf, basename + "imps_var_1.nii.gz",
                     mc.importance_stats(clf, method='var', axis=1))
    # mc.make_mask_map(clf, basename + "imps_cor_1.nii.gz", mc.importance_stats(clf, method='cor', axis=1))
    mc.make_mask_map(clf, basename + "acc_shannons_1.nii.gz",
                     mc.accuracy_stats(clf, method='shannons'))
    mc.make_mask_map(clf, basename + "clf_var_1.nii.gz",
                     mc.accuracy_stats(clf, method='var'))

    # print "Making consistency heat maps..."
    heat_map(mc.importance_stats(clf, method='shannons', axis=0, average=False).T,
             range(1, clf.mask_num), clf.feature_names,
             file_name=basename + "shannons_hm_0.png")
    heat_map(mc.importance_stats(clf, method='var', axis=0, average=False).T,
             range(1, clf.mask_num), clf.feature_names,
             file_name=basename + "var_hm_0.png")

    print "Making sparsity heat maps..."
    heat_map(mc.importance_stats(clf, method='shannons', axis=1, average=False).T,
             range(1, clf.mask_num), range(0, clf.mask_num),
             file_name=basename + "shannons_hm_1.png")
    heat_map(mc.importance_stats(clf, method='var', axis=1, average=False).T,
             range(1, clf.mask_num), range(0, clf.mask_num),
             file_name=basename + "var_hm_1.png")

    print "Making feature importance heatmaps..."
    mc.region_heatmap(clf, basename)
    mc.region_heatmap(clf, basename, zscore_regions=True)
    mc.region_heatmap(clf, basename, zscore_features=True)
    mc.region_heatmap(clf, basename, zscore_regions=True, zscore_features=True)
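# Minimal usage sketch for post_processing (not part of the original module):
# `basename` is used purely as a string prefix for every output file, so it
# should end in a path separator or underscore. `run_post_processing` and the
# "results/pairwise/" directory are hypothetical; `clf` is assumed to be a
# fitted PairwiseClassifier and the `mc` helper module used above is assumed
# to be importable.
import os


def run_post_processing(clf, out_dir="results/pairwise/"):
    """Create the output directory if needed, then write all maps and heat maps into it."""
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    post_processing(clf, out_dir)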
# Module-level imports assumed by region_heatmap.
import numpy as np
from scipy import stats


def region_heatmap(clf, basename=None, zscore_regions=False,
                   zscore_features=False, thresh=None, subset=None,
                   compare=False, each_region=True):
    """
    Makes a heatmap of the importances of the classification. Makes an
    overall average heatmap as well as a heatmap for each individual region.
    Optionally, the heatmap can be z-scored and/or thresholded.

    Args:
        basename: string, base directory and file name
        zscore_regions: boolean, should the heatmap be z-scored within regions
        zscore_features: boolean, should the heatmap be z-scored within features
        thresh: value to threshold the heatmap; only values above this value are kept
        subset: which regions should be plotted? Default is all
        compare: boolean; if True, restrict the importances to the subset on both axes
        each_region: make a heat map for every single subregion

    Outputs:
        Outputs a .png file for the overall heatmap and for each region. If
        z-scored or thresholded, this is denoted in the file name using z0
        (regions), z1 (features), and/or a tag containing the threshold value.
    """
    from plotting import heat_map

    if subset is None:
        subset = range(0, clf.mask_num)

    if compare is True:
        overall_fi = clf.feature_importances[subset][:, subset]
    else:
        overall_fi = clf.feature_importances[:, subset]

    if np.array(subset).max() > clf.mask_num:
        print "Warning: you entered an incorrect mask index!"

    fi = overall_fi.mean(axis=0).T

    z0 = ""
    z1 = ""
    t = ""

    if zscore_regions:
        fi = np.apply_along_axis(stats.zscore, 0, fi)
        z0 = "z0_"
    if zscore_features:
        fi = np.apply_along_axis(stats.zscore, 1, fi)
        z1 = "z1_"

    if thresh is not None:
        fi = np.ma.masked_array(fi)
        fi.mask = fi < thresh
        t = "zt" + str(thresh) + "_"

    if basename is None:
        file_name = None
    else:
        file_name = basename + "imps_hm_" + z0 + z1 + t + "overall.png"

    heat_map(fi, np.array(subset) + 1, clf.feature_names, file_name)

    if each_region:
        for i in subset:
            fi = overall_fi[subset.index(i)].T

            if zscore_regions:
                fi = np.ma.masked_invalid(stats.zscore(fi, axis=0))
            if zscore_features:
                fi = stats.zscore(fi, axis=1)

            if thresh is not None:
                # Wrap in a masked array first so thresholding also works
                # when no z-scoring produced one.
                fi = np.ma.masked_array(fi)
                fi.mask = fi < thresh

            if basename is None:
                file_name = None
            else:
                file_name = basename + "imps_hm_" + \
                    z0 + z1 + t + str(i) + ".png"

            heat_map(fi, np.array(subset) + 1, clf.feature_names, file_name)
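# Usage sketch for region_heatmap (hypothetical calls; `clf` is assumed to be a
# fitted PairwiseClassifier exposing `feature_importances`, `mask_num`, and
# `feature_names`, and "results/" is an arbitrary output prefix):
#
#   region_heatmap(clf, basename="results/")
#       -> results/imps_hm_overall.png plus one results/imps_hm_<i>.png per region
#   region_heatmap(clf, basename="results/", zscore_regions=True)
#       -> the same files with a "z0_" tag, importances z-scored within each region
#   region_heatmap(clf, basename="results/", subset=[0, 1, 2], thresh=0.01,
#                  each_region=False)
#       -> only the overall map for the first three regions, with importances
#          below 0.01 masked out of the plot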