def plot_scatters(subjects, axes):

    ftemp = "correlation_analysis/{}_{}_ifs.pkz"
    for subj, ax in zip(subjects, axes):

        sticks = moss.load_pkl(ftemp.format(subj, "sticks")).corrmat
        rest = moss.load_pkl(ftemp.format(subj, "rest")).corrmat
        triu = np.triu_indices_from(rest, 1)

        ax.scatter(sticks[triu], rest[triu],
                   s=3, linewidth=.2, color=".6",
                   edgecolor="w", rasterized=True)

        ax.plot([-.2, .8], [-.2, .8], lw=1, dashes=(5, 2), color=".3")

    plt.setp(axes,
             xlim=(-.25, .8), ylim=(-.25, .8),
             xticks=np.linspace(-.2, .8, 6),
             yticks=np.linspace(-.2, .8, 6),
             aspect="equal")
    plt.setp(axes[1:], yticklabels=[])

    for ax in axes:
        sns.despine(ax=ax, trim=True)
        plt.setp(ax.get_xticklabels(), size=6)
        plt.setp(ax.get_yticklabels(), size=6)
def plot_kdes(subjects, axes):

    ftemp = "correlation_analysis/{}_{}_ifs.pkz"
    for subj, ax in zip(subjects, axes):

        sticks = moss.load_pkl(ftemp.format(subj, "sticks")).corrmat
        rest = moss.load_pkl(ftemp.format(subj, "rest")).corrmat
        triu = np.triu_indices_from(rest, 1)

        sns.kdeplot(sticks[triu], color=".15",
                    label="residual", ax=ax)
        sns.kdeplot(rest[triu], color=".45", dashes=[4, 1],
                    label="resting", ax=ax)

    plt.setp(axes,
             xlim=(-.25, .8), ylim=(0, 17),
             xticks=np.linspace(-.2, .8, 6),
             yticks=[])

    for ax in axes:
        sns.despine(ax=ax, left=True, trim=True)
        plt.setp(ax.get_xticklabels(), size=6)
        plt.setp(ax.get_yticklabels(), size=6)

    axes[0].legend(bbox_to_anchor=(1.2, .8))
    for ax in axes[1:]:
        ax.legend_ = None
def plot_time_corrs(subjects, axes):

    x = np.arange(1, 5)
    palette = [".2", ".5"]
    for subj, ax in zip(subjects, axes):

        res_fname = "correlation_analysis/{}_rest_ifs.pkz".format(subj)
        res = moss.load_pkl(res_fname)

        for line, color in zip(res.corr_times.T, palette):
            ax.plot(x, line, "o-", color=color, ms=3, clip_on=False)

        sig = res.corr_times_pctiles > 95
        ax.plot(x[sig], np.ones(sig.sum()) * .0025, marker=(6, 2, 0),
                ls="", mew=.35, mec=".2", ms=3)

        ax.set(xticks=x, xlim=(.6, 4.4), ylim=(0, .07))
        sns.despine(ax=ax, trim=True)

    plt.setp(axes[1:], yticklabels=[])
    axes[0].set_ylabel("Correlation (r)")
def plot_mds(subjects, experiments, axes):

    for subj, exp, ax in zip(subjects, experiments, axes):

        res_fname = "correlation_analysis/{}_{}_ifs.pkz".format(subj, exp)
        res = moss.load_pkl(res_fname)
        sorter = np.argsort(np.abs(res.prefs))

        x_, y_ = res.mds_coords.T.dot(res.prefs)
        t = np.arctan2(y_, x_)
        rot = [[np.cos(t), np.sin(t)], [-np.sin(t), np.cos(t)]]
        x, y = np.dot(rot, res.mds_coords[sorter].T)

        cmap = get_colormap(exp)
        ax.scatter(x, y, c=res.prefs[sorter], cmap=cmap,
                   vmin=-1.75, vmax=1.75, s=8, linewidth=0)

        ax.set(xlim=(-.9, .9), ylim=(-.9, .9), aspect="equal")
        ax.set_axis_off()
def get_subject_order(exp):
    """Return the subject list ordered by decoding accuracy (highest first)."""
    subjects = lyman.determine_subjects([exp + "_subjects"])
    accs = pd.Series(index=subjects, dtype=np.float)
    for subj in subjects:
        fname = "decoding_analysis/{}_{}_ifs.pkz".format(subj, exp)
        accs.ix[subj] = moss.load_pkl(fname).acc
    return list(accs.sort(inplace=False, ascending=False).index)
def plot_corrmats(subjects, axes, exp):

    for subj, ax in zip(subjects, axes):

        fname = "correlation_analysis/{}_{}_ifs.pkz".format(subj, exp)
        corrmat = moss.load_pkl(fname).corrmat

        ax.imshow(corrmat - np.eye(len(corrmat)),
                  cmap="RdBu_r", vmin=-.15, vmax=.15,
                  rasterized=True)

        ax.set(xticks=[], yticks=[])
        sns.despine(ax=ax, left=True, bottom=True)
def plot_prediction_curves(subjects, axes, exp):

    res_ftemp = "spatial_analysis/{}_{}_ifs.pkz"
    for subj, ax in zip(subjects, axes):

        res = moss.load_pkl(res_ftemp.format(subj, exp))
        x = res.steps

        norm = res.null.mean()
        real = res.real / norm
        pint = res.pint / norm

        ax.plot(x, real, "o-", color=".15", ms=2.5, clip_on=False)
        ax.fill_between(x, *pint, color=".4", alpha=.3)

        cross_x, cross_y = res.intersect
        cross_y /= norm
        ax.plot([cross_x, cross_x], [0, cross_y],
                lw=.8, dashes=[3, 1], color=".5", zorder=0)

        ax.set(xlim=(0, 40), ylim=(0, 2),
               xticks=np.linspace(0, 40, 5),
               yticks=[0, 1, 2], yticklabels=[0, 1, 2])
        sns.despine(ax=ax)

    ylabel = "Normalized error"
    plt.setp(axes[1:7], yticklabels=[])
    axes[0].set(ylabel=ylabel)
    if exp == "dots":
        plt.setp(axes[8:], yticklabels=[])
        plt.setp(axes[:7], xticklabels=[])
        axes[7].set_ylabel(ylabel)
def plot_brains(subjects, axes):

    for subj, subj_axes in zip(subjects, axes):

        exp = dict(pc="dots", ti="sticks")[subj[:2]]

        data_fname = "roi_cache/{}_{}_ifs.npz".format(subj, exp)
        with np.load(data_fname) as dobj:
            vox_ijk = dobj["vox_ijk"]

        res_fname = "decoding_analysis/{}_{}_ifs.pkz".format(subj, exp)
        res = moss.load_pkl(res_fname)
        prefs = res.prefs

        surf_vals = roi_to_surf(exp, subj, prefs, vox_ijk)
        lut = get_colormap(exp, False)

        for hemi, ax in zip(["lh", "rh"], subj_axes):

            b = Brain(subj, hemi, "inflated", background="white",
                      cortex=("binary", -4, 8, False),
                      size=(1000, 600))

            b.add_data(surf_vals.ix[hemi].fillna(-11).values,
                       colormap=lut, colorbar=False,
                       thresh=-10, min=-1.75, max=1.75)

            mlab.view(*get_ifs_view(subj, hemi))
            img = crop(b.screenshot())
            ax.imshow(img, rasterized=True)
            ax.set(xticks=[], yticks=[])

            b.close()
def plot_cluster_error(ax):

    res_ftemp = "spatial_analysis/{}_{}_ifs.pkz"
    for exp in ["dots", "sticks"]:

        subjects = get_subject_order(exp)
        color = get_colormap(exp, as_cmap=False)[20]

        errs = []
        for subj in subjects:
            res = moss.load_pkl(res_ftemp.format(subj, exp))
            x = res.steps
            norm = res.null.mean()
            errs.append(res.real / norm)
        errs = np.vstack(errs)

        mean = errs.mean(axis=0)
        ax.plot(x, mean, color=color, lw=2)

        sem = stats.sem(errs, axis=0)
        ax.fill_between(x, mean - sem, mean + sem, alpha=.2, color=color)

    ax.axhline(y=1, lw=1, dashes=[5, 2], color=".5",
               zorder=0, xmin=.02, xmax=.98)

    ax.set(xlim=(0, 42), ylim=(.55, 1.45),
           yticks=[.6, .8, 1, 1.2, 1.4],
           xticks=[0, 10, 20, 30, 40],
           xlabel="Neighborhood radius (mm)",
           ylabel="Normalized error")

    sns.despine(ax=ax, trim=True)
def plot_hists(subjects, axes, label_last=1, ymax=350):

    bins = np.linspace(-2, 2, 20)
    for subj, ax in zip(subjects, axes):

        exp = dict(pc="dots", ti="sticks")[subj[:2]]
        res_fname = "decoding_analysis/{}_{}_ifs.pkz".format(subj, exp)
        res = moss.load_pkl(res_fname)
        prefs = res.prefs
        pctiles = res.pref_pctiles

        cmap = get_colormap(exp)
        plot_prefs = [
            prefs[pctiles < 10],
            prefs[pctiles > 90],
            prefs[(pctiles >= 10) & (pctiles <= 90)],
        ]
        ax.hist(plot_prefs, histtype="barstacked", rwidth=1,
                color=[cmap(.01), cmap(.99), ".9"], bins=bins)

        ax.set(xlim=(-2, 2),
               xticks=[-2, -1, 0, 1, 2], xticklabels=[],
               yticks=[], ylim=(0, ymax))

    for ax in axes[-label_last:]:
        ax.set_xlabel("Context\npreference", labelpad=2, fontsize=7)
        ax.set(xticklabels=[-2, -1, 0, 1, 2])

    for ax in axes:
        sns.despine(ax=ax, left=True)
def plot_distance_corrs(subjects, axes, exp):

    for subj, ax in zip(subjects, axes):

        res_fname = "correlation_analysis/{}_{}_ifs.pkz".format(subj, exp)
        res = moss.load_pkl(res_fname)
        x = res.distance_thresh

        for dim, color, marker in zip(["3D", "2D"], [".5", ".2"], ["x", "+"]):

            same, diff = res.corr_distance[dim].T
            ax.plot(x, same - diff, "o-", color=color, ms=3, clip_on=False)

            sig = res.corr_distance_pctiles[dim] > 95
            stary = -.005 if exp == "dots" else -.0025
            ax.plot(x[sig], np.ones(sig.sum()) * stary, marker=marker,
                    ls="", mew=.35, mec=".2", ms=3)

        ylim = (-.01, .08) if exp == "dots" else (-.005, .04)
        yticks = np.array([0, .01, .02, .03, .04])
        yticks = yticks * 2 if exp == "dots" else yticks
        ax.set(xlim=(-2, 42), ylim=ylim, yticks=yticks)
        sns.despine(ax=ax, trim=True)

    ylabel = "Subnetwork strength\n($r_{\mathrm{same}} - r_{\mathrm{diff}}$)"
    plt.setp(axes[1:7], yticklabels=[])
    axes[0].set_ylabel(ylabel)
    if exp == "dots":
        plt.setp(axes[8:], yticklabels=[])
        plt.setp(axes[:7], xticklabels=[])
        axes[7].set_ylabel(ylabel)
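# Illustrative usage sketch -- not part of the original figure scripts.  The
# helpers above all expect an ordered subject list and a matching sequence of
# matplotlib axes; get_subject_order supplies the list sorted by decoding
# accuracy.  The function name and arguments below are hypothetical and only
# show how the pieces fit together, assuming the cached .pkz results exist.
def example_scatter_figure(exp="sticks", savename=None):

    subjects = get_subject_order(exp)
    f, axes = plt.subplots(1, len(subjects),
                           figsize=(7, 1.1), squeeze=False)
    plot_scatters(subjects, axes[0])
    if savename is not None:
        f.savefig(savename, dpi=300)
    return f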
rois = ["ifs", "mfc"]

decoding_data = {}
for exp, subj_list in subjects.iteritems():

    # Set up the dataframe for this experiment
    idx = pd.MultiIndex.from_product([subj_list, rois],
                                     names=["subj", "roi"])
    exp_df = pd.DataFrame(index=idx,
                          columns=["acc", "chance", "pctile"],
                          dtype=np.float)

    # Load the data for each subject/roi
    for subj, roi in idx:
        fname = "decoding_analysis/{}_{}_{}.pkz".format(subj, exp, roi)
        res = moss.load_pkl(fname)
        exp_df.ix[subj, roi] = [res.acc, res.chance, res.acc_pctile]

    decoding_data[exp] = exp_df

# Combine across experiments and save
decoding_df = pd.concat(decoding_data, names=["experiment"]).reset_index()
decoding_df.to_csv("data/decoding_results.csv", index=False)

# ----- Combine correlation results

subjects["rest"] = subjects["sticks"]

corr_dfs = {}
for exp in ["dots", "sticks", "rest"]:

    subj_list = subjects[exp]
    exp_data = []
if __name__ == "__main__":

    try:
        _, subj, exp, roi = sys.argv
    except ValueError:
        sys.exit("Usage: spatial_analysis.py <subj> <exp> <roi>")

    # Ensure that the output directory exists
    if not op.exists("spatial_analysis"):
        os.mkdir("spatial_analysis")

    # Load the context preference data
    fname = "decoding_analysis/{}_{}_{}.pkz".format(subj, exp, roi)
    prefs = moss.load_pkl(fname).prefs

    # Load the distance matrix
    fname = "roi_cache/{}_{}_{}.npz".format(subj, exp, roi)
    with np.load(fname) as dobj:
        dmat = dobj["dmat2d"]

    # Define the steps and radius
    steps = np.arange(2, 42, 2)
    radius = 2

    # Compute the real curve
    real = prediction_curve(dmat, prefs, steps, radius)

    # Compute the null curves
    seed = sum(map(ord, subj + "_spatial"))
sys.exit("Usage: correlation_analysis.py <subj> <exp> <roi>")

# Ensure that the output directory exists
if not op.exists("correlation_analysis"):
    os.mkdir("correlation_analysis")

# Load the data
data_fname = "roi_cache/{}_{}_{}.npz".format(subj, exp, roi)
dobj = np.load(data_fname)
data = dobj["ts_data"]
dmats = {"2D": dobj["dmat2d"], "3D": dobj["dmat3d"]}

# Load the outputs of the decoding analysis
task_exp = "sticks" if exp == "rest" else exp
res_fname = "decoding_analysis/{}_{}_{}.pkz".format(subj, task_exp, roi)
res = moss.load_pkl(res_fname)

# Remove voxels that were excluded from the decoding analysis
data = data[:, res.good_voxels]

# Pull out the tail voxels
tail_mask = res.tails.astype(np.bool)
data = data[:, tail_mask]
tails = res.tails[tail_mask]
prefs = res.prefs[tail_mask]

# Regress the task-related effects out of the experiment data
if exp != "rest":
    data = regress_task(exp, subj, data)

# Compute the timeseries correlation