# Select the best pupil-constraint hyperparameter (alpha) by mean prediction
# correlation across the sweep in `results`, summarize CC/NMSE per alpha,
# then plot the simulated data and the true latent state variables.
x0 = model_output.x

f2, ax2 = plt.subplots(1, 2)
best = 0
# Initialize so the fallback below works even if no alpha beats 0 / all
# entries are None (the original referenced best_alpha without initializing
# it, which could raise NameError after the loop).
best_alpha = 0
for k, res in results.items():
    if res is not None:
        cc_mean = res['cc'].mean()  # hoisted: was recomputed up to three times
        if cc_mean > best:
            best = cc_mean
            best_alpha = k
        ax2[0].plot(k, cc_mean, 'ko')
        ax2[1].plot(k, res['mse'].mean(), 'ro')
# An alpha of 0 means "unconstrained won" — fall back to the first nonzero
# constraint in the sweep.
if best_alpha == 0:
    best_alpha = alpha1[1]

ax2[0].set_ylabel('prediction correlation')
ax2[0].set_xlabel('pupil constraint')
ax2[0].set_aspect(cplt.get_square_asp(ax2[0]))
ax2[1].set_ylabel('NMSE')
ax2[1].set_xlabel('pupil constraint')
ax2[1].set_aspect(cplt.get_square_asp(ax2[1]))

# Simulated population response (heatmap) and the true state variables.
f, ax = plt.subplots(4, 1)
ax[0].set_title('Simulated data')
ax[0].imshow(resp, aspect='auto')
ax[2].set_title('True state-variables')
ax[2].plot(lv1.T, color='purple', label='LV')
ax[2].plot(pupil.T, color='green', label='pupil')
ax[2].legend()
# NOTE(review): this span is the interior of a try/except whose `try:` (and,
# presumably, an enclosing `for i, tar in ...` loop — TODO confirm against the
# surrounding file) begins before this view, so it is left byte-identical: its
# indentation cannot be reconstructed without seeing the enclosing structure.
# Logic: plot the target PSTH with an SEM band; the bare `except:` falls back
# to plotting all-NaN traces when the epoch extraction fails. The bare
# `except:` should be narrowed (e.g. `except (IndexError, KeyError):`) — as
# written it also swallows KeyboardInterrupt and genuine bugs. The axvline
# calls mark stimulus onset/offset, and the last lines set labels/limits and
# square the aspect before showing the figure.
tar_resp = r['pop_psth'].extract_epoch(tar)[0, :, idx].squeeze() tar_sem = r['pop_psth_sem'].extract_epoch(tar)[0, :, idx].squeeze() ax[i].plot(time, tar_resp, label=tar) ax[i].fill_between(time, tar_resp - tar_sem, tar_resp + tar_sem, alpha=0.5, lw=0) except: ax[i].plot(time, np.nan * np.ones(len(time))) ax[i].fill_between(time, np.nan * np.ones(len(time)), np.nan * np.ones(len(time)), alpha=0.5, lw=0) # figure out onset / offset bin ax[i].axvline(onset, color='lightgrey', linestyle='--') ax[i].axvline(offset, color='lightgrey', linestyle='--') # add plot labels set lims ax[i].legend(fontsize=8) ax[i].set_xlabel('Time (s)', fontsize=8) if i == 0: ax[i].set_ylabel('Norm. Response') ax[i].set_ylim((0, ylim)) ax[i].set_aspect(cplt.get_square_asp(ax[i])) fig.tight_layout() plt.show()
# Record the fitted DC and baseline terms, then build a two-panel figure
# evaluating the fit: model weights on the left, per-neuron prediction
# correlation of the full model against the null (PSTH) model on the right.
results['dc'] = d1
results['baseline'] = b

plt.figure(figsize=(6, 4))
ax2 = plt.subplot2grid((1, 2), (0, 0), rowspan=1, colspan=1)
ax3 = plt.subplot2grid((1, 2), (0, 1), rowspan=1, colspan=1)

# Left panel: gain weight vs. DC weight for each neuron.
ax2.scatter(results['gain'], results['dc'], s=25, color='k', edgecolor='white')
ax2.set_xlabel('gain weights', fontsize=8)
ax2.set_ylabel('DC weights', fontsize=8)

# Right panel: full-model CC vs. null-model CC, with the unity line.
null_cc = glm.corrcoef_by_neuron(rec['resp']._data, rec['psth']._data)
ax3.scatter(null_cc, results['cc'], s=25, color='k', edgecolor='white')
ax3.plot([0, 1], [0, 1], 'k--')
ax3.set_xlabel('Null model (r0)', fontsize=8)
ax3.set_ylabel('Full model', fontsize=8)

# Shared cosmetics: zero reference lines and a square aspect on both panels.
for panel in (ax2, ax3):
    panel.axhline(0, color='k', linestyle='--')
    panel.axvline(0, color='k', linestyle='--')
    panel.set_aspect(cplt.get_square_asp(panel))

plt.tight_layout()

# perform regression of different factors and determine how noise correlations change
r_true = rec['resp']._data
cmap='Purples', c=rec['pupil']._data.squeeze()) ax[3].set_title('second-order pupil lv') f.tight_layout() # Look at model weights and prediction coef for each neuron # and noise correlations before / after f, ax = plt.subplots(2, 2) ax[0, 0].scatter(w1, w2, s=25, color='k', edgecolor='white') ax[0, 0].axhline(0, linestyle='--', color='k') ax[0, 0].axvline(0, linestyle='--', color='k') ax[0, 0].set_xlabel('first-order weights') ax[0, 0].set_ylabel('second-order weights') ax[0, 0].set_aspect(cplt.get_square_asp(ax[0, 0])) null_cc = glm.corrcoef_by_neuron(rec['resp']._data, rec['psth']._data) pred_cc = glm.corrcoef_by_neuron(rec['resp']._data, pred) first_cc = glm.corrcoef_by_neuron(rec['resp']._data, pred2) ax[0, 1].scatter(null_cc, pred_cc, s=25, color='k', edgecolor='white') ax[0, 1].plot([0, 1], [0, 1], 'k--') ax[0, 1].set_xlabel('null model (r0)') ax[0, 1].set_ylabel('full model') ax[0, 1].set_aspect(cplt.get_square_asp(ax[0, 1])) # full model vs. first order only model ax[1, 1].scatter(null_cc, first_cc, s=25, color='k', edgecolor='white') ax[1, 1].plot([0, 1], [0, 1], 'k--') ax[1, 1].set_xlabel('null model model')