def PTC(file_name, ax1, ax2, cutoff=1):
    """Plot the photon transfer curve (variance vs. mean) per channel.

    Parameters
    ----------
    file_name : path of the FITS file handed to ``load_hdul``.
    ax1 : matplotlib axes for the PTC itself.
    ax2 : matplotlib axes for the residual (variance - mean) plot.
    cutoff : scale factor forwarded to ``load_data`` and used to size
        the x-axis limits (default 1).

    Returns
    -------
    The variance averaged over the channels in ``CHANNEL_5``
    (accumulated as ``diffvr_el / 4`` per channel).
    """
    hdul, vbb, wideint, wwideint = load_hdul(file_name)
    diffvr_els = 0
    for C in CHANNEL_5:
        mean_el, diffvr_el, corr, gain = load_data(hdul, C, cutoff=cutoff)
        # Work in kilo-electrons so the axis labels stay readable.
        mean_el, diffvr_el = mean_el / 1000, diffvr_el / 1000
        ax1.plot(mean_el, diffvr_el, 'o', label=f'Channel {C}')
        # Residual against the ideal shot-noise line (variance == mean).
        ax2.plot(mean_el, diffvr_el - mean_el, 'o', label=f'Channel {C}')
        diffvr_els += diffvr_el / 4  # running average over the 4 channels
    # Reference line: pure Poisson (shot) noise has variance == mean.
    # Computed once and reused for both axes (was duplicated before).
    lin = np.linspace(0, 70 * cutoff, 5000)
    ax1.plot(lin, lin, '--')
    ax1.legend(loc='upper left')
    ax1.set_xlabel('Mean (kel)')
    ax1.set_ylabel('Variance ($kel^2$)')
    # Raw string so the LaTeX backslashes are not treated as (invalid)
    # string escapes; also fixes the "Possion" -> "Poisson" typo.
    ax1.annotate(r'Poisson Noise: $\sigma_{S}^{2}=\mu$', xy=(60, 60),
                 xytext=(40, 69),
                 arrowprops=dict(width=2, headlength=4, facecolor='black'),
                 fontsize=14)
    ax1.set_xlim(-2, 72 * cutoff)
    ax2.plot(lin, np.zeros(5000), '--')
    ax2.legend(loc='lower left')
    ax2.set_xlabel('Mean (kel)')
    ax2.set_ylabel('Variance ($kel^2$)')
    ax2.set_xlim(-2, 72 * cutoff)
    return diffvr_els
def raw_PTC(file_name, ax):
    """Plot the uncalibrated PTC (variance vs. mean, in ADU) per channel.

    Reads the raw per-channel table from the FITS file, removes dips
    with ``dip_remove``, converts to kADU and scatters variance against
    mean on ``ax``.
    """
    hdul, vbb, wideint, wwideint = load_hdul(file_name)
    table = hdul[1].data
    for channel in CHANNEL_5:
        rows = table[table['chans'] == channel]
        # Clean the raw mean/variance pair before plotting.
        mean, diffvr = dip_remove(rows['mn1db'], rows['diffvr'])
        # ADU -> kADU for readable axes.
        ax.plot(mean / 1000, diffvr / 1000, 'o', label=f'Channel {channel}')
    ax.legend(loc='upper left')
    ax.set_xlabel('Mean (kADU)')
    ax.set_ylabel('Variance ($kADU^2$)')
def PTC_fit(file_name, deg, ax3):
    """Fit the channel-averaged PTC with polynomials and plot residuals.

    Averages mean and variance (in kel) over the channels in
    ``CHANNEL_5``, fits the averaged curve with ``poly_fit`` at degree 1
    and degree ``deg``, and plots both residuals on ``ax3``.
    """
    hdul, vbb, wideint, wwideint = load_hdul(file_name)
    # Number of samples per channel (channel 5 used as the reference).
    data_len = len(load_data(hdul, 5)[0])
    # BUG FIX: np.zeros(np.shape(data_len)) produced 0-d arrays because
    # data_len is an int (np.shape(int) == ()); the code only worked by
    # broadcasting accident. Allocate accumulators of the proper length.
    mean_el_ave = np.zeros(data_len)
    diffvr_el_ave = np.zeros(data_len)
    for C in CHANNEL_5:
        mean_el, diffvr_el, corr, gain = load_data(hdul, C)
        # Electrons -> kilo-electrons.
        mean_el, diffvr_el = mean_el / 1000, diffvr_el / 1000
        # Average over the 4 channels.
        mean_el_ave += mean_el / 4
        diffvr_el_ave += diffvr_el / 4
    diffvr_el_1, diffvr_el_n = poly_fit(mean_el_ave, diffvr_el_ave, deg)
    ax3.plot(mean_el_ave, diffvr_el_ave - diffvr_el_1, 'x', label='deg=1')
    ax3.plot(mean_el_ave, diffvr_el_ave - diffvr_el_n, 'x', label=f'deg={deg}')
    ax3.set_xlabel('Mean (kel)')
    ax3.set_ylabel('Residual ($kel^2$)')
    ax3.legend()
def correlation_map(file_name, C, limit, ax):
    """Draw a 3-D bar map of the linear growth rate of pixel correlations.

    For every lag (i, j) within ``limit`` — except (0, 0), which is the
    variance term — the series corr[:, i, j] is fitted linearly against
    a normalized exposure axis, and the fitted slope becomes the bar
    height at (i, j) on the 3-D axes ``ax``.
    """
    hdul, vbb, wideint, wwideint = load_hdul(file_name)
    mean_el, diffvr_el, corr, gain = load_data(hdul, C)
    X, Y = np.meshgrid(np.arange(limit), np.arange(limit))
    Z = np.zeros([limit, limit])
    # The regression abscissa is loop-invariant: hoist it out.
    abscissa = np.linspace(0, 1, np.shape(corr)[0]).reshape(-1, 1)
    for i in range(limit):
        for j in range(limit):
            if i == 0 and j == 0:
                continue  # skip the (0, 0) variance term
            fit = LinearRegression(fit_intercept=True).fit(abscissa,
                                                           corr[:, i, j])
            Z[i, j] = fit.coef_[0]
    # Rotate the default camera for a clearer view of the bars.
    ax.view_init(ax.elev + 15, ax.azim + 102)
    ax.set_xlabel('Serial direction (plx)')
    ax.set_ylabel('Parallel direction (plx)')
    ax.set_zlabel('Coefficient (frac)')
    make_bars(ax, X, Y, Z, width=0.23)
def covariance(file_name, fit, ax):
    """Plot variance plus summed covariances against mean per channel.

    For each channel the correlations at lags (i, j), 0 <= i, j < 10
    (excluding (0, 0)), are converted to covariances and accumulated on
    top of the variance. When ``fit`` is True, each correlation series
    is first smoothed by a zero-intercept linear fit against the mean.

    Returns
    -------
    (mean_el_ave, cov_el_ave) : channel-averaged mean and
        variance-plus-covariance arrays (in kel / kel^2).
    """
    hdul, vbb, wideint, wwideint = load_hdul(file_name)
    data_len = len(load_data(hdul, 5)[0])
    # BUG FIX: data_len is an int, so np.shape(data_len) == () made the
    # accumulators 0-d; allocate them with the proper length instead.
    mean_el_ave, cov_el_ave = np.zeros(data_len), np.zeros(data_len)
    # Every lag pair in a 10x10 window except the (0, 0) variance term.
    ijs = [(i, j) for i in range(10) for j in range(10) if i + j != 0]
    for C in CHANNEL_5:
        mean_el, cov_el, corr, gain = load_data(hdul, C)
        # Electrons -> kilo-electrons.
        mean_el, cov_el = mean_el / 1000, cov_el / 1000
        for i, j in ijs:
            if fit is True:
                # Smooth the noisy correlation with a through-origin
                # linear fit before converting it to a covariance.
                reg = LinearRegression(fit_intercept=False).fit(
                    mean_el.reshape(-1, 1), corr[:, i, j])
                corr_ij = reg.predict(mean_el.reshape(-1, 1))
            else:
                corr_ij = corr[:, i, j]
            # covariance = correlation * mean.
            cov_el = cov_el + corr_ij * mean_el
        ax.plot(mean_el, cov_el, 'o', label=f'Channel {C}')
        # Average over the 4 channels.
        mean_el_ave = mean_el_ave + mean_el / 4
        cov_el_ave = cov_el_ave + cov_el / 4
    # Reference: pure shot noise (variance == mean, zero covariance).
    lin = np.linspace(0, 72, 5000)
    ax.plot(lin, lin, '--')
    ax.set_xlabel('Mean (kel)')
    ax.set_ylabel('Variance + Covariance ($kel^2$)')
    ax.legend(loc='upper left')
    ax.set_xlim(-2, 74)
    return mean_el_ave, cov_el_ave
def photometry(file_name, C, inlier_mask):
    """Compare normalized electron vs. photon differences for flat pairs.

    Plots the per-exposure normalized difference in measured electrons
    against the normalized difference in incident photons for channel
    ``C``, split into inliers and outliers via the boolean
    ``inlier_mask`` (e.g. from a RANSAC fit).
    """
    hdul, vbb, wideint, wwideint = load_hdul(file_name)
    outlier_mask = np.logical_not(inlier_mask)
    mean_el, diffvr_el, corr, gain = load_data(hdul, C)
    channel_data = hdul[1].data[hdul[1].data['chans'] == C]
    # Trim the raw table to the length load_data actually returned
    # (renamed from the uninformative `foo`).
    n_samples = np.shape(mean_el)[0]
    mean1_el = channel_data['mn1db'][:n_samples] * gain
    mean2_el = channel_data['mn2db'][:n_samples] * gain
    mean1_ph = channel_data['photons1'][:n_samples]
    mean2_ph = channel_data['photons2'][:n_samples]
    # Normalize each difference by its maximum so both axes are
    # dimensionless fractions. (Leftover debug prints removed.)
    mean_el_diff = (mean1_el - mean2_el) / max(mean1_el - mean2_el)
    mean_ph_diff = (mean1_ph - mean2_ph) / max(mean1_ph - mean2_ph)
    plt.figure(figsize=(8, 6))
    plt.plot(mean_ph_diff[inlier_mask], mean_el_diff[inlier_mask], '.',
             label='inlier')
    plt.plot(mean_ph_diff[outlier_mask], mean_el_diff[outlier_mask], '.',
             label='outlier')
    plt.legend(loc='upper left')
    plt.xlabel('Normalized Photon Difference (frac)')
    plt.ylabel('Normalized Electron Difference (frac)')
def correlation(file_name, C, i, j, ax):
    """Plot the (i, j) pixel correlation against mean with a linear fit.

    Scatter-plots corr[:, i, j] for channel ``C`` against the mean
    signal (in kel) and overlays an ordinary least-squares fit line in
    the same colour. (Commented-out RANSAC experiment code removed.)
    """
    hdul, vbb, wideint, wwideint = load_hdul(file_name)
    mean_el, diffvr_el, corr, gain = load_data(hdul, C)
    mean_el = mean_el / 1000  # electrons -> kilo-electrons
    corr_ij = corr[:, i, j]
    # Reshape once; sklearn expects a 2-D feature matrix.
    x = mean_el.reshape(-1, 1)
    reg = LinearRegression(fit_intercept=True).fit(x, corr_ij)
    prediction = reg.predict(x)
    # The first 5 low-signal points are omitted from the scatter only;
    # the fit above still uses the full series.
    p = ax.scatter(mean_el[5:], corr_ij[5:], edgecolors='k',
                   label=f'Channel average, $R_{{{str(i)+str(j)}}}$')
    # Draw the fit line in the same colour as the scatter points.
    ax.plot(mean_el, prediction, '-', color=p.get_facecolor()[0], linewidth=6)
    ax.set_xlabel('Mean (kel)')
    ax.set_ylabel('Correlation (frac)')
    ax.legend(loc='upper left')
    ax.set_ylim(-0.002, 0.011)