def badpixels(self):
    fr = findramp()
    freads = findread()
    # check the slope on the 1st flat ramp to estimate how many reads are
    # needed to reach ~40,000 ADU (one read every ~5.5733 s)
    fname_ramp = fr.findramp(self.ls_flat[0])
    slope = fits.open(fname_ramp)['slope'].data
    mean, med, std = sc(slope[1800:2200, 1800:2200], sigma=5)
    n = int(40000. / (mean * 5.5733))
    print("40,000 ADU ~ n=%d" % n)

    # median CDS (correlated double sampling) image over all flat ramps
    cube_bpm = []
    for uid in self.ls_flat:
        ls = freads.findreads(uid)
        cube_bpm.append(self._make_cds(ls[0], ls[n]))
    im = np.median(cube_bpm, axis=0)

    # remove the large-scale illumination pattern with a 20x20 median filter
    filt = median_filter(im, size=(20, 20))
    filt /= np.median(filt)
    im /= filt

    # flag pixels more than 5 sigma below the mean response
    mean, _, std = sc(im, sigma=5)
    bpm = np.zeros((4096, 4096))
    bpm[im < (mean - 5 * std)] = 1

    # do not flag the 4-pixel reference border
    bpm[:4, :] = 0
    bpm[-4:, :] = 0
    bpm[:, :4] = 0
    bpm[:, -4:] = 0

    hdu = fits.PrimaryHDU(data=bpm)
    hdu.header['DATE'] = (datetime.now().strftime(self.time_fmt),
                          "Creation date")
    hdu.header['UNIQUEID'] = (datetime.now().strftime("%y%m%d%H%M%S"),
                              "Unique identification number")
    hdu.header.add_comment("Built using:")
    for uid in self.ls_flat:
        hdu.header.add_comment(uid)
    hdu.writeto(join(self.pathtmp, "badpixels.fits"), overwrite=True)
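# Hedged sketch (assumption, not the pipeline's code): _make_cds is used above but
# not defined in this section. A CDS image is conventionally the difference of two
# reads of the same ramp; a minimal, hypothetical stand-in could look like this.
def _example_make_cds(read_file_a, read_file_b):
    import numpy as np
    from astropy.io import fits

    a = fits.getdata(read_file_a).astype(float)   # earlier read
    b = fits.getdata(read_file_b).astype(float)   # later read
    return b - a                                  # accumulated signal between the two reads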
def make_report(self,
                nonlinf='nonlin.fits',
                cpath='/nirps_raw/characterization',
                sbias='superbias_20210416.fits'):
    nl = fits.getdata(join(cpath, nonlinf))
    c3 = nl[0]
    c2 = nl[1]
    bpm = nl[2]
    if sbias == '':
        bias = 0
    else:
        bias = sc(fits.getdata(join(cpath, sbias)), sigma=5)[0]

    statsC3 = sc(c3[bpm == 0], sigma=5)
    statsC2 = sc(c2[bpm == 0], sigma=5)
    statsbpm = len(bpm[bpm == 1].ravel())
    print("C3: %.2E +/- %.2E" % (statsC3[0], statsC3[2]))
    print("C2: %.2E +/- %.2E" % (statsC2[0], statsC2[2]))
    print("%f percent of pixels are BAD." % ((float(statsbpm) / 4096**2) * 100))

    from matplotlib import pyplot as plt
    import seaborn as sns
    sns.set_theme()
    f, ax = plt.subplots(1, 2)
    distc3 = c3[(c3 > np.percentile(c3, 0.15)) & (c3 < np.percentile(c3, 99.5))]
    distc2 = c2[(c2 > np.percentile(c2, 0.3)) & (c2 < np.percentile(c2, 99.9))]
    ax[0].set_yscale('log')
    ax[1].set_yscale('log')
    ax[0].hist(distc3, bins=200)
    ax[0].set(xlabel='Non-linearity value', ylabel='Count', title='C$_3$')
    ax[1].hist(distc2, bins=200)
    ax[1].set(xlabel='Non-linearity value', ylabel='Count', title='C$_2$')
    f.suptitle('Non-linearity Coefficient distribution')
    plt.tight_layout()
    f.savefig(join(cpath, 'nl-hist.png'))
    print("%s saved successfully." % join(cpath, 'nl-hist.png'))

    # find the fluxes where the non-linearity reaches 3% and 5%
    flux = np.arange(1, 65000, 1)
    perc = (((flux + statsC3[0] * flux**3 + statsC2[0] * flux**2) / flux) - 1) * 100

    def closest(arr, val):
        return np.argmin(abs(arr - val))

    perc3 = flux[closest(perc, 3)] + bias
    perc5 = flux[closest(perc, 5)] + bias
    print("3 percent non-linear @ %f, 5 percent non-linear @ %f" % (perc3, perc5))
    plt.show()
def master_map(self):
    from scanf import scanf
    from os import listdir
    from os.path import basename

    # median-combine all per-pair gain maps found in pathtmp
    ls = [
        join(self.pathtmp, f) for f in listdir(self.pathtmp)
        if ".gain.fits" in f
    ]
    uid_pairs = [scanf("%sx%s.gain.fits", basename(f)) for f in ls]
    print("Will use uid pairs:")
    for uid in uid_pairs:
        print("%s X %s" % (uid[0], uid[1]))
    cube = [fits.getdata(f)[0] for f in ls]
    gg = np.median(cube, axis=0)

    hdu = fits.PrimaryHDU(data=gg)
    from datetime import datetime
    time = datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
    uid = datetime.now().strftime("%y%m%d%H%M%S")
    hdu.header['DATE'] = (time, "Creation date")
    hdu.header['UNIQUEID'] = (uid,
                              "Unique identification number for this file.")
    hdu.header.add_comment("Built using:")
    for uid in uid_pairs:
        hdu.header.add_comment("%s,%s" % (uid[0], uid[1]))
    hdu.writeto(join(self.path, "master_gain.fits"))
    print("master_gain.fits created")

    fig, ax = plt.subplots()
    im = ax.imshow(gg, cmap='YlGn', vmin=.7, vmax=1.9)
    plt.colorbar(im)
    ax.xaxis.set_visible(False)
    ax.yaxis.set_visible(False)
    g, _, g_err = sc(gg)
    ax.set(title='Gain %2.2f+/-%2.2f adu/e$^-$' % (g, g_err))
    plt.tight_layout()
    fig.savefig(join(self.path, 'gain.png'))

    sns.set_theme()
    f1, ax1 = plt.subplots()
    gg_flat = gg.ravel()
    gg_flat = gg_flat[(gg_flat > .9) & (gg_flat < 1.9)]
    g, _, g_err = sc(gg_flat)
    ax1.hist(gg_flat, bins=50)
    ax1.set(title='Median, %.2f+/-%.2f' % (g, g_err),
            xlabel='Gain',
            ylabel='Occurrence')
    plt.tight_layout()
    f1.savefig(join(self.path, 'gain.dist.png'))
    plt.show()
def get_chi2(self, x, y, beta):
    '''
    Return the scatter of the linear-fit residuals and the reduced
    chi-squared of the fit.

    Parameters
    ----------
    x : array_like
        Independent variable (time).
    y : array_like
        Measured (corrected) flux.
    beta : sequence
        Parameters of the linear model passed to flin.

    Returns
    -------
    residuals_std : float
        Sigma-clipped standard deviation of the relative residuals.
    reduced_chi2 : float
        Chi-squared divided by the number of degrees of freedom.
    '''
    #nonlinearity = (y_ideal/y)
    #nonlinearity_unc = np.sqrt(nonlinearity**2*(self.unc(y)**2/y**2+y_ideal_unc**2/y_ideal**2))
    residuals = (y - flin(x, *beta)) / y
    residuals_std = sc(residuals)[2]
    chi2 = np.sum(((flin(x, *beta) - y) / self.unc(y))**2)
    reduced_chi2 = chi2 / (len(x) - len(beta))
    return residuals_std, reduced_chi2
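# Hedged illustration (not part of the original pipeline): a minimal, self-contained
# sketch of the same reduced chi-squared computation on synthetic data, assuming a
# simple linear model in place of `flin`, a constant per-point uncertainty in place
# of `self.unc`, and a plain standard deviation instead of the sigma-clipped one.
# All names below are hypothetical.
def _example_reduced_chi2():
    import numpy as np

    rng = np.random.default_rng(0)
    x = np.arange(1, 101, dtype=float)
    sigma = 5.0                                    # assumed constant uncertainty per point
    y = 100.0 * x + 10.0 + rng.normal(0.0, sigma, x.size)

    beta = np.polyfit(x, y, 1)                     # [slope, intercept], analogous to `beta`
    model = np.polyval(beta, x)

    residuals_std = np.std((y - model) / y)        # scatter of the relative residuals
    chi2 = np.sum(((model - y) / sigma) ** 2)
    reduced_chi2 = chi2 / (x.size - len(beta))     # dof = N - number of fitted parameters
    return residuals_std, reduced_chi2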
def darkcurrant(uids):
    # Dark-current statistics from the slope extension of each ramp.
    # Relies on the module-level good-pixel mask `gpm`.
    dc = []
    for uid in uids:
        fr = findramp()
        rfile = fr.findramp(uid)
        slope = fits.open(rfile)['slope'].data
        dc.append(sc(slope[gpm], sigma=5))
    return dc
def check_cds_noise(self, lsuid):
    stats = []
    for uid in lsuid:
        if self.toponly:
            stats.append(
                sc(np.load(join(self.pathtmp, "%s.readout.to.npy" % uid))[0, :]))
        else:
            stats.append(
                sc(np.load(join(self.pathtmp, "%s.readout.npy" % uid))[0, :]))
    mn, _, _ = zip(*stats)
    print(mn)
    _m, _, _s = sc(mn)
    print("CDS readout noise is %.2f +/- %.2f" % (_m * self.gain, _s * self.gain))
def _single(self, uid):
    if self.toponly:
        if not isfile(join(self.pathtmp, '%s.readout.to.npy' % uid)):
            print("%s not found" % uid)
            return [], []
        arr = np.load(join(self.pathtmp, '%s.readout.to.npy' % uid))
    else:
        if not isfile(join(self.pathtmp, '%s.readout.npy' % uid)):
            print("%s not found" % uid)
            return [], []
        arr = np.load(join(self.pathtmp, '%s.readout.npy' % uid))
    stats = [sc(r) for r in arr]
    S, _, err = zip(*stats)
    return np.asarray(S) * self.gain, np.asarray(err) * self.gain
def makesvar_center(self, x, y):
    '''
    Compute the mean signal and half the variance of the difference images
    in a box of size self.size centred on (x, y).

    Parameters
    ----------
    x : int
        x-coordinate of the box centre.
    y : int
        y-coordinate of the box centre.

    Returns
    -------
    S : numpy.ndarray
        Sigma-clipped mean signal per exposure inside the box.
    var : numpy.ndarray
        Variance per exposure, estimated as std(diff)**2 / 2 inside the box.
    '''
    gpm = self._sub(self.gpm, x, y)
    s = self._sub3d(self.S, x, y)
    diff = self._sub3d(self.diff, x, y)
    S = [sc(im[gpm], maxiters=4)[0] for im in s]
    var = [sc(im[gpm], maxiters=4)[2]**2 / 2.0 for im in diff]
    return np.asarray(S), np.asarray(var)
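# Hedged illustration (assumption, not the pipeline's actual gain computation): the
# (S, var) pair returned above is the classic photon-transfer input. Under the usual
# shot-noise assumption, the slope of variance versus signal is 1/gain (gain in
# e-/ADU); units and conventions may differ from this pipeline's gain maps. The
# function below is a hypothetical, minimal sketch of that fit.
def _example_gain_from_photon_transfer(S, var):
    import numpy as np

    S = np.asarray(S, dtype=float)
    var = np.asarray(var, dtype=float)
    slope, intercept = np.polyfit(S, var, 1)   # var ~ S / gain + read-noise term
    gain = 1.0 / slope                         # e-/ADU under the shot-noise assumption
    return gain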
def repport(dc, show=True, gain=1.27):
    # Dark-current report: statistics, histogram, and cumulative distribution.
    # Relies on the module-level `path` for the output figure.
    dc = dc * gain
    stats = sc(dc, sigma=5)
    print("Dark current: %f +/- %f" % (stats[1], stats[2]))
    dc = dc[(dc > np.percentile(dc, 0.13)) & (dc < np.percentile(dc, 99.87))]
    count, bins_count = np.histogram(dc, bins=200)
    pdf = count / sum(count)
    cdf = np.cumsum(pdf)
    f, ax = plt.subplots(2, 1)
    ax[0].hist(dc, bins=200)
    ax[1].plot(bins_count[1:], cdf)
    ax[1].set(xlabel='Dark current (ADU/second)', title='Cumulative sum')
    ax[0].set(xlabel='Dark current (ADU/second)',
              ylabel='Count',
              title='Dark current distribution')
    plt.tight_layout()
    f.savefig(join(path, 'darkcurrant_dist.png'))
    if show:
        plt.show()
def hotpixels(self):
    fr = findread()
    cube = []
    hp = np.zeros((4096, 4096), dtype=int)
    # median CDS image over all dark ramps
    for uid in self.ls_dark:
        ls = fr.findreads(uid)
        cube.append(self._make_cds(ls[0], ls[1]))
    dark = np.median(cube, axis=0)
    # flag pixels more than 5 sigma above the mean dark level
    mean, _, std = sc(dark, sigma=5)
    hp[dark > (mean + 5 * std)] = 1
    hdu = fits.PrimaryHDU(data=hp)
    hdu.header['DATE'] = (datetime.now().strftime(self.time_fmt),
                          "Creation date")
    hdu.header['UNIQUEID'] = (datetime.now().strftime("%y%m%d%H%M%S"),
                              "Unique identification number")
    hdu.header.add_comment("Built using:")
    for uid in self.ls_dark:
        hdu.header.add_comment(uid)
    hdu.writeto(join(self.pathtmp, "hotpixels.fits"), overwrite=True)
    print("hotpixels.fits written successfully.")
    return hp
def darkcurrantUdeM(ls):
    # Dark-current statistics from a list of slope files already on disk
    # (first data plane of each file). Relies on the module-level `gpm`.
    dc = []
    for rfile in ls:
        slope = fits.getdata(rfile)[0]
        dc.append(sc(slope[gpm], sigma=5))
    return dc
def fit_graph_rapport(self, f_meas, xpos=2000, ypos=2000):
    '''
    Show an example of the non-linearity fit for one pixel, comparing
    curve_fit and polyfit.

    Parameters
    ----------
    f_meas : array_like
        Measured flux ramp (DN) for the pixel, one value per read.
    xpos : int, optional
        Pixel column, used only in the plot title.
    ypos : int, optional
        Pixel row, used only in the plot title.

    Returns
    -------
    a3 : float
        3rd-order non-linearity coefficient (curve_fit branch).
    a2 : float
        2nd-order non-linearity coefficient (curve_fit branch).
    '''
    a2 = 0   # 2nd order coefficient, curve_fit
    a3 = 0   # 3rd order coefficient, curve_fit
    _a2 = 0  # polyfit
    _a3 = 0  # polyfit

    # read times (one read every ~5.5733 s), keeping only unsaturated samples
    x = 5.5733 * np.arange(1, len(f_meas) + 1, 1)
    time = x[f_meas < self.saturation]
    f = f_meas[f_meas < self.saturation]

    fig2, ax2 = plot()
    ax2[0].plot(time, f, 'd', markersize=3, label='$f_{original}$')
    fig, ax = plot()

    #def ffit(t,a,b):
    #    return a*t+b

    def fffit(t, a3, a2, a1, a0):
        return a3 * t**3 + a2 * t**2 + a1 * t + a0

    # y2  -> corrected flux, coefficients from curve_fit
    # _y2 -> corrected flux, coefficients from polyfit
    for ite in range(self.ite):
        y2 = f + a2 * f**2 + a3 * f**3
        _y2 = f + _a2 * f**2 + _a3 * f**3

        # fit the linear model to find S_ideal
        _beta, _lin_cov = np.polyfit(time, _y2, 1, cov=True)
        beta, lin_cov = curve_fit(flin,
                                  time,
                                  y2,
                                  sigma=self.unc(y2),
                                  p0=[100, 0],
                                  maxfev=10000)
        signal_fit_cov = lin_cov[1, 0]
        #_signal_fit_cov = _lin_cov[1,0]
        #_err = np.sqrt(np.diag(_lin_cov))
        err = np.sqrt(np.diag(lin_cov))
        ideal_fit_unc = np.sqrt(err[0]**2 + (err[1] * time)**2 +
                                2 * signal_fit_cov**2)
        #_ideal_fit_unc = np.sqrt(_err[0]**2+(_err[1]*time)**2+ 2*_signal_fit_cov**2)
        err_2fit = ideal_fit_unc + self.unc(y2)
        #_err_2fit = _ideal_fit_unc+self.unc(_y2)

        # fit a cubic to the residuals and fold it back into the coefficients
        _correction, _c_cov = np.polyfit(_y2, _y2 - flin(time, *_beta), 3, cov=True)
        correction, c_cov = curve_fit(fffit,
                                      y2,
                                      y2 - flin(time, *beta),
                                      p0=[-7e-13, 2e-08, 1e-04, -2],
                                      sigma=err_2fit,
                                      maxfev=10000)
        a2 -= correction[1]
        a3 -= correction[0]
        _a2 -= _correction[1]
        _a3 -= _correction[0]

        ax[1].plot(time,
                   y2 - np.polyval(np.polyfit(time, y2, 1), time),
                   '.',
                   label='iteration {0}'.format(ite))
        ax[0].plot(time, y2, '.', label='iteration {0}'.format(ite), alpha=0.5)

    beta, lin_cov = curve_fit(flin,
                              time,
                              y2,
                              sigma=self.unc(y2),
                              p0=[100, 0],
                              maxfev=10000)
    chi2_res, red_chi2 = self.get_chi2(time, y2, beta)
    print(chi2_res, red_chi2)
    res = 1 - (flin(time, *beta) / y2)
    _res = 1 - (flin(time, *_beta) / _y2)

    # comparison plot: curve_fit vs polyfit
    props = dict(boxstyle='round', facecolor='lightgreen', alpha=0.5)
    beta_, lin_cov_ = curve_fit(flin,
                                time,
                                y2,
                                sigma=self.unc(y2),
                                p0=[100, 0],
                                maxfev=10000)
    _beta_, _lin_cov_ = curve_fit(flin,
                                  time,
                                  _y2,
                                  sigma=self.unc(_y2),
                                  p0=[100, 0],
                                  maxfev=10000)
    _err_ = np.sqrt(np.diag(_lin_cov_))
    err_ = np.sqrt(np.diag(lin_cov_))
    ax2[0].text(.8,
                0.3,
                "Slope: %.2f+/-%.2f\nb: %.2f+/-%.2f" %
                (beta_[0], err_[0], beta_[1], err_[1]),
                transform=ax2[0].transAxes,
                fontsize=8,
                verticalalignment='top',
                bbox=props)
    ax2[0].plot(time, y2, 'd', markersize=3, label='$f_{corr}$')
    ax2[0].plot(time, flin(time, *beta), label='$f_{ideal}$')
    ax2[1].plot(time, res, 'd', markersize=3, label='Residuals')
    ax2[1].set_ylim([-.01, .01])
    ax2[1].text(.8,
                0.2,
                "res. : %f " % (sc(res)[2]),
                transform=ax2[1].transAxes,
                fontsize=8,
                verticalalignment='top',
                bbox=props)
    ax2[0].set(title='Pixel X:%d Y:%d' % (xpos, ypos))
    ax2[0].legend()
    ax2[1].legend()
    ax2[1].set(xlabel='Time (second)', ylabel='Residuals')
    ax2[0].set(xlabel='Time (second)', ylabel='flux (DN)')
    print('res: %f' % (sc(res)[2]))
    #self.get_chi2(time,)
    ax[1].legend()
    ax[0].legend()
    ax[1].set(xlabel='time', ylabel='flux - lin model')
    ax[0].set(xlabel='time', ylabel='flux')
    plt.show()
    return a3, a2
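# Hedged illustration (assumption, not the original code): a compact, self-contained
# sketch of the iterative correction scheme used above, on a synthetic ramp and with
# unweighted polyfit instead of curve_fit (no uncertainties, no saturation cut). The
# measured flux is corrected with the current (a2, a3) estimates, a straight line is
# fit to the corrected ramp, a cubic is fit to the residuals versus corrected flux,
# and the cubic's quadratic/cubic terms are folded back into (a2, a3). All names and
# numbers below are hypothetical.
def _example_iterative_nonlin(n_iter=5):
    import numpy as np

    t = 5.5733 * np.arange(1, 100)                 # read times (s), same cadence as above
    f_ideal = 90.0 * t                             # ideal linear ramp (DN), ~50 kDN at the end
    # synthetic detector response with a known quadratic + cubic distortion
    f_meas = f_ideal - 2e-7 * f_ideal**2 + 1e-12 * f_ideal**3

    a2, a3 = 0.0, 0.0
    for _ in range(n_iter):
        f_corr = f_meas + a2 * f_meas**2 + a3 * f_meas**3
        slope, intercept = np.polyfit(t, f_corr, 1)
        resid = f_corr - (slope * t + intercept)
        c3, c2, c1, c0 = np.polyfit(f_corr, resid, 3)
        a2 -= c2
        a3 -= c3
    # should roughly recover a2 ~ +2e-7 and a3 ~ -1e-12 for this synthetic ramp
    return a2, a3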
def _amp_ro(self, amp, mask):
    # sigma-clipped standard deviation of the masked amplifier region
    return sc(amp[mask], sigma=5)[2]