# Imports reconstructed from usage. `cam` is the project's camera-control
# module (import path assumed); module-level constants (naxis1, naxis2,
# dark_path, routine, img_dir, unsorted_img, local_img_dir and the
# master_* / read_path output directories) are assumed to be defined
# elsewhere in the package.
import os
import glob
import math as m

import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from astropy.io import fits
from astropy.constants import h, c
from scipy import optimize
from scipy.stats import linregress, sigmaclip
from skimage import exposure

import cam  # assumed import of the project's camera-control module


def ptc(folder):
    '''Photon transfer curve (log noise vs log signal) from pairs of frames at each DIT.'''
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    bias = np.asarray(bias, dtype=np.int32)
    times = []
    for k in img_list:
        hdu = fits.open(k)
        times.append(hdu[0].header['DITSER'])
    times = np.unique(times)  #sort times if order is needed
    sets = [[] for _ in times]
    for j in img_list:
        hdu = fits.open(j)
        dit = hdu[0].header['DITSER']
        ind = np.argwhere(times == dit)
        sets[ind[0, 0]].append(hdu[0].data)
    amp, noise = [], []
    for i in sets:
        first = i[1].astype(np.int32)
        second = i[0].astype(np.int32)
        diff_img = first - second
        single = first - bias
        roi_diff = diff_img[200:900, 300:1000]
        roi_single = single[200:900, 300:1000]
        noise.append(np.std(roi_diff))
        amp.append(np.median(roi_single))
    noise = np.array(noise) / m.sqrt(2)
    amp = np.array(amp)
    amp = np.log10(amp)
    noise = np.log10(noise)
    plt.scatter(amp, noise, c='b', label='Data')
    x1 = amp[(amp < 4) & (amp > 3.4)]
    y1 = noise[(amp < 4) & (amp > 3.4)]
    x = np.linspace(-1, 5, 500)
    x2 = amp[(amp < 4) & (amp > 3.8)]
    y2 = noise[(amp < 4) & (amp > 3.8)]

    def fixed(x, b):
        return 0.5 * x + b

    popt1, _ = optimize.curve_fit(fixed, x1, y1)
    plt.plot(x, fixed(x, *popt1), 'r-', label='1/2 Slope Fit 1')
    popt2, _ = optimize.curve_fit(fixed, x2, y2)
    # plt.plot(x, fixed(x, *popt2), 'g-', label='1/2 Slope Fit 2')
    plt.axhline(1.255, c='m', linestyle='--', label='19ADU Read-noise floor')
    plt.ylabel('Noise (log(ADUs))')
    plt.xlabel('Intensity (log(ADUs))')
    plt.grid(True)
    plt.title('Photon Transfer Curve (g=3.34$e^-$/ADU)')
    plt.legend(loc='best')
    plt.show()

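# In the shot-noise regime the log-log PTC fitted above has slope 1/2:
# log10(sigma) = 0.5*log10(S) - 0.5*log10(g) for a gain g in e-/ADU, so the
# intercept b of the fixed-slope fit implies g ~ 10**(-2*b).  A minimal
# helper sketch under that assumption (not part of the original analysis):
def gain_from_half_slope_intercept(b):
    '''Gain (e-/ADU) implied by the intercept of a slope-1/2 log-log PTC fit,
    assuming pure shot noise, i.e. sigma = sqrt(S/g).'''
    return 10**(-2 * b)
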
def master_flat(i, folder):
    '''
    Takes an integration time i (ms) and a folder of flat fields; builds a
    normalised master flat by bias- and dark-subtracting each frame and
    median-stacking.
    '''
    dark = fits.open(dark_path)[0].data
    bias = cam.get_master_bias(-60)
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    img_list_split = [f.split('_') for f in img_list]
    stack = np.zeros((1040, 1296))
    for k in range(len(img_list)):
        if img_list_split[k][1] == str(float(i)):
            hdu = fits.open(img_list[k])
            img = hdu[0].data
            img = img - bias - dark  #Subtract bias and dark from each flat
            stack = np.dstack((stack, img))
    stack = stack[:, :, 1:]  #Remove 0 array it is stacked on
    flat_collapsed = np.median(stack, axis=2)
    flat_normed = flat_collapsed / np.max(flat_collapsed)  #Normalise flat
    flat_header = hdu[0].header
    flat_header.append(
        ('NSTACK', stack.shape[2], 'Number of exposures stacked'))
    flat_header.append(
        ('TYPE', 'MASTER_FLAT', 'Normalised median stack of flat fields'))
    master_name = 'masterflat_' + str(i / 1000) + 's_' \
        + str(stack.shape[2]) + 'stack.fits'
    #Write to FITS file
    fits.writeto(master_name, flat_normed, flat_header)
    os.chdir(os.path.dirname(os.path.realpath(__file__)))

def dark_gain_method(dk_folder, ff_folder):
    os.chdir(dk_folder)
    dk_list = glob.glob('*.fits*')
    os.chdir(ff_folder)
    ff_list = glob.glob('*.fits*')
    gains = []
    for i in range(0, len(dk_list), 2):
        os.chdir(dk_folder)
        dk_hdu1 = fits.open(dk_list[i])
        dk_hdu2 = fits.open(dk_list[i + 1])
        dk_img1 = dk_hdu1[0].data
        dk_img1 = dk_img1.astype(np.int32)
        dk_img2 = dk_hdu2[0].data
        dk_img2 = dk_img2.astype(np.int32)
        os.chdir(ff_folder)
        ff_hdu1 = fits.open(ff_list[i])
        ff_hdu2 = fits.open(ff_list[i + 1])
        ff_img1 = ff_hdu1[0].data
        ff_img1 = ff_img1.astype(np.int32)
        ff_img2 = ff_hdu2[0].data
        ff_img2 = ff_img2.astype(np.int32)
        dk_img1 = dk_img1[500:700, 500:700]
        dk_img2 = dk_img2[500:700, 500:700]
        ff_img1 = ff_img1[500:700, 500:700]
        ff_img2 = ff_img2[500:700, 500:700]
        ff_diff = ff_img2 - ff_img1
        dk_diff = dk_img2 - dk_img1
        ff_var = np.var(ff_diff)
        dk_var = np.var(dk_diff)
        var_denom = (ff_var - dk_var) / 2
        ff_med1 = np.median(ff_img1)
        ff_med2 = np.median(ff_img2)
        dk_med1 = np.median(dk_img1)
        dk_med2 = np.median(dk_img2)
        ff_med = ff_med1 + ff_med2
        dk_med = dk_med1 + dk_med2
        med_numer = (ff_med - dk_med) / 2
        k = med_numer / var_denom
        gains.append(k)
    print(gains)
    print("Gain[ADU/e]: {}".format(np.mean(gains)))

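# dark_gain_method() is the standard two-flat / two-dark photon-transfer gain
# estimate: differencing each pair cancels fixed-pattern noise, leaving (twice
# the) shot-noise variance, and mean signal divided by shot variance gives the
# gain.  A minimal sketch of the same relation, with illustrative array names
# and the module-level numpy import:
def two_frame_gain(ff1, ff2, dk1, dk2):
    '''Gain (e-/ADU) from two flat frames and two dark frames:
    g = ((F1 + F2) - (D1 + D2)) / (var(F1 - F2) - var(D1 - D2)),
    using medians as robust means.'''
    signal = (np.median(ff1) + np.median(ff2)) \
        - (np.median(dk1) + np.median(dk2))
    shot_var = np.var(ff1 - ff2) - np.var(dk1 - dk2)
    return signal / shot_var
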
def ptc_gain(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    bias = np.asarray(bias, dtype=np.int32)
    times = []
    for k in img_list:
        hdu = fits.open(k)
        times.append(hdu[0].header['DITSER'])
    times = np.unique(times)
    print('times retrieved')
    #sort times if order is needed
    sets = [[] for _ in times]
    for j in img_list:
        hdu = fits.open(j)
        dit = hdu[0].header['DITSER']
        ind = np.argwhere(times == dit)
        sets[ind[0, 0]].append(hdu[0].data)
    print('sets constructed')
    amp, var = [], []
    for i in sets:
        first = i[1].astype(np.int32)
        second = i[0].astype(np.int32)
        diff_img = first - second
        first = first - bias
        roi_diff = diff_img[400:800, 400:800]
        roi_single = first[400:800, 400:800]
        var.append(np.var(roi_diff))
        amp.append(np.median(roi_single))
    print('arrays finished')
    var = np.array(var) / 2
    amp = np.array(amp)
    plt.scatter(amp, var)  #,label = 'Data')
    # slope, intercept, r_value, _ ,_ = linregress(x,y)
    # fit = slope*x + intercept
    # plt.plot(x,fit)
    # rsqr = round((r_value**2),4)
    # plt.plot(amp,fit,'g',label = 'Linear Fit, $r^2$ = {}'.format(rsqr))
    # def fixed(x,b):
    #     return 0.31*x+b
    # popt1, _ = optimize.curve_fit(fixed,x,y)
    # plt.plot(amp, fixed(amp, *popt1), 'r-',label='Pre-determined gain slope fit (m=0.31)')
    plt.ylabel('$\sigma^2$ (ADUs)')
    plt.xlabel('Median Pixel Value (ADUs)')
    plt.grid(True)
    plt.title('Variance vs Intensity')
    # Gain Study 2 (SLD), g = {}ADUs/$e^-$ (n = {})'.format(round(slope,2),len(x)))

def crosstalk(folder):
    img = '/crosstalk_2.fits'
    img_dir = folder + img
    hdu = fits.open(img_dir)
    frame = hdu[0].data
    bias = cam.get_master_bias(-40)
    bias = bias.astype(np.int32)
    dark = cam.get_master_dark(20, -40)
    dark = dark.astype(np.int32)
    frame = frame - bias - dark
    fig, axs = plt.subplots(nrows=1, ncols=2)
    cut_region = range(200, 205)
    cuts = frame[:, 199]
    for i in cut_region:
        cut = frame[:, i]
        cuts = np.vstack((cuts, cut))
    cut_median = np.median(cuts, axis=0)
    axs[1].plot(cut_median)
    axs[1].set_ylabel('ADUs')
    axs[1].set_xlabel('y-Index')
    axs[0].imshow(frame)
    plt.suptitle('Vertical Profile at x = {}'.format(cut_region[0]))
    plt.show()

def master_bias_local(folder, T):
    '''
    Median-stacks the bias frames in `folder` into a master bias for FPA
    temperature T and writes it to the master_biases directory.
    '''
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    for i in img_list:
        hdu = fits.open(i)
        data = hdu[0].data
        stack = np.dstack((stack, data))
        bias_header = hdu[0].header
    stack = stack[:, :, 1:]  #Slice off base layer
    ndit = stack.shape[2]
    master_bias = np.median(stack, axis=2)
    master_bias = master_bias.astype(np.uint16)
    bias_header.append(('NDIT', ndit, 'Number of integrations'))
    bias_header.append(('TYPE', 'MASTER_BIAS', 'Median stack of bias frames'))
    #Output master frame to fits
    master_path = master_biases + 'master_bias_' \
        + str(T) + '.fits'
    fits.writeto(master_path, master_bias, bias_header)
    print('PROGRAM HAS COMPLETED')

def temp_var(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)  #Retrieve bias
    bias = bias.astype(np.int32)
    stack = np.zeros((naxis1, naxis2), dtype=np.int32)
    for i in img_list:
        hdu = fits.open(i)
        data = hdu[0].data
        data = data.astype(np.int32)
        data -= bias
        stack = np.dstack((stack, data))
    stack = stack[:, :, 1:]  #Slice off base layer
    var_map = np.var(stack, axis=2)
    dark = cam.get_master_dark(40)
    bias = cam.get_master_bias(-60)
    nonlinear_mask = (var_map > 50000) * 1
    # var_map = var_map[var_map<0.6E6]
    plt.hist(var_map.flatten(), bins=200)
    plt.yscale('log')
    plt.grid(True)
    plt.xlabel('$\sigma^2$ (ADUs)')
    plt.ylabel('No. of pixels')
    plt.title('Distribution of temporal variability (DIT=450ms,NDIT=30)')
    plt.show()
    print('PROGRAM HAS COMPLETED')
    return nonlinear_mask

def pixel_population_ramp(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    bias = np.asarray(bias, dtype=np.int32)
    times, pixels = [], []
    stack = np.zeros((10, 10), dtype=np.int32)
    for i in img_list:
        hdu = fits.open(i)
        data = hdu[0].data
        data = np.asarray(data, dtype=np.int32)
        data = data - bias
        times.append(hdu[0].header['DITSER'])
        #Bright Region
        roi = data[790:800, 790:800]
        stack = np.dstack((stack, roi))
    stack = stack[:, :, 1:]  #Slice off base layer
    for i in range(stack.shape[0]):
        for j in range(stack.shape[1]):
            pix_array = stack[i, j, :]
            plt.scatter(times, pix_array)
    plt.xlabel('Integration Time (ms)')
    plt.ylabel('Intensity (ADUs)')
    plt.grid(True)
    plt.title('Integration Ramp for 100 pixel population')
    plt.show()
    print('PROGRAM HAS COMPLETED')

def ramp_plot(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    bias = np.asarray(bias, dtype=np.int32)
    times, amp = [], []
    for i in img_list:
        hdu = fits.open(i)
        data = hdu[0].data
        data = np.asarray(data, dtype=np.int32)
        # data = data - bias
        times.append(hdu[0].header['DITSER'])
        #Dark Region
        roi_single_d = data[400:800, 400:800]
        amp.append(np.median(roi_single_d))
    amp = np.array(amp)
    plt.scatter(times, amp, c='blue', label='Data')
    plt.xlabel('Integration Time (ms)')
    plt.ylabel('Intensity (ADUs)')
    # plt.xscale('log')
    # plt.yscale('log')
    plt.grid(True)
    plt.title('Integration Ramp')
    plt.legend(loc='best')
    plt.show()
    print('PROGRAM HAS COMPLETED')

def bias_bad_pix_var(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    low_map = np.zeros((naxis1, naxis2))
    very_low_map = np.zeros((naxis1, naxis2))
    hot_map = np.zeros((naxis1, naxis2))
    very_hot_map = np.zeros((naxis1, naxis2))
    flag_map = np.zeros((naxis1, naxis2))
    dead_map = np.zeros((naxis1, naxis2))
    n = len(img_list)
    for k in img_list:
        hdu = fits.open(k)
        data = hdu[0].data
        l = np.median(data) - 5 * np.std(data)
        h = np.median(data) + 5 * np.std(data)
        dead_map += (data == 15) * 1
        very_low_map += ((data > 15) & (data < 700)) * 1
        low_map += ((data > 700) & (data < l)) * 1
        hot_map += ((data > h) & (data < 6000)) * 1
        very_hot_map += ((data > 6000) & (data < 16383)) * 1
        flag_map += (data == 16383) * 1
        print(len(very_low_map[very_low_map != 0]), k)
        print(flag_map)
    hot_dist = (hot_map[hot_map != 0])
    very_hot_dist = (very_hot_map[very_hot_map != 0])
    low_dist = (low_map[low_map != 0])
    very_low_dist = (very_low_map[very_low_map != 0])
    dead_dist = (dead_map[dead_map != 0])
    flag_dist = (flag_map[flag_map != 0])
    print(flag_dist)
    print(dead_dist)
    print(flag_map)
    # plt.hist(hot_dist,bins=100,facecolor='g',hatch='/', edgecolor='k',fill=True, alpha=0.5,\
    #     label=r'High Bias')
    # plt.hist(very_hot_dist,bins=100,facecolor='y',hatch='|', edgecolor='k',fill=True, alpha=0.5,\
    #     label=r'Very High Bias')
    # plt.hist(low_dist,bins=100,hatch='*', facecolor='c',edgecolor='k',fill=True,alpha=0.5,\
    #     label=r'Low Bias')
    # plt.hist(very_low_dist,bins=100,hatch='+', facecolor='r',edgecolor='k',fill=True,alpha=0.5,\
    #     label=r'Very Low Bias')
    # plt.hist(flag_dist,bins=100,hatch='.', facecolor='m',edgecolor='k',fill=True,alpha=0.5,\
    #     label=r'Flag')
    # plt.hist(dead_dist,bins=100,hatch='o', facecolor='k',edgecolor='k',fill=True,alpha=0.5,\
    #     label=r'Dead')
    # plt.xlabel('Recurrence Rate (%)')
    # plt.ylabel('# of Pixels')
    # plt.legend(loc='best')
    # plt.title('Recurrence of bad pixels on bias frames (n={},DIT=40s)'.format(n))
    # plt.show()
    print('Program Complete')

def median_ptc(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    bias = np.asarray(bias, dtype=np.int32)
    times = []
    for k in img_list:
        hdu = fits.open(k)
        times.append(hdu[0].header['DITSER'])
    times = np.unique(times)
    sets = [[] for _ in times]
    for j in img_list:
        hdu = fits.open(j)
        dit = hdu[0].header['DITSER']
        ind = np.argwhere(times == dit)
        sets[ind[0, 0]].append(hdu[0].data)
    amp, var = [], []
    for i in sets:
        print(len(i))
        med, var_ele = [], []
        for j in range(0, len(i), 2):  #select every second array
            first = i[j].astype(np.int32)
            second = i[j + 1].astype(np.int32)
            diff_img = first - second
            roi_diff = diff_img[400:800, 400:800]
            roi_single = first[400:800, 400:800]
            med.append(np.median(roi_single))
            var_ele.append(np.var(roi_diff))
        var.append(np.mean(var_ele))
        amp.append(np.mean(med))
    var = np.array(var) / 2
    amp = np.array(amp)
    plt.scatter(amp, var, marker='d', label='Median Results (n=20)')
    plt.ylabel('$\sigma^2$ (ADUs)')
    plt.xlabel('Median Pixel Value (ADUs)')
    plt.grid(True)
    plt.legend(loc='best')
    plt.title('Variance vs Intensity')
    plt.show()

def read_diff(files):
    hdu1 = fits.open(files[0])
    hdu2 = fits.open(files[1])
    img1 = hdu1[0].data.astype(np.int32)  #Cast so the difference can go negative
    img2 = hdu2[0].data.astype(np.int32)
    diff = img1 - img2  #Create difference image
    header = hdu1[0].header
    img, _, _ = sigmaclip(diff, 5, 5)
    sig = np.std(img)
    RN = int(sig / np.sqrt(2))
    fig, axs = plt.subplots(1, 2, tight_layout=True)
    axs[0].imshow(diff, vmax=25)
    axs[1].hist(img, bins=1000, label='$\sigma={}$ electrons'.format(int(sig)))
    axs[1].set_xlabel('$e^{-}$')
    axs[1].set_title(
        'Difference of Master Bias Frames, DIT=$33\mu s$ (520REFCLKS), NDIT=1000, RN={}'
        .format(RN))
    plt.legend(loc='best')
    plt.show()

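# read_diff() measures read noise from a pair of bias frames: differencing
# removes fixed-pattern structure and the two frames' read noise adds in
# quadrature, so RN = sigma(diff) / sqrt(2).  A minimal sketch of that step,
# assuming two bias arrays b1 and b2 (illustrative names):
def read_noise_from_pair(b1, b2, clip=5.0):
    '''Read noise (in the input frames' units) from two bias frames.'''
    diff = b1.astype(np.int32) - b2.astype(np.int32)
    clipped, _, _ = sigmaclip(diff, clip, clip)
    return np.std(clipped) / np.sqrt(2)
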
def dark_bad_pix_var(folder, i):
    bias = cam.get_master_bias(-60)
    os.chdir(folder)
    i = i * 1000  #convert to ms
    img_list = glob.glob('*.fits*')
    img_list_split = [f.split('_') for f in img_list]
    low_map = np.zeros((naxis1, naxis2))
    very_low_map = np.zeros((naxis1, naxis2))
    hot_map = np.zeros((naxis1, naxis2))
    very_hot_map = np.zeros((naxis1, naxis2))
    c_bias = 1951 * np.ones((naxis1, naxis2))
    n = 0
    for k in range(len(img_list)):
        if img_list_split[k][1] == str(i):
            hdu = fits.open(img_list[k])
            data = hdu[0].data
            dark = data.astype(np.int32) - bias  #Cast so negative dark values survive
            dark = np.asarray(dark, dtype=np.float64)
            print(np.min(dark))
            # hot_dark = np.median(dark)+6*np.std(dark)
            low_map += ((dark < 300) & (dark > 0)) * 1
            very_low_map += (dark < 0) * 1
            hot_map += ((dark > 2000) & (dark < 5000)) * 1
            very_hot_map += (dark > 5000) * 1
            print(len((very_low_map[very_low_map != 0])))
            n += 1
    hot_dist = (hot_map[hot_map != 0]) / n
    very_hot_dist = (very_hot_map[very_hot_map != 0]) / n
    low_dist = (low_map[low_map != 0]) / n
    very_low_dist = (very_low_map[very_low_map != 0]) / n
    print(very_low_dist)
    #plt.hist(hot_dist,facecolor='g',hatch='/', edgecolor='k',fill=True, alpha=0.5,\
    #    label=r'High $I_{dark}$')
    #plt.hist(very_hot_dist,facecolor='y',hatch='|', edgecolor='k',fill=True, alpha=0.5,\
    #    label=r'Very High $I_{dark}$')
    #plt.hist(low_dist,hatch='*', facecolor='c',edgecolor='k',fill=True,alpha=0.5,\
    #    label=r'Low $I_{dark}$')
    plt.hist(very_low_dist,hatch='o', facecolor='r',edgecolor='k',fill=True,alpha=0.5,\
        label=r'Very Low $I_{dark}$')
    plt.xlabel('Recurrence Rate (%)')
    plt.ylabel('# of Pixels')
    plt.legend(loc='best')
    plt.title(
        'Recurrence of bad pixels on dark frames (n={},DIT=40s)'.format(n))
    plt.show()
    print('Program Complete')

def var_time(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    bias = np.asarray(bias, dtype=np.int32)
    times = []
    for k in img_list:
        hdu = fits.open(k)
        times.append(hdu[0].header['DITSER'])
    times = np.unique(times)  #sort times if order is needed
    sets = [[] for _ in times]
    for j in img_list:
        hdu = fits.open(j)
        dit = hdu[0].header['DITSER']
        ind = np.argwhere(times == dit)
        sets[ind[0, 0]].append(hdu[0].data)
    var = []
    for i in sets:
        first = i[1].astype(np.int32)
        second = i[0].astype(np.int32)
        diff_img = first - second
        roi_diff = diff_img[400:800, 400:800]
        var.append(np.var(roi_diff))
    var = np.array(var) / 2
    plt.scatter(times, var, c='blue', label='Data')
    plt.ylabel('$\sigma^2$ (ADUs)')
    plt.xlabel('Integration Time (ms)')
    plt.grid(True)
    plt.title('Variance vs Integration Time')
    plt.legend(loc='best')
    plt.show()
    print('PROGRAM HAS COMPLETED')

def bias_temp_var(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    for i in img_list:
        hdu_img = fits.open(i)
        data = hdu_img[0].data
        stack = np.dstack((stack, data))
    stack = stack[:, :, 1:]  #Slice off base layer
    temp_stack = np.var(stack, axis=2)
    plt.imshow(temp_stack)
    plt.show()

def full_well_hist(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    for i in img_list:
        hdu_img = fits.open(i)
        data = hdu_img[0].data
        data = data - bias
        data[data > 60000] = 0  #Avoid unsigned integer overflow
        stack = np.dstack((stack, data))
    stack = stack[:, :, 1:]  #Remove 0 array it is stacked on
    collapsed = np.median(stack, axis=2)
    clipped, _, _ = sigmaclip(collapsed, 8, 8)
    #clipped *= 3.22 #gain-adjust
    median = np.median(clipped)
    sigma = np.std(clipped)
    early_line = median - 3 * sigma  #3sigma away from distribution full-well begins
    fig, axs = plt.subplots(nrows=1, ncols=2)
    axs[0].axvline(early_line,
                   linestyle='--',
                   c='r',
                   label='$3\sigma$ Full Well: {}ADUs'.format(int(early_line)))
    axs[0].axvline(median,
                   linestyle='--',
                   c='blue',
                   label='Median: {}ADUs'.format(int(median)))
    axs[0].hist(clipped, bins=110, facecolor='g', edgecolor='k', fill=True)
    axs[0].set_yscale('log')
    axs[0].set_ylabel('No. of Pixels')
    axs[0].set_xlabel('ADUs')
    axs[0].grid(True)
    axs[0].legend(loc='best')
    img_eq = exposure.equalize_hist(data)
    axs[1].imshow(img_eq)
    plt.suptitle(
        'Full-well Study of Oversaturated Frames (DIT={}s,NDIT={})'.format(
            20, 70))
    plt.show()

def cooling_test():
    os.chdir('C:')
    os.chdir('C:/nstf/images/images19-06-2020/cooling_test_2')
    files = glob.glob('*.fits*')
    bias = cam.get_master_bias(-60)
    temps = []
    vals = []
    sigma = []
    for i in files:
        hdu = fits.open(i)
        ambtemp = hdu[0].header['AMBTEMP']
        temps.append(ambtemp)
        data = hdu[0].data
        data = data - bias
        clipped, _, _ = sigmaclip(data, 5, 5)  #clip hot/dead pixels
        vals.append(np.median(clipped))
        sigma.append(np.std(clipped))
    temps = np.array(temps)
    vals = np.array(vals)
    vals = vals[temps < 10]
    temps = temps[temps < 10]
    vals = 3.23 * vals
    vals = vals / 2
    slope, intercept, r_value, _, _ = linregress(temps, vals)
    fit = slope * temps + intercept
    slope = int(slope)
    rsqr = round((r_value**2), 4)
    plt.scatter(temps, vals, label='Data', color='black')
    plt.plot(temps,fit,'g--',label = 'Linear Fit (m={0}e/s/$^\circ$C, $r^2$={1})'\
        .format(slope,rsqr))
    plt.grid(True)
    plt.legend(loc='best')
    # plt.axvspan(26, 8, color='r', alpha=0.5, lw=0)
    # plt.axvspan(7.9, -11, color='g', alpha=0.5, lw=0)
    # plt.text(20,180,r'$FPA\ Unstable$')
    # plt.text(0,180,r'$FPA\ Stable$')
    # plt.gca().invert_xaxis()
    plt.xlabel('Temperature ($^\circ$C)')
    plt.ylabel('Median $e^-$/s/pix')
    plt.title('Detector Cooling Test (FPA:-60$^\circ$C, DIT=2s)')
    plt.show()

def sky_hists(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    sns.set(color_codes=True)
    for i in img_list:
        hdu = fits.open(i)
        dit = (hdu[0].header['DITSER']) / 1000  #DIT in seconds
        img = (hdu[0].data) / dit  #convert into rate
        clipped = cam.roi_circle(img)  #remove vignetting
        clipped *= 4.16  #gain adjust
        clipped, _, _ = sigmaclip(clipped, 3, 3)
        sns.distplot(clipped, label='DIT={}s,$\mu={}$$e^-$/s'.format(
            dit, int(np.mean(clipped))))
    plt.xlabel('$e^-$/s')
    plt.ylabel('Density $\%$')
    plt.title('Sky background distribution (NDIT=10, Airmass=1)')
    plt.legend(loc='best')
    plt.show()

def master_bias(n, tag, T):
    '''
    Captures n zero-exposure (33 microsecond) frames and median-stacks them
    into a master bias; tag is used in the output file name and T records
    the FPA temperature in the FITS header.
    '''
    cam.set_int_time(0.033)
    cam.set_frame_time(100.033)
    cam.printProgressBar(0,
                         n,
                         prefix='Progress:',
                         suffix='Complete',
                         length=50)
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    for j in range(n):
        cap, _ = cam.img_cap(routine, img_dir, 'f')
        hdu_img = fits.open(unsorted_img)
        fits_img = hdu_img[0]
        data = fits_img.data
        hdu_img.close()  #Close image so it can be sorted
        stack = np.dstack((stack, data))
        cam.printProgressBar(j,n, prefix = 'Progress:', \
            suffix = 'Complete', length = 50)
        if j == n - 1:  #On final frame grab header
            bias_header = fits.getheader(unsorted_img)
        os.remove(unsorted_img)  #Delete image after data retrieval
    bias_header.append(('NDIT', n, 'Number of integrations'))
    bias_header.append(('TYPE', 'MASTER_BIAS', '0s exposure frame'))
    bias_header.append(('FPATEMP', T, 'Temperature of detector'))
    #Median Stack
    stack = stack[:, :, 1:]  #Slice off base layer
    master_bias = np.median(stack, axis=2)
    master_bias = master_bias.astype(np.uint16)
    #Write master frame to fits
    master_path = read_path + 'master_bias_' \
        + tag + '.fits'
    fits.writeto(master_path, master_bias, bias_header)
    print('PROGRAM HAS COMPLETED')

def stack_hists(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)  #Retrieve bias
    bias = bias.astype(np.int32)
    for i in img_list:
        hdu = fits.open(i)
        data = hdu[0].data
        data = data.astype(np.int32)
        data -= bias
        data = data[400:800, 400:800]
        plt.hist(data.flatten(), bins=100)  #Overlay one histogram per frame
    plt.yscale('log')
    plt.grid(True)
    plt.xlabel('ADUs')
    plt.ylabel('No. of pixels')
    plt.title('Pixel Distribution Histograms (NDIT=30)')
    plt.show()

def master_dark_hist():
    os.chdir('//merger.anu.edu.au/mbirch/data/master_dark')
    files = glob.glob('*-10.6C.fits*')
    sns.set(color_codes=True)
    for i in files:
        hdu = fits.open(i)
        int_t = (hdu[0].header['DITSER']) / 1000
        img = (hdu[0].data) / int_t
        clipped, _, _ = sigmaclip(img, 5, 5)
        clipped = clipped * 3.23
        val = round(np.median(clipped))
        sns.distplot(clipped, label='DIT={}s,$\mu={}$'.format(int_t, val))
    plt.xlabel('ADUs/s')
    plt.ylabel('Density $\%$')
    plt.title(
        'Master Dark Pixel Distributions (NDIT=20, FPA:-60$^\circ$C, Shutter:-10.6$^\circ$C)'
    )
    plt.legend(loc='best')
    plt.show()

def master_dark_local(folder, i):
    '''
    Builds a master dark from a folder of single frames already on the
    local drive. i is the integration time (ms) used to select frames by
    file name; each frame is bias-subtracted before the median stack.
    '''
    os.chdir(folder)
    bias = cam.get_master_bias(-60)  #Retrieve bias
    img_list = glob.glob('*.fits*')
    img_list_split = [f.split('_') for f in img_list]
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    for k in range(len(img_list)):
        if img_list_split[k][1] == str(i):
            hdu = fits.open(img_list[k])
            data = hdu[0].data
            data = data - bias  #Bias subtract
            data[data > 60000] = 0  #Avoid unsigned integer overflow
            stack = np.dstack((stack, data))
            dark_header = hdu[0].header
    stack = stack[:, :, 1:]  #Slice off base layer
    ndit = stack.shape[2]
    master_dark = np.median(stack, axis=2)
    master_dark = master_dark.astype(np.uint16)
    dark_header.append(('NDIT', ndit, 'Number of integrations'))
    dark_header.append(('TYPE', 'MASTER_DARK', 'Median stack of dark frames'))
    #Output master frame to fits
    master_path = master_darks + 'master_dark_' \
        + str(i/1000) + '_-10.6C.fits'
    fits.writeto(master_path, master_dark, dark_header)
    print('PROGRAM HAS COMPLETED')

def master_sky(i, folder, am, filter):
    '''
    Takes an integration time in ms, a folder containing sky backgrounds,
    the airmass of the exposures and the bandpass filter used.
    '''
    os.chdir(folder)
    bias = cam.get_master_bias(-60)  #Retrieve bias
    bias = bias.astype(np.int32)
    dark = cam.get_master_dark(int(i / 2000))  #Retrieve dark
    dark = dark.astype(np.int32)
    img_list = glob.glob('*.fits*')
    img_list_split = [f.split('_') for f in img_list]
    stack = np.zeros((1040, 1296), dtype=np.int32)
    for k in range(len(img_list)):
        if img_list_split[k][1] == str(float(i)):
            hdu = fits.open(img_list[k])
            data = hdu[0].data
            data = data.astype(np.int32)
            data -= bias  #Bias subtract
            data -= dark  #Dark subtract
            stack = np.dstack((stack, data))
    stack = stack[:, :, 1:]  #Remove 0 array it is stacked on
    #Collapse multi-dimensional array along depth axis by median
    sky_collapsed = np.median(stack, axis=2)
    #Append necessary info to header
    sky_header = hdu[0].header
    sky_header.append(('NDIT', stack.shape[2], 'Number of integrations'))
    sky_header.append(
        ('TYPE', 'MASTER_SKY', 'Median stack of sky backgrounds'))
    sky_header.append(('AIRMASS', am, 'Airmass of exposures'))
    sky_header.append(('BAND', filter, 'Bandpass filter'))
    master_name = master_skys + 'mastersky_'+str(i/1000)+"s_" + str(stack.shape[2]) \
        + 'stack_am' + str(am) + '.fits'
    #Write to FITS file
    fits.writeto(master_name, sky_collapsed, sky_header)
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    print('PROGRAM COMPLETE')

def dark_current(folder):
    os.chdir(folder)
    files = glob.glob('*.fits*')
    # bias = cam.get_master_bias(-60)
    vals, temps, times = [], [], []
    for i in files:
        hdu = fits.open(i)
        temps.append(hdu[0].header['TEMPAMB'])
        times.append(hdu[0].header['DITSER'])
        data = hdu[0].data
        # data = data - bias
        # data[data > 60000] = 0 #Avoid unsigned integer overflow
        clipped, _, _ = sigmaclip(data, 3, 3)
        vals.append(np.median(clipped))
    #test_temp = round(np.mean(temps),1)
    times = np.array(times) / 1000
    vals = 3.22 * np.array(vals)
    slope, intercept, r_value, _, _ = linregress(times, vals)
    fit = slope * times + intercept
    i_dark = int(round(slope))
    rsqr = round((r_value**2), 4)
    plt.scatter(times,
                vals / 1000,
                c='black',
                label='Data ($n={}$)'.format(len(files)))
    plt.plot(times,
             fit / 1000,
             c='g',
             linestyle='dashed',
             label='Linear Fit, $r^2$ = {}'.format(rsqr))
    plt.grid(True)
    plt.xlabel('Integration Time (s)')
    plt.ylabel('Median Pixel Value ($ke^-$)')
    plt.legend(loc='best')
    plt.title('Dark Current, {0}$e^-$/s (FPA: $0^\circ$, Shutter: 20$^\circ$C)'\
        .format(i_dark))
    plt.show()

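# dark_current() reads the dark rate off a straight-line fit: the median
# pixel value grows linearly with integration time, so I_dark [e-/s/pix] is
# the fitted slope in ADU/s multiplied by the gain.  A minimal sketch,
# assuming arrays of integration times (s) and median ADU levels and an
# illustrative gain value:
def dark_rate(times_s, median_adu, gain_e_per_adu=3.22):
    '''Dark current in e-/s/pix from a linear fit of median signal vs time.'''
    slope, _, _, _, _ = linregress(times_s, median_adu)
    return slope * gain_e_per_adu
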
def analyse_read():
    img_path = read_path + 'master_read_' \
        + 'test1_1000' + '.fits'
    hdu_img = fits.open(img_path)
    img = hdu_img[0].data
    header = hdu_img[0].header
    fig, axs = plt.subplots(1, 3, tight_layout=True)
    axs[0].hist(img.flatten(), bins=3000)
    axs[0].set_xlim(1100, 1700)
    axs[0].set_title('Raw $\sigma$={}ADUs'.format(int(np.std(img))))
    axs[0].set_xlabel('ADUs')
    axs[1].imshow(img, vmax=2000)
    noise_img = (img - np.mean(img)) * 3.07
    axs[2].hist(noise_img.flatten(), bins=3000)
    axs[2].set_xlim(-900, 900)
    axs[2].set_title(
        'Mean-subtracted/Gain Adjusted $\sigma={}$ electrons'.format(
            int(np.std(noise_img))))
    axs[2].set_xlabel('$e^{-}$')
    axs[1].set_title(
        'Master Bias Frame, DIT=$33\mu s$ (520REFCLKS), NDIT=1000')
    plt.show()

def brightness_estimate(file, rows, cols):
    hdul = fits.open(file)
    img = hdul[0].data
    dit = hdul[0].header['DITSER']
    roi = img[rows[0]:rows[1], cols[0]:cols[1]]
    counts = np.median(roi)  #[ADUs/pixel]
    counts /= (dit / 1000)  # Time [ADUs/s/pixel]
    counts *= 4.17  # Gain [e/s/pixel] UNCERTAIN
    counts /= 1.24  # Plate-scale [e/s/arcsecond]
    alpha_bintel = 0.7  #optical throughput of BinTel [2x aluminium mirrors]
    counts /= alpha_bintel  #Divide by throughput to get incident rate
    photons = counts / 0.8  # Divide by QE to get [photons/s/arcsecond] UNCERTAIN
    lambda_c = 1.3E-6  #central wavelength UNCERTAIN
    e_phot = (h.value * c.value) / lambda_c  #Energy per photon [J]
    I = photons * e_phot  #[W/arcsecond]
    nu_1 = c.value / 1E-6
    nu_2 = c.value / 1.6E-6
    delta_nu = nu_1 - nu_2
    f_nu = I / delta_nu  #[W/Hz/arcsecond]
    aperture = m.pi * ((0.25 / 2)**2)  #M1 aperture area [m^2]
    f_nu /= aperture  #[W/m^2/Hz/arcsecond]
    f_nu /= 1E-26  # [Jy/arcsecond]; 1 Jy = 1e-26 W/m^2/Hz
    #Output ab_mag/arcsecond
    mag_ab = -2.5 * m.log10(f_nu / 3631)  #takes [Jy/arcsecond]
    dreams_counts = counts * 2.48  #DREAMS plate-scale [e/s/DREAMSpixel]
    alpha_dreams = 0.4
    dreams_counts *= alpha_dreams  #Adjust by optical throughput of DREAMS
    print("Sky Brightness: {} mag/arcsecond or {} e/s/DREAMSpixel".\
        format(round(mag_ab,1),int(dreams_counts)))

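# The last step of brightness_estimate() is the AB-magnitude conversion,
# m_AB = -2.5*log10(f_nu / 3631 Jy).  A minimal sketch of just that step
# (the function name is illustrative):
def ab_mag_from_jansky(f_nu_jy):
    '''AB magnitude from a flux density in Jy (3631 Jy is the AB zero point).'''
    return -2.5 * m.log10(f_nu_jy / 3631)
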
def fringing_stack(folder, T, band):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(T)
    bias = bias.astype(np.int32)
    stack = np.zeros((naxis1, naxis2), dtype=np.int32)
    for i in img_list:
        hdu = fits.open(i)
        frame = hdu[0].data
        frame = frame.astype(np.int32)
        frame = frame - bias
        stack = np.dstack((stack, frame))
    stack = stack[:, :, 1:]  #Slice off base layer
    img = np.median(stack, axis=2)
    img_save = np.copy(img)
    masked = cam.roi_circle(img)
    norm_masked = masked / np.max(masked)
    # plt.hist(norm_masked,bins=300,label=band)
    return img_save

def master_dark(i, n, T, tag=''):
    '''
    DIT (i, in ms) and NDIT (n) are inputs. The function can also take a tag
    for sorting individual frames onto the local drive. T is the FPA
    temperature, which is written to the file name and FITS header. The
    program also outputs a .npy binary file containing a 3D datacube of the
    central (100,100) window for studying temporal variance over the stack.
    '''
    cam.set_int_time(i)
    cam.set_frame_time(i + 20)
    bias = cam.get_master_bias(T)
    cam.printProgressBar(0,
                         n,
                         prefix='Progress:',
                         suffix='Complete',
                         length=50)
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    window = np.zeros((100, 100), dtype=np.uint16)
    for j in range(n):
        _, _ = cam.img_cap(routine, img_dir, 'f')
        hdu_img = fits.open(unsorted_img)
        data = hdu_img[0].data
        hdu_img.close()  #Close image so it can be sorted
        data = data - bias
        stack = np.dstack((stack, data))
        data_window = cam.window(data, 100)
        window = np.dstack((window, data_window))
        cam.printProgressBar(j,n, prefix = 'Progress:', \
            suffix = 'Complete', length = 50)
        if j == n - 1:  #On final frame grab header
            dark_header = fits.getheader(unsorted_img)
        #Save single frame to local drive
        cam.file_sorting(local_img_dir, i, i + 20, tag=tag)
    #Median stack
    stack = stack[:, :, 1:]  #Slice off base layer
    master_dark = np.median(stack, axis=2)
    #Prepare window for temporal analysis
    window = window[:, :, 1:]  #Slice off base layer
    temp_var = np.median(np.var(stack, axis=2))
    temp_path = master_darks + 'dark_cube' \
        + str(i/1000) + '_' + str(T) + 'C.npy'
    np.save(temp_path, window)
    dark_header.append(('NDIT', n, 'Number of integrations'))
    dark_header.append(('TYPE', 'MASTER_DARK', 'Median stack of dark frames'))
    dark_header.append(('FPATEMP', T, 'Temperature of detector'))
    dark_header.append(
        ('TEMPVAR', temp_var,
         'Median temporal variance of central (100,100) window'))
    #Output master frame to fits
    master_path = master_darks + 'master_dark_' \
        + str(i/1000) + '_' + str(T) + 'C.fits'
    fits.writeto(master_path, master_dark, dark_header)
    print('PROGRAM HAS COMPLETED')

def nonlinearity(folder):
    os.chdir(folder)
    img_list = glob.glob('*.fits*')
    bias = cam.get_master_bias(-40)
    bias = np.asarray(bias, dtype=np.int32)
    times = []
    for k in img_list:
        hdu = fits.open(k)
        times.append(hdu[0].header['DITSER'])
    times = np.unique(times)
    sets = [[] for _ in times]
    for j in img_list:
        hdu = fits.open(j)
        dit = hdu[0].header['DITSER']
        ind = np.argwhere(times == dit)
        sets[ind[0, 0]].append(hdu[0].data)
    amp, var = [], []
    for i in sets:
        first = i[1].astype(np.int32)
        second = i[0].astype(np.int32)
        diff_img = first - second
        first = first - bias
        roi_diff = diff_img[400:800, 400:800]
        roi_single = first[400:800, 400:800]
        var.append(np.var(roi_diff))
        amp.append(np.median(roi_single))
    var = np.array(var) / 2  #Adjust var
    amp = np.array(amp)
    #Linear Region
    lin_y = var[(amp > 2000) & (amp < 8000)]
    lin_x = amp[(amp > 2000) & (amp < 8000)]
    slope, intercept, r_value, _, _ = linregress(lin_x, lin_y)
    fit = slope * amp + intercept
    rsqr = round(r_value**2, 3)
    m = round(slope, 2)
    b = int(intercept)
    x = np.linspace(amp[0], amp[-1], 5000)
    noise = (20**2) + np.sqrt(x)
    nonlin = 11600
    fwell = 13480
    fig1 = plt.figure(1)
    frame1 = fig1.add_axes((.1, .3, .8, .6))
    plt.plot(amp,
             fit,
             linestyle='--',
             c='g',
             label='Linear fit: $r^2$ = {}, m = {}, b = {}'.format(rsqr, m, b))
    plt.scatter(amp, var, label='Data (n={})'.format(len(times)))
    plt.axvline(nonlin,
                c='m',
                label='$4\%$ Non-linearity Point: {}ADUs'.format(nonlin))
    plt.axvline(fwell, c='y', label='Full-well: {}ADUs'.format(fwell))
    plt.xlim(11000, 13600)
    plt.ylim(0, 4000)
    plt.ylabel('$\sigma^2$ (ADUs)')
    plt.title('Linear photon transfer curve with fit residuals')
    plt.legend(loc='best')
    frame1.set_xticklabels([])
    plt.grid()
    #Residual plot
    resids = fit - var
    frame2 = fig1.add_axes((.1, .1, .8, .2))
    plt.plot(amp, resids, 'or')
    plt.fill_between(x, -noise, noise, alpha=0.2, label='Shot/Read Noise')
    plt.ylabel('$\sigma^2$ Residuals (ADUs)')
    plt.xlabel('Median Pixel Value (ADUs)')
    plt.axvline(nonlin,
                c='m',
                label='$4\%$ Non-linearity Point: {}ADUs'.format(nonlin))
    plt.axvline(fwell, c='y', label='Full-well: {}ADUs'.format(fwell))
    plt.grid()
    plt.ylim(-1000, 4000)
    plt.xlim(11000, 13600)
    plt.legend(loc='best')
    plt.show()

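# The linear region fitted in nonlinearity() follows the usual PTC model
# sigma^2 = S/g + RN^2, so the fitted slope estimates 1/gain and the
# intercept estimates the read-noise variance.  A minimal sketch under that
# assumption (not the module's calibrated values):
def gain_from_ptc_slope(median_adu, variance_adu2):
    '''Gain (e-/ADU) and read-noise variance (ADU^2) from the linear PTC region.'''
    slope, intercept, _, _, _ = linregress(median_adu, variance_adu2)
    return 1.0 / slope, intercept
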