def dark_current(n, T, tag='', amb_temp=''):
    int_times = np.round(np.linspace(5, 500, n), 0)
    cam.printProgressBar(0, sum(int_times))
    y = 0
    for j in int_times:
        cam.set_int_time(j)
        cam.set_frame_time(j + 20)
        cap, _ = cam.img_cap(routine, img_dir, 'f')
        hdu_img = fits.open(unsorted_img)
        data = hdu_img[0].data
        dark_header = fits.getheader(unsorted_img)
        dark_header.append(('FPATEMP', T, 'Temperature of detector'))
        dark_header.append(('TEMPAMB', amb_temp, 'Ambient Temperature'))
        hdu_img.close()
        os.remove(unsorted_img)  # Delete image after data retrieval
        fits.writeto(unsorted_img, data, dark_header)
        cam.file_sorting(img_dir, j, j + 20, tag=tag)
        y += j
        cam.printProgressBar(y, sum(int_times))
    print('PROGRAM HAS COMPLETED')
def read_noise_estimate(n):
    '''
    Capture n pairs of bias frames (520 REFCLKs).
    Produce a difference image from each pair and store a read-noise
    estimate from sigma/sqrt(2) of the difference.
    Output a histogram of the final pair with the RN estimate taken as
    the median of all pairs.
    '''
    cam.set_int_time(0.033)
    cam.set_frame_time(100)
    cam.printProgressBar(0, 2 * n, prefix='Progress:', suffix='Complete', length=50)
    y = 0
    RNs = []
    for j in range(n):
        bias_1, _ = cam.simple_cap()
        y += 1
        cam.printProgressBar(y, 2 * n)
        bias_2, _ = cam.simple_cap()
        y += 1
        cam.printProgressBar(y, 2 * n)
        bias_1 = np.asarray(bias_1, dtype=np.int32)
        bias_2 = np.asarray(bias_2, dtype=np.int32)
        # Difference image removes structure common to both frames
        bias_dif = bias_2 - bias_1
        dif_clipped = bias_dif.flatten()
        RNs.append(np.std(dif_clipped) / np.sqrt(2))
        dev = np.std(dif_clipped)

    RNs = np.array(RNs)
    RN = round(np.median(RNs), 3)
    uncert = round(3 * np.std(RNs), 2)

    # Histogram of the final difference image (5-sigma clipped)
    sample_hist, _, _ = stats.sigmaclip(dif_clipped, 5, 5)
    N, bins, _ = plt.hist(sample_hist, bins=265, facecolor='blue', alpha=0.75,
                          label='Bias Difference Image')

    # Fit a zero-mean Gaussian to the histogram
    def fit_function(x, B, sigma):
        return B * np.exp(-1.0 * (x**2) / (2 * sigma**2))

    popt, _ = optimize.curve_fit(fit_function, xdata=bins[0:-1] + 0.5,
                                 ydata=N, p0=[0, dev])
    xspace = np.linspace(bins[0], bins[-1], 100000)
    fit_dev = round(popt[1], 3)
    delta_sig = round(abs(fit_dev - dev), 2)
    plt.plot(xspace + 0.5, fit_function(xspace, *popt), color='darkorange',
             linewidth=2.5, label=r'Gaussian fit, $\Delta\sigma$:{}'.format(delta_sig))
    plt.ylabel('No. of Pixels')
    plt.xlabel('ADUs')
    plt.title(r'Read Noise Estimate: ${}\pm{}$ ADUs ($n={}$, FPA: $-40^\circ$C)'.format(
        RN, uncert, n))
    plt.legend(loc='best')
    plt.show()
    print('PROGRAM HAS COMPLETED')
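# Standalone sanity check of the sigma/sqrt(2) relation used above, on synthetic
# frames only (no camera required). The read-noise value and frame size are
# illustrative assumptions, not measured properties of this detector; only the
# module-level numpy import is assumed. Not called by any acquisition routine.
def _demo_read_noise_relation():
    rng = np.random.default_rng(0)
    true_rn = 4.0  # assumed read noise in ADUs, for illustration only
    fixed_pattern = rng.normal(1000, 20, (512, 512))  # hypothetical bias structure
    # Two bias frames share the fixed pattern but carry independent read noise
    bias_1 = fixed_pattern + rng.normal(0, true_rn, fixed_pattern.shape)
    bias_2 = fixed_pattern + rng.normal(0, true_rn, fixed_pattern.shape)
    # Differencing cancels the common structure; independent variances add, so
    # std(diff) ~ sqrt(2) * RN and the per-frame read noise is std(diff) / sqrt(2)
    diff = bias_2 - bias_1
    print(np.std(diff) / np.sqrt(2))  # ~4.0 ADUs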
def ramp(n, tag=''):
    int_times = np.round(np.linspace(50, 5000, n), 0)
    for j in int_times:
        cam.set_int_time(j)
        cam.set_frame_time(j + 20)
        cam.img_cap(routine, img_dir, 'f')
        cam.file_sorting(img_dir, j, j + 20, tag=tag)
    print('PROGRAM COMPLETE')
def full_well(n, int_t, tag=''):
    dit = cam.set_int_time(int_t)
    cam.set_frame_time(int_t + 20)
    cam.printProgressBar(0, n)
    for j in range(n):
        cap, _ = cam.img_cap(routine, img_dir, 'f')
        cam.file_sorting(img_dir, dit, dit + 20, tag=tag)
        cam.printProgressBar(j + 1, n)  # j + 1 so the bar reaches 100% on the last frame
def gain_estimate(n):
    bias = cam.get_master_bias(-40)
    int_times = np.round(np.linspace(500, 5000, n), 0)
    medians, sigmasqrs = [], []
    for j in int_times:
        cam.set_int_time(j)
        cam.set_frame_time(j + 20)
        # Take a pair of images at this integration time
        first, _ = cam.simple_cap()
        second, _ = cam.simple_cap()
        first = np.asarray(first, dtype=np.int32)
        second = np.asarray(second, dtype=np.int32)
        first = first - bias
        second = second - bias
        diff_img = first - second  # Difference of bias-subtracted frames
        clipped_diff_img = cam.roi_clip(diff_img)
        clipped_img = cam.roi_clip(first)
        sigmasqrs.append(np.var(clipped_diff_img))  # Variance from the difference
        medians.append(np.median(clipped_img))  # Intensity from a single frame

    sigmasqrs = np.array(sigmasqrs) / 2  # Variance of a difference is twice the single-frame variance
    medians = np.array(medians)

    # Linear fit of variance vs intensity; the gain is the inverse slope
    slope, intercept, r_value, _, _ = stats.linregress(medians, sigmasqrs)
    print(slope, intercept, r_value)
    gain = round((1 / slope), 2)
    #read_noise = int(np.sqrt((gain**2)*intercept))

    results_path = testing_dir + 'gain_testing/' + 'study_2_results.txt'
    np.savetxt(results_path, (medians, sigmasqrs, int_times))

    fit = slope * medians + intercept
    slope = round(slope, 2)
    intercept = int(intercept)
    rsqr = round((r_value**2), 4)
    plt.scatter(medians, sigmasqrs, c='red', label='Data')
    plt.plot(medians, fit, 'g--',
             label='Linear Fit: (m = {0}, b = {1}, $r^2$ = {2})'.format(slope, intercept, rsqr))
    plt.ylabel(r'$\sigma^2$ (ADUs)')
    plt.xlabel('Intensity (ADUs)')
    plt.grid(True)
    plt.legend(loc='best')
    plt.title('Pairwise Variance vs Intensity Gain Study, g = {0}'.format(gain))
    plt.show()
    print('PROGRAM HAS COMPLETED')
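# The fit above relies on the standard photon-transfer relation: for
# shot-noise-limited frames the pairwise variance in ADUs scales as the mean
# signal divided by the gain, so 1/slope recovers the gain in e-/ADU. This is a
# standalone synthetic check of that relation; the gain value below is made up
# for illustration and only the module-level numpy/scipy imports are assumed.
def _demo_photon_transfer_gain():
    rng = np.random.default_rng(1)
    true_gain = 3.2  # e-/ADU, illustrative only
    medians, variances = [], []
    for s in np.linspace(5e3, 5e4, 10):  # mean signal levels in electrons
        frame_1 = rng.poisson(s, (200, 200)) / true_gain
        frame_2 = rng.poisson(s, (200, 200)) / true_gain
        variances.append(np.var(frame_1 - frame_2) / 2)  # pairwise variance
        medians.append(np.median(frame_1))
    # var(ADU) = mean(ADU) / gain for pure shot noise, so the slope is 1/gain
    slope, _, _, _, _ = stats.linregress(medians, variances)
    print(round(1 / slope, 2))  # ~3.2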
def pair_ramp(n, tag=''):
    int_times = np.round(np.linspace(400, 700, n), 3)
    for j in int_times:
        cam.set_int_time(j)
        cam.set_frame_time(j + 250)
        # Take pair of images
        cam.img_cap(routine, img_dir, 'f')
        cam.file_sorting(img_dir, j, j + 250, tag=tag)
    print('PROGRAM COMPLETE')
def read_ramp(n):
    int_times = np.round(np.linspace(0.033, 0.5, n), 3)
    RNs = []
    bias_level = []
    cam.printProgressBar(0, n)
    y = 0
    for j in int_times:
        int_t = cam.set_int_time(j)
        if int_t < (j + 1):  # Only measure if the requested integration time was accepted
            cam.set_frame_time(20.33)
            bias_1, _ = cam.simple_cap()
            bias_2, _ = cam.simple_cap()
            bias_1 = np.asarray(bias_1, dtype=np.int32)
            bias_2 = np.asarray(bias_2, dtype=np.int32)
            bias_dif = bias_2 - bias_1
            dif_clipped = bias_dif.flatten()
            RNs.append(np.std(dif_clipped) / np.sqrt(2))
            bias_level.append(np.median(bias_1))
        else:
            # Re-use the previous measurement if the set point was not accepted
            RNs.append(RNs[-1])
            bias_level.append(bias_level[-1])
        y += 1
        cam.printProgressBar(y, n)

    RNs = 3.22 * np.array(RNs)  # Convert ADUs to electrons (fixed gain factor)
    bias_level = 3.22 * np.array(bias_level)
    int_times *= 1E3  # Scale for the microsecond axis label

    fig, ax1 = plt.subplots()
    color = 'tab:red'
    ax1.set_xlabel(r'Integration Time ($\mu$s)')
    ax1.set_ylabel('Median $e^-$/pixel', color=color)
    ax1.scatter(int_times, bias_level, color=color)
    ax1.tick_params(axis='y', labelcolor=color)

    ax2 = ax1.twinx()  # Second y-axis sharing the same x-axis
    color = 'tab:blue'
    ax2.set_ylabel(r'$\sigma$', color=color)
    ax2.scatter(int_times, RNs, color=color)
    ax2.tick_params(axis='y', labelcolor=color)

    fig.tight_layout()  # Otherwise the right y-label is slightly clipped
    plt.grid(True)
    plt.title('Read-noise/Bias as a function of Integration Time')
    plt.show()
def frame_int_comp():
    dit = 400
    cam.set_int_time(dit)
    deltas = np.linspace(-50, 50, 100)
    amps = []
    for i in deltas:
        cam.set_frame_time(dit + i)
        frame, _ = cam.simple_cap()
        amps.append(np.median(frame))
    plt.scatter(deltas, amps)
    plt.xlabel('Frame Time - Integration Time')
    plt.ylabel('Median Pixel Intensity (ADUs)')
    plt.grid(True)
    plt.show()
def bias_temp(n, loops):
    bias = cam.get_master_bias(-40)  # Reference master bias
    median_residuals = []
    times = []
    t0 = time.time()  # Stamp starting time
    int_t = cam.set_int_time(0.033)
    frame_t = cam.set_frame_time(20.033)
    for i in range(loops):
        img = np.zeros(shape=(naxis1, naxis2))  # Array for coadding
        for j in range(n):
            frame, _ = cam.simple_cap()
            img += frame
        img = img / n  # Mean of the n coadded frames
        times.append(time.time() - t0)  # Time of this coadd since the start
        residual_img = img - bias
        median_residuals.append(np.median(residual_img))  # Median of the residual

    fig, ax1 = plt.subplots()
    colour = 'tab:blue'
    ax1 = sns.pointplot(x=times, y=median_residuals, color=colour)
    ax1.set_ylabel('Median Residual', color=colour)
    ax1.tick_params(axis='y', labelcolor=colour)
    ax1.set_xlabel('Time (s)')
    plt.title(r'$\mathrm{Thermal\ emission\ of\ shutter\ study\ (bias\ frames)}$')
    plt.show()
def expose(i, tag=''):
    int_t = cam.set_int_time(i)
    frame_t = cam.set_frame_time(i + 100)
    frame, int_header = cam.simple_cap()
    if args.c != '':
        int_header.append(('COMMENT', args.c, 'User-defined comment'))
    fits.writeto(unsorted_img, frame, int_header)
    cam.weather_to_fits(unsorted_img)
    cam.file_sorting(local_img_dir, int_t, frame_t, tag=tag)
def master_bias(n, tag, T):
    '''
    Capture n bias frames (0.033 ms integrations), median-stack them and
    write the result to a master bias FITS file named with the given tag;
    T records the FPA temperature in the header.
    '''
    cam.set_int_time(0.033)
    cam.set_frame_time(100.033)
    cam.printProgressBar(0, n, prefix='Progress:', suffix='Complete', length=50)
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    for j in range(n):
        cap, _ = cam.img_cap(routine, img_dir, 'f')
        hdu_img = fits.open(unsorted_img)
        fits_img = hdu_img[0]
        data = fits_img.data
        hdu_img.close()  # Close image so it can be sorted
        stack = np.dstack((stack, data))
        cam.printProgressBar(j, n, prefix='Progress:', suffix='Complete', length=50)
        if j == n - 1:  # On the final frame grab the header
            bias_header = fits.getheader(unsorted_img)
            os.remove(unsorted_img)  # Delete image after data retrieval

    bias_header.append(('NDIT', n, 'Number of integrations'))
    bias_header.append(('TYPE', 'MASTER_BIAS', '0s exposure frame'))
    bias_header.append(('FPATEMP', T, 'Temperature of detector'))

    # Median stack
    stack = stack[:, :, 1:]  # Slice off the zero base layer
    master_bias = np.median(stack, axis=2)
    master_bias = master_bias.astype(np.uint16)

    # Write master frame to FITS
    master_path = read_path + 'master_bias_' + tag + '.fits'
    fits.writeto(master_path, master_bias, bias_header)
    print('PROGRAM HAS COMPLETED')
def expose(i, tag=''):
    int_t = cam.set_int_time(i)
    frame_t = cam.set_frame_time(i + 250)
    frame, int_header = cam.simple_cap()
    if args.c != '':
        int_header.append(('COMMENT', args.c, 'User-defined comment'))
    # os.remove(unsorted_img)
    fits.writeto(unsorted_img, frame, int_header, overwrite=True)
    cam.weather_to_fits(unsorted_img)
    cam.file_sorting(img_dir, int_t, frame_t, tag=tag)
    print('EXPOSE COMPLETE')
def persist_routine(dit, offset, end_t, tag):
    sorting_dir = persist_dir + '/' + tag
    os.mkdir(sorting_dir)
    img_name = sorting_dir + '/img_' + str(dit) + '_'

    # Run sld_on.exe
    subprocess.call([r"C:\EDT\pdv\sld_on.exe"])
    print("SOAK BEGIN")
    cam.set_int_time(dit)
    cam.set_frame_time(dit + offset)
    fr = cam.read_frame_time()
    it = cam.read_int_time()
    print(fr, it)

    # Take throwaway image to open up the camera
    cam.img_cap(routine, img_dir, 'f')
    os.remove(unsorted_img)

    # Run soak.exe
    subprocess.call([r"C:\EDT\pdv\soak.exe"])
    t0 = time.time()  # Start timer (time since soak)
    t = 0
    print("SOAK END")
    while t < end_t:
        cam.img_cap(routine, img_dir, 'f')
        t1 = time.time()
        t = t1 - t0
        t_s = round(t, 2)
        print("Image taken: {}".format(t_s))
        file_name = img_name + str(t_s) + '_.fits'
        os.rename(unsorted_img, file_name)
    print("PROGRAM COMPLETE")
def master_dark(i, n, T, tag=''):
    '''
    DIT and NDIT are inputs.
    Function can also take a tag for sorting individual frames onto the local drive.
    T is the FPA temperature for this dark, which is written to the file name
    and FITS header.
    Program also outputs a .npy binary file containing a 3D datacube of the
    central (100,100) window for studying temporal variance over the stack.
    '''
    cam.set_int_time(i)
    cam.set_frame_time(i + 20)
    bias = cam.get_master_bias(T)
    cam.printProgressBar(0, n, prefix='Progress:', suffix='Complete', length=50)
    stack = np.zeros((naxis1, naxis2), dtype=np.uint16)
    window = np.zeros((100, 100), dtype=np.uint16)
    for j in range(n):
        _, _ = cam.img_cap(routine, img_dir, 'f')
        hdu_img = fits.open(unsorted_img)
        data = hdu_img[0].data
        hdu_img.close()  # Close image so it can be sorted
        data = data - bias  # Bias-subtract each frame
        stack = np.dstack((stack, data))
        data_window = cam.window(data, 100)
        window = np.dstack((window, data_window))
        cam.printProgressBar(j, n, prefix='Progress:', suffix='Complete', length=50)
        if j == n - 1:  # On the final frame grab the header
            dark_header = fits.getheader(unsorted_img)
            # Save single frame to local drive
            cam.file_sorting(local_img_dir, i, i + 20, tag=tag)

    # Median stack
    stack = stack[:, :, 1:]  # Slice off the zero base layer
    master_dark = np.median(stack, axis=2)

    # Prepare window for temporal analysis
    window = window[:, :, 1:]  # Slice off the zero base layer
    temp_var = np.median(np.var(window, axis=2))
    temp_path = master_darks + 'dark_cube' + str(i / 1000) + '_' + str(T) + 'C.npy'
    np.save(temp_path, window)

    dark_header.append(('NDIT', n, 'Number of integrations'))
    dark_header.append(('TYPE', 'MASTER_DARK', 'Median stack of dark frames'))
    dark_header.append(('FPATEMP', T, 'Temperature of detector'))
    dark_header.append(
        ('TEMPVAR', temp_var, 'Median temporal variance of central (100,100) window'))

    # Output master frame to FITS
    master_path = master_darks + 'master_dark_' + str(i / 1000) + '_' + str(T) + 'C.fits'
    fits.writeto(master_path, master_dark, dark_header)
    print('PROGRAM HAS COMPLETED')
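# Rough sketch of reloading the dark cube written by master_dark() for temporal
# analysis. The filename below is hypothetical and simply follows the naming
# convention above (DIT/1000 and FPA temperature); the module-level numpy and
# matplotlib imports are assumed. Not called by any acquisition routine.
def _demo_inspect_dark_cube():
    cube = np.load(master_darks + 'dark_cube0.5_-40C.npy')  # shape (100, 100, NDIT)
    per_pixel_var = np.var(cube, axis=2)  # temporal variance of each pixel
    print('Median temporal variance:', np.median(per_pixel_var))
    plt.imshow(per_pixel_var, origin='lower')
    plt.colorbar(label=r'Temporal variance (ADUs$^2$)')
    plt.title('Temporal variance of central (100,100) window')
    plt.show()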
import scicam as cam
import argparse

parser = argparse.ArgumentParser(prog='Write Frame Time',
                                 description='Change Camera Frame Time')
parser.add_argument('f', type=float, help='Frame time in milliseconds')
parser.add_argument('-v', action='store_true', help='Verbose mode')
parser.add_argument('-r', action='store_true', help='Enter data as rate')
args = parser.parse_args()

if args.v:
    if args.r:
        cam.set_frame_time(args.f, verbose=True, rate=True)
    else:
        cam.set_frame_time(args.f, verbose=True)
else:
    if args.r:
        cam.set_frame_time(args.f, rate=True)
    else:
        cam.set_frame_time(args.f)
# Option to set integration time w/ or w/out verbose
if args.i:
    if args.v:
        int_t = cam.set_int_time(args.i, verbose=True)
    else:
        int_t = cam.set_int_time(args.i)
else:
    if args.v:
        int_t = cam.read_int_time(verbose=True)
    else:
        int_t = cam.read_int_time()

# Option to set frame time w/ or w/out verbose
if args.t:
    if args.v:
        frame_t = cam.set_frame_time(args.t, verbose=True)
    else:
        frame_t = cam.set_frame_time(args.t)
else:
    if args.v:
        frame_t = cam.read_frame_time(verbose=True)
    else:
        frame_t = cam.read_frame_time()

# Capture image w/ or w/out defined location
if args.l:
    for i in range(args.l):
        if args.r:
            capture_resp, _ = capture(args.p, args.r, 'f')
            cam.file_sorting(args.r, int_t, frame_t)
import scicam as cam
import argparse

parser = argparse.ArgumentParser(prog='capture Image',
                                 description='Captures image using specified routine')
parser.add_argument('-i', type=float, help='Integration Time')
parser.add_argument('-g', type=str, help='Naming Tag', default='')
parser.add_argument('-l', type=int, help='Number of integrations (NDIT)')
args = parser.parse_args()

img_dir = '//merger.anu.edu.au/mbirch/images'

if args.i:
    int_t = cam.set_int_time(args.i)
    frame_t = cam.set_frame_time(args.i + 20)
else:
    int_t = cam.read_int_time()
    frame_t = cam.read_frame_time()

if args.l:
    for i in range(args.l):
        cam.img_cap('capture', img_dir)
        cam.file_sorting(img_dir, int_t, frame_t, tag=args.g)
else:
    cam.img_cap('capture', img_dir)
    cam.file_sorting(img_dir, int_t, frame_t, tag=args.g)
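# Example invocations (illustrative; the script name capture.py is an assumption):
#   python capture.py -i 500 -g flats -l 10   -> ten frames at the given DIT, tagged 'flats'
#   python capture.py                         -> single frame at the current camera settings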