def run(path0, silent=False, verbose=True):
    '''
    Main function to run for a given path containing the raw GNIRS data

    Parameters
    ----------
    path0 : str
      Path to raw FITS file. Must include '/' at the end

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 22 March 2017
    Modified by Chun Ly, 23 March 2017
     - Call dir_check.main() to handle multiple date directories
    Modified by Chun Ly, 8 January 2018
     - Import glog and call for stdout and ASCII logging
     - Pass mylogger to get_files()
    Modified by Chun Ly, 7 May 2018
     - Bug fix: Call check_path() to add '/' in path0
    '''

    rawdir = check_path(path0)  # + on 07/05/2018

    # + on 08/01/2018
    logfile = rawdir + 'symlink.log'  # Use checked path so the 07/05/2018 fix takes effect
    mylogger = glog.log0(logfile)._get_logger()

    if silent == False:
        mylogger.info('### Begin run : ' + systime())

    # + on 23/03/2017
    dir_list, list_path = dir_check.main(rawdir, mylogger=mylogger,
                                         silent=silent, verbose=verbose)

    # Mod on 23/03/2017
    for path in list_path:
        files, n_files = get_files(path, mylogger=mylogger, silent=silent,
                                   verbose=verbose)

        for nn in xrange(n_files):
            c_file = path + 'c' + files[nn]
            if exists(c_file):
                mylogger.warn('File exists : ' + c_file)
            else:
                cmd0 = 'ln -fs ' + files[nn] + ' ' + c_file
                if silent == False:
                    mylogger.info(cmd0)
                os.system(cmd0)
        #endfor
    #endfor

    if silent == False:
        mylogger.info('### End run : ' + systime())
def main(path0, silent=True, verbose=False):
    '''
    Main function for checking paths and adding '/'

    Parameters
    ----------
    path0 : str
      Full path.

    silent : boolean
      Turns off stdout messages. Default: True

    verbose : boolean
      Turns on additional stdout messages. Default: False

    Returns
    -------
    path0 : str
      Full path with '/' appended at the end if it was missing

    Notes
    -----
    Created by Chun Ly, 20 September 2017
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    if path0[-1] != '/':
        path0 = path0 + '/'

    if silent == False:
        log.info('### End main : ' + systime())

    return path0
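# Usage sketch (illustrative, not part of the original module). Assumes this
# function is imported elsewhere as check_path (e.g., `from check_path import
# main as check_path`); the path below is hypothetical.
#
#   >>> main('/data/gnirs/20170322')
#   '/data/gnirs/20170322/'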
def in_bino_field(tab0, verts0, silent=False, verbose=True):
    '''
    Determine sources in MMT/Binospec field

    Parameters
    ----------
    tab0 : astropy.table.table.Table
      Astropy Table of HSC-SSP NB excess emitter catalog.

    verts0 : list
      List of matplotlib.patches.Rectangle vertices

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    in_field0 : list
      List of numpy arrays

    Notes
    -----
    Created by Chun Ly, 22 March 2018
    '''

    if silent == False:
        log.info('### Begin in_bino_field : ' + systime())

    ra = tab0['ra'].data
    dec = tab0['dec'].data

    coords = np.array([ra, dec]).transpose()

    n_ptgs = len(verts0)

    in_field0 = []

    for cc in range(n_ptgs):
        # Side 1
        t_path1 = Path0(verts0[cc][0])
        cp_res1 = t_path1.contains_points(coords)
        in_field1 = np.array([xx for xx in range(len(tab0)) if
                              cp_res1[xx] == True])

        # Side 2
        t_path2 = Path0(verts0[cc][1])
        cp_res2 = t_path2.contains_points(coords)
        in_field2 = np.array([xx for xx in range(len(tab0)) if
                              cp_res2[xx] == True])

        print cc, len(in_field1), len(in_field2)
        in_field = np.append(in_field1, in_field2)
        in_field.sort()

        in_field0.append(in_field)
    #endfor

    if silent == False:
        log.info('### End in_bino_field : ' + systime())

    return in_field0
def main(path, final_prefix, outfile='', all_lis=[], all_file='',
         mylogger=None, silent=False, verbose=True):
    '''
    Check if files exist. If not, create a temporary ASCII file to execute
    IRAF commands for the subset of files that do not exist

    Parameters
    ----------
    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 26 April 2017
    Modified by Chun Ly, 10 January 2018
     - Implement glog logging, allow mylogger keyword input
    '''

    # + on 10/01/2018
    if type(mylogger) == type(None):
        mylog, clog = 0, log
    else:
        mylog, clog = 1, mylogger

    if silent == False:
        clog.info('### Begin main : ' + systime())

    if len(all_lis) == 0 and all_file == '':
        clog.warn('Must specify all_lis or all_file')
        clog.warn('Aborting!!!')
        return
    #endif

    if len(all_lis) == 0:
        # Read from the user-supplied [all_file] (previously a hard-coded
        # path+'all.lis', which ignored the all_file keyword)
        all_lis = np.loadtxt(all_file, dtype=type(str))

    no_files = [file0 for file0 in all_lis if
                exists(path + final_prefix + file0) == False]

    if len(no_files) > 0:
        if outfile != '':
            if silent == False:
                clog.info('Writing : ' + path + outfile)
            np.savetxt(path + outfile, no_files, fmt='%s')
        else:
            pref_name = [final_prefix + file0 for file0 in no_files]
            print pref_name

    if silent == False:
        clog.info('### End main : ' + systime())
def get_files(path0, mylogger=None, silent=False, verbose=True):
    '''
    Simple function to get names of raw files

    Parameters
    ----------
    path0 : str
      Path to raw FITS file. Must include '/' at the end

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    files : list
      List containing the filename for FITS raw files

    n_files : int
      Size of [files]

    Notes
    -----
    Created by Chun Ly, 22 March 2017
    Modified by Chun Ly, 5 June 2017
     - Fix minor bug: infile -> infile0
    Modified by Chun Ly, 8 January 2018
     - Import glog and call for stdout and ASCII logging
    Modified by Chun Ly, 20 April 2018
     - Bug fix: mylog -> clog
    '''

    if type(mylogger) == type(None):
        mylog, clog = 0, log
    else:
        mylog, clog = 1, mylogger

    if silent == False:
        clog.info('### Begin get_files : ' + systime())

    infile0 = path0 + 'all.lis'
    if not exists(infile0):
        clog.warn('File does not exist!!! : ' + infile0)
        clog.warn('EXITING!!!')
        return

    if silent == False:
        clog.info('Reading : ' + infile0)
    files = np.loadtxt(infile0, dtype=type(str)).tolist()
    n_files = len(files)

    if silent == False:
        clog.info('### End get_files : ' + systime())

    return files, n_files
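# Usage sketch (illustrative): get_files() expects a plain-text 'all.lis' in
# [path0] with one raw FITS filename per line. The path and filename below
# are hypothetical.
#
#   >>> files, n_files = get_files('/data/gnirs/20170322/')
#   >>> print n_files, files[0]   # e.g., 10 N20170322S0001.fits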
def main(sub_dict0, fld_idx, fld_tab, Inst, tab_outfile, silent=False,
         verbose=True):
    '''
    Tally the number of sources from each NB subsample that falls within
    each instrument pointing and write the results to a LaTeX table

    Parameters
    ----------
    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 21 March 2018
     - Bug fix: with removing PA column in table
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    for key in sub_dict0.keys():
        s_idx = sub_dict0[key]

        cmd1 = 'n_fld_%s = np.zeros(len(fld_idx), dtype=np.int)' % key
        exec(cmd1)

        for ff, in_field in enumerate(fld_idx):
            in_idx = list(set(in_field.tolist()) & set(s_idx))
            cmd2 = 'n_fld_%s[ff] = len(in_idx)' % key
            exec(cmd2)
        #endfor
    #endfor

    t_cols = ['n_fld_' + aa for aa in sub_dict0.keys()]

    fld_arr0 = [fld_tab['MaskName'].data, fld_tab['RA'].data,
                fld_tab['Dec'].data, fld_tab['PA'].data]
    names0 = ['MaskName', 'RA', 'Dec', 'PA']

    cmd3 = "fld_arr0 += [" + ', '.join(t_cols) + ']'
    exec(cmd3)

    names0 += [val.replace('NB0', 'NB') for val in sub_dict0.keys()]

    inptg_tab0 = Table(fld_arr0, names=names0)

    if Inst == 'Hecto':
        del inptg_tab0['PA']

    if silent == False:
        inptg_tab0.pprint(max_lines=-1)

    if silent == False:
        log.info('### Writing : ' + tab_outfile)
    inptg_tab0.write(tab_outfile, format='ascii.latex')

    if silent == False:
        log.info('### End main : ' + systime())
def in_deimos_field(tab0, verts0, silent=False, verbose=True):
    '''
    Determine sources in Keck/DEIMOS field

    Parameters
    ----------
    tab0 : astropy.table.table.Table
      Astropy Table of HSC-SSP NB excess emitter catalog.

    verts0 : list
      List of matplotlib.patches.Rectangle vertices

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    in_field0 : list
      List of numpy arrays

    Notes
    -----
    Created by Chun Ly, 3 March 2018
    Modified by Chun Ly, 4 March 2018
     - Bug fix: Incorrect variable name, patches -> verts0
    '''

    if silent == False:
        log.info('### Begin in_deimos_field : ' + systime())

    ra = tab0['ra'].data
    dec = tab0['dec'].data

    coords = np.array([ra, dec]).transpose()

    n_ptgs = len(verts0)

    in_field0 = []

    for cc in range(n_ptgs):
        t_path = Path0(verts0[cc])
        cp_res = t_path.contains_points(coords)
        in_field = np.array([xx for xx in range(len(tab0)) if
                             cp_res[xx] == True])
        print cc, len(in_field)

        in_field0.append(in_field)
    #endfor

    if silent == False:
        log.info('### End in_deimos_field : ' + systime())

    return in_field0
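# Minimal self-contained sketch of the contains_points() technique used above
# (illustrative; Path0 in this module is assumed to alias matplotlib.path.Path):
#
#   import numpy as np
#   from matplotlib.path import Path as Path0
#
#   verts = [(0., 0.), (1., 0.), (1., 1.), (0., 1.)]   # unit square
#   coords = np.array([[0.5, 0.5], [2.0, 2.0]])        # inside, outside
#   print Path0(verts).contains_points(coords)         # -> [ True False]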
def cp_files(outdir, path, final_prefix, input_lis, silent=False,
             verbose=True):
    '''
    Copy files into outdir for certain IRAF functions

    Parameters
    ----------
    outdir : str
      Full path for where files are temporarily stored.
      Must include '/' at the end

    path : str
      Full path to list. Must include '/' at the end

    final_prefix : str
      Files with specific prefix to search for ('rnc', etc).
      This will be added before the filenames in [input_lis]

    input_lis : str
      Filename for input list to check ('arc.lis', 'flat.lis', etc.)
      Full path needs to be provided here.

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 7 May 2017
    '''

    if silent == False:
        log.info('### Begin cp_files : ' + systime())

    if silent == False:
        log.info('### Reading : ' + input_lis)
    files = np.loadtxt(input_lis, dtype=type(str))

    files0 = [path + final_prefix + file0 for file0 in files]

    cmd0 = 'cp -a ' + ' '.join(files0) + ' ' + outdir
    if silent == False:
        log.info('### ' + cmd0)
    os.system(cmd0)

    if silent == False:
        log.info('### End cp_files : ' + systime())
def run_all(files=None, path0=None, user='******', passwd='',
            silent=False, verbose=True):
    '''
    Run all functions related to MMTCam analysis

    Parameters
    ----------
    files : list
      List of files

    path0 : string
      Path to files. If not provided it is assumed that [files] has the
      full path name

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 23 February 2017
    Modified by Chun Ly, 28 February 2017
     - Add user and passwd keyword to pass on to make_postage()
    '''

    if files == None and path0 == None:
        log.error('files and path0 keywords not provided')
        log.error('Exiting!!!')
        return

    if silent == False:
        log.info('### Begin run_all: ' + systime())

    if files == None and path0 != None:
        files, seqno = get_files(path0)
        # path0 = None # Reset since files will have full path

    if silent == False:
        log.info('The following files will be analyzed from : ')
        log.info(path0)
        for file in files:
            log.info(os.path.basename(file))

    find_stars(files=files, path0=path0, plot=True, verbose=False)
    make_postage(files=files, path0=path0, user=user, passwd=passwd,
                 verbose=False)
    psf_contours(files=files, path0=path0, verbose=False)

    if silent == False:
        log.info('### End run_all: ' + systime())
def in_hecto_field(tab0, fld_coord, silent=False, verbose=True):
    '''
    Determine sources in MMT/Hectospec field

    Parameters
    ----------
    tab0 : astropy.table.table.Table
      Astropy Table of HSC-SSP NB excess emitter catalog.

    fld_coord : list
      List of RA,Dec coordinate set

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    in_field0 : list
      List of numpy arrays

    Notes
    -----
    Created by Chun Ly, 16 March 2018
    '''

    if silent == False:
        log.info('### Begin in_hecto_field : ' + systime())

    ra = tab0['ra'].data
    dec = tab0['dec'].data

    n_ptgs = len(fld_coord)

    in_field0 = []

    for cc in range(n_ptgs):
        ra_diff0 = ra - fld_coord[cc][0]
        dec_diff0 = dec - fld_coord[cc][1]
        diff0 = np.sqrt(ra_diff0**2 + dec_diff0**2)

        in_field = np.array([xx for xx in range(len(tab0)) if
                             diff0[xx] <= 0.50])
        print cc, len(in_field)

        in_field0.append(in_field)
    #endfor

    if silent == False:
        log.info('### End in_hecto_field : ' + systime())

    return in_field0
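# Self-contained toy version of the 0.50-deg radial cut above (illustrative;
# the RA/Dec differences are treated as planar offsets, as in the function):
#
#   import numpy as np
#   ra  = np.array([150.00, 150.40, 151.00])
#   dec = np.array([  2.00,   2.10,   2.50])
#   diff0 = np.sqrt((ra - 150.00)**2 + (dec - 2.00)**2)
#   print np.where(diff0 <= 0.50)[0]   # -> [0 1]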
def delete(path0, silent=False, verbose=True):
    '''
    Remove all symbolic links in given path containing the raw GNIRS data

    Parameters
    ----------
    path0 : str
      Path to raw FITS file. Must include '/' at the end

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 22 March 2017
    Modified by Chun Ly, 8 January 2018
     - Import glog and call for stdout and ASCII logging
     - Pass mylogger to get_files()
    Modified by Chun Ly, 22 January 2018
     - Pass rm command strings to mylogger
    '''

    # + on 08/01/2018
    logfile = path0 + 'symlink.log'
    mylogger = glog.log0(logfile)._get_logger()

    if silent == False:
        mylogger.info('### Begin delete : ' + systime())

    files, n_files = get_files(path0, mylogger=mylogger, silent=silent,
                               verbose=verbose)

    for nn in xrange(n_files):
        c_file = path0 + 'c' + files[nn]
        if exists(c_file):
            if os.path.islink(c_file) == True:
                cmd0 = 'rm ' + c_file
                mylogger.info(cmd0)
                os.system(cmd0)
            else:
                mylogger.info('File is from cleanir: ' + c_file)

    if silent == False:
        mylogger.info('### End delete : ' + systime())
def main(field='', dr='pdr1', silent=False, verbose=True):
    '''
    Main function to read in HSC SSP catalogs of NB excess emitters

    Parameters
    ----------
    field : str
      Name of field to read in. Either 'udeep' or 'deep'

    dr : str
      Data release name. Default: 'pdr1'

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    tab0 : astropy.table.Table
      Astropy Table of the NB excess emitter catalog

    Notes
    -----
    Created by Chun Ly, 27 February 2018
    Modified by Chun Ly, 2 March 2018
     - Call paths module to get path
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    if field == '':
        log.warn("### [field] input not specified!!!")
        log.warn("### Either 'udeep' or 'deep'")
        log.warn('### Exiting!!!')
        return

    dir0 = paths.gdrive()  # Mod on 02/03/2018

    infile = dir0 + 'catalogs/%s_%s_nb_forced_coord_phot.csv.gz' % (dr, field)
    if silent == False:
        log.info('### Reading : ' + infile)
    tab0 = asc.read(infile, format='csv')

    if silent == False:
        log.info('### End main : ' + systime())

    return tab0
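# Usage sketch (illustrative; the column names depend on the catalog file):
#
#   >>> tab0 = main(field='udeep', dr='pdr1')
#   >>> print len(tab0), tab0.colnames[:3]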
def main(silent=False, verbose=True):
    '''
    Main function to compute the statistical filter correction for each NB
    filter from its normalized response curve

    Parameters
    ----------
    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 13 July 2018
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    path0 = '/Users/cly/Google Drive/NASA_Summer2015/Filters/'

    filters = ['NB704', 'NB711', 'NB816', 'NB921', 'NB973']

    files = [path0 + filt + 'response.dat' for filt in filters]

    filt_corr = np.zeros(len(files))

    for ff in range(len(files)):
        log.info('Reading : ' + files[ff])
        tab1 = asc.read(files[ff], format='no_header')

        y_val = tab1['col2']
        y_val /= max(y_val)

        good = np.where(y_val >= 0.05)[0]

        # Compute statistical correction by
        # Sum [filter_amp * filter_amp] / Sum [filter_amp]
        weight_sum = np.sum(y_val[good]**2) / np.sum(y_val[good])
        filt_corr[ff] = 1 / weight_sum

    filt_tab = Table([filters, filt_corr],
                     names=('Filter', 'Filt_Stat_Corr'))

    filt_tab.pprint()

    if silent == False:
        log.info('### End main : ' + systime())
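# Worked toy example of the statistical correction above (illustrative): for
# a normalized response y, filt_corr = 1/(sum(y**2)/sum(y)) = sum(y)/sum(y**2).
# A perfect top-hat response gives 1.0; a triangular response gives ~1.5:
#
#   import numpy as np
#   tophat = np.ones(100)
#   tri = np.concatenate([np.linspace(0.05, 1.0, 50),
#                         np.linspace(1.0, 0.05, 50)])
#   for y_val in [tophat, tri]:
#       print np.sum(y_val) / np.sum(y_val**2)   # -> 1.0, then ~1.5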
def get_k_values(wave, law='CCM89', silent=True, verbose=False):
    '''
    Function to get k(lambda): A(lambda) = k(lambda) * E(B-V)

    Parameters
    ----------
    wave : float or array like
      Wavelength in units of Angstroms

    law : string
      String for dust attenuation "law". Default: "CCM89".
      Full list available from RC.getLaws()
      Options are:
       'G03 LMC', 'K76', 'F99-like', 'F88 F99 LMC', 'No correction',
       'SM79 Gal', 'MCC99 FM90 LMC', 'CCM89 Bal07', 'CCM89 oD94',
       'S79 H83 CCM89', 'F99', 'CCM89'
      - See pn.RedCorr.printLaws() for more details

    silent : boolean
      Turns off stdout messages. Default: True

    verbose : boolean
      Turns on additional stdout messages. Default: False

    Returns
    -------
    k value at wave

    Notes
    -----
    Created by Chun Ly, 22 November 2016
    '''

    if silent == False:
        print '### Begin balmer_decrement.get_k_values() | ' + systime()

    RC = pn.RedCorr(E_BV=1.0)
    RC.law = law

    if silent == False:
        print '### End balmer_decrement.get_k_values() | ' + systime()

    return np.log10(RC.getCorr(wave)) / 0.4
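# Usage sketch (illustrative; exact values depend on the installed PyNeb
# version). For CCM89, k(H-beta) is ~3.6 and k(H-alpha) is ~2.5:
#
#   >>> k_Hb = get_k_values(4861.3)
#   >>> k_Ha = get_k_values(6562.8)
#   >>> A_Ha = k_Ha * 0.1   # A(H-alpha) in mag for E(B-V) = 0.1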
def rm_files(final_prefix, input_lis, silent=False, verbose=True):
    '''
    Delete files in the current directory

    Parameters
    ----------
    final_prefix : str
      Files with specific prefix to search for ('rnc', etc).
      This will be added before the filenames in [input_lis]

    input_lis : str
      Filename for input list to check ('arc.lis', 'flat.lis', etc.)

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 7 May 2017
    Modified by Chun Ly, 15 May 2017
     - Got rid of [path] array
    '''

    if silent == False:
        log.info('### Begin rm_files : ' + systime())

    if silent == False:
        log.info('### Reading : ' + input_lis)
    files = np.loadtxt(input_lis, dtype=type(str))

    files0 = [final_prefix + file0 for file0 in files]

    cmd0 = 'rm ' + ' '.join(files0)
    if silent == False:
        log.info('### ' + cmd0)
    os.system(cmd0)

    if silent == False:
        log.info('### End rm_files : ' + systime())
def main(path0, out_pdf='', silent=False, verbose=True, overwrite=False):
    '''
    Main function to generate PDF illustrating alignment on target

    Parameters
    ----------
    path0 : str
      Path to FITS file. Must include '/' at the end

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    overwrite : boolean
      Overwrite files if they exist. Default: False

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 24 March 2017
    Modified by Chun Ly, 01 April 2017
     - Handle CRs and bad pixels using cosmicrays_lacosmic
    Modified by Chun Ly, 04 April 2017
     - Use find_gnirs_window_mean to find center
    Modified by Chun Ly, 04-05 April 2017
     - Adjust greyscale limits to handle slit image (make it black),
       and faint sources
    Modified by Chun Ly, 05 April 2017
     - Handle alignment sequences with more than just 4 frames
     - Handle excess subplots for individual PDF pages (remove axes)
     - Compute seeing FWHM for acquisition images
    Modified by Chun Ly, 06 April 2017
     - Get coordinates for slit in cutout
    Modified by Chun Ly, 11 May 2017
     - Use slit image to find center when telluric data is only available
    Modified by Chun Ly, 3 July 2017
     - Add overwrite option to prevent overwriting file
    Modified by Chun Ly, 9 January 2018
     - Import glog and call for stdout and ASCII logging
     - Pass mylogger to find_gnirs_window_mean(), find_gnirs_window()
    Modified by Chun Ly, 20 April 2018
     - Pass mylogger to gauss2d_fit()
     - Switch print statements to mylogger calls
    Modified by Chun Ly, 22 April 2018
     - Bug fix: mylogger calls mistakes
     - Bug fix: mylogger calls mistakes (cont'd)
    '''

    # + on 09/01/2018
    logfile = path0 + 'align_check.log'
    mylogger = glog.log0(logfile)._get_logger()

    if silent == False:
        mylogger.info('### Begin main : ' + systime())

    dir_list, list_path = dir_check.main(path0, mylogger=mylogger,
                                         silent=silent, verbose=verbose)

    out_pdf_default = out_pdf
    for path in list_path:
        infile = path + 'hdr_info.QA.tbl'
        if not exists(infile):
            mylogger.warning('File does not exist : ' + infile)
            mylogger.warning('Exiting!!! ' + systime())
            return

        out_pdf = path + 'align_check.pdf' if out_pdf == '' else \
            path + out_pdf

        # Mod on 03/07/2017
        if overwrite == False and exists(out_pdf):
            mylogger.warn('File exists!! Will not overwrite ' + out_pdf)
        else:
            pp = PdfPages(out_pdf)

            if silent == False:
                mylogger.info('Reading: ' + infile)
            tab0 = asc.read(infile, format='fixed_width_two_line')

            align = [ii for ii in xrange(len(tab0)) if
                     tab0['QA'][ii] == 'N/A']
            if silent == False:
                mylogger.info('Number of alignment images found : ' +
                              str(len(align)))

            ID = tab0['object'][align]
            ID0 = list(set(ID))  # Unique ID's
            if silent == False:
                mylogger.info('Sources found : ' + ', '.join(ID0))

            # + on 04/04/2017
            win_ref_idx = [tt for tt in xrange(len(tab0)) if
                           (tab0['QA'][tt] == 'N/A') and
                           ('Acq' in tab0['slit'][tt]) and
                           ('HIP' not in tab0['object'][tt]) and
                           ('HD' not in tab0['object'][tt])]

            # Mod on 11/05/2017
            if len(win_ref_idx) > 0:
                win_ref_file = path + tab0['filename'][win_ref_idx[0]]
                mylogger.info('Reference image for finding GNIRS window : ' +
                              win_ref_file)

                x_min, x_max, y_min, y_max, x_cen, \
                    y_cen = find_gnirs_window_mean(win_ref_file,
                                                   mylogger=mylogger)
            else:
                mylogger.info('Using telluric image as reference')
                win_ref_file = path + tab0['filename'][0]

                slit_x0, slit_y0_lo, slit_y0_hi = get_slit_trace(win_ref_file)
                x_min, x_max = min(slit_x0), max(slit_x0)
                x_cen = (x_min + x_max) / 2.0
                y_cen = (np.median(slit_y0_lo) + np.median(slit_y0_hi)) / 2.0
                y_min = y_cen - size2d[0].value / 2.0
                y_max = y_cen + size2d[0].value / 2.0

            pos_cen = (x_cen, y_cen)
            new_size = u.Quantity((y_max - y_min, x_max - x_min), u.pixel)

            # + on 20/04/2018, Mod on 22/04/2018
            mylogger.info('pos_cen : (%f, %f) ' % (pos_cen[0], pos_cen[1]))
            mylogger.info('new_size : [%f, %f] pix ' %
                          (new_size[0].value, new_size[1].value))

            for ii in xrange(len(ID0)):
                t_idx = [tt for tt in xrange(len(tab0)) if
                         (tab0['object'][tt] == ID0[ii] and
                          tab0['QA'][tt] == 'N/A')]

                t_files = [path + a for a in tab0['filename'][t_idx]]
                ncols = 2.0
                nrows = 2  # np.ceil(len(t_idx)/ncols)
                ncols, nrows = np.int(ncols), np.int(nrows)

                # Mod on 05/04/2017
                if len(t_idx) <= nrows * ncols:
                    fig, ax_arr = plt.subplots(nrows=nrows, ncols=ncols)

                #med0, x_min, x_max, y_min, \
                #    y_max, x_cen, y_cen = find_gnirs_window(t_files[1],
                #                                            mylogger=mylogger)

                # Later + on 24/03/2017 | Mod on 04/04/2017
                xcen, ycen = find_star(t_files[-1], pos=pos_cen,
                                       find_size2d=new_size)
                # Fix to get relative coordinate for Cutout2D image
                #xcen -= pos_cen[0]-new_size[1].value/2.0
                #ycen -= pos_cen[1]-new_size[0].value/2.0

                slit_x0, slit_y0_lo, slit_y0_hi = \
                    get_slit_trace(t_files[0])  # , x_min, x_max)
                # Adjust values for offset that is applied
                # Bug: Mod on 04/04/2017 to get proper coordinate
                slit_x0 -= np.int64(pos_cen[0] - size2d[1].value / 2.0)
                slit_y0_lo -= pos_cen[1] - size2d[0].value / 2.0
                slit_y0_hi -= pos_cen[1] - size2d[0].value / 2.0

                for jj in xrange(len(t_idx)):
                    jj_idx = t_idx[jj]

                    # + on 05/04/2017
                    if len(t_idx) > (nrows * ncols):
                        if jj % (nrows * ncols) == 0:
                            fig, ax_arr = plt.subplots(nrows=nrows,
                                                       ncols=ncols)

                    im0 = fits.getdata(t_files[jj])
                    hdr0 = fits.getheader(t_files[jj], ext=0)  # Get WCS header

                    # + 01/04/2017
                    im0_clean = cosmicray_lacosmic(im0, sigclip=10)[0]

                    cutout = Cutout2D(im0_clean, pos_cen, size2d,
                                      mode='partial', fill_value=np.nan)

                    t_col, t_row = jj % ncols, (jj / ncols) % nrows

                    # Mod on 04/04/2017 to handle bright and faint stars
                    max0 = np.max(cutout.data)

                    # Compute median within GNIRS window
                    # + on 04-05/04/2017
                    temp = im0_clean[-50:-1, :]
                    bgd0, sig0 = np.median(temp), np.std(temp)
                    idx_y, idx_x = np.where(im0_clean > (bgd0 + 5 * sig0))
                    med0 = np.median(im0_clean[idx_y, idx_x])
                    mylogger.info('## max0 : %f med0 : %f ' % (max0, med0))
                    if max0 > 50000:
                        z1, z2 = zscale.get_limits(cutout.data)
                        z2 = max0  # Change for better stretch for telluric star
                    else:
                        if ('Acq_' not in tab0['slit'][jj_idx]) and \
                           (tab0['exptime'][jj_idx] == 3):
                            # First frame that will show the longslit
                            z1, z2 = 0.0, 0.5 * max0
                        else:
                            # This should handle faint and bright stars
                            z1, z2 = 0.5 * med0, max0

                    norm = ImageNormalize(vmin=z1, vmax=z2)
                    t_ax = ax_arr[t_row, t_col]
                    t_ax.imshow(cutout.data, cmap='Greys', origin='lower',
                                norm=norm)
                    #aplpy.FITSFigure(cutout)

                    # Draw trace of slit
                    t_ax.plot(slit_x0, slit_y0_lo, 'r-')
                    t_ax.plot(slit_x0, slit_y0_hi, 'r-')

                    t_ax.xaxis.set_ticklabels([])
                    t_ax.yaxis.set_ticklabels([])

                    fig.suptitle(path, fontsize=14)

                    txt0 = tab0['filename'][jj_idx] + '\n'
                    txt0 += tab0['datelabel'][jj_idx] + '\n'
                    txt0 += tab0['UT_date'][jj_idx] + '\n'
                    txt0 += tab0['object'][jj_idx]
                    t_ax.annotate(txt0, [0.025, 0.95],
                                  xycoords='axes fraction', ha='left',
                                  va='top')

                    # Plot inset | Later + on 24/03/2017
                    axins = zoomed_inset_axes(t_ax, 6, loc=4)
                    norm2 = ImageNormalize(vmin=z1, vmax=z2)
                    axins.imshow(cutout.data, cmap='Greys', origin='lower',
                                 norm=norm2)

                    # Draw trace of slit
                    axins.plot(slit_x0, slit_y0_lo, 'r-')
                    axins.plot(slit_x0, slit_y0_hi, 'r-')

                    # Mod on 04/04/2017 to get Cutout2d coordinates
                    c_xcen = xcen - (pos_cen[0] - size2d[1].value / 2.0)
                    c_ycen = ycen - (pos_cen[1] - size2d[0].value / 2.0)
                    x1, x2 = c_xcen - 20, c_xcen + 20
                    y1, y2 = c_ycen - 20, c_ycen + 20
                    axins.set_xlim([x1, x2])
                    axins.set_ylim([y1, y2])
                    axins.xaxis.set_ticklabels([])
                    axins.yaxis.set_ticklabels([])
                    mark_inset(t_ax, axins, loc1=1, loc2=3, fc="none",
                               ec="b", ls='dotted', lw=0.5)

                    # Compute FWHM of alignment star | + on 05/04/2017
                    if ('Acq_' not in tab0['slit'][jj_idx]) and \
                       (tab0['exptime'][jj_idx] == 3):
                        mylogger.info('No source in slit : ' +
                                      tab0['filename'][jj_idx])
                    else:
                        # + on 06/04/2017
                        c_size2d = u.Quantity((40, 40), u.pixel)
                        c_slit_x0 = slit_x0 - \
                            (c_xcen - c_size2d[1].value / 2.0)
                        c_slit_y0_lo = slit_y0_lo - \
                            (c_ycen - c_size2d[0].value / 2.0)
                        c_slit_y0_hi = slit_y0_hi - \
                            (c_ycen - c_size2d[0].value / 2.0)
                        im0_crop = Cutout2D(cutout.data, (c_xcen, c_ycen),
                                            c_size2d, mode='partial',
                                            fill_value=np.nan)
                        gauss2d_fit(im0_crop.data, hdr0, t_ax, c_slit_x0,
                                    c_slit_y0_lo, c_slit_y0_hi,
                                    mylogger=mylogger)  # Mod on 06/04/2017

                    # Write each page separately | + on 05/04/2017
                    if len(t_idx) > (nrows * ncols):
                        # Mod later on 05/04/2017 to handle excess subplots
                        if jj == len(t_idx) - 1:
                            rem0 = len(t_idx) % (nrows * ncols)  # remainder
                            if rem0 != 0:
                                for rr in range(rem0, nrows * ncols, 1):
                                    t_col, t_row = rr % ncols, \
                                        (rr / ncols) % nrows
                                    ax_arr[t_row, t_col].axis('off')

                        if (jj % (nrows * ncols) == nrows * ncols - 1) or \
                           (jj == len(t_idx) - 1):
                            subplots_adjust(left=0.02, bottom=0.02, top=0.95,
                                            right=0.98, wspace=0.02,
                                            hspace=0.02)
                            fig.set_size_inches(11, 8)
                            fig.savefig(pp, format='pdf')
                #endfor

                # Mod on 05/04/2017
                if len(t_idx) <= nrows * ncols:
                    # Mod later on 05/04/2017 to handle excess subplots
                    for rr in range(len(t_idx), nrows * ncols):
                        t_col, t_row = rr % ncols, (rr / ncols) % nrows
                        ax_arr[t_row, t_col].axis('off')

                    subplots_adjust(left=0.02, bottom=0.02, top=0.95,
                                    right=0.98, wspace=0.02, hspace=0.02)
                    fig.set_size_inches(11, 8)
                    fig.savefig(pp, format='pdf')
            #endfor

            pp.close()
        #endelse

        out_pdf = out_pdf_default

    if silent == False:
        mylogger.info('### End main : ' + systime())
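# Minimal sketch of the greyscale stretch used above (illustrative; the
# module-level `zscale` object is assumed to behave like astropy's
# ZScaleInterval):
#
#   import numpy as np
#   from astropy.visualization import ZScaleInterval, ImageNormalize
#
#   im = np.random.normal(100.0, 5.0, (64, 64))   # fake sky frame
#   z1, z2 = ZScaleInterval().get_limits(im)
#   norm = ImageNormalize(vmin=z1, vmax=z2)
#   # t_ax.imshow(im, cmap='Greys', origin='lower', norm=norm)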
def main(path0='', out_pdf='', check_quality=True, skysub=False,
         silent=False, verbose=True, overwrite=False):
    '''
    main() function to compute natural seeing (image quality) from bright
    alignment star

    Parameters
    ----------
    path0 : str
      Full path to where output PDF and FITS file are located. Must end
      with a '/'

    out_pdf : str
      Filename for output PDF. Do NOT include full path

    check_quality : boolean
      Check whether data meets IQ requirements. Default: True

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    overwrite : boolean
      Overwrite files if they exist. Default: False

    Returns
    -------
    multi-page PDF plot

    Notes
    -----
    Created by Chun Ly, 10 March 2017
     - Later modified to include check_quality keyword option
     - Later modified to include inset that shows the stacked line profile
    Modified by Chun Ly, 11 April 2017
     - Call dir_check.main() to handle multiple date directories
    Modified by Chun Ly, 13 April 2017
     - Minor bug: Check if file exists first
    Modified by Chun Ly, 10 May 2017
     - Minor bug: When .lis file contains only one entry, problem for
       appending to list
    Modified by Chun Ly, 1 June 2017
     - Added overwrite keyword option to overwrite file. Default is not to
       overwrite .pdf files
     - Bug found: No longer need sky.lis since obj.lis includes all
    Modified by Chun Ly, 6 June 2017
     - Add skysub keyword option to operate on sky-subtracted images
    Modified by Chun Ly, 14 July 2017
     - Fix tick mark locations
     - Fix y limit range for extreme outliers
    Modified by Chun Ly, 16 November 2017
     - Change prefix: rnc to rbnc
    Modified by Chun Ly, 18 December 2017
     - Import glog and call for stdout and ASCII logging
     - Pass mylogger to compute_fwhm()
    Modified by Chun Ly, 11 January 2018
     - Pass mylogger to dir_check.main()
    Modified by Chun Ly, 18 April 2018
     - Compute and report seeing at zenith
     - Show FWHM @ Zenith on right y-axis
    Modified by Chun Ly, 19 April 2018
     - Include airmass info in plots
    Modified by Chun Ly, 14 May 2018
     - Write ASCII table containing FWHM determination
    Modified by Chun Ly, 18 May 2018
     - Adjust subplots_adjust to avoid labels being cut off
     - Handle case when extra plot window (odd numbers) is available
     - Handle case with extra plot window (cont'd)
    Modified by Chun Ly, 19 May 2018
     - Include QA (PASS/USABLE/FAIL) info in table
    Modified by Chun Ly, 28 June 2018
     - Bug troubleshooting with ValueError
     - Handle ValueError for avg FWHM
    '''

    # + on 18/12/2017
    logfile = path0 + 'IQ_plot.log'
    mylogger = glog.log0(logfile)._get_logger()

    if silent == False:
        mylogger.info('### Begin main : ' + systime())

    # + on 11/04/2017
    dir_list, list_path = dir_check.main(path0, mylogger=mylogger,
                                         silent=silent, verbose=verbose)

    out_pdf_default = out_pdf

    # Mod on 11/04/2017
    for path in list_path:
        files = []
        file_lis = ['obj.lis', 'telluric.lis']  # Minor bug fix on 01/06/2017
        for file0 in file_lis:
            # Mod on 13/04/2017
            if exists(path + file0):
                if silent == False:
                    mylogger.info('Reading : ' + path + file0)
                t_files = np.loadtxt(path + file0, dtype=type(str)).tolist()
                # Bug fix - 10/05/2017
                if type(t_files) == str:
                    t_files = [t_files]
                files += t_files
            else:
                if silent == False:
                    mylogger.info('File not found : ' + path + file0)

        files.sort()

        n_files = len(files)

        # + on 14/05/2018
        FWHM_avg_arr = np.zeros(n_files)    # FWHM from averaging stack0_shift
        FWHM_avg_arr_Z = np.zeros(n_files)  # FWHM from averaging stack0_shift at Zenith
        FWHM_avg_QA = [''] * n_files        # QA check from averaging stack0_shift at Zenith
        FWHM_med_arr = np.zeros(n_files)    # FWHM from median along Y
        FWHM_med_arr_Z = np.zeros(n_files)  # FWHM from median along Y at Zenith
        FWHM_med_QA = [''] * n_files        # QA check from median along Y at Zenith

        # Mod on 06/06/2017
        if skysub == True:
            files = ['brnc' + file0 for file0 in files]  # Mod on 16/11/2017
            out_pdf = path + 'IQ_plot.skysub.pdf' if out_pdf == '' else \
                path + out_pdf
        else:
            out_pdf = path + 'IQ_plot.raw.pdf' if out_pdf == '' else \
                path + out_pdf

        if overwrite == False and exists(out_pdf):
            mylogger.warn('File exists!! Will not overwrite ' + out_pdf)
        else:
            pp = PdfPages(out_pdf)

            for nn in xrange(n_files):
                if silent == False:
                    mylogger.info('Reading : ' + files[nn])
                hdr0 = fits.getheader(path + files[nn])
                # Mod on 06/06/2017
                if skysub == False:
                    im0 = fits.getdata(path + files[nn])
                else:
                    im0 = fits.getdata(path + files[nn], 'sci')

                airmass = hdr0['AIRMASS']  # + on 18/04/2018

                # Mod on 18/12/2017
                bins, fwhm0, stack0_shift = compute_fwhm(im0,
                                                         mylogger=mylogger)

                row = nn % 2
                if row == 0:
                    fig, ax0 = plt.subplots(2, 1)

                good = np.where(fwhm0 > 0)[0]
                ax0[row].plot(bins[good], fwhm0[good], marker='o',
                              alpha=0.5, mec='none', mfc='b',
                              linestyle='none', zorder=2)

                ax0[row].get_yaxis().set_tick_params(which='both',
                                                     right=True, width=1,
                                                     direction='in')
                ax0[row].get_xaxis().set_tick_params(which='both', top=True,
                                                     width=1, direction='in')

                # Compute average line profile from stack0_shift
                # Later + on 10/03/2017
                avg_stack = np.average(stack0_shift, axis=0)
                x0_avg = np.arange(-25, 25)
                if row == 0:
                    axi = fig.add_axes([0.60, 0.81, 0.25, 0.15])
                if row == 1:
                    axi = fig.add_axes([0.60, 0.36, 0.25, 0.15])
                axi.plot(x0_avg * pscale, avg_stack, 'k-')
                axi.set_ylim([-0.4, 1.1])
                axi.set_xlim([-2.0, 2.0])
                axi.set_xticks(range(-2, 2, 1))
                axi.set_xlabel('X [arcsec]', fontsize=8)
                axi.tick_params(labelsize=8)
                axi.minorticks_on()

                p0 = [0.0, 1.0, 0.0, 2.0]
                try:
                    popt, pcov = curve_fit(gauss1d, x0_avg, avg_stack, p0=p0)
                    fit_good = 1
                except ValueError:
                    print len(np.where(np.isnan(x0_avg))[0])
                    print len(np.where(np.isnan(avg_stack))[0])
                    fit_good = 0

                if fit_good:
                    avg_fwhm0 = popt[3] * 2 * np.sqrt(2 * np.log(2)) * pscale
                    avg_fwhm_Z = avg_fwhm0 / airmass**0.6  # + on 18/04/2018
                    axi.plot(x0_avg * pscale, gauss1d(x0_avg, *popt), 'r--')
                    axi.annotate('FWHM = %.3f" (%.3f")' %
                                 (avg_fwhm0, avg_fwhm_Z), [0.50, 0.025],
                                 xycoords='axes fraction', ha='center',
                                 va='bottom', fontsize=8)
                    ax0[row].axhline(y=avg_fwhm0, linewidth=2, color='r',
                                     linestyle='--', zorder=1)

                # Median FWHM | Later + on 10/03/2017
                med_fwhm0 = np.median(fwhm0[good])
                med_fwhm0_Z = med_fwhm0 / airmass**0.6  # + on 18/04/2018
                ax0[row].axhline(y=med_fwhm0, linewidth=2, color='g',
                                 linestyle='--', zorder=1)

                # Axes labeling
                ax0[row].set_ylabel('FWHM [arcsec]')
                if row == 1 or nn == n_files - 1:
                    ax0[row].set_xlabel('Y [pixel]')
                else:
                    ax0[row].set_xticklabels([])

                if nn == n_files - 1:
                    if row == 0:
                        ax0[row + 1].axis('off')

                # Annotation
                txt0 = files[nn] + '\nTarget: ' + hdr0['OBJECT']  # .split(' ')[0]
                ax0[row].annotate(txt0, [0.025, 0.95],
                                  xycoords='axes fraction', ha='left',
                                  va='top')

                # + on 19/04/2018
                ax0[row].annotate('AM = %.3f' % airmass, [0.025, 0.025],
                                  xycoords='axes fraction', ha='left',
                                  va='bottom')

                # Later + on 10/03/2017
                if check_quality:
                    req = hdr0['REQIQ'].replace('-percentile', '%')
                    raw = hdr0['RAWIQ'].replace('-percentile', '%')

                    i_raw = [ii for ii in range(len(FWHM_IQ_C)) if
                             raw in FWHM_IQ_C[ii]][0]
                    i_req = [ii for ii in range(len(FWHM_IQ_C)) if
                             req in FWHM_IQ_C[ii]][0]  # Bug fix: match [req], not [raw]

                    txt0 = 'Req. IQ: %s [%.2f"]\n' % (req, FWHM_IQ_J[i_req])
                    txt0 += 'Raw IQ: %s [%.2f"]\n' % (raw, FWHM_IQ_J[i_raw])

                    # Mod on 18/04/2018
                    if med_fwhm0_Z <= FWHM_IQ_J[i_raw]:
                        txt0 += 'PASS'
                        FWHM_med_QA[nn] = 'PASS'  # + on 19/05/2018
                    else:
                        if med_fwhm0_Z <= FWHM_IQ_J[i_raw] * 1.25:
                            txt0 += 'USABLE'
                            FWHM_med_QA[nn] = 'USABLE'  # + on 19/05/2018
                        if med_fwhm0_Z > FWHM_IQ_J[i_raw] * 1.25:
                            txt0 += 'FAIL'
                            FWHM_med_QA[nn] = 'FAIL'  # + on 19/05/2018

                    # + on 19/05/2018
                    if fit_good:
                        if avg_fwhm_Z <= FWHM_IQ_J[i_raw]:
                            FWHM_avg_QA[nn] = 'PASS'
                        else:
                            if avg_fwhm_Z <= FWHM_IQ_J[i_raw] * 1.25:
                                FWHM_avg_QA[nn] = 'USABLE'
                            if avg_fwhm_Z > FWHM_IQ_J[i_raw] * 1.25:
                                FWHM_avg_QA[nn] = 'FAIL'

                    ax0[row].annotate(txt0, [0.975, 0.05], ha='right',
                                      xycoords='axes fraction', va='bottom')

                # Aesthetics
                ax0[row].set_xlim([0, 1050])
                if max(fwhm0[good]) > 3:
                    ax0[row].set_ylim([0, 3.0])
                else:
                    ax0[row].set_ylim([min(fwhm0) - 0.025,
                                       max(fwhm0) + 0.075])
                ax0[row].minorticks_on()

                # + on 18/04/2018
                ax2 = ax0[row].twinx()
                ax2.set_ylabel(r"FWHM @ Zenith [arcsec]")
                ax2.set_ylim(np.array(ax0[row].get_ylim()) / airmass**0.6)
                ax2.minorticks_on()
                if row != 1:
                    ax2.set_xticklabels([])

                if row == 1 or nn == n_files - 1:
                    subplots_adjust(left=0.11, bottom=0.10, top=0.975,
                                    right=0.875, wspace=0.03, hspace=0.05)
                    fig.savefig(pp, format='pdf')

                # + on 14/05/2018
                if fit_good:
                    FWHM_avg_arr[nn] = avg_fwhm0
                    FWHM_avg_arr_Z[nn] = avg_fwhm_Z
                FWHM_med_arr[nn] = med_fwhm0
                FWHM_med_arr_Z[nn] = med_fwhm0_Z
            #endfor

            if silent == False:
                mylogger.info('Writing : ' + out_pdf)
            pp.close()

            # + on 14/05/2018
            fwhm_out_file = out_pdf.replace('.pdf', '.tbl')
            arr0 = [files, FWHM_avg_arr, FWHM_avg_arr_Z, FWHM_med_arr,
                    FWHM_med_arr_Z, FWHM_avg_QA, FWHM_med_QA]  # Mod on 19/05/2018
            names0 = ('files', 'FWHM_avg', 'FWHM_avg_Z', 'FWHM_med',
                      'FWHM_med_Z', 'FWHM_avg_QA', 'FWHM_med_QA')  # Mod on 19/05/2018
            tab0 = Table(arr0, names=names0)
            if silent == False:
                mylogger.info('Writing : ' + fwhm_out_file)
            asc.write(tab0, fwhm_out_file, format='fixed_width_two_line',
                      overwrite=True)
        #endelse

        out_pdf = out_pdf_default
    #endfor

    if silent == False:
        mylogger.info('### End main : ' + systime())
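# The FWHM bookkeeping above converts a fitted Gaussian sigma (in pixels)
# into arcsec via FWHM = sigma * 2*sqrt(2*ln 2) * pscale, then scales to
# zenith as FWHM / airmass**0.6. Worked toy numbers (the pscale value below
# is hypothetical):
#
#   import numpy as np
#   sigma, pscale0, airmass = 3.0, 0.15, 1.30            # pix, arcsec/pix
#   fwhm = sigma * 2 * np.sqrt(2 * np.log(2)) * pscale0  # ~1.06"
#   fwhm_zenith = fwhm / airmass**0.6                    # ~0.91"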
def main(silent=False, verbose=True):
    '''
    Main function to plot trends of peak flows

    Parameters
    ----------
    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 2 August 2018
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    main_file = '/Users/cly/Dropbox/Documents/Personals/Peak_Flows.xls'

    data = read_excel(main_file)

    dates = data['Date'].values
    time = data['Time'].values
    comm0 = data['Notes'].values

    v1 = data['PF #1'].values
    v2 = data['PF #2'].values
    v3 = data['PF #3'].values
    val = [v1, v2, v3]

    avg_pf = np.average(val, axis=0)

    high_el = np.array([xx for xx in range(len(data)) if
                        '8500' in str(comm0[xx])])
    tucson = np.array([xx for xx in range(len(data)) if
                       'Tucson' in str(comm0[xx])])
    travel = np.array([xx for xx in range(len(data)) if
                       'Travel' in str(comm0[xx])])
    sick = np.array([xx for xx in range(len(data)) if
                     'sick' in str(comm0[xx]) or 'Sick' in str(comm0[xx])])

    # data.Date = to_datetime(data['Date'], format='%Y-%m-%d %H:%M:%S.%f')
    # data.set_index(['Date'], inplace=True)

    fig, ax = plt.subplots(nrows=2)

    ax[0].scatter(dates[high_el], avg_pf[high_el], edgecolor='none',
                  facecolor='red', alpha=0.5, label='Mt Hopkins (8500 ft)')
    ax[0].scatter(dates[tucson], avg_pf[tucson], edgecolor='none',
                  facecolor='blue', alpha=0.5, label='Tucson (2500 ft)')
    ax[0].scatter(dates[travel], avg_pf[travel], edgecolor='none',
                  facecolor='green', alpha=0.5, label='Travel')
    ax[0].scatter(dates[sick], avg_pf[sick], 30, marker='x', color='red',
                  label='Sick')
    # data['Date'], data['PF #1'])
    # data.plot('Date', 'PF #1')  #, xlim=[2016,2019])
    ax[0].legend(loc='lower right', fontsize=10)

    ax[1].scatter(time, avg_pf)

    if silent == False:
        log.info('### End main : ' + systime())
def main(field='', dr='pdr1', noOII=False, DEIMOS=False, Hecto=False,
         Bino=False, silent=False, verbose=True):
    '''
    Main function to plot RA and Dec for each sub-sample of NB excess
    emitters

    Parameters
    ----------
    field : str
      Name of field to read in. Either 'udeep' or 'deep'

    dr : str
      Data release name. Default: 'pdr1'

    noOII : boolean
      Indicate whether to NOT plot [OII] emitters. Default: False

    DEIMOS : boolean
      Indicate whether to plot Keck/DEIMOS FoV on PDF plots

    Hecto : boolean
      Indicate whether to plot MMT/Hecto FoV on PDF plots

    Bino : boolean
      Indicate whether to plot MMT/Bino FoV on PDF plots

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 28 February 2018
     - Import subsample module to get NB excess emitter subsamples
     - Call subsample to get galaxy field, [gal_field]
     - Construct for loop to loop over each galaxy field and subsample
     - Define output PDF path
     - Plot ra and dec for each galaxy field and NB subsamples
    Modified by Chun Ly, 1 March 2018
     - Add noOII keyword to prevent [OII] emitters from being plotted
     - log.info output PDF file
     - Add DEIMOS boolean keyword input and plot DEIMOS FoV when set
     - Set axes limit for RA and Dec for each galaxy field
     - ax.legend() visibility improvement, RA/Dec limit changes
     - Bug fix: Placement of n_subsample for ax.legend() ncol determination
     - Add Hecto boolean keyword input and plot Hecto FoV when set
     - Update call to subsample.main(); simplify code
    Modified by Chun Ly, 2 March 2018
     - Call paths module to get path
    Modified by Chun Ly, 3 March 2018
     - Read in field coordinate file for specific FoV overlay
     - Generate DEIMOS coordinate list, pass to plot_deimos_fov()
     - Bug fix: Call plot_deimos_fov() outside of for loop
     - List comprehension: require DEIMOS pointing inside field
     - If statement before calling plot_deimos_fov()
     - Get vertices from plot_deimos_fov()
    Modified by Chun Ly, 4 March 2018
     - Call in_deimos_field()
     - Create table of targets in DEIMOS field, deimos_tab0
    Modified by Chun Ly, 5 March 2018
     - Write table of targets in DEIMOS field to LaTeX files
     - Include RA, Dec, PA in DEIMOS target field table
     - Simplify fld_arr0 cmd for exec
     - Define and pass maskno into plot_deimos_fov()
    Modified by Chun Ly, 7 March 2018
     - Get average RA and Dec for fields
    Modified by Chun Ly, 16 March 2018
     - Overlay Hecto pointings from input coordinate list
     - Hecto sub-code tested. Bug fix: f_idx -> h_idx
    Modified by Chun Ly, 19 March 2018
     - Read in PRIMUS catalog; Call overlay_primus()
    Modified by Chun Ly, 20 March 2018
     - Import and call ds9_mask_overlay() to overlay ds9 regions
    Modified by Chun Ly, 21 March 2018
     - Add Bino boolean keyword and plot Binospec FoV when set
     - Change out_pdf suffix for Bino and Hecto case
     - List comprehensions for mask/config centers
     - Call subsample_in_pointing for DEIMOS fields
     - Call subsample_in_pointing for Hecto fields
    Modified by Chun Ly, 22 March 2018
     - Call in_bino_field()
     - Call subsample_in_pointing for Bino fields
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    if field == '':
        log.warn("### [field] input not specified!!!")
        log.warn("### Either 'udeep' or 'deep'")
        log.warn('### Exiting!!!')
        return

    dir0 = paths.gdrive()  # Mod on 02/03/2018

    # + on 03/03/2018
    if DEIMOS or Hecto or Bino:
        field_coord_file = dir0 + 'field_coordinates.txt'
        if silent == False:
            log.info('### Reading : ' + field_coord_file)  # Bug fix: was 'Writing'
        ptg_tab = asc.read(field_coord_file)

    # + on 19/03/2018
    PRIMUS_cat_file = dir0 + 'catalogs/primus/PRIMUS_mask_centers.txt'
    PRIMUS_tab0 = asc.read(PRIMUS_cat_file)

    tab0 = read_catalog.main(field=field, dr=dr, silent=silent,
                             verbose=verbose)
    ra0, dec0 = tab0['ra'], tab0['dec']  # Mod on 01/03/2018

    sub_dict0, gal_dict0 = subsample.main(tab0=tab0, field=field, dr=dr)

    gal_field0 = gal_dict0.keys()  # Unique galaxy field list

    RA_lim_dict0 = {'UD-COSMOS': [149.25, 151.00],
                    'UD-SXDS': [33.70, 35.45],
                    'D-COSMOS': [148.75, 151.80],
                    'D-DEEP2_3': [350.20, 353.95],
                    'D-ELAIS_N1': [240.10, 245.40]}
    DE_lim_dict0 = {'UD-COSMOS': [1.30, 3.00],
                    'UD-SXDS': [-5.80, -4.10],
                    'D-COSMOS': [2.00, 3.75],
                    'D-DEEP2_3': [-1.90, 1.10],
                    'D-ELAIS_N1': [52.90, 56.85]}

    for t_field in gal_field0:
        f_idx = gal_dict0[t_field]  # Mod on 01/03/2018

        # + on 07/03/2018
        RA_avg = np.average(ra0[f_idx])
        DE_avg = np.average(dec0[f_idx])
        print 'RA/DE avg : ', t_field, RA_avg, DE_avg

        fig, ax = plt.subplots()

        n_subsample = 0
        for key in sub_dict0.keys():  # Mod on 01/03/2018
            s_idx = sub_dict0[key]

            # Get the intersection of f_idx and s_idx
            t_idx = list(set(f_idx) & set(s_idx))
            print t_field, key, len(t_idx)

            if len(t_idx) > 0:
                m0 = 'o' if 'NB0921' in key else '+' if 'NB0816' in key \
                    else ''
                c0 = 'red' if 'Ha' in key else 'green' if 'OIII' in key \
                    else 'blue'

                t_name = key.replace('NB0', 'NB')
                t_name = t_name.replace('OIII_', '[OIII] ')
                t_name = t_name.replace('OII_', '[OII] ')
                t_name = t_name.replace('Ha_', r'H$\alpha$ ')
                t_name += ' (' + str(len(t_idx)) + ')'

                # Mod on 01/03/2018
                if (noOII and 'OII_' in key):
                    if silent == False:
                        log.warn('## Will not plot ' + key)
                else:
                    n_subsample += 1
                    ax.scatter(ra0[t_idx], dec0[t_idx], s=5, marker=m0,
                               color=c0, linewidth=0.5, edgecolor='none',
                               alpha=0.5, label=t_name)
            #endif
        #endfor

        ax.set_xlabel('Right Ascension (deg)')
        ax.set_ylabel('Declination (deg)')

        ax.set_xlim([RA_lim_dict0[t_field][1], RA_lim_dict0[t_field][0]])
        ax.set_ylim(DE_lim_dict0[t_field])

        ax.annotate(t_field, [0.025, 0.975], xycoords='axes fraction',
                    fontsize=12, fontweight='bold', ha='left', va='top')
        ax.minorticks_on()

        # Mod on 01/03/2018
        ncol = 3 if n_subsample % 3 == 0 else 2
        ax.legend(loc='lower center', ncol=ncol, frameon=False,
                  fontsize=10, framealpha=0.9)

        # Overlay PRIMUS pointings | + on 19/03/2018
        ax = overlay_primus(PRIMUS_tab0, ax)

        # Overlay ds9 masked regions | + on 20/03/2018
        tmp_field = t_field.split('-')[-1].replace('_', '-')
        prefix = 'mask_dr1_s15b_' + field + '_NB0921_' + tmp_field + '*.reg'
        ds9_file = glob.glob(dir0 + 'catalogs/masks_Hayashi+17/' + prefix)[0]
        if silent == False:
            log.info('## ds9_file : ' + ds9_file)
        ds9_mask_overlay.main(ax, ds9_file, color='black', alpha=0.15)

        # Overlay DEIMOS FoV | + on 01/03/2018, Mod on 03/03/2018
        if DEIMOS:
            d_idx = [xx for xx in range(len(ptg_tab)) if
                     ((ptg_tab['Instr'][xx] == 'DEIMOS') and
                      (ptg_tab['Field'][xx] == t_field))]
            if len(d_idx) > 0:
                t_tab = ptg_tab[d_idx]
                pa = t_tab['PA'].data
                a_coord = [[ra, de] for ra, de in zip(t_tab['RA'],
                                                      t_tab['Dec'])]
                # Mod on 05/03/2018
                maskno = [mname.replace(t_field + '-D', '') for mname in
                          t_tab['MaskName']]
                ax, deimos_verts0 = plot_deimos_fov(ax, a_coord, maskno,
                                                    pa=pa)

                # + on 04/03/2018
                deimos_fld_idx = in_deimos_field(tab0, deimos_verts0,
                                                 silent=silent,
                                                 verbose=verbose)

                # Get subsample sizes in each DEIMOS pointing | + on 04/03/2018
                # Mod on 21/03/2018
                deimos_outfile = dir0 + 'catalogs/' + t_field + '_deimos.tex'
                ss_in_ptg(sub_dict0, deimos_fld_idx, t_tab, 'DEIMOS',
                          deimos_outfile, silent=silent, verbose=verbose)
            #endif
        #endif

        # Overlay Hecto FoV | + on 01/03/2018, Mod on 16/03/2018
        if Hecto:
            h_idx = [xx for xx in range(len(ptg_tab)) if
                     ((ptg_tab['Instr'][xx] == 'Hecto') and
                      (ptg_tab['Field'][xx] == t_field))]
            if len(h_idx) > 0:
                t_tab = ptg_tab[h_idx]
                a_coord = [[ra, de] for ra, de in zip(t_tab['RA'],
                                                      t_tab['Dec'])]
                configno = [cname.replace(t_field + '-H', '') for cname in
                            t_tab['MaskName'].data]
                ax = plot_hecto_fov(ax, a_coord, configno)

                hecto_fld_idx = in_hecto_field(tab0, a_coord, silent=silent,
                                               verbose=verbose)

                # Get subsample sizes in each Hecto pointing | Mod on 21/03/2018
                hecto_outfile = dir0 + 'catalogs/' + t_field + '_hecto.tex'
                ss_in_ptg(sub_dict0, hecto_fld_idx, t_tab, 'Hecto',
                          hecto_outfile, silent=silent, verbose=verbose)
            #endif
        #endif

        # Overlay Binospec FoV | + on 21/03/2018
        if Bino:
            b_idx = [xx for xx in range(len(ptg_tab)) if
                     ((ptg_tab['Instr'][xx] == 'Bino') and
                      (ptg_tab['Field'][xx] == t_field))]
            if len(b_idx) > 0:
                t_tab = ptg_tab[b_idx]
                pa = t_tab['PA'].data
                a_coord = [[ra, de] for ra, de in zip(t_tab['RA'],
                                                      t_tab['Dec'])]
                maskno = [mname.replace(t_field + '-B', '') for mname in
                          t_tab['MaskName'].data]
                ax, bino_verts0 = plot_bino_fov(ax, a_coord, maskno, pa=pa)

                # + on 22/03/2018
                bino_fld_idx = in_bino_field(tab0, bino_verts0,
                                             silent=silent, verbose=verbose)

                # Get subsample sizes in each Bino pointing | + on 22/03/2018
                bino_outfile = dir0 + 'catalogs/' + t_field + '_bino.tex'
                ss_in_ptg(sub_dict0, bino_fld_idx, t_tab, 'Bino',
                          bino_outfile, silent=silent, verbose=verbose)
            #endif
        #endif

        # Mod on 01/03/2018
        out_pdf = dir0 + 'plots/' + t_field + '_radec.pdf'
        if noOII:
            out_pdf = out_pdf.replace('.pdf', '.noOII.pdf')
        if DEIMOS:
            out_pdf = out_pdf.replace('.pdf', '.DEIMOS.pdf')

        # Mod on 21/03/2018
        if Hecto and Bino:
            out_pdf = out_pdf.replace('.pdf', '.MMT.pdf')
        else:
            if Hecto:
                out_pdf = out_pdf.replace('.pdf', '.Hecto.pdf')
            if Bino:
                out_pdf = out_pdf.replace('.pdf', '.Bino.pdf')

        if silent == False:
            log.info('## Writing : ' + out_pdf)
        fig.savefig(out_pdf, bbox_inches='tight')

    if silent == False:
        log.info('### End main : ' + systime())
def run(path0, clean_file='', out_script='', silent=False, verbose=True,
        overwrite=False):
    '''
    Create a .sh script to run cleanir.py for a set of files

    Parameters
    ----------
    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    overwrite : boolean
      Overwrite files if they exist. Default: False

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 7 March 2017
    Modified by Chun Ly, 15 May 2017
     - Call dir_check.main() to handle multiple date directories
    Modified by Chun Ly, 30 May 2017
     - Added overwrite option. Default is to not overwrite .sh files
     - Fix bug when only one file is found
    Modified by Chun Ly, 18 June 2017
     - Fix to work with no date directory
    Modified by Chun Ly, 22 January 2018
     - Import glog and call for stdout and ASCII logging
     - Pass mylogger to dir_check.main()
    Modified by Chun Ly, 20 April 2018
     - Change afq to rfq - row filtering generally produces better results
    '''

    logfile = path0 + 'cleanir_script.log'
    mylogger = glog.log0(logfile)._get_logger()

    if silent == False:
        mylogger.info('### Begin run : ' + systime())

    if clean_file == '':
        clean_file = 'clean.lis'

    # + on 15/05/2017
    dir_list, list_path = dir_check.main(path0, mylogger=mylogger,
                                         silent=silent, verbose=verbose)

    # Mod on 15/05/2017
    for date, path in zip(dir_list, list_path):
        clean_file0 = path + clean_file
        if not exists(clean_file0):
            mylogger.warn('File does not exist!!!')
            mylogger.warn(clean_file0)
        else:
            if silent == False:
                mylogger.info('Reading : ' + clean_file0)
            files = np.loadtxt(clean_file0, dtype=type(str)).tolist()
            if type(files) == str:
                files = [files]  # Bug fix. Mod on 30/05/2017

            out_script0 = path + 'run_cleanir.sh' if out_script == '' \
                else out_script
            if date != '':  # Mod on 18/06/2017
                out_script0 = out_script0.replace('.sh', '.' + date + '.sh')

            # Mod on 30/05/2017
            if overwrite == False and exists(out_script0):
                # Use mylogger for consistency with the 22/01/2018 change
                mylogger.warn('## File found!!! : ' + out_script0)
                mylogger.warn('## Will not overwrite!!!')
            else:
                if silent == False:
                    stat0 = 'Overwriting' if exists(out_script0) else \
                        'Writing'
                    mylogger.info(stat0 + ' : ' + out_script0)
                f = open(out_script0, 'w')
                for ii in xrange(len(files)):
                    cmd1 = cmd0 + ' -rfqo ' + path + 'c' + files[ii] + \
                        ' ' + path + files[ii]
                    f.write(cmd1 + '\n')
                f.close()
            #endelse
        #endelse
    #endfor

    if silent == False:
        mylogger.info('### End run : ' + systime())
def check_extended(h0, s_cat, seqno, return_irsa_cat=False, silent=False,
                   verbose=True):
    '''
    Query the 2MASS extended source catalog (XSC) to identify contamination
    from extended sources

    Parameters
    ----------
    h0 : FITS header
      FITS header containing WCS to determine RA/Dec

    s_cat : astropy.table.Table
      Astropy-formatted table of detected sources

    seqno : str
      Image sequence identifier (used in log messages)

    return_irsa_cat : boolean
      Also return the IRSA XSC query result. Default: False

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    flag_ext : numpy array
      1 for sources inside an extended source, 0 otherwise.
      When [return_irsa_cat] is set, the IRSA XSC table is also returned.

    Notes
    -----
    Created by Chun Ly, 27 February 2017
    Modified by Chun Ly, 28 February 2017
     - Include extended galaxies slightly outside of MMTCam FoV
     - Use elliptical formula to determine if inside extended source
    '''

    if silent == False:
        log.info('### Begin check_extended: ' + systime())

    size0 = 1.25 * h0['NAXIS1'] * pscale  # Size of region to search

    c0 = coords.SkyCoord(ra=h0['CRVAL1'], dec=h0['CRVAL2'], unit=u.deg)
    i_cat0 = IRSA.query_region(c0, catalog='fp_xsc', spatial='Box',
                               width=size0)

    flag_ext = np.zeros(len(s_cat))  # + on 28/02/2017

    # Check if daofind sources are within elliptical region of extended
    # sources | + on 28/02/2017
    if len(i_cat0) == 0:
        log.info('No extended source found for : ' + seqno)
    else:
        log.info('Extended sources found for : ' + seqno)
        if verbose == True:
            print i_cat0

        w0 = WCS(h0)
        sRA, sDec = w0.wcs_pix2world(s_cat['xcentroid'],
                                     s_cat['ycentroid'], 1)
        sc = coords.SkyCoord(ra=sRA, dec=sDec, unit=u.deg)

        for cc in range(len(i_cat0)):
            ic = coords.SkyCoord(ra=i_cat0['clon'][cc],
                                 dec=i_cat0['clat'][cc],
                                 unit=(u.hour, u.deg))
            #dist0 = ic.separation(sc).to(u.arcsec).value

            dra = (ic.ra.deg - sRA) * 3600.0 * np.cos(np.radians(ic.dec.deg))
            ddec = (ic.dec.deg - sDec) * 3600.0

            ang0 = np.radians(90.0 - i_cat0['sup_phi'][cc])  # Bug fix: use entry [cc]
            maj0 = i_cat0['r_k20fe'][cc]
            min0 = maj0 * i_cat0['sup_ba'][cc]

            dist0 = ((dra*np.cos(ang0) + ddec*np.sin(ang0))/maj0)**2 + \
                    ((dra*np.sin(ang0) - ddec*np.cos(ang0))/min0)**2
            ext0 = np.where(dist0 <= 1.0)[0]
            flag_ext[ext0] = 1
            # print s_cat[ext0]

    if silent == False:
        log.info('### End check_extended: ' + systime())

    if return_irsa_cat == False:
        return flag_ext
    else:
        return flag_ext, i_cat0  # Bug fix: 'return' was missing
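# Self-contained toy check of the elliptical containment test above
# (illustrative): a source is flagged when the normalized distance <= 1.
#
#   import numpy as np
#   dra, ddec = 5.0, 2.0             # offsets in arcsec
#   ang0 = np.radians(90.0 - 30.0)   # sup_phi = 30 deg
#   maj0, min0 = 10.0, 4.0           # r_k20fe and r_k20fe * sup_ba
#   dist0 = ((dra*np.cos(ang0) + ddec*np.sin(ang0))/maj0)**2 + \
#           ((dra*np.sin(ang0) - ddec*np.cos(ang0))/min0)**2
#   print dist0 <= 1.0               # -> True (dist0 ~ 0.87)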
def main(silent=False, verbose=True):
    '''
    Main function to derive errors from NB photometry

    Parameters
    ----------
    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 13 December 2018
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    # Limiting magnitudes
    m_NB = np.array([26.7134 - 0.047, 26.0684, 26.9016 + 0.057,
                     26.7894 + 0.041, 27.3928 + 0.032, 26.7088 - 0.109,
                     25.6917 - 0.051])
    m_BB1 = np.array([28.0829, 28.0829, 27.7568, 27.8933, 28.0829, 26.8250,
                      26.8250])
    m_BB2 = np.array([27.7568, 27.7568, 26.8250, 28.0829, 27.7568, 00.0000,
                      00.0000])
    cont_lim = mag_combine(m_BB1, m_BB2, epsilon)

    limit_dict = {'m_NB': m_NB, 'm_BB': cont_lim}

    tab0, infile = get_data()
    tab0 = get_errors(tab0, filt_dict0, BB_filt, epsilon,
                      limit_dict=limit_dict)

    outfile = infile.replace('.fits', '.errors.fits')
    tab0.write(outfile, format='fits', overwrite=True)

    plot_errors('Ha', filt_ref, tab0, limit_dict)

    plot_flux_ew_errors('Ha', filt_ref, tab0)

    if silent == False:
        log.info('### End main : ' + systime())
def check_prefix(final_prefix, input_lis, list_path='', path='',
                 mylogger=None, prereq=False, silent=False, verbose=True):
    '''
    Check if specific files from an input list exist with given prefix

    Parameters
    ----------
    final_prefix : str
      Files with specific prefix to search for ('rnc', etc).
      This will be added before the filenames in [input_lis]

    input_lis : str
      Filename for input list to check ('arc.lis', 'flat.lis', etc.)
      If full path is included, do not provide [list_path]

    list_path : str
      (Optional) Full path for input_lis if it does not contain the full
      path. Must include '/' at the end

    path : str
      (Optional) Full path to individual files if [input_lis] contents do
      not contain full path. Must include '/' at the end

    prereq : boolean
      Set if you're checking if necessary intermediate/input files are
      available. If set and files exist, returns do_run = 1.
      Default: False

    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------
    do_run : int
      0 - Not all files available
      1 - All files available

    Notes
    -----
    Created by Chun Ly, 5 May 2017
    Modified by Chun Ly, 16 May 2017
     - Switch path to an optional keyword
    Modified by Chun Ly, 2 June 2017
     - Changed path -> list_path for input_lis
     - path is now for individual frames from the input_lis
    Modified by Chun Ly, 18 December 2017
     - Implement glog logging, allow mylogger keyword input
    Modified by Chun Ly, 25 June 2018
     - Add prereq keyword option to check if intermediate files are
       available
    '''

    # + on 18/12/2017
    if type(mylogger) == type(None):
        mylog, clog = 0, log
    else:
        mylog, clog = 1, mylogger

    if silent == False:
        clog.info('### Begin check_prefix : ' + systime())

    input_lis0 = list_path + input_lis

    if silent == False:
        clog.info('Reading : ' + input_lis0)  # Mod on 18/12/2017
    files = np.loadtxt(input_lis0, dtype=type(str))

    f_exist = [file0 for file0 in files if
               exists(path + final_prefix + file0) == True]
    f_noexist = [file0 for file0 in files if
                 exists(path + final_prefix + file0) == False]

    do_run = 0
    # Mod on 25/06/2018
    if not prereq:
        if len(f_exist) == 0:
            clog.info('No files exist, as expected')  # Mod on 18/12/2017
            do_run = 1
        else:
            # Mod on 18/12/2017
            if len(f_exist) != len(files):
                clog.warn('Some files do not exist!')
                clog.warn(', '.join(f_noexist))
            else:
                clog.info('All files exist!!!')
    else:
        if len(f_exist) == 0:
            clog.warn('Intermediate files unavailable!!')
        else:
            clog.info('Intermediate files available')
            do_run = 1

    if silent == False:
        clog.info('End check_prefix : ' + systime())

    return do_run
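# Usage sketch (illustrative; the paths, list name, and prefix below are
# hypothetical):
#
#   >>> do_run = check_prefix('rnc', 'arc.lis', list_path='raw/', path='raw/')
#   >>> if do_run:
#   ...     pass  # safe to run the IRAF step that produces the 'rnc' files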
def main(silent=False, verbose=True):
    '''
    Main function to read in Starburst99 models and compute UV lum

    Parameters
    ----------
    silent : boolean
      Turns off stdout messages. Default: False

    verbose : boolean
      Turns on additional stdout messages. Default: True

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 16 June 2019
    '''

    if silent == False:
        log.info('### Begin main : ' + systime())

    Z = [0.05, 0.02, 0.008, 0.004, 0.0004]
    Z = np.array(Z)

    Llambda = np.array([40.12892, 40.21840, 40.25546, 40.27597, 40.30982])
    '''
    From Starburst99 CSF 1 Msun/yr model with Kroupa IMF and Padova stellar
    tracks. Log units of erg/s/Ang.
    Using age of 0.3E9 (except 0.1E9 for 2.5xZ_solar)
    '''

    lambda0 = 1500.0 * u.Angstrom

    fig, ax = plt.subplots(ncols=2, nrows=2)

    # nuL_nu in ax[0][0]
    # nu Lnu = lambda Llambda
    nuLnu = Llambda + np.log10(lambda0.value)
    ylabel = r'$\nu L_{\nu}(1500\AA)$/SFR [erg s$^{-1}$/$M_{\odot}$ yr$^{-1}$]'
    nuLnu_fit_kr, nuLnu_fit_ch = plot_panel(ax[0][0], Z, nuLnu, ylabel,
                                            showlegend=True)

    # L_nu in ax[0][1]
    nu_offset = np.log10(c0.to(u.m / u.s).value / lambda0.to(u.m).value)
    Lnu = nuLnu - nu_offset
    ylabel = r'$L_{\nu}(1500\AA)$/SFR [erg s$^{-1}$ Hz$^{-1}$/$M_{\odot}$ yr$^{-1}$]'
    Lnu_fit_kr, Lnu_fit_ch = plot_panel(ax[0][1], Z, Lnu, ylabel)

    # Plot K98 relation
    ax[0][1].scatter([1.0], -1 * np.log10(1.4e-28), color='green',
                     marker='o', s=50, edgecolor='none', alpha=0.5)
    ax[0][1].annotate('K98', [1.05, -1 * np.log10(1.4e-28 * 0.98)],
                      xycoords='data', fontsize=8, ha='left', va='bottom')

    # Plot H-alpha in ax[1][0]
    LHa = np.array([41.061, 41.257, 41.381, 41.439, 41.536])
    ylabel = r'$L({\rm H}\alpha)$/SFR [erg s$^{-1}$/$M_{\odot}$ yr$^{-1}$]'
    LHa_fit_kr, LHa_fit_ch = plot_panel(ax[1][0], Z, LHa, ylabel,
                                        labelx=True)

    # Plot nuLnu vs LHa in ax[1][1]
    ylabel = r'$\nu L_{\nu}(1500\AA)/L({\rm H}\alpha)$'
    nuLnu_LHa_fit_kr, \
        nuLnu_LHa_fit_ch = plot_panel(ax[1][1], Z, nuLnu - LHa, ylabel,
                                      labelx=True)

    plt.subplots_adjust(left=0.085, right=0.995, bottom=0.07, top=0.98,
                        wspace=0.225, hspace=0.04)
    out_pdf = '/Users/cly/Google Drive/NASA_Summer2015/Plots/sfr_metallicity_plot.pdf'
    fig.set_size_inches(10, 8)
    fig.savefig(out_pdf)

    out_npzfile = out_pdf.replace('.pdf', '_fit.npz')
    np.savez(out_npzfile, nuLnu_fit_kr=nuLnu_fit_kr,
             nuLnu_fit_ch=nuLnu_fit_ch, Lnu_fit_kr=Lnu_fit_kr,
             Lnu_fit_ch=Lnu_fit_ch, LHa_fit_kr=LHa_fit_kr,
             LHa_fit_ch=LHa_fit_ch, nuLnu_LHa_fit_kr=nuLnu_LHa_fit_kr,
             nuLnu_LHa_fit_ch=nuLnu_LHa_fit_ch)

    if silent == False:
        log.info('### End main : ' + systime())
def psf_contours(files=None, path0=None, out_pdf_plot=None, silent=False, verbose=True): ''' Generate contour plots for the MMTCam PSF Parameters ---------- files : list List of files path0 : string Path to files. If not provided it is assumed that [files] has the full path name silent : boolean Turns off stdout messages. Default: False verbose : boolean Turns on additional stdout messages. Default: True Returns ------- Notes ----- Created by Chun Ly, 24 February 2017 - Later mod to handle plotting styles - Later Mod to include header info in annotation - Use filled contours with plasma cmap - Add colorbar Modified by Chun Ly, 25 February 2017 - Add colorbar for last subplot - Get FWHM and FWQM from fwhm_fwqm_image() - Call opt.curve_fit() to fit 2-D Gaussians - Overlay cyan contours for best 2-D fit Modified by Chun Ly, 26 February 2017 - Minor stylistic plotting changes - Call get_mst() to get MST time - Draw center of best fit - Call draw_NE_vector() function Modified by Chun Ly, 26 February 2017 - Use cosmicray_median() to interpolate over CRs - Use psf_im_cr over psf_im - Use uniform_filter() to smooth data with a size of 3 pixels Modified by Chun Ly, 01 March 2017 - Annotate plot with wind data - Change x and y limit to give more room ''' if files == None and path0 == None: log.error('files and path0 keywords not provided') log.error('Exiting!!!') return if silent == False: log.info('### Begin psf_contours: '+systime()) if files == None and path0 != None: files, seqno = get_files(path0) else: if files != None: seqno = get_seqno(files) post_dir0 = path0 + 'post/' if out_pdf_plot == None: out_pdf_plot = path0+'psf_contours.pdf' pp = PdfPages(out_pdf_plot) ncols, nrows = 3, 3 n_files = len(files) for ff in xrange(n_files): psf_file = post_dir0+seqno[ff]+'.fits' psf_im, h0 = fits.getdata(psf_file, header=True) # Identify and interpolate over any extraneous CRs | + on 26/02/2017 psf_im_cr, mask = cosmicray_median(psf_im, thresh=5, rbox=11) psf_im_cr /= np.max(psf_im_cr) psf_im_sm = uniform_filter(psf_im_cr, size=3) # + on 26/02/2017 if ff == 0: shape0 = psf_im_cr.shape x0 = pscale*np.arange(-1*shape0[0]/2.0,shape0[0]/2.0) y0 = pscale*np.arange(-1*shape0[1]/2.0,shape0[1]/2.0) if ff % (ncols*nrows) == 0: fig, ax = plt.subplots(nrows, ncols) row, col = ff / ncols % nrows, ff % ncols # Later mod on 24/02/2017, 26/02/2017 cf = ax[row,col].contourf(x0, y0, psf_im_sm, levels=c_levels, cmap=plt.cm.plasma) # Mod on 25/02/2017 to include colorbar for last subplot # Mod on 26/02/2017 to shrink height if col == ncols-1: cax = fig.add_axes([0.925, 0.76-0.32*row, 0.01, 0.14]) if ff == n_files-1: cax = fig.add_axes([0.605, 0.76-0.32*row, 0.01, 0.14]) if col == ncols-1 or ff == n_files-1: cbar = fig.colorbar(cf, ax=ax[row,col], cax=cax) cbar.ax.tick_params(labelsize=8) if row == nrows-1: ax[row,col].set_xlabel('X [arcsec]') else: if ((n_files-1)-ff) > ncols-1: ax[row,col].set_xticklabels([]) if ff == n_files-1: for cc in range(ncols): ax[row,cc].set_xlabel('X [arcsec]') if col == 0: ax[row,col].set_ylabel('Y [arcsec]') else: ax[row,col].set_yticklabels([]) # Mod on 26/02/2017 t_label = seqno[ff]+'.'+h0['FILTER'] ax[row,col].annotate(t_label, [0.025,0.975], weight='bold', ha='left', va='top', xycoords='axes fraction', fontsize=10) t_nstack = r'N$_{\rm stack}$=%i' % h0['NBRIGHT'] ax[row,col].annotate(t_nstack, [0.975,0.975], weight='bold', ha='right', va='top', xycoords='axes fraction', fontsize=10) # Compute image quality | + on 25/02/2017 f_annot = 'UTC='+h0['UT']+' MST='+get_mst(h0)+'\n' # + on 
26/02/2017 fwhm0, fwqm0 = fwhm_fwqm_size(psf_im_cr, pscale) f_annot += 'Area: FWHM=%.2f", FWQM=%.2f"\n' % (fwhm0, fwqm0) sigG = fwhm0/f_s/pscale.to(u.arcsec).value ini_guess = (1.0, 25, 25, sigG, sigG, 0.0, 0.0) gx = np.linspace(0,shape0[0]-1,shape0[0]) gy = np.linspace(0,shape0[1]-1,shape0[1]) gx, gy = np.meshgrid(gx, gy) psf_im_re = psf_im_cr.reshape(shape0[0]*shape0[1]) popt, pcov = opt.curve_fit(gauss2d, (gx, gy), psf_im_re, p0=ini_guess) FWHMx = popt[3] * f_s * pscale.to(u.arcsec).value FWHMy = popt[4] * f_s * pscale.to(u.arcsec).value f_annot += r'2DFit: FW$_1$=%.2f", FW$_2$=%.2f", ' % (FWHMx,FWHMy) f_annot += r'$\theta$=%.2f' % np.degrees(popt[5]) + '\n' # + on 01/03/2017 f_annot += 'Y1: avg=%.1f, max=%.1f, dir=%.1f\n' % (h0['Y1_AVG'],h0['Y1_MAX'], h0['Y1_DIR']) f_annot += 'Y2: avg=%.1f, max=%.1f, dir=%.1f' % (h0['Y2_AVG'],h0['Y2_MAX'], h0['Y2_DIR']) ax[row,col].set_xlim([-5,5]) ax[row,col].set_ylim([-5,5]) # Mod on 27/02/2017 to have a fill color ax[row,col].annotate(f_annot, [0.025,0.915], xycoords='axes fraction', ha='left', va='top', fontsize=8, zorder=10, bbox=bbox_props) # Overlay contours at 0.25, 0.50, and 0.75 of the peak for 2-D Gaussian fit # + on 25/02/2017 f_data = gauss2d((gx, gy), *popt) f_data /= np.max(f_data) levels = np.array([0.25,0.5,0.75]) CS = ax[row,col].contour(x0, y0, f_data.reshape(shape0[0],shape0[1]), colors='c', linewidths=2, cmap=None, levels=levels.tolist()) if col==0: ax[row,col].clabel(CS, CS.levels, fmt='%.2f', inline=1, inline_spacing=0.25, fontsize=8) # Plot center of fit | + on 26/02/2017 xcen = v_pscale*(-1*shape0[0]/2.0+popt[1]) ycen = v_pscale*(-1*shape0[1]/2.0+popt[2]) ax[row,col].plot(xcen, ycen, 'o', mfc='c', mec='none', alpha=0.5) hdr_annotate(h0, ax[row,col]) # + on 24/02/2017 draw_NE_vector(h0, ax[row,col]) # + on 26/02/2017 if ff == n_files-1: for cc in range(col+1,ncols): ax[row,cc].axis('off') for rr in range(row+1,nrows): for cc in range(ncols): ax[rr,cc].axis('off') if ff % (ncols*nrows) == ncols*nrows-1 or ff == n_files-1: ax[0,1].set_title('MMTCam : '+path0.split('/')[-2], loc=u'center', fontsize=14, weight='bold') subplots_adjust(left=0.025, bottom=0.025, top=0.975, right=0.975, wspace=0.02, hspace=0.02) fig.set_size_inches(8,8) fig.savefig(pp, format='pdf', bbox_inches='tight') #endfor pp.close() if silent == False: log.info('### End psf_contours: '+systime())
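# ---- Illustrative sketch (not part of the original module) ----
# psf_contours() fits gauss2d() via opt.curve_fit with parameters
# (amplitude, xo, yo, sigma_x, sigma_y, theta, offset) and reshapes the
# model afterwards, so the helper must return a flattened image. The real
# gauss2d() lives elsewhere in this module; this version is only a sketch
# consistent with that call signature. The FWHM conversion above also
# implies f_s = 2*sqrt(2*ln(2)) ~ 2.355 (FWHM = f_s * sigma), which is an
# assumed meaning of the module-level f_s.
def _example_gauss2d(xy, amplitude, xo, yo, sigma_x, sigma_y, theta, offset):
    import numpy as np
    x, y = xy
    a = np.cos(theta)**2/(2*sigma_x**2) + np.sin(theta)**2/(2*sigma_y**2)
    b = -np.sin(2*theta)/(4*sigma_x**2) + np.sin(2*theta)/(4*sigma_y**2)
    c = np.sin(theta)**2/(2*sigma_x**2) + np.cos(theta)**2/(2*sigma_y**2)
    g = offset + amplitude*np.exp(-(a*(x-xo)**2 + 2*b*(x-xo)*(y-yo) +
                                    c*(y-yo)**2))
    return g.ravel()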
def make_postage(files=None, path0=None, n_stack=5, size=50, user='******', passwd='', silent=False, verbose=True): ''' Create cut-outs and median stack to produce image of the point-spread function Parameters ---------- files : list List of files path0 : string Path to files. If not provided it is assumed that [files] has the full path name silent : boolean Turns off stdout messages. Default: False verbose : boolean Turns on additional stdout messages. Default: True Returns ------- Notes ----- Created by Chun Ly, 23 February 2017 Modified by Chun Ly, 24 February 2017 - Include FITS header in cutout images Modified by Chun Ly, 26 February 2017 - Use cosmicray_median() to interpolate over CRs - Include number of stack sources in FITS header Modified by Chun Ly, 28 February 2017 - Call query_mmtlog_wind() function - Add user and passwd keyword to pass on Modified by Chun Ly, 1 March 2017 - Check if wind data table is available before running query_mmtlog_wind() - Call wind_avg_max() function ''' if files == None and path0 == None: log.error('files and path0 keywords not provided') log.error('Exiting!!!') return if silent == False: log.info('### Begin make_postage: '+systime()) if files == None and path0 != None: files, seqno = get_files(path0) # path0 = None # Reset since files will have full path else: if files != None: seqno = get_seqno(files) # Query for wind data | + on 28/02/2017 # Mod on 01/03/2017 to check if file exists wind_file = path0+'wind_data.tbl' if not exists(wind_file): u_start = fits.getheader(files[0])['DATE-OBS'] u_stop = fits.getheader(files[-1])['DATE-OBS'] wind_tab0 = query_mmtlog_wind(u_start, u_stop, user=user, passwd=passwd, path0=path0) else: if silent == False: log.info('### File found! Reading : '+wind_file) wind_tab0 = asc.read(wind_file, format='fixed_width_two_line') post_dir0 = path0 + 'post/' if not exists(post_dir0): if silent == False: log.info('Creating : '+post_dir0) os.mkdir(post_dir0) out_cat_dir0 = path0+out_cat_dir for ff in xrange(len(files)): basename = os.path.basename(files[ff]) image, hdr = fits.getdata(files[ff], header=True) mean, median, std = sigma_clipped_stats(image, sigma=2.0, iters=5) image_sub = image - median in_cat = out_cat_dir0+basename.replace('.fits.gz','.tbl').\ replace('.fits','.tbl') s_cat = asc.read(in_cat, format='fixed_width_two_line') # Handle failure if only one source is available and is near edge # + on 28/02/2017 not_edge = np.where((s_cat['xcentroid'] > 50.0) & (s_cat['xcentroid'] <= hdr['NAXIS1']-50) & (s_cat['ycentroid'] > 50.0) & (s_cat['ycentroid'] <= hdr['NAXIS2']-50))[0] s_cat = s_cat[not_edge] # + on 28/02/2017 good = np.where(s_cat['peak'] >= 0.33*max(s_cat['peak']))[0] s_cat = s_cat[good] n_bright = np.min([n_stack,len(s_cat)]) bright = range(n_bright) s_cat = s_cat[bright] x0 = np.round_(s_cat['xcentroid']) y0 = np.round_(s_cat['ycentroid']) im0 = np.zeros( (len(bright), size, size) ) size2d = u.Quantity((size, size), u.pixel) for ii in range(n_bright): pos0 = (x0[ii], y0[ii]) cutout = Cutout2D(image_sub, pos0, size2d, mode='partial', fill_value=np.nan) # Identify and interpolate over CRs cutout_cr, crmask = cosmicray_median(cutout.data, thresh=5, rbox=11) im0[ii] = cutout_cr/np.max(cutout_cr) out_fits = post_dir0+seqno[ff]+'.fits' psf_im = np.nanmedian(im0, axis=0) hdr.set('NBRIGHT', n_bright) # + on 26/02/2017 wind_avg_max(wind_tab0, hdr) # + on 01/03/2017 fits.writeto(out_fits, psf_im, hdr, overwrite=True) if silent == False: log.info('### End make_postage: '+systime())
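# ---- Illustrative usage (not part of the original module) ----
# Hypothetical driver for one night of MMTCam data; the path and the
# password are placeholders.
def _example_make_postage():
    path0 = '/data/MMTCam/2017.02.28/'  # hypothetical path
    make_postage(path0=path0, n_stack=5, size=50, passwd='...')
    psf_contours(path0=path0)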
def main(path0, targets, outfile=None, silent=False, verbose=True): ''' Generate ASCII file summarizing observations Parameters ---------- path0 : str Parent path for all files silent : boolean Turns off stdout messages. Default: False verbose : boolean Turns on additional stdout messages. Default: True Returns ------- Notes ----- Created by Chun Ly, 25 April 2017 Modified by Chun Ly, 5 May 2017 - Handle overwriting file Modified by Chun Ly, 3 June 2017 - Bug fix: Check if hdr_info.QA.tbl exists ''' if silent == False: log.info('### Begin main : ' + systime()) Targets, ObsDate, ObsSet = [], [], [] TotalTime, gratwave, Airmass = [], [], [] TellStar, TellSet, TellAM = [], [], [] # Later Mod on 25/04/2017 for tt in range(len(targets)): tt_path = path0 + targets[tt] + '/' dir_list, list_path = dir_check.main(tt_path, silent=True, verbose=False) cnt = 0 for path in list_path: # Mod on 25/04/2017 txt = targets[tt] if cnt == 0 else '...' Targets.append(txt) #targets[tt]) QA_file = path + '/hdr_info.QA.tbl' # Mod on 03/06/2017 if not exists(QA_file): log.warn('## File not found! ' + QA_file) ObsDate.append('N/A') gratwave.append('N/A') ObsSet.append('N/A') TotalTime.append('N/A') Airmass.append('N/A') TellStar.append('N/A') TellSet.append('N/A') TellAM.append('N/A') else: QA_tab = asc.read(QA_file, format='fixed_width_two_line') # All science targets idx = [ xx for xx in range(len(QA_tab)) if ('obj' in QA_tab['QA'][xx]) or ( 'sky' in QA_tab['QA'][xx]) ] # Later Mod on 25/04/2017 tab_ref = QA_tab[idx][0] t_date = tab_ref['UT_date'].split('T')[0] exptime = tab_ref['exptime'] ObsDate.append(t_date) gratwave.append(tab_ref['gratwave']) ObsSet.append(str(len(idx)) + 'x' + str(exptime) + 's') TotalTime.append('%.2f' % (len(idx) * exptime / 60.0)) AM0 = QA_tab['airmass'][idx] Airmass.append('%.3f-%.3f' % (np.min(AM0), np.max(AM0))) t_idx = [ xx for xx in range(len(QA_tab)) if ('telluric' in QA_tab['QA'][xx]) ] t_names = list(set(QA_tab['object'][t_idx])) telstar, telset, telAM = get_telluric_info( QA_tab, t_idx, t_names) TellStar.append(telstar) TellSet.append(telset) TellAM.append(telAM) # Later + on 25/04/2017 cnt += 1 #endelse #endfor #endfor arr0 = [ Targets, ObsDate, ObsSet, TotalTime, gratwave, Airmass, TellStar, TellSet, TellAM ] names0 = ('Name', 'UT_Date', 'Sequence', 'Int_Time', 'Grating_Wave', 'Airmass', 'Telluric_Star', 'Telluric_Seq', 'Telluric_AM') tab0 = Table(arr0, names=names0) print tab0 if outfile == None: outfile = path0 + 'obs_summary.txt' # Mod on 06/05/2017 if silent == False: stat0 = 'Overwriting : ' if exists(outfile) else 'Writing : ' log.info(stat0 + outfile) asc.write(tab0, output=outfile, format='fixed_width_two_line', overwrite=True) if silent == False: log.info('### End main : ' + systime())
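# ---- Illustrative usage (not part of the original module) ----
# Hypothetical call producing obs_summary.txt for two GNIRS targets under
# a parent path; the target names and path are placeholders.
def _example_obs_summary():
    targets = ['Target_A', 'Target_B']  # hypothetical
    main('/data/GNIRS/2017A/', targets)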
def main(rawdir, line_source='', mylogger=None, silent=False, verbose=True): ''' Main function for wave_cal_script Parameters ---------- rawdir : str Path to raw files. Must end in a '/' line_source : str Type of lines to use for wavelength calibration. Option is either 'arc' or 'OH' silent : boolean Turns off stdout messages. Default: False verbose : boolean Turns on additional stdout messages. Default: True Returns ------- Notes ----- Created by Chun Ly, 7-8 October 2017 Modified by Chun Ly, 8 November 2017 - Specify lampspec and outspec Modified by Chun Ly, 16 November 2017 - Handle line_source == 'OH' - Generalized arrays: arc -> frame - Specify GNIRS log file Modified by Chun Ly, 20 November 2017 - Bug fix: prefix needed for [frames] for line_source == OH Modified by Chun Ly, 9 January 2018 - Import glog and call for stdout and ASCII logging - Allow mylogger keyword Modified by Chun Ly, 30 May 2018 - Change order for nswavelength fitting Modified by Chun Ly, 16 June 2018 - Change coordlist for OH skylines Modified by Chun Ly, 19 June 2018 - Force function to legendre - Set fwidth in nswavelength call to depend on slitwidth Modified by Chun Ly, 21 June 2018 - Include ending print statement Modified by Chun Ly, 10 July 2018 - Modify threshold for OH lines from 50 to 25 ''' rawdir = check_path(rawdir) # Ensure trailing '/' before building log file paths # Mod on 10/01/2018 if type(mylogger) == type(None): logfile = rawdir + 'QA_wave_cal.log' mylogger = glog.log0(logfile)._get_logger() if silent == False: mylogger.info('### Begin main : ' + systime()) timestamp = systime().replace(':', '.') logfile = rawdir + 'gnirs_' + timestamp + '.log' if line_source == '': mylogger.warn("Must specify line_source keyword: ") mylogger.warn("line_source='arc' or line_source='OH'") mylogger.warn("Exiting!!!") return else: out_script = rawdir + 'wave_cal_' + line_source + '.py' if silent == False: mylogger.info('Writing : ' + out_script) f0 = open(out_script, 'w') line0 = [ 'iraf.gemini(_doprint=0)', 'iraf.gemini.gnirs(_doprint=0)', 'iraf.gemini.unlearn()', 'iraf.gemini.gemtools.unlearn()', 'iraf.gemini.gnirs.unlearn()', 'iraf.set(stdimage="imt4096")', 'iraf.gemini.nsheaders("gnirs")' ] #'iraf.gemini.gnirs.logfile = "%s"' % logfile] # + on 16/11/2017 f0.writelines("\n".join(line0) + "\n") if line_source == 'arc': frame_list = rawdir + 'arc.lis' if line_source == 'OH': frame_list = rawdir + 'obj.OH.lis' frames = np.loadtxt(frame_list, dtype=type(str)) if line_source == 'OH': frames = ['rbnc' + file0 for file0 in frames] frame_hdr = fits.getheader(rawdir + frames[0]) crpix = n_sp_pix / 2.0 crval = frame_hdr['gratwave'] * 1e4 # in Angstroms if frame_hdr['FILTER2'] == 'X_G0518': cdelt = -0.094 * 1e4 / n_sp_pix if frame_hdr['FILTER2'] == 'J_G0517': cdelt = -0.113 * 1e4 / n_sp_pix mylogger.info('## CRVAL : %.1f ' % crval) mylogger.info('## CDELT : %.1f CRPIX : %.1f' % (cdelt, crpix)) line1 = ['crval = %f' % crval, 'crpix = %f' % crpix, 'cdelt = %f' % cdelt] f0.writelines("\n".join(line1) + "\n") # Get slit width | + on 19/06/2018 slitwidth = np.float(frame_hdr['SLIT'].split('arcsec')[0]) pscale = np.sqrt(frame_hdr['CD1_1']**2 + frame_hdr['CD2_1']**2) * 3600.0 * u.arcsec fwidth = 1.5 * slitwidth / pscale.to(u.arcsec).value if line_source == 'arc': coordlist = 'gnirs$data/argon.dat' database = 'database/' threshold = 50 if line_source == 'OH': coordlist = rawdir + 'rousselot2000_convl.dat' if not exists(coordlist): log.warn('File does not exist!!! 
: ' + coordlist) database = 'database_OH/' threshold = 25 # Mod on 08/11/2017 line2 = [ "coordlist = '%s'" % coordlist, "database = '%s'" % database, "lampspec = '%s_stack.fits'" % line_source, "outspec = 'w%s_stack.fits'" % line_source, "threshold = %f" % threshold, "logfile = '%s'" % logfile ] # + on 16/11/2017 f0.writelines("\n".join(line2) + "\n") # Mod on 08/11/2017, 16/11/2017 cmd = "iraf.gnirs.nswavelength(lampspec, outprefix='',"+\ "outspectra=outspec, crval=crval, cdelt=cdelt, crpix=crpix, "+\ "coordlist=coordlist, database=database, fl_inter='yes', "+\ "function='legendre', cradius=20, threshold=threshold, "+\ "fwidth=%.1f, " % fwidth +"order=3, logfile=logfile)" f0.write(cmd + '\n') f0.write('print("Completed! Return to other terminal and hit RETURN")\n') f0.close() if silent == False: mylogger.info('### End main : ' + systime())
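# ---- Worked example (not part of the original module) ----
# How the dispersion zero point written into the script comes together,
# assuming n_sp_pix = 1022 spectral pixels (the module-level value may
# differ) and a hypothetical X-band (FILTER2 = 'X_G0518') frame with
# GRATWAVE = 1.094 micron:
def _example_dispersion():
    n_sp_pix = 1022.0                 # assumed spectral axis length
    gratwave = 1.094                  # micron; hypothetical header value
    crval = gratwave * 1e4            # 10940 Ang at the reference pixel
    crpix = n_sp_pix / 2.0            # 511.0
    cdelt = -0.094 * 1e4 / n_sp_pix   # about -0.92 Ang/pix for X band
    return crval, crpix, cdelt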
def find_stars(files=None, path0=None, plot=False, out_pdf_plot=None, silent=False, verbose=True): ''' Find stars in an image and return a catalog of positions Parameters ---------- files : list List of files path0 : string Directory path to files. silent : boolean Turns off stdout messages. Default: False verbose : boolean Turns on additional stdout messages. Default: True Returns ------- Notes ----- Created by Chun Ly, 23 February 2017 - Later modified to plot images and overlay sources - Adjust scale to using IRAF's zscale - Call remove_dup_sources() Modified by Chun Ly, 28 February 2017 - Call check_extended() to get flag indicating extended, flag_ext ''' if silent == False: log.info('### Begin find_stars: '+systime()) if files == None and path0 == None: log.error('files and path0 keywords not provided') log.error('Exiting!!!') return if files == None and path0 != None: files, seqno = get_files(path0) else: if files != None: seqno = get_seqno(files) # Later + on 23/02/2017 out_cat_dir0 = path0+out_cat_dir if not exists(out_cat_dir0): if silent == False: log.info('Creating : '+out_cat_dir0) os.mkdir(out_cat_dir0) s_date = path0.split('/')[-2] # Mod on 25/02/2017 for minor bug if plot == True: if out_pdf_plot == None: out_pdf_plot = path0+'find_stars.pdf' pp = PdfPages(out_pdf_plot) for ff in xrange(len(files)): basename = os.path.basename(files[ff]) image, hdr = fits.getdata(files[ff], header=True) mean, median, std = sigma_clipped_stats(image, sigma=2.0, iters=5) image_sub = image - median if verbose == True: log.info('%s mean/med/sig: %f %f %f' % (seqno[ff], mean, median, std)) # Later Mod on 23/02/2017 to lower threshold daofind = DAOStarFinder(fwhm=8.0, threshold=5.*std) s_cat = daofind(image_sub) # Exclude saturated objects unsat = np.where(s_cat['peak'] <= 60000.0)[0] sat = np.where(s_cat['peak'] > 60000.0)[0] cat_sat = s_cat[sat] s_cat = s_cat[unsat] s_cat.sort(['peak']) s_cat.reverse() # + on 28/02/2017 flag_ext = check_extended(hdr, s_cat, seqno[ff], verbose=False) i_extend = np.where(flag_ext == 1)[0] i_point = np.where(flag_ext == 0)[0] if len(i_extend) > 0: cat_ext = s_cat[i_extend] s_cat = s_cat[i_point] #endif # Later + on 23/02/2017 bad = remove_dup_sources(s_cat) if len(bad) > 0: cat_bad = s_cat[bad] s_cat.remove_rows(bad) if ff == 0 and silent == False: s_cat.pprint() # + on 23/02/2017 out_cat = out_cat_dir0+basename.replace('.fits.gz','.tbl') out_cat = out_cat.replace('.fits','.tbl') s_cat.write(out_cat, format='ascii.fixed_width_two_line', overwrite=True) # Write extended catalog | + on 28/02/2017 if len(i_extend) > 0: out_cat_ext = out_cat.replace('.tbl','.ext.tbl') cat_ext.write(out_cat_ext, format='ascii.fixed_width_two_line', overwrite=True) # Later + on 23/02/2017 if len(bad) > 0 and verbose == True: log.info('The following will be removed : ') cat_bad.pprint() if len(bad) > 0: out_cat_bad = out_cat.replace('.tbl','.bad.tbl') cat_bad.write(out_cat_bad, format='ascii.fixed_width_two_line', overwrite=True) if plot == True: pos0 = (s_cat['xcentroid'], s_cat['ycentroid']) aper0 = CircularAperture(pos0, r=8.) pos0 = (cat_sat['xcentroid'], cat_sat['ycentroid']) sat_aper0 = CircularAperture(pos0, r=8.) # Bug fix: cat_bad is only defined when duplicates were found bad_aper0 = None if len(bad) > 0: pos0 = (cat_bad['xcentroid'], cat_bad['ycentroid']) bad_aper0 = CircularAperture(pos0, r=8.) 
fig, ax = plt.subplots() z1, z2 = zscale.get_limits(image_sub) norm = ImageNormalize(vmin=z1, vmax=z2) ax.imshow(image_sub, cmap='Greys', origin='lower', norm=norm) aper0.plot(color='blue', lw=1.5, alpha=0.5) sat_aper0.plot(color='red', lw=1.5, alpha=0.5) if bad_aper0 != None: bad_aper0.plot(color='magenta', lw=1.5, alpha=0.5) # Label bright sources | + on 28/02/2017 bright = np.where(s_cat['peak'] >= 0.33*max(s_cat['peak']))[0] for nn in bright: t_pos = [s_cat['xcentroid'][nn], s_cat['ycentroid'][nn]+10] ax.annotate(str(nn+1), t_pos, xycoords='data', ha='center', va='bottom', color='b', weight='medium') # Mark sources excluded by extended criteria | + on 28/02/2017 if len(i_extend) > 0: ax.plot(cat_ext['xcentroid'], cat_ext['ycentroid'], 'rx', linewidth=2) t_ann = s_date+'/'+os.path.basename(files[ff]) ax.set_title(t_ann, loc=u'center', fontsize=14, weight='bold') ax.set_xlim([0,hdr['NAXIS1']]) ax.set_ylim([0,hdr['NAXIS2']]) ax.set_xlabel('X [pixels]') ax.set_ylabel('Y [pixels]') fig.set_size_inches(8,8) fig.savefig(pp, format='pdf', bbox_inches='tight') #endfor if plot == True: if silent == False: log.info('## Writing : '+out_pdf_plot+' | '+systime()) pp.close() if silent == False: log.info('### End find_stars: '+systime())
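# ---- Illustrative usage (not part of the original module) ----
# Reading back one of the fixed-width source catalogs written by
# find_stars(); the file name below is a placeholder.
def _example_read_catalog():
    from astropy.io import ascii as asc
    cat = asc.read('/data/MMTCam/2017.02.28/cat/0001.tbl',
                   format='fixed_width_two_line')
    return cat['xcentroid', 'ycentroid', 'peak']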
def query_mmtlog_wind(u_start, u_stop, user='******', passwd='', path0='', silent=False, verbose=True): ''' Query ops.mmto.arizona.edu's log for wind data. Note: This code requires specifying the password Parameters ---------- u_start : string UTC start time. Formatted as 'YYYY-MM-DD HH:MM:SS' u_stop : string UTC stop time. Formatted as 'YYYY-MM-DD HH:MM:SS' user : string Username to login. Default: 'webuser' passwd : string Password for user. Default: '' path0 : string Directory path to files. silent : boolean Turns off stdout messages. Default: False verbose : boolean Turns on additional stdout messages. Default: True Returns ------- tab0 : astropy.table.Table Astropy table containing young and young2 wind data Notes ----- Created by Chun Ly, 28 February 2017 Modified by Chun Ly, 01 March 2017 - Return tab0 ''' if passwd == '': log.error('Must specify password!') log.error('Exiting!!!') return if silent == False: log.info('### Begin query_mmtlog_wind: '+systime()) m_start = Time(u_start).to_datetime(timezone=utc_mst) m_start = m_start.strftime('%Y-%m-%d %H:%M:%S') # Bug fix: strftime format had a stray '%' m_stop = Time(u_stop).to_datetime(timezone=utc_mst) + \ timedelta(seconds=10*60.0) # Add 10 min. to have enough buffer m_stop = m_stop.strftime('%Y-%m-%d %H:%M:%S') conn = pymysql.connect(host='ops.mmto.arizona.edu', user=user, passwd=passwd, db='mmtlogs') cur = conn.cursor() sql1 = "SELECT timestamp,young_wind_speed,young_wind_direction FROM "+\ "young_background_log where timestamp >= '"+m_start+"' AND "+\ "timestamp < '"+m_stop+"'" n_entries = cur.execute(sql1) results1 = cur.fetchall() sql2 = sql1.replace('young', 'young2') cur.execute(sql2) results2 = cur.fetchall() time0 = np.repeat('XXXX-XX-XX XX:XX:XX', n_entries) speed1 = np.zeros(n_entries) direct1 = np.zeros(n_entries) speed2 = np.zeros(n_entries) direct2 = np.zeros(n_entries) for nn in xrange(n_entries): time0[nn] = results1[nn][0].isoformat() speed1[nn] = results1[nn][1] direct1[nn] = results1[nn][2] speed2[nn] = results2[nn][1] direct2[nn] = results2[nn][2] outfile = path0+'wind_data.tbl' vec0 = [time0, speed1, direct1, speed2, direct2] names0 = ('MST_time','speed1', 'direct1', 'speed2', 'direct2') tab0 = Table(vec0, names=names0) if silent == False: log.info('## Writing : '+outfile) asc.write(tab0, outfile, format='fixed_width_two_line', overwrite=True) if silent == False: log.info('### End query_mmtlog_wind: '+systime()) return tab0
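# ---- Illustrative usage (not part of the original module) ----
# Hypothetical query spanning one night; the password is intentionally
# elided. The call writes wind_data.tbl under path0 and returns the table.
def _example_wind_query():
    tab0 = query_mmtlog_wind('2017-03-01 02:00:00', '2017-03-01 13:00:00',
                             user='webuser', passwd='...',
                             path0='/data/MMTCam/2017.02.28/')
    return tab0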