def f_simple_observing_log(offset=54034):
    """ Makes a 1D observing log separated by JHK but not by tile. """

    fig = plt.figure()
    sub1 = plt.subplot(3, 1, 1)

    j_table = band_cut(variables_photometry, 'j')
    h_table = band_cut(variables_photometry, 'h')
    k_table = band_cut(variables_photometry, 'k')

    j_dates = np.array(list(set(j_table.MEANMJDOBS)))
    h_dates = np.array(list(set(h_table.MEANMJDOBS)))
    k_dates = np.array(list(set(k_table.MEANMJDOBS)))

    # Offset the J and K rows by 0.25 (written as a float: the literal
    # `1/4` is zero under Python 2 integer division).
    plt.plot(j_dates - offset, np.ones(len(j_dates)) + 0.25, 'b.')
    plt.plot(h_dates - offset, np.ones(len(h_dates)), 'g.')
    plt.plot(k_dates - offset, np.ones(len(k_dates)) - 0.25, 'r.')

    plt.ylim(1/3., 2 - 1/3.)
    plt.xticks([54101-offset, 54282-offset, 54466-offset,
                54648-offset, 54832-offset],
               ["Jan 2007", "July 2007", "Jan 2008",
                "July 2008", "Jan 2009"],
               rotation=30, fontsize=18)

    plt.setp(sub1.get_yticklabels(), visible=False)

    plt.show()
    return fig

def f_observing_log(title="Observing log for each tile, for each band"):
    """ Makes a graphical observing log. """

    tile_tables = filter_by_tile()[0]

    fig = plt.figure()

    for i, tile_table in enumerate(tile_tables):

        # Slice each tile's photometry by band.
        j_tile_table = band_cut(tile_table, 'j')
        h_tile_table = band_cut(tile_table, 'h')
        k_tile_table = band_cut(tile_table, 'k')

        j_dates = list(set(j_tile_table.MEANMJDOBS))
        h_dates = list(set(h_tile_table.MEANMJDOBS))
        k_dates = list(set(k_tile_table.MEANMJDOBS))

        # 0.25 offsets (floats, not `1/4`, which is zero in Python 2)
        # separate the J, H, K rows within each tile.
        plt.plot(j_dates, 1.25 + i*np.ones(len(j_dates)), 'b.')
        plt.plot(h_dates, 1 + i*np.ones(len(h_dates)), 'g.')
        plt.plot(k_dates, 0.75 + i*np.ones(len(k_dates)), 'r.')

    plt.xlabel("Modified Julian Date")
    plt.ylabel("Tile #", rotation='horizontal')
    plt.title(title)
    plt.ylim(1 - 1/3., 16 + 1/3.)

    return fig

def get_columns(self, band, max_flag=0, min_flag=0):
    """
    Returns relevant columns for a given photometry band.

    `band` must be 'j', 'h', or 'k'.
    """

    if band.lower() not in ('j', 'h', 'k'):
        raise ValueError(
            "Invalid band: {0} not in ('j', 'h', 'k')".format(band.lower()))

    b_table = band_cut(self.s_table, band, max_flag=max_flag,
                       min_flag=min_flag)

    columns = {}

    columns['date'] = b_table['MEANMJDOBS'] - self.date_offset
    columns['mag'] = b_table['{0}APERMAG3'.format(band.upper())]
    columns['err'] = b_table['{0}APERMAG3ERR'.format(band.upper())]
    columns['flag'] = b_table['{0}PPERRBITS'.format(band.upper())]
    try:
        columns['grade'] = b_table['{0}GRADE'.format(band.upper())]
    except Exception:
        # Ungraded tables lack the GRADE column; fall back to zeros so
        # downstream code can treat every band uniformly.
        columns['grade'] = np.zeros_like(columns['mag'])

    return columns

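# Hedged usage sketch (not part of the original module): plot one band's
# light curve via get_columns. `lightcurve` stands for an instance of the
# (not-shown) class that defines get_columns, with the `s_table` and
# `date_offset` attributes the method assumes; the name is illustrative.
def example_plot_band(lightcurve, band='k'):
    """Illustrative sketch: errorbar plot of one band's light curve."""
    cols = lightcurve.get_columns(band, max_flag=256)
    plt.errorbar(cols['date'], cols['mag'], yerr=cols['err'], fmt='k.')
    plt.gca().invert_yaxis()  # magnitudes: brighter is up
    plt.xlabel("MJD - date_offset")
    plt.ylabel("%s magnitude" % band.upper())
    return cols
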
def get_colorcolor_columns(self, max_flag=256, min_flag=0):
    """ Returns relevant columns for the J-H, H-K color-color pair. """

    colorcolor_table = band_cut(
        band_cut(band_cut(self.s_table, 'k', max_flag=max_flag),
                 'h', max_flag=max_flag),
        'j', max_flag=max_flag)

    columns = {}

    columns['date'] = colorcolor_table['MEANMJDOBS'] - self.date_offset
    columns['jmh'] = colorcolor_table['JMHPNT']
    columns['hmk'] = colorcolor_table['HMKPNT']
    columns['jmh_err'] = colorcolor_table['JMHPNTERR']
    columns['hmk_err'] = colorcolor_table['HMKPNTERR']

    return columns

def get_colormag_columns(self, band, max_flag=256, min_flag=0):
    """
    Returns relevant columns for a color-magnitude pair.

    `band` must be 'jjh' (J vs. J-H) or 'khk' (K vs. H-K).
    """

    if band.lower() not in ('jjh', 'khk'):
        raise ValueError(
            "Invalid color-mag combination: {0} not in "
            "('jjh', 'khk')".format(band.lower()))

    mag, blue, red = band.lower()

    colormag_table = band_cut(band_cut(self.s_table, red, max_flag=max_flag),
                              blue, max_flag=max_flag)

    columns = {}

    columns['date'] = colormag_table['MEANMJDOBS'] - self.date_offset
    columns['mag'] = colormag_table['{0}APERMAG3'.format(mag.upper())]
    columns['color'] = colormag_table['{0}M{1}PNT'.format(blue.upper(),
                                                          red.upper())]
    columns['mag_err'] = colormag_table['{0}APERMAG3ERR'.format(mag.upper())]
    columns['color_err'] = colormag_table['{0}M{1}PNTERR'.format(
        blue.upper(), red.upper())]

    return columns

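# Hedged usage sketch (not part of the original module): plot a star's
# K vs. H-K track from get_colormag_columns. As above, `lightcurve` is an
# illustrative name for an instance of the class that defines this method.
def example_khk_track(lightcurve):
    """Illustrative sketch: K vs. H-K color-magnitude track."""
    cols = lightcurve.get_colormag_columns('khk', max_flag=256)
    plt.errorbar(cols['color'], cols['mag'],
                 xerr=cols['color_err'], yerr=cols['mag_err'], fmt='k.')
    plt.gca().invert_yaxis()  # brighter magnitudes up
    plt.xlabel("H-K color")
    plt.ylabel("K magnitude")
    return cols
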
def star_slope(table, sid, xband='hmk', yband='k', flags=0,
               verbose=True, null=np.double(-9.99999488e+08)):
    """
    Calculates the color slope, given an input table and ID.

    Parameters
    ----------
    table : atpy.Table
        Table with time-series photometry
    sid : int
        13-digit WFCAM source ID of star to plot
    xband : {'jmh', 'hmk'}
        The x-axis array to use for the slope. Default 'hmk'.
    yband : {'j', 'jmh', 'k'}
        The y-axis array to use for the slope. Default 'k'.
    flags : int, optional
        Maximum ppErrBit quality flags to use (default 0).
    verbose : bool, optional
        Whether to print a verbose output. Default True.

    Returns
    -------
    slope : float
        Slope (in rise/run) of the linear fit.
    intercept : float
        Y-value where the linear fit intercepts the Y-axis.
    slope_error : float
        The standard error on the fitted slope: an indication of
        fit quality.
    """

    if (xband not in ['jmh', 'hmk']) or (yband not in ['j', 'jmh', 'k']):
        raise ValueError("Incorrect argument to `xband` or `yband`")

    # Map band keywords onto their photometry column names.
    band_dict = {'j': 'JAPERMAG3', 'k': 'KAPERMAG3',
                 'jmh': 'JMHPNT', 'hmk': 'HMKPNT'}

    # Loading data
    s_table = data_cut(table, sid, season=0)

    if len(s_table) == 0:
        print "no data here"
        return

    j_table = band_cut(s_table, 'j', max_flag=flags)
    h_table = band_cut(s_table, 'h', max_flag=flags)

    jh_table = band_cut(j_table, 'h', max_flag=flags)
    hk_table = band_cut(h_table, 'k', max_flag=flags)
    jhk_table = band_cut(jh_table, 'k', max_flag=flags)

    if (xband, yband) == ('hmk', 'k'):
        data = hk_table
    elif (xband, yband) == ('jmh', 'j'):
        data = jh_table
    elif (xband, yband) == ('hmk', 'jmh'):
        data = jhk_table
    else:
        raise ValueError(
            "Incorrect combination of `xband`, `yband`: use "
            "('hmk', 'k'), ('jmh', 'j'), or ('hmk', 'jmh').")

    x_array = data.data[band_dict[xband]]
    xerr_array = data.data[band_dict[xband] + "ERR"]
    y_array = data.data[band_dict[yband]]
    yerr_array = data.data[band_dict[yband] + "ERR"]

    return slope(x_array, y_array, xerr_array, yerr_array, verbose)

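# Hedged usage sketch (not part of the original module): fit the K vs. H-K
# color slope for one star. The table and source-ID arguments follow the
# conventions used elsewhere in this file but are illustrative.
def example_khk_slope(table, sid):
    """Illustrative sketch: K vs. H-K slope fit for star `sid`."""
    result = star_slope(table, sid, xband='hmk', yband='k',
                        flags=256, verbose=False)
    if result is not None:
        m, b, m_err = result
        print("K vs. H-K slope: %.3f +/- %.3f" % (m, m_err))
    return result
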
def Stetson_machine(s_table, flags=0):
    """
    Computes the Stetson index on the best combination of bands.

    There's a lot of internal logic here on how exactly to accomplish
    that, and especially which version of the Stetson index to use.

    Parameters
    ----------
    s_table : atpy.Table
        Table with time-series photometry of one star
    flags : int, optional
        Maximum ppErrBit quality flags to use (default 0)

    Returns
    -------
    Stetson : float
        The Stetson variability index (either "I" or "J" depending
        on whether 2 or 3 bands were used).
    choice : str {'jhk', 'hk', 'jh', 'jk'}
        Which combination of bands is optimal.
    stetson_nights : int
        How many nights have all of the optimal combination (and
        therefore, how many nights' worth of data go into the
        Stetson calculation).
    """

    # First, slice the data to find how many nights have a given
    # combination of bands.

    j_table = band_cut(s_table, 'j', max_flag=flags)
    h_table = band_cut(s_table, 'h', max_flag=flags)
    k_table = band_cut(s_table, 'k', max_flag=flags)

    jh_table = band_cut(j_table, 'h', max_flag=flags)
    hk_table = band_cut(h_table, 'k', max_flag=flags)
    jk_table = band_cut(j_table, 'k', max_flag=flags)
    jhk_table = band_cut(jh_table, 'k', max_flag=flags)

    # Then we'll measure how many nights are in each combination.

    jh_len = len(jh_table)
    hk_len = len(hk_table)
    jk_len = len(jk_table)
    jhk_len = len(jhk_table)

    # The combination with the most nights wins, with the three-band
    # count weighted by a factor of two. Ties are broken in the order:
    # JHK, HK, JH, JK.

    max_len = max(jh_len, hk_len, jk_len, jhk_len * 2)

    # Now note the winning choice and compute the relevant index.
    # If there are no simultaneous observations, choose the
    # most-observed band and compute a single-band 'Stetson'.

    if max_len == 0:
        j_len = len(j_table)
        h_len = len(h_table)
        k_len = len(k_table)

        max_len_single = max(j_len, h_len, k_len)

        if k_len == max_len_single:
            choice = 'k'
            vcol = k_table.KAPERMAG3
            verr = k_table.KAPERMAG3ERR
        elif h_len == max_len_single:
            choice = 'h'
            vcol = h_table.HAPERMAG3
            verr = h_table.HAPERMAG3ERR
        else:
            choice = 'j'
            vcol = j_table.JAPERMAG3
            verr = j_table.JAPERMAG3ERR

        Stetson = stetson.S_singleton(vcol, verr)
        stetson_nights = max_len_single

    elif 2 * jhk_len == max_len:
        choice = 'jhk'

        jcol = jhk_table.JAPERMAG3; jerr = jhk_table.JAPERMAG3ERR
        hcol = jhk_table.HAPERMAG3; herr = jhk_table.HAPERMAG3ERR
        kcol = jhk_table.KAPERMAG3; kerr = jhk_table.KAPERMAG3ERR

        Stetson = stetson.S(jcol, jerr, hcol, herr, kcol, kerr)
        stetson_nights = jhk_len

    else:
        if hk_len == max_len:
            choice = 'hk'
            bcol = hk_table.HAPERMAG3; berr = hk_table.HAPERMAG3ERR
            vcol = hk_table.KAPERMAG3; verr = hk_table.KAPERMAG3ERR
        elif jh_len == max_len:
            choice = 'jh'
            bcol = jh_table.JAPERMAG3; berr = jh_table.JAPERMAG3ERR
            vcol = jh_table.HAPERMAG3; verr = jh_table.HAPERMAG3ERR
        else:  # jk_len == max_len
            choice = 'jk'
            bcol = jk_table.JAPERMAG3; berr = jk_table.JAPERMAG3ERR
            vcol = jk_table.KAPERMAG3; verr = jk_table.KAPERMAG3ERR

        Stetson = stetson.I(bcol, berr, vcol, verr)
        stetson_nights = max_len

    # Finally, return S, the band choice, and how many nights go into
    # the Stetson calculation for that choice.

    return (Stetson, choice, stetson_nights)

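# Hedged usage sketch (not part of the original module): compute the Stetson
# index for a single star. `data_cut` and the argument names mirror the
# conventions used elsewhere in this file; the call is illustrative.
def example_stetson(table, sid):
    """Illustrative sketch: Stetson index for star `sid`, flag-free data."""
    s_table = data_cut(table, sid, season=0)
    S, choice, n_nights = Stetson_machine(s_table, flags=0)
    print("Stetson index %.2f from bands '%s' over %d nights"
          % (S, choice, n_nights))
    return S
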
def statcruncher(table, sid, season=0, rob=True, per=True, graded=False,
                 colorslope=False, flags=0):
    """
    Calculates several statistical properties for a given star.

    Will work with "lonely" datapoints (i.e. not all JHK mags are
    well-defined). Optionally works with graded data, too!

    Parameters
    ----------
    table : atpy.Table
        Table with time-series photometry
    sid : int
        13-digit WFCAM source ID of star to plot
    season : int, optional
        Which observing season of our dataset (1, 2, 3, or all).
        Any value that is not the integers (1, 2, or 3) will be
        treated as "no season", and no time-cut will be made.
        Note that this is the default behavior.
    rob : bool, optional
        Use robust statistics, in addition to normal ones?
        (takes longer, default True)
    per : bool, optional
        Run period-finding? Uses fast chi-squared and lomb-scargle.
        (takes longer, default True)
    graded : bool, optional
        Also calculate Stetson indices using quality grades as weights?
        Uses stetson_graded; requires that the data has been graded by
        night_cleanser.null_cleanser_grader().
    colorslope : bool, optional
        Calculate color slopes? Runs them over (J vs. J-H, K vs. H-K,
        J-H vs. H-K). Make sure your data has been
        color-error-corrected! Default False.
    flags : int, optional
        Maximum ppErrBit quality flags to use (default 0)

    Returns
    -------
    ret : data structure
        Contains the computed values. They can be accessed as
        attributes (e.g., "ret.j_mean" or "ret.Stetson").
    """

    s_table = data_cut(table, sid, season=season)

    if len(s_table) < 1:
        print "no data for %d!" % sid
        return None

    # First, let's compute single-band statistics. This will require
    # separate cuts on each band.

    full_jtable = band_cut(s_table, 'j')
    full_htable = band_cut(s_table, 'h')
    full_ktable = band_cut(s_table, 'k')

    j_table = band_cut(s_table, 'j', max_flag=flags)
    h_table = band_cut(s_table, 'h', max_flag=flags)
    k_table = band_cut(s_table, 'k', max_flag=flags)

    jmh_table = band_cut(j_table, 'h', max_flag=flags)
    hmk_table = band_cut(h_table, 'k', max_flag=flags)

    # jhk_table is used only for colorslope.
    jhk_table = band_cut(jmh_table, 'k', max_flag=flags)

    # Get a date (x-axis) for each band.
    jdate = j_table.MEANMJDOBS
    hdate = h_table.MEANMJDOBS
    kdate = k_table.MEANMJDOBS
    jmhdate = jmh_table.MEANMJDOBS
    hmkdate = hmk_table.MEANMJDOBS

    # Get a magnitude and magnitude error for each band.
    jcol = j_table.JAPERMAG3; jerr = j_table.JAPERMAG3ERR
    hcol = h_table.HAPERMAG3; herr = h_table.HAPERMAG3ERR
    kcol = k_table.KAPERMAG3; kerr = k_table.KAPERMAG3ERR
    jmhcol = jmh_table.JMHPNT; jmherr = jmh_table.JMHPNTERR
    hmkcol = hmk_table.HMKPNT; hmkerr = hmk_table.HMKPNTERR

    # Get the RA and DEC columns, checking for sensible values.
    racol = s_table.RA[(s_table.RA > 0) & (s_table.RA < 7)]
    decol = s_table.DEC[(s_table.DEC > -4) & (s_table.DEC < 4)]

    # Make an empty data structure and just assign it information, then
    # return the object itself! Then there's no more worrying about
    # indices.
    class Empty():
        pass

    ret = Empty()

    # How many nights have observations in each band?
    ret.N_j = len(j_table)
    ret.N_h = len(h_table)
    ret.N_k = len(k_table)

    # What's the distribution of flags and nights?
    js = full_jtable.JPPERRBITS
    hs = full_htable.HPPERRBITS
    ks = full_ktable.KPPERRBITS

    ret.N_j_noflag = len(js[js == 0])
    ret.N_h_noflag = len(hs[hs == 0])
    ret.N_k_noflag = len(ks[ks == 0])

    ret.N_j_info = len(js[(js < 256) & (js > 0)])
    ret.N_h_info = len(hs[(hs < 256) & (hs > 0)])
    ret.N_k_info = len(ks[(ks < 256) & (ks > 0)])

    ret.N_j_warn = len(js[js >= 256])
    ret.N_h_warn = len(hs[hs >= 256])
    ret.N_k_warn = len(ks[ks >= 256])

    # Mean position of this source.
    ret.RA = racol.mean()
    ret.DEC = decol.mean()

    # Calculate the Stetson index...
    S, choice, stetson_nights = Stetson_machine(s_table, flags)

    ret.Stetson = S
    ret.Stetson_choice = choice
    ret.Stetson_N = stetson_nights

    if graded:
        # Calculate the graded Stetson index...
        g_S, g_choice, g_stetson_nights = (
            graded_Stetson_machine(s_table, flags))

        ret.graded_Stetson = g_S
        ret.graded_Stetson_choice = g_choice
        ret.graded_Stetson_N = g_stetson_nights

    # Calculate PSTAR parameters.
    ret.pstar_mean = s_table.PSTAR.mean()
    ret.pstar_median = np.median(s_table.PSTAR)
    ret.pstar_rms = s_table.PSTAR.std()

    # Create parallel data structures for each band, so we can iterate.
    ret.j = Empty(); ret.j.data = jcol; ret.j.err = jerr; ret.j.date = jdate
    ret.h = Empty(); ret.h.data = hcol; ret.h.err = herr; ret.h.date = hdate
    ret.k = Empty(); ret.k.data = kcol; ret.k.err = kerr; ret.k.date = kdate
    ret.jmh = Empty(); ret.jmh.data = jmhcol; ret.jmh.err = jmherr
    ret.hmk = Empty(); ret.hmk.data = hmkcol; ret.hmk.err = hmkerr
    ret.jmh.date = jmhdate; ret.hmk.date = hmkdate

    ret.j.N = ret.N_j; ret.h.N = ret.N_h; ret.k.N = ret.N_k
    ret.jmh.N = len(jmh_table); ret.hmk.N = len(hmk_table)

    bands = [ret.j, ret.h, ret.k, ret.jmh, ret.hmk]

    for b in bands:
        # If this band is empty, don't try to do the following
        # assignments.
        if b.N == 0:
            continue

        b.rchi2 = reduced_chisq(b.data, b.err)

        b.mean = b.data.mean()
        b.median = np.median(b.data)
        b.rms = b.data.std()
        b.min = b.data.min()
        b.max = b.data.max()
        b.range = b.max - b.min

        b.err_mean = b.err.mean()
        b.err_median = np.median(b.err)
        b.err_rms = b.err.std()
        b.err_min = b.err.min()
        b.err_max = b.err.max()
        b.err_range = b.err_max - b.err_min

        # Robust quantifiers simply have an "r" at the end of their names.
        if rob:
            b.datar, b.indr = rb.removeoutliers(b.data, 3, niter=2,
                                                retind=True)
            b.errr = b.err[b.indr]

            b.meanr = rb.meanr(b.data)
            b.medianr = rb.medianr(b.data)
            b.rmsr = rb.stdr(b.data)
            b.minr = b.datar.min()
            b.maxr = b.datar.max()
            b.ranger = b.maxr - b.minr

            b.err_meanr = b.errr.mean()
            b.err_medianr = np.median(b.errr)
            b.err_rmsr = b.errr.std()
            b.err_minr = b.errr.min()
            b.err_maxr = b.errr.max()
            b.err_ranger = b.err_maxr - b.err_minr

        # Period finding is a little dodgy still, and might take forever.
        if per and b.N > 2:
            hifac = lsp_tuning(b.date)
            b.lsp = lsp(b.date, b.data, 6., hifac)

            Jmax = lsp_mask(b.lsp[0], b.lsp[1])
            b.lsp_per = 1. / b.lsp[0][Jmax]
            b.lsp_pow = b.lsp[1][Jmax]
            b.lsp_sig = getSignificance(b.lsp[0], b.lsp[1], b.lsp[2],
                                        6.)[Jmax]

            best_freq, chimin = test_analyze(b.date, b.data, b.err,
                                             ret_chimin=True)
            b.fx2_per, b.fx2_chimin = 1. / best_freq, chimin

    if colorslope:
        # J vs. J-H : use jmh_table exclusively.
        (ret.jjh_slope, a, ret.jjh_slope_err) = (
            slope(jmh_table.JMHPNT, jmh_table.JAPERMAG3,
                  jmh_table.JMHPNTERR, jmh_table.JAPERMAG3ERR,
                  verbose=False))
        # K vs. H-K : use hmk_table exclusively.
        (ret.khk_slope, a, ret.khk_slope_err) = (
            slope(hmk_table.HMKPNT, hmk_table.KAPERMAG3,
                  hmk_table.HMKPNTERR, hmk_table.KAPERMAG3ERR,
                  verbose=False))
        # J-H vs. H-K : use jhk_table exclusively.
        (ret.jhk_slope, a, ret.jhk_slope_err) = (
            slope(jhk_table.HMKPNT, jhk_table.JMHPNT,
                  jhk_table.HMKPNTERR, jhk_table.JMHPNTERR,
                  verbose=False))

    # (Tracking the per-band pp_max via the flagged tables is slated
    # for a re-implementation.)

    return ret

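# Hedged usage sketch (not part of the original module): crunch statistics
# for one star and read off a few results. The table and source-ID names
# are illustrative assumptions.
def example_statcruncher(table, sid):
    """Illustrative sketch: run statcruncher and print a summary line."""
    ret = statcruncher(table, sid, season=0, rob=True, per=False,
                       colorslope=True, flags=256)
    if ret is not None and ret.N_k > 0:
        print("K mean %.2f, K rms %.3f, Stetson (%s) = %.2f"
              % (ret.k.mean, ret.k.rms, ret.Stetson_choice, ret.Stetson))
    return ret
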
def f_comparison_observing_log():
    """ Zoomed-out timeline comparing us to CHS2001 and YSOVAR time coverage. """

    fig = plt.figure(figsize=(9, 2.5))

    chs01_start = 51580  # 2000 Feb 6 in MJD
    chs01_end = 51639    # 2000 Apr 8 in MJD

    ysovar_spitzer_start = 55126  # 2009 Oct 23 in MJD
    ysovar_spitzer_end = 55163    # 2009 Dec 1 in MJD

    ysovar_ukirt_start = 55123  # 2009 Oct 20 in MJD
    ysovar_ukirt_end = 55185    # 2009 Dec 22 in MJD

    ysovar_cfht_start = 55130  # 2009 Oct 27 in MJD
    ysovar_cfht_end = 55142    # 2009 Nov 8 in MJD

    j_wavelength = 1.1  # microns
    h_wavelength = 1.6
    ks_wavelength = 2.15
    k_wavelength = 2.2
    irac1_wavelength = 3.6
    irac2_wavelength = 4.5

    j_table = band_cut(variables_photometry, 'j')
    h_table = band_cut(variables_photometry, 'h')
    k_table = band_cut(variables_photometry, 'k')

    wfcam_j_dates = np.array(list(set(j_table.MEANMJDOBS)))
    wfcam_h_dates = np.array(list(set(h_table.MEANMJDOBS)))
    wfcam_k_dates = np.array(list(set(k_table.MEANMJDOBS)))

    # Our observations
    plt.plot(wfcam_j_dates, j_wavelength*np.ones_like(wfcam_j_dates), 'b.')
    plt.plot(wfcam_h_dates, h_wavelength*np.ones_like(wfcam_h_dates), 'g.')
    plt.plot(wfcam_k_dates, k_wavelength*np.ones_like(wfcam_k_dates), 'r.')

    # Carpenter et al. 2001 (CHS2001)
    plt.plot([chs01_start, chs01_end], [j_wavelength]*2, 'b', lw=4)
    plt.plot([chs01_start, chs01_end], [h_wavelength]*2, 'g', lw=4)
    plt.plot([chs01_start, chs01_end], [ks_wavelength]*2, 'r', lw=4)

    # YSOVAR: Spitzer
    plt.plot([ysovar_spitzer_start, ysovar_spitzer_end],
             [irac1_wavelength]*2, 'm', lw=4)
    plt.plot([ysovar_spitzer_start, ysovar_spitzer_end],
             [irac2_wavelength]*2, 'k', lw=4)

    # YSOVAR: UKIRT
    plt.plot([ysovar_ukirt_start, ysovar_ukirt_end],
             [j_wavelength]*2, 'b', lw=4)

    # YSOVAR: CFHT
    plt.plot([ysovar_cfht_start, ysovar_cfht_end],
             [j_wavelength]*2, 'b', lw=4)
    plt.plot([ysovar_cfht_start, ysovar_cfht_end],
             [ks_wavelength]*2, 'r', lw=4)

    plt.ylabel("Wavelength (microns)")
    plt.ylim(6, 0.1)

    xticks_values = [51544, 51910, 52275, 52640, 53005, 53371,
                     53736, 54101, 54466, 54832, 55197]
    xticklabels_values = ["20%02d" % x for x in range(11)]

    plt.xticks(xticks_values[::2], xticklabels_values[::2])

    plt.title("History of infrared monitoring campaigns in the ONC")

    plt.text(51550, 0.8, "Carpenter et al. 2001")
    plt.text(55050, 0.8, "YSOVAR$^a$")
    plt.text(54250, 0.8, "This paper")

    plt.text(55220, 4.5, "[4.5]", color='k', size=14,
             verticalalignment='center')
    plt.text(55220, 3.6, "[3.6]", color='m', size=14,
             verticalalignment='center')
    plt.text(55200, 2.5, "$K_s$", color='r', size=16)
    plt.text(55230, 1.3, "$J$", color='b', size=16)

    plt.text(53900, 2.5, "$K$", color='red', size=16)
    plt.text(53900, 1.8, "$H$", color='green', size=16)
    plt.text(53915, 1.1, "$J$", color='blue', size=16)

    plt.show()
    return fig

def quadrant_corrector(data, j_constants, h_constants, k_constants,
                       min_grade=None, max_grade=None):
    """
    Corrects magnitudes using a network of constant stars.

    Parameters
    ----------
    data : atpy.Table
        Table with UKIRT time-series photometry.
    j_constants, h_constants, k_constants : atpy.Table
        Tables with 'spreadsheet' information on J, H, and K constants.
        Require robust statistical information. Must be pre-cleaned
        (we'll use all the constants you give us).
    min_grade, max_grade : float, optional
        What range of grades to correct data for. Default is all of
        them (grades run from 0 to 1).

    Returns
    -------
    new_data : atpy.Table
        The corrected data table, with photometry corrected using
        the network.
    """

    # Interpret "no bounds given" as "correct every grade"; this also
    # keeps the comparisons and the debug print below well-defined.
    if min_grade is None:
        min_grade = 0.0
    if max_grade is None:
        max_grade = 1.0

    # Make a copy of the data table.
    new_data = data.where(data.SOURCEID != 0)

    # Glue your set of constant lists together.
    cdict = {'j': j_constants, 'h': h_constants, 'k': k_constants}

    for band in ['j', 'h', 'k']:

        # Grab the timestamps we'll be iterating over.
        bdata = band_cut(data, band, max_flag=256)

        col = band.upper() + "APERMAG3"
        bandmean = band.lower() + "_meanr"
        bandgrade = band.upper() + "GRADE"

        timestamp_list = list(set(list(bdata.MEANMJDOBS)))
        timestamp_list.sort()

        for date in timestamp_list:

            this_night = bdata.where(bdata.MEANMJDOBS == date)

            # Can we skip this night due to a sufficient grade?
            print("min_grade: %f, max_grade: %f, grade: %f" %
                  (min_grade, max_grade, this_night.data[bandgrade][0]))
            if min_grade == 0.0 and max_grade == 1.0:
                pass
            elif ((this_night.data[bandgrade][0] < min_grade) or
                  (this_night.data[bandgrade][0] > max_grade)):
                print "Night %s skipped re: quality" % str(date)
                continue

            # First, grab the sourceids that are in this night.
            source_list = this_night.SOURCEID
            ra_list = this_night.RA
            dec_list = this_night.DEC

            print str(len(source_list)) + " sources on night %f" % date

            # And also grab the constants that are in this night!
            ref_phot = cdict[band].where(
                np.in1d(cdict[band].SOURCEID, source_list))

            for s, ra, dec in zip(source_list, ra_list, dec_list):

                # Find four nearby constants (one in each quadrant).
                # The positional outputs are renamed so they don't
                # shadow ra_list and dec_list from the enclosing loop.
                (sid_list, offset_list,
                 match_ra_list, match_dec_list) = quadrant_match(
                    np.degrees(ra), np.degrees(dec),
                    ref_phot, max_match=600)

                # Get the deviation of each constant.
                deviation = []

                for sid in sid_list:
                    this_stars_phot = this_night.where(
                        this_night.SOURCEID == sid)
                    this_stars_avg = ref_phot.where(
                        ref_phot.SOURCEID == sid)

                    deviation.append(this_stars_phot.data[col][0] -
                                     this_stars_avg.data[bandmean][0])

                # Calculate the relevant offset.
                adjustment = magnitude_adjustment(deviation, offset_list)

                # Apply the offset to our working table.
                new_data.data[col][(new_data.SOURCEID == s) &
                                   (new_data.MEANMJDOBS == date)] += adjustment

                if s % 23 == 0:
                    print ("%d: adjusted by %f mag (%s) on night %d "
                           "using %d ref. stars" %
                           (s, adjustment, band.upper(), date,
                            len(offset_list)))

    return new_data

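# Hedged usage sketch (not part of the original module): apply the
# constant-star network correction only to nights graded at or below 0.7.
# The argument tables are assumed to follow the formats described in
# quadrant_corrector's docstring; all names here are illustrative.
def example_quadrant_correction(raw_data, j_constants, h_constants,
                                k_constants):
    """Illustrative sketch: correct photometry on poorly graded nights."""
    return quadrant_corrector(raw_data, j_constants, h_constants,
                              k_constants, min_grade=0.0, max_grade=0.7)
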
def exposure_grader(data, spreadsheet, band, min_mag=17):
    """
    Investigates the quality of all exposures by checking
    (a) how many constant stars are detected, and
    (b) how many fall inside of, versus outside of, +/- 0.05 mag
        deviation.

    Very similar to count_constants_calc_ratio(), but this one goes on
    a per-exposure, rather than per-night, basis.

    Parameters
    ----------
    data : atpy.Table
        Table that contains all the photometry data.
    spreadsheet : atpy.Table
        Table that contains median photometry and related statistics.
    band : str {'j'|'h'|'k'}
        Which band to use.
    min_mag : float, optional
        Faint-magnitude cutoff; only constants brighter than this
        are counted (default 17).

    Returns
    -------
    dates : np.ndarray
        Array of MJD timestamps corresponding to times of observation.
    n_const : np.ndarray
        Number of constant stars detected per exposure.
    ratio : np.ndarray
        Ratio of (well-behaved)/(total) constants per exposure.
    """

    if band.lower() not in ('j', 'h', 'k'):
        raise ValueError("`band` must be 'j', 'h', or 'k'")

    col = band.upper() + "APERMAG3"
    bandmean = band.lower() + "_meanr"

    # First, let's make the list of dates that we are iterating through.
    date_list = list(set(list(data.MEANMJDOBS)))
    date_list.sort()

    print len(date_list), " timestamps in this dataset"

    dates = np.array(date_list)
    n_const = np.zeros_like(dates, dtype='int')
    ratio = np.zeros_like(dates, dtype='float')

    # Relevant data: cut once, outside the loop.
    rdata = band_cut(data, band, max_flag=256)

    # Now we iterate over our date list.
    for i, night in enumerate(date_list):

        # Grab this night's photometry that corresponds to the input
        # constant star list.
        this_nights_phot = rdata.where(
            (rdata.MEANMJDOBS == night) &
            (np.in1d(rdata.SOURCEID, spreadsheet.SOURCEID)) &
            (rdata.data[col] < min_mag))

        # Grab the spreadsheet info that corresponds exactly to this
        # night's stars. ("reference photometry")
        ref_phot = spreadsheet.where(
            np.in1d(spreadsheet.SOURCEID, this_nights_phot.SOURCEID))

        print "For night %s:" % night
        print len(this_nights_phot), len(ref_phot)

        # Now compute the deviation for each star.
        # (I'd love to do this in an array-based way, but some stars
        # have two observations per night, and that breaks my
        # array-based method...)
        deviation = np.zeros_like(ref_phot.data[bandmean])

        for j in range(len(deviation)):
            this_stars_phot = this_nights_phot.where(
                this_nights_phot.SOURCEID == ref_phot.SOURCEID[j])

            deviation[j] = (this_stars_phot.data[col][0] -
                            ref_phot.data[bandmean][j])

        # NOW count how many stars there are, and the ratio that fall
        # inside versus outside the +/- 0.05 mag bands.
        n_const[i] = len(deviation)
        goods = len(deviation[np.abs(deviation) < 0.05])

        if n_const[i] > 0:
            # Cast to float: under Python 2, integer division would
            # truncate the ratio to 0 or 1.
            ratio[i] = float(goods) / n_const[i]
        else:
            ratio[i] = 0

    return dates, n_const, ratio

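# Hedged usage sketch (not part of the original module): plot the
# per-exposure fraction of well-behaved K-band constants versus time.
# The table names are illustrative assumptions.
def example_exposure_ratio_plot(data, spreadsheet):
    """Illustrative sketch: K-band exposure-quality ratio vs. MJD."""
    dates, n_const, ratio = exposure_grader(data, spreadsheet, 'k')
    plt.plot(dates, ratio, 'r.')
    plt.xlabel("MJD")
    plt.ylabel("Fraction of constants within 0.05 mag")
    plt.ylim(0, 1.05)
    plt.show()
    return dates, n_const, ratio
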
def mapmaker(data, spreadsheet, band, path, min_mag=17):
    """
    A function to make variability map movies.

    Produces only the PNGs required to make the movies. In order to
    glue them together, manually run a command like:

        mencoder mf://*.png -mf fps=1:type=png -ovc copy -o k_movie_slow.avi

    to create a .avi video.

    Parameters
    ----------
    data : atpy.Table
        Table that contains all the photometry data.
    spreadsheet : atpy.Table
        Table that contains median photometry and related statistics.
    band : str {'j'|'h'|'k'}
        Which band to use.
    path : str
        Place to save to.
    min_mag : float, optional
        Faint-magnitude cutoff; only constants brighter than this
        are plotted (default 17).
    """

    if band.lower() not in ('j', 'h', 'k'):
        raise ValueError("`band` must be 'j', 'h', or 'k'")

    col = band.upper() + "APERMAG3"
    bandmean = band.lower() + "_meanr"

    # First, let's make the list of dates that we are iterating through.
    date_list = list(set(list(np.trunc(data.MEANMJDOBS))))
    date_list.sort()

    # Relevant data: cut once, outside the loop.
    rdata = band_cut(data, band, max_flag=256)

    # Now we iterate over our date list.
    for night in date_list:

        # Grab this night's photometry that corresponds to the input
        # constant star list.
        this_nights_phot = rdata.where(
            (np.trunc(rdata.MEANMJDOBS) == night) &
            (np.in1d(rdata.SOURCEID, spreadsheet.SOURCEID)) &
            (rdata.data[col] < min_mag))

        # Grab the spreadsheet info that corresponds exactly to this
        # night's stars. ("reference photometry")
        ref_phot = spreadsheet.where(
            np.in1d(spreadsheet.SOURCEID, this_nights_phot.SOURCEID))

        print "For night %s:" % night
        print len(this_nights_phot), len(ref_phot)

        # Now compute the deviation for each star.
        # (I'd love to do this in an array-based way, but some stars
        # have two observations per night, and that breaks my
        # array-based method...)
        deviation = np.zeros_like(ref_phot.data[bandmean])

        for i in range(len(deviation)):
            this_stars_phot = this_nights_phot.where(
                this_nights_phot.SOURCEID == ref_phot.SOURCEID[i])

            deviation[i] = (this_stars_phot.data[col][0] -
                            ref_phot.data[bandmean][i])

        try:
            fig = plt.figure()
            plt.scatter(np.degrees(ref_phot.RA), np.degrees(ref_phot.DEC),
                        s=(19 - ref_phot.data[bandmean])**2,
                        c=deviation, cmap='RdBu_r',
                        vmin=-0.15, vmax=0.15)

            cbar = plt.colorbar()
            cbar.set_label("Deviation from mean magnitude")
            cbar.ax.invert_yaxis()

            fig.gca().invert_xaxis()
            fig.gca().set_aspect('equal')

            plt.xlabel("Right Ascension (degrees)")
            plt.ylabel("Declination (degrees)")
            plt.xlim(84.3, 83.2)
            plt.ylim(-5.95, -4.9)

            plt.title("Night: MJD = %s (%d since 01/01/2000)" %
                      (str(night), night - 51544))

            plt.savefig(path + '%s.png' % str(night))
            plt.close()

        except ValueError:
            # Some nights have no plottable constants; close the
            # half-built figure and skip them.
            plt.close()
            continue

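# Hedged usage sketch (not part of the original module): render per-night
# K-band deviation-map frames into a directory, then glue them into a movie
# with the mencoder command given in mapmaker's docstring. The table names
# and output path are illustrative assumptions.
def example_k_band_movie_frames(data, spreadsheet):
    """Illustrative sketch: writes one PNG per night to './k_frames/'."""
    mapmaker(data, spreadsheet, 'k', 'k_frames/')
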
def deviation_plot(data, spreadsheet, band, path, min_mag=17):
    """
    Plots the deviation of each constant star as a function of
    magnitude.

    Parameters
    ----------
    data : atpy.Table
        Table that contains all the photometry data.
    spreadsheet : atpy.Table
        Table that contains median photometry and related statistics.
    band : str {'j'|'h'|'k'}
        Which band to use.
    path : str
        Place to save to.
    min_mag : float, optional
        Faint-magnitude cutoff; only constants brighter than this
        are plotted (default 17).
    """

    if band.lower() not in ('j', 'h', 'k'):
        raise ValueError("`band` must be 'j', 'h', or 'k'")

    col = band.upper() + "APERMAG3"
    bandmean = band.lower() + "_meanr"

    colordict = {'k': 'r', 'h': 'g', 'j': 'b'}

    # First, let's make the list of dates that we are iterating through.
    date_list = list(set(list(np.trunc(data.MEANMJDOBS))))
    date_list.sort()

    # Relevant data: cut once, outside the loop.
    rdata = band_cut(data, band, max_flag=256)

    # Now we iterate over our date list.
    for night in date_list:

        # Grab this night's photometry that corresponds to the input
        # constant star list.
        this_nights_phot = rdata.where(
            (np.trunc(rdata.MEANMJDOBS) == night) &
            (np.in1d(rdata.SOURCEID, spreadsheet.SOURCEID)) &
            (rdata.data[col] < min_mag))

        # Grab the spreadsheet info that corresponds exactly to this
        # night's stars. ("reference photometry")
        ref_phot = spreadsheet.where(
            np.in1d(spreadsheet.SOURCEID, this_nights_phot.SOURCEID))

        print "For night %s:" % night
        print len(this_nights_phot), len(ref_phot)

        # Now compute the deviation for each star.
        # (I'd love to do this in an array-based way, but some stars
        # have two observations per night, and that breaks my
        # array-based method...)
        deviation = np.zeros_like(ref_phot.data[bandmean])

        for i in range(len(deviation)):
            this_stars_phot = this_nights_phot.where(
                this_nights_phot.SOURCEID == ref_phot.SOURCEID[i])

            deviation[i] = (this_stars_phot.data[col][0] -
                            ref_phot.data[bandmean][i])

        try:
            fig = plt.figure()
            plt.plot(ref_phot.data[bandmean], deviation,
                     colordict[band.lower()] + '.')
            plt.plot([5, 20], [0, 0], 'k--')
            plt.plot([5, 20], [0.05, 0.05], 'k:')
            plt.plot([5, 20], [-0.05, -0.05], 'k:')

            plt.xlabel("Mean %s magnitude" % band.upper())
            plt.ylabel("Deviation (mag)")

            plt.title("Night: MJD = %s (%d since 01/01/2000)" %
                      (str(night), night - 51544))

            plt.xlim(11, min_mag + 0.25)
            plt.ylim(-1, 1)
            plt.gca().invert_yaxis()

            plt.savefig(path + '%s_dev.png' % str(night))
            plt.close()

        except ValueError:
            # Some nights have no plottable constants; close the
            # half-built figure and skip them.
            plt.close()
            continue

    return None
