def store_a0v_results(self, igr_storage, extractor, a0v_flattened_data): wvl_header, wvl_data, convert_data = \ self.get_wvl_header_data(igr_storage, extractor) f_obj = pyfits.open(extractor.obj_filenames[0]) f_obj[0].header.extend(wvl_header) from libs.products import PipelineImage as Image image_list = [Image([("EXTNAME", "SPEC_FLATTENED")], convert_data(a0v_flattened_data[0][1]))] if self.debug_output: for ext_name, data in a0v_flattened_data[1:]: image_list.append(Image([("EXTNAME", ext_name.upper())], convert_data(data))) from libs.products import PipelineImages #Base from libs.storage_descriptions import SPEC_FITS_FLATTENED_DESC r = PipelineProducts("flattened 1d specs") r.add(SPEC_FITS_FLATTENED_DESC, PipelineImages(image_list)) mastername = extractor.obj_filenames[0] igr_storage.store(r, mastername=mastername, masterhdu=f_obj[0]) tgt_basename = extractor.pr.tgt_basename extractor.db["a0v"].update(extractor.band, tgt_basename)
def store_profile(self, igr_storage, mastername, orders, slit_profile_list, profile_x, profile_y): ## save profile r = PipelineProducts("slit profile for point source") from libs.storage_descriptions import SLIT_PROFILE_JSON_DESC from libs.products import PipelineDict slit_profile_dict = PipelineDict(orders=orders, slit_profile_list=slit_profile_list, profile_x=profile_x, profile_y=profile_y) r.add(SLIT_PROFILE_JSON_DESC, slit_profile_dict) igr_storage.store(r, mastername=mastername, masterhdu=None)
def store_processed_inputs(self, igr_storage, mastername, image_list, variance_map, shifted_image_list): from libs.storage_descriptions import (COMBINED_IMAGE_DESC, # COMBINED_IMAGE_A_DESC, # COMBINED_IMAGE_B_DESC, WVLCOR_IMAGE_DESC, #VARIANCE_MAP_DESC ) from libs.products import PipelineImages #Base r = PipelineProducts("1d specs") #r.add(COMBINED_IMAGE_DESC, PipelineImageBase([], *image_list)) r.add(COMBINED_IMAGE_DESC, PipelineImages(image_list)) # r.add(COMBINED_IMAGE_A_DESC, PipelineImageBase([], # a_data)) # r.add(COMBINED_IMAGE_B_DESC, PipelineImageBase([], # b_data)) #r.add(VARIANCE_MAP_DESC, PipelineImageBase([], # variance_map)) # r.add(VARIANCE_MAP_DESC, PipelineImageBase([], # variance_map.data)) igr_storage.store(r, mastername=mastername, masterhdu=None) r = PipelineProducts("1d specs") r.add(WVLCOR_IMAGE_DESC, PipelineImages(shifted_image_list)) igr_storage.store(r, mastername=mastername, masterhdu=None)
def store_wavelength_outputs(self, extractor, p2_list, ap): orders = extractor.orders_w_solutions wvl_solutions = extractor.wvl_solutions p2_dict = dict(zip(orders, p2_list)) # save order_map, etc order_map = ap.make_order_map() slitpos_map = ap.make_slitpos_map() order_map2 = ap.make_order_map(mask_top_bottom=True) slitoffset_map = np.empty_like(slitpos_map) slitoffset_map.fill(np.nan) wavelength_map = np.empty_like(slitpos_map) wavelength_map.fill(np.nan) from scipy.interpolate import interp1d for o, wvl in zip(ap.orders, wvl_solutions): xi = np.arange(0, 2048) xl, yl = np.meshgrid(xi, xi) msk = order_map == o xl_msk = xl[msk] slitoffset_map_msk = p2_dict[o](xl_msk, slitpos_map[msk]) slitoffset_map[msk] = slitoffset_map_msk wvl_interp1d = interp1d(xi, wvl, bounds_error=False) wavelength_map[msk] = wvl_interp1d(xl_msk - slitoffset_map_msk) from libs.storage_descriptions import (ORDERMAP_FITS_DESC, SLITPOSMAP_FITS_DESC, SLITOFFSET_FITS_DESC, WAVELENGTHMAP_FITS_DESC, ORDERMAP_MASKED_FITS_DESC) from libs.products import PipelineImageBase, PipelineProducts products = PipelineProducts("Distortion map") for desc, im in [(ORDERMAP_FITS_DESC, order_map), (SLITPOSMAP_FITS_DESC, slitpos_map), (SLITOFFSET_FITS_DESC, slitoffset_map), (WAVELENGTHMAP_FITS_DESC,wavelength_map), (ORDERMAP_MASKED_FITS_DESC, order_map2)]: products.add(desc, PipelineImageBase([], im)) igr_storage = extractor.igr_storage igr_storage.store(products, mastername=extractor.obj_filenames[0], masterhdu=None) if 0: # test x = np.arange(2048, dtype="d") oi = 10 o = orders[oi] yi = 0.5*(slit_slice[:-1] + slit_slice[1:]) ax1 = subplot(211) s1 = s_up[-1][oi] s2 = s_down[-1][oi] ax1.plot(x, s1) ax1.plot(x, s2) ax2 = subplot(212, sharex=ax1, sharey=ax1) dx1 = p2_dict[o](x, yi[-1]+np.zeros_like(x)) ax2.plot(x-dx1, s1) dx2 = p2_dict[o](x, yi[0]+np.zeros_like(x)) ax2.plot(x-dx2, s2)
def save_wavelength_sol(self, extractor, orders_w_solutions, wvl_sol, p): oh_sol_products = PipelineProducts("Wavelength solution based on ohlines") #from libs.process_thar import ONED_SPEC_JSON from libs.products import PipelineDict from libs.storage_descriptions import SKY_WVLSOL_JSON_DESC oh_sol_products.add(SKY_WVLSOL_JSON_DESC, PipelineDict(orders=orders_w_solutions, wvl_sol=wvl_sol)) if 1: # save as WAT fits header xx = np.arange(0, 2048) xx_plus1 = np.arange(1, 2048+1) from astropy.modeling import models, fitting # We convert 2d chebyshev solution to a seriese of 1d # chebyshev. For now, use naive (and inefficient) # approach of refitting the solution with 1d. Should be # reimplemented. p1d_list = [] for o in orders_w_solutions: oo = np.empty_like(xx) oo.fill(o) wvl = p(xx, oo) / o * 1.e4 # um to angstrom p_init1d = models.Chebyshev1D(domain=[1, 2048], degree=p.x_degree) fit_p1d = fitting.LinearLSQFitter() p1d = fit_p1d(p_init1d, xx_plus1, wvl) p1d_list.append(p1d) from libs.iraf_helper import get_wat_spec, default_header_str wat_list = get_wat_spec(orders_w_solutions, p1d_list) # cards = [pyfits.Card.fromstring(l.strip()) \ # for l in open("echell_2dspec.header")] import libs.fits as pyfits cards = [pyfits.Card.fromstring(l.strip()) \ for l in default_header_str] wat = "wtype=multispec " + " ".join(wat_list) char_per_line = 68 num_line, remainder = divmod(len(wat), char_per_line) for i in range(num_line): k = "WAT2_%03d" % (i+1,) v = wat[char_per_line*i:char_per_line*(i+1)] #print k, v c = pyfits.Card(k, v) cards.append(c) if remainder > 0: i = num_line k = "WAT2_%03d" % (i+1,) v = wat[char_per_line*i:] #print k, v c = pyfits.Card(k, v) cards.append(c) if 1: # save fits with empty header header = pyfits.Header(cards) hdu = pyfits.PrimaryHDU(header=header, data=np.array([]).reshape((0,0))) from libs.storage_descriptions import SKY_WVLSOL_FITS_DESC from libs.products import PipelineImageBase oh_sol_products.add(SKY_WVLSOL_FITS_DESC, PipelineImageBase([], np.array(wvl_sol))) igr_storage = extractor.igr_storage sky_filenames = extractor.obj_filenames igr_storage.store(oh_sol_products, mastername=sky_filenames[0], masterhdu=hdu) #fn = sky_path.get_secondary_path("wvlsol_v1.fits") #hdu.writeto(fn, clobber=True) if 0: # plot all spectra for w, s in zip(wvl_sol, s_list): plot(w, s)
def process(self, recipe, band, obsids, frametypes): igr_path = self.igr_path igr_storage = self.igr_storage if recipe == "A0V_AB": DO_STD = True #FIX_TELLURIC=False elif recipe == "STELLAR_AB": DO_STD = False #FIX_TELLURIC=True elif recipe == "EXTENDED_AB": DO_STD = False #FIX_TELLURIC=True elif recipe == "EXTENDED_ONOFF": DO_STD = False #FIX_TELLURIC=True if 1: obj_filenames = igr_path.get_filenames(band, obsids) master_obsid = obsids[0] tgt_basename = os.path.splitext(os.path.basename(obj_filenames[0]))[0] db = {} basenames = {} db_types = ["flat_off", "flat_on", "thar", "sky"] for db_type in db_types: db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", "%s.db" % db_type, ) db[db_type] = ProductDB(db_name) # db on output path db_types = ["a0v"] for db_type in db_types: db_name = igr_path.get_section_filename_base("OUTDATA_PATH", "%s.db" % db_type, ) db[db_type] = ProductDB(db_name) # to get basenames db_types = ["flat_off", "flat_on", "thar", "sky"] # if FIX_TELLURIC: # db_types.append("a0v") for db_type in db_types: basenames[db_type] = db[db_type].query(band, master_obsid) if 1: # make aperture from libs.storage_descriptions import SKY_WVLSOL_JSON_DESC sky_basename = db["sky"].query(band, master_obsid) wvlsol_products = igr_storage.load([SKY_WVLSOL_JSON_DESC], sky_basename)[SKY_WVLSOL_JSON_DESC] orders_w_solutions = wvlsol_products["orders"] wvl_solutions = map(np.array, wvlsol_products["wvl_sol"]) from libs.storage_descriptions import ONED_SPEC_JSON_DESC raw_spec_products = igr_storage.load([ONED_SPEC_JSON_DESC], sky_basename) from recipe_wvlsol_sky import load_aperture2 ap = load_aperture2(igr_storage, band, master_obsid, db["flat_on"], raw_spec_products[ONED_SPEC_JSON_DESC]["orders"], orders_w_solutions) # This should be saved somewhere and loaded, instead of making it every time. 
order_map = ap.make_order_map() slitpos_map = ap.make_slitpos_map() order_map2 = ap.make_order_map(mask_top_bottom=True) if 1: from libs.storage_descriptions import (HOTPIX_MASK_DESC, DEADPIX_MASK_DESC, ORDER_FLAT_IM_DESC, ORDER_FLAT_JSON_DESC, FLAT_MASK_DESC) hotpix_mask = igr_storage.load([HOTPIX_MASK_DESC], basenames["flat_off"])[HOTPIX_MASK_DESC] deadpix_mask = igr_storage.load([DEADPIX_MASK_DESC], basenames["flat_on"])[DEADPIX_MASK_DESC] pix_mask = hotpix_mask.data | deadpix_mask.data # aperture_solution_products = PipelineProducts.load(aperture_solutions_name) orderflat_ = igr_storage.load([ORDER_FLAT_IM_DESC], basenames["flat_on"])[ORDER_FLAT_IM_DESC] orderflat = orderflat_.data orderflat[pix_mask] = np.nan orderflat_json = igr_storage.load([ORDER_FLAT_JSON_DESC], basenames["flat_on"])[ORDER_FLAT_JSON_DESC] order_flat_meanspec = np.array(orderflat_json["mean_order_specs"]) # flat_normed = igr_storage.load([FLAT_NORMED_DESC], # basenames["flat_on"])[FLAT_NORMED_DESC] flat_mask = igr_storage.load([FLAT_MASK_DESC], basenames["flat_on"])[FLAT_MASK_DESC] bias_mask = flat_mask.data & (order_map2 > 0) SLITOFFSET_FITS_DESC = ("PRIMARY_CALIB_PATH", "SKY_", ".slitoffset_map.fits") prod_ = igr_storage.load([SLITOFFSET_FITS_DESC], basenames["sky"])[SLITOFFSET_FITS_DESC] #fn = sky_path.get_secondary_path("slitoffset_map.fits") slitoffset_map = prod_.data if 1: abba_names = obj_filenames def filter_abba_names(abba_names, frametypes, frametype): return [an for an, ft in zip(abba_names, frametypes) if ft == frametype] a_name_list = filter_abba_names(abba_names, frametypes, "A") b_name_list = filter_abba_names(abba_names, frametypes, "B") if recipe in ["A0V_AB", "STELLAR_AB"]: IF_POINT_SOURCE = True elif recipe in ["EXTENDED_AB", "EXTENDED_ONOFF"]: IF_POINT_SOURCE = False else: print "Unknown recipe : %s" % recipe if 1: #ab_names = ab_names_list[0] # master_hdu = pyfits.open(a_name_list[0])[0] a_list = [pyfits.open(name)[0].data \ for name in a_name_list] b_list = [pyfits.open(name)[0].data \ for name in b_name_list] # we may need to detrip # first define extract profile (gaussian). # dx = 100 if IF_POINT_SOURCE: # if point source # for point sources, variance estimation becomes wrong # if lenth of two is different, assert len(a_list) == len(b_list) # a_b != 1 for the cases when len(a) != len(b) a_b = float(len(a_list)) / len(b_list) a_data = np.sum(a_list, axis=0) b_data = np.sum(b_list, axis=0) data_minus = a_data - a_b*b_data #data_minus0 = data_minus from libs.destriper import destriper if 1: data_minus = destriper.get_destriped(data_minus, ~np.isfinite(data_minus), pattern=64) data_minus_flattened = data_minus / orderflat data_minus_flattened[~flat_mask.data] = np.nan #data_minus_flattened[order_flat_meanspec<0.1*order_flat_meanspec.max()] = np.nan # for variance, we need a square of a_b data_plus = (a_data + (a_b**2)*b_data) import scipy.ndimage as ni bias_mask2 = ni.binary_dilation(bias_mask) from libs import instrument_parameters gain = instrument_parameters.gain[band] # random noise variance0 = data_minus variance_ = variance0.copy() variance_[bias_mask2] = np.nan variance_[pix_mask] = np.nan mm = np.ma.array(variance0, mask=~np.isfinite(variance0)) ss = np.ma.median(mm, axis=0) variance_ = variance_ - ss # iterate over fixed number of times. # need to be improved. 
for i in range(5): st = np.nanstd(variance_, axis=0) variance_[np.abs(variance_) > 3*st] = np.nan #st = np.nanstd(variance_, axis=0) variance = destriper.get_destriped(variance0, ~np.isfinite(variance_), pattern=64) variance_ = variance.copy() variance_[bias_mask2] = np.nan variance_[pix_mask] = np.nan st = np.nanstd(variance_) st = np.nanstd(variance_[np.abs(variance_) < 3*st]) variance_[np.abs(variance_-ss) > 3*st] = np.nan x_std = ni.median_filter(np.nanstd(variance_, axis=0), 11) variance_map0 = np.zeros_like(variance) + x_std**2 variance_map = variance_map0 + np.abs(data_plus)/gain # add poison noise in ADU # we ignore effect of flattening # now estimate lsf # estimate lsf ordermap_bpixed = order_map.copy() ordermap_bpixed[pix_mask] = 0 ordermap_bpixed[~np.isfinite(orderflat)] = 0 # if IF_POINT_SOURCE: # if point source x1, x2 = 800, 1200 bins, lsf_list = ap.extract_lsf(ordermap_bpixed, slitpos_map, data_minus_flattened, x1, x2, bins=None) hh0 = np.sum(lsf_list, axis=0) peak1, peak2 = max(hh0), -min(hh0) lsf_x = 0.5*(bins[1:]+bins[:-1]) lsf_y = hh0/(peak1+peak2) from scipy.interpolate import UnivariateSpline lsf_ = UnivariateSpline(lsf_x, lsf_y, k=3, s=0, bbox=[0, 1]) roots = list(lsf_.roots()) #assert(len(roots) == 1) integ_list = [] from itertools import izip, cycle for ss, int_r1, int_r2 in izip(cycle([1, -1]), [0] + roots, roots + [1]): #print ss, int_r1, int_r2 integ_list.append(lsf_.integral(int_r1, int_r2)) integ = np.abs(np.sum(integ_list)) def lsf(o, x, slitpos): return lsf_(slitpos) / integ # make weight map profile_map = ap.make_profile_map(order_map, slitpos_map, lsf) # extract spec s_list, v_list = ap.extract_stellar(ordermap_bpixed, profile_map, variance_map, data_minus_flattened, slitoffset_map=slitoffset_map) # make synth_spec : profile * spectra synth_map = ap.make_synth_map(order_map, slitpos_map, profile_map, s_list, slitoffset_map=slitoffset_map) sig_map = (data_minus_flattened - synth_map)**2/variance_map ## mark sig_map > 100 as cosmicay. The threshold need to be fixed. # reextract with new variance map and CR is rejected variance_map_r = variance_map0 + np.abs(synth_map)/gain variance_map2 = np.max([variance_map, variance_map_r], axis=0) variance_map2[np.abs(sig_map) > 100] = np.nan # extract spec s_list, v_list = ap.extract_stellar(ordermap_bpixed, profile_map, variance_map2, data_minus_flattened, slitoffset_map=slitoffset_map) else: # if extended source from scipy.interpolate import UnivariateSpline if recipe in ["EXTENDED_AB", "EXTENDED_ABBA"]: delta = 0.01 lsf_ = UnivariateSpline([0, 0.5-delta, 0.5+delta, 1], [1., 1., -1., -1.], k=1, s=0, bbox=[0, 1]) else: lsf_ = UnivariateSpline([0, 1], [1., 1.], k=1, s=0, bbox=[0, 1]) def lsf(o, x, slitpos): return lsf_(slitpos) profile_map = ap.make_profile_map(order_map, slitpos_map, lsf) # we need to update the variance map by rejecting # cosmicray sources, but it is not clear how we do this # for extended source. variance_map2 = variance_map s_list, v_list = ap.extract_stellar(ordermap_bpixed, profile_map, variance_map2, data_minus_flattened, slitoffset_map=slitoffset_map ) if 1: # calculate S/N per resolution sn_list = [] for wvl, s, v in zip(wvl_solutions, s_list, v_list): dw = np.gradient(wvl) pixel_per_res_element = (wvl/40000.)/dw #print pixel_per_res_element[1024] # len(pixel_per_res_element) = 2047. But we ignore it. 
sn = (s/v**.5)*(pixel_per_res_element**.5) sn_list.append(sn) if 1: # save the product from libs.storage_descriptions import (COMBINED_IMAGE_DESC, VARIANCE_MAP_DESC) from libs.products import PipelineImage r = PipelineProducts("1d specs") r.add(COMBINED_IMAGE_DESC, PipelineImage([], data_minus_flattened)) r.add(VARIANCE_MAP_DESC, PipelineImage([], variance_map2)) # r.add(VARIANCE_MAP_DESC, PipelineImage([], # variance_map.data)) igr_storage.store(r, mastername=obj_filenames[0], masterhdu=None) if 1: # save spectra, variance, sn from libs.storage_descriptions import SKY_WVLSOL_FITS_DESC fn = igr_storage.get_path(SKY_WVLSOL_FITS_DESC, basenames["sky"]) # fn = sky_path.get_secondary_path("wvlsol_v1.fits") f = pyfits.open(fn) d = np.array(s_list) f[0].data = d.astype("f32") from libs.storage_descriptions import (SPEC_FITS_DESC, VARIANCE_FITS_DESC, SN_FITS_DESC) fout = igr_storage.get_path(SPEC_FITS_DESC, tgt_basename) f.writeto(fout, clobber=True) d = np.array(v_list) f[0].data = d.astype("f32") fout = igr_storage.get_path(VARIANCE_FITS_DESC, tgt_basename) f.writeto(fout, clobber=True) d = np.array(sn_list) f[0].data = d.astype("f32") fout = igr_storage.get_path(SN_FITS_DESC, tgt_basename) f.writeto(fout, clobber=True) if 1: # from libs.storage_descriptions import ORDER_FLAT_JSON_DESC prod = igr_storage.load([ORDER_FLAT_JSON_DESC], basenames["flat_on"])[ORDER_FLAT_JSON_DESC] new_orders = prod["orders"] # fitted_response = orderflat_products["fitted_responses"] i1i2_list = prod["i1i2_list"] order_indices = [] for o in ap.orders: o_new_ind = np.searchsorted(new_orders, o) order_indices.append(o_new_ind) if DO_STD: # a quick and dirty flattening for A0V stars from libs.master_calib import get_master_calib_abspath fn = get_master_calib_abspath("A0V/vegallpr25.50000resam5") d = np.genfromtxt(fn) wvl_a0v, flux_a0v, cont_a0v = (d[:,i] for i in [0, 1, 2]) wvl_a0v = wvl_a0v/1000. 
wvl_limits = [] for wvl_ in wvl_solutions: wvl_limits.extend([wvl_[0], wvl_[-1]]) dwvl = abs(wvl_[0] - wvl_[-1])*0.1 # padding mask_wvl1 = min(wvl_limits) - dwvl mask_wvl2 = max(wvl_limits) + dwvl #print mask_wvl1, mask_wvl2 # if band == "H": # mask_wvl1, mask_wvl2 = 1.450, 1.850 # else: # mask_wvl1, mask_wvl2 = 1.850, 2.550 mask_igr = (mask_wvl1 < wvl_a0v) & (wvl_a0v < mask_wvl2) fn = get_master_calib_abspath("telluric/LBL_A15_s0_w050_R0060000_T.fits") telluric = pyfits.open(fn)[1].data telluric_lam = telluric["lam"] tel_mask_igr = (mask_wvl1 < telluric_lam) & (telluric_lam < mask_wvl2) #plot(telluric_lam[tel_mask_H], telluric["trans"][tel_mask_H]) from scipy.interpolate import interp1d, UnivariateSpline # spl = UnivariateSpline(telluric_lam[tel_mask_igr], # telluric["trans"][tel_mask_igr], # k=1,s=0) spl = interp1d(telluric_lam[tel_mask_igr], telluric["trans"][tel_mask_igr], bounds_error=False ) trans = spl(wvl_a0v[mask_igr]) # ax1.plot(wvl_a0v[mask_igr], flux[mask_igr]/cont[mask_igr]*trans, # color="0.5", zorder=0.5) trans_m = ni.maximum_filter(trans, 128) trans_mg = ni.gaussian_filter(trans_m, 32) zzz0 = (flux_a0v/cont_a0v)[mask_igr] zzz = zzz0*trans mmm = trans/trans_mg > 0.95 zzz[~mmm] = np.nan wvl_zzz = wvl_a0v[mask_igr] #ax2.plot(, zzz) # #ax2 = subplot(212) # if DO_STD: # telluric_cor = [] a0v_flattened = [] for o_index, wvl, s in zip(order_indices, wvl_solutions, s_list): i1, i2 = i1i2_list[o_index] #sl = slice(i1, i2) wvl1, wvl2 = wvl[i1], wvl[i2] #wvl1, wvl2 = wvl[0], wvl[-1] z_m = (wvl1 < wvl_zzz) & (wvl_zzz < wvl2) wvl1, wvl2 = min(wvl), max(wvl) z_m2 = (wvl1 < wvl_zzz) & (wvl_zzz < wvl2) #z_m = z_m2 ss = interp1d(wvl, s) s_interped = ss(wvl_zzz[z_m]) xxx, yyy = wvl_zzz[z_m], s_interped/zzz[z_m] from astropy.modeling import models, fitting p_init = models.Chebyshev1D(domain=[xxx[0], xxx[-1]], degree=6) fit_p = fitting.LinearLSQFitter() x_m = np.isfinite(yyy) p = fit_p(p_init, xxx[x_m], yyy[x_m]) #ax2.plot(xxx, yyy) #ax2.plot(xxx, p(xxx)) res_ = p(wvl) z_interp = interp1d(wvl_zzz[z_m], zzz0[z_m], bounds_error=False) A0V = z_interp(wvl) #res_[res_<0.3*res_.max()] = np.nan s_f = (s/res_)/A0V s_f[:i1] = np.nan s_f[i2:] = np.nan a0v_flattened.append(s_f) d = np.array(a0v_flattened) #d[~np.isfinite(d)] = 0. f[0].data = d.astype("f32") from libs.storage_descriptions import SPEC_FITS_FLATTENED_DESC fout = igr_storage.get_path(SPEC_FITS_FLATTENED_DESC, tgt_basename) f.writeto(fout, clobber=True) db["a0v"].update(band, tgt_basename)
def process_wvlsol_band(utdate, refdate, band, obsids, config): from libs.products import ProductDB, PipelineStorage igr_path = IGRINSPath(config, utdate) igr_storage = PipelineStorage(igr_path) sky_filenames = igr_path.get_filenames(band, obsids) sky_basename = os.path.splitext(os.path.basename(sky_filenames[0]))[0] master_obsid = obsids[0] flaton_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", "flat_on.db", ) flaton_db = ProductDB(flaton_db_name) #flaton_basename = flaton_db.query(band, master_obsid) thar_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", "thar.db", ) thar_db = ProductDB(thar_db_name) #thar_basename = thar_db.query(band, master_obsid) # flaton_db = ProductDB(os.path.join(igr_path.secondary_calib_path, # "flat_on.db")) # thar_db = ProductDB(os.path.join(igr_path.secondary_calib_path, # "thar.db")) ap = load_aperture(igr_storage, band, master_obsid, flaton_db, thar_db) if 1: # from libs.process_thar import get_1d_median_specs raw_spec_product = get_1d_median_specs(sky_filenames, ap) # sky_master_fn_ = os.path.splitext(os.path.basename(sky_names[0]))[0] # sky_master_fn = igr_path.get_secondary_calib_filename(sky_master_fn_) import astropy.io.fits as pyfits masterhdu = pyfits.open(sky_filenames[0])[0] igr_storage.store(raw_spec_product, mastername=sky_filenames[0], masterhdu=masterhdu) # fn = sky_path.get_secondary_path("raw_spec") # raw_spec_product.save(fn, # masterhdu=masterhdu) from libs.master_calib import load_sky_ref_data # ref_date = "20140316" refdate = config.get_value("REFDATE", utdate) sky_ref_data = load_sky_ref_data(refdate, band) if 1: # initial wavelength solution # this need to be fixed # thar_db.query(sky_master_obsid) # json_name_ = "SDC%s_%s_0003.median_spectra.wvlsol" % (band, # igrins_log.date) from libs.storage_descriptions import THAR_WVLSOL_JSON_DESC thar_basename = thar_db.query(band, master_obsid) thar_wvl_sol = igr_storage.load([THAR_WVLSOL_JSON_DESC], thar_basename)[THAR_WVLSOL_JSON_DESC] #print thar_wvl_sol.keys() #["wvl_sol"] #json_name = thar_path.get_secondary_path("wvlsol_v0") #json_name = igr_path.get_secondary_calib_filename(json_name_) #thar_wvl_sol = PipelineProducts.load(json_name) if 1: # Now we fit with gaussian profile for matched positions. 
ohline_indices = sky_ref_data["ohline_indices"] ohlines_db = sky_ref_data["ohlines_db"] wvl_solutions = thar_wvl_sol["wvl_sol"] if 0: # it would be better to iteratively refit the solution fn = sky_path.get_secondary_path("wvlsol_v1") p = PipelineProducts.load(fn) wvl_solutionv = p["wvl_sol"] orders_w_solutions_ = thar_wvl_sol["orders"] from libs.storage_descriptions import ONED_SPEC_JSON_DESC orders_w_solutions = [o for o in orders_w_solutions_ if o in raw_spec_product[ONED_SPEC_JSON_DESC]["orders"]] _ = dict(zip(raw_spec_product[ONED_SPEC_JSON_DESC]["orders"], raw_spec_product[ONED_SPEC_JSON_DESC]["specs"])) s_list = [_[o]for o in orders_w_solutions] from libs.reidentify_ohlines import fit_ohlines ref_pixel_list, reidentified_lines = \ fit_ohlines(ohlines_db, ohline_indices, orders_w_solutions, wvl_solutions, s_list) # from scipy.interpolate import interp1d # from reidentify import reidentify_lines_all x = np.arange(2048) # line_indices_list = [ref_ohline_indices[str(o)] for o in igrins_orders[band]] ###### not fit identified lines from libs.ecfit import get_ordered_line_data, fit_2dspec, check_fit # d_x_wvl = {} # for order, z in echel.zdata.items(): # xy_T = affine_tr.transform(np.array([z.x, z.y]).T) # x_T = xy_T[:,0] # d_x_wvl[order]=(x_T, z.wvl) reidentified_lines_map = dict(zip(orders_w_solutions, reidentified_lines)) if band == "K": import libs.master_calib as master_calib fn = "hitran_bootstrap_K_%s.json" % refdate bootstrap_name = master_calib.get_master_calib_abspath(fn) import json bootstrap = json.load(open(bootstrap_name)) import libs.hitran as hitran r, ref_pixel_list = hitran.reidentify(wvl_solutions, s_list, bootstrap) # json_name = "hitran_reidentified_K_%s.json" % igrins_log.date # r = json.load(open(json_name)) for i, s in r.items(): ss = reidentified_lines_map[int(i)] ss0 = np.concatenate([ss[0], s["pixel"]]) ss1 = np.concatenate([ss[1], s["wavelength"]]) reidentified_lines_map[int(i)] = (ss0, ss1) xl, yl, zl = get_ordered_line_data(reidentified_lines_map) # xl : pixel # yl : order # zl : wvl * order x_domain = [0, 2047] y_domain = [orders_w_solutions[0]-2, orders_w_solutions[-1]+2] x_degree, y_degree = 4, 3 #x_degree, y_degree = 3, 2 p, m = fit_2dspec(xl, yl, zl, x_degree=x_degree, y_degree=y_degree, x_domain=x_domain, y_domain=y_domain) # derive wavelengths. xx = np.arange(2048) wvl_sol = [] for o in orders_w_solutions: oo = np.empty_like(xx) oo.fill(o) wvl = p(xx, oo) / o wvl_sol.append(list(wvl)) oh_sol_products = PipelineProducts("Wavelength solution based on ohlines") #from libs.process_thar import ONED_SPEC_JSON from libs.products import PipelineDict from libs.storage_descriptions import SKY_WVLSOL_JSON_DESC oh_sol_products.add(SKY_WVLSOL_JSON_DESC, PipelineDict(orders=orders_w_solutions, wvl_sol=wvl_sol)) if 1: if 1: # save as WAT fits header xx = np.arange(0, 2048) xx_plus1 = np.arange(1, 2048+1) from astropy.modeling import models, fitting # We convert 2d chebyshev solution to a seriese of 1d # chebyshev. For now, use naive (and inefficient) # approach of refitting the solution with 1d. Should be # reimplemented. 
p1d_list = [] for o in orders_w_solutions: oo = np.empty_like(xx) oo.fill(o) wvl = p(xx, oo) / o * 1.e4 # um to angstrom p_init1d = models.Chebyshev1D(domain=[1, 2048], degree=p.x_degree) fit_p1d = fitting.LinearLSQFitter() p1d = fit_p1d(p_init1d, xx_plus1, wvl) p1d_list.append(p1d) from libs.iraf_helper import get_wat_spec, default_header_str wat_list = get_wat_spec(orders_w_solutions, p1d_list) # cards = [pyfits.Card.fromstring(l.strip()) \ # for l in open("echell_2dspec.header")] cards = [pyfits.Card.fromstring(l.strip()) \ for l in default_header_str] wat = "wtype=multispec " + " ".join(wat_list) char_per_line = 68 num_line, remainder = divmod(len(wat), char_per_line) for i in range(num_line): k = "WAT2_%03d" % (i+1,) v = wat[char_per_line*i:char_per_line*(i+1)] #print k, v c = pyfits.Card(k, v) cards.append(c) if remainder > 0: i = num_line k = "WAT2_%03d" % (i+1,) v = wat[char_per_line*i:] #print k, v c = pyfits.Card(k, v) cards.append(c) if 1: # save fits with empty header header = pyfits.Header(cards) hdu = pyfits.PrimaryHDU(header=header, data=np.array([]).reshape((0,0))) from libs.storage_descriptions import SKY_WVLSOL_FITS_DESC from libs.products import PipelineImage oh_sol_products.add(SKY_WVLSOL_FITS_DESC, PipelineImage([], np.array([]).reshape((0,0)))) igr_storage.store(oh_sol_products, mastername=sky_filenames[0], masterhdu=hdu) #fn = sky_path.get_secondary_path("wvlsol_v1.fits") #hdu.writeto(fn, clobber=True) if 0: # plot all spectra for w, s in zip(wvl_sol, s_list): plot(w, s) if 1: # filter out the line indices not well fit by the surface keys = reidentified_lines_map.keys() di_list = [len(reidentified_lines_map[k_][0]) for k_ in keys] endi_list = np.add.accumulate(di_list) filter_mask = [m[endi-di:endi] for di, endi in zip(di_list, endi_list)] #from itertools import compress # _ = [list(compress(indices, mm)) for indices, mm \ # in zip(line_indices_list, filter_mask)] # line_indices_list_filtered = _ reidentified_lines_ = [reidentified_lines_map[k_] for k_ in keys] _ = [(v_[0][mm], v_[1][mm]) for v_, mm \ in zip(reidentified_lines_, filter_mask)] reidentified_lines_map_filtered = dict(zip(orders_w_solutions, _)) if 1: from matplotlib.figure import Figure fig1 = Figure(figsize=(12, 7)) check_fit(fig1, xl, yl, zl, p, orders_w_solutions, reidentified_lines_map) fig1.tight_layout() fig2 = Figure(figsize=(12, 7)) check_fit(fig2, xl[m], yl[m], zl[m], p, orders_w_solutions, reidentified_lines_map_filtered) fig2.tight_layout() if 1: from libs.qa_helper import figlist_to_pngs sky_figs = igr_path.get_section_filename_base("QA_PATH", "oh_fit2d", "oh_fit2d_"+sky_basename) figlist_to_pngs(sky_figs, [fig1, fig2]) if 1: from libs.products import ProductDB sky_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", "sky.db", ) sky_db = ProductDB(sky_db_name) sky_db.update(band, sky_basename)
def process_distortion_sky_band(utdate, refdate, band, obsids, config): from libs.products import ProductDB, PipelineStorage igr_path = IGRINSPath(config, utdate) igr_storage = PipelineStorage(igr_path) sky_filenames = igr_path.get_filenames(band, obsids) sky_basename = os.path.splitext(os.path.basename(sky_filenames[0]))[0] master_obsid = obsids[0] flaton_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", "flat_on.db", ) flaton_db = ProductDB(flaton_db_name) # thar_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", # "thar.db", # ) # thar_db = ProductDB(thar_db_name) from libs.storage_descriptions import (COMBINED_IMAGE_DESC, ONED_SPEC_JSON_DESC) raw_spec_products = igr_storage.load([COMBINED_IMAGE_DESC, ONED_SPEC_JSON_DESC], sky_basename) # raw_spec_products = PipelineProducts.load(sky_path.get_secondary_path("raw_spec")) from libs.storage_descriptions import SKY_WVLSOL_JSON_DESC wvlsol_products = igr_storage.load([SKY_WVLSOL_JSON_DESC], sky_basename)[SKY_WVLSOL_JSON_DESC] orders_w_solutions = wvlsol_products["orders"] wvl_solutions = wvlsol_products["wvl_sol"] ap = load_aperture2(igr_storage, band, master_obsid, flaton_db, raw_spec_products[ONED_SPEC_JSON_DESC]["orders"], orders_w_solutions) #orders_w_solutions = ap.orders if 1: # load reference data from libs.master_calib import load_sky_ref_data ref_utdate = config.get_value("REFDATE", utdate) sky_ref_data = load_sky_ref_data(ref_utdate, band) ohlines_db = sky_ref_data["ohlines_db"] ref_ohline_indices = sky_ref_data["ohline_indices"] orders_w_solutions = wvlsol_products["orders"] wvl_solutions = wvlsol_products["wvl_sol"] if 1: n_slice_one_direction = 2 n_slice = n_slice_one_direction*2 + 1 i_center = n_slice_one_direction slit_slice = np.linspace(0., 1., n_slice+1) slice_center = (slit_slice[i_center], slit_slice[i_center+1]) slice_up = [(slit_slice[i_center+i], slit_slice[i_center+i+1]) \ for i in range(1, n_slice_one_direction+1)] slice_down = [(slit_slice[i_center-i-1], slit_slice[i_center-i]) \ for i in range(n_slice_one_direction)] d = raw_spec_products[COMBINED_IMAGE_DESC].data s_center = ap.extract_spectra_v2(d, slice_center[0], slice_center[1]) s_up, s_down = [], [] for s1, s2 in slice_up: s = ap.extract_spectra_v2(d, s1, s2) s_up.append(s) for s1, s2 in slice_down: s = ap.extract_spectra_v2(d, s1, s2) s_down.append(s) if 1: # now fit #ohline_indices = [ref_ohline_indices[o] for o in orders_w_solutions] if 0: def test_order(oi): ax=subplot(111) ax.plot(wvl_solutions[oi], s_center[oi]) #ax.plot(wvl_solutions[oi], raw_spec_products["specs"][oi]) o = orders[oi] line_indices = ref_ohline_indices[o] for li in line_indices: um = np.take(ohlines_db.um, li) intensity = np.take(ohlines_db.intensity, li) ax.vlines(um, ymin=0, ymax=-intensity) from libs.reidentify_ohlines import fit_ohlines, fit_ohlines_pixel def get_reidentified_lines_OH(orders_w_solutions, wvl_solutions, s_center): ref_pixel_list, reidentified_lines = \ fit_ohlines(ohlines_db, ref_ohline_indices, orders_w_solutions, wvl_solutions, s_center) reidentified_lines_map = dict(zip(orders_w_solutions, reidentified_lines)) return reidentified_lines_map, ref_pixel_list if band == "H": reidentified_lines_map, ref_pixel_list_oh = \ get_reidentified_lines_OH(orders_w_solutions, wvl_solutions, s_center) def refit_centroid(s_center, ref_pixel_list=ref_pixel_list_oh): centroids = fit_ohlines_pixel(s_center, ref_pixel_list) return centroids else: # band K reidentified_lines_map, ref_pixel_list_oh = \ get_reidentified_lines_OH(orders_w_solutions, 
wvl_solutions, s_center) import libs.master_calib as master_calib fn = "hitran_bootstrap_K_%s.json" % ref_utdate bootstrap_name = master_calib.get_master_calib_abspath(fn) import json bootstrap = json.load(open(bootstrap_name)) import libs.hitran as hitran r, ref_pixel_dict_hitrans = hitran.reidentify(wvl_solutions, s_center, bootstrap) # for i, s in r.items(): # ss = reidentified_lines_map[int(i)] # ss0 = np.concatenate([ss[0], s["pixel"]]) # ss1 = np.concatenate([ss[1], s["wavelength"]]) # reidentified_lines_map[int(i)] = (ss0, ss1) #reidentified_lines_map, ref_pixel_list def refit_centroid(s_center, ref_pixel_list=ref_pixel_list_oh, ref_pixel_dict_hitrans=ref_pixel_dict_hitrans): centroids_oh = fit_ohlines_pixel(s_center, ref_pixel_list) s_dict = dict(zip(orders_w_solutions, s_center)) centroids_dict_hitrans = hitran.fit_hitrans_pixel(s_dict, ref_pixel_dict_hitrans) centroids = [] for o, c_oh in zip(orders_w_solutions, centroids_oh): if o in centroids_dict_hitrans: c = np.concatenate([c_oh, centroids_dict_hitrans[o]["pixel"]]) centroids.append(c) else: centroids.append(c_oh) return centroids # reidentified_lines_map = get_reidentified_lines(orders_w_solutions, # wvl_solutions, # s_center) if 1: # TODO: we should not need this, instead recycle from preivious step. fitted_centroid_center = refit_centroid(s_center) # fitted_centroid_center = fit_ohlines_pixel(s_center, # ref_pixel_list) d_shift_up = [] for s in s_up: # TODO: ref_pixel_list_filtered need to be updated with recent fit. fitted_centroid = refit_centroid(s) # fitted_centroid = fit_ohlines_pixel(s, # ref_pixel_list) d_shift = [b-a for a, b in zip(fitted_centroid_center, fitted_centroid)] d_shift_up.append(d_shift) d_shift_down = [] for s in s_down: # TODO: ref_pixel_list_filtered need to be updated with recent fit. fitted_centroid = refit_centroid(s) # fitted_centroid = fit_ohlines_pixel(s, # ref_pixel_list) #fitted_centroid_center, d_shift = [b-a for a, b in zip(fitted_centroid_center, fitted_centroid)] d_shift_down.append(d_shift) if 1: # now fit orders = orders_w_solutions x_domain = [0, 2048] y_domain = [orders[0]-2, orders[-1]+2] xl = np.concatenate(fitted_centroid_center) yl_ = [o + np.zeros_like(x_) for o, x_ in zip(orders, fitted_centroid_center)] yl = np.concatenate(yl_) from libs.ecfit import fit_2dspec, check_fit_simple zl_list = [np.concatenate(d_) for d_ \ in d_shift_down[::-1] + d_shift_up] pm_list = [] for zl in zl_list: p, m = fit_2dspec(xl, yl, zl, x_degree=1, y_degree=1, x_domain=x_domain, y_domain=y_domain) pm_list.append((p,m)) zz_std_list = [] for zl, (p, m) in zip(zl_list, pm_list): z_m = p(xl[m], yl[m]) zz = z_m - zl[m] zz_std_list.append(zz.std()) fig_list = [] from matplotlib.figure import Figure for zl, (p, m) in zip(zl_list, pm_list): fig = Figure() check_fit_simple(fig, xl[m], yl[m], zl[m], p, orders) fig_list.append(fig) if 1: xi = np.linspace(0, 2048, 128+1) from astropy.modeling import fitting from astropy.modeling.polynomial import Chebyshev2D x_domain = [0, 2048] y_domain = [0., 1.] 
p2_list = [] for o in orders: oi = np.zeros_like(xi) + o shift_list = [] for p,m in pm_list[:n_slice_one_direction]: shift_list.append(p(xi, oi)) shift_list.append(np.zeros_like(xi)) for p,m in pm_list[n_slice_one_direction:]: shift_list.append(p(xi, oi)) p_init = Chebyshev2D(x_degree=1, y_degree=2, x_domain=x_domain, y_domain=y_domain) f = fitting.LinearLSQFitter() yi = 0.5*(slit_slice[:-1] + slit_slice[1:]) xl, yl = np.meshgrid(xi, yi) zl = np.array(shift_list) p = f(p_init, xl, yl, zl) p2_list.append(p) if 1: p2_dict = dict(zip(orders, p2_list)) order_map = ap.make_order_map() slitpos_map = ap.make_slitpos_map() slitoffset_map = np.empty_like(slitpos_map) slitoffset_map.fill(np.nan) for o in ap.orders: xi = np.arange(0, 2048) xl, yl = np.meshgrid(xi, xi) msk = order_map == o slitoffset_map[msk] = p2_dict[o](xl[msk], slitpos_map[msk]) # import astropy.io.fits as pyfits # fn = sky_path.get_secondary_path("slitoffset_map.fits") # pyfits.PrimaryHDU(data=slitoffset_map).writeto(fn, clobber=True) from libs.storage_descriptions import SLITOFFSET_FITS_DESC from libs.products import PipelineImage, PipelineProducts distortion_products = PipelineProducts("Distortion map") distortion_products.add(SLITOFFSET_FITS_DESC, PipelineImage([], slitoffset_map)) igr_storage.store(distortion_products, mastername=sky_filenames[0], masterhdu=None) from libs.qa_helper import figlist_to_pngs sky_figs = igr_path.get_section_filename_base("QA_PATH", "oh_distortion", "oh_distortion_"+sky_basename) print fig_list figlist_to_pngs(sky_figs, fig_list) if 0: # test x = np.arange(2048, dtype="d") oi = 10 o = orders[oi] yi = 0.5*(slit_slice[:-1] + slit_slice[1:]) ax1 = subplot(211) s1 = s_up[-1][oi] s2 = s_down[-1][oi] ax1.plot(x, s1) ax1.plot(x, s2) ax2 = subplot(212, sharex=ax1, sharey=ax1) dx1 = p2_dict[o](x, yi[-1]+np.zeros_like(x)) ax2.plot(x-dx1, s1) dx2 = p2_dict[o](x, yi[0]+np.zeros_like(x)) ax2.plot(x-dx2, s2)
def process_flat_band(utdate, refdate, band, obsids_off, obsids_on, config): from libs.products import PipelineStorage igr_path = IGRINSPath(config, utdate) igr_storage = PipelineStorage(igr_path) flat_off_filenames = igr_path.get_filenames(band, obsids_off) flat_on_filenames = igr_path.get_filenames(band, obsids_on) if 1: # process flat off flat_offs_hdu_list = [pyfits.open(fn_)[0] for fn_ in flat_off_filenames] flat_offs = [hdu.data for hdu in flat_offs_hdu_list] flat = FlatOff(flat_offs) flatoff_products = flat.make_flatoff_hotpixmap(sigma_clip1=100, sigma_clip2=5) igr_storage.store(flatoff_products, mastername=flat_off_filenames[0], masterhdu=flat_offs_hdu_list[0]) if 1: # flat on from libs.storage_descriptions import (FLAT_OFF_DESC, HOTPIX_MASK_DESC, FLATOFF_JSON_DESC) desc_list = [FLAT_OFF_DESC, HOTPIX_MASK_DESC, FLATOFF_JSON_DESC] flatoff_products = igr_storage.load(desc_list, mastername=flat_off_filenames[0]) flat_on_hdu_list = [pyfits.open(fn_)[0] for fn_ in flat_on_filenames] flat_ons = [hdu.data for hdu in flat_on_hdu_list] from libs.master_calib import get_master_calib_abspath fn = get_master_calib_abspath("deadpix_mask_%s_%s.fits" % (refdate, band)) deadpix_mask_old = pyfits.open(fn)[0].data.astype(bool) flat_on = FlatOn(flat_ons) flaton_products = flat_on.make_flaton_deadpixmap(flatoff_products, deadpix_mask_old=deadpix_mask_old) igr_storage.store(flaton_products, mastername=flat_on_filenames[0], masterhdu=flat_on_hdu_list[0]) if 1: # now trace the orders from libs.process_flat import trace_orders trace_products = trace_orders(flaton_products) hdu = pyfits.open(flat_on_filenames[0])[0] igr_storage.store(trace_products, mastername=flat_on_filenames[0], masterhdu=flat_on_hdu_list[0]) from libs.process_flat import trace_solutions trace_solution_products, trace_solution_products_plot = \ trace_solutions(trace_products) if 1: trace_solution_products.keys() from libs.storage_descriptions import FLATCENTROID_SOL_JSON_DESC myproduct = trace_solution_products[FLATCENTROID_SOL_JSON_DESC] bottomup_solutions = myproduct["bottom_up_solutions"] orders = range(len(bottomup_solutions)) from libs.apertures import Apertures ap = Apertures(orders, bottomup_solutions) from libs.storage_descriptions import FLAT_MASK_DESC flat_mask = igr_storage.load1(FLAT_MASK_DESC, flat_on_filenames[0]) order_map2 = ap.make_order_map(mask_top_bottom=True) bias_mask = flat_mask.data & (order_map2 > 0) from libs.products import PipelineImageBase, PipelineProducts pp = PipelineProducts("") from libs.storage_descriptions import BIAS_MASK_DESC pp.add(BIAS_MASK_DESC, PipelineImageBase([], bias_mask)) flaton_basename = flat_on_filenames[0] igr_storage.store(pp, mastername=flaton_basename, masterhdu=hdu) # plot qa figures. 
if 1: from libs.process_flat import check_trace_order from matplotlib.figure import Figure fig1 = Figure(figsize=[9, 4]) check_trace_order(trace_products, fig1) if 1: from libs.process_flat import plot_trace_solutions fig2, fig3 = plot_trace_solutions(flaton_products, trace_solution_products, trace_solution_products_plot, ) flatoff_basename = os.path.splitext(os.path.basename(flat_off_filenames[0]))[0] flaton_basename = os.path.splitext(os.path.basename(flat_on_filenames[0]))[0] if 1: from libs.qa_helper import figlist_to_pngs aperture_figs = igr_path.get_section_filename_base("QA_PATH", "aperture_"+flaton_basename, "aperture_"+flaton_basename) figlist_to_pngs(aperture_figs, [fig1, fig2, fig3]) if 1: # now trace the orders #del trace_solution_products["bottom_up_solutions"] igr_storage.store(trace_solution_products, mastername=flat_on_filenames[0], masterhdu=flat_on_hdu_list[0]) # save db if 1: from libs.products import ProductDB flatoff_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", "flat_off.db", ) flatoff_db = ProductDB(flatoff_db_name) #dbname = os.path.splitext(os.path.basename(flat_off_filenames[0]))[0] flatoff_db.update(band, flatoff_basename) flaton_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", "flat_on.db", ) flaton_db = ProductDB(flaton_db_name) flaton_db.update(band, flaton_basename)
def process_wvlsol_band(utdate, refdate, band, obsids, config): from libs.products import ProductDB, PipelineStorage igr_path = IGRINSPath(config, utdate) igr_storage = PipelineStorage(igr_path) sky_filenames = igr_path.get_filenames(band, obsids) sky_basename = os.path.splitext(os.path.basename(sky_filenames[0]))[0] master_obsid = obsids[0] flaton_db_name = igr_path.get_section_filename_base( "PRIMARY_CALIB_PATH", "flat_on.db", ) flaton_db = ProductDB(flaton_db_name) #flaton_basename = flaton_db.query(band, master_obsid) thar_db_name = igr_path.get_section_filename_base( "PRIMARY_CALIB_PATH", "thar.db", ) thar_db = ProductDB(thar_db_name) #thar_basename = thar_db.query(band, master_obsid) # flaton_db = ProductDB(os.path.join(igr_path.secondary_calib_path, # "flat_on.db")) # thar_db = ProductDB(os.path.join(igr_path.secondary_calib_path, # "thar.db")) ap = load_aperture(igr_storage, band, master_obsid, flaton_db, thar_db) if 1: # from libs.process_thar import get_1d_median_specs raw_spec_product = get_1d_median_specs(sky_filenames, ap) # sky_master_fn_ = os.path.splitext(os.path.basename(sky_names[0]))[0] # sky_master_fn = igr_path.get_secondary_calib_filename(sky_master_fn_) import astropy.io.fits as pyfits masterhdu = pyfits.open(sky_filenames[0])[0] igr_storage.store(raw_spec_product, mastername=sky_filenames[0], masterhdu=masterhdu) # fn = sky_path.get_secondary_path("raw_spec") # raw_spec_product.save(fn, # masterhdu=masterhdu) from libs.master_calib import load_sky_ref_data # ref_date = "20140316" refdate = config.get_value("REFDATE", utdate) sky_ref_data = load_sky_ref_data(refdate, band) if 1: # initial wavelength solution # this need to be fixed # thar_db.query(sky_master_obsid) # json_name_ = "SDC%s_%s_0003.median_spectra.wvlsol" % (band, # igrins_log.date) from libs.storage_descriptions import THAR_WVLSOL_JSON_DESC thar_basename = thar_db.query(band, master_obsid) thar_wvl_sol = igr_storage.load([THAR_WVLSOL_JSON_DESC], thar_basename)[THAR_WVLSOL_JSON_DESC] #print thar_wvl_sol.keys() #["wvl_sol"] #json_name = thar_path.get_secondary_path("wvlsol_v0") #json_name = igr_path.get_secondary_calib_filename(json_name_) #thar_wvl_sol = PipelineProducts.load(json_name) if 1: # Now we fit with gaussian profile for matched positions. 
ohline_indices = sky_ref_data["ohline_indices"] ohlines_db = sky_ref_data["ohlines_db"] wvl_solutions = thar_wvl_sol["wvl_sol"] if 0: # it would be better to iteratively refit the solution fn = sky_path.get_secondary_path("wvlsol_v1") p = PipelineProducts.load(fn) wvl_solutionv = p["wvl_sol"] orders_w_solutions_ = thar_wvl_sol["orders"] from libs.storage_descriptions import ONED_SPEC_JSON_DESC orders_w_solutions = [ o for o in orders_w_solutions_ if o in raw_spec_product[ONED_SPEC_JSON_DESC]["orders"] ] _ = dict( zip(raw_spec_product[ONED_SPEC_JSON_DESC]["orders"], raw_spec_product[ONED_SPEC_JSON_DESC]["specs"])) s_list = [_[o] for o in orders_w_solutions] from libs.reidentify_ohlines import fit_ohlines ref_pixel_list, reidentified_lines = \ fit_ohlines(ohlines_db, ohline_indices, orders_w_solutions, wvl_solutions, s_list) # from scipy.interpolate import interp1d # from reidentify import reidentify_lines_all x = np.arange(2048) # line_indices_list = [ref_ohline_indices[str(o)] for o in igrins_orders[band]] ###### not fit identified lines from libs.ecfit import get_ordered_line_data, fit_2dspec, check_fit # d_x_wvl = {} # for order, z in echel.zdata.items(): # xy_T = affine_tr.transform(np.array([z.x, z.y]).T) # x_T = xy_T[:,0] # d_x_wvl[order]=(x_T, z.wvl) reidentified_lines_map = dict( zip(orders_w_solutions, reidentified_lines)) if band == "K": import libs.master_calib as master_calib fn = "hitran_bootstrap_K_%s.json" % refdate bootstrap_name = master_calib.get_master_calib_abspath(fn) import json bootstrap = json.load(open(bootstrap_name)) import libs.hitran as hitran r, ref_pixel_list = hitran.reidentify(wvl_solutions, s_list, bootstrap) # json_name = "hitran_reidentified_K_%s.json" % igrins_log.date # r = json.load(open(json_name)) for i, s in r.items(): ss = reidentified_lines_map[int(i)] ss0 = np.concatenate([ss[0], s["pixel"]]) ss1 = np.concatenate([ss[1], s["wavelength"]]) reidentified_lines_map[int(i)] = (ss0, ss1) xl, yl, zl = get_ordered_line_data(reidentified_lines_map) # xl : pixel # yl : order # zl : wvl * order x_domain = [0, 2047] y_domain = [orders_w_solutions[0] - 2, orders_w_solutions[-1] + 2] x_degree, y_degree = 4, 3 #x_degree, y_degree = 3, 2 p, m = fit_2dspec(xl, yl, zl, x_degree=x_degree, y_degree=y_degree, x_domain=x_domain, y_domain=y_domain) # derive wavelengths. xx = np.arange(2048) wvl_sol = [] for o in orders_w_solutions: oo = np.empty_like(xx) oo.fill(o) wvl = p(xx, oo) / o wvl_sol.append(list(wvl)) oh_sol_products = PipelineProducts( "Wavelength solution based on ohlines") #from libs.process_thar import ONED_SPEC_JSON from libs.products import PipelineDict from libs.storage_descriptions import SKY_WVLSOL_JSON_DESC oh_sol_products.add( SKY_WVLSOL_JSON_DESC, PipelineDict(orders=orders_w_solutions, wvl_sol=wvl_sol)) if 1: if 1: # save as WAT fits header xx = np.arange(0, 2048) xx_plus1 = np.arange(1, 2048 + 1) from astropy.modeling import models, fitting # We convert 2d chebyshev solution to a seriese of 1d # chebyshev. For now, use naive (and inefficient) # approach of refitting the solution with 1d. Should be # reimplemented. 
p1d_list = [] for o in orders_w_solutions: oo = np.empty_like(xx) oo.fill(o) wvl = p(xx, oo) / o * 1.e4 # um to angstrom p_init1d = models.Chebyshev1D(domain=[1, 2048], degree=p.x_degree) fit_p1d = fitting.LinearLSQFitter() p1d = fit_p1d(p_init1d, xx_plus1, wvl) p1d_list.append(p1d) from libs.iraf_helper import get_wat_spec, default_header_str wat_list = get_wat_spec(orders_w_solutions, p1d_list) # cards = [pyfits.Card.fromstring(l.strip()) \ # for l in open("echell_2dspec.header")] cards = [pyfits.Card.fromstring(l.strip()) \ for l in default_header_str] wat = "wtype=multispec " + " ".join(wat_list) char_per_line = 68 num_line, remainder = divmod(len(wat), char_per_line) for i in range(num_line): k = "WAT2_%03d" % (i + 1, ) v = wat[char_per_line * i:char_per_line * (i + 1)] #print k, v c = pyfits.Card(k, v) cards.append(c) if remainder > 0: i = num_line k = "WAT2_%03d" % (i + 1, ) v = wat[char_per_line * i:] #print k, v c = pyfits.Card(k, v) cards.append(c) if 1: # save fits with empty header header = pyfits.Header(cards) hdu = pyfits.PrimaryHDU(header=header, data=np.array([]).reshape((0, 0))) from libs.storage_descriptions import SKY_WVLSOL_FITS_DESC from libs.products import PipelineImage oh_sol_products.add(SKY_WVLSOL_FITS_DESC, PipelineImage([], np.array(wvl_sol))) igr_storage.store(oh_sol_products, mastername=sky_filenames[0], masterhdu=hdu) #fn = sky_path.get_secondary_path("wvlsol_v1.fits") #hdu.writeto(fn, clobber=True) if 0: # plot all spectra for w, s in zip(wvl_sol, s_list): plot(w, s) if 1: # filter out the line indices not well fit by the surface keys = reidentified_lines_map.keys() di_list = [len(reidentified_lines_map[k_][0]) for k_ in keys] endi_list = np.add.accumulate(di_list) filter_mask = [ m[endi - di:endi] for di, endi in zip(di_list, endi_list) ] #from itertools import compress # _ = [list(compress(indices, mm)) for indices, mm \ # in zip(line_indices_list, filter_mask)] # line_indices_list_filtered = _ reidentified_lines_ = [reidentified_lines_map[k_] for k_ in keys] _ = [(v_[0][mm], v_[1][mm]) for v_, mm \ in zip(reidentified_lines_, filter_mask)] reidentified_lines_map_filtered = dict(zip(orders_w_solutions, _)) if 1: from matplotlib.figure import Figure fig1 = Figure(figsize=(12, 7)) check_fit(fig1, xl, yl, zl, p, orders_w_solutions, reidentified_lines_map) fig1.tight_layout() fig2 = Figure(figsize=(12, 7)) check_fit(fig2, xl[m], yl[m], zl[m], p, orders_w_solutions, reidentified_lines_map_filtered) fig2.tight_layout() if 1: from libs.qa_helper import figlist_to_pngs sky_figs = igr_path.get_section_filename_base( "QA_PATH", "oh_fit2d", "oh_fit2d_" + sky_basename) figlist_to_pngs(sky_figs, [fig1, fig2]) if 1: from libs.products import ProductDB sky_db_name = igr_path.get_section_filename_base( "PRIMARY_CALIB_PATH", "sky.db", ) sky_db = ProductDB(sky_db_name) sky_db.update(band, sky_basename)
def process_distortion_sky_band(utdate, refdate, band, obsids, config): from libs.products import ProductDB, PipelineStorage igr_path = IGRINSPath(config, utdate) igr_storage = PipelineStorage(igr_path) sky_filenames = igr_path.get_filenames(band, obsids) sky_basename = os.path.splitext(os.path.basename(sky_filenames[0]))[0] master_obsid = obsids[0] flaton_db_name = igr_path.get_section_filename_base( "PRIMARY_CALIB_PATH", "flat_on.db", ) flaton_db = ProductDB(flaton_db_name) # thar_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH", # "thar.db", # ) # thar_db = ProductDB(thar_db_name) from libs.storage_descriptions import (COMBINED_IMAGE_DESC, ONED_SPEC_JSON_DESC) raw_spec_products = igr_storage.load( [COMBINED_IMAGE_DESC, ONED_SPEC_JSON_DESC], sky_basename) # raw_spec_products = PipelineProducts.load(sky_path.get_secondary_path("raw_spec")) from libs.storage_descriptions import SKY_WVLSOL_JSON_DESC wvlsol_products = igr_storage.load([SKY_WVLSOL_JSON_DESC], sky_basename)[SKY_WVLSOL_JSON_DESC] orders_w_solutions = wvlsol_products["orders"] wvl_solutions = wvlsol_products["wvl_sol"] ap = load_aperture2(igr_storage, band, master_obsid, flaton_db, raw_spec_products[ONED_SPEC_JSON_DESC]["orders"], orders_w_solutions) #orders_w_solutions = ap.orders if 1: # load reference data from libs.master_calib import load_sky_ref_data ref_utdate = config.get_value("REFDATE", utdate) sky_ref_data = load_sky_ref_data(ref_utdate, band) ohlines_db = sky_ref_data["ohlines_db"] ref_ohline_indices = sky_ref_data["ohline_indices"] orders_w_solutions = wvlsol_products["orders"] wvl_solutions = wvlsol_products["wvl_sol"] if 1: n_slice_one_direction = 2 n_slice = n_slice_one_direction * 2 + 1 i_center = n_slice_one_direction slit_slice = np.linspace(0., 1., n_slice + 1) slice_center = (slit_slice[i_center], slit_slice[i_center + 1]) slice_up = [(slit_slice[i_center+i], slit_slice[i_center+i+1]) \ for i in range(1, n_slice_one_direction+1)] slice_down = [(slit_slice[i_center-i-1], slit_slice[i_center-i]) \ for i in range(n_slice_one_direction)] d = raw_spec_products[COMBINED_IMAGE_DESC].data s_center = ap.extract_spectra_v2(d, slice_center[0], slice_center[1]) s_up, s_down = [], [] for s1, s2 in slice_up: s = ap.extract_spectra_v2(d, s1, s2) s_up.append(s) for s1, s2 in slice_down: s = ap.extract_spectra_v2(d, s1, s2) s_down.append(s) if 1: # now fit #ohline_indices = [ref_ohline_indices[o] for o in orders_w_solutions] if 0: def test_order(oi): ax = subplot(111) ax.plot(wvl_solutions[oi], s_center[oi]) #ax.plot(wvl_solutions[oi], raw_spec_products["specs"][oi]) o = orders[oi] line_indices = ref_ohline_indices[o] for li in line_indices: um = np.take(ohlines_db.um, li) intensity = np.take(ohlines_db.intensity, li) ax.vlines(um, ymin=0, ymax=-intensity) from libs.reidentify_ohlines import fit_ohlines, fit_ohlines_pixel def get_reidentified_lines_OH(orders_w_solutions, wvl_solutions, s_center): ref_pixel_list, reidentified_lines = \ fit_ohlines(ohlines_db, ref_ohline_indices, orders_w_solutions, wvl_solutions, s_center) reidentified_lines_map = dict( zip(orders_w_solutions, reidentified_lines)) return reidentified_lines_map, ref_pixel_list if band == "H": reidentified_lines_map, ref_pixel_list_oh = \ get_reidentified_lines_OH(orders_w_solutions, wvl_solutions, s_center) def refit_centroid(s_center, ref_pixel_list=ref_pixel_list_oh): centroids = fit_ohlines_pixel(s_center, ref_pixel_list) return centroids else: # band K reidentified_lines_map, ref_pixel_list_oh = \ 
get_reidentified_lines_OH(orders_w_solutions, wvl_solutions, s_center) import libs.master_calib as master_calib fn = "hitran_bootstrap_K_%s.json" % ref_utdate bootstrap_name = master_calib.get_master_calib_abspath(fn) import json bootstrap = json.load(open(bootstrap_name)) import libs.hitran as hitran r, ref_pixel_dict_hitrans = hitran.reidentify( wvl_solutions, s_center, bootstrap) # for i, s in r.items(): # ss = reidentified_lines_map[int(i)] # ss0 = np.concatenate([ss[0], s["pixel"]]) # ss1 = np.concatenate([ss[1], s["wavelength"]]) # reidentified_lines_map[int(i)] = (ss0, ss1) #reidentified_lines_map, ref_pixel_list def refit_centroid(s_center, ref_pixel_list=ref_pixel_list_oh, ref_pixel_dict_hitrans=ref_pixel_dict_hitrans): centroids_oh = fit_ohlines_pixel(s_center, ref_pixel_list) s_dict = dict(zip(orders_w_solutions, s_center)) centroids_dict_hitrans = hitran.fit_hitrans_pixel( s_dict, ref_pixel_dict_hitrans) centroids = [] for o, c_oh in zip(orders_w_solutions, centroids_oh): if o in centroids_dict_hitrans: c = np.concatenate( [c_oh, centroids_dict_hitrans[o]["pixel"]]) centroids.append(c) else: centroids.append(c_oh) return centroids # reidentified_lines_map = get_reidentified_lines(orders_w_solutions, # wvl_solutions, # s_center) if 1: # TODO: we should not need this, instead recycle from preivious step. fitted_centroid_center = refit_centroid(s_center) # fitted_centroid_center = fit_ohlines_pixel(s_center, # ref_pixel_list) d_shift_up = [] for s in s_up: # TODO: ref_pixel_list_filtered need to be updated with recent fit. fitted_centroid = refit_centroid(s) # fitted_centroid = fit_ohlines_pixel(s, # ref_pixel_list) d_shift = [ b - a for a, b in zip(fitted_centroid_center, fitted_centroid) ] d_shift_up.append(d_shift) d_shift_down = [] for s in s_down: # TODO: ref_pixel_list_filtered need to be updated with recent fit. fitted_centroid = refit_centroid(s) # fitted_centroid = fit_ohlines_pixel(s, # ref_pixel_list) #fitted_centroid_center, d_shift = [ b - a for a, b in zip(fitted_centroid_center, fitted_centroid) ] d_shift_down.append(d_shift) if 1: # now fit orders = orders_w_solutions x_domain = [0, 2048] y_domain = [orders[0] - 2, orders[-1] + 2] xl = np.concatenate(fitted_centroid_center) yl_ = [ o + np.zeros_like(x_) for o, x_ in zip(orders, fitted_centroid_center) ] yl = np.concatenate(yl_) from libs.ecfit import fit_2dspec, check_fit_simple zl_list = [np.concatenate(d_) for d_ \ in d_shift_down[::-1] + d_shift_up] pm_list = [] for zl in zl_list: p, m = fit_2dspec(xl, yl, zl, x_degree=1, y_degree=1, x_domain=x_domain, y_domain=y_domain) pm_list.append((p, m)) zz_std_list = [] for zl, (p, m) in zip(zl_list, pm_list): z_m = p(xl[m], yl[m]) zz = z_m - zl[m] zz_std_list.append(zz.std()) fig_list = [] from matplotlib.figure import Figure for zl, (p, m) in zip(zl_list, pm_list): fig = Figure() check_fit_simple(fig, xl[m], yl[m], zl[m], p, orders) fig_list.append(fig) if 1: xi = np.linspace(0, 2048, 128 + 1) from astropy.modeling import fitting from astropy.modeling.polynomial import Chebyshev2D x_domain = [0, 2048] y_domain = [0., 1.] 
p2_list = [] for o in orders: oi = np.zeros_like(xi) + o shift_list = [] for p, m in pm_list[:n_slice_one_direction]: shift_list.append(p(xi, oi)) shift_list.append(np.zeros_like(xi)) for p, m in pm_list[n_slice_one_direction:]: shift_list.append(p(xi, oi)) p_init = Chebyshev2D(x_degree=1, y_degree=2, x_domain=x_domain, y_domain=y_domain) f = fitting.LinearLSQFitter() yi = 0.5 * (slit_slice[:-1] + slit_slice[1:]) xl, yl = np.meshgrid(xi, yi) zl = np.array(shift_list) p = f(p_init, xl, yl, zl) p2_list.append(p) if 1: p2_dict = dict(zip(orders, p2_list)) # save order_map, etc order_map = ap.make_order_map() slitpos_map = ap.make_slitpos_map() order_map2 = ap.make_order_map(mask_top_bottom=True) slitoffset_map = np.empty_like(slitpos_map) slitoffset_map.fill(np.nan) wavelength_map = np.empty_like(slitpos_map) wavelength_map.fill(np.nan) from scipy.interpolate import interp1d for o, wvl in zip(ap.orders, wvl_solutions): xi = np.arange(0, 2048) xl, yl = np.meshgrid(xi, xi) msk = order_map == o xl_msk = xl[msk] slitoffset_map_msk = p2_dict[o](xl_msk, slitpos_map[msk]) slitoffset_map[msk] = slitoffset_map_msk wvl_interp1d = interp1d(xi, wvl, bounds_error=False) wavelength_map[msk] = wvl_interp1d(xl_msk - slitoffset_map_msk) from libs.storage_descriptions import (ORDERMAP_FITS_DESC, SLITPOSMAP_FITS_DESC, SLITOFFSET_FITS_DESC, WAVELENGTHMAP_FITS_DESC, ORDERMAP_MASKED_FITS_DESC) from libs.products import PipelineImage, PipelineProducts products = PipelineProducts("Distortion map") for desc, im in [(ORDERMAP_FITS_DESC, order_map), (SLITPOSMAP_FITS_DESC, slitpos_map), (SLITOFFSET_FITS_DESC, slitoffset_map), (WAVELENGTHMAP_FITS_DESC, wavelength_map), (ORDERMAP_MASKED_FITS_DESC, order_map2)]: products.add(desc, PipelineImage([], im)) igr_storage.store(products, mastername=sky_filenames[0], masterhdu=None) from libs.qa_helper import figlist_to_pngs sky_figs = igr_path.get_section_filename_base( "QA_PATH", "oh_distortion", "oh_distortion_" + sky_basename) print fig_list figlist_to_pngs(sky_figs, fig_list) if 0: # test x = np.arange(2048, dtype="d") oi = 10 o = orders[oi] yi = 0.5 * (slit_slice[:-1] + slit_slice[1:]) ax1 = subplot(211) s1 = s_up[-1][oi] s2 = s_down[-1][oi] ax1.plot(x, s1) ax1.plot(x, s2) ax2 = subplot(212, sharex=ax1, sharey=ax1) dx1 = p2_dict[o](x, yi[-1] + np.zeros_like(x)) ax2.plot(x - dx1, s1) dx2 = p2_dict[o](x, yi[0] + np.zeros_like(x)) ax2.plot(x - dx2, s2)
    def process(self, recipe, band, obsids, frametypes):

        igr_path = self.igr_path
        igr_storage = self.igr_storage

        if recipe == "A0V_AB":
            DO_STD = True
            # FIX_TELLURIC = False
        elif recipe == "STELLAR_AB":
            DO_STD = False
            # FIX_TELLURIC = True
        elif recipe == "EXTENDED_AB":
            DO_STD = False
            # FIX_TELLURIC = True
        elif recipe == "EXTENDED_ONOFF":
            DO_STD = False
            # FIX_TELLURIC = True

        if 1:
            obj_filenames = igr_path.get_filenames(band, obsids)
            master_obsid = obsids[0]
            tgt_basename = os.path.splitext(os.path.basename(obj_filenames[0]))[0]

            db = {}
            basenames = {}

            db_types = ["flat_off", "flat_on", "thar", "sky"]

            for db_type in db_types:
                db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH",
                                                             "%s.db" % db_type,
                                                             )
                db[db_type] = ProductDB(db_name)

            # db on output path
            db_types = ["a0v"]

            for db_type in db_types:
                db_name = igr_path.get_section_filename_base("OUTDATA_PATH",
                                                             "%s.db" % db_type,
                                                             )
                db[db_type] = ProductDB(db_name)

            # to get basenames
            db_types = ["flat_off", "flat_on", "thar", "sky"]
            # if FIX_TELLURIC:
            #     db_types.append("a0v")

            for db_type in db_types:
                basenames[db_type] = db[db_type].query(band, master_obsid)

        if 1:  # make aperture
            from libs.storage_descriptions import SKY_WVLSOL_JSON_DESC

            sky_basename = db["sky"].query(band, master_obsid)
            wvlsol_products = igr_storage.load([SKY_WVLSOL_JSON_DESC],
                                               sky_basename)[SKY_WVLSOL_JSON_DESC]

            orders_w_solutions = wvlsol_products["orders"]
            wvl_solutions = map(np.array, wvlsol_products["wvl_sol"])

            from libs.storage_descriptions import ONED_SPEC_JSON_DESC
            raw_spec_products = igr_storage.load([ONED_SPEC_JSON_DESC],
                                                 sky_basename)

            from recipe_wvlsol_sky import load_aperture2

            ap = load_aperture2(igr_storage, band, master_obsid,
                                db["flat_on"],
                                raw_spec_products[ONED_SPEC_JSON_DESC]["orders"],
                                orders_w_solutions)

            # This should be saved somewhere and loaded, instead of making it every time.
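            # Descriptive note (added for clarity): the aperture object is
            # used next to rasterize the per-order geometry -- an order map
            # (each pixel labeled with its order number, 0 outside any order),
            # a slit-position map (fractional position along the slit,
            # apparently normalized to [0, 1]), and a second order map with
            # the slit edges masked.  The exact conventions live in the
            # aperture class.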
            order_map = ap.make_order_map()
            slitpos_map = ap.make_slitpos_map()
            order_map2 = ap.make_order_map(mask_top_bottom=True)

        if 1:
            from libs.storage_descriptions import (HOTPIX_MASK_DESC,
                                                   DEADPIX_MASK_DESC,
                                                   ORDER_FLAT_IM_DESC,
                                                   ORDER_FLAT_JSON_DESC,
                                                   FLAT_MASK_DESC)

            hotpix_mask = igr_storage.load([HOTPIX_MASK_DESC],
                                           basenames["flat_off"])[HOTPIX_MASK_DESC]

            deadpix_mask = igr_storage.load([DEADPIX_MASK_DESC],
                                            basenames["flat_on"])[DEADPIX_MASK_DESC]

            pix_mask = hotpix_mask.data | deadpix_mask.data

            # aperture_solution_products = PipelineProducts.load(aperture_solutions_name)

            orderflat_ = igr_storage.load([ORDER_FLAT_IM_DESC],
                                          basenames["flat_on"])[ORDER_FLAT_IM_DESC]

            orderflat = orderflat_.data
            orderflat[pix_mask] = np.nan

            orderflat_json = igr_storage.load([ORDER_FLAT_JSON_DESC],
                                              basenames["flat_on"])[ORDER_FLAT_JSON_DESC]
            order_flat_meanspec = np.array(orderflat_json["mean_order_specs"])

            # flat_normed = igr_storage.load([FLAT_NORMED_DESC],
            #                                basenames["flat_on"])[FLAT_NORMED_DESC]

            flat_mask = igr_storage.load([FLAT_MASK_DESC],
                                         basenames["flat_on"])[FLAT_MASK_DESC]
            bias_mask = flat_mask.data & (order_map2 > 0)

            SLITOFFSET_FITS_DESC = ("PRIMARY_CALIB_PATH",
                                    "SKY_",
                                    ".slitoffset_map.fits")
            prod_ = igr_storage.load([SLITOFFSET_FITS_DESC],
                                     basenames["sky"])[SLITOFFSET_FITS_DESC]
            # fn = sky_path.get_secondary_path("slitoffset_map.fits")
            slitoffset_map = prod_.data

        if 1:
            abba_names = obj_filenames

            def filter_abba_names(abba_names, frametypes, frametype):
                return [an for an, ft in zip(abba_names, frametypes)
                        if ft == frametype]

            a_name_list = filter_abba_names(abba_names, frametypes, "A")
            b_name_list = filter_abba_names(abba_names, frametypes, "B")

            if recipe in ["A0V_AB", "STELLAR_AB"]:
                IF_POINT_SOURCE = True
            elif recipe in ["EXTENDED_AB", "EXTENDED_ONOFF"]:
                IF_POINT_SOURCE = False
            else:
                print "Unknown recipe : %s" % recipe

        if 1:
            # ab_names = ab_names_list[0]
            # master_hdu = pyfits.open(a_name_list[0])[0]

            a_list = [pyfits.open(name)[0].data
                      for name in a_name_list]
            b_list = [pyfits.open(name)[0].data
                      for name in b_name_list]

            # we may need to destripe

            # first define the extraction profile (gaussian).

            # dx = 100

            if IF_POINT_SOURCE:  # if point source
                # for point sources, the variance estimation becomes wrong
                # if the lengths of the two lists differ
                assert len(a_list) == len(b_list)

            # a_b != 1 for the cases when len(a) != len(b)
            a_b = float(len(a_list)) / len(b_list)

            a_data = np.sum(a_list, axis=0)
            b_data = np.sum(b_list, axis=0)

            data_minus = a_data - a_b * b_data
            # data_minus0 = data_minus

            from libs.destriper import destriper
            if 1:
                destrip_mask = ~np.isfinite(data_minus) | bias_mask

                data_minus = destriper.get_destriped(data_minus,
                                                     destrip_mask,
                                                     hori=True,
                                                     pattern=64)

            data_minus_flattened = data_minus / orderflat
            data_minus_flattened[~flat_mask.data] = np.nan
            # data_minus_flattened[order_flat_meanspec < 0.1*order_flat_meanspec.max()] = np.nan

            # for the variance, we need the square of a_b
            data_plus = (a_data + (a_b**2) * b_data)

            import scipy.ndimage as ni
            bias_mask2 = ni.binary_dilation(bias_mask)

            from libs import instrument_parameters
            gain = instrument_parameters.gain[band]

            # random noise
            variance0 = data_minus

            variance_ = variance0.copy()
            variance_[bias_mask2] = np.nan
            variance_[pix_mask] = np.nan

            mm = np.ma.array(variance0, mask=~np.isfinite(variance0))
            ss = np.ma.median(mm, axis=0)

            variance_ = variance_ - ss

            # iterate a fixed number of times; this needs to be improved.
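            # The loop that follows iteratively sigma-clips the
            # background-subtracted difference image, column by column, to
            # estimate the per-column noise level.  A minimal, self-contained
            # sketch of that clipping scheme on synthetic data (underscored
            # names are illustrative only):
            if 0:
                _rng = np.random.RandomState(0)
                _img = _rng.normal(0., 5., (64, 64))
                _img[10, 10] = 500.                      # a fake outlier
                for _i in range(5):
                    _col_std = np.nanstd(_img, axis=0)
                    _img[np.abs(_img) > 3 * _col_std] = np.nan
                _clipped_std = np.nanstd(_img, axis=0)   # per-column estimate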
            for i in range(5):
                st = np.nanstd(variance_, axis=0)
                variance_[np.abs(variance_) > 3 * st] = np.nan
            # st = np.nanstd(variance_, axis=0)

            variance = destriper.get_destriped(variance0,
                                               ~np.isfinite(variance_),
                                               pattern=64)

            variance_ = variance.copy()
            variance_[bias_mask2] = np.nan
            variance_[pix_mask] = np.nan

            st = np.nanstd(variance_)
            st = np.nanstd(variance_[np.abs(variance_) < 3 * st])

            variance_[np.abs(variance_ - ss) > 3 * st] = np.nan

            x_std = ni.median_filter(np.nanstd(variance_, axis=0), 11)

            variance_map0 = np.zeros_like(variance) + x_std**2

            variance_map = variance_map0 + np.abs(data_plus) / gain  # add Poisson noise in ADU
            # we ignore the effect of flattening

            # now estimate the lsf
            ordermap_bpixed = order_map.copy()
            ordermap_bpixed[pix_mask] = 0
            ordermap_bpixed[~np.isfinite(orderflat)] = 0

            #

            if IF_POINT_SOURCE:  # if point source
                x1, x2 = 800, 1200
                bins, lsf_list = ap.extract_lsf(ordermap_bpixed, slitpos_map,
                                                data_minus_flattened,
                                                x1, x2, bins=None)

                hh0 = np.sum(lsf_list, axis=0)
                peak1, peak2 = max(hh0), -min(hh0)
                lsf_x = 0.5 * (bins[1:] + bins[:-1])
                lsf_y = hh0 / (peak1 + peak2)

                from scipy.interpolate import UnivariateSpline
                lsf_ = UnivariateSpline(lsf_x, lsf_y, k=3, s=0,
                                        bbox=[0, 1])
                roots = list(lsf_.roots())
                # assert(len(roots) == 1)

                integ_list = []
                from itertools import izip, cycle
                for ss, int_r1, int_r2 in izip(cycle([1, -1]),
                                               [0] + roots,
                                               roots + [1]):
                    # print ss, int_r1, int_r2
                    integ_list.append(lsf_.integral(int_r1, int_r2))
                integ = np.abs(np.sum(integ_list))

                def lsf(o, x, slitpos):
                    return lsf_(slitpos) / integ

                # make weight map
                profile_map = ap.make_profile_map(order_map, slitpos_map, lsf)

                # try to select a portion of the slit to extract
                if self.frac_slit is not None:
                    frac1, frac2 = min(self.frac_slit), max(self.frac_slit)
                    slitpos_msk = (slitpos_map < frac1) | (slitpos_map > frac2)
                    profile_map[slitpos_msk] = np.nan

                # extract spec

                s_list, v_list = ap.extract_stellar(ordermap_bpixed,
                                                    profile_map,
                                                    variance_map,
                                                    data_minus_flattened,
                                                    slitoffset_map=slitoffset_map)

                # make synth_spec : profile * spectra
                synth_map = ap.make_synth_map(order_map, slitpos_map,
                                              profile_map, s_list,
                                              slitoffset_map=slitoffset_map)

                sig_map = (data_minus_flattened - synth_map)**2 / variance_map
                ## mark sig_map > 100 as cosmic rays. The threshold needs to be tuned.

                # re-extract with the new variance map, with cosmic rays rejected
                variance_map_r = variance_map0 + np.abs(synth_map) / gain
                variance_map2 = np.max([variance_map, variance_map_r], axis=0)
                variance_map2[np.abs(sig_map) > 100] = np.nan

                # masking this out will affect the saved combined image.
                data_minus_flattened[np.abs(sig_map) > 100] = np.nan

                # extract spec

                s_list, v_list = ap.extract_stellar(ordermap_bpixed,
                                                    profile_map,
                                                    variance_map2,
                                                    data_minus_flattened,
                                                    slitoffset_map=slitoffset_map)

            else:  # if extended source
                from scipy.interpolate import UnivariateSpline
                if recipe in ["EXTENDED_AB", "EXTENDED_ABBA"]:
                    delta = 0.01
                    lsf_ = UnivariateSpline([0, 0.5 - delta, 0.5 + delta, 1],
                                            [1., 1., -1., -1.],
                                            k=1, s=0,
                                            bbox=[0, 1])
                else:
                    lsf_ = UnivariateSpline([0, 1], [1., 1.],
                                            k=1, s=0,
                                            bbox=[0, 1])

                def lsf(o, x, slitpos):
                    return lsf_(slitpos)

                profile_map = ap.make_profile_map(order_map, slitpos_map, lsf)

                if self.frac_slit is not None:
                    frac1, frac2 = min(self.frac_slit), max(self.frac_slit)
                    slitpos_msk = (slitpos_map < frac1) | (slitpos_map > frac2)
                    profile_map[slitpos_msk] = np.nan

                # we need to update the variance map by rejecting cosmic
                # rays, but it is not clear how to do this for an extended
                # source.
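                # A simple alternative (not what this pipeline does; just a
                # hedged, stand-alone sketch) would be to flag pixels that
                # deviate strongly from a local median of the image.
                # Underscored names and the threshold are illustrative only.
                if 0:
                    import scipy.ndimage as _ni
                    _rng = np.random.RandomState(1)
                    _im = _rng.normal(0., 1., (32, 256))
                    _im[5, 100] = 50.                    # fake cosmic-ray hit
                    _smooth = _ni.median_filter(_im, size=(1, 5))
                    _crmask = (_im - _smooth)**2 > 100.  # arbitrary threshold
                    _im[_crmask] = np.nan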
                variance_map2 = variance_map

                s_list, v_list = ap.extract_stellar(ordermap_bpixed,
                                                    profile_map,
                                                    variance_map2,
                                                    data_minus_flattened,
                                                    slitoffset_map=slitoffset_map)

        if 1:  # calculate S/N per resolution
            sn_list = []
            for wvl, s, v in zip(wvl_solutions, s_list, v_list):

                dw = np.gradient(wvl)
                pixel_per_res_element = (wvl / 40000.) / dw
                # print pixel_per_res_element[1024]
                # len(pixel_per_res_element) = 2047. But we ignore it.

                sn = (s / v**.5) * (pixel_per_res_element**.5)

                sn_list.append(sn)

        if 1:  # save the product
            from libs.storage_descriptions import (COMBINED_IMAGE_DESC,
                                                   VARIANCE_MAP_DESC)
            from libs.products import PipelineImage

            r = PipelineProducts("1d specs")

            r.add(COMBINED_IMAGE_DESC, PipelineImage([],
                                                     data_minus_flattened))
            r.add(VARIANCE_MAP_DESC, PipelineImage([],
                                                   variance_map2))
            # r.add(VARIANCE_MAP_DESC, PipelineImage([],
            #                                        variance_map.data))

            igr_storage.store(r,
                              mastername=obj_filenames[0],
                              masterhdu=None)

        if 1:  # save spectra, variance, sn
            from libs.storage_descriptions import SKY_WVLSOL_FITS_DESC
            fn = igr_storage.get_path(SKY_WVLSOL_FITS_DESC,
                                      basenames["sky"])

            # fn = sky_path.get_secondary_path("wvlsol_v1.fits")
            f = pyfits.open(fn)

            d = np.array(s_list)
            f[0].data = d.astype("f32")

            from libs.storage_descriptions import (SPEC_FITS_DESC,
                                                   VARIANCE_FITS_DESC,
                                                   SN_FITS_DESC)

            fout = igr_storage.get_path(SPEC_FITS_DESC,
                                        tgt_basename)
            f.writeto(fout, clobber=True)

            d = np.array(v_list)
            f[0].data = d.astype("f32")
            fout = igr_storage.get_path(VARIANCE_FITS_DESC,
                                        tgt_basename)
            f.writeto(fout, clobber=True)

            d = np.array(sn_list)
            f[0].data = d.astype("f32")
            fout = igr_storage.get_path(SN_FITS_DESC,
                                        tgt_basename)
            f.writeto(fout, clobber=True)

        if 1:
            # from libs.storage_descriptions import ORDER_FLAT_JSON_DESC
            prod = igr_storage.load([ORDER_FLAT_JSON_DESC],
                                    basenames["flat_on"])[ORDER_FLAT_JSON_DESC]
            new_orders = prod["orders"]
            # fitted_response = orderflat_products["fitted_responses"]
            i1i2_list_ = prod["i1i2_list"]

            # order_indices = []
            i1i2_list = []

            for o in ap.orders:
                o_new_ind = np.searchsorted(new_orders, o)
                # order_indices.append(o_new_ind)
                i1i2_list.append(i1i2_list_[o_new_ind])

        if DO_STD:
            from libs.a0v_spec import (A0VSpec, TelluricTransmission,
                                       get_a0v, get_flattend)
            a0v_spec = A0VSpec()
            tel_trans = TelluricTransmission()

            wvl_limits = []
            for wvl_ in wvl_solutions:
                wvl_limits.extend([wvl_[0], wvl_[-1]])

            dwvl = abs(wvl_[0] - wvl_[-1]) * 0.2  # padding

            wvl1 = min(wvl_limits) - dwvl
            wvl2 = max(wvl_limits) + dwvl

            a0v_wvl, a0v_tel_trans, a0v_tel_trans_masked = get_a0v(a0v_spec,
                                                                   wvl1, wvl2,
                                                                   tel_trans)

            a0v_flattened = get_flattend(a0v_spec,
                                         a0v_wvl, a0v_tel_trans_masked,
                                         wvl_solutions, s_list,
                                         i1i2_list=i1i2_list)

            d = np.array(a0v_flattened)
            # d[~np.isfinite(d)] = 0.
            f[0].data = d.astype("f32")

            from libs.storage_descriptions import SPEC_FITS_FLATTENED_DESC
            fout = igr_storage.get_path(SPEC_FITS_FLATTENED_DESC,
                                        tgt_basename)

            f.writeto(fout, clobber=True)

            db["a0v"].update(band, tgt_basename)
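# The S/N computed in process() scales the per-pixel S/N by the square root of
# the number of pixels per resolution element, assuming a resolving power of
# R ~ 40000.  A self-contained illustration of that arithmetic with synthetic
# numbers; none of the underscored names are pipeline variables.
if 0:
    import numpy as np

    _wvl = np.linspace(2.0, 2.05, 2048)           # fake wavelength solution [um]
    _s = np.full(2048, 1000.)                     # fake extracted flux
    _v = np.full(2048, 100.)                      # fake variance

    _dw = np.gradient(_wvl)
    _pix_per_res = (_wvl / 40000.) / _dw          # pixels per resolution element
    _sn_per_pix = _s / _v**.5
    _sn_per_res = _sn_per_pix * _pix_per_res**.5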
def save_figures(helper, band, obsids):

    ### THIS NEEDS TO BE REFACTORED!

    caldb = helper.get_caldb()
    master_obsid = obsids[0]
    orders = caldb.load_resource_for((band, master_obsid),
                                     "orders")["orders"]

    thar_filenames = helper.get_filenames(band, obsids)
    thar_basename = os.path.splitext(os.path.basename(thar_filenames[0]))[0]
    thar_master_obsid = obsids[0]

    if 1:  # make amp and order flat

        ap = get_simple_aperture(helper, band, obsids,
                                 orders=orders)

        # from libs.storage_descriptions import ONED_SPEC_JSON_DESC
        # orders = thar_products[ONED_SPEC_JSON_DESC]["orders"]

        order_map = ap.make_order_map()
        # slitpos_map = ap.make_slitpos_map()

        # load flat on products
        # flat_on_params_name = flaton_path.get_secondary_path("flat_on_params")
        # flaton_products = PipelineProducts.load(flat_on_params_name)

        from libs.storage_descriptions import (FLAT_NORMED_DESC,
                                               FLAT_MASK_DESC)

        flaton_db_name = helper.igr_path.get_section_filename_base("PRIMARY_CALIB_PATH",
                                                                   "flat_on.db",
                                                                   )
        flaton_db = ProductDB(flaton_db_name)

        flaton_basename = flaton_db.query(band, thar_master_obsid)

        flaton_products = helper.igr_storage.load([FLAT_NORMED_DESC,
                                                   FLAT_MASK_DESC],
                                                  flaton_basename)

        from libs.process_flat import make_order_flat, check_order_flat
        order_flat_products = make_order_flat(flaton_products,
                                              orders, order_map)

        # fn = thar_path.get_secondary_path("orderflat")
        # order_flat_products.save(fn, masterhdu=hdu)

        hdu = pyfits.open(thar_filenames[0])[0]
        helper.igr_storage.store(order_flat_products,
                                 mastername=flaton_basename,
                                 masterhdu=hdu)

        flat_mask = helper.igr_storage.load1(FLAT_MASK_DESC,
                                             flaton_basename)
        order_map2 = ap.make_order_map(mask_top_bottom=True)
        bias_mask = flat_mask.data & (order_map2 > 0)

        pp = PipelineProducts("")
        from libs.storage_descriptions import BIAS_MASK_DESC
        pp.add(BIAS_MASK_DESC,
               PipelineImageBase([], bias_mask))

        helper.igr_storage.store(pp,
                                 mastername=flaton_basename,
                                 masterhdu=hdu)

    if 1:
        fig_list = check_order_flat(order_flat_products)

        from libs.qa_helper import figlist_to_pngs
        orderflat_figs = helper.igr_path.get_section_filename_base("QA_PATH",
                                                                   "orderflat",
                                                                   "orderflat_" + thar_basename)
        figlist_to_pngs(orderflat_figs, fig_list)
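# figlist_to_pngs (libs.qa_helper) is used above to dump the QA figures to
# disk.  For reference, a minimal stand-alone equivalent for a list of
# matplotlib Figure objects might look like the sketch below; the helper name
# and file-name pattern are guesses, not the library's actual behavior.
if 0:
    from matplotlib.backends.backend_agg import FigureCanvasAgg

    def _figlist_to_pngs(basename, fig_list):
        for i, fig in enumerate(fig_list):
            FigureCanvasAgg(fig)  # attach an Agg canvas so savefig works
            fig.savefig("%s_fig%02d.png" % (basename, i))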