def store_a0v_results(self, igr_storage, extractor, a0v_flattened_data):
    """Store the flattened A0V 1-d spectra as a FITS product and register
    the target in the "a0v" calibration db.

    a0v_flattened_data : list of (ext_name, data) pairs; the first entry
        becomes the primary SPEC_FLATTENED image, the remaining entries
        are stored as extra extensions only when self.debug_output is set.
    """
    wvl_header, wvl_data, convert_data = \
        self.get_wvl_header_data(igr_storage, extractor)

    # use the first object frame as the master HDU and graft the
    # wavelength-solution header onto it
    f_obj = pyfits.open(extractor.obj_filenames[0])
    f_obj[0].header.extend(wvl_header)

    from libs.products import PipelineImage as Image
    image_list = [Image([("EXTNAME", "SPEC_FLATTENED")],
                        convert_data(a0v_flattened_data[0][1]))]

    if self.debug_output:
        # keep the intermediate products as named image extensions
        for ext_name, data in a0v_flattened_data[1:]:
            image_list.append(Image([("EXTNAME", ext_name.upper())],
                                    convert_data(data)))

    from libs.products import PipelineImages  #Base
    from libs.storage_descriptions import SPEC_FITS_FLATTENED_DESC
    r = PipelineProducts("flattened 1d specs")
    r.add(SPEC_FITS_FLATTENED_DESC, PipelineImages(image_list))

    mastername = extractor.obj_filenames[0]
    igr_storage.store(r, mastername=mastername, masterhdu=f_obj[0])

    # record this target as an A0V standard for later recipes
    tgt_basename = extractor.pr.tgt_basename
    extractor.db["a0v"].update(extractor.band, tgt_basename)
def get_sky_spectra(self, extractor, ap, band, master_obsid):
    """Extract median 1-d sky spectra and pair them with the initial
    (v0) ThAr wavelength solution.

    Returns (orders_w_solutions, wvl_solutions, s_list); only orders
    present in both the ThAr solution and the freshly extracted spectra
    are kept in orders_w_solutions / s_list.
    """
    from libs.process_thar import get_1d_median_specs
    sky_filenames = extractor.obj_filenames
    raw_spec_product = get_1d_median_specs(sky_filenames, ap)

    # sky_master_fn_ = os.path.splitext(os.path.basename(sky_names[0]))[0]
    # sky_master_fn = igr_path.get_secondary_calib_filename(sky_master_fn_)

    import libs.fits as pyfits
    masterhdu = pyfits.open(sky_filenames[0])[0]

    igr_storage = extractor.igr_storage
    igr_storage.store(raw_spec_product,
                      mastername=sky_filenames[0],
                      masterhdu=masterhdu)

    # fn = sky_path.get_secondary_path("raw_spec")
    # raw_spec_product.save(fn,
    #                       masterhdu=masterhdu)

    # initial wavelength solution
    # this need to be fixed
    # thar_db.query(sky_master_obsid)

    # json_name_ = "SDC%s_%s_0003.median_spectra.wvlsol" % (band,
    #                                                       igrins_log.date)
    #from libs.storage_descriptions import THAR_WVLSOL_JSON_DESC
    from libs.storage_descriptions import WVLSOL_V0_JSON_DESC

    thar_basename = extractor.db["thar"].query(band, master_obsid)
    thar_wvl_sol = igr_storage.load([WVLSOL_V0_JSON_DESC],
                                    thar_basename)[WVLSOL_V0_JSON_DESC]

    #print thar_wvl_sol.keys()
    #["wvl_sol"]
    #json_name = thar_path.get_secondary_path("wvlsol_v0")
    #json_name = igr_path.get_secondary_calib_filename(json_name_)
    #thar_wvl_sol = PipelineProducts.load(json_name)

    if 0:  # it would be better to iteratively refit the solution
        fn = sky_path.get_secondary_path("wvlsol_v1")
        p = PipelineProducts.load(fn)
        wvl_solutionv = p["wvl_sol"]

    orders_w_solutions_ = thar_wvl_sol["orders"]
    from libs.storage_descriptions import ONED_SPEC_JSON_DESC
    # keep only orders that also appear in the extracted median spectra
    orders_w_solutions = [o for o in orders_w_solutions_
                          if o in raw_spec_product[ONED_SPEC_JSON_DESC]["orders"]]
    _ = dict(zip(raw_spec_product[ONED_SPEC_JSON_DESC]["orders"],
                 raw_spec_product[ONED_SPEC_JSON_DESC]["specs"]))
    s_list = [_[o]for o in orders_w_solutions]

    # NOTE(review): wvl_solutions covers all ThAr orders while s_list is
    # filtered to the common subset -- confirm the lengths agree downstream.
    wvl_solutions = thar_wvl_sol["wvl_sol"]

    return orders_w_solutions, wvl_solutions, s_list
def make_combined_image_thar(helper, band, obsids):
    """Median-combine the frames for *obsids* and destripe the result.

    Simple median combine with destriping; suitable for sky frames.
    """
    from destriper import destriper

    fn_list, basename, master_obsid = helper.get_base_info(band, obsids)
    arrays = [pyfits.open(fn)[0].data for fn in fn_list]
    combined = stsci_median(arrays)
    return destriper.get_destriped(combined)
def load_cires(self):
    """Load the CRIRES/HITRAN line catalog, cut to [wvl_min, wvl_max],
    and return (wavelengths, median-filtered weights)."""
    catalog = pyfits.open("crires/CR_GCAT_061130A_lines_hitran.fits")
    table = catalog[1].data

    # catalog wavelength is in nm; convert to um and normalize emission
    wvl = table["Wavelength"]*1.e-3
    weight = table["Emission"]/.5e-11

    lo = np.searchsorted(wvl, self.wvl_min)
    hi = np.searchsorted(wvl, self.wvl_max)
    wvl1 = wvl[lo:hi]
    s1 = weight[lo:hi]

    return wvl1, self.get_median_filtered_spec(wvl1, s1)
def get_wvl_header_data(self, igr_storage, extractor):
    """Return (header, wvl_data, convert_data) from the sky wvlsol FITS.

    convert_data is a callable applied to spectral arrays so their order
    direction matches the header convention: when
    self.wavelength_increasing_order is set the header is inverted via
    iraf_helper and data rows are reversed; otherwise both pass through
    unchanged.
    """
    from libs.storage_descriptions import SKY_WVLSOL_FITS_DESC
    fn = igr_storage.get_path(SKY_WVLSOL_FITS_DESC,
                              extractor.basenames["sky"])
    # fn = sky_path.get_secondary_path("wvlsol_v1.fits")
    f = pyfits.open(fn)

    # named defs instead of lambda assignment (PEP 8 E731)
    if self.wavelength_increasing_order:
        import libs.iraf_helper as iraf_helper
        header = iraf_helper.invert_order(f[0].header)

        def convert_data(d):
            # reverse the order axis so it increases with wavelength
            return d[::-1]
    else:
        header = f[0].header

        def convert_data(d):
            return d

    return header, f[0].data, convert_data
def store_1dspec(self, igr_storage, extractor, v_list, sn_list, s_list):
    """Store the extracted 1-d products: variance, S/N and spectra.

    Each list is converted to a float32 image (order x pixel), passed
    through convert_data, and written with the wavelength-solution
    header; the SPEC file additionally gets the wavelength map appended
    as an ImageHDU.
    """
    wvl_header, wvl_data, convert_data = \
        self.get_wvl_header_data(igr_storage, extractor)

    f_obj = pyfits.open(extractor.obj_filenames[0])
    f_obj[0].header.extend(wvl_header)

    tgt_basename = extractor.pr.tgt_basename

    from libs.storage_descriptions import (SPEC_FITS_DESC,
                                           VARIANCE_FITS_DESC,
                                           SN_FITS_DESC)

    def _write(desc, a_list):
        # shared path: float32 cast, order conversion, write to storage
        f_obj[0].data = convert_data(np.array(a_list).astype("f32"))
        fout = igr_storage.get_path(desc, tgt_basename)
        f_obj.writeto(fout, clobber=True)

    _write(VARIANCE_FITS_DESC, v_list)
    _write(SN_FITS_DESC, sn_list)

    # the spectrum file also carries the wavelength map as a 2nd HDU
    f_obj[0].data = convert_data(np.array(s_list).astype("f32"))
    fout = igr_storage.get_path(SPEC_FITS_DESC, tgt_basename)
    hdu_wvl = pyfits.ImageHDU(data=convert_data(wvl_data),
                              header=wvl_header)
    f_obj.append(hdu_wvl)
    f_obj.writeto(fout, clobber=True)
def get_1d_median_specs(fits_names, ap):
    """Median-combine *fits_names*, destripe, and extract 1-d spectra
    with the given aperture; return the resulting PipelineProducts."""
    from destriper import destriper
    from storage_descriptions import (COMBINED_IMAGE_DESC,
                                      ONED_SPEC_JSON_DESC)

    images = [pyfits.open(name)[0].data for name in fits_names]
    combined = destriper.get_destriped(stsci_median(images))

    specs = ap.extract_spectra_v2(combined)

    products = PipelineProducts("1d median specs")
    products.add(COMBINED_IMAGE_DESC, PipelineImageBase([], combined))
    products.add(ONED_SPEC_JSON_DESC,
                 PipelineDict(orders=ap.orders, specs=specs))
    return products
def get_data1(self, i, hori=True, vert=False):
    """Load the i-th object frame, destriped; when *vert* is set, also
    subtract each row's masked median."""
    from libs.destriper import destriper

    raw = pyfits.open(self.obj_filenames[i])[0].data
    bad = ~np.isfinite(raw) | self.destripe_mask
    data = destriper.get_destriped(raw, bad, pattern=64, hori=hori)

    if not vert:
        return data

    # subtract the per-row median, ignoring masked pixels
    masked = np.ma.array(data, mask=bad)
    row_med = np.ma.median(masked, axis=1)
    return data - row_med[:, np.newaxis]
def readmultispec(fitsfile, reform=True, quiet=False): """Read IRAF echelle spectrum in multispec format from a FITS file Can read most multispec formats including linear, log, cubic spline, Chebyshev or Legendre dispersion spectra If reform is true, a single spectrum dimensioned 4,1,NWAVE is returned as 4,NWAVE (this is the default.) If reform is false, it is returned as a 3-D array. """ fh = pyfits.open(fitsfile) try: header = fh[0].header flux = fh[0].data finally: fh.close() temp = flux.shape nwave = temp[-1] if len(temp) == 1: nspec = 1 else: nspec = temp[-2] # first try linear dispersion try: crval1 = header['crval1'] crpix1 = header['crpix1'] cd1_1 = header['cd1_1'] ctype1 = header['ctype1'] if ctype1.strip() == 'LINEAR': wavelen = np.zeros((nspec, nwave), dtype=float) ww = (np.arange(nwave, dtype=float) + 1 - crpix1) * cd1_1 + crval1 for i in range(nspec): wavelen[i, :] = ww # handle log spacing too dcflag = header.get('dc-flag', 0) if dcflag == 1: wavelen = 10.0 ** wavelen if not quiet: print 'Dispersion is linear in log wavelength' elif dcflag == 0: if not quiet: print 'Dispersion is linear' else: raise ValueError('Dispersion not linear or log (DC-FLAG=%s)' % dcflag) if nspec == 1 and reform: # get rid of unity dimensions flux = np.squeeze(flux) wavelen.shape = (nwave,) return {'flux': flux, 'wavelen': wavelen, 'header': header, 'wavefields': None} except KeyError: pass # get wavelength parameters from multispec keywords try: wat2 = header['wat2_*'] count = len(wat2) except KeyError: raise ValueError('Cannot decipher header, need either WAT2_ or CRVAL keywords') # concatenate them all together into one big string watstr = [] for i in range(len(wat2)): # hack to fix the fact that older pyfits versions (< 3.1) # strip trailing blanks from string values in an apparently # irrecoverable way # v = wat2[i].value v = wat2[i] v = v + (" " * (68 - len(v))) # restore trailing blanks watstr.append(v) watstr = ''.join(watstr) # find all the spec#="..." 
strings specstr = [''] * nspec for i in range(nspec): sname = 'spec' + str(i + 1) p1 = watstr.find(sname) p2 = watstr.find('"', p1) p3 = watstr.find('"', p2 + 1) if p1 < 0 or p1 < 0 or p3 < 0: raise ValueError('Cannot find ' + sname + ' in WAT2_* keyword') specstr[i] = watstr[p2 + 1:p3] wparms = np.zeros((nspec, 9), dtype=float) w1 = np.zeros(9, dtype=float) for i in range(nspec): w1 = np.asarray(specstr[i].split(), dtype=float) wparms[i, :] = w1[:9] if w1[2] == -1: raise ValueError('Spectrum %d has no wavelength calibration (type=%d)' % (i + 1, w1[2])) # elif w1[6] != 0: # raise ValueError('Spectrum %d has non-zero redshift (z=%f)' % (i+1,w1[6])) wavelen = np.zeros((nspec, nwave), dtype=float) wavefields = [None] * nspec for i in range(nspec): # if i in skipped_orders: # continue verbose = (not quiet) and (i == 0) if wparms[i, 2] == 0 or wparms[i, 2] == 1: # simple linear or log spacing wavelen[i, :] = np.arange(nwave, dtype=float) * wparms[i, 4] + wparms[i, 3] if wparms[i, 2] == 1: wavelen[i, :] = 10.0 ** wavelen[i, :] if verbose: print 'Dispersion is linear in log wavelength' elif verbose: print 'Dispersion is linear' else: # non-linear wavelengths wavelen[i, :], wavefields[i] = nonlinearwave(nwave, specstr[i], verbose=verbose) wavelen *= 1.0 + wparms[i, 6] if verbose: print "Correcting for redshift: z=%f" % wparms[i, 6] if nspec == 1 and reform: # get rid of unity dimensions flux = np.squeeze(flux) wavelen.shape = (nwave,) return {'flux': flux, 'wavelen': wavelen, 'header': header, 'wavefields': wavefields}
def get_data_variance(self, destripe_pattern=64,
                      use_destripe_mask=True,
                      sub_horizontal_median=True):
    """Combine the A and B nod frames and estimate the variance map.

    Returns (data_minus, variance_map, variance_map0): data_minus is the
    (optionally destriped) A - a_b*B sky-subtracted image, variance_map0
    is the background-noise estimate, and variance_map adds the poisson
    term computed from A + B.
    """
    abba_names = self.obj_filenames
    frametypes = self.frametypes

    def filter_abba_names(abba_names, frametypes, frametype):
        # select the filenames whose nod position matches frametype
        return [an for an, ft in zip(abba_names, frametypes) if ft == frametype]

    a_name_list = filter_abba_names(abba_names, frametypes, "A")
    b_name_list = filter_abba_names(abba_names, frametypes, "B")

    a_list = [pyfits.open(name)[0].data \
              for name in a_name_list]
    b_list = [pyfits.open(name)[0].data \
              for name in b_name_list]

    if self.ab_mode:
        # for point sources, variance estimation becomes wrong
        # if lenth of two is different,
        if len(a_list) != len(b_list):
            raise RuntimeError("For AB nodding, number of A and B should match!")

    # a_b != 1 for the cases when len(a) != len(b)
    a_b = float(len(a_list)) / len(b_list)

    a_data = np.sum(a_list, axis=0)
    b_data = np.sum(b_list, axis=0)

    data_minus = a_data - a_b*b_data
    #data_minus0 = data_minus

    if destripe_pattern is not None:
        data_minus = self.get_destriped(data_minus,
                                        destripe_pattern=destripe_pattern,
                                        use_destripe_mask=use_destripe_mask,
                                        sub_horizontal_median=sub_horizontal_median)

        # if use_destripe_mask:
        #     destrip_mask = ~np.isfinite(data_minus)|self.destripe_mask
        # else:
        #     destrip_mask = None

        # data_minus = destriper.get_destriped(data_minus,
        #                                      destrip_mask,
        #                                      pattern=destripe_pattern,
        #                                      hori=sub_horizontal_median)

    # remove sky
    # now estimate variance_map
    data_plus = (a_data + (a_b**2)*b_data)

    import scipy.ndimage as ni
    # grow the bias mask so edge pixels are also excluded
    bias_mask2 = ni.binary_dilation(self.destripe_mask)

    from libs.variance_map import (get_variance_map,
                                   get_variance_map0)

    variance_map0 = get_variance_map0(data_minus,
                                      bias_mask2, self.pix_mask)

    variance_map = get_variance_map(data_plus, variance_map0,
                                    gain=self.gain)

    return data_minus, variance_map, variance_map0
# Debug/QA script: compare the mean order spectra from the order flat
# with an observed spectrum divided by its smoothed continuum.
fig1 = figure(1)
clf()
ax1 = fig1.add_subplot(211)
ax2 = fig1.add_subplot(212, sharex=ax1)

s = json.load(open("calib/primary/20140525/ORDERFLAT_SDC%s_20140525_0074.json" % (band,)))
specs = s["mean_order_specs"]
# for s1 in specs:
#     plot(s1)

import libs.fits as pyfits
dd = pyfits.open("outdata/20140525/SDC%s_20140525_0016.spec.fits" % band)[0].data

# ii = 0
fig2 = figure(2)
fig2.clf()
ax3 = fig2.add_subplot(111)

# inspect a single order (index 5)
s1, a0v1, wvl1 = zip(specs, dd, wvl_sol)[5]

if 1:
    f12 = get_smooth_continuum(s1, wvl1)
    ax3.plot(wvl1, s1 / f12)
    # blank out the low-signal part of the continuum before dividing
    f12[f12 < np.nanmax(f12) * 0.05] = np.nan
    ax2.plot(wvl1, s1 / f12, zorder=0.2, color="0.5")
def fits_loader(fn):
    """Open *fn* as a FITS file and return the HDU list."""
    import libs.fits as pyfits
    hdu_list = pyfits.open(fn)
    return hdu_list
def plot_sol(ax, sol):
    """Debug helper: plot the fitted gaussian model over the data."""
    # NOTE(review): the *ax* and *sol* arguments are shadowed/ignored --
    # this plots the module-level xx, yy, sol_ and d_centers0 on a fresh
    # figure instead; confirm before relying on the arguments.
    import matplotlib.pyplot as plt
    fig = plt.figure(10)
    fig.clf()
    ax = fig.add_subplot(111)
    ax.plot(xx, yy)
    ax.plot(xx, _gauss0(sol_[0]))
    # mark the fitted line centers
    ax.vlines(sol_[0][0]+d_centers0, 0, 1)
    print d_centers0


if __name__ == "__main__":
    import libs.fits as pyfits
    # CRIRES/HITRAN emission-line catalog; wavelength converted to um
    f = pyfits.open("crires/CR_GCAT_061130A_lines_hitran.fits")
    d = f[1].data
    wvl, s = np.array(d["Wavelength"]*1.e-3), np.array(d["Emission"]/.5e-11)

    # wavelength (min, max) per IGRINS order of interest
    wvl_igr_minmax = [(2.452465109923166, 2.4849067561010396),
                      (2.4193347157047467, 2.4516074622043456),
                      (2.387095719967004, 2.4191645498897985),
                      (2.355713585492883, 2.387547928321348),
                      (2.3251555928135925, 2.356729090391157)]

    import scipy.ndimage as ni
    dlambda_pix = 120

    if 1:
        # let's make a cut-out of the s
        i1 = np.searchsorted(wvl, wvl_igr_minmax[4][0])
        i2 = np.searchsorted(wvl, wvl_igr_minmax[0][-1])
        s1 = s[i1:i2]
def store_2dspec(self, igr_storage, extractor,
                 data_shft, variance_map_shft,
                 ordermap_bpixed,
                 cr_mask=None):
    """Rectify the shifted 2-d spectra order-by-order and store the
    SPEC2D product (with an appended wavelength-map HDU) and the VAR2D
    variance cube."""
    wvl_header, wvl_data, convert_data = \
        self.get_wvl_header_data(igr_storage, extractor)

    f_obj = pyfits.open(extractor.obj_filenames[0])
    f_obj[0].header.extend(wvl_header)

    tgt_basename = extractor.pr.tgt_basename

    from libs.storage_descriptions import FLATCENTROID_SOL_JSON_DESC
    cent = igr_storage.load1(FLATCENTROID_SOL_JSON_DESC,
                             extractor.basenames["flat_on"])
    #cent = json.load(open("calib/primary/20140525/FLAT_SDCK_20140525_0074.centroid_solutions.json"))
    _bottom_up_solutions = cent["bottom_up_solutions"]
    old_orders = extractor.get_old_orders()
    _o_s = dict(zip(old_orders, _bottom_up_solutions))
    # keep only the solutions for orders that have a wavelength solution
    new_bottom_up_solutions = [_o_s[o] for o in \
                               extractor.orders_w_solutions]

    from libs.correct_distortion import get_flattened_2dspec

    d0_shft_list, msk_shft_list = \
        get_flattened_2dspec(data_shft,
                             ordermap_bpixed,
                             new_bottom_up_solutions)

    # normalize by the shifted mask coverage
    d = np.array(d0_shft_list) / np.array(msk_shft_list)
    f_obj[0].data = convert_data(d.astype("f32"))

    from libs.storage_descriptions import SPEC2D_FITS_DESC

    fout = igr_storage.get_path(SPEC2D_FITS_DESC,
                                tgt_basename)

    hdu_wvl = pyfits.ImageHDU(data=convert_data(wvl_data),
                              header=wvl_header)
    f_obj.append(hdu_wvl)

    f_obj.writeto(fout, clobber=True)

    #OUTPUT VAR2D, added by Kyle Kaplan Feb 25, 2015 to get variance map outputted as a datacube
    d0_shft_list, msk_shft_list = \
        get_flattened_2dspec(variance_map_shft,
                             ordermap_bpixed,
                             new_bottom_up_solutions)
    d = np.array(d0_shft_list) / np.array(msk_shft_list)
    # NOTE(review): unlike SPEC2D this is not passed through convert_data,
    # and the wvl HDU appended above is also written into the VAR2D file
    # -- confirm both are intended.
    f_obj[0].data = d.astype("f32")
    from libs.storage_descriptions import VAR2D_FITS_DESC
    fout = igr_storage.get_path(VAR2D_FITS_DESC,
                                tgt_basename)
    f_obj.writeto(fout, clobber=True)
def __init__(self):
    """Load the A15 telluric transmission model table."""
    path = get_master_calib_abspath("telluric/LBL_A15_s0_w050_R0060000_T.fits")
    table = pyfits.open(path)[1].data
    self.telluric = table
    # convenience views into the table columns
    self.trans = table["trans"]
    self.wvl = table["lam"]
# map each order with a wavelength solution to its index in new_orders
order_indices = []

for o in orders_w_solutions:
    o_new_ind = np.searchsorted(new_orders, o)
    order_indices.append(o_new_ind)

# per-order (i1, i2) pixel ranges, reordered to match orders_w_solutions
i1i2_list = []
for o_index in order_indices:
    i1i2_list.append(i1i2_list_[o_index])

a0v_wvl, a0v_tel_trans, a0v_tel_trans_masked = get_a0v(a0v_spec, wvl1, wvl2, tel_trans)

s_list = list(pyfits.open("outdata/20140525/SDCH_20140525_0016.spec.fits")[0].data)
order_flat_meanspec = np.array(of_prod["mean_order_specs"])

# for s, v in zip(s_list, order_flat_meanspec):
#     s[v<np.nanmax(v)*0.1] = np.nan

a0v_flattened = get_flattend(a0v_spec,
                             a0v_wvl, a0v_tel_trans_masked,
                             wvl_solutions, s_list,
                             i1i2_list=i1i2_list)

# overplot the flattened orders on the telluric transmission
for wvl, s2 in zip(wvl_solutions, a0v_flattened):
    plot(wvl, s2)

plot(a0v_wvl, a0v_tel_trans)
#d0_shft = np.empty_like(d0_acc_shft) d0_shft = d0_acc_shft[1:,:]-d0_acc_shft[:-1,:] d0_shft_list.append(d0_shft) return d0_shft_list d0_shft_list = get_shifted(data) msk_shft_list = get_shifted(msk) return d0_shft_list, msk_shft_list if __name__ == "__main__": d = pyfits.open("../outdata/20140525/SDCH_20140525_0016.combined_image.fits")[0].data msk = np.isfinite(pyfits.open("../outdata/20140525/SDCH_20140525_0042.combined_image.fits")[0].data) d[~msk] = np.nan slitoffset = pyfits.open("../calib/primary/20140525/SKY_SDCH_20140525_0029.slitoffset_map.fits")[0].data d[~np.isfinite(slitoffset)] = np.nan # now shift msk = np.isfinite(d) d0 = d.copy() d0[~msk] = 0.
def save_figures(helper, band, obsids):
    """Build the order flat and bias mask from the flat-on products,
    store them, and save the order-flat QA figures."""
    ### THIS NEEDS TO BE REFACTORED!

    caldb = helper.get_caldb()
    master_obsid = obsids[0]
    orders = caldb.load_resource_for((band, master_obsid),
                                     "orders")["orders"]

    thar_filenames = helper.get_filenames(band, obsids)
    thar_basename = os.path.splitext(os.path.basename(thar_filenames[0]))[0]
    thar_master_obsid = obsids[0]

    if 1:  # make amp and order falt

        ap = get_simple_aperture(helper, band, obsids,
                                 orders=orders)

        # from libs.storage_descriptions import ONED_SPEC_JSON_DESC

        #orders = thar_products[ONED_SPEC_JSON_DESC]["orders"]
        order_map = ap.make_order_map()
        #slitpos_map = ap.make_slitpos_map()

        # load flat on products
        #flat_on_params_name = flaton_path.get_secondary_path("flat_on_params")
        #flaton_products = PipelineProducts.load(flat_on_params_name)
        from libs.storage_descriptions import (FLAT_NORMED_DESC,
                                               FLAT_MASK_DESC)

        flaton_db_name = helper.igr_path.get_section_filename_base("PRIMARY_CALIB_PATH",
                                                                   "flat_on.db",
                                                                   )
        flaton_db = ProductDB(flaton_db_name)

        flaton_basename = flaton_db.query(band, thar_master_obsid)

        flaton_products = helper.igr_storage.load([FLAT_NORMED_DESC,
                                                   FLAT_MASK_DESC],
                                                  flaton_basename)

        from libs.process_flat import make_order_flat, check_order_flat
        order_flat_products = make_order_flat(flaton_products,
                                              orders, order_map)

        #fn = thar_path.get_secondary_path("orderflat")
        #order_flat_products.save(fn, masterhdu=hdu)

        # first ThAr frame acts as the master HDU for storage
        hdu = pyfits.open(thar_filenames[0])[0]
        helper.igr_storage.store(order_flat_products,
                                 mastername=flaton_basename,
                                 masterhdu=hdu)

        flat_mask = helper.igr_storage.load1(FLAT_MASK_DESC,
                                             flaton_basename)
        order_map2 = ap.make_order_map(mask_top_bottom=True)
        # bias mask: flat-mask pixels that fall inside (unmasked) orders
        bias_mask = flat_mask.data & (order_map2 > 0)

        pp = PipelineProducts("")
        from libs.storage_descriptions import BIAS_MASK_DESC
        pp.add(BIAS_MASK_DESC,
               PipelineImageBase([], bias_mask))

        helper.igr_storage.store(pp,
                                 mastername=flaton_basename,
                                 masterhdu=hdu)

    if 1:
        # QA figures for the order flat
        fig_list = check_order_flat(order_flat_products)

        from libs.qa_helper import figlist_to_pngs
        orderflat_figs = helper.igr_path.get_section_filename_base("QA_PATH",
                                                                   "orderflat",
                                                                   "orderflat_"+thar_basename)
        figlist_to_pngs(orderflat_figs, fig_list)
def process_flat_band(utdate, refdate, band, obsids_off, obsids_on, config):
    """Run the flat-field reduction for one band: flat-off hot-pixel map,
    flat-on dead-pixel map, order tracing, QA figures, and flat-off /
    flat-on db registration."""
    from libs.products import PipelineStorage

    igr_path = IGRINSPath(config, utdate)

    igr_storage = PipelineStorage(igr_path)

    flat_off_filenames = igr_path.get_filenames(band, obsids_off)
    flat_on_filenames = igr_path.get_filenames(band, obsids_on)

    if 1:  # process flat off

        flat_offs_hdu_list = [pyfits.open(fn_)[0] for fn_ in flat_off_filenames]
        flat_offs = [hdu.data for hdu in flat_offs_hdu_list]

        flat = FlatOff(flat_offs)
        flatoff_products = flat.make_flatoff_hotpixmap(sigma_clip1=100,
                                                       sigma_clip2=5)

        igr_storage.store(flatoff_products,
                          mastername=flat_off_filenames[0],
                          masterhdu=flat_offs_hdu_list[0])

    if 1:  # flat on
        from libs.storage_descriptions import (FLAT_OFF_DESC,
                                               HOTPIX_MASK_DESC,
                                               FLATOFF_JSON_DESC)

        desc_list = [FLAT_OFF_DESC, HOTPIX_MASK_DESC, FLATOFF_JSON_DESC]
        flatoff_products = igr_storage.load(desc_list,
                                            mastername=flat_off_filenames[0])

        flat_on_hdu_list = [pyfits.open(fn_)[0] for fn_ in flat_on_filenames]
        flat_ons = [hdu.data for hdu in flat_on_hdu_list]

        from libs.master_calib import get_master_calib_abspath
        # reference dead-pixel mask from the reference date
        fn = get_master_calib_abspath("deadpix_mask_%s_%s.fits" % (refdate,
                                                                   band))
        deadpix_mask_old = pyfits.open(fn)[0].data.astype(bool)

        flat_on = FlatOn(flat_ons)
        flaton_products = flat_on.make_flaton_deadpixmap(flatoff_products,
                                                         deadpix_mask_old=deadpix_mask_old)

        igr_storage.store(flaton_products,
                          mastername=flat_on_filenames[0],
                          masterhdu=flat_on_hdu_list[0])

    if 1:  # now trace the orders

        from libs.process_flat import trace_orders

        trace_products = trace_orders(flaton_products)

        hdu = pyfits.open(flat_on_filenames[0])[0]

        igr_storage.store(trace_products,
                          mastername=flat_on_filenames[0],
                          masterhdu=flat_on_hdu_list[0])

        from libs.process_flat import trace_solutions
        trace_solution_products, trace_solution_products_plot = \
            trace_solutions(trace_products)

    if 1:
        trace_solution_products.keys()

        from libs.storage_descriptions import FLATCENTROID_SOL_JSON_DESC
        myproduct = trace_solution_products[FLATCENTROID_SOL_JSON_DESC]
        bottomup_solutions = myproduct["bottom_up_solutions"]

        orders = range(len(bottomup_solutions))

        from libs.apertures import Apertures
        ap = Apertures(orders, bottomup_solutions)

        from libs.storage_descriptions import FLAT_MASK_DESC
        flat_mask = igr_storage.load1(FLAT_MASK_DESC,
                                      flat_on_filenames[0])
        order_map2 = ap.make_order_map(mask_top_bottom=True)
        # bias mask: flat-mask pixels that fall inside (unmasked) orders
        bias_mask = flat_mask.data & (order_map2 > 0)

        from libs.products import PipelineImageBase, PipelineProducts
        pp = PipelineProducts("")
        from libs.storage_descriptions import BIAS_MASK_DESC
        pp.add(BIAS_MASK_DESC,
               PipelineImageBase([], bias_mask))

        # NOTE(review): here flaton_basename is the full filename, while
        # below it is rebound to the extension-less basename -- confirm
        # the store key is intended.
        flaton_basename = flat_on_filenames[0]
        igr_storage.store(pp,
                          mastername=flaton_basename,
                          masterhdu=hdu)

    # plot qa figures.
    if 1:
        from libs.process_flat import check_trace_order
        from matplotlib.figure import Figure
        fig1 = Figure(figsize=[9, 4])
        check_trace_order(trace_products, fig1)

    if 1:
        from libs.process_flat import plot_trace_solutions
        fig2, fig3 = plot_trace_solutions(flaton_products,
                                          trace_solution_products,
                                          trace_solution_products_plot,
                                          )

    flatoff_basename = os.path.splitext(os.path.basename(flat_off_filenames[0]))[0]
    flaton_basename = os.path.splitext(os.path.basename(flat_on_filenames[0]))[0]

    if 1:
        from libs.qa_helper import figlist_to_pngs
        aperture_figs = igr_path.get_section_filename_base("QA_PATH",
                                                           "aperture_"+flaton_basename,
                                                           "aperture_"+flaton_basename)

        figlist_to_pngs(aperture_figs, [fig1, fig2, fig3])

    if 1:  # now trace the orders

        #del trace_solution_products["bottom_up_solutions"]
        igr_storage.store(trace_solution_products,
                          mastername=flat_on_filenames[0],
                          masterhdu=flat_on_hdu_list[0])

    # save db
    if 1:
        from libs.products import ProductDB
        flatoff_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH",
                                                             "flat_off.db",
                                                             )
        flatoff_db = ProductDB(flatoff_db_name)
        #dbname = os.path.splitext(os.path.basename(flat_off_filenames[0]))[0]
        flatoff_db.update(band, flatoff_basename)

        flaton_db_name = igr_path.get_section_filename_base("PRIMARY_CALIB_PATH",
                                                            "flat_on.db",
                                                            )
        flaton_db = ProductDB(flaton_db_name)
        flaton_db.update(band, flaton_basename)
def get_hdus(self, band, runids):
    """Return the primary HDU of each frame for *band*/*runids*."""
    filenames = self.get_filenames(band, runids)
    return [pyfits.open(name)[0] for name in filenames]
# look up the STELLAR_AB recipe entry containing this obsid
recipe_list = load_recipe_list(fn)
recipe_dict = make_recipe_dict(recipe_list)

abba = [rd[-1] for rd in recipe_dict["STELLAR_AB"] if obsid in rd[0]]
objname = abba[-1][0]

#obsids = abba[0]
#frametypes = abba[1]

obj_filenames = igrins_files.get_filenames(band, [obsid])
obj_path = ProductPath(igr_path, obj_filenames[0])
obj_master_obsid = obsid

fn = obj_path.get_secondary_path("spec.fits")
s_list = list(pyfits.open(fn)[0].data)

if 1:
    # load the wavelength solution of the associated sky frame
    sky_db = ProductDB(os.path.join(igr_path.secondary_calib_path,
                                    "sky.db"))
    basename = sky_db.query(band, obj_master_obsid)

    sky_path = ProductPath(igr_path, basename)
    fn = sky_path.get_secondary_path("wvlsol_v1")
    wvlsol_products = PipelineProducts.load(fn)

    orders_w_solutions = wvlsol_products["orders"]
    wvl_solutions = wvlsol_products["wvl_sol"]
def get_file(i):
    """Open frame *i* from the 20140526 night and subtract the axis-1
    median level (broadcast over the last axis) from the primary data."""
    import libs.fits as pyfits
    hdulist = pyfits.open("../20140526/SDCH_20140526_%04d.fits" % i)
    level = np.median(hdulist[0].data, axis=1)
    hdulist[0].data -= level
    return hdulist
wat_str = wat_str.replace("wtype=multispec","").strip() wat_spec_list = p.split(wat_str) wat_list_r = ["spec%d %s" % (i+1, s) for i, s in enumerate(wat_spec_list[::-1]) if s.strip()] cards = get_wat2_spec_cards(wat_list_r) new_cards.extend(cards) return type(header)(new_cards) if __name__ == "__main__": import numpy as np xxx = np.linspace(1, 2048, 100) yyy = xxx**4 from astropy.modeling import models, fitting p_init = models.Chebyshev1D(domain=[xxx[0], xxx[-1]], degree=4) fit_p = fitting.LinearLSQFitter() p = fit_p(p_init, xxx, yyy) wat_list = get_wat_spec([111], [p]) f= pyfits.open("outdata/20140525/SDCK_20140525_0016.spec.fits")
st = np.nanstd(variance_) st = np.nanstd(variance_[np.abs(variance_) < 3*st]) variance_[np.abs(variance_-ss) > 3*st] = np.nan import scipy.ndimage as ni x_std = ni.median_filter(np.nanstd(variance_, axis=0), 11) variance_map0 = np.zeros_like(variance_) + x_std**2 variance_map = variance_map0 + np.abs(a_plus_b)/gain # add poison noise in ADU return variance_map if __name__ == "__main__": import libs.fits as pyfits a = pyfits.open("../indata/20140525/SDCH_20140525_0016.fits")[0].data b = pyfits.open("../indata/20140525/SDCH_20140525_0017.fits")[0].data flat_mask = pyfits.open("../calib/primary/20140525/FLAT_SDCH_20140525_0074.flat_mask.fits")[0].data > 0 order_map2 = pyfits.open("../calib/primary/20140525/SKY_SDCH_20140525_0029.order_map_masked.fits")[0].data bias_mask2 = flat_mask & (order_map2 > 0) pix_mask0 = pyfits.open("../calib/primary/20140525/FLAT_SDCH_20140525_0074.flat_bpixed.fits")[0].data pix_mask = ~np.isfinite(pix_mask0) v = get_variance_map(a+b, a-b, bias_mask2, pix_mask, gain=2)