def invert(self, domain=None, coeff=None, deg=None):
    """ Utility to invert a traceset mapping, i.e. model x vs. y instead of y vs. x.
    Returns the Legendre coefficients of the inverse fit along with (ymin, ymax).
    """
    if domain is None:
        domain = [self.wmin, self.wmax]
    ispec = np.arange(self.nspec)  # Doing for all spectra
    if coeff is None:
        coeff = self.ycoeff  # doing y-wavelength map
    ytmp = list()
    for ii in ispec:
        fit_dict = dufits.mk_fit_dict(coeff[ii, :], coeff.shape[1], 'legendre',
                                      domain[0], domain[1])
        xtmp = np.array((domain[0], domain[1]))
        yfit = dufits.func_val(xtmp, fit_dict)
        ytmp.append(yfit)
    ymin = np.min(ytmp)
    ymax = np.max(ytmp)
    x = np.linspace(domain[0], domain[1], 1000)
    if deg is None:
        deg = self.ncoeff + 2
    #- Now get the coefficients for the inverse mapping
    c = np.zeros((coeff.shape[0], deg + 1))
    for ii in ispec:
        fit_dict = dufits.mk_fit_dict(coeff[ii, :], coeff.shape[1], 'legendre',
                                      domain[0], domain[1])
        y = dufits.func_val(x, fit_dict)
        yy = 2.0 * (y - ymin) / (ymax - ymin) - 1.0
        c[ii] = legfit(yy, x, deg)
    return c, ymin, ymax
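# Illustrative use of the inverse mapping above (a sketch; the names `tset`, `fiber`
# and `row` are assumptions, not part of this module):
#
#   icoeff, ymin, ymax = tset.invert()             # Legendre coeffs of wave(y) per fiber
#   yy = 2.0 * (row - ymin) / (ymax - ymin) - 1.0  # rescale CCD rows to [-1, 1]
#   wave = numpy.polynomial.legendre.legval(yy, icoeff[fiber])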
def wavelength(self, ispec=None, y=None):
    """ returns wavelength evaluated at y """
    if y is None:
        y = np.arange(0, self.npix_y)
    if ispec is None:
        ispec = np.arange(self.nspec)
    c = self.icoeff
    ymin = self.ymin
    ymax = self.ymax
    if isinstance(ispec, numbers.Integral):
        new_dict = dufits.mk_fit_dict(c[ispec, :], c.shape[1], 'legendre', ymin, ymax)
        wfit = dufits.func_val(y, new_dict)
        return wfit
    else:
        ww = list()
        for ii in ispec:
            new_dict = dufits.mk_fit_dict(c[ii, :], c.shape[1], 'legendre', ymin, ymax)
            wfit = dufits.func_val(y, new_dict)
            ww.append(wfit)
        return np.array(ww)
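# Illustrative calls to wavelength() (a sketch; `psf` is an assumed instance of the
# class these methods belong to):
#   wave0   = psf.wavelength(ispec=0)                # wavelengths of fiber 0 at every CCD row
#   waveall = psf.wavelength(y=np.arange(100, 200))  # (nspec, 100) array for a row sub-range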
def x(self, ispec=None, wavelength=None):
    """ returns CCD x centroids for the spectra
    ispec can be None, scalar or a vector
    wavelength can be None or a vector
    """
    if wavelength is None:
        #- ispec = None -> all the spectra
        if ispec is None:
            ispec = np.arange(self.nspec)
            x = list()
            #x=np.array((len(ispec),len(wavelength)))
            for ii in ispec:
                wave = self.wavelength(ii)
                fit_dictx = dufits.mk_fit_dict(self.xcoeff[ii], self.ncoeff, 'legendre',
                                               self.wmin, self.wmax)
                xfit = dufits.func_val(wave, fit_dictx)
                x.append(xfit)
            return np.array(x)
        if isinstance(ispec, (np.ndarray, list, tuple)):
            x = list()
            for ii in ispec:
                wave = self.wavelength(ii)
                fit_dictx = dufits.mk_fit_dict(self.xcoeff[ii], self.ncoeff, 'legendre',
                                               self.wmin, self.wmax)
                xfit = dufits.func_val(wave, fit_dictx)
                x.append(xfit)
            return np.array(x)
        else:  # int ispec
            wave = self.wavelength(ispec)
            fit_dictx = dufits.mk_fit_dict(self.xcoeff[ispec], self.ncoeff, 'legendre',
                                           self.wmin, self.wmax)
            x = dufits.func_val(wave, fit_dictx)
            return np.array(x)

    #- wavelength not None but a scalar or 1D-vector here and below
    wavelength = np.asarray(wavelength)
    if isinstance(ispec, numbers.Integral):
        fit_dictx = dufits.mk_fit_dict(self.xcoeff[ispec], self.ncoeff, 'legendre',
                                       self.wmin, self.wmax)
        x = dufits.func_val(wavelength, fit_dictx)
        return np.array(x)
    if ispec is None:
        ispec = np.arange(self.nspec)
    x = list()
    for ii in ispec:
        #- for a None or a np.ndarray or anything that can be iterated case
        fit_dictx = dufits.mk_fit_dict(self.xcoeff[ii], self.ncoeff, 'legendre',
                                       self.wmin, self.wmax)
        xfit = dufits.func_val(wavelength, fit_dictx)
        x.append(xfit)
    return np.array(x)
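# Illustrative calls to x() (a sketch; `psf` is an assumed instance):
#   xcen_all = psf.x()                                               # (nspec, npix_y): every fiber at every row
#   xcen_one = psf.x(ispec=3, wavelength=np.linspace(psf.wmin, psf.wmax, 5))  # fiber 3 at five wavelengths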
def wdisp(self, ispec, wave):
    """ returns wavelength dispersion (sigma) evaluated at wave for fiber ispec """
    #- wave: scalar or vector, ispec: scalar integer
    #- TODO: make useful for other permutations
    if hasattr(self, 'wcoeff'):
        new_dict = dufits.mk_fit_dict(self.wcoeff[ispec], self.wcoeff.shape[1], 'legendre',
                                      self.wmin, self.wmax)
        wsigma = dufits.func_val(wave, new_dict)
        return wsigma
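# Illustrative call to wdisp() (a sketch; `psf` is an assumed instance, and the input
# PSF file must provide dispersion coefficients for self.wcoeff to exist):
#   sigma = psf.wdisp(0, np.linspace(psf.wmin, psf.wmax, 10))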
def y(self, ispec=None, wavelength=None):
    """ returns CCD y centroids for the spectra
    ispec can be None, scalar or a vector
    wavelength can be a vector but not None
    #- similar to specter.psf.PSF.y
    """
    if wavelength is None:
        raise ValueError("PSF.y requires wavelength 1D vector")
    wavelength = np.asarray(wavelength)
    if ispec is None:
        ispec = np.arange(self.nspec)
        y = list()
        for ii in ispec:
            fit_dicty = dufits.mk_fit_dict(self.ycoeff[ii], self.ncoeff, 'legendre',
                                           self.wmin, self.wmax)
            yfit = dufits.func_val(wavelength, fit_dicty)
            y.append(yfit)
        return np.array(y)
    if isinstance(ispec, (np.ndarray, list, tuple)):
        y = list()
        for ii in ispec:
            fit_dicty = dufits.mk_fit_dict(self.ycoeff[ii], self.ncoeff, 'legendre',
                                           self.wmin, self.wmax)
            yfit = dufits.func_val(wavelength, fit_dicty)
            y.append(yfit)
        return np.array(y)
    if isinstance(ispec, numbers.Integral):  # int ispec
        fit_dicty = dufits.mk_fit_dict(self.ycoeff[ispec], self.ncoeff, 'legendre',
                                       self.wmin, self.wmax)
        y = dufits.func_val(wavelength, fit_dicty)
        return np.array(y)
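# Illustrative call to y(), which mirrors x() but requires an explicit wavelength
# vector (a sketch; `psf` is an assumed instance):
#   ycen = psf.y(ispec=[0, 1, 2], wavelength=np.linspace(psf.wmin, psf.wmax, 50))  # shape (3, 50)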
def __init__(self, mu=None, sigma=None, wdict=None, waves=None, ndiag=9):
    self.__ndiag = ndiag
    if ndiag & 0x1 == 0:
        raise ValueError("Need odd number of diagonals, got %d" % ndiag)

    def _binIntegral(x, mu=None, sigma=None):
        """
        x: vector of bin edges, length ndiag + 1
        mu: means, shape (nwave, 1)
        sigma: sigmas, shape (nwave, 1)
        """
        nvecs = 1
        if sigma is not None:
            nvecs = sigma.shape[0]
        if mu is None:
            mu = np.zeros((nvecs, 1))
        if sigma is None:
            sigma = np.ones(mu.shape) * 0.5
        sx = (np.tile(x, (mu.shape[0], 1)) - mu) / (sigma * np.sqrt(2))
        return 0.5 * (np.abs(np.diff(scipy.special.erf(sx))))

    mnone = mu is None
    snone = sigma is None
    dnone = wdict is None
    wnone = waves is None
    if snone:
        if wnone or dnone:
            raise ValueError('Cannot initialize Resolution: need sigma, or wdict and waves')
        else:
            from lvmutil import funcfits as dufits
            sigma = dufits.func_val(waves, wdict)
    nwave = len(sigma)
    s = sigma.reshape((nwave, 1))
    bins = np.arange(ndiag, 0, -1)
    bins = bins - (bins[0] + bins[-1]) / 2.0
    x = np.concatenate([bins + 0.5, bins[-1:] - 0.5])
    self.offsets = bins
    rdata = _binIntegral(x, mu=mu, sigma=s).T
    scipy.sparse.dia_matrix.__init__(self, (rdata, self.offsets), (nwave, nwave))
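# Minimal standalone sketch of the bin-integral idea used in __init__ above
# (illustrative only; `gaussian_bin_fractions` is not part of this module's API and
# relies on the module-level numpy / scipy.special imports). Each row gives the
# fraction of a unit-area Gaussian of width sigma, centered on the main diagonal,
# that falls into ndiag one-pixel-wide bins.
def gaussian_bin_fractions(sigma, ndiag=9):
    """sigma: 1D array of Gaussian sigmas (pixel units), one per wavelength bin."""
    sigma = np.asarray(sigma).reshape(-1, 1)
    offsets = np.arange(ndiag // 2, -(ndiag // 2) - 1, -1)       # e.g. 4, 3, ..., -4
    edges = np.concatenate([offsets + 0.5, offsets[-1:] - 0.5])  # ndiag + 1 bin edges
    sx = edges / (sigma * np.sqrt(2.0))
    return 0.5 * np.abs(np.diff(scipy.special.erf(sx), axis=1))  # shape (nwave, ndiag)

# Example: gaussian_bin_fractions(np.full(20, 1.2)) returns a (20, 9) array whose rows
# each sum to just under 1 (the Gaussian tails beyond +/- 4.5 pixels are lost).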
def test_wavelengths(self):
    if self.data_unavailable:
        self.skipTest("Failed to download test data.")
    # Read flat
    flat_hdu = fits.open(self.testflat)
    header = flat_hdu[0].header
    flat = flat_hdu[0].data
    ny = flat.shape[0]
    # Find fibers (necessary)
    xpk, ypos, cut = desiboot.find_fiber_peaks(flat)
    # Trace
    xset, xerr = desiboot.trace_crude_init(flat, xpk, ypos)
    xfit, fdicts = desiboot.fit_traces(xset, xerr)
    # Test fiber_gauss_old for coverage
    gauss = desiboot.fiber_gauss_old(flat, xfit, xerr)
    # Gaussian
    gauss = desiboot.fiber_gauss(flat, xfit, xerr)
    # Read arc
    arc_hdu = fits.open(self.testarc)
    arc = arc_hdu[0].data
    arc_ivar = np.ones(arc.shape)
    # Extract arc spectra (one per fiber)
    all_spec = desiboot.extract_sngfibers_gaussianpsf(arc, arc_ivar, xfit, gauss)
    # Line list
    camera = header['CAMERA']
    llist = desiboot.load_arcline_list(camera)
    dlamb, gd_lines = desiboot.load_gdarc_lines(camera, llist)
    #
    all_wv_soln = []
    for ii in range(1):
        spec = all_spec[:, ii]
        # Find lines
        pixpk, flux = desiboot.find_arc_lines(spec)
        id_dict = {"pixpk": pixpk, "flux": flux}
        # Match a set of 5 gd_lines to detected lines
        desiboot.id_arc_lines_using_triplets(id_dict, gd_lines, dlamb)
        # Now the rest
        desiboot.id_remainder(id_dict, llist, deg=3)
        # Final fit wave vs. pix too
        final_fit, mask = dufits.iter_fit(np.array(id_dict['id_wave']),
                                          np.array(id_dict['id_pix']),
                                          'polynomial', 3, xmin=0., xmax=1.)
        rms = np.sqrt(np.mean(
            (dufits.func_val(np.array(id_dict['id_wave'])[mask == 0], final_fit)
             - np.array(id_dict['id_pix'])[mask == 0])**2))
        final_fit_pix, mask2 = dufits.iter_fit(np.array(id_dict['id_pix']),
                                               np.array(id_dict['id_wave']),
                                               'legendre', 4, niter=5)
        # Save
        id_dict['final_fit'] = final_fit
        id_dict['rms'] = rms
        id_dict['final_fit_pix'] = final_fit_pix
        id_dict['wave_min'] = dufits.func_val(0, final_fit_pix)
        id_dict['wave_max'] = dufits.func_val(ny - 1, final_fit_pix)
        id_dict['mask'] = mask
        all_wv_soln.append(id_dict)

    self.assertLess(all_wv_soln[0]['rms'], 0.25)
def main(args):
    log = get_logger()
    log.info("Starting")

    if args.triplet_matching:
        log.warning("triplet_matching option deprecated, this algorithm is now used for all cases")

    lamps = None
    if args.lamps:
        lamps = np.array(args.lamps.split(","))
        log.info("Using lamps = %s" % str(lamps))
    else:
        log.info("Using default set of lamps")

    if (args.psffile is None) and (args.fiberflat is None):
        raise IOError("Must provide either a PSF file or a fiberflat")

    # Start QA
    try:
        pp = PdfPages(args.qafile)
    except ValueError:
        QA = False
    else:
        QA = True

    fiberflat_header = None

    if args.psffile is None:
        ###########
        # Read flat
        flat_hdu = fits.open(args.fiberflat)
        fiberflat_header = flat_hdu[0].header
        header = flat_hdu[0].header
        if len(flat_hdu) >= 3:
            flat = flat_hdu[0].data * (flat_hdu[1].data > 0) * (flat_hdu[2].data == 0)
        else:
            flat = flat_hdu[0].data
            log.warning("found only %d HDU in flat, do not use ivar" % len(flat_hdu))
        ny = flat.shape[0]

        ###########
        # Find fibers
        log.info("Finding the fibers")
        xpk, ypos, cut = desiboot.find_fiber_peaks(flat)
        if QA:
            desiboot.qa_fiber_peaks(xpk, cut, pp)

        # Test?
        if args.test:
            log.warning("cutting down fibers for testing..")
            #xpk = xpk[0:100]
            xpk = xpk[0:50]
            #xpk = xpk[0:5]

        ###########
        # Trace the fiber flat spectra
        log.info("Tracing the fiber flat spectra")

        # Crude first
        log.info("Crudely..")
        xset, xerr = desiboot.trace_crude_init(flat, xpk, ypos)

        # Polynomial fits
        log.info("Fitting the traces")
        xfit, fdicts = desiboot.fit_traces(xset, xerr)

        # QA
        if QA:
            desiboot.qa_fiber_Dx(xfit, fdicts, pp)

        ###########
        # Model the PSF with a Gaussian
        log.info("Modeling the PSF with a Gaussian, be patient..")
        gauss = desiboot.fiber_gauss(flat, xfit, xerr)
        if QA:
            desiboot.qa_fiber_gauss(gauss, pp)
        XCOEFF = None
    else:
        # Load PSF file and generate trace info
        log.warning("Not tracing the flat. Using the PSF file.")
        psf_hdu = fits.open(args.psffile)
        psf_head = psf_hdu[0].header
        # Gaussians
        gauss = psf_hdu[2].data
        # Traces
        WAVEMIN = psf_head['WAVEMIN']
        WAVEMAX = psf_head['WAVEMAX']
        XCOEFF = psf_hdu[0].data
        xfit = None
        fdicts = None

    arc_header = None

    # ARCS
    if not args.trace_only:

        ###########
        # Read arc
        log.info("Reading arc")
        arc_hdu = fits.open(args.arcfile)
        arc_header = arc_hdu[0].header
        if len(arc_hdu) >= 3:
            # set to zero ivar of masked pixels, force positive or null ivar
            arc_ivar = arc_hdu[1].data * (arc_hdu[2].data == 0) * (arc_hdu[1].data > 0)
            # and mask pixels below -5 sigma (cures unmasked dead columns in sims.)
            arc_ivar *= (arc_hdu[0].data * np.sqrt(arc_hdu[1].data) > -5.)
            # set to zero pixel values with null ivar
            arc = arc_hdu[0].data * (arc_ivar > 0)
        else:
            arc = arc_hdu[0].data
            arc_ivar = np.ones(arc.shape)
            log.warning("found only %d HDU in arc, do not use ivar" % len(arc_hdu))

        header = arc_hdu[0].header
        ny = arc.shape[0]

        #####################################
        # Extract arc spectra (one per fiber)
        log.info("Extracting arcs")
        if xfit is None:
            wv_array = np.linspace(WAVEMIN, WAVEMAX, num=arc.shape[0])
            nfiber = XCOEFF.shape[0]
            ncoeff = XCOEFF.shape[1]
            xfit = np.zeros((arc.shape[0], nfiber))
            # Generate a fit_dict
            fit_dict = dufits.mk_fit_dict(XCOEFF[:, 0], ncoeff, 'legendre', WAVEMIN, WAVEMAX)
            for ii in range(nfiber):
                fit_dict['coeff'] = XCOEFF[ii, :]
                xfit[:, ii] = dufits.func_val(wv_array, fit_dict)

        all_spec = desiboot.extract_sngfibers_gaussianpsf(arc, arc_ivar, xfit, gauss)

        ############################
        # Line list
        camera = header['CAMERA'].lower()
        log.info("Loading line list")
        llist = desiboot.load_arcline_list(camera, vacuum=True, lamps=lamps)
        dlamb, gd_lines = desiboot.load_gdarc_lines(camera, llist, vacuum=True, lamps=lamps,
                                                    good_lines_filename=args.good_lines)

        #####################################
        # Loop to solve for wavelengths
        all_wv_soln = []
        all_dlamb = []
        debug = False

        id_dict_of_fibers = []
        # first loop to find arc lines and do a first matching
        for ii in range(all_spec.shape[1]):
            spec = all_spec[:, ii]

            id_dict = {}
            id_dict["fiber"] = ii
            id_dict["status"] = "none"
            id_dict['id_pix'] = []
            id_dict['id_idx'] = []
            id_dict['id_wave'] = []

            pixpk, flux = desiboot.find_arc_lines(spec)
            id_dict["pixpk"] = pixpk
            id_dict["flux"] = flux

            try:
                desiboot.id_arc_lines_using_triplets(id_dict, gd_lines, dlamb,
                                                     ntrack=args.ntrack,
                                                     nmax=args.nmax,
                                                     toler=args.toler)
            except:
                log.warning(sys.exc_info())
                log.warning("fiber {:d} ID_ARC failed".format(ii))
                id_dict['status'] = "failed"
                id_dict_of_fibers.append(id_dict)
                continue

            # Add lines
            if len(id_dict['pixpk']) > len(id_dict['id_pix']):
                desiboot.id_remainder(id_dict, llist, deg=args.legendre_degree)
            log.info("Fiber #{:d} n_match={:d} n_detec={:d}".format(
                ii, len(id_dict['id_pix']), len(id_dict['pixpk'])))

            # Save
            id_dict_of_fibers.append(id_dict)

        # now record the list of waves identified in several fibers
        matched_lines = np.array([])
        for ii in range(all_spec.shape[1]):
            matched_lines = np.append(matched_lines, id_dict_of_fibers[ii]['id_wave'])
        matched_lines = np.unique(matched_lines)

        number_of_detections = []
        for line in matched_lines:
            ndet = 0
            for ii in range(all_spec.shape[1]):
                if np.sum(id_dict_of_fibers[ii]['id_wave'] == line) > 0:
                    ndet += 1
            print(line, "ndet=", ndet)
            number_of_detections.append(ndet)

        # choose which lines are ok:
        # ok if at least 5 detections (coincidental error very low)
        min_number_of_detections = min(5, all_spec.shape[1])
        number_of_detections = np.array(number_of_detections)
        good_matched_lines = matched_lines[number_of_detections >= min_number_of_detections]
        bad_matched_lines = matched_lines[number_of_detections < min_number_of_detections]

        log.info("good matched lines = {:s}".format(str(good_matched_lines)))
        log.info("bad matched lines = {:s}".format(str(bad_matched_lines)))

        # loop again on all fibers
        for ii in range(all_spec.shape[1]):
            spec = all_spec[:, ii]

            id_dict = id_dict_of_fibers[ii]
            n_matched_lines = len(id_dict['id_wave'])
            n_detected_lines = len(id_dict['pixpk'])

            # did we find any bad line for this fiber?
            n_bad = np.intersect1d(id_dict['id_wave'], bad_matched_lines).size
            # how many good lines did we find?
            n_good = np.intersect1d(id_dict['id_wave'], good_matched_lines).size

            if id_dict['status'] == "ok" and \
               (n_bad > 0 or (n_good < good_matched_lines.size - 1 and n_good < 30)) and n_good < 40:
                log.info("Try to refit fiber {:d} with n_bad={:d} and n_good={:d} when n_good_all={:d} n_detec={:d}".format(
                    ii, n_bad, n_good, good_matched_lines.size, n_detected_lines))
                try:
                    desiboot.id_arc_lines_using_triplets(id_dict, good_matched_lines, dlamb,
                                                         ntrack=args.ntrack, nmax=args.nmax)
                except:
                    log.warning(sys.exc_info())
                    log.warning("ID_ARC failed on fiber {:d}".format(ii))
                    id_dict["status"] = "failed"
                if id_dict['status'] == "ok" and len(id_dict['pixpk']) > len(id_dict['id_pix']):
                    desiboot.id_remainder(id_dict, llist, deg=args.legendre_degree)
            else:
                log.info("Do not refit fiber {:d} with n_bad={:d} and n_good={:d} when n_good_all={:d} n_detec={:d}".format(
                    ii, n_bad, n_good, good_matched_lines.size, n_detected_lines))

            if id_dict['status'] != 'ok':
                all_wv_soln.append(id_dict)
                all_dlamb.append(0.)
                log.warning("Fiber #{:d} failed, no final fit".format(ii))
                continue

            # Final fit wave vs. pix too
            id_wave = np.array(id_dict['id_wave'])
            id_pix = np.array(id_dict['id_pix'])

            deg = max(1, min(args.legendre_degree, id_wave.size - 2))

            final_fit, mask = dufits.iter_fit(id_wave, id_pix, 'polynomial', deg,
                                              xmin=0., xmax=1., sig_rej=3.)
            rms = np.sqrt(np.mean((dufits.func_val(id_wave[mask == 0], final_fit)
                                   - id_pix[mask == 0])**2))
            final_fit_pix, mask2 = dufits.iter_fit(id_pix[mask == 0], id_wave[mask == 0],
                                                   'legendre', deg, sig_rej=100000000.)
            rms_pix = np.sqrt(np.mean((dufits.func_val(id_pix[mask == 0], final_fit_pix)
                                       - id_wave[mask == 0])**2))

            # Append
            wave = dufits.func_val(np.arange(spec.size), final_fit_pix)
            idlamb = np.median(np.abs(wave - np.roll(wave, 1)))
            all_dlamb.append(idlamb)

            # Save
            id_dict['final_fit'] = final_fit
            id_dict['rms'] = rms
            id_dict['final_fit_pix'] = final_fit_pix
            id_dict['wave_min'] = dufits.func_val(0, final_fit_pix)
            id_dict['wave_max'] = dufits.func_val(ny - 1, final_fit_pix)
            id_dict['mask'] = mask

            log.info("Fiber #{:d} final fit rms(wave->y) = {:g} pix ; rms(y->wave) = {:g} A ; nlines = {:d}".format(
                ii, rms, rms_pix, id_pix.size))

            all_wv_soln.append(id_dict)

        if QA:
            desiboot.qa_arc_spec(all_spec, all_wv_soln, pp)
            desiboot.qa_fiber_arcrms(all_wv_soln, pp)
            desiboot.qa_fiber_dlamb(all_spec, all_wv_soln, pp)
    else:
        all_wv_soln = None

    ###########
    # Write PSF file
    log.info("Writing PSF file")
    desiboot.write_psf(args.outfile, xfit, fdicts, gauss, all_wv_soln,
                       legendre_deg=args.legendre_degree,
                       without_arc=args.trace_only,
                       XCOEFF=XCOEFF,
                       fiberflat_header=fiberflat_header,
                       arc_header=arc_header)
    log.info("Successfully wrote {:s}".format(args.outfile))

    if (not args.trace_only) and args.out_line_list:
        log.info("Writing list of lines found in {:s}".format(args.out_line_list))
        desiboot.write_line_list(args.out_line_list, all_wv_soln, llist)
        log.info("Successfully wrote {:s}".format(args.out_line_list))

    ###########
    # All done
    if QA:
        log.info("Successfully wrote {:s}".format(args.qafile))
        pp.close()
    log.info("end")
    return
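# Illustrative invocation of main() via its command-line wrapper (a sketch; the
# entry-point name and exact option spellings are assumptions, inferred from the
# `args` attributes read above, e.g. fiberflat, arcfile, outfile, qafile):
#   desi_bootcalib --fiberflat flat.fits --arcfile arc.fits \
#       --outfile psfboot.fits --qafile qa_bootcalib.pdf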