def handle_extract(data, outname=None, fine='fine.npy', flexure_x_corr_nm=0.0,
                   flexure_y_corr_pix=0.0, flat_corrections=None):
    """Extract spectra from data at the locations in fine, caching the result.

    Extractions are cached in extracted_<outname>.npy; if that file already
    exists it is loaded rather than recreated."""

    exfile = "extracted_%s.npy" % outname
    if not os.path.exists(exfile):
        E, meta = Wavelength.wavelength_extract(data, fine, filename=outname,
                                                flexure_x_corr_nm=flexure_x_corr_nm,
                                                flexure_y_corr_pix=flexure_y_corr_pix,
                                                flat_corrections=flat_corrections)
        np.save(exfile, [E, meta])
    else:
        E, meta = np.load(exfile)

    return E
def handle_Flat(A, fine, outname=None):
    '''Loads 2k x 2k IFU Flat frame "A" and extracts spectra from the
    locations in "fine".

    Args:
        A (string): filename of ifu FITS file to extract from.
        fine (string): filename of NumPy file with locations + wavelength soln
        outname (string): filename to write results to

    Returns:

    Raises:
        None
    '''

    fine = np.load(fine)
    if outname is None:
        outname = "%s" % (A)

    if os.path.isfile(outname + ".npy"):
        print "Extractions already exist in %s.npy!" % outname
        print "rm %s.npy # if you want to recreate extractions" % outname
    else:
        print "\nCREATING extractions ..."
        spec = pf.open(A)

        print "\nExtracting object spectra"
        E, meta = Wavelength.wavelength_extract(spec, fine, filename=outname,
                                                flexure_x_corr_nm=0.,
                                                flexure_y_corr_pix=0.)
        meta['airmass'] = spec[0].header['airmass']
        header = {}
        for k, v in spec[0].header.iteritems():
            try:
                header[k] = v
            except:
                pass
        meta['HA'] = spec[0].header['HA']
        meta['Dec'] = spec[0].header['Dec']
        meta['RA'] = spec[0].header['RA']
        meta['PRLLTC'] = spec[0].header['PRLLTC']
        meta['equinox'] = spec[0].header['Equinox']
        meta['utc'] = spec[0].header['utc']
        meta['header'] = header
        meta['exptime'] = spec[0].header['exptime']
        np.save(outname, [E, meta])
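# A minimal usage sketch for handle_Flat, assuming a dome/twilight flat frame
# and an existing fine.npy wavelength solution; the filenames are hypothetical
# placeholders.
def _example_handle_flat():
    handle_Flat("ifu20150601_flat.fits", "fine.npy", outname="flat_extract")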
def measure_flexure_x(fine, HDUlist, plot=True, dY=0, skyline=589.0,
                      lamstart=1000.0, lamratio=239.0/240.0, lamlen=250,
                      extract_width=3, skywidth=9, outfile="dX"):
    """Measures flexure in the X direction by centroiding a night-sky line.

    Args:
        fine: List of Extraction objects, the fine loc + wave solution for
            each spectrum
        HDUlist: Pyfits object for the spectrum to measure
        plot: Plot + save results to a file
        dY: the measured pixel flexure in Y direction to account for
        skyline (float): The night skyline to centroid on in nm
        skywidth (float): Fit gaussian to the ROI of (skyline-skywidth to
            skyline+skywidth) in nm.
        extract_width (int): Number of pixels to extract spectrum around
        - See Wavelength.fiducial_spectrum for the following:
        lamstart: Wavelength to start the grid on, default 1000 nm
        lamratio: Resolution of sed machine
        lamlen: Length of spectrum

    Returns:
        Offset in nm in the X direction.
    """

    dat = HDUlist[0].data
    exptime = HDUlist[0].header["EXPTIME"]

    spec_ixs = np.arange(500, 1200, 10)
    lamgrid = Wavelength.fiducial_spectrum(lamstart=lamstart,
                                           lamratio=lamratio, len=lamlen)
    specgrid = np.zeros((len(lamgrid), len(spec_ixs)))

    for i, ix in enumerate(spec_ixs):
        f = fine[ix]
        # bad fit
        if not f.ok:
            continue
        # noisy fit
        if f.lamnrms > 1:
            continue
        # short spectrum
        if f.xrange[1] - f.xrange[0] < 200:
            continue

        spec = np.zeros(f.xrange[1] - f.xrange[0])
        yfun = np.poly1d(f.poly)
        for jx, xpos in enumerate(np.arange(f.xrange[0], f.xrange[1])):
            ypos = yfun(xpos)
            try:
                spec[jx] = np.sum(dat[ypos - extract_width:ypos + extract_width,
                                      xpos])
            except:
                continue

        try:
            ll = f.get_lambda_nm()
        except:
            continue
        specfun = interp1d(ll, spec, bounds_error=False)
        specgrid[:, i] = specfun(lamgrid)

    # median over spaxels suppresses object flux and leaves the sky spectrum
    skyspec = np.median(specgrid, axis=1)
    pl.step(lamgrid, skyspec, where="mid")

    # fit a gaussian to the sky line within the requested window
    roi = (lamgrid > skyline - skywidth) & (lamgrid < skyline + skywidth)
    ffun = FF.mpfit_residuals(FF.gaussian4)
    parinfo = [
        {"value": np.max(skyspec[roi]), "limited": [1, 0], "limits": [0, 0]},
        {"value": skyline},
        {"value": 3},
        {"value": np.min(skyspec[roi]), "limited": [1, 0], "limits": [0, 0]}]
    fit = FF.mpfit_do(ffun, lamgrid[roi], skyspec[roi], parinfo)

    pl.plot(lamgrid, FF.gaussian4(fit.params, lamgrid))
    pl.savefig(outfile + ".pdf")

    dXnm = fit.params[1] - skyline
    print "dX = %3.2f nm shift" % dXnm

    return dXnm
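# The Gaussian centroiding above goes through the module's mpfit wrappers
# (FF.mpfit_residuals / FF.mpfit_do).  The sketch below illustrates the same
# idea with scipy.optimize.curve_fit on synthetic data: a 589 nm sky line
# shifted by +0.7 nm on a flat background.  The four-parameter shape
# (amplitude, center, sigma, offset) is an assumption about gaussian4.
def _demo_skyline_centroid():
    from scipy.optimize import curve_fit

    def gauss_offset(x, amp, center, sigma, offset):
        return amp * np.exp(-0.5 * ((x - center) / sigma) ** 2) + offset

    lamgrid = np.linspace(580.0, 598.0, 200)
    skyspec = gauss_offset(lamgrid, 120.0, 589.7, 1.5, 10.0)
    skyspec += np.random.normal(0.0, 1.0, lamgrid.size)

    skyline, skywidth = 589.0, 9.0
    roi = (lamgrid > skyline - skywidth) & (lamgrid < skyline + skywidth)
    p0 = [np.max(skyspec[roi]), skyline, 3.0, np.min(skyspec[roi])]
    popt, _ = curve_fit(gauss_offset, lamgrid[roi], skyspec[roi], p0=p0)

    print "dX = %3.2f nm shift" % (popt[1] - skyline)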
import os
import sys

import numpy as np
import pylab as pl
import pyfits as pf
import scipy.signal as SG
from scipy.spatial import KDTree
from numpy.polynomial.chebyshev import chebfit, chebval
from scipy.interpolate import interp1d
import scipy.ndimage.filters as filters

from astropy.coordinates import Angle

# SED Machine pipeline helpers; NPK.Fit is imported under both aliases used
# by the routines below (module paths assumed from the SEDM ecosystem)
import NPK.Fit as FF
import NPK.Fit as NFit
import NPK.Atmosphere as Atm

import SEDMr.Extraction as Extraction
import SEDMr.Wavelength as Wavelength
import SEDMr.Spectra as SS

sys.setrecursionlimit(10000)

# reference wavelength for X positions
fid_wave = Wavelength.fiducial_wavelength()

scale = 1.0
H2P = np.array([[np.sqrt(3), np.sqrt(3)/2], [0, 3/2.]]) * scale
P2H = np.array([[np.sqrt(3)/3, -1/3.], [0, 2/3.]]) / scale

# Rotation matrix
theta = np.deg2rad(-37 + 13.5)
ROT = np.array([[np.cos(theta), -np.sin(theta)],
                [np.sin(theta), np.cos(theta)]])

# See figures here:
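# The H2P/P2H matrices above map between the axial hex indices (Q, R) of a
# spaxel and an unrotated planar position, and ROT rotates that frame onto
# the detector/sky orientation.  A minimal sketch of how the pieces compose;
# the (Q, R) = (2, 3) index is an arbitrary illustrative value, and this is
# not the module's actual X_as/Y_as computation.
def _demo_hex_transform():
    qr = np.array([2.0, 3.0])      # hypothetical spaxel index (Q, R)
    xy = np.dot(H2P, qr)           # hex index -> planar position
    xy_rot = np.dot(ROT, xy)       # rotate onto the instrument frame
    back = np.dot(P2H, xy)         # P2H is the inverse of H2P, recovers (2, 3)
    print xy_rot, back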
def handle_AB(A, B, fine, outname=None, corrfile=None, Aoffset=None,
              Boffset=None, radius=2, flat_corrections=None, nosky=False,
              lmin=650, lmax=700):
    '''Loads 2k x 2k IFU frames "A" and "B" and extracts A-B and A+B spectra
    from the "fine" location.

    Args:
        A (string): filename of ifu FITS file to extract from.
        B (string): filename of ifu FITS file to extract from.
        fine (string): filename of NumPy file with locations + wavelength soln
        outname (string): filename to write results to
        Aoffset (2tuple): X (nm)/Y (pix) shift to apply for flexure correction
        Boffset (2tuple): X (nm)/Y (pix) shift to apply for flexure correction
        radius (float): Extraction radius in arcsecond
        flat_corrections (list): A list of FlatCorrection objects for
            correcting the extraction
        nosky (Boolean): if True don't subtract sky, merely sum in aperture

    Returns:
        The extracted spectrum, a dictionary:
            {'ph_10m_nm': Flux in photon / 10 m / nanometer integrated
             'var': Variance spectrum
             'nm': Wavelength solution in nm
             'N_spaxA': Total number of "A" spaxels
             'N_spaxB': Total number of "B" spaxels
             'skyph': Sky flux in photon / 10 m / nanometer / spaxel
             'radius_as': Extraction radius in arcsec
             'pos': X/Y extraction location of spectrum in arcsec}

    Raises:
        None
    '''

    fine = np.load(fine)
    if outname is None:
        outname = "%sm%s" % (A, B)

    if Aoffset is not None:
        ff = np.load(Aoffset)
        flexure_x_corr_nm = ff[0]['dXnm']
        flexure_y_corr_pix = -ff[0]['dYpix']
        print "Dx %2.1f | Dy %2.1f" % (ff[0]['dXnm'], ff[0]['dYpix'])
    else:
        flexure_x_corr_nm = 0
        flexure_y_corr_pix = 0

    read_var = 5*5
    if os.path.isfile(outname + ".fits.npy"):
        print "USING extractions in %s!" % outname
        E, meta = np.load(outname + ".fits.npy")
        E_var, meta_var = np.load("var_" + outname + ".fits.npy")
    else:
        if not outname.endswith(".fits"):
            outname = outname + ".fits"

        diff = subtract(A, B, outname)
        add(A, B, "tmpvar_" + outname)

        adcspeed = diff[0].header["ADCSPEED"]
        if adcspeed == 2:
            read_var = 22*22
        else:
            read_var = 5*5

        var = add("tmpvar_" + outname, str(read_var), "var_" + outname)
        os.remove("tmpvar_" + outname + ".gz")

        E, meta = Wavelength.wavelength_extract(diff, fine, filename=outname,
                                                flexure_x_corr_nm=flexure_x_corr_nm,
                                                flexure_y_corr_pix=flexure_y_corr_pix,
                                                flat_corrections=flat_corrections)
        meta['airmass1'] = diff[0].header['airmass1']
        meta['airmass2'] = diff[0].header['airmass2']
        meta['airmass'] = diff[0].header['airmass']
        header = {}
        for k, v in diff[0].header.iteritems():
            try:
                header[k] = v
            except:
                pass
        meta['HA'] = diff[0].header['HA']
        meta['Dec'] = diff[0].header['Dec']
        meta['RA'] = diff[0].header['RA']
        meta['PRLLTC'] = diff[0].header['PRLLTC']
        meta['equinox'] = diff[0].header['Equinox']
        meta['utc'] = diff[0].header['utc']
        meta['header'] = header
        meta['exptime'] = diff[0].header['exptime']
        np.save(outname, [E, meta])

        exfile = "extracted_var_%s.npy" % outname
        E_var, meta_var = Wavelength.wavelength_extract(var, fine, filename=outname,
                                                        flexure_x_corr_nm=flexure_x_corr_nm,
                                                        flexure_y_corr_pix=flexure_y_corr_pix,
                                                        flat_corrections=flat_corrections)
        np.save("var_" + outname, [E_var, meta_var])

    # Select the "A" and "B" apertures interactively
    sixA, posA, all_A = identify_spectra_gui(E, radius=radius,
                                             PRLLTC=Angle(meta['PRLLTC'], unit='deg'),
                                             lmin=lmin, lmax=lmax,
                                             airmass=meta['airmass'])
    sixB, posB, all_B = identify_spectra_gui(E, radius=radius,
                                             PRLLTC=Angle(meta['PRLLTC'], unit='deg'),
                                             lmin=lmin, lmax=lmax,
                                             airmass=meta['airmass'])

    to_image(E, meta, outname, posA=posA, posB=posB, adcpos=all_A)

    skyA = identify_bgd_spectra(E, posA)
    skyB = identify_bgd_spectra(E, posB)
    allix = np.concatenate([sixA, sixB])

    resA = interp_spectra(E, sixA, sign=1, outname=outname+"_A.pdf",
                          corrfile=corrfile)
    resB = interp_spectra(E, sixB, sign=-1, outname=outname+"_B.pdf",
                          corrfile=corrfile)
    skyA = interp_spectra(E, skyA, sign=1, outname=outname+"_skyA.pdf",
                          corrfile=corrfile)
    skyB = interp_spectra(E, skyB, sign=-1, outname=outname+"_skyB.pdf",
                          corrfile=corrfile)
    varA = interp_spectra(E_var, sixA, sign=1, outname=outname+"_A_var.pdf",
                          corrfile=corrfile)
    varB = interp_spectra(E_var, sixB, sign=1, outname=outname+"_B_var.pdf",
                          corrfile=corrfile)

    # Plot out the X/Y selected spectra
    XSA = []
    YSA = []
    XSB = []
    YSB = []
    for ix in sixA:
        XSA.append(E[ix].X_as)
        YSA.append(E[ix].Y_as)
    for ix in sixB:
        XSB.append(E[ix].X_as)
        YSB.append(E[ix].Y_as)

    pl.figure()
    pl.clf()
    pl.ylim(-30, 30)
    pl.xlim(-30, 30)
    pl.scatter(XSA, YSA, color='blue', marker='H', linewidth=.1)
    pl.scatter(XSB, YSB, color='red', marker='H', linewidth=.1)
    pl.savefig("XYs_%s.pdf" % outname)
    pl.close()
    # / End Plot

    np.save("sp_A_" + outname, resA)
    np.save("sp_B_" + outname, resB)
    np.save("var_A_" + outname, varA)
    np.save("var_B_" + outname, varB)

    ll = Wavelength.fiducial_spectrum()

    sky_A = interp1d(skyA[0]['nm'], skyA[0]['ph_10m_nm'], bounds_error=False)
    sky_B = interp1d(skyB[0]['nm'], skyB[0]['ph_10m_nm'], bounds_error=False)
    sky = np.nanmean([sky_A(ll), sky_B(ll)], axis=0)

    var_A = interp1d(varA[0]['nm'], varA[0]['ph_10m_nm'], bounds_error=False)
    var_B = interp1d(varB[0]['nm'], varB[0]['ph_10m_nm'], bounds_error=False)
    varspec = np.nanmean([var_A(ll), var_B(ll)], axis=0) * \
        (len(sixA) + len(sixB))

    res = [{"doc": resA[0]["doc"],
            "ph_10m_nm": np.copy(resA[0]["ph_10m_nm"]),
            "nm": np.copy(ll)}]

    f1 = interp1d(resA[0]['nm'], resA[0]['ph_10m_nm'], bounds_error=False)
    f2 = interp1d(resB[0]['nm'], resB[0]['ph_10m_nm'], bounds_error=False)

    airmassA = meta['airmass1']
    airmassB = meta['airmass2']
    extCorrA = 10**(Atm.ext(ll*10)*airmassA/2.5)
    extCorrB = 10**(Atm.ext(ll*10)*airmassB/2.5)
    print "Median airmass corr: ", np.median(extCorrA), np.median(extCorrB)

    # If requested merely sum in aperture, otherwise subtract sky
    if nosky:
        res[0]['ph_10m_nm'] = \
            np.nansum([f1(ll) * extCorrA,
                       f2(ll) * extCorrB], axis=0) * \
            (len(sixA) + len(sixB))
    else:
        res[0]['ph_10m_nm'] = \
            np.nansum([(f1(ll) - sky_A(ll)) * extCorrA,
                       (f2(ll) - sky_B(ll)) * extCorrB], axis=0) * \
            (len(sixA) + len(sixB))

    res[0]['exptime'] = meta['exptime']
    res[0]['Extinction Correction'] = 'Applied using Hayes & Latham'
    res[0]['extinction_corr_A'] = extCorrA
    res[0]['extinction_corr_B'] = extCorrB
    res[0]['skyph'] = sky
    res[0]['var'] = varspec
    res[0]['radius_as'] = radius
    res[0]['positionA'] = posA
    res[0]['positionB'] = posB
    res[0]['N_spaxA'] = len(sixA)
    res[0]['N_spaxB'] = len(sixB)
    res[0]['meta'] = meta
    res[0]['object_spaxel_ids_A'] = sixA
    res[0]['sky_spaxel_ids_A'] = skyA
    res[0]['object_spaxel_ids_B'] = sixB
    res[0]['sky_spaxel_ids_B'] = skyB

    # Per-bin wavelength width for downstream flux conversion
    coef = chebfit(np.arange(len(ll)), ll, 4)
    xs = np.arange(len(ll) + 1)
    newll = chebval(xs, coef)
    res[0]['dlam'] = np.diff(newll)

    np.save("sp_" + outname, res)
def handle_A(A, fine, outname=None, standard=None, corrfile=None,
             Aoffset=None, radius=2, flat_corrections=None, nosky=False):
    '''Loads 2k x 2k IFU frame "A" and extracts spectra from the locations
    in "fine".

    Args:
        A (string): filename of ifu FITS file to extract from.
        fine (string): filename of NumPy file with locations + wavelength soln
        outname (string): filename to write results to
        Aoffset (2tuple): X (nm)/Y (pix) shift to apply for flexure correction
        radius (float): Extraction radius in arcsecond
        flat_corrections (list): A list of FlatCorrection objects for
            correcting the extraction
        nosky (Boolean): if True don't subtract sky, merely sum in aperture

    Returns:
        The extracted spectrum, a dictionary:
            {'ph_10m_nm': Flux in photon / 10 m / nanometer integrated
             'nm': Wavelength solution in nm
             'N_spax': Total number of spaxels that created ph_10m_nm
             'skyph': Sky flux in photon / 10 m / nanometer / spaxel
             'radius_as': Extraction radius in arcsec
             'pos': X/Y extraction location of spectrum in arcsec}

    Raises:
        None
    '''

    fine = np.load(fine)
    if outname is None:
        outname = "%s" % (A)

    spec = pf.open(A)

    if Aoffset is not None:
        ff = np.load(Aoffset)
        flexure_x_corr_nm = ff[0]['dXnm']
        flexure_y_corr_pix = ff[0]['dYpix']
        print "Dx %2.1f | Dy %2.1f" % (ff[0]['dXnm'], ff[0]['dYpix'])
    else:
        flexure_x_corr_nm = 0
        flexure_y_corr_pix = 0

    if os.path.isfile(outname + ".npy"):
        print "USING extractions in %s!" % outname
        print "rm %s.npy # if you want to recreate extractions" % outname
        E, meta = np.load(outname + ".npy")
    else:
        print "CREATING extractions ..."
        E, meta = Wavelength.wavelength_extract(spec, fine, filename=outname,
                                                flexure_x_corr_nm=flexure_x_corr_nm,
                                                flexure_y_corr_pix=flexure_y_corr_pix,
                                                flat_corrections=flat_corrections)
        meta['airmass'] = spec[0].header['airmass']
        header = {}
        for k, v in spec[0].header.iteritems():
            try:
                header[k] = v
            except:
                pass
        meta['HA'] = spec[0].header['HA']
        meta['Dec'] = spec[0].header['Dec']
        meta['RA'] = spec[0].header['RA']
        meta['PRLLTC'] = spec[0].header['PRLLTC']
        meta['equinox'] = spec[0].header['Equinox']
        meta['utc'] = spec[0].header['utc']
        meta['header'] = header
        np.save(outname, [E, meta])

    # Object aperture from a Gaussian fit to the collapsed (lmin-lmax) image;
    # the interactive GUI selection is kept here for reference:
    # six, pos, adcpos = identify_spectra_gui(E, radius=radius,
    #                                         PRLLTC=Angle(meta['PRLLTC'], unit='deg'),
    #                                         lmin=650, lmax=700,
    #                                         airmass=meta['airmass'])
    six, pos, adcpos = identify_spectra_Gauss_fit(E, outname=outname,
                                                  radius=radius,
                                                  PRLLTC=Angle(meta['PRLLTC'], unit='deg'),
                                                  lmin=650, lmax=700,
                                                  airmass=meta['airmass'])
    skyix = identify_bgd_spectra(E, pos, inner=radius*1.1)

    res = interp_spectra(E, six, outname=outname+".pdf", corrfile=corrfile)
    sky = interp_spectra(E, skyix, onto=res[0]['nm'],
                         outname=outname+"_sky.pdf", corrfile=corrfile)
    to_image(E, meta, outname, posA=pos, adcpos=adcpos)

    if standard is not None:
        print "STANDARD"
        wav = standard[:, 0]/10.0
        flux = standard[:, 1]
        fun = interp1d(wav, flux, bounds_error=False, fill_value=np.nan)
        correction = fun(res[0]['nm'])/res[0]['ph_10m_nm']
        res[0]['std-correction'] = correction

    airmass = meta['airmass']
    extCorr = 10**(Atm.ext(res[0]['nm']*10) * airmass/2.5)
    print "Median airmass corr: ", np.nanmedian(extCorr)

    ff = interp1d(sky[0]['nm'], sky[0]['ph_10m_nm'], bounds_error=False)
    skybgd = ff(res[0]['nm'])

    res[0]['exptime'] = spec[0].header['exptime']
    res[0]['Extinction Correction'] = 'Applied using Hayes & Latham'
    res[0]['extinction_corr'] = extCorr
    res[0]['skynm'] = sky[0]['nm']
    res[0]['skyph'] = sky[0]['ph_10m_nm']

    if not nosky:
        res[0]['ph_10m_nm'] -= skybgd
    res[0]['ph_10m_nm'] *= extCorr * len(six)

    res[0]['radius_as'] = radius
    res[0]['position'] = pos
    res[0]['N_spax'] = len(six)
    res[0]['meta'] = meta
    res[0]['object_spaxel_ids'] = six
    res[0]['sky_spaxel_ids'] = skyix
    res[0]['sky_spectra'] = sky[0]['spectra']

    np.save("sp_" + outname, res)
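# handle_A / handle_AB correct the aperture spectra for atmospheric
# extinction with a Hayes & Latham style term,
# corrected = flux * 10**(ext(lambda) * airmass / 2.5).
# A minimal sketch of that step on a flat spectrum; the constant
# 0.15 mag/airmass coefficient is a made-up stand-in for the
# wavelength-dependent curve returned by Atm.ext().
def _demo_extinction_correction():
    lam_nm = np.linspace(380.0, 950.0, 5)        # wavelengths in nm
    flux = np.ones_like(lam_nm)                  # flat input spectrum
    airmass = 1.3
    k_mag = np.full_like(lam_nm, 0.15)           # hypothetical extinction curve
    ext_corr = 10**(k_mag * airmass / 2.5)
    print flux * ext_corr                        # extinction-corrected spectrum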
def qr_to_img(exts, size=4, outname="cube.fits"):
    """Convert a data cube to a fits image

    Args:
        exts (list of Extraction): extractions to convert (see Extraction.py)
        size (int): expansion factor, defaults to 4
        outname (str): output fits file name, defaults to cube.fits

    Returns:
        None
    """

    xs = np.array([ex.X_as for ex in exts], dtype=np.float32)
    ys = np.array([ex.Y_as for ex in exts], dtype=np.float32)

    minx = size * np.nanmin(xs)
    miny = size * np.nanmin(ys)
    maxx = size * np.nanmax(xs)
    maxy = size * np.nanmax(ys)

    dx = (maxx - minx) / .25
    dy = (maxy - miny) / .25

    l_grid = Wavelength.fiducial_spectrum()
    l_grid = l_grid[::-1]
    dl_grid = np.diff(l_grid)
    l_grid = l_grid[1:]

    img = np.zeros((dx, dy, len(l_grid) / 2))
    img[:] = np.nan

    xsz = img.shape[0] / 2
    ysz = img.shape[1] / 2

    allspec = np.zeros((len(exts), len(l_grid) / 2))
    for cnt, ex in enumerate(exts):
        if ex.xrange is None:
            continue
        if ex.exptime is None:
            ex.exptime = 1
        if ex.lamcoeff is None:
            continue

        ix = np.arange(*ex.xrange)
        l = chebval(ix, ex.lamcoeff)
        s = ex.specw

        f = interp1d(l, s, fill_value=np.nan, bounds_error=False)
        fi = f(l_grid) / dl_grid
        fi = fi[0:len(fi):2] + fi[1:len(fi):2]
        allspec[cnt, :] = fi

        x = (ex.X_as - minx) / 0.25
        y = (ex.Y_as - miny) / 0.25

        try:
            for ddx in [-1, 0, 1]:
                for ddy in [-1, 0, 1]:
                    img[x + ddx, y + ddy, :] = fi
        except:
            pass

        # outstr = "\rX = %+10.5f, Y = %+10.5f" % (x[0], y[0])
        # print(outstr, end="")
        sys.stdout.flush()

    back = np.nanmedian(allspec, 0)

    if 'fits' not in outname:
        outname += '.fits'
    ff = pf.PrimaryHDU(img.T)
    ff.writeto(outname)

    for cnt, ex in enumerate(exts):
        if ex.xrange is None:
            continue
        if ex.exptime is None:
            ex.exptime = 1
        if ex.lamcoeff is None:
            continue

        ix = np.arange(*ex.xrange)
        l = chebval(ix, ex.lamcoeff)
        s = ex.specw

        f = interp1d(l, s, fill_value=np.nan, bounds_error=False)
        fi = f(l_grid) / dl_grid
        fi = fi[0:len(fi):2] + fi[1:len(fi):2] - back

        # place the background-subtracted spectrum at its hex (Q, R) location
        try:
            x = np.round(size * np.sqrt(3.) * (ex.Q_ix + ex.R_ix / 2)) + xsz
            y = np.round(size * 3. / 2. * ex.R_ix) + ysz
        except:
            continue

        try:
            for ddx in [-1, 0, 1]:
                for ddy in [-1, 0, 1]:
                    img[x + ddx, y + ddy, :] = fi
        except:
            pass

    ff = pf.PrimaryHDU(img.T)
    ff.writeto("bs_" + outname)
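# A minimal usage sketch for qr_to_img, assuming extractions were already
# saved by one of the handle_* routines above; the input filename is a
# hypothetical placeholder.
def _example_qr_to_img():
    exts, meta = np.load("obj_A.fits.npy")     # [E, meta] as saved above
    qr_to_img(exts, size=4, outname="cube_obj_A.fits")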
def handle_A(A, fine, outname=None, standard=None, corrfile=None,
             Aoffset=None, radius=2, flat_corrections=None, nosky=False,
             lmin=650, lmax=700):
    '''Loads 2k x 2k IFU frame "A" and extracts spectra from the locations
    in "fine".

    Args:
        A (string): filename of ifu FITS file to extract from.
        fine (string): filename of NumPy file with locations + wavelength soln
        outname (string): filename to write results to
        Aoffset (2tuple): X (nm)/Y (pix) shift to apply for flexure correction
        radius (float): Extraction radius in arcsecond
        flat_corrections (list): A list of FlatCorrection objects for
            correcting the extraction
        nosky (Boolean): if True don't subtract sky, merely sum in aperture

    Returns:
        The extracted spectrum, a dictionary:
            {'ph_10m_nm': Flux in photon / 10 m / nanometer integrated
             'nm': Wavelength solution in nm
             'N_spax': Total number of spaxels that created ph_10m_nm
             'skyph': Sky flux in photon / 10 m / nanometer / spaxel
             'radius_as': Extraction radius in arcsec
             'pos': X/Y extraction location of spectrum in arcsec}

    Raises:
        None
    '''

    fine = np.load(fine)
    if outname is None:
        outname = "%s" % (A)

    if Aoffset is not None:
        ff = np.load(Aoffset)
        flexure_x_corr_nm = ff[0]['dXnm']
        flexure_y_corr_pix = ff[0]['dYpix']
        print "Dx %2.1f nm | Dy %2.1f px" % (ff[0]['dXnm'], ff[0]['dYpix'])
    else:
        flexure_x_corr_nm = 0
        flexure_y_corr_pix = 0

    if os.path.isfile(outname + ".npy"):
        print "USING extractions in %s.npy!" % outname
        print "rm %s.npy # if you want to recreate extractions" % outname
        E, meta = np.load(outname + ".npy")
        E_var, meta_var = np.load("var_" + outname + ".npy")
    else:
        print "\nCREATING extractions ..."
        spec = pf.open(A)

        adcspeed = spec[0].header["ADCSPEED"]
        if adcspeed == 2:
            read_var = 22*22
        else:
            read_var = 5*5
        var = addcon(A, str(read_var), "var_" + outname + ".fits")

        print "\nExtracting object spectra"
        E, meta = Wavelength.wavelength_extract(spec, fine, filename=outname,
                                                flexure_x_corr_nm=flexure_x_corr_nm,
                                                flexure_y_corr_pix=flexure_y_corr_pix,
                                                flat_corrections=flat_corrections)
        meta['airmass'] = spec[0].header['airmass']
        header = {}
        for k, v in spec[0].header.iteritems():
            try:
                header[k] = v
            except:
                pass
        meta['HA'] = spec[0].header['HA']
        meta['Dec'] = spec[0].header['Dec']
        meta['RA'] = spec[0].header['RA']
        meta['PRLLTC'] = spec[0].header['PRLLTC']
        meta['equinox'] = spec[0].header['Equinox']
        meta['utc'] = spec[0].header['utc']
        meta['header'] = header
        meta['exptime'] = spec[0].header['exptime']
        np.save(outname, [E, meta])

        print "\nExtracting variance spectra"
        E_var, meta_var = Wavelength.wavelength_extract(var, fine, filename=outname,
                                                        flexure_x_corr_nm=flexure_x_corr_nm,
                                                        flexure_y_corr_pix=flexure_y_corr_pix,
                                                        flat_corrections=flat_corrections)
        np.save("var_" + outname, [E_var, meta_var])

    object = meta['header']['OBJECT'].split()[0]

    sixA, posA, adcpos, radius_used = identify_spectra_gui(E, radius=radius,
                                                           PRLLTC=Angle(meta['PRLLTC'], unit='deg'),
                                                           lmin=lmin, lmax=lmax,
                                                           object=object,
                                                           airmass=meta['airmass'])

    to_image(E, meta, outname, posA=posA, adcpos=adcpos)

    if standard is None:
        kixA = identify_bgd_spectra(E, posA, inner=radius_used*1.1)
    else:
        kixA = identify_sky_spectra(E, posA, inner=radius_used*1.1)

    # get the mean spectrum over the selected spaxels
    resA = interp_spectra(E, sixA, outname=outname+".pdf", corrfile=corrfile)
    skyA = interp_spectra(E, kixA, outname=outname+"_sky.pdf",
                          corrfile=corrfile)
    varA = interp_spectra(E_var, sixA, outname=outname+"_var.pdf",
                          corrfile=corrfile)

    # Plot out the X/Y positions of the selected spaxels
    XSA = []
    YSA = []
    XSK = []
    YSK = []
    for ix in sixA:
        XSA.append(E[ix].X_as)
        YSA.append(E[ix].Y_as)
    for ix in kixA:
        XSK.append(E[ix].X_as)
        YSK.append(E[ix].Y_as)

    pl.figure()
    pl.clf()
    pl.ylim(-30, 30)
    pl.xlim(-30, 30)
    pl.scatter(XSA, YSA, color='red', marker='H', linewidth=.1)
    pl.scatter(XSK, YSK, color='green', marker='H', linewidth=.1)
    pl.savefig("XYs_%s.pdf" % outname)
    pl.close()
    # / End Plot

    # Define our standard wavelength grid
    ll = Wavelength.fiducial_spectrum()

    # Resample sky onto standard wavelength grid
    sky_A = interp1d(skyA[0]['nm'], skyA[0]['ph_10m_nm'], bounds_error=False)
    sky = sky_A(ll)

    # Resample variance onto standard wavelength grid
    var_A = interp1d(varA[0]['nm'], varA[0]['ph_10m_nm'], bounds_error=False)
    varspec = var_A(ll)

    # Copy and resample object spectrum onto standard wavelength grid
    res = [{"doc": resA[0]["doc"],
            "ph_10m_nm": np.copy(resA[0]["ph_10m_nm"]),
            "spectra": np.copy(resA[0]["spectra"]),
            "coefficients": np.copy(resA[0]["coefficients"]),
            "nm": np.copy(ll)}]
    f1 = interp1d(resA[0]['nm'], resA[0]['ph_10m_nm'], bounds_error=False)

    # Calculate airmass correction
    airmass = meta['airmass']
    extCorr = 10**(Atm.ext(ll*10) * airmass/2.5)
    print "Median airmass corr: %.4f" % np.median(extCorr)

    # Calculate output corrected spectrum
    if nosky:
        # Account for airmass and aperture
        res[0]['ph_10m_nm'] = f1(ll) * extCorr * len(sixA)
    else:
        # Account for sky, airmass and aperture
        res[0]['ph_10m_nm'] = (f1(ll) - sky_A(ll)) * extCorr * len(sixA)

    # Process standard star objects
    if standard is not None:
        print "STANDARD"
        # Extract reference data
        wav = standard[:, 0]/10.0
        flux = standard[:, 1]
        # Calculate/Interpolate correction onto object wavelengths
        fun = interp1d(wav, flux, bounds_error=False, fill_value=np.nan)
        correction0 = fun(res[0]['nm'])/res[0]['ph_10m_nm']
        # Filter for resolution
        flxf = filters.gaussian_filter(flux, 19.)
        # Calculate/Interpolate filtered correction
        fun = interp1d(wav, flxf, bounds_error=False, fill_value=np.nan)
        correction = fun(res[0]['nm'])/res[0]['ph_10m_nm']
        # Use unfiltered for H-beta region
        ROI = (res[0]['nm'] > 470.) & (res[0]['nm'] < 600.)
        correction[ROI] = correction0[ROI]
        res[0]['std-correction'] = correction
        res[0]['std-maxnm'] = np.max(wav)

    res[0]['exptime'] = meta['exptime']
    res[0]['Extinction Correction'] = 'Applied using Hayes & Latham'
    res[0]['extinction_corr'] = extCorr
    res[0]['skyph'] = sky * len(sixA)
    res[0]['skynm'] = ll
    res[0]['var'] = varspec
    res[0]['radius_as'] = radius_used
    res[0]['position'] = posA
    res[0]['N_spax'] = len(sixA)
    res[0]['meta'] = meta
    res[0]['object_spaxel_ids'] = sixA
    res[0]['sky_spaxel_ids'] = kixA
    res[0]['sky_spectra'] = skyA[0]['spectra']

    coef = chebfit(np.arange(len(ll)), ll, 4)
    xs = np.arange(len(ll) + 1)
    newll = chebval(xs, coef)
    res[0]['dlam'] = np.diff(newll)

    np.save("sp_" + outname, res)
    print "Wrote sp_" + outname + ".npy"
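# A sketch of a standard-star reduction with handle_A.  The reference table
# is assumed to be a two-column text file (wavelength in Angstroms, flux),
# matching how `standard` is indexed above; the filenames are hypothetical.
def _example_handle_a_standard():
    std = np.loadtxt("feige34.dat")            # columns: Angstrom, flux
    handle_A("ifu20150601_std.fits", "fine.npy", outname="STD-feige34",
             standard=std, Aoffset="flex_std.npy", radius=2)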
def measure_flexure_x(cube, hdulist, drow=0., skylines=(557.0, 589.0),
                      lamstart=1000.0, lamratio=239./240., lamlen=250,
                      extract_width=3, skywidth=9, outfile='dX', plot=False):
    """Measures flexure in the X direction, returns the wavelength offset

    Args:
        cube (extraction array): List of Extraction objects, the fine loc +
            wave solution for each spectrum
        hdulist (pyfits obj): Pyfits object for the spectrum to measure
        drow (float): offset in rows for flexure (y-axis)
        skylines (float, float): The night skylines to centroid on in nm
        - See Wavelength.fiducial_spectrum for the following:
        lamstart (float): Wavelength to start the grid on, default 1000 nm
        lamratio (float): Resolution of sed machine
        lamlen (int): Length of spectrum
        extract_width (int): Number of pixels to extract spectrum around
        skywidth (float): Fit gaussian to the ROI of skyline+-skywidth in nm.
        outfile (string): output pdf plot showing fits to skyline(s)
        plot (bool): set to True to show plot, else plot to pdf file

    Returns:
        Tuple of the offset in nm in the X direction and the minimum fitted
        sky-line FWHM in nm.
    """

    # read in image
    dat = hdulist[0].data

    # select 70 representative spaxels
    spec_ixs = np.arange(500, 1200, 10)

    # fiducial wavelength grid
    lamgrid = Wavelength.fiducial_spectrum(lamstart=lamstart,
                                           lamratio=lamratio, len=lamlen)

    # initialize grid of spaxel spectra
    specgrid = np.zeros((len(lamgrid), len(spec_ixs)))

    # loop over selected spaxels
    for i, ix in enumerate(spec_ixs):
        # get spaxel
        f = cube[ix]

        # skip baddies
        # bad fit
        if not f.ok:
            continue
        # noisy fit
        if f.lamnrms > 1:
            continue
        # short spectrum
        if f.xrange[1] - f.xrange[0] < 200:
            continue

        # set up spectral vector for this spaxel
        spec = np.zeros(f.xrange[1] - f.xrange[0])
        yfun = np.poly1d(f.poly)

        # loop over x positions in spaxel
        for jx, xpos in enumerate(np.arange(f.xrange[0], f.xrange[1])):
            # get y position on image
            ypos = yfun(xpos)
            # extract spectrum from image
            try:
                spec[jx] = np.sum(dat[ypos-extract_width:ypos+extract_width,
                                      xpos])
            except:
                continue

        # get wavelengths of spaxel
        try:
            ll = f.get_lambda_nm()
        except:
            continue

        # resample spectrum on fiducial wavelength grid
        specfun = interp1d(ll, spec, bounds_error=False)

        # insert into grid of spaxel spectra
        specgrid[:, i] = specfun(lamgrid)

    # create a median spectrum from spaxel grid
    # taking a median minimizes impact of objects in sample
    skyspec = np.median(specgrid, axis=1)

    # plot resulting sky spectrum
    pl.step(lamgrid, skyspec, where='mid')
    pl.xlabel("Wavelength [nm]")
    pl.ylabel("Spec Irr [ph/10 m/nm]")
    legend = ["Sky"]

    # accumulate average offsets from known sky lines
    sumoff = 0.
    sumscale = 0.
    minsig = 10000.

    # loop over input sky lines
    for skyline in skylines:
        # extract a wavelength window around sky line
        roi = (lamgrid > skyline-skywidth) & (lamgrid < skyline+skywidth)

        # prepare for Gaussian fit
        ffun = NFit.mpfit_residuals(NFit.gaussian4)

        # initial setup of fit
        parinfo = [
            {'value': np.max(skyspec[roi]), 'limited': [1, 0],
             'limits': [0, 0]},
            {'value': skyline},
            {'value': 3},
            {'value': np.min(skyspec[roi]), 'limited': [1, 0],
             'limits': [0, 0]}]

        # do the fit
        fit = NFit.mpfit_do(ffun, lamgrid[roi], skyspec[roi], parinfo)

        # did the fit succeed?
        if fit.status == 1 and fit.params[2] > 0.:
            off = fit.params[1] - skyline
            sumoff += off * fit.params[0]
            sumscale += fit.params[0]
            if fit.params[2] < minsig:
                minsig = fit.params[2]
            pl.plot(lamgrid[roi], NFit.gaussian4(fit.params, lamgrid[roi]))
            dxnm = fit.params[1] - skyline
            legend.append("%.1f, %.2f" % (skyline, off))
        else:
            dxnm = 0.
        print("line = %6.1f (%6.1f), FWHM = %.2f nm, status = %d, "
              "dX = %3.2f nm shift" %
              (skyline, fit.params[0], fit.params[2]*2.354, fit.status, dxnm))

    # weight each line's offset by its fitted amplitude
    if sumscale > 0.:
        dxnm = sumoff / sumscale
    else:
        print "Warning: no good skylines to fit! Setting X offset to 0.0 nm"
        dxnm = 0.
        minsig = 0.
    print "dX = %3.2f nm shift" % dxnm

    pl.title("dX = %3.2f nm shift, dY = %3.2f px shift" % (dxnm, drow))
    pl.legend(legend)
    if plot:
        pl.show()
    else:
        pl.savefig(outfile + ".pdf")

    # return offset and best FWHM
    return dxnm, minsig*2.354
def QR_to_img(exts, Size=4, outname="cube.fits"):
    """Convert a data cube to a fits image

    Args:
        exts (list of Extraction): extractions to convert (see Extraction.py)
        Size (int): expansion factor, defaults to 4
        outname (str): output fits file name, defaults to cube.fits

    Returns:
        None
    """

    Xs = np.array([ext.X_as for ext in exts], dtype=np.float)
    Ys = np.array([ext.Y_as for ext in exts], dtype=np.float)

    minx = Size * np.nanmin(Xs)
    miny = Size * np.nanmin(Ys)
    maxx = Size * np.nanmax(Xs)
    maxy = Size * np.nanmax(Ys)

    Dx = (maxx-minx)/.25
    Dy = (maxy-miny)/.25

    l_grid = Wavelength.fiducial_spectrum()
    l_grid = l_grid[::-1]
    dl_grid = np.diff(l_grid)
    l_grid = l_grid[1:]

    img = np.zeros((Dx, Dy, len(l_grid)/2))
    img[:] = np.nan

    XSz = img.shape[0]/2
    YSz = img.shape[1]/2

    allspec = np.zeros((len(exts), len(l_grid)/2))
    for cnt, ext in enumerate(exts):
        if ext.xrange is None:
            continue
        if ext.exptime is None:
            ext.exptime = 1
        if ext.lamcoeff is None:
            continue

        ix = np.arange(*ext.xrange)
        l = chebval(ix, ext.lamcoeff)
        s = ext.specw

        f = interp1d(l, s, fill_value=np.nan, bounds_error=False)
        fi = f(l_grid) / dl_grid
        fi = fi[0:len(fi):2] + fi[1:len(fi):2]
        allspec[cnt, :] = fi

        x = (ext.X_as - minx)/0.25
        y = (ext.Y_as - miny)/0.25

        try:
            for dx in [-1, 0, 1]:
                for dy in [-1, 0, 1]:
                    img[x+dx, y+dy, :] = fi
        except:
            pass

        outstr = "\rX = %+10.5f, Y = %+10.5f" % (x, y)
        print outstr,
        sys.stdout.flush()

    back = np.median(allspec, 0)

    if 'fits' not in outname:
        outname += '.fits'
    ff = pf.PrimaryHDU(img.T)
    ff.writeto(outname)

    for cnt, ext in enumerate(exts):
        if ext.xrange is None:
            continue
        if ext.exptime is None:
            ext.exptime = 1
        if ext.lamcoeff is None:
            continue

        ix = np.arange(*ext.xrange)
        l = chebval(ix, ext.lamcoeff)
        s = ext.specw

        f = interp1d(l, s, fill_value=np.nan, bounds_error=False)
        fi = f(l_grid)/dl_grid
        fi = fi[0:len(fi):2] + fi[1:len(fi):2] - back

        # place the background-subtracted spectrum at its hex (Q, R) location
        try:
            x = np.round(Size*np.sqrt(3.) * (ext.Q_ix + ext.R_ix/2)) + XSz
            y = np.round(Size*3./2. * ext.R_ix) + YSz
        except:
            continue

        try:
            for dx in [-1, 0, 1]:
                for dy in [-1, 0, 1]:
                    img[x+dx, y+dy, :] = fi
        except:
            pass

    ff = pf.PrimaryHDU(img.T)
    ff.writeto("bs_" + outname)