def test_user_redo():
    """
    Load a cooked WaveCalib master and redo the wavelength solution with the
    holy-grail algorithm, then check the standalone file loader agrees.

    Fix: corrected the typo "finde" -> "find" in the assertion message.
    """
    # Check for files
    spectrograph = util.load_spectrograph('shane_kast_blue')
    # Instantiate
    par = spectrograph.default_pypeit_par()['calibrations']['wavelengths']
    master_dir = os.path.join(os.getenv('PYPEIT_DEV'), 'Cooked', 'WaveCalib')
    master_key = 'ShaneKastBlue_A'
    waveCalib = wavecalib.WaveCalib(None, None, spectrograph, par,
                                    master_dir=master_dir,
                                    master_key=master_key,
                                    reuse_masters=True)
    assert os.path.isfile(waveCalib.file_path), 'Did not find Cooked file.'
    wv_calib = waveCalib.load()

    # Setup: build a single-slit arc spectrum from the stored solution
    waveCalib.par['sigdetect'] = 5.
    nslit = 1
    waveCalib.make_maskslits(nslit)
    npix = len(waveCalib.wv_calib['0']['spec'])
    waveCalib.arccen = np.zeros((npix, nslit))
    waveCalib.arccen[:, 0] = waveCalib.wv_calib['0']['spec']

    # Do it (QA disabled for speed)
    new_wv_calib = waveCalib.build_wv_calib(waveCalib.arccen, 'holy-grail', skip_QA=True)

    # Test: the re-derived solution must be tight
    assert new_wv_calib['0']['rms'] < 0.2

    # Now also test the utility script that reads in the wavecalib
    wv_calib_load = wavecalib.WaveCalib.load_from_file(waveCalib.file_path)
    assert np.all(wv_calib['0']['fitc'] == wv_calib_load['0']['fitc'])
def test_user_redo():
    """Rebuild a wavelength solution from a cooked master and compare loaders."""
    spec = util.load_spectrograph('shane_kast_blue')
    master_file = os.path.join(os.getenv('PYPEIT_DEV'), 'Cooked', 'WaveCalib',
                               'MasterWaveCalib_ShaneKastBlue_A.json')
    assert os.path.isfile(master_file)

    # Build the calibration object and pull in the cooked solution
    wcal = wavecalib.WaveCalib(
        None, None, spec,
        spec.default_pypeit_par()['calibrations']['wavelengths'])
    loaded_calib, _ = wcal.load_master(master_file)

    # Prepare a single-slit arc spectrum taken from the stored solution
    wcal.par['sigdetect'] = 5.
    n_slits = 1
    _ = wcal.make_maskslits(n_slits)
    n_pix = len(wcal.wv_calib['0']['spec'])
    wcal.arccen = np.zeros((n_pix, n_slits))
    wcal.arccen[:, 0] = wcal.wv_calib['0']['spec']

    # Re-derive the solution with the holy-grail algorithm (no QA output)
    rebuilt = wcal.build_wv_calib(wcal.arccen, 'holy-grail', skip_QA=True)
    assert rebuilt['0']['rms'] < 0.2

    # The module-level loader must agree with the instance loader
    reread, reread_par = wavecalib.load_wv_calib(master_file)
    assert np.all(loaded_calib['0']['fitc'] == reread['0']['fitc'])
def get_wv_calib(self): """ Load or generate the 1D wavelength calibrations Requirements: msarc, msbpm, slits, det, par Returns: dict: :attr:`wv_calib` calibration dict and the updated slit mask array """ # Check for existing data if not self._chk_objs(['msarc', 'msbpm', 'slits']): msgs.warn( 'Not enough information to load/generate the wavelength calibration. Skipping and may crash down the line' ) return None # Check internals self._chk_set(['det', 'calib_ID', 'par']) if 'arc' not in self.master_key_dict.keys(): msgs.error('Arc master key not set. First run get_arc.') # No wavelength calibration requested if self.par['wavelengths']['reference'] == 'pixel': msgs.info("A wavelength calibration will not be performed") self.wv_calib = None return self.wv_calib # Grab arc binning (may be different from science!) # TODO : Do this internally when we have a wv_calib DataContainer binspec, binspat = parse.parse_binning(self.msarc.detector.binning) # Instantiate self.waveCalib = wavecalib.WaveCalib( self.msarc, self.slits, self.spectrograph, self.par['wavelengths'], binspectral=binspec, det=self.det, master_key=self.master_key_dict['arc'], # For QA naming qa_path=self.qa_path, msbpm=self.msbpm) # Load from disk (MasterFrame)? masterframe_name = masterframe.construct_file_name( wavecalib.WaveCalib, self.master_key_dict['arc'], master_dir=self.master_dir) if os.path.isfile(masterframe_name) and self.reuse_masters: # Load from disk self.wv_calib = self.waveCalib.load(masterframe_name) self.slits.mask_wvcalib(self.wv_calib) else: self.wv_calib = self.waveCalib.run(skip_QA=(not self.write_qa)) # Save to Masters self.waveCalib.save(outfile=masterframe_name) # Return return self.wv_calib
def test_identify():
    """Run the Identify script end-to-end in test mode and verify its outputs."""
    arc_path = os.path.join(os.getenv('PYPEIT_DEV'), 'Cooked', 'shane_kast_blue',
                            'MasterArc_A_1_DET01.fits')
    slits_path = os.path.join(os.getenv('PYPEIT_DEV'), 'Cooked', 'shane_kast_blue',
                              'MasterSlits_A_1_DET01.fits.gz')

    # Launch the script in its non-interactive test mode
    cli_args = scripts.identify.Identify.parse_args([arc_path, slits_path, '--test'])
    fitter = scripts.identify.Identify.main(cli_args)

    # Line IDs come from a canned table
    fitter.load_IDs(fname=data_path('waveid_tests.ascii'))
    assert fitter._detns.size == 31, 'Bad load'

    # A cubic polynomial fit yields 4 coefficients
    fitter._fitdict['polyorder'] = 3
    fitter.fitsol_fit()
    assert fitter._fitdict['fitc'].size == 4, 'Bad fit'

    # Automatic line identification should accept a healthy number of lines
    fitter.auto_id()
    assert np.sum(fitter._lineflg < 3) > 10, 'Bad auto ID'
    fitter.fitsol_fit()

    # Package the solution and write it to disk
    fit_results = fitter.get_results()
    calib_container = wavecalib.WaveCalib(
        nslits=1,
        wv_fits=np.atleast_1d(fitter._fitdict['WaveFit']),
        arc_spectra=np.atleast_2d(fitter.specdata).T,
        spat_ids=np.atleast_1d(int(fitter._spatid)),
        PYP_SPEC='shane_kast_blue',
    )
    # If you touch the following line, you probably need to update the call in scripts/identify.py
    archive_file = fitter.store_solution(fit_results, 1, rmstol=0.1,
                                         force_save=True, wvcalib=calib_container)

    # Round-trip check on the saved calibration
    reloaded = wavecalib.WaveCalib.from_file('wvcalib.fits')

    # Clean up -- If these fail then the store solution failed
    os.remove('waveid.ascii')
    os.remove(archive_file)
    os.remove('wvcalib.fits')
def get_wv_calib(self):
    """
    Load or generate the 1D wavelength calibrations

    Requirements:
      msarc, msbpm, tslits_dict det, par

    Returns:
        dict, ndarray: :attr:`wv_calib` calibration dict and the updated slit mask array
    """
    # Check for existing data
    if not self._chk_objs(['msarc', 'msbpm', 'tslits_dict']):
        msgs.error('dont have all the objects')

    # Check internals
    self._chk_set(['det', 'calib_ID', 'par'])
    if 'arc' not in self.master_key_dict.keys():
        msgs.error('Arc master key not set. First run get_arc.')

    # Return existing data from the in-memory calibration cache, if present
    if self._cached('wavecalib', self.master_key_dict['arc']) \
            and self._cached('wvmask', self.master_key_dict['arc']):
        self.wv_calib = self.calib_dict[self.master_key_dict['arc']]['wavecalib']
        self.wv_maskslits = self.calib_dict[self.master_key_dict['arc']]['wvmask']
        # NOTE: += on a boolean mask acts as logical OR here
        self.tslits_dict['maskslits'] += self.wv_maskslits
        return self.wv_calib

    # No wavelength calibration requested
    if self.par['wavelengths']['reference'] == 'pixel':
        msgs.info("A wavelength calibration will not be performed")
        self.wv_calib = None
        self.wv_maskslits = np.zeros_like(self.maskslits, dtype=bool)
        self.tslits_dict['maskslits'] += self.wv_maskslits
        return self.wv_calib

    # Grab arc binning (may be different from science!)
    arc_rows = self.fitstbl.find_frames('arc', calib_ID=self.calib_ID, index=True)
    self.arc_files = self.fitstbl.frame_paths(arc_rows)
    binspec, binspat = parse.parse_binning(
        self.spectrograph.get_meta_value(self.arc_files[0], 'binning'))

    # Instantiate
    self.waveCalib = wavecalib.WaveCalib(
        self.msarc, self.tslits_dict, self.spectrograph, self.par['wavelengths'],
        binspectral=binspec, det=self.det,
        master_key=self.master_key_dict['arc'], master_dir=self.master_dir,
        reuse_masters=self.reuse_masters, qa_path=self.qa_path, msbpm=self.msbpm)

    # Load from disk (MasterFrame)?
    self.wv_calib = self.waveCalib.load()
    if self.wv_calib is None:
        # No master on disk -- generate the calibration now
        self.wv_calib, _ = self.waveCalib.run(skip_QA=(not self.write_qa))
        # Save to Masters
        if self.save_masters:
            self.waveCalib.save()

    # Create the mask (needs to be done here in case wv_calib was loaded from Masters)
    # TODO: This should either be done here or save as part of the
    # master frame file. As it is, if not loaded from the master
    # frame file, mask_maskslits is run twice, once in run above and
    # once here...
    self.wv_maskslits = self.waveCalib.make_maskslits(
        self.tslits_dict['slit_left'].shape[1])
    self.tslits_dict['maskslits'] += self.wv_maskslits

    # Save & return
    self._update_cache('arc', ('wavecalib', 'wvmask'),
                       (self.wv_calib, self.wv_maskslits))
    # Return
    return self.wv_calib
def test_wavecalib():
    """Round-trip the WaveCalib DataContainer through FITS I/O."""
    tmp_path = data_path('test_wavecalib.fits')
    if os.path.isfile(tmp_path):
        os.remove(tmp_path)

    # Component fits: a 1D polynomial and a 2D surface fit
    fit1d = fitting.PypeItFit(fitc=np.arange(5).astype(float),
                              xval=np.linspace(1, 100., 100))
    # 2D fit
    fit2d = fitting.PypeItFit(fitc=np.linspace((1, 2), (10, 20), 10),
                              xval=np.linspace(1, 100., 100),
                              x2=np.linspace(1, 100., 100))
    slit_fit = wv_fitting.WaveFit(232, pypeitfit=fit1d,
                                  pixel_fit=np.arange(10).astype(float),
                                  wave_fit=np.linspace(1., 10., 10))
    container = wavecalib.WaveCalib(wv_fits=np.asarray([slit_fit]),
                                    nslits=1,
                                    spat_ids=np.asarray([232]),
                                    wv_fit2d=fit2d)

    # Write, then read back
    container.to_file(tmp_path)
    container2 = wavecalib.WaveCalib.from_file(tmp_path)

    # The round trip must preserve IDs and fit coefficients
    assert np.array_equal(container.spat_ids, container2.spat_ids), 'Bad spat_ids'
    assert np.array_equal(container.wv_fits[0].pypeitfit.fitc,
                          container2.wv_fits[0].pypeitfit.fitc), 'Bad fitc'
    assert np.array_equal(container.wv_fit2d.xval, container2.wv_fit2d.xval)

    # Overwriting an existing file must also work
    container2.to_file(tmp_path, overwrite=True)
    os.remove(tmp_path)

    # A container that includes a failed (empty) solution for one slit
    spat_id_arr = np.asarray([232, 949])
    multi = wavecalib.WaveCalib(wv_fits=np.asarray([slit_fit,
                                                    wv_fitting.WaveFit(949)]),
                                nslits=2,
                                spat_ids=spat_id_arr,
                                wv_fit2d=fit2d)
    multi.to_file(tmp_path)
    multi2 = wavecalib.WaveCalib.from_file(tmp_path)

    # The failed slit must be flagged when masking a SlitTraceSet
    trace_set = slittrace.SlitTraceSet(left_init=np.full((1000, 2), 2, dtype=float),
                                       right_init=np.full((1000, 2), 8, dtype=float),
                                       pypeline='MultiSlit',
                                       spat_id=spat_id_arr,
                                       nspat=2,
                                       PYP_SPEC='dummy')
    trace_set.mask_wvcalib(multi)
    assert trace_set.bitmask.flagged(trace_set.mask[1], flag='BADWVCALIB')

    # Finish
    os.remove(tmp_path)
par=par['calibrations']['tilts'], det=1, tslits_dict=tslits_dict, pixlocn=pixlocn) nslits = tslits_dict['lcen'].shape[1] maskslits = np.zeros(nslits, dtype=bool) # QA is not working here mstilts, wt_maskslits = waveTilts.run(maskslits=maskslits, wv_calib=None, doqa=False) # Wavelength calibration from scienceB image arcparam = {} spectrograph.setup_arcparam(arcparam) setup = 'NIRES' waveCalib = wavecalib.WaveCalib(msarc, spectrograph=spectrograph, #par=par['calibrations']['wavelengths'], det=1, setup=setup, arcparam=arcparam) wv_calib, _ = waveCalib.run(tslits_dict['lcen'], tslits_dict['rcen'], pixlocn, nonlinear=spectrograph.detector[0]['nonlinear']) # Get 2-D wavelength solution waveImage = waveimage.WaveImage(tslits_dict['slitpix'], mstilts, wv_calib, setup=setup, maskslits=maskslits) waveimg = waveImage._build_wave()
def wv_calib_from_extern(wave_soln, arc, lamps, outfile=None, sigdetect=5.0, fwhm=4.0,
                         nonlinear_counts=1e10, outroot='./', debug=False):
    """
    Construct a PypeIt wavelength-calibration dict from an external solution.

    For each slit, arc lines are detected in ``arc``, assigned wavelengths by
    cubic interpolation of ``wave_soln``, and refit against the line lists for
    ``lamps``.  A QA plot is written per slit.

    Args:
        wave_soln (ndarray): External wavelength solution, shape (nspec, nslits).
        arc (ndarray): Arc spectra; assumed to share the spectral axis of
            ``wave_soln`` -- TODO add array size checking etc.
        lamps (list): Lamp names used to load the reference line lists.
        outfile (str, optional): If provided, save the result as a master file.
        sigdetect (float, optional): Detection significance threshold for lines.
        fwhm (float, optional): Expected arc-line FWHM in pixels.
        nonlinear_counts (float, optional): Counts level above which lines are
            considered saturated/non-linear.
        outroot (str, optional): Output directory for QA files.
        debug (bool, optional): If True, also display the QA interactively.

    Returns:
        dict: Wavelength calibration, keyed by slit number as a string.
    """
    # TODO add array size checking etc.
    nslits = wave_soln.shape[1]
    nspec = wave_soln.shape[0]
    line_lists = wavecal.waveio.load_line_lists(lamps)
    wv_calib = {}
    spec_vec = np.arange(nspec)
    for islit in range(nslits):
        # Progress indicator (was print(str(islit)); str() was redundant)
        print(islit)
        # Find peaks for this slit
        tcent, ecent, cut_tcent, icut, spec_cont_sub = wavecal.wvutils.arc_lines_from_spec(
            arc[:, islit], sigdetect=sigdetect, nonlinear_counts=nonlinear_counts,
            fwhm=fwhm, debug=debug)
        detections = tcent[icut]
        # Wavelengths of the detected lines via cubic interpolation of the
        # external solution
        wave_det = (scipy.interpolate.interp1d(spec_vec, wave_soln[:, islit],
                                               kind='cubic'))(detections)
        patt_dict = {}
        patt_dict['mask'] = np.ones_like(detections, dtype=bool)
        patt_dict['IDs'] = wave_det
        # Rough dispersion estimate over the full slit
        patt_dict['bdisp'] = (np.max(wave_soln[:, islit])
                              - np.min(wave_soln[:, islit])) / nspec
        final_fit = wavecal.fitting.fit_slit(spec_cont_sub, patt_dict, detections,
                                             line_lists, vel_tol=300.0,
                                             outroot=outroot, verbose=True)
        wv_calib[str(islit)] = final_fit
        # Per-slit QA plot
        qa_file = qa.set_qa_filename('GNIRS', 'arc_fit_qa', slit=islit, out_dir=outroot)
        wavecal.qa.arc_fit_qa(wv_calib[str(islit)], outfile=qa_file)
        if debug:
            # Show the QA
            wavecal.qa.arc_fit_qa(wv_calib[str(islit)])

    # Optionally write the calibration to a master file
    waveCalib = wavecalib.WaveCalib(None, None)
    if outfile is not None:
        waveCalib.save_master(wv_calib, outfile=outfile)
    return wv_calib
def load_kast_blue_masters(get_spectrograph=False, aimg=False, tslits=False, tilts=False,
                           datasec=False, wvcalib=False):
    """
    Load up the set of shane_kast_blue master frames

    Note: the returned list contains the requested items in the order of the
    flags below (spectrograph, arc, slits, tilts, datasec, wvcalib).

    Args:
        get_spectrograph (bool): Include the Spectrograph object in the return.
        aimg (bool): Include the master arc image.
        tslits (bool): Include the trace-slits dict (with mstrace added).
        tilts (bool): Include the tilts dict.
        datasec (bool): Include the datasec image.
        wvcalib (bool): Include the wavelength calibration dict.

    Returns:
        list: Requested master-frame objects, in flag order.
    """
    spectrograph = load_spectrograph('shane_kast_blue')
    spectrograph.naxis = (2112,350)  # Image shape with overscan
    # Cooked masters from the dev suite when available, else local test data
    root_path = data_path('MF') if os.getenv('PYPEIT_DEV') is None \
        else os.path.join(os.getenv('PYPEIT_DEV'), 'Cooked', 'MF')
    master_dir = root_path+'_'+spectrograph.spectrograph
    reuse_masters = True

    # Load up the Masters
    ret = []

    if get_spectrograph:
        ret.append(spectrograph)

    master_key = 'A_1_01'
    if aimg:
        AImg = arcimage.ArcImage(spectrograph, master_key=master_key,
                                 master_dir=master_dir, reuse_masters=reuse_masters)
        msarc, _ = AImg.load_master(AImg.ms_name)
        ret.append(msarc)

    if tslits:
        traceSlits = traceslits.TraceSlits(None,spectrograph,None)
        # TODO: Should this be json now?
        tslits_dict, mstrace = traceSlits.load_master(
            os.path.join(master_dir,'MasterTrace_A_1_01.fits'))
        # This is a bit of a hack, but I'm adding the mstrace to the dict since
        # we need it in the flat field test
        tslits_dict['mstrace'] = mstrace
        ret.append(tslits_dict)

    if tilts:
        wvTilts = wavetilts.WaveTilts(None, None, spectrograph, None, None,
                                      master_key=master_key, master_dir=master_dir,
                                      reuse_masters=reuse_masters)
        tilts_dict, _ = wvTilts.master()
        ret.append(tilts_dict)

    if datasec:
        datasec_img = spectrograph.get_datasec_img(data_path('b1.fits.gz'), 1)
        ret.append(datasec_img)

    if wvcalib:
        Wavecalib = wavecalib.WaveCalib(
            None, None, spectrograph,
            spectrograph.default_pypeit_par()['calibrations']['wavelengths'],
            master_key=master_key, master_dir=master_dir,
            reuse_masters=reuse_masters)
        wv_calib, _ = Wavecalib.master()
        ret.append(wv_calib)

    # Return
    return ret