示例#1
0
    def build_wv_calib(self, arccen, method, skip_QA=False):
        """
        Main routine to generate the wavelength solutions in a loop over slits

        Wrapper to arc.simple_calib or arc.calib_with_arclines

        self.maskslits is updated for slits that fail

        Args:
            arccen (ndarray): Extracted arc spectra, one column per slit
            method (str): Wavelength-calibration algorithm to use
              'simple' -- arc.simple_calib
              'semi-brute' -- autoid.semi_brute (currently broken)
              'basic' -- autoid.basic
              'holy-grail' -- wavecal.autoid.HolyGrail
              'reidentify' -- wavecal.autoid.ArchiveReid
              'full_template' -- wavecal.autoid.full_template
            skip_QA (bool, optional): If True, skip writing the arc-fit QA files

        Returns:
            dict:  self.wv_calib

        Raises:
            Calls msgs.error (fatal) for an unrecognized method or a missing
            binspectral with 'full_template'.
        """
        # Obtain a list of good slits
        ok_mask = np.where(~self.maskslits)[0]

        # Obtain calibration for all slits
        if method == 'simple':
            lines = self.par['lamps']
            line_lists = waveio.load_line_lists(lines)

            # Store into final_fit (not self.wv_calib directly) so the common
            # `self.wv_calib = final_fit` assignment below does not raise a
            # NameError for this branch
            final_fit = arc.simple_calib_driver(
                self.msarc,
                line_lists,
                arccen,
                ok_mask,
                nfitpix=self.par['nfitpix'],
                IDpixels=self.par['IDpixels'],
                IDwaves=self.par['IDwaves'])
        elif method == 'semi-brute':
            # TODO: THIS IS CURRENTLY BROKEN
            debugger.set_trace()
            final_fit = {}
            for slit in ok_mask:
                # HACKS BY JXP
                self.par['wv_cen'] = 8670.
                self.par['disp'] = 1.524
                # ToDO remove these hacks and use the parset in semi_brute
                best_dict, ifinal_fit \
                        = autoid.semi_brute(arccen[:, slit], self.par['lamps'], self.par['wv_cen'],
                                            self.par['disp'], match_toler=self.par['match_toler'],
                                            func=self.par['func'], n_first=self.par['n_first'],
                                            sigrej_first=self.par['n_first'],
                                            n_final=self.par['n_final'],
                                            sigrej_final=self.par['sigrej_final'],
                                            sigdetect=self.par['sigdetect'],
                                            nonlinear_counts=self.nonlinear_counts)
                final_fit[str(slit)] = ifinal_fit.copy()
        elif method == 'basic':
            final_fit = {}
            for slit in ok_mask:
                status, ngd_match, match_idx, scores, ifinal_fit = \
                        autoid.basic(arccen[:, slit], self.par['lamps'], self.par['wv_cen'],
                                     self.par['disp'], nonlinear_counts=self.nonlinear_counts)
                final_fit[str(slit)] = ifinal_fit.copy()
                # Flag the slit as bad if the basic fit did not converge
                if status != 1:
                    self.maskslits[slit] = True
        elif method == 'holy-grail':
            # Sometimes works, sometimes fails
            arcfitter = autoid.HolyGrail(arccen, par=self.par, ok_mask=ok_mask)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'reidentify':
            # Now preferred
            # Slit positions
            arcfitter = autoid.ArchiveReid(arccen,
                                           self.spectrograph,
                                           self.par,
                                           ok_mask=ok_mask,
                                           slit_spat_pos=self.slit_spat_pos)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'full_template':
            # Now preferred
            if self.binspectral is None:
                msgs.error(
                    "You must specify binspectral for the full_template method!"
                )
            final_fit = autoid.full_template(arccen,
                                             self.par,
                                             ok_mask,
                                             self.det,
                                             self.binspectral,
                                             nsnippet=self.par['nsnippet'])
        else:
            msgs.error(
                'Unrecognized wavelength calibration method: {:}'.format(
                    method))

        self.wv_calib = final_fit

        # Remake mask (*mainly for the QA that follows*)
        self.maskslits = self.make_maskslits(len(self.maskslits))
        ok_mask = np.where(~self.maskslits)[0]

        # QA
        if not skip_QA:
            for slit in ok_mask:
                outfile = qa.set_qa_filename(self.master_key,
                                             'arc_fit_qa',
                                             slit=slit,
                                             out_dir=self.qa_path)
                autoid.arc_fit_qa(self.wv_calib[str(slit)], outfile=outfile)

        # Return
        self.steps.append(inspect.stack()[0][3])
        return self.wv_calib
示例#2
0
    def build_wv_calib(self, arccen, method, skip_QA=False):
        """
        Main routine to generate the wavelength solutions in a loop over slits

        Wrapper to arc.simple_calib or arc.calib_with_arclines

        self.maskslits is updated for slits that fail

        Args:
            arccen (ndarray): Extracted arc spectra, one column per slit
            method (str): Wavelength-calibration algorithm to use
              'simple' -- arc.simple_calib
              'semi-brute' -- autoid.semi_brute (currently broken)
              'basic' -- autoid.basic
              'holy-grail' -- wavecal.autoid.HolyGrail
              'reidentify' -- wavecal.autoid.ArchiveReid
              'identify' -- wavecal.identify.Identify
              'full_template' -- wavecal.autoid.full_template
            skip_QA (bool, optional): If True, skip writing the arc-fit QA files

        Returns:
            dict:  self.wv_calib
        """
        # Obtain a list of good slits
        ok_mask = np.where(np.invert(self.maskslits))[0]
        # Obtain calibration for all slits
        if method == 'simple':
            lines = self.par['lamps']
            line_lists = waveio.load_line_lists(lines)

            final_fit = arc.simple_calib_driver(
                line_lists,
                arccen,
                ok_mask,
                n_final=self.par['n_final'],
                sigdetect=self.par['sigdetect'],
                IDpixels=self.par['IDpixels'],
                IDwaves=self.par['IDwaves'])
        elif method == 'semi-brute':
            # TODO: THIS IS CURRENTLY BROKEN
            # The embed() here is deliberate: it halts before running a
            # known-broken code path
            embed()
            final_fit = {}
            for slit in ok_mask:
                # HACKS BY JXP
                self.par['wv_cen'] = 8670.
                self.par['disp'] = 1.524
                # ToDO remove these hacks and use the parset in semi_brute
                best_dict, ifinal_fit \
                        = autoid.semi_brute(arccen[:, slit], self.par['lamps'], self.par['wv_cen'],
                                            self.par['disp'], match_toler=self.par['match_toler'],
                                            func=self.par['func'], n_first=self.par['n_first'],
                                            sigrej_first=self.par['n_first'],
                                            n_final=self.par['n_final'],
                                            sigrej_final=self.par['sigrej_final'],
                                            sigdetect=self.par['sigdetect'],
                                            nonlinear_counts=self.nonlinear_counts)
                final_fit[str(slit)] = ifinal_fit.copy()
        elif method == 'basic':
            final_fit = {}
            for slit in ok_mask:
                status, ngd_match, match_idx, scores, ifinal_fit = \
                        autoid.basic(arccen[:, slit], self.par['lamps'], self.par['wv_cen'],
                                     self.par['disp'], nonlinear_counts=self.nonlinear_counts)
                final_fit[str(slit)] = ifinal_fit.copy()
                # Flag the slit as bad if the basic fit did not converge
                if status != 1:
                    self.maskslits[slit] = True
        elif method == 'holy-grail':
            # Sometimes works, sometimes fails
            arcfitter = autoid.HolyGrail(arccen, par=self.par, ok_mask=ok_mask)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'identify':
            final_fit = {}
            # Manually identify lines
            msgs.info("Initializing the wavelength calibration tool")
            # TODO: Move this loop to the GUI initalise method
            # NOTE: removed a leftover debugging embed() here -- it dropped
            # every 'identify' run into an interactive IPython shell
            for slit in ok_mask:
                arcfitter = gui_identify.initialise(arccen,
                                                    slit=slit,
                                                    par=self.par)
                final_fit[str(slit)] = arcfitter.get_results()
                if final_fit[str(slit)] is not None:
                    ans = 'y'
                    # ans = ''
                    # while ans != 'y' and ans != 'n':
                    #     ans = input("Would you like to store this wavelength solution in the archive? (y/n): ")
                    # Only archive solutions that meet the RMS threshold
                    if ans == 'y' and final_fit[str(
                            slit)]['rms'] < self.par['rms_threshold']:
                        # Store the results in the user reid arxiv
                        specname = self.spectrograph.spectrograph
                        gratname = "UNKNOWN"  # input("Please input the grating name: ")
                        dispangl = "UNKNOWN"  # input("Please input the dispersion angle: ")
                        templates.pypeit_identify_record(
                            final_fit[str(slit)], self.binspectral, specname,
                            gratname, dispangl)
                        msgs.info("Your wavelength solution has been stored")
                        msgs.info(
                            "Please consider sending your solution to the PYPEIT team!"
                        )

        elif method == 'reidentify':
            # Now preferred
            # Slit positions
            arcfitter = autoid.ArchiveReid(arccen,
                                           self.spectrograph,
                                           self.par,
                                           ok_mask=ok_mask,
                                           slit_spat_pos=self.slit_spat_pos)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'full_template':
            # Now preferred
            if self.binspectral is None:
                msgs.error(
                    "You must specify binspectral for the full_template method!"
                )
            final_fit = autoid.full_template(arccen,
                                             self.par,
                                             ok_mask,
                                             self.det,
                                             self.binspectral,
                                             nsnippet=self.par['nsnippet'])
        else:
            msgs.error(
                'Unrecognized wavelength calibration method: {:}'.format(
                    method))

        self.wv_calib = final_fit

        # Remake mask (*mainly for the QA that follows*)
        self.maskslits = self.make_maskslits(len(self.maskslits))
        ok_mask = np.where(np.invert(self.maskslits))[0]

        # QA
        if not skip_QA:
            for slit in ok_mask:
                outfile = qa.set_qa_filename(self.master_key,
                                             'arc_fit_qa',
                                             slit=slit,
                                             out_dir=self.qa_path)
                autoid.arc_fit_qa(self.wv_calib[str(slit)], outfile=outfile)

        # Return
        self.steps.append(inspect.stack()[0][3])
        return self.wv_calib
示例#3
0
def _reid_arxiv_to_fits(reid_path, iroot, iout):
    """Convert a JSON reid arxiv of multi-order wavelength solutions into a
    FITS template with wave/flux/order columns.

    Args:
        reid_path (str): Directory holding both the input JSON and output FITS
        iroot (str): Input JSON filename; 'nir' in the name means the solution
            is taken as-is (no air-to-vacuum conversion is applied)
        iout (str): Output FITS filename
    """
    old_file = os.path.join(reid_path, iroot)
    odict, par = waveio.load_reid_arxiv(old_file)

    # Collate the per-order spectra and wavelength solutions
    orders = odict['fit2d']['orders'][::-1].astype(int)  # Flipped
    all_wave = np.zeros((odict['0']['nspec'], orders.size))
    all_flux = np.zeros_like(all_wave)
    for kk, order in enumerate(orders):
        all_flux[:, kk] = odict[str(kk)]['spec']
        if 'nir' in iroot:
            all_wave[:, kk] = odict[str(kk)]['wave_soln']
        else:
            all_wave[:, kk] = airtovac(odict[str(kk)]['wave_soln'] * units.AA).value

    # Write
    tbl = Table()
    tbl['wave'] = all_wave.T
    tbl['flux'] = all_flux.T
    tbl['order'] = orders
    tbl.meta['BINSPEC'] = 1
    outfile = os.path.join(reid_path, iout)
    tbl.write(outfile, overwrite=True)
    print("Wrote: {}".format(outfile))


def main(flg):
    """Build wavelength-calibration templates for the instruments selected by
    the bit flags in ``flg`` (bit 2**N enables one instrument/setup).

    Args:
        flg (int): Bitmask selecting which templates to (re)build
    """
    # Keck LRISb
    if flg & (2**0): # B300, all lamps
        binspec = 1
        slits = [15]
        xidl_file = os.path.join(template_path, 'Keck_LRIS', 'B300', 'lris_blue_300.sav')
        outroot = 'keck_lris_blue_300_d680.fits'
        build_template(xidl_file, slits, None, binspec, outroot, lowredux=True)

    if flg & (2**1): # B400, all lamps I think)
        binspec = 2
        outroot='keck_lris_blue_400_d560.fits'
        slits = [19,14]
        lcut = [5500.]
        xidl_file = os.path.join(template_path, 'Keck_LRIS', 'B400', 'lris_blue_400_d560.sav')
        build_template(xidl_file, slits, lcut, binspec, outroot, lowredux=True)

    if flg & (2**2): # B600, all lamps
        binspec = 2
        outroot='keck_lris_blue_600_d560.fits'
        slits = [0,7]
        lcut = [4500.]
        wfile = os.path.join(template_path, 'Keck_LRIS', 'B600', 'MasterWaveCalib_A_1_01.json')
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**3): # B1200, all lamps?
        binspec = 2
        outroot='keck_lris_blue_1200_d460.fits'
        slits = [19,44]
        lcut = [3700.]
        xidl_file = os.path.join(template_path, 'Keck_LRIS', 'B1200', 'lris_blue_1200.sav')
        build_template(xidl_file, slits, lcut, binspec, outroot, lowredux=True)

    # Shane Kastb
    if flg & (2**4):  # 452/3306
        binspec = 1
        slits = [0]
        xidl_file = os.path.join(template_path, 'Shane_Kast', '452_3306', 'kast_452_3306.sav')
        outroot = 'shane_kast_blue_452.fits'
        build_template(xidl_file, slits, None, binspec, outroot, lowredux=True)

    if flg & (2**5):  # 600/4310
        binspec = 1
        slits = [0,3]
        lcut = [4550.]
        xidl_file = os.path.join(template_path, 'Shane_Kast', '600_4310', 'kast_600_4310.sav')
        outroot = 'shane_kast_blue_600.fits'
        build_template(xidl_file, slits, lcut, binspec, outroot, lowredux=True)

    if flg & (2**6):  # 830/3460
        binspec = 1
        slits = [0]
        xidl_file = os.path.join(template_path, 'Shane_Kast', '830_3460', 'kast_830_3460.sav')
        outroot = 'shane_kast_blue_830.fits'
        build_template(xidl_file, slits, None, binspec, outroot, lowredux=True)

    # Keck/DEIMOS
    if flg & (2**7):  # 600ZD :: Might not go red enough
        binspec = 1
        slits = [0,1]
        lcut = [7192.]
        xidl_file = os.path.join(template_path, 'Keck_DEIMOS', '600ZD', 'deimos_600.sav')
        outroot = 'keck_deimos_600.fits'
        build_template(xidl_file, slits, lcut, binspec, outroot, lowredux=True)

    if flg & (2**8):  # 830G
        binspec = 1
        outroot='keck_deimos_830G.fits'
        # 3-12 = blue  6508 -- 8410
        # 7-24 = blue  8497 -- 9925 (no lines after XeI)
        ifiles = [0, 0, 1]
        slits = [12, 14, 24]
        lcut = [8400., 8480]
        wfile1 = os.path.join(template_path, 'Keck_DEIMOS', '830G_M_8600', 'MasterWaveCalib_A_1_03.json')
        wfile2 = os.path.join(template_path, 'Keck_DEIMOS', '830G_M_8600', 'MasterWaveCalib_A_1_07.json')
        # det_dict
        det_cut = {}
        det_cut['dets'] = [[1,2,3,4], [5,6,7,8]]
        det_cut['wcuts'] = [[0,9000.], [8200,1e9]]  # Significant overlap is fine
        #
        build_template([wfile1,wfile2], slits, lcut, binspec, outroot, lowredux=False,
                       ifiles=ifiles, det_cut=det_cut)

    if flg & (2**9):  # 1200
        binspec = 1
        outroot='keck_deimos_1200G.fits'
        # 3-3 = blue  6268.23 -- 7540
        # 3-14 = red   6508 -- 7730
        # 7-3 = blue  7589 -- 8821
        # 7-17 = red  8000 - 9230
        # 7c-0 = red  9120 -- 9950
        ifiles = [0, 0, 1, 1, 2]
        slits = [3, 14, 3, 17, 0]
        lcut = [7450., 7730., 8170, 9120]
        wfile1 = os.path.join(template_path, 'Keck_DEIMOS', '1200G', 'MasterWaveCalib_A_1_03.json')
        wfile2 = os.path.join(template_path, 'Keck_DEIMOS', '1200G', 'MasterWaveCalib_A_1_07.json')
        wfile3 = os.path.join(template_path, 'Keck_DEIMOS', '1200G', 'MasterWaveCalib_A_1_07c.json')
        # det_dict
        det_cut = None
        #det_cut = {}
        #det_cut['dets'] = [[1,2,3,4], [5,6,7,8]]
        #det_cut['wcuts'] = [[0,9000.], [8200,1e9]]  # Significant overlap is fine
        #
        build_template([wfile1,wfile2,wfile3], slits, lcut, binspec, outroot, lowredux=False,
                       ifiles=ifiles, det_cut=det_cut, chk=True)

    # ###############################################3
    # Keck/LRISr
    if flg & (2**10): # R400
        binspec = 2
        outroot='keck_lris_red_400.fits'
        slits = [7]  # Quite blue, but not the bluest
        lcut = []
        wfile = os.path.join(template_path, 'Keck_LRIS', 'R400', 'MasterWaveCalib_A_1_01.json')
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**11):  # R1200
        # slits = [2-3]  # 7726 -- 9250
        # slits = [1-4]  # 9250 -- 9925
        binspec = 1
        outroot='keck_lris_red_1200_9000.fits'
        ifiles = [0, 1]
        slits = [3, 7]
        lcut = [9250.]
        wfile1 = os.path.join(template_path, 'Keck_LRIS', 'R1200_9000', 'MasterWaveCalib_A_1_02.json')  # Original Dev
        wfile2 = os.path.join(template_path, 'Keck_LRIS', 'R1200_9000', 'MasterWaveCalib_A_1_01.json')  # Dev suite 2x1
        build_template([wfile1,wfile2], slits, lcut, binspec, outroot, lowredux=False,
                       ifiles=ifiles)

    if flg & (2**12):  # R600/5000
        # slits = [1-4]  # 5080 -- 7820
        # slits = [1-7]  # 7820 -- 9170
        binspec = 2
        outroot='keck_lris_red_600_5000.fits'
        slits = [4, 7]
        lcut = [7820.]
        wfile = os.path.join(template_path, 'Keck_LRIS', 'R600_5000', 'MasterWaveCalib_B_1_01.json')
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**27):  # R600/7500
        # slits = [1-10]  # 5000 -- 7840
        # slits = [1-4]  # 7840 -- 9230
        binspec = 2
        outroot='keck_lris_red_600_7500.fits'
        slits = [10, 4]
        lcut = [7840.]
        wfile = os.path.join(template_path, 'Keck_LRIS', 'R600_7500', 'MasterWaveCalib_I_1_01.json')
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False,
                       chk=True, normalize=True, subtract_conti=True)

    # ##################################
    # Magellan/MagE
    if flg & (2**13):
        # Load
        mase_path = os.path.join(os.getenv('XIDL_DIR'), 'Magellan', 'MAGE', 'mase', 'Calib')
        sav_file = os.path.join(mase_path, 'MagE_wvguess_jfh.idl')
        mase_dict = readsav(sav_file)
        mase_sol = Table(mase_dict['all_arcfit'])
        # Do it
        all_wave = np.zeros((2048, 15))
        all_flux = np.zeros_like(all_wave)
        for order in np.arange(15):
            all_flux[:,order] = mase_dict['sv_aspec'][order]
            # Build the wavelengths
            wv_air = cheby_val(mase_sol['FFIT'][order], np.arange(2048), mase_sol['NRM'][order],
                                         mase_sol['NORD'][order])
            all_wave[:,order] = airtovac(wv_air * units.AA).value
        # Write
        tbl = Table()
        tbl['wave'] = all_wave.T
        tbl['flux'] = all_flux.T
        tbl['order'] = np.arange(20, 5, -1, dtype=int)
        tbl.meta['BINSPEC'] = 1
        # Write
        outroot='magellan_mage.fits'
        outfile = os.path.join(template_path, outroot)
        tbl.write(outfile, overwrite=True)
        print("Wrote: {}".format(outfile))

    if flg & (2**14):  # Magellan/MagE Plots
        outpath = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'plots')
        new_mage_file = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'reid_arxiv',
                                     'magellan_mage.fits')
        # Load
        mage_wave = Table.read(new_mage_file)
        llist = waveio.load_line_lists(['ThAr_MagE'])
        #
        for kk in range(mage_wave['wave'].shape[1]):
            wv = mage_wave['wave'][:, kk]
            fx = mage_wave['flux'][:, kk]
            order = 20 - kk
            # Reidentify
            detections, spec_cont_sub, patt_dict = autoid.reidentify(fx, fx, wv, llist, 1)
            # Fit
            final_fit = fitting.fit_slit(fx, patt_dict, detections, llist)
            # Output
            outfile=os.path.join(outpath, 'MagE_order{:2d}_IDs.pdf'.format(order))
            autoid.arc_fit_qa(final_fit, outfile=outfile, ids_only=True)
            print("Wrote: {}".format(outfile))
            autoid.arc_fit_qa(final_fit, outfile=os.path.join(outpath, 'MagE_order{:2d}_full.pdf'.format(order)))

    if flg & (2**15):  # VLT/X-Shooter reid_arxiv
        # VIS
        reid_path = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'reid_arxiv')
        for iroot, iout in zip(['vlt_xshooter_vis1x1.json', 'vlt_xshooter_nir.json'],
            ['vlt_xshooter_vis1x1.fits', 'vlt_xshooter_nir.fits']):
            _reid_arxiv_to_fits(reid_path, iroot, iout)

    if flg & (2**16):  # VLT/X-Shooter line list
        line_path = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'lists')
        old_file = os.path.join(line_path, 'ThAr_XSHOOTER_VIS_air_lines.dat')
        # Load
        air_list = waveio.load_line_list(old_file)
        # Vacuum
        vac_wv = airtovac(air_list['wave']*units.AA).value
        vac_list = air_list.copy()
        vac_list['wave'] = vac_wv
        # Write
        new_file = os.path.join(line_path, 'ThAr_XSHOOTER_VIS_lines.dat')
        vac_list.write(new_file, format='ascii.fixed_width', overwrite=True)
        print("Wrote: {}".format(new_file))

    if flg & (2**17):  # NIRES
        reid_path = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'reid_arxiv')
        _reid_arxiv_to_fits(reid_path, 'keck_nires.json', 'keck_nires.fits')

    if flg & (2**18):  # Gemini/GNIRS
        reid_path = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'reid_arxiv')
        _reid_arxiv_to_fits(reid_path, 'gemini_gnirs.json', 'gemini_gnirs.fits')

    # ##############################
    if flg & (2**20):  # GMOS R400 Hamamatsu
        binspec = 2
        outroot='gemini_gmos_r400_ham.fits'
        #
        ifiles = [0, 1, 2, 3, 4]
        slits = [0, 2, 3, 0, 0]  # Be careful with the order..
        lcut = [5400., 6620., 8100., 9000.]
        wfile1 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_01_aa.json')
        wfile5 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_05_aa.json') # 5190 -- 6679
        #wfile2 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_02_aa.json')
        wfile3 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_04_aa.json')
        wfile4 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_03_aa.json')
        wfile6 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_06_aa.json')
        #
        build_template([wfile1,wfile5,wfile3,wfile4, wfile6], slits, lcut, binspec,
                       outroot, lowredux=False, ifiles=ifiles, chk=True,
                       normalize=True, subtract_conti=True)

    # ##############################
    if flg & (2**21):  # GMOS R400 E2V
        binspec = 2
        outroot='gemini_gmos_r400_e2v.fits'
        #
        ifiles = [0, 1, 2]
        slits = [0, 0, 0]
        lcut = [6000., 7450]
        wfile1 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_1_01.json')
        wfile2 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_1_02.json')
        wfile3 = os.path.join(template_path, 'GMOS', 'R400', 'MasterWaveCalib_A_1_03.json')
        #
        build_template([wfile1,wfile2,wfile3], slits, lcut, binspec,
                       outroot, lowredux=False, ifiles=ifiles, chk=True,
                       normalize=True)

    # ##############################
    if flg & (2**22):  # GMOS B600 Hamamatsu
        binspec = 2
        outroot='gemini_gmos_b600_ham.fits'
        #
        ifiles = [0, 1, 2, 3, 4]
        slits = [0, 0, 0, 0, 0]
        lcut = [4250., 4547., 5250., 5615.]
        wfile1 = os.path.join(template_path, 'GMOS', 'B600', 'MasterWaveCalib_C_1_01.json')
        wfile5 = os.path.join(template_path, 'GMOS', 'B600', 'MasterWaveCalib_D_1_01.json') # - 4547
        wfile2 = os.path.join(template_path, 'GMOS', 'B600', 'MasterWaveCalib_C_1_02.json')
        wfile4 = os.path.join(template_path, 'GMOS', 'B600', 'MasterWaveCalib_D_1_02.json') # 4610-5608
        wfile3 = os.path.join(template_path, 'GMOS', 'B600', 'MasterWaveCalib_C_1_03.json')
        #
        build_template([wfile1,wfile5,wfile2,wfile4,wfile3], slits, lcut, binspec,
                       outroot, lowredux=False, ifiles=ifiles, chk=True,
                       normalize=True, subtract_conti=True, miny=-100.)

    if flg & (2**23):  # WHT/ISIS
        iroot = 'wht_isis_blue_1200_4800.json'
        outroot = 'wht_isis_blue_1200_4800.fits'
        wfile = os.path.join(template_path, 'WHT_ISIS', '1200B', iroot)
        binspec = 2
        slits = [0]
        lcut = [3200.]
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**24):  # Magellan/FIRE
        reid_path = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'reid_arxiv')
        _reid_arxiv_to_fits(reid_path, 'magellan_fire_echelle.json', 'magellan_fire_echelle.fits')

    if flg & (2**25): # FIRE longslit
        binspec = 1
        reid_path = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'reid_arxiv')
        outroot = 'magellan_fire_long.fits'
        xidl_file = os.path.join(os.getenv('FIRE_DIR'), 'LowDispersion', 'NeNeAr_archive_fit.fits')
        spec_file = os.path.join(os.getenv('FIRE_DIR'), 'LowDispersion', 'NeNeAr2.sav')
        fire_sol = Table.read(xidl_file)
        wave = cheby_val(fire_sol['FFIT'].data[0], np.arange(2048), fire_sol['NRM'].data[0], fire_sol['NORD'].data[0])
        wv_vac = airtovac(wave * units.AA)
        xidl_dict = readsav(spec_file)
        flux = xidl_dict['arc1d']
        write_template(wv_vac.value, flux, binspec, reid_path, outroot, det_cut=None)

    # Gemini/Flamingos2
    if flg & (2**26):
        reid_path = os.path.join(resource_filename('pypeit', 'data'), 'arc_lines', 'reid_arxiv')
        iroot = ['Flamingos2_JH_JH.json','Flamingos2_HK_HK.json']
        outroot=['Flamingos2_JH_JH.fits','Flamingos2_HK_HK.fits']
        binspec = 1
        slits = [0]
        lcut = []
        for ii in range(len(iroot)):
            wfile = os.path.join(reid_path, iroot[ii])
            build_template(wfile, slits, lcut, binspec, outroot[ii], lowredux=False)

    # MDM/OSMOS -- MDM4K
    if flg & (2 ** 28):
        # ArI 4159 -- 6800
        wfile = os.path.join(template_path, 'MDM_OSMOS', 'MasterWaveCalib_MDM4K_01.json')
        outroot = 'mdm_osmos_mdm4k.fits'
        binspec = 1
        slits = [0]
        lcut = [3200.]
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False,
                       chk=True, subtract_conti=True)
示例#4
0
def reidentify_old(spec,
                   wv_calib_arxiv,
                   lamps,
                   nreid_min,
                   detections=None,
                   cc_thresh=0.8,
                   cc_local_thresh=0.8,
                   line_pix_tol=2.0,
                   nlocal_cc=11,
                   rms_threshold=0.15,
                   nonlinear_counts=1e10,
                   sigdetect=5.0,
                   use_unknowns=True,
                   match_toler=3.0,
                   func='legendre',
                   n_first=2,
                   sigrej_first=3.0,
                   n_final=4,
                   sigrej_final=2.0,
                   seed=None,
                   debug_xcorr=False,
                   debug_reid=False):
    """ Determine a wavelength solution for a set of spectra based on archival wavelength solutions

    Parameters
    ----------
    spec :  float ndarray (nspec, nslits)
       Array of arc spectra for which wavelength solutions are desired.

    wv_calib_arxiv: dict
       Dictionary containing archival wavelength solutions for a collection of slits/orders to be used to reidentify
       lines and  determine the wavelength solution for spec. This dict is a standard format for PypeIt wavelength solutions
       as created by pypeit.core.wavecal.fitting.iterative_fitting

    lamps: list of strings
       The are line lamps that are on or the name of the linelist that should be used. For example for Shane Kast blue
       this would ['CdI','HgI','HeI']. For X-shooter NIR which calibrates of a custom OH sky line list,
       it is the name of the line list, i.e. ['OH_XSHOOTER']

    nreid_min: int
       Minimum number of matches (above cc_local_thresh) across the archive required to consider a line reidentified.

    Optional Parameters
    -------------------
    detections: float ndarray, default = None
       An array containing the pixel centroids of the lines in the arc as computed by the pypeit.core.arc.detect_lines
       code. If this is set to None, the line detection will be run inside the code.

    cc_thresh: float, default = 0.8
       Threshold for the *global* cross-correlation coefficient between an input spectrum and member of the archive required to
       attempt reidentification. Spectra from the archive with a lower cross-correlation are not used for reidentification

    cc_local_thresh: float, default = 0.8
       Threshold for the *local* cross-correlation coefficient, evaluated at each reidentified line,  between an input
       spectrum and the shifted and stretched archive spectrum above which a line must be to be considered a good line for
       reidentification. The local cross-correlation is evaluated at each candidate reidentified line
       (using a window of nlocal_cc), and is then used to score the reidentified lines to arrive at the final set of
       good reidentifications

    line_pix_tol: float, default = 2.0
       Matching tolerance in pixels for a line reidentification. A good line match must match within this tolerance to
       the shifted and stretched archive spectrum, and the archive wavelength solution at this match must be within
       line_pix_tol dispersion elements from the line in line list.

    nlocal_cc: int, default = 11
       Size of pixel window used for local cross-correlation computation for each arc line. If not an odd number one will
       be added to it to make it odd.

    rms_threshold: float, default = 0.15
       Minimum rms for considering a wavelength solution to be an acceptable good fit. Slits/orders with a larger RMS
       than this are flagged as bad slits

    nonlinear_counts: float, default = 1e10
       Arc lines above this saturation threshold are not used in wavelength solution fits because they cannot be accurately
       centroided

    sigdetect: float, default 5.0
       Sigma threshold above fluctuations for arc-line detection. Arcs are continuum subtracted and the fluctuations are
       computed after continuum subtraction.

    use_unknowns : bool, default = True
       If True, arc lines that are known to be present in the spectra, but have not been attributed to an element+ion,
       will be included in the fit.

    match_toler: float, default = 3.0
       Matching tolerance when searching for new lines. This is the difference in pixels between the wavelength assigned to
       an arc line by an iteration of the wavelength solution to the wavelength in the line list.

    func: str, default = 'legendre'
       Name of function used for the wavelength solution

    n_first: int, default = 2
       Order of first guess to the wavelength solution.

    sigrej_first: float, default = 3.0
       Number of sigma for rejection for the first guess to the wavelength solution.

    n_final: int, default = 4
       Order of the final wavelength solution fit

    sigrej_final: float, default = 2.0
       Number of sigma for rejection for the final fit to the wavelength solution.

    seed: int or np.random.RandomState, optional, default = None
       Seed for scipy.optimize.differential_evolution optimizer. If not specified, the calculation will be seeded
       in a deterministic way from the input arc spectrum spec.

    debug_xcorr: bool, default = False
       Show plots useful for debugging the cross-correlation used for shift/stretch computation

    debug_reid: bool, default = False
       Show plots useful for debugging the line reidentification

    Returns
    -------
    (wv_calib, patt_dict, bad_slits)

    wv_calib: dict
       Wavelength solution for the input arc spectra spec. These are stored in standard pypeit format, i.e.
       each index of spec[:,slit] corresponds to a key in the wv_calib dictionary wv_calib[str(slit)] which yields
       the final_fit dictionary for this slit

    patt_dict: dict
       Arc lines pattern dictionary with some information about the IDs as well as the cross-correlation values

    bad_slits: ndarray, int
       Numpy array with the indices of the bad slits. These are the indices in the input arc spectrum array spec[:,islit]


    Revision History
    ----------------
    November 2018 by J.F. Hennawi. Based on an initial version of this code written by Ryan Cooke.
    """

    # Determine the seed for scipy.optimize.differential_evolution optimizer
    if seed is None:
        # If no seed is specified just take the sum of all the elements and round that to an integer
        seed = np.fmin(int(np.sum(spec)), 2**32 - 1)

    random_state = np.random.RandomState(seed=seed)

    # The local cross-correlation window must have an odd width so it is centered on each line
    nlocal_cc_odd = nlocal_cc + 1 if nlocal_cc % 2 == 0 else nlocal_cc
    window = 1.0 / nlocal_cc_odd * np.ones(nlocal_cc_odd)

    # Generate the line list
    line_lists = waveio.load_line_lists(lamps)
    unknwns = waveio.load_unknown_list(lamps)
    if use_unknowns:
        tot_list = table.vstack([line_lists, unknwns])
    else:
        tot_list = line_lists
    # Generate the final linelist and sort
    wvdata = np.array(tot_list['wave'].data)  # Removes mask if any
    wvdata.sort()

    nspec, nslits = spec.shape
    narxiv = len(wv_calib_arxiv)
    nspec_arxiv = wv_calib_arxiv['0']['spec'].size
    if nspec_arxiv != nspec:
        msgs.error(
            'Different spectral binning is not supported yet but it will be soon'
        )

    # If the detections were not passed in find the lines in each spectrum
    if detections is None:
        detections = {}
        for islit in range(nslits):
            tcent, ecent, cut_tcent, icut = wvutils.arc_lines_from_spec(
                spec[:, islit],
                sigdetect=sigdetect,
                nonlinear_counts=nonlinear_counts)
            detections[str(islit)] = [tcent[icut].copy(), ecent[icut].copy()]
    else:
        if len(detections) != nslits:
            msgs.error('Detections must be a dictionary with nslit elements')

    # For convenience pull out all the spectra from the wv_calib_arxiv archive
    spec_arxiv = np.zeros((nspec, narxiv))
    wave_soln_arxiv = np.zeros((nspec, narxiv))
    wvc_arxiv = np.zeros(narxiv, dtype=float)
    disp_arxiv = np.zeros(narxiv, dtype=float)
    xrng = np.arange(nspec_arxiv)
    for iarxiv in range(narxiv):
        spec_arxiv[:, iarxiv] = wv_calib_arxiv[str(iarxiv)]['spec']
        fitc = wv_calib_arxiv[str(iarxiv)]['fitc']
        fitfunc = wv_calib_arxiv[str(iarxiv)]['function']
        fmin, fmax = wv_calib_arxiv[str(iarxiv)]['fmin'], wv_calib_arxiv[str(
            iarxiv)]['fmax']
        wave_soln_arxiv[:, iarxiv] = utils.func_val(fitc,
                                                    xrng,
                                                    fitfunc,
                                                    minv=fmin,
                                                    maxv=fmax)
        # Central wavelength and median dispersion of each archive solution
        wvc_arxiv[iarxiv] = wave_soln_arxiv[nspec_arxiv // 2, iarxiv]
        disp_arxiv[iarxiv] = np.median(wave_soln_arxiv[:, iarxiv] -
                                       np.roll(wave_soln_arxiv[:, iarxiv], 1))

    wv_calib = {}
    patt_dict = {}
    # NOTE: np.int/np.bool aliases were removed from NumPy (>=1.24); use the builtins
    bad_slits = np.array([], dtype=int)

    marker_tuple = ('o', 'v', '<', '>', '8', 's', 'p', 'P', '*', 'X', 'D', 'd',
                    'x')
    color_tuple = ('black', 'green', 'red', 'cyan', 'magenta', 'blue',
                   'darkorange', 'yellow', 'dodgerblue', 'purple',
                   'lightgreen', 'cornflowerblue')
    marker = itertools.cycle(marker_tuple)
    colors = itertools.cycle(color_tuple)

    # Loop over the slits in the spectrum and cross-correlate each with each arxiv spectrum to identify lines
    for islit in range(nslits):
        slit_det = detections[str(islit)][0]
        line_indx = np.array([], dtype=int)
        det_indx = np.array([], dtype=int)
        line_cc = np.array([], dtype=float)
        line_iarxiv = np.array([], dtype=int)
        wcen = np.zeros(narxiv)
        disp = np.zeros(narxiv)
        shift_vec = np.zeros(narxiv)
        stretch_vec = np.zeros(narxiv)
        ccorr_vec = np.zeros(narxiv)
        for iarxiv in range(narxiv):
            msgs.info('Cross-correlating slit # {:d}'.format(islit + 1) +
                      ' with arxiv slit # {:d}'.format(iarxiv + 1))
            # Match the peaks between the two spectra. This code attempts to compute the stretch if cc > cc_thresh
            success, shift_vec[iarxiv], stretch_vec[iarxiv], ccorr_vec[iarxiv], _, _ = \
                wvutils.xcorr_shift_stretch(spec[:, islit], spec_arxiv[:, iarxiv], cc_thresh=cc_thresh, seed = random_state,
                                            debug=debug_xcorr)
            # If cc < cc_thresh or if this optimization failed, don't reidentify from this arxiv spectrum
            if success != 1:
                continue
            # Estimate wcen and disp for this slit based on its shift/stretch relative to the archive slit
            disp[iarxiv] = disp_arxiv[iarxiv] / stretch_vec[iarxiv]
            wcen[iarxiv] = wvc_arxiv[iarxiv] - shift_vec[iarxiv] * disp[iarxiv]
            # For each peak in the arxiv spectrum, identify the corresponding peaks in the input islit spectrum. Do this by
            # transforming these arxiv slit line pixel locations into the (shifted and stretched) input islit spectrum frame
            arxiv_det = wv_calib_arxiv[str(iarxiv)]['xfit']
            arxiv_det_ss = arxiv_det * stretch_vec[iarxiv] + shift_vec[iarxiv]
            spec_arxiv_ss = wvutils.shift_and_stretch(spec_arxiv[:, iarxiv],
                                                      shift_vec[iarxiv],
                                                      stretch_vec[iarxiv])

            if debug_xcorr:
                plt.figure(figsize=(14, 6))
                tampl_slit = np.interp(slit_det, xrng, spec[:, islit])
                plt.plot(xrng,
                         spec[:, islit],
                         color='red',
                         drawstyle='steps-mid',
                         label='input arc',
                         linewidth=1.0,
                         zorder=10)
                plt.plot(slit_det,
                         tampl_slit,
                         'r.',
                         markersize=10.0,
                         label='input arc lines',
                         zorder=10)
                tampl_arxiv = np.interp(arxiv_det, xrng, spec_arxiv[:, iarxiv])
                plt.plot(xrng,
                         spec_arxiv[:, iarxiv],
                         color='black',
                         drawstyle='steps-mid',
                         linestyle=':',
                         label='arxiv arc',
                         linewidth=0.5)
                plt.plot(arxiv_det,
                         tampl_arxiv,
                         'k+',
                         markersize=8.0,
                         label='arxiv arc lines')
                # tampl_ss = np.interp(gsdet_ss, xrng, gdarc_ss)
                for iline in range(arxiv_det_ss.size):
                    plt.plot([arxiv_det[iline], arxiv_det_ss[iline]],
                             [tampl_arxiv[iline], tampl_arxiv[iline]],
                             color='cornflowerblue',
                             linewidth=1.0)
                plt.plot(xrng,
                         spec_arxiv_ss,
                         color='black',
                         drawstyle='steps-mid',
                         label='arxiv arc shift/stretch',
                         linewidth=1.0)
                plt.plot(arxiv_det_ss,
                         tampl_arxiv,
                         'k.',
                         markersize=10.0,
                         label='predicted arxiv arc lines')
                plt.title('Cross-correlation of input slit # {:d}'.format(
                    islit + 1) + ' and arxiv slit # {:d}'.format(iarxiv + 1) +
                          ': ccor = {:5.3f}'.format(ccorr_vec[iarxiv]) +
                          ', shift = {:6.1f}'.format(shift_vec[iarxiv]) +
                          ', stretch = {:5.4f}'.format(stretch_vec[iarxiv]) +
                          ', wv_cen = {:7.1f}'.format(wcen[iarxiv]) +
                          ', disp = {:5.3f}'.format(disp[iarxiv]))
                plt.ylim(1.2 * spec[:, islit].min(),
                         1.5 * spec[:, islit].max())
                plt.legend()
                plt.show()

            # Calculate wavelengths for all of the gsdet detections
            wvval_arxiv = utils.func_val(
                wv_calib_arxiv[str(iarxiv)]['fitc'],
                arxiv_det,
                wv_calib_arxiv[str(iarxiv)]['function'],
                minv=wv_calib_arxiv[str(iarxiv)]['fmin'],
                maxv=wv_calib_arxiv[str(iarxiv)]['fmax'])
            # Compute a "local" zero lag correlation of the slit spectrum and the shifted and stretch arxiv spectrum over a
            # a nlocal_cc_odd long segment of spectrum. We will then uses spectral similarity as a further criteria to
            # decide which lines are good matches
            # NOTE: the scipy.ndimage.filters namespace is deprecated/removed; call convolve1d from scipy.ndimage
            prod_smooth = scipy.ndimage.convolve1d(
                spec[:, islit] * spec_arxiv_ss, window)
            spec2_smooth = scipy.ndimage.convolve1d(
                spec[:, islit]**2, window)
            arxiv2_smooth = scipy.ndimage.convolve1d(
                spec_arxiv_ss**2, window)
            denom = np.sqrt(spec2_smooth * arxiv2_smooth)
            corr_local = np.zeros_like(denom)
            corr_local[denom > 0] = prod_smooth[denom > 0] / denom[denom > 0]
            corr_local[denom == 0.0] = -1.0

            # Loop over the current slit line pixel detections and find the nearest arxiv spectrum line
            for iline in range(slit_det.size):
                # match to pixel in shifted/stretch arxiv spectrum
                pdiff = np.abs(slit_det[iline] - arxiv_det_ss)
                bstpx = np.argmin(pdiff)
                # If a match is found within 2 pixels, consider this a successful match
                if pdiff[bstpx] < line_pix_tol:
                    # Using the arxiv arc wavelength solution, search for the nearest line in the line list
                    bstwv = np.abs(wvdata - wvval_arxiv[bstpx])
                    # This is a good wavelength match if it is within line_pix_tol disperion elements
                    if bstwv[np.argmin(
                            bstwv)] < line_pix_tol * disp_arxiv[iarxiv]:
                        line_indx = np.append(
                            line_indx, np.argmin(bstwv)
                        )  # index in the line list array wvdata of this match
                        det_indx = np.append(
                            det_indx, iline
                        )  # index of this line in the detected line array slit_det
                        line_cc = np.append(
                            line_cc,
                            np.interp(slit_det[iline], xrng, corr_local)
                        )  # local cross-correlation at this match
                        line_iarxiv = np.append(line_iarxiv, iarxiv)

        narxiv_used = np.sum(wcen != 0.0)
        if (narxiv_used == 0) or (len(np.unique(line_indx)) < 3):
            wv_calib[str(islit)] = {}
            patt_dict[str(islit)] = {}
            bad_slits = np.append(bad_slits, islit)
            continue

        if debug_reid:
            plt.figure(figsize=(14, 6))
            # Plot a summary of the local x-correlation values for each line on each slit
            for iarxiv in range(narxiv):
                # Only plot those that we actually tried to reidentify (i.e. above cc_thresh)
                if wcen[iarxiv] != 0.0:
                    this_iarxiv = line_iarxiv == iarxiv
                    plt.plot(wvdata[line_indx[this_iarxiv]],
                             line_cc[this_iarxiv],
                             marker=next(marker),
                             color=next(colors),
                             linestyle='',
                             markersize=5.0,
                             label='arxiv slit={:d}'.format(iarxiv))

            plt.hlines(cc_local_thresh,
                       wvdata[line_indx].min(),
                       wvdata[line_indx].max(),
                       color='red',
                       linestyle='--',
                       label='Local xcorr threshhold')
            plt.title(
                'slit={:d}'.format(islit + 1) +
                ': Local x-correlation for reidentified lines from narxiv_used={:d}'
                .format(narxiv_used) +
                ' arxiv slits. Requirement: nreid_min={:d}'.format(nreid_min) +
                ' matches > threshold')
            plt.xlabel('wavelength from line list')
            plt.ylabel('Local x-correlation coefficient')
            #plt.ylim((0.0, 1.2))
            plt.legend()
            plt.show()

        # Finalize the best guess of each line
        # Initialise the patterns dictionary, min_nsig not used anywhere
        patt_dict_slit = dict(acceptable=False,
                              nmatch=0,
                              ibest=-1,
                              bwv=0.,
                              min_nsig=sigdetect,
                              mask=np.zeros(slit_det.size, dtype=bool))
        patt_dict_slit['sign'] = 1  # This is not used anywhere
        patt_dict_slit['bwv'] = np.median(wcen[wcen != 0.0])
        patt_dict_slit['bdisp'] = np.median(disp[disp != 0.0])
        patterns.solve_xcorr(slit_det,
                             wvdata,
                             det_indx,
                             line_indx,
                             line_cc,
                             patt_dict=patt_dict_slit,
                             nreid_min=nreid_min,
                             cc_local_thresh=cc_local_thresh)

        if debug_reid:
            tmp_list = table.vstack([line_lists, unknwns])
            qa.match_qa(spec[:, islit], slit_det, tmp_list,
                        patt_dict_slit['IDs'], patt_dict_slit['scores'])

        # Use only the perfect IDs
        # iperfect is True where the score is NOT 'Perfect'; those lines are masked out
        iperfect = np.array(patt_dict_slit['scores']) != 'Perfect'
        patt_dict_slit['mask'][iperfect] = False
        patt_dict_slit['nmatch'] = np.sum(patt_dict_slit['mask'])
        if patt_dict_slit['nmatch'] < 3:
            patt_dict_slit['acceptable'] = False

        # Check if an acceptable reidentification solution was found
        if not patt_dict_slit['acceptable']:
            wv_calib[str(islit)] = {}
            patt_dict[str(islit)] = copy.deepcopy(patt_dict_slit)
            bad_slits = np.append(bad_slits, islit)
            continue
        # Perform the fit
        final_fit = fitting.fit_slit(spec[:, islit],
                                     patt_dict_slit,
                                     slit_det,
                                     line_lists,
                                     match_toler=match_toler,
                                     func=func,
                                     n_first=n_first,
                                     sigrej_first=sigrej_first,
                                     n_final=n_final,
                                     sigrej_final=sigrej_final)

        # Did the fit succeed?
        if final_fit is None:
            # This pattern wasn't good enough
            wv_calib[str(islit)] = {}
            patt_dict[str(islit)] = copy.deepcopy(patt_dict_slit)
            bad_slits = np.append(bad_slits, islit)
            continue
        # Is the RMS below the threshold?
        if final_fit['rms'] > rms_threshold:
            msgs.warn(
                '---------------------------------------------------' +
                msgs.newline() +
                'Reidentify report for slit {0:d}/{1:d}:'.format(
                    islit + 1, nslits) + msgs.newline() +
                '  Poor RMS ({0:.3f})! Need to add additional spectra to arxiv to improve fits'
                .format(final_fit['rms']) + msgs.newline() +
                '---------------------------------------------------')
            bad_slits = np.append(bad_slits, islit)
            # Note this result in new_bad_slits, but store the solution since this might be the best possible

        # Add the patt_dict and wv_calib to the output dicts
        patt_dict[str(islit)] = copy.deepcopy(patt_dict_slit)
        wv_calib[str(islit)] = copy.deepcopy(final_fit)
        if debug_reid:
            qa.arc_fit_qa(wv_calib[str(islit)])
            #yplt = utils.func_val(final_fit['fitc'], xrng, final_fit['function'], minv=final_fit['fmin'], maxv=final_fit['fmax'])
            #plt.plot(final_fit['xfit'], final_fit['yfit'], 'bx')
            #plt.plot(xrng, yplt, 'r-')
            #plt.show()

    return wv_calib, patt_dict, bad_slits
示例#5
0
    def build_wv_calib(self, arccen, method, skip_QA=False):
        """
        Main routine to generate the wavelength solutions in a loop over slits.

        Failed slits are reflected in the bad-pixel mask via ``update_wvmask()``.

        Args:
            arccen (`numpy.ndarray`_):
                Extracted arc spectra, one per slit (nspec, nslits).
            method (str):
                Wavelength calibration method to use:
                  'simple' -- arc.simple_calib_driver
                  'holy-grail' -- autoid.HolyGrail
                  'identify' -- interactive identification via Identify GUI
                  'reidentify' -- autoid.ArchiveReid
                  'full_template' -- autoid.full_template
            skip_QA (bool, optional):
                If True, do not generate the arc-fit QA plots.

        Returns:
            WaveCalib: self.wv_calib, the DataContainer holding one WaveFit
            per slit.
        """
        # Obtain a list of good slits
        ok_mask_idx = np.where(np.invert(self.wvc_bpm))[0]

        # Obtain calibration for all slits
        if method == 'simple':
            lines = self.par['lamps']
            line_lists = waveio.load_line_lists(lines)

            final_fit = arc.simple_calib_driver(
                line_lists,
                arccen,
                ok_mask_idx,
                n_final=self.par['n_final'],
                sigdetect=self.par['sigdetect'],
                IDpixels=self.par['IDpixels'],
                IDwaves=self.par['IDwaves'])
        elif method == 'holy-grail':
            # Sometimes works, sometimes fails
            arcfitter = autoid.HolyGrail(
                arccen,
                par=self.par,
                ok_mask=ok_mask_idx,
                nonlinear_counts=self.nonlinear_counts)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'identify':
            final_fit = {}
            # Manually identify lines
            msgs.info("Initializing the wavelength calibration tool")
            for slit_idx in ok_mask_idx:
                arcfitter = Identify.initialise(arccen,
                                                self.slits,
                                                slit=slit_idx,
                                                par=self.par)
                final_fit[str(slit_idx)] = arcfitter.get_results()
                arcfitter.store_solution(final_fit[str(slit_idx)],
                                         "",
                                         self.binspectral,
                                         specname=self.spectrograph.name,
                                         gratname="UNKNOWN",
                                         dispangl="UNKNOWN")
        elif method == 'reidentify':
            # Now preferred
            # Slit positions
            arcfitter = autoid.ArchiveReid(
                arccen,
                self.spectrograph,
                self.par,
                ok_mask=ok_mask_idx,
                #slit_spat_pos=self.spat_coo,
                orders=self.orders,
                nonlinear_counts=self.nonlinear_counts)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'full_template':
            # Now preferred
            if self.binspectral is None:
                msgs.error(
                    "You must specify binspectral for the full_template method!"
                )
            final_fit = autoid.full_template(
                arccen,
                self.par,
                ok_mask_idx,
                self.det,
                self.binspectral,
                nonlinear_counts=self.nonlinear_counts,
                nsnippet=self.par['nsnippet'])
        else:
            msgs.error(
                'Unrecognized wavelength calibration method: {:}'.format(
                    method))

        # Build the DataContainer
        # Loop on WaveFit items
        tmp = []
        for idx in range(self.slits.nslits):
            item = final_fit.pop(str(idx))
            if item is None:  # Add an empty WaveFit
                tmp.append(wv_fitting.WaveFit(self.slits.spat_id[idx]))
            else:
                # This is for I/O naming
                item.spat_id = self.slits.spat_id[idx]
                tmp.append(item)
        self.wv_calib = WaveCalib(
            wv_fits=np.asarray(tmp),
            arc_spectra=arccen,
            nslits=self.slits.nslits,
            spat_ids=self.slits.spat_id,
            PYP_SPEC=self.spectrograph.name,
        )

        # Update mask
        self.update_wvmask()

        #TODO For generalized echelle (not hard wired) assign order number here before, i.e. slits.ech_order

        # QA
        if not skip_QA:
            ok_mask_idx = np.where(np.invert(self.wvc_bpm))[0]
            for slit_idx in ok_mask_idx:
                outfile = qa.set_qa_filename(
                    self.master_key,
                    'arc_fit_qa',
                    slit=self.slits.slitord_id[slit_idx],
                    out_dir=self.qa_path)
                #
                #autoid.arc_fit_qa(self.wv_calib[str(self.slits.slitord_id[slit_idx])],
                #                  outfile=outfile)
                autoid.arc_fit_qa(
                    self.wv_calib.wv_fits[slit_idx],
                    #str(self.slits.slitord_id[slit_idx]),
                    outfile=outfile)

        # Return
        self.steps.append(inspect.stack()[0][3])
        return self.wv_calib
示例#6
0
def initialise(arccen, slit=0, par=None, wv_calib_all=None):
    """Initialise the 'Identify' window for real-time wavelength calibration

    .. todo::

        * Implement multislit functionality

    Parameters
    ----------
    arccen : ndarray
        Arc spectrum
    slit : int, optional
        The slit to be used for wavelength calibration
    par : :obj:`pypeit.par.pypeitpar.WavelengthSolutionPar`, optional
        The parameters used for wavelength calibration. If None, a default
        WavelengthSolutionPar is constructed.
    wv_calib_all : :obj:`dict`, None, optional
        If a best-fitting solution exists, and you wish to load it, provide the wv_calib dictionary.

    Returns
    -------
    class
        Returns an instance of the Identify class, which contains the results of the fit
    """

    # Double check that a WavelengthSolutionPar was input
    par = pypeitpar.WavelengthSolutionPar() if par is None else par

    # If a wavelength calibration has been performed already, load it;
    # guard against the default of None, which would otherwise raise here
    wv_calib = None if wv_calib_all is None else wv_calib_all[str(slit)]

    # Extract the lines that are detected in arccen
    thisarc = arccen[:, slit]
    tdetns, _, _, icut, _ = wvutils.arc_lines_from_spec(
        thisarc,
        sigdetect=par['sigdetect'],
        nonlinear_counts=par['nonlinear_counts'])
    detns = tdetns[icut]

    # Load line lists; for ThAr, drop the unidentified (UNKNWN) entries
    if 'ThAr' in par['lamps']:
        line_lists_all = waveio.load_line_lists(par['lamps'])
        line_lists = line_lists_all[np.where(
            line_lists_all['ion'] != 'UNKNWN')]
    else:
        line_lists = waveio.load_line_lists(par['lamps'])

    # Create a Line2D instance for the arc spectrum
    spec = Line2D(np.arange(thisarc.size),
                  thisarc,
                  linewidth=1,
                  linestyle='solid',
                  color='k',
                  drawstyle='steps',
                  animated=True)

    # Add the main figure axis
    fig, ax = plt.subplots(figsize=(16, 9), facecolor="white")
    plt.subplots_adjust(bottom=0.05, top=0.85, left=0.05, right=0.65)
    ax.add_line(spec)
    ax.set_ylim((0.0, 1.1 * spec.get_ydata().max()))

    # Add two residual fitting axes
    axfit = fig.add_axes([0.7, .5, .28, 0.35])
    axres = fig.add_axes([0.7, .1, .28, 0.35])
    # Residuals: color-code points by their flag state
    lflag_color = ['grey', 'blue', 'yellow', 'red']
    residcmap = LinearSegmentedColormap.from_list("my_list",
                                                  lflag_color,
                                                  N=len(lflag_color))
    resres = axres.scatter(detns,
                           np.zeros(detns.size),
                           marker='x',
                           c=np.zeros(detns.size),
                           cmap=residcmap,
                           norm=Normalize(vmin=0.0, vmax=3.0))
    axres.axhspan(-0.1, 0.1, alpha=0.5,
                  color='grey')  # Residuals of 0.1 pixels
    axres.axhline(0.0, color='r', linestyle='-')  # Zero level
    axres.set_xlim((0, thisarc.size - 1))
    axres.set_ylim((-0.3, 0.3))
    axres.set_xlabel('Pixel')
    axres.set_ylabel('Residuals (Pix)')

    # pixel vs wavelength
    respts = axfit.scatter(detns,
                           np.zeros(detns.size),
                           marker='x',
                           c=np.zeros(detns.size),
                           cmap=residcmap,
                           norm=Normalize(vmin=0.0, vmax=3.0))
    resfit = Line2D(np.arange(thisarc.size),
                    np.zeros(thisarc.size),
                    linewidth=1,
                    linestyle='-',
                    color='r')
    axfit.add_line(resfit)
    axfit.set_xlim((0, thisarc.size - 1))
    axfit.set_ylim(
        (-0.3, 0.3))  # This will get updated as lines are identified
    axfit.set_xlabel('Pixel')
    axfit.set_ylabel('Wavelength')

    # Add an information GUI axis
    axinfo = fig.add_axes([0.15, .92, .7, 0.07])
    axinfo.get_xaxis().set_visible(False)
    axinfo.get_yaxis().set_visible(False)
    axinfo.text(0.5,
                0.5,
                "Press '?' to list the available options",
                transform=axinfo.transAxes,
                horizontalalignment='center',
                verticalalignment='center')
    axinfo.set_xlim((0, 1))
    axinfo.set_ylim((0, 1))
    specres = dict(pixels=respts, model=resfit, resid=resres)

    axes = dict(main=ax, fit=axfit, resid=axres, info=axinfo)
    # Initialise the identify window and display to screen
    # NOTE(review): FigureCanvas.set_window_title is deprecated in matplotlib>=3.4
    # (use fig.canvas.manager.set_window_title) -- update when the pinned version allows
    fig.canvas.set_window_title('PypeIt - Identify')
    ident = Identify(fig.canvas,
                     axes,
                     spec,
                     specres,
                     detns,
                     line_lists,
                     par,
                     lflag_color,
                     slit=slit,
                     wv_calib=wv_calib)
    plt.show()

    # Now return the results
    return ident
示例#7
0
                                                minv=fmin,
                                                maxv=fmax)
    det_arxiv[str(iarxiv)] = wv_calib_arxiv[str(iarxiv)]['xfit']

match_toler = 2.0  #par['match_toler']
n_first = par['n_first']
sigrej_first = par['sigrej_first']
n_final = par['n_final']
sigrej_final = par['sigrej_final']
func = par['func']
nonlinear_counts = par['nonlinear_counts']
sigdetect = par['lowest_nsig']
rms_threshold = par['rms_threshold']
lamps = par['lamps']

line_list = waveio.load_line_lists(lamps)

cc_thresh = 0.8
cc_local_thresh = 0.8
n_local_cc = 11

nreid_min = 1

new = False
if new:
    all_patt_dict = {}
    all_detections = {}
    for islit in range(nslits):
        all_detections[str(islit)], all_patt_dict[str(
            islit)] = autoid.reidentify(spec[:, islit],
                                        spec_arxiv,
示例#8
0
def main(polygon, numsearch=8, maxlinear=100.0, use_unknowns=True, leafsize=30, verbose=False,
         ret_treeindx=False, outname=None, ):
    """Driving method for generating the KD Tree

    Parameters
    ----------
    polygon : int
      Number of sides to the polygon used in pattern matching
    numsearch : int
      Number of adjacent lines to use when deriving patterns
    maxlinear : float
      Over how many Angstroms is the solution deemed to be linear
    use_unknowns : bool
      Include unknown lines in the wavelength calibration (these may arise from lines other than Th I/II and Ar I/II)
    leafsize : int
      The leaf size of the tree
    verbose : bool
      Print progress messages to stdout
    ret_treeindx : bool
      If True, return the tree and the pattern index array in addition to
      writing both to disk
    outname : str, optional
      Output filename for the pickled KD tree.  If None, a default name is
      built from ``polygon`` and ``numsearch``.  The index file name is
      derived from ``outname`` by replacing the '.kdtree' suffix.

    Returns
    -------
    tree, index : scipy.spatial.cKDTree, ndarray
      Only returned when ``ret_treeindx`` is True.  Returns None when the
      polygon is outside the supported range (3 <= polygon <= 6) or when
      ``ret_treeindx`` is False.
    """

    # Load the ThAr linelist
    line_lists_all = waveio.load_line_lists(['ThAr'])
    line_lists = line_lists_all[np.where(line_lists_all['ion'] != 'UNKNWN')]
    unknwns = line_lists_all[np.where(line_lists_all['ion'] == 'UNKNWN')]
    if use_unknowns:
        tot_list = vstack([line_lists, unknwns])
    else:
        tot_list = line_lists
    wvdata = np.array(tot_list['wave'].data)  # Removes mask if any
    wvdata.sort()

    # Dispatch on the polygon order; each helper builds the pattern array
    # and the index mapping patterns back to line-list entries.
    if polygon == 3:
        if verbose: print("Generating patterns for a trigon")
        pattern, index = trigon(wvdata, numsearch, maxlinear)
    elif polygon == 4:
        if verbose: print("Generating patterns for a tetragon")
        pattern, index = tetragon(wvdata, numsearch, maxlinear)
    elif polygon == 5:
        if verbose: print("Generating patterns for a pentagon")
        pattern, index = pentagon(wvdata, numsearch, maxlinear)
    elif polygon == 6:
        if verbose: print("Generating patterns for a hexagon")
        pattern, index = hexagon(wvdata, numsearch, maxlinear)
    else:
        if verbose: print("Patterns can only be generated with 3 <= polygon <= 6")
        return None

    if outname is None:
        outname = '../../data/arc_lines/lists/ThAr_patterns_poly{0:d}_search{1:d}.kdtree'.format(polygon, numsearch)
    outindx = outname.replace('.kdtree', '.index')
    print("Generating Tree")
    tree = cKDTree(pattern, leafsize=leafsize)
    print("Saving Tree")
    # Use a context manager so the file handle is closed (and the pickle
    # flushed to disk) even if pickling raises.
    with open(outname, 'wb') as f:
        pickle.dump(tree, f)
    print("Written KD Tree file:\n{0:s}".format(outname))
    np.save(outindx, index)
    print("Written index file:\n{0:s}".format(outindx))
    if ret_treeindx:
        return tree, index
def reidentify_old(spec, wv_calib_arxiv, lamps, nreid_min, detections=None, cc_thresh=0.8,cc_local_thresh = 0.8,
               line_pix_tol=2.0, nlocal_cc=11, rms_threshold=0.15, nonlinear_counts=1e10,sigdetect = 5.0,
               use_unknowns=True,match_toler=3.0,func='legendre',n_first=2,sigrej_first=3.0,n_final=4, sigrej_final=2.0,
               seed=None, debug_xcorr=False, debug_reid=False):

    """ Determine a wavelength solution for a set of spectra based on archival wavelength solutions

    Parameters
    ----------
    spec :  float ndarray (nspec, nslits)
       Array of arc spectra for which wavelength solutions are desired.

    wv_calib_arxiv: dict
       Dictionary containing archival wavelength solutions for a collection of slits/orders to be used to reidentify
       lines and  determine the wavelength solution for spec. This dict is a standard format for PypeIt wavelength solutions
       as created by pypeit.core.wavecal.fitting.iterative_fitting

    lamps: list of strings
       The are line lamps that are on or the name of the linelist that should be used. For example for Shane Kast blue
       this would ['CdI','HgI','HeI']. For X-shooter NIR which calibrates of a custom OH sky line list,
       it is the name of the line list, i.e. ['OH_XSHOOTER']

    nreid_min: int
       Minimum number of matches, across the archive spectra, required for a reidentified line to be
       considered a good reidentification (passed through to patterns.solve_xcorr).


    Optional Parameters
    -------------------
    detections: float ndarray, default = None
       An array containing the pixel centroids of the lines in the arc as computed by the pypeit.core.arc.detect_lines
       code. If this is set to None, the line detection will be run inside the code.

    cc_thresh: float, default = 0.8
       Threshold for the *global* cross-correlation coefficient between an input spectrum and member of the archive required to
       attempt reidentification. Spectra from the archive with a lower cross-correlation are not used for reidentification

    cc_local_thresh: float, default = 0.8
       Threshold for the *local* cross-correlation coefficient, evaluated at each reidentified line,  between an input
       spectrum and the shifted and stretched archive spectrum above which a line must be to be considered a good line for
       reidentification. The local cross-correlation is evaluated at each candidate reidentified line
       (using a window of nlocal_cc), and is then used to score the reidentified lines to arrive at the final set of
       good reidentifications

    line_pix_tol: float, default = 2.0
       Matching tolerance in pixels for a line reidentification. A good line match must match within this tolerance to
       the shifted and stretched archive spectrum, and the archive wavelength solution at this match must be within
       line_pix_tol dispersion elements from the line in line list.

    nlocal_cc: int, default = 11
       Size of pixel window used for local cross-correlation computation for each arc line. If not an odd number one will
       be added to it to make it odd.

    rms_threshold: float, default = 0.15
       Minimum rms for considering a wavelength solution to be an acceptable good fit. Slits/orders with a larger RMS
       than this are flagged as bad slits

    nonlinear_counts: float, default = 1e10
       Arc lines above this saturation threshold are not used in wavelength solution fits because they cannot be accurately
       centroided

    sigdetect: float, default 5.0
       Sigma threshold above fluctuations for arc-line detection. Arcs are continuum subtracted and the fluctuations are
       computed after continuum subtraction.

    use_unknowns : bool, default = True
       If True, arc lines that are known to be present in the spectra, but have not been attributed to an element+ion,
       will be included in the fit.

    match_toler: float, default = 3.0
       Matching tolerance when searching for new lines. This is the difference in pixels between the wavelength assigned to
       an arc line by an iteration of the wavelength solution to the wavelength in the line list.

    func: str, default = 'legendre'
       Name of function used for the wavelength solution

    n_first: int, default = 2
       Order of first guess to the wavelength solution.

    sigrej_first: float, default = 3.0
       Number of sigma for rejection for the first guess to the wavelength solution.

    n_final: int, default = 4
       Order of the final wavelength solution fit

    sigrej_final: float, default = 2.0
       Number of sigma for rejection for the final fit to the wavelength solution.

    seed: int or np.random.RandomState, optional, default = None
       Seed for scipy.optimize.differential_evolution optimizer. If not specified, the calculation will be seeded
       in a deterministic way from the input arc spectrum spec.

    debug_xcorr: bool, default = False
       Show plots useful for debugging the cross-correlation used for shift/stretch computation

    debug_reid: bool, default = False
       Show plots useful for debugging the line reidentification

    Returns
    -------
    (wv_calib, patt_dict, bad_slits)

    wv_calib: dict
       Wavelength solution for the input arc spectra spec. These are stored in standard pypeit format, i.e.
       each index of spec[:,slit] corresponds to a key in the wv_calib dictionary wv_calib[str(slit)] which yields
       the final_fit dictionary for this slit

    patt_dict: dict
       Arc lines pattern dictionary with some information about the IDs as well as the cross-correlation values

    bad_slits: ndarray, int
       Numpy array with the indices of the bad slits. These are the indices in the input arc spectrum array spec[:,islit]


    Revision History
    ----------------
    November 2018 by J.F. Hennawi. Based on an initial version of this code written by Ryan Cooke.
    """

    # Determine the seed for scipy.optimize.differential_evolution optimizer
    if seed is None:
        # If no seed is specified just take the sum of all the elements and round that to an integer
        # NOTE: use the builtin min here; np.fmin returns a float64, which
        # np.random.RandomState rejects as a seed in recent numpy versions.
        seed = min(int(np.sum(spec)), 2**32 - 1)

    random_state = np.random.RandomState(seed = seed)

    # The local cross-correlation window must be odd so it is centered on a pixel
    nlocal_cc_odd = nlocal_cc + 1 if nlocal_cc % 2 == 0 else nlocal_cc
    window = 1.0/nlocal_cc_odd* np.ones(nlocal_cc_odd)

    # Generate the line list
    line_lists = waveio.load_line_lists(lamps)
    unknwns = waveio.load_unknown_list(lamps)
    if use_unknowns:
        tot_list = table.vstack([line_lists, unknwns])
    else:
        tot_list = line_lists
    # Generate the final linelist and sort
    wvdata = np.array(tot_list['wave'].data)  # Removes mask if any
    wvdata.sort()

    nspec, nslits = spec.shape
    narxiv = len(wv_calib_arxiv)
    nspec_arxiv = wv_calib_arxiv['0']['spec'].size
    if nspec_arxiv != nspec:
        msgs.error('Different spectral binning is not supported yet but it will be soon')

    # If the detections were not passed in find the lines in each spectrum
    if detections is None:
        detections = {}
        for islit in range(nslits):
            tcent, ecent, cut_tcent, icut = wvutils.arc_lines_from_spec(spec[:, islit], sigdetect=sigdetect,nonlinear_counts=nonlinear_counts)
            detections[str(islit)] = [tcent[icut].copy(), ecent[icut].copy()]
    else:
        if len(detections) != nslits:
            msgs.error('Detections must be a dictionary with nslit elements')

    # For convenience pull out all the spectra from the wv_calib_arxiv archive
    spec_arxiv = np.zeros((nspec, narxiv))
    wave_soln_arxiv = np.zeros((nspec, narxiv))
    wvc_arxiv = np.zeros(narxiv, dtype=float)
    disp_arxiv = np.zeros(narxiv, dtype=float)
    xrng = np.arange(nspec_arxiv)
    for iarxiv in range(narxiv):
        spec_arxiv[:,iarxiv] = wv_calib_arxiv[str(iarxiv)]['spec']
        fitc = wv_calib_arxiv[str(iarxiv)]['fitc']
        fitfunc = wv_calib_arxiv[str(iarxiv)]['function']
        fmin, fmax = wv_calib_arxiv[str(iarxiv)]['fmin'],wv_calib_arxiv[str(iarxiv)]['fmax']
        wave_soln_arxiv[:,iarxiv] = utils.func_val(fitc, xrng, fitfunc, minv=fmin, maxv=fmax)
        wvc_arxiv[iarxiv] = wave_soln_arxiv[nspec_arxiv//2, iarxiv]
        disp_arxiv[iarxiv] = np.median(wave_soln_arxiv[:,iarxiv] - np.roll(wave_soln_arxiv[:,iarxiv], 1))

    wv_calib = {}
    patt_dict = {}
    # NOTE: np.int/np.bool aliases were removed in numpy 1.24; use the
    # builtin types throughout.
    bad_slits = np.array([], dtype=int)

    marker_tuple = ('o','v','<','>','8','s','p','P','*','X','D','d','x')
    color_tuple = ('black','green','red','cyan','magenta','blue','darkorange','yellow','dodgerblue','purple','lightgreen','cornflowerblue')
    marker = itertools.cycle(marker_tuple)
    colors = itertools.cycle(color_tuple)

    # Loop over the slits in the spectrum and cross-correlate each with each arxiv spectrum to identify lines
    for islit in range(nslits):
        slit_det = detections[str(islit)][0]
        line_indx = np.array([], dtype=int)
        det_indx = np.array([], dtype=int)
        line_cc = np.array([], dtype=float)
        line_iarxiv = np.array([], dtype=int)
        wcen = np.zeros(narxiv)
        disp = np.zeros(narxiv)
        shift_vec = np.zeros(narxiv)
        stretch_vec = np.zeros(narxiv)
        ccorr_vec = np.zeros(narxiv)
        for iarxiv in range(narxiv):
            msgs.info('Cross-correlating slit # {:d}'.format(islit + 1) + ' with arxiv slit # {:d}'.format(iarxiv + 1))
            # Match the peaks between the two spectra. This code attempts to compute the stretch if cc > cc_thresh
            success, shift_vec[iarxiv], stretch_vec[iarxiv], ccorr_vec[iarxiv], _, _ = \
                wvutils.xcorr_shift_stretch(spec[:, islit], spec_arxiv[:, iarxiv], cc_thresh=cc_thresh, seed = random_state,
                                            debug=debug_xcorr)
            # If cc < cc_thresh or if this optimization failed, don't reidentify from this arxiv spectrum
            if success != 1:
                continue
            # Estimate wcen and disp for this slit based on its shift/stretch relative to the archive slit
            disp[iarxiv] = disp_arxiv[iarxiv] / stretch_vec[iarxiv]
            wcen[iarxiv] = wvc_arxiv[iarxiv] - shift_vec[iarxiv]*disp[iarxiv]
            # For each peak in the arxiv spectrum, identify the corresponding peaks in the input islit spectrum. Do this by
            # transforming these arxiv slit line pixel locations into the (shifted and stretched) input islit spectrum frame
            arxiv_det = wv_calib_arxiv[str(iarxiv)]['xfit']
            arxiv_det_ss = arxiv_det*stretch_vec[iarxiv] + shift_vec[iarxiv]
            spec_arxiv_ss = wvutils.shift_and_stretch(spec_arxiv[:, iarxiv], shift_vec[iarxiv], stretch_vec[iarxiv])

            if debug_xcorr:
                plt.figure(figsize=(14, 6))
                tampl_slit = np.interp(slit_det, xrng, spec[:, islit])
                plt.plot(xrng, spec[:, islit], color='red', drawstyle='steps-mid', label='input arc',linewidth=1.0, zorder=10)
                plt.plot(slit_det, tampl_slit, 'r.', markersize=10.0, label='input arc lines', zorder=10)
                tampl_arxiv = np.interp(arxiv_det, xrng, spec_arxiv[:, iarxiv])
                plt.plot(xrng, spec_arxiv[:, iarxiv], color='black', drawstyle='steps-mid', linestyle=':',
                         label='arxiv arc', linewidth=0.5)
                plt.plot(arxiv_det, tampl_arxiv, 'k+', markersize=8.0, label='arxiv arc lines')
                # tampl_ss = np.interp(gsdet_ss, xrng, gdarc_ss)
                for iline in range(arxiv_det_ss.size):
                    plt.plot([arxiv_det[iline], arxiv_det_ss[iline]], [tampl_arxiv[iline], tampl_arxiv[iline]],
                             color='cornflowerblue', linewidth=1.0)
                plt.plot(xrng, spec_arxiv_ss, color='black', drawstyle='steps-mid', label='arxiv arc shift/stretch',linewidth=1.0)
                plt.plot(arxiv_det_ss, tampl_arxiv, 'k.', markersize=10.0, label='predicted arxiv arc lines')
                plt.title(
                    'Cross-correlation of input slit # {:d}'.format(islit + 1) + ' and arxiv slit # {:d}'.format(iarxiv + 1) +
                    ': ccor = {:5.3f}'.format(ccorr_vec[iarxiv]) +
                    ', shift = {:6.1f}'.format(shift_vec[iarxiv]) +
                    ', stretch = {:5.4f}'.format(stretch_vec[iarxiv]) +
                    ', wv_cen = {:7.1f}'.format(wcen[iarxiv]) +
                    ', disp = {:5.3f}'.format(disp[iarxiv]))
                plt.ylim(1.2*spec[:, islit].min(), 1.5 *spec[:, islit].max())
                plt.legend()
                plt.show()

            # Calculate wavelengths for all of the gsdet detections
            wvval_arxiv= utils.func_val(wv_calib_arxiv[str(iarxiv)]['fitc'], arxiv_det,wv_calib_arxiv[str(iarxiv)]['function'],
                                        minv=wv_calib_arxiv[str(iarxiv)]['fmin'], maxv=wv_calib_arxiv[str(iarxiv)]['fmax'])
            # Compute a "local" zero lag correlation of the slit spectrum and the shifted and stretch arxiv spectrum over a
            # a nlocal_cc_odd long segment of spectrum. We will then uses spectral similarity as a further criteria to
            # decide which lines are good matches
            # NOTE: scipy.ndimage.filters is a deprecated namespace; call
            # convolve1d from scipy.ndimage directly.
            prod_smooth = scipy.ndimage.convolve1d(spec[:, islit]*spec_arxiv_ss, window)
            spec2_smooth = scipy.ndimage.convolve1d(spec[:, islit]**2, window)
            arxiv2_smooth = scipy.ndimage.convolve1d(spec_arxiv_ss**2, window)
            denom = np.sqrt(spec2_smooth*arxiv2_smooth)
            corr_local = np.zeros_like(denom)
            corr_local[denom > 0] = prod_smooth[denom > 0]/denom[denom > 0]
            corr_local[denom == 0.0] = -1.0

            # Loop over the current slit line pixel detections and find the nearest arxiv spectrum line
            for iline in range(slit_det.size):
                # match to pixel in shifted/stretch arxiv spectrum
                pdiff = np.abs(slit_det[iline] - arxiv_det_ss)
                bstpx = np.argmin(pdiff)
                # If a match is found within 2 pixels, consider this a successful match
                if pdiff[bstpx] < line_pix_tol:
                    # Using the arxiv arc wavelength solution, search for the nearest line in the line list
                    bstwv = np.abs(wvdata - wvval_arxiv[bstpx])
                    # This is a good wavelength match if it is within line_pix_tol disperion elements
                    if bstwv[np.argmin(bstwv)] < line_pix_tol*disp_arxiv[iarxiv]:
                        line_indx = np.append(line_indx, np.argmin(bstwv))  # index in the line list array wvdata of this match
                        det_indx = np.append(det_indx, iline)             # index of this line in the detected line array slit_det
                        line_cc = np.append(line_cc,np.interp(slit_det[iline],xrng,corr_local)) # local cross-correlation at this match
                        line_iarxiv = np.append(line_iarxiv,iarxiv)

        narxiv_used = np.sum(wcen != 0.0)
        if (narxiv_used == 0) or (len(np.unique(line_indx)) < 3):
            wv_calib[str(islit)] = {}
            patt_dict[str(islit)] = {}
            bad_slits = np.append(bad_slits,islit)
            continue

        if debug_reid:
            plt.figure(figsize=(14, 6))
            # Plot a summary of the local x-correlation values for each line on each slit
            for iarxiv in range(narxiv):
                # Only plot those that we actually tried to reidentify (i.e. above cc_thresh)
                if wcen[iarxiv] != 0.0:
                    this_iarxiv = line_iarxiv == iarxiv
                    plt.plot(wvdata[line_indx[this_iarxiv]],line_cc[this_iarxiv],marker=next(marker),color=next(colors),
                             linestyle='',markersize=5.0,label='arxiv slit={:d}'.format(iarxiv))

            plt.hlines(cc_local_thresh, wvdata[line_indx].min(), wvdata[line_indx].max(), color='red', linestyle='--',label='Local xcorr threshhold')
            plt.title('slit={:d}'.format(islit + 1) + ': Local x-correlation for reidentified lines from narxiv_used={:d}'.format(narxiv_used) +
                      ' arxiv slits. Requirement: nreid_min={:d}'.format(nreid_min) + ' matches > threshold')
            plt.xlabel('wavelength from line list')
            plt.ylabel('Local x-correlation coefficient')
            #plt.ylim((0.0, 1.2))
            plt.legend()
            plt.show()

        # Finalize the best guess of each line
        # Initialise the patterns dictionary, min_nsig not used anywhere
        patt_dict_slit = dict(acceptable=False, nmatch=0, ibest=-1, bwv=0., min_nsig=sigdetect,mask=np.zeros(slit_det.size, dtype=bool))
        patt_dict_slit['sign'] = 1 # This is not used anywhere
        patt_dict_slit['bwv'] = np.median(wcen[wcen != 0.0])
        patt_dict_slit['bdisp'] = np.median(disp[disp != 0.0])
        patterns.solve_xcorr(slit_det, wvdata, det_indx, line_indx, line_cc, patt_dict=patt_dict_slit,nreid_min=nreid_min,
                             cc_local_thresh=cc_local_thresh)

        if debug_reid:
            tmp_list = table.vstack([line_lists, unknwns])
            qa.match_qa(spec[:, islit], slit_det, tmp_list, patt_dict_slit['IDs'], patt_dict_slit['scores'])

        # Use only the perfect IDs
        iperfect = np.array(patt_dict_slit['scores']) != 'Perfect'
        patt_dict_slit['mask'][iperfect] = False
        patt_dict_slit['nmatch'] = np.sum(patt_dict_slit['mask'])
        if patt_dict_slit['nmatch'] < 3:
            patt_dict_slit['acceptable'] = False

        # Check if an acceptable reidentification solution was found
        if not patt_dict_slit['acceptable']:
            wv_calib[str(islit)] = {}
            patt_dict[str(islit)] = copy.deepcopy(patt_dict_slit)
            bad_slits = np.append(bad_slits,islit)
            continue
        # Perform the fit
        final_fit = fitting.fit_slit(spec[:,islit], patt_dict_slit, slit_det, line_lists, match_toler=match_toler,
                             func=func, n_first=n_first,sigrej_first=sigrej_first,n_final=n_final,
                             sigrej_final=sigrej_final)

        # Did the fit succeed?
        if final_fit is None:
            # This pattern wasn't good enough
            wv_calib[str(islit)] = {}
            patt_dict[str(islit)] = copy.deepcopy(patt_dict_slit)
            bad_slits = np.append(bad_slits, islit)
            continue
        # Is the RMS below the threshold?
        if final_fit['rms'] > rms_threshold:
            msgs.warn('---------------------------------------------------' + msgs.newline() +
                      'Reidentify report for slit {0:d}/{1:d}:'.format(islit + 1, nslits) + msgs.newline() +
                      '  Poor RMS ({0:.3f})! Need to add additional spectra to arxiv to improve fits'.format(final_fit['rms']) + msgs.newline() +
                      '---------------------------------------------------')
            bad_slits = np.append(bad_slits, islit)
            # Note this result in new_bad_slits, but store the solution since this might be the best possible

        # Add the patt_dict and wv_calib to the output dicts
        patt_dict[str(islit)] = copy.deepcopy(patt_dict_slit)
        wv_calib[str(islit)] = copy.deepcopy(final_fit)
        if debug_reid:
            qa.arc_fit_qa(wv_calib[str(islit)])
            #yplt = utils.func_val(final_fit['fitc'], xrng, final_fit['function'], minv=final_fit['fmin'], maxv=final_fit['fmax'])
            #plt.plot(final_fit['xfit'], final_fit['yfit'], 'bx')
            #plt.plot(xrng, yplt, 'r-')
            #plt.show()

    return wv_calib, patt_dict, bad_slits
    det_arxiv[str(iarxiv)] = wv_calib_arxiv[str(iarxiv)]['xfit']



match_toler = 2.0 #par['match_toler']
n_first = par['n_first']
sigrej_first = par['sigrej_first']
n_final = par['n_final']
sigrej_final = par['sigrej_final']
func = par['func']
nonlinear_counts=par['nonlinear_counts']
sigdetect = par['lowest_nsig']
rms_threshold = par['rms_threshold']
lamps = par['lamps']

line_list = waveio.load_line_lists(lamps)


cc_thresh =0.8
cc_local_thresh = 0.8
n_local_cc =11


nreid_min = 1

new = False
if new:
    all_patt_dict={}
    all_detections = {}
    for islit in range(nslits):
        all_detections[str(islit)], all_patt_dict[str(islit)] = autoid.reidentify(spec[:,islit], spec_arxiv, wave_soln_arxiv, det_arxiv, line_list, nreid_min,
示例#11
0
def main(flg):

    # Keck LRISb
    if flg & (2**0):  # B300, all lamps
        binspec = 1
        slits = [15]
        xidl_file = os.path.join(template_path, 'Keck_LRIS', 'B300',
                                 'lris_blue_300.sav')
        outroot = 'keck_lris_blue_300_d680.fits'
        build_template(xidl_file, slits, None, binspec, outroot, lowredux=True)

    if flg & (2**1):  # B400, all lamps I think)
        binspec = 2
        outroot = 'keck_lris_blue_400_d560.fits'
        slits = [19, 14]
        lcut = [5500.]
        xidl_file = os.path.join(template_path, 'Keck_LRIS', 'B400',
                                 'lris_blue_400_d560.sav')
        build_template(xidl_file, slits, lcut, binspec, outroot, lowredux=True)

    if flg & (2**2):  # B600, all lamps
        binspec = 2
        outroot = 'keck_lris_blue_600_d560.fits'
        slits = [0, 7]
        lcut = [4500.]
        wfile = os.path.join(template_path, 'Keck_LRIS', 'B600',
                             'MasterWaveCalib_A_1_01.json')
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**3):  # B1200, all lamps?
        binspec = 2
        outroot = 'keck_lris_blue_1200_d460.fits'
        slits = [19, 44]
        lcut = [3700.]
        xidl_file = os.path.join(template_path, 'Keck_LRIS', 'B1200',
                                 'lris_blue_1200.sav')
        build_template(xidl_file, slits, lcut, binspec, outroot, lowredux=True)

    # ###############################################3
    # Keck/LRISr
    if flg & (2**10):  # R400
        binspec = 2
        outroot = 'keck_lris_red_400.fits'
        slits = [7]  # Quite blue, but not the bluest
        lcut = []
        wfile = os.path.join(template_path, 'Keck_LRIS', 'R400',
                             'MasterWaveCalib_A_1_01.json')
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**11):  # R1200
        # slits = [2-3]  # 7726 -- 9250
        # slits = [1-4]  # 9250 -- 9925
        binspec = 1
        outroot = 'keck_lris_red_1200_9000.fits'
        ifiles = [0, 1]
        slits = [3, 7]
        lcut = [9250.]
        wfile1 = os.path.join(template_path, 'Keck_LRIS', 'R1200_9000',
                              'MasterWaveCalib_A_1_02.json')  # Original Dev
        wfile2 = os.path.join(template_path, 'Keck_LRIS', 'R1200_9000',
                              'MasterWaveCalib_A_1_01.json')  # Dev suite 2x1
        build_template([wfile1, wfile2],
                       slits,
                       lcut,
                       binspec,
                       outroot,
                       lowredux=False,
                       ifiles=ifiles)

    if flg & (2**12):  # R600/5000
        # slits = [1-4]  # 5080 -- 7820
        # slits = [1-7]  # 7820 -- 9170
        binspec = 2
        outroot = 'keck_lris_red_600_5000.fits'
        slits = [4, 7]
        lcut = [7820.]
        wfile = os.path.join(template_path, 'Keck_LRIS', 'R600_5000',
                             'MasterWaveCalib_B_1_01.json')
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**27):  # R600/7500
        # slits = [1-10]  # 5000 -- 7840
        # slits = [1-4]  # 7840 -- 9230
        binspec = 2
        outroot = 'keck_lris_red_600_7500.fits'
        slits = [10, 4]
        lcut = [7840.]
        wfile = os.path.join(template_path, 'Keck_LRIS', 'R600_7500',
                             'MasterWaveCalib_I_1_01.json')
        build_template(wfile,
                       slits,
                       lcut,
                       binspec,
                       outroot,
                       lowredux=False,
                       chk=True,
                       normalize=True,
                       subtract_conti=True)

    # ##################################
    # Magellan/MagE
    if flg & (2**13):
        # Load
        mase_path = os.path.join(os.getenv('XIDL_DIR'), 'Magellan', 'MAGE',
                                 'mase', 'Calib')
        sav_file = os.path.join(mase_path, 'MagE_wvguess_jfh.idl')
        mase_dict = readsav(sav_file)
        mase_sol = Table(mase_dict['all_arcfit'])
        # Do it
        all_wave = np.zeros((2048, 15))
        all_flux = np.zeros_like(all_wave)
        for order in np.arange(15):
            all_flux[:, order] = mase_dict['sv_aspec'][order]
            # Build the wavelengths
            wv_air = cheby_val(mase_sol['FFIT'][order], np.arange(2048),
                               mase_sol['NRM'][order], mase_sol['NORD'][order])
            all_wave[:, order] = airtovac(wv_air * units.AA).value
        # Write
        tbl = Table()
        tbl['wave'] = all_wave.T
        tbl['flux'] = all_flux.T
        tbl['order'] = np.arange(20, 5, -1, dtype=int)
        tbl.meta['BINSPEC'] = 1
        # Write
        outroot = 'magellan_mage.fits'
        outfile = os.path.join(template_path, outroot)
        tbl.write(outfile, overwrite=True)
        print("Wrote: {}".format(outfile))

    if flg & (2**14):  # Magellan/MagE Plots
        outpath = os.path.join(resource_filename('pypeit', 'data'),
                               'arc_lines', 'plots')
        new_mage_file = os.path.join(resource_filename('pypeit',
                                                       'data'), 'arc_lines',
                                     'reid_arxiv', 'magellan_mage.fits')
        # Load
        mage_wave = Table.read(new_mage_file)
        llist = waveio.load_line_lists(['ThAr_MagE'])
        #
        for kk in range(mage_wave['wave'].shape[1]):
            wv = mage_wave['wave'][:, kk]
            fx = mage_wave['flux'][:, kk]
            order = 20 - kk
            # Reidentify
            detections, spec_cont_sub, patt_dict = autoid.reidentify(
                fx, fx, wv, llist, 1)
            # Fit
            final_fit = wv_fitting.fit_slit(fx, patt_dict, detections, llist)
            # Output
            outfile = os.path.join(outpath,
                                   'MagE_order{:2d}_IDs.pdf'.format(order))
            autoid.arc_fit_qa(final_fit, outfile=outfile, ids_only=True)
            print("Wrote: {}".format(outfile))
            autoid.arc_fit_qa(final_fit,
                              outfile=os.path.join(
                                  outpath,
                                  'MagE_order{:2d}_full.pdf'.format(order)))

    if flg & (2**15):  # VLT/X-Shooter reid_arxiv
        # VIS
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        for iroot, iout in zip(
            ['vlt_xshooter_vis1x1.json', 'vlt_xshooter_nir.json'],
            ['vlt_xshooter_vis1x1.fits', 'vlt_xshooter_nir.fits']):
            # Load
            old_file = os.path.join(reid_path, iroot)
            odict, par = waveio.load_reid_arxiv(old_file)

            # Do it
            orders = odict['fit2d']['orders'][::-1].astype(int)  # Flipped
            all_wave = np.zeros((odict['0']['nspec'], orders.size))
            all_flux = np.zeros_like(all_wave)
            for kk, order in enumerate(orders):
                all_flux[:, kk] = odict[str(kk)]['spec']
                if 'nir' in iroot:
                    all_wave[:, kk] = odict[str(kk)]['wave_soln']
                else:
                    all_wave[:, kk] = airtovac(odict[str(kk)]['wave_soln'] *
                                               units.AA).value
            # Write
            tbl = Table()
            tbl['wave'] = all_wave.T
            tbl['flux'] = all_flux.T
            tbl['order'] = orders
            tbl.meta['BINSPEC'] = 1
            # Write
            outfile = os.path.join(reid_path, iout)
            tbl.write(outfile, overwrite=True)
            print("Wrote: {}".format(outfile))

    if flg & (2**16):  # VLT/X-Shooter line list
        line_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'lists')
        old_file = os.path.join(line_path, 'ThAr_XSHOOTER_VIS_air_lines.dat')
        # Load
        air_list = waveio.load_line_list(old_file)
        # Vacuum
        vac_wv = airtovac(air_list['wave'] * units.AA).value
        vac_list = air_list.copy()
        vac_list['wave'] = vac_wv
        # Write
        new_file = os.path.join(line_path, 'ThAr_XSHOOTER_VIS_lines.dat')
        vac_list.write(new_file, format='ascii.fixed_width', overwrite=True)
        print("Wrote: {}".format(new_file))

    if flg & (2**17):  # Keck/NIRES
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        iroot = 'keck_nires.json'
        iout = 'keck_nires.fits'
        # Load the archived JSON solution
        old_file = os.path.join(reid_path, iroot)
        odict, par = waveio.load_reid_arxiv(old_file)

        # Orders were stored flipped; restore them
        orders = odict['fit2d']['orders'][::-1].astype(int)
        all_wave = np.zeros((odict['0']['nspec'], orders.size))
        all_flux = np.zeros_like(all_wave)
        for kk in range(orders.size):
            all_flux[:, kk] = odict[str(kk)]['spec']
            # Wavelengths are used as stored.  The old `if 'nir' in iroot`
            # test was always True for 'keck_nires.json', so the airtovac
            # else-branch was unreachable and has been removed.
            all_wave[:, kk] = odict[str(kk)]['wave_soln']

        # Assemble the template table and write it
        tbl = Table()
        tbl['wave'] = all_wave.T
        tbl['flux'] = all_flux.T
        tbl['order'] = orders
        tbl.meta['BINSPEC'] = 1
        outfile = os.path.join(reid_path, iout)
        tbl.write(outfile, overwrite=True)
        print("Wrote: {}".format(outfile))

    if flg & (2**18):  # Gemini/GNIRS
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        iroot = 'gemini_gnirs.json'
        iout = 'gemini_gnirs.fits'
        # Load the archived JSON solution
        old_file = os.path.join(reid_path, iroot)
        odict, par = waveio.load_reid_arxiv(old_file)

        # Orders were stored flipped; restore them
        orders = odict['fit2d']['orders'][::-1].astype(int)
        all_wave = np.zeros((odict['0']['nspec'], orders.size))
        all_flux = np.zeros_like(all_wave)
        for kk in range(orders.size):
            all_flux[:, kk] = odict[str(kk)]['spec']
            # Wavelengths are used as stored.  The old `if 'nir' in iroot`
            # test was always True ('gemini_gnirs.json' contains 'nir'), so
            # the airtovac else-branch was unreachable and has been removed.
            all_wave[:, kk] = odict[str(kk)]['wave_soln']

        # Assemble the template table and write it
        tbl = Table()
        tbl['wave'] = all_wave.T
        tbl['flux'] = all_flux.T
        tbl['order'] = orders
        tbl.meta['BINSPEC'] = 1
        outfile = os.path.join(reid_path, iout)
        tbl.write(outfile, overwrite=True)
        print("Wrote: {}".format(outfile))

    if flg & (2**23):  # WHT/ISIS blue arm, 1200B grating
        # Single-slit template built from one archived JSON solution
        wfile = os.path.join(template_path, 'WHT_ISIS', '1200B',
                             'wht_isis_blue_1200_4800.json')
        outroot = 'wht_isis_blue_1200_4800.fits'
        binspec = 2      # spectra were binned 2x spectrally
        slits = [0]
        lcut = [3200.]
        build_template(wfile, slits, lcut, binspec, outroot, lowredux=False)

    if flg & (2**24):  # Magellan/FIRE echelle
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        iroot = 'magellan_fire_echelle.json'
        iout = 'magellan_fire_echelle.fits'
        # Load the archived JSON solution
        old_file = os.path.join(reid_path, iroot)
        odict, par = waveio.load_reid_arxiv(old_file)

        # Orders were stored flipped; restore them
        orders = odict['fit2d']['orders'][::-1].astype(int)
        all_wave = np.zeros((odict['0']['nspec'], orders.size))
        all_flux = np.zeros_like(all_wave)
        for kk in range(orders.size):
            all_flux[:, kk] = odict[str(kk)]['spec']
            # Archived wavelengths are in air; convert to vacuum.  The old
            # `if 'nir' in iroot` branch was unreachable ('nir' is not a
            # substring of this filename) and has been removed.
            all_wave[:, kk] = airtovac(odict[str(kk)]['wave_soln'] *
                                       units.AA).value

        # Assemble the template table and write it
        tbl = Table()
        tbl['wave'] = all_wave.T
        tbl['flux'] = all_flux.T
        tbl['order'] = orders
        tbl.meta['BINSPEC'] = 1
        outfile = os.path.join(reid_path, iout)
        tbl.write(outfile, overwrite=True)
        print("Wrote: {}".format(outfile))

    if flg & (2**25):  # Magellan/FIRE longslit (low dispersion)
        binspec = 1
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        outroot = 'magellan_fire_long.fits'
        # Archived XIDL products live under $FIRE_DIR
        lowd_path = os.path.join(os.getenv('FIRE_DIR'), 'LowDispersion')
        fire_sol = Table.read(os.path.join(lowd_path,
                                           'NeNeAr_archive_fit.fits'))
        # Evaluate the archived Chebyshev fit on a 2048-pixel grid
        wave = cheby_val(fire_sol['FFIT'].data[0], np.arange(2048),
                         fire_sol['NRM'].data[0], fire_sol['NORD'].data[0])
        wv_vac = airtovac(wave * units.AA)  # air -> vacuum
        # The arc spectrum itself comes from the IDL save file
        xidl_dict = readsav(os.path.join(lowd_path, 'NeNeAr2.sav'))
        flux = xidl_dict['arc1d']
        wvutils.write_template(wv_vac.value, flux, binspec, reid_path,
                               outroot, det_cut=None)

    # Gemini/Flamingos2
    if flg & (2**26):
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        iroot = ['Flamingos2_JH_JH.json', 'Flamingos2_HK_HK.json']
        outroot = ['Flamingos2_JH_JH.fits', 'Flamingos2_HK_HK.fits']
        binspec = 1
        slits = [0]
        lcut = []
        # One template per grism setup
        for json_name, fits_name in zip(iroot, outroot):
            build_template(os.path.join(reid_path, json_name), slits, lcut,
                           binspec, fits_name, lowredux=False)

    # MDM/OSMOS -- MDM4K
    if flg & (2**28):
        # ArI coverage, roughly 4159 -- 6800 Ang
        wfile = os.path.join(template_path, 'MDM_OSMOS',
                             'MasterWaveCalib_MDM4K_01.json')
        # Single slit, unbinned, cut at 3200 Ang; show the check plot and
        # remove the continuum before stacking
        build_template(wfile, [0], [3200.], 1, 'mdm_osmos_mdm4k.fits',
                       lowredux=False, chk=True, subtract_conti=True)

    # Keck KCWI
    if flg & (2**29):
        binspec = 1
        lcut = [4350.0, 8000.0]
        # FeAr, BH2 grating (single setup)
        bh2_file = os.path.join(template_path, 'KCWI', 'BH2',
                                'Keck_KCWI_BH2_4200.json')
        build_template([bh2_file], [1015], lcut, binspec,
                       'keck_kcwi_BH2_4200.fits', lowredux=False,
                       normalize=True)
        # FeAr, BM grating: stitch two central-wavelength setups
        bm_files = [os.path.join(template_path, 'KCWI', 'BM', bm_root)
                    for bm_root in ('Keck_KCWI_BM_4060.json',
                                    'Keck_KCWI_BM_4670.json')]
        build_template(bm_files, [1026, 1021], lcut, binspec,
                       'keck_kcwi_BM.fits', lowredux=False, normalize=True)

    # P200 DBSP r
    if flg & (2**30):
        # HeNeAr lamps, 316/7500 grating with the D55 dichroic
        wfile = os.path.join(template_path, 'P200_DBSP', 'R316_7500_D55',
                             'P200_DBSP_Red.json')
        # lcut=None: the cut only matters when stitching more than one slit
        build_template([wfile], [221], None, 1,
                       'p200_dbsp_red_316_7500_d55.fits', lowredux=False,
                       normalize=True)

    # P200 DBSP b
    if flg & (2**31):
        # FeAr lamp, 600/4000 grating with the D55 dichroic
        wfile = os.path.join(template_path, 'P200_DBSP', 'B600_4000_D55',
                             'P200_DBSP_Blue.json')
        # lcut=None: only one slit is used
        build_template([wfile], [231], None, 1,
                       'p200_dbsp_blue_600_4000_d55.fits', lowredux=False,
                       normalize=True)

    # MMT/MMIRS
    if flg & (2**32):
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        iroot = ['mmt_mmirs_HK_zJ.json', 'mmt_mmirs_J_zJ.json',
                 'mmt_mmirs_K3000_Kspec.json']
        outroot = ['mmt_mmirs_HK_zJ.fits', 'mmt_mmirs_J_zJ.fits',
                   'mmt_mmirs_K3000_Kspec.fits']
        binspec = 1
        slits = [1020, 1020, 1020]
        lcut = []
        # NOTE(review): the full `slits` list (not a per-setup element) is
        # passed on every iteration, unlike the LBT/MODS case which indexes
        # it -- confirm this is what build_template expects for a single
        # input file.
        for json_name, fits_name in zip(iroot, outroot):
            build_template(os.path.join(reid_path, json_name), slits, lcut,
                           binspec, fits_name, lowredux=False)
    # LBT/MODS
    if flg & (2**33):
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        iroot = ['lbt_mods1r_red.json', 'lbt_mods2r_red.json']
        outroot = ['lbt_mods1r_red.fits', 'lbt_mods2r_red.fits']
        binspec = 1
        slits = [[1557], [1573]]
        lcut = []
        # Each MODS channel carries its own slit position
        for json_name, fits_name, islits in zip(iroot, outroot, slits):
            build_template(os.path.join(reid_path, json_name), islits, lcut,
                           binspec, fits_name, lowredux=False)
    # P200 Triplespec
    if flg & (2**34):
        reid_path = os.path.join(resource_filename('pypeit', 'data'),
                                 'arc_lines', 'reid_arxiv')
        iroot = 'p200_triplespec_MasterWaveCalib.fits'
        iout = 'p200_triplespec.fits'
        # Open the MasterWaveCalib file; orders occupy every other
        # extension starting at HDU 2
        old_file = os.path.join(reid_path, iroot)
        hdul = io.fits_open(old_file)
        pyp_spec = hdul[0].header['PYP_SPEC']
        spectrograph = load_spectrograph(pyp_spec)
        orders = spectrograph.orders

        # Collect the wavelength solution and flux for each order
        all_wave = np.zeros((hdul[2].data['spec'].size, orders.size))
        all_flux = np.zeros_like(all_wave)
        for kk in range(orders.size):
            all_flux[:, kk] = hdul[2 * kk + 2].data['spec']
            all_wave[:, kk] = hdul[2 * kk + 2].data['wave_soln']
        # Close the file handle (was previously left open)
        hdul.close()

        # Assemble the template table and write it
        tbl = Table()
        tbl['wave'] = all_wave.T
        tbl['flux'] = all_flux.T
        tbl['order'] = orders
        tbl.meta['BINSPEC'] = 1
        outfile = os.path.join(reid_path, iout)
        tbl.write(outfile, overwrite=True)
        print("Wrote: {}".format(outfile))
# (extraction artifact: example-separator tokens "示例#12" / "0" removed;
#  they were not part of the original source)
    def build_wv_calib(self, arccen, method, skip_QA=False):
        """
        Main routine to generate the wavelength solutions in a loop over slits.

        Dispatches to the requested calibration engine; ``self.wvc_bpm``
        marks the slits that are bad/failed.

        Args:
            arccen (`numpy.ndarray`_):
                Extracted arc spectra, one column per slit.
            method (str):
                Calibration method to use:

                  - 'simple' -- arc.simple_calib_driver
                  - 'holy-grail' -- autoid.HolyGrail
                  - 'identify' -- interactive gui_identify tool
                  - 'reidentify' -- autoid.ArchiveReid
                  - 'full_template' -- autoid.full_template
            skip_QA (bool, optional):
                If True, do not generate the per-slit QA plots.

        Returns:
            dict: ``self.wv_calib``, keyed by the spatial (slitord) ID of
            each successfully calibrated slit.
        """
        # Indices of the slits still good for calibration
        ok_mask_idx = np.where(np.invert(self.wvc_bpm))[0]

        # Obtain calibration for all slits
        if method == 'simple':
            # Calibrate from user-supplied pixel/wavelength pairs
            lines = self.par['lamps']
            line_lists = waveio.load_line_lists(lines)

            final_fit = arc.simple_calib_driver(
                line_lists,
                arccen,
                ok_mask_idx,
                n_final=self.par['n_final'],
                sigdetect=self.par['sigdetect'],
                IDpixels=self.par['IDpixels'],
                IDwaves=self.par['IDwaves'])
        elif method == 'holy-grail':
            # Brute-force pattern search; sometimes works, sometimes fails
            arcfitter = autoid.HolyGrail(
                arccen,
                par=self.par,
                ok_mask=ok_mask_idx,
                nonlinear_counts=self.nonlinear_counts)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'identify':
            final_fit = {}
            # Manually identify lines through an interactive GUI.
            # NOTE: a leftover debugging `embed()` call was removed here.
            msgs.info("Initializing the wavelength calibration tool")
            # TODO: Move this loop to the GUI initalise method
            for slit_idx in ok_mask_idx:
                arcfitter = gui_identify.initialise(arccen,
                                                    slit=slit_idx,
                                                    par=self.par)
                final_fit[str(slit_idx)] = arcfitter.get_results()
                if final_fit[str(slit_idx)] is not None:
                    # Archive automatically when the fit is good enough
                    ans = 'y'
                    if ans == 'y' and final_fit[str(
                            slit_idx)]['rms'] < self.par['rms_threshold']:
                        # Store the results in the user reid arxiv
                        specname = self.spectrograph.spectrograph
                        gratname = "UNKNOWN"  # input("Please input the grating name: ")
                        dispangl = "UNKNOWN"  # input("Please input the dispersion angle: ")
                        templates.pypeit_identify_record(
                            final_fit[str(slit_idx)], self.binspectral,
                            specname, gratname, dispangl)
                        msgs.info("Your wavelength solution has been stored")
                        msgs.info(
                            "Please consider sending your solution to the PYPEIT team!"
                        )

        elif method == 'reidentify':
            # Now preferred: cross-correlate against archived solutions
            arcfitter = autoid.ArchiveReid(
                arccen,
                self.spectrograph,
                self.par,
                ok_mask=ok_mask_idx,
                slit_spat_pos=self.spat_coo,
                nonlinear_counts=self.nonlinear_counts)
            patt_dict, final_fit = arcfitter.get_results()
        elif method == 'full_template':
            # Now preferred: match snippets against a full template spectrum
            if self.binspectral is None:
                msgs.error(
                    "You must specify binspectral for the full_template method!"
                )
            final_fit = autoid.full_template(
                arccen,
                self.par,
                ok_mask_idx,
                self.det,
                self.binspectral,
                nonlinear_counts=self.nonlinear_counts,
                nsnippet=self.par['nsnippet'])
        else:
            msgs.error(
                'Unrecognized wavelength calibration method: {:}'.format(
                    method))

        # Re-key the fits from slit index to spatial (slitord) ID.
        # (An unused deep copy of final_fit was removed here.)
        self.wv_calib = {}
        for idx in range(self.slits.nslits):
            if str(idx) in final_fit:
                self.wv_calib[str(self.slits.slitord_id[idx])] = final_fit.pop(
                    str(idx))

        # Update mask
        self.update_wvmask()

        #TODO For generalized echelle (not hard wired) assign order number here before, i.e. slits.ech_order

        # QA: one arc-fit plot per good slit
        if not skip_QA:
            ok_mask_idx = np.where(np.invert(self.wvc_bpm))[0]
            for slit_idx in ok_mask_idx:
                outfile = qa.set_qa_filename(
                    self.master_key,
                    'arc_fit_qa',
                    slit=self.slits.slitord_id[slit_idx],
                    out_dir=self.qa_path)
                autoid.arc_fit_qa(self.wv_calib[str(
                    self.slits.slitord_id[slit_idx])],
                                  outfile=outfile)

        # Record the processing step and return
        self.steps.append(inspect.stack()[0][3])
        return self.wv_calib