Example #1
def main(args):
    """
    Parameters
    ----------
    args

    Returns
    -------

    """

    import numpy as np
    from pypit import pyputils
    msgs = pyputils.get_dummy_logger()  # initialize PYPIT's messaging/logging before importing other pypit modules
    from pypit import arqa
    from linetools.utils import loadjson

    # Read JSON
    fdict = loadjson(args.wave_soln)
    for key in fdict.keys():
        if isinstance(fdict[key], list):
            fdict[key] = np.array(fdict[key])

    # Generate QA
    arqa.arc_fit_qa(None, fdict, outfil=args.outfile, ids_only=True,
                    title=args.title)
    print("Wrote {:s}".format(args.outfile))
Example #2
def main():

    import argparse  # command-line parsing

    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('file', type=str, default=None, help='FITS file')
    parser.add_argument("--list", default=False, help="List the extensions only?", action="store_true")
    parser.add_argument('--raw_lris', action="store_true")
    parser.add_argument('--exten', type=int, help="FITS extension")

    pargs = parser.parse_args()

    # List?
    if pargs.list:
        from astropy.io import fits
        hdu = fits.open(pargs.file)
        hdu.info()  # .info() prints the summary itself (and returns None)
        return

    kludge_fil = 'tmp_ginga.fits'

    # Setup for PYPIT imports
    import subprocess
    from astropy.io import fits
    from pypit import pyputils
    msgs = pyputils.get_dummy_logger()

    from pypit import arlris

    # Extension
    if pargs.exten is not None:
        hdu = fits.open(pargs.file)
        img = hdu[pargs.exten].data
        # Write
        msgs.warn('Writing kludge file to {:s}'.format(kludge_fil))
        hdunew = fits.PrimaryHDU(img)
        hdulist = fits.HDUList([hdunew])
        hdulist.writeto(kludge_fil, overwrite=True)  # 'clobber' is deprecated in modern astropy
        #
        pargs.file = kludge_fil

    # RAW_LRIS??
    if pargs.raw_lris:
        # 
        img, head, _ = arlris.read_lris(pargs.file)
        # Generate hdu
        hdu = fits.PrimaryHDU(img)
        hdulist = fits.HDUList([hdu])
        # Write
        msgs.warn('Writing kludge file to {:s}'.format(kludge_fil))
        hdulist.writeto(kludge_fil, overwrite=True)  # 'clobber' is deprecated in modern astropy
        pargs.file = kludge_fil

    # Spawn ginga
    subprocess.call(["ginga", pargs.file])

    if pargs.raw_lris:
        msgs.warn('Removing kludge file {:s}'.format(kludge_fil))
        subprocess.call(["rm", pargs.file])
Example #3
def tst_unknwn_wvcen(spec,
                     lines,
                     wv_cen,
                     disp,
                     siglev=20.,
                     min_ampl=300.,
                     swv_uncertainty=350.,
                     pix_tol=2,
                     plot_fil=None,
                     wvoff=1000.):
    """ As named
    """
    dcen = swv_uncertainty * 0.8
    wvcens = np.arange(wv_cen - wvoff, wv_cen + wvoff + dcen, dcen)
    # Best
    best_dict = dict(nmatch=0, nID=0, rms=0., ibest=-1, bwv=0.)
    # Loop
    for ss, iwv_cen in enumerate(wvcens):
        print('wv_cen guess = {:g}'.format(iwv_cen))
        stuff = grail.basic(spec,
                            lines,
                            iwv_cen,
                            disp,
                            siglev=siglev,
                            min_ampl=min_ampl,
                            swv_uncertainty=swv_uncertainty,
                            pix_tol=pix_tol,
                            plot_fil=None)
        if stuff[0] == -1:
            print("Solution failed for iwv_cen={:g}. Nmatch={:d}".format(
                iwv_cen, stuff[1]))
        elif stuff[0] == 1:
            # Unpack
            status, nmatch, match_idx, scores, final_fit = stuff
            print("Fit finished for iwv_cen={:g}".format(iwv_cen))
            # Metrics -- nmatch, nID, RMS
            nID = len(final_fit['xfit'])
            print("   Nmatch={:d}, nID={:d}, RMS={:g}".format(
                nmatch, nID, final_fit['rms']))
            # Best?
            if nID > best_dict['nID']:
                best_dict['nmatch'] = nmatch
                best_dict['nID'] = nID
                best_dict['rms'] = final_fit['rms']
                best_dict['ibest'] = ss
                best_dict['bwv'] = iwv_cen
                best_dict['fit'] = final_fit
    # Report
    print("Here is the best:")
    print(best_dict)
    # QA
    if plot_fil is not None:
        from pypit import pyputils
        msgs = pyputils.get_dummy_logger()
        from pypit import arqa
        arqa.arc_fit_qa(None, best_dict['fit'], outfil=plot_fil)
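
tst_unknwn_wvcen relies on names imported elsewhere in its module (numpy as np and the grail fitter). Below is a minimal call sketch under those assumptions; the arclines.holy location of grail and every input value are placeholders, not taken from the original file.

import numpy as np
from arclines.holy import grail  # assumed home of grail.basic()

# Placeholder inputs: a 1D extracted arc spectrum and the lamp list
spec = np.load('arc_spectrum.npy')           # hypothetical spectrum file
lines = ['ArI', 'HgI', 'KrI', 'NeI', 'XeI']  # example lamp set
tst_unknwn_wvcen(spec, lines, wv_cen=5500., disp=1.26,
                 plot_fil='unknwn_wvcen_qa.pdf')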
Example #4
def main(args):

    # List only?
    from astropy.io import fits
    if args.list:
        hdu = fits.open(args.file)
        hdu.info()  # .info() prints the summary itself (and returns None)
        return

    kludge_fil = 'tmp_ginga.fits'

    # Setup for PYPIT imports
    import subprocess
    from pypit import pyputils
    msgs = pyputils.get_dummy_logger()

    from pypit import arlris

    # Extension
    if args.exten is not None:
        hdu = fits.open(args.file)
        img = hdu[args.exten].data
        # Write
        msgs.warn('Writing kludge file to {:s}'.format(kludge_fil))
        hdunew = fits.PrimaryHDU(img)
        hdulist = fits.HDUList([hdunew])
        hdulist.writeto(kludge_fil, overwrite=True)  # 'clobber' is deprecated in modern astropy
        #
        args.file = kludge_fil

    # RAW_LRIS??
    if args.raw_lris:
        # 
        img, head, _ = arlris.read_lris(args.file)
        # Generate hdu
        hdu = fits.PrimaryHDU(img)
        hdulist = fits.HDUList([hdu])
        # Write
        msgs.warn('Writing kludge file to {:s}'.format(kludge_fil))
        hdulist.writeto(kludge_fil, overwrite=True)  # 'clobber' is deprecated in modern astropy
        args.file = kludge_fil

    # Spawn ginga
    subprocess.call(["ginga", args.file])

    if args.raw_lris:
        msgs.warn('Removing kludge file {:s}'.format(kludge_fil))
        subprocess.call(["rm", args.file])
Example #5
# Module to run tests on simple fitting routines for arrays

### TEST_UNICODE_LITERALS

import numpy as np
import sys
import os, pdb
import pytest

from astropy import units as u

from pypit import pyputils
import pypit

msgs = pyputils.get_dummy_logger()

# from xastropy.xutils import afits as xafits
# from xastropy.xutils import xdebug as xdb

# def data_path(filename):
#    data_dir = os.path.join(os.path.dirname(__file__), 'files')
#    return os.path.join(data_dir, filename)


def test_find_standard():
    from pypit import arutils as arut
    from pypit import arflux as arflx

    # Dummy self
    slf = arut.dummy_self()
    # G191b2b
Example #6
File: setup.py  Project: lwymarie/PYPIT
def main(args):

    from pypit.scripts import run_pypit
    from pypit import pyputils
    from pypit.pypit import load_input
    import os
    import datetime

    '''
    # Check for existing .setups file
    setup_files = glob.glob('./{:s}*.setups'.format(args.spectrograph))
    if len(setup_files) > 0:
        print("Working directory already includes a .setups file")
        for ifile in setup_files:
            print("Remove: {:s}".format(ifile))
        print("Then you can re-run this script")
        sys.exit()
    '''

    # Generate a dummy .pypit file
    if not args.pypit_file:
        # Name
        date = str(datetime.date.today().strftime('%Y-%b-%d'))
        root = args.spectrograph+'_'+date
        pyp_file = args.redux_path+root+'.pypit'
        # Generate
        dfname = "{:s}*{:s}*".format(args.files_root, args.extension)
        # parlines
        parlines = ['run ncpus 1\n',
                    'output overwrite True\n']
        parlines += ["run spectrograph {:s}\n".format(args.spectrograph)]
        parlines += ["output sorted {:s}\n".format(root)]
        pyputils.make_pypit_file(pyp_file, args.spectrograph,
                              [dfname], setup_script=True, parlines=parlines)
        print("Wrote {:s}".format(pyp_file))
    else:
        pyp_file = args.files_root

    # Run
    pinp = [pyp_file]
    if args.develop:
        pinp += ['-d']
    pargs = run_pypit.parser(pinp)
    sorted_file = pyp_file.replace('.pypit', '.sorted')
    run_pypit.main(pargs)

    # #####################
    # Generate custom .pypit files
    if not args.custom:
        return

    # Read master file
    from pypit import pyputils
    from pypit import arsort
    msgs = pyputils.get_dummy_logger()
    pyp_dict = load_input(pyp_file, msgs)
    parlines, datlines, spclines, dfnames = [pyp_dict[ii] for ii in ['par','dat','spc','dfn']]

    # Get paths
    paths = []
    for datline in datlines:
        islsh = datline.rfind('/')
        path = datline[:islsh+1]
        if path not in paths:
            paths.append(path)

    # Remove run setup from parlines
    for jj, parline in enumerate(parlines):
        if 'run setup' in parline:
            #parlines[jj] = 'run setup False\n'
            parlines[jj] = '\n'

    # Generate .pypit files and sub-folders
    all_setups, all_setuplines, all_setupfiles = arsort.load_sorted(sorted_file)
    for setup, setuplines, setupfiles in zip(all_setups, all_setuplines, all_setupfiles):
        root = args.spectrograph+'_setup_'
        # Make the dir
        newdir = args.redux_path+root+setup
        if not os.path.exists(newdir):
            os.mkdir(newdir)
        # Now the file
        pyp_file = newdir+'/'+root+setup+'.pypit'
        # Drop any existing 'output sorted' entry (popping while iterating can skip elements)
        parlines = [pline for pline in parlines if 'output sorted' not in pline]
        parlines += ["output sorted {:s}\n".format(root+setup)]

        pyputils.make_pypit_file(pyp_file, args.spectrograph, [],
                                 parlines=parlines,
                                 spclines=None,
                                 setuplines=setuplines,
                                 setupfiles=setupfiles,
                                 paths=paths,
                                 calcheck=False)
        print("Wrote {:s}".format(pyp_file))
Example #7
# Module to run tests on armasters

import pytest

from pypit import pyputils
msgs = pyputils.get_dummy_logger()  #develop=True)
from pypit import armasters

#def data_path(filename):
#    data_dir = os.path.join(os.path.dirname(__file__), 'files')
#    return os.path.join(data_dir, filename)

#@pytest.fixture
#def fitsdict():
#    return arutils.dummy_fitsdict()


def test_master_name():
    """ Test master name method
    """
    types = [
        'bias', 'badpix', 'trace', 'normpixelflat', 'arc', 'wave', 'wv_calib',
        'tilts'
    ]
    suff = [
        'Bias', 'BadPix', 'Trace', 'FlatField', 'Arc', 'Wave', 'WaveCalib',
        'Tilts'
    ]
    for isuff, itype in zip(suff, types):
        if itype == 'wv_calib':
            exten = 'json'
Example #8
def generate_hdf(sav_file, instr, lamps, outfil, dtoler=0.6):
    """ Given an input LR IDL save file, generate an hdf5
    IDs arc lines too

    Parameters
    ----------
    sav_file : str
      Root name of the IDL save file from LowRedux, e.g. lris_blue_600.sav
    lamps
    outfil

    Returns
    -------

    """
    from pypit import pyputils
    msgs = pyputils.get_dummy_logger()

    from pypit import arwave
    from pypit import arutils
    arutils.dummy_settings()
    #
    from arclines.pypit_utils import find_peaks
    from arclines.io import load_line_lists
    #

    # Read IDL save file
    sav_file = os.getenv('LONGSLIT_DIR') + 'calib/linelists/' + sav_file
    s = readsav(sav_file)
    ctbl = Table(s['calib'])  # For writing later

    # Line list
    alist = load_line_lists(lamps)

    # One spectrum?
    ashape = s['archive_arc'].shape
    if len(ashape) == 1:
        nspec = 1
        npix = ashape[0]
    else:
        nspec = s['archive_arc'].shape[0]
        npix = ashape[1]

    # Meta data
    mdict = dict(
        npix=npix,
        instr=instr,
        lamps=[str(ilamp) for ilamp in lamps],  # For writing to hdf5
        nspec=nspec,
        infil=sav_file,
        IDairvac='vac')
    print("Processing {:d} spectra in {:s}".format(mdict['nspec'], sav_file))

    # Start output
    outh5 = h5py.File(out_path + outfil, 'w')
    outh5.create_group('arcs')

    # Loop on spectra
    for ss in range(mdict['nspec']):
        sss = str(ss)
        # Parse
        if nspec == 1:
            spec = s['archive_arc']
        else:
            spec = s['archive_arc'][ss]
        calib = s['calib'][ss]
        # Peaks
        tampl, tcent, twid, w, yprep = find_peaks(spec)
        pixpk = tcent[w]
        pixampl = tampl[w]

        # Wavelength solution
        if calib['func'] == 'CHEBY':
            wv_air = cheby_val(calib['ffit'], np.arange(mdict['npix']),
                               calib['nrm'], calib['nord'])
        elif calib['func'] == 'POLY':
            wv_air = poly_val(calib['ffit'], np.arange(mdict['npix']),
                              calib['nrm'])
        # Check blue->red or vice-versa
        if ss == 0:
            if wv_air[0] > wv_air[-1]:
                mdict['bluered'] = False
            else:
                mdict['bluered'] = True

        # Peak waves
        if calib['func'] == 'CHEBY':
            twave_air = cheby_val(calib['ffit'], pixpk, calib['nrm'],
                                  calib['nord'])
        else:
            twave_air = poly_val(calib['ffit'], pixpk, calib['nrm'])
        # Air to Vac
        twave_vac = arwave.airtovac(twave_air * u.AA)
        wave_vac = arwave.airtovac(wv_air * u.AA)
        if ss == 0:
            disp = np.median(np.abs(wave_vac - np.roll(wave_vac, 1)))
            print("Average dispersion = {:g}".format(disp))
        # IDs
        idwv = np.zeros_like(pixpk)
        idsion = np.array([str('12345')] * len(pixpk))  # dummy 5-char strings set the dtype width for ion names
        for kk, twv in enumerate(twave_vac.value):
            # diff
            diff = np.abs(twv - alist['wave'])
            if np.min(diff) < dtoler:
                imin = np.argmin(diff)
                idwv[kk] = alist['wave'][imin]
                #idsion[kk] = alist['Ion'][imin]  NIST
                idsion[kk] = alist['ion'][imin]
        # Red to blue?
        if mdict['bluered'] is False:
            pixpk = mdict['npix'] - 1 - pixpk
            # Re-sort
            asrt = np.argsort(pixpk)
            pixpk = pixpk[asrt]
            idwv = idwv[asrt]
            # Reverse
            spec = spec[::-1]
            wave_vac = wave_vac[::-1]
        # Output
        outh5['arcs'].create_group(sss)
        # Datasets
        outh5['arcs'][sss]['wave'] = wave_vac
        outh5['arcs'][sss]['wave'].attrs['airvac'] = 'vac'
        outh5['arcs'][sss]['spec'] = spec
        outh5['arcs'][sss]['spec'].attrs['flux'] = 'counts'
        outh5['arcs'][sss]['pixpk'] = pixpk
        outh5['arcs'][sss]['ID'] = idwv
        outh5['arcs'][sss]['ID'].attrs['airvac'] = 'vac'
        outh5['arcs'][sss]['Ion'] = idsion
        # LR wavelengths
        outh5['arcs'][sss]['LR_wave'] = wv_air
        outh5['arcs'][sss]['LR_wave'].attrs['airvac'] = 'air'
        # LR Fit
        outh5['arcs'][sss].create_group('LR_fit')
        for key in ctbl.keys():
            outh5['arcs'][sss]['LR_fit'][key] = ctbl[ss][key]

    # Meta data
    outh5.create_group('meta')
    for key in mdict.keys():
        try:
            outh5['meta'][key] = mdict[key]
        except TypeError:  # Probably a unicode thing
            pdb.set_trace()
    # Close
    outh5.close()
    print('Wrote {:s}'.format(out_path + outfil))
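
generate_hdf leans on module-level names that this snippet does not show (np, os, pdb, u, Table, readsav, h5py, plus local helpers). The sketch below lists the imports it appears to assume; the scipy.io origin of readsav is an assumption, and cheby_val, poly_val and out_path are helpers defined elsewhere in the original module that are not reproduced here.

import os
import pdb
import numpy as np
import h5py
from scipy.io import readsav        # reads the LowRedux IDL save file
from astropy import units as u
from astropy.table import Table

# cheby_val, poly_val (wavelength-solution evaluators) and out_path
# (output directory) are module-level definitions not shown in the snippet.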
Example #9
# Module to run tests on artrace


import os
import numpy as np
import pytest

from astropy.table import Table

from pypit import pyputils
msgs = pyputils.get_dummy_logger()
from pypit import arutils
from pypit import artrace
from pypit import arcytrace


def data_path(filename):
    data_dir = os.path.join(os.path.dirname(__file__), 'files')
    return os.path.join(data_dir, filename)


def test_very_close_obj():
    """ Find objects with minima algorithm
    """
    # Read trace example
    tbl = Table.read(data_path('real_close_trace.fits'))
    trcprof = tbl['trc'].data.astype(np.float64)
    # Call
    objl, objr, bckl, bckr = artrace.find_obj_minima(trcprof, fwhm=3., nsmooth=0)
    assert objl[1] == 55
    assert objr[1] == 63
Example #10
def test_dummy_logger():
    msgs = pyputils.get_dummy_logger()
    msgs.info('testing 123')
Example #11
def iterative_fitting(spec,
                      tcent,
                      ifit,
                      IDs,
                      llist,
                      disp,
                      plot_fil=None,
                      verbose=False,
                      load_pypit=False,
                      aparm=None):

    if aparm is None:
        aparm = dict(
            llist='',
            disp=disp,  # Ang/unbinned pixel
            disp_toler=0.1,  # 10% tolerance
            match_toler=3.,  # Matching tolerance (pixels)
            func='legendre',  # Function for fitting
            n_first=3,  # Order of polynomial for first fit
            n_final=4,  # Order of polynomial for final fit
            nsig_rej=2.,  # Number of sigma for rejection
            nsig_rej_final=3.0)  # Number of sigma for rejection (final fit)
    # PYPIT
    if load_pypit:
        from pypit import pyputils
        msgs = pyputils.get_dummy_logger()
    from pypit import arutils
    from pypit import arqa
    if load_pypit:
        arutils.dummy_settings()

    npix = spec.size

    # Setup for fitting
    sv_ifit = list(ifit)  # Keep the originals
    all_ids = -999. * np.ones(len(tcent))
    all_idsion = np.array(['UNKNWN'] * len(tcent))
    all_ids[ifit] = IDs

    # Fit
    n_order = aparm['n_first']
    flg_quit = False
    fmin, fmax = -1., 1.
    while (n_order <= aparm['n_final']) and (flg_quit is False):
        # Fit with rejection
        xfit, yfit = tcent[ifit], all_ids[ifit]
        mask, fit = arutils.robust_polyfit(xfit,
                                           yfit,
                                           n_order,
                                           function=aparm['func'],
                                           sigma=aparm['nsig_rej'],
                                           minv=fmin,
                                           maxv=fmax)

        rms_ang = arutils.calc_fit_rms(xfit[mask == 0],
                                       yfit[mask == 0],
                                       fit,
                                       aparm['func'],
                                       minv=fmin,
                                       maxv=fmax)
        rms_pix = rms_ang / disp
        if verbose:
            print("RMS = {:g}".format(rms_pix))
        # DEBUG
        # Reject but keep originals (until final fit)
        ifit = list(ifit[mask == 0]) + sv_ifit
        # Find new points (should we allow removal of the originals?)
        twave = arutils.func_val(fit,
                                 tcent,
                                 aparm['func'],
                                 minv=fmin,
                                 maxv=fmax)
        for ss, iwave in enumerate(twave):
            mn = np.min(np.abs(iwave - llist['wave']))
            if mn / aparm['disp'] < aparm['match_toler']:
                imn = np.argmin(np.abs(iwave - llist['wave']))
                #if verbose:
                #    print('Adding {:g} at {:g}'.format(llist['wave'][imn],tcent[ss]))
                # Update and append
                all_ids[ss] = llist['wave'][imn]
                all_idsion[ss] = llist['ion'][imn]
                ifit.append(ss)
        # Keep unique ones
        ifit = np.unique(np.array(ifit, dtype=int))
        # Increment order
        if n_order < (aparm['n_final'] + 2):
            n_order += 1
        else:
            # This does 2 iterations at the final order
            flg_quit = True

    # Final fit (originals can now be rejected)
    fmin, fmax = 0., 1.
    xfit, yfit = tcent[ifit] / (npix - 1), all_ids[ifit]
    mask, fit = arutils.robust_polyfit(xfit,
                                       yfit,
                                       n_order,
                                       function=aparm['func'],
                                       sigma=aparm['nsig_rej_final'],
                                       minv=fmin,
                                       maxv=fmax)  #, debug=True)
    irej = np.where(mask == 1)[0]
    if len(irej) > 0:
        xrej = xfit[irej]
        yrej = yfit[irej]
        if verbose:
            for kk, imask in enumerate(irej):
                wave = arutils.func_val(fit,
                                        xrej[kk],
                                        aparm['func'],
                                        minv=fmin,
                                        maxv=fmax)
                print('Rejecting arc line {:g}; {:g}'.format(
                    yfit[imask], wave))
    else:
        xrej = []
        yrej = []
    xfit = xfit[mask == 0]
    yfit = yfit[mask == 0]
    ions = all_idsion[ifit][mask == 0]
    # Final RMS
    rms_ang = arutils.calc_fit_rms(xfit,
                                   yfit,
                                   fit,
                                   aparm['func'],
                                   minv=fmin,
                                   maxv=fmax)
    rms_pix = rms_ang / disp
    #
    '''
    if msgs._debug['arc']:
        msarc = slf._msarc[det-1]
        wave = arutils.func_val(fit, np.arange(msarc.shape[0])/float(msarc.shape[0]),
            'legendre', minv=fmin, maxv=fmax)
        debugger.xplot(xfit,yfit, scatter=True,
            xtwo=np.arange(msarc.shape[0])/float(msarc.shape[0]),
            ytwo=wave)
        debugger.xpcol(xfit*msarc.shape[0], yfit)
        debugger.set_trace()
    '''

    #debugger.xplot(xfit, np.ones(len(xfit)), scatter=True,
    #    xtwo=np.arange(msarc.shape[0]),ytwo=yprep)
    #debugger.xplot(xfit,yfit, scatter=True, xtwo=np.arange(msarc.shape[0]),
    #    ytwo=wave)
    #debugger.set_trace()
    #wave = arutils.func_val(fit, np.arange(msarc.shape[0])/float(msarc.shape[0]),
    #    'legendre', min=fmin, max=fmax)

    # Pack up fit
    final_fit = dict(fitc=fit,
                     function=aparm['func'],
                     xfit=xfit,
                     yfit=yfit,
                     ions=ions,
                     fmin=fmin,
                     fmax=fmax,
                     xnorm=float(npix),
                     xrej=xrej,
                     yrej=yrej,
                     mask=mask,
                     spec=spec,
                     nrej=aparm['nsig_rej_final'],
                     shift=0.,
                     tcent=tcent,
                     rms=rms_pix)
    # QA
    if plot_fil is not None:
        arqa.arc_fit_qa(None, final_fit, outfil=plot_fil)
    # Return
    return final_fit
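
iterative_fitting assumes numpy is imported as np at module level and expects a line list with 'wave' and 'ion' columns. Below is a minimal invocation sketch with placeholder inputs; every value is illustrative, not from the original code.

import numpy as np
from astropy.table import Table

# Placeholder line list and inputs
llist = Table({'wave': [3650.15, 4046.56, 4358.33, 5460.74, 5769.60],
               'ion':  ['HgI', 'HgI', 'HgI', 'HgI', 'HgI']})
spec = np.load('arc_spectrum.npy')                       # hypothetical 1D arc spectrum
tcent = np.array([103.2, 410.7, 655.3, 892.4, 1503.9])   # detected centroids (pixels)
ifit = np.array([0, 1, 3, 4])                            # centroids with initial IDs
IDs = np.array([3650.15, 4046.56, 5460.74, 5769.60])     # their wavelengths (Ang)
final_fit = iterative_fitting(spec, tcent, ifit, IDs, llist, disp=1.26,
                              plot_fil='arc_fit_qa.pdf', verbose=True, load_pypit=True)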
Example #12
def main(args):

    # List only?
    import os
    from astropy.io import fits
    hdu = fits.open(args.file)
    head0 = hdu[0].header
    if args.list:
        hdu.info()  # .info() prints the summary itself (and returns None)
        return

    # Setup for PYPIT imports
    from pypit import pyputils
    from pypit import armasters
    from pypit.arparse import get_dnum
    from pypit.arspecobj import get_slitid
    from astropy.table import Table
    msgs = pyputils.get_dummy_logger()
    from pypit import ginga as pyp_ginga
    import pdb as debugger

    # Init
    sdet = get_dnum(args.det, prefix=False)

    # One detector, sky sub for now
    names = [hdu[i].name for i in range(len(hdu))]
    try:
        exten = names.index('DET{:s}-SKYSUB'.format(sdet))
    except ValueError:  # Backwards compatibility
        exten = names.index('DET{:d}-SKYSUB'.format(args.det))
    skysub = hdu[exten].data

    # Show Image
    cwd = os.getcwd()
    wcs_img = cwd + '/' + head0[
        'PYPMFDIR'] + '/MasterWave_' + '{:s}_{:02d}_{:s}.fits'.format(
            head0['PYPCNFIG'], args.det, head0['PYPCALIB'])
    viewer, ch = pyp_ginga.show_image(skysub,
                                      chname='DET{:s}'.format(sdet),
                                      wcs_img=wcs_img)

    # Add slits
    testing = False
    if testing:
        mdir = 'MF_keck_lris_blue/'
        setup = 'A_{:s}_aa'.format(sdet)
    else:
        mdir = head0['PYPMFDIR'] + '/'
        setup = '{:s}_{:s}_{:s}'.format(head0['PYPCNFIG'], sdet,
                                        head0['PYPCALIB'])
    trc_file = armasters.master_name('trace', setup, mdir=mdir)
    trc_hdu = fits.open(trc_file)
    lordloc = trc_hdu[1].data  # Should check name
    rordloc = trc_hdu[2].data  # Should check name
    # Get slit ids
    stup = (trc_hdu[0].data.shape, lordloc, rordloc)
    slit_ids = [
        get_slitid(stup, None, ii)[0] for ii in range(lordloc.shape[1])
    ]
    pyp_ginga.show_slits(viewer, ch, lordloc, rordloc, slit_ids)  #, args.det)

    # Object traces
    spec1d_file = args.file.replace('spec2d', 'spec1d')
    hdulist_1d = fits.open(spec1d_file)
    det_nm = 'D{:s}'.format(sdet)
    for hdu in hdulist_1d:
        if det_nm in hdu.name:
            tbl = Table(hdu.data)
            trace = tbl['obj_trace']
            obj_id = hdu.name.split('-')[0]
            pyp_ginga.show_trace(viewer, ch, trace, obj_id,
                                 color='green')  #hdu.name)
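
As in the other scripts, the parser for this main() is not part of the snippet; it reads args.file, args.list and args.det. Below is a hypothetical parser that would drive it, with option names and defaults inferred from the code above.

import argparse

def parser(options=None):
    # Hypothetical parser supplying the attributes main() reads
    p = argparse.ArgumentParser(description="Display a PYPIT spec2d file in ginga")
    p.add_argument("file", type=str, help="PYPIT spec2d FITS file")
    p.add_argument("--list", default=False, action="store_true",
                   help="List the extensions only?")
    p.add_argument("--det", type=int, default=1, help="Detector number")
    return p.parse_args() if options is None else p.parse_args(options)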