Example #1
def test_instr_setup(fitsdict):
    """ Test instrument setup naming convention
    """
    from pypit import arsciexp
    arutils.dummy_settings(spectrograph='shane_kast_blue', set_idx=False)
    # Load
    settings.argflag['run']['setup'] = True  # Override default numbers
    filesort = arsort.sort_data(fitsdict)
    # Match and test
    arsort.match_science(fitsdict, filesort)
    # Make a science frame
    sciexp = arsciexp.ScienceExposure(0, fitsdict, do_qa=False)
    # Get an ID
    setup_dict = {}
    setupID = arsort.instr_setup(sciexp, 1, fitsdict, setup_dict)
    assert setupID == 'A_01_aa'
    # Should get same thing
    setupID = arsort.instr_setup(sciexp, 1, fitsdict, setup_dict)
    assert setupID == 'A_01_aa'
    # New det (fake out kast_blue)
    settings.spect['det02'] = dict(numamplifiers=1)
    setupID2 = arsort.instr_setup(sciexp, 2, fitsdict, setup_dict)
    assert setupID2 == 'A_02_aa'
    # New calib set
    settings.spect['arc']['index'][1] = np.array(
        [9])  # Not really an arc, but ok
    sciexp1 = arsciexp.ScienceExposure(1, fitsdict, do_qa=False)
    setupID3 = arsort.instr_setup(sciexp1, 1, fitsdict, setup_dict)
    assert setupID3 == 'A_01_ab'
    assert setup_dict['A']['ab']['arcs'][0] == 'b009.fits'
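The setup IDs asserted above follow a three-field convention: a configuration letter, a two-digit detector number, and a two-letter calibration-set label joined by underscores ('A_01_ab' = configuration A, detector 1, calibration set ab). A minimal sketch of unpacking such an ID, assuming only that layout (parse_setup_id is a hypothetical helper, not part of PyPIT):

def parse_setup_id(setup_id):
    # 'A_02_aa' -> configuration 'A', detector 2, calibration set 'aa'
    config, det, calib = setup_id.split('_')
    return {'config': config, 'det': int(det), 'calib': calib}

assert parse_setup_id('A_02_aa') == {'config': 'A', 'det': 2, 'calib': 'aa'}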
Example #2
def test_gen_sensfunc():
    # Load a random spectrum for the sensitivity function
    sfile = data_path('spec1d_J0025-0312_KASTr_2015Jan23T025323.85.fits')
    specobjs = arload.load_specobj(sfile)
    # Settings, etc.
    arutils.dummy_settings()
    settings.argflag['run']['spectrograph'] = 'shane_kast_blue'
    settings.argflag['reduce']['masters']['setup'] = 'C_01_aa'
    settings.spect['arc'] = {}
    settings.spect['arc']['index'] = [[0]]
    fitsdict = arutils.dummy_fitsdict()
    slf = arutils.dummy_self()
    slf._msstd[0]['RA'] = '05:06:36.6'
    slf._msstd[0]['DEC'] = '52:52:01.0'
    # Generate
    slf._sensfunc = arflx.generate_sensfunc(slf, 4, [specobjs], fitsdict)
    # Save
    try:
        os.mkdir('MF_shane_kast_blue')
    except FileExistsError:
        pass
    armasters.save_sensfunc(slf, 'C_01_aa')
    # Test
    assert isinstance(slf._sensfunc, dict)
    assert isinstance(slf._sensfunc['wave_min'], Quantity)
Example #3
def test_load_headers():
    arutils.dummy_settings(spectrograph='shane_kast_blue', set_idx=False)
    kast_files = [data_path('b1.fits.gz'), data_path('b27.fits.gz')]
    fitsdict, updates = arl.load_headers(kast_files)
    # Test
    headers = fitsdict['headers']
    assert len(headers) == 2
    assert headers[0][0]['OBJECT'] == 'Arcs'
Example #4
def test_update_masters():
    # Dummy self
    arut.dummy_settings(spectrograph='shane_kast_blue', set_idx=True)
    slf1 = arut.dummy_self()
    slf1._idx_arcs = np.array([0, 1])
    slf2 = arut.dummy_self()
    sciexp = [slf1, slf2]
    #  Not actually filling anything
    armb.UpdateMasters(sciexp, 0, 1, 'arc')
Example #5
def test_user_frametype(fitsdict):
    """ Test setting frametype manually
    """
    arutils.dummy_settings(spectrograph='shane_kast_blue', set_idx=False)
    # Modify settings -- WARNING: THIS IS GLOBAL!
    settings.spect['set'] = {}
    settings.spect['set']['standard'] = ['b009.fits']
    filesort = arsort.sort_data(fitsdict)
    assert 9 in filesort['standard']
    settings.spect['set'] = {}
Example #6
def test_sort_data(fitsdict):
    """ Test sort_data
    """
    arutils.dummy_settings(spectrograph='shane_kast_blue', set_idx=False)
    # Sort
    filesort = arsort.sort_data(fitsdict)
    assert filesort['bias'][0] == 0
    assert filesort['arc'][0] == 1
    assert filesort['trace'][0] == 2
    assert filesort['standard'][0] == 4
    assert len(filesort['science']) == 5
Example #7
def test_neg_match_science(fitsdict):
    """ Test using negative number for calibs
    """
    arutils.dummy_settings(spectrograph='shane_kast_blue', set_idx=False)
    # Load
    filesort = arsort.sort_data(fitsdict)
    # Use negative number
    for ftype in ['arc', 'pixelflat', 'trace', 'bias']:
        settings.spect[ftype]['number'] = 1
    settings.spect['trace']['number'] = -1
    arsort.match_science(fitsdict, filesort)
    assert len(settings.spect['trace']['index'][1]) == 2
Example #8
def test_match_science(fitsdict):
    """ Test match_science routine
    """
    arutils.dummy_settings(spectrograph='shane_kast_blue', set_idx=False)
    # Load
    settings.argflag['run']['setup'] = True  # Override default numbers
    filesort = arsort.sort_data(fitsdict)
    # Match and test
    arsort.match_science(fitsdict, filesort)
    assert settings.spect['arc']['index'][1][0] == 1
    assert settings.spect['standard']['index'][1][0] == 4
    assert len(settings.spect['trace']['index'][0]) == 2
Example #9
def test_save1d_fits():
    """ save1d to FITS and HDF5
    """
    arut.dummy_settings()
    fitsdict = arut.dummy_fitsdict(nfile=10, spectrograph='shane_kast_blue', directory=data_path(''))
    # Dummy self
    slf = arut.dummy_self()
    slf._specobjs = []
    slf._specobjs.append([])
    slf._specobjs[0].append([mk_specobj()])
    # Write to FITS
    arsv.save_1d_spectra_fits(slf, fitsdict)
Example #10
def test_ampsec(fitsdict):
    """ Test sort_data
    """
    arutils.dummy_settings(spectrograph='shane_kast_blue')
    slf = arutils.dummy_self()
    # Run
    det, scidx = 1, 5
    arproc.get_datasec_trimmed(slf, fitsdict, det, scidx)
    # Test
    assert slf._datasec[det - 1].shape == (2112, 2048)
    assert np.sum(np.isclose(slf._datasec[0], 1)) == 2162688  # Data region
    assert np.sum(np.isclose(slf._datasec[0], 2)) == 2162688  # second amp
    assert settings.spect['det01']['oscansec01'] == [[0, 0], [2049, 2080]]
    assert settings.spect['det01']['datasec01'] == [[0, 0], [0, 1024]]
Example #11
def test_flex_shift():
    if not os.getenv('PYPIT'):
        return
    # Dummy slf
    arut.dummy_settings()
    settings.argflag['reduce']['flexure']['maxshift'] = 50
    slf = arut.dummy_self()
    # Read spectra
    obj_spec = lsio.readspec(data_path('obj_lrisb_600_sky.fits'))
    arx_file = pypit.__path__[0] + '/data/sky_spec/sky_LRISb_600.fits'
    arx_spec = lsio.readspec(arx_file)
    # Call
    flex_dict = arwave.flex_shift(slf, 1, obj_spec, arx_spec)
    assert np.abs(flex_dict['shift'] - 43.7) < 0.1
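flex_shift measures the pixel offset between the observed sky spectrum and an archived sky spectrum (here sky_LRISb_600.fits), bounded by the maxshift setting. A schematic, non-PyPIT numpy sketch of the underlying cross-correlation idea, using a single synthetic sky line instead of real data:

import numpy as np

npix = 1000
arx = np.exp(-0.5 * ((np.arange(npix) - 500.) / 3.) ** 2)  # archived sky line at pixel 500
obj = np.roll(arx, 44)                                     # observed sky, shifted by 44 pixels
lags = np.arange(-50, 51)                                  # cf. maxshift = 50 above
cc = np.array([np.sum(obj * np.roll(arx, lag)) for lag in lags])
print(lags[np.argmax(cc)])                                 # recovers the 44-pixel shift

The test above asserts a non-integer shift (43.7 pixels), so the actual routine refines the correlation peak beyond the integer lag shown here.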
Example #12
def test_geocorrect(fitsdict):
    """
    """
    # Initialize some settings
    arutils.dummy_settings(spectrograph='shane_kast_blue')  #, set_idx=False)
    # Load Dummy self
    slf = arutils.dummy_self(fitsdict=fitsdict)
    # Specobjs
    specobjs = arutils.dummy_specobj(fitsdict, extraction=True)
    slf._specobjs[0] = [specobjs]
    # Run
    # vhel = x_keckhelio(106.59770833333332, 30.34736111111111, 2000., jd=2457046.5036, OBS='lick')  9.3166 km/s
    helio, hel_corr = py_arwave.geomotion_correct(slf, 1, fitsdict)
    assert np.isclose(helio, -9.3344957,
                      rtol=1e-5)  # Checked against x_keckhelio
    assert np.isclose(slf._specobjs[0][0][0].boxcar['wave'][0].value,
                      3999.8754558341816,
                      rtol=1e-8)
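The hel_corr returned by geomotion_correct is the multiplicative factor applied to the observed wavelengths, i.e. wave * (1 + v_helio / c) for heliocentric velocity v_helio. A quick numerical check of the values asserted above (schematic, outside PyPIT; the dummy spectrum is assumed to start near 4000 Angstroms):

import numpy as np
from astropy import constants as const

v_helio = -9.3344957e3                              # m/s, the velocity asserted above (km/s in PyPIT)
wave = 4000.0                                       # Angstroms, approximate first boxcar wavelength
wave_corr = wave * (1.0 + v_helio / const.c.value)
assert np.isclose(wave_corr, 3999.8754558341816, rtol=1e-7)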
Example #13
def test_setup_param():
    """ Run the parameter setup script
    Returns
    -------

    """
    # Initialize some settings
    arut.dummy_settings()
    # Load Dummy self
    slf = arut.dummy_self()
    settings.argflag['run']['spectrograph'] = 'shane_kast_blue'
    settings.spect['arc'] = {}
    settings.spect['arc']['index'] = [[0]]
    fitsdict = arut.dummy_fitsdict()
    # Run
    arcparm = pyarc.setup_param(slf, 0, 1, fitsdict)
    for key in ['llist', 'disp', 'wvmnx']:
        assert key in arcparm
Example #14
def test_load_linelist():
    """ Touches nearly all the methods in ararclines
    Returns
    -------

    """
    # Init
    arutils.dummy_settings()
    # Load
    alist = alines.load_arcline_list(None,
                                     None, ['CuI', 'ArI', 'NeI'],
                                     None,
                                     modify_parse_dict=dict(
                                         NeI={'min_wave': 3000.},
                                         ArI={'min_intensity': 399.}))
    # Min NeI
    NeI = alist['Ion'] == 'NeI'
    np.testing.assert_allclose(np.min(alist['wave'][NeI]), 3455.1837999999998)
Example #15
def test_save2d_fits():
    arut.dummy_settings()
    # Dummy self
    slf = arut.dummy_self()
    fitsdict = arut.dummy_fitsdict(nfile=1, spectrograph='none', directory=data_path(''))
    fitsdict['filename'] = np.array(['b1.fits.gz'])
    # Settings
    settings.argflag['run']['directory']['science'] = data_path('')
    settings.argflag['reduce']['masters']['setup'] = 'A_01_aa'
    # Fill with dummy images
    dum = np.ones((100,100))
    slf._sciframe[0] = dum
    slf._modelvarframe[0] = dum * 2
    slf._bgframe[0] = dum + 0.1
    slf._basename = 'test'
    slf._idx_sci[0] = 0
    # Call
    arsv.save_2d_images(slf, fitsdict)
    # Read and test
    head0 = pyfits.getheader(data_path('spec2d_test.fits'))
    assert head0['PYPCNFIG'] == 'A'
    assert head0['PYPCALIB'] == 'aa'
    assert 'PYPIT' in head0['PIPELINE']
Example #16
def generate_hdf(sav_file, instr, lamps, outfil, dtoler=0.6):
    """ Given an input LR IDL save file, generate an hdf5
    IDs arc lines too

    Parameters
    ----------
    sav_file : str
      Root name of the IDL save file from LowRedux, e.g. lris_blue_600.sav
    lamps
    outfil

    Returns
    -------

    """
    from pypit import pyputils
    msgs = pyputils.get_dummy_logger()

    from pypit import arwave
    from pypit import arutils
    arutils.dummy_settings()
    #
    from arclines.pypit_utils import find_peaks
    from arclines.io import load_line_lists
    #

    # Read IDL save file
    sav_file = os.getenv('LONGSLIT_DIR') + 'calib/linelists/' + sav_file
    s = readsav(sav_file)
    ctbl = Table(s['calib'])  # For writing later

    # Line list
    alist = load_line_lists(lamps)

    # One spectrum?
    ashape = s['archive_arc'].shape
    if len(ashape) == 1:
        nspec = 1
        npix = ashape[0]
    else:
        nspec = s['archive_arc'].shape[0]
        npix = ashape[1]

    # Meta data
    mdict = dict(
        npix=npix,
        instr=instr,
        lamps=[str(ilamp) for ilamp in lamps],  # For writing to hdf5
        nspec=nspec,
        infil=sav_file,
        IDairvac='vac')
    print("Processing {:d} spectra in {:s}".format(mdict['nspec'], sav_file))

    # Start output
    outh5 = h5py.File(out_path + outfil, 'w')
    outh5.create_group('arcs')

    # Loop on spectra
    for ss in range(mdict['nspec']):
        sss = str(ss)
        # Parse
        if nspec == 1:
            spec = s['archive_arc']
        else:
            spec = s['archive_arc'][ss]
        calib = s['calib'][ss]
        # Peaks
        tampl, tcent, twid, w, yprep = find_peaks(spec)
        pixpk = tcent[w]
        pixampl = tampl[w]

        # Wavelength solution
        if calib['func'] == 'CHEBY':
            wv_air = cheby_val(calib['ffit'], np.arange(mdict['npix']),
                               calib['nrm'], calib['nord'])
        elif calib['func'] == 'POLY':
            wv_air = poly_val(calib['ffit'], np.arange(mdict['npix']),
                              calib['nrm'])
        # Check blue->red or vice-versa
        if ss == 0:
            if wv_air[0] > wv_air[-1]:
                mdict['bluered'] = False
            else:
                mdict['bluered'] = True

        # Peak waves
        if calib['func'] == 'CHEBY':
            twave_air = cheby_val(calib['ffit'], pixpk, calib['nrm'],
                                  calib['nord'])
        else:
            twave_air = poly_val(calib['ffit'], pixpk, calib['nrm'])
        # Air to Vac
        twave_vac = arwave.airtovac(twave_air * u.AA)
        wave_vac = arwave.airtovac(wv_air * u.AA)
        if ss == 0:
            disp = np.median(np.abs(wave_vac - np.roll(wave_vac, 1)))
            print("Average dispersion = {:g}".format(disp))
        # IDs
        idwv = np.zeros_like(pixpk)
        idsion = np.array([str('12345')] * len(pixpk))
        for kk, twv in enumerate(twave_vac.value):
            # diff
            diff = np.abs(twv - alist['wave'])
            if np.min(diff) < dtoler:
                imin = np.argmin(diff)
                idwv[kk] = alist['wave'][imin]
                #idsion[kk] = alist['Ion'][imin]  NIST
                idsion[kk] = alist['ion'][imin]
        # Red to blue?
        if mdict['bluered'] is False:
            pixpk = mdict['npix'] - 1 - pixpk
            # Re-sort
            asrt = np.argsort(pixpk)
            pixpk = pixpk[asrt]
            idwv = idwv[asrt]
            # Reverse
            spec = spec[::-1]
            wave_vac = wave_vac[::-1]
        # Output
        outh5['arcs'].create_group(sss)
        # Datasets
        outh5['arcs'][sss]['wave'] = wave_vac
        outh5['arcs'][sss]['wave'].attrs['airvac'] = 'vac'
        outh5['arcs'][sss]['spec'] = spec
        outh5['arcs'][sss]['spec'].attrs['flux'] = 'counts'
        outh5['arcs'][sss]['pixpk'] = pixpk
        outh5['arcs'][sss]['ID'] = idwv
        outh5['arcs'][sss]['ID'].attrs['airvac'] = 'vac'
        outh5['arcs'][sss]['Ion'] = idsion
        # LR wavelengths
        outh5['arcs'][sss]['LR_wave'] = wv_air
        outh5['arcs'][sss]['LR_wave'].attrs['airvac'] = 'air'
        # LR Fit
        outh5['arcs'][sss].create_group('LR_fit')
        for key in ctbl.keys():
            outh5['arcs'][sss]['LR_fit'][key] = ctbl[ss][key]

    # Meta data
    outh5.create_group('meta')
    for key in mdict.keys():
        try:
            outh5['meta'][key] = mdict[key]
        except TypeError:  # Probably a unicode thing
            pdb.set_trace()
    # Close
    outh5.close()
    print('Wrote {:s}'.format(out_path + outfil))
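A hypothetical call, assuming LONGSLIT_DIR points at a LowRedux installation and that the names generate_hdf uses from module scope (out_path, cheby_val, poly_val, h5py, readsav, Table, u, np) are defined; the instrument label, lamp list and output name below are placeholders:

# Illustrative only -- arguments are placeholders, not a tested configuration.
generate_hdf('lris_blue_600.sav', 'LRISb', ['CdI', 'ZnI', 'HgI'], 'LRISb_600.hdf5',
             dtoler=0.6)

The resulting file holds one group per spectrum under 'arcs' (vacuum wavelengths, flux, peak pixels, line IDs and the LowRedux fit) plus a 'meta' group.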
Example #17
def iterative_fitting(spec,
                      tcent,
                      ifit,
                      IDs,
                      llist,
                      disp,
                      plot_fil=None,
                      verbose=False,
                      load_pypit=False,
                      aparm=None):

    if aparm is None:
        aparm = dict(
            llist='',
            disp=disp,  # Ang/unbinned pixel
            disp_toler=0.1,  # 10% tolerance
            match_toler=3.,  # Matching tolerance (pixels)
            func='legendre',  # Function for fitting
            n_first=3,  # Order of polynomial for first fit
            n_final=4,  # Order of polynomial for final fit
            nsig_rej=2.,  # Number of sigma for rejection
            nsig_rej_final=3.0)  # Number of sigma for rejection (final fit)
    # PYPIT
    if load_pypit:
        from pypit import pyputils
        msgs = pyputils.get_dummy_logger()
    from pypit import arutils
    from pypit import arqa
    if load_pypit:
        arutils.dummy_settings()

    npix = spec.size

    # Setup for fitting
    sv_ifit = list(ifit)  # Keep the originals
    all_ids = -999. * np.ones(len(tcent))
    all_idsion = np.array(['UNKNWN'] * len(tcent))
    all_ids[ifit] = IDs

    # Fit
    n_order = aparm['n_first']
    flg_quit = False
    fmin, fmax = -1., 1.
    while (n_order <= aparm['n_final']) and (flg_quit is False):
        # Fit with rejection
        xfit, yfit = tcent[ifit], all_ids[ifit]
        mask, fit = arutils.robust_polyfit(xfit,
                                           yfit,
                                           n_order,
                                           function=aparm['func'],
                                           sigma=aparm['nsig_rej'],
                                           minv=fmin,
                                           maxv=fmax)

        rms_ang = arutils.calc_fit_rms(xfit[mask == 0],
                                       yfit[mask == 0],
                                       fit,
                                       aparm['func'],
                                       minv=fmin,
                                       maxv=fmax)
        rms_pix = rms_ang / disp
        if verbose:
            print("RMS = {:g}".format(rms_pix))
        # DEBUG
        # Reject but keep originals (until final fit)
        ifit = list(ifit[mask == 0]) + sv_ifit
        # Find new points (should we allow removal of the originals?)
        twave = arutils.func_val(fit,
                                 tcent,
                                 aparm['func'],
                                 minv=fmin,
                                 maxv=fmax)
        for ss, iwave in enumerate(twave):
            mn = np.min(np.abs(iwave - llist['wave']))
            if mn / aparm['disp'] < aparm['match_toler']:
                imn = np.argmin(np.abs(iwave - llist['wave']))
                #if verbose:
                #    print('Adding {:g} at {:g}'.format(llist['wave'][imn],tcent[ss]))
                # Update and append
                all_ids[ss] = llist['wave'][imn]
                all_idsion[ss] = llist['ion'][imn]
                ifit.append(ss)
        # Keep unique ones
        ifit = np.unique(np.array(ifit, dtype=int))
        # Increment order
        if n_order < (aparm['n_final'] + 2):
            n_order += 1
        else:
            # This does 2 iterations at the final order
            flg_quit = True

    # Final fit (originals can now be rejected)
    fmin, fmax = 0., 1.
    xfit, yfit = tcent[ifit] / (npix - 1), all_ids[ifit]
    mask, fit = arutils.robust_polyfit(xfit,
                                       yfit,
                                       n_order,
                                       function=aparm['func'],
                                       sigma=aparm['nsig_rej_final'],
                                       minv=fmin,
                                       maxv=fmax)  #, debug=True)
    irej = np.where(mask == 1)[0]
    if len(irej) > 0:
        xrej = xfit[irej]
        yrej = yfit[irej]
        if verbose:
            for kk, imask in enumerate(irej):
                wave = arutils.func_val(fit,
                                        xrej[kk],
                                        aparm['func'],
                                        minv=fmin,
                                        maxv=fmax)
                print('Rejecting arc line {:g}; {:g}'.format(
                    yfit[imask], wave))
    else:
        xrej = []
        yrej = []
    xfit = xfit[mask == 0]
    yfit = yfit[mask == 0]
    ions = all_idsion[ifit][mask == 0]
    # Final RMS
    rms_ang = arutils.calc_fit_rms(xfit,
                                   yfit,
                                   fit,
                                   aparm['func'],
                                   minv=fmin,
                                   maxv=fmax)
    rms_pix = rms_ang / disp
    #
    '''
    if msgs._debug['arc']:
        msarc = slf._msarc[det-1]
        wave = arutils.func_val(fit, np.arange(msarc.shape[0])/float(msarc.shape[0]),
            'legendre', minv=fmin, maxv=fmax)
        debugger.xplot(xfit,yfit, scatter=True,
            xtwo=np.arange(msarc.shape[0])/float(msarc.shape[0]),
            ytwo=wave)
        debugger.xpcol(xfit*msarc.shape[0], yfit)
        debugger.set_trace()
    '''

    #debugger.xplot(xfit, np.ones(len(xfit)), scatter=True,
    #    xtwo=np.arange(msarc.shape[0]),ytwo=yprep)
    #debugger.xplot(xfit,yfit, scatter=True, xtwo=np.arange(msarc.shape[0]),
    #    ytwo=wave)
    #debugger.set_trace()
    #wave = arutils.func_val(fit, np.arange(msarc.shape[0])/float(msarc.shape[0]),
    #    'legendre', min=fmin, max=fmax)

    # Pack up fit
    final_fit = dict(fitc=fit,
                     function=aparm['func'],
                     xfit=xfit,
                     yfit=yfit,
                     ions=ions,
                     fmin=fmin,
                     fmax=fmax,
                     xnorm=float(npix),
                     xrej=xrej,
                     yrej=yrej,
                     mask=mask,
                     spec=spec,
                     nrej=aparm['nsig_rej_final'],
                     shift=0.,
                     tcent=tcent,
                     rms=rms_pix)
    # QA
    if plot_fil is not None:
        arqa.arc_fit_qa(None, final_fit, outfil=plot_fil)
    # Return
    return final_fit
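A minimal sketch of calling iterative_fitting with synthetic inputs, just to exercise the code path: it assumes pypit (with arutils and arqa) is importable and fabricates a linear pixel-to-wavelength relation rather than using real arc data, so the resulting fit is not physically meaningful:

import numpy as np

rng = np.random.RandomState(0)
npix = 2048
spec = rng.rand(npix)                            # stand-in arc spectrum (only stored in the output dict)
tcent = np.linspace(100., 1900., 20)             # fake line centroids (pixels)
disp = 1.0                                       # Ang per pixel
wave_true = 4000. + disp * tcent                 # fabricated linear solution
llist = {'wave': wave_true + rng.normal(0., 0.05, tcent.size),  # small jitter so rejection behaves sanely
         'ion': np.array(['ArI'] * tcent.size)}
ifit = np.arange(5)                              # indices of the initially identified lines
IDs = llist['wave'][ifit]                        # their wavelengths

final_fit = iterative_fitting(spec, tcent, ifit, IDs, llist, disp,
                              verbose=True, load_pypit=True)
print(final_fit['rms'])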