Example #1
def test_update_masters():
    # Dummy self
    arut.dummy_settings(spectrograph='shane_kast_blue', set_idx=True)
    slf1 = arut.dummy_self()
    slf1._idx_arcs = np.array([0, 1])
    slf2 = arut.dummy_self()
    sciexp = [slf1, slf2]
    #  Not actually filling anything
    armb.UpdateMasters(sciexp, 0, 1, 'arc')
Example #2
def test_gen_sensfunc():
    # Load a random spectrum for the sensitivity function
    sfile = data_path('spec1d_J0025-0312_KASTr_2015Jan23T025323.85.fits')
    specobjs = arload.load_specobj(sfile)
    # Settings, etc.
    arutils.dummy_settings()
    settings.argflag['run']['spectrograph'] = 'shane_kast_blue'
    settings.argflag['reduce']['masters']['setup'] = 'C_01_aa'
    settings.spect['arc'] = {}
    settings.spect['arc']['index'] = [[0]]
    fitsdict = arutils.dummy_fitsdict()
    slf = arutils.dummy_self()
    slf._msstd[0]['RA'] = '05:06:36.6'
    slf._msstd[0]['DEC'] = '52:52:01.0'
    # Generate
    slf._sensfunc = arflx.generate_sensfunc(slf, 4, [specobjs], fitsdict)
    # Save
    try:
        os.mkdir('MF_shane_kast_blue')
    except FileExistsError:
        pass
    armasters.save_sensfunc(slf, 'C_01_aa')
    # Test
    assert isinstance(slf._sensfunc, dict)
    assert isinstance(slf._sensfunc['wave_min'], Quantity)
Example #3
def load_arcline_list(slf, idx, lines, disperser, wvmnx=None, modify_parse_dict=None):
    """Loads arc line list from NIST files
    Parses and rejects

    Parameters
    ----------
    idx : list  
      indices of the arc
    lines : list
      List of ions to load
    wvmnx : list or tuple
      wvmin, wvmax for line list
    disperser : str
      Name of the disperser
    modify_parse_dict : dict, optional
      Used to over-ride default settings of parse_dict

    Returns
    -------
    alist : Table
      Table of arc lines
    """
    # Get the parse dict
    parse_dict = load_parse_dict(modify_dict=modify_parse_dict)
    # Read rejection file
    if slf is None:
        from pypit import arutils as arut
        msgs.warn("Using arutils.dummy_self.  Better know what you are doing.")
        slf = arut.dummy_self()
    root = slf._argflag['run']['pypitdir']
    with open(root+'/data/arc_lines/rejected_lines.yaml', 'r') as infile:
        rej_dict = yaml.safe_load(infile)
    # Loop through the NIST Tables
    tbls = []
    for iline in lines:
        # Load
        tbl = parse_nist(slf, iline)
        # Parse
        if iline in parse_dict.keys():
            tbl = parse_nist_tbl(tbl, parse_dict[iline])
        # Reject
        if iline in rej_dict.keys():
            msgs.info("Rejecting select {:s} lines".format(iline))
            tbl = reject_lines(slf, tbl, idx, rej_dict[iline], disperser)
        tbls.append(tbl[['Ion', 'wave', 'RelInt']])
    # Stack
    alist = vstack(tbls)

    # wvmnx?
    if wvmnx is not None:
        msgs.info('Cutting down line list by wvmnx: {:g},{:g}'.format(wvmnx[0],wvmnx[1]))
        gdwv = (alist['wave'] >= wvmnx[0]) & (alist['wave'] <= wvmnx[1])
        alist = alist[gdwv]
    # Return
    return alist
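
For orientation, here is a minimal usage sketch of load_arcline_list. The module path (pypit.ararclines), the ion names, and the wvmnx cut are illustrative assumptions rather than values taken from the code above; passing slf=None exercises the arutils.dummy_self() fallback in the function body, and the disperser string is borrowed from the test fixtures further below.

# Usage sketch only -- module path, ions, and wavelength cut are assumed, not from the source
from pypit import arutils, ararclines

arutils.dummy_settings(spectrograph='shane_kast_blue')   # initialize settings as the tests do
# slf=None triggers the arutils.dummy_self() fallback inside load_arcline_list
alist = ararclines.load_arcline_list(None, [0], ['HgI', 'NeI'], '600/4310',
                                     wvmnx=(3200., 5500.))
print(alist[['Ion', 'wave', 'RelInt']])
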
Example #4
def parse_nist(slf, ion):
    """Parse a NIST ASCII table.  Note that the long ---- should have
    been commented out and also the few lines at the start.

    Parameters
    ----------
    ion : str
      Name of ion
    """
    # Root (for development only)
    if slf is None:
        from pypit import arutils as arut
        msgs.warn("Using arutils.dummy_self.  Better know what you are doing.")
        slf = arut.dummy_self()
    root = settings.argflag['run']['pypitdir']
    # Find file
    srch_file = root + '/data/arc_lines/NIST/' + ion + '_vacuum.ascii'
    nist_file = glob.glob(srch_file)
    if len(nist_file) == 0:
        msgs.error("Cannot find NIST file {:s}".format(srch_file))
    elif len(nist_file) != 1:
        msgs.error("Multiple NIST files for {:s}".format(srch_file))
    # Read
    nist_tbl = Table.read(nist_file[0],
                          format='ascii.fixed_width',
                          comment='#')
    gdrow = nist_tbl['Observed'] > 0.  # Eliminate dummy lines
    nist_tbl = nist_tbl[gdrow]
    # Now unique values only (no duplicates)
    uniq, indices = np.unique(nist_tbl['Observed'], return_index=True)
    nist_tbl = nist_tbl[indices]
    # Deal with Rel
    agdrel = []
    for row in nist_tbl:
        try:
            gdrel = int(row['Rel.'])
        except (ValueError, TypeError):
            try:
                gdrel = int(row['Rel.'][:-1])
            except (ValueError, TypeError):
                gdrel = 0
        agdrel.append(gdrel)
    agdrel = np.array(agdrel)
    # Remove and add
    nist_tbl.remove_column('Rel.')
    nist_tbl.remove_column('Ritz')
    nist_tbl.add_column(Column(agdrel, name='RelInt'))
    nist_tbl.remove_column('Acc.')
    nist_tbl.remove_column('Type')
    #nist_tbl.add_column(Column([ion]*len(nist_tbl), name='Ion', dtype='S5'))
    nist_tbl.add_column(Column([ion] * len(nist_tbl), name='Ion', dtype='U5'))
    nist_tbl.rename_column('Observed', 'wave')
    # Return
    return nist_tbl
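
Similarly, a short sketch of calling parse_nist on its own. The ion name 'HeI' and the pypit.ararclines module path are assumptions for illustration; the call expects a matching NIST table (e.g. HeI_vacuum.ascii) under data/arc_lines/NIST/ in the PYPIT data directory.

# Usage sketch only -- ion name and module path are assumed, not from the source
from pypit import arutils, ararclines

arutils.dummy_settings()                       # so settings.argflag['run']['pypitdir'] resolves
nist_tbl = ararclines.parse_nist(None, 'HeI')  # slf=None falls back to arutils.dummy_self()
# After parsing, the table carries 'wave', 'RelInt', and 'Ion' columns
print(nist_tbl[['wave', 'RelInt', 'Ion']][:5])
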
Example #5
def test_save1d_fits():
    """ save1d to FITS and HDF5
    """
    arut.dummy_settings()
    fitsdict = arut.dummy_fitsdict(nfile=10, spectrograph='shane_kast_blue', directory=data_path(''))
    # Dummy self
    slf = arut.dummy_self()
    slf._specobjs = []
    slf._specobjs.append([])
    slf._specobjs[0].append([mk_specobj()])
    # Write to FITS
    arsv.save_1d_spectra_fits(slf, fitsdict)
Example #6
def test_save1d_hdf5():
    """ save1d to FITS and HDF5
    """
    # Dummy self
    slf = arut.dummy_self()
    fitsdict = arut.dummy_fitsdict(nfile=1, spectrograph='none')
    # specobj
    slf._specobjs = []
    slf._specobjs.append([])
    slf._specobjs[0].append([mk_specobj(objid=455), mk_specobj(flux=3., objid=555)])
    # Write to HDF5
    arsv.save_1d_spectra_hdf5(slf, fitsdict)
Example #7
def test_instr_config():
    from pypit import arutils as arut
    from pypit import armlsd
    # Dummy self
    slf = arut.dummy_self()
    # Grab
    fits_dict = {'slitwid': [0.5], 'dichroic': ['d55'],
                 'disperser': ['B600/400'], 'cdangle': [11000.]}
    det, scidx = 1, 0
    #
    config = armlsd.instconfig(slf, det, scidx, fits_dict)
    # Test
    assert config == 'S05-D55-G600400-T110000-B11'
Example #8
def parse_nist(slf, ion):
    """Parse a NIST ASCII table.  Note that the long row of dashes and the
    few header lines at the start should have been commented out.

    Parameters
    ----------
    slf : class or None
      Exposure class; if None, arutils.dummy_self() is used
    ion : str
      Name of the ion

    Returns
    -------
    nist_tbl : Table
      Table of NIST lines with 'wave', 'RelInt', and 'Ion' columns
    """
    # Root (for development only)
    if slf is None:
        from pypit import arutils as arut
        msgs.warn("Using arutils.dummy_self.  Better know what you are doing.")
        slf = arut.dummy_self()
    root = slf._argflag['run']['pypitdir']
    # Find file
    srch_file = root + '/data/arc_lines/NIST/'+ion+'_vacuum.ascii'
    nist_file = glob.glob(srch_file)
    if len(nist_file) == 0:
        msgs.error("Cannot find NIST file {:s}".format(srch_file))
    elif len(nist_file) != 1:
        msgs.error("Multiple NIST files for {:s}".format(srch_file))
    # Read
    nist_tbl = Table.read(nist_file[0], format='ascii.fixed_width')
    gdrow = nist_tbl['Observed'] > 0.  # Eliminate dummy lines
    nist_tbl = nist_tbl[gdrow]
    # Now unique values only (no duplicates)
    uniq, indices = np.unique(nist_tbl['Observed'],return_index=True)
    nist_tbl = nist_tbl[indices]
    # Deal with Rel
    agdrel = []
    for row in nist_tbl:
        try:
            gdrel = int(row['Rel.'])
        except (ValueError, TypeError):
            try:
                gdrel = int(row['Rel.'][:-1])
            except (ValueError, TypeError):
                gdrel = 0
        agdrel.append(gdrel)
    agdrel = np.array(agdrel)
    # Remove and add
    nist_tbl.remove_column('Rel.')
    nist_tbl.remove_column('Ritz')
    nist_tbl.add_column(Column(agdrel, name='RelInt'))
    #nist_tbl.add_column(Column([ion]*len(nist_tbl), name='Ion', dtype='S5'))
    nist_tbl.add_column(Column([ion] * len(nist_tbl), name='Ion', dtype='U5'))
    nist_tbl.rename_column('Observed', 'wave')
    # Return
    return nist_tbl
Example #9
def test_ampsec(fitsdict):
    """ Test sort_data
    """
    arutils.dummy_settings(spectrograph='shane_kast_blue')
    slf = arutils.dummy_self()
    # Run
    det, scidx = 1, 5
    arproc.get_datasec_trimmed(slf, fitsdict, det, scidx)
    # Test
    assert slf._datasec[det - 1].shape == (2112, 2048)
    assert np.sum(np.isclose(slf._datasec[0], 1)) == 2162688  # Data region
    assert np.sum(np.isclose(slf._datasec[0], 2)) == 2162688  # second amp
    assert settings.spect['det01']['oscansec01'] == [[0, 0], [2049, 2080]]
    assert settings.spect['det01']['datasec01'] == [[0, 0], [0, 1024]]
Example #10
def test_extinction_correction():
    from pypit import arflux as arflx
    from pypit import arutils as arut
    # Dummy self
    slf = arut.dummy_self()
    slf._spect['mosaic']['latitude'] = 37.3413889
    slf._spect['mosaic']['longitude'] = 121.6428
    # Load
    extinct = arflx.load_extinction_data(slf)
    # Correction
    wave = np.arange(3000., 10000.) * u.AA
    AM = 1.5
    flux_corr = arflx.extinction_correction(wave, AM, extinct)
    # Test
    np.testing.assert_allclose(flux_corr[0], 4.47095192)
Example #11
def test_flex_shift():
    if not os.getenv('PYPIT'):
        pass
    else:
        # Dummy slf
        arut.dummy_settings()
        settings.argflag['reduce']['flexure']['maxshift'] = 50
        slf = arut.dummy_self()
        # Read spectra
        obj_spec = lsio.readspec(data_path('obj_lrisb_600_sky.fits'))
        arx_file = pypit.__path__[0]+'/data/sky_spec/sky_LRISb_600.fits'
        arx_spec = lsio.readspec(arx_file)
        # Call
        #msgs._debug['flexure'] = True
        flex_dict = arwave.flex_shift(slf, 1, obj_spec, arx_spec)
        assert np.abs(flex_dict['shift'] - 43.7) < 0.1
Example #12
def test_detect_lines():
    if os.getenv('RUN_ALL_PYPIT_TESTS') is None:  # REMOVE WHEN WORKING WITH Python 3
        assert True
        return
    from linetools.spectra import xspectrum1d
    import pypit
    slf = arut.dummy_self()
    det = 1
    # Using Paranal night sky as an 'arc'
    arx_sky = xspectrum1d.XSpectrum1D.from_file(
        pypit.__path__[0] + '/data/sky_spec/paranal_sky.fits')
    arx_amp, arx_cent, arx_wid, arx_w, arx_satsnd, arx_yprep = pyarc.detect_lines(
        slf, det, msarc=None, censpec=arx_sky.flux.value, MK_SATMASK=False)
    # Test
    assert len(arx_w[0]) == 1767
Example #13
def test_extinction_correction():
    from pypit import arflux as arflx
    from pypit import arutils as arut

    # Dummy self
    slf = arut.dummy_self()
    slf._spect["mosaic"]["latitude"] = 37.3413889
    slf._spect["mosaic"]["longitude"] = 121.6428
    # Load
    extinct = arflx.load_extinction_data(slf)
    # Correction
    wave = np.arange(3000.0, 10000.0) * u.AA
    AM = 1.5
    flux_corr = arflx.extinction_correction(wave, AM, extinct)
    # Test
    np.testing.assert_allclose(flux_corr[0], 4.47095192)
Example #14
def test_instr_config():
    from pypit import arutils as arut
    from pypit import armlsd
    # Dummy self
    slf = arut.dummy_self()
    # Grab
    fits_dict = {
        'slitwid': [0.5],
        'dichroic': ['d55'],
        'disperser': ['B600/400'],
        'cdangle': [11000.]
    }
    det, scidx = 1, 0
    #
    config = armlsd.instconfig(slf, det, scidx, fits_dict)
    # Test
    assert config == 'S05-D55-G600400-T110000-B11'
Example #15
def test_setup_param():
    """ Run the parameter setup script
    Returns
    -------

    """
    # Initialize some settings
    arut.dummy_settings()
    # Load Dummy self
    slf = arut.dummy_self()
    settings.argflag['run']['spectrograph'] = 'shane_kast_blue'
    settings.spect['arc'] = {}
    settings.spect['arc']['index'] = [[0]]
    fitsdict = arut.dummy_fitsdict()
    # Run
    arcparm = pyarc.setup_param(slf, 0, 1, fitsdict)
    for key in ['llist', 'disp', 'wvmnx']:
        assert key in arcparm
Example #16
def test_geocorrect(fitsdict):
    """
    """
    # Initialize some settings
    arutils.dummy_settings(spectrograph='shane_kast_blue')  #, set_idx=False)
    # Load Dummy self
    slf = arutils.dummy_self(fitsdict=fitsdict)
    # Specobjs
    specobjs = arutils.dummy_specobj(fitsdict, extraction=True)
    slf._specobjs[0] = [specobjs]
    # Run
    # vhel = x_keckhelio(106.59770833333332, 30.34736111111111, 2000., jd=2457046.5036, OBS='lick')  9.3166 km/s
    helio, hel_corr = py_arwave.geomotion_correct(slf, 1, fitsdict)
    assert np.isclose(helio, -9.3344957,
                      rtol=1e-5)  # Checked against x_keckhelio
    assert np.isclose(slf._specobjs[0][0][0].boxcar['wave'][0].value,
                      3999.8754558341816,
                      rtol=1e-8)
Example #17
def test_load_extinction():
    from pypit import arflux as arflx
    from pypit import arutils as arut
    # Dummy self
    slf = arut.dummy_self()
    slf._spect['mosaic']['latitude'] = 37.3413889
    slf._spect['mosaic']['longitude'] = 121.6428
    # Load
    extinct = arflx.load_extinction_data(slf)
    np.testing.assert_allclose(extinct['wave'][0], 3200.)
    assert extinct['wave'].unit == u.AA
    np.testing.assert_allclose(extinct['mag_ext'][0], 1.084)
    # Fail
    slf._spect['mosaic']['latitude'] = 37.3413889
    slf._spect['mosaic']['longitude'] = 0.
    #
    extinct = arflx.load_extinction_data(slf)
    assert extinct is None
Example #18
def test_load_extinction():
    from pypit import arflux as arflx
    from pypit import arutils as arut

    # Dummy self
    slf = arut.dummy_self()
    slf._spect["mosaic"]["latitude"] = 37.3413889
    slf._spect["mosaic"]["longitude"] = 121.6428
    # Load
    extinct = arflx.load_extinction_data(slf)
    np.testing.assert_allclose(extinct["wave"][0], 3200.0)
    assert extinct["wave"].unit == u.AA
    np.testing.assert_allclose(extinct["mag_ext"][0], 1.084)
    # Fail
    slf._spect["mosaic"]["latitude"] = 37.3413889
    slf._spect["mosaic"]["longitude"] = 0.0
    #
    extinct = arflx.load_extinction_data(slf)
    assert extinct is None
Example #19
def test_setup_param():
    """ Run the parameter setup script
    Returns
    -------

    """
    # Load
    # Dummy self
    slf = arut.dummy_self()
    slf._argflag['run']['spectrograph'] = 'kast_blue'
    slf._spect['arc'] = {}
    slf._spect['arc']['index'] = [[0]]
    fitsdict = {}
    fitsdict["disperser"] = ['600/4310']
    fitsdict["binning"] = [[None]]
    # Run
    arcparm = pyarc.setup_param(slf, 0, 1, fitsdict)
    for key in ['llist','disp','wvmnx']:
        assert key in arcparm
Example #20
def test_find_standard():
    from pypit import arutils as arut
    from pypit import arflux as arflx
    # Dummy self
    slf = arut.dummy_self()
    # G191b2b
    std_ra = '05:06:36.6'
    std_dec = '52:52:01.0'
    # Grab
    std_dict = arflx.find_standard_file(slf._argflag, (std_ra, std_dec))
    # Test
    assert std_dict['name'] == 'G191B2B'
    assert std_dict['file'] == '/data/standards/calspec/g191b2b_mod_005.fits'
    assert std_dict['fmt'] == 1
    # Fail to find
    # near G191b2b
    std_ra = '05:06:36.6'
    std_dec = '52:22:01.0'
    std_dict = arflx.find_standard_file(slf._argflag, (std_ra, std_dec))
    assert std_dict is None
Example #21
def test_find_standard():
    from pypit import arutils as arut
    from pypit import arflux as arflx

    # Dummy self
    slf = arut.dummy_self()
    # G191b2b
    std_ra = "05:06:36.6"
    std_dec = "52:52:01.0"
    # Grab
    std_dict = arflx.find_standard_file(slf._argflag, (std_ra, std_dec))
    # Test
    assert std_dict["name"] == "G191B2B"
    assert std_dict["file"] == "/data/standards/calspec/g191b2b_mod_005.fits"
    assert std_dict["fmt"] == 1
    # Fail to find
    # near G191b2b
    std_ra = "05:06:36.6"
    std_dec = "52:22:01.0"
    std_dict = arflx.find_standard_file(slf._argflag, (std_ra, std_dec))
    assert std_dict is None
Example #22
def test_save2d_fits():
    arut.dummy_settings()
    # Dummy self
    slf = arut.dummy_self()
    fitsdict = arut.dummy_fitsdict(nfile=1, spectrograph='none', directory=data_path(''))
    fitsdict['filename'] = np.array(['b1.fits.gz'])
    # Settings
    settings.argflag['run']['directory']['science'] = data_path('')
    settings.argflag['reduce']['masters']['setup'] = 'A_01_aa'
    # Fill with dummy images
    dum = np.ones((100,100))
    slf._sciframe[0] = dum
    slf._modelvarframe[0] = dum * 2
    slf._bgframe[0] = dum + 0.1
    slf._basename = 'test'
    slf._idx_sci[0] = 0
    # Call
    arsv.save_2d_images(slf, fitsdict)
    # Read and test
    head0 = pyfits.getheader(data_path('spec2d_test.fits'))
    assert head0['PYPCNFIG'] == 'A'
    assert head0['PYPCALIB'] == 'aa'
    assert 'PYPIT' in head0['PIPELINE']
Example #23
def load_arcline_list(slf,
                      idx,
                      lines,
                      disperser,
                      wvmnx=None,
                      modify_parse_dict=None):
    """Loads arc line list from NIST files
    Parses and rejects

    Parameters
    ----------
    idx : list  
      indices of the arc
    lines : list
      List of ions to load
    wvmnx : list or tuple
      wvmin, wvmax for line list
    disperser : str
      Name of the disperser
    modify_parse_dict : dict, optional
      Used to over-ride default settings of parse_dict

    Returns
    -------
    alist : Table
      Table of arc lines
    """
    # Get the parse dict
    parse_dict = load_parse_dict(modify_dict=modify_parse_dict)
    # Read rejection file
    if slf is None:
        from pypit import arutils as arut
        msgs.warn("Using arutils.dummy_self.  Better know what you are doing.")
        slf = arut.dummy_self()
    root = settings.argflag['run']['pypitdir']
    with open(root + '/data/arc_lines/rejected_lines.yaml', 'r') as infile:
        rej_dict = yaml.safe_load(infile)
    # Loop through the NIST Tables
    tbls = []
    for iline in lines:
        # Load
        tbl = parse_nist(slf, iline)
        # Parse
        if iline in parse_dict.keys():
            tbl = parse_nist_tbl(tbl, parse_dict[iline])
        # Reject
        if iline in rej_dict.keys():
            msgs.info("Rejecting select {:s} lines".format(iline))
            tbl = reject_lines(slf, tbl, idx, rej_dict[iline], disperser)
        tbls.append(tbl[['Ion', 'wave', 'RelInt']])
    # Stack
    alist = vstack(tbls)

    # wvmnx?
    if wvmnx is not None:
        msgs.info('Cutting down line list by wvmnx: {:g},{:g}'.format(
            wvmnx[0], wvmnx[1]))
        gdwv = (alist['wave'] >= wvmnx[0]) & (alist['wave'] <= wvmnx[1])
        alist = alist[gdwv]
    # Return
    return alist