Example #1
def test_coadd1d_2():
    """
    Test combining Echelle spectra
    """
    # NOTE: flux_value is False
    parfile = 'coadd1d.par'
    if os.path.isfile(parfile):
        os.remove(parfile)
    coadd_ofile = data_path('pisco_coadd.fits')
    if os.path.isfile(coadd_ofile):
        os.remove(coadd_ofile)

    coadd_ifile = data_path('gemini_gnirs_32_sb_sxd.coadd1d')
    scripts.coadd_1dspec.CoAdd1DSpec.main(
        scripts.coadd_1dspec.CoAdd1DSpec.parse_args(
            [coadd_ifile, '--test_spec_path',
             data_path('')]))

    hdu = io.fits_open(coadd_ofile)
    assert hdu[1].header['EXT_MODE'] == 'OPT'
    assert hdu[1].header['FLUXED'] is False

    # Test that the output file is kosher and contains the right quantities
    spec = onespec.OneSpec.from_file(coadd_ofile)
    assert spec.wave.shape == spec.wave_grid_mid.shape

    # Clean up
    hdu.close()
    os.remove(parfile)
    os.remove(coadd_ofile)
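
Every example in this listing calls a data_path() helper that is not shown. In the PypeIt unit tests this is a small convenience function that resolves file names against the test data directory; a minimal sketch, assuming the data live in a files/ directory next to the test module:

import os

def data_path(filename):
    # Resolve a file name against the test data directory.  This sketch assumes
    # the small test files ship in a 'files/' directory next to the test module.
    return os.path.join(os.path.dirname(__file__), 'files', filename)
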
Example #2
def test_wmko_flux_std():

    outfile = data_path('tmp_sens.fits')
    if os.path.isfile(outfile):
        os.remove(outfile)

    # Do it
    wmko_file = data_path('2017may28_d0528_0088.fits')
    spectrograph = load_spectrograph('keck_deimos')

    # Load + write
    spec1dfile = data_path('tmp_spec1d.fits')
    sobjs = keck_deimos.load_wmko_std_spectrum(wmko_file, outfile=spec1dfile)

    # Sensfunc
    #  The following mirrors the main() call of sensfunc.py
    par = spectrograph.default_pypeit_par()
    par['sensfunc']['algorithm'] = "IR"
    par['sensfunc']['multi_spec_det'] = [3, 7]

    # Instantiate the relevant class for the requested algorithm
    sensobj = sensfunc.SensFunc.get_instance(spec1dfile, outfile,
                                             par['sensfunc'])
    # Generate the sensfunc
    sensobj.run()
    # Write it out to a file
    sensobj.to_file(outfile)

    os.remove(spec1dfile)
    os.remove(outfile)
Example #3
def test_masterframe_methods():
    master_key = 'A_1_DET01'
    master_dir = data_path('')

    # Filename
    Aimg = buildimage.ArcImage(None)
    Aimg.PYP_SPEC = 'shane_kast_blue'
    filename = masterframe.construct_file_name(Aimg,
                                               master_key,
                                               master_dir=master_dir)
    assert isinstance(filename, str)
    assert filename == data_path(
        f'Master{Aimg.master_type}_{master_key}.{Aimg.master_file_format}')

    # Header
    hdr = masterframe.build_master_header(Aimg, master_key, master_dir)
    assert isinstance(hdr, fits.Header)

    # Get those keys!
    _master_key, _master_dir = masterframe.grab_key_mdir(hdr)
    assert _master_key == master_key

    _master_key2, _master_dir2 = masterframe.grab_key_mdir(filename,
                                                           from_filename=True)
    assert _master_key2 == master_key
Example #4
def test_coadd1d_2():
    """
    Test combining Echelle spectra
    """
    # NOTE: flux_value is False
    parfile = 'coadd1d.par'
    if os.path.isfile(parfile):
        os.remove(parfile)
    coadd_ofile = data_path('pisco_coadd.fits')
    if os.path.isfile(coadd_ofile):
        os.remove(coadd_ofile)

    coadd_ifile = data_path('gemini_gnirs_32_sb_sxd.coadd1d')
    coadd_1dspec.main(
        coadd_1dspec.parse_args(
            [coadd_ifile, '--test_spec_path',
             data_path('')]))

    hdu = io.fits_open(coadd_ofile)
    assert hdu[1].header['EXT_MODE'] == 'OPT'
    assert hdu[1].header['FLUXED'] is False

    # Clean up
    hdu.close()
    os.remove(parfile)
    os.remove(coadd_ofile)
Example #5
def test_from_sens_func(kast_blue_files):

    sens_file = data_path('sensfunc.fits')
    if os.path.isfile(sens_file):
        os.remove(sens_file)

    spectrograph = load_spectrograph('shane_kast_blue')
    par = spectrograph.default_pypeit_par()
    std_file, sci_file = kast_blue_files

    # Build the sensitivity function
    sensFunc = sensfunc.UVISSensFunc(std_file, sens_file)
    sensFunc.run()
    sensFunc.to_file(sens_file)

    # Instantiate and run
    outfile = data_path(os.path.basename(sci_file))
    fluxCalibrate = fluxcalibrate.MultiSlitFC([sci_file], [sens_file],
                                              par=par['fluxcalib'],
                                              outfiles=[outfile])
    # Test
    sobjs = specobjs.SpecObjs.from_fitsfile(outfile)
    assert 'OPT_FLAM' in sobjs[0].keys()

    os.remove(sens_file)
    os.remove(outfile)
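
This example (and Example #15 below) also depends on a kast_blue_files pytest fixture that the listing does not show. Judging from the assertions, it supplies a spec1d file for the Feige 66 standard star and one for the J1217p3905 science target; a hypothetical sketch, with placeholder file names rather than the real ones:

import pytest

@pytest.fixture
def kast_blue_files():
    # Placeholder names -- the actual fixture points at shane_kast_blue spec1d
    # files for the Feige 66 standard star and the J1217p3905 science target.
    std_file = data_path('spec1d_feige66_kastb_example.fits')
    sci_file = data_path('spec1d_J1217p3905_kastb_example.fits')
    return std_file, sci_file
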
Example #6
def test_coadd1d_1(monkeypatch):
    """
    Test basic coadd using shane_kast_blue
    """
    dp = data_path('')
    # Change to the parent directory of the data path, so we can test that
    # coadding without a coadd output file specified places the output next
    # to the spec1ds. Using monkeypatch means the current working directory
    # will be restored after the test.
    monkeypatch.chdir(Path(dp).parent)

    # NOTE: flux_value is False
    parfile = 'files/coadd1d.par'
    if os.path.isfile(parfile):
        os.remove(parfile)
    coadd_ofile = data_path('coadd1d_J1217p3905_KASTb_20150520_20150520.fits')
    if os.path.isfile(coadd_ofile):
        os.remove(coadd_ofile)

    coadd_ifile = data_path('shane_kast_blue.coadd1d')
    scripts.coadd_1dspec.CoAdd1DSpec.main(
        scripts.coadd_1dspec.CoAdd1DSpec.parse_args(
            [coadd_ifile, "--par_outfile", parfile]))
    hdu = io.fits_open(coadd_ofile)
    assert hdu[1].header['EXT_MODE'] == 'OPT'
    assert hdu[1].header['FLUXED'] is False
    # Test that the output file is kosher and contains the right quantities
    spec = onespec.OneSpec.from_file(coadd_ofile)
    assert spec.wave.shape == spec.wave_grid_mid.shape

    # Clean up
    hdu.close()
    os.remove(parfile)
    os.remove(coadd_ofile)
Example #7
def test_coadd1d_1():
    """
    Test basic coadd using shane_kast_blue
    """
    # NOTE: flux_value is False
    parfile = 'coadd1d.par'
    if os.path.isfile(parfile):
        os.remove(parfile)
    coadd_ofile = data_path('J1217p3905_coadd.fits')
    if os.path.isfile(coadd_ofile):
        os.remove(coadd_ofile)

    coadd_ifile = data_path('shane_kast_blue.coadd1d')
    coadd_1dspec.main(
        coadd_1dspec.parse_args(
            [coadd_ifile, '--test_spec_path',
             data_path('')]))

    hdu = io.fits_open(coadd_ofile)
    assert hdu[1].header['EXT_MODE'] == 'OPT'
    assert hdu[1].header['FLUXED'] is False

    # Clean up
    hdu.close()
    os.remove(parfile)
    os.remove(coadd_ofile)
Example #8
def test_quicklook():
    # The following needs the LRISb calibration files to be
    # found in a CALIBS/ folder in the DEV Suite.  You can get
    # that folder here:
    # https://drive.google.com/drive/folders/1NSg5Rmr8hD_1-fOchQc3WXjt59D6f9od?usp=sharing
    calib_dir = os.path.join(os.environ['PYPEIT_DEV'], 'CALIBS')
    if not os.path.isdir(calib_dir):
        raise IOError("You need to get the CALIBS folder as described above!!")

    # Define the output directories (HARDCODED!!)
    cdir = os.getcwd()
    os.chdir(data_path(''))
    outdir = data_path('keck_lris_blue_A')
    # Remove them if they already exist
    if os.path.isdir(outdir):
        shutil.rmtree(outdir)

    # Raw path
    droot = os.path.join(os.environ['PYPEIT_DEV'], 'RAW_DATA',
                         'keck_lris_blue', 'long_600_4000_d560')
    ql_mos.main(
        ql_mos.parse_args([
            'keck_lris_blue', droot, 'b150910_2033.fits.gz',
            'b150910_2051.fits.gz', 'b150910_2070.fits.gz', '--det=2',
            '--user_pixflat={0}'.format(
                os.path.join(
                    calib_dir,
                    'PYPEIT_LRISb_pixflat_B600_2x2_17sep2009.fits.gz'))
        ]))

    # Cleanup
    os.chdir(cdir)
    shutil.rmtree(outdir)
Example #9
def test_run_pypeit():

    # Just get a few files
    testrawdir = data_path('')

    outdir = data_path('REDUX_OUT_TEST')

    # For previously failed tests
    if os.path.isdir(outdir):
        shutil.rmtree(outdir)

    # Run the setup
    sargs = Setup.parse_args([
        '-r', testrawdir + 'b', '-s', 'shane_kast_blue', '-c all', '-o',
        '--output_path', outdir
    ])
    Setup.main(sargs)

    # Change to the configuration directory and set the pypeit file
    configdir = os.path.join(outdir, 'shane_kast_blue_A')
    pyp_file = os.path.join(configdir, 'shane_kast_blue_A.pypeit')
    assert os.path.isfile(pyp_file), 'PypeIt file not written.'

    # Try to run with -m and -o
    pargs = RunPypeIt.parse_args([pyp_file, '-o', '-m', '-r', configdir])
    RunPypeIt.main(pargs)

    # TODO: Add some code that will try to open the QA HTML and check that it
    # has the correct PNGs in it.

    # #########################################################
    # Test!!
    # Files exist
    spec1d_file = os.path.join(
        configdir, 'Science',
        'spec1d_b27-J1217p3905_KASTb_20150520T045733.560.fits')
    assert os.path.isfile(spec1d_file), 'spec1d file missing'

    # spec1d
    specObjs = specobjs.SpecObjs.from_fitsfile(spec1d_file)

    # Check RMS
    assert specObjs[0].WAVE_RMS < 0.02  # difference must be less than 0.02 pixels

    # Flexure
    # difference must be less than 0.1 pixels
    assert abs(-0.03 - specObjs[0].FLEX_SHIFT_TOTAL) < 0.1

    # Helio
    assert abs(specObjs[0].VEL_CORR - 0.9999261685542624) < 1.0E-10

    # Now re-use those master files
    pargs = RunPypeIt.parse_args([pyp_file, '-o', '-r', configdir])
    RunPypeIt.main(pargs)

    # Clean-up
    shutil.rmtree(outdir)
Example #10
def test_io():
    detector = detector_container.DetectorContainer(**def_det)
    detector.to_file(data_path('tmp_detector.fits'), overwrite=True)

    _new_detector = detector.from_file(data_path('tmp_detector.fits'))

    # Check a few attributes are equal
    assert detector['dataext'] == _new_detector['dataext'], 'Bad read dataext'
    assert np.array_equal(detector['gain'],
                          _new_detector['gain']), 'Bad read gain'
    assert detector['binning'] == _new_detector['binning'], 'Bad read binning'
    assert np.array_equal(detector['datasec'],
                          _new_detector['datasec']), 'Bad read datasec'

    os.remove(data_path('tmp_detector.fits'))
Example #11
def test_yamlify():
    """ This tests the yamlify method and also the approach to 
    writing and reading the Setup block of PypeIt"""

    obj = dict(a=1., b='acb', datasec='[2:23,:2048]', d=dict(c=3))

    new_obj = utils.yamlify(obj)

    # Write
    tst_file = tstutils.data_path('tst.yaml')
    with open(tst_file, 'w') as f:
        setup_lines = io.dict_to_lines(new_obj, level=1)
        f.write('\n'.join(setup_lines)+'\n')

    # Read
    with open(tst_file, 'r') as f:
        lines = f.readlines()

    # Strip white space
    lines = [line.strip() for line in lines]
    # Add back in \n
    ystr = '\n'.join(lines)
    sdict = yaml.safe_load(ystr)

    # Clean up
    os.remove(tst_file)
    
Example #12
def test_all2dobj_update_image(init_dict):

    allspec2D = spec2dobj.AllSpec2DObj()
    allspec2D['meta']['bkg_redux'] = False
    allspec2D['meta']['find_negative'] = False
    for i in range(2):
        d = load_spectrograph('keck_deimos').get_detector_par(i + 1)
        allspec2D[d.name] = spec2dobj.Spec2DObj(detector=d, **init_dict)

    # Write
    ofile = tstutils.data_path('tst_allspec2d.fits')
    if os.path.isfile(ofile):
        os.remove(ofile)
    allspec2D.write_to_fits(ofile)

    _allspec2D = spec2dobj.AllSpec2DObj()
    _allspec2D['meta']['bkg_redux'] = False
    _allspec2D['meta']['find_negative'] = False
    d = load_spectrograph('keck_deimos').get_detector_par(2)
    detname = d.name
    init_dict['sciimg'] = allspec2D[detname].sciimg.copy() * 2
    _allspec2D[detname] = spec2dobj.Spec2DObj(detector=d, **init_dict)

    _allspec2D.write_to_fits(ofile,
                             update_det=_allspec2D.detectors,
                             overwrite=True)

    # Check
    allspec2D_2 = spec2dobj.AllSpec2DObj.from_fits(ofile)
    assert np.array_equal(allspec2D_2[detname].sciimg,
                          _allspec2D[detname].sciimg), 'Bad update'
    assert np.array_equal(allspec2D_2[detname].sciimg,
                          allspec2D[detname].sciimg * 2), 'Bad update'

    os.remove(ofile)
Example #13
def test_all2dobj_write(init_dict):
    # Build one
    init_dict['detector'] = tstutils.get_kastb_detector()
    spec2DObj = spec2dobj.Spec2DObj(**init_dict)
    allspec2D = spec2dobj.AllSpec2DObj()
    allspec2D['meta']['bkg_redux'] = False
    allspec2D['meta']['find_negative'] = False
    detname = spec2DObj.detname
    allspec2D[detname] = spec2DObj
    # Write
    ofile = tstutils.data_path('tst_allspec2d.fits')
    if os.path.isfile(ofile):
        os.remove(ofile)
    allspec2D.write_to_fits(ofile)
    # Read
    _allspec2D = spec2dobj.AllSpec2DObj.from_fits(ofile)
    # Check
    assert allspec2D.detectors == _allspec2D.detectors, 'Bad read: detector mismatch'
    assert allspec2D['meta'] == _allspec2D['meta'], 'Bad read: meta mismatch'
    # Try to update it
    _allspec2D['meta']['bkg_redux'] = True
    _allspec2D[detname].vel_corr = 2.
    _allspec2D.write_to_fits(ofile, update_det='DET01')

    __allspec2D = spec2dobj.AllSpec2DObj.from_fits(ofile)
    assert __allspec2D['meta'] == _allspec2D['meta'], 'Bad read: meta mismatch'
    assert __allspec2D['meta'] != allspec2D['meta'], 'Bad read: meta mismatch'
    assert __allspec2D[detname].vel_corr == 2., 'Bad update'
    os.remove(ofile)
Example #14
def test_io():
    file = os.path.join(os.environ['PYPEIT_DEV'], 'RAW_DATA', 'gemini_gmos',
                        'GN_HAM_R400_885', 'N20190205S0035.fits')

    # Load the spectrograph
    spec = load_spectrograph('gemini_gmos_north_ham')
    msc = spec.get_mosaic_par((1, 2, 3), hdu=fits.open(file))

    test_file = data_path('tmp_mosaic.fits')
    msc.to_file(test_file, overwrite=True)

    _msc = Mosaic.from_file(test_file)

    # Check a few attributes are equal
    assert np.array_equal(msc.tform, _msc.tform), 'Bad transform read'
    assert _msc.shape == msc.shape, 'Bad shape read'
    assert _msc.name == msc.name, 'Bad name setup'

    assert len(_msc.detectors) == len(msc.detectors), 'Bad number of detectors'

    assert _msc.detectors[0]['dataext'] == msc.detectors[0]['dataext'], 'Bad read dataext'
    assert np.array_equal(_msc.detectors[1]['gain'], msc.detectors[1]['gain']), 'Bad read gain'
    assert _msc.detectors[2]['binning'] == msc.detectors[2]['binning'], 'Bad read binning'
    assert np.array_equal(_msc.detectors[1]['datasec'], msc.detectors[1]['datasec']), \
        'Bad read datasec'

    os.remove(test_file)
Example #15
def test_gen_sensfunc(kast_blue_files):

    sens_file = data_path('sensfunc.fits')
    if os.path.isfile(sens_file):
        os.remove(sens_file)

    # Get it started
    spectrograph = load_spectrograph('shane_kast_blue')
    par = spectrograph.default_pypeit_par()
    std_file, sci_file = kast_blue_files
    # Instantiate
    sensFunc = sensfunc.UVISSensFunc(std_file, sens_file)
    # Test the standard loaded
    assert sensFunc.meta_spec['BINNING'] == '1,1'
    assert sensFunc.meta_spec['TARGET'] == 'Feige 66'

    # Generate the sensitivity function
    sensFunc.run()
    # Test
    assert os.path.basename(sensFunc.std_cal) == 'feige66_002.fits'
    # TODO: @jhennawi, please check this edit
    assert 'SENS_ZEROPOINT' in sensFunc.sens.keys(), 'Bad column names'
    # Write
    sensFunc.to_file(sens_file)

    os.remove(sens_file)
Example #16
def test_read_combid():

    # ------------------------------------------------------------------
    # In case of failed tests
    setup_dir = data_path('setup_files')
    if os.path.isdir(setup_dir):
        shutil.rmtree(setup_dir)
    config_dir = data_path('shane_kast_blue_A')
    if os.path.isdir(config_dir):
        shutil.rmtree(config_dir)
    # ------------------------------------------------------------------

    # Generate the pypeit file with the comb_id
    droot = data_path('b')
    pargs = Setup.parse_args([
        '-r', droot, '-s', 'shane_kast_blue', '-c=all', '-b',
        '--extension=fits.gz', '--output_path={:s}'.format(data_path(''))
    ])
    Setup.main(pargs)
    shutil.rmtree(setup_dir)

    pypeit_file = os.path.join(config_dir, 'shane_kast_blue_A.pypeit')
    cfg_lines, data_files, frametype, usrdata, setups, _ = parse_pypeit_file(
        pypeit_file)

    # Get the spectrograph
    spectrograph = None
    for l in cfg_lines:
        if 'spectrograph' in l:
            spectrograph = load_spectrograph(l.split(' ')[-1])
            break
    assert spectrograph is not None, 'Did not appropriately read spectrograph'

    # Set the metadata
    pmd = PypeItMetaData(spectrograph,
                         spectrograph.default_pypeit_par(),
                         files=data_files,
                         usrdata=usrdata,
                         strict=False)

    indx = pmd['filename'] == 'b27.fits.gz'
    assert pmd['comb_id'][indx] == [1], 'Incorrect combination group ID'
    assert pmd['comb_id'][np.where(~indx)[0]][0] == -1, 'Incorrect combination group ID'

    shutil.rmtree(config_dir)
Example #17
def test_all2dobj_hdr(init_dict):
    # Build one
    init_dict['detector'] = tstutils.get_kastb_detector()
    spec2DObj = spec2dobj.Spec2DObj(**init_dict)
    allspec2D = spec2dobj.AllSpec2DObj()
    allspec2D['meta']['bkg_redux'] = False
    allspec2D['meta']['find_negative'] = False
    allspec2D[spec2DObj.detname] = spec2DObj
    #
    kast_file = tstutils.data_path('b1.fits.gz')
    header = fits.getheader(kast_file)
    spectrograph = load_spectrograph('shane_kast_blue')
    # Do it
    hdr = allspec2D.build_primary_hdr(header,
                                      spectrograph,
                                      master_dir=tstutils.data_path(''))
    # Test it
    assert hdr['SKYSUB'] == 'MODEL'
Example #18
def test_intrv_versions():
    from pypeit.bspline.utilpy import intrv as intrv_py
    from pypeit.bspline.utilc import intrv as intrv_c

    d = np.load(data_path('intrv.npz'))

    indx = intrv_py(d['nord'], d['breakpoints'], d['x'])
    _indx = intrv_c(d['nord'], d['breakpoints'], d['x'])
    assert np.allclose(indx, _indx), 'Differences in index'
Example #19
def test_initialization():
    """ Load input PypeIt file
    """
    # Generate a PYPIT file
    pypit_file = data_path('test.pypeit')
    make_pypeit_file(pypit_file, 'shane_kast_blue', [data_path('b*fits.gz')], setup_mode=True)

    # Perform the setup
    setup = pypeitsetup.PypeItSetup.from_pypeit_file(pypit_file)
    par, spectrograph, fitstbl = setup.run(sort_dir=data_path(''))

    # Test
    assert spectrograph.name == 'shane_kast_blue'
    assert len(fitstbl) == 8

    # Clean-up
    os.remove(data_path('test.calib'))
    os.remove(data_path('test.pypeit'))
Example #20
def test_io():
    """
    Test that bspline_profile (1) is successful and (2) produces the
    same result for a set of data fit spectrally.
    """
    # Files created using `rmtdict` branch (30 Jan 2020)
    files = [
        data_path('gemini_gnirs_32_{0}_spec_fit.npz'.format(slit))
        for slit in [0, 1]
    ]
    logrej = 0.5
    spec_samp_fine = 1.2
    for f in files:
        d = np.load(f)
        spec_bspl, spec_gpm_fit, spec_flat_fit, _, exit_status = fitting.bspline_profile(
            d['spec_coo_data'], d['spec_flat_data'], d['spec_ivar_data'],
            np.ones_like(d['spec_coo_data']), ingpm=d['spec_gpm_data'],
            nord=4, upper=logrej, lower=logrej,
            kwargs_bspline={'bkspace': spec_samp_fine},
            kwargs_reject={'groupbadpix': True, 'maxrej': 5}, quiet=True)
    # Write
    ofile = data_path('tst_bspline.fits')
    if os.path.isfile(ofile):
        os.remove(ofile)
    spec_bspl.to_file(ofile)
    # Read
    _spec_bspl = bspline.bspline.from_file(ofile)
    # Check that the data read in is the same
    assert np.array_equal(_spec_bspl.breakpoints,
                          spec_bspl.breakpoints), 'Bad read'
    # Evaluate the bsplines and check that the written and read in data
    # provide the same result
    tmp = spec_bspl.value(np.linspace(0., 1., 100))[0]
    _tmp = _spec_bspl.value(np.linspace(0., 1., 100))[0]
    assert np.array_equal(tmp, _tmp), 'Bad bspline evaluate'

    # Test overwrite
    _spec_bspl.to_file(ofile, overwrite=True)

    # None
    bspl = bspline.bspline(None)
    bspl.to_file(ofile, overwrite=True)

    os.remove(ofile)
Example #21
def test_chk_calibs_not():
    os.chdir(data_path(''))
    droot = os.path.join(os.environ['PYPEIT_DEV'],
                         'RAW_DATA/not_alfosc/grism4')
    droot += '/ALD'

    pargs = chk_for_calibs.parse_args([droot, '-s', 'not_alfosc'])
    answers, _ = chk_for_calibs.main(pargs)

    assert answers['pass'][0], 'One or more failures!'
Example #22
def test_load_specobjs():
    spec_file = data_path(
        'spec1d_r153-J0025-0312_KASTr_20150123T025323.850.fits')
    sobjs = specobjs.SpecObjs.from_fitsfile(spec_file)

    # Test
    assert isinstance(sobjs, specobjs.SpecObjs)
    assert len(sobjs[0].BOX_COUNTS) == 1200

    assert isinstance(sobjs[0], specobj.SpecObj)
Example #23
def test_spec2dobj_io(init_dict):
    init_dict['detector'] = tstutils.get_kastb_detector()
    spec2DObj = spec2dobj.Spec2DObj(**init_dict)
    # Write
    ofile = tstutils.data_path('tst_spec2d.fits')
    if os.path.isfile(ofile):
        os.remove(ofile)
    spec2DObj.to_file(ofile)
    # Read
    _spec2DObj = spec2dobj.Spec2DObj.from_file(ofile, spec2DObj.detname)
    os.remove(ofile)
Example #24
def test_io():
    sobj = specobj.SpecObj('MultiSlit', 'DET01', SLITID=0)
    # Can we handle 1 array?
    sobj['BOX_WAVE'] = np.arange(100).astype(float)
    ofile = data_path('tmp.fits')
    sobj.to_file(ofile, overwrite=True)
    _sobj = specobj.SpecObj.from_file(ofile)
    assert np.array_equal(sobj.BOX_WAVE, _sobj.BOX_WAVE)

    # Cleanup
    os.remove(ofile)
Example #25
def test_model_versions():
    from pypeit.bspline.utilpy import bspline_model as bspline_model_py
    from pypeit.bspline.utilc import bspline_model as bspline_model_c

    d = np.load(data_path('bspline_model.npz'))

    mod = bspline_model_py(d['x'], d['action'], d['lower'], d['upper'],
                           d['coeff'], d['n'], d['nord'], d['npoly'])
    _mod = bspline_model_c(d['x'], d['action'], d['lower'], d['upper'],
                           d['coeff'], d['n'], d['nord'], d['npoly'])

    assert np.allclose(mod, _mod), 'Differences in index'
Example #26
def test_rms():
    """Test on some real data."""
    center = np.load(
        data_path('example_trace_deimos_1200G_M_7750.npz'))['center']

    pca = TracePCA(trace_cen=center, npca=2)
    pca.build_interpolator(np.array([3, 1]),
                           function='legendre')  #, debug=True)
    pca_center = pca.predict(center[pca.reference_row, :])

    rms = np.std(center - pca_center, axis=0)
    assert np.sum(rms > 0.2) == 1, 'Change in the accuracy of the PCA.'
Example #27
def test_io(sobj1, sobj2, sobj3, sobj4):
    sobjs = specobjs.SpecObjs([sobj1, sobj2, sobj3, sobj4])
    sobjs[0]['BOX_WAVE'] = np.arange(1000).astype(float)
    sobjs[1]['BOX_WAVE'] = np.arange(1000).astype(float)
    sobjs[2]['BOX_WAVE'] = np.arange(1000).astype(float)
    #sobjs[0]['BOX_COUNTS'] = np.ones_like(sobjs[0].BOX_WAVE)  # This tests single array
    sobjs[1]['BOX_COUNTS'] = np.ones_like(sobjs[0].BOX_WAVE)
    sobjs[2]['BOX_COUNTS'] = np.ones_like(sobjs[0].BOX_WAVE)
    # Detector
    sobjs[0]['DETECTOR'] = tstutils.get_kastb_detector()
    tmp = tstutils.get_kastb_detector()

    tmp['det'] = 2
    sobjs[1]['DETECTOR'] = tmp
    # Write
    header = fits.PrimaryHDU().header
    header['TST'] = 'TEST'
    ofile = tstutils.data_path('tst_specobjs.fits')
    if os.path.isfile(ofile):
        os.remove(ofile)
    sobjs.write_to_fits(header, ofile, overwrite=False)
    # Read
    hdul = io.fits_open(ofile)
    assert len(hdul) == 7  # Primary + 4 Obj + 2 Detectors
    assert hdul[0].header['NSPEC'] == 4
    hdul.close()
    #
    _sobjs = specobjs.SpecObjs.from_fitsfile(ofile)
    assert _sobjs.nobj == 4
    assert np.array_equal(sobjs[0].BOX_WAVE, _sobjs[0].BOX_WAVE)
    assert np.array_equal(sobjs[1].BOX_WAVE, _sobjs[1].BOX_WAVE)
    _sobjs.write_to_fits(header, ofile, overwrite=True)

    # Detector
    assert _sobjs[0].DETECTOR is not None, '1st object started with Detector'
    assert _sobjs[1].DETECTOR is not None, '2nd object has DET=1 so should get decorated'
    assert _sobjs[2].DETECTOR is None

    # Now try updates!
    sobjs1 = specobjs.SpecObjs([sobj1])
    sobjs[0]['BOX_WAVE'] = np.arange(2000).astype(float)
    sobjs1[0]['DETECTOR'] = tstutils.get_kastb_detector()
    header1 = fits.PrimaryHDU().header
    sobjs1.write_to_fits(header1, ofile, overwrite=True, update_det='DET01')

    # Test
    _sobjs1 = specobjs.SpecObjs.from_fitsfile(ofile)
    assert _sobjs1.nobj == 3
    assert _sobjs1[2].BOX_WAVE.size == 2000
    os.remove(ofile)
Example #28
def test_identify():
    arc_file = os.path.join(os.getenv('PYPEIT_DEV'), 'Cooked',
                            'shane_kast_blue', 'MasterArc_A_1_DET01.fits')
    slits_file = os.path.join(os.getenv('PYPEIT_DEV'), 'Cooked',
                              'shane_kast_blue',
                              'MasterSlits_A_1_DET01.fits.gz')
    # Just list
    pargs = scripts.identify.Identify.parse_args(
        [arc_file, slits_file, '--test'])
    arcfitter = scripts.identify.Identify.main(pargs)

    # Load line list
    arcfitter.load_IDs(fname=data_path('waveid_tests.ascii'))
    assert arcfitter._detns.size == 31, 'Bad load'

    # Fit
    arcfitter._fitdict['polyorder'] = 3
    arcfitter.fitsol_fit()
    assert arcfitter._fitdict['fitc'].size == 4, 'Bad fit'

    # Auto
    arcfitter.auto_id()
    assert np.sum(arcfitter._lineflg < 3) > 10, 'Bad auto ID'
    arcfitter.fitsol_fit()

    # Write
    final_fit = arcfitter.get_results()

    waveCalib = wavecalib.WaveCalib(
        nslits=1,
        wv_fits=np.atleast_1d(arcfitter._fitdict['WaveFit']),
        arc_spectra=np.atleast_2d(arcfitter.specdata).T,
        spat_ids=np.atleast_1d(int(arcfitter._spatid)),
        PYP_SPEC='shane_kast_blue',
    )

    # If you touch the following line, you probably need to update the call in scripts/identify.py
    wvarxiv_fn = arcfitter.store_solution(final_fit,
                                          1,
                                          rmstol=0.1,
                                          force_save=True,
                                          wvcalib=waveCalib)

    # Test we can read it
    tmp = wavecalib.WaveCalib.from_file('wvcalib.fits')

    # Clean up -- If these fail then the store solution failed
    os.remove('waveid.ascii')
    os.remove(wvarxiv_fn)
    os.remove('wvcalib.fits')
Example #29
def test_cholesky_solve_versions():
    # Import only when the test is performed
    from pypeit.bspline.utilpy import cholesky_solve as cholesky_solve_py
    from pypeit.bspline.utilc import cholesky_solve as cholesky_solve_c

    # Read data
    d = np.load(data_path('cholesky_solve_abb.npz'))

    # Run python version
    e, b = cholesky_solve_py(d['a'], d['bb'])
    # Run C version
    e, _b = cholesky_solve_c(d['a'], d['bb'])
    # Check output arrays
    assert np.allclose(b, _b), 'Differences in cholesky_solve'
Example #30
def test_solution_array_versions():
    # Import only when the test is performed
    from pypeit.bspline.utilpy import solution_arrays as sol_py
    from pypeit.bspline.utilc import solution_arrays as sol_c

    d = np.load(data_path('solution_arrays.npz'))

    a, b = sol_py(d['nn'], d['npoly'], d['nord'], d['ydata'], d['action'],
                  d['ivar'], d['upper'], d['lower'])
    _a, _b = sol_c(d['nn'], d['npoly'], d['nord'], d['ydata'], d['action'],
                   d['ivar'], d['upper'], d['lower'])

    assert np.allclose(a, _a), 'Differences in alpha'
    assert np.allclose(b, _b), 'Differences in beta'
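
All of the functions above are ordinary pytest tests from the PypeIt test suite, so any one of them can be run in isolation once the package and its test data are available; for instance, from Python:

import pytest

# Run a single test by name (assumes this is invoked from the directory that
# contains the test modules and their 'files/' data directory).
pytest.main(['-v', '-k', 'test_coadd1d_1'])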