Code Example #1
File: utils.py Project: lwymarie/specdb
def load_db(db_type, **kwargs):
    """
    Parameters
    ----------
    db_type : str
      Type of database
      Current allowed entries are [igmspec, uvqs, priv]


    Returns
    -------
    dbobj : SpecDB object
      Generally a child class of SpecDB (e.g. IgmSpec)

    """
    if db_type == 'igmspec':
        from specdb.specdb import IgmSpec
        Specdb = IgmSpec(**kwargs)
    elif db_type == 'uvqs':
        from specdb.specdb import UVQS
        Specdb = UVQS(**kwargs)
    elif db_type == 'priv':  # Private
        from specdb.specdb import SpecDB
        Specdb = SpecDB(**kwargs)
    else:
        raise IOError("Not ready for this dbase value: {:s}".format(db_type))

    # Return
    return Specdb
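A minimal usage sketch for the loader above (not part of the original project). The 'igmspec' branch assumes that database is installed where specdb expects it; the db_file keyword for the private branch is an assumption based on how IgmSpec is opened in the later examples.

# Hypothetical calls to load_db; the database files must already be downloaded.
igmsp = load_db('igmspec')                      # IgmSpec instance
# priv = load_db('priv', db_file='my_DB.hdf5')  # private collection via the generic SpecDB class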
Code Example #2
def test_sdss_ssa_querydata():
    igmsp = IgmSpec()  #db_file=db_file)
    #
    ssa = spdb_ssa.SSAInterface(igmsp)
    votable = ssa.querydata('0.027228,0.515341', SIZE=1e-3)
    # Write
    votable.to_xml('sdss_querydata.xml')
Code Example #3
def test_chalos_ssa_querydata():
    """ Mixes COS and HIRES
    """
    igmsp = IgmSpec()  #db_file=db_file)
    #
    ssa = spdb_ssa.SSAInterface(igmsp)
    votable = ssa.querydata('344.4092,13.6793', SIZE=1e-3)
    # Write
    votable.to_xml('cos_querydata.xml')
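The SSA test snippets above (and the variants that follow) rely on module-level imports that the extraction dropped. A plausible preamble, assuming spdb_ssa is an alias for specdb's ssa module (the alias and exact import path are assumptions):

# Assumed test-module preamble for the SSA snippets; the import path is an assumption.
from specdb.specdb import IgmSpec
from specdb import ssa as spdb_ssa  # provides the SSAInterface used as spdb_ssa.SSAInterface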
Code Example #4
File: test_ssa.py Project: li-jr/igmspec
def test_sdss_ssa_querydata():
    if os.getenv('SPECDB') is None:  # Would need to generate a new debug file for this to work..
        assert True
        return
    igmsp = IgmSpec()#db_file=db_file)
    #
    ssa = spdb_ssa.SSAInterface(igmsp)
    votable = ssa.querydata('0.027228,0.515341', SIZE=1e-3)
    # Write
    votable.to_xml('sdss_querydata.xml')
Code Example #5
File: mk_debug_files.py Project: specdb/specdb
def igmspec_file(version='v02', nspec=5):
    """ Build a debug file from IGMspec
    Returns
    -------

    """
    # Load IGMSpec
    igmsp = IgmSpec(version='v02')  # Will advance as needed
    #
    outfil = specdb.__path__[0]+'/tests/files/IGMspec_DB_{:s}_debug.hdf5'.format(version)
    hdf = h5py.File(outfil,'w')
    # Grab nspec sources from several datasets
    dsets = ['BOSS_DR12', 'HD-LLS_DR1', 'SDSS_DR7', 'GGG']#, '2QZ']
    flags = igmsp.cat['flag_group'].data
    all_IDs = []
    for dset in dsets:
        sflag = igmsp.group_dict[dset]
        query = (flags % (sflag*2)) >= sflag
        gdi = np.where(query)[0]
        # Take nspec
        keep = gdi[0:nspec]
        all_IDs += keep.tolist()  # Save for main catalog
        # Grab data
        if dset == '2QZ':
            pdb.set_trace()
        rows = igmsp[dset].ids_to_allrows(keep)   # Match to all rows
        spec, meta = igmsp[dset].grab_specmeta(rows)  # Grab
        # Group
        grp = hdf.create_group(dset)
        spec_set = hdf[dset].create_dataset('spec', data=spec.data, chunks=True, compression='gzip')
        hdf[dset]['meta'] = meta
        # Add attrs :: SSA -- read from igmspec later
        for key in igmsp[dset].hdf[dset+'/meta'].attrs.keys():
            hdf[dset]['meta'].attrs[key] = igmsp[dset].hdf[dset+'/meta'].attrs[key]
        '''
        if 'SSA' not in hdf[dset]['meta'].attrs.keys():
            pdb.set_trace()
            from specdb.ssa import default_fields
            Title='BOSS DR12 Quasars'
            ssa_dict = default_fields(Title, flux='flambda', fxcalib='ABSOLUTE')
            hdf[dset]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(ssa_dict))
        '''
    # Catalog
    IDs = np.unique(np.array(all_IDs))
    zpri = igmsp.hdf['catalog'].attrs['Z_PRIORITY']
    # Strip out unused groups
    hdfkeys = list(hdf.keys())
    sdict = igmsp.group_dict.copy()
    for dkey in list(sdict.keys()):  # iterate over a copy so keys can be popped safely
        if dkey not in hdfkeys:
            sdict.pop(dkey, None)
    sdbbu.write_hdf(hdf, str('igmspec'), igmsp.qcat.cat[IDs], zpri,
                    sdict, version, Publisher=str('specdb'))
    print("Wrote {:s} DB file".format(outfil))
Code Example #6
File: test_ssa.py Project: li-jr/igmspec
def test_chalos_ssa_querydata():
    """ Mixes COS and HIRES
    """
    if os.getenv('SPECDB') is None:  # Would need to generate a new debug file for this to work..
        assert True
        return
    igmsp = IgmSpec()#db_file=db_file)
    #
    ssa = spdb_ssa.SSAInterface(igmsp)
    votable = ssa.querydata('344.4092,13.6793', SIZE=1e-3)
    # Write
    votable.to_xml('cos_querydata.xml')
Code Example #7
def chk_v02(pair_sep=10 * u.arcsec):
    """ Check v02 for pairs
    
    Returns
    -------

    """
    print("checking..")
    igmsp = IgmSpec()
    # Grab candidate pairs
    cpairs = chk_for_pairs(igmsp.qcat.cat, pair_sep=pair_sep)
    # Coords
    c_main = SkyCoord(ra=igmsp.qcat.cat['RA'],
                      dec=igmsp.qcat.cat['DEC'],
                      unit='deg')
    # Skip
    stbl = skip_gd_pair()

    # Loop
    flg_cp = np.array([False] * len(igmsp.qcat.cat))
    for qq, cpair in enumerate(cpairs):
        # Skip those already done
        if flg_cp[cpair]:
            continue
        # Find the matches
        sep = c_main[cpair].separation(c_main)
        pairs = sep < pair_sep
        flg_cp[pairs] = True
        # Skip pairs with very different zem
        if np.sum(pairs) == 2:
            zem = igmsp.qcat.cat['zem'][pairs]
            if np.abs(zem[0] - zem[1]) > 0.1:
                continue
            # Both BOSS?
            if (igmsp.qcat.cat['flag_survey'][pairs][0]
                    == 1.) & (igmsp.qcat.cat['flag_survey'][pairs][1] == 1.):
                continue
        # Skip table?
        if np.min(np.abs(igmsp.qcat.cat['RA'][pairs][0] - stbl['RA'])) < 1e-4:
            continue
        # XQ-100? -- These have bad coords but have been matched
        if igmsp.qcat.cat['flag_survey'][pairs][1] == 64.:
            pdb.set_trace()
        # Print
        print('qq = {:d}'.format(qq))
        print(igmsp.qcat.cat[['RA', 'DEC', 'IGM_ID', 'zem',
                              'flag_survey']][pairs])
        print(sep.to('arcsec')[pairs])
        pdb.set_trace()
    # All clear?
    print("All clear..")
Code Example #8
def generate_csv(zmax=6.):
    igmsp = IgmSpec()
    boss_dr12 = igmsp['BOSS_DR12'].meta
    # Restrict to z<6 (higher ones are probably junk anyhow)
    gdq = (boss_dr12['zem_GROUP'] > 1.95) & (boss_dr12['zem_GROUP'] < zmax)
    boss_dr12 = boss_dr12[gdq]
    # Build the Table -- NOTE THE ORDER DOES MATTER!
    dr12_set = Table()
    dr12_set['PLATE'] = boss_dr12['PLATE']
    dr12_set['MJD'] = boss_dr12['MJD']
    dr12_set['FIB'] = boss_dr12['FIBERID']
    dr12_set['RA'] = boss_dr12['RA_GROUP']
    dr12_set['DEC'] = boss_dr12['DEC_GROUP']
    # Write
    dr12_set.write('dr12_set.csv', format='csv', overwrite=True)
Code Example #9
File: tpe_stack.py Project: qsopairs/papers
def tpe_stack_lris(dv=100 * u.km / u.s):
    """ Testing stacks with LRIS
    """
    # Load sample
    ipos = this_file.rfind('/')
    if ipos == -1:
        path = './'
    else:
        path = this_file[0:ipos]
    tpe = Table.read(path + '/../TPE_DR12_31.2_spec.fits')
    # Load spectra
    # Coordinates
    b_coords = SkyCoord(ra=tpe['BG_RA'], dec=tpe['BG_DEC'], unit='deg')
    f_coords = SkyCoord(ra=tpe['FG_RA'], dec=tpe['FG_DEC'], unit='deg')

    # Cut on impact parameter and BOSS
    kpc_amin = cosmo.kpc_comoving_per_arcmin(tpe['FG_Z'])  # kpc per arcmin
    ang_seps = b_coords.separation(f_coords)
    rho = ang_seps.to('arcmin') * kpc_amin / (1 + tpe['FG_Z'])

    cut_Rlris = (rho.to('Mpc').value < 4) & (tpe['BG_LYA_INSTRUMENT'] == 'LRIS')
    # & (tpe['FG_Z'] > 2.)  # Some of these have too low z (just barely)

    # Cut
    gd_b_coords = b_coords[cut_Rlris]
    gd_tpe = tpe[cut_Rlris]

    # Grab these spectra from QPQ
    #   For boss, we are ok taking the first entry of each
    #   The returned set is aligned with the input coords
    qpq = IgmSpec(db_file=qpq_file, skip_test=True)

    IDs = qpq.qcat.match_coord(gd_b_coords, group='LRIS')
    meta = qpq['LRIS'].meta
    gcut = meta['GRATING'] == '1200/3400'  # There is one with B400
    B1200 = np.in1d(IDs, meta['PRIV_ID'][gcut])
    print("There are {:d} sources without B1200".format(np.sum(~B1200)))
    # Cut again
    gd_b_coords = gd_b_coords[B1200]
    gd_tpe = gd_tpe[B1200]
    gd_IDs = IDs[B1200]

    # Find the rows
    idx = cat_utils.match_ids(gd_IDs, meta['PRIV_ID'])
    rows = meta['GROUP_ID'][idx]
    pdb.set_trace()

    spec, meta = qpq.coords_to_spectra(gd_b_coords, 'LRIS', all_spec=False)

    # Check for continua
    has_co = np.array([True] * spec.nspec)
    for ii in range(spec.nspec):
        # Select
        spec.select = ii
        # Match to lya
        lya = (1 + gd_tpe['FG_Z'][ii]) * 1215.67 * u.AA
        iwave = np.argmin(np.abs(spec.wavelength - lya))
        # Check for co
        #coval = spec.co[iwave]
        #print('spec: {:d} with co={:g}'.format(ii, coval))
        if np.isclose(spec.co[iwave], 0.) or np.isclose(spec.co[iwave], 1.):
            has_co[ii] = False

    # Slice to good co
    print("{:d} BOSS spectra with a continuum".format(np.sum(has_co)))
    co_spec = spec[has_co]
    co_spec.normed = True  # Apply continuum

    # NEED TO ZERO OUT REGIONS WITHOUT CONTINUUM
    #  May also wish to isolate in wavelength to avoid rejected pixels
    for ii in range(co_spec.nspec):
        co_spec.select = ii
        co = co_spec.co.value
        bad_pix = np.any([(co == 0.), (co == 1.)], axis=0)
        co_spec.add_to_mask(bad_pix, compressed=True)

    # Rebin to rest
    zarr = gd_tpe['FG_Z'][has_co]
    rebin_spec = lspu.rebin_to_rest(co_spec, zarr, dv)

    # Stack
    stack = lspu.smash_spectra(rebin_spec)

    # Plot
    plot_stack(stack, 'LRIS_stack.pdf')

    return stack
Code Example #10
def read_igmspec(plate, fiber, ra=-1, dec=-1, mjd=-1, table_name='SDSS_DR7'):
    with open(os.devnull, 'w') as devnull:
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            # Hack to avoid specdb spamming us with print statements
            stdout = sys.stdout
            sys.stdout = devnull

            from specdb.specdb import IgmSpec  # Custom package only used in this optional read function

            # global igmtables, igmsp
            global cache
            if table_name not in cache:
                with lock:
                    if table_name not in cache:  # re-check inside the lock
                        cache['igmsp'] = IgmSpec()
                        cache[table_name] = Table(
                            cache['igmsp'].hdf[table_name + "/meta"].value)
            igmsp = cache['igmsp']
            mtbl = cache[table_name]

            print("Plate/Fiber: ", plate, fiber)
            plate = int(plate)
            fiber = int(fiber)

            # Find plate/fiber
            if table_name == 'SDSS_DR7':
                imt = np.where((mtbl['PLATE'] == plate)
                               & (mtbl['FIBER'] == fiber))[0]
            elif table_name == 'BOSS_DR12':
                imt = np.where((mtbl['PLATE'] == plate)
                               & (mtbl['FIBERID'] == fiber)
                               & (mtbl['MJD'] == mjd))[0]
            igmid = mtbl['IGM_ID'][imt]
            # print "imt, igmid", imt, igmid, type(imt), type(igmid), type(mtbl), np.shape(mtbl), "end-print"
            assert np.shape(igmid)[0] == 1, \
                "Expected igmid to contain exactly 1 value, found %d" % np.shape(igmid)[0]

            raw_data = {}
            # spec, meta = igmsp.idb.grab_spec([table_name], igmid)
            # spec, meta = igmsp.allspec_of_ID(igmid, groups=[table_name])
            spec, meta = igmsp.spectra_from_ID(igmid, groups=[table_name])

            z_qso = meta['zem_GROUP'][0]
            flux = np.array(spec[0].flux)
            sig = np.array(spec[0].sig)
            loglam = np.log10(np.array(spec[0].wavelength))
            (loglam_padded, flux_padded, sig_padded) = pad_loglam_flux(loglam,
                                                                       flux,
                                                                       z_qso,
                                                                       sig=sig)
            # Sanity check that we're getting the log10 values
            assert np.all(
                loglam < 10), "Loglam values > 10, example: %f" % loglam[0]

            raw_data['flux'] = flux_padded
            raw_data['sig'] = sig_padded
            raw_data['loglam'] = loglam_padded
            raw_data['plate'] = plate
            raw_data['mjd'] = 0
            raw_data['fiber'] = fiber
            raw_data['ra'] = ra
            raw_data['dec'] = dec
            assert np.shape(raw_data['flux']) == np.shape(raw_data['loglam'])
            sys.stdout = stdout

    return raw_data, z_qso
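An illustrative call of read_igmspec, assuming the SDSS_DR7 group; the plate/fiber pair (861, 333) is borrowed from the get_sdss call in a later example and serves only as a placeholder.

# Hypothetical call; requires the igmspec database plus the module-level cache and lock.
raw_data, z_qso = read_igmspec(861, 333, table_name='SDSS_DR7')
print(z_qso, raw_data['flux'].shape)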
Code Example #11
File: tpe_sample.py Project: qsopairs/papers
def make_old_sample(min_logLV, outfil=None, tpe_sav=None):
    """ Generate TPE sample
    Parameters
    ----------
    min_logLV : float
    outfil : str

    Returns
    -------
    tpe_sample : Table
      Unified table of QSO pairs for TPE
    """
    # Read SAV file
    if tpe_sav is None:
        svfile = os.getenv('DROPBOX_DIR')+'QSOPairs/TPE_DR12/TPE_DR12_Mon-May-16-18:00:17-2016_concat.sav'
        print("Loading save file {:s}".format(svfile))
        print("Be patient....")
        tpe_sav = readsav(svfile)
    # Cosmology dict
    cdict = {}
    for key in ['omega_m', 'omega_v', 'w', 'lit_h']:
        cdict[key] = tpe_sav[key]

    # Tables for convenience
    qso_fg = Table(tpe_sav['qso_fg'])
    qso_bg = Table(tpe_sav['qso_bg'])

    # Luminosity cut
    Lcut = qso_fg['LOGLV'] > min_logLV
    print("{:d} pairs satisfy the Lcut of {:g}".format(np.sum(Lcut), min_logLV))
    qso_fg = qso_fg[Lcut]
    qso_bg = qso_bg[Lcut]

    # Load IgmSpec and QPQ
    igmsp = IgmSpec()
    qpq_file = os.getenv('DROPBOX_DIR')+'/QSOPairs/spectra/qpq_oir_spec.hdf5'
    qpq = IgmSpec(db_file=qpq_file, skip_test=True)

    # Insist on existing spectra
    b_coords = SkyCoord(ra=qso_bg['RA'], dec=qso_bg['DEC'], unit='deg')
    f_coords = SkyCoord(ra=qso_fg['RA'], dec=qso_fg['DEC'], unit='deg')

    bin_igmsp = igmsp.qcat.match_coord(b_coords) >= 0
    bin_qpq = qpq.qcat.match_coord(b_coords) >= 0
    fin_igmsp = igmsp.qcat.match_coord(f_coords) >= 0
    fin_qpq = qpq.qcat.match_coord(f_coords) >= 0

    fgd = np.any([fin_igmsp, fin_qpq],axis=0)
    print("{:d} f/g quasars in IgmSpec or QPQspec".format(np.sum(fgd)))
    bgd = np.any([bin_igmsp, bin_qpq],axis=0)
    print("{:d} b/g quasars in IgmSpec or QPQspec".format(np.sum(bgd)))

    gd_pairs = fgd & bgd
    print("{:d} good quasar pairs (ie. in both)".format(np.sum(gd_pairs)))

    # Generate TPE Table
    tpe_tbl = tpe_table(qso_fg[gd_pairs], qso_bg[gd_pairs])

    # Write
    if outfil is not None:
        print("Writing {:s}".format(outfil))
        tpe_tbl.write(outfil, overwrite=True)
    # Return
    return tpe_tbl
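A hedged invocation sketch for make_old_sample: the luminosity cut of 31.2 mirrors the TPE_DR12_31.2_spec.fits filenames used elsewhere in these examples, and the output filename is illustrative only.

# Illustrative only: requires DROPBOX_DIR, the IDL SAV file, and both spectral databases.
tpe_tbl = make_old_sample(31.2, outfil='TPE_DR12_31.2.fits')
print("{:d} pairs in the TPE table".format(len(tpe_tbl)))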
Code Example #12
File: tpe_sample.py Project: qsopairs/papers
def tpe_chk_spec(tpe_file):
    """ Check spectrum exists and then look for continuum

    Parameters
    ----------
    tpe_file

    Returns
    -------

    """
    from astropy import units as u

    # Load spectral datasets
    igmsp = IgmSpec()
    qpq_file = os.getenv('DROPBOX_DIR')+'/QSOPairs/spectra/qpq_oir_spec.hdf5'
    qpq = IgmSpec(db_file=qpq_file, skip_test=True)
    # Load TPE table
    tpe = Table.read(tpe_file)
    b_coords = SkyCoord(ra=tpe['BG_RA'], dec=tpe['BG_DEC'], unit='deg')
    uni_instr = np.unique(tpe['BG_LYA_INSTRUMENT'])

    # Instrument dict
    inst_dict = {}
    for instr in uni_instr:
        inst_dict[instr] = {}
        inst_dict[instr]['NO_CO'] = 0
    inst_dict['LRIS']['GRATING'] = '1200/3400'
    inst_dict['BOSS']['GROUP'] = 'BOSS_DR12'
    inst_dict['SDSS']['GROUP'] = 'SDSS_DR7'

    # Standard process
    # -- Find all b/g spectra covering Lya (igmspec, QPQ): Generate list (INSTR,GRATING)
    # -- Order by: UVES/HIRES/MIKE,
    #              MagE/ESI/XShooter,
    #              LRIS+B1200,  B600?, B400?
    #              MODS?/GMOS+B600?
    #              BOSS/SDSS

    # Query catalogs
    igm_cat = igmsp.qcat.query_coords(b_coords)

    # Scan
    for instr in uni_instr:
        # Parse
        idx = np.where(tpe['BG_LYA_INSTRUMENT'] == instr)[0]
        inst_dict[instr]['NSPEC'] = len(idx)
        gd_b_coords = b_coords[idx]
        # Load
        if instr in ['BOSS', 'SDSS']:
            spec, meta = igmsp.coords_to_spectra(gd_b_coords, inst_dict[instr]['GROUP'], all_spec=False)
        else:
            continue
        # Checks
        for ii,iidx in enumerate(idx):
            lya = (1+tpe['FG_Z'][iidx]) * 1215.67 * u.AA
            spec.select = ii
            iwave = np.argmin(np.abs(spec.wavelength-lya))
            if np.isclose(spec.co[iwave], 0.) or np.isclose(spec.co[iwave],1.):
                print("BG source {} at z={} has no continuum at Lya".format(b_coords[iidx],
                                                                            tpe['FG_Z'][iidx]))
                inst_dict[instr]['NO_CO'] += 1
    pdb.set_trace()
Code Example #13
def fig_resolution(outfil='fig_resolution.png'):
    """ Plots of FJ0812 in several spectrometers
    """
    # Load spectra
    igmsp = IgmSpec()
    sdss, _ = igmsp.get_sdss(861, 333, groups=['SDSS_DR7'])
    sdss.normed = True

    esi = lsio.readspec(
        os.getenv('DROPBOX_DIR') +
        'Keck/ESI/RedData/FJ0812+32/FJ0812+32_f.fits')
    hires = lsio.readspec(
        os.getenv('DROPBOX_DIR') +
        'Keck/HIRES/RedData/FJ0812+32/FJ0812+32B_f.fits')

    # Initialize
    xmnx = (4100., 4450)
    ymnx = (-0.05, 1.28)
    lw = 1.0
    # Start the plot
    fig = plt.figure(figsize=(8.5, 5.0))

    plt.clf()
    gs = gridspec.GridSpec(2, 2)
    lbls = [
        'SDSS: R=2000\n N ~ 100,000', 'ESI: R=8000\n N~1,000',
        'HIRES: R=30000\n N~100'
    ]
    clrs = ['blue', 'red', 'green']

    # Final plot
    ax2 = plt.subplot(gs[1, 1])
    ax2.set_xlim(4270, 4295)
    ax2.set_ylim(ymnx)
    ax2.set_xlabel('Wavelength (Angstroms)')

    for qq in range(3):
        scl = 1.
        if qq == 0:
            spec = sdss
            scl = 1.1
        elif qq == 1:
            spec = esi
        elif qq == 2:
            spec = hires

        # SDSS
        ax = plt.subplot(gs[qq % 2, qq // 2])
        #ax.xaxis.set_minor_locator(plt.MultipleLocator(0.5))
        #ax.xaxis.set_major_locator(plt.MultipleLocator(20.))
        #ax.yaxis.set_minor_locator(plt.MultipleLocator(0.1))
        #ax.yaxis.set_major_locator(plt.MultipleLocator(0.2))
        ax.set_xlim(xmnx)
        ax.set_ylim(ymnx)
        ax.set_ylabel('Normalized Flux')
        # if qq == 0:
        #     ax.get_xaxis().set_ticks([])
        # else:
        ax.set_xlabel('Wavelength (Angstroms)')

        ax.plot(spec.wavelength, spec.flux / scl, 'k', linewidth=lw)
        ax2.plot(spec.wavelength,
                 spec.flux / scl,
                 color=clrs[qq],
                 linewidth=lw,
                 drawstyle='steps-mid')

        # Label
        csz = 12.
        ax.text(0.95,
                0.8,
                lbls[qq],
                transform=ax.transAxes,
                color=clrs[qq],
                size=csz,
                ha='right',
                bbox={'facecolor': 'white'})

    # Layout and save
    print('Writing {:s}'.format(outfil))
    plt.tight_layout(pad=0.2, h_pad=0.3, w_pad=0.4)
    plt.savefig(outfil, dpi=500)
    plt.close()
Code Example #14
File: cos_halos.py Project: xiaoleihappy/igmspec
def grab_meta():
    """ Grab COS-Halos meta table
    Returns
    -------

    """
    from time import strptime
    from specdb.zem.utils import zem_from_radec
    from specdb.specdb import IgmSpec
    from specdb.defs import get_res_dicts
    Rdicts = get_res_dicts()
    igmsp = IgmSpec(db_file=os.getenv('SPECDB') + '/IGMspec_DB_v01.hdf5',
                    skip_test=True)

    summ_file = os.getenv('RAW_IGMSPEC') + '/COS-Halos/cos_halos_obs.ascii'
    chalos_meta = Table.read(summ_file, format='ascii')
    # RA/DEC, DATE
    # Visits from this page: http://www.stsci.edu/cgi-bin/get-visit-status?id=11598&markupFormat=html
    visit_file = os.getenv('RAW_IGMSPEC') + '/COS-Halos/cos_halos_visits.ascii'
    ch_visits = Table.read(visit_file, format='ascii')
    ra = []
    dec = []
    datet = []
    for row in chalos_meta:
        coord = ltu.radec_to_coord(row['QSO'])
        ra.append(coord.ra.value)
        dec.append(coord.dec.value)
        #
        visit = row['Visit']
        mtv = np.where(ch_visits['Visit'] == visit)[0]
        if len(mtv) != 1:
            pdb.set_trace()
        else:
            chv = ch_visits['Start_UT'][mtv].data[0]
        icmma = chv.find(',')
        datet.append('{:s}-{:02d}-{:02d}'.format(
            chv[icmma + 1:icmma + 5],
            strptime(chv[:3], '%b').tm_mon, int(chv[3:icmma])))
    chalos_meta.add_column(Column(ra, name='RA'))
    chalos_meta.add_column(Column(dec, name='DEC'))
    chalos_meta.add_column(Column(datet, name='DATE-OBS'))
    # Others
    chalos_meta.add_column(
        Column(['      '] * len(chalos_meta), name='TELESCOPE'))  # Padding
    chalos_meta.add_column(Column(['     '] * len(chalos_meta),
                                  name='INSTR'))  # Padding for HIRES
    chalos_meta.add_column(
        Column(['G130M/G160M'] * len(chalos_meta), name='DISPERSER'))
    chalos_meta.add_column(Column([20000.] * len(chalos_meta), name='R'))
    chalos_meta.add_column(Column([2000.] * len(chalos_meta), name='EPOCH'))
    chalos_meta['INSTR'] = 'COS'  # Deals with padding
    chalos_meta['TELESCOPE'] = 'HST'
    # Myers for zem
    zem, zsource = zem_from_radec(chalos_meta['RA'], chalos_meta['DEC'],
                                  Table(igmsp.hdf['quasars'].value))
    badz = zem <= 0.
    if np.sum(badz) > 0:
        raise ValueError("Bad zem in COS-Halos")
    chalos_meta['zem'] = zem
    chalos_meta['sig_zem'] = 0.  # Need to add
    chalos_meta['flag_zem'] = zsource
    # HIRES
    hires_files = glob.glob(
        os.getenv('RAW_IGMSPEC') + '/COS-Halos/HIRES/J*f.fits.gz')
    hires_tab = chalos_meta[0:0]
    subnm = np.array([row['QSO'][4:9] for row in chalos_meta])
    signs = np.array([row['QSO'][14] for row in chalos_meta])
    for ifile in hires_files:
        print(ifile)
        fname = ifile.split('/')[-1]
        mt = np.where((subnm == fname[0:5]) & (signs == fname[5]))[0]
        if len(mt) != 1:
            pdb.set_trace()
        # Add row
        hires_tab.add_row(chalos_meta[mt[0]])
        hires_tab[-1]['INSTR'] = 'HIRES'
        hires_tab[-1]['TELESCOPE'] = 'Keck I'
        hires_tab[-1]['DISPERSER'] = 'Red'
        hires_tab[-1]['R'] = Rdicts['HIRES']['C1']
    # Combine
    chalos_meta = vstack([chalos_meta, hires_tab])
    chalos_meta['STYPE'] = str('QSO')
    # Rename
    chalos_meta.rename_column('RA', 'RA_GROUP')
    chalos_meta.rename_column('DEC', 'DEC_GROUP')
    chalos_meta.rename_column('zem', 'zem_GROUP')
    # Check
    assert chk_meta(chalos_meta, chk_cat_only=True)
    # Done
    return chalos_meta
Code Example #15
def igmsp():
    from specdb.specdb import IgmSpec
    db_file = tdata_path('IGMspec_DB_{:s}_debug.hdf5'.format(version))
    igmsp = IgmSpec(db_file=db_file)
    return igmsp
Code Example #16
File: tpe_stack.py Project: qsopairs/papers
def tpe_stack_boss(dv=100 * u.km / u.s):
    """ Testing stacks with BOSS
    """
    # Load sample
    ipos = this_file.rfind('/')
    if ipos == -1:
        path = './'
    else:
        path = this_file[0:ipos]
    tpe = Table.read(path + '/../TPE_DR12_31.2_spec.fits')
    # Load spectra
    igmsp = IgmSpec()
    # Coordinates
    b_coords = SkyCoord(ra=tpe['BG_RA'], dec=tpe['BG_DEC'], unit='deg')
    f_coords = SkyCoord(ra=tpe['FG_RA'], dec=tpe['FG_DEC'], unit='deg')

    # Cut on impact parameter and BOSS
    kpc_amin = cosmo.kpc_comoving_per_arcmin(tpe['FG_Z'])  # kpc per arcmin
    ang_seps = b_coords.separation(f_coords)
    rho = ang_seps.to('arcmin') * kpc_amin / (1 + tpe['FG_Z'])

    cut_Rboss = (rho.to('Mpc').value < 4) & (
        tpe['BG_LYA_INSTRUMENT'] == 'BOSS') & (
            tpe['FG_Z'] > 2.)  # Some of these have too low z (just barely)

    # Cut
    gd_b_coords = b_coords[cut_Rboss]
    gd_f_coords = f_coords[cut_Rboss]
    gd_tpe = tpe[cut_Rboss]

    # Grab these spectra from igmsp
    #   For boss, we are ok taking the first entry of each
    #   The returned set is aligned with the input coords
    spec, meta = igmsp.coords_to_spectra(gd_b_coords,
                                         'BOSS_DR12',
                                         all_spec=False)

    # Check for continua
    has_co = np.array([True] * spec.nspec)
    for ii in range(spec.nspec):
        # Select
        spec.select = ii
        # Match to lya
        lya = (1 + gd_tpe['FG_Z'][ii]) * 1215.67 * u.AA
        iwave = np.argmin(np.abs(spec.wavelength - lya))
        # Check for co
        #coval = spec.co[iwave]
        #print('spec: {:d} with co={:g}'.format(ii, coval))
        if np.isclose(spec.co[iwave], 0.) or np.isclose(spec.co[iwave], 1.):
            has_co[ii] = False

    # Slice to good co
    print("{:d} BOSS spectra with a continuum".format(np.sum(has_co)))
    co_spec = spec[has_co]
    co_spec.normed = True  # Apply continuum

    # NEED TO ZERO OUT REGIONS WITHOUT CONTINUUM
    #  May also wish to isolate in wavelength to avoid rejected pixels
    for ii in range(co_spec.nspec):
        co_spec.select = ii
        co = co_spec.co.value
        bad_pix = np.any([(co == 0.), (co == 1.)], axis=0)
        co_spec.add_to_mask(bad_pix, compressed=True)

    # Rebin to rest
    zarr = gd_tpe['FG_Z'][has_co]
    rebin_spec = lspu.rebin_to_rest(co_spec, zarr, dv)

    # Check 2D
    check_td = True
    if check_td:
        fx = rebin_spec.data['flux']
        sig = rebin_spec.data['sig']
        gds = sig > 0.
        fx[~gds] = 0.
        xdb.set_trace()  # xdb.ximshow(fx)

    # Stack
    stack = lspu.smash_spectra(rebin_spec)
    # Plot
    plot_stack(stack, 'BOSS_stack.pdf')
    print('Wrote BOSS_stack.pdf')

    return stack
Code Example #17
File: tpe_stack.py Project: qsopairs/papers
def build_spectra(tpe, spec_tbl=None, outfil=None):
    """ Generate an XSpectrum1D object of TPE spectra
    Parameters
    ----------
    tpe : Table
    spec_tbl : str or Table, optional
    outfil : str, optional

    Returns
    -------
    spec : XSpectrum1D

    """
    from specdb.build import utils as spbu
    from linetools.spectra import utils as ltspu
    # Grab spectra table -- might read from disk eventually
    if spec_tbl is not None:
        if isinstance(spec_tbl, Table):
            pass
        elif isinstance(spec_tbl, basestring):
            spec_tbl = Table.read(spec_tbl)
    else:
        spec_tbl = get_spec_meta(tpe)
    assert len(tpe) == len(spec_tbl)
    # Load spectral sets
    igmsp = IgmSpec()
    qpq = IgmSpec(db_file=qpq_file, skip_test=True)
    # Grab igmspec spectra
    iigms = spec_tbl['DBASE'] == 'igmspec'
    sub_meta = spec_tbl[['GROUP', 'GROUP_ID']][iigms]
    igm_spec = igmsp.spectra_from_meta(sub_meta)
    # Grab QPQ
    iqpq = spec_tbl['DBASE'] == 'qpq'
    sub_meta = spec_tbl[['GROUP', 'GROUP_ID']][iqpq]
    qpq_spec = qpq.spectra_from_meta(sub_meta)
    # Cut TPE
    gdtpe = spec_tbl['GROUP_ID'] >= 0
    cut_tpe = tpe[gdtpe]
    cut_stbl = spec_tbl[gdtpe]
    if np.sum(~gdtpe) > 0:
        print("These pairs had no good b/g spectrum")
        print(tpe[['BG_RA', 'BG_DEC']][~gdtpe])
    # Collate
    coll_spec = ltspu.collate([igm_spec, qpq_spec])
    # Reorder to match cut_tpe
    idxi = np.where(iigms)[0]
    idxq = np.where(iqpq)[0]
    alli = np.concatenate([idxi, idxq])
    isrt = np.argsort(alli)
    fin_spec = coll_spec[isrt]
    # Check continua
    has_co = chk_continua(fin_spec, cut_tpe['FG_Z'])
    cut_stbl['HAS_CO'] = has_co
    if np.sum(~has_co) > 0:
        print("These spectra need a continuum")
        print(cut_stbl[['SPEC_FILE']][~has_co])
    # Write
    if outfil is not None:
        hdf = h5py.File(outfil, 'w')
        fin_spec.write_to_hdf5('dumb', hdf5=hdf)
        # Add Tables
        spbu.clean_table_for_hdf(cut_tpe)
        hdf['TPE'] = cut_tpe
        spbu.clean_table_for_hdf(cut_stbl)
        hdf['SPEC_TBL'] = cut_stbl
        # Close
        hdf.close()
        print("Wrote: {:s}".format(outfil))
    return cut_tpe, cut_stbl, fin_spec
Code Example #18
File: xq100.py Project: xiaoleihappy/igmspec
def grab_meta():
    """ Grab XQ-100 meta Table

    Returns
    -------

    """
    from specdb.specdb import IgmSpec
    igmsp = IgmSpec()
    #
    xq100_table = Table.read(
        os.getenv('RAW_IGMSPEC') + '/XQ-100/XQ100_v1_2.fits.gz')
    nqso = len(xq100_table)
    # ESO meta
    eso_tbl = Table.read(os.getenv('RAW_IGMSPEC') +
                         '/XQ-100/metadata_eso_XQ100.csv',
                         format='ascii.csv')
    ar_files = eso_tbl['ARCFILE'].data
    # Spectral files
    spec_files = glob.glob(os.getenv('RAW_IGMSPEC') + '/XQ-100/ADP.*')
    # Coordinates of the XQ-100 targets
    xq100_coords = SkyCoord(ra=xq100_table['RA'],
                            dec=xq100_table['DEC'],
                            unit='deg')
    matches = []
    sv_spec_files = []
    sv_orig_files = []
    sv_rescale_files = []
    for spec_file in spec_files:
        if 'ADP.2016-07-15T08:22:40.682.fits' in spec_file:
            print("XQ-100: Skipping summary file")
            continue
        # ESO file
        ssfile = spec_file[spec_file.rfind('/') + 1:-5]
        eso_mt = np.where(ar_files == ssfile)[0]
        try:
            ofile = eso_tbl['ORIGFILE'][eso_mt][0]
        except IndexError:
            print("XQ-100: File {:s} not really in XQ100!".format(spec_file))
            continue
        if any(sub in ofile for sub in ('_1', '_2', '_3', '_4')):
            print("XQ-100: Skipping additional file: {:s}".format(ofile))
            continue
        # Match
        hdu = fits.open(spec_file)
        head0 = hdu[0].header
        if head0['DISPELEM'] == 'UVB,VIS,NIR':
            print("XQ-100: Skipping merged spectrum file")
            if 'rescale' not in ofile:
                print('no rescale')
                pdb.set_trace()
            continue
        try:
            coord = SkyCoord(ra=head0['RA'], dec=head0['DEC'], unit='deg')
        except KeyError:
            pdb.set_trace()
        sep = coord.separation(xq100_coords)
        imt = np.argmin(sep)
        if sep[imt] > 0.1 * u.arcsec:
            pdb.set_trace()
            raise ValueError("Bad offset")
        # Save
        matches.append(imt)
        sv_spec_files.append(spec_file)
        sv_orig_files.append(ofile)
    # Finish up
    xq100_meta = xq100_table[np.array(matches)]
    nspec = len(xq100_meta)
    # Add spec_files
    xq100_meta['SPEC_FILE'] = sv_spec_files
    xq100_meta['ORIG_FILE'] = sv_orig_files
    # Add zem
    xq100_meta['zem_GROUP'] = xq100_meta['Z_QSO']
    xq100_meta['sig_zem'] = xq100_meta['ERR_ZQSO']
    xq100_meta['flag_zem'] = [str('XQ-100')] * nspec
    # Rename
    xq100_meta.rename_column('RA', 'RA_GROUP')
    xq100_meta.rename_column('DEC', 'DEC_GROUP')
    # Match to Myers
    myers = Table(igmsp.hdf['quasars'].value)
    myers_coord = SkyCoord(ra=myers['RA'], dec=myers['DEC'], unit='deg')
    xq100_coord = SkyCoord(ra=xq100_meta['RA_GROUP'],
                           dec=xq100_meta['DEC_GROUP'],
                           unit='deg')
    idx, d2d, _ = match_coordinates_sky(xq100_coord,
                                        myers_coord,
                                        nthneighbor=1)
    xq100_meta['RA_GROUP'] = myers_coord.ra.value[idx]
    xq100_meta['DEC_GROUP'] = myers_coord.dec.value[idx]
    # One bad one (Taking RA/DEC from Simbad)
    bad_c = d2d.to('arcsec') > 20 * u.arcsec
    xq100_meta['RA_GROUP'][bad_c] = 215.2823
    xq100_meta['DEC_GROUP'][bad_c] = -6.73232
    # DATE-OBS
    meanmjd = []
    for row in xq100_meta:
        gdm = row['MJD_OBS'] > 0.
        meanmjd.append(np.mean(row['MJD_OBS'][gdm]))
    t = Time(meanmjd, format='mjd', out_subfmt='date')  # Fixes to YYYY-MM-DD
    xq100_meta.add_column(Column(t.iso, name='DATE-OBS'))
    #
    xq100_meta.add_column(Column([2000.] * nspec, name='EPOCH'))
    xq100_meta['STYPE'] = str('QSO')
    # Sort
    xq100_meta.sort('RA_GROUP')
    # Check
    assert chk_meta(xq100_meta, chk_cat_only=True)
    #
    return xq100_meta
Code Example #19
File: tpe_stack.py Project: qsopairs/papers
def get_spec_meta(tpe, outfil=None):
    """ Given a TPE table, generate a table describing available spectra
    and the preferred choice.
    Parameters
    ----------
    tpe : Table
    outfil : str, optional

    Returns
    -------
    spec_tbl : Table
      Table describing the spectra; aligned with input TPE Table
        specm -- str, describing all available spectra
        nspec -- int, number of available spectra
        best_spec -- str, describes the best one given priority dict
        nok -- number good enough for TPE (i.e. in instr_priority dict)
        ibest, best_row -- int, uninteresting indices
    """
    # Load spectral sets
    igmsp = IgmSpec()
    qpq = IgmSpec(db_file=qpq_file, skip_test=True)
    #
    b_coords = SkyCoord(ra=tpe['BG_RA'], dec=tpe['BG_DEC'], unit='deg')
    # Query the spectral catalogs
    #igm_cat_match, igm_cat, igm_ID = igmsp.qcat.query_coords(b_coords)
    #qpq_cat_match, qpq_cat, qpq_ID = qpq.qcat.query_coords(b_coords)
    # Generate lists of meta tables
    igm_meta_match, igm_meta_list = igmsp.meta_from_coords(b_coords,
                                                           first=False)
    qpq_meta_match, qpq_meta_list = qpq.meta_from_coords(b_coords, first=False)
    # Identify best instrument/grating combo
    instr_pri_dict = instr_priority()
    spec_dict = dict(specm='',
                     best_spec='',
                     nspec=0,
                     ibest=-1,
                     nok=0,
                     best_row=-1)
    spec_meta = [spec_dict.copy() for i in range(len(tpe))]
    for qq, pair in enumerate(tpe):
        # igmspec
        if igm_meta_match[qq]:
            # Add
            add_to_specmeta('igmsp', igm_meta_list[qq], qq, spec_meta,
                            pair['FG_Z'], instr_pri_dict)
        # QPQ
        if qpq_meta_match[qq]:
            # Meta + add
            add_to_specmeta('qpq', qpq_meta_list[qq], qq, spec_meta,
                            pair['FG_Z'], instr_pri_dict)
    # Convert to Table
    spec_tbl = Table()
    for key in spec_dict.keys():
        clm = [sdict[key] for sdict in spec_meta]
        spec_tbl[key] = clm
    # Add Group, Group_ID
    dbase, group, group_id, specfile = [], [], [], []
    for kk, row in enumerate(spec_tbl):
        if row['best_spec'][0:4] == 'igms':
            dbase.append('igmspec')
            group.append(igm_meta_list[kk]['GROUP'][row['best_row']])
            group_id.append(igm_meta_list[kk]['GROUP_ID'][row['best_row']])
            specfile.append(igm_meta_list[kk]['SPEC_FILE'][row['best_row']])
        elif row['best_spec'][0:3] == 'qpq':
            dbase.append('qpq')
            group.append(qpq_meta_list[kk]['GROUP'][row['best_row']])
            group_id.append(qpq_meta_list[kk]['GROUP_ID'][row['best_row']])
            specfile.append(qpq_meta_list[kk]['SPEC_FILE'][row['best_row']])
        else:
            dbase.append('none')
            group.append('none')
            group_id.append(-1)
            specfile.append('N/A')
    spec_tbl['DBASE'] = dbase
    spec_tbl['GROUP'] = group
    spec_tbl['GROUP_ID'] = group_id
    spec_tbl['SPEC_FILE'] = specfile
    # Write?
    if outfil is not None:
        spec_tbl.write(outfil, overwrite=True)
        print("Writing spec table: {:s}".format(outfil))
    # Return
    return spec_tbl
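Examples #17 and #19 are two halves of one workflow: get_spec_meta ranks the available spectra for each pair, and build_spectra then collates the chosen spectra into a single XSpectrum1D. A sketch of chaining them (filenames are placeholders):

# Sketch of chaining the two tpe_stack helpers; paths and output names are placeholders.
tpe = Table.read('TPE_DR12_31.2_spec.fits')
spec_tbl = get_spec_meta(tpe, outfil='tpe_spec_tbl.fits')
cut_tpe, cut_stbl, fin_spec = build_spectra(tpe, spec_tbl=spec_tbl, outfil='tpe_spectra.hdf5')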
Code Example #20
def mk_db(dbname, tree, outfil, iztbl, version='v00', id_key='PRIV_ID',
          publisher='Unknown', **kwargs):
    """ Generate the DB

    Parameters
    ----------
    dbname : str
      Name for the database
    tree : str
      Path to top level of the tree of FITS files
      Typically, each branch in the tree corresponds to a single instrument
    outfil : str
      Output file name for the hdf5 file
    iztbl : Table or str
      If Table, see meta() docs for details on its format
      If str, it must be 'igmspec' and the user must have that DB downloaded
    version : str, optional
      Version code

    Returns
    -------

    """
    from specdb import defs

    # ztbl
    if isinstance(iztbl, str):
        if iztbl == 'igmspec':
            from specdb.specdb import IgmSpec
            igmsp = IgmSpec()
            ztbl = Table(igmsp.idb.hdf['quasars'][...])
    elif isinstance(iztbl, Table):
        ztbl = iztbl
    else:
        raise IOError("Bad type for ztbl")

    # Find the branches
    branches = glob.glob(tree+'/*')
    branches.sort()
    # HDF5 file
    hdf = h5py.File(outfil,'w')

    # Defs
    zpri = defs.z_priority()
    gdict = {}

    # Main DB Table
    maindb, tkeys = spbu.start_maindb(id_key)

    # MAIN LOOP
    for ss,branch in enumerate(branches):
        # Skip files
        if not os.path.isdir(branch):
            continue
        print('Working on branch: {:s}'.format(branch))
        # Files
        fits_files, out_tup = grab_files(branch)
        meta_file, mtbl_file, ssa_file = out_tup

        # Meta
        maxpix, phead, mdict, stype = 10000, None, None, 'QSO'
        if meta_file is not None:
            # Load
            meta_dict = ltu.loadjson(meta_file)
            # Maxpix
            if 'maxpix' in meta_dict.keys():
                maxpix = meta_dict['maxpix']
            # STYPE
            if 'stype' in meta_dict.keys():
                stype = meta_dict['stype']
            # Parse header
            if 'parse_head' in meta_dict.keys():
                phead = meta_dict['parse_head']
            if 'meta_dict' in meta_dict.keys():
                mdict = meta_dict['meta_dict']
        full_meta = mk_meta(fits_files, ztbl, mtbl_file=mtbl_file,
                            parse_head=phead, mdict=mdict, **kwargs)
        # Update group dict
        group_name = branch.split('/')[-1]
        flag_g = spbu.add_to_group_dict(group_name, gdict)
        # IDs
        maindb = add_ids(maindb, full_meta, flag_g, tkeys, 'PRIV_ID', first=(flag_g==1))
        # Ingest
        ingest_spectra(hdf, group_name, full_meta, max_npix=maxpix, **kwargs)
        # SSA
        if ssa_file is not None:
            user_ssa = ltu.loadjson(ssa_file)
            ssa_dict = default_fields(user_ssa['Title'], flux=user_ssa['flux'], fxcalib=user_ssa['fxcalib'])
            hdf[group_name]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(ssa_dict))

    # Check stacking
    if not spbu.chk_vstack(hdf):
        print("Meta data will not stack using specdb.utils.clean_vstack")
        print("Proceed to write at your own risk..")
        pdb.set_trace()

    # Write
    write_hdf(hdf, str(dbname), maindb, zpri, gdict, version,
              Publisher=publisher)
    print("Wrote {:s} DB file".format(outfil))
Code Example #21
def fig_lowz_hiz(outfil='fig_loz_hiz.png'):
    """ Show varying IGM transmission
    """
    #hdlls_path = '/u/xavier/paper/LLS/Optical/Data/DR1/Spectra/'
    esi_path = '/u/xavier/Keck/ESI/RedData/'
    hst_path = '/u/xavier/HST/Cycle23/z1IGM/Archive/PG1206+459/'
    #
    igmsp = IgmSpec()
    idicts = [
        dict(filename='Data/3C273_STIS_E140M_F.fits'),
        dict(filename=hst_path + 'PG1206+459_E230M_f.fits'),
        dict(coord='J212329.50-005052.9', group=['HD-LLS_DR1']),
        dict(coord='J113621.00+005021.0', group=['HD-LLS_DR1']),
        dict(coord='J113246.5+120901.6', group=['ESI_DLA']),
        dict(filename=esi_path +
             'J1148+5251/SDSSJ1148+5251_stack.fits'),  # z=6
    ]
    lbls = [
        'HST/STIS: 3C273',
        'HST/STIS: PG1206+459',
        'Keck/HIRES: J2123-0050',  # 2.26
        'Magellan/MIKE: J1136+0050',  # 3.43
        'Keck/ESI: J1132+1209',  # 5.17
        'Keck/ESI: J1148+5251',  # 6.4
    ]
    zems = [0.17, 1.16, 2.26, 3.43, 5.17, 6.4]
    xrest = np.array([1080, 1200.])
    ymnx = [-0.1, 1.1]

    # Cut down
    idicts = [idicts[0], idicts[3]]
    lbls = [lbls[0], lbls[3]]
    zems = [zems[0], zems[3]]

    lw = 1.
    csz = 19.

    # Start the plot
    #fig = plt.figure(figsize=(5.0, 8.0))
    fig = plt.figure(figsize=(8.0, 5.0))

    plt.clf()
    gs = gridspec.GridSpec(len(lbls), 1)

    # Loop
    for qq, lbl in enumerate(lbls):

        # Grab data
        idict = idicts[qq]
        if 'coord' in idict.keys():
            qdict = {}
            for key in idict.keys():
                if key not in ['coord', 'group']:
                    qdict[key] = idict[key]
            spec, meta = igmsp.spectra_from_coord(
                idict['coord'], tol=5. * u.arcsec,
                groups=idict['group'])  #, query_dict=qdict)
            if meta is None:
                print("Bad coord?")
                pdb.set_trace()
            elif len(meta) > 1:
                pdb.set_trace()
        else:
            spec = lsio.readspec(idict['filename'])

        if lbl == 'HST/STIS: 3C273':
            hdu = fits.open('Data/3C273_STIS_E140M_c.fits')
            conti_3c273 = hdu[0].data
            spec.co = conti_3c273
            spec.normed = True

        # Spectrum
        ax = plt.subplot(gs[qq])
        ax.set_xlim(xrest * (1 + zems[qq]) / 1215.67 - 1)
        ax.set_ylim(ymnx)
        if qq == 3:
            ax.set_ylabel('Normalized Flux')
        if qq == len(lbls) - 1:
            ax.set_xlabel(r'Redshift of Ly$\alpha$')

        ax.plot(spec.wavelength.value / 1215.6701 - 1,
                spec.flux,
                'k',
                linewidth=lw)

        # Label
        #ax.text(0.05, 0.95, lbl+' zem={:0.1f}'.format(zems[qq]), color='blue',
        #    transform=ax.transAxes, size=csz, ha='left', bbox={'facecolor':'white'})
        #
        set_fontsize(ax, 12.)

    # Layout and save
    #plt.subplots_adjust(hspace=0)
    plt.tight_layout(pad=0.2, h_pad=0.0, w_pad=0.4)
    plt.savefig(outfil, dpi=600)
    plt.close()
    # Finish
    print('Writing {:s}'.format(outfil))
Code Example #22
def igmsp():
    db_file = data_path('IGMspec_DB_{:s}_debug.hdf5'.format(version))
    igmsp = IgmSpec(db_file=db_file)
    return igmsp
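Examples #15 and #22 read like pytest fixtures whose @pytest.fixture decorator was dropped during extraction; a consuming test might look like the sketch below (the decorator, the data_path/version helpers, and the test body are assumptions).

# Assumed pytest usage; the fixture decorator does not appear in the extracted snippets.
import pytest

@pytest.fixture
def igmsp():
    db_file = data_path('IGMspec_DB_{:s}_debug.hdf5'.format(version))
    return IgmSpec(db_file=db_file)

def test_catalog_loaded(igmsp):
    assert len(igmsp.cat) > 0  # the debug catalog should not be empty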
Code Example #23
def grab_meta():
    """ Grab UVES Dall'Aglio meta table

    Returns
    -------

    """
    #
    uvesdall_meta = Table.read(os.getenv('RAW_IGMSPEC') +
                               '/UVES_Dall/uves_dall_summ.dat',
                               format='ascii')
    nspec = len(uvesdall_meta)
    # DATE
    #datearr = [day.split('/') for day in list(uvesdall_meta['ObsDate'])]
    #ndate = ['20'+str(day[2])+'-'+str(day[0])+'-'+str(day[1]) for day in datearr]
    t = Time(uvesdall_meta['OBS-DATE'],
             out_subfmt='date')  # Fixes to YYYY-MM-DD
    uvesdall_meta.add_column(Column(t.iso, name='DATE-OBS'))
    # RA/DEC
    coord = SkyCoord(ra=uvesdall_meta['RA'],
                     dec=uvesdall_meta['DEC'],
                     unit=(u.hour, u.deg))
    rad = [icoord.ra.value for icoord in coord]
    decd = [icoord.dec.value for icoord in coord]
    uvesdall_meta.rename_column('RA', 'RA_STR')
    uvesdall_meta.rename_column('DEC', 'DEC_STR')
    uvesdall_meta['RA_GROUP'] = rad
    uvesdall_meta['DEC_GROUP'] = decd
    # Add zem
    igmsp = IgmSpec()
    ztbl = Table(igmsp.hdf['quasars'].value)
    zem, zsource = zem_from_radec(rad, decd, ztbl)
    badz = np.where(zem < 0.1)[0]
    for ibadz in badz:
        if uvesdall_meta['NAME'][ibadz] == 'HE2243-6031':
            zem[ibadz] = 3.005
            zsource[ibadz] = 'FOP13'  # Fumagalli+13
        elif uvesdall_meta['NAME'][ibadz] == 'HE1341-1020':
            zem[ibadz] = 2.137
            zsource[ibadz] = 'Dall08'  # Dall'Aglio+08
        elif uvesdall_meta['NAME'][ibadz] == 'Q0002-422':
            zem[ibadz] = 2.769
            zsource[ibadz] = 'Dall08'  # Dall'Aglio+08
        elif uvesdall_meta['NAME'][ibadz] == 'PKS2000-330':
            zem[ibadz] = 3.786
            zsource[ibadz] = 'Dall08'  # Dall'Aglio+08
        else:
            raise ValueError("Should not be here")

    uvesdall_meta['zem_GROUP'] = zem
    uvesdall_meta['sig_zem'] = [0.] * nspec
    uvesdall_meta['flag_zem'] = zsource
    #
    uvesdall_meta.add_column(Column([2000.] * nspec, name='EPOCH'))
    uvesdall_meta.add_column(Column(['VLT'] * nspec, name='TELESCOPE'))
    uvesdall_meta.add_column(Column(['UVES'] * nspec, name='INSTR'))
    uvesdall_meta.add_column(Column(['BOTH'] * nspec, name='DISPERSER'))
    uvesdall_meta.add_column(Column([45000.] * nspec, name='R'))
    uvesdall_meta['STYPE'] = str('QSO')
    # Sort
    uvesdall_meta.sort('RA_GROUP')
    # Check
    assert chk_meta(uvesdall_meta, chk_cat_only=True)
    return uvesdall_meta