Beispiel #1
0
def test_radeccoord():
    """Every supported RA/DEC representation must parse to the same RA."""
    inputs = [
        'J124511+144523',
        '124511+144523',
        'J12:45:11+14:45:23',
        ('12:45:11', '+14:45:23'),
        ('12:45:11', '14:45:23'),
        ('12 45 11', '+14 45 23'),
    ]
    for entry in inputs:
        sky = ltu.radec_to_coord(entry)
        # All forms resolve to the same RA in degrees
        np.testing.assert_allclose(sky.ra.value, 191.2958333333333)
    # Passing the whole list yields one coordinate per entry
    assert len(ltu.radec_to_coord(inputs)) == 6
Beispiel #2
0
def test_radeccoord():
    """Single inputs, list input and Galactic input for radec_to_coord."""
    reprs = ['J124511+144523', '124511+144523', 'J12:45:11+14:45:23',
             ('12:45:11', '+14:45:23'), ('12:45:11', '14:45:23'),
             ('12 45 11', '+14 45 23')]
    # Every representation resolves to the same RA (deg)
    for entry in reprs:
        np.testing.assert_allclose(
            ltu.radec_to_coord(entry).ra.value, 191.2958333333333)
    # A list input returns one coordinate per element
    assert len(ltu.radec_to_coord(reprs)) == 6
    # Galactic (l, b) input for the LMC converts to the expected ICRS values
    lmc = ltu.radec_to_coord((280.5, -32.9), gal=True)
    assert np.isclose(lmc.icrs.ra.value, 80.8456130588062)
    assert np.isclose(lmc.icrs.dec.value, -69.78267074987376)
Beispiel #3
0
def grab_meta():
    """Read the KODIAQ DR1 summary file and return it as an augmented Table.

    Returns
    -------
    kodiaq_meta : Table
        Meta table with RA, DEC, DATE-OBS, INSTR and TELESCOPE columns added.
    """
    summary_path = igms_path + '/data/meta/KODIAQ_DR1_summary.ascii'
    kodiaq_meta = Table.read(summary_path, format='ascii', comment='#')
    nspec = len(kodiaq_meta)
    # Sanity check: every entry must belong to DR1
    for row in kodiaq_meta:
        assert row['kodrelease'] == 1
    # Build RA/DEC (deg) and observation-date columns row by row
    ra_vals, dec_vals, date_vals = [], [], []
    for row in kodiaq_meta:
        # Convert the per-row coordinate strings to decimal degrees
        sky = ltu.radec_to_coord((row['sRA'], row['sDEC']))
        ra_vals.append(sky.ra.value)
        dec_vals.append(sky.dec.value)
        # 'pi_date' looks like NAME_Mon_DD_..._YYYY; reformat as YYYY-MM-DD
        parts = row['pi_date'].split('_')
        stamp = str('{:s}-{:s}-{:02d}'.format(parts[-1], parts[1][0:3],
                                              int(parts[2])))
        parsed = datetime.datetime.strptime(stamp, '%Y-%b-%d')
        date_vals.append(datetime.datetime.strftime(parsed, '%Y-%m-%d'))
    kodiaq_meta.add_column(Column(ra_vals, name='RA'))
    kodiaq_meta.add_column(Column(dec_vals, name='DEC'))
    kodiaq_meta.add_column(Column(date_vals, name='DATE-OBS'))
    # Instrument/telescope are the same for every spectrum
    kodiaq_meta.add_column(Column(['HIRES'] * nspec, name='INSTR'))
    kodiaq_meta.add_column(Column(['Keck-I'] * nspec, name='TELESCOPE'))
    return kodiaq_meta
Beispiel #4
0
def grab_meta_mike():
    """Read the HD-LLS DR1 MIKE meta file and add RA/DEC and DATE-OBS columns.

    Returns
    -------
    mike_meta : Table
    """
    meta_path = igms_path + '/data/meta/HD-LLS_DR1_MIKE.ascii'
    mike_meta = Table.read(meta_path,
                           format='ascii',
                           delimiter='&',
                           guess=False,
                           comment='#')
    ra_list, dec_list, date_list = [], [], []
    for row in mike_meta:
        # Repair double negative signs occasionally present in sDEC
        if '--' in row['sDEC']:
            row['sDEC'] = row['sDEC'].replace('--', '-')
        # Decimal-degree coordinates
        sky = ltu.radec_to_coord((row['sRA'], row['sDEC']))
        ra_list.append(sky.ra.value)
        dec_list.append(sky.dec.value)
        # Reorder the whitespace-separated DATE field to year-month-day
        d = row['DATE'].split(' ')
        date_list.append(str('{:s}-{:s}-{:s}'.format(d[2], d[1], d[0])))
    mike_meta.add_column(Column(ra_list, name='RA_GROUP'))
    mike_meta.add_column(Column(dec_list, name='DEC_GROUP'))
    mike_meta.add_column(Column(date_list, name='DATE-OBS'))
    return mike_meta
Beispiel #5
0
    def __init__(self, radec, sl_type=None, em_type=None, comment=None, name=None, **kwargs):
        """Initialize the sightline.

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA,DEC) in deg or astropy.coordinate.SkyCoord
        sl_type : str, optional
            Sightline type, e.g. IGM
        em_type : str, optional
            Type of emission source for absorption sightline, e.g. QSO
        comment : str, optional
            A comment, default is ``
        name : str, optional
            Name of the sightline, e.g. '3C273'
        """
        # Coordinates are mandatory; normalize whatever form was given
        self.coord = ltu.radec_to_coord(radec)

        # Container for absorption components
        self._components = []

        # Fall back to a coordinate-based name when none is supplied
        self.name = ltu.name_from_coord(self.coord) if name is None else name

        # Remaining attributes
        self.em_type = em_type
        self.sl_type = sl_type
        self._abssystems = None  # Saving the namespace for future usage
Beispiel #6
0
def main(pargs):
    """Report extinction, NE2001 ISM DM and imaging-survey coverage for a coordinate.

    Parameters
    ----------
    pargs : Namespace
        Parsed command-line arguments; ``pargs.coord`` holds the position.
    """
    # NOTE: the unused `import json` has been removed (dead import).
    from linetools import utils as ltu
    from linetools.scripts.utils import coord_arg_to_coord

    from frb.galaxies import nebular
    from frb import mw
    from frb.surveys import survey_utils

    # Deal with coord -- normalize the CLI input into a SkyCoord
    icoord = ltu.radec_to_coord(coord_arg_to_coord(pargs.coord))

    # E(B-V) from the dust map; A_V assuming R_V = 3.1
    EBV = nebular.get_ebv(icoord)['meanValue']  #
    AV = EBV * 3.1  # RV

    print("AV = {}".format(AV))

    # NE 2001 dispersion measure of the Galactic ISM
    DM_ISM = mw.ismDM(icoord)
    print(f"NE2001 = {DM_ISM}")

    # Which imaging surveys cover this position?
    print("Checking the imaging surveys...")
    inside = survey_utils.in_which_survey(icoord)
    print(inside)
Beispiel #7
0
def test_geocorrect(fitstbl):
    """Exercise the heliocentric velocity correction end to end."""
    # Observation time and pointing of science frame 5
    idx = 5
    tobs = Time(fitstbl['mjd'][idx], format='mjd')  #'%Y-%m-%dT%H:%M:%S.%f')
    pointing = ltu.radec_to_coord((fitstbl["ra"][idx], fitstbl["dec"][idx]))

    vel, corr = wave.geomotion_correct(pointing, tobs, lon, lat, alt,
                                       'heliocentric')
    # Checked against x_keckhelio
    assert np.isclose(vel, -9.17461338, rtol=1e-5)
    #assert np.isclose(helio, -9.3344957, rtol=1e-5)  # Original
    assert np.isclose(1 - corr, 3.060273748e-05, rtol=1e-5)

    # Now apply the correction to a synthetic boxcar extraction
    nwave = 1000
    spec = specobj.SpecObj('MultiSlit', 1, SLITID=0)
    spec.BOX_WAVE = np.linspace(4000., 6000., nwave)
    spec.BOX_COUNTS = 50. * (spec.BOX_WAVE / 5000.)**-1.
    spec.BOX_COUNTS_IVAR = 1. / spec.BOX_COUNTS.copy()
    spec.apply_helio(corr, 'heliocentric')
    assert np.isclose(spec.BOX_WAVE[0], 3999.877589008, rtol=1e-8)
Beispiel #8
0
def grab_meta():
    """Read the HST_z2 meta file and return it formatted for the group catalog.

    Returns
    -------
    hstz2_meta : Table
    """
    hstz2_meta = Table.read(os.getenv('RAW_IGMSPEC') + '/HST_z2/hst_z2.ascii',
                            format='ascii')
    nspec = len(hstz2_meta)
    # Convert per-row ra/dec into decimal-degree columns
    ra_vals, dec_vals = [], []
    for row in hstz2_meta:
        sky = ltu.radec_to_coord((row['ra'], row['dec']))
        ra_vals.append(sky.ra.value)
        dec_vals.append(sky.dec.value)
    hstz2_meta.add_column(Column(ra_vals, name='RA_GROUP'))
    hstz2_meta.add_column(Column(dec_vals, name='DEC_GROUP'))
    # Redshift bookkeeping
    hstz2_meta.rename_column('zem', 'zem_GROUP')
    hstz2_meta['sig_zem'] = [0.] * nspec
    hstz2_meta['flag_zem'] = [str('SDSS_PIPE')] * nspec
    hstz2_meta['STYPE'] = [str('QSO')] * nspec
    # Standardize the remaining column names
    for old, new in [('obsdate', 'DATE-OBS'), ('tel', 'TELESCOPE'),
                     ('inst', 'INSTR'), ('grating', 'DISPERSER'),
                     ('resolution', 'R')]:
        hstz2_meta.rename_column(old, new)
    # Final sanity check before returning
    assert chk_meta(hstz2_meta, chk_cat_only=True)
    return hstz2_meta
Beispiel #9
0
def grab_meta_mike():
    """Grab the HD-LLS DR1 MIKE meta table with RA/DEC and DATE-OBS added.

    Returns
    -------
    mike_meta : Table
    """
    mike_meta = Table.read(igms_path + '/data/meta/HD-LLS_DR1_MIKE.ascii',
                           format='ascii', delimiter='&',
                           guess=False, comment='#')
    ras, decs, dates = [], [], []
    for row in mike_meta:
        # Repair occasional double minus signs in the DEC string
        if '--' in row['sDEC']:
            row['sDEC'] = row['sDEC'].replace('--', '-')
        # Decimal degrees
        sky = ltu.radec_to_coord((row['sRA'], row['sDEC']))
        ras.append(sky.ra.value)
        decs.append(sky.dec.value)
        # Reorder the whitespace-separated DATE field to year-month-day
        d = row['DATE'].split(' ')
        dates.append(str('{:s}-{:s}-{:s}'.format(d[2], d[1], d[0])))
    mike_meta.add_column(Column(ras, name='RA_GROUP'))
    mike_meta.add_column(Column(decs, name='DEC_GROUP'))
    mike_meta.add_column(Column(dates, name='DATE-OBS'))
    return mike_meta
Beispiel #10
0
def main(args=None):
    """Print a coordinate as a J-name, sexagesimal pair, degrees and Galactic."""
    pargs = parser(options=args)
    if pargs.inp is None and pargs.all is False:
        print("No option selected.  Use -h for Help")
        return
    # Setup
    from linetools import utils as ltu
    from .utils import coord_arg_to_coord
    from astropy import units as u

    # Parse the input into a SkyCoord (optionally Galactic l,b)
    coord = ltu.radec_to_coord(coord_arg_to_coord(pargs.inp), gal=pargs.gal)

    # Precompute the ICRS string representations once
    ra_hms = coord.icrs.ra.to_string(unit=u.hour, sep='', pad=True)
    dec_dms = coord.icrs.dec.to_string(sep='', pad=True, alwayssign=True)
    ra_colon = coord.icrs.ra.to_string(unit=u.hour, sep=':', pad=True)
    dec_colon = coord.icrs.dec.to_string(sep=':', pad=True, alwayssign=True)

    # Time to print
    print('      ')
    print('J{:s}{:s}'.format(ra_hms, dec_dms))
    print('   ')
    print('   {:s} {:s}   (J2000)'.format(ra_colon, dec_colon))
    print('   RA={:f} deg, DEC={:f} deg'.format(coord.icrs.ra.deg, coord.icrs.dec.deg))
    print('   radec = ({:f},{:f}) deg'.format(coord.icrs.ra.deg, coord.icrs.dec.deg))
    print('   Galactic = ({:f},{:f}) deg'.format(coord.galactic.l.deg, coord.galactic.b.deg))
    print('   ')
    print('SDSS finding chart: https://skyserver.sdss.org/dr12/en/tools/chart/navi.aspx?ra={:f}&dec={:f}&opt='.format(coord.icrs.ra.deg, coord.icrs.dec.deg))
Beispiel #11
0
def main(args=None):
    """Build a DLA or LLS system from command-line arguments and dump it to JSON."""
    from astropy.coordinates import SkyCoord
    from astropy import units as u
    from linetools import utils as ltu
    from pyigm.abssys.dla import DLASystem
    from pyigm.abssys.lls import LLSSystem

    pargs = parser() if args is None else args

    # Coordinates: parse the J-name if given, otherwise a dummy origin
    if pargs.jcoord is None:
        coord = SkyCoord(ra=0., dec=0., unit='deg')
    else:
        coord = ltu.radec_to_coord(pargs.jcoord)

    # Velocity limits: "vmin,vmax" in km/s, or None when absent
    vlims = None
    if pargs.vlim is not None:
        vlims = [float(v) for v in pargs.vlim.split(',')] * u.km / u.s

    # Instantiate the requested system type
    if pargs.itype == 'dla':
        isys = DLASystem(coord, pargs.zabs, vlims, pargs.NHI, zem=pargs.zem, sig_NHI=pargs.sigNHI)
    elif pargs.itype == 'lls':
        isys = LLSSystem(coord, pargs.zabs, vlims, NHI=pargs.NHI, zem=pargs.zem, sig_NHI=pargs.sigNHI)
    else:
        raise IOError("Not prepared for this type of IGMSystem")

    # Serialize to disk
    isys.write_json(pargs.outfile)
Beispiel #12
0
def test_radeccoord():
    """All equivalent RA/DEC representations must parse to the same RA."""
    cases = ('J124511+144523', '124511+144523', 'J12:45:11+14:45:23',
             ('12:45:11', '+14:45:23'), ('12:45:11', '14:45:23'),
             ('12 45 11', '+14 45 23'))
    for case in cases:
        sky = ltu.radec_to_coord(case)
        np.testing.assert_allclose(sky.ra.value, 191.2958333333333)
Beispiel #13
0
def main(args=None):
    """Print the input coordinate in J-name, sexagesimal, degree and Galactic forms."""
    pargs = parser(options=args)
    if pargs.inp is None and pargs.all is False:
        print("No option selected.  Use -h for Help")
        return
    # Setup
    from linetools import utils as ltu
    from .utils import coord_arg_to_coord
    from astropy import units as u

    # Parse the input position (optionally Galactic l,b)
    coord = ltu.radec_to_coord(coord_arg_to_coord(pargs.inp), gal=pargs.gal)
    icrs_ra = coord.icrs.ra
    icrs_dec = coord.icrs.dec

    # Compact J-name
    print('      ')
    print('J{:s}{:s}'.format(icrs_ra.to_string(unit=u.hour, sep='', pad=True),
                             icrs_dec.to_string(sep='', pad=True, alwayssign=True)))
    print('   ')
    # Colon-separated sexagesimal
    print('   {:s} {:s}   (J2000)'.format(
        icrs_ra.to_string(unit=u.hour, sep=':', pad=True),
        icrs_dec.to_string(sep=':', pad=True, alwayssign=True)))
    # Decimal degrees, twice in slightly different layouts
    print('   RA={:f} deg, DEC={:f} deg'.format(icrs_ra.deg, icrs_dec.deg))
    print('   radec = ({:f},{:f}) deg'.format(icrs_ra.deg, icrs_dec.deg))
    # Galactic coordinates
    print('   Galactic = ({:f},{:f}) deg'.format(coord.galactic.l.deg,
                                                 coord.galactic.b.deg))
    print('   ')
    print(
        'SDSS finding chart: https://skyserver.sdss.org/dr12/en/tools/chart/navi.aspx?ra={:f}&dec={:f}&opt='
        .format(icrs_ra.deg, icrs_dec.deg))
Beispiel #14
0
def grab_meta():
    """Read the HST_z2 summary and return it with RA/DEC columns added.

    Returns
    -------
    hstz2_meta : Table
    """
    hstz2_meta = Table.read(os.getenv('RAW_IGMSPEC')+'/HST_z2/hst_z2.ascii', format='ascii')
    # Decimal-degree coordinates, one entry per row
    ras, decs = [], []
    for row in hstz2_meta:
        sky = ltu.radec_to_coord((row['ra'], row['dec']))
        ras.append(sky.ra.value)
        decs.append(sky.dec.value)
    hstz2_meta.add_column(Column(ras, name='RA'))
    hstz2_meta.add_column(Column(decs, name='DEC'))
    # Standardize column names
    for old, new in (('obsdate', 'DATE-OBS'), ('tel', 'TELESCOPE'),
                     ('inst', 'INSTR'), ('grating', 'GRATING'),
                     ('resolution', 'R')):
        hstz2_meta.rename_column(old, new)
    return hstz2_meta
Beispiel #15
0
    def __init__(self, radec, sl_type='', em_type='', comment=None, name=None, **kwargs):
        """Initialize the sightline.

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA,DEC) in deg or astropy.coordinate.SkyCoord
        sl_type : str, optional
            Sightline type, e.g. IGM
        em_type : str, optional
            Type of emission source for absorption sightline, e.g. QSO
        comment : str, optional
            A comment, default is ``
        name : str, optional
            Name of the sightline, e.g. '3C273'
        """
        # Coordinates are required; accept a tuple or SkyCoord
        self.coord = ltu.radec_to_coord(radec)

        # Absorption components attached to this sightline
        self._components = []

        # Auto-generate a name from the coordinates when none is given
        self.name = ltu.name_from_coord(self.coord) if name is None else name

        # Remaining attributes
        self.em_type = em_type
        self.sl_type = sl_type
        self._abssystems = []  # Saving the namespace for future usage
Beispiel #16
0
def grab_meta():
    """Grab the HST_z2 meta table, formatted for the group catalog.

    Returns
    -------
    hstz2_meta : Table
    """
    hstz2_meta = Table.read(os.getenv('RAW_IGMSPEC')+'/HST_z2/hst_z2.ascii', format='ascii')
    nspec = len(hstz2_meta)
    # Decimal-degree coordinates, row by row
    ras, decs = [], []
    for row in hstz2_meta:
        sky = ltu.radec_to_coord((row['ra'], row['dec']))
        ras.append(sky.ra.value)
        decs.append(sky.dec.value)
    hstz2_meta.add_column(Column(ras, name='RA_GROUP'))
    hstz2_meta.add_column(Column(decs, name='DEC_GROUP'))
    # Redshift bookkeeping
    hstz2_meta.rename_column('zem', 'zem_GROUP')
    hstz2_meta['sig_zem'] = [0.]*nspec
    hstz2_meta['flag_zem'] = [str('SDSS_PIPE')]*nspec
    hstz2_meta['STYPE'] = [str('QSO')]*nspec
    # Standardize the remaining column names
    for old, new in (('obsdate', 'DATE-OBS'), ('tel', 'TELESCOPE'),
                     ('inst', 'INSTR'), ('grating', 'DISPERSER'),
                     ('resolution', 'R')):
        hstz2_meta.rename_column(old, new)
    # Sanity check before returning
    assert chk_meta(hstz2_meta, chk_cat_only=True)
    return hstz2_meta
Beispiel #17
0
def test_geocorrect(fitstbl):
    """Heliocentric correction applied through a SpecObjs container."""
    # Build a synthetic boxcar-extracted object
    nwave = 1000
    spec = specobj.SpecObj('MultiSlit', 1, SLITID=0)
    spec.BOX_WAVE = np.linspace(4000., 6000., nwave)
    spec.BOX_COUNTS = 50. * (spec.BOX_WAVE / 5000.)**-1.
    spec.BOX_COUNTS_IVAR = 1. / spec.BOX_COUNTS.copy()
    # Wrap it in a SpecObjs container
    container = specobjs.SpecObjs()
    container.add_sobj(spec)
    # Observation metadata for science frame 5
    idx = 5
    tobs = Time(fitstbl['mjd'][idx], format='mjd')  #'%Y-%m-%dT%H:%M:%S.%f')
    masked = np.array([False] * container.nobj)
    pointing = ltu.radec_to_coord((fitstbl["ra"][idx], fitstbl["dec"][idx]))

    vel, corr = wave.geomotion_correct(container, pointing, tobs,
                                       masked, lon, lat, alt,
                                       'heliocentric')
    # Checked against x_keckhelio
    assert np.isclose(vel, -9.17461338, rtol=1e-5)
    #assert np.isclose(helio, -9.3344957, rtol=1e-5)  # Original
    assert np.isclose(container[0].BOX_WAVE[0], 3999.877589008, rtol=1e-8)
Beispiel #18
0
def test_radeccoord():
    """radec_to_coord: single inputs, list input, and a Galactic input."""
    reprs = ['J124511+144523', '124511+144523', 'J12:45:11+14:45:23',
             ('12:45:11', '+14:45:23'), ('12:45:11', '14:45:23'),
             ('12 45 11', '+14 45 23')]
    for entry in reprs:
        np.testing.assert_allclose(
            ltu.radec_to_coord(entry).ra.value, 191.2958333333333)
    # A list input returns one coordinate per element
    assert len(ltu.radec_to_coord(reprs)) == 6
    # Galactic (l, b) input for the LMC
    lmc = ltu.radec_to_coord((280.5, -32.9), gal=True)
    assert np.isclose(lmc.icrs.ra.value, 80.8456130588062)
    assert np.isclose(lmc.icrs.dec.value, -69.78267074987376)
Beispiel #19
0
def main(args=None):
    """Cross-match a sky position against a local QSO or globular-cluster catalog.

    Parses the command-line position, loads the requested catalog from a
    hard-coded local path, keeps entries within ``pargs.angsep`` arcmin and
    prints them sorted by angular separation (optionally adding a comoving
    separation in Mpc at ``pargs.redshift``).

    Parameters
    ----------
    args : list, optional
        Command-line arguments forwarded to ``parser``.
    """
    from linetools.scripts.utils import coord_arg_to_coord
    from linetools import utils as ltu
    from astropy.io import fits, ascii
    from astropy.coordinates import SkyCoord
    import astropy.units as u
    from pyntejos.catalogs import add_radec_deg_columns

    pargs = parser(options=args)
    # RA,DEC -- normalize the CLI string into a SkyCoord
    icoord = coord_arg_to_coord(pargs.radec)
    coord = ltu.radec_to_coord(icoord)

    # define catalog
    print('Reading {} catalog'.format(pargs.catalog))
    if pargs.catalog == 'QSO':
        # read qsos from MILLIQUAS catalog (hard-coded local path)
        col_names = [
            'ra_d', 'dec_d', 'name', 'description', 'rmag', 'bmag', 'comment',
            'psf_r', 'psf_b', 'z', 'cite', 'zcite', 'qso_prob', 'Xname',
            'Rname', 'Lobe1', 'Lobe2'
        ]
        cat = ascii.read(
            '/media/ntejos/disk1/catalogs/qsos/milliquas/milliquas.txt',
            format='fixed_width',
            names=col_names)
    elif pargs.catalog == 'GC':
        # read MW globular cluster catalog
        cat = ascii.read(
            '/media/ntejos/disk1/catalogs/globular_clusters/mwgc10_1.dat',
            format='fixed_width')
        # add ra_d, dec_d columns
        cat = add_radec_deg_columns(cat)
    else:
        print(' Not implemented for such catalog.')
        return

    # cross-match within the requested angular separation
    print('Cross-matching...')
    cat_coords = SkyCoord(cat['ra_d'], cat['dec_d'], unit='deg')
    seplim = pargs.angsep * u.arcmin
    sep2d = coord.separation(cat_coords)
    cond = sep2d <= seplim
    cat = cat[cond]
    if len(cat) < 1:
        print("No matches found.")
    else:
        cat['sep2d'] = sep2d[cond]
        if pargs.redshift is not None:
            from astropy.cosmology import Planck15 as cosmo
            # NOTE(review): debugger breakpoint left in -- remove before release?
            import pdb
            pdb.set_trace()
            sep = (cosmo.kpc_comoving_per_arcmin(float(pargs.redshift)) *
                   cat['sep2d']).to('Mpc')
            cat['sep_mpc'] = sep.value
        cat.sort('sep2d')
        print(cat)
Beispiel #20
0
def test_radeccoord():
    """Each supported RA/DEC representation parses to the same RA."""
    samples = (
        'J124511+144523',
        '124511+144523',
        'J12:45:11+14:45:23',
        ('12:45:11', '+14:45:23'),
        ('12:45:11', '14:45:23'),
        ('12 45 11', '+14 45 23'),
    )
    for sample in samples:
        sky = ltu.radec_to_coord(sample)
        np.testing.assert_allclose(sky.ra.value, 191.2958333333333)
Beispiel #21
0
 def __init__(self, radec, z=None, name=None):
     """Store coordinates, redshift and a name.

     Parameters
     ----------
     radec : tuple or SkyCoord
         Input coordinates; normalized via radec_to_coord.
     z : float, optional
         Redshift.
     name : str, optional
         Object name; default concatenates 'J' with the RA (hours) and
         signed DEC strings.
     """
     self.coord = radec_to_coord(radec)
     # Redshift
     self.z = z

     # Name
     if name is None:
         self.name = ('J'+self.coord.ra.to_string(unit=u.hour, sep='', pad=True)+
                     self.coord.dec.to_string(sep='', pad=True, alwayssign=True))
     else:
         self.name = name
Beispiel #22
0
    def __init__(self, radec, z=None, name=None):
        """Record coordinates/redshift; auto-build a J-name when none is given."""
        self.coord = radec_to_coord(radec)
        # Redshift
        self.z = z

        # Name: 'J' + RA (hours, no separator) + signed DEC
        if name is not None:
            self.name = name
        else:
            ra_str = self.coord.ra.to_string(unit=u.hour, sep='', pad=True)
            dec_str = self.coord.dec.to_string(sep='', pad=True, alwayssign=True)
            self.name = 'J' + ra_str + dec_str
Beispiel #23
0
def main(args=None):
    from pyntejos.utils import igmgjson_from_joebvp
    from astropy.coordinates import SkyCoord
    from linetools.utils import radec_to_coord

    pargs = parser(options=args)
    filename = pargs.filename
    specfile = pargs.specfile
    output = pargs.o # output
    fwhm = pargs.fwhm
    radec = radec_to_coord(parser.radec)

    igmgjson_from_joebvp(filename, radec, specfile, fwhm, output)
Beispiel #24
0
    def __init__(self,
                 radec,
                 zabs,
                 vlim,
                 zem=0.,
                 abs_type=None,
                 NHI=0.,
                 sig_NHI=None,
                 flag_NHI=0,
                 name=None):
        """Initialize an absorption system.

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA, DEC) or an astropy SkyCoord; normalized via ltu.radec_to_coord.
        zabs : float
            Absorption redshift.
        vlim : Quantity array
            Velocity limits (stored as given).
        zem : float, optional
            Emission redshift of the background source.
        abs_type : str, optional
            Absorption-system type; defaults to 'NONE'.
        NHI : float, optional
            HI column density (presumably log10 -- confirm with callers).
        sig_NHI : ndarray, optional
            Two-sided uncertainty on NHI; defaults to a fresh np.zeros(2).
        flag_NHI : int, optional
            Flag describing the NHI measurement.
        name : str, optional
            Name; default is 'J<RA><DEC>_z<zabs>' built from the coordinates.
        """

        self.zabs = zabs
        self.zem = zem
        self.vlim = vlim
        self.NHI = NHI
        # BUG FIX: the original default (sig_NHI=np.zeros(2)) was a shared
        # mutable array -- mutating one instance's sig_NHI would silently
        # change every other instance created with the default. Allocate a
        # fresh array per call instead (backward compatible).
        self.sig_NHI = np.zeros(2) if sig_NHI is None else sig_NHI
        self.flag_NHI = flag_NHI
        self.coord = ltu.radec_to_coord(radec)
        if name is None:
            # Auto-generate a J-name from the coordinates and redshift
            self.name = 'J{:s}{:s}_z{:.3f}'.format(
                self.coord.ra.to_string(unit=u.hour, sep='', pad=True),
                self.coord.dec.to_string(sep='', pad=True, alwayssign=True),
                self.zabs)
        else:
            self.name = name

        # Abs type
        if abs_type is None:
            self.abs_type = 'NONE'
        else:
            self.abs_type = abs_type

        # Components
        self._components = []  # List of AbsComponent objects

        # Kinematics
        self.kin = {}

        # Metallicity
        self.ZH = 0.
        self.sig_ZH = 0.

        # Abundances and Tables
        self._EW = QTable()
        self._ionN = None  # Needs to be None for fill_ion
        self._trans = QTable()
        self._ionstate = {}
        self._abund = QTable()

        # Refs (list of references)
        self.Refs = []
Beispiel #25
0
def main(pargs):
    """ Run

    Estimate a redshift interval for an FRB from its DM, sky position and
    the requested confidence limits (``pargs.cl``, in percent).

    Returns
    -------
    z_min, z_max : float
        Bounds of the confidence interval on redshift.
    """
    import numpy as np

    from linetools import utils as ltu
    from linetools.scripts.utils import coord_arg_to_coord

    from frb import mw
    from frb.dm import prob_dmz

    # Deal with coord
    icoord = ltu.radec_to_coord(coord_arg_to_coord(pargs.coord))

    # NE 2001 -- Galactic ISM dispersion measure along this sightline
    DM_ISM = mw.ismDM(icoord)
    print("")
    print("-----------------------------------------------------")
    print(f"NE2001 = {DM_ISM:.2f}")

    # DM Cosmic -- subtract the ISM term and the user-supplied dm_hostmw term
    DM_cosmic = pargs.DM_FRB - DM_ISM.value - pargs.dm_hostmw

    # Redshift estimates

    # Load the precomputed P(DM|z) grid shipped with the repo
    sdict = prob_dmz.grab_repo_grid()
    PDM_z = sdict['PDM_z']
    z = sdict['z']
    DM = sdict['DM']

    # Do it -- take the P(z) slice at the nearest grid DM and normalize it
    iDM = np.argmin(np.abs(DM - DM_cosmic))
    PzDM = PDM_z[iDM, :] / np.sum(PDM_z[iDM, :])

    cum_sum = np.cumsum(PzDM)
    limits = pargs.cl

    # Invert the CDF at the requested percentiles
    z_min = z[np.argmin(np.abs(cum_sum - limits[0] / 100.))]
    z_max = z[np.argmin(np.abs(cum_sum - limits[1] / 100.))]

    # Finish
    print("")
    print(f"The redshift range for your confidence interval {pargs.cl} is:")
    print(f"z = [{z_min:.3f}, {z_max:.3f}]")

    return z_min, z_max
Beispiel #26
0
def main(args=None):
    """Cross-match a sky position against a local QSO or globular-cluster catalog.

    Loads the requested catalog from a hard-coded local path, keeps entries
    within ``pargs.angsep`` arcmin of the input position, and prints them
    sorted by angular separation (optionally with a comoving separation in
    Mpc at ``pargs.redshift``).

    Parameters
    ----------
    args : list, optional
        Command-line arguments forwarded to ``parser``.
    """
    from linetools.scripts.utils import coord_arg_to_coord
    from linetools import utils as ltu
    from astropy.io import fits, ascii
    from astropy.coordinates import SkyCoord
    import astropy.units as u
    from pyntejos.catalogs import add_radec_deg_columns

    pargs = parser(options=args)
    # RA,DEC -- normalize the CLI string into a SkyCoord
    icoord = coord_arg_to_coord(pargs.radec)
    coord = ltu.radec_to_coord(icoord)

    # define catalog
    print('Reading {} catalog'.format(pargs.catalog))
    if pargs.catalog == 'QSO':
        # read qsos from MILLIQUAS catalog
        col_names = ['ra_d', 'dec_d', 'name', 'description', 'rmag', 'bmag', 'comment', 'psf_r', 'psf_b', 'z', 'cite', 'zcite', 'qso_prob', 'Xname', 'Rname', 'Lobe1', 'Lobe2'] 
        cat = ascii.read('/media/ntejos/disk1/catalogs/qsos/milliquas/milliquas.txt', format='fixed_width', names=col_names)
    elif pargs.catalog == 'GC':
        # read MW globular cluster catalog 
        cat = ascii.read('/media/ntejos/disk1/catalogs/globular_clusters/mwgc10_1.dat', format='fixed_width')
        # add ra_d, dec_d columns
        cat = add_radec_deg_columns(cat)
    else:
        print(' Not implemented for such catalog.')
        return

    # cross-match within the requested angular separation
    print('Cross-matching...')
    cat_coords = SkyCoord(cat['ra_d'], cat['dec_d'], unit='deg')
    seplim = pargs.angsep * u.arcmin
    sep2d = coord.separation(cat_coords)
    cond = sep2d <= seplim
    cat = cat[cond]
    if len(cat) < 1:
        print("No matches found.")
    else:
        cat['sep2d'] = sep2d[cond]
        if pargs.redshift is not None:
            from astropy.cosmology import Planck15 as cosmo
            # NOTE(review): debugger breakpoint left in -- remove before release?
            import pdb; pdb.set_trace()
            sep = (cosmo.kpc_comoving_per_arcmin(float(pargs.redshift)) * cat['sep2d']).to('Mpc')
            cat['sep_mpc'] = sep.value
        cat.sort('sep2d')
        print(cat)
Beispiel #27
0
    def refframe_correct(self, ra, dec, obstime, sobjs=None):
        """ Correct the calibrated wavelength to the user-supplied reference frame

        Args:
            ra (float):
                Right ascension of the observation; passed to
                ltu.radec_to_coord (presumably deg -- confirm with callers)
            dec (float):
                Declination of the observation (same convention as ``ra``)
            obstime (:obj:`astropy.time.Time`):
                Observation time
            sobjs (:class:`pypeit.specobjs.Specobjs`, None):
                Spectrally extracted objects

        """
        # Correct Telescope's motion
        refframe = self.par['calibrations']['wavelengths']['refframe']
        # Only helio/bary frames require a correction, and only when the
        # wavelength reference is not 'pixel'
        if refframe in ['heliocentric', 'barycentric'] \
                and self.par['calibrations']['wavelengths']['reference'] != 'pixel':
            msgs.info("Performing a {0} correction".format(self.par['calibrations']['wavelengths']['refframe']))
            # Calculate correction
            radec = ltu.radec_to_coord((ra, dec))
            vel, vel_corr = wave.geomotion_correct(radec, obstime,
                                                   self.spectrograph.telescope['longitude'],
                                                   self.spectrograph.telescope['latitude'],
                                                   self.spectrograph.telescope['elevation'],
                                                   refframe)
            # Apply correction to objects
            msgs.info('Applying {0} correction = {1:0.5f} km/s'.format(refframe, vel))
            if (sobjs is not None) and (sobjs.nobj != 0):
                # Loop on slits to apply; skip slits flagged in extract_bpm
                gd_slitord = self.slits.slitord_id[np.logical_not(self.extract_bpm)]
                for slitord in gd_slitord:
                    indx = sobjs.slitorder_indices(slitord)
                    this_specobjs = sobjs[indx]
                    # Loop on objects
                    for specobj in this_specobjs:
                        if specobj is None:
                            continue
                        specobj.apply_helio(vel_corr, refframe)

            # Apply correction to wavelength image
            self.vel_corr = vel_corr
            self.waveimg *= vel_corr

        else:
            msgs.info('A wavelength reference frame correction will not be performed.')
Beispiel #28
0
    def __init__(self, radec, zem, vlim=None, em_type=None, name=None):
        """Initialize an emission system.

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA, DEC) or an astropy SkyCoord; normalized via ltu.radec_to_coord.
        zem : float
            Emission redshift.
        vlim : Quantity array, optional
            Velocity limits; defaults to [-300., 300.] km/s.
        em_type : str, optional
            Type of emission source; defaults to 'NONE'.
        name : str, optional
            Name; default is 'J<RA><DEC>_z<zem>' built from the coordinates.
        """

        self.zem = zem
        if vlim is None:
            self.vlim = [-300., 300.]*u.km/u.s
        else:
            self.vlim = vlim
        self.coord = ltu.radec_to_coord(radec)
        if name is None:
            # Auto-generate a J-name from the coordinates and redshift
            self.name = 'J{:s}{:s}_z{:.3f}'.format(
                    self.coord.ra.to_string(unit=u.hour,sep='',pad=True),
                    self.coord.dec.to_string(sep='',pad=True,alwayssign=True),
                    self.zem)
        else:
            self.name = name

        # Em type
        if em_type is None:
            self.em_type = 'NONE'
        else:
            self.em_type = em_type

        # Components
        self._emlines = []  # List of EmLine objects

        # Components
        #self._components = []  # List of EmComponent objects

        # Kinematics
        self.kin = {}

        # Metallicity
        self.ZH = 0.
        self.sig_ZH = 0.

        # Abundances and Tables
        self._EW = Table()
        self._fluxes = None   # Needs to be None for fill_ion
        #self._trans = Table()
        self._abund = Table()

        # Refs (list of references)
        self.Refs = []
Beispiel #29
0
    def __init__(self, radec, zem, vlim=None, em_type=None, name=None):
        """Initialize an emission system at (radec, zem)."""
        self.zem = zem
        # Default velocity window of +/- 300 km/s
        self.vlim = [-300., 300.]*u.km/u.s if vlim is None else vlim
        self.coord = ltu.radec_to_coord(radec)
        # Name: 'J' + RA (hours) + signed DEC + redshift tag, unless given
        if name is not None:
            self.name = name
        else:
            ra_s = self.coord.ra.to_string(unit=u.hour, sep='', pad=True)
            dec_s = self.coord.dec.to_string(sep='', pad=True, alwayssign=True)
            self.name = 'J{:s}{:s}_z{:.3f}'.format(ra_s, dec_s, self.zem)

        # Emission type defaults to 'NONE'
        self.em_type = 'NONE' if em_type is None else em_type

        # List of EmLine objects
        self._emlines = []

        # Components
        #self._components = []  # List of EmComponent objects

        # Kinematics
        self.kin = {}

        # Metallicity
        self.ZH = 0.
        self.sig_ZH = 0.

        # Abundances and Tables
        self._EW = Table()
        self._fluxes = None   # Needs to be None for fill_ion
        #self._trans = Table()
        self._abund = Table()

        # Refs (list of references)
        self.Refs = []
Beispiel #30
0
    def __init__(self, radec, zabs, vlim, zem=0., abs_type=None,
                 NHI=0., sig_NHI=None, flag_NHI=0, name=None):
        """Initialize an absorption system.

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA, DEC) or an astropy SkyCoord; normalized via ltu.radec_to_coord.
        zabs : float
            Absorption redshift.
        vlim : Quantity array
            Velocity limits (stored as given).
        zem : float, optional
            Emission redshift of the background source.
        abs_type : str, optional
            Absorption-system type; defaults to 'NONE'.
        NHI : float, optional
            HI column density (presumably log10 -- confirm with callers).
        sig_NHI : ndarray, optional
            Two-sided uncertainty on NHI; defaults to a fresh np.zeros(2).
        flag_NHI : int, optional
            Flag describing the NHI measurement.
        name : str, optional
            Name; default is 'J<RA><DEC>_z<zabs>' built from the coordinates.
        """

        self.zabs = zabs
        self.zem = zem
        self.vlim = vlim
        self.NHI = NHI
        # BUG FIX: the original default (sig_NHI=np.zeros(2)) was a shared
        # mutable array across all instances created with the default;
        # allocate a fresh one per call instead (backward compatible).
        self.sig_NHI = np.zeros(2) if sig_NHI is None else sig_NHI
        self.flag_NHI = flag_NHI
        self.coord = ltu.radec_to_coord(radec)
        if name is None:
            # Auto-generate a J-name from the coordinates and redshift
            self.name = 'J{:s}{:s}_z{:.3f}'.format(
                    self.coord.ra.to_string(unit=u.hour,sep='',pad=True),
                    self.coord.dec.to_string(sep='',pad=True,alwayssign=True),
                    self.zabs)
        else:
            self.name = name

        # Abs type
        if abs_type is None:
            self.abs_type = 'NONE'
        else:
            self.abs_type = abs_type

        # Components
        self._components = []  # List of AbsComponent objects

        # Kinematics
        self.kin = {}

        # Metallicity
        self.ZH = 0.
        self.sig_ZH = 0.

        # Abundances and Tables
        self._EW = QTable()
        self._ionN = None   # Needs to be None for fill_ion
        self._trans = QTable()
        self._ionstate = {}
        self._abund = QTable()

        # Refs (list of references)
        self.Refs = []
Beispiel #31
0
def grab_meta():
    """ Grab the KODIAQ DR1 meta Table

    Returns
    -------
    kodiaq_meta : Table

    """
    def _iso_date(pi_date):
        # pi_date pieces map to year / month / day, as fixed by the
        # '%Y-%b-%d' parse below; return as 'YYYY-MM-DD'
        pieces = pi_date.split('_')
        ymd = str('{:s}-{:s}-{:02d}'.format(pieces[-1], pieces[1][0:3],
                                            int(pieces[2])))
        parsed = datetime.datetime.strptime(ymd, '%Y-%b-%d')
        return datetime.datetime.strftime(parsed, '%Y-%m-%d')

    kodiaq_file = igms_path + '/data/meta/KODIAQ_DR1_summary.ascii'
    kodiaq_meta = Table.read(kodiaq_file, format='ascii', comment='#')
    nspec = len(kodiaq_meta)
    # Every row must belong to DR1
    for row in kodiaq_meta:
        assert row['kodrelease'] == 1
    # Decimal-degree coordinates and ISO observing dates, row by row
    ra = []
    dec = []
    dateobs = []
    for row in kodiaq_meta:
        coord = ltu.radec_to_coord((row['sRA'], row['sDEC']))
        ra.append(coord.ra.value)
        dec.append(coord.dec.value)
        dateobs.append(_iso_date(row['pi_date']))
    kodiaq_meta.add_column(Column(ra, name='RA_GROUP'))
    kodiaq_meta.add_column(Column(dec, name='DEC_GROUP'))
    kodiaq_meta.add_column(Column(dateobs, name='DATE-OBS'))
    # Instrument / telescope / source type
    kodiaq_meta.add_column(Column(['HIRES'] * nspec, name='INSTR'))
    kodiaq_meta.add_column(Column(['Keck-I'] * nspec, name='TELESCOPE'))
    kodiaq_meta['STYPE'] = [str('QSO')] * nspec
    # Redshift bookkeeping
    kodiaq_meta.rename_column('zem', 'zem_GROUP')
    kodiaq_meta['sig_zem'] = [0.] * nspec
    kodiaq_meta['flag_zem'] = [str('SIMBAD')] * nspec
    # Catalog-level sanity check before returning
    assert chk_meta(kodiaq_meta, chk_cat_only=True)
    return kodiaq_meta
Beispiel #32
0
    def from_alis(cls, alis_file, radec, **kwargs):
        """ Instantiate from an ALIS output file.

        Parameters
        ----------
        alis_file : str
          .mod ALIS output file
        radec : SkyCoord or tuple of RA,DEC
          Sightline coordinates stamped onto every emission line
        kwargs
          Passed through to from_emlines

        Returns
        -------
        EmSystem

        """
        # Resolve the coordinates once, then stamp each parsed line
        sky_coord = ltu.radec_to_coord(radec)
        lines = lio.emlines_from_alis_output(alis_file)
        for line in lines:
            line.attrib['coord'] = sky_coord
        return cls.from_emlines(lines, **kwargs)
Beispiel #33
0
def test_geocorrect(fitstbl):
    """ Check the heliocentric-motion wavelength correction against a
    reference value checked with x_keckhelio.
    """
    # Spectrograph
    # (KBW) Had to change this to keck to match the telecope parameters,
    # then just changed to use definitions above directly.
#    spectrograph = load_spectrograph('keck_lris_blue')

    # Specobjs (wrap in a list to mimic a slit)
    sobj_list = specobjs.dummy_specobj((2048,2048), extraction=True)
    specObjs = specobjs.SpecObjs(sobj_list)
    scidx = 5
    # Observation time from the fits table MJD entry
    obstime = Time(fitstbl['mjd'][scidx], format='mjd')#'%Y-%m-%dT%H:%M:%S.%f')
    maskslits = np.array([False]*specObjs.nobj)
    # Sightline coordinates of the science frame
    radec = ltu.radec_to_coord((fitstbl["ra"][scidx], fitstbl["dec"][scidx]))

    # lon, lat, alt are presumably module-level telescope parameters -- see
    # the (KBW) note above
    helio, hel_corr = wave.geomotion_correct(specObjs, radec, obstime, maskslits,
                                               lon, lat, alt, 'heliocentric')
    assert np.isclose(helio, -9.17461338, rtol=1e-5)  # Checked against x_keckhelio
    #assert np.isclose(helio, -9.3344957, rtol=1e-5)  # Original
    assert np.isclose(specObjs[0].boxcar['WAVE'][0].value, 3999.877589008, rtol=1e-8)
Beispiel #34
0
    def from_alis(cls, alis_file, radec, **kwargs):
        """ Instantiate from an ALIS output file.

        Parameters
        ----------
        alis_file : str
          .mod ALIS output file
        radec : SkyCoord or tuple of RA,DEC
          Sightline coordinates stamped onto every emission line
        kwargs
          Passed through to from_emlines

        Returns
        -------
        EmSystem

        """
        # Resolve the coordinates once, then stamp each parsed line
        sky_coord = ltu.radec_to_coord(radec)
        lines = lio.emlines_from_alis_output(alis_file)
        for line in lines:
            line.attrib['coord'] = sky_coord
        return cls.from_emlines(lines, **kwargs)
Beispiel #35
0
def grab_meta():
    """ Grab the KODIAQ DR2 meta Table

    Returns
    -------
    kodiaq_meta : Table

    """
    kodiaq_file = os.getenv('RAW_IGMSPEC')+'/KODIAQ2/KODIAQ_DR2_summary.ascii'
    kodiaq_meta = Table.read(kodiaq_file, format='ascii', comment='#')
    # Restrict to DR2 entries (the summary file may contain other releases)
    dr2 = kodiaq_meta['kodrelease'] == 2
    kodiaq_meta = kodiaq_meta[dr2]
    # RA/DEC, DATE
    ra = []
    dec = []
    dateobs = []
    for row in kodiaq_meta:
        # Get RA/DEC in decimal degrees
        coord = ltu.radec_to_coord((row['sRA'],row['sDEC']))
        ra.append(coord.ra.value)
        dec.append(coord.dec.value)
        # DATE -- pi_date pieces are year / month / day per the '%Y-%b-%d' parse
        dvals = row['pi_date'].split('_')
        tymd = str('{:s}-{:s}-{:02d}'.format(dvals[-1],dvals[1][0:3],int(dvals[2])))
        tval = datetime.datetime.strptime(tymd, '%Y-%b-%d')
        dateobs.append(datetime.datetime.strftime(tval,'%Y-%m-%d'))
    kodiaq_meta.add_column(Column(ra, name='RA_GROUP'))
    kodiaq_meta.add_column(Column(dec, name='DEC_GROUP'))
    kodiaq_meta.add_column(Column(dateobs, name='DATE-OBS'))
    # Instrument / telescope / source type
    kodiaq_meta['INSTR'] = 'HIRES'
    kodiaq_meta['TELESCOPE'] = 'Keck-I'
    kodiaq_meta['STYPE'] = str('QSO')
    # z
    kodiaq_meta.rename_column('zem', 'zem_GROUP')
    kodiaq_meta['sig_zem'] = 0.
    kodiaq_meta['flag_zem'] = str('SDSS-SIMBAD')
    # Catalog-level sanity check before returning
    assert chk_meta(kodiaq_meta, chk_cat_only=True)
    return kodiaq_meta
Beispiel #36
0
    def radial_search(self, inp, radius, verbose=True):
        """ Search for sources in a radius around the input coord

        Parameters
        ----------
        inp : str or tuple or SkyCoord
          See linetools.utils.radec_to_coord
        radius : Angle or Quantity
          Angular search radius, compared against the on-sky separation
        verbose : bool, optional
          Print the number of matches

        Returns
        -------
        IGM_ID values of the catalog sources within the radius

        """
        # Convert to SkyCoord
        coord = ltu.radec_to_coord(inp)
        # Angular separation to every catalog source
        sep = coord.separation(self.coords)
        # Match
        good = sep < radius
        # Return
        if verbose:
            print("Your search yielded {:d} match[es]".format(np.sum(good)))
        return self.cat['IGM_ID'][good]
Beispiel #37
0
    def from_igmguesses(cls, radec, zem, igmgfile, name=None, **kwargs):
        """ Instantiate from a JSON file from IGMGuesses
        The input coordinates are used for all the components

        Parameters
        ----------
        radec : RA/DEC input
          See ltu.radec_to_coord for options
        zem : float
          Emission redshift of sightline
        igmgfile : str
          Filename
        name : str, optional
          Name of the object; built from truncated FK5 coordinate strings
          and zem when not given
        kwargs
          Passed through to from_dict

        Returns
        -------
        slf : instance built via cls.from_dict

        """
        # Read
        jdict = ltu.loadjson(igmgfile)  # cmps, specfile
        # Add in additional keys (FK5 frame, decimal degrees)
        coord = ltu.radec_to_coord(radec)
        jdict['RA'] = coord.fk5.ra.deg
        jdict['DEC'] = coord.fk5.dec.deg
        jdict['zem'] = zem
        # Name, e.g. JHHMM+DDMM_z#.### (coordinate strings truncated)
        if name is None:
            name = 'J{:s}{:s}_z{:0.3f}'.format(
                coord.fk5.ra.to_string(unit=u.hour, sep='', pad=True)[0:4],
                coord.fk5.dec.to_string(sep='', pad=True,
                                        alwayssign=True)[0:5], zem)
        jdict['name'] = name
        # IGMGuesses stores the component list under 'cmps'
        jdict['components'] = jdict.pop('cmps')
        kwargs['use_coord'] = True
        slf = cls.from_dict(jdict, **kwargs)
        # Return
        return slf
Beispiel #38
0
    def load_HDLLS(cls, load_sys=True, grab_spectra=False, isys_path=None):
        """ Default sample of LLS (HD-LLS, DR1)

        Parameters
        ----------
        load_sys : bool, optional
          Load systems using the sys tarball
        grab_spectra : bool, optional
          Grab 1D spectra?  (155Mb)
        isys_path : str, optional
          Read system files from this path

        Return
        ------
        lls_survey
        """

        # Pull from Internet (as necessary)
        summ_fil = pyigm_path+"/data/LLS/HD-LLS/HD-LLS_DR1.fits"
        print('HD-LLS: Loading summary file {:s}'.format(summ_fil))

        # Ions
        ions_fil = pyigm_path+"/data/LLS/HD-LLS/HD-LLS_ions.json"
        print('HD-LLS: Loading ions file {:s}'.format(ions_fil))

        # System files
        sys_files = pyigm_path+"/data/LLS/HD-LLS/HD-LLS_sys.tar.gz"

        # Transitions
        #clm_files = pyigm_path+"/data/LLS/HD-LLS/HD-LLS_clms.tar.gz"

        # Metallicity
        ZH_fil = pyigm_path+"/data/LLS/HD-LLS/HD-LLS_DR1_dustnhi.hdf5"
        print('HD-LLS: Loading metallicity file {:s}'.format(ZH_fil))

        # Load systems via the sys tarball.  Includes transitions
        if load_sys:  # This approach takes ~120s
            if isys_path is not None:
                lls_survey = pyisu.load_sys_files(isys_path, 'LLS',sys_path=True)
            else:
                lls_survey = pyisu.load_sys_files(sys_files, 'LLS')
            lls_survey.fill_ions(use_components=True)
        else:
            # Read
            lls_survey = cls.from_sfits(summ_fil)
            # Load ions
            lls_survey.fill_ions(jfile=ions_fil)
        lls_survey.ref = 'HD-LLS'

        """
        # Load transitions
        if not skip_trans:
            print('HD-LLS: Loading transitions from {:s}'.format(clm_files))
            tar = tarfile.open(clm_files)
            for member in tar.getmembers():
                if '.' not in member.name:
                    print('Skipping a likely folder: {:s}'.format(member.name))
                    continue
                # Extract
                f = tar.extractfile(member)
                tdict = json.load(f)
                # Find system
                i0 = member.name.rfind('/')
                i1 = member.name.rfind('_clm')
                try:
                    idx = names.index(member.name[i0+1:i1])
                except ValueError:
                    print('Skipping {:s}, not statistical in DR1'.format(member.name[i0+1:i1]))
                    continue
                # Fill up
                lls_survey._abs_sys[idx].load_components(tdict)
                lls_survey._abs_sys[idx]._components = lls_survey._abs_sys[idx].subsys['A']._components
        """


        # Load metallicity PDFs, keyed by '<radec>z<z>' strings
        fh5=h5py.File(ZH_fil, 'r')
        ras = []
        decs = []
        zval = []
        # list() so .remove works on Python 3, where .keys() is a view
        mkeys = list(fh5['met'].keys())
        mkeys.remove('left_edge_bins')
        for key in mkeys:
            radec, z = key.split('z')
            coord = ltu.radec_to_coord(radec)
            # Save
            zval.append(float(z))
            ras.append(coord.ra.value)
            decs.append(coord.dec.value)
        mcoords = SkyCoord(ras, decs, unit='deg')
        zval = np.array(zval)

        # Set data path and metallicity
        spath = pyigm_path+"/data/LLS/HD-LLS/Spectra/"
        for lls in lls_survey._abs_sys:
            lls.spec_path = spath
            # Match on position (<15") and redshift (|dz| < 2e-3)
            sep = lls.coord.separation(mcoords)
            mt = np.where((sep < 15*u.arcsec) & (np.abs(zval-lls.zabs) < 2e-3))[0]
            if len(mt) == 0:
                pdb.set_trace()
                raise ValueError("Bad match")
            elif len(mt) > 1:  # Take closest of the candidates
                # was np.argmin(sep): the globally closest key may fail
                # the redshift cut
                mt = mt[np.argmin(sep[mt])]
            else:
                # Reduce the 1-element index array to a scalar so that
                # mkeys[mt] (plain list indexing) is valid
                mt = mt[0]
            # Save
            lls.metallicity = MetallicityPDF(fh5['met']['left_edge_bins']+
                                             fh5['met']['left_edge_bins'].attrs['BINSIZE']/2.,
                                             fh5['met'][mkeys[mt]])

        # Spectra?
        if grab_spectra:
            specfils = glob.glob(spath+'HD-LLS_J*.fits')
            if len(specfils) < 100:
                import tarfile
                print('HD-LLS: Downloading a 155Mb file.  Be patient..')
                url = 'http://www.ucolick.org/~xavier/HD-LLS/DR1/HD-LLS_spectra.tar.gz'
                spectra_fil = pyigm_path+'/data/LLS/HD-LLS/HD-LLS_spectra.tar.gz'
                # NOTE(review): urllib2 is Python 2 only -- confirm the
                # supported Python version before modernizing this path
                f = urllib2.urlopen(url)
                with open(spectra_fil, "wb") as code:
                    code.write(f.read())
                # Unpack
                print('HD-LLS: Unpacking..')
                outdir = pyigm_path+"/data/LLS/HD-LLS"
                t = tarfile.open(spectra_fil, 'r:gz')
                t.extractall(outdir)
                # Done
                print('HD-LLS: All done')
            else:
                print('HD-LLS: Using files in {:s}'.format(spath))

        return lls_survey
Beispiel #39
0
def hecto_targets(field, obs_path, hecto_path=None):
    '''Read files related to Hectospec targets

    Parameters:
    -----------
    field : tuple
      (Name, ra, dec)
    obs_path : str, optional
      Path to the observing tree
    hecto_path : str, optional
      Path within the file tree to Hectospec data

    Returns:
    ----------
    all_masks : list of dict
      One dict of mask info per .cat file read
    all_obs : list of Table
      Observing info parsed from each .cat file
    targs : Table
      Target table with coordinates, IDs and mask membership
    '''
    if hecto_path is None:
        hecto_path = '/Galx_Spectra/Hectospec/'

    # Targets
    targ_path = obs_path+field[0]+hecto_path

    # Target file -- exactly one *.targ file is expected
    targ_file = glob.glob(targ_path+'*.targ')
    if len(targ_file) != 1:
        raise ValueError('Wrong number of Hectospec target files')
    else:
        targ_file = targ_file[0]

    # Read PI, program info [NOT IMPLEMENTED]
    #f = open(msk_file, 'r')
    #lines = f.readlines()
    #f.close()

    # Read target table
    tab = ascii.read(targ_file,comment='#')
    # Restrict to rows typed as science targets
    itarg = np.where(tab['type']=='TARGET')
    targs = tab[itarg]
    # Polish
    nrow = len(targs)
    targs.rename_column('ra','RAS')
    targs.rename_column('dec','DECS')
    targs.add_column(Column([0.]*nrow,name='TARG_RA'))
    targs.add_column(Column([0.]*nrow,name='TARG_DEC'))
    # Get RA/DEC in degrees
    for k,row in enumerate(targs):
        coord = ltu.radec_to_coord((row['RAS'], row['DECS']))
        targs[k]['TARG_RA'] = coord.ra.value
        targs[k]['TARG_DEC'] = coord.dec.value
    # ID/Mag (not always present)
    targ_coord = SkyCoord(ra=targs['TARG_RA']*u.deg, dec=targs['TARG_DEC']*u.deg)
    try:
        targs.rename_column('objid','TARG_ID')
    except KeyError:
        # No objid column: placeholders now, IDs filled from .cat files below
        targs.add_column(Column([0]*nrow,name='TARG_ID'))
        targs.add_column(Column([0.]*nrow,name='TARG_MAG'))
        flg_id = 0
    else:
        flg_id = 1
        targs.rename_column('mag','TARG_MAG')
    targs.add_column(Column([0.]*nrow,name='EPOCH'))
    targs.add_column(Column(['SDSS']*nrow,name='TARG_IMG'))
    targs.add_column(Column(['HECTOSPEC']*nrow,name='INSTR'))

    # Per-target mask membership, initialized to the '--' sentinel
    targ_mask = {}
    cnames = ['MASK_NAME', 'MASK_ID']
    smsk = '--'
    msk_val = [smsk]*len(cnames)
    for kk,cname in enumerate(cnames):
        targ_mask[cname] = [msk_val[kk]]*nrow

    # Now the 'mask' files
    mask_files = glob.glob(targ_path+'*.cat')
    all_obs = []
    all_masks = []
    for mask_file in mask_files:
        print('Reading Hectospec mask file: {:s}'.format(mask_file))
        i0 = mask_file.rfind('/')
        mask_nm = mask_file[i0+1:mask_file.find('.cat')]
        # Grab info from spectrum file
        #xdb.set_trace()
        spec_fil = glob.glob(mask_file[:i0+1]+'spHect-'+mask_nm+'.*.fits.gz')
        if len(spec_fil) == 0:
            raise ValueError('Mask not found! {:s}'.format(spec_fil))
            #ras, decs = xra.dtos1((field[1],field[2]))
            #pa=0.
        else:
            # PA, RA, DEC come from the spectrum FITS header
            header = fits.open(spec_fil[0])[0].header
            if header['APERTURE'] != mask_nm:
                raise ValueError('Mask doesnt match!')
            pa = header['POSANGLE']
            ras = header['CAT-RA']
            decs = header['CAT-DEC']
        # Continuing
        mask_dict = dict(INSTR='HECTOSPEC',MASK_NAME=mask_nm,
            MASK_RA=ras, MASK_DEC=decs, MASK_EPOCH=2000.,
            MASK_PA=pa) # SHOULD GRAB PA, RA, DEC FROM SPECTRA FITS HEADER
        all_masks.append(mask_dict)
        # Read obs
        f = open(mask_file, 'r')
        lines = f.readlines()
        f.close()
        iall_obs = []
        for line in lines:
            if 'OBS' in line:
                # OBS lines are whitespace separated; drop empty fields
                prs = line.strip().split(' ')
                gdprs = [iprs for iprs in prs if len(iprs)>0]
                obs_dict = {}
                obs_dict['DATE'] = gdprs[2]
                obs_dict['TEXP'] = float(gdprs[3])
                obs_dict['DISPERSER'] = gdprs[4]
                obs_dict['CONDITIONS'] = gdprs[5]
                #
                iall_obs.append(obs_dict)
        obs_tab = xxul.dict_list_to_table(iall_obs)
        obs_tab['TEXP'].unit = u.s
        # Read observed targets
        obs_targ = ascii.read(mask_file,comment='#')
        gdt = np.where(obs_targ['flag'] == 1)[0]
        # Match observed entries back to the target list (1" tolerance)
        obs_coord = SkyCoord(ra=obs_targ['ra'][gdt]*u.hour, dec=obs_targ['dec'][gdt]*u.deg)
        idx, d2d, d3d = coords.match_coordinates_sky(obs_coord, targ_coord, nthneighbor=1)
        gdm = np.where(d2d < 1.*u.arcsec)[0]
        if len(gdm) != len(gdt):
            raise ValueError('No match')
        else:
            for ii in range(len(gdm)):
                targ_mask['MASK_NAME'][idx[ii]] = mask_nm
                if flg_id == 0:
                    targs['TARG_ID'][idx[ii]] = int(obs_targ['objid'][gdt[ii]])
        """
        for gdi in gdt:
            mtt = np.where(targs['TARG_ID']==
                int(obs_targ['objid'][gdi]))[0]
            if len(mtt) != 1:
                raise ValueError('Multiple matches?!')
            targ_mask['MASK_NAME'][mtt[0]] = mask_nm
        """
        all_obs.append(obs_tab)
    # Add columns to targs; entries still at the sentinel are masked out
    for tt,cname in enumerate(cnames):
        mask = np.array([False]*len(targs))
        bad = np.where(np.array(targ_mask[cname])==msk_val[tt])[0]
        if len(bad)>0:
            mask[bad]=True
        #
        clm = MaskedColumn(targ_mask[cname],name=cname, mask=mask)
        targs.add_column(clm)

    # Look for ID duplicates (rare)
    gdobj = targs['TARG_ID'] > 0
    idval = np.array(targs[gdobj]['TARG_ID']).astype(int)
    uni, counts = np.unique(idval, return_counts=True)
    if len(uni) != np.sum(gdobj):
        warnings.warn("Found duplicated ID values in Hectospect cat files")
        warnings.warn("Modifying these by hand!")
        dup = np.where(counts>1)[0]
        # Fix by-hand
        for idup in dup:
            dobj = np.where(targs['TARG_ID'] == uni[idup])[0]
            if len(dobj) == 1:
                xdb.set_trace()
            # Confirm RA/DEC are different
            dcoord = SkyCoord(ra=targs['TARG_RA'][dobj]*u.deg,
                              dec=targs['TARG_DEC'][dobj]*u.deg)
            idx, d2d, d3d = coords.match_coordinates_sky(dcoord, dcoord, nthneighbor=2)
            if np.sum(d2d < 1*u.arcsec) > 0:
                raise ValueError("Two with the same RA/DEC.  Deal")
            else:
                for ii in range(1,len(dobj)):
                    # Increment: scale the duplicate ID to make it unique
                    print('Setting TARG_ID to {:d} from {:d}'.format(
                            (ii+1)*targs['TARG_ID'][dobj[ii]],targs['TARG_ID'][dobj[ii]]))
                    targs['TARG_ID'][dobj[ii]] = (ii+1)*targs['TARG_ID'][dobj[ii]]
    # Double check
    idval = np.array(targs[gdobj]['TARG_ID']).astype(int)
    uni, counts = np.unique(idval, return_counts=True)
    if len(uni) != np.sum(gdobj):
        raise ValueError("Cannot happen")

    # Finish
    return all_masks, all_obs, targs
Beispiel #40
0
    def load_HDLLS(cls, load_sys=True, grab_spectra=False, isys_path=None):
        """ Default sample of LLS (HD-LLS, DR1)

        Parameters
        ----------
        load_sys : bool, optional
          Load systems using the sys tarball
        grab_spectra : bool, optional
          Grab 1D spectra?  (155Mb)
        isys_path : str, optional
          Read system files from this path

        Return
        ------
        lls_survey
        """

        # Pull from Internet (as necessary)
        summ_fil = pyigm_path + "/data/LLS/HD-LLS/HD-LLS_DR1.fits"
        print('HD-LLS: Loading summary file {:s}'.format(summ_fil))

        # Ions
        ions_fil = pyigm_path + "/data/LLS/HD-LLS/HD-LLS_ions.json"
        print('HD-LLS: Loading ions file {:s}'.format(ions_fil))

        # System files
        sys_files = pyigm_path + "/data/LLS/HD-LLS/HD-LLS_sys.tar.gz"

        # Transitions
        #clm_files = pyigm_path+"/data/LLS/HD-LLS/HD-LLS_clms.tar.gz"

        # Metallicity
        ZH_fil = pyigm_path + "/data/LLS/HD-LLS/HD-LLS_DR1_dustnhi.hdf5"
        print('HD-LLS: Loading metallicity file {:s}'.format(ZH_fil))

        # Load systems via the sys tarball.  Includes transitions
        if load_sys:  # This approach takes ~120s
            if isys_path is not None:
                lls_survey = pyisu.load_sys_files(isys_path,
                                                  'LLS',
                                                  sys_path=True,
                                                  ref='HD-LLS')
            else:
                lls_survey = pyisu.load_sys_files(sys_files,
                                                  'LLS',
                                                  ref='HD-LLS')
            lls_survey.fill_ions(use_components=True)
        else:
            # Read
            lls_survey = cls.from_sfits(summ_fil)
            # Load ions
            lls_survey.fill_ions(jfile=ions_fil)
        lls_survey.ref = 'HD-LLS'
        """
        # Load transitions
        if not skip_trans:
            print('HD-LLS: Loading transitions from {:s}'.format(clm_files))
            tar = tarfile.open(clm_files)
            for member in tar.getmembers():
                if '.' not in member.name:
                    print('Skipping a likely folder: {:s}'.format(member.name))
                    continue
                # Extract
                f = tar.extractfile(member)
                tdict = json.load(f)
                # Find system
                i0 = member.name.rfind('/')
                i1 = member.name.rfind('_clm')
                try:
                    idx = names.index(member.name[i0+1:i1])
                except ValueError:
                    print('Skipping {:s}, not statistical in DR1'.format(member.name[i0+1:i1]))
                    continue
                # Fill up
                lls_survey._abs_sys[idx].load_components(tdict)
                lls_survey._abs_sys[idx]._components = lls_survey._abs_sys[idx].subsys['A']._components
        """

        # Load metallicity PDFs, keyed by '<radec>z<z>' strings
        fh5 = h5py.File(ZH_fil, 'r')
        ras = []
        decs = []
        zval = []
        # NOTE(review): on Python 3 h5py .keys() is a view with no .remove;
        # this line is Python 2 only -- wrap in list() if porting
        mkeys = fh5['met'].keys()
        mkeys.remove('left_edge_bins')
        for key in mkeys:
            radec, z = key.split('z')
            coord = ltu.radec_to_coord(radec)
            # Save
            zval.append(float(z))
            ras.append(coord.ra.value)
            decs.append(coord.dec.value)
        mcoords = SkyCoord(ras, decs, unit='deg')
        zval = np.array(zval)

        # Set data path and metallicity
        spath = pyigm_path + "/data/LLS/HD-LLS/Spectra/"
        for lls in lls_survey._abs_sys:
            lls.spec_path = spath
            # Match on position (<15") and redshift (|dz| < 2e-3)
            sep = lls.coord.separation(mcoords)
            mt = np.where((sep < 15 * u.arcsec)
                          & (np.abs(zval - lls.zabs) < 2e-3))[0]
            if len(mt) == 0:
                pdb.set_trace()
                raise ValueError("Bad match")
            elif len(mt) > 1:  # Take closest
                mt = np.argmin(sep)
            else:
                # Reduce the 1-element index array to a scalar for mkeys[mt]
                mt = mt[0]
            # Save
            lls.metallicity = MetallicityPDF(
                fh5['met']['left_edge_bins'] +
                fh5['met']['left_edge_bins'].attrs['BINSIZE'] / 2.,
                fh5['met'][mkeys[mt]])

        # Spectra?
        if grab_spectra:
            specfils = glob.glob(spath + 'HD-LLS_J*.fits')
            if len(specfils) < 100:
                import tarfile
                print('HD-LLS: Downloading a 155Mb file.  Be patient..')
                url = 'http://www.ucolick.org/~xavier/HD-LLS/DR1/HD-LLS_spectra.tar.gz'
                spectra_fil = pyigm_path + '/data/LLS/HD-LLS/HD-LLS_spectra.tar.gz'
                # NOTE(review): urllib2 is Python 2 only -- confirm supported
                # Python version before modernizing this path
                f = urllib2.urlopen(url)
                with open(spectra_fil, "wb") as code:
                    code.write(f.read())
                # Unpack
                print('HD-LLS: Unpacking..')
                outdir = pyigm_path + "/data/LLS/HD-LLS"
                t = tarfile.open(spectra_fil, 'r:gz')
                t.extractall(outdir)
                # Done
                print('HD-LLS: All done')
            else:
                print('HD-LLS: Using files in {:s}'.format(spath))

        return lls_survey
Beispiel #41
0
def ingest_johnson15():
    """ Ingest the Johnson+15 sample into a CGMAbsSurvey and write it
    out as a JSON tarball.
    """
    # Dict for QSO coords
    qsos = {}
    qsos['1ES1028+511'] = ltu.radec_to_coord('J103118.52517+505335.8193')
    qsos['FBQS1010+3003'] = ltu.radec_to_coord((152.5029167, 30.056111))
    qsos['HE0226-4110'] = ltu.radec_to_coord('J022815.252-405714.62')
    qsos['HS1102+3441'] = ltu.radec_to_coord('J110539.8189+342534.672')
    qsos['LBQS1435-0134'] = ltu.radec_to_coord((219.451183, -1.786328))
    qsos['PG0832+251'] = ltu.radec_to_coord('J083535.8048+245940.146')
    qsos['PG1522+101'] = ltu.radec_to_coord((231.1023075, 9.9749372))
    qsos['PKS0405-123'] = ltu.radec_to_coord('J040748.4376-121136.662')
    qsos['SBS1108+560'] = ltu.radec_to_coord((167.8841667, 55.790556))
    qsos['SBS1122+594'] = ltu.radec_to_coord((171.4741250, 59.172667))
    qsos['Ton236'] = ltu.radec_to_coord((232.1691746, 28.424928))

    # Virial matching
    j15_file = resource_filename('pyigm',
                                 'data/CGM/z0/johnson2015_table1.fits')
    j15_tbl = Table.read(j15_file)

    # Clip COS-Halos (avoid duplicating systems from that survey)
    keep = j15_tbl['Survey'] != 'COS-Halos'
    j15_tbl = j15_tbl[keep]

    # CGM Survey
    j15 = CGMAbsSurvey(survey='J15', ref='Johnson+15')

    # Linelist
    llist = LineList('ISM')

    for row in j15_tbl:
        # RA, DEC
        # Galaxy
        gal = Galaxy((row['RAJ2000'], row['DEJ2000']), z=float(row['zgal']))
        gal.Class = row['Class']
        gal.Mstar = row['logM_']
        gal.field = row['Name']
        gal.Env = row['Env']
        gal.d_Rh = row['d_Rh']
        # IGM system at the galaxy redshift, +/- 400 km/s
        igmsys = IGMSystem(qsos[row['Name']], float(row['zgal']),
                           (-400., 400.) * u.km / u.s)
        # HI
        if np.isnan(row['logNHI']):
            pass
        else:
            # HI component -- flag 3: upper limit; flag 2: saturated;
            # flag 1: detection with quoted error
            if row['l_logNHI'] == '<':
                flagN = 3
                sigNHI = 99.
            elif np.isnan(row['e_logNHI']):
                flagN = 2
                sigNHI = 99.
            else:
                flagN = 1
                sigNHI = row['e_logNHI']
            HIcomp = AbsComponent(qsos[row['Name']], (1, 1),
                                  float(row['zgal']), (-400, 400) * u.km / u.s,
                                  Ntup=(flagN, row['logNHI'], sigNHI))
            igmsys._components.append(HIcomp)
            # NHI
            igmsys.NHI = HIcomp.logN
            igmsys.flag_NHI = HIcomp.flag_N
            igmsys.sig_NHI = HIcomp.sig_N
        # OVI
        if np.isnan(row['logNHOVI']):
            pass
        else:
            # OVI component -- same flag convention as HI above
            if row['l_logNHOVI'] == '<':
                flagN = 3
                sigNHOVI = 99.
            elif np.isnan(row['e_logNHOVI']):
                flagN = 2
                sigNHOVI = 99.
            else:
                flagN = 1
                sigNHOVI = row['e_logNHOVI']
            OVIcomp = AbsComponent(qsos[row['Name']], (8, 6),
                                   float(row['zgal']),
                                   (-400, 400) * u.km / u.s,
                                   Ntup=(flagN, row['logNHOVI'], sigNHOVI))
            igmsys._components.append(OVIcomp)
        # CGM
        cgmabs = CGMAbsSys(gal, igmsys, chk_lowz=False)
        j15.cgm_abs.append(cgmabs)
    # Write tarball
    out_file = resource_filename('pyigm', '/data/CGM/z0/J15_sys.tar')
    j15.to_json_tarball(out_file)
Beispiel #42
0
    def __init__(self,
                 radec,
                 Zion,
                 zcomp,
                 vlim,
                 Ej=0. / u.cm,
                 A=None,
                 Ntup=None,
                 comment='',
                 name=None,
                 stars=None,
                 reliability='none'):
        """  Initiator

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA,DEC) in deg or astropy.coordinate.SkyCoord
        Zion : tuple
            Atomic number, ion -- (int,int)
            e.g. (8,1) for OI
            Note: (-1, -1) is special and is meant for moleculer (e.g. H2)
                  This notation will most likely change in the future.
        zcomp : float
            Absorption component redshift
        vlim : Quantity array
            Velocity limits of the component w/r to `z`
            e.g.  [-300,300]*u.km/u.s
        A : int, optional
            Atomic mass -- used to distinguish isotopes
        Ntup : tuple
            (int,float,two-element list,tuple or array)
            (flag_N, logN, sig_logN)
            flag_N : Flag describing N measurement  (0: no info; 1: detection; 2: saturated; 3: non-detection)
            logN : log10 N column density
            sig_logN : Error in log10 N.  Two elements are expected but not required
        Ej : Quantity, optional
            Energy of lower level (1/cm)
        stars : str, optional
            asterisks to add to name, e.g. '**' for CI**
            Required if name=None and Ej>0.
        reliability : str, optional
            Reliability of AbsComponent
                'a' - reliable
                'b' - possible
                'c' - uncertain
                'none' - not defined (default)
        comment : str, optional
            A comment, default is ``
        """

        # Required
        self.coord = ltu.radec_to_coord(radec)
        self.Zion = Zion
        # Limits -- convert the velocity limits to redshift limits about zcomp
        zlim = ltu.z_from_dv(vlim, zcomp)
        self.limits = zLimits(zcomp, zlim.tolist())

        # Attributes -- start from a copy of the module-level defaults
        self.attrib = init_attrib.copy()

        # Optional
        self.A = A
        self.Ej = Ej
        self.stars = stars
        self.comment = comment
        if Ntup is not None:
            self.attrib['flag_N'] = Ntup[0]
            self.attrib['logN'] = Ntup[1]
            # Normalize sig_logN to a two-element array whether a scalar
            # or a pair was given
            if isiterable(Ntup[2]):
                self.attrib['sig_logN'] = np.array(Ntup[2])
            else:
                self.attrib['sig_logN'] = np.array([Ntup[2]] * 2)
            _, _ = ltaa.linear_clm(self.attrib)  # Set linear quantities

        # Name
        if (name is None) and (self.Zion != (-1, -1)):
            iname = ions.ion_to_name(self.Zion, nspace=0)
            if self.Ej.value > 0:  # Need to put *'s in name
                if stars is not None:
                    iname += stars
                else:
                    warnings.warn(
                        "No stars provided.  Adding one because Ej > 0.")
                    iname += '*'
            self.name = '{:s}_z{:0.5f}'.format(iname, self.zcomp)
        elif (name is None) and (self.Zion == (-1, -1)):
            # Molecular (e.g. H2) special case
            self.name = 'mol_z{:0.5f}'.format(self.zcomp)
        else:
            self.name = name

        # reliability
        if reliability not in ['a', 'b', 'c', 'none']:
            raise ValueError(
                "Input reliability `{}` not valid.".format(reliability))
        self.reliability = reliability

        # AbsLines
        self._abslines = []
Beispiel #43
0
    def __init__(self, radec, Zion, zcomp, vlim, Ej=0./u.cm, A=None,
                 Ntup=None, comment='', name=None, stars=None):
        """  Initiator

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA,DEC) in deg or astropy.coordinate.SkyCoord
        Zion : tuple
            Atomic number, ion -- (int,int)
            e.g. (8,1) for OI
        zcomp : float
            Absorption component redshift
        vlim : Quantity array
            Velocity limits of the component w/r to `z`
            e.g.  [-300,300]*u.km/u.s
        A : int, optional
            Atomic mass -- used to distinguish isotopes
        Ntup : tuple
            (int,float,float)
            (flag_N,logN,sig_logN)
            flag_N : Flag describing N measurement  (0: no info; 1: detection; 2: saturated; 3: non-detection)
            logN : log10 N column density
            sig_logN : Error in log10 N
        Ej : Quantity, optional
            Energy of lower level (1/cm)
        stars : str, optional
            asterisks to add to name, e.g. '**' for CI**
            Required if name=None and Ej>0.
        comment : str, optional
            A comment, default is ``
        """

        # Required
        self.coord = ltu.radec_to_coord(radec)
        self.Zion = Zion
        self.zcomp = zcomp
        self.vlim = vlim

        # Optional
        self.A = A
        self.Ej = Ej
        self.comment = comment
        if Ntup is not None:
            self.flag_N = Ntup[0]
            self.logN = Ntup[1]
            self.sig_logN = Ntup[2]
            _, _ = ltaa.linear_clm(self)  # Set linear quantities
        else:
            # No column info supplied
            self.flag_N = 0
            self.logN = 0.
            self.sig_logN = 0.

        # Name
        if name is None:
            iname = ions.ion_name(self.Zion, nspace=0)
            if self.Ej.value > 0:  # Need to put *'s in name
                try:
                    iname += stars
                # Narrowed from a bare except: the only expected failure is
                # stars=None, which makes the += raise TypeError
                except TypeError:
                    raise IOError("Need to provide 'stars' parameter.")
            self.name = '{:s}_z{:0.5f}'.format(iname, self.zcomp)
        else:
            self.name = name

        # Potential for attributes
        self.attrib = dict()

        # Other
        self._abslines = []
Beispiel #44
0
def hdf5_adddata(hdf, IDs, sname, debug=False, chk_meta_only=False,
                 mk_test_file=False):
    """ Append HST_z2 data to the h5 file

    Parameters
    ----------
    hdf : hdf5 pointer
      Open HDF5 file; a group named `sname` is created in it
    IDs : ndarray
      int array of IGM_ID values in mainDB
    sname : str
      Survey name; must be 'HST_z2'
    debug : bool, optional
      Unused; kept for a uniform hdf5_adddata signature
    chk_meta_only : bool, optional
      Only check meta file;  will not write spectra
    mk_test_file : bool, optional
      Generate the debug test file for Travis??  (unused here)

    Returns
    -------

    """
    # Add Survey
    print("Adding {:s} survey to DB".format(sname))
    hdf.create_group(sname)
    # Load up
    meta = grab_meta()
    bmeta = meta_for_build()
    # Checks
    if sname != 'HST_z2':
        raise IOError("Not expecting this survey..")
    if np.sum(IDs < 0) > 0:
        raise ValueError("Bad ID values")
    # Open Meta tables
    if len(bmeta) != len(IDs):
        raise ValueError("Wrong sized table..")

    # Generate ID array from RA/DEC
    c_cut = SkyCoord(ra=bmeta['RA'], dec=bmeta['DEC'], unit='deg')
    c_all = SkyCoord(ra=meta['RA'], dec=meta['DEC'], unit='deg')
    # Find new sources; every meta row must match a build-catalog source
    idx, d2d, d3d = match_coordinates_sky(c_all, c_cut, nthneighbor=1)
    if np.sum(d2d > 0.1*u.arcsec):
        raise ValueError("Bad matches in HST_z2")
    meta_IDs = IDs[idx]

    # Propagate the matched IDs into the survey catalog
    meta.add_column(Column(meta_IDs, name='IGM_ID'))

    # Build spectra (and parse for meta)
    nspec = len(meta)
    max_npix = 300  # Just needs to be large enough
    data = np.ma.empty((1,),
                       dtype=[(str('wave'), 'float64', (max_npix)),
                              (str('flux'), 'float32', (max_npix)),
                              (str('sig'),  'float32', (max_npix)),
                              #(str('co'),   'float32', (max_npix)),
                             ])
    # Init the (resizable, compressed) spectral dataset
    spec_set = hdf[sname].create_dataset('spec', data=data, chunks=True,
                                         maxshape=(None,), compression='gzip')
    spec_set.resize((nspec,))
    wvminlist = []
    wvmaxlist = []
    npixlist = []
    speclist = []
    # Loop
    path = os.getenv('RAW_IGMSPEC')+'/HST_z2/'
    maxpix = 0
    for jj,row in enumerate(meta):
        # Generate full file
        if row['INSTR'] == 'ACS':
            full_file = path+row['qso']+'.fits.gz'
        elif row['INSTR'] == 'WFC3':
            coord = ltu.radec_to_coord((row['RA'],row['DEC']))
            full_file = path+'/J{:s}{:s}_wfc3.fits.gz'.format(coord.ra.to_string(unit=u.hour,sep='',precision=2,pad=True),
                                               coord.dec.to_string(sep='',pad=True,alwayssign=True,precision=1))
        else:
            # Fail loudly: previously `full_file` would silently carry over
            # from the prior row (or raise NameError on the first row)
            raise ValueError("Unexpected INSTR in HST_z2 meta: {:s}".format(row['INSTR']))
        # Extract
        print("HST_z2: Reading {:s}".format(full_file))
        spec = lsio.readspec(full_file)
        # Parse name
        fname = full_file.split('/')[-1]
        # npix
        npix = spec.npix
        if npix > max_npix:
            raise ValueError("Not enough pixels in the data... ({:d})".format(npix))
        else:
            maxpix = max(npix,maxpix)
        # Some fiddling about
        for key in ['wave','flux','sig']:
            data[key] = 0.  # Important to init (for compression too)
        data['flux'][0][:npix] = spec.flux.value
        data['sig'][0][:npix] = spec.sig.value
        data['wave'][0][:npix] = spec.wavelength.value
        # Meta
        speclist.append(str(fname))
        wvminlist.append(np.min(data['wave'][0][:npix]))
        wvmaxlist.append(np.max(data['wave'][0][:npix]))
        npixlist.append(npix)
        if chk_meta_only:
            continue
        # Only way to set the dataset correctly
        spec_set[jj] = data

    #
    print("Max pix = {:d}".format(maxpix))
    # Add columns
    meta.add_column(Column([2000.]*nspec, name='EPOCH'))
    meta.add_column(Column(speclist, name='SPEC_FILE'))
    meta.add_column(Column(npixlist, name='NPIX'))
    meta.add_column(Column(wvminlist, name='WV_MIN'))
    meta.add_column(Column(wvmaxlist, name='WV_MAX'))
    meta.add_column(Column(np.arange(nspec,dtype=int),name='SURVEY_ID'))

    # Add HDLLS meta to hdf5
    if iiu.chk_meta(meta):
        if chk_meta_only:
            pdb.set_trace()
        hdf[sname]['meta'] = meta
    else:
        raise ValueError("meta file failed")
    # References
    refs = [dict(url='http://adsabs.harvard.edu/abs/2011ApJS..195...16O',
                 bib='omeara11')
            ]
    jrefs = ltu.jsonify(refs)
    hdf[sname]['meta'].attrs['Refs'] = json.dumps(jrefs)
    #
    return
Beispiel #45
0
    def run(self,
            basename=None,
            ra=None,
            dec=None,
            obstime=None,
            std_trace=None,
            manual_extract_dict=None,
            show_peaks=False):
        """
        Primary code flow for PypeIt reductions

        *NOT* used by COADD2D

        Args:
            basename (str, optional):
                Required if flexure correction is to be applied
            ra (str, optional):
                Required if helio-centric correction is to be applied
            dec (str, optional):
                Required if helio-centric correction is to be applied
            obstime (:obj:`astropy.time.Time`, optional):
                Required if helio-centric correction is to be applied
            std_trace (np.ndarray, optional):
                Trace of the standard star
            manual_extract_dict (dict, optional):
            show_peaks (bool, optional):
                Show peaks in find_objects methods

        Returns:
            tuple: skymodel (ndarray), objmodel (ndarray), ivarmodel (ndarray),
               outmask (ndarray), sobjs (SpecObjs), waveimg (`numpy.ndarray`_),
               tilts (`numpy.ndarray`_).
               See main doc string for description

        """

        # Deal with dynamic calibrations
        # Tilts
        self.waveTilts.is_synced(self.slits)
        #   Deal with Flexure
        if self.par['calibrations']['tiltframe']['process'][
                'spat_flexure_correct']:
            _spat_flexure = 0. if self.spat_flexure_shift is None else self.spat_flexure_shift
            # If they both shifted the same, there will be no reason to shift the tilts
            tilt_flexure_shift = _spat_flexure - self.waveTilts.spat_flexure
        else:
            tilt_flexure_shift = self.spat_flexure_shift
        self.tilts = self.waveTilts.fit2tiltimg(self.slitmask,
                                                flexure=tilt_flexure_shift)

        # Wavelengths (on unmasked slits)
        self.waveimg = wavecalib.build_waveimg(
            self.spectrograph,
            self.tilts,
            self.slits,
            self.wv_calib,
            spat_flexure=self.spat_flexure_shift)

        # First pass object finding
        # NOTE: `show` uses `&` on booleans; equivalent to `and` here
        self.sobjs_obj, self.nobj, skymask_init = \
            self.find_objects(self.sciImg.image, std_trace=std_trace,
                              show_peaks=show_peaks,
                              show=self.reduce_show & (not self.std_redux),
                              manual_extract_dict=manual_extract_dict)

        # Global sky subtract
        self.initial_sky = \
            self.global_skysub(skymask=skymask_init).copy()

        # Second pass object finding on sky-subtracted image
        # (skipped for standard-star reductions or when explicitly disabled)
        if (not self.std_redux) and (
                not self.par['reduce']['findobj']['skip_second_find']):
            self.sobjs_obj, self.nobj, self.skymask = \
                self.find_objects(self.sciImg.image - self.initial_sky,
                                  std_trace=std_trace,
                                  show=self.reduce_show,
                                  show_peaks=show_peaks,
                                  manual_extract_dict=manual_extract_dict)
        else:
            msgs.info("Skipping 2nd run of finding objects")

        # Do we have any positive objects to proceed with?
        if self.nobj > 0:
            # Global sky subtraction second pass. Uses skymask from object finding
            if (self.std_redux
                    or self.par['reduce']['extraction']['skip_optimal']
                    or self.par['reduce']['findobj']['skip_second_find']):
                # Reuse the first-pass sky in these modes
                self.global_sky = self.initial_sky.copy()
            else:
                self.global_sky = self.global_skysub(skymask=self.skymask,
                                                     show=self.reduce_show)
            # Extract + Return
            self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs \
                = self.extract(self.global_sky, self.sobjs_obj)
        else:  # No objects, pass back what we have
            self.skymodel = self.initial_sky
            self.objmodel = np.zeros_like(self.sciImg.image)
            # Set to sciivar. Could create a model but what is the point?
            self.ivarmodel = np.copy(self.sciImg.ivar)
            # Set to the initial mask in case no objects were found
            self.outmask = self.sciImg.fullmask
            # empty specobjs object from object finding
            self.sobjs = self.sobjs_obj

        # Purge out the negative objects if this was a near-IR reduction.
        if self.ir_redux:
            self.sobjs.purge_neg()

        # Finish up
        if self.sobjs.nobj == 0:
            msgs.warn('No objects to extract!')
        else:
            # TODO -- Should we move these to redux.run()?
            # Flexure correction if this is not a standard star
            if not self.std_redux:
                self.spec_flexure_correct(self.sobjs, basename)
            # Heliocentric
            radec = ltu.radec_to_coord((ra, dec))
            self.helio_correct(self.sobjs, radec, obstime)

        # Update the mask: flag slits that became bad during reduction
        reduce_masked = np.where(
            np.invert(self.reduce_bpm_init) & self.reduce_bpm)[0]
        if len(reduce_masked) > 0:
            self.slits.mask[reduce_masked] = self.slits.bitmask.turn_on(
                self.slits.mask[reduce_masked], 'BADREDUCE')

        # Return
        return self.skymodel, self.objmodel, self.ivarmodel, self.outmask, self.sobjs, \
               self.waveimg, self.tilts
Beispiel #46
0
    def __init__(self, radec, Zion, zcomp, vlim, Ej=0./u.cm, A=None,
                 Ntup=None, comment='', name=None, stars=None, reliability='none'):
        """  Initiator

        Parameters
        ----------
        radec : tuple or SkyCoord
            (RA,DEC) in deg or astropy.coordinate.SkyCoord
        Zion : tuple
            Atomic number, ion -- (int,int)
            e.g. (8,1) for OI
            Note: (-1, -1) is special and is meant for moleculer (e.g. H2)
                  This notation will most likely change in the future.
        zcomp : float
            Absorption component redshift
        vlim : Quantity array
            Velocity limits of the component w/r to `z`
            e.g.  [-300,300]*u.km/u.s
        A : int, optional
            Atomic mass -- used to distinguish isotopes
        Ntup : tuple
            (int,float,two-element list,tuple or array)
            (flag_N, logN, sig_logN)
            flag_N : Flag describing N measurement  (0: no info; 1: detection; 2: saturated; 3: non-detection)
            logN : log10 N column density
            sig_logN : Error in log10 N.  Two elements are expected but not required
        Ej : Quantity, optional
            Energy of lower level (1/cm)
        stars : str, optional
            asterisks to add to name, e.g. '**' for CI**
            Required if name=None and Ej>0.
        reliability : str, optional
            Reliability of AbsComponent
                'a' - reliable
                'b' - possible
                'c' - uncertain
                'none' - not defined (default)
        comment : str, optional
            A comment, default is ``
        """

        # Required
        self.coord = ltu.radec_to_coord(radec)
        self.Zion = Zion
        # Limits: convert velocity limits to redshift limits about zcomp
        # NOTE(review): zcomp is not stored as a plain attribute here, yet
        # self.zcomp is read below -- presumably a property backed by
        # self.limits in the enclosing class; confirm there.
        zlim = ltu.z_from_dv(vlim, zcomp)
        self.limits = zLimits(zcomp, zlim.tolist())

        # Attributes -- start from the module-level template dict
        self.attrib = init_attrib.copy()

        # Optional
        self.A = A
        self.Ej = Ej
        self.stars = stars
        self.comment = comment
        if Ntup is not None:
            self.attrib['flag_N'] = Ntup[0]
            self.attrib['logN'] = Ntup[1]
            # Accept either a scalar error or a 2-element (lower, upper) pair;
            # a scalar is duplicated into a symmetric pair
            if isiterable(Ntup[2]):
                self.attrib['sig_logN'] = np.array(Ntup[2])
            else:
                self.attrib['sig_logN'] = np.array([Ntup[2]]*2)
            _, _ = ltaa.linear_clm(self.attrib)  # Set linear quantities
        # (when Ntup is None, attrib keeps the defaults from init_attrib)

        # Name
        if (name is None) and (self.Zion != (-1, -1)):
            iname = ions.ion_to_name(self.Zion, nspace=0)
            if self.Ej.value > 0:  # Need to put *'s in name
                if stars is not None:
                    iname += stars
                else:
                    warnings.warn("No stars provided.  Adding one because Ej > 0.")
                    iname += '*'
            self.name = '{:s}_z{:0.5f}'.format(iname, self.zcomp)
        elif (name is None) and (self.Zion == (-1, -1)):
            # Special molecular case (e.g. H2)
            self.name = 'mol_z{:0.5f}'.format(self.zcomp)
        else:
            self.name = name

        # reliability
        if reliability not in ['a', 'b', 'c', 'none']:
            raise ValueError("Input reliability `{}` not valid.".format(reliability))
        self.reliability = reliability

        # AbsLines associated with this component (filled later)
        self._abslines = []
Beispiel #47
0
    def query_position(self, inp, radius, query_dict=None, max_match=None,
                       verbose=True, groups=None, cosmo=None, **kwargs):
        """ Search for sources in a radius around the input coord

        Parameters
        ----------
        inp : str or tuple or SkyCoord
          See linetools.utils.radec_to_coord for details
          Single coordinate
        radius : Angle or Quantity
          Tolerance for a match
          If Quantity has dimensions of length (e.g. kpc), then
          it is assumed a physical radius (dependent on Cosmology)
        groups : list, optional
          Restrict to matches within one or more groups
          Uses query_dict()
        query_dict : dict, optional
          Restrict on criteria specified in the query_dict
          Uses query_dict()
        max_match : int, optional
          Maximum number of rows to return in sub_cat and IDs
          Ordered by separation distance
        cosmo : astropy.cosmology, optional
          Used if radius is a length; stored on self for subsequent calls.
          Defaults to Planck15 if neither this argument nor self.cosmo is set.
        verbose
        kwargs

        Returns
        -------
        matches : bool ndarray
          True if the row in the catalog is a match
          Size matches complete catalog irrespective of max_match
        sub_cat : Table
          Slice of the catalog with matched rows
          Ordered by separation; May be limited by max_match
        IDs : int ndarray
          Array of IDKEY values of the matches
          Ordered by separation; May be limited by max_match
        """
        # Checks
        if not isinstance(radius, (Angle, Quantity)):
            raise IOError("Input radius must be an Angle or Quantity type, e.g. 10.*u.arcsec or 300*u.kpc")
        # Convert to SkyCoord
        coord = ltu.radec_to_coord(inp)
        # Separation
        sep = coord.separation(self.coords)

        # Find matches within tolerance
        if radius.cgs.unit == u.cm:
            # Cosmology -- honor a user-supplied cosmology
            # (previously the `cosmo` argument was silently ignored)
            if cosmo is not None:
                self.cosmo = cosmo
            elif not hasattr(self, 'cosmo'):
                from astropy.cosmology import Planck15
                self.cosmo = Planck15
            # Offset: angular separation -> proper distance at each source redshift
            kpc_proper = self.cosmo.kpc_proper_per_arcmin(self.cat['zem'])
            phys_sep = kpc_proper * sep.to('arcmin')
            good_z = self.cat['zem'] > 1e-3  # Floating point but somewhat arbitrary
            # Match
            matches = (phys_sep < radius) & good_z
        else:
            # Match
            matches = sep < radius

        # Query dict?
        if (query_dict is not None) or (groups is not None):
            if query_dict is None:
                query_dict = {}
            qmatches, _, _ = self.query_dict(query_dict, groups=groups, **kwargs)
            matches &= qmatches
        if verbose:
            print("Your search yielded {:d} match[es] within radius={:g}".format(np.sum(matches), radius))

        # Sort by separation
        asort = np.argsort(sep[matches])
        if max_match is not None:
            imax = min(asort.size, max_match)
            asort = asort[:imax]

        # Return
        return matches, self.cat[matches][asort], self.cat[self.idkey][matches].data[asort]
Beispiel #48
0
def hecto_targets(field, obs_path, hecto_path=None):
    '''Read files related to Hectospec targets

    Parameters:
    -----------
    field : tuple
      (Name, ra, dec)
    obs_path : str, optional
      Path to the observing tree
    hecto_path : str, optional
      Path within the file tree to Hectospec data
      (default '/Galx_Spectra/Hectospec/')

    Returns:
    ----------
    all_masks : list of dict
      One dict of mask info per .cat file found
    all_obs : list of Table
      Observation records parsed from each .cat file
    targs : Table
      Target table with RA/DEC in degrees, IDs, mags and mask columns
    '''
    if hecto_path is None:
        hecto_path = '/Galx_Spectra/Hectospec/'

    # Targets
    targ_path = obs_path + field[0] + hecto_path

    # Target file -- exactly one *.targ file is expected
    targ_file = glob.glob(targ_path + '*.targ')
    if len(targ_file) != 1:
        raise ValueError('Wrong number of Hectospec target files')
    else:
        targ_file = targ_file[0]

    # Read PI, program info [NOT IMPLEMENTED]
    #f = open(msk_file, 'r')
    #lines = f.readlines()
    #f.close()

    # Read target table
    tab = ascii.read(targ_file, comment='#')
    # Restrict to targets
    itarg = np.where(tab['type'] == 'TARGET')
    targs = tab[itarg]
    # Polish
    nrow = len(targs)
    targs.rename_column('ra', 'RAS')
    targs.rename_column('dec', 'DECS')
    targs.add_column(Column([0.] * nrow, name='TARG_RA'))
    targs.add_column(Column([0.] * nrow, name='TARG_DEC'))
    # Get RA/DEC in degrees (sexagesimal strings -> decimal degrees)
    for k, row in enumerate(targs):
        coord = ltu.radec_to_coord((row['RAS'], row['DECS']))
        targs[k]['TARG_RA'] = coord.ra.value
        targs[k]['TARG_DEC'] = coord.dec.value
    # ID/Mag (not always present)
    targ_coord = SkyCoord(ra=targs['TARG_RA'] * u.deg,
                          dec=targs['TARG_DEC'] * u.deg)
    try:
        targs.rename_column('objid', 'TARG_ID')
    except KeyError:
        # No objid column: fill with placeholders; flg_id marks this case
        targs.add_column(Column([0] * nrow, name='TARG_ID'))
        targs.add_column(Column([0.] * nrow, name='TARG_MAG'))
        flg_id = 0
    else:
        flg_id = 1
        targs.rename_column('mag', 'TARG_MAG')
    targs.add_column(Column([0.] * nrow, name='EPOCH'))
    targs.add_column(Column(['SDSS'] * nrow, name='TARG_IMG'))
    targs.add_column(Column(['HECTOSPEC'] * nrow, name='INSTR'))

    # Per-target mask bookkeeping, filled in while reading the .cat files
    targ_mask = {}
    cnames = ['MASK_NAME', 'MASK_ID']
    smsk = '--'
    msk_val = [smsk] * len(cnames)
    for kk, cname in enumerate(cnames):
        targ_mask[cname] = [msk_val[kk]] * nrow

    # Now the 'mask' files
    mask_files = glob.glob(targ_path + '*.cat')
    all_obs = []
    all_masks = []
    for mask_file in mask_files:
        print('Reading Hectospec mask file: {:s}'.format(mask_file))
        i0 = mask_file.rfind('/')
        mask_nm = mask_file[i0 + 1:mask_file.find('.cat')]
        # Grab info from spectrum file
        #xdb.set_trace()
        spec_fil = glob.glob(mask_file[:i0 + 1] + 'spHect-' + mask_nm +
                             '.*.fits.gz')
        if len(spec_fil) == 0:
            raise ValueError('Mask not found! {:s}'.format(spec_fil))
            #ras, decs = xra.dtos1((field[1],field[2]))
            #pa=0.
        else:
            header = fits.open(spec_fil[0])[0].header
            if header['APERTURE'] != mask_nm:
                raise ValueError('Mask doesnt match!')
            pa = header['POSANGLE']
            ras = header['CAT-RA']
            decs = header['CAT-DEC']
        # Continuing
        mask_dict = dict(
            INSTR='HECTOSPEC',
            MASK_NAME=mask_nm,
            MASK_RA=ras,
            MASK_DEC=decs,
            MASK_EPOCH=2000.,
            MASK_PA=pa)  # SHOULD GRAB PA, RA, DEC FROM SPECTRA FITS HEADER
        all_masks.append(mask_dict)
        # Read obs: lines containing 'OBS' hold date/exptime/disperser/conditions
        f = open(mask_file, 'r')
        lines = f.readlines()
        f.close()
        iall_obs = []
        for line in lines:
            if 'OBS' in line:
                prs = line.strip().split(' ')
                gdprs = [iprs for iprs in prs if len(iprs) > 0]
                obs_dict = {}
                obs_dict['DATE'] = gdprs[2]
                obs_dict['TEXP'] = float(gdprs[3])
                obs_dict['DISPERSER'] = gdprs[4]
                obs_dict['CONDITIONS'] = gdprs[5]
                #
                iall_obs.append(obs_dict)
        obs_tab = xxul.dict_list_to_table(iall_obs)
        obs_tab['TEXP'].unit = u.s
        # Read observed targets (flag == 1 means observed)
        obs_targ = ascii.read(mask_file, comment='#')
        gdt = np.where(obs_targ['flag'] == 1)[0]
        # Match to target list -- note RA is in hours here
        obs_coord = SkyCoord(ra=obs_targ['ra'][gdt] * u.hour,
                             dec=obs_targ['dec'][gdt] * u.deg)
        idx, d2d, d3d = coords.match_coordinates_sky(obs_coord,
                                                     targ_coord,
                                                     nthneighbor=1)
        gdm = np.where(d2d < 1. * u.arcsec)[0]
        if len(gdm) != len(gdt):
            raise ValueError('No match')
        else:
            for ii in range(len(gdm)):
                targ_mask['MASK_NAME'][idx[ii]] = mask_nm
                if flg_id == 0:
                    targs['TARG_ID'][idx[ii]] = int(obs_targ['objid'][gdt[ii]])
        """
        for gdi in gdt:
            mtt = np.where(targs['TARG_ID']==
                int(obs_targ['objid'][gdi]))[0]
            if len(mtt) != 1:
                raise ValueError('Multiple matches?!')
            targ_mask['MASK_NAME'][mtt[0]] = mask_nm
        """
        all_obs.append(obs_tab)
    # Add columns to targs (masked where never observed, i.e. still '--')
    for tt, cname in enumerate(cnames):
        mask = np.array([False] * len(targs))
        bad = np.where(np.array(targ_mask[cname]) == msk_val[tt])[0]
        if len(bad) > 0:
            mask[bad] = True
        #
        clm = MaskedColumn(targ_mask[cname], name=cname, mask=mask)
        targs.add_column(clm)

    # Look for ID duplicates (rare)
    gdobj = targs['TARG_ID'] > 0
    idval = np.array(targs[gdobj]['TARG_ID']).astype(int)
    uni, counts = np.unique(idval, return_counts=True)
    if len(uni) != np.sum(gdobj):
        warnings.warn("Found duplicated ID values in Hectospect cat files")
        warnings.warn("Modifying these by hand!")
        dup = np.where(counts > 1)[0]
        # Fix by-hand: scale later duplicates' IDs to make them unique
        for idup in dup:
            dobj = np.where(targs['TARG_ID'] == uni[idup])[0]
            if len(dobj) == 1:
                xdb.set_trace()
            # Confirm RA/DEC are different
            dcoord = SkyCoord(ra=targs['TARG_RA'][dobj] * u.deg,
                              dec=targs['TARG_DEC'][dobj] * u.deg)
            idx, d2d, d3d = coords.match_coordinates_sky(dcoord,
                                                         dcoord,
                                                         nthneighbor=2)
            if np.sum(d2d < 1 * u.arcsec) > 0:
                raise ValueError("Two with the same RA/DEC.  Deal")
            else:
                for ii in range(1, len(dobj)):
                    # Increment
                    print('Setting TARG_ID to {:d} from {:d}'.format(
                        (ii + 1) * targs['TARG_ID'][dobj[ii]],
                        targs['TARG_ID'][dobj[ii]]))
                    targs['TARG_ID'][
                        dobj[ii]] = (ii + 1) * targs['TARG_ID'][dobj[ii]]
    # Double check that the de-duplication worked
    idval = np.array(targs[gdobj]['TARG_ID']).astype(int)
    uni, counts = np.unique(idval, return_counts=True)
    if len(uni) != np.sum(gdobj):
        raise ValueError("Cannot happen")

    # Finish
    return all_masks, all_obs, targs
Beispiel #49
0
def hdf5_adddata(hdf, sname, meta, debug=False, chk_meta_only=False,
                 mk_test_file=False):
    """ Append COS-Halos data to the h5 file

    Parameters
    ----------
    hdf : hdf5 pointer
      Open HDF5 file; a group named `sname` is created in it
    sname : str
      Survey name; must be 'COS-Halos'
    meta : Table
      Meta table for the group (e.g. from grab_meta); columns are added in place
    debug : bool, optional
      Unused; kept for a uniform hdf5_adddata signature
    chk_meta_only : bool, optional
      Only check meta file;  will not write spectra
    mk_test_file : bool, optional
      Generate the debug test file for Travis??  (unused here)

    Returns
    -------

    """
    # Add Survey
    print("Adding {:s} survey to DB".format(sname))
    chalos_grp = hdf.create_group(sname)
    # Load up
    # Checks
    if sname != 'COS-Halos':
        raise IOError("Not expecting this survey..")

    # Build spectra (and parse for meta)
    nspec = len(meta)
    max_npix = 160000  # Just needs to be large enough
    data = init_data(max_npix, include_co=False)
    # Init the (resizable, compressed) spectral dataset
    spec_set = hdf[sname].create_dataset('spec', data=data, chunks=True,
                                         maxshape=(None,), compression='gzip')
    spec_set.resize((nspec,))
    wvminlist = []
    wvmaxlist = []
    npixlist = []
    speclist = []
    # Loop
    path = os.getenv('RAW_IGMSPEC')+'/COS-Halos/'
    maxpix = 0
    for jj,row in enumerate(meta):
        # Generate full file -- filename is built from truncated sexagesimal coords
        coord = ltu.radec_to_coord((row['RA_GROUP'],row['DEC_GROUP']))
        if row['INSTR'].strip() == 'COS':
            full_file = path+'/J{:s}{:s}_nbin3_coadd.fits.gz'.format(coord.ra.to_string(unit=u.hour,sep='',pad=True)[0:4],
                                               coord.dec.to_string(sep='',pad=True,alwayssign=True)[0:5])
        else: # HIRES
            full_file = path+'/HIRES/J{:s}{:s}_f.fits.gz'.format(coord.ra.to_string(unit=u.hour,sep='',pad=True)[0:4], coord.dec.to_string(sep='',pad=True,alwayssign=True)[0:5])
        # Extract
        print("COS-Halos: Reading {:s}".format(full_file))
        spec = lsio.readspec(full_file)
        # Parse name
        fname = full_file.split('/')[-1]
        # npix
        npix = spec.npix
        if npix > max_npix:
            raise ValueError("Not enough pixels in the data... ({:d})".format(npix))
        else:
            maxpix = max(npix,maxpix)
        # Some fiddling about
        for key in ['wave','flux','sig']:
            data[key] = 0.  # Important to init (for compression too)
        data['flux'][0][:npix] = spec.flux.value
        data['sig'][0][:npix] = spec.sig.value
        data['wave'][0][:npix] = spec.wavelength.value
        # Meta
        speclist.append(str(fname))
        wvminlist.append(np.min(data['wave'][0][:npix]))
        wvmaxlist.append(np.max(data['wave'][0][:npix]))
        npixlist.append(npix)
        if chk_meta_only:
            continue
        # Only way to set the dataset correctly
        spec_set[jj] = data

    #
    print("Max pix = {:d}".format(maxpix))
    # Add columns
    meta.add_column(Column(speclist, name='SPEC_FILE'))
    meta.add_column(Column(npixlist, name='NPIX'))
    meta.add_column(Column(wvminlist, name='WV_MIN'))
    meta.add_column(Column(wvmaxlist, name='WV_MAX'))
    meta.add_column(Column(np.arange(nspec,dtype=int), name='GROUP_ID'))

    # Add HDLLS meta to hdf5
    if chk_meta(meta):
        if chk_meta_only:
            pdb.set_trace()
        hdf[sname]['meta'] = meta
    else:
        raise ValueError("meta file failed")
    # References
    refs = [dict(url='http://adsabs.harvard.edu/abs/2013ApJ...777...59T',
                 bib='tumlinson+13'),
            dict(url='http://adsabs.harvard.edu/abs/2013ApJS..204...17W',
                         bib='werk+13')
            ]
    jrefs = ltu.jsonify(refs)
    hdf[sname]['meta'].attrs['Refs'] = json.dumps(jrefs)
    #
    return
Beispiel #50
0
def grab_meta():
    """ Grab COS-Halos meta table

    Builds the table from the observation summary and visit files under
    $RAW_IGMSPEC/COS-Halos, filling in coordinates, observation dates,
    instrument info, and emission redshifts (from the IGMspec quasar catalog).

    Returns
    -------
    chalos_meta : Table
      One row per spectrum (COS rows plus appended HIRES rows)
    """
    from time import strptime
    from specdb.zem.utils import zem_from_radec
    from specdb.specdb import IgmSpec
    from specdb.defs import get_res_dicts
    Rdicts = get_res_dicts()
    igmsp = IgmSpec(db_file=os.getenv('SPECDB')+'/IGMspec_DB_v01.hdf5', skip_test=True)

    summ_file = os.getenv('RAW_IGMSPEC')+'/COS-Halos/cos_halos_obs.ascii'
    chalos_meta = Table.read(summ_file, format='ascii')
    # RA/DEC, DATE
    # Visits from this page: http://www.stsci.edu/cgi-bin/get-visit-status?id=11598&markupFormat=html
    visit_file = os.getenv('RAW_IGMSPEC')+'/COS-Halos/cos_halos_visits.ascii'
    ch_visits = Table.read(visit_file,format='ascii')
    ra = []
    dec = []
    datet = []
    for row in chalos_meta:
        # Coordinates parsed from the QSO name (e.g. J-name)
        coord = ltu.radec_to_coord(row['QSO'])
        ra.append(coord.ra.value)
        dec.append(coord.dec.value)
        # Observation date: look up this row's visit in the visit table
        visit = row['Visit']
        mtv = np.where(ch_visits['Visit'] == visit)[0]
        if len(mtv) != 1:
            pdb.set_trace()
        else:
            chv = ch_visits['Start_UT'][mtv].data[0]
        # Convert e.g. 'Oct26,2009' -> '2009-10-26'
        icmma = chv.find(',')
        datet.append('{:s}-{:02d}-{:02d}'.format(
                chv[icmma+1:icmma+5], strptime(chv[:3],'%b').tm_mon,
                int(chv[3:icmma])))
    chalos_meta.add_column(Column(ra, name='RA'))
    chalos_meta.add_column(Column(dec, name='DEC'))
    chalos_meta.add_column(Column(datet, name='DATE-OBS'))
    # Others
    chalos_meta.add_column(Column(['      ']*len(chalos_meta), name='TELESCOPE')) # Padding
    chalos_meta.add_column(Column(['     ']*len(chalos_meta), name='INSTR')) # Padding for HIRES
    chalos_meta.add_column(Column(['G130M/G160M']*len(chalos_meta), name='DISPERSER'))
    chalos_meta.add_column(Column([20000.]*len(chalos_meta), name='R'))
    chalos_meta.add_column(Column([2000.]*len(chalos_meta), name='EPOCH'))
    chalos_meta['INSTR'] = 'COS' # Deals with padding
    chalos_meta['TELESCOPE'] = 'HST'
    # Myers for zem
    # NOTE(review): h5py Dataset.value is deprecated in h5py>=2.9; may need
    # hdf['quasars'][()] instead -- confirm the pinned h5py version.
    zem, zsource = zem_from_radec(chalos_meta['RA'], chalos_meta['DEC'], Table(igmsp.hdf['quasars'].value))
    badz = zem <= 0.
    if np.sum(badz) > 0:
        raise ValueError("Bad zem in COS-Halos")
    chalos_meta['zem'] = zem
    chalos_meta['sig_zem'] = 0.  # Need to add
    chalos_meta['flag_zem'] = zsource
    # HIRES: match HIRES files to COS rows by QSO-name substrings
    hires_files = glob.glob(os.getenv('RAW_IGMSPEC')+'/COS-Halos/HIRES/J*f.fits.gz')
    hires_tab = chalos_meta[0:0]
    subnm = np.array([row['QSO'][4:9] for row in chalos_meta])
    signs = np.array([row['QSO'][14] for row in chalos_meta])
    for ifile in hires_files:
        print(ifile)
        fname = ifile.split('/')[-1]
        mt = np.where((subnm == fname[0:5]) & (signs == fname[5]))[0]
        if len(mt) != 1:
            pdb.set_trace()
        # Add row
        hires_tab.add_row(chalos_meta[mt[0]])
        hires_tab[-1]['INSTR'] = 'HIRES'
        hires_tab[-1]['TELESCOPE'] = 'Keck I'
        hires_tab[-1]['DISPERSER'] = 'Red'
        hires_tab[-1]['R'] = Rdicts['HIRES']['C1']
    # Combine
    chalos_meta = vstack([chalos_meta, hires_tab])
    chalos_meta['STYPE'] = str('QSO')
    # Rename
    chalos_meta.rename_column('RA', 'RA_GROUP')
    chalos_meta.rename_column('DEC', 'DEC_GROUP')
    chalos_meta.rename_column('zem', 'zem_GROUP')
    # Check
    assert chk_meta(chalos_meta, chk_cat_only=True)
    # Done
    return chalos_meta
Beispiel #51
0
def main(pargs):
    """ Convert an FRB magnitude limit into fractional L* luminosities.

    Uses the FRB coordinate to estimate Galactic extinction and the
    NE2001 ISM dispersion measure, subtracts the assumed host+MW-halo
    DM to get a cosmic DM, derives 10%/90% cumulative-probability
    redshift limits from the repo P(DM|z) grid, and converts the
    (extinction-corrected) limiting magnitude into a fraction of L*
    at each redshift limit.

    Parameters
    ----------
    pargs : argparse.Namespace
        Parsed command-line arguments; attributes read here:
        coord -- sky coordinate accepted by ``coord_arg_to_coord``
        DM_FRB -- total FRB dispersion measure
            (units as expected by ``frb.dm.prob_dmz``; presumably pc cm^-3 -- confirm)
        dm_hostmw -- DM assumed for the host + Milky Way halo contribution
        filter -- photometric filter name for the extinction correction
        mag_limit -- limiting apparent magnitude of the observations

    Returns
    -------
    frac_Lstar_min, frac_Lstar_max : float
        Fraction of L* corresponding to the magnitude limit at the
        z_10 and z_90 redshift limits, respectively.
    """
    import numpy as np

    from linetools import utils as ltu
    from linetools.scripts.utils import coord_arg_to_coord

    from frb.galaxies import nebular
    from frb.galaxies import photom
    from frb.galaxies import utils as frb_gal_u
    from frb import mw
    from frb.dm import prob_dmz

    # Parse the command-line coordinate into a coordinate object
    icoord = ltu.radec_to_coord(coord_arg_to_coord(pargs.coord))

    # Galactic extinction E(B-V) along the sightline
    EBV = nebular.get_ebv(icoord)['meanValue']  #
    print(f"EBV = {EBV}")

    # Galactic ISM DM from the NE2001 model
    DM_ISM = mw.ismDM(icoord)
    print(f"NE2001 = {DM_ISM}")

    # Cosmic DM: strip the Galactic ISM and the assumed host+MW-halo terms
    DM_cosmic = pargs.DM_FRB - DM_ISM.value - pargs.dm_hostmw

    # Redshift estimates

    # Load the P(DM|z) grid shipped with the frb repo
    sdict = prob_dmz.grab_repo_grid()
    PDM_z = sdict['PDM_z']
    z = sdict['z']
    DM = sdict['DM']

    # Slice the grid at the nearest DM and normalize to a PDF in z
    iDM = np.argmin(np.abs(DM - DM_cosmic))
    PzDM = PDM_z[iDM, :] / np.sum(PDM_z[iDM, :])

    # 10% and 90% cumulative-probability redshift limits
    cum_sum = np.cumsum(PzDM)
    limits = [10, 90]

    z_min = z[np.argmin(np.abs(cum_sum - limits[0] / 100.))]
    z_max = z[np.argmin(np.abs(cum_sum - limits[1] / 100.))]

    # Setup Luminosity

    # Correct the magnitude limit for Galactic dust extinction
    dust_correct = photom.extinction_correction(pargs.filter, EBV)
    mag_dust = 2.5 * np.log10(1. / dust_correct)
    mag_corr = pargs.mag_limit + mag_dust

    # ##########################3
    # Convert to L

    # Load the m_r(L*) vs. z relation and evaluate at the redshift limits
    f_mL = frb_gal_u.load_f_mL()
    # m_r(L*)
    m_r_Lstar_min = float(f_mL(z_min))
    m_r_Lstar_max = float(f_mL(z_max))

    # Fraction of L* implied by the corrected magnitude limit
    frac_Lstar_min = 10**(-0.4 * (mag_corr - m_r_Lstar_min))
    frac_Lstar_max = 10**(-0.4 * (mag_corr - m_r_Lstar_max))

    # Finish
    print("-----------------------------------------------------")
    print(
        f"For z_{limits[0]}={z_min:.2f}, the limiting magnitude corresponds to L={frac_Lstar_min:.5f}L*"
    )
    print(
        f"For z_{limits[1]}={z_max:.2f}, the limiting magnitude corresponds to L={frac_Lstar_max:.5f}L*"
    )

    return frac_Lstar_min, frac_Lstar_max