Example #1
def test_compare_stuff():
    # Compare files
    assert ltu.compare_two_files(data_path('dum_file1.txt'), data_path('dum_file2.txt'))
    # Compare 2 json files
    assert ltu.compare_two_json(data_path('dum1.json'), data_path('dum2.json'))
    # Dicts
    d1 = ltu.loadjson(data_path('dum1.json'))
    d3 = ltu.loadjson(data_path('dum3.json'))
    added, removed, modified, same = ltu.compare_two_dict(d1,d3)
    assert removed == set('e')
    for key in ['a','d']:
        assert key in modified.keys()
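For reference, a minimal standalone sketch of the four-tuple returned by compare_two_dict (assuming linetools is installed; the dicts are illustrative):

from linetools import utils as ltu

d1 = {'a': 1, 'b': 2, 'e': 5}
d3 = {'a': 10, 'b': 2, 'd': 4}
# Same unpacking convention as the test above
added, removed, modified, same = ltu.compare_two_dict(d1, d3)
print(added, removed, modified, same)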
Example #2
def main(args):
    """
    Parameters
    ----------
    args

    Returns
    -------

    """

    import numpy as np
    from pypit import pyputils
    msgs = pyputils.get_dummy_logger()
    from pypit import arqa
    from linetools.utils import loadjson

    # Read JSON
    fdict = loadjson(args.wave_soln)
    for key in fdict.keys():
        if isinstance(fdict[key], list):
            fdict[key] = np.array(fdict[key])

    # Generate QA
    arqa.arc_fit_qa(None, fdict, outfil=args.outfile, ids_only=True,
                    title=args.title)
    print("Wrote {:s}".format(args.outfile))
Example #3
def test_save_load_json():
    tmp_dict = dict(a=1, b=2, c='adsf')
    # Write
    ltu.savejson('tmp.json', tmp_dict, overwrite=True)
    # Load
    new_dict = ltu.loadjson('tmp.json')
    assert new_dict['a'] == 1
    # Write with gzip
    ltu.savejson('tmp.json.gz', tmp_dict, overwrite=True)
    # Load
    new_dict = ltu.loadjson('tmp.json.gz')
    assert new_dict['a'] == 1
    # Write with easy to read
    ltu.savejson('tmp2.json', tmp_dict, overwrite=True, easy_to_read=True)
    new_dict2 = ltu.loadjson('tmp2.json')
    assert new_dict2['a'] == 1
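The tests above suggest the basic round-trip pattern; a minimal sketch, assuming linetools is installed and that savejson/loadjson infer gzip compression from the .gz extension (as the test implies):

from linetools import utils as ltu

payload = dict(a=1, b=[1, 2, 3])
ltu.savejson('tmp_roundtrip.json.gz', payload, overwrite=True)
restored = ltu.loadjson('tmp_roundtrip.json.gz')
assert restored == payload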
Example #4
def test_dicts():
    # Init HI Lya
    abslin = AbsLine(1215.6700*u.AA)
    adict = abslin.to_dict()
    assert isinstance(adict, dict)
    # Write
    #pdb.set_trace()
    ltu.savejson('tmp.json', adict, overwrite=True)
    # Read
    newdict = ltu.loadjson('tmp.json')
    newlin = SpectralLine.from_dict(newdict)
    assert newlin.name == 'HI 1215'
Example #5
def test_dicts():
    # Init Halpha
    emisslin = EmLine(6564.613*u.AA)
    emisslin.analy['spec'] = 'tmp.fits'
    edict = emisslin.to_dict()
    assert isinstance(edict, dict)
    # Write
    ltu.savejson('tmp.json', edict, overwrite=True)
    # Read
    newdict = ltu.loadjson('tmp.json')
    newlin = SpectralLine.from_dict(newdict)
    assert newlin.name == 'Halpha'
    assert newlin.ltype == 'Em'
Example #6
def load_sys_files(inp, type, ref=None, sys_path=False, **kwargs):
    """ Load up a set of SYS files from the hard-drive (JSON files)

    Parameters
    ----------
    inp : str
    type : str
      type of IGMSystem, e.g. LLS
    ref : str, optional
      Reference label
    sys_path : bool, optional
      indicates that inp is a path to a set of JSON SYS files
      otherwise, inp should be the filename of a tarball of JSON files

    Returns
    -------
    survey : IGMSurvey
    """
    import tarfile
    #
    survey = class_by_type(type)(ref=ref)
    system = pyasu.class_by_type(type)
    if sys_path:
        # Individual files
        files = glob.glob(inp+'*.json')
        files.sort()
        for ifile in files:
            tdict = ltu.loadjson(ifile)
            abssys = system.from_dict(tdict)
            survey._abs_sys.append(abssys)
    else:  # tarball
        print('Loading systems from {:s}'.format(inp))
        tar = tarfile.open(inp)
        for member in tar.getmembers():
            if '.' not in member.name:
                print('Skipping a likely folder: {:s}'.format(member.name))
                continue
            # Extract
            f = tar.extractfile(member)
            tdict = json.load(f)
            # Add keys (for backwards compatibility)
            if ('NHI' in tdict.keys()) and ('flag_NHI' not in tdict.keys()):
                tdict['flag_NHI'] = 1
            # Generate
            abssys = system.from_dict(tdict, chk_sep=False, **kwargs)   # Consider use_coord=True as default
            survey._abs_sys.append(abssys)
        tar.close()
    # Return
    return survey
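The tarball branch above follows the standard tarfile idiom; a minimal standalone sketch (the filename is a placeholder):

import json
import tarfile

with tarfile.open('systems.tar.gz') as tar:  # compression auto-detected
    for member in tar.getmembers():
        if '.' not in member.name:
            continue  # skip folder entries, as above
        f = tar.extractfile(member)
        tdict = json.load(f)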
Example #7
    def from_json(cls, json_file, **kwargs):
        """
        Parameters
        ----------
        json_file : str
        **kwargs : passed to cls.from_dict()

        Returns
        -------
        AbsComponent

        """
        # Load dict
        jdict = ltu.loadjson(json_file)
        # Instantiate
        return cls.from_dict(jdict, **kwargs)
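A hedged usage sketch (the filename is a placeholder; any kwargs pass straight through to from_dict):

# Hypothetical file, previously written with ltu.savejson('component.json', comp.to_dict())
comp = AbsComponent.from_json('component.json')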
Example #8
    def from_json(cls, json_file, **kwargs):
        """ Load from a JSON file (via from_dict)

        Parameters
        ----------
        json_file
        kwargs

        Returns
        -------
        AbsSystem

        """
        idict = ltu.loadjson(json_file)
        slf = cls.from_dict(idict, **kwargs)
        return slf
Example #9
def add_s2n_after(ids, json_file, debug=False, CHUNK_SIZE=1000):
    from linetools import utils as ltu
    from dla_cnn.absorption import get_s2n_for_absorbers  # Needs to be here

    # Load json file
    predictions = ltu.loadjson(json_file)
    jids = [ii['id'] for ii in predictions]

    num_cores = multiprocessing.cpu_count() - 2
    p = Pool(num_cores)  # a thread pool we'll reuse
    sightlines_processed_count = 0

    # IDs
    ids.sort(key=methodcaller('id_string'))
    for sss, ids_batch in enumerate(
            np.array_split(ids, np.arange(CHUNK_SIZE, len(ids), CHUNK_SIZE))):
        num_sightlines = len(ids_batch)
        # Read batch
        process_timer = timeit.default_timer()
        print("Reading {:d} sightlines with {:d} cores".format(
            num_sightlines, num_cores))
        sightlines_batch = p.map(read_sightline, ids_batch)
        print("Done reading")

        for sightline in sightlines_batch:
            jidx = jids.index(sightline.id.id_string())
            # Any absorbers?
            if (predictions[jidx]['num_dlas']) + (
                    predictions[jidx]['num_subdlas']) == 0:
                continue
            lam, lam_rest, ix_dla_range = get_lam_data(sightline.loglam,
                                                       sightline.z_qso,
                                                       REST_RANGE)
            # DLAs, subDLAs
            get_s2n_for_absorbers(sightline, lam, predictions[jidx]['dlas'])
            get_s2n_for_absorbers(sightline, lam, predictions[jidx]['subdlas'])

        runtime = timeit.default_timer() - process_timer
        print(
            "Processed {:d} of {:d} in {:0.0f}s - {:0.2f}s per sample".format(
                sightlines_processed_count + num_sightlines, len(ids), runtime,
                runtime / num_sightlines))
        sightlines_processed_count += num_sightlines
    # Write
    print("About to over-write your JSON file.  Continue at your own risk!")
    # Return new predictions
    return predictions
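The np.array_split call above slices the sightline list into CHUNK_SIZE batches; a minimal sketch of that pattern:

import numpy as np

items = np.arange(10)
chunk = 4
for batch in np.array_split(items, np.arange(chunk, len(items), chunk)):
    print(batch)  # [0 1 2 3], then [4 5 6 7], then [8 9]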
Example #10
def high_nhi_neg():
    """ Examine High NHI false negatives in 10k test
    """
    # Load ML
    ml_abs = pred_to_tbl('../Vetting/data/test_dlas_96629_predictions.json.gz')
    # Load Test
    test_dlas = test_to_tbl('../Vetting/data/test_dlas_96629_10000.json.gz')
    # Load vette
    vette_10k = ltu.loadjson('../Vetting/vette_10k.json')
    test_ml_idx = np.array(vette_10k['test_idx'])

    misses = np.where(test_ml_idx == -99999)[0]
    highNHI = test_dlas['NHI'][misses] > 21.2
    high_tbl = test_dlas[misses[highNHI]]

    # Write
    high_tbl.write('test_highNHI_neg.ascii', format='ascii.fixed_width', overwrite=True)
Example #11
    def from_json(cls, jsonfile, **kwargs):
        """ Instantiate from a JSON file

        Parameters
        ----------
        jsonfile : str
          Filename
          See from_dict for required keys
        kwargs : passed to from_dict

        Returns
        -------

        """
        jdict = ltu.loadjson(jsonfile)
        slf = cls.from_dict(jdict, **kwargs)
        # Return
        return slf
Example #12
def load_dla_fits(fit_file=None):
    """ Load fit file(s)
    Parameters
    ----------
    fit_file : str

    Returns
    -------

    """
    if fit_file is None:
        fit_file = resource_filename('pyigm', 'data/DLA/dla_fits.json')
    if os.path.exists(fit_file):
        dla_fits = ltu.loadjson(fit_file)
    else:
        dla_fits = {}
    # Return
    return dla_fits, fit_file
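Usage is then a one-liner; with no argument the JSON packaged with pyigm is used (per the fallback above):

dla_fits, fit_file = load_dla_fits()
print("Loaded {:d} fit entries from {:s}".format(len(dla_fits), fit_file))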
Example #13
def load_master(name, exten=0, frametype='<None>'):
    """
    Load a pre-existing master calibration frame

    Parameters
    ----------
    name : str
      Name of the master calibration file to be loaded
    exten : int, optional
    frametype : str, optional
      The type of master calibration frame being loaded.
      This keyword is only used for terminal print out.

    Returns
    -------
    frame : ndarray
      The data from the master calibration frame
    """
    if frametype is None:
        msgs.info("Loading a pre-existing master calibration frame")
        try:
            hdu = pyfits.open(name)
        except:
            msgs.error("Master calibration file does not exist:" +
                       msgs.newline() + name)
        msgs.info("Master {0:s} frame loaded successfully:".format(
            hdu[0].header['FRAMETYP']) + msgs.newline() + name)
        head = hdu[0].header
        data = hdu[exten].data.astype(np.float)
        return data, head
        #return np.array(infile[0].data, dtype=np.float)
    else:
        from linetools import utils as ltu
        msgs.info("Loading Master {0:s} frame:".format(frametype) +
                  msgs.newline() + name)
        if frametype == 'wv_calib':
            ldict = ltu.loadjson(name)
            return ldict
        else:
            # Load
            hdu = pyfits.open(name)
            head = hdu[0].header
            data = hdu[exten].data.astype(np.float)
            return data, head
Example #14
def pred_to_tbl(pred_file):
    """
    Parameters
    ----------
    pred_file

    Returns
    -------

    """
    spec_list = ltu.loadjson(pred_file)
    ids, zabs, conf, NHI, sigNHI, biasNHI = [], [], [], [], [], []
    # Loop to my loop
    for ss, spec in enumerate(spec_list):
        # DLAs
        for dla in spec['dlas']:
            if dla['type'] == "LYB":
                continue
            ids.append(ss)
            zabs.append(dla['z_dla'])
            NHI.append(dla['column_density'])
            sigNHI.append(dla['std_column_density'])
            biasNHI.append(dla['column_density_bias_adjust'])
            conf.append(dla['dla_confidence'])
        # SLLS
        for slls in spec['subdlas']:
            if slls['type'] == "LYB":
                continue
            ids.append(ss)
            zabs.append(slls['z_dla'])
            NHI.append(slls['column_density'])
            sigNHI.append(slls['std_column_density'])
            biasNHI.append(slls['column_density_bias_adjust'])
            conf.append(slls['dla_confidence'])
    # Table
    dla_tbl = Table()
    dla_tbl['ids'] = ids
    dla_tbl['zabs'] = zabs
    dla_tbl['conf'] = conf
    dla_tbl['sigNHI'] = sigNHI
    dla_tbl['biasNHI'] = biasNHI
    dla_tbl['NHI'] = NHI
    # Return
    return dla_tbl
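The final block relies on astropy building Table columns directly from plain Python lists; a minimal sketch:

from astropy.table import Table

tbl = Table()
tbl['ids'] = [0, 0, 1]
tbl['zabs'] = [2.3, 2.7, 3.1]
print(len(tbl), tbl.colnames)  # 3 ['ids', 'zabs']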
Example #15
def main():

    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument("wave_soln", type=str, default=None, help="MasterWaveSoln file [JSON]")
    parser.add_argument("title", type=str, default=None, help="Title for the plot")
    parser.add_argument("outfile", type=str, default=None, help="Output PDF file")

    pargs = parser.parse_args()

    # Read JSON
    fdict = loadjson(pargs.wave_soln)
    for key in fdict.keys():
        if isinstance(fdict[key], list):
            fdict[key] = np.array(fdict[key])

    # Generate QA
    arqa.arc_fit_qa(None, fdict, outfil=pargs.outfile, ids_only=True, title=pargs.title)
    print("Wrote {:s}".format(pargs.outfile))
Example #16
def load_sys_files(inp, type, sys_path=False):
    """ Load up a set of SYS files from the hard-drive (JSON files)

    Parameters
    ----------
    inp : str
    type : str
      type of IGMSystem
    sys_path : bool, optional
      indicates that inp is a path to a set of SYS files
      otherwise, it should be the filename of a tarball

    Returns
    -------
    survey : IGMSurvey
    """
    import tarfile
    #
    survey = class_by_type(type)(ref='HD-LLS')
    system = pyasu.class_by_type(type)
    if sys_path:
        # Individual files
        files = glob.glob(inp+'*.json')
        files.sort()
        for ifile in files:
            tdict = ltu.loadjson(ifile)
            abssys = system.from_dict(tdict)
            survey._abs_sys.append(abssys)
    else:  # tarball
        print('Loading systems from {:s}'.format(inp))
        tar = tarfile.open(inp)
        for member in tar.getmembers():
            if '.' not in member.name:
                print('Skipping a likely folder: {:s}'.format(member.name))
                continue
            # Extract
            f = tar.extractfile(member)
            tdict = json.load(f)
            # Generate
            abssys = system.from_dict(tdict)
            survey._abs_sys.append(abssys)
    # Return
    return survey
Example #17
def main(pargs, unit_test=False):
    """ Shows the spectrum
    """

    import sys
    import pdb

    from matplotlib import pyplot as plt
    from linetools import utils as ltu

    wvcalib = ltu.loadjson(pargs.file)

    # Grab it
    spec = wvcalib[pargs.slit]['spec']

    plt.clf()
    ax = plt.gca()
    ax.plot(spec)
    plt.show()
Example #18
def test_dicts():
    # Init HI Lya
    abslin = AbsLine(1215.6700 * u.AA)
    abslin.analy['spec'] = 'tmp.fits'
    adict = abslin.to_dict()
    assert isinstance(adict, dict)
    # Write
    #pdb.set_trace()
    ltu.savejson('tmp.json', adict, overwrite=True)
    # Read
    newdict = ltu.loadjson('tmp.json')
    newlin = SpectralLine.from_dict(newdict)
    assert newlin.name == 'HI 1215'
    # Old dict for compatibility
    newdict.pop('limits')
    newdict['analy']['vlim'] = [-150, 150] * u.km / u.s
    newdict['attrib']['z'] = 0.5
    tmp3 = SpectralLine.from_dict(newdict)
    assert newlin.name == 'HI 1215'
Example #19
def test_dicts():
    # Init HI Lya
    abslin = AbsLine(1215.6700*u.AA)
    abslin.analy['spec'] = 'tmp.fits'
    adict = abslin.to_dict()
    assert isinstance(adict, dict)
    # Write
    #pdb.set_trace()
    ltu.savejson('tmp.json', adict, overwrite=True)
    # Read
    newdict = ltu.loadjson('tmp.json')
    newlin = SpectralLine.from_dict(newdict)
    assert newlin.name == 'HI 1215'
    # Old dict for compatibility
    newdict.pop('limits')
    newdict['analy']['vlim'] = [-150,150]*u.km/u.s
    newdict['attrib']['z'] = 0.5
    tmp3 = SpectralLine.from_dict(newdict)
    assert newlin.name == 'HI 1215'
Example #20
def pypeit_arcspec(in_file, slit):
    """
    Load up the arc spectrum from an input JSON file

    Args:
        in_file (str):
        slit (int):
            slit index

    Returns:
        np.ndarray, np.ndarray:  wave, flux

    """
    wv_dict = ltu.loadjson(in_file)
    iwv_calib = wv_dict[str(slit)]
    x = np.arange(len(iwv_calib['spec']))
    wv_vac = utils.func_val(iwv_calib['fitc'], x/iwv_calib['xnorm'], iwv_calib['function'],
                           minx=iwv_calib['fmin'], maxx=iwv_calib['fmax'])
    # Return
    return wv_vac, np.array(iwv_calib['spec']).flatten()  # JXP added flatten on 2019-11-09
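A hedged usage sketch (the JSON filename is a placeholder for a wavelength-calibration file keyed by slit index, as assumed above):

wave, flux = pypeit_arcspec('wvcalib.json', slit=0)
print(wave.shape, flux.shape)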
Example #21
def test_to_tbl(test_file):
    test_dict = ltu.loadjson(test_file)
    ids, zabs, sl, NHI = [], [], [], []
    ntest = len(test_dict)
    # Loop to my loop
    for ss in range(ntest):
        ndla = test_dict[str(ss)]['nDLA']
        for idla in range(ndla):
            ids.append(ss)
            zabs.append(test_dict[str(ss)][str(idla)]['zabs'])
            NHI.append(test_dict[str(ss)][str(idla)]['NHI'])
            sl.append(test_dict[str(ss)]['sl'])
    # Table
    test_tbl = Table()
    test_tbl['ids'] = ids
    test_tbl['zabs'] = zabs
    test_tbl['NHI'] = NHI
    test_tbl['sl'] = sl
    # Return
    return test_tbl
Example #22
    def load_json(self,
                  jfile,
                  build_data=True,
                  build_sys=False,
                  verbose=True,
                  **kwargs):
        """
        Parameters
        ----------
        jfile : str
        build_data : bool, optional
          Generate the internal _data Table  [very fast and recommended]
        build_sys : bool, optional
          Generate the list of cgm_abs objects from the internal _dict [May be slow]
        kwargs

        Returns
        -------

        """
        # Load
        self._dict = ltu.loadjson(jfile)
        # Generate
        if build_sys:
            self.build_systems_from_dict(**kwargs)

        # Galaxy coords
        ras = [self._dict[key]['RA'] for key in self._dict.keys()]
        decs = [self._dict[key]['DEC'] for key in self._dict.keys()]
        self.coords = SkyCoord(ra=ras, dec=decs, unit='deg')

        # Sightline coords
        ras = [self._dict[key]['igm_sys']['RA'] for key in self._dict.keys()]
        decs = [self._dict[key]['igm_sys']['DEC'] for key in self._dict.keys()]
        self.scoords = SkyCoord(ra=ras, dec=decs, unit='deg')

        # Data table
        if build_data:
            self.build_data_from_dict()
        # Return
        return
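Both coordinate blocks above construct vectorized SkyCoord objects straight from Python lists; a minimal sketch:

from astropy.coordinates import SkyCoord

ras = [10.0, 11.2]
decs = [-1.0, 2.5]
coords = SkyCoord(ra=ras, dec=decs, unit='deg')  # one object holding two entries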
Example #23
def load_master(name, exten=0, frametype='<None>'):
    """
    Load a pre-existing master calibration frame

    Parameters
    ----------
    name : str
      Name of the master calibration file to be loaded
    exten : int, optional
    frametype : str, optional
      The type of master calibration frame being loaded.
      This keyword is only used for terminal print out.

    Returns
    -------
    frame : ndarray
      The data from the master calibration frame
    """
    if frametype is None:
        msgs.info("Loading a pre-existing master calibration frame")
        try:
            hdu = pyfits.open(name)
        except:
            msgs.error("Master calibration file does not exist:"+msgs.newline()+name)
        msgs.info("Master {0:s} frame loaded successfully:".format(hdu[0].header['FRAMETYP'])+msgs.newline()+name)
        head = hdu[0].header
        data = hdu[exten].data.astype(np.float)
        return data, head
        #return np.array(infile[0].data, dtype=np.float)
    else:
        from linetools import utils as ltu
        msgs.info("Loading Master {0:s} frame:".format(frametype)+msgs.newline()+name)
        if frametype == 'wv_calib':
            ldict = ltu.loadjson(name)
            return ldict
        else:
            # Load
            hdu = pyfits.open(name)
            head = hdu[0].header
            data = hdu[exten].data.astype(np.float)
            return data, head
Example #24
def main(args):
    import numpy as np

    from linetools.utils import loadjson

    # TODO: This must be an out-dated script that is never used.
    # Deprecate it?
    from pypeit import arqa
    from pypeit import msgs
    msgs.reset(verbosity=2)

    # Read JSON
    fdict = loadjson(args.wave_soln)
    for key in fdict.keys():
        if isinstance(fdict[key], list):
            fdict[key] = np.array(fdict[key])

    # Generate QA
    arqa.arc_fit_qa(None, fdict, outfil=args.outfile, ids_only=True,
                    title=args.title)
    print("Wrote {:s}".format(args.outfile))
Example #25
def main() :

    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('wave_soln', type = str, default = None,
                        help = 'MasterWaveSoln file [JSON]')
    parser.add_argument('title', type = str, default = None, help = 'Title for the plot')
    parser.add_argument('outfile', type = str, default = None, help = 'Output PDF file')

    pargs = parser.parse_args()

    # Read JSON
    fdict = loadjson(pargs.wave_soln)
    for key in fdict.keys():
        if isinstance(fdict[key], list):
            fdict[key] = np.array(fdict[key])

    # Generate QA
    arqa.arc_fit_qa(None, fdict, outfil=pargs.outfile, ids_only=True,
                    title=pargs.title)
    print("Wrote {:s}".format(pargs.outfile))
Example #26
def main(args):
    """
    Parameters
    ----------
    args

    Returns
    -------

    """
    import numpy as np

    try:
        from xastropy.xutils import xdebug as debugger
    except:
        import pdb as debugger

    from linetools.utils import loadjson

    from pypeit import arqa
    from pypeit import msgs
    msgs.reset(verbosity=2)

    # Read JSON
    fdict = loadjson(args.wave_soln)
    for key in fdict.keys():
        if isinstance(fdict[key], list):
            fdict[key] = np.array(fdict[key])

    # Generate QA
    arqa.arc_fit_qa(None,
                    fdict,
                    outfil=args.outfile,
                    ids_only=True,
                    title=args.title)
    print("Wrote {:s}".format(args.outfile))
Example #27
def load_spectrum(spec_file, index=0):
    """ Load a simple spectrum from input file

    Parameters
    ----------
    spec_file : str
      .fits --  Assumes simple ndarray in 0 extension
      .ascii -- Assumes Table.read(format='ascii') will work with single column

    Returns
    -------

    """
    import h5py
    iext = spec_file.rfind('.')
    if 'ascii' in spec_file[iext:]:
        tbl = Table.read(spec_file, format='ascii')
        key = tbl.keys()[0]
        spec = tbl[key].data
    elif 'fits' in spec_file[iext:]:
        spec = fits.open(spec_file)[0].data
    elif 'hdf5' in spec_file[iext:]:
        hdf = h5py.File(spec_file, 'r')
        if 'arcs' in hdf.keys():
            print("Taking arc={:d} in this file".format(index))
            spec = hdf['arcs/' + str(index) + '/spec'].value
        else:
            raise IOError("Not ready for this hdf5 file")
    elif 'json' in spec_file[iext:]:
        jdict = ltu.loadjson(spec_file)
        try:
            spec = np.array(jdict['spec'])
        except KeyError:
            raise IOError("spec not in your JSON dict")
    # Return
    return spec
Example #28
    def from_igmguesses(cls, radec, zem, igmgfile, name=None, **kwargs):
        """ Instantiate from a JSON file from IGMGuesses
        The input coordinates are used for all the components

        Parameters
        ----------
        radec : RA/DEC input
          See ltu.radec_to_coord for options
        zem : float
          Emission redshift of sightline
        igmgfile : str
          Filename

        Returns
        -------

        """
        # Read
        jdict = ltu.loadjson(igmgfile)  # cmps, specfile
        # Add in additional keys
        coord = ltu.radec_to_coord(radec)
        jdict['RA'] = coord.fk5.ra.deg
        jdict['DEC'] = coord.fk5.dec.deg
        jdict['zem'] = zem
        # Name
        if name is None:
            name = 'J{:s}{:s}_z{:0.3f}'.format(
                coord.fk5.ra.to_string(unit=u.hour, sep='', pad=True)[0:4],
                coord.fk5.dec.to_string(sep='', pad=True,
                                        alwayssign=True)[0:5], zem)
        jdict['name'] = name
        jdict['components'] = jdict.pop('cmps')
        kwargs['use_coord'] = True
        slf = cls.from_dict(jdict, **kwargs)
        # Return
        return slf
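A worked example of the name construction above, assuming radec_to_coord accepts an (RA, DEC) tuple in degrees (values are illustrative):

import astropy.units as u
from linetools import utils as ltu

coord = ltu.radec_to_coord((150.025, 2.21))
name = 'J{:s}{:s}_z{:0.3f}'.format(
    coord.fk5.ra.to_string(unit=u.hour, sep='', pad=True)[0:4],
    coord.fk5.dec.to_string(sep='', pad=True, alwayssign=True)[0:5], 2.5)
# -> 'J1000+0212_z2.500'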
Example #29
def examine_false_pos(test_file='data/test_dlas_96629_10000.json.gz',
                      pred_file='data/test_dlas_96629_predictions.json.gz',
                      vette_file='vette_10k.json'):
    """ Examine false positives in the Test set (held out)
    """
    from pyigm.surveys.dlasurvey import DLASurvey
    import h5py
    import json
    from matplotlib import pyplot as plt
    # Load Test
    test_dlas = test_to_tbl(test_file)
    ntest = len(test_dlas)
    # Load hdf5
    CNN_result_path = '/home/xavier/Projects/ML_DLA_results/CNN/'
    hdf5_datafile = CNN_result_path + 'gensample_hdf5_files/test_dlas_96629_10000.hdf5'
    hdf = h5py.File(hdf5_datafile, 'r')
    headers = json.loads(hdf['meta'].value)['headers']
    # Load ML
    ml_abs = pred_to_tbl(pred_file)
    # Vette
    vette = ltu.loadjson(vette_file)
    test_ml_idx = np.array(vette['test_idx'])
    # Load DR5
    dr5 = DLASurvey.load_SDSS_DR5()
    all_dr5 = DLASurvey.load_SDSS_DR5(sample='all_sys')

    # False positives
    fpos = ml_abs['NHI'] >= 20.3  # Must be a DLA
    imatched = np.where(test_ml_idx >= 0)[0]
    match_val = test_ml_idx[imatched]
    fpos[match_val] = False
    print("There are {:d} total false positives".format(np.sum(fpos)))
    # This nearly matches David's.  Will run with his analysis.

    fpos_in_dr5 = fpos.copy()
    # Restrict on DR5
    for idx in np.where(fpos_in_dr5)[0]:
        # Convoluted indexing..
        mlid = ml_abs['ids'][idx]
        # Plate/Fiber
        plate = headers[mlid]['PLATE']
        fib = headers[mlid]['FIBER']
        # Finally, match to DR5
        dr5_sl = np.where((dr5.sightlines['PLATE'] == plate)
                          & (dr5.sightlines['FIB'] == fib))[0][0]
        if (ml_abs['zabs'][idx] >= dr5.sightlines['Z_START'][dr5_sl]) & \
                (ml_abs['zabs'][idx] <= dr5.sightlines['Z_END'][dr5_sl]):
            pass
        else:
            fpos_in_dr5[idx] = False
    print("Number of FP in DR5 analysis region = {:d}".format(
        np.sum(fpos_in_dr5)))

    # How many match to DR5 SLLS?
    slls = all_dr5.NHI < 20.3
    slls_coord = all_dr5.coord[slls]
    slls_zabs = all_dr5.zabs[slls]
    nslls = 0
    for idx in np.where(fpos_in_dr5)[0]:
        # Convoluted indexing..
        mlid = ml_abs['ids'][idx]
        # RA/DEC
        ra = headers[mlid]['RA_GROUP']
        dec = headers[mlid]['DEC_GROUP']
        coord = SkyCoord(ra=ra, dec=dec, unit='deg')
        # Match coord
        mt = coord.separation(slls_coord) < 3 * u.arcsec
        if np.any(mt):
            # Match redshift
            if np.min(np.abs(slls_zabs[mt] - ml_abs['zabs'][idx])) < 0.015:
                nslls += 1
    print("Number of FP that are SLLS in DR5 = {:d}".format(nslls))

    low_NHI = ml_abs['NHI'][fpos_in_dr5] < 20.5
    print("Number of FP that are NHI <= 20.5 = {:d}".format(np.sum(low_NHI)))

    # Write out
    fp_tbl = Table()
    for key in ['ids', 'NHI', 'zabs', 'conf']:
        fp_tbl[key] = ml_abs[key][fpos_in_dr5]
    fp_tbl.write('test10k_false_pos.ascii',
                 format='ascii.fixed_width',
                 overwrite=True)

    # Histogram
    dr5_idx = np.where(fpos_in_dr5)
    plt.clf()
    ax = plt.gca()
    ax.hist(ml_abs['conf'][dr5_idx])
    plt.show()
Example #30
############################################

if __name__ == '__main__':
    args = parse_arguments()

    if not os.path.exists('QA/PNGs/'):
        os.system('mkdir -p QA/PNGs/')

    # Show in ginga?
    gingashow = args.show
    ## Get tslits_dict either from flatfiles or load from a json file
    if args.flatfiles is None:
        jdict = ltu.loadjson('tilt_nires.json')
        tslits_dict = jdict.copy()
        for tkey in tslits_dict.keys():
            tslits_dict[tkey] = np.array(tslits_dict[tkey])
    else:
        tslits_dict = get_tslits_nires(args.flatfiles, user_settings=par, gingashow=gingashow)

    # Get Tilts from scienceB image
    aImage = arcimage.ArcImage(spectrograph,
                               file_list=args.sciBfiles,
                               par=par['calibrations']['arcframe'])
    msarc = aImage.process(bias_subtract='overscan',
                           trim=False)
    pixlocn = pixels.gen_pixloc(aImage.stack.shape)
    bpm = spectrograph.bpm(shape=msarc.shape, det=1)
    # Extract spectrum at the center
Example #31
def ARMLSD(argflag, spect, fitsdict, reuseMaster=False):
    """
    Automatic Reduction and Modeling of Long Slit Data

    Parameters
    ----------
    argflag : dict
      Arguments and flags used for reduction
    spect : dict
      Properties of the spectrograph.
    fitsdict : dict
      Contains relevant information from fits header files
    reuseMaster : bool
      If True, a master frame that will be used for another science frame
      will not be regenerated after it is first made.
      This setting comes with a price, and if a large number of science frames are
      being generated, it may be more efficient to simply regenerate the master
      calibrations on the fly.

    Returns
    -------
    status : int
      Status of the reduction procedure
      0 = Successful execution
      1 = ...
    """
    status = 0

    # Create a list of science exposure classes
    sciexp = armbase.SetupScience(argflag, spect, fitsdict)
    numsci = len(sciexp)

    # Create a list of master calibration frames
    masters = armasters.MasterFrames(spect['mosaic']['ndet'])

    # Use Masters?  Requires setup file
    setup_file = argflag['out']['sorted']+'.setup'
    try:
        calib_dict = ltu.loadjson(setup_file)
    except:
        msgs.info("No setup file {:s} for MasterFrames".format(setup_file))
        calib_dict = {}
    else:
        argflag['masters']['setup_file'] = setup_file

    # Start reducing the data
    for sc in range(numsci):
        slf = sciexp[sc]
        scidx = slf._idx_sci[0]
        msgs.info("Reducing file {0:s}, target {1:s}".format(fitsdict['filename'][scidx], slf._target_name))
        msgs.sciexp = slf  # For QA writing on exit, if nothing else.  Could write Masters too
        # Loop on Detectors
        for kk in xrange(slf._spect['mosaic']['ndet']):
            det = kk + 1  # Detectors indexed from 1
            slf.det = det
            ###############
            # Get amplifier sections
            arproc.get_ampsec_trimmed(slf, fitsdict, det, scidx)
            # Setup
            setup = arsort.calib_setup(slf, sc, det, fitsdict, calib_dict, write=False)
            slf._argflag['masters']['setup'] = setup
            ###############
            # Generate master bias frame
            update = slf.MasterBias(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="bias")
            ###############
            # Generate a bad pixel mask (should not repeat)
            update = slf.BadPixelMask(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="arc")
            ###############
            # Generate a master arc frame
            update = slf.MasterArc(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="arc")
            ###############
            # Determine the dispersion direction (and transpose if necessary)
            slf.GetDispersionDirection(fitsdict, det, scidx)
            if slf._bpix[det-1] is None:  # Needs to be done here after nspec is set
                slf.SetFrame(slf._bpix, np.zeros((slf._nspec[det-1], slf._nspat[det-1])), det)
            '''
            ###############
            # Estimate gain and readout noise for the amplifiers
            msgs.work("Estimate Gain and Readout noise from the raw frames...")
            update = slf.MasterRN(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="readnoise")
            '''
            ###############
            # Generate a master trace frame
            update = slf.MasterTrace(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="flat", chktype="trace")
            ###############
            # Generate an array that provides the physical pixel locations on the detector
            slf.GetPixelLocations(det)
            # Determine the edges of the spectrum (spatial)
            if 'trace'+slf._argflag['masters']['setup'] not in slf._argflag['masters']['loaded']:
                ###############
                # Determine the edges of the spectrum (spatial)
                lordloc, rordloc, extord = artrace.trace_orders(slf, slf._mstrace[det-1], det, singleSlit=True, pcadesc="PCA trace of the slit edges")
                slf.SetFrame(slf._lordloc, lordloc, det)
                slf.SetFrame(slf._rordloc, rordloc, det)

                # Convert physical trace into a pixel trace
                msgs.info("Converting physical trace locations to nearest pixel")
                pixcen = artrace.phys_to_pix(0.5*(slf._lordloc[det-1]+slf._rordloc[det-1]), slf._pixlocn[det-1], 1)
                pixwid = (slf._rordloc[det-1]-slf._lordloc[det-1]).mean(0).astype(np.int)
                lordpix = artrace.phys_to_pix(slf._lordloc[det-1], slf._pixlocn[det-1], 1)
                rordpix = artrace.phys_to_pix(slf._rordloc[det-1], slf._pixlocn[det-1], 1)
                slf.SetFrame(slf._pixcen, pixcen, det)
                slf.SetFrame(slf._pixwid, pixwid, det)
                slf.SetFrame(slf._lordpix, lordpix, det)
                slf.SetFrame(slf._rordpix, rordpix, det)
                # Save QA for slit traces
                arqa.slit_trace_qa(slf, slf._mstrace[det-1], slf._lordpix[det-1], slf._rordpix[det-1], extord, desc="Trace of the slit edges")

            ###############
            # Prepare the pixel flat field frame
            update = slf.MasterFlatField(fitsdict, det)
            if update and reuseMaster: armbase.UpdateMasters(sciexp, sc, det, ftype="flat", chktype="pixflat")
            ###############
            # Generate the 1D wavelength solution
            update = slf.MasterWaveCalib(fitsdict, sc, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="arc", chktype="trace")
            ###############
            # Derive the spectral tilt
            if slf._tilts[det-1] is None:
                if slf._argflag['masters']['use']:
                    mstilt_name = armasters.master_name(slf._argflag['run']['masterdir'],
                                                        'tilts', slf._argflag['masters']['setup'])
                    try:
                        tilts, head = arload.load_master(mstilt_name, frametype="tilts")
                    except IOError:
                        pass
                    else:
                        slf.SetFrame(slf._tilts, tilts, det)
                        slf._argflag['masters']['loaded'].append('tilts'+slf._argflag['masters']['setup'])
                if 'tilts'+slf._argflag['masters']['setup'] not in slf._argflag['masters']['loaded']:
                    # First time tilts are derived for this arc frame --> derive the order tilts
                    tilts, satmask, outpar = artrace.model_tilt(slf, det, slf._msarc[det-1])
                    slf.SetFrame(slf._tilts, tilts, det)
                    slf.SetFrame(slf._satmask, satmask, det)
                    slf.SetFrame(slf._tiltpar, outpar, det)

            ###############
            # Generate/load a master wave frame
            update = slf.MasterWave(fitsdict, sc, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="arc", chktype="wave")

            # Check if the user only wants to prepare the calibrations only
            msgs.info("All calibration frames have been prepared")
            if slf._argflag['run']['preponly']:
                msgs.info("If you would like to continue with the reduction,"
                          +msgs.newline()+"disable the run+preponly command")
                continue

            # Write setup
            setup = arsort.calib_setup(slf, sc, det, fitsdict, calib_dict, write=True)
            # Write MasterFrames (currently per detector)
            armasters.save_masters(slf, det, setup)

            ###############
            # Load the science frame and from this generate a Poisson error frame
            msgs.info("Loading science frame")
            sciframe = arload.load_frames(slf, fitsdict, [scidx], det,
                                          frametype='science',
                                          msbias=slf._msbias[det-1],
                                          transpose=slf._transpose)
            sciframe = sciframe[:, :, 0]
            # Extract
            msgs.info("Processing science frame")
            arproc.reduce_frame(slf, sciframe, scidx, fitsdict, det)

            #continue
            #msgs.error("UP TO HERE")
            ###############
            # Perform a velocity correction
            if (slf._argflag['reduce']['heliocorr'] == True) & False:
                if slf._argflag['science']['load']['extracted'] == True:
                    msgs.warn("Heliocentric correction will not be applied if an extracted science frame exists, and is used")
                msgs.work("Perform a full barycentric correction")
                msgs.work("Include the facility to correct for gravitational redshifts and time delays (see Pulsar timing work)")
                msgs.info("Performing a heliocentric correction")
                # Load the header for the science frame
                slf._waveids = arvcorr.helio_corr(slf, scidx[0])
            else:
                msgs.info("A heliocentric correction will not be performed")

            ###############
            # Using model sky, calculate a flexure correction

        # Close the QA for this object
        slf._qa.close()

        ###############
        # Flux
        ###############
        # Standard star (is this a calibration, e.g. goes above?)
        msgs.info("Processing standard star")
        msgs.info("Assuming one star per detector mosaic")
        msgs.info("Waited until last detector to process")

        msgs.work("Need to check for existing sensfunc")
        update = slf.MasterStandard(scidx, fitsdict)
        if update and reuseMaster:
            armbase.UpdateMasters(sciexp, sc, 0, ftype="standard")
        #
        msgs.work("Consider using archived sensitivity if not found")
        msgs.info("Fluxing with {:s}".format(slf._sensfunc['std']['name']))
        for kk in xrange(slf._spect['mosaic']['ndet']):
            det = kk + 1  # Detectors indexed from 1
            arflux.apply_sensfunc(slf, det, scidx, fitsdict)

        # Write 1D spectra
        arsave.save_1d_spectra(slf)
        # Write 2D images for the Science Frame
        arsave.save_2d_images(slf)
        # Free up some memory by replacing the reduced ScienceExposure class
        sciexp[sc] = None
    return status
Example #32
    def __init__(self,
                 ispec,
                 guessfile=None,
                 parent=None,
                 zsys=None,
                 norm=None,
                 exten=None,
                 rsp_kwargs={},
                 unit_test=False,
                 screen_scale=1.,
                 **kwargs):
        QMainWindow.__init__(self, parent)
        """
        ispec = str, XSpectrum1D or tuple of arrays
          Input spectrum or spectrum filename.  If tuple then (wave,
          fx), (wave, fx, sig) or (wave, fx, sig, co)
        guessfile : str, optional
          name of the .json file generated with igmguesses GUI in Pyigm (see https://github.com/pyigm/pyigm/blob/master/docs/igmguesses.rst)
          if not None - overplot fitted line profiles from igmguesses
        parent : Widget parent, optional
        zsys : float, optional
          initial redshift
        exten : int, optional
          extension for the spectrum in multi-extension FITS file
        norm : bool, optional
          True if the spectrum is normalized
        screen_scale : float, optional
          Scale the default sizes for the gui size
        """
        #reload(ltgl)
        #reload(ltgsp)
        # INIT
        #QtCore.pyqtRemoveInputHook()
        #xdb.set_trace()
        #QtCore.pyqtRestoreInputHook()

        self.scale = screen_scale

        # Needed to avoid crash in large spectral files
        rcParams['agg.path.chunksize'] = 20000
        rcParams[
            'axes.formatter.useoffset'] = False  # avoid scientific notation in axes tick labels

        # Build a widget combining several others
        self.main_widget = QWidget()

        # Status bar
        self.create_status_bar()

        # Grab the pieces and tie together
        self.pltline_widg = ltgl.PlotLinesWidget(status=self.statusBar,
                                                 init_z=zsys,
                                                 screen_scale=self.scale)
        self.pltline_widg.setMaximumWidth(300 * self.scale)

        ## Abs sys
        abs_sys = None
        voigtsfit = None
        if guessfile is not None:
            # Load
            ism = LineList('ISM')
            igm_guess = ltu.loadjson(guessfile)
            comps = []
            for key in igm_guess['cmps'].keys():
                comp = AbsComponent.from_dict(igm_guess['cmps'][key],
                                              chk_vel=False,
                                              linelist=ism)
                comps.append(comp)
            abs_sys = ltiu.build_systems_from_components(
                comps, vsys=500. * u.km /
                u.s)  # ,chk_z=False)  ### 100000.*u.km/u.s   ok

            ### voigt fit - added
            # Spectrum
            spec, spec_fil = ltgu.read_spec(ispec,
                                            exten=exten,
                                            norm=norm,
                                            rsp_kwargs=rsp_kwargs)

            voigtsfit = np.asarray([0] * len(spec.wavelength))
            alllines = []
            for iabs_sys in abs_sys:
                lines = iabs_sys.list_of_abslines()
                alllines = alllines + lines
            if len(alllines) > 0:
                voigtsfit = lav.voigt_from_abslines(spec.wavelength,
                                                    alllines,
                                                    fwhm=3.).flux.value

            if not norm:
                voigtsfit = voigtsfit * spec.co

        # Hook the spec widget to Plot Line
        self.spec_widg = ltgsp.ExamineSpecWidget(ispec,
                                                 guessfile=guessfile,
                                                 voigtsfit=voigtsfit,
                                                 status=self.statusBar,
                                                 parent=self,
                                                 llist=self.pltline_widg.llist,
                                                 zsys=zsys,
                                                 norm=norm,
                                                 exten=exten,
                                                 abs_sys=abs_sys,
                                                 screen_scale=self.scale,
                                                 rsp_kwargs=rsp_kwargs,
                                                 **kwargs)
        # Reset redshift from spec
        if zsys is None:
            if hasattr(self.spec_widg.spec, 'z'):
                self.pltline_widg.setz(
                    str(self.spec_widg.spec.z[self.spec_widg.select]))
        # Auto set line list if spec has proper object type
        if hasattr(self.spec_widg.spec, 'stypes'):
            if self.spec_widg.spec.stypes[
                    self.spec_widg.select].lower() == 'galaxy':
                self.pltline_widg.llist = ltgu.set_llist(
                    'Galaxy', in_dict=self.pltline_widg.llist)
            elif self.spec_widg.spec.stypes[
                    self.spec_widg.select].lower() == 'absorber':
                self.pltline_widg.llist = ltgu.set_llist(
                    'Strong', in_dict=self.pltline_widg.llist)
            self.pltline_widg.llist['Plot'] = True
            idx = self.pltline_widg.lists.index(
                self.pltline_widg.llist['List'])
            self.pltline_widg.llist_widget.setCurrentRow(idx)
        #
        self.pltline_widg.spec_widg = self.spec_widg
        # Multi spec
        self.mspec_widg = ltgsp.MultiSpecWidget(self.spec_widg)

        self.spec_widg.canvas.mpl_connect('button_press_event', self.on_click)

        # Layout

        # Extras
        extras = QWidget()
        extras.setMinimumWidth(180 * self.scale)
        extras.setMaximumWidth(280 * self.scale)
        vbox = QVBoxLayout()
        qbtn = QPushButton(self)
        qbtn.setText('Quit')
        qbtn.clicked.connect(self.quit)
        vbox.addWidget(self.pltline_widg)
        vbox.addWidget(self.mspec_widg)
        vbox.addWidget(qbtn)
        extras.setLayout(vbox)

        # Main window
        hbox = QHBoxLayout()
        hbox.addWidget(self.spec_widg)
        hbox.addWidget(extras)

        self.main_widget.setLayout(hbox)

        # Point MainWindow
        self.setCentralWidget(self.main_widget)
        if unit_test:
            self.quit()
Example #33
def generate_boss_tables():
    """
    Returns
    -------

    """
    # Load JSON file
    dr12_json = resource_filename('dla_cnn',
                                  'catalogs/boss_dr12/predictions_DR12.json')
    dr12 = ltu.loadjson(dr12_json)

    # Load Garnett Table 2 for BALs
    tbl2_garnett_file = '/media/xavier/ExtraDrive2/Projects/ML_DLA_results/garnett16/ascii_catalog/table2.dat'
    tbl2_garnett = Table.read(tbl2_garnett_file, format='cds')
    tbl2_garnett_coords = SkyCoord(ra=tbl2_garnett['RAdeg'],
                                   dec=tbl2_garnett['DEdeg'],
                                   unit='deg')

    # Parse into tables
    s_plates = []
    s_fibers = []
    s_mjds = []
    s_ra = []
    s_dec = []
    s_zem = []

    a_zabs = []
    a_NHI = []
    a_sigNHI = []
    a_conf = []
    a_plates = []
    a_fibers = []
    a_mjds = []
    a_ra = []
    a_dec = []
    a_zem = []
    for sline in dr12:
        # Plate/fiber
        plate, mjd, fiber = [int(spl) for spl in sline['id'].split('-')]
        s_plates.append(plate)
        s_mjds.append(mjd)
        s_fibers.append(fiber)
        # RA/DEC/zem
        s_ra.append(sline['ra'])
        s_dec.append(sline['dec'])
        s_zem.append(sline['z_qso'])
        # DLAs/SLLS
        for abs in sline['dlas'] + sline['subdlas']:
            a_plates.append(plate)
            a_mjds.append(mjd)
            a_fibers.append(fiber)
            # RA/DEC/zem
            a_ra.append(sline['ra'])
            a_dec.append(sline['dec'])
            a_zem.append(sline['z_qso'])
            # Absorber
            a_zabs.append(abs['z_dla'])
            a_NHI.append(abs['column_density'])
            a_sigNHI.append(abs['std_column_density'])
            a_conf.append(abs['dla_confidence'])
    # Sightline tables
    sline_tbl = Table()
    sline_tbl['Plate'] = s_plates
    sline_tbl['Fiber'] = s_fibers
    sline_tbl['MJD'] = s_mjds
    sline_tbl['RA'] = s_ra
    sline_tbl['DEC'] = s_dec
    sline_tbl['zem'] = s_zem

    # Match and fill BAL flag
    dr12_sline_coord = SkyCoord(ra=sline_tbl['RA'],
                                dec=sline_tbl['DEC'],
                                unit='deg')
    sline_tbl['flg_BAL'] = -1
    idx, d2d, d3d = match_coordinates_sky(dr12_sline_coord,
                                          tbl2_garnett_coords,
                                          nthneighbor=1)
    in_garnett = d2d < 1 * u.arcsec  # Check
    sline_tbl['flg_BAL'][in_garnett] = tbl2_garnett['f_BAL'][idx[in_garnett]]
    print("There were {:d} DR12 sightlines not in Garnett".format(
        np.sum(~in_garnett)))

    # Write
    dr12_sline = resource_filename('dla_cnn',
                                   'catalogs/boss_dr12/DR12_sightlines.fits')
    sline_tbl.write(dr12_sline, overwrite=True)
    print("Wrote {:s}".format(dr12_sline))

    # DLA/SLLS table
    abs_tbl = Table()
    abs_tbl['Plate'] = a_plates
    abs_tbl['Fiber'] = a_fibers
    abs_tbl['MJD'] = a_mjds
    abs_tbl['RA'] = a_ra
    abs_tbl['DEC'] = a_dec
    abs_tbl['zem'] = a_zem
    #
    abs_tbl['zabs'] = a_zabs
    abs_tbl['NHI'] = a_NHI
    abs_tbl['sigNHI'] = a_sigNHI
    abs_tbl['conf'] = a_conf
    # BAL
    dr12_abs_coord = SkyCoord(ra=abs_tbl['RA'], dec=abs_tbl['DEC'], unit='deg')
    idx, d2d, d3d = match_coordinates_sky(dr12_abs_coord,
                                          tbl2_garnett_coords,
                                          nthneighbor=1)
    in_garnett = d2d < 1 * u.arcsec  # Check
    abs_tbl['flg_BAL'] = -1
    abs_tbl['flg_BAL'][in_garnett] = tbl2_garnett['f_BAL'][idx[in_garnett]]
    abs_tbl['SNR'] = 0.
    abs_tbl['SNR'][in_garnett] = tbl2_garnett['SNRSpec'][idx[in_garnett]]
    print("There were {:d} DR12 absorbers not covered by Garnett".format(
        np.sum(~in_garnett)))

    dr12_abs = resource_filename('dla_cnn',
                                 'catalogs/boss_dr12/DR12_DLA_SLLS.fits')
    abs_tbl.write(dr12_abs, overwrite=True)
    print("Wrote {:s}".format(dr12_abs))

    # Garnett
    ml_path = os.getenv('PROJECT_ML')
    g16_dlas = Table.read(ml_path + '/garnett16/ascii_catalog/table3.dat',
                          format='cds')
    tbl3_garnett_coords = SkyCoord(ra=g16_dlas['RAdeg'],
                                   dec=g16_dlas['DEdeg'],
                                   unit='deg')
    idx, d2d, d3d = match_coordinates_sky(tbl3_garnett_coords,
                                          tbl2_garnett_coords,
                                          nthneighbor=1)
    in_garnett = d2d < 1 * u.arcsec  # Check
    g16_dlas['flg_BAL'] = -1
    g16_dlas['flg_BAL'][in_garnett] = tbl2_garnett['f_BAL'][idx[in_garnett]]
    g16_dlas['SNR'] = 0.
    g16_dlas['SNR'][in_garnett] = tbl2_garnett['SNRSpec'][idx[in_garnett]]
    g16_outfile = resource_filename(
        'dla_cnn', 'catalogs/boss_dr12/DR12_DLA_garnett16.fits')
    g16_dlas.write(g16_outfile, overwrite=True)
    print("Wrote {:s}".format(g16_outfile))
Example #34
import numpy as np

from astropy.table import vstack

from linetools import utils as ltu
from linetools.spectra.xspectrum1d import XSpectrum1D

from pypeit.core.wavecal import autoid
from pypeit.core.wavecal import waveio
from pypeit.core import arc
from pypeit.spectrographs import gemini_gmos

# Load the spectra
chip = 2
if chip == 1:
    jdict = ltu.loadjson('GMOS_R400_blue.json.gz')
    outroot = 'GMOS_R400_blue_'
elif chip == 2:
    jdict = ltu.loadjson('GMOS_R400_chip2.json.gz')
    outroot = 'GMOS_R400_chip2_'

spectrograph = gemini_gmos.GeminiGMOSNE2VSpectrograph()
arcparam = {}
spectrograph.setup_arcparam(arcparam, disperser='R400')
arcparam['n_first'] = 2
arcparam['n_final'] = 3
arcparam['func'] = 'legendre'
arcparam['nsig_rej'] = 2.
arcparam['nsig_rej_final'] = 3.
arcparam['disp'] = 0.67 * 2.
arcparam['match_toler'] = 3.
Example #35
    hdu_sedg = fits.open(user +
                         '/Dropbox/Cowie_2002-02-17/Flats/SEdgECH75_1x1.fits')
    data = hdu_sedg[0].data
    slit_left = data[0, :, :].T
    slit_righ = data[1, :, :].T
    plate_scale = 0.149
    hdu_std = fits.open(
        user +
        '/Dropbox/Cowie_2002-02-17/Extract/Obj_ES.20020217.20037.fits.gz')
    std_trace = hdu_std[1].data['trace'][:, 0:slit_left.shape[0]].T
    std_trace[:, -1] = std_trace[:, -2] + (np.median(std_trace[:200, -1]) -
                                           np.median(std_trace[:200, -2]))
    #The structure is exten = 1, and the xpos,ypos are the standard trace
elif spectro == 'NIRES':
    from linetools import utils as ltu
    jdict = ltu.loadjson(user + '/Dropbox/hires_fndobj/tilt_nires.json')
    slit_left = np.array(jdict['lcen'])
    slit_righ = np.array(jdict['rcen'])
    hdu = fits.open(
        user +
        'Dropbox/hires_fndobj/spec2d_J1724+1901_NIRES_2018Jun04T130207.856.fits'
    )
    objminsky = hdu[1].data - hdu[3].data
    ivar = hdu[2].data
    mask = (ivar > 0)
    plate_scale = 0.123
    std_trace = None
elif spectro == 'GNIRS':
    from scipy.io import readsav
    #hdu = fits.open(user + '/Dropbox/hires_fndobj/sci-N20170331S0216-219.fits')
    #hdu = fits.open(user + '/Dropbox/hires_fndobj/GNIRS/J021514.76+004223.8/Science/J021514.76+004223.8_1/sci-N20170927S0294-297.fits')
    ivar = utils.calc_ivar(var)
    mask = (var > 0.0)
    skyimg = hdu[2].data
    objminsky = sciimg - skyimg
Example #36
    hdu_sedg = fits.open(user + '/Dropbox/Cowie_2002-02-17/Flats/SEdgECH75_1x1.fits')
    data = hdu_sedg[0].data
    slit_left = data[0,:,:].T
    slit_righ = data[1,:,:].T
    plate_scale = 0.149
    hdu_std =fits.open(user + '/Dropbox/Cowie_2002-02-17/Extract/Obj_ES.20020217.20037.fits.gz')
    std_trace = hdu_std[1].data['trace'][:,0:slit_left.shape[0]].T
    std_trace[:,-1] = std_trace[:,-2]+ (np.median(std_trace[:200,-1])-np.median(std_trace[:200,-2]))
    #The structure is exten = 1, and the xpos,ypos are the standard trace
elif spectro == 'NIRES':
    from linetools import utils as ltu
    jdict = ltu.loadjson(user + '/Dropbox/hires_fndobj/tilt_nires.json')
    slit_left = np.array(jdict['lcen'])
    slit_righ = np.array(jdict['rcen'])
    hdu = fits.open(user + 'Dropbox/hires_fndobj/spec2d_J1724+1901_NIRES_2018Jun04T130207.856.fits')
    objminsky = hdu[1].data - hdu[3].data
    ivar = hdu[2].data
    mask = (ivar>0)
    plate_scale = 0.123
    std_trace = None
elif spectro == 'GNIRS':
    from scipy.io import readsav
    #hdu = fits.open(user + '/Dropbox/hires_fndobj/sci-N20170331S0216-219.fits')
    #hdu = fits.open(user + '/Dropbox/hires_fndobj/GNIRS/J021514.76+004223.8/Science/J021514.76+004223.8_1/sci-N20170927S0294-297.fits')
    #hdu = fits.open(user + 'Dropbox/hires_fndobj/GNIRS/J005424.45+004750.2/Science/J005424.45+004750.2_7/sci-N20171021S0264-267.fits')
    hdu = fits.open(user + 'Dropbox/hires_fndobj/GNIRS/J002407.02-001237.2/Science/J002407.02-001237.2_5/sci-N20171006S0236-239.fits')
    obj = hdu[0].data
Example #37
def load_sys_files(inp,
                   type,
                   ref=None,
                   sys_path=False,
                   build_abs_sys=False,
                   **kwargs):
    """ Load up a set of SYS files from the hard-drive (JSON files)

    Parameters
    ----------
    inp : str
      Name of JSON tarball or if sys_path=True then the path to a folder of JSON files
    type : str
      type of IGMSystem, e.g. LLS
    ref : str, optional
      Reference label
    sys_path : bool, optional
      indicates that inp is a path to a set of JSON SYS files
      otherwise, inp should be the filename of a tarball of JSON files
    build_abs_sys : bool, optional
      Build a list of AbsSystem objects?  They can always be instantiated later
    **kwargs :
      Passed to system

    Returns
    -------
    survey : IGMSurvey
    """
    import tarfile
    #
    survey = class_by_type(type)(ref=ref)
    system = pyasu.class_by_type(type)
    if sys_path:
        pdb.set_trace()  # THIS NEEDS TO BE UPDATED AS WAS DONE FOR THE TARBALL
        # Individual files
        files = glob.glob(inp + '*.json')
        files.sort()
        for ifile in files:
            tdict = ltu.loadjson(ifile)
            abssys = system.from_dict(tdict, linelist=llist)
            survey._abs_sys.append(abssys)
    else:  # tarball
        print('Loading systems from {:s}'.format(inp))
        tar = tarfile.open(inp, 'r:gz')
        for member in tar.getmembers():
            if '.' not in member.name:
                print('Skipping a likely folder: {:s}'.format(member.name))
                continue
            # Extract
            f = tar.extractfile(member)
            f = f.read()
            f = f.decode('utf-8')

            tdict = json.loads(f)
            # Add keys (for backwards compatibility)
            if ('NHI' in tdict.keys()) and ('flag_NHI' not in tdict.keys()):
                tdict['flag_NHI'] = 1
            # Add to list of dicts
            survey._dict[tdict['Name']] = tdict
        tar.close()
    # Mask
    survey.init_mask()

    # Set coordinates
    ras = [survey._dict[key]['RA'] for key in survey._dict.keys()]
    decs = [survey._dict[key]['DEC'] for key in survey._dict.keys()]
    survey.coords = SkyCoord(ra=ras, dec=decs, unit='deg')

    # Build AbsSystem objects?
    if build_abs_sys:
        survey.build_all_abs_sys(linelist=llist)

    # Generate the data table
    print("Building the data Table from the internal dict")
    survey.data_from_dict()

    # Return
    return survey
Example #38
def mk_db(dbname, tree, outfil, iztbl, version='v00', id_key='PRIV_ID',
          publisher='Unknown', **kwargs):
    """ Generate the DB

    Parameters
    ----------
    dbname : str
      Name for the database
    tree : str
      Path to top level of the tree of FITS files
      Typically, each branch in the tree corresponds to a single instrument
    outfil : str
      Output file name for the hdf5 file
    iztbl : Table or str
      If Table, see meta() docs for details on its format
      If str, it must be 'igmspec' and the user must have that DB downloaded
    version : str, optional
      Version code
    id_key : str, optional
      Key used for the survey IDs in the main table
    publisher : str, optional
      Publisher name, passed through to write_hdf

    Returns
    -------

    """
    from specdb import defs

    # ztbl
    if isinstance(iztbl, str):
        if iztbl == 'igmspec':
            from specdb.specdb import IgmSpec
            igmsp = IgmSpec()
            ztbl = Table(igmsp.idb.hdf['quasars'][...])
        else:
            raise IOError("Unrecognized str value for iztbl: {:s}".format(iztbl))
    elif isinstance(iztbl, Table):
        ztbl = iztbl
    else:
        raise IOError("Bad type for ztbl")

    # Find the branches
    branches = glob.glob(tree+'/*')
    branches.sort()
    # HDF5 file
    hdf = h5py.File(outfil,'w')

    # Defs
    zpri = defs.z_priority()
    gdict = {}

    # Main DB Table
    maindb, tkeys = spbu.start_maindb(id_key)

    # MAIN LOOP
    for ss,branch in enumerate(branches):
        # Skip files
        if not os.path.isdir(branch):
            continue
        print('Working on branch: {:s}'.format(branch))
        # Files
        fits_files, out_tup = grab_files(branch)
        meta_file, mtbl_file, ssa_file = out_tup

        # Meta
        maxpix, phead, mdict, stype = 10000, None, None, 'QSO'
        if meta_file is not None:
            # Load
            meta_dict = ltu.loadjson(meta_file)
            # Maxpix
            if 'maxpix' in meta_dict.keys():
                maxpix = meta_dict['maxpix']
            # STYPE
            if 'stype' in meta_dict.keys():
                stype = meta_dict['stype']
            # Parse header
            if 'parse_head' in meta_dict.keys():
                phead = meta_dict['parse_head']
            if 'meta_dict' in meta_dict.keys():
                mdict = meta_dict['meta_dict']
        full_meta = mk_meta(fits_files, ztbl, mtbl_file=mtbl_file,
                            parse_head=phead, mdict=mdict, **kwargs)
        # Update group dict
        group_name = branch.split('/')[-1]
        flag_g = spbu.add_to_group_dict(group_name, gdict)
        # IDs
        maindb = add_ids(maindb, full_meta, flag_g, tkeys, 'PRIV_ID', first=(flag_g==1))
        # Ingest
        ingest_spectra(hdf, group_name, full_meta, max_npix=maxpix, **kwargs)
        # SSA
        if ssa_file is not None:
            user_ssa = ltu.loadjson(ssa_file)
            ssa_dict = default_fields(user_ssa['Title'], flux=user_ssa['flux'], fxcalib=user_ssa['fxcalib'])
            hdf[group_name]['meta'].attrs['SSA'] = json.dumps(ltu.jsonify(ssa_dict))

    # Check stacking
    if not spbu.chk_vstack(hdf):
        print("Meta data will not stack using specdb.utils.clean_vstack")
        print("Proceed to write at your own risk..")
        pdb.set_trace()

    # Write
    write_hdf(hdf, str(dbname), maindb, zpri, gdict, version,
              Publisher=publisher)
    print("Wrote {:s} DB file".format(outfil))
Example #39
0
def ARMLSD(argflag, spect, fitsdict, reuseMaster=False):
    """
    Automatic Reduction and Modeling of Long Slit Data

    Parameters
    ----------
    argflag : dict
      Arguments and flags used for reduction
    spect : dict
      Properties of the spectrograph.
    fitsdict : dict
      Contains relevant information from fits header files
    reuseMaster : bool
      If True, a master frame that will be used for another science frame
      will not be regenerated after it is first made.
      This setting comes with a price, and if a large number of science frames are
      being generated, it may be more efficient to simply regenerate the master
      calibrations on the fly.

    Returns
    -------
    status : int
      Status of the reduction procedure
      0 = Successful execution
      1 = ...
    """
    status = 0

    # Create a list of science exposure classes
    sciexp = armbase.SetupScience(argflag, spect, fitsdict)
    numsci = len(sciexp)

    # Create a list of master calibration frames
    masters = armasters.MasterFrames(spect['mosaic']['ndet'])

    # Use Masters?  Requires setup file
    setup_file = argflag['out']['sorted'] + '.setup'
    try:
        calib_dict = ltu.loadjson(setup_file)
    except IOError:
        msgs.info("No setup file {:s} for MasterFrames".format(setup_file))
        calib_dict = {}
    else:
        argflag['masters']['setup_file'] = setup_file

    # Start reducing the data
    for sc in range(numsci):
        slf = sciexp[sc]
        scidx = slf._idx_sci[0]
        msgs.info("Reducing file {0:s}, target {1:s}".format(
            fitsdict['filename'][scidx], slf._target_name))
        msgs.sciexp = slf  # For QA writing on exit, if nothing else.  Could write Masters too
        # Loop on Detectors
        for kk in range(slf._spect['mosaic']['ndet']):
            det = kk + 1  # Detectors indexed from 1
            slf.det = det
            ###############
            # Get amplifier sections
            arproc.get_ampsec_trimmed(slf, fitsdict, det, scidx)
            # Setup
            setup = arsort.calib_setup(slf,
                                       sc,
                                       det,
                                       fitsdict,
                                       calib_dict,
                                       write=False)
            slf._argflag['masters']['setup'] = setup
            ###############
            # Generate master bias frame
            update = slf.MasterBias(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="bias")
            ###############
            # Generate a bad pixel mask (should not repeat)
            update = slf.BadPixelMask(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="arc")
            ###############
            # Generate a master arc frame
            update = slf.MasterArc(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="arc")
            ###############
            # Determine the dispersion direction (and transpose if necessary)
            slf.GetDispersionDirection(fitsdict, det, scidx)
            if slf._bpix[det - 1] is None:  # Needs to be done here after nspec is set
                slf.SetFrame(slf._bpix,
                             np.zeros((slf._nspec[det - 1], slf._nspat[det - 1])), det)
            '''
            ###############
            # Estimate gain and readout noise for the amplifiers
            msgs.work("Estimate Gain and Readout noise from the raw frames...")
            update = slf.MasterRN(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp, sc, det, ftype="readnoise")
            '''
            ###############
            # Generate a master trace frame
            update = slf.MasterTrace(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp,
                                      sc,
                                      det,
                                      ftype="flat",
                                      chktype="trace")
            ###############
            # Generate an array that provides the physical pixel locations on the detector
            slf.GetPixelLocations(det)
            # Determine the edges of the spectrum (spatial)
            if 'trace' + slf._argflag['masters']['setup'] not in slf._argflag['masters']['loaded']:
                ###############
                # Determine the edges of the spectrum (spatial)
                lordloc, rordloc, extord = artrace.trace_orders(
                    slf,
                    slf._mstrace[det - 1],
                    det,
                    singleSlit=True,
                    pcadesc="PCA trace of the slit edges")
                slf.SetFrame(slf._lordloc, lordloc, det)
                slf.SetFrame(slf._rordloc, rordloc, det)

                # Convert physical trace into a pixel trace
                msgs.info(
                    "Converting physical trace locations to nearest pixel")
                pixcen = artrace.phys_to_pix(
                    0.5 * (slf._lordloc[det - 1] + slf._rordloc[det - 1]),
                    slf._pixlocn[det - 1], 1)
                pixwid = (slf._rordloc[det - 1] -
                          slf._lordloc[det - 1]).mean(0).astype(int)
                lordpix = artrace.phys_to_pix(slf._lordloc[det - 1],
                                              slf._pixlocn[det - 1], 1)
                rordpix = artrace.phys_to_pix(slf._rordloc[det - 1],
                                              slf._pixlocn[det - 1], 1)
                slf.SetFrame(slf._pixcen, pixcen, det)
                slf.SetFrame(slf._pixwid, pixwid, det)
                slf.SetFrame(slf._lordpix, lordpix, det)
                slf.SetFrame(slf._rordpix, rordpix, det)
                # Save QA for slit traces
                arqa.slit_trace_qa(slf,
                                   slf._mstrace[det - 1],
                                   slf._lordpix[det - 1],
                                   slf._rordpix[det - 1],
                                   extord,
                                   desc="Trace of the slit edges")

            ###############
            # Prepare the pixel flat field frame
            update = slf.MasterFlatField(fitsdict, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp,
                                      sc,
                                      det,
                                      ftype="flat",
                                      chktype="pixflat")
            ###############
            # Generate the 1D wavelength solution
            update = slf.MasterWaveCalib(fitsdict, sc, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp,
                                      sc,
                                      det,
                                      ftype="arc",
                                      chktype="trace")
            ###############
            # Derive the spectral tilt
            if slf._tilts[det - 1] is None:
                if slf._argflag['masters']['use']:
                    mstilt_name = armasters.master_name(
                        slf._argflag['run']['masterdir'], 'tilts',
                        slf._argflag['masters']['setup'])
                    try:
                        tilts, head = arload.load_master(mstilt_name,
                                                         frametype="tilts")
                    except IOError:
                        pass
                    else:
                        slf.SetFrame(slf._tilts, tilts, det)
                        slf._argflag['masters']['loaded'].append(
                            'tilts' + slf._argflag['masters']['setup'])
                if 'tilts' + slf._argflag['masters']['setup'] not in slf._argflag['masters']['loaded']:
                    # First time tilts are derived for this arc frame --> derive the order tilts
                    tilts, satmask, outpar = artrace.model_tilt(
                        slf, det, slf._msarc[det - 1])
                    slf.SetFrame(slf._tilts, tilts, det)
                    slf.SetFrame(slf._satmask, satmask, det)
                    slf.SetFrame(slf._tiltpar, outpar, det)

            ###############
            # Generate/load a master wave frame
            update = slf.MasterWave(fitsdict, sc, det)
            if update and reuseMaster:
                armbase.UpdateMasters(sciexp,
                                      sc,
                                      det,
                                      ftype="arc",
                                      chktype="wave")

            # Check if the user only wants to prepare the calibrations
            msgs.info("All calibration frames have been prepared")
            if slf._argflag['run']['preponly']:
                msgs.info("If you would like to continue with the reduction," +
                          msgs.newline() + "disable the run+preponly command")
                continue

            # Write setup
            setup = arsort.calib_setup(slf,
                                       sc,
                                       det,
                                       fitsdict,
                                       calib_dict,
                                       write=True)
            # Write MasterFrames (currently per detector)
            armasters.save_masters(slf, det, setup)

            ###############
            # Load the science frame and from this generate a Poisson error frame
            msgs.info("Loading science frame")
            sciframe = arload.load_frames(slf,
                                          fitsdict, [scidx],
                                          det,
                                          frametype='science',
                                          msbias=slf._msbias[det - 1],
                                          transpose=slf._transpose)
            sciframe = sciframe[:, :, 0]
            # Extract
            msgs.info("Processing science frame")
            arproc.reduce_frame(slf, sciframe, scidx, fitsdict, det)

            #continue
            #msgs.error("UP TO HERE")
            ###############
            # Perform a velocity correction
            if (slf._argflag['reduce']['heliocorr'] == True) & False:  # the '& False' deliberately disables this block
                if slf._argflag['science']['load']['extracted'] == True:
                    msgs.warn(
                        "Heliocentric correction will not be applied if an extracted science frame exists, and is used"
                    )
                msgs.work("Perform a full barycentric correction")
                msgs.work(
                    "Include the facility to correct for gravitational redshifts and time delays (see Pulsar timing work)"
                )
                msgs.info("Performing a heliocentric correction")
                # Load the header for the science frame
                slf._waveids = arvcorr.helio_corr(slf, scidx[0])
            else:
                msgs.info("A heliocentric correction will not be performed")

            ###############
            # Using model sky, calculate a flexure correction

        # Close the QA for this object
        slf._qa.close()

        ###############
        # Flux
        ###############
        # Standard star (is this a calibration, e.g. goes above?)
        msgs.info("Processing standard star")
        msgs.info("Assuming one star per detector mosaic")
        msgs.info("Waited until last detector to process")

        msgs.work("Need to check for existing sensfunc")
        update = slf.MasterStandard(scidx, fitsdict)
        if update and reuseMaster:
            armbase.UpdateMasters(sciexp, sc, 0, ftype="standard")
        #
        msgs.work("Consider using archived sensitivity if not found")
        msgs.info("Fluxing with {:s}".format(slf._sensfunc['std']['name']))
        for kk in range(slf._spect['mosaic']['ndet']):
            det = kk + 1  # Detectors indexed from 1
            arflux.apply_sensfunc(slf, det, scidx, fitsdict)

        # Write 1D spectra
        arsave.save_1d_spectra(slf)
        # Write 2D images for the Science Frame
        arsave.save_2d_images(slf)
        # Free up some memory by replacing the reduced ScienceExposure class
        sciexp[sc] = None
    return status
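
A sketch of the intended invocation, assuming the argflag, spect and fitsdict dictionaries are already in hand from PYPIT's setup and sorting stages (the variables here are placeholders, not built by this snippet):

    # argflag, spect, fitsdict come from the setup/sorting machinery
    status = ARMLSD(argflag, spect, fitsdict, reuseMaster=True)
    if status == 0:
        print("Long slit reduction finished successfully")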
Example #40
0
def main(args=None):
    pargs = parser(options=args)
    import sys
    import os
    from linetools import utils as ltu
    from vetrr.vet_redrock import VetRedRockGui
    from PyQt5.QtWidgets import QApplication
    from collections import OrderedDict
    import yaml
    import glob

    if pargs.initials:
        initials = pargs.initials
    else:
        initials = None
    print(initials)

    # get the local red rock file (input to vetrr)
    # first check if its there, if not look above
    if len(glob.glob("J*_rr.fits")) != 0:
        infile = glob.glob("J*_rr.fits")[0]
    elif glob.glob("J*_rr.fits") == 0:
        infile = glob.glob("../J*_rr.fits")[0]
    else:
        import pdb
        pdb.set_trace()

    # Get the coadd_file (yaml)
    if len(glob.glob("J*coadd.yaml")) != 0:
        coadd = glob.glob("J*coadd.yaml")[0]
    else:
        import pdb
        pdb.set_trace()

    # check to see if there are any previous vetrr.json files
    outguess = glob.glob("J*vetrr*.json")
    # No previous vetrr json file exists; initials are needed to build an output name.
    if len(outguess) == 0:
        if initials is not None:
            outfile = infile[:-7] + "vetrr_" + initials + ".json"
        else:
            print("ERROR: No existing output. Need an outfile and initials")
            outfile = None
    elif initials is not None:
        print("Createing a new output file")
        outfile = infile[:-7] + "vetrr_" + initials + ".json"
    else:
        outfile = outguess[0]

    print("******************************************************")
    print("Auto found these files:")
    print("INFILE:", infile)
    print("OUTFILE:", outfile)
    print("COADD:", coadd)
    print("****************************************************** \n")

    # Load outfile if it exists
    if outfile is not None and os.path.isfile(outfile):
        print("******************************************************")
        print("WARNING:  Loading previous file and will over-write it!")
        print("******************************************************")
        zdict = ltu.loadjson(outfile)
        zdict = OrderedDict(zdict)
    else:
        zdict = None

    # YAML coadd file?
    if coadd is not None:
        # Load the input file
        with open(coadd, 'r') as in_file:
            coadd_dict = yaml.safe_load(in_file)

    app = QApplication(sys.argv)
    gui = VetRedRockGui(infile,
                        outfile=outfile,
                        zdict=zdict,
                        coadd_dict=coadd_dict)
    gui.show()
    app.exec_()
    # ESI
    hdu = fits.open('/Users/feige/Dropbox/Cowie_2002-02-17/Final/fringe_ES.20020217.35453.fits.gz')
    sciimg = hdu[0].data
    var  = hdu[1].data
    ivar = utils.calc_ivar(var)
    mask = (var > 0.0)
    skyimg = hdu[2].data
    objminsky = sciimg - skyimg
    hdu_sedg = fits.open('/Users/feige/Dropbox/Cowie_2002-02-17/Flats/SEdgECH75_1x1.fits')
    data = hdu_sedg[0].data
    slit_left = data[0,:,:].T
    slit_righ = data[1,:,:].T
    plate_scale = 0.149
elif spectro == 'NIRES':
    from linetools import utils as ltu
    jdict = ltu.loadjson('/Users/feige/Dropbox/hires_fndobj/tilt_nires.json')
    slit_left = np.array(jdict['lcen'])
    slit_righ = np.array(jdict['rcen'])
    hdu = fits.open('/Users/feige/Dropbox/hires_fndobj/spec2d_J1724+1901_NIRES_2018Jun04T130207.856.fits')
    objminsky = hdu[1].data - hdu[3].data
    ivar = hdu[2].data
    mask = (ivar>0)
    plate_scale = 0.123
elif spectro == 'GNIRS':
    from scipy.io import readsav
    #hdu = fits.open('/Users/feige/Dropbox/hires_fndobj/sci-N20170331S0216-219.fits')
    #hdu = fits.open('/Users/feige/Dropbox/hires_fndobj/GNIRS/J021514.76+004223.8/Science/J021514.76+004223.8_1/sci-N20170927S0294-297.fits')
    #hdu = fits.open('/Users/feige/Dropbox/hires_fndobj/GNIRS/J005424.45+004750.2/Science/J005424.45+004750.2_7/sci-N20171021S0264-267.fits')
    hdu = fits.open('/Users/feige/Dropbox/hires_fndobj/GNIRS/J002407.02-001237.2/Science/J002407.02-001237.2_5/sci-N20171006S0236-239.fits')
    obj = hdu[0].data
    #objminsky = obj - hdu[1].data
Example #42
0
    def from_igmguesses(cls,
                        igmgfile,
                        name=None,
                        radec=None,
                        zem=None,
                        **kwargs):
        """ Instantiate from a JSON file from IGMGuesses

        Parameters
        ----------
        igmgfile : str
          Filename
        name : str, optional
          Name of the IGMSightline
        radec : RA/DEC input, optional
          See ltu.radec_to_coord for options on format
          If given, it will overwrite the RA/DEC in IGMGuesses file (if any)
        zem : float, optional
          Emission redshift of sightline
          If given, it will overwrite the zem in IGMGuesses file (if any)

        Returns
        -------
        slf : IGMSightline

        """
        from linetools.isgm import abscomponent
        # Read
        jdict = ltu.loadjson(igmgfile)  # cmps, specfile, meta
        # Add in additional keys
        # Coords
        if radec is not None:
            coord = ltu.radec_to_coord(radec)
        else:
            coord = SkyCoord(jdict['meta']['RA'],
                             jdict['meta']['DEC'],
                             unit='deg')
        jdict['RA'] = coord.icrs.ra.deg
        jdict['DEC'] = coord.icrs.dec.deg
        # zem
        if zem is not None:
            jdict['zem'] = zem
        else:
            jdict['zem'] = jdict['meta']['zem']
            if jdict['zem'] == 0.:  # IGMGuesses convention: zem = 0. means zem was not set
                jdict['zem'] = None
        # Name
        if name is None:
            if jdict['zem'] is None:
                zem_name = 'z-unknown'
            else:
                zem_name = 'z{:0.3f}'.format(jdict['zem'])
            name = 'J{:s}{:s}_{:s}'.format(
                coord.icrs.ra.to_string(unit=u.hour, sep='', pad=True)[0:4],
                coord.icrs.dec.to_string(sep='', pad=True,
                                         alwayssign=True)[0:5], zem_name)
        jdict['name'] = name
        # Components
        jdict['components'] = jdict.pop('cmps')

        kwargs['use_coord'] = True
        slf = cls.from_dict(jdict, **kwargs)

        # Separate IGMGuesses attributes from AbsComponent
        acomp_keys = list(abscomponent.init_attrib.keys())
        for comp in slf._components:
            comp.igmg_attrib = {}
            akeys = list(comp.attrib.keys())
            for key in akeys:
                if key not in acomp_keys:
                    comp.igmg_attrib[key] = comp.attrib.pop(key)
            # Add other IGMGuesses specific attributes
            comp.igmg_attrib['top_level'] = {}
            # vlim is included because these values aren't always consistent; likely an igmguesses quirk
            for key in ['Nfit', 'bfit', 'zfit', 'mask_abslines', 'wrest', 'vlim']:
                if key in jdict['components'][comp.name].keys():
                    comp.igmg_attrib['top_level'][key] = jdict['components'][comp.name][key]
        # Slurp a few other IGMGuesses things
        slf.igmg_dict = {}
        for key in ['spec_file', 'meta', 'fwhm']:
            slf.igmg_dict[key] = jdict[key]
        # Return
        return slf
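
A minimal usage sketch, assuming this classmethod lives on pyigm's IGMSightline (the import path and file name are assumptions):

    from pyigm.igm.igmsightline import IGMSightline  # assumed import path
    sl = IGMSightline.from_igmguesses('J0101+0102_igmg.json', zem=2.5)
    print(sl.name, len(sl._components))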
Example #43
0
def load_ml_file(pred_file):
    """ Load the search results from the CNN into a DLASurvey object
    Parameters
    ----------
    pred_file

    Returns
    -------
    ml_llssurvey: LLSSurvey
    ml_dlasusrvey: DLASurvey
    """
    print("Loading {:s}.  Please be patient..".format(pred_file))
    # Read
    ml_results = ltu.loadjson(pred_file)
    use_platef, use_id = False, False
    if 'plate' in ml_results[0].keys():
        use_platef = True
    elif 'id' in ml_results[0].keys():
        use_id = True
    # Init
    idict = dict(ra=[], dec=[], plate=[], fiber=[])
    if use_platef:
        for key in ['plate', 'fiber', 'mjd']:
            idict[key] = []
    dlasystems = []
    llssystems = []

    # Generate coords to speed things up
    for obj in ml_results:
        for key in ['ra', 'dec']:
            idict[key].append(obj[key])
    ml_coords = SkyCoord(ra=idict['ra'], dec=idict['dec'], unit='deg')
    ra_names = ml_coords.icrs.ra.to_string(unit=u.hour, sep='', pad=True)
    dec_names = ml_coords.icrs.dec.to_string(sep='', pad=True, alwayssign=True)
    vlim = [-500., 500.] * u.km / u.s
    dcoord = SkyCoord(ra=0., dec=0., unit='deg')

    # Loop on list
    didx, lidx = [], []
    print("Looping on sightlines..")
    for tt, obj in enumerate(ml_results):
        #if (tt % 100) == 0:
        #    print('tt: {:d}'.format(tt))
        # Sightline
        if use_id:
            plate, fiber = [int(spl) for spl in obj['id'].split('-')]
            idict['plate'].append(plate)
            idict['fiber'].append(fiber)

        # Systems
        for ss, syskey in enumerate(['dlas', 'subdlas']):
            for idla in obj[syskey]:
                name = 'J{:s}{:s}_z{:.3f}'.format(ra_names[tt], dec_names[tt],
                                                  idla['z_dla'])
                if ss == 0:
                    isys = DLASystem(dcoord,
                                     idla['z_dla'],
                                     vlim,
                                     NHI=idla['column_density'],
                                     zem=obj['z_qso'],
                                     name=name)
                else:
                    isys = LLSSystem(dcoord,
                                     idla['z_dla'],
                                     vlim,
                                     NHI=idla['column_density'],
                                     zem=obj['z_qso'],
                                     name=name)
                isys.confidence = idla['dla_confidence']
                isys.s2n = idla['s2n']
                if use_platef:
                    isys.plate = obj['plate']
                    isys.fiber = obj['fiber']
                elif use_id:
                    isys.plate = plate
                    isys.fiber = fiber
                # Save
                if ss == 0:
                    didx.append(tt)
                    dlasystems.append(isys)
                else:
                    lidx.append(tt)
                    llssystems.append(isys)
    # Generate sightline tables
    sightlines = Table()
    sightlines['RA'] = idict['ra']
    sightlines['DEC'] = idict['dec']
    sightlines['PLATE'] = idict['plate']
    sightlines['FIBERID'] = idict['fiber']
    # Surveys
    ml_llssurvey = LLSSurvey()
    ml_llssurvey.sightlines = sightlines.copy()
    ml_llssurvey._abs_sys = llssystems
    ml_llssurvey.coords = ml_coords[np.array(lidx)]

    ml_dlasurvey = DLASurvey()
    ml_dlasurvey.sightlines = sightlines.copy()
    ml_dlasurvey._abs_sys = dlasystems
    ml_dlasurvey.coords = ml_coords[np.array(didx)]

    # Return
    return ml_llssurvey, ml_dlasurvey
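
A short usage sketch (the prediction file name is hypothetical):

    ml_lls, ml_dla = load_ml_file('dr7_predictions.json')
    print("{:d} DLA candidates, {:d} sub-DLA/LLS candidates".format(
        len(ml_dla._abs_sys), len(ml_lls._abs_sys)))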
Example #45
0
def json_to_sdss_dlasurvey(json_file, sdss_survey, add_pf=True, debug=False):
    """ Convert JSON output file to a DLASurvey object
    Assumes SDSS bookkeeping for sightlines (i.e. PLATE, FIBER)

    Parameters
    ----------
    json_file : str
      Full path to the JSON results file
    sdss_survey : DLASurvey
      SDSS survey, usually human (e.g. JXP for DR5)
    add_pf : bool, optional
      Add plate/fiber to DLAs in sdss_survey

    Returns
    -------
    ml_survey : LLSSurvey
      Survey object holding the ML-identified systems

    """
    print("Loading SDSS Survey from JSON file {:s}".format(json_file))
    # imports
    from pyigm.abssys.dla import DLASystem
    from pyigm.abssys.lls import LLSSystem
    # Fiber key
    for fkey in ['FIBER', 'FIBER_ID', 'FIB']:
        if fkey in sdss_survey.sightlines.keys():
            break
    # Read
    ml_results = ltu.loadjson(json_file)
    use_platef, use_id = False, False
    if 'plate' in ml_results[0].keys():
        use_platef = True
    elif 'id' in ml_results[0].keys():
        use_id = True
    # Init
    #idict = dict(plate=[], fiber=[], classification_confidence=[],  # FOR v2
    #             classification=[], ra=[], dec=[])
    idict = dict(ra=[], dec=[])
    if use_platef:
        for key in ['plate', 'fiber', 'mjd']:
            idict[key] = []
    ml_tbl = Table()
    ml_survey = LLSSurvey()
    systems = []
    in_ml = np.array([False]*len(sdss_survey.sightlines))
    # Loop
    for obj in ml_results:
        # Sightline
        for key in idict.keys():
            idict[key].append(obj[key])
        # DLAs
        #if debug:
        #    if (obj['plate'] == 1366) & (obj['fiber'] == 614):
        #        sv_coord = SkyCoord(ra=obj['ra'], dec=obj['dec'], unit='deg')
        #        print("GOT A MATCH IN RESULTS FILE")
        for idla in obj['dlas']:
            """
            dla = DLASystem((sdss_survey.sightlines['RA'][mt[0]],
                             sdss_survey.sightlines['DEC'][mt[0]]),
                            idla['spectrum']/(1215.6701)-1., None,
                            idla['column_density'])
            """
            if idla['z_dla'] < 1.8:
                continue
            isys = LLSSystem((obj['ra'], obj['dec']), idla['z_dla'], None,
                             NHI=idla['column_density'], zem=obj['z_qso'])
            isys.confidence = idla['dla_confidence']
            if use_platef:
                isys.plate = obj['plate']
                isys.fiber = obj['fiber']
            elif use_id:
                plate, fiber = [int(spl) for spl in obj['id'].split('-')]
                isys.plate = plate
                isys.fiber = fiber
            # Save
            systems.append(isys)
    # Connect to sightlines
    ml_coord = SkyCoord(ra=idict['ra'], dec=idict['dec'], unit='deg')
    s_coord = SkyCoord(ra=sdss_survey.sightlines['RA'], dec=sdss_survey.sightlines['DEC'], unit='deg')
    idx, d2d, d3d = match_coordinates_sky(s_coord, ml_coord, nthneighbor=1)
    used = d2d < 1.*u.arcsec
    for iidx in np.where(~used)[0]:
        print("Sightline RA={:g}, DEC={:g} was not used".format(sdss_survey.sightlines['RA'][iidx],
                                                                sdss_survey.sightlines['DEC'][iidx]))
    # Add plate/fiber to statistical DLAs
    if add_pf:
        dla_coord = sdss_survey.coord
        idx2, d2d, d3d = match_coordinates_sky(dla_coord, s_coord, nthneighbor=1)
        if np.min(d2d.to('arcsec').value) > 1.:
            raise ValueError("Bad match to sightlines")
        for jj,igd in enumerate(np.where(sdss_survey.mask)[0]):
            dla = sdss_survey._abs_sys[igd]
            try:
                dla.plate = sdss_survey.sightlines['PLATE'][idx2[jj]]
            except IndexError:
                pdb.set_trace()
            dla.fiber = sdss_survey.sightlines[fkey][idx2[jj]]
    # Finish
    ml_survey._abs_sys = systems
    if debug:
        # NB: sv_coord is only defined if the commented-out match block above is re-enabled
        ml2_coord = ml_survey.coord
        minsep = np.min(sv_coord.separation(ml2_coord))
        minsep2 = np.min(sv_coord.separation(s_coord))
        tmp = sdss_survey.sightlines[used]
        t_coord = SkyCoord(ra=tmp['RA'], dec=tmp['DEC'], unit='deg')
        minsep3 = np.min(sv_coord.separation(t_coord))
        pdb.set_trace()
    ml_survey.sightlines = sdss_survey.sightlines[used]
    for key in idict.keys():
        ml_tbl[key] = idict[key]
    ml_survey.ml_tbl = ml_tbl
    # Return
    return ml_survey
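
A hedged usage sketch, assuming a human-generated SDSS survey is already in hand (the import path, loader, and file name are assumptions):

    from pyigm.surveys.dlasurvey import DLASurvey  # assumed import path
    sdss = DLASurvey.load_SDSS_DR5()  # assumed convenience loader for the DR5 survey
    ml_survey = json_to_sdss_dlasurvey('ml_results_dr7.json', sdss)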