Example 1
 def test_read_from_fileobj(self, tmpdir):
     filename = str(tmpdir.join('test_read_from_fileobj.fits'))
     hdu = BinTableHDU(self.data)
     hdu.writeto(filename, overwrite=True)
     with open(filename, 'rb') as f:
         t = Table.read(f)
     assert equal_data(t, self.data)
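
For reference, a minimal standalone sketch of the same round trip (assuming only numpy and astropy; `equal_data` above is a helper from the surrounding test suite, so a direct comparison is used instead):

import numpy as np
from astropy.io.fits import BinTableHDU
from astropy.table import Table

# Structured array standing in for self.data in the test above
data = np.array(list(zip([1, 2], ['x', 'y'])),
                dtype=[('a', int), ('b', 'U1')])
BinTableHDU(data).writeto('example.fits', overwrite=True)

# Table.read accepts an open binary file object as well as a path
with open('example.fits', 'rb') as f:
    t = Table.read(f)
assert (t['a'] == data['a']).all()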
Example 2
def trim_throughput(indir, outdir):
    '''downsample throughput files'''
    assert os.path.basename(indir) == 'throughput'
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    for targettype in ('elg', 'lrg', 'perfect', 'qso', 'sky', 'star'):
        filename = 'fiberloss-{}.dat'.format(targettype)
        shutil.copy(os.path.join(indir, filename),
                    os.path.join(outdir, filename))

    for filename in ['thru-b.fits', 'thru-r.fits', 'thru-z.fits']:
        fx = fits.open(indir + '/' + filename)
        hdus = HDUList()
        hdus.append(fx[0])
        hdus.append(BinTableHDU(fx[1].data[::20], header=fx[1].header))
        hdus.append(BinTableHDU(fx[2].data[::20], header=fx[2].header))
        hdus.writeto(outdir + '/' + filename)
        fx.close()

    for filename in [
            'DESI-0347_blur.ecsv', 'DESI-0347_offset.ecsv',
            'DESI-0347_random_offset_1.fits'
    ]:
        shutil.copy(os.path.join(indir, filename),
                    os.path.join(outdir, filename))
Example 3
    def setup_class(self):
        self.data1 = np.array(list(
            zip([1, 2, 3, 4], ['a', 'b', 'c', 'd'], [2.3, 4.5, 6.7, 8.9])),
                              dtype=[('a', int), ('b', 'U1'), ('c', float)])
        self.data2 = np.array(list(
            zip([1.4, 2.3, 3.2, 4.7], [2.3, 4.5, 6.7, 8.9])),
                              dtype=[('p', float), ('q', float)])
        hdu1 = PrimaryHDU()
        hdu2 = BinTableHDU(self.data1, name='first')
        hdu3 = BinTableHDU(self.data2, name='second')

        self.hdus = HDUList([hdu1, hdu2, hdu3])
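
A sketch of how such an HDUList is typically read back, using the `hdu` keyword of `Table.read` to select an extension by index or EXTNAME:

import numpy as np
from astropy.io.fits import BinTableHDU, HDUList, PrimaryHDU
from astropy.table import Table

data = np.array(list(zip([1.4, 2.3], [4.5, 6.7])),
                dtype=[('p', float), ('q', float)])
hdus = HDUList([PrimaryHDU(), BinTableHDU(data, name='second')])

# Select the extension to read by EXTNAME (or by integer index)
t = Table.read(hdus, hdu='second')
assert t.colnames == ['p', 'q']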
Example 4
def table_to_bintablehdu(table, extname=None):
    """
    Convert an astropy Table object to a BinTableHDU before writing to disk.

    Parameters
    ----------
    table: astropy.table.Table instance
        the table to be converted to a BinTableHDU
    extname: str
        name to go in the EXTNAME field of the FITS header

    Returns
    -------
    BinTableHDU
    """
    add_header_to_table(table)
    array = table.as_array()
    header = table.meta['header'].copy()
    if extname:
        header['EXTNAME'] = (extname, 'added by AstroData')
    coldefs = []
    for n, name in enumerate(array.dtype.names, 1):
        coldefs.append(
            Column(name=header.get('TTYPE{}'.format(n)),
                   format=header.get('TFORM{}'.format(n)),
                   unit=header.get('TUNIT{}'.format(n)),
                   null=header.get('TNULL{}'.format(n)),
                   bscale=header.get('TSCAL{}'.format(n)),
                   bzero=header.get('TZERO{}'.format(n)),
                   disp=header.get('TDISP{}'.format(n)),
                   start=header.get('TBCOL{}'.format(n)),
                   dim=header.get('TDIM{}'.format(n)),
                   array=array[name]))

    return BinTableHDU(data=FITS_rec.from_columns(coldefs), header=header)
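
A sketch of a hypothetical call site (`add_header_to_table` is an AstroData-specific helper, assumed importable from the same module, which populates `table.meta['header']` before the conversion):

from astropy.io.fits import HDUList, PrimaryHDU
from astropy.table import Table

tbl = Table({'x': [1, 2, 3], 'y': [0.1, 0.2, 0.3]})
hdu = table_to_bintablehdu(tbl, extname='MYTABLE')
HDUList([PrimaryHDU(), hdu]).writeto('mytable.fits', overwrite=True)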
Example 5
    def toFits(self, fits):
        """Write to a FITS file

        Parameters
        ----------
        fits : `astropy.io.fits.HDUList`
            Opened FITS file.
        """
        from astropy.io.fits import BinTableHDU, Column
        maxLength = (max(len(ff) for ff in self.fiberMags.keys())
                     if self.fiberMags else 1)
        header = astropyHeaderFromDict(
            {attr.upper(): getattr(self, attr)
             for attr in self._attributes})
        header.update(TargetType.getFitsHeaders())
        hdu = BinTableHDU.from_columns([
            Column("filterName", "%dA" % maxLength,
                   array=list(self.fiberMags.keys())),
            Column("fiberMag", "E",
                   array=np.array(list(self.fiberMags.values()))),
        ], header=header, name="TARGET")
        fits.append(hdu)
Example 6
def beams_to_bintable(beams):
    """
    Convert a list of beams to a CASA-style BinTableHDU
    """

    c1 = Column(name='BMAJ',
                format='1E',
                array=[bm.major.to(u.arcsec).value for bm in beams],
                unit=u.arcsec.to_string('FITS'))
    c2 = Column(name='BMIN',
                format='1E',
                array=[bm.minor.to(u.arcsec).value for bm in beams],
                unit=u.arcsec.to_string('FITS'))
    c3 = Column(name='BPA',
                format='1E',
                array=[bm.pa.to(u.deg).value for bm in beams],
                unit=u.deg.to_string('FITS'))
    c4 = Column(
        name='CHAN',
        format='1J',
        array=[bm.meta['CHAN'] if 'CHAN' in bm.meta else 0 for bm in beams])
    c5 = Column(
        name='POL',
        format='1J',
        array=[bm.meta['POL'] if 'POL' in bm.meta else 0 for bm in beams])

    bmhdu = BinTableHDU.from_columns([c1, c2, c3, c4, c5])
    bmhdu.header['EXTNAME'] = 'BEAMS'
    bmhdu.header['EXTVER'] = 1
    bmhdu.header['XTENSION'] = 'BINTABLE'
    bmhdu.header['NCHAN'] = len(beams)
    bmhdu.header['NPOL'] = len(set([bm.meta['POL'] for bm in beams]))
    return bmhdu
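
A sketch of typical usage, assuming `beams` is a list of `radio_beam.Beam`-like objects exposing `major`, `minor`, `pa`, and a `meta` dict as the function expects:

from astropy.io.fits import HDUList, PrimaryHDU

bmhdu = beams_to_bintable(beams)
HDUList([PrimaryHDU(), bmhdu]).writeto('beams.fits', overwrite=True)

# Columns are accessible through the underlying record array
print(bmhdu.data['BMAJ'], bmhdu.data['CHAN'])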
Example 7
 def test_read_with_nonstandard_units(self):
     hdu = BinTableHDU(self.data)
     hdu.columns[0].unit = 'RADIANS'
     hdu.columns[1].unit = 'spam'
     hdu.columns[2].unit = 'millieggs'
     t = Table.read(hdu)
     assert equal_data(t, self.data)
Example 8
def create_psf_table_hdu(
    psf,
    true_energy_bins,
    source_offset_bins,
    fov_offset_bins,
    extname="PSF",
    **header_cards,
):
    """
    Create a fits binary table HDU in GADF format for the PSF table.
    See the specification at
    https://gamma-astro-data-formats.readthedocs.io/en/latest/irfs/full_enclosure/psf/psf_table/index.html

    Parameters
    ----------
    psf: astropy.units.Quantity[(solid angle)^-1]
        Point spread function array, must have shape
        (n_energy_bins, n_fov_offset_bins, n_source_offset_bins)
    true_energy_bins: astropy.units.Quantity[energy]
        Bin edges in true energy
    source_offset_bins: astropy.units.Quantity[angle]
        Bin edges in the source offset.
    fov_offset_bins: astropy.units.Quantity[angle]
        Bin edges in the field of view offset.
        For Point-Like IRFs, only giving a single bin is appropriate.
    extname: str
        Name for BinTableHDU
    **header_cards
        Additional metadata to add to the header, use this to set e.g. TELESCOP or
        INSTRUME.
    """

    psf_table = QTable({
        "ENERG_LO": u.Quantity(true_energy_bins[:-1], ndmin=2).to(u.TeV),
        "ENERG_HI": u.Quantity(true_energy_bins[1:], ndmin=2).to(u.TeV),
        "THETA_LO": u.Quantity(fov_offset_bins[:-1], ndmin=2).to(u.deg),
        "THETA_HI": u.Quantity(fov_offset_bins[1:], ndmin=2).to(u.deg),
        "RAD_LO": u.Quantity(source_offset_bins[:-1], ndmin=2).to(u.deg),
        "RAD_HI": u.Quantity(source_offset_bins[1:], ndmin=2).to(u.deg),
        # transpose as FITS uses opposite dimension order
        "RPSF": psf.T[np.newaxis, ...].to(1 / u.sr),
    })

    # required header keywords
    header = DEFAULT_HEADER.copy()
    header["HDUCLAS1"] = "RESPONSE"
    header["HDUCLAS2"] = "PSF"
    header["HDUCLAS3"] = "FULL-ENCLOSURE"
    header["HDUCLAS4"] = "PSF_TABLE"
    header["DATE"] = Time.now().utc.iso
    _add_header_cards(header, **header_cards)

    return BinTableHDU(psf_table, header=header, name=extname)
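
HDUs built this way are normally appended to a primary HDU and written out together; a minimal sketch with illustrative bin edges (shapes follow the docstring above):

import astropy.units as u
import numpy as np
from astropy.io import fits

# (n_energy_bins, n_fov_offset_bins, n_source_offset_bins) = (10, 1, 20)
psf = np.full((10, 1, 20), 1e3) / u.sr
true_energy_bins = np.geomspace(0.01, 100, 11) * u.TeV
fov_offset_bins = [0, 3] * u.deg
source_offset_bins = np.linspace(0, 1, 21) * u.deg

hdu = create_psf_table_hdu(psf, true_energy_bins,
                           source_offset_bins, fov_offset_bins)
fits.HDUList([fits.PrimaryHDU(), hdu]).writeto('irf.fits', overwrite=True)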
Example 9
def create_energy_dispersion_hdu(
    energy_dispersion,
    true_energy_bins,
    migration_bins,
    fov_offset_bins,
    point_like=True,
    extname="EDISP",
    **header_cards,
):
    """
    Create a fits binary table HDU in GADF format for the energy dispersion.
    See the specification at
    https://gamma-astro-data-formats.readthedocs.io/en/latest/irfs/full_enclosure/aeff/index.html

    Parameters
    ----------
    energy_dispersion: numpy.ndarray
        Energy dispersion array, must have shape
        (n_energy_bins, n_migra_bins, n_fov_offset_bins)
    true_energy_bins: astropy.units.Quantity[energy]
        Bin edges in true energy
    migration_bins: numpy.ndarray
        Bin edges for the relative energy migration (``reco_energy / true_energy``)
    fov_offset_bins: astropy.units.Quantity[angle]
        Bin edges in the field of view offset.
        For Point-Like IRFs, only giving a single bin is appropriate.
    point_like: bool
        If the provided energy dispersion was calculated after applying a
        direction cut, pass ``True``, else ``False`` for a full-enclosure IRF.
    extname: str
        Name for BinTableHDU
    **header_cards
        Additional metadata to add to the header, use this to set e.g. TELESCOP or
        INSTRUME.
    """

    edisp = QTable(
        {
            "ENERG_LO": u.Quantity(true_energy_bins[:-1], ndmin=2).to(u.TeV),
            "ENERG_HI": u.Quantity(true_energy_bins[1:], ndmin=2).to(u.TeV),
            "MIGRA_LO": u.Quantity(migration_bins[:-1], ndmin=2).to(u.one),
            "MIGRA_HI": u.Quantity(migration_bins[1:], ndmin=2).to(u.one),
            "THETA_LO": u.Quantity(fov_offset_bins[:-1], ndmin=2).to(u.deg),
            "THETA_HI": u.Quantity(fov_offset_bins[1:], ndmin=2).to(u.deg),
            # transpose as FITS uses opposite dimension order
            "MATRIX": u.Quantity(energy_dispersion.T[np.newaxis, ...]).to(u.one),
        }
    )

    # required header keywords
    header = DEFAULT_HEADER.copy()
    header["HDUCLAS1"] = "RESPONSE"
    header["HDUCLAS2"] = "EDISP"
    header["HDUCLAS3"] = "POINT-LIKE" if point_like else "FULL-ENCLOSURE"
    header["HDUCLAS4"] = "EDISP_2D"
    header["DATE"] = Time.now().utc.iso
    _add_header_cards(header, **header_cards)

    return BinTableHDU(edisp, header=header, name=extname)
Example 10
def fake_hdulist(extver=1, version=2, timesys="TDB", telescop="KEPLER"):
    new_header = fake_header(extver, version, timesys, telescop)
    return [
        HDUList(hdus=[
            PrimaryHDU(header=new_header),
            BinTableHDU(header=new_header, name="LIGHTCURVE")
        ])
    ]
Example 11
 def test_read_with_nonstandard_units(self):
     hdu = BinTableHDU(self.data)
     hdu.columns[0].unit = 'RADIANS'
     hdu.columns[1].unit = 'spam'
     hdu.columns[2].unit = 'millieggs'
     with pytest.warns(u.UnitsWarning, match="did not parse as fits unit"):
         t = Table.read(hdu)
     assert equal_data(t, self.data)
Example 12
 def test_read_with_unit_aliases(self, table_type):
     hdu = BinTableHDU(self.data)
     hdu.columns[0].unit = 'Angstroms'
     hdu.columns[2].unit = 'ergs/(cm.s.Angstroms)'
     with u.set_enabled_aliases(dict(Angstroms=u.AA, ergs=u.erg)):
         t = table_type.read(hdu)
     assert t['a'].unit == u.AA
     assert t['c'].unit == u.erg / (u.cm * u.s * u.AA)
Example 13
def test_exposure_extra_hdu(exposure, index):

    extra_hdu = BinTableHDU(Table(rows=[[1, 2, 3]], names=["a", "b", "c"]))
    exposure.add_hdu(extra_hdu, index=index)

    hdulist = exposure.to_hdu()
    assert len(hdulist) == 2

    hdu_index = 1 if index is None else index
    assert isinstance(hdulist[hdu_index], BinTableHDU)
Example 14
    def setup_class(self):
        self.data1 = np.array(list(
            zip([1, 2, 3, 4], ['a', 'b', 'c', 'd'], [2.3, 4.5, 6.7, 8.9])),
                              dtype=[('a', int), ('b', 'U1'), ('c', float)])
        self.data2 = np.array(list(
            zip([1.4, 2.3, 3.2, 4.7], [2.3, 4.5, 6.7, 8.9])),
                              dtype=[('p', float), ('q', float)])
        self.data3 = np.array(list(zip([1, 2, 3, 4], [2.3, 4.5, 6.7, 8.9])),
                              dtype=[('A', int), ('B', float)])
        hdu0 = PrimaryHDU()
        hdu1 = BinTableHDU(self.data1, name='first')
        hdu2 = BinTableHDU(self.data2, name='second')
        hdu3 = ImageHDU(np.ones((3, 3)), name='third')
        hdu4 = BinTableHDU(self.data3)

        self.hdus = HDUList([hdu0, hdu1, hdu2, hdu3, hdu4])
        self.hdusb = HDUList([hdu0, hdu3, hdu2, hdu1])
        self.hdus3 = HDUList([hdu0, hdu3, hdu2])
        self.hdus2 = HDUList([hdu0, hdu1, hdu3])
        self.hdus1 = HDUList([hdu0, hdu1])
Example 15
def trim_quickpsf(indir, outdir, filename):
    assert os.path.abspath(indir) != os.path.abspath(outdir)
    infile = os.path.join(indir, filename)
    outfile = os.path.join(outdir, filename)
    fx = fits.open(infile)
    hdus = HDUList()
    hdus.append(fx[0])
    for i in [1, 2, 3]:
        d = fx[i].data
        hdus.append(BinTableHDU(d[::10], header=fx[i].header))
    hdus.writeto(outfile, overwrite=True)
    fx.close()
Example 16
def create_rad_max_hdu(
    rad_max,
    reco_energy_bins,
    fov_offset_bins,
    point_like=True,
    extname="RAD_MAX",
    **header_cards,
):
    """
    Create a fits binary table HDU in GADF format for the directional cut.
    See the specification at
    https://gamma-astro-data-formats.readthedocs.io/en/latest/irfs/full_enclosure/aeff/index.html

    Parameters
    ----------
    rad_max: astropy.units.Quantity[angle]
        Array of the directional (theta) cut.
        Must have shape (n_reco_energy_bins, n_fov_offset_bins)
    reco_energy_bins: astropy.units.Quantity[energy]
        Bin edges in reconstructed energy
    fov_offset_bins: astropy.units.Quantity[angle]
        Bin edges in the field of view offset.
        For Point-Like IRFs, only giving a single bin is appropriate.
    extname: str
        Name for BinTableHDU
    **header_cards
        Additional metadata to add to the header, use this to set e.g. TELESCOP or
        INSTRUME.
    """
    rad_max_table = QTable({
        "ENERG_LO": u.Quantity(reco_energy_bins[:-1], ndmin=2).to(u.TeV),
        "ENERG_HI": u.Quantity(reco_energy_bins[1:], ndmin=2).to(u.TeV),
        "THETA_LO": u.Quantity(fov_offset_bins[:-1], ndmin=2).to(u.deg),
        "THETA_HI": u.Quantity(fov_offset_bins[1:], ndmin=2).to(u.deg),
        # transpose as FITS uses opposite dimension order
        "RAD_MAX": rad_max.T[np.newaxis, ...].to(u.deg),
    })

    # required header keywords
    header = DEFAULT_HEADER.copy()
    header["HDUCLAS1"] = "RESPONSE"
    header["HDUCLAS2"] = "RAD_MAX"
    header["HDUCLAS3"] = "POINT-LIKE"
    header["HDUCLAS4"] = "RAD_MAX_2D"
    header["DATE"] = Time.now().utc.iso
    _add_header_cards(header, **header_cards)

    return BinTableHDU(rad_max_table, header=header, name=extname)
Example 17
def create_background_2d_hdu(
    background_2d,
    reco_energy_bins,
    fov_offset_bins,
    extname="BACKGROUND",
    **header_cards,
):
    """
    Create a fits binary table HDU in GADF format for the background 2d table.
    See the specification at
    https://gamma-astro-data-formats.readthedocs.io/en/latest/irfs/full_enclosure/bkg/index.html#bkg-2d

    Parameters
    ----------
    background_2d: astropy.units.Quantity[(MeV s sr)^-1]
        Background rate, must have shape
        (n_energy_bins, n_fov_offset_bins)
    reco_energy_bins: astropy.units.Quantity[energy]
        Bin edges in reconstructed energy
    fov_offset_bins: astropy.units.Quantity[angle]
        Bin edges in the field of view offset.
    extname: str
        Name for BinTableHDU
    **header_cards
        Additional metadata to add to the header, use this to set e.g. TELESCOP or
        INSTRUME.
    """

    bkg = QTable({
        "ENERG_LO": u.Quantity(reco_energy_bins[:-1], ndmin=2).to(u.TeV),
        "ENERG_HI": u.Quantity(reco_energy_bins[1:], ndmin=2).to(u.TeV),
        "THETA_LO": u.Quantity(fov_offset_bins[:-1], ndmin=2).to(u.deg),
        "THETA_HI": u.Quantity(fov_offset_bins[1:], ndmin=2).to(u.deg),
        # transpose as FITS uses opposite dimension order
        "BKG": background_2d.T[np.newaxis, ...].to(GADF_BACKGROUND_UNIT),
    })

    # required header keywords
    header = DEFAULT_HEADER.copy()
    header["HDUCLAS1"] = "RESPONSE"
    header["HDUCLAS2"] = "BKG"
    header["HDUCLAS3"] = "FULL-ENCLOSURE"
    header["HDUCLAS4"] = "BKG_2D"
    header["DATE"] = Time.now().utc.iso
    _add_header_cards(header, **header_cards)

    return BinTableHDU(bkg, header=header, name=extname)
Example 18
def append_fits_entry(base_rec, new_entry):
    '''
    Helper function to append a row to a FITS_rec object.
    Based on astropy documentation here:
    https://docs.astropy.org/en/stable/io/fits/usage/table.html

    '''
    from astropy.io.fits import BinTableHDU
    
    old_rows = base_rec.shape[0]
    new_rows = old_rows + 1
    new_rec = BinTableHDU.from_columns(base_rec.columns, nrows=new_rows)
    new_rec.data[old_rows:] = new_entry
    return new_rec.data
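
A sketch of a call, under the assumption that assigning a plain tuple into the new FITS_rec rows behaves as in the astropy documentation the helper cites:

from astropy.io.fits import BinTableHDU, Column

base = BinTableHDU.from_columns([
    Column(name='x', format='E', array=[1.0, 2.0]),
    Column(name='y', format='J', array=[10, 20]),
]).data

extended = append_fits_entry(base, (3.0, 30))
assert extended.shape[0] == 3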
Example 19
def create_aeff2d_hdu(
    effective_area,
    true_energy_bins,
    fov_offset_bins,
    extname="EFFECTIVE AREA",
    point_like=True,
    **header_cards,
):
    """
    Create a fits binary table HDU in GADF format for effective area.
    See the specification at
    https://gamma-astro-data-formats.readthedocs.io/en/latest/irfs/full_enclosure/aeff/index.html

    Parameters
    ----------
    effective_area: astropy.units.Quantity[area]
        Effective area array, must have shape (n_energy_bins, n_fov_offset_bins)
    true_energy_bins: astropy.units.Quantity[energy]
        Bin edges in true energy
    fov_offset_bins: astropy.units.Quantity[angle]
        Bin edges in the field of view offset.
        For Point-Like IRFs, only giving a single bin is appropriate.
    point_like: bool
        If the provided effective area was calculated after applying a direction cut,
        pass ``True``, else ``False`` for a full-enclosure effective area.
    extname: str
        Name for BinTableHDU
    **header_cards
        Additional metadata to add to the header, use this to set e.g. TELESCOP or
        INSTRUME.
    """
    aeff = QTable()
    aeff["ENERG_LO"] = u.Quantity(true_energy_bins[:-1], ndmin=2).to(u.TeV)
    aeff["ENERG_HI"] = u.Quantity(true_energy_bins[1:], ndmin=2).to(u.TeV)
    aeff["THETA_LO"] = u.Quantity(fov_offset_bins[:-1], ndmin=2).to(u.deg)
    aeff["THETA_HI"] = u.Quantity(fov_offset_bins[1:], ndmin=2).to(u.deg)
    # transpose because FITS uses opposite dimension order than numpy
    aeff["EFFAREA"] = effective_area.T[np.newaxis, ...].to(u.m**2)

    # required header keywords
    header = DEFAULT_HEADER.copy()
    header["HDUCLAS1"] = "RESPONSE"
    header["HDUCLAS2"] = "EFF_AREA"
    header["HDUCLAS3"] = "POINT-LIKE" if point_like else "FULL-ENCLOSURE"
    header["HDUCLAS4"] = "AEFF_2D"
    header["DATE"] = Time.now().utc.iso
    _add_header_cards(header, **header_cards)

    return BinTableHDU(aeff, header=header, name=extname)
Example 20
    def _writeImpl(self, fits):
        """Implementation for writing to FITS file

        Parameters
        ----------
        fits : `astropy.io.fits.HDUList`
            List of FITS HDUs. This has a Primary HDU already, the header of
            which may be supplemented with additional keywords.
        """
        from astropy.io.fits import BinTableHDU, Column
        fits.append(BinTableHDU.from_columns([
            Column("wavelength", "D", array=self.wavelength),
            Column("flux", "D", array=self.flux),
            Column("mask", "K", array=self.mask),
        ], header=astropyHeaderFromDict(self.flags.toFitsHeader()), name="FLUXTBL"))
        self.target.toFits(fits)
Example 21
    def toFits(self, fits):
        """Write to a FITS file

        Parameters
        ----------
        fits : `astropy.io.fits.HDUList`
            Opened FITS file.
        """
        from astropy.io.fits import BinTableHDU, Column
        identityLength = max(len(str(ident)) for ident in self.identity)
        hdu = BinTableHDU.from_columns([
            Column("identity", "%dA" % identityLength, array=self.identity),
            Column("fiberId", "K", array=self.fiberId),
            Column("pfiNominal", "2D", array=self.pfiNominal),
            Column("pfiCenter", "2D", array=self.pfiCenter),
        ], name="OBSERVATIONS")
        fits.append(hdu)
Example 22
    def toFits(self, fits):
        """Write to a FITS file

        Parameters
        ----------
        fits : `astropy.io.fits.HDUList`
            Opened FITS file.
        """
        from astropy.io.fits import BinTableHDU, Column
        header = self.flags.toFitsHeader()
        hdu = BinTableHDU.from_columns([
            Column("wavelength", "E", array=self.wavelength),
            Column("flux", "E", array=self.flux),
            Column("error", "E", array=self.error),
            Column("mask", "K", array=self.mask),
        ], header=astropyHeaderFromDict(header), name=self._hduName)
        fits.append(hdu)
Example 23
    def toFits(self, fits):
        """Write to a FITS file

        Parameters
        ----------
        fits : `astropy.io.fits.HDUList`
            Opened FITS file.
        """
        from astropy.io.fits import BinTableHDU, Column
        armLength = max(len(arm) for arm in self.arm)
        columns = [
            Column("visit", "J", array=self.visit),
            Column("arm", f"{armLength}A", array=self.arm),
            Column("spectrograph", "J", array=self.spectrograph),
            Column("fiberId", "J", array=self.fiberId),
            Column("pfsDesignId", "K", array=self.pfsDesignId),
            Column("pfiNominal", "2E", array=self.pfiNominal),
            Column("pfiCenter", "2E", array=self.pfiCenter),
        ]
        hdu = BinTableHDU.from_columns(columns, name="OBSERVATIONS")
        fits.append(hdu)
Example 24
 def time_from_columns_bytes(self):
     x = np.repeat(b'a', 2_000_000)
     array = np.array(x, dtype=[('col', 'S1')])
     BinTableHDU.from_columns(array)
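
This is an asv-style benchmark method (the `time_` prefix); a standalone sketch of the same measurement using `timeit`:

import timeit
import numpy as np
from astropy.io.fits import BinTableHDU

x = np.repeat(b'a', 2_000_000)
array = np.array(x, dtype=[('col', 'S1')])

# Time the conversion of a large structured array into a BinTableHDU
print(timeit.timeit(lambda: BinTableHDU.from_columns(array), number=3))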
Example 25
def write_to_fits(output, chi2, sampler, nwalkers, thin, params, jfree,
                  metadata, meta_names, iternum, nwritten,
                  Nobsbins, array, BinTableHDU, Column, ctime, enumerate,
                  isfile, izip, transpose, xrange):
    nexclude = len(chi2)
    lnprior, lnPderived, chi2, lnlike = chi2
    if isfile(output):
        remove(output)
    chain = transpose(sampler.chain, axes=(2,1,0))
    columns = [Column(name=param, format='E', array=data[:iternum].flatten())
               for param, data in izip(params[jfree], chain)]
    columns.append(Column(name='lnprob', format='E',
                          array=sampler.lnprobability.T[:iternum].flatten()))
    if len(meta_names) > 0:
        # save only the last chunk (starting from t),
        # all others are already in metadata.
        # NOTE that this is only implemented for a model with the
        # same format as fiducial()
        for j, blob in izip(xrange(nwritten, iternum),
                            sampler.blobs[nwritten:]):
            data = [transpose([b[i] for b in blob])
                    for i in xrange(len(blob[0])-nexclude)]
            # re-arrange blobs
            if Nobsbins == 1:
                for i in xrange(len(data)):
                    if len(data[i].shape) == 2:
                        data[i] = array([b[i] for b in blob])
            else:
                for i in xrange(len(data)):
                    if len(data[i].shape) == 3:
                        data[i] = transpose([b[i] for b in blob],
                                            axes=(1,0,2))
            # store data
            for k in xrange(len(data)):
                for i in xrange(len(data[k])):
                    metadata[k][i][j*nwalkers:(j+1)*nwalkers] = data[k][i]
            lnPderived[j*nwalkers:(j+1)*nwalkers] = array([b[-4]
                                                           for b in blob])
            lnprior[j*nwalkers:(j+1)*nwalkers] = array([b[-3] for b in blob])
            chi2[j*nwalkers:(j+1)*nwalkers] = array([b[-2] for b in blob])
            lnlike[j*nwalkers:(j+1)*nwalkers] = array([b[-1] for b in blob])
        columns.append(Column(name='lnprior', format='E', array=lnprior))
        columns.append(Column(name='lnPderived', format='E',
                              array=lnPderived))
        columns.append(Column(name='chi2', format='E', array=chi2))
        columns.append(Column(name='lnlike', format='E', array=lnlike))
        # this handles exclude_bins properly
        for name, val in izip(meta_names, metadata):
            for name_i, val_i in izip(name, val):
                try:
                    fmt = '{0}E'.format(val_i.shape[1])
                except IndexError:
                    fmt = 'E'
                columns.append(Column(name=name_i, array=val_i, format=fmt))
        nwritten = iternum * nwalkers
    fitstbl = BinTableHDU.from_columns(columns)
    fitstbl.writeto(output)
    print('Saved to {0} with {1} samples'.format(output, iternum*nwalkers),
          end=' ')
    if thin > 1:
        print('(printing every {0}th sample)'.format(thin), end=' ')
    print('- {0}'.format(ctime()))
    return metadata, nwritten
Example 26
def write_table_fits(input, output, overwrite=False):
    """
    Write a Table object to a FITS file

    Parameters
    ----------
    input : Table
        The table to write out.
    output : str
        The filename to write the table to.
    overwrite : bool
        Whether to overwrite any existing file without warning.
    """

    # Check if output file already exists
    if isinstance(output, six.string_types) and os.path.exists(output):
        if overwrite:
            os.remove(output)
        else:
            raise IOError("File exists: {0}".format(output))

    # Create a new HDU object
    if input.masked:
        table_hdu = BinTableHDU(np.array(input.filled()))
        for col in table_hdu.columns:
            # The astype is necessary because if the string column is less
            # than one character, the fill value will be N/A by default which
            # is too long, and so no values will get masked.
            fill_value = input[col.name].get_fill_value()
            col.null = fill_value.astype(input[col.name].dtype)
    else:
        table_hdu = BinTableHDU(np.array(input))

    # Set units for output HDU
    for col in table_hdu.columns:
        if input[col.name].units is not None:
            col.unit = input[col.name].units.to_string(format='fits')

    for key, value in input.meta.items():

        if is_column_keyword(key.upper()) or key.upper() in REMOVE_KEYWORDS:

            log.warn("Meta-data keyword {0} will be ignored since it "
                     "conflicts with a FITS reserved keyword".format(key))

        if isinstance(value, list):
            for item in value:
                try:
                    table_hdu.header.append((key, item))
                except ValueError:
                    log.warn("Attribute `{0}` of type {1} cannot be written "
                             "to FITS files - skipping".format(
                                 key, type(value)))
        else:
            try:
                table_hdu.header[key] = value
            except ValueError:
                log.warn("Attribute `{0}` of type {1} cannot be written to "
                         "FITS files - skipping".format(key, type(value)))

    # Write out file
    table_hdu.writeto(output)
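
A minimal call, assuming the module-level dependencies of this era of the code (six, log, is_column_keyword, REMOVE_KEYWORDS) are in place:

import numpy as np
from astropy.table import Table

t = Table({'a': np.arange(3), 'b': [1.5, 2.5, 3.5]})
t.meta['OBSERVER'] = 'example'  # copied into the FITS header
write_table_fits(t, 'table.fits', overwrite=True)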
Example 27
def main():

    usage = "usage: %(prog)s [archive file]"
    description = "Build the extended archive from the master archive YAML file."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--outname', default=None, required=True)

    parser.add_argument('--vernum', default=0, required=True)

    parser.add_argument('masterfile',
                        help='Extended archive master YAML file.')

    args = parser.parse_args()

    npar_max = 5
    with open(args.masterfile) as f:
        sources = yaml.safe_load(f)
    cols = [
        Column(name='Source_Name', format='18A'),
        Column(name='RAJ2000', format='E', unit='deg', disp='F8.4'),
        Column(name='DEJ2000', format='E', unit='deg', disp='F8.4'),
        Column(name='GLON', format='E', unit='deg', disp='F8.4'),
        Column(name='GLAT', format='E', unit='deg', disp='F8.4'),
        Column(name='Photon_Flux', format='E', unit='ph cm-2 s-1',
               disp='E8.2'),
        Column(name='Energy_Flux',
               format='E',
               unit='erg cm-2 s-1',
               disp='E8.2'),
        Column(name='Model_Form', format='12A'),
        Column(name='Model_SemiMajor', format='E', unit='deg', disp='E7.3'),
        Column(name='Model_SemiMinor', format='E', unit='deg', disp='E7.3'),
        Column(name='Model_PosAng', format='E', unit='deg', disp='E6.1'),
        Column(name='Spatial_Function', format='15A'),
        Column(name='Spatial_Filename', format='50A'),
        Column(name='Spectral_Function', format='12A'),
        Column(name='Spectral_Filename', format='40A'),
        Column(name='Name_1FGL', format='18A'),
        Column(name='Name_2FGL', format='18A'),
        Column(name='Name_3FGL', format='18A'),
        Column(name='Spectral_Param_Name', format='45A9'),
        Column(name='Spectral_Param_Value',
               format='E',
               dim=str(npar_max),
               disp='E9.4'),
        Column(name='Spectral_Param_Error',
               format='E',
               dim=str(npar_max),
               disp='E9.4'),
        Column(name='Spectral_Param_Scale', format='E', dim=str(npar_max)),
    ]

    for c in cols:
        c.array = build_column_array(c.name, sources, npar_max)

    record = FITS_rec.from_columns(cols)
    record.sort(order="RAJ2000")

    outdir = args.outname + "_v" + args.vernum
    mkdir(outdir)

    fitsname = "LAT_extended_sources_v" + args.vernum + ".fits"
    output = BinTableHDU(record)
    output.writeto(os.path.join(outdir, fitsname), overwrite=True)

    xmldir = os.path.join(outdir, 'XML')
    mkdir(xmldir)

    for k, v in sources.items():
        xmlpath = os.path.join(xmldir,
                               v['Source_Name'].replace(' ', '') + '.xml')
        to_xml(xmlpath, v['Source_Name'], v)