Example #1
    def write(self, outfile):
        """Write the livetime cube to a FITS file."""

        hdu_pri = fits.PrimaryHDU()

        hdu_exp = self._create_exp_hdu(self.data)
        hdu_exp.name = 'EXPOSURE'
        hdu_exp_wt = self._create_exp_hdu(self._data_wt)
        hdu_exp_wt.name = 'WEIGHTED_EXPOSURE'

        cols = [Column(name='CTHETA_MIN', dtype='f4',
                       data=self.costh_edges[:-1][::-1]),
                Column(name='CTHETA_MAX',  dtype='f4',
                       data=self.costh_edges[1:][::-1]), ]
        hdu_bnds = fits.table_to_hdu(Table(cols))
        hdu_bnds.name = 'CTHETABOUNDS'

        hdu_gti = fits.table_to_hdu(self._tab_gti)
        hdu_gti.name = 'GTI'

        hdus = [hdu_pri, hdu_exp, hdu_exp_wt,
                hdu_bnds, hdu_gti]

        for hdu in hdus:
            hdu.header['TSTART'] = self.tstart
            hdu.header['TSTOP'] = self.tstop

        with fits.HDUList(hdus) as hdulist:
            # 'clobber' was removed in astropy 2.0; 'overwrite' is the replacement
            hdulist.writeto(outfile, overwrite=True)
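Note: astropy removed the clobber keyword in v2.0; overwrite is the portable spelling used in the corrected call above. A minimal, self-contained sketch of the same table_to_hdu pattern (the file name and bin edges here are illustrative, not taken from the example):

import numpy as np
from astropy.io import fits
from astropy.table import Table, Column

# Bounds table analogous to the CTHETABOUNDS extension above.
costh_edges = np.linspace(0.2, 1.0, 41)
cols = [Column(name='CTHETA_MIN', dtype='f4', data=costh_edges[:-1][::-1]),
        Column(name='CTHETA_MAX', dtype='f4', data=costh_edges[1:][::-1])]
hdu_bnds = fits.table_to_hdu(Table(cols))
hdu_bnds.name = 'CTHETABOUNDS'

fits.HDUList([fits.PrimaryHDU(), hdu_bnds]).writeto('bounds.fits', overwrite=True)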
Example #2
    def writefits(self, filename, z, lmu):
        """
        Write optical depth to a fits file using 
        the astropy table environment. 

        Parameters
        ----------
        filename: str,
             full file path for output fits file

        z: `~numpy.ndarray` or list
            source redshift, m-dimensional

        lmu: `~numpy.ndarray` or list
            wavelenghts in micrometer, n-dimensional
        """
        t = Table([z, self.ebl_array(z, lmu)], names=('REDSHIFT', 'EBL_DENS'))
        t2 = Table()
        t2['WAVELENGTH'] = Column(lmu, unit='micrometer')

        hdulist = fits.HDUList(
            [fits.PrimaryHDU(),
             fits.table_to_hdu(t),
             fits.table_to_hdu(t2)])

        hdulist[1].name = 'NUINU_VS_Z'
        hdulist[2].name = 'WAVELENGTHS'

        hdulist.writeto(filename, overwrite=True)
        return
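Reading the two named extensions back is symmetric; Table.read accepts the EXTNAME through its hdu keyword. A hedged sketch, assuming a file written by the method above ('ebl.fits' is a placeholder filename):

from astropy.table import Table

t = Table.read('ebl.fits', hdu='NUINU_VS_Z')
t2 = Table.read('ebl.fits', hdu='WAVELENGTHS')
print(t.colnames)             # ['REDSHIFT', 'EBL_DENS']
print(t2['WAVELENGTH'].unit)  # carried through the TUNIT card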
Example #3
def make_hdulist_fits_file(width=3):
    pri_hdu = fits.PrimaryHDU()
    meta = {"ECAT": 1, "EDATA": 2}
    pri_hdu.header.update(meta)

    # names = ["description", "extension_id", "aperture_id", "image_plane_id"]
    # data = [["IJ", "HK"], [2, 3], [0, 0], [0, 0]]
    # cat_tbl = Table(names=names, data=data)
    # cat_hdu = fits.table_to_hdu(cat_tbl)
    # cat_hdu.header["EXTNAME"] = "CAT_TRAC"

    # ij_tbl = make_trace_table(0.75, 1.5, width)
    # ij_hdu = fits.table_to_hdu(ij_tbl)
    # ij_hdu.header["EXTNAME"] = "IJ_TRACE"
    #
    # hk_tbl = make_trace_table(1.40, 2.5, width)
    # hk_hdu = fits.table_to_hdu(hk_tbl)
    # hk_hdu.header["EXTNAME"] = "HK_TRACE"
    #
    # hdu_list = fits.HDUList([pri_hdu, cat_hdu, ij_hdu, hk_hdu])

    names = ["description", "extension_id", "aperture_id", "image_plane_id"]
    data = [["IJHK"], [2], [0], [0]]
    cat_tbl = Table(names=names, data=data)
    cat_hdu = fits.table_to_hdu(cat_tbl)
    cat_hdu.header["EXTNAME"] = "CAT_TRAC"

    ijhk_tbl = make_trace_table(0.75, 2.5, width)
    ijhk_hdu = fits.table_to_hdu(ijhk_tbl)
    ijhk_hdu.header["EXTNAME"] = "IJHK"

    hdu_list = fits.HDUList([pri_hdu, cat_hdu, ijhk_hdu])

    return hdu_list
Example #4
    def write_catalog(self, f: Any, *args: Any, **kwargs: Any) -> None:
        """Write catalog to file object."""
        if self.catalog is None:
            return

        # create HDU and write it
        table_to_hdu(self.catalog).writeto(f, *args, **kwargs)
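fits.table_to_hdu returns an ordinary BinTableHDU, so its writeto accepts any file-like object; that is what lets write_catalog pass f straight through. A small sketch with made-up catalog contents:

import io
from astropy.io import fits
from astropy.table import Table

catalog = Table({'ra': [10.1, 10.2], 'dec': [-1.0, -1.1]})
buf = io.BytesIO()
fits.table_to_hdu(catalog).writeto(buf)  # astropy prepends a minimal primary HDU
print(len(buf.getvalue()) % 2880 == 0)   # FITS blocks are 2880 bytes -> True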
Example #5
    def save(self):
        """
        Saves sensitivity to self.sensfile
        """

        # Write to outfile
        msgs.info('Writing sensitivity function results to file: {:}'.format(
            self.sensfile))

        # Standard init
        hdr = initialize_header()

        hdr['PYP_SPEC'] = (self.spectrograph.spectrograph,
                           'PypeIt: Spectrograph name')
        hdr['PYPELINE'] = self.spectrograph.pypeline
        #   - List the completed steps
        hdr['STEPS'] = (','.join(self.steps), 'Completed sensfunc steps')
        #   - Provide the file names
        hdr['SPC1DFIL'] = self.spec1dfile

        # Write the fits file
        data = [self.wave_sens, self.sensfunc]
        extnames = ['WAVE', 'SENSFUNC']
        hdulist = fits.HDUList(
            [fits.PrimaryHDU(header=hdr)] +
            [fits.ImageHDU(data=d, name=n) for d, n in zip(data, extnames)])
        hdu_meta = fits.table_to_hdu(self.meta_table)
        hdu_meta.name = 'METADATA'
        hdu_out = fits.table_to_hdu(self.out_table)
        hdu_out.name = 'OUT_TABLE'
        hdulist.append(hdu_meta)
        hdulist.append(hdu_out)
        hdulist.writeto(self.sensfile, overwrite=True, checksum=True)
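Any of the named extensions written here can be recovered without touching the others. A sketch, assuming a file produced by save() ('sensfunc.fits' stands in for self.sensfile):

from astropy.io import fits
from astropy.table import Table

meta_table = Table.read('sensfunc.fits', hdu='METADATA')
with fits.open('sensfunc.fits', checksum=True) as hdul:
    wave = hdul['WAVE'].data            # the ImageHDU written above
    print(hdul[0].header['PYP_SPEC'])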
Example #6
    def writefits(self, filename, z, ETeV):
        """
	Write optical depth to a fits file using 
	the astropy table environment. 

	Parameters
	----------
	filename: str,
	     full file path for output fits file
	z: `~numpy.ndarray` or list
	    source redshift, m-dimensional

	ETeV: `~numpy.ndarray` or list
	    Energies in TeV, n-dimensional
	"""
        t = Table([z, self.opt_depth(z, ETeV)],
                  names=('REDSHIFT', 'OPT_DEPTH'))
        t2 = Table()
        t2['ENERGY'] = Column(ETeV * 1e3, unit='GeV')

        hdulist = fits.HDUList(
            [fits.PrimaryHDU(),
             fits.table_to_hdu(t),
             fits.table_to_hdu(t2)])

        hdulist[1].name = 'TAU_VS_Z'
        hdulist[2].name = 'ENERGIES'

        hdulist.writeto(filename, overwrite=True)
        return
Example #8
    def test_table_non_stringifyable_unit_to_hdu(self):
        table = Table([[1, 2, 3], ['a', 'b', 'c'], [2.3, 4.5, 6.7]],
                      names=['a', 'b', 'c'], dtype=['i', 'U1', 'f'])
        table['a'].unit = u.core.IrreducibleUnit("test")

        with pytest.warns(AstropyUserWarning, match="The unit 'test' could not be saved") as w:
            fits.table_to_hdu(table)
        assert len(w) == 1
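The warning fires because IrreducibleUnit('test') has no FITS string representation; a unit that parses under the FITS standard is written to a TUNITn card silently. A sketch of the quiet path:

import astropy.units as u
from astropy.io import fits
from astropy.table import Table

table = Table([[1, 2, 3]], names=['a'])
table['a'].unit = u.km / u.s
hdu = fits.table_to_hdu(table)
print(hdu.header['TUNIT1'])  # FITS-style unit string, e.g. 'km s-1'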
Example #9
    def write(self, outfile, compress=False):

        cols = [
            Column(name='E_MIN',
                   dtype='f8',
                   data=self._energy_bins[None, :-1],
                   unit='MeV'),
            Column(name='E_MAX',
                   dtype='f8',
                   data=self._energy_bins[None, 1:],
                   unit='MeV'),
            Column(name='COSTHETA_MIN',
                   dtype='f8',
                   data=self._ctheta_bins[None, :-1]),
            Column(name='COSTHETA_MAX',
                   dtype='f8',
                   data=self._ctheta_bins[None, 1:]),
            Column(name='SEP_MIN',
                   dtype='f8',
                   data=self._sep_bins[None, :, :-1],
                   unit='deg'),
            Column(name='SEP_MAX',
                   dtype='f8',
                   data=self._sep_bins[None, :, 1:],
                   unit='deg')
        ]

        tab0 = Table(cols)

        cols = []
        for k, v in sorted(self.hists.items()):
            cols += [Column(name=k, dtype='f8', data=v)]

        tab1 = Table(cols)

        cols = []
        for k, v in sorted(self._hists_eff.items()):
            cols += [Column(name=k, dtype='f8', data=v)]

        tab2 = Table(cols)

        hdulist = fits.HDUList([
            fits.PrimaryHDU(),
            fits.table_to_hdu(tab1),
            fits.table_to_hdu(tab2),
            fits.table_to_hdu(tab0)
        ])

        hdulist[1].name = 'DATA'
        hdulist[2].name = 'EFFICIENCY'
        hdulist[3].name = 'AXES'
        hdulist[0].header['DATATYPE'] = self._type

        if compress:
            fp = gzip.GzipFile(outfile + '.gz', 'wb')
            hdulist.writeto(fp, overwrite=True)
        else:
            hdulist.writeto(outfile, overwrite=True)
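Wrapping the output in gzip.GzipFile works, but astropy also compresses transparently when the target filename ends in .gz, which avoids managing the file handle. A hedged alternative:

from astropy.io import fits
from astropy.table import Table

hdulist = fits.HDUList([fits.PrimaryHDU(),
                        fits.table_to_hdu(Table({'x': [1.0, 2.0]}))])
# gzip compression is inferred from the suffix; no GzipFile needed
hdulist.writeto('hists.fits.gz', overwrite=True)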
Example #10
    def test_table_non_stringifyable_unit_to_hdu(self):
        table = Table([[1, 2, 3], ['a', 'b', 'c'], [2.3, 4.5, 6.7]],
                      names=['a', 'b', 'c'],
                      dtype=['i', 'U1', 'f'])
        table['a'].unit = u.core.IrreducibleUnit("test")

        with catch_warnings() as w:
            fits.table_to_hdu(table)
            assert len(w) == 1
            assert str(
                w[0].message).startswith("The unit 'test' could not be saved")
Example #11
def make_trace_hdulist(trace_ids=None, traces=None):
    """
    Makes a trace-list FITS file

    Default Traces which can be included are:
    - 0: A basic trace straight down the middle of a 4096x4096 chip
    - 1: A bi-directionally sheared trace in the bottom left of the chip
    - 2: A rotated trace in the top left of the chip
    - 3: A bent trace in the top right of the chip
    - 4: A short straight horizontal trace in the centre right of the chip
    - 5: A sheared trace extending off the chip to the bottom right

    Parameters
    ----------
    trace_ids : list, optional
        Default is None. List of indexes of Traces that should be included in
        the FITS file
    traces : list of Tables
        Default is None. If not None, a list of Trace tables.

    Returns
    -------
    hdulist : fits.HDUList

    """
    if traces is None:
        traces = [trace_0(), trace_1(), trace_2(),
                  trace_3(), trace_4(), trace_5()]
    if trace_ids is None:
        trace_ids = np.arange(len(traces))
    traces = [traces[i] for i in trace_ids]

    pri_hdu = fits.PrimaryHDU()
    pri_hdu.header["ECAT"] = 1
    pri_hdu.header["EDATA"] = 2

    id_hdu = fits.table_to_hdu(id_table(trace_ids))

    tr_hdus = [fits.table_to_hdu(trace) for trace in traces]
    for hdu in tr_hdus:
        hdu.header["WAVECOLN"] = "wavelength"
        hdu.header["SLITPOSN"] = "s"

    hdulist = fits.HDUList([pri_hdu, id_hdu] + tr_hdus)

    if PLOTS:
        plt.figure(figsize=(15, 7))
        plt.subplot(121)
        plot_traces(traces)
        plt.subplot(122)
        plot_traces_mm(traces)
        plt.show()

    return hdulist
Example #12
def write_templates(filename, flux, wave, target, truth, objtruth):
    """Write galaxy templates to a FITS file.

    Parameters
    ----------
    filename : str
        Path to output file.
    flux : ndarray
        Array of flux data for template spectra.
    wave : ndarray
        Array of wavelengths.
    target : Table
        Target information.
    truth : Table
        Template simulation truth.
    objtruth : Table
        Object-specific truth data.
    """
    import astropy.units as u
    from astropy.io import fits

    hx = fits.HDUList()

    # Write the wavelength table.
    hdu_wave = fits.PrimaryHDU(wave)
    hdu_wave.header['EXTNAME'] = 'WAVE'
    hdu_wave.header['BUNIT'] = 'Angstrom'
    hdu_wave.header['AIRORVAC'] = ('vac', 'Vacuum wavelengths')
    hx.append(hdu_wave)

    # Write the flux table.
    fluxunits = 1e-17 * u.erg / (u.s * u.cm**2 * u.Angstrom)
    hdu_flux = fits.ImageHDU(flux)
    hdu_flux.header['EXTNAME'] = 'FLUX'
    hdu_flux.header['BUNIT'] = str(fluxunits)
    hx.append(hdu_flux)

    # Write targets table.
    hdu_targets = fits.table_to_hdu(target)
    hdu_targets.header['EXTNAME'] = 'TARGETS'
    hx.append(hdu_targets)

    # Write truth table.
    hdu_truth = fits.table_to_hdu(truth)
    hdu_truth.header['EXTNAME'] = 'TRUTH'
    hx.append(hdu_truth)

    # Write objtruth table.
    hdu_objtruth = fits.table_to_hdu(objtruth)
    hdu_objtruth.header['EXTNAME'] = 'OBJTRUTH'
    hx.append(hdu_objtruth)

    print('Writing {}'.format(filename))
    hx.writeto(filename, overwrite=True)
Example #13
def run_wide_query(outfil, R_MAX=15.*u.Mpc):
    # 15 cMpc, no vetting
    qso_fg, qso_bg = qpq_query.run_query(Z_MIN=1.715, Z_MAX=6.0,
                                         R_MIN=0.0*u.Mpc, R_MAX=R_MAX,  # co-moving
                                         VEL_MIN=2000.0*u.km/u.s)
    # Write
    prihdu = fits.PrimaryHDU()
    fhdu = fits.table_to_hdu(qso_fg)
    bhdu = fits.table_to_hdu(qso_bg)
    thdulist = fits.HDUList([prihdu, fhdu, bhdu])
    thdulist.writeto(outfil, overwrite=True)
Example #14
    def _write_source_list(self,
            p_sourcelist,   # Name of output FITS table with source info
            p_fitsimg,      # Name of input FITS image that was processed
            p_max_sources,  # Max number of sources for output table
            hdr,            # FITS header of input image
            kw_dict,        # Dictionary of keyword values to add to sourcelist header
            src_table,      # Astropy table of source photometry
            psf_table):     # None or Astropy table of source PSF fitting.
        """Write detected source information to a FITS table with info on the
        original data file.

        This function also:
        - Prints a summary of values useful for astrometry.net at INFO level.
        """

        # Create an X and Y table for use with astrometry.net,
        # adding 1 to get FITS-like pixel indices
        self._logger.debug('Converting python 0-based coordinates to FITS 1-based pixel coordinates for XY table.')
        x = src_table['xcenter'] + 1.0 * u.Unit('pix')
        y = src_table['ycenter'] + 1.0 * u.Unit('pix')
        xy_table = Table([x, y],
            names=('X', 'Y'))

        # Currently just write trimmed FITS-format XY pos and merged position/photometry
        self._logger.info('Writing source list to FITS binary table {}'.format(p_sourcelist))

        pri_hdr = self._create_primary_header(kw_dict)
        pri_hdu = fits.PrimaryHDU(header=pri_hdr)

        tbl_hdu1 = fits.table_to_hdu(xy_table)
        tbl_hdu1.header['EXTNAME'] = 'AP_XYPOS'
        tbl_hdu1.header['COMMENT'] = 'Uses FITS 1-based pixel coordinate system.'

        tbl_hdu2 = fits.table_to_hdu(src_table)
        tbl_hdu2.header['EXTNAME'] = 'AP_L1MAG'
        tbl_hdu2.header['COMMENT'] = 'Aperture photometry using photutils within ApFindStars.'
        tbl_hdu2.header['COMMENT'] = 'Uses python 0-based pixel coordinate system.'

        # Always add these to the output file as we know they're present
        hdus_to_append = [pri_hdu, tbl_hdu1, tbl_hdu2]

        if psf_table is not None:
            tbl_hdu3 = fits.table_to_hdu(psf_table)
            tbl_hdu3.header['EXTNAME'] = 'AP_L1PSF'
            tbl_hdu3.header['COMMENT'] = 'PSF characterization using ApMeasureStars.'
            tbl_hdu3.header['COMMENT'] = 'Uses python 0-based pixel coordinate system.'
            hdus_to_append.append(tbl_hdu3)

        hdu_list = fits.HDUList(hdus_to_append)
        hdu_list.writeto(p_sourcelist, overwrite=True)

        return
Example #15
File: ogip.py Project: hamogu/ARCUS
    def write(self, rmffile, overwrite=False, TLMIN=None):
        '''
        Parameters
        ----------
        TLMIN : int or `None`
            If `None` then the TLMIN# keywords should already be set; if not,
            this is a convenient way to set them on writing.
        '''
        _check_col_and_type(self.matrix, self.matrix_required_cols)
        _check_col_and_type(self.ebounds, self.ebounds_required_cols)
        matrix_tlmin = 'TLMIN{}'.format(
            self.matrix.colnames.index('F_CHAN') + 1)
        eb_tlmin = 'TLMIN{}'.format(self.ebounds.colnames.index('CHANNEL') + 1)
        if TLMIN is not None:
            self.matrix.meta[matrix_tlmin] = TLMIN
            self.ebounds.meta[eb_tlmin] = TLMIN
        _check_mandatory_keywords(self.matrix,
                                  ['CHANTYPE', 'DETCHANS', matrix_tlmin])
        _check_mandatory_keywords(self.ebounds,
                                  ['CHANTYPE', 'DETCHANS', eb_tlmin])
        self.matrix.meta['EXTNAME'] = 'MATRIX'
        self.matrix.meta['HDUCLASS'] = 'OGIP'
        self.matrix.meta['HDUVERS'] = '1.1.0'
        self.matrix.meta['HDUCLAS1'] = 'RESPONSE'
        self.matrix.meta['HDUCLAS2'] = 'RSP_MATRIX'
        # further required keywords: CHANTYPE (PI or PHA), DETCHANS, TLMIN# (# is column number of F_CHAN)
        # Recommended keywords: NUMGRP, NUMELT

        # EBOUNDS
        self.ebounds.meta['EXTNAME'] = 'EBOUNDS'
        self.ebounds.meta['HDUCLASS'] = 'OGIP'
        self.ebounds.meta['HDUCLAS1'] = 'RESPONSE'
        self.ebounds.meta['HDUCLAS2'] = 'EBOUNDS'
        self.ebounds.meta['HDUVERS'] = '1.2.0'

        # Here I'm assuming that if MATRIX is an object (a list) then
        # F_CHAN and N_CHAN are also. That's the case when this class
        # is used to make them, but might not be true in general if
        # read in from a file.
        if self.matrix['MATRIX'].dtype == object:  # np.object was removed in NumPy 1.24
            self.variable_length_to_fixed_length()

        hdulist = fits.HDUList([
            fits.PrimaryHDU(),
            fits.table_to_hdu(self.matrix),
            fits.table_to_hdu(self.ebounds)
        ])

        hdulist.writeto(rmffile, overwrite=overwrite, checksum=True)
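fits.table_to_hdu historically could not serialize a ragged object-dtype column directly (newer astropy versions add variable-length support), which is why the class converts MATRIX to fixed length first. A sketch of the padding idea under that assumption (pad_ragged is illustrative, not the class's variable_length_to_fixed_length):

import numpy as np

def pad_ragged(rows, fill=0.0):
    # Pad a list of 1-D arrays to a rectangular float array.
    width = max(len(r) for r in rows)
    out = np.full((len(rows), width), fill)
    for i, r in enumerate(rows):
        out[i, :len(r)] = r
    return out

print(pad_ragged([np.array([1.0]), np.array([2.0, 3.0])]))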
Example #16
def create_weather(outfile):
    from astropy.table import Table
    import astropy.io.fits as af

    nstep = 101
    TQI = [np.linspace(0, 0.5, nstep) for x in range(24)]
    TQL = [np.linspace(0, 0.1, nstep) for x in range(24)]
    TQV = [np.linspace(1, 12, nstep) for x in range(24)]
    QV10M = [np.linspace(0.001, 0.007, nstep) for x in range(24)]
    PS = [np.linspace(58600, 59000, nstep) for x in range(24)]
    TS = [np.linspace(270, 280, nstep) for x in range(24)]
    T10M = [np.linspace(270, 280, nstep) for x in range(24)]
    U10M = [np.linspace(-2, 8, nstep) for x in range(24)]
    V10M = [np.linspace(-1, 3, nstep) for x in range(24)]

    hdus = af.HDUList([af.PrimaryHDU()])

    for mon in range(12):
        tab = Table(
            [TQI, TQL, TQV, QV10M, PS, TS, T10M, U10M, V10M],
            names=("TQI", "TQL", "TQV", "QV10M", "PS", "TS", "T10M", "U10M", "V10M"),
            meta={
                "PROBSTRT": 0.0,
                "PROBSTOP": 1.0,
                "PROBSTEP": 0.01,
                "NSTEP": nstep,
                "MONTH": mon,
            },
        )
        hdus.append(af.table_to_hdu(tab))

    hdus.writeto(outfile, overwrite=True)

    return
Example #17
    def save(self, filename, comments=None):
        """save the instance in a fits file

        Parameters
        ----------
        filename : string
            file path and name to store the data
        comments : string, optional
            comments for the header of primary HDU
        """
        # primary hdu
        header = fits.Header()
        for i, edges in enumerate(self._edges_list):
            header["SHAPE_%02d" % i] = len(edges) - 1
        if comments is not None:
            header["COMMENTS"] = comments
        primary_hdu = fits.PrimaryHDU(header=header)
        # data
        data_hdu = fits.ImageHDU(data=self._data)
        # edges
        hdu_list = [primary_hdu, data_hdu]
        for i, edges in enumerate(self._edges_list):
            edges_df = pd.DataFrame({"edges_%02d" % i: edges})
            edges_hdu = fits.table_to_hdu(Table.from_pandas(edges_df))
            hdu_list.append(edges_hdu)
        # write to file
        hdul = fits.HDUList(hdu_list)
        hdul.writeto(filename)
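Each edges array lands in its own table extension after the image HDU, so reading the file back means walking HDUs 2..N. A sketch assuming the layout written by save() ('hist.fits' is a placeholder filename):

from astropy.io import fits
from astropy.table import Table

with fits.open('hist.fits') as hdul:
    data = hdul[1].data                  # the ImageHDU payload
    edges = [Table(hdul[i].data)['edges_%02d' % (i - 2)].data
             for i in range(2, len(hdul))]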
Example #18
def make_instrument_extension(sed_table, instlist, version):
    """
    Make the instrument extension for a MEGA-MUSCLES file, containing translations of the instrument codes and the list of component file names
    
    """
    telescopes = [
        instruments.getinststr(inst)[0:3].upper() for inst in instlist
    ]
    instrus = [instruments.getinststr(inst)[4:7].upper() for inst in instlist]
    gratings = [
        instruments.getinststr(inst)[8:].replace('-----', 'na').upper()
        for inst in instlist
    ]
    target = sed_table.meta['TARGNAME']
    filenames = [
        'hlsp_muscles_{}_{}_{}_{}_v{}_component-spec.fits'.format(
            tel, inst, target, grating, version).lower()
        for tel, inst, grating in zip(telescopes, instrus, gratings)
    ]
    data = Table(
        [instlist, telescopes, instrus, gratings, filenames],
        names=['BITVALUE', 'TELESCOPE', 'INSTRUMENT', 'GRATING', 'HLSP_FILE'])
    hdu = fits.table_to_hdu(data)

    comment = 'This extension is a legend for the integer identifiers in the instrument column of the '\
              'spectrum extension. Instruments are identified by bitwise flags so that any combination of '\
              'instruments contributing to the data within a spectral element can be identified together. '\
              'For example, if instruments 4 and 16 (100 and 10000 in binary) both contribute to the data '\
              'in a bin, then that bin will have the value 20, or 10100 in binary, to signify that both '\
              'instruments 4 and 16 have contributed. This is identical to the handling of bitwise data '\
              'quality flags. Note that polynomial fits for filling spectral gaps were not saved as separate spectra.'

    hdu.header.append(('COMMENT', comment))
    return hdu
Example #19
    def test_write_append(self, tmpdir):

        t = Table(self.data)
        hdu = table_to_hdu(t)

        def check_equal(filename, expected, start_from=1):
            with fits.open(filename) as hdu_list:
                assert len(hdu_list) == expected
                for hdu_table in hdu_list[start_from:]:
                    assert hdu_table.header == hdu.header
                    assert np.all(hdu_table.data == hdu.data)

        filename = str(tmpdir.join('test_write_append.fits'))
        t.write(filename, append=True)
        t.write(filename, append=True)
        check_equal(filename, 3)

        # Check the overwrite works correctly.
        t.write(filename, append=True, overwrite=True)
        t.write(filename, append=True)
        check_equal(filename, 3)

        # Normal write, check it's not appending.
        t.write(filename, overwrite=True)
        t.write(filename, overwrite=True)
        check_equal(filename, 2)

        # Now write followed by append, with different shaped tables.
        t2 = Table(np.array([1, 2]))
        t2.write(filename, overwrite=True)
        t.write(filename, append=True)
        check_equal(filename, 3, start_from=2)
        assert equal_data(t2, Table.read(filename, hdu=1))
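The behaviour under test is plain Table.write with append=True: the first write creates a primary HDU plus one table HDU, and each append adds another table HDU. A minimal sketch:

from astropy.io import fits
from astropy.table import Table

t = Table({'a': [1, 2, 3]})
t.write('stack.fits', overwrite=True)  # primary HDU + one BinTableHDU
t.write('stack.fits', append=True)     # appends a second BinTableHDU
with fits.open('stack.fits') as hdul:
    print(len(hdul))                   # 3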
Example #20
    def _make_extension_fits(self, ext, filename, **kwargs):

        maps = {
            'EXT_TOT_MAP': ext['ext_tot_map'],
            'EXT_SRC_MAP': ext['ext_src_map'],
            'EXT_BKG_MAP': ext['ext_bkg_map'],
            'PTSRC_TOT_MAP': ext['ptsrc_tot_map'],
            'PTSRC_SRC_MAP': ext['ptsrc_src_map'],
            'PTSRC_BKG_MAP': ext['ptsrc_bkg_map']
        }

        hdu_images = []
        for k, v in sorted(maps.items()):
            if v is None:
                continue
            hdu_images += [v.make_hdu(k)]

        tab = fits_utils.dict_to_table(ext)
        hdu_data = fits.table_to_hdu(tab)
        hdu_data.name = 'EXT_DATA'

        if ext.get('tsmap'):
            hdus = [ext['tsmap'].make_hdu(hdu='PRIMARY')]
        else:
            hdus = [fits.PrimaryHDU()]

        hdus += [hdu_data] + hdu_images
        hdus[0].header['CONFIG'] = json.dumps(utils.tolist(ext['config']))
        hdus[1].header['CONFIG'] = json.dumps(utils.tolist(ext['config']))
        fits_utils.write_hdus(hdus,
                              filename,
                              keywords={'SRCNAME': ext['name']})
Example #21
    def _make_lc_fits(self, lc, filename, **kwargs):

        # produce columns in fits file
        cols = OrderedDict()
        cols['tmin'] = Column(name='tmin', dtype='f8',
                              data=lc['tmin'], unit='s')
        cols['tmax'] = Column(name='tmax', dtype='f8',
                              data=lc['tmax'], unit='s')
        cols['tmin_mjd'] = Column(name='tmin_mjd', dtype='f8',
                                  data=lc['tmin_mjd'], unit='day')
        cols['tmax_mjd'] = Column(name='tmax_mjd', dtype='f8',
                                  data=lc['tmax_mjd'], unit='day')

        # add in columns for model parameters
        for k, v in lc.items():
            if k in cols:
                continue
            if isinstance(v, np.ndarray):
                cols[k] = Column(name=k, data=v, dtype=v.dtype)

        tab = Table(cols.values())
        hdu_lc = fits.table_to_hdu(tab)
        hdu_lc.name = 'LIGHTCURVE'
        hdus = [fits.PrimaryHDU(), hdu_lc]
        keywords = {'SRCNAME': lc['name'],
                    'CONFIG': json.dumps(lc['config'])}
        fits_utils.write_hdus(hdus, filename, keywords=keywords)
Example #23
def save_to_fits(posterior, outdir, label):
    """ Generate a fits file from a posterior array """
    from astropy.io import fits
    from astropy.units import pixel
    from astropy.table import Table
    import healpy as hp
    nside = hp.get_nside(posterior)
    npix = hp.nside2npix(nside)
    logger.debug('Generating table')
    m = Table([posterior], names=['PROB'])
    m['PROB'].unit = pixel**-1

    ordering = 'RING'
    extra_header = [
        ('PIXTYPE', 'HEALPIX', 'HEALPIX pixelisation'),
        ('ORDERING', ordering,
         'Pixel ordering scheme: RING, NESTED, or NUNIQ'),
        ('COORDSYS', 'C', 'Ecliptic, Galactic or Celestial (equatorial)'),
        ('NSIDE', hp.npix2nside(npix), 'Resolution parameter of HEALPIX'),
        ('INDXSCHM', 'IMPLICIT', 'Indexing: IMPLICIT or EXPLICIT')
    ]

    fname = '{}/{}_{}.fits'.format(outdir, label, nside)
    hdu = fits.table_to_hdu(m)
    hdu.header.extend(extra_header)
    hdulist = fits.HDUList([fits.PrimaryHDU(), hdu])
    logger.debug('Writing to a fits file')
    hdulist.writeto(fname, overwrite=True)
Example #24
def save_to_fits(data, metadata, dataset_hdu, savepath, version):
    """
    Saves to a MUSCLES-standard fits file
    """
    if not os.path.exists(savepath):
        os.mkdir(savepath)
    file_name = make_component_filename(metadata, version)
    hdr = fits.Header(metadata)
    primary_hdu = fits.PrimaryHDU(header=hdr)
    hdu = fits.table_to_hdu(Table(data))
    descriptions = [
        'midpoint of the wavelength bin',
        'left/blue edge of the wavelength bin',
        'right/red edge of the wavelength bin', 'average flux over the bin',
        'error on the flux', 'cumulative exposure time for the bin',
        'data quality flags (HST data only)',
        'modified julian date of start of first exposure',
        'modified julian date of end of last exposure'
    ]
    hdu.header.insert(8, ('EXTNAME', 'SPECTRUM'))
    hdu.header.insert(9, ('EXTNO', 2))
    for i, desc in enumerate(descriptions):
        hdu.header.insert(i + 10, ('TDESC%s' % i, desc))
    hdul = fits.HDUList([primary_hdu, hdu, dataset_hdu])
    hdul.writeto(savepath + file_name + '.fits', overwrite=True)
    print('Spectrum saved as ' + file_name + '.fits')
Example #25
    def _create_exp_hdu(self, data):

        pix_skydir = self.hpx.get_sky_dirs()
        cols = [
            Column(name='COSBINS',
                   unit='s',
                   dtype='f4',
                   data=data.T[:, ::-1],
                   shape=(len(self.costh_center), )),
            Column(name='RA', unit='deg', dtype='f4', data=pix_skydir.ra.deg),
            Column(name='DEC', unit='deg', dtype='f4', data=pix_skydir.dec.deg)
        ]

        hdu_exp = fits.table_to_hdu(Table(cols))
        hdu_exp.header['THETABIN'] = 'SQRT(1-COSTHETA)'
        hdu_exp.header['NBRBINS'] = len(self.costh_center)
        hdu_exp.header['COSMIN'] = self.costh_edges[0]
        hdu_exp.header['ZENMIN'] = self.zmin
        hdu_exp.header['ZENMAX'] = self.zmax
        hdu_exp.header['NDSKEYS'] = 0
        hdu_exp.header['PHIBINS'] = 0

        header = self.hpx.make_header()
        hdu_exp.header.update(header)

        return hdu_exp
Example #26
def make_data_ext(table):
    """
    The table extension, takes an astroy table 
    """
    hdu = fits.table_to_hdu(table)
    hdu.header = data_header(hdu.header)
    return hdu
Example #27
    def run(self) :
        """
        Main function.
        This stage:
        - Reduces the raw catalog by imposing quality cuts, a cut on i-band magnitude and a star-galaxy separation cut.
        - Produces mask maps, dust maps, depth maps and star density maps.
        """

        #Read list of files
        f=open(self.get_input('raw_data'))
        files=[s.strip() for s in f.readlines()]
        f.close()

        #Read catalog
        cat=Table.read(files[0])
        if len(files)>1 :
            for fname in files[1:] :
                c=Table.read(fname)
                cat=vstack([cat,c],join_type='exact')

        logger.info('Initial catalog size: %d'%(len(cat)))
            
        # Clean nulls and nans
        logger.info("Applying basic quality cuts.")
        sel=np.ones(len(cat),dtype=bool)
        names=[n for n in cat.keys()]
        isnull_names=[]
        for key in cat.keys() :
            if 'isnull' in key :
                sel[cat[key]]=0
                isnull_names.append(key)
            else:
                if not key.startswith("pz_") : #Keep photo-z's even if they're NaNs
                    sel[np.isnan(cat[key])]=0
                logger.info("Will drop %d rows"%(len(sel)-np.sum(sel)))
        cat.remove_columns(isnull_names)
        cat.remove_rows(~sel)

        fsk=FlatMapInfo.from_coords(cat['ra'],cat['dec'],self.config['res'],
                                    pad=self.config['pad']/self.config['res'],
                                    projection=self.config['flat_project'],
                                    move_equator=self.config['shift_to_equator'])

        ####
        # Generate flatmap info
        flatmap_info_descr = 'FlatmapInfo'
        fsk.write_flat_map(self.get_output('flatmap_info'), np.ones(fsk.npix),
                           descript=flatmap_info_descr)

        ####
        # Write final catalog
        # 1- header
        logger.info("Writing cleaned catalog.")
        hdr=fits.Header()
        prm_hdu=fits.PrimaryHDU(header=hdr)
        # 2- Catalog
        cat_hdu=fits.table_to_hdu(cat)
        # 3- Actual writing
        hdul=fits.HDUList([prm_hdu,cat_hdu])
        hdul.writeto(self.get_output('clean_catalog'), overwrite=True)
Example #28
def cluster(X, dimensions, coords, wcs):
    if X.dtype is np.dtype(object):
        X = X.astype(float)  # np.float was removed in NumPy 1.24
    X = X[np.isfinite(X).all(axis=1)]
    mask = np.zeros(dimensions)
    db = DBSCAN(eps=5, min_samples=3).fit(X)
    cluster_table = make_cluster_table()
    X_ra_dec = wcs.wcs_pix2world(X, 1)
    for k in set(db.labels_):
        if k > -1:
            idx = db.labels_ == k
            cluster_table['count'].append(idx.sum())
            m = X[idx].mean(axis=0)
            cluster_table['x'].append(m[0])
            cluster_table['y'].append(m[1])
            cov = np.cov(X[idx].T)
            cluster_table['var_x'].append(cov[0, 0])
            cluster_table['var_y'].append(cov[1, 1])
            cluster_table['var_x_y'].append(cov[0, 1])
            a, b, theta = cov_to_ellipse_params(cov, m, nstd=2)
            if (a > 0) and (b > 0):
                inside = inside_ellipse(coords, m, a, b,
                                        theta).reshape(*dimensions)
                mask += inside * idx.sum()
            # now calculate center and cov in RA DEC
            m_ra_dec = X_ra_dec[idx].mean(axis=0)
            cluster_table['ra'].append(m_ra_dec[0])
            cluster_table['dec'].append(m_ra_dec[1])
            cov_ra_dec = np.cov(X_ra_dec[idx].T)
            cluster_table['var_ra'].append(cov_ra_dec[0, 0])
            cluster_table['var_dec'].append(cov_ra_dec[1, 1])
            cluster_table['var_ra_dec'].append(cov_ra_dec[0, 1])
    return mask, fits.table_to_hdu(Table(cluster_table))
Example #29
    def writeFits(self, filename):
        """Write calibration data to a FITS file.

        Parameters
        ----------
        filename : `str`
            Filename to write data to.

        Returns
        -------
        used : `str`
            The name of the file used to write the data.

        """
        tableList = self.toTable()
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore",
                                    category=Warning,
                                    module="astropy.io")
            astropyList = [fits.table_to_hdu(table) for table in tableList]
            astropyList.insert(0, fits.PrimaryHDU())

            writer = fits.HDUList(astropyList)
            writer.writeto(filename, overwrite=True)
        return filename
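The blanket filter above silences the unit warnings that the earlier tests exercise deliberately; a filter keyed to the message text is narrower. A sketch:

import warnings
from astropy.io import fits
from astropy.table import Table

table = Table({'a': [1.0]})
table['a'].unit = 'not-a-unit'
with warnings.catch_warnings():
    warnings.filterwarnings('ignore', message=".*did not parse.*")
    hdu = fits.table_to_hdu(table)  # no warning surfaces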
Example #30
    def test_table_to_hdu_filter_reserved(self):
        """
        Regression test for https://github.com/astropy/astropy/issues/9387
        """
        diag = 'be ignored since it conflicts with a FITS reserved keyword'
        ins_cards = {
            'EXPTIME': 32.1,
            'XTENSION': 'NEWTABLE',
            'NAXIS': 1,
            'NAXIS1': 3,
            'NAXIS2': 9,
            'PCOUNT': 42,
            'OBSERVER': 'Adams'
        }
        table = Table([[1, 2, 3], ['a', 'b', 'c'], [2.3, 4.5, 6.7]],
                      names=['a', 'b', 'c'],
                      dtype=['i4', 'U1', 'f8'])
        table.meta.update(ins_cards)

        with pytest.warns(AstropyUserWarning,
                          match=rf'Meta-data keyword \w+ will {diag}') as w:
            hdu = fits.table_to_hdu(table)

        # This relies on the warnings being raised in the order of the
        # meta dict (note that the first and last card are legitimate keys)
        assert len(w) == len(ins_cards) - 2
        for i, key in enumerate(list(ins_cards)[1:-1]):
            assert f'Meta-data keyword {key}' in str(w[i].message)

        assert hdu.header.get('XTENSION') == 'BINTABLE'
        assert hdu.header.get('NAXIS') == 2
        assert hdu.header.get('NAXIS1') == 13
        assert hdu.header.get('NAXIS2') == 3
        assert hdu.header.get('PCOUNT') == 0
        np.testing.assert_almost_equal(hdu.header.get('EXPTIME'), 3.21e1)
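The filtering only applies to structural keywords; ordinary table.meta entries land in the header verbatim, which is what the first and last cards in the test rely on. A sketch of the benign case:

from astropy.io import fits
from astropy.table import Table

table = Table({'a': [1, 2, 3]})
table.meta['OBSERVER'] = 'Adams'  # non-reserved keyword: kept
hdu = fits.table_to_hdu(table)
print(hdu.header['OBSERVER'])     # 'Adams'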
Example #31
def test_from_hdulist2():
    tablehdu = fits.table_to_hdu(Table([[1]]))
    tablehdu.name = 'REFCAT'

    hdul = fits.HDUList([
        fits.PrimaryHDU(header=fits.Header({'INSTRUME': 'FISH'})),
        fits.ImageHDU(data=np.zeros(10), name='SCI', ver=1),
        fits.ImageHDU(data=np.ones(10), name='VAR', ver=1),
        fits.ImageHDU(data=np.zeros(10, dtype='uint16'), name='DQ', ver=1),
        tablehdu,
        fits.BinTableHDU.from_columns(
            [fits.Column(array=['a', 'b'], format='A', name='col')],
            ver=1,
        ),  # This HDU will be skipped because it has no EXTNAME
    ])

    with pytest.warns(UserWarning,
                      match='Skip HDU .* because it has no EXTNAME'):
        ad = astrodata.open(hdul)

    assert len(ad) == 1
    assert ad.phu['INSTRUME'] == 'FISH'
    assert_array_equal(ad[0].data, 0)
    assert_array_equal(ad[0].variance, 1)
    assert_array_equal(ad[0].mask, 0)
    assert len(ad.REFCAT) == 1
    assert ad.exposed == {'REFCAT'}
    assert ad[0].exposed == {'REFCAT'}
Example #33
    def save_datatables(self, filepath, **kwargs):
        """Save all of the `Table` objects in this object to a file

        Parameters
        ----------
        filepath : `str`
            The file to save it to
        kwargs
            Passed to write functions

        Raises
        ------
        ValueError : If the output file type is not known.
        """
        extype = os.path.splitext(filepath)[1]
        if extype in HDF5_SUFFIXS:
            for key, val in self._table_dict.items():
                val.write(filepath, path=key, **kwargs)
        elif extype in FITS_SUFFIXS:
            if self._primary is None:
                hlist = [fits.PrimaryHDU()]
            else:
                hlist = [self._primary]
            for key, val in self._table_dict.items():
                hdu = fits.table_to_hdu(val)
                hdu.name = key
                hlist.append(hdu)
            hdulist = fits.HDUList(hlist)
            hdulist.writeto(filepath, overwrite=True, **kwargs)
        else:
            raise ValueError(
                "Can only write hdf5 and fits files for now, not %s" %
                extype)
Example #34
File: slow.py Project: OSSOS/MOP
def main(expnum, ccd):


    header = storage.get_astheader(expnum, ccd)
    datasec = storage.datasec_to_list(header.get('DATASEC', '[80:2080,30,4160]'))
    try:
        fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
    except Exception:
        fwhm = 'unknown'
    for keyword in del_keyword_list:
        try:
            del header[keyword]
        except KeyError:
            pass
    header['FWHM'] = (fwhm, 'FWHM in pixels')
    header['EXTNAME'] = 'header'
    primary_hdu = fits.PrimaryHDU(header=header)
    hdu_list = fits.HDUList([primary_hdu, ])
    for ext in ['jmp', 'matt']:
        extension = 'obj.'+ext
        name = "{}p{:02d}.{}".format(expnum, ccd, extension)
        try:
            os.unlink(name)
            os.unlink(name+".fits")
        except OSError:
            pass
        logging.info("Retrieving {}".format(name))
        obj_file = mop_file.Parser(expnum, ccd, extension)
        obj_file.parse()
        
        t = numpy.all([datasec[0] < obj_file.data['X'], obj_file.data['X'] < datasec[1],
                       datasec[2] < obj_file.data['Y'], obj_file.data['Y'] < datasec[3]], axis=0)
        logging.info("Source remaining after datasec cut: {} of {}".format(len(obj_file.data[t]['X']), len(t)))
        table_hdu = fits.table_to_hdu(obj_file.data[t])
        table_hdu.header['CATALOG'] = name
        table_hdu.header['EXTNAME'] = ext
        hdu_list.append(table_hdu)
        del table_hdu
        del obj_file
        os.unlink(name)

    name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
    if os.access(name, os.F_OK):
        os.unlink(name)
    hdu_list.writeto(name)
    uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
    logging.info(name+" -> "+uri)
    count = 0
    while True:
        print("Copy attempt {}".format(count))
        try:
            storage.copy(name, uri)
            os.unlink(name)
            break
        except Exception as ex:
            if count > 10:
                raise ex
            count += 1
Example #35
    def test_table_to_hdu_convert_comment_convention(self):
        """
        Regression test for https://github.com/astropy/astropy/issues/6079
        """
        table = Table([[1, 2, 3], ['a', 'b', 'c'], [2.3, 4.5, 6.7]],
                      names=['a', 'b', 'c'], dtype=['i', 'U1', 'f'])
        table.meta['comments'] = ['This', 'is', 'a', 'comment']
        hdu = fits.table_to_hdu(table)

        assert hdu.header.get('comment') == ['This', 'is', 'a', 'comment']
        with pytest.raises(ValueError):
            hdu.header.index('comments')
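table.meta['comments'] holding a list is the astropy convention that maps onto COMMENT cards; a scalar under any other key would become an ordinary card instead. A sketch:

from astropy.io import fits
from astropy.table import Table

table = Table({'a': [1]})
table.meta['comments'] = ['This', 'is', 'a', 'comment']
hdu = fits.table_to_hdu(table)
print(list(hdu.header['COMMENT']))  # ['This', 'is', 'a', 'comment']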
Example #36
    def write_fits(self, filepath, **kwargs):
        # replace nan/inf values in meta dict with -999.
        for k, v in self.meta.items():
            if np.isnan(v) or np.isinf(v):
                self.meta[k] = -999.

        # construct fits Primary HDU
        prihdr = fits.Header()
        prihdr['AUTHOR'] = 'Bo Zhang'
        for k, v in self.meta.items():
            prihdr[k] = v
        prihdu = fits.PrimaryHDU(header=prihdr)

        # construct fits HDU list
        hdulist = fits.HDUList([prihdu,
                                fits.table_to_hdu(self.syn_model),
                                fits.table_to_hdu(self.syn_spec)])
        if os.path.exists(filepath):
            print('[StarlightOuput.write_fits()]: filepath exists: %s'
                  % filepath)
        hdulist.writeto(filepath, **kwargs)
Example #38
    def _make_localize_fits(self, loc, filename, **kwargs):

        tab = fits_utils.dict_to_table(loc)
        hdu_data = fits.table_to_hdu(tab)
        hdu_data.name = 'LOC_DATA'

        hdus = [loc['tsmap_peak'].create_primary_hdu(),
                loc['tsmap'].create_image_hdu('TSMAP'),
                hdu_data]

        hdus[0].header['CONFIG'] = json.dumps(loc['config'])
        hdus[2].header['CONFIG'] = json.dumps(loc['config'])
        fits_utils.write_hdus(hdus, filename)
Example #39
    def test_table_to_hdu(self):
        table = Table([[1, 2, 3], ['a', 'b', 'c'], [2.3, 4.5, 6.7]],
                      names=['a', 'b', 'c'], dtype=['i', 'U1', 'f'])
        table['a'].unit = 'm/s'
        table['b'].unit = 'not-a-unit'

        with catch_warnings() as w:
            hdu = fits.table_to_hdu(table)
            assert len(w) == 1
            assert str(w[0].message).startswith("'not-a-unit' did not parse as"
                                                " fits unit")

        # Check that TUNITn cards appear in the correct order
        # (https://github.com/astropy/astropy/pull/5720)
        assert hdu.header.index('TUNIT1') < hdu.header.index('TTYPE2')

        assert isinstance(hdu, fits.BinTableHDU)
        filename = self.temp('test_table_to_hdu.fits')
        hdu.writeto(filename, overwrite=True)
Example #40
    def _make_tsmap_fits(self, data, filename, **kwargs):

        maps = {'SQRT_TS_MAP': data['sqrt_ts'],
                'NPRED_MAP': data['npred'],
                'N_MAP': data['amplitude']}

        hdu_images = []
        for k, v in sorted(maps.items()):
            if v is None:
                continue
            hdu_images += [v.create_image_hdu(k)]
        
        tab = fits_utils.dict_to_table(data)
        hdu_data = fits.table_to_hdu(tab)
        hdu_data.name = 'TSMAP_DATA'

        hdus = [data['ts'].create_primary_hdu(),
                hdu_data] + hdu_images

        hdus[0].header['CONFIG'] = json.dumps(data['config'])
        hdus[1].header['CONFIG'] = json.dumps(data['config'])        
        fits_utils.write_hdus(hdus, filename)
Example #41
def VestergaardWilkes01_Fe(fepath=None):
    if fepath is None:
        fepath = 'VW01_Fe'
    rv = []
    for fn in ['Fe_UVtemplt_B.asc','Fe2_UV191.asc','Fe3_UV47.asc',
               'Fe_UVOPT_V01_T06_BR92.asc']:
        wave,flux = np.loadtxt(os.path.join(fepath,fn),unpack=True)
        # continuum parameters given in VW01 pg. 6
        a_nu = -1.9
        fcont1 = 3.45e-14 * (wave/1500.)**(-2-a_nu)
        a_nu = -1.0
        fcont2 = 3.89e-14 * (wave/1500.)**(-2-a_nu)
        fcont = np.choose(wave>1716,[fcont1,fcont2])
        flux /= fcont
        tmplname = fn.replace('.asc','')
        if 'UVOPT' not in tmplname:
            tmplname = 'VW01_'+tmplname
        tab = Table(dict(wave=wave,f_lambda=flux))
        hdu = fits.table_to_hdu(tab)
        hdu.name = tmplname
        rv.append(hdu)
    return rv
Example #43
def VandenBerkSDSSCompositeTemplate():
    all_lines = Table.read('VandenBerk2001_AJ122_549_table2.txt',
                           format='ascii')
    hdu = fits.table_to_hdu(all_lines)
    hdu.name = 'VdB01CompEmLines'
    return hdu
Example #44
def update_caldb_leapsec(NewLeapSecondDate, NewLeapSecond, updater="MFC", outdir=".", clobber=True):
    """
    Given the date of a new leapsecond (for example 2017-01-01T00:00:00)
    creates a new leapsecond file for transfer to the caldb

    NewLeapSecondDate = date of new leap second in ISOT format YYYY-MM-DDTHH:MM:SS
    NewLeapSecond = amount of new leap second (usually 1.0)

    writes a new FITS file to the current working directory by default
    """
    from astropy.io import fits as pyfits
    from astropy.time import Time
    from astropy.table import Table
    import ftputil
    import time
    #
    # this block retrieves the latest leapsecond file from /FTP/caldb/data/gen/bcf
    # based on the leapsecond file naming convention, "leapsec_mmddyy.fits"
    #
    LSdir = "FTP/caldb/data/gen/bcf/"
    host = ftputil.FTPHost('heasarc.gsfc.nasa.gov', "anonymous", "*****@*****.**")
    genbcf = host.listdir(LSdir)  # get directory listing
    host.close()

    LeapsecFileList = [f for f in genbcf if 'leapsec' in f]
    LeapsecFileYear = [y.split("_")[1].split(".fits")[0][4:6] for y in LeapsecFileList]
    LeapsecFileYear = [('19' + y if int(y) > 50 else '20' + y) for y in LeapsecFileYear]
    LeapsecFileMonth = [m.split("_")[1].split(".fits")[0][2:4] for m in LeapsecFileList]
    LeapsecFileDay = [d.split("_")[1].split(".fits")[0][0:2] for d in LeapsecFileList]

    maxjd = 0.0

    for i in range(len(LeapsecFileList)):
        tiso = LeapsecFileYear[i] + "-" + LeapsecFileMonth[i] + "-" + LeapsecFileDay[i]
        fjd = Time(tiso).jd
        if fjd > maxjd:
            maxjd = fjd
            LatestLSF = LeapsecFileList[i]
    print("Latest Leapsecond File = {0}".format(LatestLSF))

    hdu = pyfits.open("http://heasarc.gsfc.nasa.gov/" + LSdir + "/" + LatestLSF) # open the file

    orig_header = hdu[1].header
    mjdref = orig_header['MJDREF']

    UpdateDate = time.strftime('%Y-%m-%d %H:%M:%S')
    outfile = 'leapsec_' + NewLeapSecondDate[8:10] + NewLeapSecondDate[5:7] + NewLeapSecondDate[2:4] + '.fits'


    #
    # create new row for new leapsecond information
    #
    newdate = NewLeapSecondDate.split('T')[0]
    newtime = NewLeapSecondDate.split('T')[1]
    newmjd = Time(NewLeapSecondDate, format='isot').mjd
    newsecs = (newmjd - mjdref) * 86400
    newLS = NewLeapSecond
    NewLeapSecondRow = [newdate, newtime, newmjd, newsecs, newLS]
    #
    # append new leapsecond to table and write to output file
    #
    tbdata = hdu[1].data
    t = Table(tbdata)  # convert hdu data to a python Table to add new row
    t.add_row(NewLeapSecondRow)  # add row of data
    hdunew = pyfits.table_to_hdu(t)  # convert table back to hdu (with minimal header)

    hdunew.columns.change_unit('SECONDS', 's') # table_to_hdu doesn't seem to preserve the Unit
    hdunew.columns.change_unit('LEAPSECS', 's') # table_to_hdu doesn't seem to preserve the Unit

    hdunew.header = orig_header  # use header from original file
    hdunew.header['COMMENT'] = UpdateDate+": "+updater+" ADDED "+NewLeapSecondDate+" LEAP SECOND"
    hdunew.header['HISTORY'] = "File modified by user "+updater+" on "+UpdateDate
    pyfits.writeto(outdir + "/" + outfile, hdunew.data, hdunew.header, overwrite=clobber, checksum=True)
    return outfile
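The two change_unit calls patch up units that the Table round trip dropped; attaching units to the Table columns before converting makes table_to_hdu write the TUNITn cards itself. A hedged standalone sketch (the column values are made up):

from astropy.io import fits
from astropy.table import Table

t = Table({'SECONDS': [3786825604.0], 'LEAPSECS': [1.0]})
t['SECONDS'].unit = 's'     # set units on the Table side...
t['LEAPSECS'].unit = 's'
hdu = fits.table_to_hdu(t)  # ...so TUNIT cards come out directly
print(hdu.header['TUNIT1'], hdu.header['TUNIT2'])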
Example #45
def run_flux_sensitivity(**kwargs):

    index = kwargs.get('index', 2.0)
    sedshape = kwargs.get('sedshape', 'PowerLaw')
    cutoff = kwargs.get('cutoff', 1e3)
    curvindex = kwargs.get('curvindex', 1.0)
    beta = kwargs.get('beta', 0.0)
    dmmass = kwargs.get('DMmass', 100.0)
    dmchannel = kwargs.get('DMchannel', 'bb')
    emin = kwargs.get('emin', 10**1.5)
    emax = kwargs.get('emax', 10**6.0)
    nbin = kwargs.get('nbin', 18)
    glon = kwargs.get('glon', 0.0)
    glat = kwargs.get('glat', 0.0)
    ltcube_filepath = kwargs.get('ltcube', None)
    galdiff_filepath = kwargs.get('galdiff', None)
    isodiff_filepath = kwargs.get('isodiff', None)
    galdiff_fit_filepath = kwargs.get('galdiff_fit', None)
    isodiff_fit_filepath = kwargs.get('isodiff_fit', None)
    wcs_npix = kwargs.get('wcs_npix', 40)
    wcs_cdelt = kwargs.get('wcs_cdelt', 0.5)
    wcs_proj = kwargs.get('wcs_proj', 'AIT')
    map_type = kwargs.get('map_type', None)
    spatial_model = kwargs.get('spatial_model', 'PointSource')
    spatial_size = kwargs.get('spatial_size', 1E-2)

    obs_time_yr = kwargs.get('obs_time_yr', None)
    event_class = kwargs.get('event_class', 'P8R2_SOURCE_V6')
    min_counts = kwargs.get('min_counts', 3.0)
    ts_thresh = kwargs.get('ts_thresh', 25.0)
    nside = kwargs.get('hpx_nside', 16)
    output = kwargs.get('output', None)

    event_types = [['FRONT', 'BACK']]

    if sedshape == 'PowerLaw':
        fn = spectrum.PowerLaw([1E-13, -index], scale=1E3)
    elif sedshape == 'PLSuperExpCutoff':
        fn = spectrum.PLSuperExpCutoff(
            [1E-13, -index, cutoff, curvindex], scale=1E3)
    elif sedshape == 'LogParabola':
        fn = spectrum.LogParabola([1E-13, -index, beta], scale=1E3)
    elif sedshape == 'DM':
        fn = spectrum.DMFitFunction([1E-26, dmmass], chan=dmchannel)

    log_ebins = np.linspace(np.log10(emin),
                            np.log10(emax), nbin + 1)
    ebins = 10**log_ebins
    ectr = np.exp(utils.edge_to_center(np.log(ebins)))

    c = SkyCoord(glon, glat, unit='deg', frame='galactic')

    if ltcube_filepath is None:

        if obs_time_yr is None:
            raise Exception('No observation time defined.')

        ltc = LTCube.create_from_obs_time(obs_time_yr * 365 * 24 * 3600.)
    else:
        ltc = LTCube.create(ltcube_filepath)
        if obs_time_yr is not None:
            ltc._counts *= obs_time_yr * 365 * \
                24 * 3600. / (ltc.tstop - ltc.tstart)

    gdiff = skymap.Map.create_from_fits(galdiff_filepath)
    gdiff_fit = None
    if galdiff_fit_filepath is not None:
        gdiff_fit = skymap.Map.create_from_fits(galdiff_fit_filepath)

    if isodiff_filepath is None:
        isodiff = utils.resolve_file_path('iso_%s_v06.txt' % event_class,
                                          search_dirs=[os.path.join('$FERMIPY_ROOT', 'data'),
                                                       '$FERMI_DIFFUSE_DIR'])
        isodiff = os.path.expandvars(isodiff)
    else:
        isodiff = isodiff_filepath

    iso = np.loadtxt(isodiff, unpack=True)
    iso_fit = None
    if isodiff_fit_filepath is not None:
        iso_fit = np.loadtxt(isodiff_fit_filepath, unpack=True)

    scalc = SensitivityCalc(gdiff, iso, ltc, ebins,
                            event_class, event_types, gdiff_fit=gdiff_fit,
                            iso_fit=iso_fit, spatial_model=spatial_model,
                            spatial_size=spatial_size)

    # Compute Maps
    map_diff_flux = None
    map_diff_npred = None
    map_int_flux = None
    map_int_npred = None

    map_nstep = 500

    if map_type == 'hpx':

        hpx = HPX(nside, True, 'GAL', ebins=ebins)
        map_diff_flux = HpxMap(np.zeros((nbin, hpx.npix)), hpx)
        map_diff_npred = HpxMap(np.zeros((nbin, hpx.npix)), hpx)
        map_skydir = map_diff_flux.hpx.get_sky_dirs()

        for i in range(0, len(map_skydir), map_nstep):
            s = slice(i, i + map_nstep)
            o = scalc.diff_flux_threshold(
                map_skydir[s], fn, ts_thresh, min_counts)
            map_diff_flux.data[:, s] = o['flux'].T
            map_diff_npred.data[:, s] = o['npred'].T

        hpx = HPX(nside, True, 'GAL')
        map_int_flux = HpxMap(np.zeros((hpx.npix)), hpx)
        map_int_npred = HpxMap(np.zeros((hpx.npix)), hpx)
        map_skydir = map_int_flux.hpx.get_sky_dirs()

        for i in range(0, len(map_skydir), map_nstep):
            s = slice(i, i + map_nstep)
            o = scalc.int_flux_threshold(
                map_skydir[s], fn, ts_thresh, min_counts)
            map_int_flux.data[s] = o['flux']
            map_int_npred.data[s] = o['npred']

    elif map_type == 'wcs':

        wcs_shape = [wcs_npix, wcs_npix]
        wcs_size = wcs_npix * wcs_npix

        map_diff_flux = Map.create(
            c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj, ebins=ebins)
        map_diff_npred = Map.create(
            c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj, ebins=ebins)
        map_skydir = map_diff_flux.get_pixel_skydirs()

        for i in range(0, len(map_skydir), map_nstep):
            idx = np.unravel_index(
                np.arange(i, min(i + map_nstep, wcs_size)), wcs_shape)
            s = (slice(None), idx[1], idx[0])
            o = scalc.diff_flux_threshold(
                map_skydir[slice(i, i + map_nstep)], fn, ts_thresh, min_counts)
            map_diff_flux.data[s] = o['flux'].T
            map_diff_npred.data[s] = o['npred'].T

        map_int_flux = Map.create(c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj)
        map_int_npred = Map.create(c, wcs_cdelt, wcs_shape, 'GAL', wcs_proj)
        map_skydir = map_int_flux.get_pixel_skydirs()

        for i in range(0, len(map_skydir), map_nstep):
            idx = np.unravel_index(
                np.arange(i, min(i + map_nstep, wcs_size)), wcs_shape)
            s = (idx[1], idx[0])
            o = scalc.int_flux_threshold(
                map_skydir[slice(i, i + map_nstep)], fn, ts_thresh, min_counts)
            map_int_flux.data[s] = o['flux']
            map_int_npred.data[s] = o['npred']

    o = scalc.diff_flux_threshold(c, fn, ts_thresh, min_counts)

    cols = [Column(name='e_min', dtype='f8', data=scalc.ebins[:-1], unit='MeV'),
            Column(name='e_ref', dtype='f8', data=o['e_ref'], unit='MeV'),
            Column(name='e_max', dtype='f8', data=scalc.ebins[1:], unit='MeV'),
            Column(name='flux', dtype='f8', data=o[
                   'flux'], unit='ph / (cm2 s)'),
            Column(name='eflux', dtype='f8', data=o[
                   'eflux'], unit='MeV / (cm2 s)'),
            Column(name='dnde', dtype='f8', data=o['dnde'],
                   unit='ph / (MeV cm2 s)'),
            Column(name='e2dnde', dtype='f8',
                   data=o['e2dnde'], unit='MeV / (cm2 s)'),
            Column(name='npred', dtype='f8', data=o['npred'], unit='ph')]

    tab_diff = Table(cols)

    cols = [Column(name='index', dtype='f8'),
            Column(name='e_min', dtype='f8', unit='MeV'),
            Column(name='e_ref', dtype='f8', unit='MeV'),
            Column(name='e_max', dtype='f8', unit='MeV'),
            Column(name='flux', dtype='f8', unit='ph / (cm2 s)'),
            Column(name='eflux', dtype='f8', unit='MeV / (cm2 s)'),
            Column(name='dnde', dtype='f8', unit='ph / (MeV cm2 s)'),
            Column(name='e2dnde', dtype='f8', unit='MeV / (cm2 s)'),
            Column(name='npred', dtype='f8', unit='ph'),
            Column(name='ebin_e_min', dtype='f8',
                   unit='MeV', shape=(len(ectr),)),
            Column(name='ebin_e_ref', dtype='f8',
                   unit='MeV', shape=(len(ectr),)),
            Column(name='ebin_e_max', dtype='f8',
                   unit='MeV', shape=(len(ectr),)),
            Column(name='ebin_flux', dtype='f8',
                   unit='ph / (cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_eflux', dtype='f8',
                   unit='MeV / (cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_dnde', dtype='f8',
                   unit='ph / (MeV cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_e2dnde', dtype='f8',
                   unit='MeV / (cm2 s)', shape=(len(ectr),)),
            Column(name='ebin_npred', dtype='f8', unit='ph', shape=(len(ectr),))]

    cols_ebounds = [Column(name='E_MIN', dtype='f8',
                           unit='MeV', data=ebins[:-1]),
                    Column(name='E_MAX', dtype='f8',
                           unit='MeV', data=ebins[1:]), ]

    tab_int = Table(cols)
    tab_ebounds = Table(cols_ebounds)

    index = np.linspace(1.0, 5.0, 4 * 4 + 1)

    for g in index:
        fn = spectrum.PowerLaw([1E-13, -g], scale=10**3.5)
        o = scalc.int_flux_threshold(c, fn, ts_thresh, 3.0)
        row = [g]
        for colname in tab_int.columns:
            if colname == 'index':
                continue
            if 'ebin' in colname:
                row += [o['bins'][colname.replace('ebin_', '')]]
            else:
                row += [o[colname]]

        tab_int.add_row(row)

    hdulist = fits.HDUList()
    hdulist.append(fits.table_to_hdu(tab_diff))
    hdulist.append(fits.table_to_hdu(tab_int))
    hdulist.append(fits.table_to_hdu(tab_ebounds))

    hdulist[1].name = 'DIFF_FLUX'
    hdulist[2].name = 'INT_FLUX'
    hdulist[3].name = 'EBOUNDS'

    if map_type is not None:
        hdu = map_diff_flux.create_image_hdu()
        hdu.name = 'MAP_DIFF_FLUX'
        hdulist.append(hdu)
        hdu = map_diff_npred.create_image_hdu()
        hdu.name = 'MAP_DIFF_NPRED'
        hdulist.append(hdu)

        hdu = map_int_flux.create_image_hdu()
        hdu.name = 'MAP_INT_FLUX'
        hdulist.append(hdu)
        hdu = map_int_npred.create_image_hdu()
        hdu.name = 'MAP_INT_NPRED'
        hdulist.append(hdu)

    hdulist.writeto(output, overwrite=True)
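A minimal read-back sketch for the file written above (the file name comes
from the `output` argument; 'sensitivity.fits' below is a hypothetical
example). `Table.read` can select an extension by its EXTNAME:

from astropy.table import Table

tab_diff = Table.read('sensitivity.fits', hdu='DIFF_FLUX')
tab_int = Table.read('sensitivity.fits', hdu='INT_FLUX')
print(tab_diff['e_ref', 'flux'])  # differential flux sensitivity per bin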
Example #46
0
def get_sky(plate, mjd, output_path, verbose=False):
    """Extract individual sky exposures used for a single coadd.

    The results are saved to a single FITS file named sky-<plate>-<mjd>.fits
    with the following HDUs:

     - 0: header with cards PLATE, MJD, NFIBERS, NEXP
     - 1: table with per-exposure metadata.
     - 2: table with per-sky-fiber plugmap metadata.
     - remaining ImageHDUs named xWLEN, xWDISP, xRDNOISE, xFLAT, xFLUX, xIVAR,
       xMASK, where x=B,R identifies the band. Each HDU contains a 2D array
       with shape (NFIBERS * NEXP, NWLEN). Since only the valid wavelength
       range is saved, NWLEN is smaller than the raw 4112 (B) or 4128 (R)
       pixels per row.

    xFLUX is in flat-field corrected electrons, with the corresponding
    pipeline inverse variance xIVAR. xFLUX * xFLAT and xRDNOISE are in units
    of detected electrons. xWLEN and xWDISP are in angstroms.

    Parameters
    ----------
    plate : int
        Plate number identifying the coadd.
    mjd : int
        MJD number identifying the coadd.
    output_path : str
        Path where output FITS file should be written.
    verbose : bool
        Print progress updates when True.

    Returns
    -------
    astropy.table.Table
        Table of metadata for the extracted exposures.
    """
    tag = f'PLATE {plate:05d} MJD {mjd:05d} PATH {output_path}'
    if verbose:
        print('Starting {}'.format(tag))
    # Initialize output data.
    last_nexp = None
    plugmaps = []
    wlens = {'b': [], 'r': []}
    wdisps = {'b': [], 'r': []}
    fluxes = {'b': [], 'r': []}
    ivars = {'b': [], 'r': []}
    flats = {'b': [], 'r': []}
    rdnoises = {'b': [], 'r': []}
    masks = {'b': [], 'r': []}
    obskeys = ('EXPOSURE', 'TAI-BEG', 'EXPTIME', 'AZ', 'ALT', 'AIRMASS',
               'PRESSURE', 'AIRTEMP',
               'RDNOISE0', 'RDNOISE1', 'RDNOISE2', 'RDNOISE3')
    obsvals = {key: [] for key in obskeys}
    # Size of each amplifier in raw image pixels along (wlen, tracex) axes.
    ampsize = {'b': (2056, 2048), 'r': (2064, 2057)}
    # ampx[band] tabulates whether each wavelength index is read out by
    # amplifier 0/2 (=0) or 1/3 (=1).
    ampx = {'b': 1 * (np.arange(4112) >= 2056),
            'r': 1 * (np.arange(4128) >= 2064)}
    # amplifier[band] is a function that takes a traceset as input and
    # returns an array that tabulates whether each wavelength index is read
    # out by amplifier 0-3.
    amplifier = {'b': lambda x: 2 * (x >= 2048) + ampx['b'],
                 'r': lambda x: 2 * (x >= 2057) + ampx['r']}
    # Scaling such that RMS = rdnoise_scale * RDNOISEn * neff.
    rdnoise_scale = (4 * np.pi) ** 0.25
    # Conversion from constant log-lambda pixels to wavelength ratio.
    wdisp_const = 1e-4 * np.log(10)
    # Allowed pixel mask bits.
    valid_mask = (1 << 32) - 1
    # Slices of valid data to save. These trim pixels at each end where
    # IVAR=0 or other serious pixel mask bits are often set.
    valid_slices = {'b': slice(767, 3299), 'r': slice(483, 3668) }
    # Initialize data access.
    finder = bossdata.path.Finder()
    mirror = bossdata.remote.Manager()
    # Loop over spectrographs.
    expected_fibers = []
    for specidx in 1, 2:
        # Load the list of science exposures used for this spectrograph's coadd.
        fiber = 500 * (specidx - 1) + 1
        spec_name = finder.get_spec_path(plate, mjd, fiber=fiber, lite=True)
        exposures = bossdata.spec.SpecFile(mirror.get(spec_name)).exposures
        for band in 'b', 'r':
            camera = '{}{}'.format(band, specidx)
            use = valid_slices[band]
            # Loop over science exposures for this camera.
            nexp = exposures.num_by_camera[camera]
            if not (last_nexp is None or nexp == last_nexp):
                print(f'Different nexp for {camera} {tag}')
                return None
            last_nexp = nexp
            for expidx in range(nexp):
                # Load this camera's spFrame file.
                name = exposures.get_exposure_name(expidx, camera, 'spFrame')
                path = mirror.get(finder.get_plate_path(plate, name))
                spFrame = bossdata.plate.FrameFile(path, calibrated=False)
                # Lookup this spectrograph's sky fibers.
                sky_name = binary_type('SKY             ', 'ascii')
                fiberidx = np.where(
                    spFrame.plug_map['OBJTYPE'] == sky_name)[0]
                if expidx == 0 and band == 'b':
                    # Save plugmap metadata.
                    plugmaps.append(spFrame.plug_map[
                        ['FIBERID','RA','DEC','XFOCAL','YFOCAL']][fiberidx])
                    if specidx == 2:
                        plugmap = astropy.table.vstack(plugmaps)
                if specidx == 1 and band == 'b':
                    # Record observation metadata.
                    for key in obskeys:
                        try:
                            value = spFrame.header[key]
                        except KeyError:
                            value = -999 # invalid value for int/float types
                        obsvals[key].append(value)
                # Load the sky fiber data.
                fibers = spFrame.plug_map['FIBERID'][fiberidx].data
                assert np.all(fiberidx == spFrame.get_fiber_offsets([fibers]))
                if expidx == 0 and band == 'b':
                    expected_fibers.append(fibers)
                    if verbose:
                        print('Found {} sky fibers on spec{}: {}.'.format(
                            len(fibers), specidx,
                            ','.join([str(f) for f in fibers])))
                else:
                    if not np.all(fibers == expected_fibers[specidx - 1]):
                        print('Did not get expected fibers for {} exp {}'
                              .format(camera, expidx))
                data = spFrame.get_valid_data(
                    fibers, include_sky=True, include_wdisp=True, use_ivar=True,
                    pixel_quality_mask=valid_mask)
                if verbose:
                    print('Reading {} for exposure {} / {}...'
                          .format(camera, expidx + 1, nexp))
                assert data.shape == (len(fibers), 2 * ampsize[band][0])
                mask = spFrame.get_pixel_masks(fibers)
                masks[band].append(mask[:, use])
                # Identify pixels with valid data.
                valid = ~data['ivar'].mask
                bad_fibers = ~np.any(valid, axis=1)
                if verbose and np.any(bad_fibers):
                    print('  bad fibers: {}'.format(fibers[bad_fibers]))
                ivar = data['ivar'].data
                assert np.all(ivar[valid] > 0)
                ivars[band].append(ivar[:, use])
                # Load the superflat and trace vectors for sky fibers.
                superflat = spFrame.get_superflat(fibers)
                tracex = spFrame.hdulist[7].read()[fiberidx]
                # Load fiberflat and neff vectors from this camera's spFlat.
                name = exposures.get_exposure_name(expidx, camera, 'spFlat')
                path = mirror.get(finder.get_plate_path(plate, name))
                with fits.open(path) as spFlat:
                    fiberflat = spFlat[0].data[fiberidx]
                    neff = bossdata.plate.TraceSet(spFlat[3]).get_y()[fiberidx]
                if np.any(neff[valid] <= 0):
                    print(f'WARNING: neff <= 0 for {camera} {expidx} {tag}')
                # Lookup the per-amplifier readnoise values.
                readnoises = np.array([
                    spFrame.header['RDNOISE{}'.format(amp)]
                    for amp in range(4)], dtype=np.float32)
                # Determine which amplifier (0-3) each pixel along the trace is
                # read out by and scale to RMS readnoise per wavelength pixel.
                amp = amplifier[band](tracex)
                rdnoise = rdnoise_scale * readnoises[amp] * neff
                rdnoises[band].append(rdnoise[:, use].astype(np.float32))
                # Combine the superflat and fiberflat.
                flat = superflat * fiberflat
                assert np.all(flat[valid] > 0)
                flats[band].append(flat[:, use])
                # Save wavelength solutions in angstroms.
                wlen = data['wavelength'].data
                wlens[band].append(wlen[:, use])
                # Save wavelength dispersions in angstroms.
                wdisp = data['wdisp'].data
                assert np.all(wdisp[valid] > 0)
                wdisp = wlen * np.expm1(wdisp_const * wdisp)
                wdisps[band].append(wdisp[:, use])
                # Save the combined flat-fielded sky models + residuals,
                # which might be negative due to readnoise.
                flux = data['flux'].data + data['sky'].data
                fluxes[band].append(flux[:, use])
    # Build observation metadata table.
    obslist = astropy.table.Table()
    for key in obskeys:
        obslist[key] = obsvals[key]
    # Build the output HDU list.
    hdus = fits.HDUList()
    cards = dict(PLATE=plate, MJD=mjd, NFIBERS=len(plugmap), NEXP=nexp)
    hdus.append(fits.PrimaryHDU(header=fits.Header(cards)))
    hdus.append(fits.table_to_hdu(obslist))
    hdus[-1].name = 'OBSLIST'
    hdus.append(fits.table_to_hdu(plugmap))
    hdus[-1].name = 'PLUGMAP'
    for band in 'b', 'r':
        Band = band.upper()
        # Combine arrays for each band and save as an image HDU.
        hdus.append(fits.ImageHDU(np.vstack(wlens[band]),
                                  name='{}WLEN'.format(Band)))
        hdus.append(fits.ImageHDU(np.vstack(wdisps[band]),
                                  name='{}WDISP'.format(Band)))
        hdus.append(fits.ImageHDU(np.vstack(rdnoises[band]),
                                  name='{}RDNOISE'.format(Band)))
        hdus.append(fits.ImageHDU(np.vstack(flats[band]),
                                  name='{}FLAT'.format(Band)))
        hdus.append(fits.ImageHDU(np.vstack(fluxes[band]),
                                  name='{}FLUX'.format(Band)))
        hdus.append(fits.ImageHDU(np.vstack(ivars[band]),
                                  name='{}IVAR'.format(Band)))
        hdus.append(fits.ImageHDU(np.vstack(masks[band]),
                                  name='{}MASK'.format(Band)))
    name = os.path.join(output_path, 'sky-{}-{}.fits'.format(plate, mjd))
    hdus.writeto(name, overwrite=True)
    print('Completed {}'.format(tag))
    return obslist
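A short sketch of reading the per-band arrays back, following the HDU layout
documented in the docstring above (the plate/MJD values are hypothetical):

from astropy.io import fits

with fits.open('sky-4055-55359.fits') as hdus:
    nfibers = hdus[0].header['NFIBERS']
    nexp = hdus[0].header['NEXP']
    bflux = hdus['BFLUX'].data   # shape (NFIBERS * NEXP, NWLEN)
    bivar = hdus['BIVAR'].data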
Example #47
0
def main():

    usage = "usage: %(prog)s"
    description = "Fit SED for cascade component."
    parser = argparse.ArgumentParser(usage=usage,description=description)

    parser.add_argument('--output', default='igmf_casc_lnl.fits')
    parser.add_argument('--make_plots', default=False, action='store_true')
    parser.add_argument('--cache', default=False, action='store_true')
    parser.add_argument('--modelfile', required=True,
                        help='FITS file containing the IGMF models.')
    parser.add_argument('--sedfile', required=True,
                        help='FITS file containing the TeV SEDs.')
    parser.add_argument('--nstep', default=5, type=int)
    parser.add_argument('--name', default=[], action='append')
    parser.add_argument('tables', nargs='+', default=None,
                        help='Extension and likelihood tables.')

    args = parser.parse_args()

    # list of sources
    src_names = args.name
    
    casc_model = CascModel.create_from_fits(args.modelfile)

    tab_pars = Table.read(args.tables[0],'SCAN_PARS')
    tab_ebounds = Table.read(args.tables[0],'EBOUNDS')
    
    # Use cached fits file
    if args.cache:
        tab_casc = load_cache(args.tables, src_names)
    else:
        tables = [Table.read(t) for t in args.tables]

        for i, t in enumerate(tables):
            if 'NAME' in t.columns:
                t['name'] = t['NAME']

        tab_casc = join(tables[0],tables[1])
        tab_casc = join(tab_casc,tables[2])
        tab_casc = load_source_rows(tab_casc, src_names)

    tab_sed_tev = Table.read(args.sedfile)

    tab_igmf = []

    for name in src_names:

        rows_sed_tev = load_source_rows(tab_sed_tev, [name], key='SOURCE')
        cat_names = [ '3FGL %s'%row['3FGL_NAME'] for row in rows_sed_tev ]
        cat_names = np.unique(np.array(cat_names))
        rows_sed_gev = load_source_rows(tab_casc, cat_names, key='NAME')
        rows_casc = load_source_rows(tab_casc, cat_names, key='name')
        tab = scan_igmf_likelihood(casc_model, rows_sed_tev, rows_sed_gev,
                                   rows_casc, tab_pars, tab_ebounds, args.nstep)
        tab_igmf += [tab]

    tab = vstack(tab_igmf)
        
    hdulist = fits.HDUList()
    hdulist.append(fits.table_to_hdu(tab))
    hdulist[1].name = 'SCAN_DATA'    
    hdulist.writeto(args.output, clobber=True)
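As an aside, `fits.table_to_hdu` copies the table's metadata into the binary
table header, so the extension name can equivalently be set on the table
before conversion (a sketch; the same meta={'EXTNAME': ...} pattern appears
in a later example below):

tab.meta['EXTNAME'] = 'SCAN_DATA'   # picked up as the extension name
hdulist.append(fits.table_to_hdu(tab))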
Example #48
0
file_index = 6
infile = '/scratch1/scratchdirs/angela/rand%db/radec.csv' % file_index
outfile = '/scratch1/scratchdirs/angela/rand%db/ELG_FArands_mtl0_%d.fits' % (file_index, file_index)
data = ascii.read("/global/u1/a/angela/DESI/ELG_nz_zcat0.csv", format='csv')
radec= ascii.read(infile, format='no_header')
num_lines = len(radec)
# num_lines = sum(1 for line in open(infile))
number_of_ELG = num_lines * data['number']
red_max = data['red_max']
red_min = data['red_min']
number_of_ELG = np.around(number_of_ELG)
z_vector = np.random.random(num_lines)
cum_no_ELG = np.cumsum(number_of_ELG)
val = int(cum_no_ELG[0])
z_vector[:val] = z_vector[:val] * (red_max[0] - red_min[0]) + red_min[0]
for i in range(1,len(cum_no_ELG)):
    z_vector[int(cum_no_ELG[i-1]):int(cum_no_ELG[i])] = (
        z_vector[int(cum_no_ELG[i-1]):int(cum_no_ELG[i])]
        * (red_max[i] - red_min[i]) + red_min[i])
j=0
int_val = int(cum_no_ELG[-1])
if cum_no_ELG[-1] < num_lines:
    extra_lines = num_lines-cum_no_ELG
    for i in range(int_val,num_lines):
        z_vector[i] = red_min[j]
        j+=1

new_z_vect = shuffle(z_vector)
t = Table([radec['col1'], radec['col2'], new_z_vect], names=['RA', 'DEC', 'Z'])
hdu = fits.table_to_hdu(t)
hdu.writeto(outfile)

Example #49
0
    def _make_sed_fits(self, sed, filename, **kwargs):

        # Write a FITS file
        cols = [Column(name='e_min', dtype='f8', data=sed['e_min'], unit='MeV'),
                Column(name='e_ref', dtype='f8',
                       data=sed['e_ref'], unit='MeV'),
                Column(name='e_max', dtype='f8',
                       data=sed['e_max'], unit='MeV'),
                Column(name='ref_dnde_e_min', dtype='f8',
                       data=sed['ref_dnde_e_min'], unit='ph / (MeV cm2 s)'),
                Column(name='ref_dnde_e_max', dtype='f8',
                       data=sed['ref_dnde_e_max'], unit='ph / (MeV cm2 s)'),
                Column(name='ref_dnde', dtype='f8',
                       data=sed['ref_dnde'], unit='ph / (MeV cm2 s)'),
                Column(name='ref_flux', dtype='f8',
                       data=sed['ref_flux'], unit='ph / (cm2 s)'),
                Column(name='ref_eflux', dtype='f8',
                       data=sed['ref_eflux'], unit='MeV / (cm2 s)'),
                Column(name='ref_npred', dtype='f8', data=sed['ref_npred']),
                Column(name='dnde', dtype='f8',
                       data=sed['dnde'], unit='ph / (MeV cm2 s)'),
                Column(name='dnde_err', dtype='f8',
                       data=sed['dnde_err'], unit='ph / (MeV cm2 s)'),
                Column(name='dnde_errp', dtype='f8',
                       data=sed['dnde_err_hi'], unit='ph / (MeV cm2 s)'),
                Column(name='dnde_errn', dtype='f8',
                       data=sed['dnde_err_lo'], unit='ph / (MeV cm2 s)'),
                Column(name='dnde_ul', dtype='f8',
                       data=sed['dnde_ul'], unit='ph / (MeV cm2 s)'),
                Column(name='e2dnde', dtype='f8',
                       data=sed['e2dnde'], unit='MeV / (cm2 s)'),
                Column(name='e2dnde_err', dtype='f8',
                       data=sed['e2dnde_err'], unit='MeV / (cm2 s)'),
                Column(name='e2dnde_errp', dtype='f8',
                       data=sed['e2dnde_err_hi'], unit='MeV / (cm2 s)'),
                Column(name='e2dnde_errn', dtype='f8',
                       data=sed['e2dnde_err_lo'], unit='MeV / (cm2 s)'),
                Column(name='e2dnde_ul', dtype='f8',
                       data=sed['e2dnde_ul'], unit='MeV / (cm2 s)'),
                Column(name='norm', dtype='f8', data=sed['norm']),
                Column(name='norm_err', dtype='f8', data=sed['norm_err']),
                Column(name='norm_errp', dtype='f8', data=sed['norm_err_hi']),
                Column(name='norm_errn', dtype='f8', data=sed['norm_err_lo']),
                Column(name='norm_ul', dtype='f8', data=sed['norm_ul95']),
                Column(name='ts', dtype='f8', data=sed['ts']),
                Column(name='loglike', dtype='f8', data=sed['loglike']),
                Column(name='norm_scan', dtype='f8', data=sed['norm_scan']),
                Column(name='dloglike_scan', dtype='f8',
                       data=sed['dloglike_scan']),

                ]

        tab = Table(cols)
        tab.meta['UL_CONF'] = 0.95
        hdu_sed = fits.table_to_hdu(tab)
        hdu_sed.name = 'SED'

        columns = fits.ColDefs([])

        columns.add_col(fits.Column(name=str('energy'), format='E',
                                    array=sed['model_flux']['energies'],
                                    unit='MeV'))
        columns.add_col(fits.Column(name=str('dnde'), format='E',
                                    array=sed['model_flux']['dnde'],
                                    unit='ph / (MeV cm2 s)'))
        columns.add_col(fits.Column(name=str('dnde_lo'), format='E',
                                    array=sed['model_flux']['dnde_lo'],
                                    unit='ph / (MeV cm2 s)'))
        columns.add_col(fits.Column(name=str('dnde_hi'), format='E',
                                    array=sed['model_flux']['dnde_hi'],
                                    unit='ph / (MeV cm2 s)'))
        columns.add_col(fits.Column(name=str('dnde_err'), format='E',
                                    array=sed['model_flux']['dnde_err'],
                                    unit='ph / (MeV cm2 s)'))
        columns.add_col(fits.Column(name=str('dnde_ferr'), format='E',
                                    array=sed['model_flux']['dnde_ferr']))

        hdu_f = fits.BinTableHDU.from_columns(columns, name='MODEL_FLUX')

        columns = fits.ColDefs([])

        npar = len(sed['param_names'])
        columns.add_col(fits.Column(name=str('name'),
                                    format='A32',
                                    array=sed['param_names']))
        columns.add_col(fits.Column(name=str('value'), format='E',
                                    array=sed['param_values']))
        columns.add_col(fits.Column(name=str('error'), format='E',
                                    array=sed['param_errors']))
        columns.add_col(fits.Column(name=str('covariance'),
                                    format='%iE' % npar,
                                    dim=str('(%i)' % npar),
                                    array=sed['param_covariance']))
        columns.add_col(fits.Column(name=str('correlation'),
                                    format='%iE' % npar,
                                    dim=str('(%i)' % npar),
                                    array=sed['param_correlation']))

        hdu_p = fits.BinTableHDU.from_columns(columns, name='PARAMS')

        hdus = [fits.PrimaryHDU(), hdu_sed, hdu_f, hdu_p]
        hdus[0].header['CONFIG'] = json.dumps(sed['config'])
        hdus[1].header['CONFIG'] = json.dumps(sed['config'])

        fits_utils.write_hdus(hdus, filename,
                              keywords={'SRCNAME': sed['name']})
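Note that `table_to_hdu` propagates column units to TUNITn header cards and
the table metadata (such as UL_CONF above) to header keywords; a minimal
self-contained sketch:

from astropy.io import fits
from astropy.table import Column, Table

t = Table([Column(name='flux', data=[1e-9], unit='ph / (cm2 s)')])
t.meta['UL_CONF'] = 0.95
hdu = fits.table_to_hdu(t)
print(hdu.header['TUNIT1'], hdu.header['UL_CONF'])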
Example #50
0
nrows_tot_all = mtl_data.shape[0]
o_mask = mtl_data['DESI_TARGET']!=2
o_masked_mtl = mtl_data[o_mask]

for file_index in range(0,9):
    print(file_index)
    first_file = '/global/project/projectdirs/desi/datachallenge/LSScat/quicksurvey2016/elg_ran_%d.fits' % file_index
    if file_index < 9:
        extra_data = '/global/project/projectdirs/desi/datachallenge/LSScat/quicksurvey2016/elg_ran_%d.fits' % (file_index+1)
    else:
        extra_data = '/global/project/projectdirs/desi/datachallenge/LSScat/quicksurvey2016/elg_ran_0.fits'
    rd_data = fits.open(first_file)
    rd_extra = fits.open(extra_data)
    nrows1 = rd_data[1].data.shape[0]
    if nrows_tot_all > nrows1:
        nrows_diff = nrows_tot_all - nrows1
    hdu = fits.BinTableHDU.from_columns(rd_data[1].columns, nrows=nrows_tot_all)
    if nrows_tot_all > nrows1:
        for colname in rd_data[1].columns.names:
            hdu.data[colname][nrows1:] = rd_extra[1].data[colname][0:nrows_diff]

    o_masked_mtl.OBSCONDITIONS = o_masked_mtl.OBSCONDITIONS * 0
    o_masked_mtl.OBSCONDITIONS = o_masked_mtl.OBSCONDITIONS + 3
    t = Table([o_masked_mtl.TARGETID, hdu.data.ra, hdu.data.dec,
               o_masked_mtl.DESI_TARGET, o_masked_mtl.BGS_TARGET,
               o_masked_mtl.MWS_TARGET, o_masked_mtl.SUBPRIORITY,
               o_masked_mtl.OBSCONDITIONS, o_masked_mtl.BRICKNAME,
               o_masked_mtl.DECAM_FLUX, o_masked_mtl.SHAPEDEV_R,
               o_masked_mtl.SHAPEEXP_R, o_masked_mtl.DEPTH_R,
               o_masked_mtl.GALDEPTH_R, o_masked_mtl.NUMOBS_MORE,
               o_masked_mtl.PRIORITY],
              names=('TARGETID', 'RA', 'DEC', 'DESI_TARGET', 'BGS_TARGET',
                     'MWS_TARGET', 'SUBPRIORITY', 'OBSCONDITIONS', 'BRICKNAME',
                     'DECAM_FLUX', 'SHAPEDEV_R', 'SHAPEEXP_R', 'DEPTH_R',
                     'GALDEPTH_R', 'NUMOBS_MORE', 'PRIORITY'),
              dtype=('>i8', '>f8', '>f8', '>i8', '>i8', '>i8', '>f8', '>i2',
                     'S8', '>f4', '>f4', '>f4', '>f4', '>f4', '>i4', '>i8'))

    full_table = vstack([t2, t])   # t2: accumulated table from elided earlier code
    hdu = fits.table_to_hdu(full_table)

    file_out ='/scratch1/scratchdirs/angela/LSS/LSS_rand_pre_fibassign_target_%d.fits' % file_index
    hdu.writeto(file_out)
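One caveat with the loop above: hdu.writeto(file_out) raises an error if the
file already exists, so re-runs need either a cleanup step or (a sketch) the
overwrite flag:

hdu.writeto(file_out, overwrite=True)   # replace any existing file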
Example #51
0
File: slow.py Project: OSSOS/MOP
def run(expnum, ccd, version='p', prefix='', dry_run=False, force=False):

    message = 'success'

    if storage.get_status(task, prefix, expnum, version=version, ccd=ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task=task, prefix=prefix, expnum=expnum, ccd=ccd, version=version, dry_run=dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, "p", ccd=ccd):
                raise IOError("{} not yet run for {}".format(dependency, expnum))


            header = storage.get_astheader(expnum, ccd)
            datasec = storage.datasec_to_list(header.get('DATASEC', '[80:2080,30:4160]'))
            try:
                fwhm = "{:5.2f}".format(storage.get_fwhm(expnum, ccd))
            except:
                fwhm = 'unknown'
            for keyword in del_keyword_list:
                try:
                    del(header[keyword])
                except:
                    pass
            header['FWHM'] = (fwhm, 'FWHM in pixels')
            header['EXTNAME'] = 'header'
            primary_hdu = fits.PrimaryHDU(header=header)
            hdu_list = fits.HDUList([primary_hdu, ])
            for ext in ['jmp', 'matt']:
                extension = 'obj.'+ext
                name = "{}p{:02d}.{}".format(expnum, ccd, extension)
                try:
                    os.unlink(name)
                    os.unlink(name+".fits")
                except:
                    pass
                logging.info("Retrieving {}".format(name))
                obj_file = mop_file.Parser(expnum, ccd, extension)
                obj_file.parse()
        
                t = numpy.all([datasec[0] < obj_file.data['X'], obj_file.data['X'] < datasec[1],
                               datasec[2] < obj_file.data['Y'], obj_file.data['Y'] < datasec[3]], axis=0)
                logging.info("Source remaining after datasec cut: {} of {}".format(len(obj_file.data[t]['X']), len(t)))
                table_hdu = fits.table_to_hdu(obj_file.data[t])
                table_hdu.header['CATALOG'] = name
                table_hdu.header['EXTNAME'] = ext
                hdu_list.append(table_hdu)
                del table_hdu
                del obj_file
                os.unlink(name)

            name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
            if os.access(name, os.F_OK):
                os.unlink(name)
            hdu_list.writeto(name)
            uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
            logging.info(name+" -> "+uri)
            count = 0
            with open(name):
                while True:
                    count += 1
                    logging.info("Copy attempt {}".format(count))
                    try:
                        storage.copy(name, uri)
                        os.unlink(name)
                        break
                    except Exception as ex:
                        if count > 10:
                            raise ex

            logging.info(message)
        except Exception as e:
            message = str(e)
            logging.error(message)
        
        if not dry_run:
            storage.set_status(task, prefix, expnum, version=version, ccd=ccd, status=message)
Example #52
0
wcs.wcs.ctype[0] = 'RA---CAR'
wcs.wcs.ctype[1] = 'DEC--CAR'
wcs.wcs.crval[0] = lon
wcs.wcs.crval[1] = lat
wcs.wcs.crpix[0] = (npix+1)/2.
wcs.wcs.crpix[1] = (npix+1)/2.
wcs.wcs.cdelt[0] = -1.0*cdelt
wcs.wcs.cdelt[1] = cdelt

#################################
# WCS Cube Regular

hdu = fits.PrimaryHDU(data, header=wcs.to_header())

tab_bands = create_bands_table(emin, emax)
hdulist = [hdu, fits.table_to_hdu(tab_bands)]
update_header(hdulist[1].header, **{})
fits.HDUList(hdulist).writeto('wcs_ccube.fits', overwrite=True)

#################################
# WCS Cube Irregular

# Set data values outside the geometry to NaN
data_irreg = np.full((nband, 8, 8), np.nan)
for i, n in enumerate([2,4,6,8]):
    data_irreg[i, :n, :n] = np.random.poisson(1.0,(n, n)).astype('float')


hdu = fits.PrimaryHDU(data_irreg, header=wcs.to_header())

npix_irreg = np.vstack((np.arange(2,10,2),np.arange(2,10,2))).T
Example #53
0
def run_flux_sensitivity(**kwargs):

    index = kwargs.get('index', 2.0)
    emin = kwargs.get('emin', 10**1.5)
    emax = kwargs.get('emax', 10**6.0)
    nbin = kwargs.get('nbin', 18)
    glon = kwargs.get('glon', 0.0)
    glat = kwargs.get('glat', 0.0)
    ltcube_filepath = kwargs.get('ltcube', None)
    galdiff_filepath = kwargs.get('galdiff', None)
    isodiff_filepath = kwargs.get('isodiff', None)
    obs_time_yr = kwargs.get('obs_time_yr', None)
    event_class = kwargs.get('event_class', 'P8R2_SOURCE_V6')
    min_counts = kwargs.get('min_counts', 3.0)
    ts_thresh = kwargs.get('ts_thresh', 25.0)
    output = kwargs.get('output', None)

    event_types = [['FRONT', 'BACK']]
    fn = spectrum.PowerLaw([1E-13, -index], scale=1E3)

    log_ebins = np.linspace(np.log10(emin),
                            np.log10(emax), nbin + 1)
    ebins = 10**log_ebins
    ectr = np.exp(utils.edge_to_center(np.log(ebins)))

    c = SkyCoord(glon, glat, unit='deg', frame='galactic')

    if ltcube_filepath is None:

        if obs_time_yr is None:
            raise Exception('No observation time defined.')

        ltc = LTCube.create_from_obs_time(obs_time_yr * 365 * 24 * 3600.)
    else:
        ltc = LTCube.create(ltcube_filepath)
        if obs_time_yr is not None:
            ltc._counts *= obs_time_yr * 365 * \
                24 * 3600. / (ltc.tstop - ltc.tstart)

    gdiff = skymap.Map.create_from_fits(galdiff_filepath)

    if isodiff_filepath is None:
        isodiff = utils.resolve_file_path('iso_%s_v06.txt' % event_class,
                                          search_dirs=[os.path.join('$FERMIPY_ROOT', 'data'),
                                                       '$FERMI_DIFFUSE_DIR'])
        isodiff = os.path.expandvars(isodiff)
    else:
        isodiff = isodiff_filepath

    iso = np.loadtxt(isodiff, unpack=True)

    scalc = SensitivityCalc(gdiff, iso, ltc, ebins,
                            event_class, event_types)

    o = scalc.diff_flux_threshold(c, fn, ts_thresh, min_counts)

    cols = [Column(name='e_min', dtype='f8', data=scalc.ebins[:-1], unit='MeV'),
            Column(name='e_ref', dtype='f8', data=o['e_ref'], unit='MeV'),
            Column(name='e_max', dtype='f8', data=scalc.ebins[1:], unit='MeV'),
            Column(name='flux', dtype='f8', data=o[
                   'flux'], unit='ph / (cm2 s)'),
            Column(name='eflux', dtype='f8', data=o[
                   'eflux'], unit='MeV / (cm2 s)'),
            Column(name='dnde', dtype='f8', data=o['dnde'],
                   unit='ph / (MeV cm2 s)'),
            Column(name='e2dnde', dtype='f8',
                   data=o['e2dnde'], unit='MeV / (cm2 s)'),
            Column(name='npred', dtype='f8', data=o['npred'], unit='ph')]

    tab_diff = Table(cols)

    cols = [Column(name='index', dtype='f8'),
            Column(name='e_min', dtype='f8', unit='MeV'),
            Column(name='e_ref', dtype='f8', unit='MeV'),
            Column(name='e_max', dtype='f8', unit='MeV'),
            Column(name='flux', dtype='f8', unit='ph / (cm2 s)'),
            Column(name='eflux', dtype='f8', unit='MeV / (cm2 s)'),
            Column(name='dnde', dtype='f8', unit='ph / (MeV cm2 s)'),
            Column(name='e2dnde', dtype='f8', unit='MeV / (cm2 s)'),
            Column(name='npred', dtype='f8', unit='ph')]

    cols_ebin = [Column(name='index', dtype='f8'),
                 Column(name='e_min', dtype='f8',
                        unit='MeV', shape=(len(ectr),)),
                 Column(name='e_ref', dtype='f8',
                        unit='MeV', shape=(len(ectr),)),
                 Column(name='e_max', dtype='f8',
                        unit='MeV', shape=(len(ectr),)),
                 Column(name='flux', dtype='f8',
                        unit='ph / (cm2 s)', shape=(len(ectr),)),
                 Column(name='eflux', dtype='f8',
                        unit='MeV / (cm2 s)', shape=(len(ectr),)),
                 Column(name='dnde', dtype='f8',
                        unit='ph / (MeV cm2 s)', shape=(len(ectr),)),
                 Column(name='e2dnde', dtype='f8',
                        unit='MeV / (cm2 s)', shape=(len(ectr),)),
                 Column(name='npred', dtype='f8', unit='ph', shape=(len(ectr),))]

    tab_int = Table(cols)
    tab_int_ebin = Table(cols_ebin)

    index = np.linspace(1.0, 5.0, 4 * 4 + 1)

    for g in index:
        fn = spectrum.PowerLaw([1E-13, -g], scale=10**3.5)
        o = scalc.int_flux_threshold(c, fn, ts_thresh, 3.0)
        row = [g]
        for colname in tab_int.columns:
            if colname not in o:
                continue
            row += [o[colname]]

        tab_int.add_row(row)

        row = [g]
        for colname in tab_int.columns:
            if colname not in o:
                continue
            row += [o['bins'][colname]]
        tab_int_ebin.add_row(row)

    hdulist = fits.HDUList()
    hdulist.append(fits.table_to_hdu(tab_diff))
    hdulist.append(fits.table_to_hdu(tab_int))
    hdulist.append(fits.table_to_hdu(tab_int_ebin))

    hdulist[1].name = 'DIFF_FLUX'
    hdulist[2].name = 'INT_FLUX'
    hdulist[3].name = 'INT_FLUX_EBIN'

    hdulist.writeto(output, clobber=True)
Example #54
0
order = 4
nside = 2**order
npix_imp = np.ones(4)*12*nside**2
cols = []
for i in range(4):
    cols += [Column(name='CHANNEL%i'%i, data=hp.ud_grade(vals0[i], nside_out=nside, power=-2.0), dtype='f8')]
tab_imp = Table(cols, meta={'EXTNAME' : 'SKYMAP'})
hdr_imp = hdr.copy()
hdr_imp['ORDER'] = order
hdr_imp['NSIDE'] = nside
hdr_imp['INDXSCHM'] = 'IMPLICIT'
hdr_imp['AXCOLS1'] = 'E_MIN,E_MAX'

# Write File
tab_bands_imp = create_bands_table(np.ones(4)*nside, npix_imp, emin, emax)
hdulist = [fits.PrimaryHDU(), fits.table_to_hdu(tab_imp),
           fits.table_to_hdu(tab_bands_imp)]
update_header(hdulist[1].header, **hdr_imp)
fits.HDUList(hdulist).writeto('hpx_ccube_implicit.fits', overwrite=True)

#################################
# EXPLICIT Cube

# Create Table
order = 4
nside = 2**order
ipix = hp.query_disc(nside, v[0], np.radians(rad), nest=True)
vals_exp = np.zeros((4,len(ipix)))
for i in range(nband):
    vals_exp[i] = hp.ud_grade(vals0[i], nside_out=nside, power=-2.0)[ipix]
Example #55
0
def fits_out(out_file, freq_lags, energy_lags):
    """
    Write the lag-frequency and lag-energy spectra to a FITS output file.
    Header info is in extension 0, lag-frequency is in extension 1, and
    lag-energy is in extension 2.

    Parameters
    ----------
    out_file : str
        The full path of the output file, in format '*_lag.fits'.

    freq_lags : astropy.table.Table
        The lag-frequency spectrum, with the analysis meta-parameters (DT,
        N_BINS, SEGMENTS, RATE_CI, etc.) stored in freq_lags.meta.

    energy_lags : astropy.table.Table
        The lag-energy spectrum, with the corresponding meta-parameters
        stored in energy_lags.meta.
    Returns
    -------
    Nothing, but writes to file '*_lag.fits'.

    """

    print "Output sent to: %s" % out_file
    freq_lags.meta['RATE_CI'] = str(freq_lags.meta['RATE_CI'].tolist())
    energy_lags.meta['RATE_CI'] = str(energy_lags.meta['RATE_CI'].tolist())

    # ## Make FITS header (extension 0)
    # prihdr = fits.Header()
    # prihdr.set('TYPE', "Lag-frequency and lag-energy spectra")
    # prihdr.set('DATE', str(datetime.now()), "YYYY-MM-DD localtime")
    # prihdr.set('EVTLIST', freq_lags.meta['EVTLIST'])
    # prihdr.set('CS_DATA', freq_lags.meta['CS_DATA'])
    # prihdr.set('DT', freq_lags.meta['DT'], "seconds")
    # prihdr.set('N_BINS', freq_lags.meta['N_BINS'], "time bins per segment")
    # prihdr.set('SEGMENTS', freq_lags.meta['SEGMENTS'],
    #            "segments in the whole light curve")
    # prihdr.set('EXPOSURE', freq_lags.meta['EXPOSURE'],
    #            "seconds, of light curve")
    # prihdr.set('DETCHANS', freq_lags.meta['DETCHANS'],
    #            "Number of detector energy channels")
    # prihdr.set('LAG_LF', freq_lags.meta['LO_FREQ'],
    #            "Hz; Lower frequency bound for energy lags")
    # prihdr.set('LAG_UF', freq_lags.meta['UP_FREQ'],
    #            "Hz; Upper frequency bound for energy lags")
    # prihdr.set('LAG_LE', freq_lags.meta['LO_CHAN'],
    #            "Lower energy channel bound for frequency lags")
    # prihdr.set('LAG_UE', freq_lags.meta['UP_CHAN'],
    #            "Upper energy channel bound for frequency lags")
    # prihdr.set('RATE_CI', freq_lags.meta['RATE_CI'], "cts/s")
    # prihdr.set('RATE_REF', freq_lags.meta['RATE_REF'], "cts/s")
    # prihdu = fits.PrimaryHDU(prihdr)

    # ## Make FITS table for lag-frequency plot (extension 1)
    # col1 = fits.Column(name='FREQUENCY', format='D', array=f_bins)
    # col2 = fits.Column(name='PHASE', unit='radians', format='D',
    #                    array=phase.flatten('C'))
    # col3 = fits.Column(name='PHASE_ERR', unit='radians', format='D',
    #                    array=err_phase.flatten('C'))
    # col4 = fits.Column(name='TIME_LAG', unit='s', format='D',
    #                    array=tlag.flatten('C'))
    # col5 = fits.Column(name='TIME_LAG_ERR', unit='s', format='D',
    #                    array=err_tlag.flatten('C'))
    # cols = fits.ColDefs([col1, col2, col3, col4, col5])
    # tbhdu1 = fits.BinTableHDU.from_columns(cols)
    #
    # ## Make FITS table for lag-energy plot (extension 2)
    # col1 = fits.Column(name='PHASE', unit='radians', format='D', array=e_phase)
    # col2 = fits.Column(name='PHASE_ERR', unit='radians', format='D', \
    #                    array=e_err_phase)
    # col3 = fits.Column(name='TIME_LAG', unit='s', format='D', array=e_tlag)
    # col4 = fits.Column(name='TIME_LAG_ERR', unit='s', format='D', \
    #                    array=e_err_tlag)
    # col5 = fits.Column(name='CHANNEL', unit='', format='I', \
    #                    array=chan)
    # cols = fits.ColDefs([col1, col2, col3, col4, col5])
    # tbhdu2 = fits.BinTableHDU.from_columns(cols)
    #
    # ## Check that the filename has FITS file extension
    # assert out_file[-4:].lower() == "fits", \
    #     'ERROR: Output file must have extension ".fits".'
    #
    # ## Write to a FITS file
    # prihdr = fits.Header(freq_lags.meta)
    # thdulist = fits.HDUList([prihdu,
    #                          fits.table_to_hdu(freq_lags)]) #,
    #                          # fits.table_to_hdu(energy_lags)])
    # thdulist.writeto(out_file, clobber=True)


    ## I know this is hack-y, but it's good enough for now. I kept getting a
    ## string64 or string32 error when trying to make a primary HDU from the
    ## Table meta information.
    ##      self._bitpix = DTYPE2BITPIX[data.dtype.name]
    ##      KeyError: 'string32'

    freq_lags.write(out_file, overwrite=True, format='fits')
    hdulist = fits.open(out_file, mode='update')
    hdulist.append(fits.table_to_hdu(energy_lags))
    hdulist.flush()
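The 'string32' KeyError quoted above is typical of passing a Header
positionally to fits.PrimaryHDU, where it is interpreted as image *data*.
If that is the cause here, a cleaner fix (a sketch, untested against this
pipeline) would pass the header by keyword and keep the all-at-once
HDUList construction:

prihdr = fits.Header(freq_lags.meta)
prihdu = fits.PrimaryHDU(header=prihdr)  # header by keyword; 1st positional is data
thdulist = fits.HDUList([prihdu,
                         fits.table_to_hdu(freq_lags),
                         fits.table_to_hdu(energy_lags)])
thdulist.writeto(out_file, overwrite=True)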
Example #56
0
    print(i, nebin)

    tab['z'][i,:] = t[3]['z']
    
    row = []
    for cname in tab.columns:

        if cname not in t[0].columns:
            continue
        
        if cname in ['inj_flux','prim_flux']:
            tab[cname][i,:,:nebin] = t[0][cname][:,:nebin]
        elif cname in ['casc_flux']:
            tab[cname][i,:,:nebin,:nebin] = t[0][cname][:,:nebin,:nebin]
        else:
            tab[cname][i,...] = t[0][cname][...]
            
        #row += [t[cname]]
        #tab[cname][i] = t[cname]
    #tab.add_row(row)

hdulist = fits.HDUList()
hdulist.append(fits.table_to_hdu(tab))
hdulist.append(fits.table_to_hdu(tables[0][1]))
hdulist.append(fits.table_to_hdu(tables[0][2]))
hdulist[1].name = 'FLUX_AND_MODEL_PARS'
hdulist[2].name = 'ENERGIES'
hdulist[3].name = 'THETA'    
hdulist.writeto('out.fits',clobber=True)

Example #57
0
def write_sky_map(filename, m, **kwargs):
    """Write a gravitational-wave sky map to a file, populating the header
    with optional metadata.

    Parameters
    ----------

    filename: string
        Path to the optionally gzip-compressed FITS file.

    m : astropy.table.Table or numpy.array
        If a Numpy record array or astropy.table.Table instance, and has a
        column named 'UNIQ', then interpret the input as NUNIQ-style
        multi-order map [1]_. Otherwise, interpret as a NESTED or RING ordered
        map.

    **kwargs
        Additional metadata to add to FITS header. If m is an
        astropy.table.Table instance, then the header is initialized from both
        m.meta and **kwargs.

    References
    ----------
    .. [1] Górski, K.M., Wandelt, B.D., Hivon, E., Hansen, F.K., & Banday, A.J.
        2017. The HEALPix Primer. The Unique Identifier scheme.
        http://healpix.sourceforge.net/html/intronode4.htm#SECTION00042000000000000000

    Examples
    --------

    Test header contents:

    >>> order = 9
    >>> nside = 2 ** order
    >>> npix = hp.nside2npix(nside)
    >>> prob = np.ones(npix, dtype=np.float) / npix

    >>> import tempfile
    >>> from lalinference import InferenceVCSInfo as vcs_info
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     write_sky_map(f.name, prob, nest=True, vcs_info=vcs_info)
    ...     for card in fits.getheader(f.name, 1).cards:
    ...         print(str(card).rstrip())
    XTENSION= 'BINTABLE'           / binary table extension
    BITPIX  =                    8 / array data type
    NAXIS   =                    2 / number of array dimensions
    NAXIS1  =                    8 / length of dimension 1
    NAXIS2  =              3145728 / length of dimension 2
    PCOUNT  =                    0 / number of group parameters
    GCOUNT  =                    1 / number of groups
    TFIELDS =                    1 / number of table fields
    TTYPE1  = 'PROB    '
    TFORM1  = 'D       '
    TUNIT1  = 'pix-1   '
    PIXTYPE = 'HEALPIX '           / HEALPIX pixelisation
    ORDERING= 'NESTED  '           / Pixel ordering scheme: RING, NESTED, or NUNIQ
    COORDSYS= 'C       '           / Ecliptic, Galactic or Celestial (equatorial)
    NSIDE   =                  512 / Resolution parameter of HEALPIX
    INDXSCHM= 'IMPLICIT'           / Indexing: IMPLICIT or EXPLICIT
    VCSVERS = 'LALInference ...' / Software version
    VCSSTAT = '...: ...' / Software version control status
    VCSREV  = '...' / Software revision (Git)
    DATE-BLD= '...' / Software build date

    >>> uniq = moc.nest2uniq(np.uint8(order), np.arange(npix, dtype=np.uint64))
    >>> probdensity = prob / hp.nside2pixarea(nside)
    >>> moc_data = np.rec.fromarrays(
    ...     [uniq, probdensity], names=['UNIQ', 'PROBDENSITY'])
    >>> with tempfile.NamedTemporaryFile(suffix='.fits') as f:
    ...     write_sky_map(f.name, moc_data, vcs_info=vcs_info)
    ...     for card in fits.getheader(f.name, 1).cards:
    ...         print(str(card).rstrip())
    XTENSION= 'BINTABLE'           / binary table extension
    BITPIX  =                    8 / array data type
    NAXIS   =                    2 / number of array dimensions
    NAXIS1  =                   16 / length of dimension 1
    NAXIS2  =              3145728 / length of dimension 2
    PCOUNT  =                    0 / number of group parameters
    GCOUNT  =                    1 / number of groups
    TFIELDS =                    2 / number of table fields
    TTYPE1  = 'UNIQ    '
    TFORM1  = 'K       '
    TZERO1  =  9223372036854775808
    TTYPE2  = 'PROBDENSITY'
    TFORM2  = 'D       '
    TUNIT2  = 'sr-1    '
    PIXTYPE = 'HEALPIX '           / HEALPIX pixelisation
    ORDERING= 'NUNIQ   '           / Pixel ordering scheme: RING, NESTED, or NUNIQ
    COORDSYS= 'C       '           / Ecliptic, Galactic or Celestial (equatorial)
    MOCORDER=                    9 / MOC resolution (best order)
    VCSVERS = 'LALInference ...' / Software version
    VCSSTAT = '...: ...' / Software version control status
    VCSREV  = '...' / Software revision (Git)
    DATE-BLD= '...' / Software build date
    """

    if isinstance(m, Table) or (isinstance(m, np.ndarray) and m.dtype.names):
        m = Table(m)
    else:
        if np.ndim(m) == 1:
            m = [m]
        m = Table(m, names=DEFAULT_NESTED_NAMES[:len(m)])
    m.meta.update(kwargs)

    if 'UNIQ' in m.colnames:
        default_names = DEFAULT_NUNIQ_NAMES
        default_units = DEFAULT_NUNIQ_UNITS
        extra_header = [
            ('PIXTYPE', 'HEALPIX',
             'HEALPIX pixelisation'),
            ('ORDERING', 'NUNIQ',
             'Pixel ordering scheme: RING, NESTED, or NUNIQ'),
            ('COORDSYS', 'C',
             'Ecliptic, Galactic or Celestial (equatorial)'),
            ('MOCORDER', moc.uniq2order(m['UNIQ'].max()),
             'MOC resolution (best order)')]
    else:
        default_names = DEFAULT_NESTED_NAMES
        default_units = DEFAULT_NESTED_UNITS
        ordering = 'NESTED' if m.meta.pop('nest', False) else 'RING'
        extra_header = [
            ('PIXTYPE', 'HEALPIX',
             'HEALPIX pixelisation'),
            ('ORDERING', ordering,
             'Pixel ordering scheme: RING, NESTED, or NUNIQ'),
            ('COORDSYS', 'C',
             'Ecliptic, Galactic or Celestial (equatorial)'),
            ('NSIDE', hp.npix2nside(len(m)),
             'Resolution parameter of HEALPIX'),
            ('INDXSCHM', 'IMPLICIT',
             'Indexing: IMPLICIT or EXPLICIT')]

    for key, rows in itertools.groupby(FITS_META_MAPPING, lambda row: row[0]):
        try:
            value = m.meta.pop(key)
        except KeyError:
            pass
        else:
            for row in rows:
                _, fits_key, fits_comment, to_fits, _ = row
                if to_fits is not None:
                    extra_header.append(
                        (fits_key, to_fits(value), fits_comment))

    for default_name, default_unit in zip(default_names, default_units):
        try:
            col = m[default_name]
        except KeyError:
            pass
        else:
            if not col.unit:
                col.unit = default_unit

    if astropy_version >= '1.3.1':
        hdu = fits.table_to_hdu(m)
        hdu.header.extend(extra_header)
        hdulist = fits.HDUList([fits.PrimaryHDU(), hdu])
        hdulist.writeto(filename, clobber=True)
    else:
        # FIXME: This code path works around a number of issues with older
        # versions of Astropy. Remove it once we drop support for
        # astropy < 1.3.1.
        #
        # astropy.io.fits.table_to_hdu was added in astropy 1.2.
        # We must currently support astropy >= 1.1.1 on the LIGO Data Grid's
        # Scientific Linux 7 computing clusters.
        #
        # With some old versions of astropy that we still have to
        # support, the astropy.table.Table.write method did not support the
        # clobber argument. So we have to manually delete the file first so
        # that astropy.io.fits does not complain that the file exists.
        #
        # Also this works around https://github.com/astropy/astropy/pull/5720,
        # which was fixed in astropy 1.3.1.
        from ..bayestar.command import rm_f
        rm_f(filename)
        m.write(filename, format='fits')

        hdulist = fits.open(filename)
        _, hdu = hdulist
        hdu.header.extend(extra_header)
        hdulist.writeto(filename, clobber=True)
Example #58
0
def main():

    usage = "Usage: %(prog)s  [files or file lists] [options]"
    description = """Compute the instrument response diagnostics from
a set of MC merit files.
"""
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--aliases', default=[], action='append',
                        type=str, help='Set a yaml file that contains a '
                        'set of Merit aliases defined as key/value pairs. Also accepts '
                        'event class XML files.  Note '
                        'that the selection string can use any aliases defined '
                        'in this file.')

    parser.add_argument('--output', default='output.fits', type=str,
                        help='Output filename.')

    parser.add_argument('--selection', default=None, type=str, action='append',
                        help='Event class selection to evaluate (may be given '
                        'multiple times; defaults to SOURCE).')

    parser.add_argument('files', nargs='+', default=None,
                        help='MERIT files or MERIT file lists.')

    args = parser.parse_args()

    ROOT.TFormula.SetMaxima(2000, 2000, 2000)

    # Assemble list of root files
    merit_files = get_files(args.files)

    chain = ROOT.TChain("MeritTuple")
    load_chain(chain, merit_files)

    aliases = load_aliases(args.aliases)
    for k, v in sorted(aliases.items()):
        chain.SetAlias(k, v)

    chain_job = ROOT.TChain('jobinfo')
    load_chain(chain_job, merit_files)

    cthmin = 0.0
    cthmax = 1.0
    cthdelta = 0.1
    cthnbin = int((cthmax - cthmin) / cthdelta)

    logemin = 1.25
    logemax = 5.75
    logedelta = 0.125
    logenbin = int((logemax - logemin) / logedelta)

    logebins = np.linspace(logemin, logemax, logenbin + 1)
    cthbins = np.linspace(cthmin, cthmax, cthnbin + 1)

    ectr = 10**(0.5 * (logebins[1:] + logebins[:-1]))

    cols = [Column(name='name', dtype='S32'),
            Column(name='ectr', dtype='f8', unit='MeV', shape=(36,)),
            Column(name='aeff', dtype='f8', unit='m^2', shape=(36, 10)),
            Column(name='acceptance', dtype='f8', unit='m^2 sr', shape=(36,))]

    tab = Table(cols)
    ngen = getGeneratedEvents(chain_job)

    classes = ['SOURCE']
    if args.selection is not None:
        classes = args.selection

    for c in sorted(classes):
        print(c)

        aeff, aeff_err = calc_aeff(chain, c, ngen, logebins, cthbins)
        acc = np.sum(aeff, axis=1) * 2 * np.pi / cthnbin
        tab.add_row([c, ectr, aeff, acc])

    hdulist = fits.HDUList()
    hdulist.append(fits.table_to_hdu(tab))
    hdulist.writeto(args.output, overwrite=True)
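Multidimensional columns such as 'aeff' above are written by table_to_hdu as
vector columns with TDIMn keywords and round-trip through Table.read; a
read-back sketch using the script's default output name:

from astropy.table import Table

tab = Table.read('output.fits')   # first table HDU
aeff = tab['aeff']                # shape (n_rows, 36, 10), unit m^2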
Example #59
0
                try:
                    os.unlink(name)
                    os.unlink(name+".fits")
                except:
                    pass
                print("Doing {}".format(name))
                try:
                    obj_file = mop_file.Parser(expnum, ccd, extension)
                    obj_file.parse()
                except Exception as ex:
                    print(ex)
                    continue
                t = numpy.all([datasec[0] < obj_file.data['X'], obj_file.data['X'] < datasec[1],
                               datasec[2] < obj_file.data['Y'], obj_file.data['Y'] < datasec[3]], axis=0)
                print("Source remaining after datasec cut: {} of {}".format(len(obj_file.data[t]['X']), len(t)))
                table_hdu = fits.table_to_hdu(obj_file.data[t])
                table_hdu.header['CATALOG'] = name
                table_hdu.header['EXTNAME'] = ext
                hdu_list.append(table_hdu)
                del table_hdu
                del obj_file
                os.unlink(name)

            name = "{}p{:02d}.{}".format(expnum, ccd, 'obj.fits')
            if os.access(name, os.F_OK):
                os.unlink(name)
            hdu_list.writeto(name)
            uri = storage.dbimages_uri(expnum, ccd, 'p', ext=".obj.fits")
            print(name+" -> "+uri)
            count = 0
            while count < 10:
Example #60
0
def save_potential_target_data(total_unique_targs, output_filename):
    hdu = fits.table_to_hdu(total_unique_targs)
    hdu.writeto(output_filename)
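Writing a lone table HDU like this relies on astropy to prepend a minimal
primary HDU automatically; an explicit equivalent (a sketch) that also
tolerates pre-existing output:

from astropy.io import fits

def save_potential_target_data(total_unique_targs, output_filename):
    hdulist = fits.HDUList([fits.PrimaryHDU(),
                            fits.table_to_hdu(total_unique_targs)])
    hdulist.writeto(output_filename, overwrite=True)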