Example #1
def test_read_from_fileobj(self, tmpdir):
    filename = str(tmpdir.join('test_read_from_fileobj.fits'))
    hdu = BinTableHDU(self.data)
    hdu.writeto(filename, overwrite=True)
    with open(filename, 'rb') as f:
        t = Table.read(f)
    assert equal_data(t, self.data)
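For reference, the same round trip outside of a test class; a minimal sketch with illustrative data standing in for self.data:

# Minimal standalone sketch (illustrative data; assumes numpy and astropy
# are installed): write a binary table HDU to disk, then read it back
# through an open binary file object.
import numpy as np
from astropy.io.fits import BinTableHDU
from astropy.table import Table

data = np.array([(1, 2.5), (3, 4.5)], dtype=[('a', '<i8'), ('b', '<f8')])
BinTableHDU(data).writeto('test_read_from_fileobj.fits', overwrite=True)

with open('test_read_from_fileobj.fits', 'rb') as f:
    t = Table.read(f)   # Table.read accepts an open file object
print(t)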
Example #2
import argparse
import os

import yaml
from astropy.io.fits import BinTableHDU, Column, FITS_rec

# build_column_array, to_xml and mkdir are helper functions defined elsewhere
# in the same module (the astropy.io.fits import path above is assumed).


def main():

    usage = "usage: %(prog)s [archive file]"
    description = "Build the extended archive from the master archive YAML file."
    parser = argparse.ArgumentParser(usage=usage, description=description)

    parser.add_argument('--outname', default=None, required=True)

    parser.add_argument('--vernum', default=0, required=True)

    parser.add_argument('masterfile',
                        help='Extended archive master YAML file.')

    args = parser.parse_args()

    npar_max = 5
    with open(args.masterfile) as f:
        sources = yaml.safe_load(f)
    cols = [
        Column(name='Source_Name', format='18A'),
        Column(name='RAJ2000', format='E', unit='deg', disp='F8.4'),
        Column(name='DEJ2000', format='E', unit='deg', disp='F8.4'),
        Column(name='GLON', format='E', unit='deg', disp='F8.4'),
        Column(name='GLAT', format='E', unit='deg', disp='F8.4'),
        Column(name='Photon_Flux', format='E', unit='ph cm-2 s-1',
               disp='E8.2'),
        Column(name='Energy_Flux',
               format='E',
               unit='erg cm-2 s-1',
               disp='E8.2'),
        Column(name='Model_Form', format='12A'),
        Column(name='Model_SemiMajor', format='E', unit='deg', disp='E7.3'),
        Column(name='Model_SemiMinor', format='E', unit='deg', disp='E7.3'),
        Column(name='Model_PosAng', format='E', unit='deg', disp='E6.1'),
        Column(name='Spatial_Function', format='15A'),
        Column(name='Spatial_Filename', format='50A'),
        Column(name='Spectral_Function', format='12A'),
        Column(name='Spectral_Filename', format='40A'),
        Column(name='Name_1FGL', format='18A'),
        Column(name='Name_2FGL', format='18A'),
        Column(name='Name_3FGL', format='18A'),
        Column(name='Spectral_Param_Name', format='45A9'),
        Column(name='Spectral_Param_Value',
               format='E',
               dim=str(npar_max),
               disp='E9.4'),
        Column(name='Spectral_Param_Error',
               format='E',
               dim=str(npar_max),
               disp='E9.4'),
        Column(name='Spectral_Param_Scale', format='E', dim=str(npar_max)),
    ]

    for c in cols:
        c.array = build_column_array(c.name, sources, npar_max)

    record = FITS_rec.from_columns(cols)
    record.sort(order="RAJ2000")

    outdir = args.outname + "_v" + args.vernum
    mkdir(outdir)

    fitsname = "LAT_extended_sources_v" + args.vernum + ".fits"
    output = BinTableHDU(record)
    output.writeto(os.path.join(outdir, fitsname), overwrite=True)

    xmldir = os.path.join(outdir, 'XML')
    mkdir(xmldir)

    for k, v in sources.items():
        xmlpath = os.path.join(xmldir,
                               v['Source_Name'].replace(' ', '') + '.xml')
        to_xml(xmlpath, v['Source_Name'], v)
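Given the argument parser above, the script expects an output name, a version number and the master YAML file; the script and file names below are illustrative only:

    python build_extended_archive.py --outname LAT_extended_sources --vernum 19 extended_archive_master.yaml

This would create the directory LAT_extended_sources_v19/ containing LAT_extended_sources_v19.fits and an XML/ subdirectory with one XML file per source.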
Example #3
import os

import numpy as np
from astropy import log
from astropy.io.fits import BinTableHDU

# is_column_keyword and REMOVE_KEYWORDS come from the same module as this
# function (the import paths above are assumed); the basestring check makes
# this version Python 2 only.


def write_table_fits(input, output, overwrite=False):
    """
    Write a Table object to a FITS file

    Parameters
    ----------
    input : Table
        The table to write out.
    output : str
        The filename to write the table to.
    overwrite : bool
        Whether to overwrite any existing file without warning.
    """

    # Check if output file already exists
    if isinstance(output, basestring) and os.path.exists(output):
        if overwrite:
            os.remove(output)
        else:
            raise IOError("File exists: {0}".format(output))

    # Create a new HDU object
    if input.masked:
        table_hdu = BinTableHDU(np.array(input.filled()))
        for col in table_hdu.columns:
            # The astype is necessary because if the string column is less
            # than one character, the fill value will be N/A by default which
            # is too long, and so no values will get masked.
            fill_value = input[col.name].get_fill_value()
            col.null = fill_value.astype(input[col.name].dtype)
    else:
        table_hdu = BinTableHDU(np.array(input))

    # Set units for output HDU
    for col in table_hdu.columns:
        if input[col.name].units is not None:
            col.unit = input[col.name].units.to_string(format='fits')

    for key, value in input.meta.items():

        if is_column_keyword(key.upper()) or key.upper() in REMOVE_KEYWORDS:
            log.warn("Meta-data keyword {0} will be ignored since it "
                     "conflicts with a FITS reserved keyword".format(key))
            # Skip the keyword so it really is ignored, as the warning says.
            continue

        if isinstance(value, list):
            for item in value:
                try:
                    table_hdu.header.append((key, item))
                except ValueError:
                    log.warn("Attribute `{0}` of type {1} cannot be written "
                             "to FITS files - skipping".format(key,
                                                               type(value)))
        else:
            try:
                table_hdu.header[key] = value
            except ValueError:
                log.warn("Attribute `{0}` of type {1} cannot be written to "
                         "FITS files - skipping".format(key, type(value)))

    # Write out file
    table_hdu.writeto(output)
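A minimal usage sketch for the function above (illustrative data; note that the function relies on the older Column.units attribute, so it targets the astropy version it was written against):

import numpy as np
from astropy.table import Table

# Build a small table with two columns and one metadata keyword.
t = Table({'wavelength': np.array([4000.0, 5000.0]),
           'flux': np.array([1.2, 3.4])})
t.meta['OBSERVER'] = 'example'

write_table_fits(t, 'spectrum.fits', overwrite=True)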
Example #4
This is the same writer as Example #3, except that the Python 2 only basestring check is replaced with six.string_types, so the isinstance test works under both Python 2 and 3 (add import six to the imports shown in Example #3).
def write_table_fits(input, output, overwrite=False):
    """
    Write a Table object to a FITS file

    Parameters
    ----------
    input : Table
        The table to write out.
    output : str
        The filename to write the table to.
    overwrite : bool
        Whether to overwrite any existing file without warning.
    """

    # Check if output file already exists
    if isinstance(output, six.string_types) and os.path.exists(output):
        if overwrite:
            os.remove(output)
        else:
            raise IOError("File exists: {0}".format(output))

    # Create a new HDU object
    if input.masked:
        table_hdu = BinTableHDU(np.array(input.filled()))
        for col in table_hdu.columns:
            # The astype is necessary because if the string column is less
            # than one character, the fill value will be N/A by default which
            # is too long, and so no values will get masked.
            fill_value = input[col.name].get_fill_value()
            col.null = fill_value.astype(input[col.name].dtype)
    else:
        table_hdu = BinTableHDU(np.array(input))

    # Set units for output HDU
    for col in table_hdu.columns:
        if input[col.name].units is not None:
            col.unit = input[col.name].units.to_string(format='fits')

    for key, value in input.meta.items():

        if is_column_keyword(key.upper()) or key.upper() in REMOVE_KEYWORDS:
            log.warn("Meta-data keyword {0} will be ignored since it "
                     "conflicts with a FITS reserved keyword".format(key))
            # Skip the keyword so it really is ignored, as the warning says.
            continue

        if isinstance(value, list):
            for item in value:
                try:
                    table_hdu.header.append((key, item))
                except ValueError:
                    log.warn("Attribute `{0}` of type {1} cannot be written "
                             "to FITS files - skipping".format(
                                 key, type(value)))
        else:
            try:
                table_hdu.header[key] = value
            except ValueError:
                log.warn("Attribute `{0}` of type {1} cannot be written to "
                         "FITS files - skipping".format(key, type(value)))

    # Write out file
    table_hdu.writeto(output)