def table_to_hdu(table, character_as_bytes=False):
    """
    Convert an `~astropy.table.Table` object to a FITS
    `~astropy.io.fits.BinTableHDU`.

    Parameters
    ----------
    table : astropy.table.Table
        The table to convert.
    character_as_bytes : bool
        Whether to return bytes for string columns when accessed from the
        HDU.  By default this is `False` and (unicode) strings are returned,
        but for large tables this may use up a lot of memory.

    Returns
    -------
    table_hdu : `~astropy.io.fits.BinTableHDU`
        The FITS binary table HDU.
    """
    # Avoid circular imports
    from .connect import is_column_keyword, REMOVE_KEYWORDS
    from .column import python_to_tdisp

    # Header to store Time related metadata
    hdr = None

    # Not all tables with mixin columns are supported
    if table.has_mixin_columns:
        # Import is done here, in order to avoid it at build time as erfa is
        # not yet available then.
        from astropy.table.column import BaseColumn
        from astropy.time import Time
        from astropy.units import Quantity
        from .fitstime import time_to_fits

        # Only those columns which are instances of BaseColumn, Quantity or
        # Time can be written
        unsupported_cols = table.columns.not_isinstance(
            (BaseColumn, Quantity, Time))
        if unsupported_cols:
            unsupported_names = [col.info.name for col in unsupported_cols]
            raise ValueError(
                'cannot write table with mixin column(s) {0}'.format(
                    unsupported_names))

        time_cols = table.columns.isinstance(Time)
        if time_cols:
            table, hdr = time_to_fits(table)

    # Create a new HDU object
    if table.masked:
        # float column's default mask value needs to be NaN
        for column in table.columns.values():
            fill_value = column.get_fill_value()
            if column.dtype.kind == 'f' and np.allclose(fill_value, 1e20):
                column.set_fill_value(np.nan)

        # TODO: it might be better to construct the FITS table directly from
        # the Table columns, rather than go via a structured array.
        table_hdu = BinTableHDU.from_columns(np.array(table.filled()),
                                             header=hdr,
                                             character_as_bytes=True)
        for col in table_hdu.columns:
            # Binary FITS tables support TNULL *only* for integer data columns
            # TODO: Determine a schema for handling non-integer masked columns
            # in FITS (if at all possible)
            int_formats = ('B', 'I', 'J', 'K')
            if not (col.format in int_formats or
                    col.format.p_format in int_formats):
                continue

            # The astype is necessary because if the string column is less
            # than one character, the fill value will be N/A by default which
            # is too long, and so no values will get masked.
            fill_value = table[col.name].get_fill_value()
            col.null = fill_value.astype(table[col.name].dtype)
    else:
        table_hdu = BinTableHDU.from_columns(
            np.array(table.filled()), header=hdr,
            character_as_bytes=character_as_bytes)

    # Set units and format display for output HDU
    for col in table_hdu.columns:
        if table[col.name].info.format is not None:
            # check for boolean types, special format case
            logical = table[col.name].info.dtype == bool

            tdisp_format = python_to_tdisp(table[col.name].info.format,
                                           logical_dtype=logical)
            if tdisp_format is not None:
                col.disp = tdisp_format

        unit = table[col.name].unit
        if unit is not None:
            # Local imports to avoid importing units when it is not required,
            # e.g. for command-line scripts
            from astropy.units import Unit
            from astropy.units.format.fits import UnitScaleError

            try:
                col.unit = unit.to_string(format='fits')
            except UnitScaleError:
                scale = unit.scale
                raise UnitScaleError(
                    "The column '{0}' could not be stored in FITS format "
                    "because it has a scale '({1})' that "
                    "is not recognized by the FITS standard. Either scale "
                    "the data or change the units.".format(
                        col.name, str(scale)))
            except ValueError:
                warnings.warn(
                    "The unit '{0}' could not be saved to FITS format".format(
                        unit.to_string()), AstropyUserWarning)

            # Try creating a Unit to issue a warning if the unit is not FITS
            # compliant
            Unit(col.unit, format='fits', parse_strict='warn')

    # Column-specific override keywords for coordinate columns
    coord_meta = table.meta.pop('__coordinate_columns__', {})
    for col_name, col_info in coord_meta.items():
        col = table_hdu.columns[col_name]
        # Set the column coordinate attributes from data saved earlier.
        # Note: have to set all three, even if we have no data.
        for attr in 'coord_type', 'coord_unit', 'time_ref_pos':
            setattr(col, attr, col_info.get(attr, None))

    for key, value in table.meta.items():
        if is_column_keyword(key.upper()) or key.upper() in REMOVE_KEYWORDS:
            warnings.warn(
                "Meta-data keyword {0} will be ignored since it conflicts "
                "with a FITS reserved keyword".format(key),
                AstropyUserWarning)
            continue

        # Convert to FITS format
        if key == 'comments':
            key = 'comment'

        if isinstance(value, list):
            for item in value:
                try:
                    table_hdu.header.append((key, item))
                except ValueError:
                    warnings.warn(
                        "Attribute `{0}` of type {1} cannot be added to "
                        "FITS Header - skipping".format(key, type(value)),
                        AstropyUserWarning)
        else:
            try:
                table_hdu.header[key] = value
            except ValueError:
                warnings.warn(
                    "Attribute `{0}` of type {1} cannot be added to FITS "
                    "Header - skipping".format(key, type(value)),
                    AstropyUserWarning)

    return table_hdu
def table_to_hdu(table, character_as_bytes=False):
    """
    Convert an `~astropy.table.Table` object to a FITS
    `~astropy.io.fits.BinTableHDU`.

    Parameters
    ----------
    table : astropy.table.Table
        The table to convert.
    character_as_bytes : bool
        Whether to return bytes for string columns when accessed from the
        HDU.  By default this is `False` and (unicode) strings are returned,
        but for large tables this may use up a lot of memory.

    Returns
    -------
    table_hdu : `~astropy.io.fits.BinTableHDU`
        The FITS binary table HDU.
    """
    # Avoid circular imports
    from .connect import is_column_keyword, REMOVE_KEYWORDS
    from .column import python_to_tdisp

    # Header to store Time related metadata
    hdr = None

    # Not all tables with mixin columns are supported
    if table.has_mixin_columns:
        # Import is done here, in order to avoid it at build time as erfa is
        # not yet available then.
        from astropy.table.column import BaseColumn
        from astropy.time import Time
        from astropy.units import Quantity
        from .fitstime import time_to_fits

        # Only those columns which are instances of BaseColumn, Quantity or
        # Time can be written
        unsupported_cols = table.columns.not_isinstance(
            (BaseColumn, Quantity, Time))
        if unsupported_cols:
            unsupported_names = [col.info.name for col in unsupported_cols]
            raise ValueError(f'cannot write table with mixin column(s) '
                             f'{unsupported_names}')

        time_cols = table.columns.isinstance(Time)
        if time_cols:
            table, hdr = time_to_fits(table)

    # Create a new HDU object
    tarray = table.as_array()
    if isinstance(tarray, np.ma.MaskedArray):
        # Fill masked values carefully:
        # float column's default mask value needs to be NaN and
        # string column's default mask should be an empty string.
        # Note: getting the fill value for the structured array is
        # more reliable than for individual columns for string entries.
        # (no 'N/A' for a single-element string, where it should be 'N').
        default_fill_value = np.ma.default_fill_value(tarray.dtype)
        for colname, (coldtype, _) in tarray.dtype.fields.items():
            if np.all(tarray.fill_value[colname] ==
                      default_fill_value[colname]):
                # Since multi-element columns with dtypes such as '2f8' have
                # a subdtype, we should look up the type of column on that.
                coltype = (coldtype.subdtype[0].type if coldtype.subdtype
                           else coldtype.type)
                if issubclass(coltype, np.complexfloating):
                    tarray.fill_value[colname] = complex(np.nan, np.nan)
                elif issubclass(coltype, np.inexact):
                    tarray.fill_value[colname] = np.nan
                elif issubclass(coltype, np.character):
                    tarray.fill_value[colname] = ''

        # TODO: it might be better to construct the FITS table directly from
        # the Table columns, rather than go via a structured array.
        table_hdu = BinTableHDU.from_columns(
            tarray.filled(), header=hdr,
            character_as_bytes=character_as_bytes)
        for col in table_hdu.columns:
            # Binary FITS tables support TNULL *only* for integer data columns
            # TODO: Determine a schema for handling non-integer masked columns
            # with non-default fill values in FITS (if at all possible).
            int_formats = ('B', 'I', 'J', 'K')
            if not (col.format in int_formats or
                    col.format.p_format in int_formats):
                continue

            fill_value = tarray[col.name].fill_value
            col.null = fill_value.astype(int)
    else:
        table_hdu = BinTableHDU.from_columns(
            tarray, header=hdr, character_as_bytes=character_as_bytes)

    # Set units and format display for output HDU
    for col in table_hdu.columns:
        if table[col.name].info.format is not None:
            # check for boolean types, special format case
            logical = table[col.name].info.dtype == bool

            tdisp_format = python_to_tdisp(table[col.name].info.format,
                                           logical_dtype=logical)
            if tdisp_format is not None:
                col.disp = tdisp_format

        unit = table[col.name].unit
        if unit is not None:
            # Local imports to avoid importing units when it is not required,
            # e.g. for command-line scripts
            from astropy.units import Unit
            from astropy.units.format.fits import UnitScaleError

            try:
                col.unit = unit.to_string(format='fits')
            except UnitScaleError:
                scale = unit.scale
                raise UnitScaleError(
                    f"The column '{col.name}' could not be stored in FITS "
                    f"format because it has a scale '({str(scale)})' that "
                    f"is not recognized by the FITS standard. Either scale "
                    f"the data or change the units.")
            except ValueError:
                # Warn that the unit is lost, but let the details depend on
                # whether the column was serialized (because it was a
                # quantity), since then the unit can be recovered by astropy.
                warning = (
                    f"The unit '{unit.to_string()}' could not be saved in "
                    f"native FITS format ")
                if any('SerializedColumn' in item and
                       'name: ' + col.name in item
                       for item in table.meta.get('comments', [])):
                    warning += (
                        "and hence will be lost to non-astropy fits readers. "
                        "Within astropy, the unit can roundtrip using QTable, "
                        "though one has to enable the unit before reading.")
                else:
                    warning += (
                        "and cannot be recovered in reading. It can roundtrip "
                        "within astropy by using QTable both to write and "
                        "read back, though one has to enable the unit before "
                        "reading.")
                warnings.warn(warning, AstropyUserWarning)

            else:
                # Try creating a Unit to issue a warning if the unit is not
                # FITS compliant
                Unit(col.unit, format='fits', parse_strict='warn')

    # Column-specific override keywords for coordinate columns
    coord_meta = table.meta.pop('__coordinate_columns__', {})
    for col_name, col_info in coord_meta.items():
        col = table_hdu.columns[col_name]
        # Set the column coordinate attributes from data saved earlier.
        # Note: have to set these, even if we have no data.
        for attr in 'coord_type', 'coord_unit':
            setattr(col, attr, col_info.get(attr, None))
        trpos = col_info.get('time_ref_pos', None)
        if trpos is not None:
            setattr(col, 'time_ref_pos', trpos)

    for key, value in table.meta.items():
        if is_column_keyword(key.upper()) or key.upper() in REMOVE_KEYWORDS:
            warnings.warn(
                f"Meta-data keyword {key} will be ignored since it conflicts "
                f"with a FITS reserved keyword", AstropyUserWarning)
            continue

        # Convert to FITS format
        if key == 'comments':
            key = 'comment'

        if isinstance(value, list):
            for item in value:
                try:
                    table_hdu.header.append((key, item))
                except ValueError:
                    warnings.warn(
                        f"Attribute `{key}` of type {type(value)} cannot be "
                        f"added to FITS Header - skipping",
                        AstropyUserWarning)
        else:
            try:
                table_hdu.header[key] = value
            except ValueError:
                warnings.warn(
                    f"Attribute `{key}` of type {type(value)} cannot be "
                    f"added to FITS Header - skipping",
                    AstropyUserWarning)

    return table_hdu
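

# A hedged sketch of the QTable round-trip that the unit warning above points
# to (not part of the module itself).  It assumes a custom unit with no FITS
# representation; the unit name 'beam_custom' and the file name
# 'weird_unit.fits' are illustrative only, and the recovery-on-read behaviour
# is taken from the warning text rather than demonstrated exhaustively here.
def _example_unit_roundtrip():
    import astropy.units as u
    from astropy.table import QTable

    # A unit the FITS standard does not know about.
    weird = u.def_unit('beam_custom')

    t = QTable({'flux': [1.0, 2.0] * weird})
    # Writing emits the "could not be saved in native FITS format" warning.
    t.write('weird_unit.fits', overwrite=True)

    # Per the warning above, reading back with QTable should recover the
    # unit, provided the unit is enabled first.
    with u.add_enabled_units([weird]):
        t2 = QTable.read('weird_unit.fits')
    return t2

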
def write_table_fits(input, output, overwrite=False):
    """
    Write a Table object to a FITS file

    Parameters
    ----------
    input : Table
        The table to write out.
    output : str
        The filename to write the table to.
    overwrite : bool
        Whether to overwrite any existing file without warning.
    """

    # Tables with mixin columns are not supported
    if input.has_mixin_columns:
        mixin_names = [name for name, col in input.columns.items()
                       if not isinstance(col, input.ColumnClass)]
        raise ValueError('cannot write table with mixin column(s) {0} to FITS'
                         .format(mixin_names))

    # Check if output file already exists
    if isinstance(output, str) and os.path.exists(output):
        if overwrite:
            os.remove(output)
        else:
            raise IOError("File exists: {0}".format(output))

    # Create a new HDU object
    if input.masked:
        # float column's default mask value needs to be NaN
        for column in input.columns.values():
            fill_value = column.get_fill_value()
            if column.dtype.kind == 'f' and np.allclose(fill_value, 1e20):
                column.set_fill_value(np.nan)

        fits_rec = FITS_rec.from_columns(np.array(input.filled()))
        table_hdu = BinTableHDU(fits_rec)
        for col in table_hdu.columns:
            # Binary FITS tables support TNULL *only* for integer data columns
            # TODO: Determine a schema for handling non-integer masked columns
            # in FITS (if at all possible)
            int_formats = ('B', 'I', 'J', 'K')
            if not (col.format in int_formats or
                    col.format.p_format in int_formats):
                continue

            # The astype is necessary because if the string column is less
            # than one character, the fill value will be N/A by default which
            # is too long, and so no values will get masked.
            fill_value = input[col.name].get_fill_value()
            col.null = fill_value.astype(input[col.name].dtype)
    else:
        fits_rec = FITS_rec.from_columns(np.array(input.filled()))
        table_hdu = BinTableHDU(fits_rec)

    # Set units for output HDU
    for col in table_hdu.columns:
        unit = input[col.name].unit
        if unit is not None:
            try:
                col.unit = unit.to_string(format='fits')
            except UnitScaleError:
                scale = unit.scale
                raise UnitScaleError(
                    "The column '{0}' could not be stored in FITS format "
                    "because it has a scale '({1})' that "
                    "is not recognized by the FITS standard. Either scale "
                    "the data or change the units.".format(col.name,
                                                           str(scale)))
            except ValueError:
                warnings.warn(
                    "The unit '{0}' could not be saved to FITS format".format(
                        unit.to_string()), AstropyUserWarning)

    for key, value in input.meta.items():
        if is_column_keyword(key.upper()) or key.upper() in REMOVE_KEYWORDS:
            warnings.warn(
                "Meta-data keyword {0} will be ignored since it conflicts "
                "with a FITS reserved keyword".format(key),
                AstropyUserWarning)
            continue

        if isinstance(value, list):
            for item in value:
                try:
                    table_hdu.header.append((key, item))
                except ValueError:
                    warnings.warn(
                        "Attribute `{0}` of type {1} cannot be written to "
                        "FITS files - skipping".format(key, type(value)),
                        AstropyUserWarning)
        else:
            try:
                table_hdu.header[key] = value
            except ValueError:
                warnings.warn(
                    "Attribute `{0}` of type {1} cannot be written to FITS "
                    "files - skipping".format(key, type(value)),
                    AstropyUserWarning)

    # Write out file
    table_hdu.writeto(output)
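

# A minimal usage sketch for the writer above (not part of the module itself).
# In practice this function is registered with the I/O registry as the FITS
# writer, so it is normally reached through ``Table.write`` rather than called
# directly; the file name 'spectrum.fits' and the table contents below are
# illustrative assumptions only.
def _example_write_table_fits():
    from astropy.table import Table

    t = Table({'wavelength': [500.0, 600.0], 'flux': [1.2, 3.4]})
    t['wavelength'].unit = 'nm'             # stored as TUNITn in the header
    t.meta['OBSERVER'] = 'example'          # copied into the table header
    t.write('spectrum.fits', format='fits', overwrite=True)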