def hdf5_reader(filename, format='auto', auto_merge=False, **kwargs):
    """
    Read in all datasets from an HDF5 file.

    Parameters
    ----------
    filename : str
        The pathname to the HDF5 file.
    format : str
        Unused here; accepted for signature compatibility with the
        generic data-factory interface.
    auto_merge : bool
        If `True`, numerical datasets that share the same shape are
        merged into a single Data object instead of one Data each.

    Returns
    -------
    list of Data
        One Data object per dataset (or per merged group of
        same-shaped datasets). Non-numeric datasets are read as
        tables, one component per 1-d column.
    """
    import h5py
    from astropy.table import Table

    file_handle = h5py.File(filename, 'r')

    try:
        # Collect every dataset in the file (recursively, via the
        # module-level helper).
        datasets = extract_hdf5_datasets(file_handle)

        # Strip the extension for labelling; fall back to the full
        # basename for extension-less files.
        label_base = os.path.basename(filename).rpartition('.')[0]
        if not label_base:
            label_base = os.path.basename(filename)

        data_by_shape = {}
        groups = OrderedDict()

        for key in datasets:
            label = '{0}[{1}]'.format(label_base, key)
            if datasets[key].dtype.kind in ('f', 'i'):
                # Read the dataset into memory exactly once.
                # ``dataset[()]`` is the supported replacement for the
                # ``.value`` attribute, which was deprecated and then
                # removed in h5py 3.0; it also avoids re-reading the
                # full array on every access.
                values = datasets[key][()]
                if auto_merge and values.shape in data_by_shape:
                    data = data_by_shape[values.shape]
                else:
                    data = Data(label=label)
                    data_by_shape[values.shape] = data
                    groups[label] = data
                data.add_component(component=values, label=key)
            else:
                # Non-numeric dataset: treat as a table and add each
                # 1-d column as a separate component.
                table = Table.read(datasets[key], format='hdf5')
                data = Data(label=label)
                groups[label] = data
                for column_name in table.columns:
                    column = table[column_name]
                    if column.ndim == 1:
                        component = Component(column, units=column.unit)
                        data.add_component(component=component,
                                           label=column_name)
                    else:
                        warnings.warn("HDF5: Ignoring vector "
                                      "column {0}".format(column_name))
    finally:
        # Close the file even if reading a dataset raised, so the
        # handle is never leaked.
        file_handle.close()

    return [groups[idx] for idx in groups]
def fits_reader(source, auto_merge=False, exclude_exts=None, label=None):
    """
    Read in all extensions from a FITS file.

    Parameters
    ----------
    source: str or HDUList
        The pathname to the FITS file.
        If an HDUList is passed in, simply use that.

    auto_merge: bool
        Merge extensions that have the same shape
        and only one has a defined WCS.

    exclude_exts: [hdu, ] or [index, ]
        List of HDU's to exclude from reading.
        This can be a list of HDU's or a list
        of HDU indexes.

    label: str or None
        If given, used as the base for the Data labels instead of the
        file's basename.

    Returns
    -------
    list of Data
        One Data object per kept extension (or per merged group of
        same-shaped image extensions when ``auto_merge`` applies).
    """
    from astropy.io import fits
    from astropy.table import Table

    exclude_exts = exclude_exts or []
    if not isinstance(source, fits.hdu.hdulist.HDUList):
        # A path was given: open and repair any fixable standard
        # violations before reading.
        hdulist = fits.open(source)
        hdulist.verify('fix')
    else:
        hdulist = source
    groups = OrderedDict()
    extension_by_shape = OrderedDict()

    if label is not None:
        label_base = label
    else:
        # Derive the label base from the filename, stripping the
        # extension; in-memory HDULists have no filename.
        hdulist_name = hdulist.filename()
        if hdulist_name is None:
            hdulist_name = "HDUList"

        label_base = basename(hdulist_name).rpartition('.')[0]
        if not label_base:
            label_base = basename(hdulist_name)

    # Create a new image Data.
    # NOTE: this closure reads ``hdu_name``, ``coords`` and ``shape``
    # from the enclosing loop scope at call time (late binding), and
    # registers the new Data in ``groups``/``extension_by_shape`` as a
    # side effect.
    def new_data():
        label = '{0}[{1}]'.format(label_base, hdu_name)
        data = Data(label=label)
        data.coords = coords
        groups[hdu_name] = data
        extension_by_shape[shape] = hdu_name
        return data

    for extnum, hdu in enumerate(hdulist):
        hdu_name = hdu.name if hdu.name else "HDU{0}".format(extnum)
        # Skip empty extensions and anything excluded by name or index.
        if (hdu.data is not None and
                hdu.data.size > 0 and
                hdu_name not in exclude_exts and
                extnum not in exclude_exts):
            if is_image_hdu(hdu):
                shape = hdu.data.shape
                coords = coordinates_from_header(hdu.header)
                # With auto_merge, an image without its own WCS is
                # folded into an earlier Data of the same shape if one
                # exists; otherwise it starts a new Data.
                if not auto_merge or has_wcs(coords):
                    data = new_data()
                else:
                    try:
                        data = groups[extension_by_shape[shape]]
                    except KeyError:
                        data = new_data()
                data.add_component(component=hdu.data, label=hdu_name)
            elif is_table_hdu(hdu):
                # Loop through columns and make component list
                table = Table(hdu.data)
                label = '{0}[{1}]'.format(label_base, hdu_name)
                data = Data(label=label)
                groups[hdu_name] = data
                for column_name in table.columns:
                    column = table[column_name]
                    component = Component(column, units=column.unit)
                    data.add_component(component=component,
                                       label=column_name)
    # NOTE(review): the HDUList opened from a path is never closed
    # here — presumably the data is memory-mapped and must stay open;
    # confirm before adding a close().
    return [groups[idx] for idx in groups]
def _load_component(rec, context): if 'log' in rec: return context.object(rec['log']).component(rec['log_item']) return Component(data=context.object(rec['data']), units=rec['units'])