def test_extract(tmpdir):
    hdulist = HDUList()
    image = ImageHDU(np.random.random((25, 25)))
    hdulist.append(image)

    tree = {
        'some_words': 'These are some words',
        'nested': {'a': 100, 'b': 42},
        'list': [x for x in range(10)],
        'image': image.data
    }

    asdf_in_fits = str(tmpdir.join('asdf.fits'))
    with AsdfInFits(hdulist, tree) as aif:
        aif.write_to(asdf_in_fits)

    pure_asdf = str(tmpdir.join('extract.asdf'))
    extract.extract_file(asdf_in_fits, pure_asdf)
    assert os.path.exists(pure_asdf)

    with asdf.open(pure_asdf) as af:
        assert not isinstance(af, AsdfInFits)
        assert_tree_match(tree, af.tree)

def createFromScratch(self, phu, extensions=None):
    """
    Creates an AstroData object from a collection of objects.
    """
    lst = HDUList()
    if phu is not None:
        if isinstance(phu, PrimaryHDU):
            lst.append(phu)
        elif isinstance(phu, Header):
            lst.append(PrimaryHDU(header=deepcopy(phu), data=DELAYED))
        elif isinstance(phu, (dict, list, tuple)):
            p = PrimaryHDU()
            p.header.update(phu)
            lst.append(p)
        else:
            raise ValueError(
                "phu must be a PrimaryHDU or a valid header object")

    # TODO: Verify the contents of extensions...
    if extensions is not None:
        for ext in extensions:
            lst.append(ext)

    return self.getAstroData(lst)

def tofits(filename, data, hdr=None, clobber=False):
    """Simple pyfits wrapper to make saving FITS files easier."""
    hdu = PrimaryHDU(data)
    if hdr is not None:
        # append the cards from the supplied header (Header does not support +=)
        hdu.header.extend(hdr)
    hdulist = HDUList([hdu])
    hdulist.writeto(filename, overwrite=clobber, output_verify='ignore')

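# A minimal usage sketch for the wrapper above (the filename and header card
# are hypothetical): save an array with an extra header card merged in.
import numpy as np
from astropy.io.fits import Header

hdr = Header()
hdr['OBJECT'] = 'M31'
tofits('m31.fits', np.zeros((10, 10)), hdr=hdr, clobber=True)
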
def to_hdu_list(self):
    """Convert to `~astropy.io.fits.HDUList`.
    """
    hdu_list = HDUList()
    for image in self:
        hdu = image.to_image_hdu()
        hdu_list.append(hdu)
    return hdu_list

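# A usage sketch (assuming `images` is an instance of the containing image
# collection class, which is an assumption here): convert the collection to
# an HDUList and write it to disk in one go.
hdu_list = images.to_hdu_list()
hdu_list.writeto('images.fits', overwrite=True)
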
def test_datasets(self):
    records = DALResults.from_result_url('http://example.com/query/dataset')
    record = records[0]
    assert record.getdataurl() == 'http://example.com/querydata/image.fits'
    dataset = record.getdataset()
    HDUList.fromstring(dataset.read())

def export_visibility_to_fits(vis: Visibility, fits_file: str):
    hdu = HDUList([
        PrimaryHDU(),
        configuration_to_hdu(vis.configuration),
        visibility_to_hdu(vis)
    ])
    # FITS is a binary format, so the file must be opened in binary mode
    with open(fits_file, "wb") as f:
        hdu.writeto(f, checksum=True)

def setup_class(self):
    self.data1 = np.array(list(
        zip([1, 2, 3, 4],
            ['a', 'b', 'c', 'd'],
            [2.3, 4.5, 6.7, 8.9])),
        dtype=[('a', int), ('b', 'U1'), ('c', float)])
    self.data2 = np.array(list(
        zip([1.4, 2.3, 3.2, 4.7],
            [2.3, 4.5, 6.7, 8.9])),
        dtype=[('p', float), ('q', float)])
    hdu1 = PrimaryHDU()
    hdu2 = BinTableHDU(self.data1, name='first')
    hdu3 = BinTableHDU(self.data2, name='second')
    self.hdus = HDUList([hdu1, hdu2, hdu3])

def save_fits(self, folder: str) -> str:
    data = self.data.copy()
    assert data.dtype == np.float32 and data.max() <= 1.0 and data.min() >= 0.0, \
        f"{data.dtype} {data.max()} {data.min()}"
    hdu = PrimaryHDU(
        data=self.data,
        header=self.fits_header,
    )
    hdulist = HDUList([hdu])
    path = join(folder, f"{self.key}.fits")
    hdulist.writeto(path, overwrite=True)
    return path

def trim_throughput(indir, outdir):
    '''downsample throughput files'''
    assert os.path.basename(indir) == 'throughput'
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    for targettype in ('elg', 'lrg', 'perfect', 'qso', 'sky', 'star'):
        filename = 'fiberloss-{}.dat'.format(targettype)
        shutil.copy(os.path.join(indir, filename),
                    os.path.join(outdir, filename))

    for filename in ['thru-b.fits', 'thru-r.fits', 'thru-z.fits']:
        fx = fits.open(os.path.join(indir, filename))
        hdus = HDUList()
        hdus.append(fx[0])
        hdus.append(BinTableHDU(fx[1].data[::20], header=fx[1].header))
        hdus.append(BinTableHDU(fx[2].data[::20], header=fx[2].header))
        hdus.writeto(os.path.join(outdir, filename))
        fx.close()

    for filename in ['DESI-0347_blur.ecsv', 'DESI-0347_offset.ecsv',
                     'DESI-0347_random_offset_1.fits']:
        shutil.copy(os.path.join(indir, filename),
                    os.path.join(outdir, filename))

async def write_fits(self, filename: str, hdulist: fits.HDUList,
                     *args: Any, **kwargs: Any) -> None:
    """Convenience function for writing an Image to a FITS file.

    Args:
        filename: Name of file to write.
        hdulist: HDU list to write.
    """
    # serialize the HDUList into an in-memory buffer, then write it out
    # through the async file interface
    async with self.open_file(filename, "wb") as f:
        with io.BytesIO() as bio:
            hdulist.writeto(bio, *args, **kwargs)
            await f.write(bio.getvalue())

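# A usage sketch (hypothetical: `vfs` stands in for an instance of the
# containing class and `hdulist` for an astropy HDUList); extra keyword
# arguments such as overwrite=True are forwarded straight to HDUList.writeto().
async def save_image(vfs, hdulist):
    await vfs.write_fits('image.fits', hdulist, overwrite=True)
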
def from_hdu(hdu):
    '''
    Return a OneDSpectrum from a FITS HDU or HDU list.
    '''
    if isinstance(hdu, HDUList):
        hdul = hdu
        hdu = hdul[0]
    else:
        hdul = HDUList([hdu])

    if hdu.data.ndim != 1:
        raise ValueError("HDU must contain one-dimensional data.")

    meta = {}

    mywcs = wcs.WCS(hdu.header)

    if "BUNIT" in hdu.header:
        unit = convert_bunit(hdu.header["BUNIT"])
        meta["BUNIT"] = hdu.header["BUNIT"]
    else:
        unit = None

    beams = cube_utils.try_load_beams(hdul)

    self = OneDSpectrum(hdu.data, unit=unit, wcs=mywcs, meta=meta,
                        header=hdu.header, beams=beams)

    return self

def setup_method(self, method):
    from astropy.table import Table
    from astropy.io.fits import HDUList, ImageHDU

    Registry().clear()

    x = [1, 2, 3]
    y = [2, 3, 4]
    u = [10, 20, 30, 40]
    v = [20, 40, 60, 80]

    self.xy = {'x': x, 'y': y}
    self.dict_data = {'u': u, 'v': v}
    self.recarray_data = np.rec.array([(0, 1), (2, 3)],
                                      dtype=[(str('a'), int), (str('b'), int)])
    self.astropy_table = Table({'x': x, 'y': y})
    self.bad_data = {'x': x, 'u': u}
    self.hdulist = HDUList([ImageHDU(x, name='PRIMARY')])

    self.x = np.array(x)
    self.y = np.array(y)
    self.u = np.array(u)
    self.v = np.array(v)

def testFits(self):
    """Test I/O with FITS"""
    from astropy.io.fits import HDUList
    fits = HDUList()
    self.fluxTable.toFits(fits)
    ft = FluxTable.fromFits(fits)
    self.assertFluxTable(ft)

def align(hduls, name="SCI", reference=None):
    """
    Aligns the source astronomical image(s) to the reference astronomical image
    \b
    :param hduls: list of FITS files
    :return: list of FITS files with <name> HDU aligned
    """
    hduls_list = [hdul for hdul in hduls]
    sources = [hdul[name] for hdul in hduls_list]
    outputs = []

    if reference is None:
        reference = snr.snr(hduls_list, name)[name]
    # click.echo(reference.header["ORIGNAME"])  # FIXME log ref name

    np_ref = to_np(
        reference,
        "Cannot align to unexpected type {}; expected numpy array or FITS HDU")

    for source in sources:
        np_src = to_np(
            source,
            "Cannot align unexpected type {}; expected numpy array or FITS HDU")
        # possibly unnecessary, but unsure about scoping
        output = np.array([])

        output = astroalign.register(np_src, np_ref)[0]

        if isinstance(source, HDU_TYPES):
            output = PrimaryHDU(output, source.header)

        outputs.append(HDUList([output]))

    return (hdul for hdul in outputs)

def hdulist(self):
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        hdu = self.hdu
        beamhdu = beams_to_bintable(self.beams)
        return HDUList([hdu, beamhdu])

def fake_hdulist(extver=1, version=2, timesys="TDB", telescop="KEPLER"):
    new_header = fake_header(extver, version, timesys, telescop)
    return [HDUList(hdus=[
        PrimaryHDU(header=new_header),
        BinTableHDU(header=new_header, name="LIGHTCURVE")
    ])]

def random_position(image: fits.HDUList, hg_ra: float, hg_dec: float,
                    limit: int = 10, show: bool = False):
    image, path = ff.path_or_hdu(image)

    header = image[0].header
    data = image[0].data

    # Find host galaxy pixel
    wcs_info = wcs.WCS(header)
    hg_x, hg_y = wcs_info.all_world2pix(hg_ra, hg_dec, 0)
    hg_x = int(np.round(hg_x))
    hg_y = int(np.round(hg_y))

    image_copy = data.copy()[hg_y - limit:hg_y + limit,
                             hg_x - limit:hg_x + limit]
    noise = np.median(image_copy)
    image_copy = image_copy - noise
    # 'F' (column-major) replaces the legacy numeric argument flatten(1),
    # which modern numpy no longer accepts
    image_flatten = image_copy.flatten('F')

    i = st.value_from_pdf(np.arange(image_flatten.shape[0]),
                          image_flatten / max(image_flatten))
    i = int(i)
    x, y = np.unravel_index(i, image_copy.shape)

    if show:
        plt.imshow(image_copy)
        plt.scatter(x, y)
        plt.show()
        plt.close()
        plt.plot(image_flatten / max(image_flatten))
        plt.show()

    x += hg_x - limit + np.random.uniform(-0.5, 0.5)
    y += hg_y - limit + np.random.uniform(-0.5, 0.5)

    ra, dec = wcs_info.all_pix2world(x, y, 0)

    if path:
        image.close()

    return x, y, ra, dec

def trim_quickpsf(indir, outdir, filename):
    assert os.path.abspath(indir) != os.path.abspath(outdir)
    infile = os.path.join(indir, filename)
    outfile = os.path.join(outdir, filename)
    fx = fits.open(infile)
    hdus = HDUList()
    hdus.append(fx[0])
    for i in [1, 2, 3]:
        d = fx[i].data
        hdus.append(BinTableHDU(d[::10], header=fx[i].header))
    # `clobber` was removed from astropy; `overwrite` is the current keyword
    hdus.writeto(outfile, overwrite=True)
    fx.close()

def from_hdu(hdu):
    '''
    Return a OneDSpectrum from a FITS HDU or HDU list.
    '''
    if isinstance(hdu, HDUList):
        hdul = hdu
        hdu = hdul[0]
    else:
        hdul = HDUList([hdu])

    if hdu.data.ndim != 1:
        raise ValueError("HDU must contain one-dimensional data.")

    meta = {}

    mywcs = wcs.WCS(hdu.header)

    if "BUNIT" in hdu.header:
        unit = convert_bunit(hdu.header["BUNIT"])
        meta["BUNIT"] = hdu.header["BUNIT"]
    else:
        unit = None

    beams_table = cube_utils.try_load_beams(hdul)
    if beams_table is not None:
        # Convert to a beams object from the table
        beams = Beams(major=u.Quantity(beams_table['BMAJ'], u.arcsec),
                      minor=u.Quantity(beams_table['BMIN'], u.arcsec),
                      pa=u.Quantity(beams_table['BPA'], u.deg),
                      meta=[{key: row[key] for key in beams_table.names
                             if key not in ('BMAJ', 'BPA', 'BMIN')}
                            for row in beams_table])

        self = VaryingResolutionOneDSpectrum(hdu.data, unit=unit, wcs=mywcs,
                                             meta=meta, header=hdu.header,
                                             beams=beams)
    else:
        beam = cube_utils.try_load_beam(hdu.header)

        self = OneDSpectrum(hdu.data, unit=unit, wcs=mywcs, meta=meta,
                            header=hdu.header, beam=beam)

    return self

def setup_class(self):
    self.data1 = np.array(list(
        zip([1, 2, 3, 4],
            ['a', 'b', 'c', 'd'],
            [2.3, 4.5, 6.7, 8.9])),
        dtype=[('a', int), ('b', 'U1'), ('c', float)])
    self.data2 = np.array(list(
        zip([1.4, 2.3, 3.2, 4.7],
            [2.3, 4.5, 6.7, 8.9])),
        dtype=[('p', float), ('q', float)])
    self.data3 = np.array(list(
        zip([1, 2, 3, 4],
            [2.3, 4.5, 6.7, 8.9])),
        dtype=[('A', int), ('B', float)])
    hdu0 = PrimaryHDU()
    hdu1 = BinTableHDU(self.data1, name='first')
    hdu2 = BinTableHDU(self.data2, name='second')
    hdu3 = ImageHDU(np.ones((3, 3)), name='third')
    hdu4 = BinTableHDU(self.data3)

    self.hdus = HDUList([hdu0, hdu1, hdu2, hdu3, hdu4])
    self.hdusb = HDUList([hdu0, hdu3, hdu2, hdu1])
    self.hdus3 = HDUList([hdu0, hdu3, hdu2])
    self.hdus2 = HDUList([hdu0, hdu1, hdu3])
    self.hdus1 = HDUList([hdu0, hdu1])

def _create_fits(self):
    hdul = HDUList(PrimaryHDU())
    h = hdul[0].header
    h.append(Card('name', self.name))
    self._cf_pre_hook(hdul)
    self._cf_add_setup_info(hdul)
    self._cf_post_setup_hook(hdul)
    self._cf_add_summary_statistics(hdul)
    self._cf_add_pipeline_steps(hdul)
    self._cf_post_hook(hdul)
    return hdul

def test_fitsdiff_openfile(tmpdir):
    """Make sure that failing FITSDiff doesn't leave open files."""
    path1 = str(tmpdir.join("file1.fits"))
    path2 = str(tmpdir.join("file2.fits"))

    hdulist = HDUList([PrimaryHDU(), ImageHDU(data=np.zeros(5))])
    hdulist.writeto(path1)
    hdulist[1].data[0] = 1
    hdulist.writeto(path2)

    diff = FITSDiff(path1, path2)
    assert diff.identical, diff.report()

def get_primary_extension(hdu: fits.HDUList):
    """
    Figure out which extension is the primary one (1 if fpacked, 0 otherwise).

    :param hdu: astropy.io.fits.HDUList
    :return: int
    """
    _, file_extension = os.path.splitext(hdu.filename())
    if file_extension == '.fz':
        primary_extension = 1
    else:
        primary_extension = 0
    return primary_extension

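# A usage sketch (the local file name `frame.fits.fz` is hypothetical): read
# the science data from whichever extension actually holds it.
from astropy.io import fits

with fits.open('frame.fits.fz') as hdu:
    ext = get_primary_extension(hdu)
    data = hdu[ext].data
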
def setup_class(self):
    self.data1 = np.array(list(
        zip([1, 2, 3, 4],
            ['a', 'b', 'c', 'd'],
            [2.3, 4.5, 6.7, 8.9])),
        dtype=[(str('a'), int), (str('b'), str('U1')), (str('c'), float)])
    self.data2 = np.array(list(
        zip([1.4, 2.3, 3.2, 4.7],
            [2.3, 4.5, 6.7, 8.9])),
        dtype=[(str('p'), float), (str('q'), float)])
    hdu1 = PrimaryHDU()
    hdu2 = BinTableHDU(self.data1, name='first')
    hdu3 = BinTableHDU(self.data2, name='second')
    self.hdus = HDUList([hdu1, hdu2, hdu3])

def _prepare_hdulist(hdulist, default_extension='SCI', extname_parser=None):
    new_list = []
    highest_ver = 0
    recognized = set()

    if len(hdulist) > 1 or (len(hdulist) == 1 and hdulist[0].data is None):
        # MEF file
        # First get HDUs for which EXTVER is defined
        for n, hdu in enumerate(hdulist):
            if extname_parser:
                extname_parser(hdu)
            ver = hdu.header.get('EXTVER')
            if ver not in (-1, None) and hdu.name:
                highest_ver = max(highest_ver, ver)
            elif not isinstance(hdu, PrimaryHDU):
                continue

            new_list.append(hdu)
            recognized.add(hdu)

        # Then HDUs that miss EXTVER
        for hdu in hdulist:
            if hdu in recognized:
                continue
            elif isinstance(hdu, ImageHDU):
                highest_ver += 1
                if 'EXTNAME' not in hdu.header:
                    hdu.header['EXTNAME'] = (default_extension,
                                             'Added by AstroData')
                if hdu.header.get('EXTVER') in (-1, None):
                    hdu.header['EXTVER'] = (highest_ver, 'Added by AstroData')

            new_list.append(hdu)
            recognized.add(hdu)
    else:
        # Uh-oh, a single image FITS file
        new_list.append(PrimaryHDU(header=hdulist[0].header))
        image = ImageHDU(header=hdulist[0].header, data=hdulist[0].data)
        # Fudge due to apparent issues with assigning ImageHDU from data
        image._orig_bscale = hdulist[0]._orig_bscale
        image._orig_bzero = hdulist[0]._orig_bzero

        for keyw in ('SIMPLE', 'EXTEND'):
            if keyw in image.header:
                del image.header[keyw]
        image.header['EXTNAME'] = (default_extension, 'Added by AstroData')
        image.header['EXTVER'] = (1, 'Added by AstroData')
        new_list.append(image)

    return HDUList(sorted(new_list, key=fits_ext_comp_key))

def get_stamps(self, oid, candid=None):
    """Download stamps for a specific alert.

    Parameters
    ----------
    oid : :py:class:`str`
        object ID in ALeRCE DBs.
    candid : :py:class:`int`
        Candid of the stamp to be displayed.

    Returns
    -------
    :class:`astropy.io.fits.HDUList`
        Science, Template and Difference stamps for a specific alert.
    """
    if candid is None:
        candid = self._get_first_detection(oid)
    try:
        hdulist = HDUList()
        for stamp_type in ["science", "template", "difference"]:
            tmp_hdulist = fits_open(
                "%s?oid=%s&candid=%s&type=%s&format=fits" % (
                    self.config["AVRO_URL"]
                    + self.config["AVRO_ROUTES"]["get_stamp"],
                    oid,
                    candid,
                    stamp_type,
                ))
            hdu = tmp_hdulist[0]
            hdu.header["STAMP_TYPE"] = stamp_type
            hdulist.append(hdu)
        return hdulist
    except HTTPError:
        warnings.warn("AVRO File not found.", RuntimeWarning)
        return None

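# A usage sketch (the object ID is hypothetical and `client` stands in for an
# instance of the containing class): fetch the stamp triplet for an alert's
# first detection and inspect the science cutout, which is appended first.
stamps = client.get_stamps('ZTF18abcdefg')
if stamps is not None:
    science = stamps[0].data
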
def from_hdu(hdu):
    '''
    Return a OneDSpectrum from a FITS HDU or HDU list.
    '''
    if isinstance(hdu, HDUList):
        hdul = hdu
        hdu = hdul[0]
    else:
        hdul = HDUList([hdu])

    if hdu.data.ndim != 1:
        raise ValueError("HDU must contain one-dimensional data.")

    meta = {}

    mywcs = wcs.WCS(hdu.header)

    if "BUNIT" in hdu.header:
        unit = convert_bunit(hdu.header["BUNIT"])
        meta["BUNIT"] = hdu.header["BUNIT"]
    else:
        unit = None

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=FITSWarning)
        beam = cube_utils.try_load_beams(hdul)

    if hasattr(beam, '__len__'):
        beams = beam
    else:
        beams = None

    if beams is not None:
        self = VaryingResolutionOneDSpectrum(hdu.data, unit=unit, wcs=mywcs,
                                             meta=meta, header=hdu.header,
                                             beams=beams)
    else:
        beam = cube_utils.try_load_beam(hdu.header)
        self = OneDSpectrum(hdu.data, unit=unit, wcs=mywcs, meta=meta,
                            header=hdu.header, beam=beam)

    return self

def writeFits(self, filename):
    """Write to FITS file

    This API is intended for use by the LSST data butler, which handles
    translating the desired identity into a filename.

    Parameters
    ----------
    filename : `str`
        Filename of FITS file.
    """
    from astropy.io.fits import HDUList, PrimaryHDU
    fits = HDUList()
    fits.append(PrimaryHDU())
    self._writeImpl(fits)
    with open(filename, "wb") as fd:
        fits.writeto(fd)

def raw_converter_to_calibrated_hdulist(converter):
    # type: (SingleCCDRawConverter) -> HDUList
    """
    TODO: Document me

    :param converter:
    """
    early_dark_pixel_columns = converter.parameters.early_dark_pixel_columns  # type: int
    late_dark_pixel_columns = converter.parameters.late_dark_pixel_columns  # type: int
    # noinspection PyUnresolvedReferences
    left_dark_parts = [raw_slice.pixels[:, :early_dark_pixel_columns]
                       for raw_slice in converter.slices]  # type: list
    # noinspection PyUnresolvedReferences
    right_dark_parts = [raw_slice.pixels[:, -late_dark_pixel_columns:]
                        for raw_slice in converter.slices]  # type: list
    # noinspection PyUnresolvedReferences
    image_parts = [raw_slice.pixels[:, early_dark_pixel_columns:-late_dark_pixel_columns]
                   for raw_slice in converter.slices]  # type: list

    # every second slice is stored mirrored, so flip it back left-to-right
    for i in range(1, len(converter.slices), 2):
        left_dark_parts[i] = numpy.fliplr(left_dark_parts[i])
        right_dark_parts[i] = numpy.fliplr(right_dark_parts[i])
        image_parts[i] = numpy.fliplr(image_parts[i])

    header_with_parameters = set_header_settings(
        converter.parameters,
        raw_converter_parameters,
        converter.conversion_metadata.header)
    header_with_transformation_flags = set_header_settings(
        converter.flags,
        raw_transformation_flags,
        header_with_parameters)

    if converter.conversion_metadata.command is not None:
        header_with_transformation_flags.add_history(
            converter.conversion_metadata.command)

    return HDUList(PrimaryHDU(
        header=header_with_transformation_flags,
        # `+` concatenates python lists
        data=numpy.hstack(left_dark_parts + image_parts + right_dark_parts)))

def hdu_to_imagemodel(in_hdu):
    """
    Workaround for initializing a `jwst.datamodels.ImageModel` from a normal
    FITS ImageHDU that could contain HST header keywords and unexpected WCS
    definition.

    TBD

    Parameters
    ----------
    in_hdu : `astropy.io.fits.ImageHDU`

    Returns
    -------
    img : `jwst.datamodels.ImageModel`
    """
    from astropy.io.fits import ImageHDU, HDUList
    from astropy.coordinates import ICRS

    from jwst.datamodels import util
    import gwcs

    hdu = ImageHDU(data=in_hdu.data, header=in_hdu.header)

    new_header = strip_telescope_header(hdu.header)

    hdu.header = new_header

    # Initialize data model
    img = util.open(HDUList([hdu]))

    # Initialize GWCS
    tform = gwcs.wcs.utils.make_fitswcs_transform(new_header)
    hwcs = gwcs.WCS(forward_transform=tform,
                    output_frame=ICRS())  # gwcs.CelestialFrame()
    sh = hdu.data.shape
    hwcs.bounding_box = ((-0.5, sh[0] - 0.5), (-0.5, sh[1] - 0.5))

    # Put gWCS in meta, where blot/drizzle expect to find it
    img.meta.wcs = hwcs

    return img

def write_fits(self, filename, overwrite=False):
    r'''Write pipeline results to a FITS file.

    Parameters
    ----------
    filename : str
        Name of output file to be written.
    overwrite : bool
        If filename already exists, this flag indicates whether or not to
        overwrite it (without warning).
    '''
    from astropy.io.fits import HDUList, PrimaryHDU, table_to_hdu
    hdul = [PrimaryHDU()]
    for t in self.table_config:
        hdu = table_to_hdu(self[t])
        hdu.header['EXTNAME'] = t
        hdul.append(hdu)
    HDUList(hdul).writeto(filename, overwrite=overwrite)

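# A usage sketch (hypothetical: assumes `pipeline` is an instance of the
# containing class with its tables populated): each configured table lands in
# its own extension, named via the EXTNAME header card set above.
pipeline.write_fits('results.fits', overwrite=True)
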
def mime_object_maker(url, mimetype):
    """
    Return a data object suitable for the given mimetype.

    This will be either an astropy FITS object, a pyvo DALResults object,
    a PIL object for conventional images, or a string for text content.

    Parameters
    ----------
    url : str
        the object download url
    mimetype : str
        the content mimetype
    """
    mimetype = mimeparse.parse_mime_type(mimetype)

    if mimetype[0] == 'text':
        return s.get(url).text

    if mimetype[1] == 'fits' or mimetype[1] == 'x-fits':
        r = s.get(url)
        return HDUList.fromstring(r.content)

    if mimetype[0] == 'image':
        from PIL import Image
        from io import BytesIO
        r = s.get(url)
        b = BytesIO(r.content)
        return Image.open(b)

    if mimetype[1] == 'x-votable' or mimetype[1] == 'x-votable+xml':
        # As soon as there are some kind of recursive data structures,
        # things start to get really f*cked up
        if mimetype[2].get('content', None) == 'datalink':
            from .adhoc import DatalinkResults
            return DatalinkResults.from_result_url(url)
        else:
            from .query import DALResults
            return DALResults.from_result_url(url)

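# A usage sketch (hypothetical URL): the returned type depends entirely on
# the declared mimetype, so a FITS content type yields an HDUList.
hdulist = mime_object_maker('http://example.com/image.fits', 'application/fits')
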
def test_ignore_hdus_report(self, capsys):
    a = np.arange(100).reshape(10, 10)
    b = a.copy() + 1
    ha = Header([('A', 1), ('B', 2), ('C', 3)])
    phdu_a = PrimaryHDU(header=ha)
    phdu_b = PrimaryHDU(header=ha)
    ihdu_a = ImageHDU(data=a, name='SCI')
    ihdu_b = ImageHDU(data=b, name='SCI')
    hdulist_a = HDUList([phdu_a, ihdu_a])
    hdulist_b = HDUList([phdu_b, ihdu_b])
    tmp_a = self.temp('testa.fits')
    tmp_b = self.temp('testb.fits')
    hdulist_a.writeto(tmp_a)
    hdulist_b.writeto(tmp_b)

    numdiff = fitsdiff.main([tmp_a, tmp_b, "-u", "SCI"])
    assert numdiff == 0

    out, err = capsys.readouterr()
    assert "testa.fits" in out
    assert "testb.fits" in out

class TestMultipleHDU:

    def setup_class(self):
        self.data1 = np.array(list(
            zip([1, 2, 3, 4],
                ['a', 'b', 'c', 'd'],
                [2.3, 4.5, 6.7, 8.9])),
            dtype=[(str('a'), int), (str('b'), str('U1')), (str('c'), float)])
        self.data2 = np.array(list(
            zip([1.4, 2.3, 3.2, 4.7],
                [2.3, 4.5, 6.7, 8.9])),
            dtype=[(str('p'), float), (str('q'), float)])
        hdu1 = PrimaryHDU()
        hdu2 = BinTableHDU(self.data1, name='first')
        hdu3 = BinTableHDU(self.data2, name='second')
        self.hdus = HDUList([hdu1, hdu2, hdu3])

    def teardown_class(self):
        del self.hdus

    def setup_method(self, method):
        warnings.filterwarnings('always')

    def test_read(self, tmpdir):
        filename = str(tmpdir.join('test_read.fits'))
        self.hdus.writeto(filename)
        with catch_warnings() as l:
            t = Table.read(filename)
            assert len(l) == 1
            assert str(l[0].message).startswith(
                'hdu= was not specified but multiple tables are present, '
                'reading in first available table (hdu=1)')
        assert equal_data(t, self.data1)

    def test_read_with_hdu_0(self, tmpdir):
        filename = str(tmpdir.join('test_read_with_hdu_0.fits'))
        self.hdus.writeto(filename)
        with pytest.raises(ValueError) as exc:
            Table.read(filename, hdu=0)
        assert exc.value.args[0] == 'No table found in hdu=0'

    @pytest.mark.parametrize('hdu', [1, 'first'])
    def test_read_with_hdu_1(self, tmpdir, hdu):
        filename = str(tmpdir.join('test_read_with_hdu_1.fits'))
        self.hdus.writeto(filename)
        with catch_warnings() as l:
            t = Table.read(filename, hdu=hdu)
            assert len(l) == 0
        assert equal_data(t, self.data1)

    @pytest.mark.parametrize('hdu', [2, 'second'])
    def test_read_with_hdu_2(self, tmpdir, hdu):
        filename = str(tmpdir.join('test_read_with_hdu_2.fits'))
        self.hdus.writeto(filename)
        with catch_warnings() as l:
            t = Table.read(filename, hdu=hdu)
            assert len(l) == 0
        assert equal_data(t, self.data2)

    def test_read_from_hdulist(self):
        with catch_warnings() as l:
            t = Table.read(self.hdus)
            assert len(l) == 1
            assert str(l[0].message).startswith(
                'hdu= was not specified but multiple tables are present, '
                'reading in first available table (hdu=1)')
        assert equal_data(t, self.data1)

    def test_read_from_hdulist_with_hdu_0(self, tmpdir):
        with pytest.raises(ValueError) as exc:
            Table.read(self.hdus, hdu=0)
        assert exc.value.args[0] == 'No table found in hdu=0'

    @pytest.mark.parametrize('hdu', [1, 'first'])
    def test_read_from_hdulist_with_hdu_1(self, tmpdir, hdu):
        with catch_warnings() as l:
            t = Table.read(self.hdus, hdu=hdu)
            assert len(l) == 0
        assert equal_data(t, self.data1)

    @pytest.mark.parametrize('hdu', [2, 'second'])
    def test_read_from_hdulist_with_hdu_2(self, tmpdir, hdu):
        with catch_warnings() as l:
            t = Table.read(self.hdus, hdu=hdu)
            assert len(l) == 0
        assert equal_data(t, self.data2)

    def test_read_from_single_hdu(self):
        with catch_warnings() as l:
            t = Table.read(self.hdus[1])
            assert len(l) == 0
        assert equal_data(t, self.data1)