def test_link_editor():

    # Make sure that the WCSLink works properly in the link editor and is
    # returned unmodified. The main way to check that is just to make sure that
    # the link round-trips when going through EditableLinkFunctionState.

    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = WCSCoordinates(wcs=wcs1)
    data1['x'] = np.ones((2, 3))

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'GLON-CAR', 'FREQ', 'GLAT-CAR'
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = WCSCoordinates(wcs=wcs2)
    data2['x'] = np.ones((2, 3, 4))

    link1 = WCSLink(data1, data2)
    link2 = EditableLinkFunctionState(link1).link

    assert isinstance(link2, WCSLink)
    assert link2.data1.label == 'Data 1'
    assert link2.data2.label == 'Data 2'
def test_clone_wcs_link():

    # Make sure that WCSLink can be serialized/deserialized

    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = WCSCoordinates(wcs=wcs1)
    data1['x'] = np.ones((2, 3))

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'GLON-CAR', 'FREQ', 'GLAT-CAR'
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = WCSCoordinates(wcs=wcs2)
    data2['x'] = np.ones((2, 3, 4))

    link1 = WCSLink(data1, data2)
    link2 = clone(link1)

    assert isinstance(link2, WCSLink)
    assert link2.data1.label == 'Data 1'
    assert link2.data2.label == 'Data 2'
def reverse_add_data(self, data_item):
    """
    Adds data from specviz to glue.

    Parameters
    ----------
    data_item : :class:`specviz.core.items.DataItem`
        The data item recently added to the model.
    """
    new_data = Data(label=data_item.name)
    new_data.coords = coordinates_from_header(data_item.spectrum.wcs)

    flux_component = Component(data_item.spectrum.flux,
                               data_item.spectrum.flux.unit)
    new_data.add_component(flux_component, "Flux")

    disp_component = Component(data_item.spectrum.spectral_axis,
                               data_item.spectrum.spectral_axis.unit)
    new_data.add_component(disp_component, "Dispersion")

    if data_item.spectrum.uncertainty is not None:
        uncert_component = Component(data_item.spectrum.uncertainty.array,
                                     data_item.spectrum.uncertainty.unit)
        new_data.add_component(uncert_component, "Uncertainty")

    self._session.data_collection.append(new_data)
def test_component_unit_header(tmpdir):
    from astropy import units as u
    filename = tmpdir.join('test3.fits').strpath

    data = Data(x=np.arange(6).reshape(2, 3),
                y=(np.arange(6) * 2).reshape(2, 3),
                z=(np.arange(6) * 2).reshape(2, 3))
    data.coords = WCSCoordinates()

    unit1 = data.get_component("x").units = u.m / u.s
    unit2 = data.get_component("y").units = u.Jy
    unit3 = data.get_component("z").units = ""

    fits_writer(filename, data)

    with fits.open(filename) as hdulist:
        assert len(hdulist) == 3

        bunit = hdulist['x'].header.get('BUNIT')
        assert u.Unit(bunit) == unit1

        bunit = hdulist['y'].header.get('BUNIT')
        assert u.Unit(bunit) == unit2

        bunit = hdulist['z'].header.get('BUNIT')
        assert bunit == unit3
def to_glue(self, label="yt", data_collection=None):
    """
    Takes the data in the FITSImageData instance and exports it to
    Glue (http://glueviz.org) for interactive analysis. Optionally
    add a *label*. If you are already within the Glue environment, you
    can pass a *data_collection* object, otherwise Glue will be started.
    """
    from glue.core import Data, DataCollection
    from glue.core.coordinates import coordinates_from_header
    try:
        from glue.app.qt.application import GlueApplication
    except ImportError:
        from glue.qt.glue_application import GlueApplication

    image = Data(label=label)
    image.coords = coordinates_from_header(self.wcs.to_header())
    for k in self.fields:
        image.add_component(self[k].data, k)
    if data_collection is None:
        dc = DataCollection([image])
        app = GlueApplication(dc)
        app.start()
    else:
        data_collection.append(image)
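# A minimal usage sketch for to_glue() above: it assumes yt is installed and
# that yt.FITSSlice is available to build a FITSImageData instance; the
# dataset path and field name are illustrative, not from the original source.
import yt

ds = yt.load("IsolatedGalaxy/galaxy0030/galaxy0030")  # hypothetical sample dataset
fid = yt.FITSSlice(ds, "z", ("gas", "density"))       # a FITSImageData subclass
fid.to_glue(label="density_slice")                    # opens Glue with the image loaded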
def test_wcs_autolink_emptywcs():

    # No links should be found because the WCS don't actually have well
    # defined physical types.

    data1 = Data()
    data1.coords = WCS(naxis=1)
    data1['x'] = [1, 2, 3]

    data2 = Data()
    data2.coords = WCS(naxis=1)
    data2['x'] = [4, 5, 6]

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)
    assert len(links) == 0
def cube_to_data(self, cube, output_label=None, output_component_id=None):
    """
    Convert a SpectralCube into the final output, honoring
    ``self.output_as_component``: if it is set, add a new component to
    ``self.data`` and return ``None``; otherwise create a new Data object
    and return it.

    :param cube: SpectralCube
    :param output_label: label of the new Data object
    :param output_component_id: label of the new component
    :return: the new Data object, or None when adding a component in place
    """
    original_data = self.data
    new_component = Component(cube._data.copy(), self.component_unit)
    if self.output_as_component:
        original_data.add_component(new_component, output_component_id)
        return None
    else:
        new_data = Data(label=output_label)
        new_data.coords = coordinates_from_header(cube.header)
        new_data.add_component(new_component, output_component_id)
        return new_data
def load_stacked_sequence(self, raster_data):
    for window, window_data in raster_data.items():
        w_data = Data(label=f"{window.replace(' ', '_')}")
        w_data.coords = WCSCoordinates(wcs=window_data.wcs)
        w_data.add_component(Component(window_data.data), f"{window}")
        self.datasets.append(w_data)
def test_wcs_autolink_emptywcs():

    # No links should be found because the WCS don't actually have well
    # defined physical types.

    data1 = Data()
    data1.coords = WCSCoordinates(wcs=WCS(naxis=1))
    data1['x'] = [1, 2, 3]

    data2 = Data()
    data2.coords = WCSCoordinates(wcs=WCS(naxis=1))
    data2['x'] = [4, 5, 6]

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)
    assert len(links) == 0
def test_component_unit_header(tmpdir):
    from astropy import units as u
    filename = tmpdir.join('test3.fits').strpath

    data = Data(x=np.arange(6).reshape(2, 3),
                y=(np.arange(6) * 2).reshape(2, 3),
                z=(np.arange(6) * 2).reshape(2, 3))
    wcs = WCS()
    data.coords = WCSCoordinates(wcs=wcs)

    unit1 = data.get_component("x").units = u.m / u.s
    unit2 = data.get_component("y").units = u.Jy
    unit3 = data.get_component("z").units = ""

    fits_writer(filename, data)

    with fits.open(filename) as hdulist:
        assert len(hdulist) == 3

        bunit = hdulist['x'].header.get('BUNIT')
        assert u.Unit(bunit) == unit1

        bunit = hdulist['y'].header.get('BUNIT')
        assert u.Unit(bunit) == unit2

        bunit = hdulist['z'].header.get('BUNIT')
        assert bunit == unit3
def read_cube(filename, **kwargs):
    cube_data = None
    exclude_exts = []
    data_collection = []

    hdulist = fits.open(filename)
    try:
        cube_data = CubeData.read(hdulist)
    except CubeDataIOError as e:
        # str(e) works on both Python 2 and 3; e.message is Python 2 only.
        warnings.warn('No CubeData found in "{}": {}'.format(
            filename,
            str(e)
        ))

    if cube_data is not None:
        data = Data()
        try:
            data.coords = coordinates_from_wcs(cube_data.wcs)
        except AttributeError:
            # There is no WCS; nothing to worry about for now.
            pass
        data.add_component(Component(cube_data), label="cube")
        data_collection.append(data)
        exclude_exts = cube_data.meta.get('hdu_ids')

    # Read in the rest of the FITS file.
    data_collection += _load_fits_generic(hdulist,
                                          exclude_exts=exclude_exts)
    return data_collection
def deimos_spectrum1D_reader(file_name):
    """
    Data loader for Keck/DEIMOS 1D spectra.

    This loads the 'Bxspf-B' (extension 1) and 'Bxspf-R' (extension 2)
    and appends them together to produce the combined Red/Blue spectrum
    along with their Wavelength and Inverse Variance arrays.
    """
    with fits.open(file_name) as hdulist:
        data = Data(label='1D Spectrum')
        hdulist[1].header['CTYPE1'] = 'WAVE'
        hdulist[1].header['CUNIT1'] = 'Angstrom'
        data.header = hdulist[1].header
        wcs = WCS(hdulist[1].header)
        data.coords = coordinates_from_wcs(wcs)

        full_wl = np.append(hdulist[1].data['LAMBDA'][0],
                            hdulist[2].data['LAMBDA'][0])
        full_spec = np.append(hdulist[1].data['SPEC'][0],
                              hdulist[2].data['SPEC'][0])
        full_ivar = np.append(hdulist[1].data['IVAR'][0],
                              hdulist[2].data['IVAR'][0])

        data.add_component(full_wl, 'Wavelength')
        data.add_component(full_spec, 'Flux')
        data.add_component(1 / np.sqrt(full_ivar), 'Uncertainty')

    return data
def load_sequence(self, raster_data):
    for window, window_data in raster_data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = WCSCoordinates(wcs=scan_data.wcs)
            w_data.add_component(Component(scan_data.data),
                                 f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            self.datasets.append(w_data)
def test_wcs_affine_approximation():

    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = wcs1
    data1['x'] = np.ones((2, 3))

    wcs2 = WCS(naxis=2)
    wcs2.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs2.wcs.crpix = -3, 5
    wcs2.wcs.cd = [[2, -1], [1, 2]]
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = wcs2
    data2['x'] = np.ones((2, 3))

    link = WCSLink(data1, data2)
    affine_link = link.as_affine_link(tolerance=0.1)

    assert isinstance(affine_link, AffineLink)
    assert_allclose(affine_link.matrix,
                    [[0.4, 0.2, -3.4],
                     [-0.2, 0.4, 4.2],
                     [0, 0, 1]], atol=1e-5)

    x1 = np.array([1.4, 3.2, 2.5])
    y1 = np.array([0.2, 4.3, 2.2])

    x2, y2 = link.forwards(x1, y1)
    x3, y3 = affine_link.forwards(x1, y1)

    assert_allclose(x2, x3, atol=1e-5)
    assert_allclose(y2, y3, atol=1e-5)

    x4, y4 = link.backwards(x1, y1)
    x5, y5 = affine_link.backwards(x1, y1)

    assert_allclose(x4, x5, atol=1e-5)
    assert_allclose(y4, y5, atol=1e-5)
def load_sji(self, sji):
    with fits.open(sji) as hdul:
        hdul.verify("fix")
        label = hdul[0].header['TDESC1']
        data = Data(label=label)
        data.coords = WCSCoordinates(hdul[0].header)
        data.meta = hdul[0].header
        data.add_component(Component(hdul[0].data), label)
        self.datasets.append(data)
def _create_data_obj(filename, coords):
    label = "JWST data cube: {}".format(splitext(filename)[0])
    data = Data(label=label)
    data.coords = coords

    # Set metadata indicating the specific cubeviz layout to be used
    data.meta[CUBEVIZ_LAYOUT] = 'JWST'
    return data
def load_sequence(self, raster_data):
    for window, window_data in raster_data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = scan_data.wcs
            w_data.add_component(Component(scan_data.data),
                                 f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            w_data.style = VisualAttributes(color='#5A4FCF')
            self.datasets.append(w_data)
def load_sunpy_map(self, sunpy_map):
    sunpy_map_loaded = sunpy.map.Map(sunpy_map)
    label = 'sunpy-map-' + sunpy_map_loaded.name
    data = Data(label=label)
    # Preferred way; preserves more info in some cases.
    data.coords = sunpy_map_loaded.wcs
    data.meta = sunpy_map_loaded.meta
    data.add_component(Component(sunpy_map_loaded.data),
                       sunpy_map_loaded.name)
    # Use the loaded map's colormap: the input may be a filename rather
    # than a Map object, so sunpy_map itself may not have a .cmap.
    data.style = VisualAttributes(color='#FDB813',
                                  preferred_cmap=sunpy_map_loaded.cmap)
    self.datasets.append(data)
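# Hedged usage sketch for load_sunpy_map() above: assumes sunpy's sample data
# is available, and `loader` stands in for whatever object owns the method.
import sunpy.map
import sunpy.data.sample

aia_map = sunpy.map.Map(sunpy.data.sample.AIA_171_IMAGE)  # downloads sample data on first use
loader.load_sunpy_map(aia_map)  # appends a glue Data object to loader.datasets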
def _load_GALFAHI_data_LowRes(filename, **kwargs):
    # Data loader customized for GALFA-HI data cubes.
    # Resize the data cube to a lower resolution in velocity/space.

    def _bin_cube(cube, factor, axis_label):
        # Resize the cube to a lower resolution (integer division so that
        # the reshape dimensions stay ints on Python 3).
        shape = cube.shape
        if axis_label == 'VELO':
            new_shape = (shape[0] // factor, factor, shape[1], shape[2])
            return cube.reshape(new_shape).mean(axis=1)
        elif axis_label == 'RADEC':
            new_shape = (shape[0], shape[1] // factor, factor,
                         shape[2] // factor, factor)
            return cube.reshape(new_shape).mean(axis=4).mean(axis=2)
        else:
            return cube

    # Update the header for cubes that have been binned to lower resolution.
    def _get_new_header(header, factor, axis_label):
        new_header = header
        if axis_label == 'VELO':
            new_header['NAXIS3'] = header['NAXIS3'] // factor
            new_header['CRVAL3'] = header['CRVAL3']
            new_header['CRPIX3'] = float(header['CRPIX3'] / factor)
            new_header['CDELT3'] = header['CDELT3'] * factor
        elif axis_label == 'RADEC':
            for ax in [1, 2]:
                new_header['NAXIS%d' % ax] = header['NAXIS%d' % ax] // factor
                new_header['CRVAL%d' % ax] = header['CRVAL%d' % ax]
                new_header['CRPIX%d' % ax] = float(header['CRPIX%d' % ax] / factor)
                new_header['CDELT%d' % ax] = header['CDELT%d' % ax] * factor
        else:
            new_header = header

        # m/s --> km/s
        new_header['CDELT3'] = new_header['CDELT3'] * (10 ** (-3))
        return new_header

    def _get_cube_center(header, cubeshape):
        ra = header['CRVAL1'] + header['CDELT1'] * (np.arange(cubeshape[2]) + 0.5 - header['CRPIX1'])   # degree
        dec = header['CRVAL2'] + header['CDELT2'] * (np.arange(cubeshape[1]) + 0.5 - header['CRPIX2'])  # degree
        return np.mean(ra), np.mean(dec)

    data_list = []
    # Add three data objects with different resolutions:
    for factor, axis_label in zip([4, 16, 2], ['VELO', 'VELO', 'RADEC']):
        cube = fits.getdata(filename)
        header = fits.getheader(filename)
        cen_ra, cen_dec = _get_cube_center(header, cube.shape)
        new_header = _get_new_header(header, factor, axis_label)
        cube_name = 'G_%d%+.2fradec_%.1fkm/s_%.1fa' % (cen_ra, cen_dec,
                                                       new_header['CDELT3'],
                                                       new_header['CDELT2'] * 60.)

        data = Data()
        data.coords = coordinates_from_header(new_header)
        data.add_component(_bin_cube(cube, factor, axis_label), cube_name)
        data.label = cube_name
        data_list.append(data)
        del data, cube, header

    return data_list
def test_has_celestial_with_time_and_spectral_axes():
    """
    Test the case in which we have two data cubes with an unequal number of
    dimensions, but both have celestial axes.
    """
    wcs1 = WCS(naxis=4)
    wcs1.wcs.ctype = 'WAVE', 'HPLT-TAN', 'HPLN-TAN', 'TIME'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = wcs1
    data1['x'] = np.ones((2, 3, 4, 5))
    pw1, pz1, py1, px1 = data1.pixel_component_ids

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'HPLN-TAN', 'HPLT-TAN', 'TIME'
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = wcs2
    data2['x'] = np.ones((2, 3, 4))
    pz2, py2, px2 = data2.pixel_component_ids

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)
    assert len(links) == 1
    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 6
    assert link[0].get_to_id() == px2
    assert link[0].get_from_ids() == [py1, pz1, pw1]
    assert link[1].get_to_id() == py2
    assert link[1].get_from_ids() == [py1, pz1, pw1]
    assert link[2].get_to_id() == pz2
    assert link[2].get_from_ids() == [py1, pz1, pw1]
    assert link[3].get_to_id() == py1
    assert link[3].get_from_ids() == [px2, py2, pz2]
    assert link[4].get_to_id() == pz1
    assert link[4].get_from_ids() == [px2, py2, pz2]
    assert link[5].get_to_id() == pw1
    assert link[5].get_from_ids() == [px2, py2, pz2]
def load_sji_fits(filename):
    with fits.open(filename) as hdul:
        hdul.verify("fix")
        sji = hdul[0]
        label = sji.header['TDESC1']
        data = Data(label=label)
        data.coords = WCSCoordinates(sji.header)
        data.meta = sji.header
        data.add_component(Component(sji.data), label)

    return data
def _parse_iris_raster(data, label):
    result = []
    for window, window_data in data.data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = WCSCoordinates(wcs=scan_data.wcs)
            w_data.add_component(Component(scan_data.data),
                                 f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            result.append(w_data)

    return result
def _parse_iris_raster(data, label):
    result = []
    for window, window_data in data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = WCSCoordinates(scan_data.header)
            w_data.add_component(Component(scan_data.data),
                                 f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            w_data.style = VisualAttributes(color='#5A4FCF')
            result.append(w_data)

    return result
def read_tiff_metadata(filename):
    """Read a TIFF image, looking for .tfw metadata."""
    base, ext = os.path.splitext(filename)
    data = np.flipud(np.array(Image.open(filename).convert('L')))

    result = Data()

    if os.path.exists(base + '.tfw'):
        result.coords = tfw_to_coords(base + '.tfw', data.shape)

    result.add_component(data, 'map')
    return result
def test_hypercube_world(self):

    # Check defaults when we add data

    wcs = WCS(naxis=4)

    hypercube2 = Data()
    hypercube2.coords = WCSCoordinates(wcs=wcs)
    hypercube2.add_component(np.random.random((2, 3, 4, 5)), 'a')
    self.data_collection.append(hypercube2)

    self.viewer.add_data(hypercube2)
def test_numerical_data_changed(self):
    self.init_draw_count()
    self.init_subset()
    assert self.draw_count == 0
    self.viewer.add_data(self.data)
    assert self.draw_count == 1
    data = Data(label=self.data.label)
    data.coords = self.data.coords
    for cid in self.data.visible_components:
        data.add_component(self.data[cid] * 2, cid.label)
    self.data.update_values_from_data(data)
    assert self.draw_count == 2
def test_wcs_autolink_spectral_cube():

    # This should link all coordinates

    wcs1 = WCS(naxis=3)
    wcs1.wcs.ctype = 'DEC--TAN', 'FREQ', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data()
    data1.coords = WCSCoordinates(wcs=wcs1)
    data1['x'] = np.ones((2, 3, 4))
    pz1, py1, px1 = data1.pixel_component_ids

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'GLON-CAR', 'GLAT-CAR', 'FREQ'
    wcs2.wcs.set()

    data2 = Data()
    data2.coords = WCSCoordinates(wcs=wcs2)
    data2['x'] = np.ones((2, 3, 4))
    pz2, py2, px2 = data2.pixel_component_ids

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)
    assert len(links) == 1
    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 6
    assert link[0].get_to_id() == pz2
    assert link[0].get_from_ids() == [pz1, py1, px1]
    assert link[1].get_to_id() == py2
    assert link[1].get_from_ids() == [pz1, py1, px1]
    assert link[2].get_to_id() == px2
    assert link[2].get_from_ids() == [pz1, py1, px1]
    assert link[3].get_to_id() == pz1
    assert link[3].get_from_ids() == [pz2, py2, px2]
    assert link[4].get_to_id() == py1
    assert link[4].get_from_ids() == [pz2, py2, px2]
    assert link[5].get_to_id() == px1
    assert link[5].get_from_ids() == [pz2, py2, px2]
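# A minimal sketch of driving the same auto-linking machinery outside the
# tests, assuming the plugin import path below (as in glue-core); data1 and
# data2 stand for Data objects with WCS coords, constructed as in the tests.
from glue.core import DataCollection
from glue.plugins.wcs_autolinking.wcs_autolinking import wcs_autolink

dc = DataCollection([data1, data2])
for link in wcs_autolink(dc):
    dc.add_link(link)  # register each suggested link with the collection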
def test_wcs_offset_approximation():

    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = wcs1
    data1['x'] = np.ones((2, 3))

    wcs2 = WCS(naxis=2)
    wcs2.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs2.wcs.crpix = -3, 5
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = wcs2
    data2['x'] = np.ones((2, 3))

    link = WCSLink(data1, data2)
    offset_link = link.as_affine_link(tolerance=0.1)

    assert isinstance(offset_link, OffsetLink)
    assert_allclose(offset_link.offsets, [3, -5])

    x1 = np.array([1.4, 3.2, 2.5])
    y1 = np.array([0.2, 4.3, 2.2])

    x2, y2 = link.forwards(x1, y1)
    x3, y3 = offset_link.forwards(x1, y1)

    assert_allclose(x2, x3, atol=1e-5)
    assert_allclose(y2, y3, atol=1e-5)

    x4, y4 = link.backwards(x1, y1)
    x5, y5 = offset_link.backwards(x1, y1)

    assert_allclose(x4, x5, atol=1e-5)
    assert_allclose(y4, y5, atol=1e-5)
def example_image(shape=64, limits=[-4, 4]):
    """Creates a test data set containing a 2D Gaussian blob."""
    from glue.core import Data, Coordinates
    import numpy as np
    import ipyvolume as ipv

    # Use the limits argument (it was previously ignored in favor of
    # hard-coded bounds).
    x = np.linspace(limits[0], limits[1], num=shape)
    X, Y = np.meshgrid(x, x)
    rho = 0.8
    I = np.exp(-X**2 - Y**2 - 2 * X * Y * rho)
    data = Data()
    data.coords = Coordinates()
    data.add_component(I, label='intensity')
    return data
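# Hedged usage sketch for example_image() above, assuming a Qt-capable glue
# installation; nothing here is from the original source.
from glue.core import DataCollection
from glue.app.qt import GlueApplication

dc = DataCollection([example_image(shape=128)])
app = GlueApplication(dc)
app.start()  # opens the glue GUI with the synthetic image loaded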
def _get_sci_group(i, index):
    d = Data("%s_%i" % (label, index))
    d.coords = coordinates_from_wcs(HSTWCS(hdulist, i))
    index = index + 1
    d.add_component(hdulist[i].data, hdulist[i].name)
    # Start after the SCI HDU itself, otherwise the loop would break
    # immediately on the current extension.
    for h in hdulist[i + 1:]:
        if h.name == 'SCI':
            break  # new science group
        if h.name not in ['ERR', 'DQ']:
            continue
        d.add_component(h.data, h.name)
    return d
def spectral_cube_to_data(cube, label=None):

    if isinstance(cube, SpectralCube):
        cube = StokesSpectralCube({'I': cube})

    result = Data(label=label)
    result.coords = coordinates_from_wcs(cube.wcs)

    for component in cube.components:
        data = getattr(cube, component)._data
        result.add_component(data, label='STOKES {0}'.format(component))

    return result
def load_sji(self, sji):
    with fits.open(sji) as hdul:
        hdul.verify("fix")
        label = hdul[0].header['TDESC1']
        data = Data(label=label)
        data.coords = WCSCoordinates(hdul[0].header)
        data.meta = hdul[0].header
        preferred_cmap_name = 'IRIS ' + hdul[0].header['TDESC1'].replace('_', ' ')
        data.style = VisualAttributes(preferred_cmap=preferred_cmap_name)
        data.add_component(Component(hdul[0].data), label)
        self.datasets.append(data)
def spectral_cube_to_data(cube, label=None):

    if isinstance(cube, SpectralCube):
        cube = StokesSpectralCube({'I': cube})

    result = Data(label=label)
    result.coords = coordinates_from_wcs(cube.wcs)

    for component in cube.components:
        data = getattr(cube, component).unmasked_data[...]
        result.add_component(data, label='STOKES {0}'.format(component))

    return result
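# Hedged sketch of feeding spectral_cube_to_data() above: the filename is
# illustrative; SpectralCube.read is spectral-cube's standard loader.
from spectral_cube import SpectralCube

cube = SpectralCube.read("my_cube.fits")  # hypothetical FITS cube on disk
data = spectral_cube_to_data(cube, label="stokes-cube")  # wraps Stokes I into a glue Data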
def test_wcs_autolinking_of_2d_cube_with_temporal_and_spectral_axes_case_2():
    """
    Confirm that two 2D data cubes with a matching number of dimensions,
    where one axis is spectral (air wavelength in this case) and the other
    temporal, are indeed autolinked, and that the axis order does not matter.
    """
    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'AWAV', 'TIME'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = wcs1
    data1['x'] = np.ones((2, 3))
    py1, px1 = data1.pixel_component_ids

    wcs2 = WCS(naxis=2)
    wcs2.wcs.ctype = 'TIME', 'AWAV'
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = wcs2
    data2['x'] = np.ones((2, 3))
    py2, px2 = data2.pixel_component_ids

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)
    assert len(links) == 1
    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 4
    assert link[0].get_to_id() == px2
    assert link[0].get_from_ids() == [px1, py1]
    assert link[1].get_to_id() == py2
    assert link[1].get_from_ids() == [px1, py1]
    assert link[2].get_to_id() == px1
    assert link[2].get_from_ids() == [px2, py2]
    assert link[3].get_to_id() == py1
    assert link[3].get_from_ids() == [px2, py2]
def load_data(self, data_filenames):
    """
    Load the data based on the extensions defined in the matching YAML file,
    then create the data cube and return it.

    :param data_filenames: comma-separated list of FITS file names
    :return: glue Data object
    """
    label = None
    data = None

    for data_filename in data_filenames.split(','):

        hdulist = fits.open(data_filename)

        if not label:
            label = "{}: {}".format(self._name,
                                    splitext(basename(data_filename))[0])
            data = Data(label=label)

            # This attribute is used to indicate to the cubeviz layout that
            # this is a cubeviz-specific data component.
            data.meta[CUBEVIZ_LAYOUT] = self._name

        data_coords_set = False

        for ii, hdu in enumerate(hdulist):
            if 'NAXIS' in hdu.header and hdu.header['NAXIS'] == 3:

                # Set the coords based on the first 3D HDU
                if not data_coords_set:
                    data.coords = coordinates_from_header(hdu.header)
                    data_coords_set = True

                component_name = str(ii)
                if 'EXTNAME' in hdu.header:
                    component_name = hdu.header['EXTNAME']

                # The data must be floating point, as spectral-cube expects
                # floating point data (np.float64 rather than the removed
                # np.float alias).
                data.add_component(component=hdu.data.astype(np.float64),
                                   label=component_name)

                if 'BUNIT' in hdu.header:
                    c = data.get_component(component_name)
                    c.units = self.get_units(hdu.header)

    # For the purposes of exporting, we keep a reference to the original
    # HDUList object.
    data._cubeviz_hdulist = hdulist

    return data
def test_celestial_with_unknown_axes():

    # Regression test for a bug that caused n-d datasets with celestial axes
    # and axes with unknown physical types to not even be linked by celestial
    # axes.

    wcs1 = WCS(naxis=3)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN', 'SPAM'
    wcs1.wcs.set()

    data1 = Data()
    data1.coords = WCSCoordinates(wcs=wcs1)
    data1['x'] = np.ones((2, 3, 4))
    pz1, py1, px1 = data1.pixel_component_ids

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'GLON-CAR', 'FREQ', 'GLAT-CAR'
    wcs2.wcs.set()

    data2 = Data()
    data2.coords = WCSCoordinates(wcs=wcs2)
    data2['x'] = np.ones((2, 3, 4))
    pz2, py2, px2 = data2.pixel_component_ids

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)
    assert len(links) == 1
    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 4
    assert link[0].get_to_id() == px2
    assert link[0].get_from_ids() == [px1, py1]
    assert link[1].get_to_id() == pz2
    assert link[1].get_from_ids() == [px1, py1]
    assert link[2].get_to_id() == px1
    assert link[2].get_from_ids() == [px2, pz2]
    assert link[3].get_to_id() == py1
    assert link[3].get_from_ids() == [px2, pz2]
def test_wcs_no_approximation():

    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = wcs1
    data1['x'] = np.ones((2, 3))

    wcs2 = WCS(naxis=2)
    wcs2.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs2.wcs.crval = 30, 50
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = wcs2
    data2['x'] = np.ones((2, 3))

    link = WCSLink(data1, data2)
    with pytest.raises(NoAffineApproximation):
        link.as_affine_link(tolerance=0.1)
def acs_cutout_image_reader(file_name):
    """
    Data loader for the ACS cut-outs for the DEIMOS spectra.

    The cutouts contain only the image.
    """
    hdulist = fits.open(file_name)
    data = Data(label='ACS Cutout Image')
    data.coords = coordinates_from_header(hdulist[0].header)
    data.header = hdulist[0].header
    data.add_component(hdulist[0].data, 'Flux')

    return data
def test_numerical_data_changed(self):
    self.init_draw_count()
    self.init_subset()
    assert self.draw_count == 0
    self.viewer.add_data(self.data)
    assert self.draw_count == 1
    data = Data(label=self.data.label)
    data.coords = self.data.coords
    for cid in self.data.main_components:
        if self.data.get_kind(cid) == 'numerical':
            data.add_component(self.data[cid] * 2, cid.label)
        else:
            data.add_component(self.data[cid], cid.label)
    self.data.update_values_from_data(data)
    assert self.draw_count == 2
def test_wcs_autolink_dimensional_mismatch():

    # No links should be found: even though both WCSes include a FREQ axis,
    # the datasets have mismatched dimensionality and the 1D dataset has no
    # celestial axes to fall back on.

    wcs1 = WCS(naxis=1)
    wcs1.wcs.ctype = ['FREQ']
    wcs1.wcs.set()

    data1 = Data()
    data1.coords = WCSCoordinates(wcs=wcs1)
    data1['x'] = [1, 2, 3]

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'DEC--TAN', 'FREQ', 'RA---TAN'
    wcs2.wcs.set()

    data2 = Data()
    data2.coords = WCSCoordinates(wcs=wcs2)
    data2['x'] = np.ones((2, 3, 4))

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)
    assert len(links) == 0
def _load_fits_generic(filename, **kwargs):
    hdulist = fits.open(filename)
    groups = dict()
    label_base = basename(filename).rpartition('.')[0]

    if not label_base:
        label_base = basename(filename)

    for extnum, hdu in enumerate(hdulist):
        if hdu.data is not None:
            hdu_name = hdu.name if hdu.name else str(extnum)
            if is_image_hdu(hdu):
                shape = hdu.data.shape
                try:
                    data = groups[shape]
                except KeyError:
                    label = '{}[{}]'.format(
                        label_base,
                        'x'.join(str(x) for x in shape)
                    )
                    data = Data(label=label)
                    data.coords = coordinates_from_header(hdu.header)
                    groups[shape] = data
                data.add_component(component=hdu.data,
                                   label=hdu_name)
            elif is_table_hdu(hdu):
                # Loop through columns and make component list
                table = Table(hdu.data)
                table_name = '{}[{}]'.format(
                    label_base,
                    hdu_name
                )
                for column_name in table.columns:
                    column = table[column_name]
                    shape = column.shape
                    data_label = '{}[{}]'.format(
                        table_name,
                        'x'.join(str(x) for x in shape)
                    )
                    try:
                        data = groups[data_label]
                    except KeyError:
                        data = Data(label=data_label)
                        groups[data_label] = data
                    component = Component.autotyped(column,
                                                    units=column.unit)
                    data.add_component(component=component,
                                       label=column_name)

    # dict.values() works on both Python 2 and 3 (itervalues is Python 2 only)
    return [data for data in groups.values()]
def deimos_spectrum2D_reader(file_name):
    """
    Data loader for Keck/DEIMOS 2D spectra.

    This loads only the Flux and Inverse variance. Wavelength information
    comes from the WCS.
    """
    hdulist = fits.open(file_name)
    data = Data(label='2D Spectrum')
    hdulist[1].header['CTYPE2'] = 'Spatial Y'
    wcs = WCS(hdulist[1].header)
    # The original WCS has both axes named "LAMBDA"; glue requires unique
    # component names.
    data.coords = coordinates_from_wcs(wcs)
    data.header = hdulist[1].header
    data.add_component(hdulist[1].data['FLUX'][0], 'Flux')
    data.add_component(hdulist[1].data['IVAR'][0], 'Uncertainty')
    return data
def nirspec_spectrum2d_reader(file_name):
    """
    Data loader for simulated NIRSpec 2D spectrum.

    This function extracts the DATA and VAR extensions and returns them as
    a glue Data object. It then uses the header keywords of the DATA
    extension to determine the wavelengths.
    """
    hdulist = fits.open(file_name)
    data = Data(label='2D Spectrum')
    data.header = hdulist['DATA'].header
    data.coords = coordinates_from_header(hdulist[1].header)
    data.add_component(hdulist['DATA'].data, 'Flux')
    data.add_component(hdulist['VAR'].data, 'Uncertainty')
    return data
def nirspec_spectrum1d_reader(file_name):

    file_name, ext = split_file_name(file_name, default_ext=1)

    with fits.open(file_name) as hdulist:
        header = hdulist['PRIMARY'].header

    tab = Table.read(file_name, hdu=ext)

    data = Data(label="1D Spectrum")
    data.header = header

    # This assumes the wavelength is in microns
    data.coords = SpectralCoordinates(np.array(tab['WAVELENGTH']) * u.micron)

    data.add_component(tab['WAVELENGTH'], "Wavelength")
    data.add_component(tab['FLUX'], "Flux")
    data.add_component(tab['ERROR'], "Uncertainty")
    return data
def _load_fits_generic(filename, **kwargs):
    hdulist = fits.open(filename)
    groups = defaultdict(Data)
    for extnum, hdu in enumerate(hdulist):
        if not isinstance(hdu, fits.TableHDU) and \
                hdu.data is not None:
            shape = hdu.data.shape
            if shape not in groups:
                label = '{}[{}]'.format(
                    basename(filename).split('.', 1)[0],
                    'x'.join((str(x) for x in shape))
                )
                data = Data(label=label)
                data.coords = coordinates_from_header(hdu.header)
                groups[shape] = data
            else:
                data = groups[shape]
            data.add_component(component=hdu.data,
                               label=hdu.header.get('EXTNAME',
                                                    'EXT[{}]'.format(str(extnum))))
    # dict.values() works on both Python 2 and 3 (itervalues is Python 2 only)
    return [data for data in groups.values()]
def pre_nircam_image_reader(file_name):
    """
    Data loader for simulated NIRCam image. This is for the
    full image, where cut-outs will be created on the fly.

    From the header:

    If ISWFS is T, structure is:

    - Plane 1: Signal [frame3 - frame1] in ADU
    - Plane 2: Signal uncertainty [sqrt(2*RN/g + \|frame3\|)]

    If ISWFS is F, structure is:

    - Plane 1: Signal from linear fit to ramp [ADU/sec]
    - Plane 2: Signal uncertainty [ADU/sec]

    Note that in the latter case, the uncertainty is simply the formal
    uncertainty in the fit parameter (eg. uncorrelated, WRONG!). Noise
    model to be implemented at a later date.
    In the case of WFS, error is computed as SQRT(2*sigma_read + \|frame3\|)
    which should be a bit more correct - ~Fowler sampling.

    The FITS file has a single extension with a data cube.
    The data is the first slice of the cube and the uncertainty
    is the second slice.
    """
    hdulist = fits.open(file_name)
    data = Data(label='NIRCam Image')
    data.header = hdulist[0].header
    wcs = WCS(hdulist[0].header)

    # drop the last axis since the cube will be split
    data.coords = coordinates_from_wcs(wcs)
    data.add_component(hdulist[0].data, 'Flux')
    data.add_component(hdulist[0].data / 100, 'Uncertainty')

    hdulist.close()
    return data
def to_glue(self, label="yt", data_collection=None):
    """
    Takes the data in the FITSImageData instance and exports it to
    Glue (http://www.glueviz.org) for interactive analysis. Optionally
    add a *label*. If you are already within the Glue environment, you
    can pass a *data_collection* object, otherwise Glue will be started.
    """
    from glue.core import DataCollection, Data
    from glue.core.coordinates import coordinates_from_header
    from glue.qt.glue_application import GlueApplication

    image = Data(label=label)
    image.coords = coordinates_from_header(self.wcs.to_header())
    for k, f in self.items():
        image.add_component(f.data, k)
    if data_collection is None:
        dc = DataCollection([image])
        app = GlueApplication(dc)
        app.start()
    else:
        data_collection.append(image)
def nirspec_level2_reader(file_name):
    """
    Data loader for level 2 products.

    Uses extension information to index the FITS HDU list. The ext info is
    included in the file_name as follows: <file_path>[<ext>]
    """
    file_name, ext = split_file_name(file_name, default_ext=1)

    hdulist = fits.open(file_name)
    data = Data(label="2D Spectra")
    data.header = hdulist[ext].header
    data.coords = coordinates_from_header(hdulist[ext].header)
    data.add_component(hdulist[ext].data, 'Level2 Flux')
    # TODO: update uncertainty once data model becomes clear
    data.add_component(np.sqrt(hdulist[ext + 2].data), 'Level2 Uncertainty')
    hdulist.close()
    return data
def nirspec_spectrum2d_reader(file_name):
    """
    Data loader for simulated NIRSpec 2D spectrum.

    This function extracts the data and variance extensions and returns
    them as a glue Data object. It then uses the header keywords of the
    data extension to determine the wavelengths.
    """
    file_name, ext = split_file_name(file_name, default_ext=1)

    hdulist = fits.open(file_name)
    data = Data(label="2D Spectrum")
    data.header = hdulist['PRIMARY'].header
    data.coords = coordinates_from_header(hdulist[ext].header)
    data.add_component(hdulist[ext].data, 'Flux')
    data.add_component(np.sqrt(hdulist[ext + 2].data), 'Uncertainty')
    hdulist.close()
    return data
def _load_GALFAHI_data(filename, **kwargs):

    def _get_cube_center(header, cubeshape):
        ra = header['CRVAL1'] + header['CDELT1'] * (np.arange(cubeshape[2]) + 0.5 - header['CRPIX1'])   # degree
        dec = header['CRVAL2'] + header['CDELT2'] * (np.arange(cubeshape[1]) + 0.5 - header['CRPIX2'])  # degree
        return np.mean(ra), np.mean(dec)

    # Add the primary components
    cube = fits.getdata(filename)
    header = fits.getheader(filename)
    header['CDELT3'] = header['CDELT3'] * (10 ** (-3))  # m/s --> km/s
    cen_ra, cen_dec = _get_cube_center(header, cube.shape)
    nn = filename.split('/')[-1]
    # cube_name = '%s_RA%dDEC%d' % (nn[0:3], cen_ra, cen_dec)
    cube_name = 'G_%d%+.2fradec_%.1fkm/s_%.1fa' % (cen_ra, cen_dec,
                                                   header['CDELT3'],
                                                   header['CDELT2'] * 60.)

    data = Data()
    data.coords = coordinates_from_header(header)
    data.add_component(cube, cube_name)
    data.label = cube_name

    # Append the cube once (the original appended the same object twice).
    data_list = []
    data_list.append(data)
    return data_list
def load_data(self, data_filenames):
    """
    Load the data based on the extensions defined in the matching YAML file,
    then create the data cube and return it.

    :param data_filenames: comma-separated list of FITS file names
    :return: glue Data object
    """
    label = None
    data = None

    ifucube = IFUCube()

    for data_filename in data_filenames.split(','):

        hdulist = ifucube.open(data_filename, fix=self._check_ifu_valid)

        # "Good" in this case means the file has 3D data and can be loaded
        # by SpectralCube.read.
        if self._check_ifu_valid and not ifucube.get_good():
            # The popup takes precedence: accepting continues operation and
            # canceling closes the program.
            self.popup_ui.ifucube_log.setText(ifucube.get_log_output())
            self.popup_ui.setModal(True)
            self.popup_ui.show()

            self.popup_ui.button_accept.clicked.connect(self._accept_button_click)
            self.popup_ui.button_cancel.clicked.connect(self._reject_button_click)

        if not label:
            label = "{}: {}".format(self._name,
                                    splitext(basename(data_filename))[0])
            data = Data(label=label)

            # This attribute is used to indicate to the cubeviz layout that
            # this is a cubeviz-specific data component.
            data.meta[CUBEVIZ_LAYOUT] = self._name

        data_coords_set = False

        for ii, hdu in enumerate(hdulist):
            if 'NAXIS' in hdu.header and hdu.header['NAXIS'] == 3:

                # Set the coords based on the first 3D HDU
                if not data_coords_set:
                    data.coords = coordinates_from_header(hdu.header)
                    data_coords_set = True

                component_name = str(ii)
                if 'EXTNAME' in hdu.header:
                    component_name = hdu.header['EXTNAME']

                    # The data must be floating point, as spectral-cube
                    # expects floating point data (np.float64 rather than
                    # the removed np.float alias).
                    data.add_component(component=hdu.data.astype(np.float64),
                                       label=component_name)

                    if 'BUNIT' in hdu.header:
                        c = data.get_component(component_name)
                        c.units = self.get_units(hdu.header)
                else:
                    # Create a unique component name
                    component_name = str(ii)
                    data.add_component(component=hdu.data.astype(np.float64),
                                       label=component_name)

    # For the purposes of exporting, we keep a reference to the original
    # HDUList object.
    data._cubeviz_hdulist = hdulist

    return data
def _load_fits_generic(source, exclude_exts=None, **kwargs):
    """Read in all extensions from a FITS file.

    Parameters
    ----------
    source : str or HDUList
        The pathname to the FITS file.
        If an HDUList is passed in, simply use that.

    exclude_exts : [hdu, ] or [index, ]
        List of HDUs to exclude from reading.
        This can be a list of HDUs or a list of HDU indexes.
    """
    exclude_exts = exclude_exts or []
    if not isinstance(source, fits.hdu.hdulist.HDUList):
        hdulist = fits.open(source)
    else:
        hdulist = source
    groups = dict()
    label_base = basename(hdulist.filename()).rpartition('.')[0]

    if not label_base:
        label_base = basename(hdulist.filename())

    for extnum, hdu in enumerate(hdulist):
        hdu_name = hdu.name if hdu.name else str(extnum)
        if hdu.data is not None and \
                hdu_name not in exclude_exts and \
                extnum not in exclude_exts:
            if is_image_hdu(hdu):
                shape = hdu.data.shape
                try:
                    data = groups[shape]
                except KeyError:
                    label = '{}[{}]'.format(
                        label_base,
                        'x'.join(str(x) for x in shape)
                    )
                    data = Data(label=label)
                    data.coords = coordinates_from_header(hdu.header)
                    groups[shape] = data
                data.add_component(component=hdu.data,
                                   label=hdu_name)
            elif is_table_hdu(hdu):
                # Loop through columns and make component list
                table = Table(hdu.data)
                table_name = '{}[{}]'.format(
                    label_base,
                    hdu_name
                )
                for column_name in table.columns:
                    column = table[column_name]
                    shape = column.shape
                    data_label = '{}[{}]'.format(
                        table_name,
                        'x'.join(str(x) for x in shape)
                    )
                    try:
                        data = groups[data_label]
                    except KeyError:
                        data = Data(label=data_label)
                        groups[data_label] = data
                    component = Component(column,
                                          units=column.unit)
                    data.add_component(component=component,
                                       label=column_name)

    return [data for data in six.itervalues(groups)]
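# Hedged usage sketch for _load_fits_generic() above: the filename and the
# excluded extension name are illustrative only.
from astropy.io import fits
from glue.core import DataCollection

hdulist = fits.open("multi_ext.fits")  # hypothetical multi-extension FITS file
datasets = _load_fits_generic(hdulist, exclude_exts=['DQ'])
dc = DataCollection(datasets)  # collect every image/table group into one collection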