def geospatial_reader(filename):
    """
    Read in geospatial data using the rasterio package

    Parameters
    ----------
    filename: str
        The input file
    """
    data = Data()
    with rasterio.open(filename) as src:
        for iband, band in enumerate(src.read()):
            # TODO: determine the proper labels for each band
            # NB: We have to flip the raw data in the up-down direction
            # as Glue plots using the matplotlib imshow argument `origin='lower'`
            # and otherwise the data comes up upside down.
            # WARNING: This may cause issues with other (non-matplotlib) image
            # viewers
            data.add_component(component=np.flipud(band.astype(float)),
                               label='Band {0}'.format(iband))
    return data
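# A minimal usage sketch for the reader above (assumes rasterio, numpy and
# glue-core are installed; 'scene.tif' is a hypothetical path used only for
# illustration).
import numpy as np
import rasterio
from glue.core import Data

data = geospatial_reader('scene.tif')
# Each raster band becomes one glue component labelled 'Band 0', 'Band 1', ...
print([str(cid) for cid in data.main_components])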
def _parse_data_dict(data, label):
    result = Data(label=label)

    for label, component in data.items():
        result.add_component(component, label)

    return [result]
def test_component_id_combo_helper_add():

    # Make sure that when adding a component, and if a data collection is not
    # present, the choices still get updated

    callback = MagicMock()

    state = ExampleState()
    state.add_callback('combo', callback)

    dc = DataCollection([])

    helper = ComponentIDComboHelper(state, 'combo')

    assert selection_choices(state, 'combo') == ""

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    callback.reset_mock()

    dc.append(data1)
    helper.append_data(data1)

    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "x:y"

    data1.add_component([7, 8, 9], 'z')

    # Should get notification since choices have changed
    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "x:y:z"
def test_to_ccddata_invalid():

    data = Data(label='not-an-image')
    data.add_component(Component(np.array([3.4, 2.3, -1.1, 0.3]), units='Jy'), 'x')

    with pytest.raises(ValueError) as exc:
        data.get_object(CCDData, attribute=data.id['x'])
    assert exc.value.args[0] == 'Only 2-dimensional datasets can be converted to CCDData'

    class FakeCoordinates(Coordinates):

        def pixel_to_world_values(self, *pixel):
            raise NotImplementedError()

        def world_to_pixel_values(self, *pixel):
            raise NotImplementedError()

    coords = FakeCoordinates(n_dim=2)
    coords.low_level_wcs = coords

    data = Data(label='image-with-custom-coords', coords=coords)
    data.add_component(Component(np.array([[3, 4], [4, 5]]), units='Jy'), 'x')

    with pytest.raises(TypeError) as exc:
        data.get_object(CCDData, attribute=data.id['x'])
    assert exc.value.args[0] == 'data.coords should be an instance of Coordinates or WCS'
def load_stacked_sequence(self, raster_data):
    for window, window_data in raster_data.items():
        w_data = Data(label=f"{window.replace(' ', '_')}")
        w_data.coords = WCSCoordinates(wcs=window_data.wcs)
        w_data.add_component(Component(window_data.data), f"{window}")
        self.datasets.append(w_data)
def test_datetime64_disabled(self, capsys):

    # Make sure that datetime components aren't options for the vector and
    # error markers.

    data = Data(label='test')
    data.add_component(np.array([100, 200, 300, 400], dtype='M8[D]'), 't1')
    data.add_component(np.array([200, 300, 400, 500], dtype='M8[D]'), 't2')
    data.add_component(np.array([200., 300., 400., 500.]), 'x')
    data.add_component(np.array([200., 300., 400., 500.]), 'y')
    self.data_collection.append(data)

    self.viewer.add_data(data)
    self.viewer.state.x_att = data.id['x']
    self.viewer.state.y_att = data.id['y']
    self.viewer.state.layers[0].cmap_mode = 'Linear'
    self.viewer.state.layers[0].cmap_att = data.id['x']
    self.viewer.state.layers[0].size_mode = 'Linear'
    self.viewer.state.layers[0].size_att = data.id['y']
    self.viewer.state.layers[0].vector_visible = True
    self.viewer.state.layers[0].xerr_visible = True
    self.viewer.state.layers[0].yerr_visible = True

    process_events()

    self.viewer.state.x_att = data.id['t1']
    self.viewer.state.y_att = data.id['t2']

    process_events()

    # We use capsys here because the error is otherwise only apparent in stderr.
    out, err = capsys.readouterr()
    assert out.strip() == ""
    assert err.strip() == ""
def import_iris_obs():
    caption = "Select a directory containing files from one IRIS OBS, and stack all raster scans."
    data_path = Path(pick_directory(caption))

    rasters = list(data_path.glob("*raster*"))
    sji = list(data_path.glob("*SJI*"))

    sji_data = []
    for s in sji:
        sji_data.append(load_sji_fits(s))

    raster_data = read_iris_spectrograph_level2_fits(rasters,
                                                     spectral_windows=['Mg II k 2796'],
                                                     memmap=False,
                                                     uncertainty=False)

    raster_data = {window: stack_spectrogram_sequence(seq)
                   for window, seq in raster_data.data.items()}

    result = []
    for window, window_data in raster_data.items():
        w_data = Data(label=f"{window.replace(' ', '_')}")
        w_data.coords = WCSCoordinates(wcs=window_data.wcs)
        w_data.add_component(Component(window_data.data), f"{window}")
        result.append(w_data)

    return result + sji_data
def load_mos_data(*args, **kwargs):
    path = "/".join(args[0].strip().split('/')[:-1])
    result = Data()

    # Read the table
    from astropy.table import Table
    table = Table.read(*args, format='ascii', **kwargs)

    # Loop through columns and make component list
    for column_name in table.columns:
        print(column_name)
        c = table[column_name]
        d = None
        u = c.unit if hasattr(c, 'unit') else c.units
        m = dict()
        m['cell'] = c
        m['path'] = path

        # if d is not None:
        #     print("Attempting to autotype")
        #     nc = MOSComponent(np.array([np.array(dt))
        #     result.add_component(nc, column_name)
        # else:
        nc = MOSComponent.autotyped(c, units=u, meta=m)
        result.add_component(nc, column_name)

    return result
def to_glue(self, label="yt", data_collection=None):
    """
    Takes the data in the FITSImageData instance and exports it to
    Glue (http://glueviz.org) for interactive analysis. Optionally
    add a *label*. If you are already within the Glue environment,
    you can pass a *data_collection* object, otherwise Glue will
    be started.
    """
    from glue.core import Data, DataCollection
    from glue.core.coordinates import coordinates_from_header
    try:
        from glue.app.qt.application import GlueApplication
    except ImportError:
        from glue.qt.glue_application import GlueApplication

    image = Data(label=label)
    image.coords = coordinates_from_header(self.wcs.to_header())
    for k in self.fields:
        image.add_component(self[k].data, k)
    if data_collection is None:
        dc = DataCollection([image])
        app = GlueApplication(dc)
        app.start()
    else:
        data_collection.append(image)
def read_cube(filename, **kwargs):
    cube_data = None
    exclude_exts = []
    data_collection = []

    hdulist = fits.open(filename)
    try:
        cube_data = CubeData.read(hdulist)
    except CubeDataIOError as e:
        warnings.warn('No CubeData found in "{}": {}'.format(
            filename,
            e.message
        ))

    if cube_data is not None:
        data = Data()
        try:
            data.coords = coordinates_from_wcs(cube_data.wcs)
        except AttributeError:
            # There is no wcs. Not to worry now.
            pass
        data.add_component(Component(cube_data), label="cube")
        data_collection.append(data)

        exclude_exts = cube_data.meta.get('hdu_ids')

    # Read in the rest of the FITS file.
    data_collection += _load_fits_generic(hdulist,
                                          exclude_exts=exclude_exts)
    return data_collection
def cube_to_data(self, cube, output_label=None, output_component_id=None):
    """
    Convert SpectralCube to final output.

    self.output_as_component is checked here:
    if self.output_as_component:
        add a new component to self.data
    else:
        create a new Data object and return it.

    :param cube: SpectralCube
    :param output_label: Name of new Data.
    :param output_component_id: label of new component
    :return:
    """
    original_data = self.data
    new_component = Component(cube._data.copy(), self.component_unit)

    if self.output_as_component:
        original_data.add_component(new_component, output_component_id)
        return None
    else:
        new_data = Data(label=output_label)
        new_data.coords = coordinates_from_header(cube.header)
        new_data.add_component(new_component, output_component_id)
        return new_data
def test_to_spectrum1d():

    # Set up simple spectral WCS
    wcs = WCS(naxis=1)
    wcs.wcs.ctype = ['VELO-LSR']
    wcs.wcs.set()

    coords = WCSCoordinates(wcs=wcs)

    data = Data(label='spectrum', coords=coords)
    data.add_component(Component(np.array([3.4, 2.3, -1.1, 0.3]), units='Jy'), 'x')

    spec = data.get_object(Spectrum1D, attribute=data.id['x'])

    assert_quantity_allclose(spec.spectral_axis, [1, 2, 3, 4] * u.m / u.s)
    assert_quantity_allclose(spec.flux, [3.4, 2.3, -1.1, 0.3] * u.Jy)

    data.add_subset(data.id['x'] > 1, label='bright')

    spec_subset = data.get_subset_object(cls=Spectrum1D, subset_id=0,
                                         attribute=data.id['x'])

    assert_quantity_allclose(spec_subset.spectral_axis, [1, 2, 3, 4] * u.m / u.s)
    assert_quantity_allclose(spec_subset.flux, [3.4, 2.3, np.nan, np.nan] * u.Jy)
    assert_equal(spec_subset.mask, [1, 1, 0, 0])
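# Note on the expected values above (a sketch of the default FITS-WCS behaviour):
# with CRPIX, CRVAL and CDELT left at their defaults, pixel i (0-based) maps to
# world value i + 1, so the four flux samples land on a spectral axis of
# [1, 2, 3, 4], in the m/s the test asserts for the 'VELO-LSR' velocity ctype.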
def test_to_ccddata(with_wcs):

    if with_wcs:
        coords = WCS_CELESTIAL
    else:
        coords = None

    data = Data(label='image', coords=coords)
    data.add_component(Component(np.array([[3.4, 2.3], [-1.1, 0.3]]), units='Jy'), 'x')

    image = data.get_object(CCDData, attribute=data.id['x'])

    assert image.wcs is (WCS_CELESTIAL if with_wcs else None)
    assert_allclose(image.data, [[3.4, 2.3], [-1.1, 0.3]])
    assert image.unit is u.Jy

    data.add_subset(data.id['x'] > 1, label='bright')

    image_subset = data.get_subset_object(cls=CCDData, subset_id=0,
                                          attribute=data.id['x'])

    assert image_subset.wcs is (WCS_CELESTIAL if with_wcs else None)
    assert_allclose(image_subset.data, [[3.4, 2.3], [-1.1, 0.3]])
    assert image_subset.unit is u.Jy
    assert_equal(image_subset.mask, [[0, 0], [1, 1]])
def read_cube(filename, **kwargs):
    cube_data = CubeData.read(filename)

    data = Data()
    data.add_component(Component(cube_data), label="cube")
    print("Loaded successfully")
    return data
def load_sequence(self, raster_data):
    for window, window_data in raster_data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = WCSCoordinates(wcs=scan_data.wcs)
            w_data.add_component(Component(scan_data.data), f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            self.datasets.append(w_data)
def test_to_spectral_cube_invalid_ndim():

    data = Data(label='not-a-spectral-cube')
    data.add_component(Component(np.array([3.4, 2.3, -1.1, 0.3]), units='Jy'), 'x')

    with pytest.raises(ValueError) as exc:
        data.get_object(SpectralCube, attribute=data.id['x'])
    assert exc.value.args[0] == ('Data object should have 3 or 4 dimensions in order '
                                 'to be converted to a SpectralCube object.')
def example_volume(shape=64, limits=[-4, 4]):
    """Creates a test data set containing a ball"""
    from glue.core import Data
    import numpy as np
    import ipyvolume as ipv
    ball_data = ipv.examples.ball(shape=shape, limits=limits, show=False, draw=False)
    data = Data()
    data.add_component(ball_data, label='intensity')
    return data
def test_to_ccddata_unitless():

    data = Data(label='image', coords=WCS_CELESTIAL)
    data.add_component(Component(np.array([[3.4, 2.3], [-1.1, 0.3]])), 'x')

    image = data.get_object(CCDData, attribute=data.id['x'])

    assert_allclose(image.data, [[3.4, 2.3], [-1.1, 0.3]])
    assert image.unit is u.one
def test_to_spectrum1d_invalid():

    data = Data(label='not-a-spectrum')
    data.add_component(Component(np.array([3.4, 2.3, -1.1, 0.3]), units='Jy'), 'x')

    with pytest.raises(TypeError) as exc:
        data.get_object(Spectrum1D, attribute=data.id['x'])
    assert exc.value.args[0] == ('data.coords should be an instance of WCS '
                                 'or SpectralCoordinates')
def test_to_spectral_cube_missing_wcs():

    data = Data(label='not-a-spectral-cube')
    values = np.random.random((4, 5, 3))
    data.add_component(Component(values, units='Jy'), 'x')

    with pytest.raises(TypeError) as exc:
        data.get_object(SpectralCube, attribute=data.id['x'])
    assert exc.value.args[0] == ('data.coords should be an instance of BaseLowLevelWCS.')
def test_to_spectral_cube_unitless(spectral_cube_wcs):

    data = Data(label='spectral_cube', coords=spectral_cube_wcs)

    values = np.random.random((4, 5, 3))
    data.add_component(Component(values), 'x')

    spec = data.get_object(SpectralCube, attribute=data.id['x'])

    assert_quantity_allclose(spec.spectral_axis, [1, 2, 3, 4] * u.m / u.s)
    assert_quantity_allclose(spec.filled_data[...], values * u.one)
def load_sunpy_map(self, sunpy_map):
    sunpy_map_loaded = sunpy.map.Map(sunpy_map)
    label = 'sunpy-map-' + sunpy_map_loaded.name
    data = Data(label=label)
    data.coords = sunpy_map_loaded.wcs  # preferred way, preserves more info in some cases
    data.meta = sunpy_map_loaded.meta
    data.add_component(Component(sunpy_map_loaded.data), sunpy_map_loaded.name)
    data.style = VisualAttributes(color='#FDB813', preferred_cmap=sunpy_map.cmap)
    self.datasets.append(data)
def load_sequence(self, raster_data):
    for window, window_data in raster_data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = scan_data.wcs
            w_data.add_component(Component(scan_data.data), f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            w_data.style = VisualAttributes(color='#5A4FCF')
            self.datasets.append(w_data)
def load_sji(self, sji):
    with fits.open(sji) as hdul:
        hdul.verify("fix")
        label = hdul[0].header['TDESC1']
        data = Data(label=label)
        data.coords = WCSCoordinates(hdul[0].header)
        data.meta = hdul[0].header
        data.add_component(Component(hdul[0].data), label)
        self.datasets.append(data)
def _load_GALFAHI_data_LowRes(filename, **kwargs):
    # Data loader customized for GALFA-HI data cubes.
    # Resize the data cube into lower resolution in velocity/space.

    def _bin_cube(cube, factor, axis_label):
        # resize the cube to lower resolution
        # (integer division so the new shape is valid for reshape)
        shape = cube.shape
        if axis_label == 'VELO':
            new_shape = (shape[0] // factor, factor, shape[1], shape[2])
            return cube.reshape(new_shape).mean(axis=1)
        elif axis_label == 'RADEC':
            new_shape = (shape[0], shape[1] // factor, factor, shape[2] // factor, factor)
            return cube.reshape(new_shape).mean(axis=4).mean(axis=2)
        else:
            return cube

    # change the header for those cubes that have been binned into low resolutions
    def _get_new_header(header, factor, axis_label):
        new_header = header
        if axis_label == 'VELO':
            new_header['NAXIS3'] = header['NAXIS3'] // factor
            new_header['CRVAL3'] = header['CRVAL3']
            new_header['CRPIX3'] = float(header['CRPIX3'] / factor)
            new_header['CDELT3'] = header['CDELT3'] * factor
        elif axis_label == 'RADEC':
            for ax in [1, 2]:
                new_header['NAXIS%d' % (ax)] = header['NAXIS%d' % (ax)] // factor
                new_header['CRVAL%d' % (ax)] = header['CRVAL%d' % (ax)]
                new_header['CRPIX%d' % (ax)] = float(header['CRPIX%d' % (ax)] / factor)
                new_header['CDELT%d' % (ax)] = header['CDELT%d' % (ax)] * factor
        else:
            new_header = header

        # m/s --> km/s
        new_header['CDELT3'] = new_header['CDELT3'] * (10 ** (-3))
        return new_header

    def _get_cube_center(header, cubeshape):
        ra = header['CRVAL1'] + header['CDELT1'] * (np.arange(cubeshape[2]) + 0.5 - header['CRPIX1'])  # degree
        dec = header['CRVAL2'] + header['CDELT2'] * (np.arange(cubeshape[1]) + 0.5 - header['CRPIX2'])  # degree
        return np.mean(ra), np.mean(dec)

    data_list = []
    # add 3 data objects with different resolutions:
    for factor, axis_label in zip([4, 16, 2], ['VELO', 'VELO', 'RADEC']):
        cube = fits.getdata(filename)
        header = fits.getheader(filename)
        cen_ra, cen_dec = _get_cube_center(header, cube.shape)

        new_header = _get_new_header(header, factor, axis_label)
        cube_name = 'G_%d%+.2fradec_%.1fkm/s_%.1fa' % (cen_ra, cen_dec,
                                                       new_header['CDELT3'],
                                                       new_header['CDELT2'] * 60.)

        data = Data()
        data.coords = coordinates_from_header(new_header)
        data.add_component(_bin_cube(cube, factor, axis_label), cube_name)
        data.label = cube_name
        data_list.append(data)
        del data, cube, header

    return data_list
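# A small self-contained sketch of the reshape-and-mean binning trick used by
# _bin_cube above (pure numpy, no GALFA-HI data needed): binning a length-8
# velocity axis by a factor of 2 averages each consecutive pair of channels.
import numpy as np

cube = np.arange(8 * 4 * 4, dtype=float).reshape(8, 4, 4)
factor = 2
binned = cube.reshape(8 // factor, factor, 4, 4).mean(axis=1)
print(binned.shape)  # (4, 4, 4): velocity axis halved, spatial axes unchanged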
def make_test_data():
    data = Data(label="Test Cat Data 1")

    np.random.seed(12345)

    for letter in 'abcdefxyz':
        comp = Component(np.random.random(100))
        data.add_component(comp, letter)

    return data
def make_test_data():
    data = Data(label="Test Cube Data")

    np.random.seed(12345)

    for letter in 'abc':
        comp = Component(np.random.random((10, 10, 10)))
        data.add_component(comp, letter)

    return data
def _parse_iris_raster(data, label):
    result = []
    for window, window_data in data.data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = WCSCoordinates(wcs=scan_data.wcs)
            w_data.add_component(Component(scan_data.data), f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            result.append(w_data)

    return result
def load_sji_fits(filename):
    with fits.open(filename) as hdul:
        hdul.verify("fix")
        sji = hdul[0]
        label = sji.header['TDESC1']
        data = Data(label=label)
        data.coords = WCSCoordinates(sji.header)
        data.meta = sji.header
        data.add_component(Component(sji.data), label)

    return data
def test_numerical_data_changed(self):
    self.init_draw_count()
    self.init_subset()
    assert self.draw_count == 0
    self.viewer.add_data(self.data)
    assert self.draw_count == 1
    data = Data()
    for cid in self.data.visible_components:
        data.add_component(self.data[cid] * 2, cid.label)
    self.data.update_values_from_data(data)
    assert self.draw_count == 2
def test_hypercube_world(self):

    # Check defaults when we add data

    wcs = WCS(naxis=4)

    hypercube2 = Data()
    hypercube2.coords = WCSCoordinates(wcs=wcs)
    hypercube2.add_component(np.random.random((2, 3, 4, 5)), 'a')

    self.data_collection.append(hypercube2)

    self.viewer.add_data(hypercube2)
def _parse_iris_raster(data, label):
    result = []
    for window, window_data in data.items():
        for i, scan_data in enumerate(window_data):
            w_data = Data(label=f"{window.replace(' ', '_')}-scan-{i}")
            w_data.coords = WCSCoordinates(scan_data.header)
            w_data.add_component(Component(scan_data.data), f"{window}-scan-{i}")
            w_data.meta = scan_data.meta
            w_data.style = VisualAttributes(color='#5A4FCF')
            result.append(w_data)

    return result
def read_tiff_metadata(filename):
    """ Read a TIFF image, looking for .tfw metadata """
    base, ext = os.path.splitext(filename)
    data = np.flipud(np.array(Image.open(filename).convert('L')))

    result = Data()

    if os.path.exists(base + '.tfw'):
        result.coords = tfw_to_coords(base + '.tfw', data.shape)

    result.add_component(data, 'map')
    return result
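# Background note (a sketch of the world-file convention the .tfw lookup above
# relies on): a .tfw file is six plain-text lines describing the affine
# pixel-to-world transform: the x pixel size, two rotation terms, the (usually
# negative) y pixel size, and the world x/y of the centre of the upper-left
# pixel. tfw_to_coords is assumed to parse those six numbers into glue
# coordinates.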
def collapse_to_1d(subset, data_collection):
    mask = subset.to_mask()
    md = np.ma.masked_array(subset.data['FLUX'], mask=mask)
    mdd = md.reshape((-1, md.shape[1] * md.shape[2]))
    spec = np.sum(mdd, axis=1)
    spec_data = Data(flux=spec,
                     label=':'.join((subset.label, subset.data.label, 'collapsed')))
    # integer division so the central-pixel indices are valid array indices
    wave_component = subset.data['Wave'][:, md.shape[1] // 2, md.shape[2] // 2]
    spec_data.add_component(component=wave_component, label='Wave')
    spec_data.add_component(component=spec, label='FLUX')
    data_collection.append(spec_data)
def spectral_cube_to_data(cube, label=None):

    if isinstance(cube, SpectralCube):
        cube = StokesSpectralCube({'I': cube})

    result = Data(label=label)
    result.coords = coordinates_from_wcs(cube.wcs)

    for component in cube.components:
        data = getattr(cube, component).unmasked_data[...]
        result.add_component(data, label='STOKES {0}'.format(component))

    return result
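# A minimal usage sketch for the converter above (assumes the spectral-cube
# package is installed; 'cube.fits' is a hypothetical file path used only for
# illustration).
from spectral_cube import SpectralCube

cube = SpectralCube.read('cube.fits')
data = spectral_cube_to_data(cube, label='my-cube')
# A plain SpectralCube is wrapped as Stokes I, so the single component is
# labelled 'STOKES I'.
print([str(cid) for cid in data.main_components])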
def _get_sci_group(i, index):
    d = Data("%s_%i" % (label, index))
    d.coords = coordinates_from_wcs(HSTWCS(hdulist, i))
    index = index + 1
    d.add_component(hdulist[i].data, hdulist[i].name)
    for h in hdulist[i:]:
        if h.name == 'SCI':
            break  # new science grp
        if h.name not in ['ERR', 'DQ']:
            continue
        d.add_component(h.data, h.name)
    return d
def test_to_spectrum1d_with_spectral_coordinates():

    coords = SpectralCoordinates([1, 4, 10] * u.micron)

    data = Data(label='spectrum1d', coords=coords)
    data.add_component(Component(np.array([3, 4, 5]), units='Jy'), 'x')

    assert_allclose(data.coords.pixel2world([0, 0.5, 1, 1.5, 2]),
                    [[1, 2.5, 4, 7, 10]])

    spec = data.get_object(Spectrum1D, attribute=data.id['x'])
    assert_quantity_allclose(spec.spectral_axis, [1, 4, 10] * u.micron)
    assert_quantity_allclose(spec.flux, [3, 4, 5] * u.Jy)
def export_glue(ds, data, name):
    from glue.core import Data, DataCollection
    from glue.qt.glue_application import GlueApplication
    import numpy as np

    d = Data(label=name)
    d.add_component(ytComponent(data, ds, name), label='x')
    dc = DataCollection(d)

    ga = GlueApplication(dc)
    ga.start()
def example_image(shape=64, limits=[-4, 4]):
    """
    Creates a test 2-d dataset containing an image.
    """
    from glue.core import Data
    import numpy as np
    x = np.linspace(-3, 3, num=shape)
    X, Y = np.meshgrid(x, x)
    rho = 0.8
    intensity = np.exp(-X**2 - Y**2 - 2 * X * Y * rho)
    data = Data()
    data.add_component(intensity, label='intensity')
    return data
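# A minimal usage sketch (assumes glue-core is installed; the 'demo' label is
# arbitrary and only used for illustration).
from glue.core import DataCollection

image = example_image(shape=128)
image.label = 'demo'
dc = DataCollection([image])
print(image.shape)  # (128, 128): a single 2-d 'intensity' component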
def acs_cutout_image_reader(file_name):
    """
    Data loader for the ACS cut-outs for the DEIMOS spectra.

    The cutouts contain only the image.
    """

    hdulist = fits.open(file_name)
    data = Data(label='ACS Cutout Image')
    data.coords = coordinates_from_header(hdulist[0].header)
    data.header = hdulist[0].header
    data.add_component(hdulist[0].data, 'Flux')

    return data
def pre_nirspec_level2_reader(file_name):
    """
    THIS IS A TEST!
    """

    # TODO: The level 2 file has multiple exposures.
    # TODO: the level 2 test file has SCI extensions with different shapes.

    hdulist = fits.open(file_name)

    data = Data(label='2D Spectra')

    hdulist[1].header['CTYPE2'] = 'Spatial Y'
    data.header = hdulist[1].header

    # This is a stop-gap fix to let fake data be ingested as
    # level 2 spectra. The level 2 file we have for testing
    # right now has SCI extensions with different sized arrays
    # among them. It remains to be seen if this is an expected
    # feature of level 2 spectra, or just a temporary glitch.
    # In case it's actually what level 2 spectral files look
    # like, proper handling must be put in place to allow
    # glue Data objects with different sized components. Or,
    # if that is not feasible, to properly cut the arrays so
    # as to make them all the same size. The solution below
    # is a naive interpretation of this concept.

    x_min = 10000
    y_min = 10000
    for k in range(1, len(hdulist)):
        if 'SCI' in hdulist[k].header['EXTNAME']:
            x_min = min(x_min, hdulist[k].data.shape[0])
            y_min = min(y_min, hdulist[k].data.shape[1])

            # hdulist[k].header['CTYPE2'] = 'Spatial Y'
            # original WCS has both axes named "LAMBDA",
            # glue requires unique component names
            # wcs = WCS(hdulist[1].header)
            # data.coords = coordinates_from_wcs(wcs)
            # data.header = hdulist[k].header
            # data.add_component(hdulist[1].data['FLUX'][0], 'Flux')

    count = 1
    for k in range(1, len(hdulist)):
        if 'SCI' in hdulist[k].header['EXTNAME']:
            data.add_component(hdulist[k].data[0:x_min, 0:y_min],
                               'Flux_' + '{:03d}'.format(count))
            count += 1

    # data.add_component(1 / np.sqrt(hdulist[1].data['IVAR'][0]), 'Uncertainty')

    return data
def make_test_data():
    data = Data(label="Test Cube Data")

    np.random.seed(12345)

    for letter in 'abc':
        comp = Component(np.random.random((10, 10, 10)))
        data.add_component(comp, letter)

    # make sure one component key is primary
    data.add_component(Component(np.random.random((10, 10, 10))), 'PRIMARY')

    return data
def test_numerical_data_changed(self):
    self.init_draw_count()
    self.init_subset()
    assert self.draw_count == 0
    self.viewer.add_data(self.data)
    assert self.draw_count == 1
    data = Data(label=self.data.label)
    data.coords = self.data.coords
    for cid in self.data.main_components:
        if self.data.get_kind(cid) == 'numerical':
            data.add_component(self.data[cid] * 2, cid.label)
        else:
            data.add_component(self.data[cid], cid.label)
    self.data.update_values_from_data(data)
    assert self.draw_count == 2
def _load_fits_generic(filename, **kwargs):
    hdulist = fits.open(filename)
    groups = dict()
    label_base = basename(filename).rpartition('.')[0]

    if not label_base:
        label_base = basename(filename)

    for extnum, hdu in enumerate(hdulist):
        if hdu.data is not None:
            hdu_name = hdu.name if hdu.name else str(extnum)
            if is_image_hdu(hdu):
                shape = hdu.data.shape
                try:
                    data = groups[shape]
                except KeyError:
                    label = '{}[{}]'.format(
                        label_base,
                        'x'.join(str(x) for x in shape)
                    )
                    data = Data(label=label)
                    data.coords = coordinates_from_header(hdu.header)
                    groups[shape] = data
                data.add_component(component=hdu.data,
                                   label=hdu_name)
            elif is_table_hdu(hdu):
                # Loop through columns and make component list
                table = Table(hdu.data)
                table_name = '{}[{}]'.format(
                    label_base,
                    hdu_name
                )
                for column_name in table.columns:
                    column = table[column_name]
                    shape = column.shape
                    data_label = '{}[{}]'.format(
                        table_name,
                        'x'.join(str(x) for x in shape)
                    )
                    try:
                        data = groups[data_label]
                    except KeyError:
                        data = Data(label=data_label)
                        groups[data_label] = data
                    component = Component.autotyped(column, units=column.unit)
                    data.add_component(component=component,
                                       label=column_name)
    return [data for data in groups.values()]
def test_conversion_utils_spectral_coordinates():

    # Set up glue Coordinates object
    coords = SpectralCoordinates([1, 4, 10] * u.micron)

    data = Data(label='spectrum1d', coords=coords)
    data.add_component(Component(np.array([3, 4, 5]), units='Jy'), 'x')

    assert_allclose(data.coords.pixel2world([0, 0.5, 1, 1.5, 2]),
                    [[1, 2.5, 4, 7, 10]])

    assert glue_data_has_spectral_axis(data)

    spec = glue_data_to_spectrum1d(data, data.id['x'])
    assert_quantity_allclose(spec.spectral_axis, [1, 4, 10] * u.micron)
    assert_quantity_allclose(spec.flux, [3, 4, 5] * u.Jy)
class Test4DExtractor(object):

    def setup_method(self, method):
        self.data = Data()
        self.data.coords = MockCoordinates()
        x, y, z, w = np.mgrid[:3, :4, :5, :4]
        self.data.add_component(1.0 * w, label="x")

    def test_extract(self):
        roi = RectangularROI()
        roi.update_limits(0, 0, 2, 3)

        expected = self.data["x"][:, :2, :3, 1].mean(axis=1).mean(axis=1)
        _, actual = Extractor.spectrum(self.data, self.data.id["x"],
                                       roi, (0, "x", "y", 1), 0)

        np.testing.assert_array_equal(expected, actual)
def test_conversion_utils_3d():

    # Set up simple spectral WCS
    wcs = WCS(naxis=3)
    wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'VELO-LSR']
    wcs.wcs.set()

    # Set up glue Coordinates object
    coords = WCSCoordinates(wcs=wcs)

    data = Data(label='spectral-cube', coords=coords)
    data.add_component(Component(np.ones((3, 4, 5)), units='Jy'), 'x')

    assert glue_data_has_spectral_axis(data)

    spec = glue_data_to_spectrum1d(data, data.id['x'], statistic='sum')
    assert_quantity_allclose(spec.spectral_axis, [1, 2, 3] * u.m / u.s)
    assert_quantity_allclose(spec.flux, [20, 20, 20] * u.Jy)
def test_conversion_utils_1d():

    # Set up simple spectral WCS
    wcs = WCS(naxis=1)
    wcs.wcs.ctype = ['VELO-LSR']
    wcs.wcs.set()

    # Set up glue Coordinates object
    coords = WCSCoordinates(wcs=wcs)

    data = Data(label='spectrum', coords=coords)
    data.add_component(Component(np.array([3.4, 2.3, -1.1, 0.3]), units='Jy'), 'x')

    assert glue_data_has_spectral_axis(data)

    spec = glue_data_to_spectrum1d(data, data.id['x'])
    assert_quantity_allclose(spec.spectral_axis, [1, 2, 3, 4] * u.m / u.s)
    assert_quantity_allclose(spec.flux, [3.4, 2.3, -1.1, 0.3] * u.Jy)
def yt_data(path):
    """Use yt to load a gridded dataset

    This function will extract all particle and field datasets
    (excluding derived datasets) from a file.

    Currently, you cannot make images from this data.

    The resulting Field dataset refers to the highest-resolution subgrids.

    Parameters
    ----------
    path : str
        Path to file to load. This is what gets passed to yt.mods.load()

    Returns
    -------
    One or two Glue data objects
    """
    ds = load(path)
    dd = ds.h.all_data()

    particles = [f for f in ds.h.field_list
                 if ds.field_info[f].particle_type]
    fields = [f for f in ds.h.field_list
              if not ds.field_info[f].particle_type]

    lbl = data_label(path)

    result = []
    if len(particles) > 0:
        d1 = Data(label=lbl + "_particle")
        shp = dd[particles[0]].shape
        for p in particles:
            d1.add_component(YtComponent(ds, p, shp), p)
        result.append(d1)

    if len(fields) > 0:
        d2 = Data(label=lbl + "_field")
        shp = dd[fields[0]].shape
        for f in fields:
            d2.add_component(YtComponent(ds, f, shp), f)
        result.append(d2)

    return result
def deimos_spectrum2D_reader(file_name):
    """
    Data loader for Keck/DEIMOS 2D spectra.

    This loads only the Flux and Inverse variance.
    Wavelength information comes from the WCS.
    """

    hdulist = fits.open(file_name)
    data = Data(label='2D Spectrum')
    hdulist[1].header['CTYPE2'] = 'Spatial Y'
    # original WCS has both axes named "LAMBDA"; glue requires unique component names
    wcs = WCS(hdulist[1].header)
    data.coords = coordinates_from_wcs(wcs)
    data.header = hdulist[1].header
    data.add_component(hdulist[1].data['FLUX'][0], 'Flux')
    data.add_component(hdulist[1].data['IVAR'][0], 'Uncertainty')
    return data
def nirspec_spectrum2d_reader(file_name):
    """
    Data loader for simulated NIRSpec 2D spectrum.

    This function extracts the DATA, QUALITY, and VAR
    extensions and returns them as a glue Data object.

    It then uses the header keywords of the DATA extension
    to determine the wavelengths.
    """

    hdulist = fits.open(file_name)

    data = Data(label='2D Spectrum')
    data.header = hdulist['DATA'].header
    data.coords = coordinates_from_header(hdulist[1].header)
    data.add_component(hdulist['DATA'].data, 'Flux')
    data.add_component(hdulist['VAR'].data, 'Uncertainty')

    return data
def test_link_aligned(ndata, ndim):
    ds = []
    shp = tuple([2] * ndim)
    for i in range(ndata):
        d = Data()
        c = Component(np.random.random(shp))
        d.add_component(c, 'test')
        ds.append(d)

    # assert that all componentIDs are interchangeable
    links = LinkAligned(ds)
    dc = DataCollection(ds)
    dc.add_link(links)

    for i in range(ndim):
        id0 = ds[0].get_pixel_component_id(i)
        for j in range(1, ndata):
            id1 = ds[j].get_pixel_component_id(i)
            np.testing.assert_array_equal(ds[j][id0], ds[j][id1])