def reverse_add_data(self, data_item):
    """
    Adds data from specviz to glue.

    Parameters
    ----------
    data_item : :class:`specviz.core.items.DataItem`
        The data item recently added to model.
    """
    spec = data_item.spectrum
    glue_data = Data(label=data_item.name)
    glue_data.coords = coordinates_from_header(spec.wcs)

    # Flux and dispersion are always present on the spectrum.
    glue_data.add_component(
        Component(spec.flux, spec.flux.unit), "Flux")
    glue_data.add_component(
        Component(spec.spectral_axis, spec.spectral_axis.unit), "Dispersion")

    # Uncertainty is optional on the specviz side.
    if spec.uncertainty is not None:
        glue_data.add_component(
            Component(spec.uncertainty.array, spec.uncertainty.unit),
            "Uncertainty")

    self._session.data_collection.append(glue_data)
def on_finished(self, data, unit=None):
    """
    Called when the `QThread` has finished performing the operation on the
    `SpectralCube` object.

    Parameters
    ----------
    data : ndarray
        The result of the operation performed on the `SpectralCube` object.
    unit : optional
        Units attached to the new component when the result is not 2D.
    """
    # Base component name combines the source component with the operation.
    component_name = "{} {}".format(self._component_id, self._operation_name)
    # Count existing components with the same base name and suffix the new
    # one so repeated operations don't collide.
    comp_count = len([x for x in self._data.component_ids()
                      if component_name in str(x)])
    if comp_count > 0:
        component_name = "{} {}".format(component_name, comp_count)
    if data.ndim == 2:
        # 2D result: store it in a separate 2D container dataset using only
        # the celestial part of the parent's WCS, and show it as an overlay.
        coords = WCSCoordinates(wcs=self._data.coords.wcs.celestial)
        self._data.container_2d = Data(label=self._data.label + " [2d]",
                                       coords=coords)
        self._data.container_2d.add_component(data, component_name)
        # self._layout.session.data_collection.append(self._data.container_2d)
        self._layout.add_overlay(data, component_name, display_now=True)
    else:
        # Non-2D result: attach it directly to the parent dataset.
        component = Component(data, units=unit)
        self._data.add_component(component, component_name)
    super(SpectralOperationHandler, self).accept()
def load_stacked_sequence(self, raster_data):
    """Wrap each stacked spectral window in a glue ``Data`` and store it."""
    for window_name, stacked in raster_data.items():
        dataset = Data(label=f"{window_name.replace(' ', '_')}")
        dataset.coords = WCSCoordinates(wcs=stacked.wcs)
        component = Component(stacked.data)
        dataset.add_component(component, f"{window_name}")
        self.datasets.append(dataset)
def import_iris_obs():
    """
    Prompt for a directory holding one IRIS OBS, load its SJI files, stack
    all raster scans per spectral window, and return everything as a list
    of glue ``Data`` objects (rasters first, then SJI data).
    """
    caption = ("Select a directory containing files from one IRIS OBS, "
               "and stack all raster scans.")
    data_path = Path(pick_directory(caption))

    raster_files = list(data_path.glob("*raster*"))
    sji_files = list(data_path.glob("*SJI*"))

    sji_data = [load_sji_fits(path) for path in sji_files]

    spectrograph = read_iris_spectrograph_level2_fits(
        raster_files, spectral_windows=['Mg II k 2796'],
        memmap=False, uncertainty=False)
    # Collapse each window's sequence of scans into a single stacked cube.
    stacked = {window: stack_spectrogram_sequence(seq)
               for window, seq in spectrograph.data.items()}

    raster_datasets = []
    for window, window_data in stacked.items():
        w_data = Data(label=f"{window.replace(' ', '_')}")
        w_data.coords = WCSCoordinates(wcs=window_data.wcs)
        w_data.add_component(Component(window_data.data), f"{window}")
        raster_datasets.append(w_data)

    return raster_datasets + sji_data
def load_sequence(self, raster_data):
    """Create one glue ``Data`` per raster scan in each spectral window."""
    for window, scans in raster_data.items():
        safe_name = window.replace(' ', '_')
        for index, scan in enumerate(scans):
            dataset = Data(label=f"{safe_name}-scan-{index}")
            dataset.coords = WCSCoordinates(wcs=scan.wcs)
            dataset.add_component(Component(scan.data),
                                  f"{window}-scan-{index}")
            dataset.meta = scan.meta
            self.datasets.append(dataset)
def load_sequence(self, raster_data):
    """
    Create one glue ``Data`` per raster scan in each spectral window,
    assigning the scan's WCS directly and a fixed iris-blue display color.
    """
    for window, scans in raster_data.items():
        safe_name = window.replace(' ', '_')
        for index, scan in enumerate(scans):
            dataset = Data(label=f"{safe_name}-scan-{index}")
            # WCS object is attached directly (no WCSCoordinates wrapper).
            dataset.coords = scan.wcs
            dataset.add_component(Component(scan.data),
                                  f"{window}-scan-{index}")
            dataset.meta = scan.meta
            dataset.style = VisualAttributes(color='#5A4FCF')
            self.datasets.append(dataset)
def load_sunpy_map(self, sunpy_map):
    """
    Convert the input into a glue ``Data`` object via ``sunpy.map.Map``
    and append it to ``self.datasets``.

    Parameters
    ----------
    sunpy_map : `sunpy.map.GenericMap` or any ``sunpy.map.Map`` source
        Input passed straight to ``sunpy.map.Map`` (e.g. a map instance
        or a file path).
    """
    sunpy_map_loaded = sunpy.map.Map(sunpy_map)
    label = 'sunpy-map-' + sunpy_map_loaded.name
    data = Data(label=label)
    data.coords = sunpy_map_loaded.wcs  # preferred way, preserves more info in some cases
    data.meta = sunpy_map_loaded.meta
    data.add_component(Component(sunpy_map_loaded.data),
                       sunpy_map_loaded.name)
    # BUG FIX: take the colormap from the *loaded* map; the raw argument
    # may be a filename string with no ``cmap`` attribute.
    data.style = VisualAttributes(color='#FDB813',
                                  preferred_cmap=sunpy_map_loaded.cmap)
    self.datasets.append(data)
def load_sji(self, sji):
    """Load an IRIS SJI FITS file into a glue ``Data`` and store it."""
    with fits.open(sji) as hdul:
        hdul.verify("fix")
        header = hdul[0].header
        label = header['TDESC1']
        dataset = Data(label=label)
        dataset.coords = WCSCoordinates(header)
        dataset.meta = header
        dataset.add_component(Component(hdul[0].data), label)
        self.datasets.append(dataset)
def _parse_iris_raster(data, label):
    """
    Convert an IRIS spectrograph object into a list of glue ``Data``
    objects, one per scan per spectral window.
    """
    parsed = []
    for window, scans in data.data.items():
        safe_name = window.replace(' ', '_')
        for index, scan in enumerate(scans):
            entry = Data(label=f"{safe_name}-scan-{index}")
            entry.coords = WCSCoordinates(wcs=scan.wcs)
            entry.add_component(Component(scan.data),
                                f"{window}-scan-{index}")
            entry.meta = scan.meta
            parsed.append(entry)
    return parsed
def load_sji_fits(filename):
    """Read an IRIS SJI FITS file and return it as a glue ``Data``."""
    with fits.open(filename) as hdul:
        hdul.verify("fix")
        primary = hdul[0]
        label = primary.header['TDESC1']
        result = Data(label=label)
        result.coords = WCSCoordinates(primary.header)
        result.meta = primary.header
        result.add_component(Component(primary.data), label)
        return result
def _parse_iris_raster(data, label):
    """
    Convert a mapping of spectral windows to scan lists into glue ``Data``
    objects (one per scan), colored with a fixed iris-blue style.
    """
    parsed = []
    for window, scans in data.items():
        safe_name = window.replace(' ', '_')
        for index, scan in enumerate(scans):
            entry = Data(label=f"{safe_name}-scan-{index}")
            # Coordinates are built from the scan's FITS header here.
            entry.coords = WCSCoordinates(scan.header)
            entry.add_component(Component(scan.data),
                                f"{window}-scan-{index}")
            entry.meta = scan.meta
            entry.style = VisualAttributes(color='#5A4FCF')
            parsed.append(entry)
    return parsed
def load_sji(self, sji):
    """
    Load an IRIS SJI FITS file into a glue ``Data``, tagging it with its
    preferred IRIS colormap, and store it in ``self.datasets``.
    """
    with fits.open(sji) as hdul:
        hdul.verify("fix")
        header = hdul[0].header
        label = header['TDESC1']
        dataset = Data(label=label)
        dataset.coords = WCSCoordinates(header)
        dataset.meta = header
        # IRIS colormap names use spaces where TDESC1 uses underscores.
        preferred_cmap_name = 'IRIS ' + header['TDESC1'].replace('_', ' ')
        dataset.style = VisualAttributes(preferred_cmap=preferred_cmap_name)
        dataset.add_component(Component(hdul[0].data), label)
        self.datasets.append(dataset)
def _parse_sunpy_map(data, label):
    """
    Parse SunPy map so that it can be loaded by ``glue``.
    """
    sunpy_map = data
    full_label = label + '-' + sunpy_map.name
    parsed = Data(label=full_label)
    # Assign the WCS object directly; preferred way, preserves more info
    # in some cases.
    parsed.coords = sunpy_map.wcs
    parsed.add_component(Component(sunpy_map.data), sunpy_map.name)
    parsed.meta = sunpy_map.meta
    parsed.style = VisualAttributes(color='#FDB813',
                                    preferred_cmap=sunpy_map.cmap)
    return parsed
def _load_fits_generic(filename, **kwargs):
    """
    Load a FITS file into one or more glue ``Data`` objects.

    Image HDUs that share a shape are grouped into a single ``Data``
    (one component per HDU); table HDUs contribute one ``Data`` per
    column shape, with one component per column.

    Parameters
    ----------
    filename : str
        Path to the FITS file.
    **kwargs
        Accepted for loader-interface compatibility; currently unused.

    Returns
    -------
    list of ``Data``
        One entry per distinct shape/table grouping found in the file.
    """
    # NOTE(review): the HDU list is deliberately not closed here,
    # presumably so lazily-loaded HDU data stays accessible — confirm
    # before adding a context manager.
    hdulist = fits.open(filename)
    groups = dict()
    # Strip the extension for the label; fall back to the full basename
    # for extension-less filenames.
    label_base = basename(filename).rpartition('.')[0]
    if not label_base:
        label_base = basename(filename)
    for extnum, hdu in enumerate(hdulist):
        if hdu.data is None:
            continue
        # Unnamed HDUs are identified by their extension number.
        hdu_name = hdu.name if hdu.name else str(extnum)
        if is_image_hdu(hdu):
            shape = hdu.data.shape
            try:
                data = groups[shape]
            except KeyError:
                label = '{}[{}]'.format(
                    label_base,
                    'x'.join(str(x) for x in shape)
                )
                data = Data(label=label)
                data.coords = coordinates_from_header(hdu.header)
                groups[shape] = data
            data.add_component(component=hdu.data, label=hdu_name)
        elif is_table_hdu(hdu):
            # Loop through columns and make component list
            table = Table(hdu.data)
            table_name = '{}[{}]'.format(label_base, hdu_name)
            for column_name in table.columns:
                column = table[column_name]
                shape = column.shape
                data_label = '{}[{}]'.format(
                    table_name,
                    'x'.join(str(x) for x in shape)
                )
                try:
                    data = groups[data_label]
                except KeyError:
                    data = Data(label=data_label)
                    groups[data_label] = data
                component = Component.autotyped(column, units=column.unit)
                data.add_component(component=component, label=column_name)
    # BUG FIX: ``dict.itervalues`` does not exist on Python 3; use
    # ``values()`` instead.
    return list(groups.values())
def test_link_aligned(ndata, ndim):
    """Pixel component IDs of aligned datasets must be interchangeable."""
    shape = (2,) * ndim
    datasets = []
    for _ in range(ndata):
        dataset = Data()
        dataset.add_component(Component(np.random.random(shape)), 'test')
        datasets.append(dataset)
    # assert that all componentIDs are interchangeable
    links = LinkAligned(datasets)
    collection = DataCollection(datasets)
    collection.add_link(links)
    for axis in range(ndim):
        reference_id = datasets[0].pixel_component_ids[axis]
        for j in range(1, ndata):
            other_id = datasets[j].pixel_component_ids[axis]
            np.testing.assert_array_equal(datasets[j][reference_id],
                                          datasets[j][other_id])
def cube_to_data(self, cube, output_label=None, output_component_id=None):
    """
    Convert SpectralCube to final output.

    ``self.output_as_component`` is checked here:
    if set, a new component is added to ``self.data`` and ``None`` is
    returned; otherwise a new ``Data`` object is created and returned.

    :param cube: SpectralCube
    :param output_label: Name of new Data.
    :param output_component_id: label of new component
    :return: ``None`` or the newly created ``Data``.
    """
    # Copy the cube's raw array so the output does not alias the input.
    component = Component(cube._data.copy(), self.component_unit)
    if self.output_as_component:
        self.data.add_component(component, output_component_id)
        return None
    result = Data(label=output_label)
    result.coords = coordinates_from_header(cube.header)
    result.add_component(component, output_component_id)
    return result
def add_to_2d_container(cubeviz_layout, data, component_data, component_unit, label):
    """
    Given the cubeviz layout, a data object, a new 2D layer and a label, add
    the 2D layer to the data object and update the cubeviz layout accordingly.
    This creates the 2D container dataset if needed.

    Parameters
    ----------
    cubeviz_layout
        The cubeviz layout; used for unit registration and viewer updates.
    data
        The parent (cube) dataset; its ``container_2d`` attribute holds
        the 2D maps.
    component_data
        Array for the new 2D layer.
    component_unit
        Units for the new component.
    label : str
        Component label; must not already exist in the container.

    Raises
    ------
    ValueError
        If a component with ``label`` already exists in the 2D container.
    """
    # If the 2D container doesn't exist, we create it here. This container is
    # basically just a Data object but we keep it in an attribute
    # ``container_2d`` on its parent dataset.
    if getattr(data, 'container_2d', None) is None:
        # For now, we assume that the 2D maps are always computed along the
        # spectral axis, so that the resulting WCS is always celestial
        coords = WCSCoordinates(wcs=data.coords.wcs.celestial)
        data.container_2d = Data(label=data.label + " [2d]", coords=coords)
        # manually create the component so we can add the units too
        new_component_data_with_units = Component(component_data, component_unit)
        component_id = data.container_2d.add_component(
            new_component_data_with_units, label)
        # Register the component's unit so flux-unit conversions know it.
        cubeviz_layout._flux_unit_controller.add_component_unit(
            component_id, str(component_unit))
        cubeviz_layout.session.data_collection.append(data.container_2d)
        # NOTE: the following is disabled for now but can be uncommented once
        # we are ready to use the glue overlay infrastructure.
        # Set up pixel links so that selections in the image plane propagate
        # between 1D and 2D views. Again this assumes as above that the
        # moments are computed along the spectral axis
        # link1 = LinkSame(data.pixel_component_ids[2],
        #                  data.container_2d.pixel_component_ids[1])
        # link2 = LinkSame(data.pixel_component_ids[1],
        #                  data.container_2d.pixel_component_ids[0])
        # cubeviz_layout.session.data_collection.add_link(link1)
        # cubeviz_layout.session.data_collection.add_link(link2)
        # Make the new container selectable in every viewer combo and view.
        for helper in cubeviz_layout._viewer_combo_helpers:
            helper.append_data(data.container_2d)
        for viewer in cubeviz_layout.cube_views:
            viewer._widget.add_data(data.container_2d)
    else:
        # Make sure we don't add duplicate data components
        if label in data.container_2d.component_ids():
            raise ValueError("Data component with label '{}' already exists, "
                             "and cannot be created again".format(label))
        # manually create the component so we can add the units too
        new_component_data_with_units = Component(component_data, component_unit)
        component_id = data.container_2d.add_component(
            new_component_data_with_units, label)
        cubeviz_layout._flux_unit_controller.add_component_unit(
            component_id, str(component_unit))
catalog=Data(parent=table['parent'], label='Fitted Catalog') #catalog=Data() for column_name in table.columns: cc = table[column_name] uu = cc.unit if hasattr(cc, 'unit') else cc.units if cc.name == 'parent': cc.name = 'cat_parent' column_name = 'cat_parent' elif cc.name == 'height': cc.name = 'cat_height' column_name = 'cat_height' elif cc.name == 'peak': cc.name = 'cat_peak' column_name = 'cat_peak' nc = Component.autotyped(cc, units=uu) catalog.add_component(nc, column_name) # if column_name != 'parent' else '_flarent_' catalog.join_on_key(dendro, '_idx', dendro.pixel_component_ids[0]) dc = DataCollection(dendrogram) #dc = DataCollection([cube, dendrogram, catalog]) #dc.merge(cube,sncube) #sncube.join_on_key(dendro, 'structure', dendro.pixel_component_ids[0]) #dc.merge(catalog, dendro) # UNCOMMENT THIS LINE TO BREAK THE VIEWER dc.append(catalog) app = GlueApplication(dc)
catalog = Data(parent=table['parent'], label='Fitted Catalog') #catalog=Data() for column_name in table.columns: cc = table[column_name] uu = cc.unit if hasattr(cc, 'unit') else cc.units if cc.name == 'parent': cc.name = 'cat_parent' column_name = 'cat_parent' elif cc.name == 'height': cc.name = 'cat_height' column_name = 'cat_height' elif cc.name == 'peak': cc.name = 'cat_peak' column_name = 'cat_peak' nc = Component.autotyped(cc, units=uu) catalog.add_component(nc, column_name) # if column_name != 'parent' else '_flarent_' catalog.join_on_key(dendro, '_idx', dendro.pixel_component_ids[0]) dc = DataCollection(dendrogram) #dc = DataCollection([cube, dendrogram, catalog]) #dc.merge(cube,sncube) #sncube.join_on_key(dendro, 'structure', dendro.pixel_component_ids[0]) #dc.merge(catalog, dendro) # UNCOMMENT THIS LINE TO BREAK THE VIEWER dc.append(catalog) app = GlueApplication(dc)