def test_select_previously_incompatible_layer(self):
    # Regression test for a bug that caused a selection in a previously
    # disabled layer to enable the layer without updating the subset view
    self.viewer.add_data(self.image1)
    self.viewer.add_data(self.catalog)
    self.catalog.add_component([4, 5, 6], 'e')
    # Link catalog attributes c/d to the image world coordinates so the
    # scatter layers become compatible with the image viewer.
    link1 = LinkSame(self.catalog.id['c'], self.image1.world_component_ids[0])
    link2 = LinkSame(self.catalog.id['d'], self.image1.world_component_ids[1])
    self.data_collection.add_link(link1)
    self.data_collection.add_link(link2)
    # Subset defined on 'e', which is not linked to the image, so the image
    # subset layer cannot be shown and should be disabled.
    self.data_collection.new_subset_group(subset_state=self.catalog.id['e'] > 4)
    assert self.viewer.layers[0].enabled  # image
    assert self.viewer.layers[1].enabled  # scatter
    assert not self.viewer.layers[2].enabled  # image subset
    assert self.viewer.layers[3].enabled  # scatter subset
    assert not self.viewer.layers[2].image_artist.get_visible()
    # Redefine the subset on 'c', which *is* linked to the image: the image
    # subset layer should now be enabled AND its artist made visible (the
    # original bug enabled the layer without refreshing the artist).
    self.data_collection.subset_groups[0].subset_state = self.catalog.id['c'] > -1
    assert self.viewer.layers[0].enabled  # image
    assert self.viewer.layers[1].enabled  # scatter
    assert self.viewer.layers[2].enabled  # image subset
    assert self.viewer.layers[3].enabled  # scatter subset
    assert self.viewer.layers[2].image_artist.get_visible()
def test_linking_and_enabling(self):
    # Regression test for a bug that caused layers not to be correctly
    # enabled/disabled.
    self.viewer.add_data(self.image1)
    self.viewer.add_data(self.catalog)
    self.catalog.add_component([4, 5, 6], 'e')
    # No links exist yet: only the reference image layer can be enabled.
    self.data_collection.new_subset_group(subset_state=self.catalog.id['e'] > 4)
    assert self.viewer.layers[0].enabled  # image
    assert not self.viewer.layers[1].enabled  # scatter
    assert not self.viewer.layers[2].enabled  # image subset
    assert not self.viewer.layers[3].enabled  # scatter subset
    # Linking catalog c/d to the image world coordinates should enable the
    # scatter layer and the scatter subset layer; the image subset remains
    # disabled because the subset is defined on the unlinked 'e' attribute.
    link1 = LinkSame(self.catalog.id['c'], self.image1.world_component_ids[0])
    link2 = LinkSame(self.catalog.id['d'], self.image1.world_component_ids[1])
    self.data_collection.add_link(link1)
    self.data_collection.add_link(link2)
    assert self.viewer.layers[0].enabled  # image
    assert self.viewer.layers[1].enabled  # scatter
    assert not self.viewer.layers[2].enabled  # image subset
    assert self.viewer.layers[3].enabled  # scatter subset
def vue_collapse(self, *args, **kwargs):
    # Collapse the selected cube along its spectral axis and register the
    # result as a new spatial-spatial dataset, pixel-linked to the original.

    # Collapsing over the spectral axis. Cut out the desired spectral
    # region. Defaults to the entire spectrum.
    spec_min = float(self.spectral_min) * u.Unit(self.spectral_unit)
    spec_max = float(self.spectral_max) * u.Unit(self.spectral_unit)

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', message='No observer defined on WCS')
        spec = spectral_slab(self._selected_cube, spec_min, spec_max)
        # Spatial-spatial image only.
        collapsed_spec = spec.collapse(self.selected_func.lower(), axis=-1).T  # Quantity

    data = Data()
    data['flux'] = collapsed_spec.value
    data.get_component('flux').units = str(collapsed_spec.unit)

    # Counter makes each collapse product get a unique label.
    self._label_counter += 1
    label = f"Collapsed {self._label_counter} {self._selected_data.label}"

    self.data_collection[label] = data

    # Link the new dataset pixel-wise to the original dataset. In general
    # direct pixel to pixel links are the most efficient and should be
    # used in cases like this where we know there is a 1-to-1 mapping of
    # pixel coordinates.

    # Spatial-spatial image only.
    pix_id_1 = self._selected_data.pixel_component_ids[0]  # Pixel Axis 0 [z]
    pix_id_1c = self.data_collection[label].pixel_component_ids[0]  # Pixel Axis 0 [y]
    pix_id_2 = self._selected_data.pixel_component_ids[1]  # Pixel Axis 1 [y]
    pix_id_2c = self.data_collection[label].pixel_component_ids[1]  # Pixel Axis 1 [x]

    self.data_collection.add_link([LinkSame(pix_id_1, pix_id_1c),
                                   LinkSame(pix_id_2, pix_id_2c)])

    snackbar_message = SnackbarMessage(
        f"Data set '{self._selected_data.label}' collapsed successfully.",
        color="success",
        sender=self)
    self.hub.broadcast(snackbar_message)

    # Spatial-spatial image only.
    if self.selected_viewer != 'None':
        # replace the contents in the selected viewer with the results from this plugin
        self.app.add_data_to_viewer(self.viewer_to_id.get(self.selected_viewer),
                                    label,
                                    clear_other_data=True)
def test_disable_incompatible(self):
    # Test to make sure that image and image subset layers are disabled if
    # their pixel coordinates are not compatible with the ones of the
    # reference data.
    self.viewer.add_data(self.image1)
    self.viewer.add_data(self.image2)
    assert self.viewer.state.reference_data is self.image1
    self.data_collection.new_subset_group()
    # Two datasets + one subset on each -> four layers in total.
    assert len(self.viewer.layers) == 4
    # Only the two layers associated with the reference data should be enabled
    for layer_artist in self.viewer.layers:
        if layer_artist.layer in (self.image1, self.image1.subsets[0]):
            assert layer_artist.enabled
        else:
            assert not layer_artist.enabled
    py1, px1 = self.image1.pixel_component_ids
    py2, px2 = self.image2.pixel_component_ids
    link1 = LinkSame(px1, px2)
    self.data_collection.add_link(link1)
    # One link isn't enough, second dataset layers are still not enabled
    for layer_artist in self.viewer.layers:
        if layer_artist.layer in (self.image1, self.image1.subsets[0]):
            assert layer_artist.enabled
        else:
            assert not layer_artist.enabled
    link2 = LinkSame(py1, py2)
    self.data_collection.add_link(link2)
    # All layers should now be enabled
    for layer_artist in self.viewer.layers:
        assert layer_artist.enabled
    self.data_collection.remove_link(link2)
    # We should now be back to the original situation
    for layer_artist in self.viewer.layers:
        if layer_artist.layer in (self.image1, self.image1.subsets[0]):
            assert layer_artist.enabled
        else:
            assert not layer_artist.enabled
def vue_collapse(self, *args, **kwargs):
    # Collapse the selected cube along ``self.selected_axis`` using the
    # function named by ``self.selected_func`` and register the result as a
    # new dataset, pixel-linked to the original.
    try:
        spec = self._selected_data.get_object(cls=SpectralCube)
    except AttributeError:
        snackbar_message = SnackbarMessage(
            "Unable to perform collapse over selected data.",
            color="error",
            sender=self)
        self.hub.broadcast(snackbar_message)
        return

    # If collapsing over the spectral axis, cut out the desired spectral
    # region. Defaults to the entire spectrum.
    if self.selected_axis == 0:
        spec_min = float(self.spectral_min) * u.Unit(self.spectral_unit)
        spec_max = float(self.spectral_max) * u.Unit(self.spectral_unit)
        spec = spec.spectral_slab(spec_min, spec_max)

    # Dispatch to the cube method named by the selection, e.g. spec.mean(...).
    collapsed_spec = getattr(spec, self.selected_func.lower())(axis=self.selected_axis)

    data = Data(coords=collapsed_spec.wcs)
    data['flux'] = collapsed_spec.filled_data[...]
    data.get_component('flux').units = str(collapsed_spec.unit)
    data.meta.update(collapsed_spec.meta)

    # Counter makes each collapse product get a unique label.
    self._label_counter += 1
    label = f"Collapsed {self._label_counter} {self._selected_data.label}"

    self.data_collection[label] = data

    # Link the new dataset pixel-wise to the original dataset. In general
    # direct pixel to pixel links are the most efficient and should be
    # used in cases like this where we know there is a 1-to-1 mapping of
    # pixel coordinates. Here which axes are linked to which depends on
    # the selected axis.
    (i1, i2), (i1c, i2c) = AXES_MAPPING[self.selected_axis]

    pix_id_1 = self._selected_data.pixel_component_ids[i1]
    pix_id_1c = self.data_collection[label].pixel_component_ids[i1c]
    pix_id_2 = self._selected_data.pixel_component_ids[i2]
    pix_id_2c = self.data_collection[label].pixel_component_ids[i2c]

    self.data_collection.add_link(LinkSame(pix_id_1, pix_id_1c))
    self.data_collection.add_link(LinkSame(pix_id_2, pix_id_2c))

    snackbar_message = SnackbarMessage(
        f"Data set '{self._selected_data.label}' collapsed successfully.",
        color="success",
        sender=self)
    self.hub.broadcast(snackbar_message)
def test_2d_world_link():
    """Pixel coordinates should be retrievable after linking world coords."""
    coords = {'x': r(10), 'y': r(10)}
    catalog = Data(label='cat', x=coords['x'], y=coords['y'])
    image = Data(label='im', inten=r((3, 3)))
    collection = DataCollection([catalog, image])

    # Link each world axis of the image to the matching catalog attribute.
    for axis, name in enumerate(('x', 'y')):
        collection.add_link(LinkSame(image.get_world_component_id(axis),
                                     catalog.id[name]))

    # The catalog can now resolve the image's pixel component IDs.
    for axis, name in enumerate(('x', 'y')):
        np.testing.assert_array_equal(
            catalog[image.get_pixel_component_id(axis)], coords[name])
def create_client_with_image_and_scatter(self):
    # Build an image client, then add a scatter dataset whose x/y attributes
    # are identity-linked to the image's two world coordinates.
    from glue.core.link_helpers import LinkSame
    client = self.create_client_with_image()
    self.collect.append(self.scatter)
    for axis, attr in enumerate(('x', 'y')):
        world_id = self.im.get_world_component_id(axis)
        self.collect.add_link(LinkSame(self.scatter.id[attr], world_id))
    client.add_scatter_layer(self.scatter)
    return client
def setup_method(self, method): self.data_collection = DataCollection() # The reference dataset. Shape is (6, 7, 8, 9). self.data1 = Data(x=ARRAY) self.data_collection.append(self.data1) # A dataset with the same shape but not linked. Shape is (6, 7, 8, 9). self.data2 = Data(x=ARRAY) self.data_collection.append(self.data2) # A dataset with the same number of dimensions but in a different # order, linked to the first. Shape is (9, 7, 6, 8). self.data3 = Data(x=np.moveaxis(ARRAY, (3, 1, 0, 2), (0, 1, 2, 3))) self.data_collection.append(self.data3) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[0], self.data3.pixel_component_ids[2])) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[1], self.data3.pixel_component_ids[1])) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[2], self.data3.pixel_component_ids[3])) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[3], self.data3.pixel_component_ids[0])) # A dataset with fewer dimensions, linked to the first one. Shape is # (8, 7, 6) self.data4 = Data(x=ARRAY[:, :, :, 0].transpose()) self.data_collection.append(self.data4) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[0], self.data4.pixel_component_ids[2])) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[1], self.data4.pixel_component_ids[1])) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[2], self.data4.pixel_component_ids[0])) # A dataset with even fewer dimensions, linked to the first one. Shape # is (8, 6) self.data5 = Data(x=ARRAY[:, 0, :, 0].transpose()) self.data_collection.append(self.data5) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[0], self.data5.pixel_component_ids[1])) self.data_collection.add_link( LinkSame(self.data1.pixel_component_ids[2], self.data5.pixel_component_ids[0]))
def vue_register_spectrum(self, event): """ Add a spectrum to the data collection based on the currently displayed parameters (these could be user input or fit values). """ # Make sure the initialized models are updated with any user-specified # parameters self._update_initialized_parameters() # Need to run the model fitter with run_fitter=False to get spectrum model, spectrum = fit_model_to_spectrum(self._spectrum1d, self._initialized_models.values(), self.model_equation) self.n_models += 1 label = self.model_label if label in self.data_collection: self.app.remove_data_from_viewer('spectrum-viewer', label) # Some hacky code to remove the label from the data dropdown temp_items = [] for data_item in self.app.state.data_items: if data_item['name'] != label: temp_items.append(data_item) self.app.state.data_items = temp_items # Remove the actual Glue data object from the data_collection self.data_collection.remove(self.data_collection[label]) self.data_collection[label] = spectrum self.save_enabled = True self.data_collection.add_link( LinkSame(self.data_collection[self._label_to_link].pixel_component_ids[0], self.data_collection[label].pixel_component_ids[0]))
def create_client_with_cube_and_scatter(self):
    # Build a cube client, then add a scatter dataset whose x/y/z attributes
    # are identity-linked to the cube's three pixel coordinates.
    from glue.core.link_helpers import LinkSame
    client = self.create_client_with_cube()
    self.collect.append(self.cube)
    for axis, attr in enumerate('xyz'):
        pixel_id = self.cube.get_pixel_component_id(axis)
        self.collect.add_link(LinkSame(self.scatter.id[attr], pixel_id))
    client.add_scatter_layer(self.scatter)
    return client
def add_to_2d_container(cubeviz_layout, data, component_data, label): """ Given the cubeviz layout, a data object, a new 2D layer and a label, add the 2D layer to the data object and update the cubeviz layout accordingly. This creates the 2D container dataset if needed. """ # If the 2D container doesn't exist, we create it here. This container is # basically just a Data object but we keep it in an attribute # ``container_2d`` on its parent dataset. if getattr(data, 'container_2d', None) is None: # For now, we assume that the 2D maps are always computed along the # spectral axis, so that the resulting WCS is always celestial coords = WCSCoordinates(wcs=data.coords.wcs.celestial) data.container_2d = Data(label=data.label + " [2d]", coords=coords) data.container_2d.add_component(component_data, label) cubeviz_layout.session.data_collection.append(data.container_2d) # Set up pixel links so that selections in the image plane propagate # between 1D and 2D views. Again this assumes as above that the # moments are computed along the spectral axis link1 = LinkSame(data.pixel_component_ids[2], data.container_2d.pixel_component_ids[1]) link2 = LinkSame(data.pixel_component_ids[1], data.container_2d.pixel_component_ids[0]) cubeviz_layout.session.data_collection.add_link(link1) cubeviz_layout.session.data_collection.add_link(link2) for helper in cubeviz_layout._viewer_combo_helpers: helper.append_data(data.container_2d) for viewer in cubeviz_layout.cube_views: viewer._widget.add_data(data.container_2d) else: # Make sure we don't add duplicate data components if label in data.container_2d.component_ids(): raise ValueError("Data component with label '{}' already exists, " "and cannot be created again".format(label)) data.container_2d.add_component(component_data, label)
def link_data_in_table(app, data_obj=None):
    """
    Batch links data in the mosviz table viewer.

    Parameters
    ----------
    app : `~jdaviz.app.Application`
        The application-level object used to reference the viewers.
    data_obj : None
        Passed in in order to use the data_parser_registry, otherwise
        not used.
    """
    mos_data = app.session.data_collection['MOS Table']
    wc_spec_ids = []

    # Optimize linking speed through a) delaying link manager updates with a
    # context manager, b) handling intra-row linkage of 1D and 2D spectra in a
    # loop, and c) handling inter-row linkage after that in one fell swoop.
    with app.data_collection.delay_link_manager_update():

        spectra_1d = mos_data.get_component('1D Spectra').data
        spectra_2d = mos_data.get_component('2D Spectra').data

        # Link each 1D spectrum with its corresponding 2D spectrum
        for index in range(len(spectra_1d)):

            spec_1d = spectra_1d[index]
            spec_2d = spectra_2d[index]

            wc_spec_1d = app.session.data_collection[spec_1d].world_component_ids
            wc_spec_2d = app.session.data_collection[spec_2d].world_component_ids

            # Spectral axis of the 1D spectrum <-> second world axis of the
            # 2D spectrum.
            wc_spec_ids.append(LinkSame(wc_spec_1d[0], wc_spec_2d[1]))

        # Link each 1D spectrum to all other 1D spectra
        first_spec_1d = spectra_1d[0]
        wc_first_spec_1d = app.session.data_collection[first_spec_1d].world_component_ids

        for index in range(1, len(spectra_1d)):
            spec_1d = spectra_1d[index]
            wc_spec_1d = app.session.data_collection[spec_1d].world_component_ids
            wc_spec_ids.append(LinkSame(wc_spec_1d[0], wc_first_spec_1d[0]))

    # All links are registered in a single call once collected.
    app.session.data_collection.add_link(wc_spec_ids)
def add_link(self, data1, attribute1, data2, attribute2, function=None):
    """Add a simple identity link between two attributes.

    ``data1``/``data2`` are the datasets holding the attributes, and
    ``attribute1``/``attribute2`` are the (string) attribute names to link.
    ``function`` is reserved for future non-identity links and must
    currently be `None`.
    """
    # For now this assumes attribute1 and attribute2 are strings and single
    # attributes. In future we should generalize this while keeping the
    # simplest use case simple.
    if function is not None:
        raise NotImplementedError
    att1 = data1.id[attribute1]
    att2 = data2.id[attribute2]
    link = LinkSame(att1, att2)
    self.data_collection.add_link(link)
def test_1d_world_link():
    """Linking a 1D world axis should expose matching world/pixel coords."""
    values_x, values_y = r(10), r(10)
    data_a = Data(label='d1', x=values_x)
    data_b = Data(label='d2', y=values_y)
    collection = DataCollection([data_a, data_b])

    world_id = data_b.get_world_component_id(0)
    collection.add_link(LinkSame(world_id, data_a.id['x']))

    # The linked world component now belongs to d1 and evaluates to x, as
    # does d2's pixel component when resolved through d1.
    assert world_id in data_a.components
    np.testing.assert_array_equal(data_a[world_id], values_x)
    np.testing.assert_array_equal(data_a[data_b.get_pixel_component_id(0)],
                                  values_x)
def vue_collapse(self, *args, **kwargs):
    """Collapse the selected cube along ``self.selected_axis``.

    The collapse function is looked up by name from ``self.selected_func``,
    the result is registered in the data collection under a "Collapsed ..."
    label, and the new dataset is pixel-linked to the original. Success or
    failure is reported via snackbar messages.
    """
    try:
        spec = self._selected_data.get_object(cls=SpectralCube)
    except AttributeError:
        # Plain string: the original used a pointless f-string with no
        # placeholders (flake8 F541).
        snackbar_message = SnackbarMessage(
            "Unable to perform collapse over selected data.",
            color="error",
            sender=self)
        self.hub.broadcast(snackbar_message)
        return

    # Dispatch to the cube method named by the selection, e.g. spec.mean(...).
    collapsed_spec = getattr(spec, self.selected_func.lower())(
        axis=self.selected_axis)

    data = Data(coords=collapsed_spec.wcs)
    data['flux'] = collapsed_spec.filled_data[...]
    data.get_component('flux').units = str(collapsed_spec.unit)
    data.meta.update(collapsed_spec.meta)

    label = f"Collapsed {self._selected_data.label}"
    self.data_collection[label] = data

    # Link the new dataset pixel-wise to the original dataset. In general
    # direct pixel to pixel links are the most efficient and should be
    # used in cases like this where we know there is a 1-to-1 mapping of
    # pixel coordinates. Here which axes are linked to which depends on
    # the selected axis.
    (i1, i2), (i1c, i2c) = AXES_MAPPING[self.selected_axis]

    self.data_collection.add_link(
        LinkSame(self._selected_data.pixel_component_ids[i1],
                 self.data_collection[label].pixel_component_ids[i1c]))
    self.data_collection.add_link(
        LinkSame(self._selected_data.pixel_component_ids[i2],
                 self.data_collection[label].pixel_component_ids[i2c]))

    snackbar_message = SnackbarMessage(
        f"Data set '{self._selected_data.label}' collapsed successfully.",
        color="success",
        sender=self)
    self.hub.broadcast(snackbar_message)
def test_scatter_on_volume(tmpdir):
    # Overlaying a pixel-linked scatter dataset on a volume rendering should
    # survive a save/restore round trip of the session.
    data1 = Data(a=np.arange(60).reshape((3, 4, 5)))
    data2 = Data(x=[1, 2, 3], y=[2, 3, 4], z=[3, 4, 5])
    data3 = Data(b=np.arange(60).reshape((3, 4, 5)))

    dc = DataCollection([data1, data2, data3])

    # Link the scatter attributes to the volume's pixel axes (note the
    # reversed axis order: x <-> last pixel axis).
    dc.add_link(LinkSame(data1.pixel_component_ids[2], data2.id['x']))
    dc.add_link(LinkSame(data1.pixel_component_ids[1], data2.id['y']))
    dc.add_link(LinkSame(data1.pixel_component_ids[0], data2.id['z']))

    ga = GlueApplication(dc)
    ga.show()

    volume = ga.new_data_viewer(VispyVolumeViewer)
    volume.add_data(data1)
    volume.add_data(data2)
    volume.add_data(data3)

    # Check that writing a session works as expected.
    session_file = tmpdir.join('test_scatter_on_volume.glu').strpath
    ga.save_session(session_file)
    ga.close()

    # Now we can check that everything is restored correctly
    ga2 = GlueApplication.restore_session(session_file)
    ga2.show()

    volume_r = ga2.viewers[0][0]

    assert len(volume_r.layers) == 3

    ga2.close()
def _link_new_data(self):
    """
    When additional data is loaded, check to see if the spectral axis of
    any components are compatible with already loaded data. If so, link
    them so that they can be displayed on the same profile1D plot.
    """
    new_len = len(self.data_collection)
    new_data = self.data_collection[new_len - 1]

    # Can't link if there's no world_component_ids on the new dataset.
    # (Truthiness check instead of ``== []``: also handles list-like
    # containers, and is the idiomatic emptiness test.)
    if not new_data.world_component_ids:
        return

    for i in range(new_len - 1):
        existing = self.data_collection[i]
        # Skip existing datasets without world coordinates.
        if not existing.world_component_ids:
            continue

        # Identity-link the first world axis (the spectral axis) of each
        # existing dataset to that of the newly added one.
        self.data_collection.add_link(
            LinkSame(existing.world_component_ids[0],
                     new_data.world_component_ids[0]))
def __setgluestate__(cls, rec, context):
    # Restore the viewer from a saved session. For sessions saved with an
    # older protocol (< 2), datasets shown together did not need to be
    # linked; offer to auto-link them by pixel coordinates on restore.

    viewer = super(VispyVolumeViewer, cls).__setgluestate__(rec, context)

    if rec.get('_protocol', 0) < 2:

        # Find all data objects in layers (not subsets)
        layer_data = [layer.layer
                      for layer in viewer.state.layers
                      if (isinstance(layer, VolumeLayerState) and
                          isinstance(layer.layer, BaseData))]

        if len(layer_data) > 1:
            reference = layer_data[0]
            # If every other dataset is already pixel-aligned with the
            # reference there is nothing to do (for-else: the ``else``
            # runs only when the loop did not ``break``).
            for data in layer_data[1:]:
                if data not in reference.pixel_aligned_data:
                    break
            else:
                return viewer

        buttons = QMessageBox.Yes | QMessageBox.No
        message = ("The 3D volume rendering viewer now requires datasets to "
                   "be linked in order to be shown at the same time. Are you "
                   "happy for glue to automatically link your datasets by "
                   "pixel coordinates?")

        answer = QMessageBox.question(None, "Link data?", message,
                                      buttons=buttons,
                                      defaultButton=QMessageBox.Yes)

        if answer == QMessageBox.Yes:
            # Identity-link all three pixel axes of each unaligned dataset
            # to those of the reference dataset.
            for data in layer_data[1:]:
                if data not in reference.pixel_aligned_data:
                    for i in range(3):
                        link = LinkSame(reference.pixel_component_ids[i],
                                        data.pixel_component_ids[i])
                        viewer.session.data_collection.add_link(link)

    return viewer
def vue_gaussian_smooth(self, *args, **kwargs):
    """Gaussian-smooth the selected spectrum and register the result.

    The standard deviation comes from the user-supplied ``self.stddev``;
    the smoothed spectrum is added to the data collection under a
    "Smoothed ..." label and pixel-linked to the original dataset.
    Success or failure is reported via snackbar messages.
    """
    size = float(self.stddev)

    try:
        spec = self._selected_data.get_object(cls=Spectrum1D)
    except TypeError:
        # Plain string: the original used a pointless f-string with no
        # placeholders (flake8 F541).
        snackbar_message = SnackbarMessage(
            "Unable to perform smoothing over selected data.",
            color="error",
            sender=self)
        self.hub.broadcast(snackbar_message)
        return

    # Takes the user input from the dialog (stddev) and uses it to
    # define a standard deviation for gaussian smoothing
    spec_smoothed = gaussian_smooth(spec, stddev=size)

    label = f"Smoothed {self._selected_data.label}"
    self.data_collection[label] = spec_smoothed

    # Link the new dataset pixel-wise to the original dataset. In general
    # direct pixel to pixel links are the most efficient and should be
    # used in cases like this where we know there is a 1-to-1 mapping of
    # pixel coordinates. Here the smoothing returns a 1-d spectral object
    # which we can link to the first dimension of the original dataset
    # (which could in principle be a cube or a spectrum)
    self.data_collection.add_link(
        LinkSame(self._selected_data.pixel_component_ids[0],
                 self.data_collection[label].pixel_component_ids[0]))

    snackbar_message = SnackbarMessage(
        f"Data set '{self._selected_data.label}' smoothed successfully.",
        color="success",
        sender=self)
    self.hub.broadcast(snackbar_message)
def _parse_image(app, file_obj, data_label, show_in_viewer, ext=None):
    """Load one or more image datasets and pixel-link them to the first.

    Each dataset yielded by the image iterator is added to the application
    (and optionally to 'viewer-1'); afterwards every dataset beyond the
    first is identity-linked, pixel axis by pixel axis, to the first one.
    """
    if data_label is None:
        raise NotImplementedError('data_label should be set by now')

    data_iter = get_image_data_iterator(app, file_obj, data_label, ext=ext)
    for data, data_label in data_iter:

        # Avoid duplicate data labels in collection (0th-order solution as
        # proposed in issue #600). Loop so the label is unique even when a
        # "<label>_2" entry already exists; the original single check could
        # still collide.
        while data_label in app.data_collection.labels:
            data_label = data_label + "_2"

        app.add_data(data, data_label)
        if show_in_viewer:
            app.add_data_to_viewer("viewer-1", data_label)

    if len(app.data_collection) <= 1:  # No need to link, we are done.
        return

    # Auto-link data by pixels
    links_list = []
    refdata = app.data_collection[0]  # Link with first one
    ids0 = refdata.pixel_component_ids
    ndim_range = range(refdata.ndim)
    for data in app.data_collection[1:]:
        ids1 = data.pixel_component_ids
        try:
            new_links = [LinkSame(ids0[i], ids1[i]) for i in ndim_range]
        except Exception as e:
            # TODO: Is it better to just throw exception and crash?
            app.hub.broadcast(SnackbarMessage(
                f"Error linking '{data.label}' to '{refdata.label}': "
                f"{repr(e)}", color="warning", timeout=8000, sender=app))
            continue
        links_list += new_links

    # Register all links at once with link manager updates delayed.
    with app.data_collection.delay_link_manager_update():
        app.data_collection.set_links(links_list)
def add_link(self, data1, attribute1, data2, attribute2): """ Add a simple identity link between two attributes. Parameters ---------- data1 : `~glue.core.data.Data` The dataset containing the first attribute. attribute1 : str or `~glue.core.component_id.ComponentID` The first attribute to link. data2 : `~glue.core.data.Data` The dataset containing the first attribute. attribute2 : str or `~glue.core.component_id.ComponentID` The first attribute to link. """ # For now this assumes attribute1 and attribute2 are strings and single # attributes. In future we should generalize this while keeping the # simplest use case simple. att1 = data1.id[attribute1] att2 = data2.id[attribute2] link = LinkSame(att1, att2) self.data_collection.add_link(link)
def vue_register_spectrum(self, event):
    """
    Add a spectrum to the data collection based on the currently displayed
    parameters (these could be user input or fit values).
    """
    if self._warn_if_no_equation():
        return
    # Make sure the initialized models are updated with any user-specified
    # parameters
    self._update_initialized_parameters()

    # Need to run the model fitter with run_fitter=False to get spectrum
    if "spectrum" in event:
        spectrum = event["spectrum"]
    else:
        model, spectrum = fit_model_to_spectrum(
            self._spectrum1d,
            self._initialized_models.values(),
            self.model_equation,
            window=self._window)

    self.n_models += 1
    label = self.model_label
    # Replace any existing model with the same label.
    if label in self.data_collection:
        self.app.remove_data_from_viewer('spectrum-viewer', label)
        # Remove the actual Glue data object from the data_collection
        self.data_collection.remove(self.data_collection[label])
    self.app.add_data(spectrum, label)

    # Make sure we link the result spectrum to the data we're fitting
    data_fitted = self.app.session.data_collection[self.selected_data]
    data_id = data_fitted.world_component_ids[0]
    model_id = self.app.session.data_collection[label].world_component_ids[0]
    self.app.session.data_collection.add_link(LinkSame(data_id, model_id))

    if self.add_replace_results:
        self.app.add_data_to_viewer('spectrum-viewer', label)
from glue.core.data_factories import load_data
from glue.core import DataCollection
from glue.core.link_helpers import LinkSame
from glue.app.qt.application import GlueApplication

# Load 2 datasets from files: a FITS image and a VO point-source catalog.
image = load_data('w5.fits')
catalog = load_data('w5_psc.vot')
dc = DataCollection([image, catalog])

# Link positional information: image world coordinates <-> catalog RA/Dec.
dc.add_link(LinkSame(image.id['Right Ascension'], catalog.id['RAJ2000']))
dc.add_link(LinkSame(image.id['Declination'], catalog.id['DEJ2000']))

# Start Glue with both datasets loaded and linked.
app = GlueApplication(dc)
app.start()
ga.app.processEvents()

# Update the current subset using AND-NOT mode with a 450-500 value range,
# then capture a screenshot of the refined selection.
ga.session.edit_subset_mode.mode = AndNotMode
cid = image.main_components[0]
subset_state = (cid >= 450) & (cid <= 500)
ga.session.edit_subset_mode.update(ga.data_collection, subset_state)
ga.app.processEvents()
ga.screenshot('subset_refine.png')

catalog = ga.load_data('w5_psc.vot')
catalog.label = 'Point Sources'

# Set up links between the image world coordinates and the catalog RA/Dec.
link1 = LinkSame(image.id['Right Ascension'], catalog.id['RAJ2000'])
link2 = LinkSame(image.id['Declination'], catalog.id['DEJ2000'])
ga.data_collection.add_link(link1)
ga.data_collection.add_link(link2)

# Open a scatter viewer on the catalog and frame the color-magnitude plot.
scatter_viewer = ga.new_data_viewer(ScatterViewer, data=catalog)
scatter_viewer._mdi_wrapper.resize(900, 400)
scatter_viewer._mdi_wrapper.move(0, 400)
scatter_viewer.state.x_att = catalog.id['__4.5__-__5.8_']
scatter_viewer.state.y_att = catalog.id['__4.5_']
scatter_viewer.state.x_min = -1
scatter_viewer.state.x_max = 1.6
scatter_viewer.state.y_min = 1
scatter_viewer.state.y_max = 17

ga.session.edit_subset_mode.mode = ReplaceMode
def link_data(self, link_type='pixels', wcs_fallback_scheme='pixels', wcs_use_affine=True,
              error_on_fail=False):
    """(Re)link loaded data with the desired link type.
    All existing links will be replaced.

    .. warning::

        Any markers added would be removed. You can add back the
        markers manually using :meth:`add_markers`. During the markers
        removal, pan/zoom will also reset.

    Parameters
    ----------
    link_type : {'pixels', 'wcs'}
        Choose to link by pixels or WCS.

    wcs_fallback_scheme : {None, 'pixels'}
        If WCS linking failed, choose to fall back to linking by pixels or not at all.
        This is only used when ``link_type='wcs'``.
        Choosing `None` may result in some Imviz functionality not working properly.

    wcs_use_affine : bool
        Use an affine transform to represent the offset between images if possible
        (requires that the approximation is accurate to within 1 pixel with the
        full WCS transformations). If approximation fails, it will automatically
        fall back to full WCS transformation. This is only used when
        ``link_type='wcs'``. Affine approximation is much more performant at
        the cost of accuracy.

    error_on_fail : bool
        If `True`, any failure in linking will raise an exception.
        If `False`, warnings will be emitted as snackbar messages.
        When only warnings are emitted and no links are assigned,
        some Imviz functionality may not work properly.

    Raises
    ------
    ValueError
        Invalid inputs or reference data.

    """
    if len(self.app.data_collection) <= 1:  # No need to link, we are done.
        return

    if link_type not in ('pixels', 'wcs'):
        raise ValueError(f"link_type must be 'pixels' or 'wcs', got {link_type}")
    if link_type == 'wcs' and wcs_fallback_scheme not in (None, 'pixels'):
        raise ValueError("wcs_fallback_scheme must be None or 'pixels', "
                         f"got {wcs_fallback_scheme}")

    # Clear any existing markers. Otherwise, re-linking will crash.
    self.reset_markers()

    refdata, iref = get_reference_image_data(self.app)
    links_list = []
    ids0 = refdata.pixel_component_ids
    ndim_range = range(refdata.ndim)

    for i, data in enumerate(self.app.data_collection):
        # Do not link with self
        if i == iref:
            continue

        # We are not touching any existing Subsets. They keep their own links.
        if not layer_is_image_data(data):
            continue

        ids1 = data.pixel_component_ids
        try:
            if link_type == 'pixels':
                # NOTE(review): the comprehension variable ``i`` shadows the
                # enumerate index; harmless in Python 3 (comprehensions have
                # their own scope) but worth renaming for clarity.
                new_links = [LinkSame(ids0[i], ids1[i]) for i in ndim_range]
            else:  # 'wcs'
                wcslink = WCSLink(data1=refdata, data2=data, cids1=ids0, cids2=ids1)
                if wcs_use_affine:
                    try:
                        new_links = [wcslink.as_affine_link()]
                    except NoAffineApproximation:  # pragma: no cover
                        new_links = [wcslink]
                else:
                    new_links = [wcslink]
        except Exception as e:
            # WCS linking failed: optionally fall back to pixel linking,
            # otherwise raise or warn depending on error_on_fail.
            if link_type == 'wcs' and wcs_fallback_scheme == 'pixels':
                try:
                    new_links = [LinkSame(ids0[i], ids1[i]) for i in ndim_range]
                except Exception as e:  # pragma: no cover
                    if error_on_fail:
                        raise
                    else:
                        self.app.hub.broadcast(SnackbarMessage(
                            f"Error linking '{data.label}' to '{refdata.label}': "
                            f"{repr(e)}", color="warning", timeout=8000,
                            sender=self.app))
                        continue
            else:
                if error_on_fail:
                    raise
                else:
                    self.app.hub.broadcast(SnackbarMessage(
                        f"Error linking '{data.label}' to '{refdata.label}': "
                        f"{repr(e)}", color="warning", timeout=8000,
                        sender=self.app))
                    continue
        links_list += new_links

    if len(links_list) > 0:
        # Replace all links in one shot with link manager updates delayed.
        with self.app.data_collection.delay_link_manager_update():
            self.app.data_collection.set_links(links_list)
        self.app.hub.broadcast(SnackbarMessage(
            'Images successfully relinked', color='success', timeout=8000,
            sender=self.app))
dc = DataCollection(dendrogram) #dc = DataCollection([cube, dendrogram, catalog]) #dc.merge(cube,sncube) #sncube.join_on_key(dendro, 'structure', dendro.pixel_component_ids[0]) #dc.merge(catalog, dendro) # UNCOMMENT THIS LINE TO BREAK THE VIEWER dc.append(catalog) app = GlueApplication(dc) cube_viewer = app.new_data_viewer(ImageWidget) cube_viewer.add_data(sncube) # link positional information dc.add_link(LinkSame(sncube.id['structure'], catalog.id['_idx'])) #dc.add_link(LinkSame(image.id['World y: DEC--TAN'], catalog.id['DEJ2000'])) dc.add_link(LinkSame(cube.id['Galactic Longitude'], catalog.id['x_cen'])) dc.add_link(LinkSame(cube.id['Galactic Latitude'], catalog.id['y_cen'])) def ms_to_kms(x): return x / 1e3 def kms_to_ms(x): return x * 1e3 dc.add_link(
def setup_method(self, method):
    """Build a DataCollection of six datasets linked to a 4-d reference.

    The fixture creates:

    * ``data1`` -- the (6, 7, 8, 9) reference dataset,
    * ``data2`` -- same shape as the reference but deliberately unlinked,
    * ``data3`` -- same number of dimensions with the axes permuted,
      pixel-linked to the reference,
    * ``data4`` -- a 3-d slice of the reference, pixel-linked,
    * ``data5`` -- a 2-d slice of the reference, pixel-linked,
    * ``data6`` -- on a different pixel grid (world-linked, so using it
      requires reprojection),

    plus an ``ImageViewerState`` with one layer per dataset and ``data1``
    as the reference data.
    """
    self.data_collection = DataCollection()
    self.array = np.arange(3024).reshape((6, 7, 8, 9))

    # The reference dataset. Shape is (6, 7, 8, 9).
    self.data1 = Data(x=self.array)
    self.data_collection.append(self.data1)

    # A dataset with the same shape but not linked. Shape is (6, 7, 8, 9).
    self.data2 = Data(x=self.array)
    self.data_collection.append(self.data2)

    # A dataset with the same number of dimensions but in a different
    # order, linked to the first. Shape is (9, 7, 6, 8).
    self.data3 = Data(
        x=np.moveaxis(self.array, (3, 1, 0, 2), (0, 1, 2, 3)))
    self.data_collection.append(self.data3)
    # Pixel-link every axis of data1 to the corresponding (permuted)
    # axis of data3.
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[0],
                 self.data3.pixel_component_ids[2]))
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[1],
                 self.data3.pixel_component_ids[1]))
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[2],
                 self.data3.pixel_component_ids[3]))
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[3],
                 self.data3.pixel_component_ids[0]))

    # A dataset with fewer dimensions, linked to the first one. Shape is
    # (8, 7, 6).
    self.data4 = Data(x=self.array[:, :, :, 0].transpose())
    self.data_collection.append(self.data4)
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[0],
                 self.data4.pixel_component_ids[2]))
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[1],
                 self.data4.pixel_component_ids[1]))
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[2],
                 self.data4.pixel_component_ids[0]))

    # A dataset with even fewer dimensions, linked to the first one. Shape
    # is (8, 6).
    self.data5 = Data(x=self.array[:, 0, :, 0].transpose())
    self.data_collection.append(self.data5)
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[0],
                 self.data5.pixel_component_ids[1]))
    self.data_collection.add_link(
        LinkSame(self.data1.pixel_component_ids[2],
                 self.data5.pixel_component_ids[0]))

    # A dataset that is not on the same pixel grid and requires
    # reprojection (linked through world coordinates only).
    self.data6 = Data()
    self.data6.coords = SimpleCoordinates()
    self.array_nonaligned = np.arange(60).reshape((5, 3, 4))
    self.data6['x'] = np.array(self.array_nonaligned)
    self.data_collection.append(self.data6)
    self.data_collection.add_link(
        LinkSame(self.data1.world_component_ids[0],
                 self.data6.world_component_ids[1]))
    self.data_collection.add_link(
        LinkSame(self.data1.world_component_ids[1],
                 self.data6.world_component_ids[2]))
    self.data_collection.add_link(
        LinkSame(self.data1.world_component_ids[2],
                 self.data6.world_component_ids[0]))

    # One image layer per dataset, with data1 as the reference grid.
    self.viewer_state = ImageViewerState()
    self.viewer_state.layers.append(
        ImageLayerState(viewer_state=self.viewer_state, layer=self.data1))
    self.viewer_state.layers.append(
        ImageLayerState(viewer_state=self.viewer_state, layer=self.data2))
    self.viewer_state.layers.append(
        ImageLayerState(viewer_state=self.viewer_state, layer=self.data3))
    self.viewer_state.layers.append(
        ImageLayerState(viewer_state=self.viewer_state, layer=self.data4))
    self.viewer_state.layers.append(
        ImageLayerState(viewer_state=self.viewer_state, layer=self.data5))
    self.viewer_state.layers.append(
        ImageLayerState(viewer_state=self.viewer_state, layer=self.data6))
    self.viewer_state.reference_data = self.data1
def test_pixel_selection_subset_state():
    """Check ``PixelSubsetState.to_array`` against linked/unlinked data."""

    ref = Data(x=np.ones((2, 4, 3)))
    cube = Data(y=np.ones((4, 3, 2)))
    image = Data(z=np.ones((2, 3)))

    y_id = cube.main_components[0]
    z_id = image.main_components[0]

    # Subset states on the reference grid that constrain one, two and
    # three pixel axes respectively.
    state_1d = PixelSubsetState(ref, [slice(1, 2), slice(None), slice(None)])
    state_2d = PixelSubsetState(ref, [slice(None), slice(2, 3), slice(1, 2)])
    state_3d = PixelSubsetState(ref, [slice(1, 2), slice(2, 3), slice(1, 2)])
    all_states = [state_1d, state_2d, state_3d]

    collection = DataCollection([ref, cube, image])

    # With no links in place, to_array should only succeed on the
    # reference dataset itself and raise for everything else.
    for dataset in collection:
        for state in all_states:
            attr = dataset.main_components[0]
            if dataset is not ref:
                with pytest.raises(IncompatibleAttribute):
                    state.to_array(dataset, attr)
            else:
                assert_array_equal(state.to_array(dataset, attr),
                                   dataset[attr][state.slices])

    # Link one pixel axis of the reference to each of the other datasets.
    collection.add_link(
        LinkSame(ref.pixel_component_ids[0], cube.pixel_component_ids[2]))
    collection.add_link(
        LinkSame(ref.pixel_component_ids[0], image.pixel_component_ids[0]))

    # Only the state constraining just that axis can now be translated.
    assert_array_equal(state_1d.to_array(cube, y_id), cube[y_id][:, :, 1:2])
    for state in (state_2d, state_3d):
        with pytest.raises(IncompatibleAttribute):
            state.to_array(cube, y_id)

    assert_array_equal(state_1d.to_array(image, z_id), image[z_id][1:2])
    for state in (state_2d, state_3d):
        with pytest.raises(IncompatibleAttribute):
            state.to_array(image, z_id)

    # Add a link with multiple components: the two remaining reference
    # axes tied to two axes of ``cube`` at once (two cids <-> two cids).

    def combine(x, y):
        return x + y, x - y

    def split(x, y):
        return 0.5 * (x + y), 0.5 * (x - y)

    collection.add_link(
        MultiLink(ref.pixel_component_ids[1:], cube.pixel_component_ids[:2],
                  forwards=combine, backwards=split))

    # All three states are now translatable to ``cube``...
    assert_array_equal(state_1d.to_array(cube, y_id), cube[y_id][:, :, 1:2])
    assert_array_equal(state_2d.to_array(cube, y_id), cube[y_id][2:3, 1:2, :])
    assert_array_equal(state_3d.to_array(cube, y_id),
                       cube[y_id][2:3, 1:2, 1:2])

    # ... but ``image`` still shares only the single linked axis.
    assert_array_equal(state_1d.to_array(image, z_id), image[z_id][1:2])
    for state in (state_2d, state_3d):
        with pytest.raises(IncompatibleAttribute):
            state.to_array(image, z_id)
from glue.core.data_factories import load_data
from glue.core import DataCollection
from glue.core.link_helpers import LinkSame
from glue.qt.glue_application import GlueApplication

# Read the W5 image and the W5 point-source catalog from disk.
w5_image = load_data('w5.fits')
w5_catalog = load_data('w5_psc.vot')
data_collection = DataCollection([w5_image, w5_catalog])

# Tie the image's world coordinates to the catalog's RA/Dec columns so
# that selections propagate between the two datasets.
data_collection.add_link(LinkSame(w5_image.id['World x: RA---TAN'],
                                  w5_catalog.id['RAJ2000']))
data_collection.add_link(LinkSame(w5_image.id['World y: DEC--TAN'],
                                  w5_catalog.id['DEJ2000']))

# Launch the Glue application.
application = GlueApplication(data_collection)
application.start()
apath('fitted_line_parameters_Chi2Constraints.ipac'), format='ascii.ipac') catalog.label = 'FittedLineParameters' catalog.style.color = 'green' catalog.style.marker = 'o' cube = load_data(hpath('APEX_H2CO_303_202_bl.fits')) cube.label = 'H2CO 303/202' cube2 = load_data(molpath('APEX_SiO_54.fits')) cube2.label = 'SiO' cube3 = load_data(hpath('APEX_13CO_matched_H2CO.fits')) cube3.label = '13CO' higaltem = load_data('/Users/adam/work/gc/gcmosaic_temp_conv36.fits') dc = DataCollection([cube, catalog, cube2, cube3, higaltem]) dc.merge(cube, cube2, cube3) dc.add_link(LinkSame(cube.id['Galactic Longitude'], catalog.id['GLON'])) dc.add_link(LinkSame(cube.id['Galactic Latitude'], catalog.id['GLAT'])) def ms_to_kms(x): return x / 1e3 def kms_to_ms(x): return x * 1e3 dc.add_link( LinkTwoWay(cube.id['Vrad'], catalog.id['center'], ms_to_kms, kms_to_ms)) subset_tem_lt_60 = (catalog.id['temperature_chi2'] < 60) & (