def test_component_id_combo_helper_add():
    # Make sure that when adding a component, and if a data collection is not
    # present, the choices still get updated
    callback = MagicMock()
    state = ExampleState()
    state.add_callback('combo', callback)

    dc = DataCollection([])

    helper = ComponentIDComboHelper(state, 'combo')

    # No data yet, so no choices
    assert selection_choices(state, 'combo') == ""

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    callback.reset_mock()

    dc.append(data1)
    helper.append_data(data1)

    # Appending the data fires exactly one notification for the combo
    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "x:y"

    data1.add_component([7, 8, 9], 'z')

    # Should get notification since choices have changed
    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "x:y:z"
def test_component_id_combo_helper_init():
    # Regression test to make sure that the numeric and categorical options
    # in the __init__ are taken into account properly
    state = ExampleState()

    dc = DataCollection([])

    data = Data(a=[1, 2, 3], b=['a', 'b', 'c'], label='data2')
    dc.append(data)

    # Default: both numeric and categorical components are offered
    helper = ComponentIDComboHelper(state, 'combo', dc)
    helper.append_data(data)
    assert selection_choices(state, 'combo') == "a:b"

    # numeric=False leaves only the categorical component
    helper = ComponentIDComboHelper(state, 'combo', dc, numeric=False)
    helper.append_data(data)
    assert selection_choices(state, 'combo') == "b"

    # categorical=False leaves only the numeric component
    helper = ComponentIDComboHelper(state, 'combo', dc, categorical=False)
    helper.append_data(data)
    assert selection_choices(state, 'combo') == "a"

    # Excluding both kinds leaves no choices at all
    helper = ComponentIDComboHelper(state, 'combo', dc, numeric=False, categorical=False)
    helper.append_data(data)
    assert selection_choices(state, 'combo') == ""
def test_component_id_combo_helper_init():
    # Regression test to make sure that the numeric and categorical options
    # in the __init__ are taken into account properly
    combo = QtWidgets.QComboBox()

    dc = DataCollection([])
    data = Data(a=[1, 2, 3], b=['a', 'b', 'c'], label='data2')
    dc.append(data)

    # Each case: helper keyword options -> expected combo contents.
    cases = [({}, "a:b"),
             ({'numeric': False}, "b"),
             ({'categorical': False}, "a"),
             ({'numeric': False, 'categorical': False}, "")]

    for options, expected in cases:
        helper = ComponentIDComboHelper(combo, dc, **options)
        helper.append_data(data)
        assert _items_as_string(combo) == expected
def test_data_collection_combo_helper():
    """DataCollectionComboHelper mirrors dataset additions, relabels and
    removals, notifying the attached callback each time."""
    callback = MagicMock()
    state = ExampleState()
    state.add_callback('combo', callback)

    dc = DataCollection([])

    helper = DataCollectionComboHelper(state, 'combo', dc)  # noqa

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    # No notification until the data is actually added to the collection
    assert callback.call_count == 0

    dc.append(data1)
    assert callback.call_count == 1
    assert selection_choices(state, 'combo') == "data1"

    # Relabelling propagates to the choices and fires the callback again
    data1.label = 'mydata1'
    assert selection_choices(state, 'combo') == "mydata1"
    assert callback.call_count == 2

    dc.remove(data1)
    assert callback.call_count == 3
    assert selection_choices(state, 'combo') == ""
class TestExportPython(BaseTestExportPython):
    """Round-trip tests exporting an image viewer to a Python script.

    Fix: ``test_simple_visual`` previously assigned ``stretch = 'sqrt'``
    twice in a row; the duplicate (no-op) assignment has been removed.
    """

    def setup_method(self, method):
        # Fixed seed so exported scripts render identically across runs.
        with NumpyRNGContext(12345):
            self.data = Data(cube=np.random.random((30, 50, 20)))
        self.data_collection = DataCollection([self.data])
        self.app = GlueApplication(self.data_collection)
        self.viewer = self.app.new_data_viewer(ImageViewer)
        self.viewer.add_data(self.data)
        # FIXME: On some platforms, using an integer label size
        # causes some of the labels to be non-deterministically
        # shifted by one pixel, so we pick a non-round font size
        # to avoid this.
        self.viewer.state.x_ticklabel_size = 8.21334111
        self.viewer.state.y_ticklabel_size = 8.21334111

    def teardown_method(self, method):
        self.viewer.close()
        self.viewer = None
        self.app.close()
        self.app = None

    def test_simple(self, tmpdir):
        self.assert_same(tmpdir)

    def test_simple_att(self, tmpdir):
        self.viewer.state.x_att = self.data.pixel_component_ids[1]
        self.viewer.state.y_att = self.data.pixel_component_ids[0]
        self.assert_same(tmpdir)

    def test_simple_visual(self, tmpdir):
        self.viewer.state.layers[0].cmap = plt.cm.RdBu
        self.viewer.state.layers[0].v_min = 0.2
        self.viewer.state.layers[0].v_max = 0.8
        self.viewer.state.layers[0].stretch = 'sqrt'
        self.viewer.state.layers[0].contrast = 0.9
        self.viewer.state.layers[0].bias = 0.6
        self.assert_same(tmpdir)

    def test_slice(self, tmpdir):
        self.viewer.state.x_att = self.data.pixel_component_ids[1]
        self.viewer.state.y_att = self.data.pixel_component_ids[0]
        self.viewer.state.slices = (2, 3, 4)
        self.assert_same(tmpdir)

    def test_aspect(self, tmpdir):
        self.viewer.state.aspect = 'auto'
        self.assert_same(tmpdir)

    def test_subset(self, tmpdir):
        self.data_collection.new_subset_group('mysubset', self.data.id['cube'] > 0.5)
        self.assert_same(tmpdir)

    def test_subset_slice(self, tmpdir):
        self.data_collection.new_subset_group('mysubset', self.data.id['cube'] > 0.5)
        self.test_slice(tmpdir)
def qglue(**kwargs):
    """
    Quickly send python variables to Glue for visualization.

    The generic calling sequence is::

        qglue(label1=data1, label2=data2, ..., [links=links])

    The keywords label1, label2, ... can be named anything besides ``links``.

    data1, data2, ... can be in many formats:

    * A pandas data frame
    * A path to a file
    * A numpy array, or python list
    * A numpy rec array
    * A dictionary of numpy arrays with the same shape
    * An astropy Table

    ``links`` is an optional list of link descriptions, each of which has
    the format: ([left_ids], [right_ids], forward, backward)

    Each ``left_id``/``right_id`` is a string naming a component in a
    dataset (i.e., ``data1.x``). ``forward`` and ``backward`` are functions
    which map quantities on the left to quantities on the right, and vice
    versa. ``backward`` is optional.

    Examples::

        balls = {'kg': [1, 2, 3], 'radius_cm': [10, 15, 30]}
        cones = {'lbs': [5, 3, 3, 1]}
        def lb2kg(lb):
            return lb / 2.2
        def kg2lb(kg):
            return kg * 2.2

        links = [(['balls.kg'], ['cones.lbs'], lb2kg, kg2lb)]
        qglue(balls=balls, cones=cones, links=links)

    :returns: A :class:`~glue.app.qt.application.GlueApplication` object
    """
    # Imports are deferred so importing this module does not pull in Qt.
    from glue.core import DataCollection
    from glue.app.qt import GlueApplication

    links = kwargs.pop('links', None)

    dc = DataCollection()
    for label, data in kwargs.items():
        dc.extend(parse_data(data, label))

    if links is not None:
        dc.add_link(parse_links(dc, links))

    # restore_io guards stdin/stdout against Qt startup side effects
    with restore_io():
        ga = GlueApplication(dc)
        ga.start()

    return ga
def test_1d_world_link():
    """Linking a 1-d world coordinate exposes both the world and pixel
    component of one dataset on the other."""
    x = r(10)
    y = r(10)
    d1 = Data(label='d1', x=x)
    d2 = Data(label='d2', y=y)
    dc = DataCollection([d1, d2])

    world = d2.get_world_component_id(0)
    dc.add_link(LinkSame(world, d1.id['x']))

    assert world in d1.components
    np.testing.assert_array_equal(d1[world], x)
    np.testing.assert_array_equal(d1[d2.get_pixel_component_id(0)], x)
def test_close_on_last_layer_remove(self):
    # regression test for 391
    """Removing every dataset shown in a viewer should close it."""
    d1 = Data(x=np.random.random((2,) * self.ndim))
    d2 = Data(y=np.random.random((2,) * self.ndim))
    dc = DataCollection([d1, d2])
    app = GlueApplication(dc)
    # Patch close() so we can count invocations rather than closing a real
    # widget.
    with patch.object(self.widget_cls, 'close') as close:
        w = app.new_data_viewer(self.widget_cls, data=d1)
        w.add_data(d2)
        dc.remove(d1)
        dc.remove(d2)
        # Close may be called more than once depending on event delivery
        assert close.call_count >= 1
def test_2d_world_link():
    """Should be able to grab pixel coords after linking world"""
    x, y = r(10), r(10)
    cat = Data(label='cat', x=x, y=y)
    im = Data(label='im', inten=r((3, 3)))
    dc = DataCollection([cat, im])

    # Link each of the image's world axes to a catalogue column.
    for axis, column in ((0, 'x'), (1, 'y')):
        dc.add_link(LinkSame(im.get_world_component_id(axis), cat.id[column]))

    # The corresponding pixel coordinates become reachable on the catalogue.
    np.testing.assert_array_equal(cat[im.get_pixel_component_id(0)], x)
    np.testing.assert_array_equal(cat[im.get_pixel_component_id(1)], y)
def _load_data_collection_4(rec, context):
    """Reconstruct a DataCollection (session protocol 4) from a saved record."""
    datasets = [context.object(d) for d in rec['data']]
    dc = DataCollection(datasets)
    dc.set_links([context.object(link) for link in rec['links']])
    coerce_subset_groups(dc)
    # Restore subset groups directly and re-register them with the hub.
    dc._subset_groups = [context.object(g) for g in rec['groups']]
    for group in dc.subset_groups:
        group.register_to_hub(dc.hub)
    dc._sg_count = rec['subset_group_count']
    return dc
def main():
    """Demo entry point: build ten small datasets and open the link editor."""
    import numpy as np
    from glue.utils.qt import get_qapp
    from glue.core import Data, DataCollection

    app = get_qapp()  # noqa: F841 (keeps the Qt application alive)

    dc = DataCollection()
    for index in range(10):
        values = np.array([1, 2, 3])
        dc.append(Data(label='data_{0:02d}'.format(index),
                       x=values, y=values * 2))

    LinkEditor.update_links(dc)
def glue_gui():
    """Return a visible Glue application pre-loaded with the DEIMOS test
    table and a MOSViz viewer."""
    dataset = data_factories.load_data(DEIMOSTABLE)
    dc = DataCollection([])
    dc.append(dataset)

    # Creates glue instance
    app = GlueApplication(dc)
    app.setVisible(True)

    # Adds data to the MosVizViewer
    app.new_data_viewer(MOSVizViewer)
    app.viewers[0][0].add_data_for_testing(app.data_collection[0])

    return app
def _load_data_collection(rec, context):
    """Reconstruct a DataCollection from a saved session record, sorting
    saved links into internal/external and pruning stale derived components."""
    datasets = list(map(context.object, rec['data']))
    links = [context.object(link) for link in rec['links']]

    # Filter out CoordinateComponentLinks that may have been saved in the past
    # as these are now re-generated on-the-fly.
    links = [link for link in links if not isinstance(link, CoordinateComponentLink)]

    # Go through and split links into links internal to datasets and ones
    # between datasets as this dictates whether they should be set on the
    # data collection or on the data objects.
    external, internal = [], []
    for link in links:
        parent_to = link.get_to_id().parent
        for cid in link.get_from_ids():
            if cid.parent is not parent_to:
                external.append(link)
                break
        else:
            # for/else: every 'from' id lives in the target dataset
            internal.append(link)

    # Remove components in datasets that have external links
    # NOTE(review): the nesting below is reconstructed from a collapsed
    # source line — confirm which checks are gated on the protocol version.
    for data in datasets:
        remove = []
        for cid in data.derived_components:
            comp = data.get_component(cid)
            # Neither in external nor in links overall
            if rec.get('_protocol', 0) <= 3:
                if comp.link not in internal:
                    remove.append(cid)
            if isinstance(comp.link, CoordinateComponentLink):
                remove.append(cid)
            # Identity link within a single dataset (same parent, same label)
            if len(comp.link.get_from_ids()) == 1 and comp.link.get_from_ids()[0].parent is comp.link.get_to_id().parent and comp.link.get_from_ids()[0].label == comp.link.get_to_id().label:
                remove.append(cid)
        for cid in remove:
            data.remove_component(cid)

    dc = DataCollection(datasets)
    dc.set_links(external)
    coerce_subset_groups(dc)
    return dc
def setup_method(self, method):
    """Three small datasets shared by the tests in this class."""
    self.data1 = Data(x=[1, 2, 3], y=[2, 3, 4], z=[6, 5, 4], label='data1')
    self.data2 = Data(a=[2, 3, 4], b=[4, 5, 4], c=[3, 4, 1], label='data2')
    self.data3 = Data(i=[5, 4, 3], j=[2, 2, 1], label='data3')
    self.data_collection = DataCollection([self.data1, self.data2, self.data3])
def setup_method(self, method):
    """Dendrogram fixture: parent/height arrays defining a small tree."""
    self.data = Data(parent=[4, 4, 5, 5, 5, -1],
                     height=[5, 4, 3, 2, 1, 0], label='dendro')
    self.dc = DataCollection([self.data])
    self.hub = self.dc.hub
    self.client = DendroClient(self.dc, figure=FIGURE)
    # Route subset edits through this data collection
    EditSubsetMode().data_collection = self.dc
def setup_method(self, method):
    """Histogram viewer fixture over random data.

    The dataset has components 'a'..'h', each with one NaN at a distinct
    index (idx + 1); a fixed seed keeps the data reproducible.
    """
    with NumpyRNGContext(12345):
        self.data = Data(**dict((name, random_with_nan(100, nan_index=idx + 1))
                                for idx, name in enumerate('abcdefgh')))
    self.data_collection = DataCollection([self.data])
    self.app = GlueApplication(self.data_collection)
    self.viewer = self.app.new_data_viewer(HistogramViewer)
    self.viewer.add_data(self.data)
    self.viewer.state.x_att = self.data.id['a']
class TestExportPython(BaseTestExportPython):
    """Round-trip tests exporting a histogram viewer to a Python script."""

    def setup_method(self, method):
        # Fixed seed so the exported script renders identically to the viewer
        with NumpyRNGContext(12345):
            self.data = Data(**dict((name, random_with_nan(100, nan_index=idx + 1))
                                    for idx, name in enumerate('abcdefgh')))
        self.data_collection = DataCollection([self.data])
        self.app = GlueApplication(self.data_collection)
        self.viewer = self.app.new_data_viewer(HistogramViewer)
        self.viewer.add_data(self.data)
        self.viewer.state.x_att = self.data.id['a']

    def teardown_method(self, method):
        # Drop references so Qt widgets can be garbage-collected
        self.viewer.close()
        self.viewer = None
        self.app.close()
        self.app = None

    def test_simple(self, tmpdir):
        self.assert_same(tmpdir)

    def test_simple_visual(self, tmpdir):
        self.viewer.state.layers[0].color = 'blue'
        self.viewer.state.layers[0].alpha = 0.5
        self.assert_same(tmpdir)

    def test_cumulative(self, tmpdir):
        self.viewer.state.cumulative = True
        self.assert_same(tmpdir)

    def test_normalize(self, tmpdir):
        self.viewer.state.normalize = True
        self.assert_same(tmpdir)

    def test_subset(self, tmpdir):
        self.data_collection.new_subset_group('mysubset', self.data.id['a'] > 0.5)
        self.assert_same(tmpdir)

    def test_empty(self, tmpdir):
        # Limits chosen outside the data range so no bins contain anything
        self.viewer.state.x_min = 10
        self.viewer.state.x_max = 11
        self.viewer.state.hist_x_min = 10
        self.viewer.state.hist_x_max = 11
        self.assert_same(tmpdir)
def test_close_on_last_layer_remove(self):
    # regression test for 391
    """Viewer stays open until its last dataset is removed, then closes."""
    d1 = Data(x=np.random.random((2,) * self.ndim))
    d2 = Data(y=np.random.random((2,) * self.ndim))
    dc = DataCollection([d1, d2])
    app = GlueApplication(dc)
    w = app.new_data_viewer(self.widget_cls, data=d1)
    w.add_data(d2)
    # process_events flushes the Qt event queue so removal signals are
    # delivered before we assert
    process_events()
    assert len(app.viewers[0]) == 1
    dc.remove(d1)
    process_events()
    # One layer (d2) left -> viewer still open
    assert len(app.viewers[0]) == 1
    dc.remove(d2)
    process_events()
    # Last layer gone -> viewer closed
    assert len(app.viewers[0]) == 0
    app.close()
def test_link_aligned(ndata, ndim):
    """Pixel component IDs of LinkAligned datasets are interchangeable."""
    shape = tuple([2] * ndim)
    datasets = []
    for _ in range(ndata):
        data = Data()
        data.add_component(Component(np.random.random(shape)), 'test')
        datasets.append(data)

    # assert that all componentIDs are interchangeable
    links = LinkAligned(datasets)
    dc = DataCollection(datasets)
    dc.add_link(links)

    for axis in range(ndim):
        reference = datasets[0].get_pixel_component_id(axis)
        for other in datasets[1:]:
            own = other.get_pixel_component_id(axis)
            np.testing.assert_array_equal(other[reference], other[own])
def test_data_collection_combo_helper():
    """The combo tracks dataset additions, relabels and removals."""
    combo = QtWidgets.QComboBox()
    dc = DataCollection([])
    helper = DataCollectionComboHelper(combo, dc)  # noqa: F841

    dataset = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')
    dc.append(dataset)
    assert _items_as_string(combo) == "data1"

    dataset.label = 'mydata1'
    assert _items_as_string(combo) == "mydata1"

    dc.remove(dataset)
    assert _items_as_string(combo) == ""
def test_manual_data_combo_helper(initialize_data_collection):
    # The case with initialize_data_collection=False is a regression test for a
    # bug which meant that when a ManualDataComboHelper was initialized without
    # a data collection, it did not change when a data object added later has a
    # label changed.
    callback = MagicMock()
    state = ExampleState()
    state.add_callback('combo', callback)

    dc = DataCollection([])

    if initialize_data_collection:
        helper = ManualDataComboHelper(state, 'combo', dc)
    else:
        helper = ManualDataComboHelper(state, 'combo')

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    dc.append(data1)

    # Appending to the collection alone does not populate a *manual* helper
    assert callback.call_count == 0
    assert selection_choices(state, 'combo') == ""

    helper.append_data(data1)
    assert callback.call_count == 1
    assert selection_choices(state, 'combo') == "data1"

    # Relabelling must propagate even without a data collection
    data1.label = 'mydata1'
    assert selection_choices(state, 'combo') == "mydata1"
    assert callback.call_count == 2

    if initialize_data_collection:
        # Removal is only tracked when the helper knows the collection
        dc.remove(data1)
        assert selection_choices(state, 'combo') == ""
        assert callback.call_count == 3
def test_scatter_remove_layer_artists(tmpdir):
    # Regression test for a bug that caused layer states to not be removed
    # when the matching layer artist was removed. This then caused issues when
    # loading session files.

    # Create fake data
    data = make_test_data()

    # Create fake session
    dc = DataCollection([data])
    ga = GlueApplication(dc)
    ga.show()

    scatter = ga.new_data_viewer(VispyScatterViewer)
    scatter.add_data(data)

    dc.new_subset_group(subset_state=data.id['x'] > 0.5, label='subset 1')
    scatter.add_subset(data.subsets[0])

    # One data layer plus one subset layer
    assert len(scatter.layers) == 2
    assert len(scatter.state.layers) == 2

    dc.remove_subset_group(dc.subset_groups[0])

    # The layer state must be dropped along with the layer artist
    assert len(scatter.layers) == 1
    assert len(scatter.state.layers) == 1

    # Check that writing a session works as expected.
    session_file = tmpdir.join('test_scatter_viewer.glu').strpath
    ga.save_session(session_file)
    ga.close()

    # Now we can check that everything is restored correctly
    ga2 = GlueApplication.restore_session(session_file)
    ga2.show()
def test_close_on_last_layer_remove(self):
    # regression test for 391
    # Note: processEvents is needed for things to work correctly with PySide2
    """Viewer closes only once the last of its datasets is removed."""
    qtapp = get_qapp()
    d1 = Data(x=np.random.random((2,) * self.ndim))
    d2 = Data(y=np.random.random((2,) * self.ndim))
    dc = DataCollection([d1, d2])
    app = GlueApplication(dc)
    w = app.new_data_viewer(self.widget_cls, data=d1)
    w.add_data(d2)
    qtapp.processEvents()
    assert len(app.viewers[0]) == 1
    dc.remove(d1)
    qtapp.processEvents()
    # One layer (d2) left -> viewer still open
    assert len(app.viewers[0]) == 1
    dc.remove(d2)
    qtapp.processEvents()
    # Last layer gone -> viewer closed
    assert len(app.viewers[0]) == 0
    app.close()
def setup_method(self, method):
    """Image viewer fixture over a random (30, 50, 20) cube with fixed seed."""
    with NumpyRNGContext(12345):
        self.data = Data(cube=np.random.random((30, 50, 20)))
    self.data_collection = DataCollection([self.data])
    self.app = GlueApplication(self.data_collection)
    self.viewer = self.app.new_data_viewer(ImageViewer)
    self.viewer.add_data(self.data)
    # FIXME: On some platforms, using an integer label size
    # causes some of the labels to be non-deterministically
    # shifted by one pixel, so we pick a non-round font size
    # to avoid this.
    self.viewer.state.x_ticklabel_size = 8.21334111
    self.viewer.state.y_ticklabel_size = 8.21334111
def test_component_id_combo_helper_replaced():
    # Make sure that when components are replaced, the equivalent combo index
    # remains selected and an event is broadcast so that any attached callback
    # properties can be sure to pull the latest text/userData.
    callback = MagicMock()
    state = ExampleState()
    state.add_callback('combo', callback)

    dc = DataCollection([])

    helper = ComponentIDComboHelper(state, 'combo', dc)

    assert selection_choices(state, 'combo') == ""

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    callback.reset_mock()

    dc.append(data1)
    helper.append_data(data1)

    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "x:y"

    # Replace the 'x' component ID in-place with a new one
    new_id = ComponentID(label='new')
    data1.update_id(data1.id['x'], new_id)

    # Replacement broadcasts a change even though the index is unchanged
    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "new:y"
def test_component_id_combo_helper_replaced():
    # Make sure that when components are replaced, the equivalent combo index
    # remains selected and an event is broadcast so that any attached callback
    # properties can be sure to pull the latest text/userData.
    callback = MagicMock()
    combo = QtWidgets.QComboBox()
    combo.currentIndexChanged.connect(callback)

    dc = DataCollection([])

    helper = ComponentIDComboHelper(combo, dc)

    assert combo_as_string(combo) == ""

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    callback.reset_mock()

    dc.append(data1)
    helper.append_data(data1)

    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert combo_as_string(combo) == "x:y"

    # Replace the 'x' component ID in-place with a new one
    new_id = ComponentID(label='new')
    data1.update_id(data1.id['x'], new_id)

    # Replacement broadcasts a change even though the index is unchanged
    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert combo_as_string(combo) == "new:y"
class TestExporter():
    """Tests for MySubsetMaskExporter's mask-writing behaviour."""

    def setup_method(self, method):
        self.exporter = MySubsetMaskExporter()
        self.exporter.filename = 'test-filename'
        # Replace the writer so no file I/O happens; tests inspect call args
        self.exporter.writer = MagicMock()
        self.data = Data(x=[1, 2, 3])
        self.data_collection = DataCollection([self.data])

    def test_no_subsets(self):
        # Exporting data with no subsets is an error
        with pytest.raises(ValueError) as exc:
            self.exporter.run(self.data)
        assert exc.value.args[0] == 'Data has no subsets'

    def test_multiple_valid(self):
        self.subset1 = self.data_collection.new_subset_group(subset_state=self.data.id['x'] >= 2,
                                                             label='subset a')
        self.subset2 = self.data_collection.new_subset_group(subset_state=self.data.id['x'] >= 3,
                                                             label='subset b')
        self.exporter.run(self.data)
        assert self.exporter.writer.call_count == 1
        assert self.exporter.writer.call_args[0][0] == 'test-filename'
        # One boolean mask per subset, keyed by subset label
        masks = self.exporter.writer.call_args[0][1]
        assert len(masks) == 2
        assert_equal(masks['subset a'], [0, 1, 1])
        assert_equal(masks['subset b'], [0, 0, 1])

    def test_single_subset_valid(self):
        self.subset = self.data_collection.new_subset_group(subset_state=self.data.id['x'] >= 2,
                                                            label='subset a')
        # Running on a Subset (not the Data) exports only that subset's mask
        self.exporter.run(self.data.subsets[0])
        assert self.exporter.writer.call_count == 1
        assert self.exporter.writer.call_args[0][0] == 'test-filename'
        masks = self.exporter.writer.call_args[0][1]
        assert len(masks) == 1
        assert_equal(masks['subset a'], [0, 1, 1])
def test_component_id_combo_helper():
    """End-to-end checks of ComponentIDComboHelper: adding/removing data,
    toggling numeric/categorical filters, and exposing hidden components."""
    combo = QtWidgets.QComboBox()

    dc = DataCollection([])

    helper = ComponentIDComboHelper(combo, dc)

    assert _items_as_string(combo) == ""

    data1 = Data(x=[1,2,3], y=[2,3,4], label='data1')
    dc.append(data1)
    helper.append_data(data1)
    assert _items_as_string(combo) == "x:y"

    data2 = Data(a=[1,2,3], b=['a','b','c'], label='data2')
    dc.append(data2)
    helper.append_data(data2)
    # With two datasets the entries are grouped under the data labels
    assert _items_as_string(combo) == "data1:x:y:data2:a:b"

    helper.categorical = False
    assert _items_as_string(combo) == "data1:x:y:data2:a"

    helper.numeric = False
    assert _items_as_string(combo) == "data1:data2"

    helper.categorical = True
    helper.numeric = True

    # visible=False exposes the normally-hidden pixel/world components too
    helper.visible = False
    assert _items_as_string(combo) == "data1:Pixel Axis 0 [x]:World 0:x:y:data2:Pixel Axis 0 [x]:World 0:a:b"
    helper.visible = True

    dc.remove(data2)
    assert _items_as_string(combo) == "x:y"

    # TODO: check that renaming a component updates the combo
    # data1.id['x'].label = 'z'
    # assert _items_as_string(combo) == "z:y"

    helper.remove_data(data1)
    assert _items_as_string(combo) == ""
def main():
    """Run an NDVI band-math query over Lake Burley Griffin and explore the
    results in Glue.

    Fix: the ``ndvi = ...`` and ``result = ...`` bindings were never read;
    the calls are kept for their side effects, the dead locals removed.
    """
    a = AnalyticsEngine()
    e = ExecutionEngine()

    # Lake Burley Griffin
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    # Registers the NDVI task on a.plan; the returned handle is not needed.
    a.apply_sensor_specific_bandmath('LANDSAT_5', 'nbar', 'ndvi', dimensions,
                                     'get_data', 'ndvi')

    # Executed for its side effects on e.cache; return value unused.
    e.execute_plan(a.plan)

    plot(e.cache['ndvi'])

    b30_result = e.cache['get_data']['array_result']['red']
    b40_result = e.cache['get_data']['array_result']['nir']
    ndvi_result = e.cache['ndvi']['array_result']['ndvi']

    # Reverse the y axis — presumably for display orientation; confirm.
    b30_data = Data(x=b30_result[:, ::-1, :], label='B30')
    b40_data = Data(x=b40_result[:, ::-1, :], label='B40')
    ndvi_data = Data(x=ndvi_result[:, ::-1, :], label='ndvi')

    long_data = Data(x=b40_result.coords['x'], label='long')
    lat_data = Data(x=b40_result.coords['y'], label='lat')
    time_data = Data(x=b40_result.coords['time'], label='time')

    collection = DataCollection([ndvi_data, b30_data, b40_data,
                                 long_data, lat_data, time_data])
    app = GlueApplication(collection)
    app.start()
def main():
    """Compute a median-over-time reduction for Lake Burley Griffin and
    explore the results in Glue.

    Fix: the ``median_t = ...`` and ``result = ...`` bindings were never
    read; the calls are kept for their side effects, the dead locals removed.
    """
    a = AnalyticsEngine()
    e = ExecutionEngine()

    # Lake Burley Griffin
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = a.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'get_data')

    # Registers the reduction on a.plan; the returned handle is not needed.
    a.apply_generic_reduction(arrays, ['time'], 'median(array1)', 'medianT')

    # Executed for its side effects on e.cache; return value unused.
    e.execute_plan(a.plan)

    plot(e.cache['medianT'])

    b40_result = e.cache['get_data']['array_result']['nir']
    median_result = e.cache['medianT']['array_result']['medianT']

    # Reverse the y axis — presumably for display orientation; confirm.
    b40_data = Data(x=b40_result[:, ::-1, :], label='B40')
    median_data = Data(x=median_result[::-1, :], label='medianT')

    long_data = Data(x=b40_result.coords['x'], label='long')
    lat_data = Data(x=b40_result.coords['y'], label='lat')
    time_data = Data(x=b40_result.coords['time'], label='time')

    collection = DataCollection([median_data, b40_data,
                                 long_data, lat_data, time_data])
    app = GlueApplication(collection)
    app.start()
def test_component_id_combo_helper_rename():
    # Make sure that renaming component IDs now propagates to the combo options
    state = ExampleState()

    data = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')
    dc = DataCollection([data])

    helper = ComponentIDComboHelper(state, 'combo', dc)  # noqa
    helper.append_data(data)
    assert selection_choices(state, 'combo') == "x:y"

    # Renaming the ComponentID should be reflected immediately
    data.id['x'].label = 'renamed'
    assert selection_choices(state, 'combo') == "renamed:y"
def test_single_draw_call_on_create(self):
    """Creating a viewer should trigger at most one draw per canvas.

    Fix: the original iterated ``MplCanvas.draw.call_arg_list`` — a typo
    for ``call_args_list``.  Iterating the auto-created mock attribute
    yielded nothing, so the assertion was vacuous.
    """
    d = Data(x=np.random.random((2,) * self.ndim))
    dc = DataCollection([d])
    app = GlueApplication(dc)

    try:
        from glue.viewers.matplotlib.qt.widget import MplCanvas
        draw = MplCanvas.draw
        MplCanvas.draw = MagicMock()

        app.new_data_viewer(self.widget_cls, data=d)

        # each Canvas instance gives at most 1 draw call
        selfs = [c[0][0] for c in MplCanvas.draw.call_args_list]
        assert len(set(selfs)) == len(selfs)
    finally:
        # Always restore the real draw method, even if the test fails.
        MplCanvas.draw = draw
def test_importer_cancel(tmpdir):
    """Cancelling the file dialog must leave data and collection untouched."""
    filename = tmpdir.join('test.fits').strpath
    fits.PrimaryHDU(data=[0, 1, 1]).writeto(filename)

    data = Data(x=[1, 2, 3])
    data_collection = DataCollection([data])

    with patch('qtpy.compat.getopenfilename') as dialog:
        dialog.return_value = '', ''  # simulates cancelling
        importer = QtSubsetMaskImporter()
        importer.run(data, data_collection)

    assert len(data_collection.subset_groups) == 0
    assert len(data.subsets) == 0
def test_array_shape(tmpdir):
    """Regression-style check: volume viewer copes with an irregular cube."""
    # Create irregularly shaped data cube
    data = make_test_data((3841, 48, 46))

    # Create fake session
    dc = DataCollection([data])
    ga = GlueApplication(dc)

    volume = ga.new_data_viewer(VispyVolumeViewer)
    volume.add_data(data)

    # Point the first layer's attribute at a different component.
    volume.state.layers[0].attribute = data.id['b']
def setup_method(self):
    """Two datasets, one with a pre-existing derived component, linked by
    a cross-dataset ComponentLink, plus change listeners for assertions."""
    self.data1 = Data(x=[1, 2, 3], y=[3.5, 4.5, -1.0], z=['a', 'r', 'w'])
    self.data2 = Data(a=[3, 4, 1], b=[1.5, -2.0, 3.5], c=['y', 'e', 'r'])

    # Add a derived component so that we can test how we deal with existing ones
    components = dict((cid.label, cid) for cid in self.data2.components)
    pc = ParsedCommand('{a}', components)
    link = ParsedComponentLink(ComponentID('d'), pc)
    self.data2.add_component_link(link)

    self.data_collection = DataCollection([self.data1, self.data2])

    # Cross-dataset link: data1.x -> data2.a
    link = ComponentLink([self.data1.id['x']], self.data2.id['a'])
    self.data_collection.add_link(link)

    # Listeners record change events for the assertions in the tests
    self.listener1 = ChangeListener(self.data1)
    self.listener2 = ChangeListener(self.data2)
def setup_method(self, method): self.data_collection = DataCollection() # The reference dataset. Shape is (6, 7, 8, 9). self.data1 = Data(x=ARRAY) self.data_collection.append(self.data1) # A dataset with the same shape but not linked. Shape is (6, 7, 8, 9). self.data2 = Data(x=ARRAY) self.data_collection.append(self.data2) # A dataset with the same number of dimensions but in a different # order, linked to the first. Shape is (9, 7, 6, 8). self.data3 = Data(x=np.moveaxis(ARRAY, (3, 1, 0, 2), (0, 1, 2, 3))) self.data_collection.append(self.data3) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[0], self.data3.pixel_component_ids[2])) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[1], self.data3.pixel_component_ids[1])) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[2], self.data3.pixel_component_ids[3])) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[3], self.data3.pixel_component_ids[0])) # A dataset with fewer dimensions, linked to the first one. Shape is # (8, 7, 6) self.data4 = Data(x=ARRAY[:, :, :, 0].transpose()) self.data_collection.append(self.data4) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[0], self.data4.pixel_component_ids[2])) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[1], self.data4.pixel_component_ids[1])) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[2], self.data4.pixel_component_ids[0])) # A dataset with even fewer dimensions, linked to the first one. Shape # is (8, 6) self.data5 = Data(x=ARRAY[:, 0, :, 0].transpose()) self.data_collection.append(self.data5) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[0], self.data5.pixel_component_ids[1])) self.data_collection.add_link(LinkSame(self.data1.pixel_component_ids[2], self.data5.pixel_component_ids[0]))
def jglue(*args, **kwargs):
    """Assemble a DataCollection from the given datasets and wrap it in a
    JupyterApplication.

    Keyword arguments become labelled datasets (string values are treated
    as file paths and loaded); positional arguments are appended as-is.
    The optional ``links`` keyword is parsed into data links.
    """
    from glue.core import DataCollection
    from glue.app.qt import GlueApplication  # noqa: F401 (kept for its import side effects)
    from glue.qglue import parse_data, parse_links
    from glue.core.data_factories import load_data

    links = kwargs.pop('links', None)

    dc = DataCollection()
    for label, dataset in kwargs.items():
        if isinstance(dataset, six.string_types):
            dataset = load_data(dataset)
        dc.extend(parse_data(dataset, label))
    for dataset in args:
        dc.append(dataset)

    if links is not None:
        dc.add_link(parse_links(dc, links))

    return JupyterApplication(dc)
def main():
    """Compute a cloud-masked, median-over-time NDVI for Lake Burley
    Griffin and explore all intermediate arrays in Glue.

    Fix: the ``median_t = ...`` and ``result = ...`` bindings were never
    read; the calls are kept for their side effects, the dead locals removed.
    """
    a = AnalyticsEngine()
    e = ExecutionEngine()

    # Lake Burley Griffin
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    b40 = a.create_array(('LANDSAT_5', 'nbar'), ['nir'], dimensions, 'b40')
    b30 = a.create_array(('LANDSAT_5', 'nbar'), ['red'], dimensions, 'b30')
    pq = a.create_array(('LANDSAT_5', 'pqa'), ['pixelquality'], dimensions, 'pq')

    ndvi = a.apply_expression([b40, b30], '((array1 - array2) / (array1 + array2))', 'ndvi')
    mask = a.apply_expression([ndvi, pq], 'array1{(array2 == 32767) | (array2 == 16383) | (array2 == 2457)}', 'mask')
    # Registers the reduction on a.plan; the returned handle is not needed.
    a.apply_expression(mask, 'median(array1, 0)', 'medianT')

    # Executed for its side effects on e.cache; return value unused.
    e.execute_plan(a.plan)

    plot(e.cache['medianT'])

    b30_result = e.cache['b30']['array_result']['red']
    b40_result = e.cache['b40']['array_result']['nir']
    ndvi_result = e.cache['ndvi']['array_result']['ndvi']
    pq_result = e.cache['pq']['array_result']['pixelquality']
    mask_result = e.cache['mask']['array_result']['mask']
    median_result = e.cache['medianT']['array_result']['medianT']

    # Reverse the y axis — presumably for display orientation; confirm.
    b30_data = Data(x=b30_result[:, ::-1, :], label='B30')
    b40_data = Data(x=b40_result[:, ::-1, :], label='B40')
    ndvi_data = Data(x=ndvi_result[:, ::-1, :], label='ndvi')
    pq_data = Data(x=pq_result[:, ::-1, :], label='pq')
    mask_data = Data(x=mask_result[:, ::-1, :], label='mask')
    median_data = Data(x=median_result[::-1, :], label='median')

    long_data = Data(x=b40_result.coords['x'], label='long')
    lat_data = Data(x=b40_result.coords['y'], label='lat')
    time_data = Data(x=b40_result.coords['time'], label='time')

    collection = DataCollection([median_data, mask_data, pq_data, ndvi_data,
                                 b30_data, b40_data,
                                 long_data, lat_data, time_data])
    app = GlueApplication(collection)
    app.start()
def main():
    """Demo: show a TableWidget over 10**5 rows of random data."""
    from glue.core import Data, DataCollection
    from glue.qt import get_qapp
    import numpy as np

    app = get_qapp()

    n_rows = 10 ** 5
    d = Data(x=np.random.normal(0, 1, n_rows),
             y=np.random.random(n_rows))
    dc = DataCollection([d])

    tw = TableWidget(dc)
    tw.set_data(d)
    tw.show()
    app.exec_()
def test_options_widget(self):
    """Smoke test: set every 3-d viewer state option without error."""
    d1 = Data(x=np.random.random((2, ) * self.ndim))
    d2 = Data(x=np.random.random((2, ) * self.ndim))
    dc = DataCollection([d1, d2])
    app = GlueApplication(dc)

    w = app.new_data_viewer(self.widget_cls, data=d1)

    # Per-axis stretch factors
    w.state.x_stretch = 0.5
    w.state.y_stretch = 1.0
    w.state.z_stretch = 2.0

    # Axis limits
    w.state.x_min = -0.1
    w.state.x_max = 10.1
    w.state.y_min = 0.1
    w.state.y_max = 10.9
    w.state.z_min = 0.2
    w.state.z_max = 10.8

    w.state.visible_axes = False
def test_has_celestial_with_time_and_spectral_axes():
    """
    To test the case in which we have two data cubes with unequal number of
    dimensions, but both have celestial axes.
    """
    # 4-d cube: wavelength, celestial lat/lon, time
    wcs1 = WCS(naxis=4)
    wcs1.wcs.ctype = 'WAVE', 'HPLT-TAN', 'HPLN-TAN', 'TIME'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = wcs1
    data1['x'] = np.ones((2, 3, 4, 5))
    pw1, pz1, py1, px1 = data1.pixel_component_ids

    # 3-d cube: celestial lon/lat, time
    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'HPLN-TAN', 'HPLT-TAN', 'TIME'
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = wcs2
    data2['x'] = np.ones((2, 3, 4))
    pz2, py2, px2 = data2.pixel_component_ids

    dc = DataCollection([data1, data2])
    links = wcs_autolink(dc)

    # A single MultiLink with three forward and three backward mappings
    assert len(links) == 1
    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 6

    # Forward direction: data2 pixel axes from data1's spatial/spectral axes
    assert link[0].get_to_id() == px2
    assert link[0].get_from_ids() == [py1, pz1, pw1]
    assert link[1].get_to_id() == py2
    assert link[1].get_from_ids() == [py1, pz1, pw1]
    assert link[2].get_to_id() == pz2
    assert link[2].get_from_ids() == [py1, pz1, pw1]

    # Backward direction: data1 axes from all of data2's pixel axes
    assert link[3].get_to_id() == py1
    assert link[3].get_from_ids() == [px2, py2, pz2]
    assert link[4].get_to_id() == pz1
    assert link[4].get_from_ids() == [px2, py2, pz2]
    assert link[5].get_to_id() == pw1
    assert link[5].get_from_ids() == [px2, py2, pz2]
def test_change_components():
    # Regression test for a bug that caused table viewers to not update when
    # adding/removing components.
    #
    # Fix: the three ``viewer.model.columnCount() == N`` lines were bare
    # expressions (evaluated and discarded, never asserted).  Rather than
    # guess the absolute column count, we assert the behaviour under test:
    # the count grows by one on add_component and shrinks back on
    # remove_component.
    app = get_qapp()  # noqa

    d = Data(a=[1, 2, 3, 4, 5],
             b=[3.2, 1.2, 4.5, 3.3, 2.2],
             c=['e', 'b', 'c', 'a', 'f'], label='test')

    dc = DataCollection([d])

    gapp = GlueApplication(dc)

    viewer = gapp.new_data_viewer(TableViewer)
    viewer.add_data(d)

    data_changed = MagicMock()
    viewer.model.dataChanged.connect(data_changed)

    # layoutChanged needs to be emitted for the new/removed columns to be
    # registered (dataChanged is not enough)
    layout_changed = MagicMock()
    viewer.model.layoutChanged.connect(layout_changed)

    assert data_changed.call_count == 0
    assert layout_changed.call_count == 0
    n_cols = viewer.model.columnCount()

    d.add_component([3, 4, 5, 6, 2], 'z')

    assert data_changed.call_count == 1
    assert layout_changed.call_count == 1
    assert viewer.model.columnCount() == n_cols + 1

    d.remove_component(d.id['z'])

    assert data_changed.call_count == 2
    assert layout_changed.call_count == 2
    assert viewer.model.columnCount() == n_cols
def test_add_viewer(self, tmpdir):

    # Build a session with two datasets and one viewer showing the first
    d1 = Data(x=np.random.random((2, ) * self.ndim))
    d2 = Data(x=np.random.random((2, ) * self.ndim))
    app = GlueApplication(DataCollection([d1, d2]))

    w = app.new_data_viewer(self.widget_cls, data=d1)
    w.viewer_size = (300, 400)

    # Round-trip the session through a file on disk
    session_path = tmpdir.join('session.glu').strpath
    app.save_session(session_path, include_data=True)
    restored = GlueApplication.restore_session(session_path)

    # test session is restored correctly
    for viewer in restored.viewers:
        assert viewer[0].viewer_size == (300, 400)

    app.close()
    restored.close()
def main():
    """Interactive demo: run SubsetFacet on two small datasets and print
    the resulting subsets of each one."""

    from glue.core import Data, DataCollection
    from glue.qt import get_qapp

    app = get_qapp()

    d = Data(label='d1', x=[1, 2, 3], y=[2, 3, 4])
    d2 = Data(label='d2', z=[1, 2, 3], w=[2, 3, 4])
    dc = DataCollection([d, d2])

    SubsetFacet.facet(dc)

    # BUG FIX: these were Python 2 print statements, which are syntax errors
    # under Python 3 - use the print() function instead (same space-separated
    # output).
    print('d1 subsets')
    for s in d.subsets:
        print(s.label, s.subset_state, s.style.color)

    print('d2 subsets')
    for s in d2.subsets:
        print(s.label, s.subset_state, s.style.color)

    del app
def qglue():
    """
    Start an empty Glue application for interactive visualization.

    Returns
    -------
    ga : ``GlueApplication`` object
    """
    from glue.core import DataCollection
    from glue.app.qt import GlueApplication

    collection = DataCollection()

    # Suppress pesky Glue warnings.
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', GlueDeprecationWarning)
        ga = GlueApplication(data_collection=collection, maximized=False)
    return ga
def test_3d_world_link():
    """Should be able to grab pixel coords after linking world"""
    x, y, z = r(10), r(10), r(10)
    catalog = Data(label='cat', x=x, y=y, z=z)
    image = Data(label='im', inten=r((3, 3, 3)))

    collection = DataCollection([catalog, image])

    # Link each world axis of the image to the matching catalog column
    for axis, cid in zip((2, 1, 0), ('x', 'y', 'z')):
        collection.add_link(LinkSame(image.get_world_component_id(axis),
                                     catalog.id[cid]))

    # The catalog should now be able to derive the image pixel coordinates
    for axis, expected in zip((2, 1, 0), (x, y, z)):
        np.testing.assert_array_equal(
            catalog[image.get_pixel_component_id(axis)], expected)
def setup_method(self, method):

    # Fixture: a histogram layer artist drawing a subset of a small dataset
    self.viewer_state = HistogramViewerState()

    axes = plt.subplot(1, 1, 1)

    self.data = Data(x=[1, 2, 3], y=[2, 3, 4])
    self.subset = self.data.new_subset()
    self.subset.subset_state = self.data.id['x'] > 1

    collection = DataCollection([self.data])

    # TODO: The following line shouldn't be needed
    self.viewer_state.data_collection = collection

    self.artist = HistogramLayerArtist(axes, self.viewer_state,
                                       layer=self.subset)
    self.layer_state = self.artist.state
    self.viewer_state.layers.append(self.layer_state)

    # Profile every call so tests can count how often methods run
    self.call_counter = CallCounter()
    sys.setprofile(self.call_counter)
def setup_method(self, method):

    # Fixture: one numeric and one categorical component
    self.data = Data(x=[-3.2, 4.3, 2.2, 5.4, 7.2, -1.1, 2.3],
                     y=['a', 'f', 'd', 'e', 'f', 'f', 'a'],
                     label='test_data')
    self.data_collection = DataCollection([self.data])

    # Minimal state holder exposing the properties the helper manages
    class SimpleState(State):

        layer = CallbackProperty()
        comp = CallbackProperty()
        x_min = CallbackProperty()
        x_max = CallbackProperty()
        n_bin = CallbackProperty()

    self.state = SimpleState()

    self.helper = StateAttributeHistogramHelper(self.state, attribute='comp',
                                                lower='x_min', upper='x_max',
                                                n_bin='n_bin')

    self.state.data = self.data
def to_glue(self, label="yt", data_collection=None):
    """
    Takes the data in the FITSImageData instance and exports it to
    Glue (http://www.glueviz.org) for interactive
    analysis. Optionally add a *label*. If you are already within
    the Glue environment, you can pass a *data_collection* object,
    otherwise Glue will be started.
    """
    from glue.core import DataCollection, Data
    from glue.core.coordinates import coordinates_from_header
    from glue.qt.glue_application import GlueApplication

    image = Data(label=label)
    image.coords = coordinates_from_header(self.wcs.to_header())

    # One Glue component per field held by this instance
    for name, field in self.items():
        image.add_component(field.data, name)

    if data_collection is None:
        # Stand-alone use: launch a fresh Glue session around this image
        app = GlueApplication(DataCollection([image]))
        app.start()
    else:
        # Already inside Glue: just append to the caller's collection
        data_collection.append(image)
def test_not_all_points_inside_limits(tmpdir):
    # Regression test for a bug that occurred when not all points were inside
    # the visible limits and the color or size mode is linear.

    data1 = Data(label="Data", x=[1, 2, 3])

    ga = GlueApplication(DataCollection([data1]))
    ga.show()

    scatter = ga.new_data_viewer(VispyScatterViewer)
    scatter.add_data(data1)

    layer_state = scatter.state.layers[0]
    layer_state.color_mode = 'Linear'
    layer_state.size_mode = 'Linear'

    # Narrow the x limits so x=3 falls outside the visible range
    scatter.state.x_min = -0.1
    scatter.state.x_max = 2.1

    ga.close()
def test_wcs_autolink_spectral_cube():

    # This should link all coordinates

    wcs_a = WCS(naxis=3)
    wcs_a.wcs.ctype = 'DEC--TAN', 'FREQ', 'RA---TAN'
    wcs_a.wcs.set()

    cube_a = Data()
    cube_a.coords = WCSCoordinates(wcs=wcs_a)
    cube_a['x'] = np.ones((2, 3, 4))
    pz1, py1, px1 = cube_a.pixel_component_ids

    wcs_b = WCS(naxis=3)
    wcs_b.wcs.ctype = 'GLON-CAR', 'GLAT-CAR', 'FREQ'
    wcs_b.wcs.set()

    cube_b = Data()
    cube_b.coords = WCSCoordinates(wcs=wcs_b)
    cube_b['x'] = np.ones((2, 3, 4))
    pz2, py2, px2 = cube_b.pixel_component_ids

    links = wcs_autolink(DataCollection([cube_a, cube_b]))
    assert len(links) == 1

    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 6

    # Forward direction: all of cube_a's pixel axes feed each cube_b axis
    for index, target in enumerate([pz2, py2, px2]):
        assert link[index].get_to_id() == target
        assert link[index].get_from_ids() == [pz1, py1, px1]

    # Reverse direction
    for index, target in enumerate([pz1, py1, px1], start=3):
        assert link[index].get_to_id() == target
        assert link[index].get_from_ids() == [pz2, py2, px2]
def test_wcs_autolinking_of_2d_cube_with_temporal_and_spectral_axes_case_2():
    """
    A test to confirm that two 2D data cubes with matching number of
    dimensions where the one is spectral (air wavelength in this case) and the
    other one temporal is indeed autolinked, to test that the order does not
    matter.
    """
    wcs_a = WCS(naxis=2)
    wcs_a.wcs.ctype = 'AWAV', 'TIME'
    wcs_a.wcs.set()

    cube_a = Data(label='Data 1')
    cube_a.coords = wcs_a
    cube_a['x'] = np.ones((2, 3))
    py1, px1 = cube_a.pixel_component_ids

    # Same axes in the opposite order
    wcs_b = WCS(naxis=2)
    wcs_b.wcs.ctype = 'TIME', 'AWAV'
    wcs_b.wcs.set()

    cube_b = Data(label='Data 2')
    cube_b.coords = wcs_b
    cube_b['x'] = np.ones((2, 3))
    py2, px2 = cube_b.pixel_component_ids

    links = wcs_autolink(DataCollection([cube_a, cube_b]))
    assert len(links) == 1

    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 4

    for index, target in enumerate([px2, py2]):
        assert link[index].get_to_id() == target
        assert link[index].get_from_ids() == [px1, py1]

    for index, target in enumerate([px1, py1], start=2):
        assert link[index].get_to_id() == target
        assert link[index].get_from_ids() == [px2, py2]
def test_viewer_size(self, tmpdir):

    # regression test for #781
    # viewers were not restored with the right size

    d1 = Data(x=np.random.random((2,) * self.ndim))
    d2 = Data(x=np.random.random((2,) * self.ndim))
    app = GlueApplication(DataCollection([d1, d2]))

    w = app.new_data_viewer(self.widget_cls, data=d1)
    w.viewer_size = (300, 400)

    # Save and reload the session, then check the size survived
    session_path = tmpdir.join('session.glu').strpath
    app.save_session(session_path, include_data=True)
    restored = GlueApplication.restore_session(session_path)

    for viewer in restored.viewers:
        assert viewer[0].viewer_size == (300, 400)

    app.close()
    restored.close()
def test_table_title():

    # The table viewer title should track the most recently added dataset
    app = get_qapp()  # noqa

    first = Data(a=[1, 2, 3, 4, 5], label='test1')
    second = Data(a=[1, 2, 3, 4, 5], label='test2')
    gapp = GlueApplication(DataCollection([first, second]))

    table = gapp.new_data_viewer(TableViewer)
    assert table.windowTitle() == 'Table'

    table.add_data(first)
    assert table.windowTitle() == 'Table: test1'

    table.add_data(second)
    assert table.windowTitle() == 'Table: test2'
def main():
    """Demo: compute a cloud-masked NDVI over Lake Burley Griffin and browse
    the intermediate arrays in Glue."""

    engine = AnalyticsEngine()
    executor = ExecutionEngine()

    # Lake Burley Griffin
    dimensions = {'x': {'range': (149.07, 149.18)},
                  'y': {'range': (-35.32, -35.28)},
                  'time': {'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))}}

    arrays = engine.create_array(('LANDSAT_5', 'nbar'), ['nir', 'red'],
                                 dimensions, 'get_data')

    ndvi = engine.apply_bandmath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')
    pq = engine.create_array(('LANDSAT_5', 'pqa'), ['pixelquality'], dimensions, 'pq')
    mask = engine.apply_cloud_mask(ndvi, pq, 'mask')

    executor.execute_plan(engine.plan)

    plot(executor.cache['mask'])

    # Pull each computed array back out of the execution cache
    b30_result = executor.cache['get_data']['array_result']['red']
    b40_result = executor.cache['get_data']['array_result']['nir']
    ndvi_result = executor.cache['ndvi']['array_result']['ndvi']
    pq_result = executor.cache['pq']['array_result']['pixelquality']
    mask_result = executor.cache['mask']['array_result']['mask']

    # NOTE(review): the second axis is reversed here - presumably to flip the
    # images for display orientation in Glue; confirm against the viewer.
    b30_data = Data(x=b30_result[:, ::-1, :], label='B30')
    b40_data = Data(x=b40_result[:, ::-1, :], label='B40')
    ndvi_data = Data(x=ndvi_result[:, ::-1, :], label='ndvi')
    pq_data = Data(x=pq_result[:, ::-1, :], label='pq')
    mask_data = Data(x=mask_result[:, ::-1, :], label='mask')

    long_data = Data(x=b40_result.coords['x'], label='long')
    lat_data = Data(x=b40_result.coords['y'], label='lat')
    time_data = Data(x=b40_result.coords['time'], label='time')

    collection = DataCollection([mask_data, pq_data, ndvi_data, b30_data,
                                 b40_data, long_data, lat_data, time_data, ])

    app = GlueApplication(collection)
    app.start()
def test_layer_visibility_after_session(tmpdir):
    # Regression test for a bug that caused layers to be incorrectly visible
    # after saving and loading a session file.

    # Create fake data
    data = make_test_data()

    # Create fake session
    ga = GlueApplication(DataCollection([data]))
    ga.show()

    scatter = ga.new_data_viewer(VispyScatterViewer)
    scatter.add_data(data)

    # Hide the only layer, then round-trip through a session file
    scatter.state.layers[0].visible = False

    session_file = tmpdir.join('test_layer_visibility.glu').strpath
    ga.save_session(session_file)
    ga.close()

    ga2 = GlueApplication.restore_session(session_file)
    ga2.show()

    scatter_r = ga2.viewers[0][0]
    assert not scatter_r.state.layers[0].visible

    # Make sure the multiscat layer is also not visible (this was where the bug was)
    layer_artist = scatter_r.layers[0]
    assert not layer_artist._multiscat.layers[layer_artist.id]['visible']

    ga2.close()
def test_celestial_with_unknown_axes():
    # Regression test for a bug that caused n-d datasets with celestial axes
    # and axes with unknown physical types to not even be linked by celestial
    # axes.

    wcs_a = WCS(naxis=3)
    wcs_a.wcs.ctype = 'DEC--TAN', 'RA---TAN', 'SPAM'
    wcs_a.wcs.set()

    cube_a = Data()
    cube_a.coords = WCSCoordinates(wcs=wcs_a)
    cube_a['x'] = np.ones((2, 3, 4))
    pz1, py1, px1 = cube_a.pixel_component_ids

    wcs_b = WCS(naxis=3)
    wcs_b.wcs.ctype = 'GLON-CAR', 'FREQ', 'GLAT-CAR'
    wcs_b.wcs.set()

    cube_b = Data()
    cube_b.coords = WCSCoordinates(wcs=wcs_b)
    cube_b['x'] = np.ones((2, 3, 4))
    pz2, py2, px2 = cube_b.pixel_component_ids

    links = wcs_autolink(DataCollection([cube_a, cube_b]))
    assert len(links) == 1

    link = links[0]
    assert isinstance(link, MultiLink)
    assert len(link) == 4

    # Only two axes per dataset take part in the link
    for index, target in enumerate([px2, pz2]):
        assert link[index].get_to_id() == target
        assert link[index].get_from_ids() == [px1, py1]

    for index, target in enumerate([px1, py1], start=2):
        assert link[index].get_to_id() == target
        assert link[index].get_from_ids() == [px2, pz2]
def test_from_spectral_cube(spectral_cube_wcs):

    flux_values = np.random.random((4, 5, 3))

    cube = SpectralCube(flux_values * u.Jy, wcs=spectral_cube_wcs)

    collection = DataCollection()
    collection['spectral-cube'] = cube

    data = collection['spectral-cube']

    # The cube should have been translated into a single-component Data object
    assert isinstance(data, Data)
    assert len(data.main_components) == 1
    assert data.main_components[0].label == 'flux'
    assert_allclose(data['flux'], flux_values)
    component = data.get_component('flux')
    assert component.units == 'Jy'

    # Check round-tripping
    round_tripped = data.get_object(attribute='flux')
    assert isinstance(round_tripped, SpectralCube)
    assert_quantity_allclose(round_tripped.spectral_axis, [1, 2, 3, 4] * u.m / u.s)
    assert_quantity_allclose(round_tripped.filled_data[...], flux_values * u.Jy)
def test_wcs_autolink_dimensional_mismatch():

    # No links should be found because the WCS don't actually have well defined
    # physical types.

    spectrum_wcs = WCS(naxis=1)
    spectrum_wcs.wcs.ctype = ['FREQ']
    spectrum_wcs.wcs.set()

    spectrum = Data()
    spectrum.coords = WCSCoordinates(wcs=spectrum_wcs)
    spectrum['x'] = [1, 2, 3]

    cube_wcs = WCS(naxis=3)
    cube_wcs.wcs.ctype = 'DEC--TAN', 'FREQ', 'RA---TAN'
    cube_wcs.wcs.set()

    cube = Data()
    cube.coords = WCSCoordinates(wcs=cube_wcs)
    cube['x'] = np.ones((2, 3, 4))

    links = wcs_autolink(DataCollection([spectrum, cube]))
    assert len(links) == 0