Example #1
def test_component_unit_header(tmpdir):
    from astropy import units as u
    filename = tmpdir.join('test3.fits').strpath

    data = Data(x=np.arange(6).reshape(2, 3),
                y=(np.arange(6) * 2).reshape(2, 3),
                z=(np.arange(6) * 2).reshape(2, 3))

    wcs = WCS()
    data.coords = WCSCoordinates(wcs=wcs)

    unit1 = data.get_component("x").units = u.m / u.s
    unit2 = data.get_component("y").units = u.Jy
    unit3 = data.get_component("z").units = ""

    fits_writer(filename, data)

    with fits.open(filename) as hdulist:
        assert len(hdulist) == 3
        bunit = hdulist['x'].header.get('BUNIT')
        assert u.Unit(bunit) == unit1

        bunit = hdulist['y'].header.get('BUNIT')
        assert u.Unit(bunit) == unit2

        bunit = hdulist['z'].header.get('BUNIT')
        assert bunit == unit3
Example #2
def read_cube(filename, **kwargs):
    cube_data = None
    exclude_exts = []
    data_collection = []
    hdulist = fits.open(filename)
    try:
        cube_data = CubeData.read(hdulist)
    except CubeDataIOError as e:
        warnings.warn('No CubeData found in "{}": {}'.format(
            filename,
            str(e)
        ))

    if cube_data is not None:
        data = Data()
        try:
            data.coords = coordinates_from_wcs(cube_data.wcs)
        except AttributeError:
            # There is no wcs. Not to worry now.
            pass
        data.add_component(Component(cube_data), label="cube")
        data_collection.append(data)
        exclude_exts = cube_data.meta.get('hdu_ids')

    # Read in the rest of the FITS file.
    data_collection += _load_fits_generic(hdulist,
                                          exclude_exts=exclude_exts)
    return data_collection
Example #3
def load_mos_data(*args, **kwargs):
    path = "/".join(args[0].strip().split('/')[:-1])
    result = Data()

    # Read the table
    from astropy.table import Table

    table = Table.read(*args, format='ascii', **kwargs)

    # Loop through columns and make component list
    for column_name in table.columns:
        c = table[column_name]
        u = c.unit if hasattr(c, 'unit') else c.units
        m = dict()

        m['cell'] = c
        m['path'] = path

        nc = MOSComponent.autotyped(c, units=u, meta=m)
        result.add_component(nc, column_name)

    return result
Example #4
 def cube_to_data(self, cube,
                  output_label=None,
                  output_component_id=None):
     """
     Convert SpectralCube to final output.
     self.output_as_component is checked here.
     if self.output_as_component:
         add new component to self.data
     else:
         create new data and return it.
     :param cube: SpectralCube
     :param output_label: Name of new Data.
     :param output_component_id: label of new component
     :return:
     """
     original_data = self.data
     new_component = Component(cube._data.copy(), self.component_unit)
     if self.output_as_component:
         original_data.add_component(new_component, output_component_id)
         return None
     else:
         new_data = Data(label=output_label)
         new_data.coords = coordinates_from_header(cube.header)
         new_data.add_component(new_component, output_component_id)
         return new_data
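
A minimal usage sketch for the method above; op (an instance of the surrounding class with data, component_unit and output_as_component already set), cube and data_collection are hypothetical names:

new_data = op.cube_to_data(cube,
                           output_label='smoothed cube',
                           output_component_id='SMOOTHED')
if new_data is not None:  # None means the cube was added to op.data in place
    data_collection.append(new_data)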
Example #5
def geospatial_reader(filename):
    """
    Read in geospatial data using the rasterio package

    Parameters
    ----------
    filename: str
        The input file
    """

    data = Data()

    with rasterio.open(filename) as src:
        for iband, band in enumerate(src.read()):
            # TODO: determine the proper labels for each band

            # NB: We have to flip the raw data in the up-down direction
            # as Glue plots using the matplotlib imshow argument `origin='lower'`,
            # and otherwise the data comes up upside down.
            # WARNING: This may cause issues with other (non-matplotlib) image
            # viewers
            data.add_component(component=np.flipud(band.astype(float)),
                               label='Band {0}'.format(iband))

    return data
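
A minimal usage sketch; the GeoTIFF path is hypothetical and data_collection is assumed to be an existing glue DataCollection:

data = geospatial_reader('scene.tif')
data_collection.append(data)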
Example #6
    def setup_method(self, method):

        self.coords = MyCoords()
        self.image1 = Data(label='image1', x=[[1, 2], [3, 4]], y=[[4, 5], [2, 3]])
        self.image2 = Data(label='image2', a=[[3, 3], [2, 2]], b=[[4, 4], [3, 2]],
                           coords=self.coords)
        self.catalog = Data(label='catalog', c=[1, 3, 2], d=[4, 3, 3])
        self.hypercube = Data(label='hypercube', x=np.arange(120).reshape((2, 3, 4, 5)))

        # Create data versions with WCS coordinates
        self.image1_wcs = Data(label='image1_wcs', x=self.image1['x'],
                               coords=WCSCoordinates(wcs=WCS(naxis=2)))
        self.hypercube_wcs = Data(label='hypercube_wcs', x=self.hypercube['x'],
                                  coords=WCSCoordinates(wcs=WCS(naxis=4)))

        self.application = GlueApplication()

        self.session = self.application.session

        self.hub = self.session.hub

        self.data_collection = self.session.data_collection
        self.data_collection.append(self.image1)
        self.data_collection.append(self.image2)
        self.data_collection.append(self.catalog)
        self.data_collection.append(self.hypercube)
        self.data_collection.append(self.image1_wcs)
        self.data_collection.append(self.hypercube_wcs)

        self.viewer = self.application.new_data_viewer(ImageViewer)

        self.data_collection.register_to_hub(self.hub)
        self.viewer.register_to_hub(self.hub)

        self.options_widget = self.viewer.options_widget()
Example #7
def test_component_id_combo_helper_add():

    # Make sure that when adding a component, even if a data collection is
    # not present, the choices still get updated

    callback = MagicMock()

    state = ExampleState()
    state.add_callback('combo', callback)

    dc = DataCollection([])

    helper = ComponentIDComboHelper(state, 'combo')

    assert selection_choices(state, 'combo') == ""

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    callback.reset_mock()

    dc.append(data1)
    helper.append_data(data1)

    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "x:y"

    data1.add_component([7, 8, 9], 'z')

    # Should get notification since choices have changed
    callback.assert_called_once_with(0)
    callback.reset_mock()

    assert selection_choices(state, 'combo') == "x:y:z"
Example #8
def test_data_collection_combo_helper():

    callback = MagicMock()
    state = ExampleState()
    state.add_callback('combo', callback)

    dc = DataCollection([])

    helper = DataCollectionComboHelper(state, 'combo', dc)  # noqa

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    assert callback.call_count == 0

    dc.append(data1)

    assert callback.call_count == 1

    assert selection_choices(state, 'combo') == "data1"

    data1.label = 'mydata1'
    assert selection_choices(state, 'combo') == "mydata1"

    assert callback.call_count == 2

    dc.remove(data1)

    assert callback.call_count == 3

    assert selection_choices(state, 'combo') == ""
Example #9
def test_clone_wcs_link():

    # Make sure that WCSLink can be serialized/deserialized

    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = WCSCoordinates(wcs=wcs1)
    data1['x'] = np.ones((2, 3))

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'GLON-CAR', 'FREQ', 'GLAT-CAR'
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = WCSCoordinates(wcs=wcs2)
    data2['x'] = np.ones((2, 3, 4))

    link1 = WCSLink(data1, data2)
    link2 = clone(link1)

    assert isinstance(link2, WCSLink)
    assert link2.data1.label == 'Data 1'
    assert link2.data2.label == 'Data 2'
Example #10
def _parse_data_dict(data, label):
    result = Data(label=label)

    for component_label, component in data.items():
        result.add_component(component, component_label)

    return [result]
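
A minimal usage sketch; the arrays are made up for illustration:

import numpy as np

datasets = _parse_data_dict({'x': np.arange(5), 'y': np.arange(5) ** 2},
                            label='example')
# datasets is a one-element list holding a Data object with components 'x' and 'y'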
Example #11
def test_link_editor():

    # Make sure that the WCSLink works properly in the link editor and is
    # returned unmodified. The main way to check that is just to make sure that
    # the link round-trips when going through EditableLinkFunctionState.

    wcs1 = WCS(naxis=2)
    wcs1.wcs.ctype = 'DEC--TAN', 'RA---TAN'
    wcs1.wcs.set()

    data1 = Data(label='Data 1')
    data1.coords = WCSCoordinates(wcs=wcs1)
    data1['x'] = np.ones((2, 3))

    wcs2 = WCS(naxis=3)
    wcs2.wcs.ctype = 'GLON-CAR', 'FREQ', 'GLAT-CAR'
    wcs2.wcs.set()

    data2 = Data(label='Data 2')
    data2.coords = WCSCoordinates(wcs=wcs2)
    data2['x'] = np.ones((2, 3, 4))

    link1 = WCSLink(data1, data2)

    link2 = EditableLinkFunctionState(link1).link

    assert isinstance(link2, WCSLink)
    assert link2.data1.label == 'Data 1'
    assert link2.data2.label == 'Data 2'
Example #12
def read_cube(filename, **kwargs):
    cube_data = CubeData.read(filename)

    data = Data()
    data.add_component(Component(cube_data), label="cube")
    print("Loaded successfully")

    return data
Example #13
class TestImporter():

    def setup_method(self, method):
        self.importer = MySubsetMaskImporter()
        self.importer.filename = 'test-filename'
        self.importer.reader = MagicMock()
        self.data = Data(x=[1, 2, 3])
        self.data_collection = DataCollection([self.data])

    def test_single_valid(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0]))])
        self.importer.run(self.data, self.data_collection)
        assert len(self.data_collection.subset_groups) == 1
        assert_equal(self.data.subsets[0].to_mask(), [0, 1, 0])

    def test_multiple_valid(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0])),
                                                         ('subset 2', np.array([1, 1, 0]))])
        self.importer.run(self.data, self.data_collection)
        assert len(self.data_collection.subset_groups) == 2
        assert_equal(self.data.subsets[0].to_mask(), [0, 1, 0])
        assert_equal(self.data.subsets[1].to_mask(), [1, 1, 0])

    def test_missing_masks(self):
        self.importer.reader.return_value = OrderedDict()
        with pytest.raises(ValueError) as exc:
            self.importer.run(self.data, self.data_collection)
        assert exc.value.args[0] == "No subset masks were returned"

    def test_single_invalid_shape(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0, 1]))])
        with pytest.raises(ValueError) as exc:
            self.importer.run(self.data, self.data_collection)
        assert exc.value.args[0].replace('L', '') == "Mask shape (4,) does not match data shape (3,)"

    def test_multiple_inconsistent_shapes(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0])),
                                                         ('subset 2', np.array([0, 1, 0, 1]))])
        with pytest.raises(ValueError) as exc:
            self.importer.run(self.data, self.data_collection)
        assert exc.value.args[0] == "Not all subsets have the same shape"

    def test_subset_single(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0]))])
        subset = self.data.new_subset()
        assert_equal(self.data.subsets[0].to_mask(), [0, 0, 0])
        self.importer.run(subset, self.data_collection)
        assert_equal(self.data.subsets[0].to_mask(), [0, 1, 0])

    def test_subset_multiple(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0])),
                                                         ('subset 2', np.array([1, 1, 0]))])
        subset = self.data.new_subset()
        with pytest.raises(ValueError) as exc:
            self.importer.run(subset, self.data_collection)
        assert exc.value.args[0] == 'Can only read in a single subset when importing into a subset'
Example #14
def deimos_spectrum1D_reader(file_name):
    """
    Data loader for Keck/DEIMOS 1D spectra.

    This loads the 'Bxspf-B' (extension 1)
    and 'Bxspf-R' (extension 2) and appends them
    together to produce the combined Red/Blue Spectrum
    along with their Wavelength and Inverse Variance
    arrays.
    """
    with fits.open(file_name) as hdulist:
        data = Data(label='1D Spectrum')
        hdulist[1].header['CTYPE1'] = 'WAVE'
        hdulist[1].header['CUNIT1'] = 'Angstrom'
        data.header = hdulist[1].header
        wcs = WCS(hdulist[1].header)
        data.coords = coordinates_from_wcs(wcs)

        full_wl = np.append(hdulist[1].data['LAMBDA'][0], hdulist[2].data['LAMBDA'][0])
        full_spec = np.append(hdulist[1].data['SPEC'][0], hdulist[2].data['SPEC'][0])
        full_ivar = np.append(hdulist[1].data['IVAR'][0], hdulist[2].data['IVAR'][0])

        data.add_component(full_wl, 'Wavelength')
        data.add_component(full_spec, 'Flux')
        data.add_component(1 / np.sqrt(full_ivar), 'Uncertainty')

    return data
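
A sketch of how a reader like this is typically registered so glue can offer it as a data loader; the label string is arbitrary and the decorator comes from glue.config:

from glue.config import data_factory

@data_factory('DEIMOS 1D Spectrum')
def deimos_spectrum1D_reader(file_name):
    ...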
Example #15
def _load_GALFAHI_data_LowRes(filename, **kwargs):
    # Data loader customized for GALFA-HI data cube
    # Resize the data cube into lower resolution in velocity/space

    def _bin_cube(cube, factor, axis_label):
        # resize the cube to lower resolution
        shape = cube.shape
        if axis_label == 'VELO':
            new_shape = (shape[0] // factor, factor, shape[1], shape[2])
            return cube.reshape(new_shape).mean(axis=1)
        elif axis_label == 'RADEC':
            new_shape = (shape[0], shape[1] // factor, factor, shape[2] // factor, factor)
            return cube.reshape(new_shape).mean(axis=4).mean(axis=2)
        else:
            return cube

    # change the header for those cubes that have been binned into low resolution
    def _get_new_header(header, factor, axis_label):
        new_header = header
        if axis_label == 'VELO':
            new_header['NAXIS3'] = header['NAXIS3'] // factor
            new_header['CRVAL3'] = header['CRVAL3']
            new_header['CRPIX3'] = float(header['CRPIX3'] / factor)
            new_header['CDELT3'] = header['CDELT3'] * factor
        elif axis_label == 'RADEC':
            for ax in [1, 2]:
                new_header['NAXIS%d' % ax] = header['NAXIS%d' % ax] // factor
                new_header['CRVAL%d' % ax] = header['CRVAL%d' % ax]
                new_header['CRPIX%d' % ax] = float(header['CRPIX%d' % ax] / factor)
                new_header['CDELT%d' % ax] = header['CDELT%d' % ax] * factor
        else:
            new_header = header
        # m/s --> km/s
        new_header['CDELT3'] = new_header['CDELT3'] * 1e-3
        return new_header

    def _get_cube_center(header, cubeshape):
        # center of the cube (RA, Dec) in degrees
        ra = header['CRVAL1'] + header['CDELT1'] * (np.arange(cubeshape[2]) + 0.5 - header['CRPIX1'])
        dec = header['CRVAL2'] + header['CDELT2'] * (np.arange(cubeshape[1]) + 0.5 - header['CRPIX2'])
        return np.mean(ra), np.mean(dec)

    data_list = []
    # add 3 data objects with different resolutions:
    for factor, axis_label in zip([4, 16, 2], ['VELO', 'VELO', 'RADEC']):
        cube = fits.getdata(filename)
        header = fits.getheader(filename)
        cen_ra, cen_dec = _get_cube_center(header, cube.shape)
        new_header = _get_new_header(header, factor, axis_label)
        cube_name = 'G_%d%+.2fradec_%.1fkm/s_%.1fa' % (cen_ra, cen_dec, new_header['CDELT3'], new_header['CDELT2']*60.)

        data = Data()
        data.coords = coordinates_from_header(new_header)
        data.add_component(_bin_cube(cube, factor, axis_label), cube_name)
        data.label  = cube_name
        data_list.append(data)
        del data, cube, header
    return data_list
Example #16
 def test_numerical_data_changed(self):
     self.init_draw_count()
     self.init_subset()
     assert self.draw_count == 0
     self.viewer.add_data(self.data)
     assert self.draw_count == 1
     data = Data()
     for cid in self.data.visible_components:
         data.add_component(self.data[cid] * 2, cid.label)
     self.data.update_values_from_data(data)
     assert self.draw_count == 2
Example #17
def make_test_data():

    data = Data(label="Test Cube Data")

    np.random.seed(12345)

    for letter in 'abc':
        comp = Component(np.random.random((10, 10, 10)))
        data.add_component(comp, letter)

    return data
Example #18
def test_1d_world_link():
    x, y = r(10), r(10)
    d1 = Data(label='d1', x=x)
    d2 = Data(label='d2', y=y)
    dc = DataCollection([d1, d2])

    dc.add_link(LinkSame(d2.get_world_component_id(0), d1.id['x']))

    assert d2.get_world_component_id(0) in d1.components
    np.testing.assert_array_equal(d1[d2.get_world_component_id(0)], x)
    np.testing.assert_array_equal(d1[d2.get_pixel_component_id(0)], x)
Example #19
def make_test_data():

    data = Data(label="Test Cat Data 1")

    np.random.seed(12345)

    for letter in 'abcdefxyz':
        comp = Component(np.random.random(100))
        data.add_component(comp, letter)

    return data
Example #20
def collapse_to_1d(subset, data_collection):
    mask = subset.to_mask()
    md = np.ma.masked_array(subset.data['FLUX'], mask=mask)
    mdd = md.reshape((-1, md.shape[1] * md.shape[2]))
    spec = np.sum(mdd, axis=1)
    spec_data = Data(flux=spec, label=':'.join((subset.label,
                                                subset.data.label,
                                                'collapsed')))
    wave_component = subset.data['Wave'][:, md.shape[1] // 2, md.shape[2] // 2]
    spec_data.add_component(component=wave_component, label='Wave')
    spec_data.add_component(component=spec, label='FLUX')
    data_collection.append(spec_data)
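
A minimal usage sketch, assuming dc is a glue DataCollection whose first dataset is a cube with 'FLUX' and 'Wave' components and at least one subset defined:

collapse_to_1d(dc[0].subsets[0], dc)  # appends the collapsed spectrum to the collection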
Example #21
def read_tiff_metadata(filename):
    """ Read a TIFF image, looking for .tfw metadata """
    base, ext = os.path.splitext(filename)
    data = np.flipud(np.array(Image.open(filename).convert('L')))

    result = Data()

    if os.path.exists(base + '.tfw'):
        result.coords = tfw_to_coords(base + '.tfw', data.shape)

    result.add_component(data, 'map')
    return result
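
A minimal usage sketch; the file name is hypothetical and tfw_to_coords is assumed to be defined alongside this reader:

data = read_tiff_metadata('site_map.tif')  # picks up site_map.tfw if it exists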
Example #22
    def test_hypercube_world(self):

        # Check defaults when we add data

        wcs = WCS(naxis=4)
        hypercube2 = Data()
        hypercube2.coords = WCSCoordinates(wcs=wcs)
        hypercube2.add_component(np.random.random((2, 3, 4, 5)), 'a')

        self.data_collection.append(hypercube2)

        self.viewer.add_data(hypercube2)
Example #23
def spectral_cube_to_data(cube, label=None):

    if isinstance(cube, SpectralCube):
        cube = StokesSpectralCube({'I': cube})

    result = Data(label=label)
    result.coords = coordinates_from_wcs(cube.wcs)

    for component in cube.components:
        data = getattr(cube, component).unmasked_data[...]
        result.add_component(data, label='STOKES {0}'.format(component))

    return result
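
A minimal usage sketch using the spectral-cube package; the file name is hypothetical:

from spectral_cube import SpectralCube

cube = SpectralCube.read('stokes_cube.fits')
data = spectral_cube_to_data(cube, label='stokes cube')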
Example #24
def export_glue(ds, data, name):

    from glue.core import Data, DataCollection
    from glue.qt.glue_application import GlueApplication

    d = Data(label=name)
    d.add_component(ytComponent(data, ds, name), label='x')

    dc = DataCollection(d)

    ga = GlueApplication(dc)
    ga.start()
Example #25
    def setup_method(self, method):
        x = np.arange(80).reshape(8, 10)
        d = Data(x=x, label='data')
        s = d.new_subset()
        s.subset_state = d.id['x'] > 30
        print(s.to_mask())

        self.subset = s
        self.x = x
        self.im = SubsetImage(s, np.s_[:, :])

        m = (s.to_mask() * 127).astype(np.uint8)
        self.base = BaseImage.BaseImage(data_np=m)
Example #26
def acs_cutout_image_reader(file_name):
    """
    Data loader for the ACS cut-outs for the DEIMOS spectra.

    The cutouts contain only the image.
    """

    hdulist = fits.open(file_name)
    data = Data(label='ACS Cutout Image')
    data.coords = coordinates_from_header(hdulist[0].header)
    data.header = hdulist[0].header
    data.add_component(hdulist[0].data, 'Flux')

    return data
Example #27
def nirspec_spectrum1d_reader(file_name):
    """
    Data loader for MOSViz 1D spectrum.

    This function extracts the DATA, QUALITY, and VAR
    extensions and returns them as a glue Data object.

    It then uses the header keywords of the DATA extension
    to determine the wavelengths.
    """

    hdulist = fits.open(file_name)

    # make wavelength a separate component in addition to coordinate
    # so you can plot it on the x axis
    # linear dispersion: start at CRVAL1 and step by CDELT1 per pixel
    wavelength = np.linspace(hdulist['DATA'].header['CRVAL1'],
                             hdulist['DATA'].header['CRVAL1'] +
                             hdulist['DATA'].header['CDELT1'] * (hdulist['DATA'].header['NAXIS1'] - 1),
                             hdulist['DATA'].header['NAXIS1'])[::-1]

    data = Data(label='1D Spectrum')
    data.header = hdulist['DATA'].header
    data.add_component(wavelength, 'Wavelength')
    data.add_component(hdulist['DATA'].data, 'Flux')
    data.add_component(hdulist['VAR'].data, 'Uncertainty')

    return data
Example #28
def test_2d_world_link():
    """Should be able to grab pixel coords after linking world"""

    x, y = r(10), r(10)
    cat = Data(label='cat', x=x, y=y)
    im = Data(label='im', inten=r((3, 3)))

    dc = DataCollection([cat, im])

    dc.add_link(LinkSame(im.get_world_component_id(0), cat.id['x']))
    dc.add_link(LinkSame(im.get_world_component_id(1), cat.id['y']))

    np.testing.assert_array_equal(cat[im.get_pixel_component_id(0)], x)
    np.testing.assert_array_equal(cat[im.get_pixel_component_id(1)], y)
Example #29
    def reverse_add_data(self, data_item):
        """
        Adds data from specviz to glue.

        Parameters
        ----------
        data_item : :class:`specviz.core.items.DataItem`
            The data item recently added to model.
        """
        new_data = Data(label=data_item.name)
        new_data.coords = coordinates_from_header(data_item.spectrum.wcs)

        flux_component = Component(data_item.spectrum.flux,
                                   data_item.spectrum.flux.unit)
        new_data.add_component(flux_component, "Flux")

        disp_component = Component(data_item.spectrum.spectral_axis,
                                   data_item.spectrum.spectral_axis.unit)
        new_data.add_component(disp_component, "Dispersion")

        if data_item.spectrum.uncertainty is not None:
            uncert_component = Component(data_item.spectrum.uncertainty.array,
                                         data_item.spectrum.uncertainty.unit)
            new_data.add_component(uncert_component, "Uncertainty")

        self._session.data_collection.append(new_data)
Example #30
def make_test_data():

    data = Data(label="Test Cube Data")

    np.random.seed(12345)

    for letter in 'abc':
        comp = Component(np.random.random((10, 10, 10)))
        data.add_component(comp, letter)

    # make sure one component key is primary
    data.add_component(Component(np.random.random((10, 10, 10))), 'PRIMARY')

    return data
Example #31
class TestHistogramViewer(object):
    def setup_method(self, method):

        self.data = Data(label='d1',
                         x=[3.4, 2.3, -1.1, 0.3],
                         y=['a', 'b', 'c', 'a'])

        self.app = GlueApplication()
        self.session = self.app.session
        self.hub = self.session.hub

        self.data_collection = self.session.data_collection
        self.data_collection.append(self.data)

        self.viewer = self.app.new_data_viewer(HistogramViewer)

    def teardown_method(self, method):
        self.viewer.close()
        self.viewer = None
        self.app.close()
        self.app = None

    def test_basic(self):

        viewer_state = self.viewer.state

        # Check defaults when we add data
        self.viewer.add_data(self.data)

        assert combo_as_string(
            self.viewer.options_widget().ui.combosel_x_att
        ) == 'Main components:x:y:Coordinate components:Pixel Axis 0 [x]'

        assert viewer_state.x_att is self.data.id['x']
        assert viewer_state.x_min == -1.1
        assert viewer_state.x_max == 3.4
        assert viewer_state.y_min == 0.0
        assert viewer_state.y_max == 1.2

        assert viewer_state.hist_x_min == -1.1
        assert viewer_state.hist_x_max == 3.4
        assert viewer_state.hist_n_bin == 15

        assert not viewer_state.cumulative
        assert not viewer_state.normalize

        assert not viewer_state.x_log
        assert not viewer_state.y_log

        assert len(viewer_state.layers) == 1

        # Change to categorical component and check new values

        viewer_state.x_att = self.data.id['y']

        assert viewer_state.x_min == -0.5
        assert viewer_state.x_max == 2.5
        assert viewer_state.y_min == 0.0
        assert viewer_state.y_max == 2.4

        assert viewer_state.hist_x_min == -0.5
        assert viewer_state.hist_x_max == 2.5
        assert viewer_state.hist_n_bin == 3

        assert not viewer_state.cumulative
        assert not viewer_state.normalize

        assert not viewer_state.x_log
        assert not viewer_state.y_log

    def test_log_labels(self):

        # Regression test to make sure the labels are correctly changed to log
        # when the x-axis is in log space.

        viewer_state = self.viewer.state
        data = Data(x=np.logspace(-5, 5, 10000))
        self.data_collection.append(data)

        self.viewer.add_data(data)
        viewer_state.x_log = True

        process_events()

        labels = [
            x.get_text() for x in self.viewer.axes.xaxis.get_ticklabels()
        ]

        # Different Matplotlib versions return slightly different
        # labels, but the ones below should be present regardless
        # of Matplotlib version.
        expected_present = [
            '$\\mathdefault{10^{-5}}$', '$\\mathdefault{10^{-3}}$',
            '$\\mathdefault{10^{-1}}$', '$\\mathdefault{10^{1}}$',
            '$\\mathdefault{10^{3}}$', '$\\mathdefault{10^{5}}$'
        ]

        for label in expected_present:
            assert label in labels

    def test_flip(self):

        viewer_state = self.viewer.state

        self.viewer.add_data(self.data)

        assert viewer_state.x_min == -1.1
        assert viewer_state.x_max == 3.4

        self.viewer.options_widget().button_flip_x.click()

        assert viewer_state.x_min == 3.4
        assert viewer_state.x_max == -1.1

    def test_remove_data(self):
        self.viewer.add_data(self.data)
        assert combo_as_string(
            self.viewer.options_widget().ui.combosel_x_att
        ) == 'Main components:x:y:Coordinate components:Pixel Axis 0 [x]'
        self.data_collection.remove(self.data)
        assert combo_as_string(
            self.viewer.options_widget().ui.combosel_x_att) == ''

    def test_update_component_updates_title(self):
        self.viewer.add_data(self.data)
        assert self.viewer.windowTitle() == '1D Histogram'
        self.viewer.state.x_att = self.data.id['y']
        assert self.viewer.windowTitle() == '1D Histogram'

    def test_combo_updates_with_component_add(self):
        self.viewer.add_data(self.data)
        self.data.add_component([3, 4, 1, 2], 'z')
        assert self.viewer.state.x_att is self.data.id['x']
        assert combo_as_string(
            self.viewer.options_widget().ui.combosel_x_att
        ) == 'Main components:x:y:z:Coordinate components:Pixel Axis 0 [x]'

    def test_nonnumeric_first_component(self):
        # regression test for #208. Shouldn't complain if
        # first component is non-numerical
        data = core.Data()
        data.add_component(['a', 'b', 'c'], label='c1')
        data.add_component([1, 2, 3], label='c2')
        self.data_collection.append(data)
        self.viewer.add_data(data)

    def test_nan_component(self):
        # regression test for case when all values are NaN in a component
        data = core.Data()
        data.add_component([np.nan, np.nan, np.nan], label='c1')
        self.data_collection.append(data)
        self.viewer.add_data(data)

    def test_histogram_values(self):

        # Check the actual values of the histograms

        viewer_state = self.viewer.state

        self.viewer.add_data(self.data)

        # Numerical attribute

        viewer_state.hist_x_min = -5
        viewer_state.hist_x_max = 5
        viewer_state.hist_n_bin = 4

        assert_allclose(self.viewer.state.y_max, 2.4)

        assert_allclose(self.viewer.layers[0].state.histogram[1], [0, 1, 2, 1])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])

        cid = self.data.main_components[0]
        self.data_collection.new_subset_group('subset 1', cid < 2)

        assert_allclose(self.viewer.layers[1].state.histogram[1], [0, 1, 1, 0])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])

        viewer_state.normalize = True

        assert_allclose(self.viewer.state.y_max, 0.24)
        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0, 0.1, 0.2, 0.1])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])
        assert_allclose(self.viewer.layers[1].state.histogram[1],
                        [0, 0.2, 0.2, 0])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])

        viewer_state.cumulative = True

        assert_allclose(self.viewer.state.y_max, 1.2)
        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0, 0.25, 0.75, 1.0])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])
        assert_allclose(self.viewer.layers[1].state.histogram[1],
                        [0, 0.5, 1.0, 1.0])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])

        viewer_state.normalize = False

        assert_allclose(self.viewer.state.y_max, 4.8)
        assert_allclose(self.viewer.layers[0].state.histogram[1], [0, 1, 3, 4])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])
        assert_allclose(self.viewer.layers[1].state.histogram[1], [0, 1, 2, 2])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-5, -2.5, 0, 2.5, 5])

        viewer_state.cumulative = False

        # Categorical attribute

        viewer_state.x_att = self.data.id['y']

        formatter = self.viewer.axes.xaxis.get_major_formatter()
        xlabels = [formatter.format_data(pos) for pos in range(3)]
        assert xlabels == ['a', 'b', 'c']

        assert_allclose(self.viewer.state.y_max, 2.4)
        assert_allclose(self.viewer.layers[0].state.histogram[1], [2, 1, 1])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])
        assert_allclose(self.viewer.layers[1].state.histogram[1], [1, 0, 1])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])

        viewer_state.normalize = True

        assert_allclose(self.viewer.state.y_max, 0.6)
        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0.5, 0.25, 0.25])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])
        assert_allclose(self.viewer.layers[1].state.histogram[1],
                        [0.5, 0, 0.5])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])

        viewer_state.cumulative = True

        assert_allclose(self.viewer.state.y_max, 1.2)
        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0.5, 0.75, 1])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])
        assert_allclose(self.viewer.layers[1].state.histogram[1],
                        [0.5, 0.5, 1])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])

        viewer_state.normalize = False

        assert_allclose(self.viewer.state.y_max, 4.8)
        assert_allclose(self.viewer.layers[0].state.histogram[1], [2, 3, 4])
        assert_allclose(self.viewer.layers[0].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])
        assert_allclose(self.viewer.layers[1].state.histogram[1], [1, 1, 2])
        assert_allclose(self.viewer.layers[1].state.histogram[0],
                        [-0.5, 0.5, 1.5, 2.5])

        # TODO: add tests for log

    def test_apply_roi(self):

        # Check that when doing an ROI selection, the ROI clips to the bin edges
        # outside the selection

        viewer_state = self.viewer.state

        self.viewer.add_data(self.data)

        viewer_state.hist_x_min = -5
        viewer_state.hist_x_max = 5
        viewer_state.hist_n_bin = 4

        roi = XRangeROI(-0.2, 0.1)

        assert len(self.viewer.layers) == 1

        self.viewer.apply_roi(roi)

        assert len(self.viewer.layers) == 2

        assert_allclose(self.viewer.layers[0].state.histogram[1], [0, 1, 2, 1])
        assert_allclose(self.viewer.layers[1].state.histogram[1], [0, 1, 2, 0])

        assert_allclose(self.data.subsets[0].to_mask(), [0, 1, 1, 1])

        state = self.data.subsets[0].subset_state
        assert isinstance(state, RangeSubsetState)

        assert state.lo == -2.5
        assert state.hi == 2.5

        # TODO: add a similar test in log space

    def test_apply_roi_categorical(self):

        # Check that when doing an ROI selection, the ROI clips to the bin edges
        # outside the selection

        viewer_state = self.viewer.state

        self.viewer.add_data(self.data)

        viewer_state.x_att = self.data.id['y']

        roi = XRangeROI(0.3, 0.9)

        assert len(self.viewer.layers) == 1

        self.viewer.apply_roi(roi)

        assert len(self.viewer.layers) == 2

        assert_allclose(self.viewer.layers[0].state.histogram[1], [2, 1, 1])
        assert_allclose(self.viewer.layers[1].state.histogram[1], [2, 1, 0])

        assert_allclose(self.data.subsets[0].to_mask(), [1, 1, 0, 1])

        state = self.data.subsets[0].subset_state
        assert isinstance(state, CategoricalROISubsetState)

        assert_equal(state.roi.categories, ['a', 'b'])

    def test_apply_roi_empty(self):
        # Make sure that doing an ROI selection on an empty viewer doesn't
        # produce error messages
        roi = XRangeROI(-0.2, 0.1)
        self.viewer.apply_roi(roi)

    def test_axes_labels(self):

        viewer_state = self.viewer.state

        self.viewer.add_data(self.data)

        assert self.viewer.axes.get_xlabel() == 'x'
        assert self.viewer.axes.get_ylabel() == 'Number'

        viewer_state.x_log = True

        assert self.viewer.axes.get_xlabel() == 'Log x'
        assert self.viewer.axes.get_ylabel() == 'Number'

        viewer_state.x_att = self.data.id['y']

        assert self.viewer.axes.get_xlabel() == 'y'
        assert self.viewer.axes.get_ylabel() == 'Number'

        viewer_state.normalize = True

        assert self.viewer.axes.get_xlabel() == 'y'
        assert self.viewer.axes.get_ylabel() == 'Normalized number'

        viewer_state.normalize = False
        viewer_state.cumulative = True

        assert self.viewer.axes.get_xlabel() == 'y'
        assert self.viewer.axes.get_ylabel() == 'Number'

    def test_y_min_y_max(self):

        # Regression test for a bug that caused y_max to not be set correctly
        # when multiple subsets were present and after turning on normalization
        # after switching to a different attribute from that used to make the
        # selection.

        viewer_state = self.viewer.state
        self.viewer.add_data(self.data)

        self.data.add_component([3.4, 3.5, 10.2, 20.3], 'z')

        viewer_state.x_att = self.data.id['x']

        cid = self.data.main_components[0]
        self.data_collection.new_subset_group('subset 1', cid < 1)

        cid = self.data.main_components[0]
        self.data_collection.new_subset_group('subset 2', cid < 2)

        cid = self.data.main_components[0]
        self.data_collection.new_subset_group('subset 3', cid < 3)

        assert_allclose(self.viewer.state.y_min, 0)
        assert_allclose(self.viewer.state.y_max, 1.2)

        viewer_state.x_att = self.data.id['z']

        assert_allclose(self.viewer.state.y_min, 0)
        assert_allclose(self.viewer.state.y_max, 2.4)

        viewer_state.normalize = True

        assert_allclose(self.viewer.state.y_min, 0)
        assert_allclose(self.viewer.state.y_max, 0.5325443786982249)

    def test_update_when_limits_unchanged(self):

        # Regression test for glue-viz/glue#1010 - this bug caused histograms
        # to not be recomputed if the attribute changed but the limits and
        # number of bins did not.

        viewer_state = self.viewer.state

        self.viewer.add_data(self.data)

        viewer_state.x_att = self.data.id['y']
        viewer_state.hist_x_min = -10.1
        viewer_state.hist_x_max = +10
        viewer_state.hist_n_bin = 5

        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0, 0, 3, 1, 0])

        viewer_state.x_att = self.data.id['x']
        viewer_state.hist_x_min = -10.1
        viewer_state.hist_x_max = +10
        viewer_state.hist_n_bin = 5

        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0, 0, 2, 2, 0])

        viewer_state.x_att = self.data.id['y']

        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0, 0, 3, 1, 0])

        viewer_state.x_att = self.data.id['x']

        assert_allclose(self.viewer.layers[0].state.histogram[1],
                        [0, 0, 2, 2, 0])

    def test_component_replaced(self):

        # regression test for 508 - if a component ID is replaced, we should
        # make sure that the new component ID is selected if the old component
        # ID was selected

        self.viewer.add_data(self.data)
        self.viewer.state.x_att = self.data.id['x']
        test = ComponentID('test')
        self.data.update_id(self.viewer.state.x_att, test)
        assert self.viewer.state.x_att is test
        assert combo_as_string(
            self.viewer.options_widget().ui.combosel_x_att
        ) == 'Main components:test:y:Coordinate components:Pixel Axis 0 [x]'

    def test_nbin_override_persists_over_numerical_attribute_change(self):

        # regression test for #398

        self.data.add_component([3, 4, 1, 2], 'z')

        self.viewer.add_data(self.data)
        self.viewer.state.x_att = self.data.id['x']
        self.viewer.state.hist_n_bin = 7
        self.viewer.state.x_att = self.data.id['z']
        assert self.viewer.state.hist_n_bin == 7

    @pytest.mark.parametrize('protocol', [0, 1])
    def test_session_back_compat(self, protocol):

        filename = os.path.join(DATA, 'histogram_v{0}.glu'.format(protocol))

        with open(filename, 'r') as f:
            session = f.read()

        state = GlueUnSerializer.loads(session)

        ga = state.object('__main__')

        dc = ga.session.data_collection

        assert len(dc) == 1

        assert dc[0].label == 'data'

        viewer1 = ga.viewers[0][0]
        assert len(viewer1.state.layers) == 2
        assert viewer1.state.x_att is dc[0].id['a']
        assert_allclose(viewer1.state.x_min, 0)
        assert_allclose(viewer1.state.x_max, 9)
        assert_allclose(viewer1.state.y_min, 0)
        assert_allclose(viewer1.state.y_max, 2.4)
        assert_allclose(viewer1.state.hist_x_min, 0)
        assert_allclose(viewer1.state.hist_x_max, 9)
        assert_allclose(viewer1.state.hist_n_bin, 6)
        assert not viewer1.state.x_log
        assert not viewer1.state.y_log
        assert viewer1.state.layers[0].visible
        assert not viewer1.state.layers[1].visible
        assert not viewer1.state.cumulative
        assert not viewer1.state.normalize

        viewer2 = ga.viewers[0][1]
        assert viewer2.state.x_att is dc[0].id['b']
        assert_allclose(viewer2.state.x_min, 2)
        assert_allclose(viewer2.state.x_max, 16)
        assert_allclose(viewer2.state.y_min, 0)
        assert_allclose(viewer2.state.y_max, 1.2)
        assert_allclose(viewer2.state.hist_x_min, 2)
        assert_allclose(viewer2.state.hist_x_max, 16)
        assert_allclose(viewer2.state.hist_n_bin, 8)
        assert not viewer2.state.x_log
        assert not viewer2.state.y_log
        assert viewer2.state.layers[0].visible
        assert viewer2.state.layers[1].visible
        assert not viewer2.state.cumulative
        assert not viewer2.state.normalize

        viewer3 = ga.viewers[0][2]
        assert viewer3.state.x_att is dc[0].id['a']
        assert_allclose(viewer3.state.x_min, 0)
        assert_allclose(viewer3.state.x_max, 9)
        assert_allclose(viewer3.state.y_min, 0.01111111111111111)
        assert_allclose(viewer3.state.y_max, 0.7407407407407407)
        assert_allclose(viewer3.state.hist_x_min, 0)
        assert_allclose(viewer3.state.hist_x_max, 9)
        assert_allclose(viewer3.state.hist_n_bin, 10)
        assert not viewer3.state.x_log
        assert viewer3.state.y_log
        assert viewer3.state.layers[0].visible
        assert viewer3.state.layers[1].visible
        assert not viewer3.state.cumulative
        assert viewer3.state.normalize

        viewer4 = ga.viewers[0][3]
        assert viewer4.state.x_att is dc[0].id['a']
        assert_allclose(viewer4.state.x_min, -1)
        assert_allclose(viewer4.state.x_max, 10)
        assert_allclose(viewer4.state.y_min, 0)
        assert_allclose(viewer4.state.y_max, 12)
        assert_allclose(viewer4.state.hist_x_min, -1)
        assert_allclose(viewer4.state.hist_x_max, 10)
        assert_allclose(viewer4.state.hist_n_bin, 4)
        assert not viewer4.state.x_log
        assert not viewer4.state.y_log
        assert viewer4.state.layers[0].visible
        assert viewer4.state.layers[1].visible
        assert viewer4.state.cumulative
        assert not viewer4.state.normalize

        ga.close()

    def test_apply_roi_single(self):

        # Regression test for a bug that caused mode.update to be called
        # multiple times and resulted in all other viewers receiving many
        # messages regarding subset updates (this occurred when multiple
        # datasets were present).

        layer_tree = LayerTreeWidget(session=self.session)
        layer_tree.set_checkable(False)
        layer_tree.setup(self.data_collection)
        layer_tree.bind_selection_to_edit_subset()

        class Client(HubListener):
            def __init__(self, *args, **kwargs):
                super(Client, self).__init__(*args, **kwargs)
                self.count = Counter()

            def ping(self, message):
                self.count[message.sender] += 1

            def register_to_hub(self, hub):
                hub.subscribe(self, SubsetUpdateMessage, handler=self.ping)

        d1 = Data(a=[1, 2, 3], label='d1')
        d2 = Data(b=[1, 2, 3], label='d2')
        d3 = Data(c=[1, 2, 3], label='d3')
        d4 = Data(d=[1, 2, 3], label='d4')

        self.data_collection.append(d1)
        self.data_collection.append(d2)
        self.data_collection.append(d3)
        self.data_collection.append(d4)

        client = Client()
        client.register_to_hub(self.hub)

        self.viewer.add_data(d1)
        self.viewer.add_data(d3)

        roi = XRangeROI(2.5, 3.5)
        self.viewer.apply_roi(roi)

        for subset in client.count:
            assert client.count[subset] == 1

    @pytest.mark.filterwarnings('ignore:elementwise')
    def test_datetime64_support(self, tmpdir):

        self.data.add_component(np.array([100, 200, 300, 400], dtype='M8[D]'),
                                't1')
        self.viewer.add_data(self.data)
        self.viewer.state.x_att = self.data.id['t1']

        # Matplotlib deals with dates by converting them to the number of days
        # since 01-01-0001, so we can check that the limits are correctly
        # converted (and not 100 to 400)
        assert self.viewer.axes.get_xlim() == (719263.0, 719563.0)

        # Apply an ROI selection in plotting coordinates
        roi = XRangeROI(719313, 719513)
        self.viewer.apply_roi(roi)

        # Check that the two middle elements are selected
        assert_equal(self.data.subsets[0].to_mask(), [0, 1, 1, 0])

        # Make sure that the Qt labels look ok
        options = self.viewer.options_widget().ui
        assert options.valuetext_x_min.text() == '1970-04-11'
        assert options.valuetext_x_max.text() == '1971-02-05'

        # Make sure that we can set the xmin/xmax to a string date
        assert_equal(self.viewer.state.x_min, np.datetime64('1970-04-11', 'D'))
        options.valuetext_x_min.setText('1970-04-14')
        options.valuetext_x_min.editingFinished.emit()
        assert self.viewer.axes.get_xlim() == (719266.0, 719563.0)
        assert_equal(self.viewer.state.x_min, np.datetime64('1970-04-14', 'D'))

        # Make sure that everything works fine after saving/reloading
        filename = tmpdir.join('test_datetime64.glu').strpath
        self.session.application.save_session(filename)
        with open(filename, 'r') as f:
            session = f.read()
        state = GlueUnSerializer.loads(session)
        ga = state.object('__main__')
        viewer = ga.viewers[0][0]
        options = viewer.options_widget().ui

        assert_equal(self.viewer.state.x_min, np.datetime64('1970-04-14', 'D'))

        assert options.valuetext_x_min.text() == '1970-04-14'
        assert options.valuetext_x_max.text() == '1971-02-05'

        ga.close()

    @requires_matplotlib_ge_22
    def test_categorical_labels(self, tmpdir):

        # Regression test for a bug that caused labels on histograms of
        # categorical variables to not be restored correctly after saving
        # and reloading a session

        self.viewer.add_data(self.data)
        self.viewer.state.x_att = self.data.id['y']

        self.viewer.figure.canvas.draw()

        assert [x.get_text() for x in self.viewer.axes.xaxis.get_ticklabels()
                ] == ['', 'a', 'b', 'c', '']

        # Make sure that everything works fine after saving/reloading
        filename = tmpdir.join('test_categorical_labels.glu').strpath
        self.session.application.save_session(filename)
        with open(filename, 'r') as f:
            session = f.read()
        state = GlueUnSerializer.loads(session)
        ga = state.object('__main__')
        viewer = ga.viewers[0][0]

        viewer.figure.canvas.draw()

        assert [x.get_text() for x in viewer.axes.xaxis.get_ticklabels()
                ] == ['', 'a', 'b', 'c', '']

        ga.close()
Example #32
 def setup_method(self, method):
     self.x = np.random.random((2, 3, 4))
     self.d = Data(x=self.x)
Example #33
def test_component_id_combo_helper():

    state = ExampleState()

    dc = DataCollection([])

    helper = ComponentIDComboHelper(state, 'combo', dc)

    assert selection_choices(state, 'combo') == ""

    data1 = Data(x=[1, 2, 3], y=[2, 3, 4], label='data1')

    dc.append(data1)
    helper.append_data(data1)

    assert selection_choices(state, 'combo') == "x:y"

    data2 = Data(a=[1, 2, 3],
                 b=['a', 'b', 'c'],
                 label='data2',
                 coords=IdentityCoordinates(ndim=1))

    dc.append(data2)
    helper.append_data(data2)

    assert selection_choices(state, 'combo') == "data1:x:y:data2:a:b"

    helper.categorical = False

    assert selection_choices(state, 'combo') == "data1:x:y:data2:a"

    helper.numeric = False

    assert selection_choices(state, 'combo') == "data1:data2"

    helper.categorical = True
    helper.numeric = True

    helper.pixel_coord = True
    assert selection_choices(
        state, 'combo'
    ) == "data1:main:x:y:coord:Pixel Axis 0 [x]:data2:main:a:b:coord:Pixel Axis 0 [x]"

    helper.world_coord = True
    assert selection_choices(
        state, 'combo'
    ) == "data1:main:x:y:coord:Pixel Axis 0 [x]:data2:main:a:b:coord:Pixel Axis 0 [x]:World 0"

    helper.pixel_coord = False
    assert selection_choices(
        state, 'combo') == "data1:main:x:y:data2:main:a:b:coord:World 0"

    helper.world_coord = False

    dc.remove(data2)

    assert selection_choices(state, 'combo') == "x:y"

    data1['z'] = data1.id['x'] + 1

    assert selection_choices(state, 'combo') == "main:x:y:derived:z"

    helper.derived = False

    assert selection_choices(state, 'combo') == "x:y"

    data1.id['x'].label = 'z'
    assert selection_choices(state, 'combo') == "z:y"

    helper.remove_data(data1)

    assert selection_choices(state, 'combo') == ""
Example #34
class TestImporter():
    def setup_method(self, method):
        self.importer = MySubsetMaskImporter()
        self.importer.filename = 'test-filename'
        self.importer.reader = MagicMock()
        self.data = Data(x=[1, 2, 3])
        self.data_collection = DataCollection([self.data])

    def test_single_valid(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0]))])
        self.importer.run(self.data, self.data_collection)
        assert len(self.data_collection.subset_groups) == 1
        assert_equal(self.data.subsets[0].to_mask(), [0, 1, 0])

    def test_multiple_valid(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0])),
                                                         ('subset 2', np.array([1, 1, 0]))])
        self.importer.run(self.data, self.data_collection)
        assert len(self.data_collection.subset_groups) == 2
        assert_equal(self.data.subsets[0].to_mask(), [0, 1, 0])
        assert_equal(self.data.subsets[1].to_mask(), [1, 1, 0])

    def test_missing_masks(self):
        self.importer.reader.return_value = OrderedDict()
        with pytest.raises(ValueError) as exc:
            self.importer.run(self.data, self.data_collection)
        assert exc.value.args[0] == "No subset masks were returned"

    def test_single_invalid_shape(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0, 1]))])
        with pytest.raises(ValueError) as exc:
            self.importer.run(self.data, self.data_collection)
        assert exc.value.args[0].replace('L', '') == "Mask shape (4,) does not match data shape (3,)"

    def test_multiple_inconsistent_shapes(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0])),
                                                         ('subset 2', np.array([0, 1, 0, 1]))])
        with pytest.raises(ValueError) as exc:
            self.importer.run(self.data, self.data_collection)
        assert exc.value.args[0] == "Not all subsets have the same shape"

    def test_subset_single(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0]))])
        subset = self.data.new_subset()
        assert_equal(self.data.subsets[0].to_mask(), [0, 0, 0])
        self.importer.run(subset, self.data_collection)
        assert_equal(self.data.subsets[0].to_mask(), [0, 1, 0])

    def test_subset_multiple(self):
        self.importer.reader.return_value = OrderedDict([('subset 1', np.array([0, 1, 0])),
                                                         ('subset 2', np.array([1, 1, 0]))])
        subset = self.data.new_subset()
        with pytest.raises(ValueError) as exc:
            self.importer.run(subset, self.data_collection)
        assert exc.value.args[0] == 'Can only read in a single subset when importing into a subset'
Example #35
 def setup_method(self, method):
     self.importer = MySubsetMaskImporter()
     self.importer.filename = 'test-filename'
     self.importer.reader = MagicMock()
     self.data = Data(x=[1, 2, 3])
     self.data_collection = DataCollection([self.data])
Example #36
class TestArithmeticEditorWidget:
    def setup_method(self):

        self.data1 = Data(x=[1, 2, 3], y=[3.5, 4.5, -1.0], z=['a', 'r', 'w'])
        self.data2 = Data(a=[3, 4, 1], b=[1.5, -2.0, 3.5], c=['y', 'e', 'r'])

        # Add a derived component so that we can test how we deal with existing ones
        components = dict((cid.label, cid) for cid in self.data2.components)
        pc = ParsedCommand('{a}', components)
        link = ParsedComponentLink(ComponentID('d'), pc)
        self.data2.add_component_link(link)

        self.data_collection = DataCollection([self.data1, self.data2])

        link = ComponentLink([self.data1.id['x']], self.data2.id['a'])
        self.data_collection.add_link(link)

        self.listener1 = ChangeListener(self.data1)
        self.listener2 = ChangeListener(self.data2)

    def test_nochanges(self):
        editor = ArithmeticEditorWidget(self.data_collection)
        editor.show()
        editor.button_ok.click()
        self.listener1.assert_exact_changes()
        self.listener2.assert_exact_changes()
        editor.close()

    def test_add_derived_and_rename(self):
        editor = ArithmeticEditorWidget(self.data_collection)
        editor.show()
        with patch.object(EquationEditorDialog, 'exec_',
                          auto_accept('{x} + {y}')):
            editor.button_add_derived.click()
        item = list(editor.list)[0]
        item.setText(0, 'new')
        editor.button_ok.click()
        self.listener1.assert_exact_changes(added=[self.data1.id['new']])
        self.listener2.assert_exact_changes()
        assert_equal(self.data1['new'], [4.5, 6.5, 2.0])
        editor.close()

    def test_add_derived_and_cancel(self):
        editor = ArithmeticEditorWidget(self.data_collection)
        editor.show()
        with patch.object(EquationEditorDialog, 'exec_', auto_reject()):
            editor.button_add_derived.click()
        assert len(editor.list) == 0
        editor.close()

    def test_edit_existing_equation(self):
        assert_equal(self.data2['d'], [3, 4, 1])
        editor = ArithmeticEditorWidget(self.data_collection)
        editor.show()
        assert len(editor.list) == 0
        editor.combosel_data.setCurrentIndex(1)
        assert len(editor.list) == 1
        editor.list.select_cid(self.data2.id['d'])
        with patch.object(EquationEditorDialog, 'exec_',
                          auto_accept('{a} + {b}')):
            editor.button_edit_derived.click()
        editor.button_ok.click()
        self.listener1.assert_exact_changes()
        self.listener2.assert_exact_changes(numerical=True)
        assert_equal(self.data2['d'], [4.5, 2.0, 4.5])
        editor.close()
Example #37
 def init_data(self):
     return Data(label='d1',
                 x=[3.4, 2.3, -1.1, 0.3],
                 y=['a', 'b', 'c', 'a'])
Example #38
 def setup_method(self, method):
     self.cube = Data(label='cube', x=np.arange(1000).reshape((5, 10, 20)))
     self.application = GlueApplication()
     self.application.data_collection.append(self.cube)
     self.viewer = self.application.new_data_viewer(ImageViewer)
     self.viewer.add_data(self.cube)