Ejemplo n.º 1
0
 def __init__(self, *args, **kwargs):
     """Initialise the EDS signal.

     Forwards all arguments to ``Signal1D.__init__`` and warns when the
     signal type is the generic ``'EDS'`` rather than a microscope-specific
     one, since several EDS methods need to know the microscope type.

     Fix: renamed the misspelled ``**kwards`` catch-all to the
     conventional ``**kwargs`` (internal name only; callers are
     unaffected).
     """
     Signal1D.__init__(self, *args, **kwargs)
     if self.metadata.Signal.signal_type == 'EDS':
         warnings.warn('The microscope type is not set. Use '
                       'set_signal_type(\'EDS_TEM\')  '
                       'or set_signal_type(\'EDS_SEM\')')
     # EDS spectra are histograms of X-ray counts, hence binned.
     self.metadata.Signal.binned = True
     # Maps element/line names to their plot markers.
     self._xray_markers = {}
Ejemplo n.º 2
0
 def __init__(self, *args, **kwargs):
     """Initialise the EDS signal.

     Forwards all arguments to ``Signal1D.__init__`` and warns when the
     signal type is the generic ``'EDS'`` rather than a microscope-specific
     one, since several EDS methods need to know the microscope type.

     Fix: renamed the misspelled ``**kwards`` catch-all to the
     conventional ``**kwargs`` (internal name only; callers are
     unaffected).
     """
     Signal1D.__init__(self, *args, **kwargs)
     if self.metadata.Signal.signal_type == 'EDS':
         warnings.warn('The microscope type is not set. Use '
                       'set_signal_type(\'EDS_TEM\')  '
                       'or set_signal_type(\'EDS_SEM\')')
     # EDS spectra are histograms of X-ray counts, hence binned.
     self.metadata.Signal.binned = True
     # Maps element/line names to their plot markers.
     self._xray_markers = {}
Ejemplo n.º 3
0
 def setup_method(self, method):
     """Create a single-peak test signal, an empty model for it, and the
     reference Gaussian parameters expected from fitting that peak."""
     data = np.array([1.0, 2, 4, 7, 12, 7, 4, 2, 1])
     signal = Signal1D(data)
     self.model = signal.create_model()
     self.A = 38.022476979172588
     self.sigma = 1.4764966133859543
     self.centre = 4.0000000002462945
Ejemplo n.º 4
0
def test_error_saving(tmp_path, file):
    """Saving with an invalid ``write_dataset`` value must raise
    ``ValueError`` without touching the signal's metadata.

    Fix: the metadata assertion was inside the ``pytest.raises`` block,
    *after* the call that raises — statements there never execute, so it
    was dead code. It now runs after the context exits.
    """
    filename = tmp_path / file
    s = Signal1D(np.arange(10))

    with pytest.raises(ValueError):
        s.save(filename, write_dataset='unsupported_type')
    assert not s.metadata.Signal.has_item('record_by')
Ejemplo n.º 5
0
 def setup_method(self, method):
     """Histogram of normally distributed samples with a model holding a
     single Gaussian component."""
     np.random.seed(1)
     samples = np.random.normal(scale=2, size=10000)
     histogram = Signal1D(samples).get_histogram()
     self.g = Gaussian()
     model = histogram.create_model()
     model.append(self.g)
     self.m = model
Ejemplo n.º 6
0
    def setup_method(self, method):
        """Noisy spectrum of two Gaussians on a constant background, with a
        matching model whose centres and widths are held fixed so only the
        amplitudes (and the offset) are fitted."""
        np.random.seed(1)
        centre_a, centre_b = 10, 12
        amp_a, amp_b = -50, 20
        true_a = Gaussian(centre=centre_a, A=amp_a, sigma=1)
        true_b = Gaussian(centre=centre_b, A=amp_b, sigma=1)

        x = np.linspace(0, 20, 1000)
        clean = true_a.function(x) + true_b.function(x) + 5
        y = clean + np.random.normal(size=clean.shape)

        s = Signal1D(y)
        s.axes_manager[-1].scale = x[1] - x[0]

        self.m = s.create_model()
        g1 = Gaussian(centre=centre_a, A=1, sigma=1)
        g2 = Gaussian(centre=centre_b, A=1, sigma=1)
        self.m.extend([g1, g2, Offset()])

        # Fix centres and widths; only amplitudes stay free.
        for component in (g1, g2):
            component.centre.free = False
            component.sigma.free = False

        self.g1, self.g2 = g1, g2
Ejemplo n.º 7
0
 def setUp(self):
     """Sample a broad Gaussian onto a fine axis and wrap it in Signal1D."""
     peak = Gaussian()
     peak.A.value = 20
     peak.sigma.value = 10
     peak.centre.value = 50
     axis_values = np.arange(0, 100, 0.01)
     self.signal = Signal1D(peak.function(axis_values))
     self.signal.axes_manager[0].scale = 0.01
Ejemplo n.º 8
0
 def setUp(self):
     """Model over a 100-point ramp with one renamed Gaussian component."""
     model = Signal1D(range(100)).create_model()
     model.append(Gaussian())
     gaussian = model.components.Gaussian
     gaussian.A.value = 13
     gaussian.name = 'something'
     self.m = model
Ejemplo n.º 9
0
    def get_explained_variance_ratio(self):
        """Return the explained variance ratio of the PCA components as a
        Signal1D.

        Returns
        -------
        s : Signal1D
            Explained variation ratio.

        Raises
        ------
        AttributeError
            If no decomposition results are available (i.e.
            ``learning_results.explained_variance_ratio`` is ``None``).

        See Also
        --------
        plot_explained_variance_ratio, decomposition,
        get_decomposition_loadings, get_decomposition_factors

        """
        from hyperspy._signals.signal1d import Signal1D
        # Results of the previously run decomposition.
        target = self.learning_results
        if target.explained_variance_ratio is None:
            raise AttributeError("The explained_variance_ratio attribute is "
                                 "`None`, did you forget to perform a PCA "
                                 "decomposition?")
        s = Signal1D(target.explained_variance_ratio)
        s.metadata.General.title = self.metadata.General.title + \
            "\nPCA Scree Plot"
        # The single signal axis indexes the principal components.
        s.axes_manager[-1].name = 'Principal component index'
        s.axes_manager[-1].units = ''
        return s
Ejemplo n.º 10
0
def test_estimate_parameters_binned(only_current, binned, lazy, uniform):
    """Doniach parameter estimation must honour the axis binning state for
    uniform and non-uniform axes, eagerly and lazily."""
    s = Signal1D(np.empty((200,)))
    axis = s.axes_manager.signal_axes[0]
    axis.is_binned = binned
    axis.scale = .05
    axis.offset = -5
    reference = Doniach(centre=1, A=5, sigma=1, alpha=0.5)
    s.data = reference.function(axis.axis)
    if not uniform:
        axis.convert_to_non_uniform_axis()
    if lazy:
        s = s.as_lazy()
    estimated = Doniach()
    # Binned data are scaled by the bin width (per-point for non-uniform).
    if not binned:
        factor = 1
    elif uniform:
        factor = axis.scale
    else:
        factor = np.gradient(axis.axis)
    assert estimated.estimate_parameters(
        s, axis.low_value, axis.high_value, only_current=only_current)
    assert estimated._axes_manager[-1].is_binned == binned
    np.testing.assert_allclose(estimated.sigma.value, 2.331764, 0.01)
    np.testing.assert_allclose(
        reference.A.value, estimated.A.value * factor, 0.3)
    np.testing.assert_allclose(estimated.centre.value, -0.4791825)
Ejemplo n.º 11
0
    def setup_method(self, method):
        """Random 2x2 navigation signal with one Gaussian in its model."""
        model = Signal1D(np.random.random((2, 2, 8))).create_model()
        component = Gaussian()
        model.append(component)

        self.model = model
        self.G = component
Ejemplo n.º 12
0
 def setup_method(self, method):
     """Model with two Gaussians; the first is renamed to 'something'."""
     model = Signal1D(range(100)).create_model()
     first = Gaussian()
     model.append(first)
     first.A.value = 13
     first.name = 'something'
     second = Gaussian()
     model.append(second)
     second.A.value = 3
     self.m = model
Ejemplo n.º 13
0
 def setup_method(self, method):
     """Mixed Laplacian sources, decomposed and blind-source separated."""
     rng = np.random.RandomState(123)
     sources = rng.laplace(size=(3, 500))
     sources -= 2 * sources.min()  # shift so every value is positive
     mixing = rng.random((3, 3))
     signal = Signal1D(mixing @ sources)
     signal.decomposition()
     signal.blind_source_separation(2)
     self.s = signal
Ejemplo n.º 14
0
def test_fastica_whiten_method(whiten_method):
    """sklearn FastICA BSS must work with each supported whitening method."""
    rng = np.random.RandomState(123)
    sources = rng.laplace(size=(3, 1000))
    mixing = rng.random((3, 3))
    signal = Signal1D(mixing @ sources)
    signal.decomposition()
    signal.blind_source_separation(
        3, algorithm="sklearn_fastica", whiten_method=whiten_method)
    assert signal.learning_results.unmixing_matrix.shape == mixing.shape
Ejemplo n.º 15
0
def test_more_recent_version_warning(tmp_path):
    """Loading a file tagged with a newer format version warns but still
    returns the data intact."""
    path = tmp_path / 'test.hspy'
    original = Signal1D(np.arange(10))
    original.save(path)

    # Pretend the file was produced by a far newer file-format version.
    with h5py.File(path, mode='a') as f:
        f.attrs["file_format_version"] = '99999999'

    with pytest.warns(UserWarning):
        reloaded = load(path)
    np.testing.assert_allclose(original.data, reloaded.data)
Ejemplo n.º 16
0
 def test_compression(self, compressor, tmp_path):
     """A signal must round-trip through zspy under each compressor."""
     if compressor == "blosc":
         from numcodecs import Blosc
         compressor = Blosc(cname='zstd', clevel=3,
                            shuffle=Blosc.BITSHUFFLE)
     path = tmp_path / 'test_compression.zspy'
     signal = Signal1D(np.ones((3, 3)))
     signal.save(path, overwrite=True, compressor=compressor)
     load(path)
Ejemplo n.º 17
0
 def setup_method(self, method):
     """EELS model for the carbon K edge with fine structure enabled."""
     signal = Signal1D(range(100))
     signal.axes_manager[0].offset = 280
     signal.set_signal_type("EELS")
     signal.add_elements(["C"])
     signal.set_microscope_parameters(100, 10, 10)
     model = signal.create_model(auto_background=False)
     edge = model.components.C_K
     edge.fine_structure_smoothing = 0.5
     edge.fine_structure_width = 50
     edge.fine_structure_active = True
     self.m = model
Ejemplo n.º 18
0
 def setup_method(self, method):
     """Sample a tall, wide Gaussian on a long axis and build an empty
     model for the resulting signal."""
     g = Gaussian()
     g.A.value = 10000.0
     g.centre.value = 5000.0
     g.sigma.value = 500.0
     axis = np.arange(10000)
     self.model = Signal1D(g.function(axis)).create_model()
     self.g = g
     self.axis = axis
Ejemplo n.º 19
0
def test_normalize_components_errors():
    """normalize_bss_components must validate call order and target value."""
    rng = np.random.RandomState(123)
    signal = Signal1D(rng.random((20, 100)))
    signal.decomposition()

    # BSS has not been run yet, so normalisation must be rejected.
    with pytest.raises(ValueError,
                       match="called after s.blind_source_separation"):
        signal.normalize_bss_components(target="loadings")

    signal.blind_source_separation(2)

    # An unknown target keyword is also rejected.
    with pytest.raises(ValueError, match="target must be"):
        signal.normalize_bss_components(target="uniform")
Ejemplo n.º 20
0
    def setup_method(self, method):
        """Fitted model with a low-loss signal, two twinned Gaussians and a
        scalable fixed pattern."""
        s = Signal1D(np.array([1.0, 2, 4, 7, 12, 7, 4, 2, 1]))
        m = s.create_model()
        m.low_loss = (s + 3.0).deepcopy()
        self.model = m
        self.s = s

        for component in (Gaussian(),
                          Gaussian(),
                          ScalableFixedPattern(s * 0.3)):
            m.append(component)
        # Twin the first Gaussian's amplitude to the second's.
        m[0].A.twin = m[1].A
        m.fit()
Ejemplo n.º 21
0
 def setUp(self):
     """Three Gaussians appended to a model over a 10x10x10 dataset."""
     components = [Gaussian() for _ in range(3)]
     signal = Signal1D(np.arange(1000).reshape(10, 10, 10))
     model = signal.create_model()
     for component in components:
         model.append(component)
     self.g1, self.g2, self.g3 = components
     self.model = model
Ejemplo n.º 22
0
def test_save_load_model(tmp_path, file, lazy):
    """A stored model must survive a save/load round trip."""
    from hyperspy._components.gaussian import Gaussian
    path = tmp_path / file
    signal = Signal1D(np.ones((10, 10, 10, 10)))
    if lazy:
        signal = signal.as_lazy()
    model = signal.create_model()
    model.append(Gaussian())
    model.store("test")
    signal.save(path)
    reloaded = load(path)
    restored = reloaded.models.restore("test")
    assert model.signal == restored.signal
Ejemplo n.º 23
0
def test_save_chunks_signal_metadata(tmp_path, file):
    """Explicit chunking passed to ``save`` must be preserved on disk."""
    side = 10
    ndim = 3
    s = Signal1D(np.arange(side**ndim).reshape([side] * ndim))
    s.navigator = s.sum(-1)
    s.change_dtype('float')
    s.decomposition()

    path = tmp_path / file
    chunks = (5, 5, 10)
    s.save(path, chunks=chunks)
    reloaded = load(path, lazy=True)
    assert tuple(c[0] for c in reloaded.data.chunks) == chunks
 def setup_method(self, method):
     """Three Gaussians appended to a model over a small 1D signal."""
     components = [Gaussian() for _ in range(3)]
     model = Signal1D(np.arange(10)).create_model()
     for component in components:
         model.append(component)
     self.g1, self.g2, self.g3 = components
     self.model = model
Ejemplo n.º 25
0
def test_save_chunks_signal_metadata():
    """Explicit chunking passed to ``save`` must be preserved on disk.

    Fix: the save/load/assert statements sat *outside* the
    ``TemporaryDirectory`` context, so the directory was already deleted
    by the time ``s.save`` ran. They now execute inside the context.
    """
    N = 10
    dim = 3
    s = Signal1D(np.arange(N**dim).reshape([N] * dim))
    s.navigator = s.sum(-1)
    s.change_dtype('float')
    s.decomposition()
    with tempfile.TemporaryDirectory() as tmp:
        filename = os.path.join(tmp, 'test_save_chunks_signal_metadata.hspy')
        chunks = (5, 5, 10)
        s.save(filename, chunks=chunks)
        s2 = load(filename, lazy=True)
        assert tuple([c[0] for c in s2.data.chunks]) == chunks
Ejemplo n.º 26
0
def test_nonuniformaxis(tmp_path, file, lazy):
    """Non-uniform axes must survive a save/load round trip, eagerly and
    lazily, keeping values, navigate flag and size.

    Fix: replaced ``== False`` comparisons with ``not ...`` (PEP 8 /
    flake8 E712); truthiness is equivalent here and the idiomatic form.
    """
    fname = tmp_path / file
    data = np.arange(10)
    # 1/n spacing gives a strictly non-uniform signal axis.
    axis = DataAxis(axis=1 / np.arange(1, data.size + 1), navigate=False)
    s = Signal1D(data, axes=(axis.get_axis_dictionary(), ))
    if lazy:
        s = s.as_lazy()
    s.save(fname, overwrite=True)
    s2 = load(fname)
    np.testing.assert_array_almost_equal(s.axes_manager[0].axis,
                                         s2.axes_manager[0].axis)
    assert not s2.axes_manager[0].is_uniform
    assert not s2.axes_manager[0].navigate
    assert s2.axes_manager[0].size == data.size
Ejemplo n.º 27
0
 def setup_method(self, method):
     """Model mixing a renamed Gaussian, a numpy-backed GaussianHF and an
     Expression line component with a renamed parameter."""
     model = Signal1D(range(100)).create_model()
     model.append(Gaussian(A=13))
     model[-1].name = 'something'
     model.append(GaussianHF(module="numpy"))
     model[-1].height.value = 3
     # ``b`` in the expression is exposed as parameter ``c``.
     line = Expression(name="Line",
                       expression="a * x + b",
                       a=1,
                       c=0,
                       rename_pars={"b": "c"})
     model.append(line)
     self.m = model
Ejemplo n.º 28
0
def test_get_scaling_parameter(is_binned, non_uniform, dim):
    """_get_scaling_factor should equal the axis scale only for binned axes.

    Parametrised over binned/unbinned, uniform/non-uniform axes and the
    signal dimensionality.
    """
    shape = [10 + i for i in range(dim)]
    signal = Signal1D(np.arange(np.prod(shape)).reshape(shape[::-1]))
    axis = signal.axes_manager.signal_axes[0]
    axis.is_binned = is_binned
    axis.scale = 0.5
    if non_uniform:
        axis.convert_to_non_uniform_axis()
    # NOTE(review): ``shape[::-2]`` reverses ``shape`` taking every second
    # entry — presumably meant to produce the navigation shape for
    # ``centre``; verify it is not a typo for ``shape[:-1][::-1]``.
    centre = np.ones(shape[::-2])
    scaling_factor = _get_scaling_factor(signal, axis, centre)

    if is_binned:
        # Binned axes scale the estimate by the axis step (0.5 here).
        assert np.all(scaling_factor == 0.5)
    else:
        assert scaling_factor == 1
Ejemplo n.º 29
0
def test_orthomax(whiten_method):
    """Orthomax BSS recovers the mixing matrix and accepts a gamma value."""
    rng = np.random.RandomState(123)
    sources = rng.laplace(size=(3, 500))
    mixing = rng.random((3, 3))
    s = Signal1D(mixing @ sources)
    s.decomposition()
    s.blind_source_separation(3, algorithm="orthomax",
                              whiten_method=whiten_method)

    unmixing = s.learning_results.unmixing_matrix
    # Amari distance < 0.5 means the sources were essentially recovered.
    assert amari(unmixing, mixing) < 0.5

    # Verify that we can change gamma for orthomax method
    eels = artificial_data.get_core_loss_eels_line_scan_signal()
    eels.decomposition()
    eels.blind_source_separation(2, algorithm="orthomax", gamma=2)
Ejemplo n.º 30
0
def test_bss_FastICA_object():
    """Tests that a simple sklearn pipeline is an acceptable algorithm."""
    rng = np.random.RandomState(123)
    sources = rng.laplace(size=(3, 1000))
    mixing = rng.random((3, 3))
    signal = Signal1D(mixing @ sources)
    signal.decomposition()

    from sklearn.decomposition import FastICA

    result = signal.blind_source_separation(
        3, algorithm=FastICA(algorithm="deflation"), return_info=True)

    # ``return_info=True`` should hand back the fitted estimator itself.
    assert hasattr(result, "components_")
Ejemplo n.º 31
0
    def setup_method(self, method):
        """Decomposed mixture of Laplacian sources, plus boolean signal and
        navigation masks each with a single element set."""
        rng = np.random.RandomState(123)
        independent_sources = rng.laplace(size=(3, 1000))
        mixing = rng.random((100, 3))
        signal = Signal1D(mixing @ independent_sources)
        signal.decomposition()

        signal_mask = signal._get_signal_signal(dtype="bool")
        signal_mask.isig[5] = True

        navigation_mask = signal._get_navigation_signal(dtype="bool")
        navigation_mask.isig[5] = True

        self.s = signal
        self.mask_nav = navigation_mask
        self.mask_sig = signal_mask