Example #1
@pytest.fixture  # decorator assumed: the trailing `yield optimize(scheme)` marks this as a pytest fixture
def dummy_result():
    """Dummy result for testing."""

    model = suite.model

    model.is_grouped = False
    model.is_index_dependent = False

    wanted_parameters = suite.wanted_parameters
    data = {}
    for i in range(3):
        e_axis = getattr(suite, "e_axis" if i == 0 else f"e_axis{i+1}")
        c_axis = getattr(suite, "c_axis" if i == 0 else f"c_axis{i+1}")

        data[f"dataset{i+1}"] = simulate(
            suite.sim_model, f"dataset{i+1}", wanted_parameters, {"e": e_axis, "c": c_axis}
        )
    scheme = Scheme(
        model=suite.model,
        parameters=suite.initial_parameters,
        data=data,
        maximum_number_function_evaluations=1,
    )

    yield optimize(scheme)
Example #2
def test_decay_model(suite, nnls):

    model = suite.model
    print(model.validate())
    assert model.valid()
    model.dataset_group_models["default"].link_clp = False
    model.dataset_group_models["default"].method = (
        "non_negative_least_squares" if nnls else "variable_projection")

    wanted_parameters = suite.wanted_parameters
    print(model.validate(wanted_parameters))
    print(wanted_parameters)
    assert model.valid(wanted_parameters)

    initial_parameters = suite.initial_parameters
    print(model.validate(initial_parameters))
    assert model.valid(initial_parameters)

    print(model.markdown(wanted_parameters))

    dataset = simulate(model, "dataset1", wanted_parameters, suite.axis)

    assert dataset.data.shape == (suite.axis["time"].size,
                                  suite.axis["spectral"].size)

    data = {"dataset1": dataset}

    scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data=data,
        maximum_number_function_evaluations=20,
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value, wanted_parameters.get(label).value)

    resultdata = result.data["dataset1"]

    print(resultdata)

    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["spectral"], resultdata["spectral"])
    assert dataset.data.shape == resultdata.data.shape
    assert dataset.data.shape == resultdata.fitted_data.shape
    assert np.allclose(dataset.data, resultdata.fitted_data, rtol=1e-1)

    assert "species_spectra" in resultdata
    spectra = resultdata.species_spectra
    assert "spectral_species" in spectra.coords
    assert "spectral" in spectra.coords
    assert spectra.shape == (suite.axis["spectral"].size, 3)

    assert "species_concentration" in resultdata
    concentration = resultdata.species_concentration
    assert "species" in concentration.coords
    assert "time" in concentration.coords
    assert concentration.shape == (suite.axis["time"].size, 3)
Example #3
def test_optimization_full_model(index_dependent):
    model = FullModel.model
    model.megacomplex["m1"].is_index_dependent = index_dependent

    print(model.validate())
    assert model.valid()

    parameters = FullModel.parameters
    assert model.valid(parameters)

    dataset = simulate(model, "dataset1", parameters, FullModel.coordinates)

    scheme = Scheme(
        model=model,
        parameters=parameters,
        data={"dataset1": dataset},
        maximum_number_function_evaluations=10,
    )

    result = optimize(scheme, raise_exception=True)
    assert result.success
    optimized_scheme = result.get_scheme()
    assert result.optimized_parameters == optimized_scheme.parameters

    result_data = result.data["dataset1"]
    assert "fitted_data" in result_data
    for label, param in result.optimized_parameters.all():
        if param.vary:
            assert np.allclose(param.value, parameters.get(label).value, rtol=1e-1)

    clp = result_data.clp
    print(clp)
    assert clp.shape == (4, 4)
    assert all(np.isclose(1.0, c) for c in np.diagonal(clp))
Example #4
def setup_scheme(model):
    return Scheme(
        model=model,
        parameters=TEST_PARAMETERS,
        data={
            "dataset1": TEST_DATA,
            "dataset2": TEST_DATA,
            "dataset3": TEST_DATA,
        },
    )
Example #5
def test_spectral_model(suite):

    model = suite.spectral_model
    print(model.validate())
    assert model.valid()

    wanted_parameters = suite.spectral_parameters
    print(model.validate(wanted_parameters))
    print(wanted_parameters)
    assert model.valid(wanted_parameters)

    initial_parameters = suite.spectral_parameters
    print(model.validate(initial_parameters))
    assert model.valid(initial_parameters)

    print(model.markdown(initial_parameters))

    dataset = simulate(model, "dataset1", wanted_parameters, suite.axis,
                       suite.clp)

    assert dataset.data.shape == (suite.axis["time"].size,
                                  suite.axis["spectral"].size)

    data = {"dataset1": dataset}

    scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data=data,
        maximum_number_function_evaluations=20,
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value,
                           wanted_parameters.get(label).value,
                           rtol=1e-1)

    resultdata = result.data["dataset1"]
    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["spectral"], resultdata["spectral"])
    assert dataset.data.shape == resultdata.data.shape
    assert dataset.data.shape == resultdata.fitted_data.shape
    assert np.allclose(dataset.data, resultdata.fitted_data, rtol=1e-2)
    assert "species_associated_concentrations" in resultdata
    assert resultdata.species_associated_concentrations.shape == (
        suite.axis["time"].size,
        len(suite.decay_compartments),
    )
    assert "species_spectra" in resultdata
    assert resultdata.species_spectra.shape == (
        suite.axis["spectral"].size,
        len(suite.decay_compartments),
    )
Example #6
def test_scheme_ipython_rendering(mock_scheme: Scheme):
    """Autorendering in ipython"""

    rendered_obj = format_display_data(mock_scheme)[0]

    assert "text/markdown" in rendered_obj
    assert rendered_obj["text/markdown"].startswith("# Model")

    rendered_markdown_return = format_display_data(mock_scheme.markdown())[0]

    assert "text/markdown" in rendered_markdown_return
    assert rendered_markdown_return["text/markdown"].startswith("# Model")
Example #7
def test_kinetic_model(suite, nnls):

    model = suite.model
    print(model.validate())
    assert model.valid()

    wanted_parameters = suite.wanted_parameters
    print(model.validate(wanted_parameters))
    print(wanted_parameters)
    assert model.valid(wanted_parameters)

    initial_parameters = suite.initial_parameters
    print(model.validate(initial_parameters))
    assert model.valid(initial_parameters)

    print(model.markdown(initial_parameters))

    dataset = model.simulate("dataset1", wanted_parameters, suite.axis,
                             suite.clp)

    assert dataset.data.shape == (suite.axis["time"].size,
                                  suite.axis["pixel"].size)

    data = {"dataset1": dataset}

    scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data=data,
        maximum_number_function_evaluations=20,
        non_negative_least_squares=nnls,
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value,
                           wanted_parameters.get(label).value,
                           rtol=1e-1)

    resultdata = result.data["dataset1"]
    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["pixel"], resultdata["pixel"])
    assert dataset.data.shape == resultdata.data.shape
    assert dataset.data.shape == resultdata.fitted_data.shape
    assert np.allclose(dataset.data, resultdata.fitted_data, rtol=1e-2)
    assert "species_associated_images" in resultdata
    assert "decay_associated_images" in resultdata

    if len(model.irf) != 0:
        assert "irf" in resultdata
Example #8
def test_kinetic_model(suite, nnls):

    model = suite.model
    print(model.validate())
    assert model.valid()
    model.dataset_group_models["default"].method = (
        "non_negative_least_squares" if nnls else "variable_projection")

    wanted_parameters = suite.wanted_parameters
    print(model.validate(wanted_parameters))
    print(wanted_parameters)
    assert model.valid(wanted_parameters)

    initial_parameters = suite.initial_parameters
    print(model.validate(initial_parameters))
    assert model.valid(initial_parameters)

    print(model.markdown(wanted_parameters))

    dataset = simulate(model, "dataset1", wanted_parameters, suite.axis)

    assert dataset.data.shape == (suite.axis["time"].size,
                                  suite.axis["spectral"].size)

    data = {f"dataset{i}": dataset for i in range(1, 5)}

    scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data=data,
        maximum_number_function_evaluations=20,
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        print(label, param.value, wanted_parameters.get(label).value)
        assert np.allclose(param.value,
                           wanted_parameters.get(label).value,
                           rtol=1e-1)

    resultdata = result.data["dataset1"]

    print(resultdata)

    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["spectral"], resultdata["spectral"])
    assert dataset.data.shape == resultdata.data.shape
    assert dataset.data.shape == resultdata.fitted_data.shape
    assert np.allclose(dataset.data, resultdata.fitted_data, rtol=1e-2)
Example #9
def test_single_dataset():
    model = SimpleTestModel.from_dict({
        "megacomplex": {
            "m1": {
                "is_index_dependent": False
            }
        },
        "dataset_groups": {
            "default": {
                "link_clp": True
            }
        },
        "dataset": {
            "dataset1": {
                "megacomplex": ["m1"],
            },
        },
    })
    print(model.validate())
    assert model.valid()

    parameters = ParameterGroup.from_list([1, 10])
    print(model.validate(parameters))
    assert model.valid(parameters)
    global_axis = [1, 2, 3]
    model_axis = [5, 7, 9, 12]

    data = {
        "dataset1":
        xr.DataArray(np.ones((3, 4)),
                     coords=[("global", global_axis),
                             ("model", model_axis)]).to_dataset(name="data")
    }

    scheme = Scheme(model, parameters, data)
    optimization_group = OptimizationGroup(
        scheme,
        model.get_dataset_groups()["default"])
    bag = optimization_group._calculator.bag
    datasets = optimization_group._calculator.groups
    assert len(datasets) == 1
    assert len(bag) == 3
    assert all(p.data.size == 4 for p in bag)
    assert all(p.dataset_models[0].label == "dataset1" for p in bag)
    assert all(
        all(p.dataset_models[0].axis["model"] == model_axis) for p in bag)
    assert all(
        all(p.dataset_models[0].axis["global"] == global_axis) for p in bag)
    assert [p.dataset_models[0].indices["global"] for p in bag] == [0, 1, 2]
Example #10
def test_doas_model(suite):

    print(suite.sim_model.validate())
    assert suite.sim_model.valid()

    print(suite.model.validate())
    assert suite.model.valid()

    print(suite.sim_model.validate(suite.wanted_parameter))
    assert suite.sim_model.valid(suite.wanted_parameter)

    print(suite.model.validate(suite.parameter))
    assert suite.model.valid(suite.parameter)

    dataset = simulate(suite.sim_model, "dataset1", suite.wanted_parameter,
                       suite.axis)
    print(dataset)

    assert dataset.data.shape == (suite.axis["time"].size,
                                  suite.axis["spectral"].size)

    print(suite.parameter)
    print(suite.wanted_parameter)

    data = {"dataset1": dataset}
    scheme = Scheme(
        model=suite.model,
        parameters=suite.parameter,
        data=data,
        maximum_number_function_evaluations=20,
    )
    result = optimize(scheme, raise_exception=True)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value,
                           suite.wanted_parameter.get(label).value,
                           rtol=1e-1)

    resultdata = result.data["dataset1"]
    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["spectral"], resultdata["spectral"])
    assert dataset.data.shape == resultdata.fitted_data.shape
    assert np.allclose(dataset.data, resultdata.fitted_data)

    assert "damped_oscillation_cos" in resultdata
    assert "damped_oscillation_sin" in resultdata
    assert "damped_oscillation_associated_spectra" in resultdata
    assert "damped_oscillation_phase" in resultdata
Example #11
    def setup(self, index_dependent, grouped, weight):
        suite = MultichannelMulticomponentDecay
        model = suite.model
        # 0.4.0 API compat
        model.is_grouped = grouped

        model.megacomplex["m1"].is_index_dependent = index_dependent

        sim_model = suite.sim_model
        suite.sim_model.megacomplex["m1"].is_index_dependent = index_dependent

        wanted_parameters = suite.wanted_parameters

        initial_parameters = suite.initial_parameters
        model.dataset["dataset1"].fill(model, initial_parameters)

        if hasattr(suite, "global_axis"):
            axes_dict = {
                "global": getattr(suite, "global_axis"),
                "model": getattr(suite, "model_axis"),
            }
        else:
            # 0.4.0 API compat
            axes_dict = {
                "e": getattr(suite, "e_axis"),
                "c": getattr(suite, "c_axis"),
            }

        dataset = simulate(sim_model, "dataset1", wanted_parameters, axes_dict)

        if weight:
            dataset["weight"] = xr.DataArray(
                np.ones_like(dataset.data) * 0.5, coords=dataset.data.coords
            )

        data = {"dataset1": dataset}

        self.scheme = Scheme(
            model=model,
            parameters=initial_parameters,
            data=data,
            maximum_number_function_evaluations=10,
            group_tolerance=0.1,
            optimization_method="TrustRegionReflection",
        )
        # 0.4.0 API compat
        if hasattr(self.scheme, "group"):
            self.scheme.group = grouped
Example #12
def test_spectral_irf(suite):

    model = suite.model
    print(model.validate())
    assert model.valid()

    parameters = suite.parameters
    print(model.validate(parameters))
    assert model.valid(parameters)

    dataset = model.simulate("dataset1", parameters, suite.axis)

    assert dataset.data.shape == (suite.axis["time"].size, suite.axis["spectral"].size)

    data = {"dataset1": dataset}

    scheme = Scheme(
        model=model,
        parameters=parameters,
        data=data,
        maximum_number_function_evaluations=20,
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value, parameters.get(label).value, rtol=1e-1)

    resultdata = result.data["dataset1"]

    # print(resultdata)

    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["spectral"], resultdata["spectral"])
    assert dataset.data.shape == resultdata.data.shape
    assert dataset.data.shape == resultdata.fitted_data.shape
    assert np.allclose(dataset.data, resultdata.fitted_data, atol=1e-14)

    irf_max_at_start = resultdata.fitted_data.isel(spectral=0).argmax(axis=0)
    irf_max_at_end = resultdata.fitted_data.isel(spectral=-1).argmax(axis=0)
    print(f" irf_max_at_start: {irf_max_at_start}\n irf_max_at_end: {irf_max_at_end}")
    # These should not be equal due to dispersion:
    assert irf_max_at_start != irf_max_at_end

    assert "species_associated_spectra" in resultdata
    assert "decay_associated_spectra" in resultdata
Example #13
def test_relations(index_dependent, link_clp):
    model = deepcopy(suite.model)
    model.dataset_group_models["default"].link_clp = link_clp
    model.megacomplex["m1"].is_index_dependent = index_dependent
    model.clp_relations.append(
        Relation.from_dict({
            "source": "s1",
            "target": "s2",
            "parameter": "3"
        }))
    parameters = ParameterGroup.from_list([11e-4, 22e-5, 2])

    print("link_clp", link_clp, "index_dependent", index_dependent)
    dataset = simulate(
        suite.sim_model,
        "dataset1",
        parameters,
        {
            "global": suite.global_axis,
            "model": suite.model_axis
        },
    )
    scheme = Scheme(model=model,
                    parameters=parameters,
                    data={"dataset1": dataset})
    optimization_group = OptimizationGroup(
        scheme,
        model.get_dataset_groups()["default"])

    if index_dependent:
        reduced_matrix = (optimization_group.reduced_matrices[0]
                          if link_clp else
                          optimization_group.reduced_matrices["dataset1"][0])
    else:
        reduced_matrix = optimization_group.reduced_matrices["dataset1"]
    matrix = (optimization_group.matrices["dataset1"][0]
              if index_dependent else optimization_group.matrices["dataset1"])

    result_data = optimization_group.create_result_data()
    print(result_data)
    clps = result_data["dataset1"].clp

    assert "s2" not in reduced_matrix.clp_labels
    assert "s2" in clps.coords["clp_label"]
    assert clps.sel(clp_label="s2") == clps.sel(clp_label="s1") * 2
    assert "s2" in matrix.clp_labels
Example #14
def test_save_scheme(tmp_path: Path):
    save_model(MODEL, tmp_path / "m.yml")
    save_parameters(PARAMETERS, tmp_path / "p.csv")
    save_dataset(DATASET, tmp_path / "d.nc")
    scheme = Scheme(
        MODEL,
        PARAMETERS,
        {"dataset_1": DATASET},
    )
    scheme_path = tmp_path / "testscheme.yml"
    save_scheme(file_name=scheme_path, format_name="yml", scheme=scheme)

    assert scheme_path.is_file()
    assert scheme_path.read_text() == want
    loaded = load_scheme(scheme_path)
    print(loaded.model.validate(loaded.parameters))
    assert loaded.model.valid(loaded.parameters)
    assert isinstance(scheme.data["dataset_1"], xr.Dataset)
Example #15
def problem(request) -> Problem:
    model = suite.model
    model.is_grouped = request.param[0]
    model.is_index_dependent = request.param[1]

    dataset = simulate(
        suite.sim_model,
        "dataset1",
        suite.wanted_parameters,
        {
            "e": suite.e_axis,
            "c": suite.c_axis
        },
    )
    scheme = Scheme(model=model,
                    parameters=suite.initial_parameters,
                    data={"dataset1": dataset})
    return Problem(scheme)
Example #16
def test_full_model_problem():
    dataset = simulate(FullModel.model, "dataset1", FullModel.parameters,
                       FullModel.coordinates)
    scheme = Scheme(model=FullModel.model,
                    parameters=FullModel.parameters,
                    data={"dataset1": dataset})
    optimization_group = OptimizationGroup(
        scheme,
        FullModel.model.get_dataset_groups()["default"])

    result = optimization_group.create_result_data()["dataset1"]
    assert "global_matrix" in result
    assert "global_clp_label" in result

    clp = result.clp

    assert clp.shape == (4, 4)
    print(np.diagonal(clp))
    assert all(np.isclose(1.0, c) for c in np.diagonal(clp))
Example #17
def optimization_group(request) -> OptimizationGroup:
    model = suite.model
    model.megacomplex["m1"].is_index_dependent = request.param[1]
    model.is_index_dependent = request.param[1]
    model.dataset_group_models["default"].link_clp = request.param[0]

    dataset = simulate(
        suite.sim_model,
        "dataset1",
        suite.wanted_parameters,
        {
            "global": suite.global_axis,
            "model": suite.model_axis
        },
    )
    scheme = Scheme(model=model,
                    parameters=suite.initial_parameters,
                    data={"dataset1": dataset})

    return OptimizationGroup(scheme, model.get_dataset_groups()["default"])
Example #18
def test_penalties(index_dependent, link_clp):
    model = deepcopy(suite.model)
    model.dataset_group_models["default"].link_clp = link_clp
    model.megacomplex["m1"].is_index_dependent = index_dependent
    model.clp_area_penalties.append(
        EqualAreaPenalty.from_dict({
            "source": "s1",
            "source_intervals": [(1, 20)],
            "target": "s2",
            "target_intervals": [(20, 45)],
            "parameter": "3",
            "weight": 10,
        }))
    parameters = ParameterGroup.from_list([11e-4, 22e-5, 2])

    global_axis = np.arange(50)

    print(f"{link_clp=}\n{index_dependent=}")
    dataset = simulate(
        suite.sim_model,
        "dataset1",
        parameters,
        {
            "global": global_axis,
            "model": suite.model_axis
        },
    )
    scheme = Scheme(model=model,
                    parameters=parameters,
                    data={"dataset1": dataset})
    optimization_group = OptimizationGroup(
        scheme,
        model.get_dataset_groups()["default"])

    assert isinstance(optimization_group.additional_penalty, np.ndarray)
    assert optimization_group.additional_penalty.size == 1
    assert optimization_group.additional_penalty[0] != 0
    assert isinstance(optimization_group.full_penalty, np.ndarray)
    assert (optimization_group.full_penalty.size == (suite.model_axis.size *
                                                     global_axis.size) +
            optimization_group.additional_penalty.size)
Example #19
def save_scheme(
    scheme: Scheme,
    file_name: StrOrPath,
    format_name: str = None,
    *,
    allow_overwrite: bool = False,
    update_source_path: bool = True,
    **kwargs: Any,
) -> None:
    """Save a :class:`Scheme` instance to a spec file.

    Parameters
    ----------
    scheme : Scheme
        :class:`Scheme` instance to save to specs file.
    file_name : StrOrPath
        File to write the scheme specs to.
    format_name : str
        Format the file should be in; if not provided, it will be inferred from the file extension.
    allow_overwrite : bool
        Whether or not to allow overwriting existing files, by default False.
    update_source_path : bool
        Whether or not to update the ``source_path`` attribute to ``file_name`` when saving,
        by default True.
    **kwargs : Any
        Additional keyword arguments passed to the ``save_scheme`` implementation
        of the project io plugin.
    """
    protect_from_overwrite(file_name, allow_overwrite=allow_overwrite)
    io = get_project_io(format_name or inferr_file_format(file_name, needs_to_exist=False))
    io.save_scheme(  # type: ignore[call-arg]
        file_name=Path(file_name).as_posix(),
        scheme=scheme,
        **kwargs,
    )
    if update_source_path is True:
        scheme.source_path = Path(file_name).as_posix()
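A minimal usage sketch for the function above; the ``glotaran.io`` import path and the ready-made ``scheme`` object are assumptions based on what Example #14 exercises, not part of this snippet:

from pathlib import Path

from glotaran.io import load_scheme, save_scheme

def roundtrip_scheme(scheme, out_dir: Path):
    # Write the scheme spec; the "yml" format is inferred from the file extension.
    scheme_path = out_dir / "scheme.yml"
    save_scheme(scheme=scheme, file_name=scheme_path, allow_overwrite=True)
    # update_source_path defaults to True, so the scheme now points at the saved file.
    assert scheme.source_path == scheme_path.as_posix()
    # Load it back, as Example #14 does, for a round-trip consistency check.
    return load_scheme(scheme_path)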
Example #20
def test_optimization(suite, is_index_dependent, link_clp, weight, method):
    model = suite.model

    model.megacomplex["m1"].is_index_dependent = is_index_dependent

    print("Link CLP:", link_clp)
    print("Index dependent:", is_index_dependent)

    sim_model = suite.sim_model
    sim_model.megacomplex["m1"].is_index_dependent = is_index_dependent

    print(model.validate())
    assert model.valid()

    print(sim_model.validate())
    assert sim_model.valid()

    wanted_parameters = suite.wanted_parameters
    print(wanted_parameters)
    print(sim_model.validate(wanted_parameters))
    assert sim_model.valid(wanted_parameters)

    initial_parameters = suite.initial_parameters
    print(initial_parameters)
    print(model.validate(initial_parameters))
    assert model.valid(initial_parameters)
    assert (
        model.dataset["dataset1"].fill(model, initial_parameters).is_index_dependent()
        == is_index_dependent
    )

    nr_datasets = 3 if issubclass(suite, ThreeDatasetDecay) else 1
    data = {}
    for i in range(nr_datasets):
        global_axis = getattr(suite, "global_axis" if i == 0 else f"global_axis{i+1}")
        model_axis = getattr(suite, "model_axis" if i == 0 else f"model_axis{i+1}")

        dataset = simulate(
            sim_model,
            f"dataset{i+1}",
            wanted_parameters,
            {"global": global_axis, "model": model_axis},
        )
        print(f"Dataset {i+1}")
        print("=============")
        print(dataset)

        if hasattr(suite, "scale"):
            dataset["data"] /= suite.scale

        if weight:
            dataset["weight"] = xr.DataArray(
                np.ones_like(dataset.data) * 0.5, coords=dataset.data.coords
            )

        assert dataset.data.shape == (model_axis.size, global_axis.size)

        data[f"dataset{i+1}"] = dataset

    scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data=data,
        maximum_number_function_evaluations=10,
        clp_link_tolerance=0.1,
        optimization_method=method,
    )

    model.dataset_group_models["default"].link_clp = link_clp

    result = optimize(scheme, raise_exception=True)
    print(result.optimized_parameters)
    assert result.success
    optimized_scheme = result.get_scheme()
    assert result.optimized_parameters == optimized_scheme.parameters
    for dataset in optimized_scheme.data.values():
        assert "fitted_data" not in dataset
        if weight:
            assert "weight" in dataset
    for label, param in result.optimized_parameters.all():
        if param.vary:
            assert np.allclose(param.value, wanted_parameters.get(label).value, rtol=1e-1)

    for i, dataset in enumerate(data.values()):
        resultdata = result.data[f"dataset{i+1}"]
        print(f"Result Data {i+1}")
        print("=================")
        print(resultdata)
        assert "residual" in resultdata
        assert "residual_left_singular_vectors" in resultdata
        assert "residual_right_singular_vectors" in resultdata
        assert "residual_singular_values" in resultdata
        assert np.array_equal(dataset.coords["model"], resultdata.coords["model"])
        assert np.array_equal(dataset.coords["global"], resultdata.coords["global"])
        assert dataset.data.shape == resultdata.data.shape
        print(dataset.data[0, 0], resultdata.data[0, 0])
        assert np.allclose(dataset.data, resultdata.data)
        if weight:
            assert "weight" in resultdata
            assert "weighted_residual" in resultdata
            assert "weighted_residual_left_singular_vectors" in resultdata
            assert "weighted_residual_right_singular_vectors" in resultdata
            assert "weighted_residual_singular_values" in resultdata
Example #21
def test_spectral_constraint():
    model = KineticSpectrumModel.from_dict({
        "initial_concentration": {
            "j1": {
                "compartments": ["s1", "s2"],
                "parameters": ["i.1", "i.2"],
            },
        },
        "megacomplex": {
            "mc1": {
                "k_matrix": ["k1"]
            },
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s2", "s1"): "kinetic.1",
                    ("s2", "s2"): "kinetic.2",
                }
            }
        },
        "spectral_constraints": [
            {
                "type": "zero",
                "compartment": "s2",
                "interval": (float("-inf"), float("inf"))
            },
        ],
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1"],
            },
        },
    })
    print(model)

    wanted_parameters = ParameterGroup.from_dict({
        "kinetic": [1e-4, 1e-5],
        "i": [1, 2],
    })
    initial_parameters = ParameterGroup.from_dict({
        "kinetic": [2e-4, 2e-5],
        "i": [1, 2, {
            "vary": False
        }],
    })

    time = np.asarray(np.arange(0, 50, 1.5))
    dataset = model.dataset["dataset1"].fill(model, wanted_parameters)
    compartments, matrix = kinetic_image_matrix(dataset, time, 0)

    assert len(compartments) == 2
    assert matrix.shape == (time.size, 2)

    reduced_compartments, reduced_matrix = apply_spectral_constraints(
        model, compartments, matrix, 1)

    print(reduced_matrix)
    assert len(reduced_compartments) == 1
    assert reduced_matrix.shape == (time.size, 1)

    reduced_compartments, reduced_matrix = model.constrain_matrix_function(
        "dataset1", wanted_parameters, compartments, matrix, 1)

    assert reduced_matrix.shape == (time.size, 1)

    clp = xr.DataArray([[1.0, 10.0, 20.0, 1]],
                       coords=(("spectral", [1]), ("clp_label",
                                                   ["s1", "s2", "s3", "s4"])))

    data = model.simulate("dataset1",
                          wanted_parameters,
                          clp=clp,
                          axes={
                              "time": time,
                              "spectral": np.array([1])
                          })

    dataset = {"dataset1": data}
    scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data=dataset,
        maximum_number_function_evaluations=20,
    )

    # the resulting jacobian is singular
    with pytest.warns(UserWarning):
        result = optimize(scheme)

    result_data = result.data["dataset1"]
    print(result_data.clp_label)
    print(result_data.clp)
    #  TODO: save reduced clp
    #  assert result_data.clp.shape == (1, 1)

    print(result_data.species_associated_spectra)
    assert result_data.species_associated_spectra.shape == (1, 2)
    assert result_data.species_associated_spectra[0, 1] == 0
Example #22
def test_multi_dataset_overlap():
    model = SimpleTestModel.from_dict({
        "megacomplex": {
            "m1": {
                "is_index_dependent": False
            }
        },
        "dataset_groups": {
            "default": {
                "link_clp": True
            }
        },
        "dataset": {
            "dataset1": {
                "megacomplex": ["m1"],
            },
            "dataset2": {
                "megacomplex": ["m1"],
            },
        },
    })

    model.grouped = lambda: True
    print(model.validate())
    assert model.valid()
    assert model.grouped()

    parameters = ParameterGroup.from_list([1, 10])
    print(model.validate(parameters))
    assert model.valid(parameters)

    global_axis_1 = [1, 2, 3, 5]
    model_axis_1 = [5, 7]
    global_axis_2 = [0, 1.4, 2.4, 3.4, 9]
    model_axis_2 = [5, 7, 9, 12]
    data = {
        "dataset1":
        xr.DataArray(np.ones((4, 2)),
                     coords=[("global", global_axis_1),
                             ("model", model_axis_1)]).to_dataset(name="data"),
        "dataset2":
        xr.DataArray(np.ones((5, 4)),
                     coords=[("global", global_axis_2),
                             ("model", model_axis_2)]).to_dataset(name="data"),
    }

    scheme = Scheme(model, parameters, data, clp_link_tolerance=5e-1)
    optimization_group = OptimizationGroup(
        scheme,
        model.get_dataset_groups()["default"])
    bag = list(optimization_group._calculator.bag)
    assert len(optimization_group._calculator.groups) == 3
    assert "dataset1dataset2" in optimization_group._calculator.groups
    assert optimization_group._calculator.groups["dataset1dataset2"] == [
        "dataset1", "dataset2"
    ]
    assert len(bag) == 6

    assert all(p.data.size == 4 for p in bag[:1])
    assert all(p.dataset_models[0].label == "dataset1" for p in bag[1:5])
    assert all(
        all(p.dataset_models[0].axis["model"] == model_axis_1)
        for p in bag[1:5])
    assert all(
        all(p.dataset_models[0].axis["global"] == global_axis_1)
        for p in bag[1:5])
    assert [p.dataset_models[0].indices["global"]
            for p in bag[1:5]] == [0, 1, 2, 3]

    assert all(p.data.size == 6 for p in bag[1:4])
    assert all(p.dataset_models[1].label == "dataset2" for p in bag[1:4])
    assert all(
        all(p.dataset_models[1].axis["model"] == model_axis_2)
        for p in bag[1:4])
    assert all(
        all(p.dataset_models[1].axis["global"] == global_axis_2)
        for p in bag[1:4])
    assert [p.dataset_models[1].indices["global"]
            for p in bag[1:4]] == [1, 2, 3]

    assert all(p.data.size == 4 for p in bag[5:])
    assert bag[4].dataset_models[0].label == "dataset1"
    assert bag[5].dataset_models[0].label == "dataset2"
    assert np.array_equal(bag[4].dataset_models[0].axis["model"], model_axis_1)
    assert np.array_equal(bag[5].dataset_models[0].axis["model"], model_axis_2)
    assert [p.dataset_models[0].indices["global"]
            for p in bag[1:4]] == [0, 1, 2]
Example #23
def test_equal_area_penalties(debug=False):
    # %%

    optim_spec = OptimizationSpec(nnls=True, max_nfev=999)
    noise_spec = NoiseSpec(active=True, seed=1, std_dev=1e-8)

    wavelengths = np.arange(650, 670, 2)
    time_p1 = np.linspace(-1, 2, 50, endpoint=False)
    time_p2 = np.linspace(2, 10, 30, endpoint=False)
    time_p3 = np.geomspace(10, 50, num=20)
    times = np.concatenate([time_p1, time_p2, time_p3])

    irf_loc = float(times[20])
    irf_width = float((times[1] - times[0]) * 10)
    irf = IrfSpec(irf_loc, irf_width)

    amplitude = 1
    location1 = float(wavelengths[2])  # 2
    location2 = float(wavelengths[-3])  # -3
    width1 = float((wavelengths[1] - wavelengths[0]) * 5)
    width2 = float((wavelengths[1] - wavelengths[0]) * 3)
    shape1 = ShapeSpec(amplitude, location1, width1)
    shape2 = ShapeSpec(amplitude, location2, width2)
    dataset_spec = DatasetSpec(times, wavelengths, irf, [shape1, shape2])

    wavelengths = dataset_spec.wavelengths
    equ_interval = [(min(wavelengths), max(wavelengths))]
    weight = 0.01
    # %% The base model specification (mspec)
    base = {
        "initial_concentration": {
            "j1": {
                "compartments": ["s1", "s2"],
                "parameters": ["i.1", "i.2"],
            },
        },
        "megacomplex": {
            "mc1": {"k_matrix": ["k1"]},
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "kinetic.1",
                    ("s2", "s2"): "kinetic.2",
                }
            }
        },
        "irf": {
            "irf1": {"type": "gaussian", "center": "irf.center", "width": "irf.width"},
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1"],
                "irf": "irf1",
            },
        },
    }

    shape = {
        "shape": {
            "sh1": {
                "type": "gaussian",
                "amplitude": "shapes.amps.1",
                "location": "shapes.locs.1",
                "width": "shapes.width.1",
            },
            "sh2": {
                "type": "gaussian",
                "amplitude": "shapes.amps.2",
                "location": "shapes.locs.2",
                "width": "shapes.width.2",
            },
        }
    }

    dataset_shape = {
        "shape": {
            "s1": "sh1",
            "s2": "sh2",
        }
    }

    equ_area = {
        "equal_area_penalties": [
            {
                "source": "s1",
                "target": "s2",
                "parameter": "rela.1",
                "source_intervals": equ_interval,
                "target_intervals": equ_interval,
                "weight": weight,
            },
        ],
    }
    mspec = ModelSpec(base, shape, dataset_shape, equ_area)

    rela = 1.0  # relation between areas
    irf = dataset_spec.irf
    [sh1, sh2] = dataset_spec.shapes
    pspec_base = {
        "kinetic": [1e-1, 5e-3],
        "i": [0.5, 0.5, {"vary": False}],
        "irf": [["center", irf.location], ["width", irf.width]],
    }
    pspec_equa_area = {
        "rela": [rela, {"vary": False}],
    }
    pspec_shape = {
        "shapes": {
            "amps": [sh1.amplitude, sh2.amplitude],
            "locs": [sh1.location, sh2.location],
            "width": [sh1.width, sh2.width],
        },
    }
    pspec = ParameterSpec(pspec_base, pspec_equa_area, pspec_shape)

    # derived model specs:
    mspec_sim = dict(deepcopy(mspec.base), **mspec.shape)
    mspec_sim["dataset"]["dataset1"].update(mspec.dataset_shape)

    mspec_fit_wp = dict(deepcopy(mspec.base), **mspec.equ_area)
    mspec_fit_np = dict(deepcopy(mspec.base))

    model_sim = KineticSpectrumModel.from_dict(mspec_sim)
    model_wp = KineticSpectrumModel.from_dict(mspec_fit_wp)
    model_np = KineticSpectrumModel.from_dict(mspec_fit_np)
    print(model_np)

    # %% Parameter specification (pspec)

    pspec_sim = dict(deepcopy(pspec.base), **pspec.shapes)
    param_sim = ParameterGroup.from_dict(pspec_sim)

    # For the wp model we create two versions of the parameter specification:
    # one has all inputs fixed, the other has all but the first free.
    # For both we perturb the kinetic parameters a bit to give the optimizer some work.
    pspec_wp = dict(deepcopy(pspec.base), **pspec.equal_area)
    pspec_wp["kinetic"] = [v * 1.01 for v in pspec_wp["kinetic"]]
    pspec_wp.update({"i": [[1, {"vary": False}], 1]})

    pspec_np = dict(deepcopy(pspec.base))

    param_wp = ParameterGroup.from_dict(pspec_wp)
    param_np = ParameterGroup.from_dict(pspec_np)

    # %% Print models with parameters
    print(model_sim.markdown(param_sim))
    print(model_wp.markdown(param_wp))
    print(model_np.markdown(param_np))

    # %%
    simulated_data = model_sim.simulate(
        "dataset1",
        param_sim,
        axes={"time": times, "spectral": wavelengths},
        noise=noise_spec.active,
        noise_std_dev=noise_spec.std_dev,
        noise_seed=noise_spec.seed,
    )
    # %%
    simulated_data = prepare_time_trace_dataset(simulated_data)
    # make a copy to keep an intact reference
    data = deepcopy(simulated_data)

    # %% Optimizing model without penalty (np)

    dataset = {"dataset1": data}
    scheme_np = Scheme(
        model=model_np,
        parameters=param_np,
        data=dataset,
        non_negative_least_squares=optim_spec.nnls,
        maximum_number_function_evaluations=optim_spec.max_nfev,
    )
    result_np = optimize(scheme_np)
    print(result_np)

    # %% Optimizing model with penalty fixed inputs (wp_ifix)
    scheme_wp = Scheme(
        model=model_wp,
        parameters=param_wp,
        data=dataset,
        non_negative_least_squares=optim_spec.nnls,
        maximum_number_function_evaluations=optim_spec.max_nfev,
    )
    result_wp = optimize(scheme_wp)
    print(result_wp)

    if debug:
        # %% Plot results
        plt_spec = importlib.util.find_spec("matplotlib")
        if plt_spec is not None:
            import matplotlib.pyplot as plt

            plot_overview(result_np.data["dataset1"], "no penalties")
            plot_overview(result_wp.data["dataset1"], "with penalties")
            plt.show()

    # %% Test calculation
    print(result_wp.data["dataset1"])
    area1_np = np.sum(result_np.data["dataset1"].species_associated_spectra.sel(species="s1"))
    area2_np = np.sum(result_np.data["dataset1"].species_associated_spectra.sel(species="s2"))
    assert not np.isclose(area1_np, area2_np)

    area1_wp = np.sum(result_wp.data["dataset1"].species_associated_spectra.sel(species="s1"))
    area2_wp = np.sum(result_wp.data["dataset1"].species_associated_spectra.sel(species="s2"))
    assert np.isclose(area1_wp, area2_wp)

    input_ratio = result_wp.optimized_parameters.get("i.1") / result_wp.optimized_parameters.get(
        "i.2"
    )
    assert np.isclose(input_ratio, 1.5038858115)
Example #24
def test_coherent_artifact():
    model_dict = {
        "initial_concentration": {
            "j1": {
                "compartments": ["s1"],
                "parameters": ["2"]
            },
        },
        "megacomplex": {
            "mc1": {
                "k_matrix": ["k1"]
            },
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "1",
                }
            }
        },
        "irf": {
            "irf1": {
                "type": "gaussian-coherent-artifact",
                "center": "2",
                "width": "3",
                "coherent_artifact_order": 3,
            },
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1"],
                "irf": "irf1",
            },
        },
    }
    model = KineticSpectrumModel.from_dict(model_dict.copy())

    parameters = ParameterGroup.from_list([
        101e-4,
        [10, {
            "vary": False,
            "non-negative": False
        }],
        [20, {
            "vary": False,
            "non-negative": False
        }],
        [30, {
            "vary": False,
            "non-negative": False
        }],
    ])

    time = np.asarray(np.arange(0, 50, 1.5))

    irf = model.irf["irf1"].fill(model, parameters)
    irf_same_width = irf.calculate_coherent_artifact(time)

    model_dict["irf"]["irf1"]["coherent_artifact_width"] = "4"
    model = KineticSpectrumModel.from_dict(model_dict)

    irf = model.irf["irf1"].fill(model, parameters)
    irf_diff_width = irf.calculate_coherent_artifact(time)

    assert np.array_equal(irf_same_width[0],
                          irf_diff_width[0])  # labels the same
    assert not np.array_equal(irf_same_width[1],
                              irf_diff_width[1])  # but content is not

    data = model.dataset["dataset1"].fill(model, parameters)
    compartments, matrix = kinetic_spectrum_matrix(data, time, 0)

    assert len(compartments) == 4
    for i in range(1, 4):
        assert compartments[i] == f"coherent_artifact_{i}"

    assert matrix.shape == (time.size, 4)

    clp = xr.DataArray(
        [[1, 1, 1, 1]],
        coords=[
            ("spectral", [0]),
            (
                "clp_label",
                [
                    "s1",
                    "coherent_artifact_1",
                    "coherent_artifact_2",
                    "coherent_artifact_3",
                ],
            ),
        ],
    )
    axis = {"time": time, "spectral": clp.spectral}
    data = model.simulate("dataset1", parameters, axis, clp)

    dataset = {"dataset1": data}
    scheme = Scheme(model=model,
                    parameters=parameters,
                    data=dataset,
                    maximum_number_function_evaluations=20)
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value, parameters.get(label).value, rtol=1e-1)

    resultdata = result.data["dataset1"]
    assert np.array_equal(data.time, resultdata.time)
    assert np.array_equal(data.spectral, resultdata.spectral)
    assert data.data.shape == resultdata.data.shape
    assert data.data.shape == resultdata.fitted_data.shape
    assert np.allclose(data.data, resultdata.fitted_data, rtol=1e-2)

    assert "coherent_artifact_concentration" in resultdata
    assert resultdata["coherent_artifact_concentration"].shape == (time.size,
                                                                   3)

    assert "coherent_artifact_associated_spectra" in resultdata
    assert resultdata["coherent_artifact_associated_spectra"].shape == (1, 3)
Example #25
def test_multiple_groups():
    wanted_parameters = ParameterGroup.from_list([101e-4])
    initial_parameters = ParameterGroup.from_list([100e-5])

    global_axis = np.asarray([1.0])
    model_axis = np.arange(0, 150, 1.5)

    sim_model_dict = {
        "megacomplex": {
            "m1": {
                "is_index_dependent": False
            },
            "m2": {
                "type": "global_complex"
            }
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "global_megacomplex": ["m2"],
                "kinetic": ["1"],
            }
        },
    }
    sim_model = DecayModel.from_dict(sim_model_dict)
    model_dict = {
        "dataset_groups": {
            "g1": {},
            "g2": {
                "residual_function": "non_negative_least_squares"
            }
        },
        "megacomplex": {
            "m1": {
                "is_index_dependent": False
            }
        },
        "dataset": {
            "dataset1": {
                "group": "g1",
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "kinetic": ["1"],
            },
            "dataset2": {
                "group": "g2",
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "kinetic": ["1"],
            },
        },
    }
    model = DecayModel.from_dict(model_dict)
    dataset = simulate(
        sim_model,
        "dataset1",
        wanted_parameters,
        {
            "global": global_axis,
            "model": model_axis
        },
    )
    scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data={
            "dataset1": dataset,
            "dataset2": dataset
        },
        maximum_number_function_evaluations=10,
        clp_link_tolerance=0.1,
    )

    result = optimize(scheme, raise_exception=True)
    print(result.optimized_parameters)
    assert result.success
    for label, param in result.optimized_parameters.all():
        if param.vary:
            assert np.allclose(param.value,
                               wanted_parameters.get(label).value,
                               rtol=1e-1)
Example #26
def test_coherent_artifact(spectral_dependence: str):
    model_dict = {
        "initial_concentration": {
            "j1": {"compartments": ["s1"], "parameters": ["irf_center"]},
        },
        "megacomplex": {
            "mc1": {"type": "decay", "k_matrix": ["k1"]},
            "mc2": {"type": "coherent-artifact", "order": 3},
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "rate",
                }
            }
        },
        "irf": {
            "irf1": {
                "type": "spectral-multi-gaussian",
                "center": ["irf_center"],
                "width": ["irf_width"],
            },
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1", "mc2"],
                "irf": "irf1",
            },
        },
    }

    parameter_list = [
        ["rate", 101e-4],
        ["irf_center", 10, {"vary": False, "non-negative": False}],
        ["irf_width", 20, {"vary": False, "non-negative": False}],
    ]

    irf_spec = model_dict["irf"]["irf1"]

    if spectral_dependence == "dispersed":
        irf_spec["dispersion_center"] = "irf_dispc"
        irf_spec["center_dispersion"] = ["irf_disp1", "irf_disp2"]

        parameter_list += [
            ["irf_dispc", 300, {"vary": False, "non-negative": False}],
            ["irf_disp1", 0.01, {"vary": False, "non-negative": False}],
            ["irf_disp2", 0.001, {"vary": False, "non-negative": False}],
        ]
    elif spectral_dependence == "shifted":

        irf_spec["shift"] = ["irf_shift1", "irf_shift2", "irf_shift3"]
        parameter_list += [
            ["irf_shift1", -2],
            ["irf_shift2", 0],
            ["irf_shift3", 2],
        ]

    model = Model.from_dict(
        model_dict.copy(),
        megacomplex_types={
            "decay": DecayMegacomplex,
            "coherent-artifact": CoherentArtifactMegacomplex,
        },
    )

    parameters = ParameterGroup.from_list(parameter_list)

    time = np.arange(0, 50, 1.5)
    spectral = np.asarray([200, 300, 400])
    coords = {"time": time, "spectral": spectral}

    dataset_model = model.dataset["dataset1"].fill(model, parameters)
    dataset_model.overwrite_global_dimension("spectral")
    dataset_model.set_coordinates(coords)
    matrix = calculate_matrix(dataset_model, {"spectral": 1})
    compartments = matrix.clp_labels

    print(compartments)
    assert len(compartments) == 4
    for i in range(1, 4):
        assert compartments[i] == f"coherent_artifact_{i}"

    assert matrix.matrix.shape == (time.size, 4)

    clp = xr.DataArray(
        np.ones((3, 4)),
        coords=[
            ("spectral", spectral),
            (
                "clp_label",
                [
                    "s1",
                    "coherent_artifact_1",
                    "coherent_artifact_2",
                    "coherent_artifact_3",
                ],
            ),
        ],
    )
    axis = {"time": time, "spectral": clp.spectral}
    data = simulate(model, "dataset1", parameters, axis, clp)

    dataset = {"dataset1": data}
    scheme = Scheme(
        model=model, parameters=parameters, data=dataset, maximum_number_function_evaluations=20
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value, parameters.get(label).value, rtol=1e-8)

    resultdata = result.data["dataset1"]
    assert np.array_equal(data.time, resultdata.time)
    assert np.array_equal(data.spectral, resultdata.spectral)
    assert data.data.shape == resultdata.data.shape
    assert data.data.shape == resultdata.fitted_data.shape
    assert np.allclose(data.data, resultdata.fitted_data)

    assert "coherent_artifact_response" in resultdata
    if spectral_dependence == "none":
        assert resultdata["coherent_artifact_response"].shape == (time.size, 3)
    else:
        assert resultdata["coherent_artifact_response"].shape == (spectral.size, time.size, 3)

    assert "coherent_artifact_associated_spectra" in resultdata
    assert resultdata["coherent_artifact_associated_spectra"].shape == (3, 3)
Example #27
SIMULATION_MODEL_YML = generate_model_yml(
    generator_name="spectral_decay_parallel",
    generator_arguments={
        "nr_compartments": 3,
        "irf": True
    },
)
SIMULATION_MODEL = load_model(SIMULATION_MODEL_YML, format_name="yml_str")

MODEL_YML = generate_model_yml(
    generator_name="decay_parallel",
    generator_arguments={
        "nr_compartments": 3,
        "irf": True
    },
)
MODEL = load_model(MODEL_YML, format_name="yml_str")

DATASET = simulate(
    SIMULATION_MODEL,
    "dataset_1",
    SIMULATION_PARAMETERS,
    SIMULATION_COORDINATES,
    noise=True,
    noise_std_dev=1e-2,
)

SCHEME = Scheme(model=MODEL,
                parameters=PARAMETERS,
                data={"dataset_1": DATASET})
Example #28
def test_spectral_relation():
    model = KineticSpectrumModel.from_dict({
        "initial_concentration": {
            "j1": {
                "compartments": ["s1", "s2", "s3", "s4"],
                "parameters": ["i.1", "i.2", "i.3", "i.4"],
            },
        },
        "megacomplex": {
            "mc1": {
                "k_matrix": ["k1"]
            },
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "kinetic.1",
                    ("s2", "s2"): "kinetic.1",
                    ("s3", "s3"): "kinetic.1",
                    ("s4", "s4"): "kinetic.1",
                }
            }
        },
        "spectral_relations": [
            {
                "compartment": "s1",
                "target": "s2",
                "parameter": "rel.1",
                "interval": [(0, 2)],
            },
            {
                "compartment": "s1",
                "target": "s3",
                "parameter": "rel.2",
                "interval": [(0, 2)],
            },
        ],
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1"],
            },
        },
    })
    print(model)

    rel1, rel2 = 10, 20
    parameters = ParameterGroup.from_dict({
        "kinetic": [1e-4],
        "i": [1, 2, 3, 4],
        "rel": [rel1, rel2],
    })

    time = np.asarray(np.arange(0, 50, 1.5))
    dataset = model.dataset["dataset1"].fill(model, parameters)
    compartments, matrix = kinetic_image_matrix(dataset, time, 0)

    assert len(compartments) == 4
    assert matrix.shape == (time.size, 4)

    reduced_compartments, relation_matrix = create_spectral_relation_matrix(
        model, "dataset1", parameters, compartments, matrix, 1)

    print(relation_matrix)
    assert len(reduced_compartments) == 2
    assert relation_matrix.shape == (4, 2)
    assert np.array_equal(
        relation_matrix,
        [
            [1.0, 0.0],
            [10.0, 0.0],
            [20.0, 0.0],
            [0.0, 1.0],
        ],
    )

    reduced_compartments, reduced_matrix = model.constrain_matrix_function(
        "dataset1", parameters, compartments, matrix, 1)

    assert reduced_matrix.shape == (time.size, 2)

    print(reduced_matrix[0, 0], matrix[0, 0], matrix[0, 1], matrix[0, 2])
    assert np.allclose(
        reduced_matrix[:, 0],
        matrix[:, 0] + rel1 * matrix[:, 1] + rel2 * matrix[:, 2])

    clp = xr.DataArray([[1.0, 10.0, 20.0, 1]],
                       coords=(("spectral", [1]), ("clp_label",
                                                   ["s1", "s2", "s3", "s4"])))

    data = model.simulate("dataset1",
                          parameters,
                          clp=clp,
                          axes={
                              "time": time,
                              "spectral": np.array([1])
                          })

    dataset = {"dataset1": data}
    scheme = Scheme(model=model,
                    parameters=parameters,
                    data=dataset,
                    maximum_number_function_evaluations=20)
    result = optimize(scheme)

    for label, param in result.optimized_parameters.all():
        if param.vary:
            assert np.allclose(param.value,
                               parameters.get(label).value,
                               rtol=1e-1)

    result_data = result.data["dataset1"]
    print(result_data.species_associated_spectra)
    assert result_data.species_associated_spectra.shape == (1, 4)
    assert (result_data.species_associated_spectra[0, 1] == rel1 *
            result_data.species_associated_spectra[0, 0])
    assert np.allclose(
        result_data.species_associated_spectra[0, 2].values,
        rel2 * result_data.species_associated_spectra[0, 0].values,
    )
Example #29
def test_spectral_irf(suite):

    model = suite.model
    assert model.valid(), model.validate()

    parameters = suite.parameters
    assert model.valid(parameters), model.validate(parameters)

    sim_model = deepcopy(model)
    sim_model.dataset["dataset1"].global_megacomplex = ["mc2"]
    dataset = simulate(sim_model, "dataset1", parameters, suite.axis)

    assert dataset.data.shape == (suite.axis["time"].size,
                                  suite.axis["spectral"].size)

    data = {"dataset1": dataset}

    scheme = Scheme(
        model=model,
        parameters=parameters,
        data=data,
        maximum_number_function_evaluations=20,
    )
    result = optimize(scheme)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value,
                           parameters.get(label).value), dedent(f"""
            Error in {suite.__name__} comparing {param.full_label},
            - diff={param.value-parameters.get(label).value}
            """)

    resultdata = result.data["dataset1"]

    # print(resultdata)
    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["spectral"], resultdata["spectral"])
    assert dataset.data.shape == resultdata.data.shape
    assert dataset.data.shape == resultdata.fitted_data.shape
    # assert np.allclose(dataset.data, resultdata.fitted_data, atol=1e-14)

    fit_data_max_at_start = resultdata.fitted_data.isel(spectral=0).argmax(
        axis=0)
    fit_data_max_at_end = resultdata.fitted_data.isel(spectral=-1).argmax(
        axis=0)

    if suite is NoIrfDispersion:
        assert "center_dispersion_1" not in resultdata
        assert fit_data_max_at_start == fit_data_max_at_end
    else:
        assert "center_dispersion_1" in resultdata
        assert fit_data_max_at_start != fit_data_max_at_end
        if abs(fit_data_max_at_start - fit_data_max_at_end) < 3:
            warnings.warn(
                dedent("""
                    Bad test, one of the following could be the case:
                    - dispersion too small
                    - spectral window too small
                    - time resolution (around the maximum of the IRF) too low
                    """))

        for x in suite.axis["spectral"]:
            # calculated irf location
            model_irf_center = suite.model.irf["irf1"].center
            model_dispersion_center = suite.model.irf["irf1"].dispersion_center
            model_center_dispersion_coefficients = suite.model.irf[
                "irf1"].center_dispersion_coefficients
            calc_irf_location_at_x = _calculate_irf_position(
                x, model_irf_center, model_dispersion_center,
                model_center_dispersion_coefficients)
            # fitted irf location
            fitted_irf_loc_at_x = resultdata["irf_center_location"].sel(
                spectral=x)
            assert np.allclose(calc_irf_location_at_x,
                               fitted_irf_loc_at_x.values), dedent(f"""
                Error in {suite.__name__} comparing irf_center_location,
                - diff={calc_irf_location_at_x-fitted_irf_loc_at_x.values}
                """)

    assert "species_associated_spectra" in resultdata
    assert "decay_associated_spectra" in resultdata
    assert "irf_center" in resultdata
Example #30
def test_multi_dataset_no_overlap():
    model = SimpleTestModel.from_dict({
        "megacomplex": {
            "m1": {
                "is_index_dependent": False
            }
        },
        "dataset_groups": {
            "default": {
                "link_clp": True
            }
        },
        "dataset": {
            "dataset1": {
                "megacomplex": ["m1"],
            },
            "dataset2": {
                "megacomplex": ["m1"],
            },
        },
    })

    model.grouped = lambda: True
    print(model.validate())
    assert model.valid()
    assert model.grouped()

    parameters = ParameterGroup.from_list([1, 10])
    print(model.validate(parameters))
    assert model.valid(parameters)

    global_axis_1 = [1, 2, 3]
    model_axis_1 = [5, 7]
    global_axis_2 = [4, 5, 6]
    model_axis_2 = [5, 7, 9]
    data = {
        "dataset1":
        xr.DataArray(np.ones((3, 2)),
                     coords=[("global", global_axis_1),
                             ("model", model_axis_1)]).to_dataset(name="data"),
        "dataset2":
        xr.DataArray(np.ones((3, 3)),
                     coords=[("global", global_axis_2),
                             ("model", model_axis_2)]).to_dataset(name="data"),
    }

    scheme = Scheme(model, parameters, data)
    optimization_group = OptimizationGroup(
        scheme,
        model.get_dataset_groups()["default"])
    bag = list(optimization_group._calculator.bag)
    assert len(optimization_group._calculator.groups) == 2
    assert len(bag) == 6
    assert all(p.data.size == 2 for p in bag[:3])
    assert all(p.dataset_models[0].label == "dataset1" for p in bag[:3])
    assert all(
        all(p.dataset_models[0].axis["model"] == model_axis_1)
        for p in bag[:3])
    assert all(
        all(p.dataset_models[0].axis["global"] == global_axis_1)
        for p in bag[:3])
    assert [p.dataset_models[0].indices["global"]
            for p in bag[:3]] == [0, 1, 2]

    assert all(p.data.size == 3 for p in bag[3:])
    assert all(p.dataset_models[0].label == "dataset2" for p in bag[3:])
    assert all(
        all(p.dataset_models[0].axis["model"] == model_axis_2)
        for p in bag[3:])
    assert all(
        all(p.dataset_models[0].axis["global"] == global_axis_2)
        for p in bag[3:])
    assert [p.dataset_models[0].indices["global"]
            for p in bag[3:]] == [0, 1, 2]