Example #1
def _reduce_matrix(
    model: Model,
    label: str,
    parameters: ParameterGroup,
    result: LabelAndMatrix,
    index: float,
) -> LabelAndMatrix:
    """Apply the model's matrix constraints to a calculated matrix, if any are defined."""
    clp_labels = result.clp_label.copy()
    if callable(model.has_matrix_constraints_function) and model.has_matrix_constraints_function():
        clp_label, matrix = model.constrain_matrix_function(
            label, parameters, clp_labels, result.matrix, index
        )
        return LabelAndMatrix(clp_label, matrix)
    return LabelAndMatrix(clp_labels, result.matrix)
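For orientation, a minimal sketch of the LabelAndMatrix container the helper above consumes and returns; that it is a namedtuple with these fields is an assumption, not shown in the snippet:

# Assumed shape of LabelAndMatrix: a namedtuple pairing clp labels with a matrix.
from collections import namedtuple

LabelAndMatrix = namedtuple("LabelAndMatrix", ["clp_label", "matrix"])

lam = LabelAndMatrix(clp_label=["s1", "s2"], matrix=[[1.0, 0.0], [0.0, 1.0]])
print(lam.clp_label)  # ['s1', 's2']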
Example #2
def generate_model(*, generator_name: str,
                   generator_arguments: GeneratorArguments) -> Model:
    """Generate a model.

    Parameters
    ----------
    generator_name : str
        The generator to use.
    generator_arguments : GeneratorArguments
        Arguments for the generator.

    Returns
    -------
    Model
        The generated model.

    Raises
    ------
    ValueError
        Raised when an unknown generator is specified.

    See Also
    --------
    generate_parallel_decay_model
    generate_parallel_spectral_decay_model
    generate_sequential_decay_model
    generate_sequential_spectral_decay_model
    """
    if generator_name not in generators:
        raise ValueError(f"Unknown model generator '{generator_name}'. "
                         f"Known generators are: {list(generators.keys())}")
    model = generators[generator_name](**generator_arguments)
    return Model.from_dict(model)
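A hedged usage sketch for generate_model; the generator name and argument keys below are illustrative stand-ins for whatever the `generators` registry actually accepts:

# Hypothetical usage sketch; "decay_parallel" and the argument keys are
# illustrative, not confirmed entries of the `generators` registry.
try:
    model = generate_model(
        generator_name="decay_parallel",
        generator_arguments={"nr_compartments": 3},
    )
except ValueError as error:
    # Unknown names trigger the ValueError above, which lists known generators.
    print(error)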
Example #3
    def save_model(self, model: Model, file_name: str):
        """Save a Model instance to a spec file.

        Parameters
        ----------
        model : Model
            Model instance to save to the spec file.
        file_name : str
            File to write the model specs to.
        """
        model_dict = model.as_dict()
        # Tuple keys cannot be serialized directly, so replace them with strings.
        for items in model_dict.values():
            if not isinstance(items, (list, dict)):
                continue
            item_iterator = items if isinstance(items, list) else items.values()
            for item in item_iterator:
                for prop_name, prop in item.items():
                    if isinstance(prop, dict) and any(isinstance(k, tuple) for k in prop):
                        keys = [f"({k[0]}, {k[1]})" for k in prop]
                        item[prop_name] = dict(zip(keys, prop.values()))
        write_dict(model_dict, file_name=file_name)
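The tuple-to-string key conversion above can be shown in isolation; this standalone sketch reproduces the transformation on a toy property dict:

# Standalone illustration of the tuple-key flattening performed above.
prop = {("s1", "s1"): "rate1", ("s2", "s1"): "rate2"}
flattened = {f"({k[0]}, {k[1]})": v for k, v in prop.items()}
print(flattened)  # {'(s1, s1)': 'rate1', '(s2, s1)': 'rate2'}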
Example #4
    def load_model(self, file_name: str) -> Model:
        """Load a Model from a YAML spec file.

        Parameters
        ----------
        file_name : str
            Name of the file to parse.

        Returns
        -------
        Model
            The model defined in the file.
        """
        spec = self._load_yml(file_name)

        model_spec_deprecations(spec)

        spec = sanitize_yaml(spec)

        if "megacomplex" not in spec:
            raise ValueError("No megacomplex defined in model")

        default_megacomplex = spec.get("default_megacomplex")

        if default_megacomplex is None and any(
            "type" not in m for m in spec["megacomplex"].values()
        ):
            raise ValueError(
                "Default megacomplex is not defined in model and "
                "at least one megacomplex does not have a type."
            )

        return Model.from_dict(spec, megacomplex_types=None, default_megacomplex_type=None)
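The two spec checks above can be exercised standalone; this sketch uses a hypothetical parsed spec dict in place of a YAML file:

# Standalone illustration of the megacomplex checks above, with a hypothetical spec.
spec = {"megacomplex": {"mc1": {"type": "decay"}, "mc2": {}}}

if "megacomplex" not in spec:
    raise ValueError("No megacomplex defined in model")

if spec.get("default_megacomplex") is None and any(
    "type" not in m for m in spec["megacomplex"].values()
):
    # "mc2" has no type and no default is set, so this spec is rejected.
    print("spec rejected: no default megacomplex and an untyped megacomplex")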
Example #5
def setup_model(index_dependent, link_clp):
    model_dict = {
        "megacomplex": {"m1": {"is_index_dependent": index_dependent}},
        "dataset_groups": {"default": {"link_clp": link_clp}},
        "dataset": {
            "dataset1": {"megacomplex": ["m1"]},
            "dataset2": {"megacomplex": ["m1"]},
            "dataset3": {"megacomplex": ["m1"]},
        },
    }
    return Model.from_dict(
        model_dict,
        megacomplex_types={"benchmark": BenchmarkMegacomplex},
        default_megacomplex_type="benchmark",
    )
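A short usage sketch for the benchmark helper above; the flag values are illustrative:

# Usage sketch; both flags simply toggle the generated model's behavior.
model = setup_model(index_dependent=False, link_clp=True)
print(list(model.dataset))  # ['dataset1', 'dataset2', 'dataset3']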
Example #6
def save_model(
    model: Model,
    file_name: StrOrPath,
    format_name: str | None = None,
    *,
    allow_overwrite: bool = False,
    update_source_path: bool = True,
    **kwargs: Any,
) -> None:
    """Save a :class:`Model` instance to a spec file.

    Parameters
    ----------
    model : Model
        :class:`Model` instance to save to a spec file.
    file_name : StrOrPath
        File to write the model specs to.
    format_name : str | None
        Format the file should be in; if not provided, it will be inferred from
        the file extension.
    allow_overwrite : bool
        Whether or not to allow overwriting existing files. By default False.
    update_source_path : bool
        Whether or not to update the ``source_path`` attribute to ``file_name``
        when saving. By default True.
    **kwargs : Any
        Additional keyword arguments passed to the ``save_model`` implementation
        of the project io plugin.
    """
    protect_from_overwrite(file_name, allow_overwrite=allow_overwrite)
    io = get_project_io(format_name or inferr_file_format(file_name, needs_to_exist=False))
    io.save_model(  # type: ignore[call-arg]
        file_name=Path(file_name).as_posix(),
        model=model,
        **kwargs,
    )
    if update_source_path:
        model.source_path = Path(file_name).as_posix()
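A hedged usage sketch, assuming `model` is an existing Model instance and the YAML plugin handles the .yml extension:

# Hypothetical usage sketch, assuming `model` is an existing Model instance.
save_model(model, "my_model.yml", allow_overwrite=True)
# With update_source_path=True (the default) the instance remembers where it
# was written:
print(model.source_path)  # my_model.yml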
Example #7
def test_coherent_artifact(spectral_dependence: str):
    model_dict = {
        "initial_concentration": {
            "j1": {"compartments": ["s1"], "parameters": ["irf_center"]},
        },
        "megacomplex": {
            "mc1": {"type": "decay", "k_matrix": ["k1"]},
            "mc2": {"type": "coherent-artifact", "order": 3},
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "rate",
                }
            }
        },
        "irf": {
            "irf1": {
                "type": "spectral-multi-gaussian",
                "center": ["irf_center"],
                "width": ["irf_width"],
            },
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1", "mc2"],
                "irf": "irf1",
            },
        },
    }

    parameter_list = [
        ["rate", 101e-4],
        ["irf_center", 10, {"vary": False, "non-negative": False}],
        ["irf_width", 20, {"vary": False, "non-negative": False}],
    ]

    irf_spec = model_dict["irf"]["irf1"]

    if spectral_dependence == "dispersed":
        irf_spec["dispersion_center"] = "irf_dispc"
        irf_spec["center_dispersion"] = ["irf_disp1", "irf_disp2"]

        parameter_list += [
            ["irf_dispc", 300, {"vary": False, "non-negative": False}],
            ["irf_disp1", 0.01, {"vary": False, "non-negative": False}],
            ["irf_disp2", 0.001, {"vary": False, "non-negative": False}],
        ]
    elif spectral_dependence == "shifted":
        irf_spec["shift"] = ["irf_shift1", "irf_shift2", "irf_shift3"]
        parameter_list += [
            ["irf_shift1", -2],
            ["irf_shift2", 0],
            ["irf_shift3", 2],
        ]

    model = Model.from_dict(
        model_dict.copy(),
        megacomplex_types={
            "decay": DecayMegacomplex,
            "coherent-artifact": CoherentArtifactMegacomplex,
        },
    )

    parameters = ParameterGroup.from_list(parameter_list)

    time = np.arange(0, 50, 1.5)
    spectral = np.asarray([200, 300, 400])
    coords = {"time": time, "spectral": spectral}

    dataset_model = model.dataset["dataset1"].fill(model, parameters)
    dataset_model.overwrite_global_dimension("spectral")
    dataset_model.set_coordinates(coords)
    matrix = calculate_matrix(dataset_model, {"spectral": 1})
    compartments = matrix.clp_labels

    print(compartments)
    assert len(compartments) == 4
    for i in range(1, 4):
        assert compartments[i] == f"coherent_artifact_{i}"

    assert matrix.matrix.shape == (time.size, 4)

    clp = xr.DataArray(
        np.ones((3, 4)),
        coords=[
            ("spectral", spectral),
            (
                "clp_label",
                [
                    "s1",
                    "coherent_artifact_1",
                    "coherent_artifact_2",
                    "coherent_artifact_3",
                ],
            ),
        ],
    )
    axis = {"time": time, "spectral": clp.spectral}
    data = simulate(model, "dataset1", parameters, axis, clp)

    dataset = {"dataset1": data}
    scheme = Scheme(
        model=model, parameters=parameters, data=dataset, maximum_number_function_evaluations=20
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value, parameters.get(label).value, rtol=1e-8)

    resultdata = result.data["dataset1"]
    assert np.array_equal(data.time, resultdata.time)
    assert np.array_equal(data.spectral, resultdata.spectral)
    assert data.data.shape == resultdata.data.shape
    assert data.data.shape == resultdata.fitted_data.shape
    assert np.allclose(data.data, resultdata.fitted_data)

    assert "coherent_artifact_response" in resultdata
    if spectral_dependence == "none":
        assert resultdata["coherent_artifact_response"].shape == (time.size, 3)
    else:
        assert resultdata["coherent_artifact_response"].shape == (spectral.size, time.size, 3)

    assert "coherent_artifact_associated_spectra" in resultdata
    assert resultdata["coherent_artifact_associated_spectra"].shape == (3, 3)
Example #8
def simulate(
    model: Model,
    dataset: str,
    parameters: ParameterGroup,
    axes: Dict[str, np.ndarray] = None,
    clp: Union[np.ndarray, xr.DataArray] = None,
    noise=False,
    noise_std_dev=1.0,
    noise_seed=None,
):
    """Simulates a model.

    Parameters
    ----------
    model :
        The model to simulate.
    parameter :
        The parameters for the simulation.
    dataset :
        Label of the dataset to simulate
    axes :
        A dictionary with axes for simulation.
    clp :
        Conditionally linear parameters. Will be used instead of `model.global_matrix` if given.
    noise :
        Add noise to the simulation.
    noise_std_dev :
        The standard deviation for noise simulation.
    noise_seed :
        The seed for the noise simulation.
    """

    if model.global_matrix is None and clp is None:
        raise ValueError(
            "Cannot simulate models without implementation for global matrix and no clp given."
        )

    filled_dataset = model.dataset[dataset].fill(model, parameters)

    model_dimension = axes[model.model_dimension]
    global_dimension = axes[model.global_dimension]

    dim1 = model_dimension.size
    dim2 = global_dimension.size
    result = xr.DataArray(
        np.empty((dim1, dim2), dtype=np.float64),
        coords=[
            (model.model_dimension, model_dimension),
            (model.global_dimension, global_dimension),
        ],
    )
    result = result.to_dataset(name="data")

    if model.index_dependent():
        matrix = [
            model.matrix(dataset_descriptor=filled_dataset, axis=model_dimension, index=index)
            for index in global_dimension
        ]
    else:
        matrix = model.matrix(dataset_descriptor=filled_dataset, axis=model_dimension, index=None)

    if callable(model.constrain_matrix_function):
        if model.index_dependent():
            matrix = [
                model.constrain_matrix_function(dataset, parameters, clp, mat, global_dimension[i])
                for i, (clp, mat) in enumerate(matrix)
            ]
        else:
            matrix = model.constrain_matrix_function(
                dataset, parameters, matrix[0], matrix[1], None
            )

    if model.index_dependent():
        matrix = [
            xr.DataArray(
                mat,
                coords=[(model.model_dimension, model_dimension), ("clp_label", clp_label)],
            )
            for clp_label, mat in matrix
        ]
    else:
        matrix = xr.DataArray(
            matrix[1],
            coords=[(model.model_dimension, model_dimension), ("clp_label", matrix[0])],
        )

    if clp is not None:
        if clp.shape[0] != global_dimension.size:
            raise ValueError(
                f"Size of dimension 0 of clp ({clp.shape[0]}) != size of axis"
                f" '{model.global_dimension}' ({global_dimension.size})")
        if isinstance(clp, xr.DataArray):
            if model.global_dimension not in clp.coords:
                raise ValueError(
                    f"Missing coordinate '{model.global_dimension}' in clp.")
            if "clp_label" not in clp.coords:
                raise ValueError("Missing coordinate 'clp_label' in clp.")
        else:
            if "clp_label" not in axes:
                raise ValueError("Missing axis 'clp_label'")
            clp = xr.DataArray(
                clp,
                coords=[
                    (model.global_dimension, global_dimension),
                    ("clp_label", axes["clp_label"]),
                ],
            )
    else:
        clp_labels, clp = model.global_matrix(filled_dataset, global_dimension)
        clp = xr.DataArray(
            clp,
            coords=[(model.global_dimension, global_dimension), ("clp_label", clp_labels)],
        )
    for i in range(dim2):
        index_matrix = matrix[i] if model.index_dependent() else matrix
        result.data[:, i] = np.dot(
            index_matrix, clp[i].sel(clp_label=index_matrix.coords["clp_label"])
        )

    if noise:
        if noise_seed is not None:
            np.random.seed(noise_seed)
        result["data"] = (
            (model.model_dimension, model.global_dimension),
            np.random.normal(result.data, noise_std_dev),
        )

    return result
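The per-index projection at the core of the loop above can be reproduced in isolation; this NumPy/xarray sketch uses toy data and the same label-aligned selection:

# Standalone illustration of one column of the simulation: matrix @ clp(index),
# with the clp values aligned to the matrix's clp_label coordinate.
import numpy as np
import xarray as xr

time = np.arange(0.0, 3.0, 1.0)
matrix = xr.DataArray(
    np.ones((time.size, 2)),
    coords=[("time", time), ("clp_label", ["s1", "s2"])],
)
clp_at_index = xr.DataArray([2.0, 0.5], coords=[("clp_label", ["s1", "s2"])])
column = np.dot(matrix, clp_at_index.sel(clp_label=matrix.coords["clp_label"]))
print(column)  # [2.5 2.5 2.5]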
Example #9
def test_baseline():
    model = Model.from_dict(
        {
            "initial_concentration": {
                "j1": {"compartments": ["s1"], "parameters": ["2"]},
            },
            "megacomplex": {
                "mc1": {"type": "decay", "k_matrix": ["k1"]},
                "mc2": {"type": "baseline", "dimension": "time"},
            },
            "k_matrix": {
                "k1": {
                    "matrix": {
                        ("s1", "s1"): "1",
                    }
                }
            },
            "dataset": {
                "dataset1": {
                    "initial_concentration": "j1",
                    "megacomplex": ["mc1", "mc2"],
                },
            },
        },
        megacomplex_types={
            "decay": DecayMegacomplex,
            "baseline": BaselineMegacomplex,
        },
    )

    parameter = ParameterGroup.from_list(
        [
            101e-4,
            [1, {"vary": False, "non-negative": False}],
            [42, {"vary": False, "non-negative": False}],
        ]
    )

    time = np.arange(0, 50, 1.5)
    pixel = np.asarray([0])
    coords = {"time": time, "pixel": pixel}
    dataset_model = model.dataset["dataset1"].fill(model, parameter)
    dataset_model.overwrite_global_dimension("pixel")
    dataset_model.set_coordinates(coords)
    matrix = calculate_matrix(dataset_model, {})
    compartments = matrix.clp_labels

    assert len(compartments) == 2
    assert "dataset1_baseline" in compartments

    assert matrix.matrix.shape == (time.size, 2)
    assert np.all(matrix.matrix[:, 1] == 1)
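The constant column asserted above is what makes the baseline work: in the matrix-times-clp picture it contributes an additive offset whose amplitude is the associated clp. A minimal NumPy sketch with hypothetical amplitudes:

# Minimal illustration: a constant-1 column acts as an additive offset, with
# the associated clp value (hypothetical here) as the baseline amplitude.
import numpy as np

time = np.arange(0, 50, 1.5)
decay_column = np.exp(-101e-4 * time)          # the s1 decay column (rate 101e-4)
baseline_column = np.ones_like(decay_column)   # the dataset1_baseline column
matrix = np.column_stack([decay_column, baseline_column])
clp = np.array([1.0, 0.2])                     # hypothetical amplitudes
trace = matrix @ clp                           # decay riding on a constant 0.2 offset
print(trace[:3])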