def generate_model(*, generator_name: str, generator_arguments: GeneratorArguments) -> Model:
    """Generate a model.

    Parameters
    ----------
    generator_name : str
        The generator to use.
    generator_arguments : GeneratorArguments
        Arguments for the generator.

    Returns
    -------
    Model
        The generated model

    See Also
    --------
    generate_parallel_decay_model
    generate_parallel_spectral_decay_model
    generate_sequential_decay_model
    generate_sequential_spectral_decay_model

    Raises
    ------
    ValueError
        Raised when an unknown generator is specified.
    """
    generator = generators.get(generator_name)
    if generator is None:
        raise ValueError(
            f"Unknown model generator '{generator_name}'. "
            f"Known generators are: {list(generators.keys())}"
        )
    model_dict = generator(**generator_arguments)
    return Model.from_dict(model_dict)
def load_model(self, file_name: str) -> Model:
    """Read the given file, parse its content as YAML and build a model.

    Parameters
    ----------
    file_name : str
        Path of the model specification file to parse.

    Returns
    -------
    Model
        The model built from the file's content.

    Raises
    ------
    ValueError
        If the specification defines no megacomplex, or if no default
        megacomplex is defined while at least one megacomplex lacks a type.
    """
    spec = self._load_yml(file_name)
    model_spec_deprecations(spec)
    spec = sanitize_yaml(spec)

    # Presence check must come first: the default-megacomplex check below
    # indexes spec["megacomplex"] and would otherwise raise a bare KeyError
    # instead of the intended, descriptive ValueError.
    if "megacomplex" not in spec:
        raise ValueError("No megacomplex defined in model")

    default_megacomplex = spec.get("default_megacomplex")
    if default_megacomplex is None and any(
        "type" not in m for m in spec["megacomplex"].values()
    ):
        raise ValueError(
            "Default megacomplex is not defined in model and "
            "at least one megacomplex does not have a type."
        )
    return Model.from_dict(spec, megacomplex_types=None, default_megacomplex_type=None)
def setup_model(index_dependent, link_clp):
    """Build a benchmark model: one megacomplex shared by three datasets.

    Parameters
    ----------
    index_dependent
        Whether the benchmark megacomplex is index dependent.
    link_clp
        Whether the default dataset group links clps.
    """
    datasets = {f"dataset{i}": {"megacomplex": ["m1"]} for i in (1, 2, 3)}
    spec = {
        "megacomplex": {"m1": {"is_index_dependent": index_dependent}},
        "dataset_groups": {"default": {"link_clp": link_clp}},
        "dataset": datasets,
    }
    return Model.from_dict(
        spec,
        megacomplex_types={"benchmark": BenchmarkMegacomplex},
        default_megacomplex_type="benchmark",
    )
def test_coherent_artifact(spectral_dependence: str):
    """Simulate and re-fit a decay + coherent-artifact model.

    Parametrized over the IRF's spectral dependence: "none", "dispersed"
    (dispersion parameters added) or "shifted" (per-pixel shifts added).
    """
    # Base model: one decaying compartment plus a 3rd-order coherent artifact,
    # both convolved with a multi-Gaussian IRF.
    model_dict = {
        "initial_concentration": {
            "j1": {"compartments": ["s1"], "parameters": ["irf_center"]},
        },
        "megacomplex": {
            "mc1": {"type": "decay", "k_matrix": ["k1"]},
            "mc2": {"type": "coherent-artifact", "order": 3},
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "rate",
                }
            }
        },
        "irf": {
            "irf1": {
                "type": "spectral-multi-gaussian",
                "center": ["irf_center"],
                "width": ["irf_width"],
            },
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1", "mc2"],
                "irf": "irf1",
            },
        },
    }

    parameter_list = [
        ["rate", 101e-4],
        ["irf_center", 10, {"vary": False, "non-negative": False}],
        ["irf_width", 20, {"vary": False, "non-negative": False}],
    ]

    # Augment the IRF spec and parameters according to the parametrization.
    irf_spec = model_dict["irf"]["irf1"]

    if spectral_dependence == "dispersed":
        irf_spec["dispersion_center"] = "irf_dispc"
        irf_spec["center_dispersion"] = ["irf_disp1", "irf_disp2"]

        parameter_list += [
            ["irf_dispc", 300, {"vary": False, "non-negative": False}],
            ["irf_disp1", 0.01, {"vary": False, "non-negative": False}],
            ["irf_disp2", 0.001, {"vary": False, "non-negative": False}],
        ]
    elif spectral_dependence == "shifted":
        irf_spec["shift"] = ["irf_shift1", "irf_shift2", "irf_shift3"]
        parameter_list += [
            ["irf_shift1", -2],
            ["irf_shift2", 0],
            ["irf_shift3", 2],
        ]

    model = Model.from_dict(
        model_dict.copy(),  # NOTE(review): shallow copy — nested dicts are still shared
        megacomplex_types={
            "decay": DecayMegacomplex,
            "coherent-artifact": CoherentArtifactMegacomplex,
        },
    )

    parameters = ParameterGroup.from_list(parameter_list)

    time = np.arange(0, 50, 1.5)
    spectral = np.asarray([200, 300, 400])
    coords = {"time": time, "spectral": spectral}

    dataset_model = model.dataset["dataset1"].fill(model, parameters)
    dataset_model.overwrite_global_dimension("spectral")
    dataset_model.set_coordinates(coords)
    matrix = calculate_matrix(dataset_model, {"spectral": 1})
    compartments = matrix.clp_labels

    print(compartments)
    # Expect the decay compartment plus one clp per artifact order (1..3).
    assert len(compartments) == 4
    for i in range(1, 4):
        assert compartments[i] == f"coherent_artifact_{i}"

    assert matrix.matrix.shape == (time.size, 4)

    # Unit clp amplitudes for all four clp labels at every spectral pixel.
    clp = xr.DataArray(
        np.ones((3, 4)),
        coords=[
            ("spectral", spectral),
            (
                "clp_label",
                [
                    "s1",
                    "coherent_artifact_1",
                    "coherent_artifact_2",
                    "coherent_artifact_3",
                ],
            ),
        ],
    )
    axis = {"time": time, "spectral": clp.spectral}
    data = simulate(model, "dataset1", parameters, axis, clp)

    # Fit the simulated data starting from the true parameters; the optimizer
    # should stay at (or converge back to) them.
    dataset = {"dataset1": data}
    scheme = Scheme(
        model=model, parameters=parameters, data=dataset, maximum_number_function_evaluations=20
    )
    result = optimize(scheme)
    print(result.optimized_parameters)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value, parameters.get(label).value, rtol=1e-8)

    resultdata = result.data["dataset1"]
    assert np.array_equal(data.time, resultdata.time)
    assert np.array_equal(data.spectral, resultdata.spectral)
    assert data.data.shape == resultdata.data.shape
    assert data.data.shape == resultdata.fitted_data.shape
    assert np.allclose(data.data, resultdata.fitted_data)

    # The artifact response gains a spectral dimension when the IRF varies
    # across the spectral axis (dispersed or shifted).
    assert "coherent_artifact_response" in resultdata
    if spectral_dependence == "none":
        assert resultdata["coherent_artifact_response"].shape == (time.size, 3)
    else:
        assert resultdata["coherent_artifact_response"].shape == (spectral.size, time.size, 3)

    assert "coherent_artifact_associated_spectra" in resultdata
    assert resultdata["coherent_artifact_associated_spectra"].shape == (3, 3)
def test_baseline():
    """The baseline megacomplex adds a constant-one clp column to the matrix."""
    model_spec = {
        "initial_concentration": {
            "j1": {"compartments": ["s1"], "parameters": ["2"]},
        },
        "megacomplex": {
            "mc1": {"type": "decay", "k_matrix": ["k1"]},
            "mc2": {"type": "baseline", "dimension": "time"},
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "1",
                }
            }
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "megacomplex": ["mc1", "mc2"],
            },
        },
    }
    model = Model.from_dict(
        model_spec,
        megacomplex_types={"decay": DecayMegacomplex, "baseline": BaselineMegacomplex},
    )

    parameter = ParameterGroup.from_list(
        [
            101e-4,
            [1, {"vary": False, "non-negative": False}],
            [42, {"vary": False, "non-negative": False}],
        ]
    )

    time_axis = np.asarray(np.arange(0, 50, 1.5))
    pixel_axis = np.asarray([0])

    dataset_model = model.dataset["dataset1"].fill(model, parameter)
    dataset_model.overwrite_global_dimension("pixel")
    dataset_model.set_coordinates({"time": time_axis, "pixel": pixel_axis})
    matrix = calculate_matrix(dataset_model, {})
    labels = matrix.clp_labels

    # One decay compartment plus the dataset's baseline clp.
    assert len(labels) == 2
    assert "dataset1_baseline" in labels

    assert matrix.matrix.shape == (time_axis.size, 2)
    # Baseline column is constant one over the whole time axis.
    assert np.all(matrix.matrix[:, 1] == 1)