def simulate(self,
             dataset: str,
             parameter: ParameterGroup,
             axes: typing.Dict[str, np.ndarray] = None,
             clp: typing.Union[np.ndarray, xr.DataArray] = None,
             noise: bool = False,
             noise_std_dev: float = 1.0,
             noise_seed: int = None,
             ) -> xr.Dataset:
    """Simulates the model.

    Parameters
    ----------
    dataset :
        Label of the dataset to simulate.
    parameter :
        The parameters for the simulation.
    axes :
        A dictionary with axes for simulation.
    clp :
        Conditionally linear parameters. Will be used instead of `model.global_matrix` if given.
    noise :
        If `True` noise is added to the simulated data.
    noise_std_dev :
        The standard deviation of the noise.
    noise_seed :
        Seed for the noise.
    """
    return simulate(self, dataset, parameter, axes=axes, clp=clp,
                    noise=noise, noise_std_dev=noise_std_dev, noise_seed=noise_seed)
def dummy_result(): """Dummy result for testing.""" model = suite.model model.is_grouped = False model.is_index_dependent = False wanted_parameters = suite.wanted_parameters data = {} for i in range(3): e_axis = getattr(suite, "e_axis" if i == 0 else f"e_axis{i+1}") c_axis = getattr(suite, "c_axis" if i == 0 else f"c_axis{i+1}") data[f"dataset{i+1}"] = simulate( suite.sim_model, f"dataset{i+1}", wanted_parameters, {"e": e_axis, "c": c_axis} ) scheme = Scheme( model=suite.model, parameters=suite.initial_parameters, data=data, maximum_number_function_evaluations=1, ) yield optimize(scheme)
def test_decay_model(suite, nnls): model = suite.model print(model.validate()) assert model.valid() model.dataset_group_models["default"].link_clp = False model.dataset_group_models["default"].method = ( "non_negative_least_squares" if nnls else "variable_projection") wanted_parameters = suite.wanted_parameters print(model.validate(wanted_parameters)) print(wanted_parameters) assert model.valid(wanted_parameters) initial_parameters = suite.initial_parameters print(model.validate(initial_parameters)) assert model.valid(initial_parameters) print(model.markdown(wanted_parameters)) dataset = simulate(model, "dataset1", wanted_parameters, suite.axis) assert dataset.data.shape == (suite.axis["time"].size, suite.axis["spectral"].size) data = {"dataset1": dataset} scheme = Scheme( model=model, parameters=initial_parameters, data=data, maximum_number_function_evaluations=20, ) result = optimize(scheme) print(result.optimized_parameters) for label, param in result.optimized_parameters.all(): assert np.allclose(param.value, wanted_parameters.get(label).value) resultdata = result.data["dataset1"] print(resultdata) assert np.array_equal(dataset["time"], resultdata["time"]) assert np.array_equal(dataset["spectral"], resultdata["spectral"]) assert dataset.data.shape == resultdata.data.shape assert dataset.data.shape == resultdata.fitted_data.shape assert np.allclose(dataset.data, resultdata.fitted_data, rtol=1e-1) assert "species_spectra" in resultdata spectra = resultdata.species_spectra assert "spectral_species" in spectra.coords assert "spectral" in spectra.coords assert spectra.shape == (suite.axis["spectral"].size, 3) assert "species_concentration" in resultdata concentration = resultdata.species_concentration assert "species" in concentration.coords assert "time" in concentration.coords assert concentration.shape == (suite.axis["time"].size, 3)
def test_simulate_dataset(): model = SimpleTestModel.from_dict( {"dataset": { "dataset1": { "megacomplex": [], }, }}) print(model.validate()) assert model.valid() parameter = ParameterGroup.from_list([1, 1]) print(model.validate(parameter)) assert model.valid(parameter) est_axis = np.asarray([1, 1, 1, 1]) cal_axis = np.asarray([2, 2, 2]) data = simulate(model, "dataset1", parameter, { "e": est_axis, "c": cal_axis }) assert np.array_equal(data["c"], cal_axis) assert np.array_equal(data["e"], est_axis) assert data.data.shape == (3, 4) assert np.array_equal( data.data, np.asarray([ [2, 4, 6], [4, 10, 16], [6, 16, 26], [8, 22, 36], ]).T, )
def test_optimization_full_model(index_dependent): model = FullModel.model model.megacomplex["m1"].is_index_dependent = index_dependent print(model.validate()) assert model.valid() parameters = FullModel.parameters assert model.valid(parameters) dataset = simulate(model, "dataset1", parameters, FullModel.coordinates) scheme = Scheme( model=model, parameters=parameters, data={"dataset1": dataset}, maximum_number_function_evaluations=10, ) result = optimize(scheme, raise_exception=True) assert result.success optimized_scheme = result.get_scheme() assert result.optimized_parameters == optimized_scheme.parameters result_data = result.data["dataset1"] assert "fitted_data" in result_data for label, param in result.optimized_parameters.all(): if param.vary: assert np.allclose(param.value, parameters.get(label).value, rtol=1e-1) clp = result_data.clp print(clp) assert clp.shape == (4, 4) assert all(np.isclose(1.0, c) for c in np.diagonal(clp))
def test_simulate_dataset(): model = MockModel.from_dict({ "dataset": { "dataset1": { "megacomplex": [], }, } }) print(model.validate()) assert model.valid() parameter = ParameterGroup.from_list([1, 1]) print(model.validate(parameter)) assert model.valid(parameter) est_axis = np.asarray([1, 1, 1, 1]) cal_axis = np.asarray([2, 2, 2]) data = simulate(model, 'dataset1', parameter, {'e': est_axis, 'c': cal_axis}) assert np.array_equal(data["c"], cal_axis) assert np.array_equal(data["e"], est_axis) assert data.data.shape == (3, 4) assert np.array_equal(data.data, np.asarray([ [2, 4, 6], [4, 10, 16], [6, 16, 26], [8, 22, 36], ]).T)
def test_spectral_model(suite): model = suite.spectral_model print(model.validate()) assert model.valid() wanted_parameters = suite.spectral_parameters print(model.validate(wanted_parameters)) print(wanted_parameters) assert model.valid(wanted_parameters) initial_parameters = suite.spectral_parameters print(model.validate(initial_parameters)) assert model.valid(initial_parameters) print(model.markdown(initial_parameters)) dataset = simulate(model, "dataset1", wanted_parameters, suite.axis, suite.clp) assert dataset.data.shape == (suite.axis["time"].size, suite.axis["spectral"].size) data = {"dataset1": dataset} scheme = Scheme( model=model, parameters=initial_parameters, data=data, maximum_number_function_evaluations=20, ) result = optimize(scheme) print(result.optimized_parameters) for label, param in result.optimized_parameters.all(): assert np.allclose(param.value, wanted_parameters.get(label).value, rtol=1e-1) resultdata = result.data["dataset1"] assert np.array_equal(dataset["time"], resultdata["time"]) assert np.array_equal(dataset["spectral"], resultdata["spectral"]) assert dataset.data.shape == resultdata.data.shape assert dataset.data.shape == resultdata.fitted_data.shape assert np.allclose(dataset.data, resultdata.fitted_data, rtol=1e-2) assert "species_associated_concentrations" in resultdata assert resultdata.species_associated_concentrations.shape == ( suite.axis["time"].size, len(suite.decay_compartments), ) assert "species_spectra" in resultdata assert resultdata.species_spectra.shape == ( suite.axis["spectral"].size, len(suite.decay_compartments), )
def test_fitting(suite, index_dependend, grouped): model = suite.model def gr(): return grouped model.grouped = gr def id(): return index_dependend model.index_dependend = id sim_model = suite.sim_model est_axis = suite.e_axis cal_axis = suite.c_axis print(model.validate()) assert model.valid() print(sim_model.validate()) assert sim_model.valid() wanted = suite.wanted print(wanted) print(sim_model.validate(wanted)) assert sim_model.valid(wanted) initial = suite.initial print(initial) print(model.validate(initial)) assert model.valid(initial) dataset = simulate(sim_model, 'dataset1', wanted, {'e': est_axis, 'c': cal_axis}) print(dataset) assert dataset.data.shape == (cal_axis.size, est_axis.size) data = {'dataset1': dataset} scheme = Scheme(model=model, parameter=initial, data=data, nfev=5) result = optimize(scheme) print(result.optimized_parameter) print(result.data['dataset1']) for _, param in result.optimized_parameter.all(): assert np.allclose(param.value, wanted.get(param.full_label).value, rtol=1e-1) resultdata = result.data["dataset1"] assert np.array_equal(dataset.c, resultdata.c) assert np.array_equal(dataset.e, resultdata.e) assert dataset.data.shape == resultdata.data.shape print(dataset.data[0, 0], resultdata.data[0, 0]) assert np.allclose(dataset.data, resultdata.data)
def test_kinetic_model(suite, nnls): model = suite.model print(model.validate()) assert model.valid() model.dataset_group_models["default"].method = ( "non_negative_least_squares" if nnls else "variable_projection") wanted_parameters = suite.wanted_parameters print(model.validate(wanted_parameters)) print(wanted_parameters) assert model.valid(wanted_parameters) initial_parameters = suite.initial_parameters print(model.validate(initial_parameters)) assert model.valid(initial_parameters) print(model.markdown(wanted_parameters)) dataset = simulate(model, "dataset1", wanted_parameters, suite.axis) assert dataset.data.shape == (suite.axis["time"].size, suite.axis["spectral"].size) data = {f"dataset{i}": dataset for i in range(1, 5)} scheme = Scheme( model=model, parameters=initial_parameters, data=data, maximum_number_function_evaluations=20, ) result = optimize(scheme) print(result.optimized_parameters) for label, param in result.optimized_parameters.all(): print(label, param.value, wanted_parameters.get(label).value) assert np.allclose(param.value, wanted_parameters.get(label).value, rtol=1e-1) resultdata = result.data["dataset1"] print(resultdata) assert np.array_equal(dataset["time"], resultdata["time"]) assert np.array_equal(dataset["spectral"], resultdata["spectral"]) assert dataset.data.shape == resultdata.data.shape assert dataset.data.shape == resultdata.fitted_data.shape assert np.allclose(dataset.data, resultdata.fitted_data, rtol=1e-2)
def test_doas_model(suite): print(suite.sim_model.validate()) assert suite.sim_model.valid() print(suite.model.validate()) assert suite.model.valid() print(suite.sim_model.validate(suite.wanted_parameter)) assert suite.sim_model.valid(suite.wanted_parameter) print(suite.model.validate(suite.parameter)) assert suite.model.valid(suite.parameter) dataset = simulate(suite.sim_model, "dataset1", suite.wanted_parameter, suite.axis) print(dataset) assert dataset.data.shape == (suite.axis["time"].size, suite.axis["spectral"].size) print(suite.parameter) print(suite.wanted_parameter) data = {"dataset1": dataset} scheme = Scheme( model=suite.model, parameters=suite.parameter, data=data, maximum_number_function_evaluations=20, ) result = optimize(scheme, raise_exception=True) print(result.optimized_parameters) for label, param in result.optimized_parameters.all(): assert np.allclose(param.value, suite.wanted_parameter.get(label).value, rtol=1e-1) resultdata = result.data["dataset1"] assert np.array_equal(dataset["time"], resultdata["time"]) assert np.array_equal(dataset["spectral"], resultdata["spectral"]) assert dataset.data.shape == resultdata.fitted_data.shape assert np.allclose(dataset.data, resultdata.fitted_data) assert "damped_oscillation_cos" in resultdata assert "damped_oscillation_sin" in resultdata assert "damped_oscillation_associated_spectra" in resultdata assert "damped_oscillation_phase" in resultdata
def setup(self, index_dependent, grouped, weight):
    suite = MultichannelMulticomponentDecay
    model = suite.model
    # 0.4.0 API compat
    model.is_grouped = grouped
    model.megacomplex["m1"].is_index_dependent = index_dependent
    sim_model = suite.sim_model
    suite.sim_model.megacomplex["m1"].is_index_dependent = index_dependent
    wanted_parameters = suite.wanted_parameters
    initial_parameters = suite.initial_parameters
    model.dataset["dataset1"].fill(model, initial_parameters)

    if hasattr(suite, "global_axis"):
        axes_dict = {
            "global": getattr(suite, "global_axis"),
            "model": getattr(suite, "model_axis"),
        }
    else:
        # 0.4.0 API compat
        axes_dict = {
            "e": getattr(suite, "e_axis"),
            "c": getattr(suite, "c_axis"),
        }

    dataset = simulate(sim_model, "dataset1", wanted_parameters, axes_dict)
    if weight:
        dataset["weight"] = xr.DataArray(
            np.ones_like(dataset.data) * 0.5, coords=dataset.data.coords
        )
    data = {"dataset1": dataset}

    self.scheme = Scheme(
        model=model,
        parameters=initial_parameters,
        data=data,
        maximum_number_function_evaluations=10,
        group_tolerance=0.1,
        optimization_method="TrustRegionReflection",
    )
    # 0.4.0 API compat
    if hasattr(self.scheme, "group"):
        self.scheme.group = grouped
def test_relations(index_dependent, link_clp): model = deepcopy(suite.model) model.dataset_group_models["default"].link_clp = link_clp model.megacomplex["m1"].is_index_dependent = index_dependent model.clp_relations.append( Relation.from_dict({ "source": "s1", "target": "s2", "parameter": "3" })) parameters = ParameterGroup.from_list([11e-4, 22e-5, 2]) print("link_clp", link_clp, "index_dependent", index_dependent) dataset = simulate( suite.sim_model, "dataset1", parameters, { "global": suite.global_axis, "model": suite.model_axis }, ) scheme = Scheme(model=model, parameters=parameters, data={"dataset1": dataset}) optimization_group = OptimizationGroup( scheme, model.get_dataset_groups()["default"]) if index_dependent: reduced_matrix = (optimization_group.reduced_matrices[0] if link_clp else optimization_group.reduced_matrices["dataset1"][0]) else: reduced_matrix = optimization_group.reduced_matrices["dataset1"] matrix = (optimization_group.matrices["dataset1"][0] if index_dependent else optimization_group.matrices["dataset1"]) result_data = optimization_group.create_result_data() print(result_data) clps = result_data["dataset1"].clp assert "s2" not in reduced_matrix.clp_labels assert "s2" in clps.coords["clp_label"] assert clps.sel(clp_label="s2") == clps.sel(clp_label="s1") * 2 assert "s2" in matrix.clp_labels
def test_fitting(suite): model = suite.model sim_model = suite.sim_model est_axis = suite.e_axis cal_axis = suite.c_axis print(model.validate()) assert model.valid() print(sim_model.validate()) assert sim_model.valid() wanted = suite.wanted print(wanted) print(sim_model.validate(wanted)) assert sim_model.valid(wanted) initial = suite.initial print(initial) print(model.validate(initial)) assert model.valid(initial) dataset = simulate(sim_model, 'dataset1', wanted, {'e': est_axis, 'c': cal_axis}) print(dataset) assert dataset.data.shape == (cal_axis.size, est_axis.size) data = {'dataset1': dataset} scheme = Scheme(model=model, parameter=initial, data=data) optimizer = Optimizer(scheme) result = optimizer.optimize() print(result.optimized_parameter) print(result.data['dataset1']) for _, param in result.optimized_parameter.all(): assert np.allclose(param.value, wanted.get(param.full_label).value, rtol=1e-1) resultdata = result.data["dataset1"] assert np.array_equal(dataset.c, resultdata.c) assert np.array_equal(dataset.e, resultdata.e) assert dataset.data.shape == resultdata.data.shape print(dataset.data[0, 0], resultdata.data[0, 0]) assert np.allclose(dataset.data, resultdata.data)
def problem(request) -> Problem: model = suite.model model.is_grouped = request.param[0] model.is_index_dependent = request.param[1] dataset = simulate( suite.sim_model, "dataset1", suite.wanted_parameters, { "e": suite.e_axis, "c": suite.c_axis }, ) scheme = Scheme(model=model, parameters=suite.initial_parameters, data={"dataset1": dataset}) return Problem(scheme)
def test_fitting(suite): model = suite.model sim_model = suite.sim_model est_axis = suite.e_axis cal_axis = suite.c_axis print(model.validate()) assert model.valid() print(sim_model.validate()) assert sim_model.valid() wanted = suite.wanted print(wanted) print(sim_model.validate(wanted)) assert sim_model.valid(wanted) initial = suite.initial print(initial) print(model.validate(initial)) assert model.valid(initial) dataset = simulate(sim_model, wanted, 'dataset1', {'e': est_axis, 'c': cal_axis}) print(dataset) assert dataset.data.shape == (cal_axis.size, est_axis.size) data = {'dataset1': dataset} result = Result(model, data, initial, False) optimize(result) print(result.optimized_parameter) print(result.data['dataset1']) for _, param in result.optimized_parameter.all(): assert np.allclose(param.value, wanted.get(param.full_label).value, rtol=1e-1) resultdata = result.data["dataset1"] assert np.array_equal(dataset.c, resultdata.c) assert np.array_equal(dataset.e, resultdata.e) assert dataset.data.shape == resultdata.data.shape print(dataset.data[0, 0], resultdata.data[0, 0]) assert np.allclose(dataset.data, resultdata.data)
def test_full_model_problem(): dataset = simulate(FullModel.model, "dataset1", FullModel.parameters, FullModel.coordinates) scheme = Scheme(model=FullModel.model, parameters=FullModel.parameters, data={"dataset1": dataset}) optimization_group = OptimizationGroup( scheme, FullModel.model.get_dataset_groups()["default"]) result = optimization_group.create_result_data()["dataset1"] assert "global_matrix" in result assert "global_clp_label" in result clp = result.clp assert clp.shape == (4, 4) print(np.diagonal(clp)) assert all(np.isclose(1.0, c) for c in np.diagonal(clp))
def optimization_group(request) -> OptimizationGroup: model = suite.model model.megacomplex["m1"].is_index_dependent = request.param[1] model.is_index_dependent = request.param[1] model.dataset_group_models["default"].link_clp = request.param[0] dataset = simulate( suite.sim_model, "dataset1", suite.wanted_parameters, { "global": suite.global_axis, "model": suite.model_axis }, ) scheme = Scheme(model=model, parameters=suite.initial_parameters, data={"dataset1": dataset}) return OptimizationGroup(scheme, model.get_dataset_groups()["default"])
def test_penalties(index_dependent, link_clp): model = deepcopy(suite.model) model.dataset_group_models["default"].link_clp = link_clp model.megacomplex["m1"].is_index_dependent = index_dependent model.clp_area_penalties.append( EqualAreaPenalty.from_dict({ "source": "s1", "source_intervals": [(1, 20)], "target": "s2", "target_intervals": [(20, 45)], "parameter": "3", "weight": 10, })) parameters = ParameterGroup.from_list([11e-4, 22e-5, 2]) global_axis = np.arange(50) print(f"{link_clp=}\n{index_dependent=}") dataset = simulate( suite.sim_model, "dataset1", parameters, { "global": global_axis, "model": suite.model_axis }, ) scheme = Scheme(model=model, parameters=parameters, data={"dataset1": dataset}) optimization_group = OptimizationGroup( scheme, model.get_dataset_groups()["default"]) assert isinstance(optimization_group.additional_penalty, np.ndarray) assert optimization_group.additional_penalty.size == 1 assert optimization_group.additional_penalty[0] != 0 assert isinstance(optimization_group.full_penalty, np.ndarray) assert (optimization_group.full_penalty.size == (suite.model_axis.size * global_axis.size) + optimization_group.additional_penalty.size)
def simulate(
    self,
    dataset: str,
    parameters: ParameterGroup,
    axes: dict[str, np.ndarray] = None,
    clp: np.ndarray | xr.DataArray = None,
    noise: bool = False,
    noise_std_dev: float = 1.0,
    noise_seed: int = None,
) -> xr.Dataset:
    """Simulates the model.

    Parameters
    ----------
    dataset :
        Label of the dataset to simulate.
    parameters :
        The parameters for the simulation.
    axes :
        A dictionary with axes for simulation.
    clp :
        Conditionally linear parameters. Used instead of `model.global_matrix` if provided.
    noise :
        If `True` noise is added to the simulated data.
    noise_std_dev :
        The standard deviation of the noise.
    noise_seed :
        Seed for the noise.
    """
    return simulate(
        self,
        dataset,
        parameters,
        axes=axes,
        clp=clp,
        noise=noise,
        noise_std_dev=noise_std_dev,
        noise_seed=noise_seed,
    )
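# Usage sketch (not part of the original snippet): how the simulate wrapper above is
# typically called, mirroring the calls made in the surrounding tests. The model object,
# parameter values, and axis ranges are hypothetical placeholders; only the call signature
# follows the docstring above.
import numpy as np


def example_noisy_simulation(model, parameters):
    # Axes for the simulation: a "global" and a "model" dimension, as in the newer-style
    # snippets in this collection (older snippets use "e"/"c" axis names instead).
    axes = {
        "global": np.arange(0, 50, 1.0),
        "model": np.arange(0, 150, 1.5),
    }
    # Simulate with additive noise; a fixed seed keeps the result reproducible.
    return model.simulate(
        "dataset1",
        parameters,
        axes=axes,
        noise=True,
        noise_std_dev=1e-2,
        noise_seed=42,
    )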
def test_optimization(suite, index_dependent, grouped, weight, method): model = suite.model model.is_grouped = grouped model.is_index_dependent = index_dependent print("Grouped:", grouped) print("Index dependent:", index_dependent) assert model.grouped() == grouped assert model.index_dependent() == index_dependent sim_model = suite.sim_model sim_model.is_grouped = grouped sim_model.is_index_dependent = index_dependent print(model.validate()) assert model.valid() print(sim_model.validate()) assert sim_model.valid() wanted_parameters = suite.wanted_parameters print(wanted_parameters) print(sim_model.validate(wanted_parameters)) assert sim_model.valid(wanted_parameters) initial_parameters = suite.initial_parameters print(initial_parameters) print(model.validate(initial_parameters)) assert model.valid(initial_parameters) nr_datasets = 3 if issubclass(suite, ThreeDatasetDecay) else 1 data = {} for i in range(nr_datasets): e_axis = getattr(suite, "e_axis" if i == 0 else f"e_axis{i+1}") c_axis = getattr(suite, "c_axis" if i == 0 else f"c_axis{i+1}") dataset = simulate( sim_model, f"dataset{i+1}", wanted_parameters, {"e": e_axis, "c": c_axis} ) print(f"Dataset {i+1}") print("=============") print(dataset) if hasattr(suite, "scale"): dataset["data"] /= suite.scale if weight: dataset["weight"] = xr.DataArray( np.ones_like(dataset.data) * 0.5, coords=dataset.coords ) assert dataset.data.shape == (c_axis.size, e_axis.size) data[f"dataset{i+1}"] = dataset scheme = Scheme( model=model, parameters=initial_parameters, data=data, nfev=10, group_tolerance=0.1, optimization_method=method, ) result = optimize(scheme) print(result.optimized_parameters) for label, param in result.optimized_parameters.all(): if param.vary: assert np.allclose(param.value, wanted_parameters.get(label).value, rtol=1e-1) for i, dataset in enumerate(data.values()): resultdata = result.data[f"dataset{i+1}"] print(f"Result Data {i+1}") print("=================") print(resultdata) assert "residual" in resultdata assert "residual_left_singular_vectors" in resultdata assert "residual_right_singular_vectors" in resultdata assert "residual_singular_values" in resultdata assert np.array_equal(dataset.c, resultdata.c) assert np.array_equal(dataset.e, resultdata.e) assert dataset.data.shape == resultdata.data.shape print(dataset.data[0, 0], resultdata.data[0, 0]) assert np.allclose(dataset.data, resultdata.data) if weight: assert "weight" in resultdata assert "weighted_data" in resultdata assert np.allclose(resultdata.data, resultdata.weighted_data * 2) assert "weighted_residual" in resultdata assert "weighted_residual_left_singular_vectors" in resultdata assert "weighted_residual_right_singular_vectors" in resultdata assert "weighted_residual_singular_values" in resultdata assert callable(model.additional_penalty_function) assert model.additional_penalty_function_called if isinstance(model, DecayModel): assert callable(model.constrain_matrix_function) assert model.constrain_matrix_function_called assert callable(model.retrieve_clp_function) assert model.retrieve_clp_function_called else: assert not model.constrain_matrix_function_called assert not model.retrieve_clp_function_called
def test_optimization(suite, is_index_dependent, link_clp, weight, method): model = suite.model model.megacomplex["m1"].is_index_dependent = is_index_dependent print("Link CLP:", link_clp) print("Index dependent:", is_index_dependent) sim_model = suite.sim_model sim_model.megacomplex["m1"].is_index_dependent = is_index_dependent print(model.validate()) assert model.valid() print(sim_model.validate()) assert sim_model.valid() wanted_parameters = suite.wanted_parameters print(wanted_parameters) print(sim_model.validate(wanted_parameters)) assert sim_model.valid(wanted_parameters) initial_parameters = suite.initial_parameters print(initial_parameters) print(model.validate(initial_parameters)) assert model.valid(initial_parameters) assert ( model.dataset["dataset1"].fill(model, initial_parameters).is_index_dependent() == is_index_dependent ) nr_datasets = 3 if issubclass(suite, ThreeDatasetDecay) else 1 data = {} for i in range(nr_datasets): global_axis = getattr(suite, "global_axis" if i == 0 else f"global_axis{i+1}") model_axis = getattr(suite, "model_axis" if i == 0 else f"model_axis{i+1}") dataset = simulate( sim_model, f"dataset{i+1}", wanted_parameters, {"global": global_axis, "model": model_axis}, ) print(f"Dataset {i+1}") print("=============") print(dataset) if hasattr(suite, "scale"): dataset["data"] /= suite.scale if weight: dataset["weight"] = xr.DataArray( np.ones_like(dataset.data) * 0.5, coords=dataset.data.coords ) assert dataset.data.shape == (model_axis.size, global_axis.size) data[f"dataset{i+1}"] = dataset scheme = Scheme( model=model, parameters=initial_parameters, data=data, maximum_number_function_evaluations=10, clp_link_tolerance=0.1, optimization_method=method, ) model.dataset_group_models["default"].link_clp = link_clp result = optimize(scheme, raise_exception=True) print(result.optimized_parameters) assert result.success optimized_scheme = result.get_scheme() assert result.optimized_parameters == optimized_scheme.parameters for dataset in optimized_scheme.data.values(): assert "fitted_data" not in dataset if weight: assert "weight" in dataset for label, param in result.optimized_parameters.all(): if param.vary: assert np.allclose(param.value, wanted_parameters.get(label).value, rtol=1e-1) for i, dataset in enumerate(data.values()): resultdata = result.data[f"dataset{i+1}"] print(f"Result Data {i+1}") print("=================") print(resultdata) assert "residual" in resultdata assert "residual_left_singular_vectors" in resultdata assert "residual_right_singular_vectors" in resultdata assert "residual_singular_values" in resultdata assert np.array_equal(dataset.coords["model"], resultdata.coords["model"]) assert np.array_equal(dataset.coords["global"], resultdata.coords["global"]) assert dataset.data.shape == resultdata.data.shape print(dataset.data[0, 0], resultdata.data[0, 0]) assert np.allclose(dataset.data, resultdata.data) if weight: assert "weight" in resultdata assert "weighted_residual" in resultdata assert "weighted_residual_left_singular_vectors" in resultdata assert "weighted_residual_right_singular_vectors" in resultdata assert "weighted_residual_singular_values" in resultdata
def test_fitting(suite, index_dependent, grouped, weight): model = suite.model def gr(): return grouped model.grouped = gr def id(): return index_dependent model.index_dependent = id sim_model = suite.sim_model est_axis = suite.e_axis cal_axis = suite.c_axis print(model.validate()) assert model.valid() print(sim_model.validate()) assert sim_model.valid() wanted = suite.wanted print(wanted) print(sim_model.validate(wanted)) assert sim_model.valid(wanted) initial = suite.initial print(initial) print(model.validate(initial)) assert model.valid(initial) dataset = simulate(sim_model, "dataset1", wanted, { "e": est_axis, "c": cal_axis }) print(dataset) if weight: dataset["weight"] = xr.DataArray(np.ones_like(dataset.data) * 0.5, coords=dataset.coords) assert dataset.data.shape == (cal_axis.size, est_axis.size) data = {"dataset1": dataset} scheme = Scheme(model=model, parameter=initial, data=data, nfev=10) result = optimize(scheme) print(result.optimized_parameter) print(result.data["dataset1"]) for _, param in result.optimized_parameter.all(): assert np.allclose(param.value, wanted.get(param.full_label).value, rtol=1e-1) resultdata = result.data["dataset1"] print(resultdata) assert "residual" in resultdata assert "residual_left_singular_vectors" in resultdata assert "residual_right_singular_vectors" in resultdata assert "residual_singular_values" in resultdata assert np.array_equal(dataset.c, resultdata.c) assert np.array_equal(dataset.e, resultdata.e) assert dataset.data.shape == resultdata.data.shape print(dataset.data[0, 0], resultdata.data[0, 0]) assert np.allclose(dataset.data, resultdata.data) if weight: assert "weight" in resultdata assert "weighted_residual" in resultdata assert "weighted_residual_left_singular_vectors" in resultdata assert "weighted_residual_right_singular_vectors" in resultdata assert "weighted_residual_singular_values" in resultdata
def test_spectral_irf(suite):
    model = suite.model
    assert model.valid(), model.validate()

    parameters = suite.parameters
    assert model.valid(parameters), model.validate(parameters)

    sim_model = deepcopy(model)
    sim_model.dataset["dataset1"].global_megacomplex = ["mc2"]
    dataset = simulate(sim_model, "dataset1", parameters, suite.axis)

    assert dataset.data.shape == (suite.axis["time"].size, suite.axis["spectral"].size)

    data = {"dataset1": dataset}

    scheme = Scheme(
        model=model,
        parameters=parameters,
        data=data,
        maximum_number_function_evaluations=20,
    )
    result = optimize(scheme)

    for label, param in result.optimized_parameters.all():
        assert np.allclose(param.value, parameters.get(label).value), dedent(f"""
            Error in {suite.__name__} comparing {param.full_label},
            - diff={param.value-parameters.get(label).value}
            """)

    resultdata = result.data["dataset1"]
    # print(resultdata)

    assert np.array_equal(dataset["time"], resultdata["time"])
    assert np.array_equal(dataset["spectral"], resultdata["spectral"])
    assert dataset.data.shape == resultdata.data.shape
    assert dataset.data.shape == resultdata.fitted_data.shape
    # assert np.allclose(dataset.data, resultdata.fitted_data, atol=1e-14)

    fit_data_max_at_start = resultdata.fitted_data.isel(spectral=0).argmax(axis=0)
    fit_data_max_at_end = resultdata.fitted_data.isel(spectral=-1).argmax(axis=0)

    if suite is NoIrfDispersion:
        assert "center_dispersion_1" not in resultdata
        assert fit_data_max_at_start == fit_data_max_at_end
    else:
        assert "center_dispersion_1" in resultdata
        assert fit_data_max_at_start != fit_data_max_at_end
        if abs(fit_data_max_at_start - fit_data_max_at_end) < 3:
            warnings.warn(
                dedent("""
                Bad test, one of the following could be the case:
                - dispersion too small
                - spectral window too small
                - time resolution (around the maximum of the IRF) too low
                """))

        for x in suite.axis["spectral"]:
            # calculated irf location
            model_irf_center = suite.model.irf["irf1"].center
            model_dispersion_center = suite.model.irf["irf1"].dispersion_center
            model_center_dispersion_coefficients = suite.model.irf["irf1"].center_dispersion_coefficients
            calc_irf_location_at_x = _calculate_irf_position(
                x, model_irf_center, model_dispersion_center, model_center_dispersion_coefficients)
            # fitted irf location
            fitted_irf_loc_at_x = resultdata["irf_center_location"].sel(spectral=x)
            assert np.allclose(calc_irf_location_at_x, fitted_irf_loc_at_x.values), dedent(f"""
                Error in {suite.__name__} comparing irf_center_location,
                - diff={calc_irf_location_at_x-fitted_irf_loc_at_x.values}
                """)

    assert "species_associated_spectra" in resultdata
    assert "decay_associated_spectra" in resultdata
    assert "irf_center" in resultdata
SIMULATION_MODEL_YML = generate_model_yml(
    generator_name="spectral_decay_parallel",
    generator_arguments={"nr_compartments": 3, "irf": True},
)
SIMULATION_MODEL = load_model(SIMULATION_MODEL_YML, format_name="yml_str")

MODEL_YML = generate_model_yml(
    generator_name="decay_parallel",
    generator_arguments={"nr_compartments": 3, "irf": True},
)
MODEL = load_model(MODEL_YML, format_name="yml_str")

DATASET = simulate(
    SIMULATION_MODEL,
    "dataset_1",
    SIMULATION_PARAMETERS,
    SIMULATION_COORDINATES,
    noise=True,
    noise_std_dev=1e-2,
)

SCHEME = Scheme(model=MODEL, parameters=PARAMETERS, data={"dataset_1": DATASET})
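# Follow-up sketch (an assumption, not part of the original setup code): once SCHEME is
# built as above, fitting follows the same optimize(scheme) pattern used throughout the
# tests in this collection. It assumes optimize and the *_PARAMETERS objects are already
# in scope, as the other snippets do.
RESULT = optimize(SCHEME, raise_exception=True)
print(RESULT.optimized_parameters)
assert RESULT.success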
def test_multiple_groups(): wanted_parameters = ParameterGroup.from_list([101e-4]) initial_parameters = ParameterGroup.from_list([100e-5]) global_axis = np.asarray([1.0]) model_axis = np.arange(0, 150, 1.5) sim_model_dict = { "megacomplex": { "m1": { "is_index_dependent": False }, "m2": { "type": "global_complex" } }, "dataset": { "dataset1": { "initial_concentration": [], "megacomplex": ["m1"], "global_megacomplex": ["m2"], "kinetic": ["1"], } }, } sim_model = DecayModel.from_dict(sim_model_dict) model_dict = { "dataset_groups": { "g1": {}, "g2": { "residual_function": "non_negative_least_squares" } }, "megacomplex": { "m1": { "is_index_dependent": False } }, "dataset": { "dataset1": { "group": "g1", "initial_concentration": [], "megacomplex": ["m1"], "kinetic": ["1"], }, "dataset2": { "group": "g2", "initial_concentration": [], "megacomplex": ["m1"], "kinetic": ["1"], }, }, } model = DecayModel.from_dict(model_dict) dataset = simulate( sim_model, "dataset1", wanted_parameters, { "global": global_axis, "model": model_axis }, ) scheme = Scheme( model=model, parameters=initial_parameters, data={ "dataset1": dataset, "dataset2": dataset }, maximum_number_function_evaluations=10, clp_link_tolerance=0.1, ) result = optimize(scheme, raise_exception=True) print(result.optimized_parameters) assert result.success for label, param in result.optimized_parameters.all(): if param.vary: assert np.allclose(param.value, wanted_parameters.get(label).value, rtol=1e-1)
def test_coherent_artifact(spectral_dependence: str): model_dict = { "initial_concentration": { "j1": {"compartments": ["s1"], "parameters": ["irf_center"]}, }, "megacomplex": { "mc1": {"type": "decay", "k_matrix": ["k1"]}, "mc2": {"type": "coherent-artifact", "order": 3}, }, "k_matrix": { "k1": { "matrix": { ("s1", "s1"): "rate", } } }, "irf": { "irf1": { "type": "spectral-multi-gaussian", "center": ["irf_center"], "width": ["irf_width"], }, }, "dataset": { "dataset1": { "initial_concentration": "j1", "megacomplex": ["mc1", "mc2"], "irf": "irf1", }, }, } parameter_list = [ ["rate", 101e-4], ["irf_center", 10, {"vary": False, "non-negative": False}], ["irf_width", 20, {"vary": False, "non-negative": False}], ] irf_spec = model_dict["irf"]["irf1"] if spectral_dependence == "dispersed": irf_spec["dispersion_center"] = "irf_dispc" irf_spec["center_dispersion"] = ["irf_disp1", "irf_disp2"] parameter_list += [ ["irf_dispc", 300, {"vary": False, "non-negative": False}], ["irf_disp1", 0.01, {"vary": False, "non-negative": False}], ["irf_disp2", 0.001, {"vary": False, "non-negative": False}], ] elif spectral_dependence == "shifted": irf_spec["shift"] = ["irf_shift1", "irf_shift2", "irf_shift3"] parameter_list += [ ["irf_shift1", -2], ["irf_shift2", 0], ["irf_shift3", 2], ] model = Model.from_dict( model_dict.copy(), megacomplex_types={ "decay": DecayMegacomplex, "coherent-artifact": CoherentArtifactMegacomplex, }, ) parameters = ParameterGroup.from_list(parameter_list) time = np.arange(0, 50, 1.5) spectral = np.asarray([200, 300, 400]) coords = {"time": time, "spectral": spectral} dataset_model = model.dataset["dataset1"].fill(model, parameters) dataset_model.overwrite_global_dimension("spectral") dataset_model.set_coordinates(coords) matrix = calculate_matrix(dataset_model, {"spectral": 1}) compartments = matrix.clp_labels print(compartments) assert len(compartments) == 4 for i in range(1, 4): assert compartments[i] == f"coherent_artifact_{i}" assert matrix.matrix.shape == (time.size, 4) clp = xr.DataArray( np.ones((3, 4)), coords=[ ("spectral", spectral), ( "clp_label", [ "s1", "coherent_artifact_1", "coherent_artifact_2", "coherent_artifact_3", ], ), ], ) axis = {"time": time, "spectral": clp.spectral} data = simulate(model, "dataset1", parameters, axis, clp) dataset = {"dataset1": data} scheme = Scheme( model=model, parameters=parameters, data=dataset, maximum_number_function_evaluations=20 ) result = optimize(scheme) print(result.optimized_parameters) for label, param in result.optimized_parameters.all(): assert np.allclose(param.value, parameters.get(label).value, rtol=1e-8) resultdata = result.data["dataset1"] assert np.array_equal(data.time, resultdata.time) assert np.array_equal(data.spectral, resultdata.spectral) assert data.data.shape == resultdata.data.shape assert data.data.shape == resultdata.fitted_data.shape assert np.allclose(data.data, resultdata.fitted_data) assert "coherent_artifact_response" in resultdata if spectral_dependence == "none": assert resultdata["coherent_artifact_response"].shape == (time.size, 3) else: assert resultdata["coherent_artifact_response"].shape == (spectral.size, time.size, 3) assert "coherent_artifact_associated_spectra" in resultdata assert resultdata["coherent_artifact_associated_spectra"].shape == (3, 3)