Example #1
def test_parameter_to_csv(tmpdir):
    csv_path = tmpdir.join("parameters.csv")
    params = ParameterGroup.from_yaml("""
    b:
        - ["1", 0.25, {vary: false, min: 0, max: 8}]
        - ["2", 0.75, {expr: '1 - $b.1', non-negative: true}]
    rates:
        - ["total", 2]
        - ["branch1", {expr: '$rates.total * $b.1'}]
        - ["branch2", {expr: '$rates.total * $b.2'}]
    """)

    params.to_csv(csv_path)

    with open(csv_path) as f:
        print(f.read())
    params_from_csv = ParameterGroup.from_csv(csv_path)

    for label, p in params.all():
        assert params_from_csv.has(label)
        p_from_csv = params_from_csv.get(label)
        assert p.label == p_from_csv.label
        assert p.value == p_from_csv.value
        assert p.minimum == p_from_csv.minimum
        assert p.maximum == p_from_csv.maximum
        assert p.vary == p_from_csv.vary
        assert p.non_negative == p_from_csv.non_negative
        assert p.expression == p_from_csv.expression
Example #2
class OneCompartmentDecay:
    scale = 2
    wanted_parameters = ParameterGroup.from_list([101e-4])
    initial_parameters = ParameterGroup.from_list([100e-5, [scale, {"vary": False}]])

    global_axis = np.asarray([1.0])
    model_axis = np.arange(0, 150, 1.5)

    sim_model_dict = {
        "megacomplex": {"m1": {"is_index_dependent": False}, "m2": {"type": "global_complex"}},
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "global_megacomplex": ["m2"],
                "kinetic": ["1"],
            }
        },
    }
    sim_model = DecayModel.from_dict(sim_model_dict)
    model_dict = {
        "megacomplex": {"m1": {"is_index_dependent": False}},
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "kinetic": ["1"],
                "scale": "2",
            }
        },
    }
    model_dict["dataset"]["dataset1"]["scale"] = "2"
    model = DecayModel.from_dict(model_dict)
Example #3
class MultichannelMulticomponentDecay:
    wanted_parameters = ParameterGroup.from_dict(
        {
            "k": [0.006, 0.003, 0.0003, 0.03],
            "loc": [
                ["1", 14705],
                ["2", 13513],
                ["3", 14492],
                ["4", 14388],
            ],
            "amp": [
                ["1", 1],
                ["2", 2],
                ["3", 5],
                ["4", 20],
            ],
            "del": [
                ["1", 400],
                ["2", 100],
                ["3", 300],
                ["4", 200],
            ],
        }
    )
    initial_parameters = ParameterGroup.from_dict({"k": [0.006, 0.003, 0.0003, 0.03]})

    global_axis = np.arange(12820, 15120, 50)
    model_axis = np.arange(0, 150, 1.5)

    sim_model = DecayModel.from_dict(
        {
            "megacomplex": {
                "m1": {"is_index_dependent": False},
                "m2": {
                    "type": "global_complex_shaped",
                    "location": ["loc.1", "loc.2", "loc.3", "loc.4"],
                    "delta": ["del.1", "del.2", "del.3", "del.4"],
                    "amplitude": ["amp.1", "amp.2", "amp.3", "amp.4"],
                },
            },
            "dataset": {
                "dataset1": {
                    "megacomplex": ["m1"],
                    "global_megacomplex": ["m2"],
                    "kinetic": ["k.1", "k.2", "k.3", "k.4"],
                }
            },
        }
    )
    model = DecayModel.from_dict(
        {
            "megacomplex": {"m1": {"is_index_dependent": False}},
            "dataset": {
                "dataset1": {
                    "megacomplex": ["m1"],
                    "kinetic": ["k.1", "k.2", "k.3", "k.4"],
                }
            },
        }
    )
Example #4
class ThreeDatasetDecay:
    wanted_parameters = ParameterGroup.from_list([101e-4, 201e-3])
    initial_parameters = ParameterGroup.from_list([100e-5, 200e-3])

    e_axis = np.asarray([1.0])
    c_axis = np.arange(0, 150, 1.5)

    e_axis2 = np.asarray([1.0, 2.01])
    c_axis2 = np.arange(0, 100, 1.5)

    e_axis3 = np.asarray([0.99, 3.0])
    c_axis3 = np.arange(0, 150, 1.5)

    model_dict = {
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": [],
                "kinetic": ["1"]
            },
            "dataset2": {
                "initial_concentration": [],
                "megacomplex": [],
                "kinetic": ["1", "2"]
            },
            "dataset3": {
                "initial_concentration": [],
                "megacomplex": [],
                "kinetic": ["2"]
            },
        },
    }
    sim_model = DecayModel.from_dict(model_dict)
    model = sim_model
Example #5
class OneComponentOneChannelGaussianIrf:
    model = KineticImageModel.from_dict({
        'initial_concentration': {
            'j1': {
                'compartments': ['s1'],
                'parameters': ['2']
            },
        },
        'megacomplex': {
            'mc1': {'k_matrix': ['k1']},
        },
        'k_matrix': {
            "k1": {'matrix': {("s1", "s1"): '1', }}
        },
        'irf': {
            'irf1': {'type': 'gaussian', 'center': '2', 'width': '3'},
        },
        'dataset': {
            'dataset1': {
                'initial_concentration': 'j1',
                'irf': 'irf1',
                'megacomplex': ['mc1'],
            },
        },
    })

    initial = ParameterGroup.from_list([101e-4, 0.1, 5,
                                        [1, {'vary': False, 'non-negative': False}]])
    wanted = ParameterGroup.from_list([101e-3, 0.3, 10,
                                       [1, {'vary': False, 'non-negative': False}]])

    time = np.asarray(np.arange(-10, 50, 1.5))
    axis = {"time": time, "pixel": np.asarray([0])}
    clp = xr.DataArray([[1]], coords=[('pixel', [0]), ('clp_label', ['s1'])])
Example #6
class TwoCompartmentDecay:
    wanted_parameters = ParameterGroup.from_list([11e-4, 22e-5])
    initial_parameters = ParameterGroup.from_list([10e-4, 20e-5])

    global_axis = np.asarray([1.0])
    model_axis = np.arange(0, 150, 1.5)

    sim_model = DecayModel.from_dict(
        {
            "megacomplex": {"m1": {"is_index_dependent": False}, "m2": {"type": "global_complex"}},
            "dataset": {
                "dataset1": {
                    "initial_concentration": [],
                    "megacomplex": ["m1"],
                    "global_megacomplex": ["m2"],
                    "kinetic": ["1", "2"],
                }
            },
        }
    )
    model = DecayModel.from_dict(
        {
            "megacomplex": {"m1": {"is_index_dependent": False}},
            "dataset": {
                "dataset1": {
                    "initial_concentration": [],
                    "megacomplex": ["m1"],
                    "kinetic": ["1", "2"],
                }
            },
        }
    )
Example #7
class OneComponentOneChannelGaussianIrf:
    model = KineticImageModel.from_dict({
        "initial_concentration": {
            "j1": {
                "compartments": ["s1"],
                "parameters": ["2"]
            },
        },
        "megacomplex": {
            "mc1": {
                "k_matrix": ["k1"]
            },
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "1",
                }
            }
        },
        "irf": {
            "irf1": {
                "type": "gaussian",
                "center": "2",
                "width": "3"
            },
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "irf": "irf1",
                "megacomplex": ["mc1"],
            },
        },
    })

    initial_parameters = ParameterGroup.from_list(
        [101e-4, 0.1, 1, [1, {
            "vary": False,
            "non-negative": False
        }]])
    wanted_parameters = ParameterGroup.from_list([
        [101e-3, {
            "non-negative": True
        }],
        [0.2, {
            "non-negative": True
        }],
        [2, {
            "non-negative": True
        }],
        [1, {
            "vary": False,
            "non-negative": False
        }],
    ])

    time = np.asarray(np.arange(-10, 50, 1.5))
    axis = {"time": time, "pixel": np.asarray([0])}
    clp = xr.DataArray([[1]], coords=[("pixel", [0]), ("clp_label", ["s1"])])
Example #8
class ThreeComponentSequential:
    model = KineticImageModel.from_dict({
        'initial_concentration': {
            'j1': {
                'compartments': ['s1', 's2', 's3'],
                'parameters': ['j.1', 'j.0', 'j.0']
            },
        },
        'megacomplex': {
            'mc1': {'k_matrix': ['k1']},
        },
        'k_matrix': {
            "k1": {'matrix': {
                ("s2", "s1"): 'kinetic.1',
                ("s3", "s2"): 'kinetic.2',
                ("s3", "s3"): 'kinetic.3',
            }}
        },
        'irf': {
            'irf1': {'type': 'multi-gaussian', 'center': ['irf.center'], 'width': ['irf.width']},
        },
        'dataset': {
            'dataset1': {
                'initial_concentration': 'j1',
                'irf': 'irf1',
                'megacomplex': ['mc1'],
            },
        },
    })

    initial = ParameterGroup.from_dict({
        'kinetic': [
            ["1", 501e-3],
            ["2", 202e-4],
            ["3", 105e-5],
            {'non-negative': True},
        ],
        'irf': [['center', 1.3], ['width', 7.8]],
        'j': [['1', 1, {'vary': False, 'non-negative': False}],
              ['0', 0, {'vary': False, 'non-negative': False}]],
    })
    wanted = ParameterGroup.from_dict({
        'kinetic': [
            ["1", 501e-3],
            ["2", 202e-4],
            ["3", 105e-5],
        ],
        'irf': [['center', 1.3], ['width', 7.8]],
        'j': [['1', 1, {'vary': False, 'non-negative': False}],
              ['0', 0, {'vary': False, 'non-negative': False}]],
    })

    time = np.asarray(np.arange(-10, 50, 1.0))
    pixel = np.arange(600, 750, 10)
    axis = {"time": time, "pixel": pixel}

    clp = _create_gaussian_clp(
        ['s1', 's2', 's3'], [7, 3, 30], [620, 670, 720], [10, 30, 50], pixel)
Example #9
class OneComponentOneChannelMeasuredIrf:
    model = KineticImageModel.from_dict({
        "initial_concentration": {
            "j1": {
                "compartments": ["s1"],
                "parameters": ["2"]
            },
        },
        "megacomplex": {
            "mc1": {
                "k_matrix": ["k1"]
            },
        },
        "k_matrix": {
            "k1": {
                "matrix": {
                    ("s1", "s1"): "1",
                }
            }
        },
        "irf": {
            "irf1": {
                "type": "measured"
            },
        },
        "dataset": {
            "dataset1": {
                "initial_concentration": "j1",
                "irf": "irf1",
                "megacomplex": ["mc1"],
            },
        },
    })

    initial_parameters = ParameterGroup.from_list(
        [101e-4, [1, {
            "vary": False,
            "non-negative": False
        }]])
    wanted_parameters = ParameterGroup.from_list(
        [101e-3, [1, {
            "vary": False,
            "non-negative": False
        }]])

    time = np.asarray(np.arange(-10, 50, 1.5))
    axis = {"time": time, "pixel": np.asarray([0])}

    center = 0
    width = 5
    irf = (1 / np.sqrt(2 * np.pi)) * np.exp(-(time - center) *
                                            (time - center) /
                                            (2 * width * width))
    model.irf["irf1"].irfdata = irf

    clp = xr.DataArray([[1]], coords=[("pixel", [0]), ("clp_label", ["s1"])])
Example #10
def test_unibranched():

    compartments = ['s1', 's2', 's3']
    matrix = {
        ('s2', 's1'): "1",
        ('s3', 's2'): "2",
        ('s2', 's2'): "2",
        ('s3', 's3'): "3",
    }

    params = ParameterGroup.from_list([3, 4, 5, 1, 0])
    mat = KMatrix()
    mat.label = ""
    mat.matrix = matrix
    mat = mat.fill(None, params)

    jvec = ["4", "5", "5"]
    con = InitialConcentration()
    con.label = ""
    con.compartments = compartments
    con.parameters = jvec
    con = con.fill(None, params)

    assert not mat.is_unibranched(con)

    matrix = {
        ('s2', 's1'): "1",
        ('s2', 's2'): "2",
    }

    compartments = ['s1', 's2']
    params = ParameterGroup.from_list([0.55, 0.0404, 1, 0])
    mat = KMatrix()
    mat.label = ""
    mat.matrix = matrix
    mat = mat.fill(None, params)

    jvec = ["3", "4"]
    con = InitialConcentration()
    con.label = ""
    con.compartments = compartments
    con.parameters = jvec
    con = con.fill(None, params)

    print(mat.reduced(compartments))
    assert mat.is_unibranched(con)

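    # Derived check on the expected values (not part of the original test): for this
    # sequential scheme the off-diagonal magnitude is k1 / (k1 - k2)
    # = 0.55 / (0.55 - 0.0404) ≈ 1.079278, matching the asserted matrix below.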
    wanted_a_matrix = np.asarray([
        [1, -1.079278],
        [0, 1.079278],
    ])

    print(mat.a_matrix_unibranch(con))
    assert np.allclose(mat.a_matrix_unibranch(con), wanted_a_matrix)
Example #12
class ThreeDatasetDecay:
    wanted_parameters = ParameterGroup.from_list([101e-4, 201e-3])
    initial_parameters = ParameterGroup.from_list([100e-5, 200e-3])

    global_axis = np.asarray([1.0])
    model_axis = np.arange(0, 150, 1.5)

    global_axis2 = np.asarray([1.0, 2.01])
    model_axis2 = np.arange(0, 100, 1.5)

    global_axis3 = np.asarray([0.99, 3.0])
    model_axis3 = np.arange(0, 150, 1.5)

    sim_model_dict = {
        "megacomplex": {"m1": {"is_index_dependent": False}, "m2": {"type": "global_complex"}},
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "global_megacomplex": ["m2"],
                "kinetic": ["1"],
            },
            "dataset2": {
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "global_megacomplex": ["m2"],
                "kinetic": ["1", "2"],
            },
            "dataset3": {
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "global_megacomplex": ["m2"],
                "kinetic": ["2"],
            },
        },
    }
    sim_model = DecayModel.from_dict(sim_model_dict)

    model_dict = {
        "megacomplex": {"m1": {"is_index_dependent": False}},
        "dataset": {
            "dataset1": {"initial_concentration": [], "megacomplex": ["m1"], "kinetic": ["1"]},
            "dataset2": {
                "initial_concentration": [],
                "megacomplex": ["m1"],
                "kinetic": ["1", "2"],
            },
            "dataset3": {"initial_concentration": [], "megacomplex": ["m1"], "kinetic": ["2"]},
        },
    }
    model = DecayModel.from_dict(model_dict)
Example #13
class MultichannelMulticomponentDecay:
    wanted_parameters = ParameterGroup.from_dict({
        "k": [0.006, 0.003, 0.0003, 0.03],
        "loc": [
            ["1", 14705],
            ["2", 13513],
            ["3", 14492],
            ["4", 14388],
        ],
        "amp": [
            ["1", 1],
            ["2", 2],
            ["3", 5],
            ["4", 20],
        ],
        "del": [
            ["1", 400],
            ["2", 100],
            ["3", 300],
            ["4", 200],
        ],
    })
    initial_parameters = ParameterGroup.from_dict(
        {"k": [0.006, 0.003, 0.0003, 0.03]})

    e_axis = np.arange(12820, 15120, 50)
    c_axis = np.arange(0, 150, 1.5)

    sim_model = GaussianDecayModel.from_dict({
        "compartment": ["s1", "s2", "s3", "s4"],
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": [],
                "kinetic": ["k.1", "k.2", "k.3", "k.4"],
                "location": ["loc.1", "loc.2", "loc.3", "loc.4"],
                "delta": ["del.1", "del.2", "del.3", "del.4"],
                "amplitude": ["amp.1", "amp.2", "amp.3", "amp.4"],
            }
        },
    })
    model = GaussianDecayModel.from_dict({
        "compartment": ["s1", "s2", "s3", "s4"],
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": [],
                "kinetic": ["k.1", "k.2", "k.3", "k.4"],
            }
        },
    })
Example #14
class MultichannelMulticomponentDecay:
    wanted = ParameterGroup.from_dict({
        'k': [.006, 0.003, 0.0003, 0.03],
        'loc': [
            ['1', 14705],
            ['2', 13513],
            ['3', 14492],
            ['4', 14388],
        ],
        'amp': [
            ['1', 1],
            ['2', 2],
            ['3', 5],
            ['4', 20],
        ],
        'del': [
            ['1', 400],
            ['2', 100],
            ['3', 300],
            ['4', 200],
        ]
    })
    initial = ParameterGroup.from_dict({'k': [.006, 0.003, 0.0003, 0.03]})

    e_axis = np.arange(12820, 15120, 50)
    c_axis = np.arange(0, 150, 1.5)

    sim_model = GaussianDecayModel.from_dict({
        'compartment': ["s1", "s2", "s3", "s4"],
        'dataset': {
            'dataset1': {
                'initial_concentration': [],
                'megacomplex': [],
                'kinetic': ['k.1', 'k.2', 'k.3', 'k.4'],
                'location': ['loc.1', 'loc.2', 'loc.3', 'loc.4'],
                'delta': ['del.1', 'del.2', 'del.3', 'del.4'],
                'amplitude': ['amp.1', 'amp.2', 'amp.3', 'amp.4'],
            }
        }
    })
    model = DecayModel.from_dict({
        'compartment': ["s1", "s2", "s3", "s4"],
        'dataset': {
            'dataset1': {
                'initial_concentration': [],
                'megacomplex': [],
                'kinetic': ['k.1', 'k.2', 'k.3', 'k.4']
            }
        }
    })
Example #15
def test_simulate_dataset():
    model = SimpleTestModel.from_dict(
        {"dataset": {
            "dataset1": {
                "megacomplex": [],
            },
        }})
    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 1])
    print(model.validate(parameter))
    assert model.valid(parameter)

    est_axis = np.asarray([1, 1, 1, 1])
    cal_axis = np.asarray([2, 2, 2])

    data = simulate(model, "dataset1", parameter, {
        "e": est_axis,
        "c": cal_axis
    })
    assert np.array_equal(data["c"], cal_axis)
    assert np.array_equal(data["e"], est_axis)
    assert data.data.shape == (3, 4)
    assert np.array_equal(
        data.data,
        np.asarray([
            [2, 4, 6],
            [4, 10, 16],
            [6, 16, 26],
            [8, 22, 36],
        ]).T,
    )
Example #16
def test_kinetic_residual_benchmark(benchmark, nnls):

    suite = ThreeComponentSequential
    model = suite.model

    sim_model = suite.sim_model

    wanted = suite.wanted

    initial = ParameterGroup.from_dict({
        'kinetic': [
            ["1", 501e-2],
            ["2", 202e-3],
            ["3", 105e-4],
        ],
        'irf': [['center', 0.3], ['width', 7.8]],
        'j': [['1', 1, {'vary': False, 'non-negative': False}],
              ['0', 0, {'vary': False, 'non-negative': False}]],
    })

    dataset = sim_model.simulate('dataset1', wanted, suite.axis)

    data = {'dataset1': dataset}
    scheme = Scheme(model=model, parameter=initial, data=data, nnls=nnls)
    optimizer = Optimizer(scheme)

    benchmark(optimizer._calculate_penalty, initial)
Example #17
def optimize_task(initial_parameter, scheme, verbose):

    problem_bag, groups = _create_problem_bag(scheme)

    minimizer = lmfit.Minimizer(calculate_penalty,
                                initial_parameter,
                                fcn_args=[scheme, problem_bag, groups],
                                fcn_kws=None,
                                iter_cb=None,
                                scale_covar=True,
                                nan_policy='omit',
                                reduce_fcn=None,
                                **{})
    verbose = 2 if verbose else 0
    lm_result = minimizer.minimize(method='least_squares',
                                   verbose=verbose,
                                   max_nfev=scheme.nfev)

    parameter = ParameterGroup.from_parameter_dict(lm_result.params)
    datasets = _create_result(scheme, parameter)
    covar = lm_result.covar if hasattr(lm_result, 'covar') else None

    return Result(scheme, datasets, parameter, lm_result.nfev,
                  lm_result.nvarys, lm_result.ndata, lm_result.nfree,
                  lm_result.chisqr, lm_result.redchi, lm_result.var_names,
                  covar)
Example #18
def test_matrix_non_unibranch(matrix):

    params = ParameterGroup.from_list(matrix.params)

    mat = KMatrix()
    mat.label = ""
    mat.matrix = matrix.matrix
    mat = mat.fill(None, params)

    con = InitialConcentration()
    con.label = ""
    con.compartments = matrix.compartments
    con.parameters = matrix.jvec
    con = con.fill(None, params)

    for comp in matrix.compartments:
        assert comp in mat.involved_compartments()

    print(mat.reduced(matrix.compartments))
    assert np.array_equal(mat.reduced(matrix.compartments), matrix.wanted_array)

    print(mat.full(matrix.compartments).T)
    assert np.allclose(mat.full(matrix.compartments), matrix.wanted_full)

    print(mat.eigen(matrix.compartments)[0])
    print(mat.eigen(matrix.compartments)[1])
    vals, vec = mat.eigen(matrix.compartments)
    assert np.allclose(vals, matrix.wanted_eigen_vals)
    assert np.allclose(vec, matrix.wanted_eigen_vec)

    print(mat._gamma(vec, con))
    assert np.allclose(mat._gamma(vec, con), matrix.wanted_gamma)

    print(mat.a_matrix_non_unibranch(con))
    assert np.allclose(mat.a_matrix_non_unibranch(con), matrix.wanted_a_matrix)
Example #20
File: csv.py Project: jsnel/pyglotaran
    def save_parameters(
        self,
        parameters: ParameterGroup,
        file_name: str,
        *,
        sep: str = ",",
        as_optimized: bool = True,
        replace_infinfinity: bool = True,
    ) -> None:
        """Save a :class:`ParameterGroup` to a CSV file.

        Parameters
        ----------
        parameters : ParameterGroup
            Parameters to be saved to file.
        file_name : str
            File to write the parameters to.
        sep : str
            Column separator to use, by default ','.
        as_optimized : bool
            Whether to include properties which are the result of optimization.
        replace_infinfinity : bool
            Whether to replace infinity values with empty strings.
        """
        df = parameters.to_dataframe(as_optimized=as_optimized)
        if replace_infinfinity is True:
            safe_dataframe_replace(df, "minimum", -np.inf, "")
            safe_dataframe_replace(df, "maximum", np.inf, "")
        df.to_csv(file_name, na_rep="None", index=False, sep=sep)
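
A minimal usage sketch for the saver above (hedged: the plugin class name CsvProjectIo is an assumption, and the keyword arguments are just those documented in the docstring):

saver = CsvProjectIo()  # hypothetical plugin class that defines save_parameters
parameters = ParameterGroup.from_list([1.0, 2.0])
saver.save_parameters(parameters, "parameters.csv", sep=";", as_optimized=False)
# With replace_infinfinity=True (the default), -inf/inf bounds are written
# as empty cells rather than the literal strings "-inf"/"inf".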
Example #21
def test_single_dataset():
    model = MockModel.from_dict({
        "dataset": {
            "dataset1": {
                "megacomplex": [],
            },
        }
    })
    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 10])
    print(model.validate(parameter))
    assert model.valid(parameter)

    data = {'dataset1': xr.DataArray(
        np.ones((3, 4)),
        coords=[('e', [1, 2, 3]), ('c', [5, 7, 9, 12])]
    ).to_dataset(name="data")}

    group = create_group(model, data)
    assert len(group) == 3
    assert [item[0][0] for _, item in group.items()] == [1, 2, 3]
    assert all([item[0][1].label == 'dataset1' for _, item in group.items()])

    result = [calculate_group_item(item, model, parameter, data) for item in group.values()]
    assert len(result) == 3
    print(result[0])
    assert result[0][1].shape == (4, 2)

    data = create_data_group(model, group, data)
    assert len(data) == 3
    assert data[1].shape[0] == 4
Example #22
def test_parameter_group_to_from_parameter_dict_list():
    parameter_group = load_parameters(
        """
    b:
        - ["1", 0.25, {vary: false, min: 0, max: 8}]
        - ["2", 0.75, {expr: '1 - $b.1', non-negative: true}]
    rates:
        - ["total", 2]
        - ["branch1", {expr: '$rates.total * $b.1'}]
        - ["branch2", {expr: '$rates.total * $b.2'}]
    """,
        format_name="yml_str",
    )

    parameter_dict_list = parameter_group.to_parameter_dict_list()
    parameter_group_from_dict_list = ParameterGroup.from_parameter_dict_list(parameter_dict_list)

    for label, wanted in parameter_group.all():
        got = parameter_group_from_dict_list.get(label)

        assert got.label == wanted.label
        assert got.full_label == wanted.full_label
        assert got.expression == wanted.expression
        assert got.maximum == wanted.maximum
        assert got.minimum == wanted.minimum
        assert got.non_negative == wanted.non_negative
        assert got.value == wanted.value
        assert got.vary == wanted.vary
Example #23
def test_multi_dataset_overlap():
    model = MockModel.from_dict({
        "dataset": {
            "dataset1": {
                "megacomplex": [],
            },
            "dataset2": {
                "megacomplex": [],
            },
        }
    })

    model.grouped = lambda: True
    print(model.validate())
    assert model.valid()
    assert model.grouped()

    parameters = ParameterGroup.from_list([1, 10])
    print(model.validate(parameters))
    assert model.valid(parameters)

    axis_e_1 = [1, 2, 3, 5]
    axis_c_1 = [5, 7]
    axis_e_2 = [0, 1.4, 2.4, 3.4, 9]
    axis_c_2 = [5, 7, 9, 12]
    data = {
        "dataset1":
        xr.DataArray(np.ones((4, 2)),
                     coords=[("e", axis_e_1),
                             ("c", axis_c_1)]).to_dataset(name="data"),
        "dataset2":
        xr.DataArray(np.ones((5, 4)),
                     coords=[("e", axis_e_2),
                             ("c", axis_c_2)]).to_dataset(name="data"),
    }

    scheme = Scheme(model, parameters, data, group_tolerance=5e-1)
    problem = Problem(scheme)
    bag = list(problem.bag)
    assert len(problem.groups) == 3
    assert "dataset1dataset2" in problem.groups
    assert problem.groups["dataset1dataset2"] == ["dataset1", "dataset2"]
    assert len(bag) == 6

    assert all(p.data.size == 4 for p in bag[:1])
    assert all(p.descriptor[0].label == "dataset1" for p in bag[1:5])
    assert all(all(p.descriptor[0].axis == axis_c_1) for p in bag[1:5])
    assert [p.descriptor[0].index for p in bag[1:5]] == axis_e_1

    assert all(p.data.size == 6 for p in bag[1:4])
    assert all(p.descriptor[1].label == "dataset2" for p in bag[1:4])
    assert all(all(p.descriptor[1].axis == axis_c_2) for p in bag[1:4])
    assert [p.descriptor[1].index for p in bag[1:4]] == axis_e_2[1:4]

    assert all(p.data.size == 4 for p in bag[5:])
    assert bag[4].descriptor[0].label == "dataset1"
    assert bag[5].descriptor[0].label == "dataset2"
    assert np.array_equal(bag[4].descriptor[0].axis, axis_c_1)
    assert np.array_equal(bag[5].descriptor[0].axis, axis_c_2)
    assert [p.descriptor[0].index for p in bag[1:4]] == axis_e_1[:-1]
Example #24
def test_single_dataset():
    model = MockModel.from_dict(
        {"dataset": {
            "dataset1": {
                "megacomplex": [],
            },
        }})
    model.grouped = lambda: True
    print(model.validate())
    assert model.valid()
    assert model.grouped()

    parameters = ParameterGroup.from_list([1, 10])
    print(model.validate(parameters))
    assert model.valid(parameters)
    axis_e = [1, 2, 3]
    axis_c = [5, 7, 9, 12]

    data = {
        "dataset1":
        xr.DataArray(np.ones((3, 4)),
                     coords=[("e", axis_e),
                             ("c", axis_c)]).to_dataset(name="data")
    }

    scheme = Scheme(model, parameters, data)
    problem = Problem(scheme)
    bag = problem.bag
    datasets = problem.groups
    assert len(datasets) == 1
    assert len(bag) == 3
    assert all(p.data.size == 4 for p in bag)
    assert all(p.descriptor[0].label == "dataset1" for p in bag)
    assert all(all(p.descriptor[0].axis == axis_c) for p in bag)
    assert [p.descriptor[0].index for p in bag] == axis_e
Example #25
def test_simulate_dataset():
    model = MockModel.from_dict({
        "dataset": {
            "dataset1": {
                "megacomplex": [],
            },
        }
    })
    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 1])
    print(model.validate(parameter))
    assert model.valid(parameter)

    est_axis = np.asarray([1, 1, 1, 1])
    cal_axis = np.asarray([2, 2, 2])

    data = simulate(model, 'dataset1', parameter, {'e': est_axis, 'c': cal_axis})
    assert np.array_equal(data["c"], cal_axis)
    assert np.array_equal(data["e"], est_axis)
    assert data.data.shape == (3, 4)
    assert np.array_equal(data.data, np.asarray([
        [2, 4, 6],
        [4, 10, 16],
        [6, 16, 26],
        [8, 22, 36],
    ]).T)
Example #26
def test_param_options():
    params = """
    - ["5", 1, {non-negative: false, min: -1, max: 1, vary: false}]
    - ["6", 4e2, {non-negative: true, min: -7e2, max: 8e2, vary: true}]
    - ["7", 2e4]
    """

    params = ParameterGroup.from_yaml(params)

    assert params.get("5").value == 1.0
    assert not params.get("5").non_neg
    assert params.get("5").min == -1
    assert params.get("5").max == 1
    assert not params.get("5").vary

    assert params.get("6").value == 4e2
    assert params.get("6").non_neg
    assert params.get("6").min == -7e2
    assert params.get("6").max == 8e2
    assert params.get("6").vary

    assert params.get("7").value == 2e4
    assert not params.get("7").non_neg
    assert params.get("7").min == float('-inf')
    assert params.get("7").max == float('inf')
    assert params.get("7").vary
Example #27
def test_parameter_group_to_array():
    params = """
    - ["1", 1, {non-negative: false, min: -1, max: 1, vary: false}]
    - ["2", 4e2, {non-negative: true, min: 10, max: 8e2, vary: true}]
    - ["3", 2e4]
    """

    params = ParameterGroup.from_yaml(params)

    labels, values, lower_bounds, upper_bounds = params.get_label_value_and_bounds_arrays(
        exclude_non_vary=False)

    assert len(labels) == 3
    assert len(values) == 3
    assert len(lower_bounds) == 3
    assert len(upper_bounds) == 3

    assert labels == ["1", "2", "3"]
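    # Parameter "2" is non-negative; its value and bounds show up log-transformed
    # below because non-negative parameters are handled in log space.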
    assert np.allclose(values, [1, np.log(4e2), 2e4])
    assert np.allclose(lower_bounds, [-1, np.log(10), -np.inf])
    assert np.allclose(upper_bounds, [1, np.log(8e2), np.inf])

    (
        labels_only_vary,
        values_only_vary,
        lower_bounds_only_vary,
        upper_bounds_only_vary,
    ) = params.get_label_value_and_bounds_arrays(exclude_non_vary=True)

    assert len(labels_only_vary) == 2
    assert len(values_only_vary) == 2
    assert len(lower_bounds_only_vary) == 2
    assert len(upper_bounds_only_vary) == 2

    assert labels_only_vary == ["2", "3"]
Example #28
def test_param_options():
    params = """
    - ["5", 1, {non-negative: false, min: -1, max: 1, vary: false}]
    - ["6", 4e2, {non-negative: true, min: -7e2, max: 8e2, vary: true}]
    - ["7", 2e4]
    """

    params = ParameterGroup.from_yaml(params)

    assert params.get("5").value == 1.0
    assert not params.get("5").non_negative
    assert params.get("5").minimum == -1
    assert params.get("5").maximum == 1
    assert not params.get("5").vary

    assert params.get("6").value == 4e2
    assert params.get("6").non_negative
    assert params.get("6").minimum == -7e2
    assert params.get("6").maximum == 8e2
    assert params.get("6").vary

    assert params.get("7").value == 2e4
    assert not params.get("7").non_negative
    assert params.get("7").minimum == float("-inf")
    assert params.get("7").maximum == float("inf")
    assert params.get("7").vary
Example #29
def test_single_dataset():
    model = MockModel.from_dict(
        {"dataset": {
            "dataset1": {
                "megacomplex": [],
            },
        }})
    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 10])
    print(model.validate(parameter))
    assert model.valid(parameter)
    axis_e = [1, 2, 3]
    axis_c = [5, 7, 9, 12]

    data = {
        'dataset1':
        xr.DataArray(np.ones((3, 4)),
                     coords=[('e', axis_e),
                             ('c', axis_c)]).to_dataset(name="data")
    }

    scheme = Scheme(model, parameter, data)
    bag, datasets = create_grouped_bag(scheme)
    bag = bag.compute()
    assert len(datasets) == 0
    assert len(bag) == 3
    assert all([p.data.size == 4 for p in bag])
    assert all([p.descriptor[0].dataset == 'dataset1' for p in bag])
    assert all([all(p.descriptor[0].axis == axis_c) for p in bag])
    assert [p.descriptor[0].index for p in bag] == axis_e
Example #30
def test_coherent_artifact():
    model = KineticModel.from_dict({
        'initial_concentration': {
            'j1': {
                'compartments': ['s1'],
                'parameters': ['2']
            },
        },
        'megacomplex': {
            'mc1': {
                'k_matrix': ['k1']
            },
        },
        'k_matrix': {
            "k1": {
                'matrix': {
                    ("s1", "s1"): '1',
                }
            }
        },
        'irf': {
            'irf1': {
                'type': 'gaussian',
                'center': '2',
                'width': '3',
                'coherent_artifact': True,
                'coherent_artifact_order': 3,
            },
        },
        'dataset': {
            'dataset1': {
                'initial_concentration': 'j1',
                'megacomplex': ['mc1'],
                'irf': 'irf1',
            },
        },
    })

    parameter = ParameterGroup.from_list([
        101e-4,
        [10, {
            'vary': False,
            'non-negative': False
        }],
        [20, {
            'vary': False,
            'non-negative': False
        }],
    ])

    time = np.asarray(np.arange(0, 50, 1.5))
    dataset = model.dataset['dataset1'].fill(model, parameter)
    compartments, matrix = calculate_kinetic_matrix(dataset, 0, time)

    assert len(compartments) == 4
    for i in range(1, 4):
        assert compartments[i] == f'irf1_coherent_artifact_{i}'

    assert matrix.shape == (time.size, 4)
Example #31
def test_k_matrices(model):
    assert "km1" in model.k_matrix
    parameter = ParameterGroup.from_list([1, 2, 3, 4, 5, 6, 7])
    reduced = model.k_matrix["km1"].fill(model, parameter).reduced(
        ["s1", "s2", "s3", "s4"])
    assert np.array_equal(
        reduced,
        np.asarray([[1, 3, 5, 7], [2, 0, 0, 0], [4, 0, 0, 0], [6, 0, 0, 0]]))
Example #32
class TwoCompartmentDecay:
    wanted_parameters = ParameterGroup.from_list([11e-4, 22e-5])
    initial_parameters = ParameterGroup.from_list([10e-4, 20e-5])

    e_axis = np.asarray([1.0])
    c_axis = np.arange(0, 150, 1.5)

    model = DecayModel.from_dict({
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": [],
                "kinetic": ["1", "2"]
            }
        },
    })
    sim_model = model
Example #33
    def _calculate_penalty(self, parameter):

        if not isinstance(parameter, ParameterGroup):
            parameter = ParameterGroup.from_parameter_dict(parameter)

        job = self._create_calculate_penalty_job(parameter)

        return job.compute()
Example #34
def test_multi_dataset_overlap():
    model = MockModel.from_dict(
        {
            "dataset": {
                "dataset1": {
                    "megacomplex": [],
                },
                "dataset2": {
                    "megacomplex": [],
                },
            }
        }
    )

    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 10])
    print(model.validate(parameter))
    assert model.valid(parameter)

    axis_e_1 = [1, 2, 3, 5]
    axis_c_1 = [5, 7]
    axis_e_2 = [0, 1.4, 2.4, 3.4, 9]
    axis_c_2 = [5, 7, 9, 12]
    data = {
        "dataset1": xr.DataArray(
            np.ones((4, 2)), coords=[("e", axis_e_1), ("c", axis_c_1)]
        ).to_dataset(name="data"),
        "dataset2": xr.DataArray(
            np.ones((5, 4)), coords=[("e", axis_e_2), ("c", axis_c_2)]
        ).to_dataset(name="data"),
    }

    scheme = Scheme(model, parameter, data, group_tolerance=5e-1)
    bag, datasets = create_grouped_bag(scheme)
    bag = bag.compute()
    assert len(datasets) == 1
    assert "dataset1dataset2" in datasets
    assert datasets["dataset1dataset2"] == ["dataset1", "dataset2"]
    assert len(bag) == 6

    assert all([p.data.size == 4 for p in bag[:1]])
    assert all([p.descriptor[0].dataset == "dataset1" for p in bag[1:5]])
    assert all([all(p.descriptor[0].axis == axis_c_1) for p in bag[1:5]])
    assert [p.descriptor[0].index for p in bag[1:5]] == axis_e_1

    assert all([p.data.size == 6 for p in bag[1:4]])
    assert all([p.descriptor[1].dataset == "dataset2" for p in bag[1:4]])
    assert all([all(p.descriptor[1].axis == axis_c_2) for p in bag[1:4]])
    assert [p.descriptor[1].index for p in bag[1:4]] == axis_e_2[1:4]

    assert all([p.data.size == 4 for p in bag[5:]])
    assert bag[4].descriptor[0].dataset == "dataset1"
    assert bag[5].descriptor[0].dataset == "dataset2"
    assert np.array_equal(bag[4].descriptor[0].axis, axis_c_1)
    assert np.array_equal(bag[5].descriptor[0].axis, axis_c_2)
    assert [p.descriptor[0].index for p in bag[1:4]] == axis_e_1[:-1]
Example #35
class OneCompartmentDecay:
    wanted = ParameterGroup.from_list([101e-4])
    initial = ParameterGroup.from_list([100e-5])

    e_axis = np.asarray([1])
    c_axis = np.arange(0, 150, 1.5)

    model = DecayModel.from_dict({
        "compartment": ["s1"],
        "dataset": {
            "dataset1": {
                "initial_concentration": [],
                "megacomplex": [],
                "kinetic": ["1"]
            }
        },
    })
    sim_model = model
Example #36
class OneCompartmentDecay:
    wanted = ParameterGroup.from_list([101e-4])
    initial = ParameterGroup.from_list([100e-5])

    e_axis = np.asarray([1])
    c_axis = np.arange(0, 150, 1.5)

    model = DecayModel.from_dict({
        'compartment': ["s1"],
        'dataset': {
            'dataset1': {
                'initial_concentration': [],
                'megacomplex': [],
                'kinetic': ['1']
            }
        }
    })
    sim_model = model
Example #37
def parameter():
    params = [1, 2,
              ['foo', 3],
              ['bar', 4],
              ['baz', 2],
              ['scale_1', 2],
              ['scale_2', 8],
              4e2
              ]
    return ParameterGroup.from_list(params)
Example #38
def test_param_block_options():
    params = """
    block:
        - 1.0
        - [3.4, {vary: true}]
        - {vary: false}
    """

    params = ParameterGroup.from_yaml(params)
    assert not params.get("block.1").vary
    assert params.get("block.2").vary
Example #39
def test_multi_dataset_overlap():
    model = MockModel.from_dict({
        "dataset": {
            "dataset1": {
                "megacomplex": [],
            },
            "dataset2": {
                "megacomplex": [],
            },
        }
    })

    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 10])
    print(model.validate(parameter))
    assert model.valid(parameter)

    data = {
        'dataset1': xr.DataArray(
            np.ones((4, 2)),
            coords=[('e', [0, 1, 2, 3]), ('c', [5, 7])]
        ).to_dataset(name="data"),
        'dataset2': xr.DataArray(
            np.ones((4, 4)),
            coords=[('e', [1.4, 2.4, 3.4, 9]), ('c', [5, 7, 9, 12])]
        ).to_dataset(name="data"),
    }

    scheme = Scheme(model, parameter, data, group_tolerance=5e-1)
    optimizer = Optimizer(scheme)
    group = optimizer._global_problem
    assert len(group) == 5
    assert group[0][0][1].label == 'dataset1'
    assert group[1][0][1].label == 'dataset1'
    assert group[1][1][1].label == 'dataset2'
    assert group[9][0][1].label == 'dataset2'

    optimizer._create_calculate_penalty_job(parameter)
    print(optimizer.matrices)
    result = [m.compute() for m in optimizer.full_matrices.values()]
    assert len(result) == 5
    print(result[0])
    print(result[1])
    assert result[0].shape == (2, 2)
    assert result[1].shape == (6, 2)
    assert result[4].shape == (4, 2)

    data = [d.compute() for d in optimizer._global_data.values()]
    assert len(data) == 5
    assert data[0].shape[0] == 2
    assert data[1].shape[0] == 6
    assert data[4].shape[0] == 4
Example #40
def test_param_label():
    params = """
    - ["5", 1]
    - ["4", 2]
    - ["3", 3]
    """

    params = ParameterGroup.from_yaml(params)

    assert len(list(params.all())) == 3
    assert [p.label for _, p in params.all()] == [f"{i}" for i in range(5, 2, -1)]
    assert [p.value for _, p in params.all()] == list(range(1, 4))
Example #41
def test_non_negative():

    params = """
    - ["neg", -1]
    - ["negmax", -1, {max=0}]
    - ["nonneg1", 1, {non-negative: True}]
    - ["nonneg2", 2, {non-negative: True}]
    - ["nonnegmin", 6, {non-negative: True, min: 2}]
    """
    params = ParameterGroup.from_yaml(params)
    result = ParameterGroup.from_parameter_dict(params.as_parameter_dict())
    print(params)
    params.as_parameter_dict().pretty_print()
    print(result)

    for label, p in params.all():
        print(label)
        r = result.get(label)
        assert r.non_neg == p.non_neg
        assert np.allclose(r.value, p.value)
        assert np.allclose(r.min, p.min)
        assert np.allclose(r.max, p.max)
Example #42
def test_multi_dataset_no_overlap():
    model = MockModel.from_dict({
        "dataset": {
            "dataset1": {
                "megacomplex": [],
            },
            "dataset2": {
                "megacomplex": [],
            },
        }
    })

    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 10])
    print(model.validate(parameter))
    assert model.valid(parameter)

    data = {
        'dataset1': xr.DataArray(
            np.ones((3, 2)),
            coords=[('e', [1, 2, 3]), ('c', [5, 7])]
        ).to_dataset(name="data"),
        'dataset2': xr.DataArray(
            np.ones((3, 3)),
            coords=[('e', [4, 5, 6]), ('c', [5, 7, 9])]
        ).to_dataset(name="data"),
    }

    scheme = Scheme(model, parameter, data)
    optimizer = Optimizer(scheme)
    group = optimizer._global_problem
    assert len(group) == 6
    assert [problem[0][0] for problem in group.values()] == [1, 2, 3, 4, 5, 6]
    assert [problem[0][1].label for problem in group.values()] == \
        ['dataset1' for _ in range(3)] + ['dataset2' for _ in range(3)]

    optimizer._create_calculate_penalty_job(parameter)
    result = [m.compute() for mat in optimizer.matrices.values() for m in mat]
    assert len(result) == 6
    print(result[0])
    assert result[0].shape == (2, 2)
    assert result[3].shape == (3, 2)

    data = optimizer._global_data
    assert len(data) == 6
    assert list(data.values())[1].compute().shape[0] == 2
    assert list(data.values())[4].compute().shape[0] == 3
Example #43
def test_param_array():
    params = """
    - 5
    - 4
    - 3
    - 2
    - 1
    """

    params = ParameterGroup.from_yaml(params)

    assert len(list(params.all())) == 5

    assert [p.label for _, p in params.all()] == [f"{i}" for i in range(1, 6)]
    assert [p.value for _, p in params.all()] == list(range(1, 6))[::-1]
Example #44
def test_coherent_artifact():
    model = KineticModel.from_dict({
        'initial_concentration': {
            'j1': {
                'compartments': ['s1'],
                'parameters': ['2']
            },

        },
        'megacomplex': {
            'mc1': {'k_matrix': ['k1']},
        },
        'k_matrix': {
            "k1": {'matrix': {("s1", "s1"): '1', }}
        },
        'irf': {
            'irf1': {
                'type': 'gaussian',
                'center': '2',
                'width': '3',
                'coherent_artifact': True,
                'coherent_artifact_order': 3,
            },
        },
        'dataset': {
            'dataset1': {
                'initial_concentration': 'j1',
                'megacomplex': ['mc1'],
                'irf': 'irf1',
            },
        },
    })

    parameter = ParameterGroup.from_list([
        101e-4,
        [10, {'vary': False, 'non-negative': False}],
        [20, {'vary': False, 'non-negative': False}],
    ])

    time = np.asarray(np.arange(0, 50, 1.5))
    dataset = model.dataset['dataset1'].fill(model, parameter)
    compartments, matrix = calculate_kinetic_matrix(dataset, time, 0)

    assert len(compartments) == 4
    for i in range(1, 4):
        assert compartments[i] == f'irf1_coherent_artifact_{i}'

    assert matrix.shape == (time.size, 4)
Example #45
def test_nested_param_group():
    params = """
    kinetic:
        j:
            - 7
            - 8
            - 9
    """

    params = ParameterGroup.from_yaml(params)
    assert len(list(params.all())) == 3
    group = params['kinetic']
    assert len(list(group.all())) == 3
    group = group['j']
    assert len(list(group.all())) == 3
    assert [p.label for _, p in group.all()] == [f"{i}" for i in range(1, 4)]
    assert [p.value for _, p in group.all()] == list(range(7, 10))
Example #46
def test_kinetic_matrix_benchmark(benchmark):
    model = KineticModel.from_dict({
        'initial_concentration': {
            'j1': {
                'compartments': ['s1', 's2', 's3'],
                'parameters': ['j.1', 'j.0', 'j.0']
            },
        },
        'megacomplex': {
            'mc1': {'k_matrix': ['k1']},
        },
        'k_matrix': {
            "k1": {'matrix': {
                ("s2", "s1"): 'kinetic.1',
                ("s3", "s2"): 'kinetic.2',
                ("s3", "s3"): 'kinetic.3',
            }}
        },
        'irf': {
            'irf1': {'type': 'gaussian', 'center': ['irf.center'], 'width': ['irf.width']},
        },
        'dataset': {
            'dataset1': {
                'initial_concentration': 'j1',
                'irf': 'irf1',
                'megacomplex': ['mc1'],
            },
        },
    })
    parameter = ParameterGroup.from_dict({
        'kinetic': [
            ["1", 101e-4],
            ["2", 302e-3],
            ["3", 201e-2],
        ],
        'irf': [['center', 0], ['width', 5]],
        'j': [['1', 1, {'vary': False}], ['0', 0, {'vary': False}]],
    })
    dataset = model.dataset['dataset1'].fill(model, parameter)
    time = np.asarray(np.arange(-10, 100, 0.02))

    benchmark(calculate_kinetic_matrix, dataset, time, 0)
Example #47
def test_nested_param_list():
    params = """
    kinetic:
        - 3
        - 4
        - 5
    j:
        - 7
        - 8
    """

    params = ParameterGroup.from_yaml(params)

    assert len(list(params.all())) == 5
    group = params['kinetic']
    assert len(list(group.all())) == 3
    assert [p.label for _, p in group.all()] == [f"{i}" for i in range(1, 4)]
    assert [p.value for _, p in group.all()] == list(range(3, 6))
    group = params['j']
    assert len(list(group.all())) == 2
    assert [p.label for _, p in group.all()] == [f"{i}" for i in range(1, 3)]
    assert [p.value for _, p in group.all()] == list(range(7, 9))
Example #48
def test_baseline():
    model = KineticModel.from_dict({
        'initial_concentration': {
            'j1': {
                'compartments': ['s1'],
                'parameters': ['2']
            },

        },
        'megacomplex': {
            'mc1': {'k_matrix': ['k1']},
        },
        'k_matrix': {
            "k1": {'matrix': {("s1", "s1"): '1', }}
        },
        'dataset': {
            'dataset1': {
                'initial_concentration': 'j1',
                'megacomplex': ['mc1'],
                'baseline': True,
            },
        },
    })

    parameter = ParameterGroup.from_list([
        101e-4,
        [1, {'vary': False, 'non-negative': False}],
        [42, {'vary': False, 'non-negative': False}],
    ])

    time = np.asarray(np.arange(0, 50, 1.5))
    dataset = model.dataset['dataset1'].fill(model, parameter)
    compartments, matrix = calculate_kinetic_matrix(dataset, time, 0)

    assert len(compartments) == 2
    assert compartments[1] == 'dataset1_baseline'

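    # The baseline appears as one extra compartment whose matrix column is a constant 1.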
    assert matrix.shape == (time.size, 2)
    assert np.all(matrix[:, 1] == 1)
Example #49
    def optimize(self, verbose=True):
        parameter = self._scheme.parameter.as_parameter_dict()
        minimizer = lmfit.Minimizer(
            self._calculate_penalty,
            parameter,
            fcn_args=None,
            fcn_kws=None,
            iter_cb=None,
            scale_covar=True,
            nan_policy='omit',
            reduce_fcn=None,
            **{})
        verbose = 2 if verbose else 0
        lm_result = minimizer.minimize(
            method='least_squares', verbose=verbose, max_nfev=self._scheme.nfev)

        self._optimal_parameter = ParameterGroup.from_parameter_dict(lm_result.params)
        self._calculate_result()

        covar = lm_result.covar if hasattr(lm_result, 'covar') else None

        return Result(self._scheme, self._result_data, self._optimal_parameter,
                      lm_result.nfev, lm_result.nvarys, lm_result.ndata, lm_result.nfree,
                      lm_result.chisqr, lm_result.redchi, lm_result.var_names, covar)
Example #50
def test_single_dataset():
    model = MockModel.from_dict({
        "dataset": {
            "dataset1": {
                "megacomplex": [],
            },
        }
    })
    print(model.validate())
    assert model.valid()

    parameter = ParameterGroup.from_list([1, 10])
    print(model.validate(parameter))
    assert model.valid(parameter)

    data = {'dataset1': xr.DataArray(
        np.ones((3, 4)),
        coords=[('e', [1, 2, 3]), ('c', [5, 7, 9, 12])]
    ).to_dataset(name="data")}

    scheme = Scheme(model, parameter, data)
    optimizer = Optimizer(scheme)
    group = optimizer._global_problem
    assert len(group) == 3
    assert list(group.keys()) == [f"dataset1_{i}" for i in [1, 2, 3]]
    assert all([p.dataset_descriptor.label == 'dataset1' for p in group.values()])

    optimizer._create_calculate_penalty_job(parameter)
    result = [m.compute() for m in optimizer.matrices.values()]
    assert len(result) == 3
    print(result[0])
    assert result[0].shape == (4, 2)

    data = optimizer._global_data
    assert len(data) == 3
    assert list(data.values())[1].compute().shape[0] == 4