Example #1
def test_model_calls():
    def model_function(x, b, c, d):
        return b + c * x + d * x * x

    t = np.array([1.0, 2.0, 3.0])
    model = Model("m", model_function)
    y_ref = model._raw_call(t, [2.0, 3.0, 4.0])

    np.testing.assert_allclose(
        model(
            t,
            Params(
                **{
                    "m/a": Parameter(1),
                    "m/b": Parameter(2),
                    "m/c": Parameter(3),
                    "m/d": Parameter(4),
                }
            ),
        ),
        y_ref,
    )

    np.testing.assert_allclose(
        model(t, Params(**{"m/d": Parameter(4), "m/c": Parameter(3), "m/b": Parameter(2)})), y_ref
    )

    with pytest.raises(KeyError):
        np.testing.assert_allclose(
            model(t, Params(**{"m/a": Parameter(1), "m/b": Parameter(2), "m/d": Parameter(4)})),
            y_ref,
        )
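
For reference, with these values the model evaluates to b + c * t + d * t**2 = 2 + 3t + 4t**2, so y_ref is [9, 24, 47] for t = [1, 2, 3]. A minimal standalone check of that arithmetic (independent of the Model class):

import numpy as np

t = np.array([1.0, 2.0, 3.0])
b, c, d = 2.0, 3.0, 4.0
np.testing.assert_allclose(b + c * t + d * t * t, [9.0, 24.0, 47.0])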
Example #2
def test_model_defaults():
    """Test whether model defaults propagate to the fit object correctly"""
    def g(data, mu, sig, a, b, c, d, e, f, q):
        del sig, a, b, c, d, e, f, q
        return (data - mu) * 2

    model = Model("M", g, f=Parameter(5))
    fit = Fit(model)
    fit._add_data("test", [1, 2, 3], [2, 3, 4])
    fit._add_data("test2", [1, 2, 3], [2, 3, 4], params={"M/f": "f/new"})
    fit._build_fit()

    assert fit["M/a"].value == Parameter().value
    assert fit.params["M/a"].value == Parameter().value
    assert fit.params["f/new"].value == 5
    assert fit.params["M/f"].value == 5

    # Check whether each parameter is actually unique
    fit.params["f/new"] = 6
    assert fit.params["f/new"].value == 6
    assert fit.params["M/f"].value == 5

    # Test whether providing a default for a parameter that doesn't exist throws
    with pytest.raises(AssertionError):
        Model("M", g, z=Parameter(5))

    # Verify that the defaults are in fact copies
    default = Parameter(5)
    model = Model("M", g, f=default)
    model._params["M/f"].value = 6
    assert default.value == 5
Example #3
def test_parameter_inversion():
    def f(independent, a, b):
        return a + b * independent

    def f_jac(independent, a, b):
        del a, b
        return np.vstack((np.ones((1, len(independent))), independent))

    def g(independent, a, d, b):
        return a - b * independent + d * independent * independent

    def g_jac(independent, a, d, b):
        del a, d, b
        return np.vstack((np.ones(
            (1, len(independent))), independent * independent, -independent))

    def f_der(independent, a, b):
        del a
        return b * np.ones(len(independent))

    def g_der(independent, a, d, b):
        del a
        return -b * np.ones(len(independent)) + 2.0 * d * independent

    x = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    a_true = 5.0
    b_true = np.array([1.0, 2.0, 3.0, 4.0, 10.0])
    f_data = f(x, a_true, b_true)
    model = Model("f", f, jacobian=f_jac, derivative=f_der)
    fit = Fit(model)
    fit._add_data("test", x, f_data)
    fit.params["f/a"].value = a_true
    fit.params["f/b"].value = 1.0
    np.testing.assert_allclose(
        parameter_trace(model, fit.params, "f/b", x, f_data), b_true)

    a_true = 5.0
    b_true = 3.0
    d_true = np.array([1.0, 2.0, 3.0, 4.0, 10.0])
    f_plus_g_data = f(x, a_true, b_true) + g(x, a_true, d_true, b_true)
    model = Model("f", f, jacobian=f_jac, derivative=f_der) + Model(
        "f", g, jacobian=g_jac, derivative=g_der)
    fit = Fit(model)
    fit._add_data("test", x, f_data)
    fit.params["f/a"].value = a_true
    fit.params["f/b"].value = b_true
    fit.params["f/d"].value = 1.0
    np.testing.assert_allclose(
        parameter_trace(model, fit.params, "f/d", x, f_plus_g_data), d_true)
Example #4
def test_plotting():
    x = np.arange(10.0) / 100.0
    y = [
        1.37272429,
        1.14759176,
        1.2080786,
        1.79293398,
        1.22606946,
        1.55293523,
        1.73564261,
        1.49623027,
        1.81209629,
        1.69464097,
    ]

    F = Fit(
        Model(
            "exponential",
            exp_charge,
            jacobian=exp_charge_jac,
            mag=Parameter(value=1.0),
            k=Parameter(value=1.0),
        ))
    F._add_data("exponential", x, y)
    F.fit()
    profile = F.profile_likelihood("exponential/k")

    profile.plot()
    profile.plot_relations()
Example #5
def test_parameter_slicing():
    # Tests whether parameters coming from a Fit can be sliced by a data handle,
    # i.e. fit.params[data_handle]

    def dummy(t, p1, p2, p3):
        return t * p1 + t * p2 * p2 + t * p3 * p3 * p3

    model = Model("dummy",
                  dummy,
                  p2=Parameter(2),
                  p3=Parameter(3),
                  p1=Parameter(1))
    fit = Fit(model)
    data_set = fit._add_data("data1", [1, 1, 1], [1, 2, 3],
                             {"dummy/p2": "dummy/p2_b"})
    parameter_slice = fit.params[data_set]
    assert parameter_slice["dummy/p1"].value == 1
    assert parameter_slice["dummy/p2"].value == 2
    assert parameter_slice["dummy/p3"].value == 3

    data_set2 = fit._add_data("data2", [1, 1, 1], [1, 2, 3],
                              {"dummy/p2": "dummy/p2_c"})
    fit.params["dummy/p2_c"] = 5
    parameter_slice = fit.params[data_set]
    assert parameter_slice["dummy/p2"].value == 2
    parameter_slice = fit.params[data_set2]
    assert parameter_slice["dummy/p2"].value == 5
Example #6
def test_no_jac():
    x, y = test_data()

    linear_model = Model("linear", linear_func()["model_function"])
    linear_fit = Fit(linear_model)
    linear_fit._add_data("test", x, y, {"linear/a": 5})
    linear_fit.fit()

    with pytest.raises(NotImplementedError):
        linear_fit.cov
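
The NotImplementedError is consistent with the asymptotic covariance requiring model derivatives: for least squares it is commonly estimated as sigma^2 * inv(J^T J), with J the Jacobian at the fitted parameters. Whether Fit.cov uses exactly this estimator is not shown here; the sketch below only illustrates the standard formula:

import numpy as np

def asymptotic_cov(jac, residuals):
    """Textbook least-squares covariance estimate: sigma^2 * inv(J^T J)."""
    n, k = jac.shape  # n data points, k parameters
    sigma2 = residuals @ residuals / (n - k)  # unbiased residual variance
    return sigma2 * np.linalg.inv(jac.T @ jac)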
Example #7
def test_fit_metrics(model_funcs, sigma, aic, aicc, bic):
    x, y = test_data()

    fit = Fit(Model("linear", **model_funcs()))
    fit._add_data("test", x, y)
    fit.fit()

    np.testing.assert_allclose(fit.sigma[0], sigma)
    np.testing.assert_allclose(fit.aic, aic)
    np.testing.assert_allclose(fit.aicc, aicc)
    np.testing.assert_allclose(fit.bic, bic)
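
Assuming the conventional definitions (an assumption; the snippet does not show how Fit computes these metrics), AIC, AICc and BIC follow from the number of fitted parameters k, the number of residuals n, and the maximized log-likelihood:

import numpy as np

def information_criteria(log_likelihood, k, n):
    """Conventional AIC/AICc/BIC formulas (assumed, not taken from Fit's source)."""
    aic = 2 * k - 2 * log_likelihood
    aicc = aic + (2 * k * (k + 1)) / (n - k - 1)
    bic = k * np.log(n) - 2 * log_likelihood
    return aic, aicc, bic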
Example #8
def test_asymptotic_errs_subset_parameters():
    """Check whether the asymptotic uncertainty handling is correct by checking a nested model

    Fixing a parameter of the quadratic function converts it to a linear one. This should result in
    identical standard errors and an identical covariance matrix."""
    x, y = test_data()

    linear_fit = Fit(Model("m", **linear_func()))
    linear_fit._add_data("test", x, y)
    linear_fit.fit()

    quadratic_fit = Fit(Model("m", **quadratic_func()))
    quadratic_fit._add_data("test", x, y)
    quadratic_fit["m/a"].fixed = True
    quadratic_fit.fit()

    np.testing.assert_allclose(linear_fit.cov, quadratic_fit.cov)
    np.testing.assert_allclose(quadratic_fit["m/b"].stderr,
                               linear_fit["m/a"].stderr)
    np.testing.assert_allclose(quadratic_fit["m/c"].stderr,
                               linear_fit["m/b"].stderr)
    assert quadratic_fit["m/a"].stderr is None
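
The equivalence relies on the fixed m/a keeping a value of zero (an assumption about Parameter's default), so that a * x**2 + b * x + c collapses to b * x + c, the same model family as the linear fit a * x + b:

import numpy as np

x = np.linspace(0.0, 1.0, 5)
b, c = 2.0, 1.0
a_fixed = 0.0  # assumed default value of the fixed quadratic coefficient
np.testing.assert_allclose(a_fixed * x**2 + b * x + c, b * x + c)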
Example #9
def test_jacobian_test_fit():
    def f(independent, a, b):
        return a + b * independent

    def f_jac(independent, a, b):
        del a, b
        return np.vstack((np.ones((1, len(independent))), independent))

    def f_der(independent, a, b):
        del a
        return b * np.ones((len(independent)))

    def f_jac_wrong(independent, a, b):
        del a, b
        return np.vstack((2.0 * np.ones((1, len(independent))), independent))

    x = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    a_true, b_true = (5.0, 5.0)
    f_data = f(x, a_true, b_true)
    model = Model("f", f, jacobian=f_jac, derivative=f_der)
    fit = Fit(model)
    fit._add_data("test", x, f_data)
    fit.params["f/a"].value = a_true
    fit.params["f/b"].value = b_true
    assert fit.verify_jacobian(fit.params.values)

    model_bad = Model("f", f, jacobian=f_jac_wrong, derivative=f_der)
    fit = Fit(model_bad)
    fit._add_data("test", x, f_data)
    fit.params["f/a"].value = a_true
    fit.params["f/b"].value = b_true
    assert not fit.verify_jacobian(fit.params.values)

    with pytest.raises(ValueError):
        assert odijk("WLC").verify_jacobian([1.0, 2.0, 3.0], [1.0, 2.0])

    with pytest.raises(ValueError):
        odijk("WLC").verify_derivative([1, 2, 3], [1, 2, 3])
Example #10
    def validate_profile(name, func, jac, x, y, parameter, stderr, lb, ub):
        F = Fit(Model(name, func, jacobian=jac))
        F._add_data(name, x, y)
        F.fit()
        profile = F.profile_likelihood(name + "/" + parameter)

        assert abs(F[name + "/" + parameter].stderr - stderr) < eps
        assert F[name + "/" + parameter].profile == profile
        assert abs(profile.lower_bound - lb) < eps
        assert abs(profile.upper_bound - ub) < eps
        assert len(profile.chi2) == len(profile.p)

        # Validate asymptotic confidence intervals based on comparing them with the profiles
        assert abs(F[name + "/" + parameter].ci(0.95)[0] -
                   profile.lower_bound) < 1e-2
        assert abs(F[name + "/" + parameter].ci(0.95)[1] -
                   profile.upper_bound) < 1e-2
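
The agreement between the profile bounds and ci(0.95) to within 1e-2 is what one expects for a near-quadratic log-likelihood: a 95% profile bound lies where the chi-squared increases by roughly 3.84 over its minimum, which coincides with the +/- 1.96 standard-error interval used by the asymptotic estimate. A small sketch of that equivalence (standard statistics, not taken from the library):

from scipy.stats import chi2, norm

delta_chi2 = chi2.ppf(0.95, df=1)  # ~3.841, chi-squared cutoff for one profiled parameter
z = norm.ppf(0.975)                # ~1.960, two-sided 95% normal quantile
assert abs(delta_chi2 - z**2) < 1e-6  # the two criteria coincide for a quadratic profile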
Example #11
def test_integration_parameter_linkage():
    """Verify that we estimate correctly across models when models share parameters."""
    def const(independent, b):
        f = b * np.ones(independent.shape)
        return f

    def const_jac(independent, b):
        del b
        return np.ones((1, len(independent)))

    x = np.arange(3)
    y1 = np.ones(3) * 2
    y2 = np.ones(3) * 4

    # No difference between the offsets for the two datasets (results in average of the two data
    # sets)
    fit = FdFit(Model("M", const, jacobian=const_jac))
    fit.add_data("a", y1, x)
    fit.add_data("b", y2, x)
    fit.fit()
    assert fit["M/b"].value == 3

    # Both models have their own offset (correct estimates)
    m1 = Model("M1", const, jacobian=const_jac)
    m2 = Model("M2", const, jacobian=const_jac)
    fit = FdFit(m1, m2)
    fit[m1].add_data("a", y1, x)
    fit[m2].add_data("b", y2, x)
    fit.fit()
    assert fit["M1/b"].value == 2
    assert fit["M2/b"].value == 4

    # No difference between the offsets for the two datasets because we explicitly say so
    # (results in average of the two data sets)
    m1 = Model("M1", const, jacobian=const_jac)
    m2 = Model("M2", const, jacobian=const_jac)
    fit = FdFit(m1, m2)
    fit[m1].add_data("a", y1, x)
    fit[m2].add_data("b", y2, x, params={"M2/b": "M1/b"})
    fit.fit()
    assert fit["M1/b"].value == 3

    # Both models have their own offset (correct estimates)
    fit = FdFit(Model("M", const, jacobian=const_jac))
    fit.add_data("a", y1, x)
    fit.add_data("b", y2, x, params={"M/b": "M/b2"})
    fit.fit()
    assert fit["M/b"].value == 2
    assert fit["M/b2"].value == 4
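
The shared-offset cases reduce to fitting a single constant to the pooled data, and the least-squares optimum of a constant is the sample mean, hence the value 3 for y-values of 2 and 4. A standalone check of that arithmetic:

import numpy as np

y_pooled = np.concatenate([np.ones(3) * 2, np.ones(3) * 4])
# the least-squares estimate of a single constant offset is the mean of the pooled data
np.testing.assert_allclose(np.mean(y_pooled), 3.0)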
Example #12
def test_non_identifiability():
    x = np.arange(10.0) / 100.0
    y = [
        1.37272429,
        1.14759176,
        1.2080786,
        1.79293398,
        1.22606946,
        1.55293523,
        1.73564261,
        1.49623027,
        1.81209629,
        1.69464097,
    ]

    F = Fit(
        Model(
            "exponential",
            exp_charge,
            jacobian=exp_charge_jac,
            mag=Parameter(value=1.0),
            k=Parameter(value=1.0),
        ))
    F._add_data("exponential", x, y)
    F.fit()
    num_steps = 100
    profile = F.profile_likelihood("exponential/k", num_steps=num_steps)

    # This model does not have an upper bound for its 95% confidence interval (we're fitting a
    # constant with an exponential rise; the exponential rise is non-identifiable since it can be
    # infinitely fast).
    assert profile.lower_bound is not None
    assert profile.upper_bound is None
    assert len(profile.p) < 2 * num_steps
    assert str(profile) == dedent("""\
        Profile likelihood for exponential/k (121 points)
          - chi2
          - p
          - lower_bound: 20.36
          - upper_bound: undefined
        """)
Example #13
def test_data_loading():
    m = Model("M", lambda x, a: a * x)
    fit = Fit(m)
    fit._add_data("test", [1, np.nan, 3], [2, np.nan, 4])
    np.testing.assert_allclose(fit[m].data["test"].x, [1, 3])
    np.testing.assert_allclose(fit[m].data["test"].y, [2, 4])
    np.testing.assert_allclose(fit[m].data["test"].independent, [1, 3])
    np.testing.assert_allclose(fit[m].data["test"].dependent, [2, 4])

    # Name must be unique
    with pytest.raises(KeyError):
        fit._add_data("test", [1, 3, 5], [2, 4, 5])

    with pytest.raises(AssertionError):
        fit._add_data("test2", [1, 3], [2, 4, 5])

    with pytest.raises(AssertionError):
        fit._add_data("test3", [1, 3, 5], [2, 4])

    with pytest.raises(AssertionError):
        fit._add_data("test4", [[1, 3, 5]], [[2, 4, 5]])
Example #14
def test_parameter_availability():
    x = np.arange(10)
    y = np.array([
        8.24869073,
        7.77648717,
        11.9436565,
        14.85406276,
        22.73081526,
        20.39692261,
        32.48962353,
        31.4775862,
        37.63807819,
        40.50125925,
    ])

    def linear(independent, a=1, b=1):
        return a * independent + b

    def linear_jac(independent, a, b):
        del a, b
        return np.vstack((independent, np.ones(len(independent))))

    linear_model = Model("linear", linear, jacobian=linear_jac)
    linear_fit = Fit(linear_model)

    with pytest.raises(IndexError):
        linear_fit.params["linear/a"]

    linear_fit._add_data("test", x, y, {"linear/a": 5})
    linear_fit = Fit(linear_model)

    # Parameter linear/a is not actually a parameter in the fit object at this point (it was set
    # to 5)
    with pytest.raises(IndexError):
        linear_fit.params["linear/a"]

    linear_fit._add_data("test", x, y)
    assert "linear/a" in linear_fit.params
Example #15
def test_asymptotic_errs_all_parameters():
    """Tests whether the covariance matrix is computed correctly"""
    x, y = test_data()

    quadratic_fit = Fit(Model("quadratic", **quadratic_func()))
    quadratic_fit._add_data("test", x, y)
    quadratic_fit.fit()

    np.testing.assert_allclose(
        quadratic_fit.cov,
        np.array([
            [0.001465292918082, -0.013187636262741, 0.017583515016988],
            [-0.013187636262741, 0.128066601040397, -0.200452071193665],
            [0.017583515016988, -0.200452071193665, 0.478271608462078],
        ]),
    )

    np.testing.assert_allclose(quadratic_fit["quadratic/a"].stderr,
                               0.038279144688489905)
    np.testing.assert_allclose(quadratic_fit["quadratic/b"].stderr,
                               0.35786394207910477)
    np.testing.assert_allclose(quadratic_fit["quadratic/c"].stderr,
                               0.6915718389741429)
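
The asserted standard errors are the square roots of the diagonal of the covariance matrix above; a standalone check of that relation using the numbers from the test:

import numpy as np

cov = np.array([
    [0.001465292918082, -0.013187636262741, 0.017583515016988],
    [-0.013187636262741, 0.128066601040397, -0.200452071193665],
    [0.017583515016988, -0.200452071193665, 0.478271608462078],
])
stderr = np.sqrt(np.diag(cov))
np.testing.assert_allclose(
    stderr, [0.038279144688489905, 0.35786394207910477, 0.6915718389741429], rtol=1e-9
)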
Example #16
def test_datasets_build_status():
    def g(data, mu, sig, a, b, c, d, e, f, q):
        del sig, a, b, c, d, e, f, q
        return (data - mu) * 2

    all_params = ["M/mu", "M/sig", "M/a", "M/b", "M/d", "M/e", "M/f", "M/q"]

    m = Model("M", g)
    data_set = Datasets(m, 0)

    data_set._add_data("test", [1, 2, 3], [2, 3, 4], {"M/c": 4})
    assert not data_set.built

    data_set._link_data(
        OrderedDict(zip(all_params, np.arange(len(all_params)))))
    assert data_set.built

    # Loading new data should invalidate the build
    data_set._add_data("test2", [1, 2, 3], [2, 3, 4], {
        "M/c": 5,
        "M/f": "f/new"
    })
    assert not data_set.built
Example #17
def test_integration_test_fitting():
    def linear(independent, a, b):
        f = a * independent + b
        return f

    def linear_jac(independent, a, b):
        del a, b
        jacobian = np.vstack((independent, np.ones(len(independent))))
        return jacobian

    def linear_jac_wrong(independent, a, b):
        del a, b
        jacobian = np.vstack((np.ones(len(independent)), independent))
        return jacobian

    assert Model("M", linear, jacobian=linear_jac).has_jacobian
    assert not Model("M", linear).has_jacobian
    with pytest.raises(RuntimeError):
        Model("M", linear).jacobian([1.0, 2.0, 3.0], [1.0, 2.0, 3.0])
    with pytest.raises(RuntimeError):
        Model("M", linear).derivative([1.0, 2.0, 3.0], [1.0, 2.0, 3.0])

    model = Model("M", linear, jacobian=linear_jac_wrong)
    assert not model.verify_jacobian([1, 2, 3], [1, 1])

    model = Model("M", linear, jacobian=linear_jac)
    fit = Fit(model)
    x = np.arange(3)
    for i in np.arange(3):
        y = 4.0 * x * i + 5.0
        fit._add_data(f"test {i}", x, y, params={"M/a": f"slope_{i}"})

    y = 4.0 * x + 10.0
    fit._add_data("test x", x, y, params={"M/a": "slope_1", "M/b": "M/b_2"})

    # Test whether fixed parameters are not fitted
    fit["slope_2"].fixed = True
    fit.fit()
    assert np.isclose(fit["slope_2"].value, 0)

    fit["slope_2"].fixed = False
    fit.fit()
    assert len(fit.params.values) == 5
    assert len(fit.params) == 5
    assert fit.n_residuals == 12
    assert fit.n_params == 5

    assert np.isclose(fit.params["slope_0"].value, 0)
    assert np.isclose(fit.params["slope_1"].value, 4)
    assert np.isclose(fit.params["slope_2"].value, 8)
    assert np.isclose(fit.params["M/b"].value, 5)
    assert np.isclose(fit.params["M/b_2"].value, 10)

    # Verify that fixed parameters are correctly removed from sub-models
    model = Model("M", linear, jacobian=linear_jac)
    fit = Fit(model)
    fit._add_data("test1", x, 4.0 * x + 5.0, {"M/a": 4})
    fit._add_data("test2", x, 8.0 * x + 10.0, {"M/b": 10})
    fit.fit()
    assert np.isclose(fit.params["M/b"].value, 5)
    assert np.isclose(fit.params["M/a"].value, 8)

    fit["M/a"].upper_bound = 4
    fit["M/a"].value = 5
    with pytest.raises(ValueError):
        fit.fit()
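
The counts asserted after the second fit follow directly from the data layout: four datasets of three points each give n_residuals = 12, and the free parameters are the three renamed slopes plus the offsets M/b and M/b_2, five in total. A standalone tally:

datasets = ["test 0", "test 1", "test 2", "test x"]
points_per_set = 3
params = {"slope_0", "slope_1", "slope_2", "M/b", "M/b_2"}
assert len(datasets) * points_per_set == 12
assert len(params) == 5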
Example #18
def test_model_composition():
    def f(x, a, b):
        return a + b * x

    def f_jac(x, a, b):
        return np.vstack((np.ones((1, len(x))), x))

    def f_jac_wrong(x, a, b):
        return np.vstack((np.zeros((1, len(x))), x))

    def g(x, a, d, b):
        return a - b * x + d * x * x

    def g_jac(x, a, d, b):
        return np.vstack((np.ones((1, len(x))), x * x, -x))

    def f_der(x, a, b):
        return b * np.ones((len(x)))

    def f_der_wrong(x, a, b):
        return np.ones((len(x)))

    def g_der(x, a, d, b):
        return -b * np.ones((len(x))) + 2.0 * d * x

    m1 = Model("M", f, dependent="x", jacobian=f_jac, derivative=f_der)
    m2 = Model("M", g, dependent="x", jacobian=g_jac, derivative=g_der)
    t = np.arange(0, 2, 0.5)

    # Check actual composition
    # (a + b * x) + a - b * x + d * x * x = 2 * a + d * x * x
    np.testing.assert_allclose(
        (m1 + m2)._raw_call(t, np.array([1.0, 2.0, 3.0])),
        2.0 + 3.0 * t * t,
        err_msg="Model composition returns invalid function evaluation (parameter order issue?)",
    )

    # Check correctness of the Jacobians and derivatives
    assert (m1 + m2).verify_jacobian(t, [1.0, 2.0, 3.0])
    assert (m1 + m2).verify_derivative(t, [1.0, 2.0, 3.0])
    assert (m2 + m1).verify_jacobian(t, [1.0, 2.0, 3.0])
    assert (m2 + m1).verify_derivative(t, [1.0, 2.0, 3.0])
    assert (m2 + m1 + m2).verify_jacobian(t, [1.0, 2.0, 3.0])
    assert (m2 + m1 + m2).verify_derivative(t, [1.0, 2.0, 3.0])

    m1_wrong_jacobian = Model("M",
                              f,
                              dependent="x",
                              jacobian=f_jac_wrong,
                              derivative=f_der)
    assert not (m1_wrong_jacobian + m2).verify_jacobian(t, [1.0, 2.0, 3.0],
                                                        verbose=False)
    assert not (m2 + m1_wrong_jacobian).verify_jacobian(t, [1.0, 2.0, 3.0],
                                                        verbose=False)

    assert (InverseModel(m1) + m2).verify_jacobian(t, [-1.0, 2.0, 3.0],
                                                   verbose=False)
    assert InverseModel(m1 + m2).verify_jacobian(t, [-1.0, 2.0, 3.0],
                                                 verbose=False)
    assert InverseModel(m1 + m2 + m1).verify_jacobian(t, [-1.0, 2.0, 3.0],
                                                      verbose=False)

    assert (InverseModel(m1) + m2).verify_derivative(t, [-1.0, 2.0, 3.0])
    assert InverseModel(m1 + m2).verify_derivative(t, [-1.0, 2.0, 3.0])
    assert InverseModel(m1 + m2 + m1).verify_derivative(t, [-1.0, 2.0, 3.0])

    m1_wrong_derivative = Model("M",
                                f,
                                dependent="x",
                                jacobian=f_jac,
                                derivative=f_der_wrong)
    assert not (InverseModel(m1_wrong_derivative) + m2).verify_jacobian(
        t, [-1.0, 2.0, 3.0], verbose=False)
    assert not (InverseModel(m1_wrong_jacobian) + m2).verify_jacobian(
        t, [-1.0, 2.0, 3.0], verbose=False)
    assert not (InverseModel(m1_wrong_derivative) + m2).verify_derivative(
        t, [-1.0, 2.0, 3.0])

    assert m1.subtract_independent_offset().verify_jacobian(t,
                                                            [-1.0, 2.0, 3.0],
                                                            verbose=False)
    assert m1.subtract_independent_offset().verify_derivative(
        t, [-1.0, 2.0, 3.0])

    m1 = inverted_odijk("DNA").subtract_independent_offset() + force_offset(
        "f")
    m2 = (odijk("DNA") + distance_offset("DNA_d")).invert() + force_offset("f")
    t = np.array([0.19, 0.2, 0.3])
    p1 = np.array([0.1, 4.9e1, 3.8e-1, 2.1e2, 4.11, 1.5])
    p2 = np.array([4.9e1, 3.8e-1, 2.1e2, 4.11, 0.1, 1.5])
    np.testing.assert_allclose(m1._raw_call(t, p1), m2._raw_call(t, p2))

    # Check whether incompatible variables are found
    with pytest.raises(AssertionError):
        distance_offset("d") + force_offset("f")

    composite = distance_offset("d") + odijk("DNA")
    assert composite.dependent == "d"
    assert composite.independent == "f"
    assert composite._dependent_unit == "micron"
    assert composite._independent_unit == "pN"

    inverted = composite.invert()
    assert inverted.dependent == "f"
    assert inverted.independent == "d"
    assert inverted._dependent_unit == "pN"
    assert inverted._independent_unit == "micron"
Example #19
def test_model_fit_object_linking():
    def fetch_params(parameters, indices):
        p_list = list(parameters.keys())
        return [p_list[x] if x is not None else None for x in indices]

    def g(data, mu, sig, a, b, c, d, e, f, q):
        del sig, a, b, c, d, e, f, q
        return (data - mu) * 2

    def h(data, mu, e, q, c, r):
        del e, q, c, r
        return (data - mu) * 2

    all_params = ["M/mu", "M/sig", "M/a", "M/b", "M/d", "M/e", "M/f", "M/q"]
    m = Model("M", g, d=Parameter(4))
    m2 = Model("M", h)

    # Model should not be built
    fit = Fit(m, m2)
    fit[m]._add_data("test", [1, 2, 3], [2, 3, 4], {"M/c": 4})
    assert fit.dirty

    # Asking for the parameters should have triggered a build
    fit.params
    assert not fit.dirty
    assert set(fit.params.keys()) == set(all_params)

    # Check the parameters included in the model
    np.testing.assert_allclose(fit.datasets[id(m)]._conditions[0].p_external,
                               [0, 1, 2, 3, 5, 6, 7, 8])
    assert np.all(fit.datasets[id(m)]._conditions[0].p_local ==
                  [None, None, None, None, 4, None, None, None, None])
    params = ["M/mu", "M/sig", "M/a", "M/b", None, "M/d", "M/e", "M/f", "M/q"]
    assert fetch_params(
        fit.params,
        fit.datasets[id(m)]._conditions[0]._p_global_indices) == params

    # Loading data should make it dirty again
    fit[m]._add_data("test2", [1, 2, 3], [2, 3, 4], {
        "M/c": 4,
        "M/e": "M/e_new"
    })
    assert fit.dirty

    # Check the parameters included in the model
    fit._rebuild()
    np.testing.assert_allclose(fit.datasets[id(m)]._conditions[0].p_external,
                               [0, 1, 2, 3, 5, 6, 7, 8])
    assert np.all(fit.datasets[id(m)]._conditions[0].p_local ==
                  [None, None, None, None, 4, None, None, None, None])
    params = ["M/mu", "M/sig", "M/a", "M/b", None, "M/d", "M/e", "M/f", "M/q"]
    assert fetch_params(
        fit.params,
        fit.datasets[id(m)]._conditions[0]._p_global_indices) == params

    np.testing.assert_allclose(fit.datasets[id(m)]._conditions[1].p_external,
                               [0, 1, 2, 3, 5, 6, 7, 8])
    assert np.all(fit.datasets[id(m)]._conditions[1].p_local ==
                  [None, None, None, None, 4, None, None, None, None])
    params = [
        "M/mu", "M/sig", "M/a", "M/b", None, "M/d", "M/e_new", "M/f", "M/q"
    ]
    assert fetch_params(
        fit.params,
        fit.datasets[id(m)]._conditions[1]._p_global_indices) == params

    # Load data into model 2
    fit[m2]._add_data("test", [1, 2, 3], [2, 3, 4], {"M/c": 4, "M/r": 6})
    assert fit.dirty

    # Since M/r is set fixed in that model, it should not appear as a parameter
    all_params = [
        "M/mu", "M/sig", "M/a", "M/b", "M/d", "M/e", "M/e_new", "M/f", "M/q"
    ]
    assert set(fit.params.keys()) == set(all_params)

    all_params = [
        "M/mu", "M/sig", "M/a", "M/b", "M/d", "M/e", "M/e_new", "M/f", "M/q",
        "M/r"
    ]
    fit[m2]._add_data("test2", [1, 2, 3], [2, 3, 4], {"M/c": 4, "M/e": 5})
    assert set(fit.params.keys()) == set(all_params)
    np.testing.assert_allclose(fit.datasets[id(m)]._conditions[0].p_external,
                               [0, 1, 2, 3, 5, 6, 7, 8])
    assert np.all(fit.datasets[id(m)]._conditions[0].p_local ==
                  [None, None, None, None, 4, None, None, None, None])
    params = ["M/mu", "M/sig", "M/a", "M/b", None, "M/d", "M/e", "M/f", "M/q"]
    assert fetch_params(
        fit.params,
        fit.datasets[id(m)]._conditions[0]._p_global_indices) == params

    np.testing.assert_allclose(fit.datasets[id(m)]._conditions[1].p_external,
                               [0, 1, 2, 3, 5, 6, 7, 8])
    assert np.all(fit.datasets[id(m)]._conditions[1].p_local ==
                  [None, None, None, None, 4, None, None, None, None])
    params = [
        "M/mu", "M/sig", "M/a", "M/b", None, "M/d", "M/e_new", "M/f", "M/q"
    ]
    assert fetch_params(
        fit.params,
        fit.datasets[id(m)]._conditions[1]._p_global_indices) == params

    np.testing.assert_allclose(fit.datasets[id(m2)]._conditions[0].p_external,
                               [0, 1, 2])
    assert np.all(fit.datasets[id(m2)]._conditions[0].p_local ==
                  [None, None, None, 4, 6])
    params = ["M/mu", "M/e", "M/q", None, None]
    assert fetch_params(
        fit.params,
        fit.datasets[id(m2)]._conditions[0]._p_global_indices) == params

    params = ["M/mu", None, "M/q", None, "M/r"]
    assert fetch_params(
        fit.params,
        fit.datasets[id(m2)]._conditions[1]._p_global_indices) == params

    fit.update_params(Params(**{"M/mu": 4, "M/sig": 6}))
    assert fit["M/mu"].value == 4
    assert fit["M/sig"].value == 6

    f2 = Fit(m)
    f2._add_data("test", [1, 2, 3], [2, 3, 4])
    f2["M/mu"].value = 12

    fit.update_params(f2)
    assert fit["M/mu"].value == 12

    with pytest.raises(RuntimeError):
        fit.update_params(5)  # noqa
Example #20
@pytest.mark.parametrize(
    "model,param,unit",
    [
        (odijk("m").subtract_independent_offset(), "m/f_offset", "pN"),
        (inverted_odijk("m").subtract_independent_offset(), "m/d_offset",
         "micron"),
        ((odijk("m") + odijk("m")).subtract_independent_offset(),
         "m_with_m/f_offset", "pN"),
        (odijk("m").invert().subtract_independent_offset(), "inv(m)/d_offset",
         "micron"),
        (Model("m", lambda c, a: c + a).subtract_independent_offset(),
         "m/c_offset", "au"),
        (
            Model("m",
                  lambda c, a: c + a).invert().subtract_independent_offset(),
            "inv(m)/y_offset",
            "au",
        ),
    ],
)
def test_subtract_independent_offset_unit(model, param, unit):
    """ "Validate that the model units propagate to the subtracted independent offset parameter"""
    assert model.defaults[param].unit == unit