Code example #1
0
    def test_against_elementwise_transformation(self):
        """Check the general composed case agrees with an all-element-wise one."""
        # Build a composed transformation whose parts are all element-wise
        identity = pints.IdentityTransformation(1)  # element-wise by construction
        elementwise_t = pints.ComposedTransformation(
            identity, self.t2, self.t3)
        self.assertTrue(elementwise_t.elementwise())

        # The log-Jacobian determinants must agree
        self.assertAlmostEqual(
            self.t.log_jacobian_det(self.x),
            elementwise_t.log_jacobian_det(self.x))

        # ... and so must their first derivatives
        _, deriv_general = self.t.log_jacobian_det_S1(self.x)
        _, deriv_elementwise = elementwise_t.log_jacobian_det_S1(self.x)
        self.assertTrue(np.allclose(deriv_general, deriv_elementwise))
Code example #2
0
File: test_transformation.py  Project: dungho95/pints
    def setUpClass(cls):
        """Build the composed transformation and reference values shared by tests."""
        # NOTE(review): presumably decorated with @classmethod in the full
        # file (``cls`` is the receiver) -- decorator not visible here.
        cls.t1 = TestNonElementWiseIdentityTransformation(1)
        cls.t2 = pints.RectangularBoundariesTransformation(
            np.array([1, 2]), np.array([10, 20]))
        cls.t3 = pints.LogTransformation(1)
        cls.t = pints.ComposedTransformation(cls.t1, cls.t2, cls.t3)

        # Reference point in model space (p) and its transformed image (x)
        cls.p = [0.1, 1.5, 15., 999.]
        cls.x = [0.1, -2.8332133440562162, 0.9555114450274365,
                 6.9067547786485539]

        # Expected Jacobian (diagonal), its first-derivative tensor, and
        # log-determinant values used as ground truth by the tests
        cls.j = np.diag([1., 0.4722222222222225, 3.6111111111111098, 999.])
        cls.j_s1_diag = [0., 0.4197530864197533, -1.6049382716049378, 999.]
        cls.j_s1 = np.zeros((4, 4, 4))
        for index, value in enumerate(cls.j_s1_diag):
            cls.j_s1[index, index, index] = value
        cls.log_j_det = 7.4404646962481324
        cls.log_j_det_s1 = [0., 0.8888888888888888, -0.4444444444444445, 1.]
Code example #3
0
def inference(model, values, times):
    """Run a CMA-ES optimisation followed by MCMC sampling on ``model``.

    Parameters
    ----------
    model
        A pints forward model compatible with ``pints.SingleOutputProblem``.
    values, times
        Observed data and the times at which it was recorded.

    Side effects: prints progress information and writes ``(xs, likelihood
    class, prior, chains, method name)`` to ``results.pickle``.
    """
    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra noise parameter, sigma)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable
    lower_bounds = np.array([1e-3, 0.0, 0.4, 0.1, 1e-6, 8.0, 1e-4])
    upper_bounds = np.array([10.0, 0.4, 0.6, 100.0, 100e-6, 10.0, 0.2])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose a starting point for the optimisation
    # params =                   ['k0', 'E0', 'a', 'Ru', 'Cdl', 'freq', 'sigma']
    start_parameters = np.array(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])

    # Search in a transformed space: log-scale for the first parameter,
    # rectangular-bounds transform for the remaining six
    transform = pints.ComposedTransformation(
        pints.LogTransformation(1),
        pints.RectangularBoundariesTransformation(lower_bounds[1:],
                                                  upper_bounds[1:]),
    )
    # Initial step sizes: 10% of each parameter's allowed range
    sigma0 = [0.1 * (h - l) for l, h in zip(lower_bounds, upper_bounds)]
    boundaries = pints.RectangularBoundaries(lower_bounds, upper_bounds)
    found_parameters, found_value = pints.optimise(log_posterior,
                                                   start_parameters,
                                                   sigma0,
                                                   boundaries,
                                                   transform=transform,
                                                   method=pints.CMAES)

    # Start the chains from slightly perturbed copies of the optimum;
    # the frequency (index 5) is kept fixed at its optimised value
    xs = [
        found_parameters * 1.001,
        found_parameters * 1.002,
        found_parameters * 1.003,
    ]
    for x in xs:
        x[5] = found_parameters[5]

    print('start_parameters', start_parameters)
    print('found_parameters', found_parameters)
    print('lower_bounds', lower_bounds)
    print('upper_bounds', upper_bounds)

    # Create mcmc routine with three chains
    # (the original comment said "four", but 3 chains are configured below)
    mcmc = pints.MCMCController(log_posterior,
                                3,
                                xs,
                                method=pints.HaarioBardenetACMC,
                                transform=transform)

    # Add stopping criterion
    mcmc.set_max_iterations(10000)

    # Run!
    chains = mcmc.run()

    # Save chains for plotting and analysis.
    # NOTE(review): this pickles the GaussianLogLikelihood *class*, not the
    # ``log_likelihood`` instance built above -- preserved as-is; confirm
    # this is intentional.
    # Fix: use a context manager so the file handle is closed even if
    # pickling raises (the original left the file object open).
    with open('results.pickle', 'wb') as f:
        pickle.dump((xs, pints.GaussianLogLikelihood, log_prior, chains,
                     'HaarioBardenetACMC'), f)