Code example #1 (score: 0)
    def test_transformed_log_pdf(self):
        # Check that TransformedLogPDF agrees with a manual change of
        # variables: value shifts by log|J| and gradients pick up the
        # Jacobian plus the log-determinant sensitivity.
        transform = pints.LogTransformation(2)
        pdf = pints.toy.TwistedGaussianLogPDF(2, 0.01)
        p = [0.05, 1.01]
        q = [-2.9957322735539909, 0.0099503308531681]
        jacobian = np.diag(p)
        log_jac_det = -2.9857819427008230
        transformed_pdf = transform.convert_log_pdf(pdf)

        # Evaluations before and after the transform must agree
        self.assertAlmostEqual(transformed_pdf(q), pdf(p) + log_jac_det)
        self.assertEqual(transformed_pdf.n_parameters(), pdf.n_parameters())

        # First-order sensitivities via evaluateS1()
        value, grad = pdf.evaluateS1(p)
        expected_value = value + log_jac_det
        expected_grad = np.matmul(grad, jacobian) + np.ones(2)
        got_value, got_grad = transformed_pdf.evaluateS1(q)
        self.assertTrue(np.allclose(got_value, expected_value))
        self.assertTrue(np.allclose(got_grad, expected_grad))

        # A transformation of mismatched dimension must be rejected
        self.assertRaises(ValueError, pints.TransformedLogPDF, pdf,
                          pints.LogTransformation(3))
Code example #2 (score: 0)
File: test_transformation.py — Project: dungho95/pints
    def setUpClass(cls):
        # Prepare log-transformations and shared reference values.
        cls.t1 = pints.LogTransformation(1)
        cls.t4 = pints.LogTransformation(4)

        # Model-space points and their log-space images
        cls.p = [0.1, 1., 10., 999.]
        cls.x = [-2.3025850929940455, 0., 2.3025850929940459,
                 6.9067547786485539]
        # Jacobian of the inverse transform is diag(p)
        cls.j = np.diag(cls.p)
        # Only the (i, i, i) entries of the S1 Jacobian tensor are non-zero
        cls.j_s1 = np.zeros((4, 4, 4))
        diag_idx = np.arange(4)
        cls.j_s1[diag_idx, diag_idx, diag_idx] = cls.p
        cls.log_j_det = np.sum(cls.x)
        cls.log_j_det_s1 = np.ones(4)
Code example #3 (score: 0)
    def update_model(self, fixed_parameters_list):
        """
        Update the model with fixed parameters and rebuild the pints
        inference objects (likelihood, prior, posterior, transformation).

        Parameters
        ----------
        fixed_parameters_list
            List of fixed parameter values, in the same order as the
            model's parameter names.
        """
        # Map each model parameter name to its fixed value
        # (fix: replaced a verbose dict comprehension with dict(zip(...)))
        name_value_dict = dict(
            zip(self.model._parameter_names, fixed_parameters_list))
        self.model.fix_parameters(name_value_dict)

        # Setup the problem with pints,
        # including likelihood, prior and posterior
        # (fix: removed leftover debug print of n_parameters())
        problem = pints.SingleOutputProblem(
            model=self.model,
            times=self.data['Time'].to_numpy(),
            values=self.data['Incidence Number'].to_numpy())
        log_likelihood = pints.GaussianLogLikelihood(problem)
        priors = self.set_prior(name_value_dict)
        self.log_prior = pints.ComposedLogPrior(*priors)
        self.log_posterior = pints.LogPosterior(log_likelihood, self.log_prior)

        # Sample in log-space: log-transform every remaining parameter
        self.transformations = pints.LogTransformation(
            self.log_posterior.n_parameters())
Code example #4 (score: 0)
    def test_transformed_boundaries(self):
        # TransformedBoundaries must give the same verdicts as the
        # original boundaries, for points inside and outside the box.
        transform = pints.LogTransformation(2)
        bounds = pints.RectangularBoundaries([0.01, 0.95], [0.05, 1.05])
        t_bounds = transform.convert_boundaries(bounds)
        inside = [0.02, 1.01]
        t_inside = [-3.9120230054281460, 0.0099503308531681]
        outside = [10., 50.]
        t_outside = [2.3025850929940459, 3.9120230054281460]
        expected_range = [1.6094379124341001, 0.1000834585569826]

        # check() must agree before and after the transform
        self.assertEqual(t_bounds.check(t_inside), bounds.check(inside))
        self.assertEqual(t_bounds.check(t_outside), bounds.check(outside))
        self.assertEqual(t_bounds.n_parameters(), bounds.n_parameters())

        # Range of the transformed rectangle
        self.assertTrue(np.allclose(t_bounds.range(), expected_range))

        # A transformation of mismatched dimension must be rejected
        self.assertRaises(ValueError, pints.TransformedBoundaries, bounds,
                          pints.LogTransformation(3))
Code example #5 (score: 0)
    def test_transformed_error_measure(self):
        # TransformedErrorMeasure must match the original error measure;
        # unlike a log-PDF, no log|J| offset is applied to the value.
        transform = pints.LogTransformation(2)
        error = pints.toy.ParabolicError()
        p = [0.1, 0.1]
        q = [-2.3025850929940455, -2.3025850929940455]
        jacobian = np.diag(p)
        t_error = transform.convert_error_measure(error)

        # Evaluations before and after the transform must agree
        self.assertAlmostEqual(t_error(q), error(p))
        self.assertEqual(t_error.n_parameters(), error.n_parameters())

        # First-order sensitivities via evaluateS1()
        value, grad = error.evaluateS1(p)
        expected_grad = np.matmul(grad, jacobian)
        got_value, got_grad = t_error.evaluateS1(q)
        self.assertTrue(np.allclose(got_value, value))
        self.assertTrue(np.allclose(got_grad, expected_grad))

        # A transformation of mismatched dimension must be rejected
        self.assertRaises(ValueError, pints.TransformedErrorMeasure, error,
                          pints.LogTransformation(3))
Code example #6 (score: 0)
    def test_transformed_log_prior(self):
        # TransformedLogPrior: samples must live in the transformed
        # (log) space, so every coordinate of log(u), u in (0.1, 0.9),
        # is negative.
        d = 2
        transform = pints.LogTransformation(d)
        prior = pints.UniformLogPrior([0.1, 0.1], [0.9, 0.9])
        t_prior = transform.convert_log_prior(prior)

        # Single draw and a large batch: shape (n, d), all values < 0
        for n in (1, 1000):
            samples = t_prior.sample(n)
            self.assertEqual(samples.shape, (n, d))
            self.assertTrue(np.all(samples < 0.))
Code example #7 (score: 0)
File: test_transformation.py — Project: dungho95/pints
    def setUpClass(cls):
        # Compose identity, rectangular-boundaries and log transformations.
        cls.t1 = TestNonElementWiseIdentityTransformation(1)
        cls.t2 = pints.RectangularBoundariesTransformation(
            np.array([1, 2]), np.array([10, 20]))
        cls.t3 = pints.LogTransformation(1)

        cls.t = pints.ComposedTransformation(cls.t1, cls.t2, cls.t3)

        # Reference point in model space and its transformed image
        cls.p = [0.1, 1.5, 15., 999.]
        cls.x = [0.1, -2.8332133440562162, 0.9555114450274365,
                 6.9067547786485539]
        cls.j = np.diag([1., 0.4722222222222225, 3.6111111111111098, 999.])
        cls.j_s1_diag = [0., 0.4197530864197533, -1.6049382716049378, 999.]
        # Only the (i, i, i) entries of the S1 Jacobian tensor are non-zero
        cls.j_s1 = np.zeros((4, 4, 4))
        diag_idx = np.arange(4)
        cls.j_s1[diag_idx, diag_idx, diag_idx] = cls.j_s1_diag
        cls.log_j_det = 7.4404646962481324
        cls.log_j_det_s1 = [0., 0.8888888888888888, -0.4444444444444445, 1.]
Code example #8 (score: 0)
def inference(model, values, times):
    """
    Run MCMC inference on ``model`` given observed ``values`` at ``times``.

    Builds a Gaussian log-likelihood with a uniform prior, optimises a
    starting point with CMA-ES, then runs three Haario-Bardenet ACMC
    chains and pickles the results to ``results.pickle``.
    """
    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable
    lower_bounds = np.array([1e-3, 0.0, 0.4, 0.1, 1e-6, 8.0, 1e-4])
    upper_bounds = np.array([10.0, 0.4, 0.6, 100.0, 100e-6, 10.0, 0.2])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    # params =                   ['k0', 'E0', 'a', 'Ru', 'Cdl', 'freq', 'sigma']
    start_parameters = np.array(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])

    # Log-transform k0; bound-transform the remaining parameters
    transform = pints.ComposedTransformation(
        pints.LogTransformation(1),
        pints.RectangularBoundariesTransformation(lower_bounds[1:],
                                                  upper_bounds[1:]),
    )
    sigma0 = [0.1 * (h - l) for l, h in zip(lower_bounds, upper_bounds)]
    boundaries = pints.RectangularBoundaries(lower_bounds, upper_bounds)
    found_parameters, found_value = pints.optimise(log_posterior,
                                                   start_parameters,
                                                   sigma0,
                                                   boundaries,
                                                   transform=transform,
                                                   method=pints.CMAES)

    # Perturbed chain starts; keep frequency (index 5) fixed across chains
    xs = [
        found_parameters * 1.001,
        found_parameters * 1.002,
        found_parameters * 1.003,
    ]
    for x in xs:
        x[5] = found_parameters[5]

    print('start_parameters', start_parameters)
    print('found_parameters', found_parameters)
    print('lower_bounds', lower_bounds)
    print('upper_bounds', upper_bounds)

    # Create mcmc routine with three chains
    # (fix: comment previously said "four" while 3 chains are created)
    mcmc = pints.MCMCController(log_posterior,
                                3,
                                xs,
                                method=pints.HaarioBardenetACMC,
                                transform=transform)

    # Add stopping criterion
    mcmc.set_max_iterations(10000)

    # Run!
    chains = mcmc.run()

    # Save chains for plotting and analysis.
    # Fix: use a context manager so the pickle file handle is closed
    # deterministically (the original leaked the handle from a bare open()).
    with open('results.pickle', 'wb') as f:
        pickle.dump((xs, pints.GaussianLogLikelihood, log_prior, chains,
                     'HaarioBardenetACMC'), f)
Code example #9 (score: 0)
File: likelihoods.py — Project: pkpdapp-team/pkpdapp
 def create_pints_transform(self):
     """
     Return the pints transformation for this single parameter.

     Always returns an identity transformation. The original code guarded
     a ``pints.LogTransformation`` behind ``if False:``, which made that
     branch unreachable dead code; it has been removed. Restore a real
     condition here if a log-transform is ever wanted.
     """
     # NOTE(review): dead branch removed —
     # was: `if False: return pints.LogTransformation(n_parameters=1)`
     return pints.IdentityTransformation(n_parameters=1)
Code example #10 (score: 0)
def inference2(model_raw, model_old, model, values, times):
    """
    Run MCMC inference for ``model_old`` against the observed time series.

    Builds a Gaussian log-likelihood on ``model_old`` with a uniform prior
    derived from ``model_raw.params``, starts three Haario-Bardenet ACMC
    chains from (perturbed) known parameters, and pickles the results to
    ``results2.pickle``. ``model`` is currently unused but kept for
    interface compatibility with callers.
    """
    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model_old, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Uniform prior; keep E0 a 10% buffer away from the potential sweep limits
    e0_buffer = 0.1 * (model_raw.params['Ereverse'] - model_raw.params['Estart'])
    # param order: ['k0', 'E0', 'Cdl', 'Ru', 'alpha', 'omega', 'sigma']
    lower_bounds = np.array([
        0.0,
        model_raw.params['Estart'] + e0_buffer,
        0.0,
        0.0,
        0.4,
        0.9 * model_raw.params['omega'],
        1e-4,
    ])
    upper_bounds = np.array([
        100 * model_raw.params['k0'],
        model_raw.params['Ereverse'] - e0_buffer,
        10 * model_raw.params['Cdl'],
        10 * model_raw.params['Ru'],
        0.6,
        1.1 * model_raw.params['omega'],
        0.2,
    ])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    param_names = ['k0', 'E0', 'Cdl', 'Ru', 'alpha', 'omega', 'sigma']
    start_parameters = np.array([
        model_raw.params['k0'],
        model_raw.params['E0'],
        model_raw.params['Cdl'],
        model_raw.params['Ru'],
        model_raw.params['alpha'],
        model_raw.params['omega'],
        0.01
    ])

    sigma0 = [0.5 * (h - l) for l, h in zip(lower_bounds, upper_bounds)]
    boundaries = pints.RectangularBoundaries(lower_bounds, upper_bounds)
    # Optimisation step deliberately disabled: start from the known values.
    # Re-enable with:
    #   found_parameters, found_value = pints.optimise(
    #       log_posterior, start_parameters, sigma0, boundaries,
    #       method=pints.CMAES)
    found_parameters = start_parameters
    print('start_parameters', start_parameters)
    print('found_parameters', found_parameters)

    # Perturbed chain starts; keep omega (index 5) fixed across chains
    xs = [
        found_parameters * 1.001,
        found_parameters * 0.999,
        found_parameters * 0.998,
    ]
    for x in xs:
        x[5] = found_parameters[5]

    # adjust Ru (index 3) to something reasonable
    xs[0][3] = 1.001 * 5e-5
    xs[1][3] = 1.00 * 5e-5
    xs[2][3] = 0.999 * 5e-5

    # Log-transform k0; bound-transform the remaining parameters
    transform = pints.ComposedElementWiseTransformation(
        pints.LogTransformation(1),
        pints.RectangularBoundariesTransformation(
            lower_bounds[1:], upper_bounds[1:]
        ),
    )

    # Create mcmc routine with three chains
    # (fix: comment previously said "four" while 3 chains are created)
    mcmc = pints.MCMCController(log_posterior, 3, xs,
                                method=pints.HaarioBardenetACMC,
                                transform=transform)

    # Add stopping criterion
    mcmc.set_max_iterations(10000)

    # Run!
    chains = mcmc.run()

    # Save chains for plotting and analysis.
    # Fix: use a context manager so the pickle file handle is closed
    # deterministically (the original leaked the handle from a bare open()).
    with open('results2.pickle', 'wb') as f:
        pickle.dump((xs, pints.GaussianLogLikelihood, log_prior,
                     chains, 'HaarioACMC'), f)