Example 1
def figure2():
    """Make a figure for MCMC inference
    """
    num_mcmc_iters = 10000

    def stimulus(t):
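        # Step stimulus: +1 before t = 50, -100 on [50, 75), +1 from t = 75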
        return (1 * (t < 50)) + (-100 * ((t >= 50) & (t < 75))) + (1 * (t >= 75))

    # Generate data
    y0 = np.array([0.0, 0.0])
    m = diffeqinf.DampedOscillator(stimulus, y0, 'RK45')
    m.set_tolerance(1e-8)
    true_params = [1.0, 0.2, 1.0]
    times = np.linspace(0, 100, 500)
    y = m.simulate(true_params, times)
    y += np.random.normal(0, 0.01, len(times))

    # Run inference with correct model
    problem = pints.SingleOutputProblem(m, times, y)
    likelihood = pints.GaussianLogLikelihood(problem)
    prior = pints.UniformLogPrior([0] * 4, [1e6] * 4)
    posterior = pints.LogPosterior(likelihood, prior)

    x0 = [true_params + [0.01]] * 3

    mcmc = pints.MCMCController(posterior, 3, x0)
    mcmc.set_max_iterations(num_mcmc_iters)
    chains_correct = mcmc.run()

    # Run inference with incorrect model
    m.set_tolerance(1e-2)
    problem = pints.SingleOutputProblem(m, times, y)
    likelihood = pints.GaussianLogLikelihood(problem)
    prior = pints.UniformLogPrior([0] * 4, [1e6] * 4)
    posterior = pints.LogPosterior(likelihood, prior)

    mcmc = pints.MCMCController(posterior, 3, x0)
    mcmc.set_max_iterations(num_mcmc_iters)
    chains_incorrect = mcmc.run()

    # Plot MCMC chains
    pints.plot.trace(chains_incorrect)
    plt.show()

    # Plot posteriors
    diffeqinf.plot.plot_grouped_parameter_posteriors(
        [chains_correct[0, num_mcmc_iters // 2:, :]],
        [chains_incorrect[0, num_mcmc_iters // 2:, :]],
        [chains_incorrect[1, num_mcmc_iters // 2:, :]],
        [chains_incorrect[2, num_mcmc_iters // 2:, :]],
        true_model_parameters=true_params,
        method_names=[
            'Correct', 'PoorTol_Chain1', 'PoorTol_Chain2', 'PoorTol_Chain3'
        ],
        parameter_names=['k', 'c', 'm'],
        fname=None)
    plt.show()
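
The example above depends on the external diffeqinf package. For orientation, here is a minimal self-contained sketch of the same likelihood/prior/posterior/MCMC pattern using only PINTS' built-in toy logistic model; the model, prior bounds and iteration counts are illustrative choices, not taken from the original code.

import numpy as np
import pints
import pints.toy

# Toy model and synthetic data (illustrative values)
model = pints.toy.LogisticModel()
true_params = [0.015, 500]
times = np.linspace(0, 1000, 100)
values = model.simulate(true_params, times)
values += np.random.normal(0, 10, values.shape)

# Problem, likelihood (adds a noise parameter), prior and posterior
problem = pints.SingleOutputProblem(model, times, values)
log_likelihood = pints.GaussianLogLikelihood(problem)
log_prior = pints.UniformLogPrior([0, 0, 0], [1, 1000, 100])
log_posterior = pints.LogPosterior(log_likelihood, log_prior)

# Three chains started around the true parameters (plus a noise guess)
x0 = [np.array(true_params + [10]) * f for f in (1.0, 1.1, 0.9)]
mcmc = pints.MCMCController(log_posterior, 3, x0)
mcmc.set_max_iterations(2000)
mcmc.set_log_to_screen(False)
chains = mcmc.run()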
Example 2
    def test_log_posterior(self):

        # Create a toy problem and log likelihood
        model = pints.toy.LogisticModel()
        real_parameters = [0.015, 500]
        x = [0.014, 501]
        sigma = 0.001
        times = np.linspace(0, 1000, 100)
        values = model.simulate(real_parameters, times)
        problem = pints.SingleOutputProblem(model, times, values)
        log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem, sigma)

        # Create a prior
        log_prior = pints.UniformLogPrior([0, 0], [1, 1000])

        # Test
        p = pints.LogPosterior(log_likelihood, log_prior)
        self.assertEqual(p(x), log_likelihood(x) + log_prior(x))
        y = [-1, 500]
        self.assertEqual(log_prior(y), -float('inf'))
        self.assertEqual(p(y), -float('inf'))
        self.assertEqual(p(y), log_prior(y))

        # Test derivatives
        log_prior = pints.ComposedLogPrior(pints.GaussianLogPrior(0.015, 0.3),
                                           pints.GaussianLogPrior(500, 100))
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        x = [0.013, 540]
        y, dy = log_posterior.evaluateS1(x)
        self.assertEqual(y, log_posterior(x))
        self.assertEqual(dy.shape, (2, ))
        y1, dy1 = log_prior.evaluateS1(x)
        y2, dy2 = log_likelihood.evaluateS1(x)
        self.assertTrue(np.all(dy == dy1 + dy2))

        # Test getting the prior and likelihood back again
        self.assertIs(log_posterior.log_prior(), log_prior)
        self.assertIs(log_posterior.log_likelihood(), log_likelihood)

        # First arg must be a LogPDF
        self.assertRaises(ValueError, pints.LogPosterior, 'hello', log_prior)

        # Second arg must be a log_prior
        self.assertRaises(ValueError, pints.LogPosterior, log_likelihood,
                          log_likelihood)

        # Prior and likelihood must have same dimension
        self.assertRaises(ValueError, pints.LogPosterior, log_likelihood,
                          pints.GaussianLogPrior(0.015, 0.3))
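
In brief, the identities this test exercises are the following (a compact restatement, not extra test code):

# Inside the prior support:  log_posterior(x) == log_likelihood(x) + log_prior(x)
# Outside the prior support: log_posterior(x) == log_prior(x) == -inf
# evaluateS1 returns the value together with the summed sensitivities:
#     y, dy = log_posterior.evaluateS1(x)   # dy == dy_prior + dy_likelihood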
Example 3
    def __init__(self, name):
        super(TestAdaptiveCovarianceMCMC, self).__init__(name)

        # Create toy model
        self.model = toy.LogisticModel()
        self.real_parameters = [0.015, 500]
        self.times = np.linspace(0, 1000, 1000)
        self.values = self.model.simulate(self.real_parameters, self.times)

        # Add noise
        noise = 10
        self.values += np.random.normal(0, noise, self.values.shape)
        self.real_parameters.append(noise)

        # Create an object with links to the model and time series
        self.problem = pints.SingleOutputProblem(
            self.model, self.times, self.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        self.log_prior = pints.UniformLogPrior(
            [0.01, 400, noise * 0.1],
            [0.02, 600, noise * 100]
        )

        # Create an un-normalised log-posterior (likelihood * prior)
        self.log_posterior = pints.LogPosterior(
            pints.GaussianLogLikelihood(self.problem), self.log_prior)

        # Select initial point and covariance
        self.x0 = np.array(self.real_parameters) * 1.1
        self.sigma0 = [0.005, 100, 0.5 * noise]
Example 4
    def _create_posterior(self, times, wd, wp):
        """
        Builds the log-posterior for the PHE model.

        Parameters
        ----------
        times
            (list) List of time points at which we have data for the
            log-likelihood computation.
        wd
            Proportion of contribution of the deaths_data to the
            log-likelihood.
        wp
            Proportion of contribution of the positives_data to the
            log-likelihood.

        """
        # Create a likelihood
        loglikelihood = PHELogLik(self._model, self._susceptibles_data,
                                  self._infectives_data, times, self._deaths,
                                  self._time_to_death, self._deaths_times,
                                  self._fatality_ratio, self._total_tests,
                                  self._positive_tests, self._serology_times,
                                  self._sens, self._spec, wd, wp)

        # Create a prior
        log_prior = PHELogPrior(self._model, times)

        # Create a posterior log-likelihood (log(likelihood * prior))
        self._log_posterior = pints.LogPosterior(loglikelihood, log_prior)
Example 5
    def setUpClass(cls):
        """ Prepare a problem for testing. """

        # Random seed
        np.random.seed(1)

        # Create toy model
        cls.model = toy.LogisticModel()
        cls.real_parameters = [0.015, 500]
        cls.times = np.linspace(0, 1000, 1000)
        cls.values = cls.model.simulate(cls.real_parameters, cls.times)

        # Add noise
        cls.noise = 10
        cls.values += np.random.normal(0, cls.noise, cls.values.shape)
        cls.real_parameters.append(cls.noise)
        cls.real_parameters = np.array(cls.real_parameters)

        # Create an object with links to the model and time series
        cls.problem = pints.SingleOutputProblem(cls.model, cls.times,
                                                cls.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        cls.log_prior = pints.UniformLogPrior([0.01, 400, cls.noise * 0.1],
                                              [0.02, 600, cls.noise * 100])

        # Create a log likelihood
        cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        cls.log_posterior = pints.LogPosterior(cls.log_likelihood,
                                               cls.log_prior)
Example 6
    def update_model(self, fixed_parameters_list):
        """
        Update the model with fixed parameters.

        Parameters
        ----------
        fixed_parameters_list
            List of fixed parameter values.
        """

        # Create a dictionary of fixed parameters and their values
        name_value_dict = {
            name: value for name, value in
            zip(self.model._parameter_names, fixed_parameters_list)
        }
        self.model.fix_parameters(name_value_dict)

        # Set up the problem with PINTS, including the likelihood, prior
        # and posterior
        problem = pints.SingleOutputProblem(
            model=self.model,
            times=self.data['Time'].to_numpy(),
            values=self.data['Incidence Number'].to_numpy())
        log_likelihood = pints.GaussianLogLikelihood(problem)
        priors = self.set_prior(name_value_dict)
        self.log_prior = pints.ComposedLogPrior(*priors)
        self.log_posterior = pints.LogPosterior(log_likelihood, self.log_prior)

        # Create a log transformation over all parameters
        self.transformations = pints.LogTransformation(
            self.log_posterior.n_parameters())
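
A transformation built this way is normally handed on to the optimiser or sampler so that the search runs in log-parameter space. A hypothetical continuation (not part of the original excerpt; recent PINTS releases take a transformation keyword, while some older releases used transform, as in Example 22 below):

        # Hypothetical continuation: sample in the transformed space
        xs = [self.log_posterior.n_parameters() * [0.5]] * 3  # placeholder starts
        mcmc = pints.MCMCController(
            self.log_posterior, 3, xs,
            transformation=self.transformations)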
Example 7
    def test_model_that_gives_nan(self):
        # This model will return a nan in the gradient evaluation, which
        # originally tripped up the find_reasonable_epsilon function in nuts.
        # Run it for a bit so that we get coverage on the if statement!

        model = pints.toy.LogisticModel()
        real_parameters = model.suggested_parameters()
        times = model.suggested_times()
        org_values = model.simulate(real_parameters, times)
        np.random.seed(1)
        noise = 0.2
        values = org_values + np.random.normal(0, noise, org_values.shape)
        problem = pints.SingleOutputProblem(model, times, values)
        log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem, noise)

        log_prior = pints.UniformLogPrior([0.01, 40], [0.2, 60])

        log_posterior = pints.LogPosterior(log_likelihood, log_prior)

        xs = [real_parameters * 1.1]
        nuts_mcmc = pints.MCMCController(log_posterior,
                                         len(xs),
                                         xs,
                                         method=pints.NoUTurnMCMC)

        nuts_mcmc.set_max_iterations(10)
        nuts_mcmc.set_log_to_screen(False)
        nuts_chains = nuts_mcmc.run()

        self.assertFalse(np.isnan(np.sum(nuts_chains)))
Example 8
    def setUpClass(cls):
        """ Prepare problem for tests. """
        # Load a forward model
        model = pints.toy.LogisticModel()

        # Create some toy data
        real_parameters = [0.015, 500]
        times = np.linspace(0, 1000, 1000)
        org_values = model.simulate(real_parameters, times)

        # Add noise
        noise = 10
        values = org_values + np.random.normal(0, noise, org_values.shape)
        real_parameters = np.array(real_parameters + [noise])

        # Create an object with links to the model and time series
        problem = pints.SingleOutputProblem(model, times, values)

        # Create an error measure
        cls.score = pints.SumOfSquaresError(problem)
        cls.boundaries = pints.RectangularBoundaries([0, 400], [0.05, 600])

        # Create a log-likelihood function (adds an extra parameter!)
        log_likelihood = pints.GaussianLogLikelihood(problem)

        # Create a uniform prior over both the parameters and the new noise
        cls.log_prior = pints.UniformLogPrior([0.01, 400, noise * 0.1],
                                              [0.02, 600, noise * 100])

        # Create a posterior log-likelihood (log(likelihood * prior))
        cls.log_posterior = pints.LogPosterior(log_likelihood, cls.log_prior)
Example 9
    def _make_pints_posterior(self):
        """Rebuild the Pints posterior and save it.
        """
        # Build a uniform model prior if it is not supplied
        if self.model_prior is None:
            num_model_params = self.problem.n_parameters()
            model_prior = pints.UniformLogPrior([-1e6] * num_model_params,
                                                [1e6] * num_model_params)
        else:
            model_prior = self.model_prior

        # Get the GP prior
        kernel_prior = NonstatGPLogPrior(
            self.gp_times,
            self.kernel.num_parameters() // len(self.gp_times), self.mu,
            self.alpha, self.beta)

        # Combine the two priors
        log_prior = pints.ComposedLogPrior(model_prior, kernel_prior)

        # Build the likelihood
        log_likelihood = flexnoise.KernelCovarianceLogLikelihood(
            self.problem, self.kernel)

        # Build the posterior
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)

        self.posterior = log_posterior
Example 10
    def test_build_tree_nan(self):
        # This method gives nan in the hamiltonian_dash
        # in the build_tree function
        # Needed for coverage

        model = pints.toy.LogisticModel()
        real_parameters = np.array([0.015, 20])
        times = np.linspace(0, 1000, 50)
        org_values = model.simulate(real_parameters, times)
        np.random.seed(1)
        noise = 0.1
        values = org_values + np.random.normal(0, noise, org_values.shape)
        problem = pints.SingleOutputProblem(model, times, values)
        log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem, noise)

        log_prior = pints.UniformLogPrior([0.0001, 1], [1, 500])

        log_posterior = pints.LogPosterior(log_likelihood, log_prior)

        xs = [[0.36083914, 1.99013825]]
        nuts_mcmc = pints.MCMCController(log_posterior,
                                         len(xs),
                                         xs,
                                         method=pints.NoUTurnMCMC)

        nuts_mcmc.set_max_iterations(50)
        nuts_mcmc.set_log_to_screen(False)
        np.random.seed(5)
        nuts_chains = nuts_mcmc.run()

        self.assertFalse(np.isnan(np.sum(nuts_chains)))
Example 11
def run(model, real_parameters, noise_used, log_prior_used):
    # Create some toy data
    
    times = np.linspace(1, 1000, 50)
    org_values = model.simulate(real_parameters, times)

    # Add noise
    noise = 10
    values = org_values + np.random.normal(0, noise, org_values.shape)
    real_parameters = np.array(real_parameters)


    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood_used = pints.GaussianKnownSigmaLogLikelihood(problem, [noise_used])

    # Create a uniform prior over both the parameters and the new noise variable

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood_used, log_prior_used)

    # Choose starting points for 3 mcmc chains
    xs = [
        real_parameters,
        real_parameters * 1.01,
        real_parameters * 0.99,
    ]

    # Create mcmc routine with three chains
    mcmc = pints.MCMCController(log_posterior, 3, xs, method=pints.HaarioACMC)
    
    sample_size = 4000
    # Add stopping criterion
    mcmc.set_max_iterations(sample_size)

    # Start adapting after 1000 iterations
    mcmc.set_initial_phase_iterations(sample_size//4)

    # Disable logging mode
    mcmc.set_log_to_screen(False)

    # Run!
    print('Running...')
    chains = mcmc.run()
    print('Done!')
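    # Scan forward from the end of burn-in until the first parameter's
    # Rhat across the three chains drops below 1.05, then return only the
    # post-convergence samples of that parameter from the first chain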
    s = sample_size // 4 + 1
    # HMC: s = 1
    while s < sample_size:
        chains_cut = chains[:, sample_size // 4:s + 1]
        rhat = pints.rhat(chains_cut)
        s += 1
        if rhat[0] < 1.05:
            break
    print(s)
    return chains[0][s:][:, 0]
Example 12
    def sample(self, x, parallel=False):
        """
        Runs the sampler. This method:
            (1) generates simulated data and adds noise
            (2) sets up the sampler with the given method,
                using a KnownNoiseLogLikelihood and a UniformLogPrior
            (3) runs the sampler
            (4) returns:
                - the calculated rhat value
                - the average of ess across all chains, returning the
                  minimum result across all parameters
                - the total time taken by the sampler
        """

        the_model = self.model()
        values = the_model.simulate(self.real_parameters, self.times)
        value_range = np.max(values) - np.min(values)
        values += np.random.normal(0, self.noise * value_range, values.shape)
        problem = pints.MultiOutputProblem(the_model, self.times, values)
        log_likelihood = pints.KnownNoiseLogLikelihood(
            problem, value_range * self.noise)
        lower = list(self.lower)
        upper = list(self.upper)
        log_prior = pints.UniformLogPrior(lower, upper)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        n_chains = int(x[-1])
        xs = [[
            np.random.uniform() * (u - l) + l for l, u in zip(lower, upper)
        ] for c in range(n_chains)]
        mcmc = pints.MCMCSampling(log_posterior,
                                  n_chains,
                                  xs,
                                  method=self.method)
        for sampler in mcmc.samplers():
            sampler.set_hyper_parameters(x[:-1])
        if parallel:
            mcmc.set_parallel(int(os.environ['OMP_NUM_THREADS']))

        mcmc.set_log_interval(1000)

        start = timer()
        chains = mcmc.run()
        end = timer()

        rhat = np.max(pints._diagnostics.rhat_all_params(chains))
        ess = np.zeros(chains[0].shape[1])
        for chain in chains:
            ess += np.array(pints._diagnostics.effective_sample_size(chain))
        ess /= n_chains
        ess = np.min(ess)
        print('rhat:', rhat)
        print('ess:', ess)
        print('time:', end - start)
        return rhat, ess, end - start
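
The pints._diagnostics module used above is private; current PINTS exposes the same diagnostics publicly. A sketch of the equivalent calls, assuming chains has shape (n_chains, n_iterations, n_parameters):

# Equivalent diagnostics via the public API (sketch)
rhat = np.max(pints.rhat(chains))
ess_per_param = np.mean(
    [pints.effective_sample_size(chain) for chain in chains], axis=0)
ess = np.min(ess_per_param)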
Example 13
    def mcmc_runner(temps, n_chains=nchains):
        #
        # Define the unnormalised tempered target density
        #
        x0 = np.loadtxt(cmaes_result_files + model_name + '-cell-' +
                        str(cell) + '-cmaes.txt')

        # Define Likelihood
        if args.discrepancy:
            arma_start = armax_result.params
            x0 = np.concatenate((x0, arma_start))
            print('Initial values of chain from iid noise model')
            timer.sleep(1.5)
            tempered_log_likelihood = dsLogLikelihood.DiscrepancyLogLikelihood(
                problem, armax_result, temperature=temps)
            print(
                'Experimental Warning: Running in discrepancy and thermodynamic mode'
            )
            timer.sleep(2.5)
        else:
            print('Running in iid noise mode')
            timer.sleep(2.5)
            tempered_log_likelihood = tiLogLikelihood.ThermoLogLikelihood(
                problem, sigma_noise, temps)

        tempered_log_posterior = pints.LogPosterior(tempered_log_likelihood,
                                                    log_prior)

        # Define starting point for mcmc routine
        print('Model parameters start point: ', x0)
        xs = []

        for _ in range(nchains):
            xs.append(x0)

        print('MCMC starting point: ')
        for x0 in xs:
            print(x0)

        print('MCMC starting Log-Likelihood: ')
        for x0 in xs:
            print(tempered_log_likelihood(x0))
        # Create sampler
        mcmc = mcmcsampling.MCMCSampling(tempered_log_posterior,
                                         n_chains,
                                         xs,
                                         method=pints.AdaptiveCovarianceMCMC)

        mcmc.set_log_to_screen(False)

        mcmc.set_max_iterations(iterations)
        mcmc.set_parallel(False)

        trace, LLs = mcmc.run(returnLL=True)

        return trace, LLs
Example 14
    def setUpClass(cls):
        """ Set up problem for tests. """

        # Create toy model
        cls.model = toy.LogisticModel()
        cls.real_parameters = [0.015, 500]
        cls.times = np.linspace(0, 1000, 1000)
        cls.values = cls.model.simulate(cls.real_parameters, cls.times)

        # Add noise
        cls.noise = 10
        cls.values += np.random.normal(0, cls.noise, cls.values.shape)
        cls.real_parameters.append(cls.noise)
        cls.real_parameters = np.array(cls.real_parameters)

        # Create an object with links to the model and time series
        cls.problem = pints.SingleOutputProblem(cls.model, cls.times,
                                                cls.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        cls.log_prior = pints.UniformLogPrior([0.01, 400, cls.noise * 0.1],
                                              [0.02, 600, cls.noise * 100])

        # Create a log likelihood
        cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        cls.log_posterior = pints.LogPosterior(cls.log_likelihood,
                                               cls.log_prior)

        # Run MCMC sampler
        xs = [
            cls.real_parameters * 1.1,
            cls.real_parameters * 0.9,
            cls.real_parameters * 1.15,
        ]

        mcmc = pints.MCMCController(cls.log_posterior,
                                    3,
                                    xs,
                                    method=pints.HaarioBardenetACMC)
        mcmc.set_max_iterations(200)
        mcmc.set_initial_phase_iterations(50)
        mcmc.set_log_to_screen(False)

        start = time.time()
        cls.chains = mcmc.run()
        end = time.time()
        cls.time = end - start
Example 15
def run_figureS2(num_runs=3, output_dir='./'):
    """Run the Gaussian process on block noise data.

    This function runs the simulations and saves the results to pickle.
    """
    random.seed(12345)
    np.random.seed(12345)

    all_fits = []
    for run in range(num_runs):
        # Make a synthetic time series
        times, values, data = generate_time_series(model='logistic',
                                                   noise='blocks',
                                                   n_times=625)

        # Make Pints model and problem
        model = pints.toy.LogisticModel()
        problem = pints.SingleOutputProblem(model, times, data)

        # Initial conditions for model parameters
        model_starting_point = [0.08, 50]

        # Infer the nonstationary kernel fit
        # Run an optimization assuming IID
        log_prior = pints.UniformLogPrior([0] * 3, [1e6] * 3)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        opt = pints.OptimisationController(log_posterior,
                                           model_starting_point + [2])
        xbest, fbest = opt.run()

        # Run the GP fit, using the best fit for initialization
        gp_times = times[::25]
        kernel = flexnoise.kernels.GPLaplacianKernel
        gnp = flexnoise.GPNoiseProcess(problem, kernel, xbest[:2], gp_times)
        gnp.set_gp_hyperparameters(mu=0.0, alpha=1.0, beta_num_points=200)
        x = gnp.run_optimize(num_restarts=100, parallel=True, maxiter=150)
        all_fits.append(x)

    # Save all results to pickle
    kernel = kernel(None, gp_times)
    results = [all_fits, times, data, values, model, problem, kernel]

    fname = os.path.join(output_dir, 'figS2_data.pkl')
    with open(fname, 'wb') as f:
        pickle.dump(results, f)
Example 16
def mcmc_runner(temps):

    nchains = 1
    #print('temperature', temps)
    tempered_log_likelihood = tiLogLikelihood(problem, sigma_noise, temps)
    tempered_log_posterior = pints.LogPosterior(tempered_log_likelihood,
                                                log_prior)
    xs = log_prior.sample(1)
    mcmc = mcmcsampling.MCMCSampling(tempered_log_posterior,
                                     nchains,
                                     xs,
                                     method=pints.MetropolisRandomWalkMCMC)
    #mcmc.set_log_to_file('log.txt')
    mcmc.set_log_to_screen(False)
    mcmc.set_max_iterations(niter)
    mcmc.set_parallel(False)
    chains, LL = mcmc.run(returnLL=True)
    return chains, LL
Example 17
def plot_likelihood(model, values, times):

    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable
    lower_bounds = model.non_dim([1e-3, 0.0, 0.4, 0.1, 1e-6, 8.0, 1e-4])
    upper_bounds = model.non_dim([10.0, 0.4, 0.6, 100.0, 100e-6, 10.0, 0.2])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    param_names = ['k0', 'E0', 'a', 'Ru', 'Cdl', 'freq', 'sigma']
    start_parameters = model.non_dim(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])

    scaling = (upper_bounds - lower_bounds)
    minx = start_parameters - scaling / 1000.0
    maxx = start_parameters + scaling / 1000.0

    fig = plt.figure()
    for i, start in enumerate(start_parameters):
        print(param_names[i])
        plt.clf()
        xgrid = np.linspace(minx[i], maxx[i], 100)
        ygrid = np.empty_like(xgrid)
        for j, x in enumerate(xgrid):
            params = np.copy(start_parameters)
            params[i] = x
            ygrid[j] = log_likelihood(params)
        plt.plot(xgrid, ygrid)
        plt.savefig('likelihood_' + param_names[i] + '.pdf')
Example 18
                                         sine_wave=False)
    npar = model.n_params
    #
    # Define problem
    #
    problem_sine = pints.SingleOutputProblem(model, time_sine, current_sine)
    problem_ap = pints.SingleOutputProblem(model_ap, time_ap, current_ap)
    #
    # Define log-posterior
    #
    log_likelihood = pints.KnownNoiseLogLikelihood(problem_sine,
                                                   sigma_noise_sine)
    log_likelihood_ap = pints.KnownNoiseLogLikelihood(problem_ap,
                                                      sigma_noise_ap)
    log_prior = prior.LogPrior(rate_dict, lower_conductance, npar, transform)
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)
    log_posterior_ap = pints.LogPosterior(log_likelihood_ap, log_prior)
    rate_checker = Rates.ratesPrior(transform, lower_conductance)

    if args.mcmc:
        model_metrics = np.zeros((5, 7))
        root = os.path.abspath('mcmc_results')
        param_filename = os.path.join(
            root, model_name + '-cell-' + str(cell) + '-mcmc_traces.p')
        trace = cPickle.load(open(param_filename, 'rb'))

        burnin = 70000
        points = burnin / args.points
        samples_all_chains = trace[:, burnin:, :]
        sample_chain_1 = samples_all_chains[0]
        samples_waic = sample_chain_1[::10, :npar]
Example 19
    def __init__(self, name):
        super(TestPlot, self).__init__(name)

        # Create toy model (single output)
        self.model = toy.LogisticModel()
        self.real_parameters = [0.015, 500]
        self.times = np.linspace(0, 1000, 100)  # small problem
        self.values = self.model.simulate(self.real_parameters, self.times)

        # Add noise
        self.noise = 10
        self.values += np.random.normal(0, self.noise, self.values.shape)
        self.real_parameters.append(self.noise)
        self.real_parameters = np.array(self.real_parameters)

        # Create an object with links to the model and time series
        self.problem = pints.SingleOutputProblem(self.model, self.times,
                                                 self.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        self.lower = [0.01, 400, self.noise * 0.1]
        self.upper = [0.02, 600, self.noise * 100]
        self.log_prior = pints.UniformLogPrior(self.lower, self.upper)

        # Create a log likelihood
        self.log_likelihood = pints.GaussianLogLikelihood(self.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        self.log_posterior = pints.LogPosterior(self.log_likelihood,
                                                self.log_prior)

        # Run MCMC
        self.x0 = [
            self.real_parameters * 1.1, self.real_parameters * 0.9,
            self.real_parameters * 1.05
        ]
        mcmc = pints.MCMCController(self.log_posterior, 3, self.x0)
        mcmc.set_max_iterations(300)  # make it as small as possible
        mcmc.set_log_to_screen(False)
        self.samples = mcmc.run()

        # Create toy model (multi-output)
        self.model2 = toy.LotkaVolterraModel()
        self.real_parameters2 = self.model2.suggested_parameters()
        self.times2 = self.model2.suggested_times()[::10]  # down sample it
        self.values2 = self.model2.simulate(self.real_parameters2, self.times2)

        # Add noise
        self.noise2 = 0.05
        self.values2 += np.random.normal(0, self.noise2, self.values2.shape)

        # Create an object with links to the model and time series
        self.problem2 = pints.MultiOutputProblem(self.model2, self.times2,
                                                 self.values2)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        self.log_prior2 = pints.UniformLogPrior([1, 1, 1, 1], [6, 6, 6, 6])
        # Create a log likelihood
        self.log_likelihood2 = pints.GaussianKnownSigmaLogLikelihood(
            self.problem2, self.noise2)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        self.log_posterior2 = pints.LogPosterior(self.log_likelihood2,
                                                 self.log_prior2)

        # Run MCMC
        self.x02 = [
            self.real_parameters2 * 1.1, self.real_parameters2 * 0.9,
            self.real_parameters2 * 1.05
        ]
        mcmc = pints.MCMCController(self.log_posterior2, 3, self.x02)
        mcmc.set_max_iterations(300)  # make it as small as possible
        mcmc.set_log_to_screen(False)
        self.samples2 = mcmc.run()

        # Create toy model (single-output, single-parameter)
        self.real_parameters3 = [0]
        self.log_posterior3 = toy.GaussianLogPDF(self.real_parameters3, [1])
        self.lower3 = [-3]
        self.upper3 = [3]

        # Run MCMC
        self.x03 = [[1], [-2], [3]]
        mcmc = pints.MCMCController(self.log_posterior3, 3, self.x03)
        mcmc.set_max_iterations(300)  # make it as small as possible
        mcmc.set_log_to_screen(False)
        self.samples3 = mcmc.run()
Example 20
df = pd.DataFrame(columns=columns)
for n in range(N):
    print(n)
    theta = log_prior.sample(n=1)[0]
    times = np.linspace(1, 1000, 25)
    org_values = model.simulate(theta, times)
    # Add noise
    noise = 10
    ys = org_values + np.random.normal(0, noise, org_values.shape)
    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, ys)
    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem, [noise])

    log_prior_incorrect = pints.UniformLogPrior([200], [800])
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    xs = [theta, theta * 1.01, theta * 0.99]
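    # Skip this draw if any chain's starting point has zero posterior
    # density (evaluateS1 returns -inf there)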
    isinf = False
    for x in xs:
        if math.isinf(log_posterior.evaluateS1(x)[0]):
            isinf = True
            d += 1
            break
    if isinf:
        continue

    # Create mcmc routine with three chains
    mcmc = pints.MCMCController(log_posterior, 3, xs, method=pints.HaarioACMC)
Example 21
def run_figure2(num_mcmc_samples=20000,
                num_mcmc_chains=3,
                num_runs=8,
                output_dir='./'):
    """Run the Gaussian process on multiplicative data.

    This function runs the simulations and saves the results to pickle.
    """
    random.seed(123)
    np.random.seed(123)

    all_fits = []
    iid_runs = []
    sigmas = []
    mult_runs = []
    gp_runs = []
    for run in range(num_runs):
        # Make a synthetic time series
        times, values, data = generate_time_series(model='logistic',
                                                   noise='multiplicative',
                                                   n_times=251)

        # Make Pints model and problem
        model = pints.toy.LogisticModel()
        problem = pints.SingleOutputProblem(model, times, data)

        # Initial conditions for model parameters
        model_starting_point = [0.08, 50]

        # Run MCMC for IID posterior
        likelihood = pints.GaussianLogLikelihood
        x0 = model_starting_point + [2]
        posterior_iid = run_pints(problem, likelihood, x0, num_mcmc_samples)
        iid_runs.append(posterior_iid)

        # Save standard deviations from IID runs
        sigma = np.median(posterior_iid[:, 2])
        sigmas.append(sigma)

        # Run MCMC for multiplicative noise posterior
        likelihood = pints.MultiplicativeGaussianLogLikelihood
        x0 = model_starting_point + [0.5, 0.5]
        posterior_mult = run_pints(problem, likelihood, x0, num_mcmc_samples)
        mult_runs.append(posterior_mult)

        # Infer the nonstationary kernel fit
        # Run an optimization assuming IID
        log_prior = pints.UniformLogPrior([0] * 3, [1e6] * 3)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        opt = pints.OptimisationController(log_posterior,
                                           model_starting_point + [2])
        xbest, fbest = opt.run()

        # Run the GP fit, using the best fit for initialization
        gp_times = times[::10]
        kernel = flexnoise.kernels.GPLaplacianKernel
        gnp = flexnoise.GPNoiseProcess(problem, kernel, xbest[:2], gp_times)
        gnp.set_gp_hyperparameters(mu=0.0, alpha=1.0, beta_num_points=200)
        x = gnp.run_optimize(num_restarts=100, parallel=True, maxiter=150)
        all_fits.append(x)

        # Run MCMC for multivariate normal noise
        kernel = flexnoise.kernels.GPLaplacianKernel(None, gp_times)
        kernel.parameters = x[2:]
        cov = kernel.get_matrix(times)
        likelihood = flexnoise.CovarianceLogLikelihood
        x0 = model_starting_point
        posterior_gp = run_pints(problem,
                                 likelihood,
                                 x0,
                                 num_mcmc_samples,
                                 likelihood_args=[cov])
        gp_runs.append(posterior_gp)

    # Save all results to pickle
    results = [
        iid_runs, mult_runs, all_fits, gp_runs, times, data, values, model,
        problem, kernel, sigmas
    ]

    fname = os.path.join(output_dir, 'fig2_data.pkl')
    with open(fname, 'wb') as f:
        pickle.dump(results, f)
Example 22
def inference(model, values, times):

    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable
    lower_bounds = np.array([1e-3, 0.0, 0.4, 0.1, 1e-6, 8.0, 1e-4])
    upper_bounds = np.array([10.0, 0.4, 0.6, 100.0, 100e-6, 10.0, 0.2])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    # params =                   ['k0', 'E0', 'a', 'Ru', 'Cdl', 'freq', 'sigma']
    start_parameters = np.array(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])

    transform = pints.ComposedTransformation(
        pints.LogTransformation(1),
        pints.RectangularBoundariesTransformation(lower_bounds[1:],
                                                  upper_bounds[1:]),
    )
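    # k0 is sampled on a log scale; the remaining parameters are mapped to an
    # unbounded space by the rectangular-boundaries (logit-style) transformation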
    sigma0 = [0.1 * (h - l) for l, h in zip(lower_bounds, upper_bounds)]
    boundaries = pints.RectangularBoundaries(lower_bounds, upper_bounds)
    found_parameters, found_value = pints.optimise(log_posterior,
                                                   start_parameters,
                                                   sigma0,
                                                   boundaries,
                                                   transform=transform,
                                                   method=pints.CMAES)
    xs = [
        found_parameters * 1.001,
        found_parameters * 1.002,
        found_parameters * 1.003,
    ]
    for x in xs:
        x[5] = found_parameters[5]

    print('start_parameters', start_parameters)
    print('found_parameters', found_parameters)
    print('lower_bounds', lower_bounds)
    print('upper_bounds', upper_bounds)

    # Create mcmc routine with three chains
    mcmc = pints.MCMCController(log_posterior,
                                3,
                                xs,
                                method=pints.HaarioBardenetACMC,
                                transform=transform)

    # Add stopping criterion
    mcmc.set_max_iterations(10000)

    # Run!
    chains = mcmc.run()

    # Save chains for plotting and analysis
    pickle.dump((xs, pints.GaussianLogLikelihood, log_prior, chains,
                 'HaarioBardenetACMC'), open('results.pickle', 'wb'))
Example 23
def run_fit(
    task,
    optimisation_algorithm,
    optimisation_arguments,
    sampling_algorithm,
    sampling_arguments,
):

    print('Entering run_fit()')

    if optimisation_algorithm != 'CMAES':
        raise ValueError(
            'Other optimisation algorithms are not yet supported.')
    if sampling_algorithm != 'AdaptiveCovarianceMCMC':
        raise ValueError('Other sampling algorithms are not yet supported.')

    # Get names of parameters --> They are not stored in order, so will need
    # this a lot!
    keys = task.parameters

    # Use log transform
    log_transform = task.prior.is_positive()
    if log_transform:
        print('Using log-transform for fitting')
    else:
        print('Unable to use log-transform')

    # Select objective for Aidan's code to use
    task.objFun = LogLikGauss()

    # Wrap a LogPDF around Aidan's objective
    class AidanLogPdf(pints.LogPDF):
        def __init__(self, task, keys, log_transform=None):
            self._task = task
            self._keys = keys
            self._log_transform = log_transform
            self._dimension = len(keys)
            self._p = {}

        def n_parameters(self):
            return self._dimension

        def __call__(self, x):

            # Untransform back to model space
            if self._log_transform:
                x = np.exp(x)

            # Create dict
            for i, key in enumerate(self._keys):
                self._p[key] = x[i]

            # Evaluate objective
            return self._task.calculateObjective(self._p)

    # Wrap a LogPrior around Aidan's prior
    class AidanLogPrior(pints.LogPrior):
        def __init__(self, task, keys, log_transform=None):
            self._prior = task.prior
            self._keys = keys
            self._log_transform = log_transform
            self._dimension = len(keys)
            self._p = {}

        def n_parameters(self):
            return self._dimension

        def __call__(self, x):

            # Untransform back to model space
            if self._log_transform:
                x = np.exp(x)

            # Create dict
            for i, key in enumerate(self._keys):
                self._p[key] = x[i]

            # Evaluate prior and return
            prior = self._prior.pdf(self._p)
            if prior <= 0:
                return -np.inf
            return np.log(prior)

        def sample(self, n=1):

            assert n == 1
            x = self._prior.draw()
            x = [x[key] for key in self._keys]

            # Transform to search space
            if self._log_transform:
                x = np.log(x)

            return [x]

    # Find a suitable starting point --> Will be the answer if no iterations
    # are selected
    log_prior = AidanLogPrior(task, keys, False)
    x0 = log_prior.sample()[0]
    del log_prior

    print('Parameters: ')
    print('\n'.join('  ' + key for key in keys))

    # If specified, run (repeated) CMA-ES to select a starting point
    opt_repeats = optimisation_arguments['repeats']
    print('CMA-ES runs: ' + str(opt_repeats))
    if opt_repeats:

        log_likelihood = AidanLogPdf(task, keys, log_transform)
        boundaries = pints.LogPDFBoundaries(
            AidanLogPrior(task, keys, log_transform))

        x_best, fx_best = x0, -np.inf

        for i in range(opt_repeats):

            print(' CMA-ES run ' + str(1 + i))

            # Choose random starting point (in search space)
            x0 = boundaries.sample()[0]
            f0 = log_likelihood(x0)
            tries = 0
            while not np.isfinite(f0):
                x0 = boundaries.sample()[0]
                f0 = log_likelihood(x0)
                tries += 1
                if tries > 20:
                    print('Unable to find good starting point!')
                    break

            # Create optimiser
            opt = pints.OptimisationController(log_likelihood,
                                               x0,
                                               boundaries=boundaries,
                                               method=pints.CMAES)
            opt.set_max_iterations(None)
            opt.set_parallel(True)
            opt.set_max_unchanged_iterations(80)

            # DEBUG
            #opt.set_max_iterations(5)

            # Run optimisation
            try:
                with np.errstate(all='ignore'):
                    x, fx = opt.run()
            except ValueError:
                fx = -np.inf
                import traceback
                traceback.print_exc()

            # Check outcome
            if fx > fx_best:
                print('New best score ' + str(fx) + ' > ' + str(fx_best))
                x_best, fx_best = x, fx

                if log_transform:
                    # Transform back to model space
                    x_best = np.exp(x_best)

        x0 = x_best
        x0_obj = dict(zip(keys, x0))

    # If specified, run MCMC
    n_mcmc_iters = sampling_arguments.get('iterations', 0)
    print('MCMC iterations: ' + str(n_mcmc_iters))
    if n_mcmc_iters:
        print('Starting MCMC')

        log_likelihood = AidanLogPdf(task, keys)
        log_prior = AidanLogPrior(task, keys)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)

        # Configure MCMC
        mcmc = pints.MCMCSampling(log_posterior, 1, [x0])
        mcmc.set_max_iterations(n_mcmc_iters)
        mcmc.set_parallel(False)

        # Run
        chains = mcmc.run()
        print('MCMC Completed')

        # Get chain
        chain = chains[0]

        # Discard warm-up
        warm_up = int(sampling_arguments.get('warm_up', 0))
        if warm_up > 0:
            print('Discarding first ' + str(warm_up) + ' samples as warm-up')
            chain = chain[warm_up:]

    else:

        chain = [x0]

    # Create distribution object and return
    dist = []
    for sample in chain:
        d = {}
        for i, key in enumerate(keys):
            d[key] = sample[i]
        dist.append(d)
    return DiscreteParameterDistribution(dist)
Example 24
    def run(times, ax, bins):
        values = m_true.simulate(true_params, times)
        data = values + np.random.normal(0, 0.1, values.shape)
        problem = pints.SingleOutputProblem(m_simple, times, data)

        # Run MCMC for IID noise, wrong model
        prior = pints.UniformLogPrior([0, 0], [1e6, 1e6])
        likelihood = pints.GaussianLogLikelihood(problem)
        posterior = pints.LogPosterior(likelihood, prior)
        x0 = [[0.2, 1.0]] * 3
        mcmc = pints.MCMCController(posterior, 3, x0)
        mcmc.set_max_iterations(num_mcmc_iter)
        chains_iid = mcmc.run()
        freq_iid = chains_iid[0, :, 0][num_mcmc_iter // 2:]

        # Run MCMC for AR(1) noise, wrong model
        prior = pints.UniformLogPrior([0, 0, 0], [1e6, 1, 1e6])
        likelihood = pints.AR1LogLikelihood(problem)
        posterior = pints.LogPosterior(likelihood, prior)
        x0 = [[0.2, 0.01, 1.0]] * 3
        mcmc = pints.MCMCController(posterior, 3, x0)
        mcmc.set_max_iterations(num_mcmc_iter)
        chains_ar1 = mcmc.run()
        freq_ar1 = chains_ar1[0, :, 0][num_mcmc_iter // 2:]

        # Run MCMC for IID noise, correct model
        problem = pints.SingleOutputProblem(m_true, times, data)
        prior = pints.UniformLogPrior([0, 0, 0], [1e6, 1e6, 1e6])
        likelihood = pints.GaussianLogLikelihood(problem)
        posterior = pints.LogPosterior(likelihood, prior)
        x0 = [[0.2, 0.8, 1.0]] * 3
        mcmc = pints.MCMCController(posterior, 3, x0)
        mcmc.set_max_iterations(num_mcmc_iter)
        chains_true = mcmc.run()
        freq_true = chains_true[0, :, 0][num_mcmc_iter // 2:]

        # Plot histograms of the posteriors
        ax.hist(freq_true,
                alpha=0.5,
                label='Correct',
                hatch='//',
                density=True,
                bins=bins,
                histtype='stepfilled',
                linewidth=2,
                color='grey',
                zorder=-20)

        ax.hist(freq_ar1,
                alpha=1.0,
                label='AR1',
                density=True,
                bins=bins,
                histtype='stepfilled',
                linewidth=2,
                edgecolor='k',
                facecolor='none')

        ax.hist(freq_iid,
                alpha=0.5,
                label='IID',
                density=True,
                bins=bins,
                histtype='stepfilled',
                linewidth=2,
                color=plt.rcParams['axes.prop_cycle'].by_key()['color'][0],
                zorder=-10)

        ax.axvline(0.2, ls='--', color='k')
        ax.set_xlabel(r'$\theta$')
        ax.legend()
Example 25
def run_model(model,
              cell,
              protocol,
              time,
              voltage,
              current,
              plot='unfold',
              label=None,
              axes=None):

    # Select protocol file
    protocol_file = os.path.join(root, protocol + '.mmt')
    print(protocol_file)
    myokit_protocol = myokit.load_protocol(protocol_file)

    # Estimate noise from start of data
    sigma_noise = np.std(current[:2000], ddof=1)

    # fetch cmaes parameters
    obtained_parameters = model.fetch_parameters()

    # Cell-specific parameters
    temperature = model.temperature(cell)
    lower_conductance = model.conductance_limit(cell)

    # Apply capacitance filter based on protocol
    print('Applying capacitance filtering')
    time, voltage, current = model.capacitance(myokit_protocol, 0.1, time,
                                               voltage, current)

    forward_model = model.ForwardModel(myokit_protocol,
                                       temperature,
                                       sine_wave=False)
    problem = pints.SingleOutputProblem(forward_model, time, current)
    log_likelihood = pints.KnownNoiseLogLikelihood(problem, sigma_noise)
    log_prior = model.LogPrior(lower_conductance)
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Show obtained parameters and score
    obtained_log_posterior = log_posterior(obtained_parameters)
    print('Kylie sine-wave parameters:')
    for x in obtained_parameters:
        print(pints.strfloat(x))
    print('Final log-posterior:')
    print(pints.strfloat(obtained_log_posterior))

    # Simulate
    simulated = forward_model.simulate(obtained_parameters, time)

    if plot == 'unfold':
        axes[0].plot(time, voltage, color='red')
        axes[1].plot(time, current, alpha=0.3, color='red')

        # Map the label index to a model name and plot colour
        model_styles = {
            0: ('circularCOIIC', 'blue'),
            1: ('linearCOI', 'magenta'),
            2: ('linearCCOI', 'seagreen'),
            3: ('linearCCCOI', 'seagreen'),
        }
        if label in model_styles:
            model_name, colour = model_styles[label]
            axes[1].plot(time,
                         simulated,
                         alpha=1,
                         color=colour,
                         label=model_name)
    else:
        IkrModel.fold_plot(protocol, time, voltage, [current, simulated])
Example 26
def run_pints(problem,
              likelihood,
              x0,
              num_mcmc_samples,
              num_chains=3,
              log_prior=None,
              likelihood_args=None,
              enforce_convergence=False,
              mcmc_method=None):
    """Perform infernce with Pints using a specified model and likelihood.

    Parameters
    ----------
    problem : pints.Problem
        Pints problem holding the times and data
    likelihood : pints.ProblemLogLikelihood
        Pints likelihood for the data
    x0 : array_like of float
        Starting point of model parameters.
    num_mcmc_samples : int
        Total number of MCMC samples.
    num_chains : int
        Number of separate MCMC chains.
    log_prior : pints.LogPrior
        Prior distribution on all parameters in the likelihood. If None, a
        uniform prior from 0 to 1e6 is chosen for all parameters.
    likelihood_args : list
        Any other arguments besides the pints problem which must be provided
        when instantiating the likelihood.
    enforce_convergence : bool
        Whether to raise an error if the chains have not converged. After
        finishing the MCMC chain, the Rhat value is calculated, and any value
        of Rhat greater than 1.05 is assumed to indicate lack of convergence.
    mcmc_method : str
        Name of any MCMC sampler implemented in Pints.

    Returns
    -------
    np.ndarray
        MCMC samples of posterior. One chain is provided with the first half
        discarded as burn-in.
    """
    if likelihood_args is None:
        log_likelihood = likelihood(problem)
    else:
        log_likelihood = likelihood(problem, *likelihood_args)

    # Get the number of parameters to infer = model params plus noise params
    num_params = len(x0)

    if log_prior is None:
        log_prior = pints.UniformLogPrior([0] * num_params, [1e6] * num_params)

    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # One perturbed starting point per chain (for num_chains=3 this gives
    # factors 0.9, 1.0 and 1.1, matching the original fixed list)
    x0 = [np.array(x0) * (1 + 0.1 * f)
          for f in np.linspace(-1, 1, num_chains)]

    # Run MCMC
    if mcmc_method is None:
        mcmc = pints.MCMCController(log_posterior, num_chains, x0)
    else:
        mcmc = pints.MCMCController(log_posterior,
                                    num_chains,
                                    x0,
                                    method=mcmc_method)
    mcmc.set_max_iterations(num_mcmc_samples)
    mcmc.set_log_to_screen(True)
    chains = mcmc.run()

    # Check convergence
    rs = pints.rhat(chains[:, num_mcmc_samples // 2:, :])
    if max(rs) > 1.05:
        message = 'MCMC chains failed to converge, R={}'.format(str(rs))
        if enforce_convergence:
            pints.plot.trace(chains)
            plt.show()
            raise RuntimeError(message)
        else:
            warnings.warn(message)

    # Get one chain, discard first half burn in
    chain = chains[0][num_mcmc_samples // 2:]

    return chain
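
A hypothetical call, for orientation (the problem is built as in the earlier examples; the values here are illustrative):

# chain = run_pints(problem, pints.GaussianLogLikelihood,
#                   x0=[0.015, 500, 10], num_mcmc_samples=4000,
#                   enforce_convergence=True)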
Example 27
)

LogPrior = {
    'model_A': priors.ModelALogPrior,
    'model_B': priors.ModelBLogPrior,
}

# Update protocol
model.set_fixed_form_voltage_protocol(protocol, protocol_times)

# Create Pints stuffs
problem = pints.SingleOutputProblem(model, times, data)
loglikelihood = pints.GaussianKnownSigmaLogLikelihood(problem, noise_sigma)
logprior = LogPrior[info_id](transform_to_model_param,
                             transform_from_model_param)
logposterior = pints.LogPosterior(loglikelihood, logprior)

# Check logposterior is working fine
priorparams = np.copy(info.base_param)
transform_priorparams = transform_from_model_param(priorparams)
print('Score at prior parameters: ', logposterior(transform_priorparams))
for _ in range(10):
    assert logposterior(transform_priorparams) == \
        logposterior(transform_priorparams)
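# (Repeated evaluation must be deterministic; a stochastic or stateful
# posterior would make the MCMC results irreproducible.)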

# Run
try:
    N = int(sys.argv[2])
except IndexError:
    N = 3
Example 28
def MCMC_routine(starting_point,
                 max_iter=4000,
                 adapt_start=None,
                 log_prior=None,
                 mmt_model_filename=None,
                 chain_filename=None,
                 pdf_filename=None,
                 log_likelihood='GaussianLogLikelihood',
                 method='HaarioBardenetACMC',
                 sigma0=None,
                 parallel=False):
    """
    Runs an MCMC routine for the selected model

    :param starting_point:
        List of numpy.array. List of starting values for the MCMC for the
        optimisation parameters. Each array must have the same length as
        data_exp.fitting_parms_annot + 1 (for noise). len(starting_point)
        defines the number of MCMC chains.

    :param max_iter:
        int. Maximal iterations for the whole MCMC. Should be higher than
        adapt_start.

    :param adapt_start:
        int. Iterations before starting the adapting phase of the MCMC.

    :param log_prior: pints.log_priors
        Type of prior. If not specified, pints.UniformLogPrior

    :param mmt_model_filename: str
        location of the mmt model to run if different from the one loaded
        previously. It will replace the sabs_pkpd.constants.s
        myokit.Simulation() already present.

    :param chain_filename:
        str. Location of the CSV file where the chains will be written. If not
        provided, the chains are not saved in CSV

    :param pdf_filename:
        str. Location of the CSV file where the log_likelihood will be written.
        If not provided, it will not be saved in CSV.

    :param log_likelihood: pints.LogLikelihood
        Type of log likelihood. If not specified,
        pints.GaussianLogLikelihood.

    :param method: pints.method
        Method of optimisation. If not specified, pints.HaarioBardenetACMC.

    :param sigma0:
        sigma0 for the desired MCMC algorithm. If not provided, sigma0 will be
        computed automatically by the algorithm. See
        https://pints.readthedocs.io/en/latest/mcmc_samplers/running.html for
        documentation.

    :param parallel:
        Boolean. Whether to parallelise the MCMC over the available CPUs.
        False by default.

    :return: chains
        The chain for the MCMC routine.

    """
    sabs_pkpd.constants.n = len(starting_point[0]) - 1

    if len(starting_point[0]) != \
            len(sabs_pkpd.constants.data_exp.
                fitting_instructions.fitted_params_annot) + 1:
        raise ValueError('Starting point and Parameters annotations + Noise '
                         'must have the same length')

    if mmt_model_filename is not None:
        sabs_pkpd.constants.s = \
            sabs_pkpd.load_model.load_simulation_from_mmt(mmt_model_filename)

    # Then create an instance of our new model class
    model = sabs_pkpd.pints_problem_def.MyModel()

    # Default log_prior: uniform between 0.5 * lowest and 2 * highest
    # starting values if not specified
    if log_prior is None:
        mini = np.min(starting_point, axis=0)
        maxi = np.max(starting_point, axis=0)
        log_prior = pints.UniformLogPrior((0.5 * mini).tolist(),
                                          (2 * maxi).tolist())

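    # Stack all experimental traces into one vector; the problem below uses
    # a normalised time axis of matching length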
    fit_values = np.concatenate(sabs_pkpd.constants.data_exp.values)

    problem = pints.SingleOutputProblem(
        model,
        times=np.linspace(0, 1, len(fit_values)),
        values=fit_values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = getattr(pints, log_likelihood)(problem)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    method = getattr(pints, method)

    # Create mcmc routine
    mcmc = pints.MCMCController(log_posterior,
                                len(starting_point),
                                starting_point,
                                method=method,
                                sigma0=sigma0)

    # Allow parallelisation of computation when provided by user
    mcmc.set_parallel(parallel)

    # Add stopping criterion
    mcmc.set_max_iterations(max_iter)
    # Start adapting after adapt_start iterations
    if adapt_start is not None:
        if adapt_start > max_iter:
            raise ValueError('The maximum number of iterations should be '
                             'higher than the adapting phase length. Got ' +
                             str(max_iter) + ' maximum iterations, ' +
                             str(adapt_start) + ' iterations in adapting '
                             'phase')
        mcmc.set_initial_phase_iterations(adapt_start)

    if chain_filename is not None:
        mcmc.set_chain_filename(chain_filename)
    if pdf_filename is not None:
        mcmc.set_log_pdf_filename(pdf_filename)

    # Run!
    print('Running...')
    chains = mcmc.run()
    print('Done!')

    return chains
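A minimal usage sketch for this routine, assuming sabs_pkpd.constants.data_exp
has already been loaded through the usual sabs_pkpd helpers; the starting
values, chain count, and filename below are purely illustrative:

# Hypothetical call (all values are illustrative)
import numpy as np

start = np.array([0.1, 1.0, 2.0, 0.05])  # three model parameters + noise
starting_point = [start, 1.1 * start]    # two MCMC chains

chains = MCMC_routine(starting_point,
                      max_iter=4000,
                      adapt_start=1000,
                      chain_filename='chains.csv',
                      parallel=True)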
Example #29
    def load_problem(problem_dict):
        """
        Returns a dictionary containing an instantiated PINTS problem
        """
        problem_instance = copy.deepcopy(problem_dict)

        model = problem_dict["model"]()
        parameters = problem_dict['parameters']

        # simulate problem
        if 'simulation_noise_percent' in problem_dict:
            values, times, noise_stds = emutils.simulate(
                model,
                parameters=problem_dict['parameters'],
                times=problem_dict['times'],
                noise_range_percent=problem_dict['simulation_noise_percent'],
            )
        else:
            values, times = emutils.simulate(
                model,
                parameters=problem_dict['parameters'],
                times=problem_dict['times'],
                noise_range_percent=None,
            )
            noise_stds = None

        # create a single- or multi-output problem depending on the model
        if problem_dict['n_outputs'] == 1:
            problem = pints.SingleOutputProblem(model, times, values)
        else:
            problem = pints.MultiOutputProblem(model, times, values)

        # create a likelihood with known noise
        # (GaussianKnownSigmaLogLikelihood is the current name of the
        # deprecated KnownNoiseLogLikelihood); the commented line would
        # estimate the noise instead
        # log_likelihood = pints.GaussianLogLikelihood(problem)
        log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem,
                                                               noise_stds)

        # should either provide the percentage range for parameters
        # or the parameter range itself
        if 'param_range_percent' in problem_dict:
            param_range_percent = problem_dict['param_range_percent']
            params_lower = parameters - param_range_percent * np.abs(parameters)
            params_upper = parameters + param_range_percent * np.abs(parameters)
        else:
            params_lower, params_upper = problem_dict['param_range']

        # noise bounds could be added here, e.g.:
        # noise_lower, noise_upper = problem_dict['noise_bounds']

        bounds = pints.RectangularBoundaries(
            lower=params_lower,
            upper=params_upper,
        )

        log_prior = problem_dict['prior'](bounds)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)

        # extend the dictionary with created variables
        problem_instance.update({
            'model': model,
            'values': values,
            'times': times,
            'noise_stds': noise_stds,
            'problem': problem,
            'bounds': bounds,
            'log_likelihood': log_likelihood,
            'log_prior': log_prior,
            'log_posterior': log_posterior
        })

        return problem_instance
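A minimal sketch of a problem_dict this loader could consume, assuming
load_problem is reachable as a plain function and that emutils.simulate
returns (values, times, noise_stds) as used above; every key and value below
is illustrative:

# Hypothetical problem definition (keys follow the usage in load_problem)
import numpy as np
import pints
import pints.toy

problem_dict = {
    'model': pints.toy.LogisticModel,
    'parameters': np.array([0.015, 500.0]),
    'times': np.linspace(0, 1000, 100),
    'simulation_noise_percent': 0.05,
    'n_outputs': 1,
    'param_range_percent': 0.5,
    'prior': pints.UniformLogPrior,  # called with the RectangularBoundaries
}

problem_instance = load_problem(problem_dict)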
Example #30
def Algorithm1WithConvergence(L,
                              N,
                              model,
                              log_prior,
                              log_prior_used,
                              times,
                              noise,
                              noise_used,
                              MCMCmethod,
                              param=0):
    time_start = time.time()
    sum_p = 0
    sum_p_theta = 0
    sum_p_y = 0
    c = 1  # number of repetitions to average over
    for i in range(c):
        print(i)
        res1 = []
        res2 = []
        thetatildeArray = []
        ytildeArray = []

        d = 0
        for n in range(N):
            print(n)
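            # Draw parameters from the prior and simulate noisy synthetic data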
            thetatilde = log_prior.sample(n=1)[0]
            org_values = model.simulate(thetatilde, times)
            ytilde_n = org_values + np.random.normal(0, noise,
                                                     org_values.shape)
            problem = pints.SingleOutputProblem(model, times, ytilde_n)
            log_likelihood_used = pints.GaussianKnownSigmaLogLikelihood(
                problem, [noise_used])
            log_posterior = pints.LogPosterior(log_likelihood_used,
                                               log_prior_used)
            # Start the chains from perturbed copies of thetatilde

            xs = [thetatilde, thetatilde * 1.01, thetatilde * 0.99]
            isinf = False
            for x in xs:
                if math.isinf(log_posterior.evaluateS1(x)[0]):
                    isinf = True
                    d += 1
                    break
            if isinf:
                print('isinf:', isinf)
                continue
            # Run a Markov chain from thetatilde (the fixed sample_size
            # below is used rather than L)
            mcmc = pints.MCMCController(log_posterior,
                                        len(xs),
                                        xs,
                                        method=MCMCmethod)
            # Add stopping criterion
            sample_size = 3000

            mcmc.set_max_iterations(sample_size)

            # Start adapting after sample_size//4 iterations
            mcmc.set_initial_phase_iterations(sample_size // 4)

            # Disable logging mode
            mcmc.set_log_to_screen(False)
            chains = mcmc.run()
            # Grow the post-warm-up window until the Gelman-Rubin statistic
            # indicates convergence
            s = sample_size // 4 + 1
            b = False
            while s < sample_size:
                chains_cut = chains[:, sample_size // 4:s + 1]
                # For HMC, use chains_cut = chains[:, 0:s + 1] instead
                rhat = pints.rhat(chains_cut)
                s += 1
                if rhat[0] < 1.05:
                    print('converge')
                    b = True
                    break
            if not b:
                d += 1
                continue

            print(s)
            thetatilde_n = chains[0][(s + sample_size) // 2 - 1]
            print(thetatilde)
            thetatildeArray.append(thetatilde_n[param])
            ytildeArray.append(ytilde_n[param])
            res1.append((thetatilde_n[param], ytilde_n[param]))

        thetaArray = np.empty(N - d, dtype=float)
        yArray = np.empty(N - d, dtype=float)

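        # Reference sample: direct draws from the prior, with data simulated
        # straight from the model (no MCMC needed)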
        for n in range(N - d):
            theta_n = log_prior.sample(n=1)[0]
            org_values = model.simulate(theta_n, times)
            y_n = org_values + np.random.normal(0, noise, org_values.shape)
            thetaArray[n] = theta_n[param]
            yArray[n] = y_n[param]
            res2.append((theta_n[param], y_n[param]))

        p = ks2d2s(thetatildeArray, ytildeArray, thetaArray, yArray)
        statistic_theta, p_theta = ks_2samp(thetatildeArray, thetaArray)
        statistic_y, p_y = ks_2samp(ytildeArray, yArray)
        sum_p += p
        sum_p_theta += p_theta
        sum_p_y += p_y
    time_end = time.time()
    duration = time_end - time_start

    average_p = sum_p / c
    average_p_theta = sum_p_theta / c
    average_p_y = sum_p_y / c
    print('average_p:', average_p)
    print('average_p_theta:', average_p_theta)
    print('average_p_y:', average_p_y)
    return (average_p, average_p_theta, average_p_y, duration,
            thetatildeArray, thetaArray, ytildeArray, yArray)
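A minimal sketch of how this check might be invoked on a toy problem,
assuming ks_2samp comes from scipy.stats and ks2d2s from a two-dimensional
KS test implementation (for example the ndtest package); all values below
are illustrative:

# Hypothetical invocation (values are illustrative)
import numpy as np
import pints
import pints.toy

model = pints.toy.LogisticModel()
times = np.linspace(0, 1000, 100)
log_prior = pints.UniformLogPrior([0.01, 400], [0.02, 600])

average_p, average_p_theta, average_p_y, duration, *samples = \
    Algorithm1WithConvergence(L=3000, N=50, model=model,
                              log_prior=log_prior,
                              log_prior_used=log_prior,
                              times=times, noise=10, noise_used=10,
                              MCMCmethod=pints.HaarioBardenetACMC)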