Example #1
def figure2():
    """Make a figure for MCMC inference
    """
    num_mcmc_iters = 10000

    def stimulus(t):
        return (1 * (t < 50)) + (-100 * ((t >= 50) & (t < 75))) + (1 * (t >= 75))

    # Generate data
    y0 = np.array([0.0, 0.0])
    m = diffeqinf.DampedOscillator(stimulus, y0, 'RK45')
    m.set_tolerance(1e-8)
    true_params = [1.0, 0.2, 1.0]
    times = np.linspace(0, 100, 500)
    y = m.simulate(true_params, times)
    y += np.random.normal(0, 0.01, len(times))

    # Run inference with correct model
    problem = pints.SingleOutputProblem(m, times, y)
    likelihood = pints.GaussianLogLikelihood(problem)
    prior = pints.UniformLogPrior([0] * 4, [1e6] * 4)
    posterior = pints.LogPosterior(likelihood, prior)

    x0 = [true_params + [0.01]] * 3

    mcmc = pints.MCMCController(posterior, 3, x0)
    mcmc.set_max_iterations(num_mcmc_iters)
    chains_correct = mcmc.run()

    # Run inference with incorrect model
    m.set_tolerance(1e-2)
    problem = pints.SingleOutputProblem(m, times, y)
    likelihood = pints.GaussianLogLikelihood(problem)
    prior = pints.UniformLogPrior([0] * 4, [1e6] * 4)
    posterior = pints.LogPosterior(likelihood, prior)

    mcmc = pints.MCMCController(posterior, 3, x0)
    mcmc.set_max_iterations(num_mcmc_iters)
    chains_incorrect = mcmc.run()

    # Plot MCMC chains
    pints.plot.trace(chains_incorrect)
    plt.show()

    # Plot posteriors
    diffeqinf.plot.plot_grouped_parameter_posteriors(
        [chains_correct[0, num_mcmc_iters // 2:, :]],
        [chains_incorrect[0, num_mcmc_iters // 2:, :]],
        [chains_incorrect[1, num_mcmc_iters // 2:, :]],
        [chains_incorrect[2, num_mcmc_iters // 2:, :]],
        true_model_parameters=true_params,
        method_names=[
            'Correct', 'PoorTol_Chain1', 'PoorTol_Chain2', 'PoorTol_Chain3'
        ],
        parameter_names=['k', 'c', 'm'],
        fname=None)
    plt.show()
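
These snippets are excerpts, so their imports are implied. A minimal header
that would make this first example self-contained might look like the sketch
below (diffeqinf is the authors' project-specific package providing
DampedOscillator and the plotting helpers; the rest are standard):

import numpy as np
import matplotlib.pyplot as plt
import pints
import pints.plot
import diffeqinf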
Example #2
    def setUpClass(cls):
        # Create test log-pdfs
        model = pints.toy.ConstantModel(1)

        problem = pints.SingleOutputProblem(model=model,
                                            times=[1, 2, 3, 4],
                                            values=[1, 2, 3, 4])
        cls.log_pdf_1 = pints.GaussianLogLikelihood(problem)

        problem = pints.SingleOutputProblem(model=model,
                                            times=[1, 2, 3, 4],
                                            values=[1, 1, 1, 1])
        cls.log_pdf_2 = pints.GaussianLogLikelihood(problem)
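
A quick standalone sketch of the behaviour this fixture relies on:
pints.GaussianLogLikelihood appends one noise parameter (sigma) per problem
output, so each of these one-parameter, single-output problems yields a
two-parameter log-pdf.

import pints
import pints.toy

model = pints.toy.ConstantModel(1)
problem = pints.SingleOutputProblem(model, [1, 2, 3, 4], [1, 2, 3, 4])
log_pdf = pints.GaussianLogLikelihood(problem)
assert log_pdf.n_parameters() == model.n_parameters() + 1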
Example #3
    def test_evaluateS1_gaussian_log_likelihood_agrees_multi(self):
        # Create an object with links to the model and time series
        problem = pints.MultiOutputProblem(self.model_multi, self.times,
                                           self.data_multi)

        # Create CombinedGaussianLL and GaussianLL
        log_likelihood = pkpd.ConstantAndMultiplicativeGaussianLogLikelihood(
            problem)
        gauss_log_likelihood = pints.GaussianLogLikelihood(problem)

        # Check that CombinedGaussianLL agrees with GaussianLogLikelihood when
        # sigma_rel = 0 and sigma_base = sigma
        test_parameters = [
            2.0, 2.0, 2.0, 0.5, 0.5, 0.5, 1.1, 1.1, 1.1, 0.0, 0.0, 0.0
        ]
        gauss_test_parameters = [2.0, 2.0, 2.0, 0.5, 0.5, 0.5]
        score, deriv = log_likelihood.evaluateS1(test_parameters)
        gauss_score, gauss_deriv = gauss_log_likelihood.evaluateS1(
            gauss_test_parameters)

        # Check that scores are the same
        self.assertAlmostEqual(score, gauss_score)

        # Check that partials for model params and sigma_base agree
        self.assertAlmostEqual(deriv[0], gauss_deriv[0])
        self.assertAlmostEqual(deriv[1], gauss_deriv[1])
        self.assertAlmostEqual(deriv[2], gauss_deriv[2])
        self.assertAlmostEqual(deriv[3], gauss_deriv[3])
        self.assertAlmostEqual(deriv[4], gauss_deriv[4])
        self.assertAlmostEqual(deriv[5], gauss_deriv[5])
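
For reference, evaluateS1 returns the same score as a plain call, plus the
gradient; a minimal sketch of that contract on a toy model (the pkpd fixtures
above are not needed for this):

import numpy as np
import pints
import pints.toy

model = pints.toy.ConstantModel(1)
problem = pints.SingleOutputProblem(model, [1, 2, 3], [1.1, 0.9, 1.0])
log_likelihood = pints.GaussianLogLikelihood(problem)
score, grad = log_likelihood.evaluateS1([1.0, 0.5])
assert np.isclose(score, log_likelihood([1.0, 0.5]))
assert len(grad) == log_likelihood.n_parameters()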
Example #4
    def setUpClass(cls):
        """ Prepare a problem for testing. """

        # Random seed
        np.random.seed(1)

        # Create toy model
        cls.model = toy.LogisticModel()
        cls.real_parameters = [0.015, 500]
        cls.times = np.linspace(0, 1000, 1000)
        cls.values = cls.model.simulate(cls.real_parameters, cls.times)

        # Add noise
        cls.noise = 10
        cls.values += np.random.normal(0, cls.noise, cls.values.shape)
        cls.real_parameters.append(cls.noise)
        cls.real_parameters = np.array(cls.real_parameters)

        # Create an object with links to the model and time series
        cls.problem = pints.SingleOutputProblem(cls.model, cls.times,
                                                cls.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        cls.log_prior = pints.UniformLogPrior([0.01, 400, cls.noise * 0.1],
                                              [0.02, 600, cls.noise * 100])

        # Create a log likelihood
        cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        cls.log_posterior = pints.LogPosterior(cls.log_likelihood,
                                               cls.log_prior)
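
A minimal sketch of the LogPosterior behaviour such fixtures depend on:
outside the prior's support the posterior evaluates to -inf, so the
likelihood is never called there (toy.GaussianLogPDF stands in for a
likelihood here).

import numpy as np
import pints
import pints.toy

log_prior = pints.UniformLogPrior([0.01, 400, 1], [0.02, 600, 1000])
log_like = pints.toy.GaussianLogPDF([0.015, 500, 10],
                                    np.diag([1e-4, 100.0, 4.0]))
log_posterior = pints.LogPosterior(log_like, log_prior)
assert np.isneginf(log_posterior([0.001, 500, 10]))  # outside prior support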
Example #5
    def optimise(self, data, sigma_fac=0.001, method="minimisation"):
        cmaes_problem = pints.MultiOutputProblem(self, self.frequency_range,
                                                 data)
        if method == "likelihood":
            score = pints.GaussianLogLikelihood(cmaes_problem)
            sigma = sigma_fac * np.sum(data) / (2 * len(data))
            lower_bound = [self.param_bounds[x][0]
                           for x in self.params] + [0.1 * sigma] * 2
            upper_bound = [self.param_bounds[x][1]
                           for x in self.params] + [10 * sigma] * 2
            CMAES_boundaries = pints.RectangularBoundaries(
                lower_bound, upper_bound)
            random_init = abs(np.random.rand(self.n_parameters()))
            x0 = self.change_norm_group(random_init, "un_norm",
                                        "list") + [sigma] * 2
            cmaes_fitting = pints.OptimisationController(
                score,
                x0,
                sigma0=None,
                boundaries=CMAES_boundaries,
                method=pints.CMAES)
        elif method == "minimisation":
            score = pints.SumOfSquaresError(cmaes_problem)
            lower_bound = [self.param_bounds[x][0] for x in self.params]
            upper_bound = [self.param_bounds[x][1] for x in self.params]
            CMAES_boundaries = pints.RectangularBoundaries(
                lower_bound, upper_bound)
            random_init = abs(np.random.rand(self.n_parameters()))
            x0 = self.change_norm_group(random_init, "un_norm", "list")
            cmaes_fitting = pints.OptimisationController(
                score,
                x0,
                sigma0=None,
                boundaries=CMAES_boundaries,
                method=pints.CMAES)
        cmaes_fitting.set_max_unchanged_iterations(iterations=200,
                                                   threshold=1e-7)
        #cmaes_fitting.set_log_to_screen(False)
        cmaes_fitting.set_parallel(True)

        found_parameters, found_value = cmaes_fitting.run()

        if method == "likelihood":
            sim_params = found_parameters[:-2]
            sim_data = self.simulate(sim_params, self.frequency_range)
        else:
            found_value = -found_value
            sim_params = found_parameters
            sim_data = self.simulate(sim_params, self.frequency_range)
            """

            log_score = pints.GaussianLogLikelihood(cmaes_problem)
            stds=self.get_std(data, sim_data)
            sigma=sigma_fac*np.sum(data)/2*len(data)
            score_params=list(found_parameters)+[sigma]*2
            found_value=log_score(score_params)
            print(stds, found_value, "stds")"""

        # TODO: do it dimensionally; normalise default to bound
        return (found_parameters, found_value,
                cmaes_fitting._optimiser._es.sm.C, sim_data)
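
Note: the two extra [sigma] * 2 entries follow pints.GaussianLogLikelihood's
convention of one noise parameter per output, which suggests the
MultiOutputProblem here has two outputs.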
Example #6
    def setUpClass(cls):
        """ Prepare problem for tests. """
        # Load a forward model
        model = pints.toy.LogisticModel()

        # Create some toy data
        real_parameters = [0.015, 500]
        times = np.linspace(0, 1000, 1000)
        org_values = model.simulate(real_parameters, times)

        # Add noise
        noise = 10
        values = org_values + np.random.normal(0, noise, org_values.shape)
        real_parameters = np.array(real_parameters + [noise])

        # Create an object with links to the model and time series
        problem = pints.SingleOutputProblem(model, times, values)

        # Create an error measure
        cls.score = pints.SumOfSquaresError(problem)
        cls.boundaries = pints.RectangularBoundaries([0, 400], [0.05, 600])

        # Create a log-likelihood function (adds an extra parameter!)
        log_likelihood = pints.GaussianLogLikelihood(problem)

        # Create a uniform prior over both the parameters and the new noise
        cls.log_prior = pints.UniformLogPrior([0.01, 400, noise * 0.1],
                                              [0.02, 600, noise * 100])

        # Create a posterior log-likelihood (log(likelihood * prior))
        cls.log_posterior = pints.LogPosterior(log_likelihood, cls.log_prior)
Example #7
    def update_model(self, fixed_parameters_list):
        """
        Update the model with fixed parameters.

        Parameters
        ----------
        fixed_parameters_list
            List of fixed parameter values.
        """

        # Create a dictionary mapping fixed parameter names to their values
        name_value_dict = {
            name: value
            for name, value in zip(self.model._parameter_names,
                                   fixed_parameters_list)
        }
        self.model.fix_parameters(name_value_dict)

        # Setup the problem with pints,
        # including likelihood, prior and posterior
        print(self.model.n_parameters())
        problem = pints.SingleOutputProblem(
            model=self.model,
            times=self.data['Time'].to_numpy(),
            values=self.data['Incidence Number'].to_numpy())
        log_likelihood = pints.GaussianLogLikelihood(problem)
        priors = self.set_prior(name_value_dict)
        self.log_prior = pints.ComposedLogPrior(*priors)
        self.log_posterior = pints.LogPosterior(log_likelihood, self.log_prior)

        # Run transformation
        self.transformations = pints.LogTransformation(
            self.log_posterior.n_parameters())
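
A small sketch of what the transformation step does: sampling proceeds in
log-space, which keeps the parameters positive on the model scale (method
names as in the pints transformation API).

import numpy as np
import pints

transformation = pints.LogTransformation(3)  # one entry per parameter
x_model = np.array([1.0, 2.0, 3.0])
x_search = transformation.to_search(x_model)  # log-space coordinates
assert np.allclose(transformation.to_model(x_search), x_model)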
Example #8
    def test_gaussian_noise_multi(self):
        # Multi-output test for known/unknown Gaussian noise log-likelihood
        # methods.

        model = pints.toy.FitzhughNagumoModel()
        parameters = [0.5, 0.5, 0.5]
        sigma = 0.1
        times = np.linspace(0, 100, 100)
        values = model.simulate(parameters, times)
        values += np.random.normal(0, sigma, values.shape)
        problem = pints.MultiOutputProblem(model, times, values)

        # Test if known/unknown give same result
        l1 = pints.GaussianKnownSigmaLogLikelihood(problem, sigma)
        l2 = pints.GaussianKnownSigmaLogLikelihood(problem, [sigma, sigma])
        l3 = pints.GaussianLogLikelihood(problem)
        # Compare pairwise (assertAlmostEqual's third positional argument
        # is `places`, not a third value to compare)
        self.assertAlmostEqual(l1(parameters), l2(parameters))
        self.assertAlmostEqual(l1(parameters),
                               l3(parameters + [sigma, sigma]))

        # Test invalid constructors
        self.assertRaises(ValueError, pints.GaussianKnownSigmaLogLikelihood,
                          problem, 0)
        self.assertRaises(ValueError, pints.GaussianKnownSigmaLogLikelihood,
                          problem, -1)
        self.assertRaises(ValueError, pints.GaussianKnownSigmaLogLikelihood,
                          problem, [1])
        self.assertRaises(ValueError, pints.GaussianKnownSigmaLogLikelihood,
                          problem, [1, 2, 3, 4])
        self.assertRaises(ValueError, pints.GaussianKnownSigmaLogLikelihood,
                          problem, [1, 2, -3])
Example #9
    def test_sum_of_independent_log_pdfs(self):

        # Test single output
        model = pints.toy.LogisticModel()
        x = [0.015, 500]
        sigma = 0.1
        times = np.linspace(0, 1000, 100)
        values = model.simulate(x, times) + 0.1
        problem = pints.SingleOutputProblem(model, times, values)

        l1 = pints.GaussianKnownSigmaLogLikelihood(problem, sigma)
        l2 = pints.GaussianLogLikelihood(problem)
        ll = pints.SumOfIndependentLogPDFs([l1, l1, l1])
        self.assertEqual(l1.n_parameters(), ll.n_parameters())
        self.assertEqual(3 * l1(x), ll(x))

        # Test single output derivatives
        y, dy = ll.evaluateS1(x)
        self.assertEqual(y, ll(x))
        self.assertEqual(dy.shape, (2, ))
        y1, dy1 = l1.evaluateS1(x)
        self.assertTrue(np.all(3 * dy1 == dy))

        # Wrong number of arguments
        self.assertRaises(TypeError, pints.SumOfIndependentLogPDFs)
        self.assertRaises(ValueError, pints.SumOfIndependentLogPDFs, [l1])

        # Wrong types
        self.assertRaises(ValueError, pints.SumOfIndependentLogPDFs, [l1, 1])
        self.assertRaises(ValueError, pints.SumOfIndependentLogPDFs,
                          [problem, l1])

        # Mismatching dimensions
        self.assertRaises(ValueError, pints.SumOfIndependentLogPDFs, [l1, l2])

        # Test multi-output
        model = pints.toy.FitzhughNagumoModel()
        x = model.suggested_parameters()
        nt = 10
        nx = model.n_parameters()
        times = np.linspace(0, 10, nt)
        values = model.simulate(x, times) + 0.01
        problem = pints.MultiOutputProblem(model, times, values)
        sigma = 0.01
        l1 = pints.GaussianKnownSigmaLogLikelihood(problem, sigma)
        ll = pints.SumOfIndependentLogPDFs([l1, l1, l1])
        self.assertEqual(l1.n_parameters(), ll.n_parameters())
        self.assertEqual(3 * l1(x), ll(x))

        # Test multi-output derivatives
        y, dy = ll.evaluateS1(x)

        # Note: y and ll(x) differ a bit, because the solver acts slightly
        # different when evaluating with and without sensitivities!
        self.assertAlmostEqual(y, ll(x), places=3)

        self.assertEqual(dy.shape, (nx, ))
        y1, dy1 = l1.evaluateS1(x)
        self.assertTrue(np.all(3 * dy1 == dy))
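
The additivity being tested is easy to reproduce standalone; a minimal
sketch:

import numpy as np
import pints
import pints.toy

model = pints.toy.ConstantModel(1)
problem = pints.SingleOutputProblem(model, [1, 2, 3], [1.0, 1.1, 0.9])
l1 = pints.GaussianKnownSigmaLogLikelihood(problem, 0.1)
ll = pints.SumOfIndependentLogPDFs([l1, l1])
assert np.isclose(ll([1.0]), 2 * l1([1.0]))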
Example #10
def figure1():
    """Make an interactive figure for numerical error.
    """
    def stimulus(t):
        return (1 * (t < 50)) + (-100 * ((t >= 50) & (t < 75))) + (1 * (t >= 75))

    # Generate data
    y0 = np.array([0.0, 0.0])
    m = diffeqinf.DampedOscillator(stimulus, y0, 'RK45')
    m.set_tolerance(1e-8)
    true_params = [1.0, 0.2, 1.0]
    times = np.linspace(0, 100, 500)
    y = m.simulate(true_params, times)
    y += np.random.normal(0, 0.01, len(times))

    # Forward Euler method
    m = diffeqinf.DampedOscillator(stimulus, y0, diffeqinf.ForwardEuler)
    m.set_step_size(0.01)
    problem = pints.SingleOutputProblem(m, times, y)
    likelihood = pints.GaussianLogLikelihood(problem)

    step_sizes = [0.2, 0.1, 0.01]
    true_params = [1.0, 0.2, 1.0, 0.01]

    diffeqinf.plot.plot_likelihoods(problem,
                                    likelihood,
                                    true_params,
                                    step_sizes=step_sizes,
                                    param_names=['k', 'c', 'm'])

    # RK45 method
    m = diffeqinf.DampedOscillator(stimulus, y0, 'RK45')
    m.set_tolerance(1e-2)

    problem = pints.SingleOutputProblem(m, times, y)
    likelihood = pints.GaussianLogLikelihood(problem)

    tolerances = [0.01, 0.0001, 0.000001]

    diffeqinf.plot.plot_likelihoods(problem,
                                    likelihood,
                                    true_params,
                                    tolerances=tolerances,
                                    param_names=['k', 'c', 'm'])
Example #11
def run_figureS2(num_runs=3, output_dir='./'):
    """Run the Gaussian process on block noise data.

    This function runs the simulations and saves the results to pickle.
    """
    random.seed(12345)
    np.random.seed(12345)

    all_fits = []
    for run in range(num_runs):
        # Make a synthetic time series
        times, values, data = generate_time_series(model='logistic',
                                                   noise='blocks',
                                                   n_times=625)

        # Make Pints model and problem
        model = pints.toy.LogisticModel()
        problem = pints.SingleOutputProblem(model, times, data)

        # Initial conditions for model parameters
        model_starting_point = [0.08, 50]

        # Infer the nonstationary kernel fit
        # Run an optimization assuming IID noise
        log_prior = pints.UniformLogPrior([0] * 3, [1e6] * 3)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        opt = pints.OptimisationController(log_posterior,
                                           model_starting_point + [2])
        xbest, fbest = opt.run()

        # Run the GP fit, using the best fit for initialization
        gp_times = times[::25]
        kernel = flexnoise.kernels.GPLaplacianKernel
        gnp = flexnoise.GPNoiseProcess(problem, kernel, xbest[:2], gp_times)
        gnp.set_gp_hyperparameters(mu=0.0, alpha=1.0, beta_num_points=200)
        x = gnp.run_optimize(num_restarts=100, parallel=True, maxiter=150)
        all_fits.append(x)

    # Save all results to pickle
    kernel = kernel(None, gp_times)
    results = [all_fits, times, data, values, model, problem, kernel]

    fname = os.path.join(output_dir, 'figS2_data.pkl')
    with open(fname, 'wb') as f:
        pickle.dump(results, f)
Example #12
    def setUpClass(cls):
        """ Set up problem for tests. """

        # Create toy model
        cls.model = toy.LogisticModel()
        cls.real_parameters = [0.015, 500]
        cls.times = np.linspace(0, 1000, 1000)
        cls.values = cls.model.simulate(cls.real_parameters, cls.times)

        # Add noise
        cls.noise = 10
        cls.values += np.random.normal(0, cls.noise, cls.values.shape)
        cls.real_parameters.append(cls.noise)
        cls.real_parameters = np.array(cls.real_parameters)

        # Create an object with links to the model and time series
        cls.problem = pints.SingleOutputProblem(cls.model, cls.times,
                                                cls.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        cls.log_prior = pints.UniformLogPrior([0.01, 400, cls.noise * 0.1],
                                              [0.02, 600, cls.noise * 100])

        # Create a log likelihood
        cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        cls.log_posterior = pints.LogPosterior(cls.log_likelihood,
                                               cls.log_prior)

        # Run MCMC sampler
        xs = [
            cls.real_parameters * 1.1,
            cls.real_parameters * 0.9,
            cls.real_parameters * 1.15,
        ]

        mcmc = pints.MCMCController(cls.log_posterior,
                                    3,
                                    xs,
                                    method=pints.HaarioBardenetACMC)
        mcmc.set_max_iterations(200)
        mcmc.set_initial_phase_iterations(50)
        mcmc.set_log_to_screen(False)

        start = time.time()
        cls.chains = mcmc.run()
        end = time.time()
        cls.time = end - start
Example #13
    def test_call_gaussian_log_likelihood_agrees_single(self):
        # Create an object with links to the model and time series
        problem = pints.SingleOutputProblem(self.model_single, self.times,
                                            self.data_single)

        # Create CombinedGaussianLL and GaussianLL
        log_likelihood = pkpd.ConstantAndMultiplicativeGaussianLogLikelihood(
            problem)
        gauss_log_likelihood = pints.GaussianLogLikelihood(problem)

        # Check that CombinedGaussianLL agrees with GaussianLogLikelihood when
        # sigma_rel = 0 and sigma_base = sigma
        test_parameters = [2.0, 0.5, 1.1, 0.0]
        gauss_test_parameters = [2.0, 0.5]
        score = log_likelihood(test_parameters)
        gauss_score = gauss_log_likelihood(gauss_test_parameters)
        self.assertAlmostEqual(score, gauss_score)
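
The reduction being tested follows from the noise model
sigma_tot = sigma_base + sigma_rel * f^eta: with sigma_rel = 0 it collapses
to plain Gaussian noise with sigma = sigma_base. A sketch using the variant
of this likelihood that ships with pints, assuming the same parameter
ordering (theta, sigma_base, eta, sigma_rel) as the pkpd version above:

import numpy as np
import pints
import pints.toy

model = pints.toy.ConstantModel(1)
problem = pints.SingleOutputProblem(model, [1, 2, 3], [1.1, 0.9, 1.0])
combined = pints.ConstantAndMultiplicativeGaussianLogLikelihood(problem)
gauss = pints.GaussianLogLikelihood(problem)
assert np.isclose(combined([2.0, 0.5, 1.1, 0.0]), gauss([2.0, 0.5]))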
Example #14
    def create_pints_log_likelihood(self):
        problem, fitted_children = self.create_pints_problem()
        if self.form == self.Form.NORMAL:
            noise_param = self.parameters.get(index=1)
            if noise_param.child.form == noise_param.child.Form.FIXED:
                value = noise_param.value
                return pints.GaussianKnownSigmaLogLikelihood(
                    problem, value), fitted_children
            else:
                return pints.GaussianLogLikelihood(
                    problem), fitted_children + [noise_param.child]
        elif self.form == self.Form.LOGNORMAL:
            noise_param = self.parameters.get(index=1)
            return pints.LogNormalLogLikelihood(
                problem), fitted_children + [noise_param.child]

        raise RuntimeError('unknown log_likelihood form')
Example #15
    def test_bad_likelihood(self):
        # Create single output model
        model = pints.toy.ConstantModel(1)

        # Generate data
        times = np.array([1, 2, 3, 4])
        data = np.array([1, 2, 3, 4]) / 5.0

        # Create problem
        problem = pints.SingleOutputProblem(model, times, data)

        # Create "bad" likelihood
        log_likelihood = pints.GaussianLogLikelihood(problem)

        # Check that an error is thrown when we attempt to fix eta
        eta = 1
        self.assertRaisesRegex(
            ValueError, 'This likelihood wrapper is only defined for a ',
            pkpd.FixedEtaLogLikelihoodWrapper, log_likelihood, eta)
Example #16
def plot_likelihood(model, values, times):

    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable
    lower_bounds = model.non_dim([1e-3, 0.0, 0.4, 0.1, 1e-6, 8.0, 1e-4])
    upper_bounds = model.non_dim([10.0, 0.4, 0.6, 100.0, 100e-6, 10.0, 0.2])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    param_names = ['k0', 'E0', 'a', 'Ru', 'Cdl', 'freq', 'sigma']
    start_parameters = model.non_dim(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])

    scaling = (upper_bounds - lower_bounds)
    minx = start_parameters - scaling / 1000.0
    maxx = start_parameters + scaling / 1000.0

    fig = plt.figure()
    for i, start in enumerate(start_parameters):
        print(param_names[i])
        plt.clf()
        xgrid = np.linspace(minx[i], maxx[i], 100)
        ygrid = np.empty_like(xgrid)
        for j, x in enumerate(xgrid):
            params = np.copy(start_parameters)
            params[i] = x
            ygrid[j] = log_likelihood(params)
        plt.plot(xgrid, ygrid)
        plt.savefig('likelihood_' + param_names[i] + '.pdf')
Example #17
def inference2(model_raw, model_old, model, values, times):

    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model_old, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable

    e0_buffer = 0.1 * (model_raw.params['Ereverse'] - model_raw.params['Estart'])
    lower_bounds = np.array([
        0.0,
        model_raw.params['Estart'] + e0_buffer,
        0.0,
        0.0,
        0.4,
        0.9 * model_raw.params['omega'],
        1e-4,
    ])
    upper_bounds = np.array([
        100 * model_raw.params['k0'],
        model_raw.params['Ereverse'] - e0_buffer,
        10 * model_raw.params['Cdl'],
        10 * model_raw.params['Ru'],
        0.6,
        1.1 * model_raw.params['omega'],
        0.2,
    ])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)


    # Choose starting points for 3 mcmc chains
    param_names = ['k0', 'E0', 'Cdl', 'Ru', 'alpha', 'omega', 'sigma']
    start_parameters = np.array([
        model_raw.params['k0'],
        model_raw.params['E0'],
        model_raw.params['Cdl'],
        model_raw.params['Ru'],
        model_raw.params['alpha'],
        model_raw.params['omega'],
        0.01
    ])

    sigma0 = [0.5 * (h - l) for l, h in zip(lower_bounds, upper_bounds)]
    boundaries = pints.RectangularBoundaries(lower_bounds, upper_bounds)
    #found_parameters, found_value = pints.optimise(
    #            log_posterior,
    #            start_parameters,
    #            sigma0,
    #            boundaries,
    #            method=pints.CMAES
    #        )
    found_parameters = start_parameters
    print('start_parameters', start_parameters)
    print('found_parameters', found_parameters)
    xs = [
        found_parameters * 1.001,
        found_parameters * 0.999,
        found_parameters * 0.998,
    ]
    for x in xs:
        x[5] = found_parameters[5]

    # adjust Ru to something reasonable
    xs[0][3] = 1.001 * 5e-5
    xs[1][3] = 1.00 * 5e-5
    xs[2][3] = 0.999 * 5e-5

    transform = pints.ComposedTransformation(
        pints.LogTransformation(1),
        pints.RectangularBoundariesTransformation(
            lower_bounds[1:], upper_bounds[1:]
        ),
    )

    # Create mcmc routine with three chains
    mcmc = pints.MCMCController(log_posterior, 3, xs, method=pints.HaarioBardenetACMC,
                                transform=transform)

    # Add stopping criterion
    mcmc.set_max_iterations(10000)

    # Run!
    chains = mcmc.run()

    # Save chains for plotting and analysis
    with open('results2.pickle', 'wb') as f:
        pickle.dump((xs, pints.GaussianLogLikelihood, log_prior,
                     chains, 'HaarioACMC'), f)
Example #18
    def __init__(self, name):
        super(TestPlot, self).__init__(name)

        # Create toy model (single output)
        self.model = toy.LogisticModel()
        self.real_parameters = [0.015, 500]
        self.times = np.linspace(0, 1000, 100)  # small problem
        self.values = self.model.simulate(self.real_parameters, self.times)

        # Add noise
        self.noise = 10
        self.values += np.random.normal(0, self.noise, self.values.shape)
        self.real_parameters.append(self.noise)
        self.real_parameters = np.array(self.real_parameters)

        # Create an object with links to the model and time series
        self.problem = pints.SingleOutputProblem(self.model, self.times,
                                                 self.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        self.lower = [0.01, 400, self.noise * 0.1]
        self.upper = [0.02, 600, self.noise * 100]
        self.log_prior = pints.UniformLogPrior(self.lower, self.upper)

        # Create a log likelihood
        self.log_likelihood = pints.GaussianLogLikelihood(self.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        self.log_posterior = pints.LogPosterior(self.log_likelihood,
                                                self.log_prior)

        # Run MCMC
        self.x0 = [
            self.real_parameters * 1.1, self.real_parameters * 0.9,
            self.real_parameters * 1.05
        ]
        mcmc = pints.MCMCController(self.log_posterior, 3, self.x0)
        mcmc.set_max_iterations(300)  # make it as small as possible
        mcmc.set_log_to_screen(False)
        self.samples = mcmc.run()

        # Create toy model (multi-output)
        self.model2 = toy.LotkaVolterraModel()
        self.real_parameters2 = self.model2.suggested_parameters()
        self.times2 = self.model2.suggested_times()[::10]  # down sample it
        self.values2 = self.model2.simulate(self.real_parameters2, self.times2)

        # Add noise
        self.noise2 = 0.05
        self.values2 += np.random.normal(0, self.noise2, self.values2.shape)

        # Create an object with links to the model and time series
        self.problem2 = pints.MultiOutputProblem(self.model2, self.times2,
                                                 self.values2)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        self.log_prior2 = pints.UniformLogPrior([1, 1, 1, 1], [6, 6, 6, 6])
        # Create a log likelihood
        self.log_likelihood2 = pints.GaussianKnownSigmaLogLikelihood(
            self.problem2, self.noise2)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        self.log_posterior2 = pints.LogPosterior(self.log_likelihood2,
                                                 self.log_prior2)

        # Run MCMC
        self.x02 = [
            self.real_parameters2 * 1.1, self.real_parameters2 * 0.9,
            self.real_parameters2 * 1.05
        ]
        mcmc = pints.MCMCController(self.log_posterior2, 3, self.x02)
        mcmc.set_max_iterations(300)  # make it as small as possible
        mcmc.set_log_to_screen(False)
        self.samples2 = mcmc.run()

        # Create toy model (single-output, single-parameter)
        self.real_parameters3 = [0]
        self.log_posterior3 = toy.GaussianLogPDF(self.real_parameters3, [1])
        self.lower3 = [-3]
        self.upper3 = [3]

        # Run MCMC
        self.x03 = [[1], [-2], [3]]
        mcmc = pints.MCMCController(self.log_posterior3, 3, self.x03)
        mcmc.set_max_iterations(300)  # make it as small as possible
        mcmc.set_log_to_screen(False)
        self.samples3 = mcmc.run()
Example #19
def inference(model, values, times):

    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable
    lower_bounds = np.array([1e-3, 0.0, 0.4, 0.1, 1e-6, 8.0, 1e-4])
    upper_bounds = np.array([10.0, 0.4, 0.6, 100.0, 100e-6, 10.0, 0.2])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    # params =                   ['k0', 'E0', 'a', 'Ru', 'Cdl', 'freq', 'sigma']
    start_parameters = np.array(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])

    transform = pints.ComposedTransformation(
        pints.LogTransformation(1),
        pints.RectangularBoundariesTransformation(lower_bounds[1:],
                                                  upper_bounds[1:]),
    )
    sigma0 = [0.1 * (h - l) for l, h in zip(lower_bounds, upper_bounds)]
    boundaries = pints.RectangularBoundaries(lower_bounds, upper_bounds)
    found_parameters, found_value = pints.optimise(log_posterior,
                                                   start_parameters,
                                                   sigma0,
                                                   boundaries,
                                                   transform=transform,
                                                   method=pints.CMAES)
    xs = [
        found_parameters * 1.001,
        found_parameters * 1.002,
        found_parameters * 1.003,
    ]
    for x in xs:
        x[5] = found_parameters[5]

    print('start_parameters', start_parameters)
    print('found_parameters', found_parameters)
    print('lower_bounds', lower_bounds)
    print('upper_bounds', upper_bounds)

    # Create mcmc routine with three chains
    mcmc = pints.MCMCController(log_posterior,
                                3,
                                xs,
                                method=pints.HaarioBardenetACMC,
                                transform=transform)

    # Add stopping criterion
    mcmc.set_max_iterations(10000)

    # Run!
    chains = mcmc.run()

    # Save chains for plotting and analysis
    with open('results.pickle', 'wb') as f:
        pickle.dump((xs, pints.GaussianLogLikelihood, log_prior, chains,
                     'HaarioBardenetACMC'), f)
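
Note: the transform= keyword in this and the previous example reflects the
pints API at the time these snippets were written; recent pints releases call
the corresponding argument transformation= in both pints.optimise and
pints.MCMCController.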
Example #20
        'randstim': protocol.randstim,
        }
stim_seq = stim_list[which_data]


# Model
model = m.Model(
        './mmt-model-files/%s.mmt' % which_model,
        stim_seq=stim_seq,
        transform=transform_to_model_param,
        )
model.set_name(which_model)

# Create Pints stuffs
problem = pints.SingleOutputProblem(model, times, data)
loglikelihood = pints.GaussianLogLikelihood(problem)
logprior = pints.UniformLogPrior(
        np.append(np.log(0.1) * np.ones(model.n_parameters()),
            0.1 * noise_sigma),
        np.append(np.log(10.) * np.ones(model.n_parameters()),
            10. * noise_sigma)
        )
logposterior = pints.LogPosterior(loglikelihood, logprior)

# Check logposterior is working fine
priorparams = np.ones(model.n_parameters())
transform_priorparams = transform_from_model_param(priorparams)
priorparams = np.append(priorparams, noise_sigma)
transform_priorparams = np.append(transform_priorparams, noise_sigma)
print('Score at prior parameters: ',
        logposterior(transform_priorparams))
Example #21
    def run(times, ax, bins):
        values = m_true.simulate(true_params, times)
        data = values + np.random.normal(0, 0.1, values.shape)
        problem = pints.SingleOutputProblem(m_simple, times, data)

        # Run MCMC for IID noise, wrong model
        prior = pints.UniformLogPrior([0, 0], [1e6, 1e6])
        likelihood = pints.GaussianLogLikelihood(problem)
        posterior = pints.LogPosterior(likelihood, prior)
        x0 = [[0.2, 1.0]] * 3
        mcmc = pints.MCMCController(posterior, 3, x0)
        mcmc.set_max_iterations(num_mcmc_iter)
        chains_iid = mcmc.run()
        freq_iid = chains_iid[0, :, 0][num_mcmc_iter // 2:]

        # Run MCMC for AR(1) noise, wrong model
        prior = pints.UniformLogPrior([0, 0, 0], [1e6, 1, 1e6])
        likelihood = pints.AR1LogLikelihood(problem)
        posterior = pints.LogPosterior(likelihood, prior)
        x0 = [[0.2, 0.01, 1.0]] * 3
        mcmc = pints.MCMCController(posterior, 3, x0)
        mcmc.set_max_iterations(num_mcmc_iter)
        chains_ar1 = mcmc.run()
        freq_ar1 = chains_ar1[0, :, 0][num_mcmc_iter // 2:]

        # Run MCMC for IID noise, correct model
        problem = pints.SingleOutputProblem(m_true, times, data)
        prior = pints.UniformLogPrior([0, 0, 0], [1e6, 1e6, 1e6])
        likelihood = pints.GaussianLogLikelihood(problem)
        posterior = pints.LogPosterior(likelihood, prior)
        x0 = [[0.2, 0.8, 1.0]] * 3
        mcmc = pints.MCMCController(posterior, 3, x0)
        mcmc.set_max_iterations(num_mcmc_iter)
        chains_true = mcmc.run()
        freq_true = chains_true[0, :, 0][num_mcmc_iter // 2:]

        # Plot histograms of the posteriors
        ax.hist(freq_true,
                alpha=0.5,
                label='Correct',
                hatch='//',
                density=True,
                bins=bins,
                histtype='stepfilled',
                linewidth=2,
                color='grey',
                zorder=-20)

        ax.hist(freq_ar1,
                alpha=1.0,
                label='AR1',
                density=True,
                bins=bins,
                histtype='stepfilled',
                linewidth=2,
                edgecolor='k',
                facecolor='none')

        ax.hist(freq_iid,
                alpha=0.5,
                label='IID',
                density=True,
                bins=bins,
                histtype='stepfilled',
                linewidth=2,
                color=plt.rcParams['axes.prop_cycle'].by_key()['color'][0],
                zorder=-10)

        ax.axvline(0.2, ls='--', color='k')
        ax.set_xlabel(r'$\theta$')
        ax.legend()
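
Note: pints.AR1LogLikelihood appends two noise parameters per output (the
autocorrelation rho and sigma), which is why the AR(1) starting point has
three entries where the IID one has two for the same one-parameter model.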
Example #22
    def test_gaussian_log_likelihoods_single_output(self):
        # Single-output test for known/unknown noise log-likelihood methods

        model = pints.toy.LogisticModel()
        parameters = [0.015, 500]
        sigma = 0.1
        times = np.linspace(0, 1000, 100)
        values = model.simulate(parameters, times)
        values += np.random.normal(0, sigma, values.shape)
        problem = pints.SingleOutputProblem(model, times, values)

        # Test if known/unknown give same result
        l1 = pints.GaussianKnownSigmaLogLikelihood(problem, sigma)
        l2 = pints.GaussianLogLikelihood(problem)
        self.assertAlmostEqual(l1(parameters), l2(parameters + [sigma]))

        # Test invalid constructors
        self.assertRaises(ValueError, pints.GaussianKnownSigmaLogLikelihood,
                          problem, 0)
        self.assertRaises(ValueError, pints.GaussianKnownSigmaLogLikelihood,
                          problem, -1)

        # known noise value checks
        model = pints.toy.ConstantModel(1)
        times = np.linspace(0, 10, 10)
        values = model.simulate([2], times)
        org_values = np.arange(10) / 5.0
        problem = pints.SingleOutputProblem(model, times, org_values)
        log_likelihood = pints.GaussianKnownSigmaLogLikelihood(problem, 1.5)
        self.assertAlmostEqual(log_likelihood([-1]), -21.999591968683927)
        l, dl = log_likelihood.evaluateS1([3])
        self.assertAlmostEqual(l, -23.777369746461702)
        self.assertAlmostEqual(dl[0], -9.3333333333333321)
        self.assertEqual(len(dl), 1)

        # unknown noise value checks
        log_likelihood = pints.GaussianLogLikelihood(problem)
        self.assertAlmostEqual(log_likelihood([-3, 1.5]), -47.777369746461702)

        # unknown noise check sensitivity
        model = pints.toy.ConstantModel(1)
        times = np.linspace(0, 10, 10)
        values = model.simulate([2], times)
        org_values = np.arange(10) / 5.0
        problem = pints.SingleOutputProblem(model, times, org_values)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        l, dl = log_likelihood.evaluateS1([7, 2.0])
        self.assertAlmostEqual(l, -63.04585713764618)
        self.assertAlmostEqual(dl[0], -15.25)
        self.assertAlmostEqual(dl[1], 41.925000000000004)

        # Test deprecated aliases
        l1 = pints.KnownNoiseLogLikelihood(problem, sigma)
        self.assertIsInstance(l1, pints.GaussianKnownSigmaLogLikelihood)

        l2 = pints.UnknownNoiseLogLikelihood(problem)
        self.assertIsInstance(l2, pints.GaussianLogLikelihood)

        # test multiple output unknown noise
        model = pints.toy.ConstantModel(3)
        parameters = [0, 0, 0]
        times = [1, 2, 3, 4]
        values = model.simulate([0, 0, 0], times)
        org_values = [[10.7, 3.5, 3.8], [1.1, 3.2, -1.4], [9.3, 0.0, 4.5],
                      [1.2, -3, -10]]
        problem = pints.MultiOutputProblem(model, times, org_values)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        # Test Gaussian_logpdf((10.7, 1.1, 9.3, 1.2)|mean=0, sigma=3.5) +
        #      Gaussian_logpdf((3.5, 3.2, 0.0, -3)|mean=0, sigma=1) +
        #      Gaussian_logpdf((3.8, -1.4, 4.5, -10)|mean=0, sigma=12)
        #      = -50.5088...
        self.assertAlmostEqual(log_likelihood(parameters + [3.5, 1, 12]),
                               -50.508848609684783)
        l, dl = log_likelihood.evaluateS1(parameters + [3.5, 1, 12])
        self.assertAlmostEqual(l, -50.508848609684783)
        self.assertAlmostEqual(dl[0], 1.820408163265306)
        self.assertAlmostEqual(dl[1], 3.7000000000000002)
        self.assertAlmostEqual(dl[2], -0.021527777777777774)
        self.assertAlmostEqual(dl[3], 3.6065306122448981)
        self.assertAlmostEqual(dl[4], 27.490000000000002)
        self.assertAlmostEqual(dl[5], -0.25425347222222222)

        # test multiple output model dimensions of sensitivities
        d = 20
        model = pints.toy.ConstantModel(d)
        parameters = [0 for i in range(d)]
        times = [1, 2, 3, 4]
        values = model.simulate(parameters, times)
        org_values = np.ones((len(times), d))
        extra_params = np.ones(d).tolist()
        problem = pints.MultiOutputProblem(model, times, org_values)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        l = log_likelihood(parameters + extra_params)
        l1, dl = log_likelihood.evaluateS1(parameters + extra_params)
        self.assertEqual(len(dl), len(parameters + extra_params))
        self.assertEqual(l, l1)
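
A compact restatement of the multi-output convention exercised above (a
sketch): one sigma is appended per output, on top of the model parameters.

import pints
import pints.toy

model = pints.toy.ConstantModel(3)
times = [1, 2, 3, 4]
values = model.simulate([1, 1, 1], times)
problem = pints.MultiOutputProblem(model, times, values)
log_likelihood = pints.GaussianLogLikelihood(problem)
assert log_likelihood.n_parameters() == model.n_parameters() + 3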
Example #23
def run_figure2(num_mcmc_samples=20000,
                num_mcmc_chains=3,
                num_runs=8,
                output_dir='./'):
    """Run the Gaussian process on multiplicative data.

    This function runs the simulations and saves the results to pickle.
    """
    random.seed(123)
    np.random.seed(123)

    all_fits = []
    iid_runs = []
    sigmas = []
    mult_runs = []
    gp_runs = []
    for run in range(num_runs):
        # Make a synthetic time series
        times, values, data = generate_time_series(model='logistic',
                                                   noise='multiplicative',
                                                   n_times=251)

        # Make Pints model and problem
        model = pints.toy.LogisticModel()
        problem = pints.SingleOutputProblem(model, times, data)

        # Initial conditions for model parameters
        model_starting_point = [0.08, 50]

        # Run MCMC for IID posterior
        likelihood = pints.GaussianLogLikelihood
        x0 = model_starting_point + [2]
        posterior_iid = run_pints(problem, likelihood, x0, num_mcmc_samples)
        iid_runs.append(posterior_iid)

        # Save standard deviations from IID runs
        sigma = np.median(posterior_iid[:, 2])
        sigmas.append(sigma)

        # Run MCMC for multiplicative noise posterior
        likelihood = pints.MultiplicativeGaussianLogLikelihood
        x0 = model_starting_point + [0.5, 0.5]
        posterior_mult = run_pints(problem, likelihood, x0, num_mcmc_samples)
        mult_runs.append(posterior_mult)

        # Infer the nonstationary kernel fit
        # Run an optimization assuming IID noise
        log_prior = pints.UniformLogPrior([0] * 3, [1e6] * 3)
        log_likelihood = pints.GaussianLogLikelihood(problem)
        log_posterior = pints.LogPosterior(log_likelihood, log_prior)
        opt = pints.OptimisationController(log_posterior,
                                           model_starting_point + [2])
        xbest, fbest = opt.run()

        # Run the GP fit, using the best fit for initialization
        gp_times = times[::10]
        kernel = flexnoise.kernels.GPLaplacianKernel
        gnp = flexnoise.GPNoiseProcess(problem, kernel, xbest[:2], gp_times)
        gnp.set_gp_hyperparameters(mu=0.0, alpha=1.0, beta_num_points=200)
        x = gnp.run_optimize(num_restarts=100, parallel=True, maxiter=150)
        all_fits.append(x)

        # Run MCMC for multivariate normal noise
        kernel = flexnoise.kernels.GPLaplacianKernel(None, gp_times)
        kernel.parameters = x[2:]
        cov = kernel.get_matrix(times)
        likelihood = flexnoise.CovarianceLogLikelihood
        x0 = model_starting_point
        posterior_gp = run_pints(problem,
                                 likelihood,
                                 x0,
                                 num_mcmc_samples,
                                 likelihood_args=[cov])
        gp_runs.append(posterior_gp)

    # Save all results to pickle
    results = [
        iid_runs, mult_runs, all_fits, gp_runs, times, data, values, model,
        problem, kernel, sigmas
    ]

    fname = os.path.join(output_dir, 'fig2_data.pkl')
    with open(fname, 'wb') as f:
        pickle.dump(results, f)
Example #24
def plot_likelihood_old(model_raw, model_old, model, values, times):

    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model_old, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable

    e0_buffer = 0.1 * (model_raw.params['Ereverse'] -
                       model_raw.params['Estart'])
    lower_bounds = np.array([
        0.0,
        model_raw.params['Estart'] + e0_buffer,
        0.0,
        0.0,
        0.4,
        0.9 * model_raw.params['omega'],
        1e-4,
    ])
    upper_bounds = np.array([
        100 * model_raw.params['k0'],
        model_raw.params['Ereverse'] - e0_buffer,
        10 * model_raw.params['Cdl'],
        10 * model_raw.params['Ru'],
        0.6,
        1.1 * model_raw.params['omega'],
        0.2,
    ])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 mcmc chains
    param_names = ['k0', 'E0', 'Cdl', 'Ru', 'alpha', 'omega', 'sigma']
    start_parameters = [
        model_raw.params['k0'], model_raw.params['E0'],
        model_raw.params['Cdl'], model_raw.params['Ru'],
        model_raw.params['alpha'], model_raw.params['omega'], 0.01
    ]
    start_parameters_new = model.non_dim(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])
    fig = plt.figure()
    sim_current = model_old.simulate(start_parameters, times)
    sim_current_new = model.simulate(start_parameters_new, times)
    plt.plot(times, values, label='data')
    plt.plot(times, sim_current, label='sim')
    plt.plot(times, -sim_current_new, label='sim_new')
    plt.legend()
    print(
        np.linalg.norm(-sim_current_new - sim_current) /
        np.linalg.norm(sim_current_new))
    plt.savefig('new_versus_old_sim.pdf')

    scaling = (upper_bounds - lower_bounds)
    minx = start_parameters - scaling / 1000.0
    maxx = start_parameters + scaling / 1000.0

    for i, start in enumerate(start_parameters):
        print(param_names[i])
        plt.clf()
        xgrid = np.linspace(minx[i], maxx[i], 100)
        ygrid = np.empty_like(xgrid)
        for j, x in enumerate(xgrid):
            params = np.copy(start_parameters)
            params[i] = x
            ygrid[j] = log_likelihood(params)
        plt.plot(xgrid, ygrid)
        plt.savefig('likelihood_old_' + param_names[i] + '.pdf')
Example #25
    def setUpClass(cls):

        # Number of samples: Make this as small as possible to speed up testing
        n_samples = 300

        # Create toy model (single output)
        cls.model = toy.LogisticModel()
        cls.real_parameters = [0.015, 500]
        cls.times = np.linspace(0, 1000, 100)  # small problem
        cls.values = cls.model.simulate(cls.real_parameters, cls.times)

        # Add noise
        cls.noise = 10
        cls.values += np.random.normal(0, cls.noise, cls.values.shape)
        cls.real_parameters.append(cls.noise)
        cls.real_parameters = np.array(cls.real_parameters)

        # Create an object with links to the model and time series
        cls.problem = pints.SingleOutputProblem(cls.model, cls.times,
                                                cls.values)

        # Create a uniform prior over both the parameters and the new noise
        # variable
        cls.lower = [0.01, 400, cls.noise * 0.1]
        cls.upper = [0.02, 600, cls.noise * 100]
        cls.log_prior = pints.UniformLogPrior(cls.lower, cls.upper)

        # Create a log likelihood
        cls.log_likelihood = pints.GaussianLogLikelihood(cls.problem)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        cls.log_posterior = pints.LogPosterior(cls.log_likelihood,
                                               cls.log_prior)

        # Run MCMC
        cls.x0 = [
            cls.real_parameters * 1.1, cls.real_parameters * 0.9,
            cls.real_parameters * 1.05
        ]
        mcmc = pints.MCMCController(cls.log_posterior, 3, cls.x0)
        mcmc.set_max_iterations(n_samples)
        mcmc.set_log_to_screen(False)
        cls.samples = mcmc.run()

        # Create toy model (multi-output)
        cls.model2 = toy.LotkaVolterraModel()
        cls.real_parameters2 = cls.model2.suggested_parameters()
        cls.times2 = cls.model2.suggested_times()[::10]  # downsample it
        cls.values2 = cls.model2.simulate(cls.real_parameters2, cls.times2)

        # Add noise
        cls.noise2 = 0.05
        cls.values2 += np.random.normal(0, cls.noise2, cls.values2.shape)

        # Create an object with links to the model and time series
        cls.problem2 = pints.MultiOutputProblem(cls.model2, cls.times2,
                                                np.log(cls.values2))

        # Create a uniform prior over both the parameters and the new noise
        # variable
        cls.log_prior2 = pints.UniformLogPrior([0, 0, 0, 0], [6, 6, 6, 6])
        # Create a log likelihood
        cls.log_likelihood2 = pints.GaussianKnownSigmaLogLikelihood(
            cls.problem2, cls.noise2)

        # Create an un-normalised log-posterior (log-likelihood + log-prior)
        cls.log_posterior2 = pints.LogPosterior(cls.log_likelihood2,
                                                cls.log_prior2)

        # Run MCMC
        cls.x02 = [
            cls.real_parameters2 * 1.1, cls.real_parameters2 * 0.9,
            cls.real_parameters2 * 1.05
        ]
        mcmc = pints.MCMCController(cls.log_posterior2, 3, cls.x02)
        mcmc.set_max_iterations(n_samples)
        mcmc.set_log_to_screen(False)
        cls.samples2 = mcmc.run()

        # Create toy model (single-output, single-parameter)
        cls.real_parameters3 = [0]
        cls.log_posterior3 = toy.GaussianLogPDF(cls.real_parameters3, [1])
        cls.lower3 = [-3]
        cls.upper3 = [3]

        # Run MCMC
        cls.x03 = [[1], [-2], [3]]
        mcmc = pints.MCMCController(cls.log_posterior3, 3, cls.x03)
        mcmc.set_max_iterations(n_samples)
        mcmc.set_log_to_screen(False)
        cls.samples3 = mcmc.run()
Example #26
def figure4():
    np.random.seed(1234)

    # Generate data from third order polynomial capacitance
    t = np.linspace(1, 1.4, 1000)
    protocol = offset_sine_wave_protocol(-6, 8.9 * 2 * math.pi, -0.2)
    params = [-1.3, 0.44, 195.0, 0.1, 0.45, 0.02]

    def capacitance_func(e, c1, c2, c3):
        return 1 + c1 * e + c2 * e**2 + c3 * e**3

    cap_params = [0.015, 0.01, 0.005]
    i, _ = electron_onerxn(params + cap_params,
                           protocol,
                           t,
                           capacitance_func=capacitance_func)

    # Add IID noise
    i += np.random.normal(0, 0.1 * np.mean(np.abs(i)), i.shape)

    # Try to learn using constant capacitance
    class SimpleCapModel(pints.ForwardModel):

        def n_parameters(self):
            return 6

        def simulate(self, parameters, times):
            i, _ = electron_onerxn(parameters, protocol, times)
            if len(i) != len(times) or not np.all(np.isfinite(i)):
                # Signal solver failure so the likelihood rejects this point
                return -np.inf * np.ones(len(times))
            return i

    problem = pints.SingleOutputProblem(SimpleCapModel(), t, i)
    likelihood = pints.GaussianLogLikelihood(problem)

    prior = pints.UniformLogPrior(
        [-10, 0.1, 50, 0.0001, 0.0001, 0.0001, 0.0],
        [10, 7.0, 600, 1.5, 10, 10, 100.0]
    )

    posterior = pints.LogPosterior(likelihood, prior)
    x0 = [-1.3, 0.44, 195.0, 0.1, 0.45, 0.015, 1]

    opt = pints.OptimisationController(posterior, x0)
    opt.set_max_iterations(1000)
    x1, f1 = opt.run()

    # Remove learned noise parameter
    x1 = x1[:-1]

    i_fit, theta_fit = electron_onerxn(x1, protocol, t)

    # Calculate an approximate decomposition of the current into faradaic and
    # capacitive components.
    # For higher accuracy, this should be done in the solver function
    # (echem.py). These are accurate enough for plotting but not perfect.
    dt = 1e-5
    dedt = (protocol(t) - protocol(t - dt)) / dt
    dedt = dedt[:-1]
    dthetadt = np.diff(theta_fit) / (t[1] - t[0])
    didt = np.diff(i_fit) / (t[1] - t[0])
    i_f = x1[-2] * dthetadt
    i_c = x1[-1] * dedt - x1[-1] * x1[3] * didt
    t = t[:-1]
    i_fit = i_fit[:-1]
    i = i[:-1]

    # Calculate residuals to the best fit and moving average
    resids = i_fit - i
    window_len = 25
    ma = np.convolve(resids, np.ones(window_len) / window_len, mode='same')

    fig = plt.figure(figsize=(6.5, 7.25))
    ax = fig.add_subplot(3, 1, 1)
    ax.scatter(protocol(t), i, label='Data', s=1.0, color='k')
    ax.plot(protocol(t), i_fit, label='Best fit', color='k')
    ax.legend()
    ax.set_xlabel('Potential')
    ax.set_ylabel('Current')

    ax = fig.add_subplot(3, 1, 2)
    ax.plot(t, i_f, label='Faradaic', ls='--', color='k')
    ax.plot(t, i_c, label='Capacitive', color='k')
    ax.legend()
    ax.set_ylabel('Current')

    ax = fig.add_subplot(3, 1, 3)
    ax.plot(t, resids, label='Residuals', color='k', alpha=0.5)
    ax.plot(t, ma, label='Moving average', color='k')
    ax.legend()
    ax.set_ylabel('Current')
    ax.set_xlabel('Time')

    fig.set_tight_layout(True)

    plt.savefig('figure4.pdf')