def main():
    # Constants
    timeRangesToUse = [[1, 2499], [2549, 2999], [3049, 4999], [5049, 14999],
                       [15049, 19999], [20049, 29999], [30049, 64999],
                       [65049, 69999], [70049, -1]]
    true_parameters = [2.26E-04, 0.0699, 3.45E-05, 0.05462, 0.0873,
                       8.92E-03, 5.150E-3, 0.03158, 0.1524]

    model = ChannelModelPintsWrapper()
    data = pd.read_csv("data/averaged-data.txt", delim_whitespace=True)
    dat = extract_time_ranges(data.values, timeRangesToUse)
    times = dat[:, 0]
    values = dat[:, 1]
    current = model.simulate(true_parameters, times)

    plt.plot(times, values)
    plt.plot(times, current)
    plt.show()

    problem = pints.SingleOutputProblem(model, times, values)
    error = pints.SumOfSquaresError(problem)
    boundaries = MarkovModelBoundaries()
    x0 = np.array([0.1] * 9)
    found_parameters, found_value = pints.optimise(error, true_parameters,
                                                   boundaries=boundaries)
    print(found_parameters, found_value)
def test_optimise(self):
    """ Tests :meth:`pints.optimise()`. """
    r = pints.toy.TwistedGaussianLogPDF(2, 0.01)
    x = np.array([0, 1.01])
    s = 0.01
    b = pints.RectangularBoundaries([-0.01, 0.95], [0.01, 1.05])
    with StreamCapture():
        x, f = pints.optimise(r, x, s, b, method=pints.XNES)
    self.assertEqual(x.shape, (2, ))
    self.assertTrue(f < 1e-6)
def main():
    # Constants
    timeRangesToUse = [[1, 2499], [2549, 2999], [3049, 4999], [5049, 14999],
                       [15049, 19999], [20049, 29999], [30049, 64999],
                       [65049, 69999], [70049, -1]]
    starting_parameters = [3.87068845e-04, 5.88028759e-02, 6.46971727e-05,
                           4.87408447e-02, 8.03073893e-02, 7.36295506e-03,
                           5.32908518e-03, 3.32254316e-02, 6.56614672e-02]

    model = ChannelModelPintsWrapper()
    data = pd.read_csv("data/averaged-data.txt", delim_whitespace=True)
    dat = extract_time_ranges(data.values, timeRangesToUse)
    times = dat[:, 0]
    values = dat[:, 1]
    current = model.simulate(starting_parameters, times)

    problem = pints.SingleOutputProblem(model, times, values)
    error = pints.SumOfSquaresError(problem)
    boundaries = MarkovModelBoundaries()
    x0 = np.array([0.1] * 9)
    found_parameters, found_value = pints.optimise(error, starting_parameters,
                                                   boundaries=boundaries)
    print(found_parameters, found_value)
def optimise(self):
    """
    Parameter inference using SNES (Separable Natural Evolution Strategy).

    Returns
    -------
    found_parameters
        The optimal parameters found by the optimiser.
    """
    # Define a score function, i.e. the sum of squares error
    score = pints.SumOfSquaresError(self.problem)

    # Define the boundaries for F and k according to the literature
    boundaries = pints.RectangularBoundaries([0.01, 0.01], [1.0, 1.0])

    # Starting point within the boundaries
    x0 = [0.05, 0.05]

    # Run SNES
    found_parameters, found_value = pints.optimise(score, x0,
                                                   boundaries=boundaries,
                                                   method=pints.SNES)
    return found_parameters
def inference(model, values, times):
    # Create an object with links to the model and time series
    problem = pints.SingleOutputProblem(model, times, values)

    # Create a log-likelihood function (adds an extra parameter!)
    log_likelihood = pints.GaussianLogLikelihood(problem)

    # Create a uniform prior over both the parameters and the new noise variable
    lower_bounds = np.array([1e-3, 0.0, 0.4, 0.1, 1e-6, 8.0, 1e-4])
    upper_bounds = np.array([10.0, 0.4, 0.6, 100.0, 100e-6, 10.0, 0.2])
    log_prior = pints.UniformLogPrior(lower_bounds, upper_bounds)

    # Create a posterior log-likelihood (log(likelihood * prior))
    log_posterior = pints.LogPosterior(log_likelihood, log_prior)

    # Choose starting points for 3 MCMC chains
    # params = ['k0', 'E0', 'a', 'Ru', 'Cdl', 'freq', 'sigma']
    start_parameters = np.array(
        [0.0101, 0.214, 0.53, 8.0, 20.0e-6, 9.0152, 0.01])

    transform = pints.ComposedTransformation(
        pints.LogTransformation(1),
        pints.RectangularBoundariesTransformation(lower_bounds[1:],
                                                  upper_bounds[1:]),
    )
    sigma0 = [0.1 * (h - l) for l, h in zip(lower_bounds, upper_bounds)]
    boundaries = pints.RectangularBoundaries(lower_bounds, upper_bounds)
    found_parameters, found_value = pints.optimise(log_posterior,
                                                   start_parameters,
                                                   sigma0,
                                                   boundaries,
                                                   transform=transform,
                                                   method=pints.CMAES)

    xs = [
        found_parameters * 1.001,
        found_parameters * 1.002,
        found_parameters * 1.003,
    ]
    for x in xs:
        x[5] = found_parameters[5]

    print('start_parameters', start_parameters)
    print('found_parameters', found_parameters)
    print('lower_bounds', lower_bounds)
    print('upper_bounds', upper_bounds)

    # Create an MCMC routine with three chains
    mcmc = pints.MCMCController(log_posterior,
                                3,
                                xs,
                                method=pints.HaarioBardenetACMC,
                                transform=transform)

    # Add stopping criterion
    mcmc.set_max_iterations(10000)

    # Run!
    chains = mcmc.run()

    # Save chains for plotting and analysis
    pickle.dump((xs, pints.GaussianLogLikelihood, log_prior, chains,
                 'HaarioBardenetACMC'), open('results.pickle', 'wb'))
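# The pickled tuple written above can be reloaded later for diagnostics. This is
# a minimal sketch (not part of the original script), assuming the
# 'results.pickle' layout produced by inference() and that the optional
# pints.plot module is available.
import pickle

import matplotlib.pyplot as plt
import pints.plot

# Reload the tuple in the same order it was dumped by inference()
xs, log_likelihood_class, log_prior, chains, method_name = pickle.load(
    open('results.pickle', 'rb'))

# Trace plots of the three chains give a quick visual convergence check
pints.plot.trace(chains)
plt.show()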
def n_parameters(self):
    return self.n


lpdf = LogisticAPI(
    'https://mighty-badlands-12664.herokuapp.com/pints-team/benchmarks/1.0.0/')

real_parameters = [0.015, 500, 10]

# Select some boundaries
boundaries = pints.RectangularBoundaries([0, 400, 0], [0.03, 600, 20])

# Perform an optimisation with boundaries and hints
x0 = 0.01, 450, 5
sigma0 = [0.01, 100, 10]
found_parameters, found_value = pints.optimise(lpdf,
                                               x0,
                                               sigma0,
                                               boundaries,
                                               method=pints.CMAES)

# Show score of true solution
print('Log-likelihood at true solution: ')
print(lpdf(real_parameters))

# Compare parameters with original
print('Found solution:          True parameters:')
for k, x in enumerate(found_parameters):
    print(pints.strfloat(x) + '    ' + pints.strfloat(real_parameters[k]))
s = myokit.Simulation(model, prot)

# Format the values list to use it in PINTS
fit_values = values[0] + values[1]

# Calling the PINTS library
# Parameters: [CL, Vc, Qp1, Vp1, Qp2, Vp2]
initial_point = [5.4, 8.5, 18.6, 8.3, 1.7, 32.8]
problem = pints.SingleOutputProblem(model=MyModel(),
                                    times=np.linspace(0, 24, 10),
                                    values=fit_values)
boundaries = pints.RectangularBoundaries([3, 5, 7, 5, 0.5, 30],
                                         [15, 15, 30, 20, 5, 60])
error_measure = pints.SumOfSquaresError(problem)
found_parameters, found_value = pints.optimise(error_measure,
                                               initial_point,
                                               boundaries=boundaries,
                                               method=pints.XNES)

# %% Running the simulation with the found parameters

# Reset the variables to their initial state for the plot
s.reset()

# Use the parameters returned from the optimisation, or a user-defined parameter set
parameters = [6.7, 10.1, 17.8, 9.4, 1.4, 18.3]
plot_parameters = found_parameters

s.set_constant('constants.CL', plot_parameters[0])
s.set_constant('plasma.Vc', plot_parameters[1])
s.set_constant('constants.kp1', plot_parameters[2])
s.set_constant('compartment_1.V1', plot_parameters[3])
s.set_constant('constants.kp2', plot_parameters[4])
s.set_constant('compartment_2.V2', plot_parameters[5])
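# Once the constants are set, the simulation can be run and its output plotted.
# A minimal sketch (not from the original script): the 24-unit duration and the
# logged variable name 'plasma.Cc' are assumptions and depend on the Myokit model.
import matplotlib.pyplot as plt

# Run the simulation over the fitted time window (time units follow the model)
log = s.run(24)

# 'plasma.Cc' is a hypothetical variable name; substitute the model's logged output
plt.plot(log.time(), log['plasma.Cc'])
plt.xlabel('Time')
plt.ylabel('Concentration')
plt.show()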
def main(args, output_dir="", ms_to_remove_after_spike=50):
    output_dir = os.path.join(args.output, output_dir)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    # Constants
    if ms_to_remove_after_spike == 0:
        indices_to_remove = None
    else:
        spikes = [2500, 3000, 5000, 15000, 20000, 30000, 65000, 70000]
        indices_to_remove = [[spike, spike + ms_to_remove_after_spike*10]
                             for spike in spikes]

    indices_to_use = remove_indices(list(range(80000)), indices_to_remove)
    # indices_to_use = [[1, 2499], [2549, 2999], [3049, 4999], [5049, 14999],
    #                   [15049, 19999], [20049, 29999], [30049, 64999],
    #                   [65049, 69999], [70049, -1]]
    starting_parameters = [3.87068845e-04, 5.88028759e-02, 6.46971727e-05,
                           4.87408447e-02, 8.03073893e-02, 7.36295506e-03,
                           5.32908518e-03, 3.32254316e-02, 6.56614672e-02]

    plt.rcParams['axes.axisbelow'] = True

    data = pd.read_csv(args.data_file_path, delim_whitespace=True)
    print("outputting to {}".format(args.output))

    if not os.path.exists(args.data_file_path):
        print("Input file not provided. Doing nothing.")
        return

    par = Params()
    skip = int(par.timestep/0.1)
    dat = data.values[indices_to_use]
    times = dat[:, 0]
    values = dat[:, 1]

    model = PintsWrapper(par, args, times)

    current = model.simulate(starting_parameters, times)

    if args.plot:
        plt.plot(times, values)
        plt.plot(model.times_to_use, current)
        plt.show()

    problem = pints.SingleOutputProblem(model, times, values)
    error = pints.SumOfSquaresError(problem)
    boundaries = Boundaries()
    x0 = starting_parameters

    found_parameters, found_value = pints.optimise(error, starting_parameters,
                                                   boundaries=boundaries)
    # found_parameters = np.array([2.26E-04, 0.0699, 3.45E-05, 0.05462, 0.0873,
    #                              8.92E-03, 5.150E-3, 0.03158, 0.1524])
    # found_value = 100

    print("Finished! Found parameters: {}, score: {}".format(found_parameters,
                                                             found_value))

    # Find error sensitivities
    funcs = model.funcs
    current, sens = funcs.SimulateForwardModelSensitivities(found_parameters)
    sens = (sens * found_parameters[None, :]).T

    for i, vec in enumerate(sens):
        plt.plot(times, vec, label="state_variable_{}".format(i))
    plt.title("Output sensitivities for four gate Markov Model")
    if args.plot:
        plt.show()
    else:
        plt.savefig(os.path.join(output_dir, "output_sensitivities"))
        plt.clf()

    # Estimate the variance of the i.i.d. Gaussian noise
    nobs = len(times)
    sigma2 = sum((current - values)**2) / (nobs - 1)

    # Compute the Fisher information matrix
    FIM = sens @ sens.T / sigma2
    # The covariance estimate is the (matrix) inverse of the FIM
    cov = np.linalg.inv(FIM)
    eigvals = np.linalg.eigvals(FIM)

    for i in range(0, par.n_params):
        for j in range(i + 1, par.n_params):
            parameters_to_view = np.array([i, j])
            sub_cov = cov[parameters_to_view[:, None], parameters_to_view]
            eigen_val, eigen_vec = np.linalg.eigh(sub_cov)
            eigen_val = eigen_val.real
            if eigen_val[0] > 0 and eigen_val[1] > 0:
                print("COV_{},{} : well defined".format(i, j))
                cov_ellipse(sub_cov, q=[0.75, 0.9, 0.99])
                plt.ylabel("parameter {}".format(i))
                plt.xlabel("parameter {}".format(j))
                if args.plot:
                    plt.show()
                else:
                    plt.savefig(os.path.join(
                        output_dir,
                        "covariance_for_parameters_{}_{}".format(i, j)))
                    plt.clf()
            else:
                print("COV_{},{} : negative eigenvalue".format(i, j))

    print('Eigenvalues of FIM:\n{}'.format(eigvals))
    print("Covariance matrix is: \n{}".format(cov))

    plt.plot(data["time"], data["current"], label="averaged data")
    plt.plot(times, current, label="current")
    plt.legend()
    if args.plot:
        plt.show()
    else:
        plt.savefig(os.path.join(output_dir, "fit"))
        plt.clf()
    return times, found_parameters
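# The covariance matrix also yields quick per-parameter uncertainty estimates.
# A minimal sketch (not part of main above), assuming 'cov' and 'found_parameters'
# as computed there: approximate standard errors are the square roots of the
# covariance diagonal, and Wald-style 95% intervals follow as +/- 1.96 * SE.
import numpy as np

standard_errors = np.sqrt(np.diag(cov))
for i, (p, se) in enumerate(zip(found_parameters, standard_errors)):
    print("parameter {}: {:.4g} +/- {:.4g} (95% CI [{:.4g}, {:.4g}])".format(
        i, p, 1.96 * se, p - 1.96 * se, p + 1.96 * se))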