def test_stochastic_acceptor():
    """Smoke-test the stochastic acceptor.

    Runs one generation with a fixed initial temperature and a max-found
    pdf normalization, then three generations with the default (adaptive)
    normalization and no initial temperature.
    """
    acceptor = pyabc.StochasticAcceptor(
        pdf_norm_method=pyabc.pdf_norm_max_found)
    eps = pyabc.Temperature(initial_temperature=1)
    distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))

    def model(par):
        return {'s0': par['p0'] + np.array([0.3, 0.7])}

    x_0 = {'s0': np.array([0.4, -0.6])}

    # just run
    prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))
    abc = pyabc.ABCSMC(
        model, prior, distance,
        eps=eps, acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=1, minimum_epsilon=1.)

    # use no initial temperature and adaptive c
    acceptor = pyabc.StochasticAcceptor()
    eps = pyabc.Temperature()
    abc = pyabc.ABCSMC(
        model, prior, distance,
        eps=eps, acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3, minimum_epsilon=1.)
def test_pdf_norm_methods_integration():
    """Test integration of pdf normalization methods in ABCSMC."""
    def model(par):
        return {'s0': par['p0'] + np.array([0.3, 0.7])}

    x_0 = {'s0': np.array([0.4, -0.6])}

    # each normalization scheme gets a short, independent run
    for pdf_norm in [
        pyabc.pdf_norm_max_found,
        pyabc.pdf_norm_from_kernel,
        pyabc.ScaledPDFNorm(),
    ]:
        # just run
        acceptor = pyabc.StochasticAcceptor(pdf_norm_method=pdf_norm)
        eps = pyabc.Temperature()
        distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))
        prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))
        abc = pyabc.ABCSMC(
            model, prior, distance,
            eps=eps, acceptor=acceptor, population_size=20)
        abc.new(pyabc.create_sqlite_db_id(), x_0)
        abc.run(max_nr_populations=3)
def test_temperature():
    """Check the Temperature epsilon with an initial value and a log file.

    Uses the module-level `get_weighted_distances` / `get_all_records`
    fixtures. Verifies the initial temperature is respected, that updates
    decrease it, that the final generation reaches 1, and that proposed
    temperatures are written to the JSON log file.
    """
    acceptor_config = {'pdf_norm': 5,
                       'kernel_scale': pyabc.distance.SCALE_LOG}
    nr_pop = 3
    log_file = tempfile.mkstemp(suffix='.json')[1]

    eps = pyabc.Temperature(initial_temperature=42, log_file=log_file)
    eps.initialize(0, get_weighted_distances, get_all_records,
                   nr_pop, acceptor_config)
    # check if initial value is respected
    assert eps(0) == 42

    eps.update(1, get_weighted_distances, get_all_records,
               0.4, acceptor_config)
    assert eps(1) < 42

    # last time
    eps.update(2, get_weighted_distances, get_all_records,
               0.2, acceptor_config)
    assert eps(2) == 1

    # check log file
    proposed_temps = pyabc.storage.load_dict_from_json(log_file)
    assert proposed_temps[0][0] == 42
    assert len(proposed_temps[1]) == 2
    assert len(proposed_temps[2]) == 1
def test_default_eps():
    """Check the default minimum epsilon in usual vs noisy settings.

    In the deterministic default setup the minimum epsilon should be 0.0;
    with a stochastic acceptor plus temperature it should default to 1.0.
    """
    def model(par):
        return {'s0': par['p0'] + np.random.random(),
                's1': np.random.random()}

    x_0 = {'s0': 0.4, 's1': 0.6}
    prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))

    # usual setting
    abc = pyabc.ABCSMC(model, prior, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3)
    assert abc.minimum_epsilon == 0.0

    # noisy setting
    acceptor = pyabc.StochasticAcceptor()
    eps = pyabc.Temperature()
    distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))
    abc = pyabc.ABCSMC(
        model, prior, distance,
        eps=eps, acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3)
    assert abc.minimum_epsilon == 1.0
def test_temperature():
    """Exercise the Temperature epsilon through initialize and updates.

    Builds an in-function weighted-distance frame and a list of random
    sampling records, then checks that the initial temperature is
    respected, that updates decrease it, and that the last generation
    reaches temperature 1.
    """
    weighted_distances = pd.DataFrame({
        'distance': [1, 2, 3, 4],
        'w': [2, 1, 1, 0],
    })
    all_records = [
        dict(distance=np.random.randn(),
             transition_pd_prev=np.random.randn(),
             transition_pd=np.random.randn(),
             # fix: `True if cond else False` replaced by the direct
             # boolean (wrapped in bool() to keep a plain Python bool
             # rather than a numpy scalar)
             accepted=bool(np.random.random() > 0.5))
        for _ in range(20)
    ]
    acceptor_config = {'pdf_norm': 5,
                       'kernel_scale': pyabc.distance.SCALE_LOG}
    nr_pop = 3

    eps = pyabc.Temperature(initial_temperature=42)
    eps.initialize(0, lambda: weighted_distances, lambda: all_records,
                   nr_pop, acceptor_config)
    # check if initial value is respected
    assert eps(0) == 42

    eps.update(1, lambda: weighted_distances, lambda: all_records,
               0.4, acceptor_config)
    assert eps(1) < 42

    # last time
    eps.update(2, lambda: weighted_distances, lambda: all_records,
               0.2, acceptor_config)
    assert eps(2) == 1
def test_stochastic_acceptor():
    """Test the stochastic acceptor's features."""
    # store pnorms
    pnorm_file = tempfile.mkstemp(suffix=".json")[1]
    acceptor = pyabc.StochasticAcceptor(
        pdf_norm_method=pyabc.pdf_norm_max_found,
        log_file=pnorm_file)
    eps = pyabc.Temperature(initial_temperature=1)
    distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))

    def model(par):
        return {'s0': par['p0'] + np.array([0.3, 0.7])}

    x_0 = {'s0': np.array([0.4, -0.6])}

    # just run
    prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))
    abc = pyabc.ABCSMC(
        model, prior, distance,
        eps=eps, acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    h = abc.run(max_nr_populations=1, minimum_epsilon=1.)

    # check pnorms
    pnorms = pyabc.storage.load_dict_from_json(pnorm_file)
    assert len(pnorms) == h.max_t + 2  # +1 t0, +1 one final update
    assert isinstance(list(pnorms.keys())[0], int)
    assert isinstance(pnorms[0], float)

    # use no initial temperature and adaptive c
    acceptor = pyabc.StochasticAcceptor()
    eps = pyabc.Temperature()
    abc = pyabc.ABCSMC(
        model, prior, distance,
        eps=eps, acceptor=acceptor, population_size=20)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3)
def from_vars(analysis_vars: AnalysisVars, model_vars: ModelVars,
              i_data: int = 0, i_rep: int = 0):
    """Assemble a Task from analysis-level and model-level variables.

    For a stochastic acceptor, the noise-free model together with the
    noise kernel is used, the epsilon is forced to a Temperature, and the
    minimum epsilon is pinned to 1.0; otherwise the noisy model with the
    plain distance is used. Model-level n_acc/n_pop, when set, override
    the analysis-level defaults.
    """
    acceptor = analysis_vars.get_acceptor()
    transition = analysis_vars.get_transition()
    eps_min = analysis_vars.eps_min
    eps = analysis_vars.get_eps()

    if isinstance(acceptor, pyabc.StochasticAcceptor):
        # stochastic acceptance: noise enters via the kernel, not the model
        if not isinstance(eps, pyabc.Temperature):
            eps = pyabc.Temperature()
        model = model_vars.get_model()
        distance = model_vars.get_kernel()
        eps_min = 1.0
    else:
        model = model_vars.get_model_noisy()
        distance = model_vars.get_distance()

    prior = model_vars.get_prior()
    sampler = create_sampler()

    # model-level settings take precedence over analysis-level defaults
    n_acc = model_vars.n_acc if model_vars.n_acc is not None \
        else analysis_vars.n_acc
    n_pop = model_vars.n_pop if model_vars.n_pop is not None \
        else analysis_vars.n_pop

    return Task(
        acceptor=acceptor,
        transition=transition,
        eps=eps,
        distance=distance,
        model=model,
        prior=prior,
        sampler=sampler,
        n_acc=n_acc,
        n_pop=n_pop,
        eps_min=eps_min,
        min_acc_rate=analysis_vars.min_acc_rate,
        p_true=model_vars.p_true,
        y_obs=get_data(model_vars, i_data),
        analysis_id=analysis_vars.id,
        model_id=model_vars.get_id(),
        i_data=i_data,
        i_rep=i_rep)
def __call__(
        self, prev_pdf_norm, get_weighted_distances,
        prev_temp, acceptance_rate, **kwargs):
    """Custom pdf normalization schedule.

    NOTE(review): presumably a method of the `PDFNorm` class instantiated
    below — the class header is not visible in this chunk; confirm against
    the full file.

    Starts from the max-found norm; once the acceptance rate drops below
    0.1 (and from then on, via `self.hit`), subtracts an offset derived
    from a decayed temperature, never going below the previous norm.
    """
    pdf_norm = pyabc.pdf_norm_max_found(
        prev_pdf_norm=prev_pdf_norm,
        get_weighted_distances=get_weighted_distances)
    print(" best: ", pdf_norm)

    # before the first generation, or while acceptance is still healthy
    # and the offset mode has not been triggered yet, use the plain norm
    if prev_temp is None or (acceptance_rate >= 0.1 and not self.hit):
        return pdf_norm

    self.hit = True
    temp = 0.6 * prev_temp
    offset = temp * np.log(10)
    used_norm = max(prev_pdf_norm, pdf_norm - offset)
    print(" offsetted: ", pdf_norm - offset)
    return used_norm


prior = pyabc.Distribution(**{key: pyabc.RV("uniform", a, b - a)
                              for key, (a, b) in limits.items()})
acceptor = pyabc.StochasticAcceptor(
    log_file="acc_log_v5_higherc.json", pdf_norm_method=PDFNorm())
temperature = pyabc.Temperature(
    schemes=[pyabc.AcceptanceRateScheme(),
             pyabc.ExpDecayFixedRatioScheme(alpha=0.6)])
kernel = pyabc.IndependentNormalKernel(keys=keys, var=noise_vector**2)
sampler = pyabc.sampler.RedisEvalParallelSampler(host="icb-mona", port=8776)
#sampler = pyabc.sampler.MulticoreEvalParallelSampler(daemon=False)

abc = pyabc.ABCSMC(model, prior, kernel, sampler=sampler,
                   acceptor=acceptor, eps=temperature, population_size=500)
db_path = "sqlite:///tumor2d_stoch_acc_v5_higherc.db"
abc.new(db_path, noisy_data)
abc.run()
muA=pyabc.RV("uniform", lim.lb, lim.interval_length)) # %% Define ABC-SMC model distanceP2_adaptive = pyabc.AdaptivePNormDistance( p=2, scale_function=pyabc.distance.root_mean_square_deviation) distanceP2 = pyabc.PNormDistance(p=2) kernel1 = pyabc.IndependentNormalKernel(var=1.0**2) # Measure distance and set it as minimum epsilon min_eps = distanceP2(obs_data_noisy, obs_data_raw) acceptor1 = pyabc.StochasticAcceptor() eps0 = pyabc.MedianEpsilon(50) eps1 = pyabc.Temperature() sampler0 = pyabc.sampler.MulticoreEvalParallelSampler(n_procs=8) def non_noisy_model(para): return solver.ode_model(para, add_noise=False) abc = pyabc.ABCSMC( models=non_noisy_model, parameter_priors=paraPrior, # acceptor=acceptor1, population_size=100, sampler=sampler0, distance_function=distanceP2,
def fit_cmd(self, observed_cmd, pop_size=1000, max_n_pop=np.inf,
            savename='starwave', min_acceptance_rate=0.0001, gamma=0.5,
            cores=1, accept='uniform', alpha=0.5,
            population_strategy='constant', statistic='output'):
    """Fit the observed CMD with pyabc's ABC-SMC.

    Parameters mirror the pyabc setup: `accept` selects uniform
    (quantile-epsilon) or stochastic (temperature/kernel) acceptance,
    `population_strategy` selects a constant or adaptive population size,
    and `statistic` names the summary key compared by the uniform distance.

    Returns the pyabc run history (also stored on `self.history`).

    Fixes relative to the previous version:
      * `np.Inf` (removed in NumPy 2.0) replaced by `np.inf` (same value);
      * the constructed population strategy is now actually passed to
        ABCSMC — previously the plain `pop_size` int was passed, silently
        discarding the 'adapt' strategy;
      * an unknown `accept` value now raises ValueError instead of a
        confusing NameError further down;
      * removed the unused `dummy_cmd` local.
    """
    if cores == 1:
        pyabc_sampler = pyabc.sampler.SingleCoreSampler()
    elif cores > 1:
        pyabc_sampler = pyabc.sampler.MulticoreEvalParallelSampler(
            n_procs=cores)
    else:
        print('invalid number of cores. defaulting to 1 core.')
        pyabc_sampler = pyabc.sampler.SingleCoreSampler()

    if population_strategy == 'constant':
        population_strategy = pyabc.populationstrategy.ConstantPopulationSize(
            pop_size)
    elif population_strategy == 'adapt':
        population_strategy = pyabc.populationstrategy.AdaptivePopulationSize(
            pop_size)

    scaled_observed_cmd = self.init_scaler(observed_cmd, gamma=gamma)
    obs = dict(output=self.kernel_representation(scaled_observed_cmd,
                                                 self.mapping))

    def simcmd(imf_type):
        # bind imf_type now to avoid the late-binding-closure pitfall
        return lambda params: self.cmd_sim(params, imf_type=imf_type)

    simulator = []
    prior = []
    for idx, imf in enumerate(self.imf_type):
        simulator.append(simcmd(imf))
        prior.append(self.params[idx].to_pyabc())

    if accept == 'uniform':
        acceptor = pyabc.acceptor.UniformAcceptor()
        eps = pyabc.epsilon.QuantileEpsilon(alpha=alpha)

        def distance(cmd1, cmd2):
            return np.sqrt(np.sum((cmd1[statistic] - cmd2[statistic])**2))
    elif accept == 'stochastic':
        acceptor = pyabc.StochasticAcceptor()
        eps = pyabc.Temperature()
        # estimate the simulator's noise variance from repeated runs
        base_params = make_params(self.imf_type[0]).get_values()
        sim_rep = np.asarray([
            self.cmd_sim(base_params, imf_type=self.imf_type[0])['output']
            for ii in range(25)
        ])
        var = np.var(sim_rep, 0)
        # NOTE(review): observations are stored under the 'output' key
        # above, but the kernel compares the 'input' key — looks like a
        # mismatch; confirm against the simulator's return dict.
        distance = pyabc.IndependentNormalKernel(var=var, keys=['input'])
    else:
        raise ValueError(
            "invalid accept mode: %r (expected 'uniform' or 'stochastic')"
            % (accept,))

    abc = pyabc.ABCSMC(simulator, prior, distance, sampler=pyabc_sampler,
                       population_size=population_strategy, eps=eps,
                       acceptor=acceptor)

    db_path = ("sqlite:///" + savename + ".db")
    abc.new(db_path, obs)
    self.history = abc.run(min_acceptance_rate=min_acceptance_rate,
                           max_nr_populations=max_n_pop)
    return self.history
# import everything to pyABC importer = pyabc.petab.AmiciPetabImporter(petab_problem, model, solver) # extract what we need from the importer prior = importer.create_prior() model = importer.create_model() kernel = importer.create_kernel() print(model(importer.get_nominal_parameters())) print(prior) sampler = pyabc.MulticoreEvalParallelSampler() temperature = pyabc.Temperature() acceptor = pyabc.StochasticAcceptor() abc = pyabc.ABCSMC(model, prior, kernel, eps=temperature, acceptor=acceptor, sampler=sampler, population_size=100) # AMICI knows the data, thus we don't pass them here abc.new(pyabc.create_sqlite_db_id(), {}) h = abc.run() pyabc.visualization.plot_kde_matrix_highlevel( h, limits=importer.get_bounds(), refval=importer.get_nominal_parameters(), refval_color='grey', names=importer.get_parameter_names(),
key: pyabc.RV("uniform", lb, ub - lb) for key, (lb, ub) in limits.items() }) redis_sampler = RedisEvalParallelSampler(host=args.ip, port=args.port, look_ahead=False, wait_for_all_samples=True) acceptor = pyabc.StochasticAcceptor(pdf_norm_method=ScaledPDFNorm()) kernel = pyabc.IndependentNormalKernel( var=[0.061763933333333] * 60 + [0.050105066666667] * 60, keys=[ "IdSumstat__YAP_nuclear_observable", "IdSumstat__YAP_total_observable" ]) eps = pyabc.Temperature() abc = pyabc.ABCSMC(model, prior, kernel, population_size=1000, acceptor=acceptor, eps=eps, all_accepted=False, sampler=redis_sampler) db_path = "sqlite:///" + "/home/emad/Documents/test_liver_model_delete/" + "test_14param_Felipe.db" history = abc.new(db_path, dict_data) abc.run(max_nr_populations=40) pyabc.visualization.plot_epsilons(history)