def test_stochastic_acceptor():
    """Smoke test: ABCSMC with a stochastic acceptor in two configurations."""
    def model(par):
        return {'s0': par['p0'] + np.array([0.3, 0.7])}

    x_0 = {'s0': np.array([0.4, -0.6])}
    prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))
    distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))

    # configuration 1: fixed initial temperature, max-found pdf normalization
    acceptor = pyabc.StochasticAcceptor(
        pdf_norm_method=pyabc.pdf_norm_max_found)
    eps = pyabc.Temperature(initial_temperature=1)
    abc = pyabc.ABCSMC(model, prior, distance, eps=eps,
                       acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=1, minimum_epsilon=1.)

    # configuration 2: no initial temperature and adaptive normalization c
    acceptor = pyabc.StochasticAcceptor()
    eps = pyabc.Temperature()
    abc = pyabc.ABCSMC(model, prior, distance, eps=eps,
                       acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3, minimum_epsilon=1.)
def test_pdf_norm_methods_integration():
    """Test integration of pdf normalization methods in ABCSMC."""
    def model(par):
        return {'s0': par['p0'] + np.array([0.3, 0.7])}

    x_0 = {'s0': np.array([0.4, -0.6])}

    pdf_norms = [
        pyabc.pdf_norm_max_found,
        pyabc.pdf_norm_from_kernel,
        pyabc.ScaledPDFNorm(),
    ]
    for pdf_norm in pdf_norms:
        # just run each normalization method through a short analysis
        acceptor = pyabc.StochasticAcceptor(pdf_norm_method=pdf_norm)
        eps = pyabc.Temperature()
        distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))
        prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))
        abc = pyabc.ABCSMC(model, prior, distance, eps=eps,
                           acceptor=acceptor, population_size=20)
        abc.new(pyabc.create_sqlite_db_id(), x_0)
        abc.run(max_nr_populations=3)
def test_default_eps():
    """Check the default minimum epsilon: 0.0 in the usual (deterministic)
    setting, 1.0 in the noisy (stochastic acceptor) setting."""
    def model(par):
        return {'s0': par['p0'] + np.random.random(),
                's1': np.random.random()}

    x_0 = {'s0': 0.4, 's1': 0.6}
    prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))

    # usual setting
    abc = pyabc.ABCSMC(model, prior, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3)
    assert abc.minimum_epsilon == 0.0

    # noisy setting
    acceptor = pyabc.StochasticAcceptor()
    eps = pyabc.Temperature()
    distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))
    abc = pyabc.ABCSMC(model, prior, distance, eps=eps,
                       acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3)
    assert abc.minimum_epsilon == 1.0
def test_stochastic_acceptor():
    """Test the stochastic acceptor's features."""
    def model(par):
        return {'s0': par['p0'] + np.array([0.3, 0.7])}

    x_0 = {'s0': np.array([0.4, -0.6])}
    prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))
    distance = pyabc.IndependentNormalKernel(var=np.array([1, 1]))

    # store pnorms in a temporary log file
    pnorm_file = tempfile.mkstemp(suffix=".json")[1]
    acceptor = pyabc.StochasticAcceptor(
        pdf_norm_method=pyabc.pdf_norm_max_found,
        log_file=pnorm_file)
    eps = pyabc.Temperature(initial_temperature=1)

    # just run
    abc = pyabc.ABCSMC(model, prior, distance, eps=eps,
                       acceptor=acceptor, population_size=10)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    h = abc.run(max_nr_populations=1, minimum_epsilon=1.)

    # check pnorms
    pnorms = pyabc.storage.load_dict_from_json(pnorm_file)
    assert len(pnorms) == h.max_t + 2  # +1 t0, +1 one final update
    assert isinstance(list(pnorms.keys())[0], int)
    assert isinstance(pnorms[0], float)

    # use no initial temperature and adaptive c
    acceptor = pyabc.StochasticAcceptor()
    eps = pyabc.Temperature()
    abc = pyabc.ABCSMC(model, prior, distance, eps=eps,
                       acceptor=acceptor, population_size=20)
    abc.new(pyabc.create_sqlite_db_id(), x_0)
    abc.run(max_nr_populations=3)
# Method of a custom pdf-normalization class (class header not visible here;
# instantiated below as PDFNorm()). Used as pdf_norm_method of a
# StochasticAcceptor.
def __call__(self, prev_pdf_norm, get_weighted_distances, prev_temp,
             acceptance_rate, **kwargs):
    # Baseline: maximum pdf value found so far.
    pdf_norm = pyabc.pdf_norm_max_found(
        prev_pdf_norm=prev_pdf_norm,
        get_weighted_distances=get_weighted_distances)
    print(" best: ", pdf_norm)
    # In the first generation (prev_temp is None), or while the acceptance
    # rate is still >= 0.1 and the offset has never been triggered, use the
    # plain max-found normalization.
    if prev_temp is None or (acceptance_rate >= 0.1 and not self.hit):
        return pdf_norm
    self.hit = True  # latch: once the rate dropped below 0.1, keep offsetting
    # Offset the norm downward by one order of magnitude of density at a
    # reduced temperature, but never below the previous norm.
    temp = 0.6 * prev_temp
    offset = temp * np.log(10)
    used_norm = pdf_norm - offset
    used_norm = max(prev_pdf_norm, used_norm)
    print(" offsetted: ", pdf_norm - offset)
    return used_norm

# --- driver script -----------------------------------------------------------
# NOTE(review): assumes limits, model, keys, noise_vector, noisy_data are
# defined earlier in the file — not visible here.
prior = pyabc.Distribution(**{key: pyabc.RV("uniform", a, b-a)
                              for key, (a,b) in limits.items()})
acceptor = pyabc.StochasticAcceptor(log_file="acc_log_v5_higherc.json",
                                    pdf_norm_method=PDFNorm())
temperature = pyabc.Temperature(schemes=[pyabc.AcceptanceRateScheme(),
                                         pyabc.ExpDecayFixedRatioScheme(alpha=0.6)])
kernel = pyabc.IndependentNormalKernel(keys=keys, var=noise_vector**2)
sampler = pyabc.sampler.RedisEvalParallelSampler(host="icb-mona", port=8776)
#sampler = pyabc.sampler.MulticoreEvalParallelSampler(daemon=False)
abc = pyabc.ABCSMC(model, prior, kernel, sampler=sampler, acceptor=acceptor,
                   eps=temperature, population_size=500)
db_path = "sqlite:///tumor2d_stoch_acc_v5_higherc.db"
abc.new(db_path, noisy_data)
abc.run()
# distanceP2 = pyabc.PNormDistance(p=2) sigma_n = 5.66 sigma_m = 4.59 sigma_b = 5.15 sigma_a = 2.42 mu = 0. a = 0.05 var_list = [(a * sigma_n) ** 2]*30 + [(a * sigma_m) ** 2]*30 + [(a * sigma_b) ** 2]*30 + [(a * sigma_a) ** 2]*30 distance_s = pyabc.IndependentNormalKernel(var=var_list) # Measure distance and set it as minimum epsilon # min_eps = distanceP2(obs_data_noisy, obs_data_raw) acceptor_s = pyabc.StochasticAcceptor() # acceptor_adpt = pyabc.UniformAcceptor(use_complete_history=True) # eps0 = pyabc.MedianEpsilon(50) eps_s = pyabc.epsilon.Temperature() # eps_fixed = pyabc.epsilon.ListEpsilon([50, 46, 43, 40, 37, 34, 31, 29, 27, 25, # 23, 21, 19, 17, 15, 14, 13, 12, 11, 10]) # transition0 = pyabc.transition.LocalTransition(k=50, k_fraction=None) # transition1 = pyabc.transition.GridSearchCV() sampler0 = pyabc.sampler.MulticoreEvalParallelSampler(n_procs=8) abc = pyabc.ABCSMC(models=solver.non_noisy_model, parameter_priors=paraPrior, acceptor=acceptor_s,
iBM=pyabc.RV("uniform", lim3.lb, lim3.interval_length), muB=pyabc.RV("uniform", lim3.lb, lim3.interval_length), sAM=pyabc.RV("uniform", lim.lb, lim.interval_length), muA=pyabc.RV("uniform", lim.lb, lim.interval_length)) # %% Define ABC-SMC model distanceP2_adaptive = pyabc.AdaptivePNormDistance( p=2, scale_function=pyabc.distance.root_mean_square_deviation) distanceP2 = pyabc.PNormDistance(p=2) kernel1 = pyabc.IndependentNormalKernel(var=1.0**2) # Measure distance and set it as minimum epsilon min_eps = distanceP2(obs_data_noisy, obs_data_raw) acceptor1 = pyabc.StochasticAcceptor() eps0 = pyabc.MedianEpsilon(50) eps1 = pyabc.Temperature() sampler0 = pyabc.sampler.MulticoreEvalParallelSampler(n_procs=8) def non_noisy_model(para): return solver.ode_model(para, add_noise=False) abc = pyabc.ABCSMC( models=non_noisy_model, parameter_priors=paraPrior, # acceptor=acceptor1,
import pyabc import sys from study_abc_noise.model import NonIdAblePrioredModelVars from study_abc_noise.util import create_sampler, get_timestamp from study_abc_noise.vars import AnalysisVars, Task mv = NonIdAblePrioredModelVars() # create analysis settings list_analysis_vars = [] for acceptor, id_ in [ (pyabc.UniformAcceptor(), "deterministic"), (pyabc.UniformAcceptor(), "noisy_model"), (pyabc.StochasticAcceptor(temp_schemes=[ pyabc.acceptor.scheme_acceptance_rate, pyabc.acceptor.scheme_decay ]), "stochastic_acceptor") ]: list_analysis_vars.append( AnalysisVars(get_acceptor=lambda acceptor=acceptor: acceptor, id_=id_)) # create tasks tasks = [] for analysis_vars in list_analysis_vars: tasks.append(Task.from_vars(analysis_vars, mv, 0)) # overwrite deterministic setting tasks[0].model = mv.get_model() tasks[0].eps_min = 0.0 # run for task in tasks:
def fit_cmd(self, observed_cmd, pop_size=1000, max_n_pop=np.inf,
            savename='starwave', min_acceptance_rate=0.0001, gamma=0.5,
            cores=1, accept='uniform', alpha=0.5,
            population_strategy='constant', statistic='output'):
    """Fit the observed CMD via pyabc's ABC-SMC and return the run history.

    Parameters
    ----------
    observed_cmd : observed color-magnitude diagram (array-like).
    pop_size : particle population size per generation.
    max_n_pop : maximum number of generations (np.inf = unlimited).
    savename : basename of the sqlite database file for the run.
    min_acceptance_rate : stop once the acceptance rate drops below this.
    gamma : scaling parameter forwarded to init_scaler.
    cores : 1 -> single-core sampler; >1 -> multicore sampler.
    accept : 'uniform' (p2 distance + quantile epsilon) or
        'stochastic' (noise-kernel acceptance with temperature schedule).
    alpha : quantile for QuantileEpsilon ('uniform' mode only).
    population_strategy : 'constant' or 'adapt' population-size strategy.
    statistic : key of the summary statistic compared between model output
        and observation.

    Returns
    -------
    pyabc.History of the completed run (also stored on self.history).

    Raises
    ------
    ValueError : unknown `accept` or `population_strategy` value.
    """
    # choose the sampler by core count
    if cores == 1:
        pyabc_sampler = pyabc.sampler.SingleCoreSampler()
    elif cores > 1:
        pyabc_sampler = pyabc.sampler.MulticoreEvalParallelSampler(
            n_procs=cores)
    else:
        print('invalid number of cores. defaulting to 1 core.')
        pyabc_sampler = pyabc.sampler.SingleCoreSampler()

    # resolve the population-size strategy; previously the constructed
    # strategy object was never handed to ABCSMC (population_size=pop_size
    # was passed instead), so 'adapt' silently behaved like 'constant'
    if population_strategy == 'constant':
        population_strategy = \
            pyabc.populationstrategy.ConstantPopulationSize(pop_size)
    elif population_strategy == 'adapt':
        population_strategy = \
            pyabc.populationstrategy.AdaptivePopulationSize(pop_size)
    else:
        raise ValueError(
            f"unknown population_strategy: {population_strategy!r}")

    scaled_observed_cmd = self.init_scaler(observed_cmd, gamma=gamma)
    obs = dict(output=self.kernel_representation(scaled_observed_cmd,
                                                 self.mapping))

    def simcmd(imf_type):
        # bind imf_type via the closure argument (avoids late binding)
        return lambda params: self.cmd_sim(params, imf_type=imf_type)

    # one simulator/prior pair per IMF type
    simulator = []
    prior = []
    for idx, imf in enumerate(self.imf_type):
        simulator.append(simcmd(imf))
        prior.append(self.params[idx].to_pyabc())

    if accept == 'uniform':
        acceptor = pyabc.acceptor.UniformAcceptor()
        eps = pyabc.epsilon.QuantileEpsilon(alpha=alpha)

        def distance(cmd1, cmd2):
            # Euclidean distance on the selected summary statistic
            return np.sqrt(np.sum((cmd1[statistic] - cmd2[statistic])**2))
    elif accept == 'stochastic':
        acceptor = pyabc.StochasticAcceptor()
        eps = pyabc.Temperature()
        # estimate per-bin simulator noise from repeated runs at a base
        # parameter set
        base_params = make_params(self.imf_type[0]).get_values()
        sim_rep = np.asarray([
            self.cmd_sim(base_params, imf_type=self.imf_type[0])[statistic]
            for _ in range(25)
        ])
        var = np.var(sim_rep, 0)
        # compare the same summary-statistic key that obs and the simulators
        # provide (was hard-coded to 'input', a key no model output carries)
        distance = pyabc.IndependentNormalKernel(var=var, keys=[statistic])
    else:
        raise ValueError(f"unknown acceptance mode: {accept!r}")

    abc = pyabc.ABCSMC(simulator, prior, distance, sampler=pyabc_sampler,
                       population_size=population_strategy, eps=eps,
                       acceptor=acceptor)
    db_path = "sqlite:///" + savename + ".db"
    abc.new(db_path, obs)
    self.history = abc.run(min_acceptance_rate=min_acceptance_rate,
                           max_nr_populations=max_n_pop)
    return self.history
# NOTE(review): `limits` is a dict created earlier in the file; `model`, `args`,
# `RedisEvalParallelSampler` and `ScaledPDFNorm` are likewise defined/imported
# above — not visible in this chunk.
# Parameter bounds (presumably log10-scale for K_M1; confirm against model).
limits["k7"] = (1, 4)
limits["k8"] = (1, 4)
limits["k2"] = (0, 2)
limits["K_M1"] = (-4, -2)
# Independent uniform priors over each (lb, ub) interval.
prior = pyabc.Distribution(**{
    key: pyabc.RV("uniform", lb, ub - lb)
    for key, (lb, ub) in limits.items()
})
# Distributed sampling via redis, plain (non-look-ahead) mode.
redis_sampler = RedisEvalParallelSampler(host=args.ip, port=args.port,
                                         look_ahead=False,
                                         wait_for_all_samples=True)
acceptor = pyabc.StochasticAcceptor(pdf_norm_method=ScaledPDFNorm())
# Noise kernel over the two observables, 60 time points each, with fixed
# per-observable variances.
kernel = pyabc.IndependentNormalKernel(
    var=[0.061763933333333] * 60 + [0.050105066666667] * 60,
    keys=[
        "IdSumstat__YAP_nuclear_observable",
        "IdSumstat__YAP_total_observable"
    ])
eps = pyabc.Temperature()
abc = pyabc.ABCSMC(model, prior, kernel, population_size=1000,
                   acceptor=acceptor, eps=eps, all_accepted=False,
                   sampler=redis_sampler)