def setup_module():
    """Set up module. Called before all tests here."""
    # create and run some model
    observation = {'ss0': p_true['p0'], 'ss1': p_true['p1']}
    prior = pyabc.Distribution(**{
        key: pyabc.RV('uniform', limits[key][0],
                      limits[key][1] - limits[key][0])
        for key in p_true.keys()})
    n_history = 2
    sampler = pyabc.sampler.MulticoreEvalParallelSampler(n_procs=2)

    for _ in range(n_history):
        log_file = tempfile.mkstemp(suffix=".json")[1]
        log_files.append(log_file)
        distance = pyabc.AdaptivePNormDistance(p=2, scale_log_file=log_file)
        abc = pyabc.ABCSMC(model, prior, distance, population_size=100,
                           sampler=sampler)
        abc.new(db_path, observation)
        abc.run(minimum_epsilon=0.1, max_nr_populations=3)

    for j in range(n_history):
        history = pyabc.History(db_path)
        history.id = j + 1
        histories.append(history)
        labels.append("Some run " + str(j))
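
# Hedged sketch (assumption): ``setup_module`` relies on module-level names
# (``p_true``, ``limits``, ``model``, ``db_path``, ``log_files``, ``histories``,
# ``labels``) that are defined elsewhere in this test file. A minimal,
# self-contained version consistent with the calls above could look like the
# following; the concrete parameter values are illustrative only.
import tempfile

import numpy as np
import pyabc

p_true = {'p0': 3, 'p1': 4}
limits = {'p0': (0, 5), 'p1': (1, 8)}
db_path = "sqlite:///" + tempfile.mkstemp(suffix=".db")[1]
log_files = []
histories = []
labels = []


def model(p):
    # noisy observables centered at the sampled parameters
    return {'ss0': p['p0'] + 0.1 * np.random.uniform(),
            'ss1': p['p1'] + 0.1 * np.random.uniform()}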
def test_redis_look_ahead_error():
    """Test whether the look-ahead mode fails as expected."""
    model, prior, distance, obs = basic_testcase()
    with tempfile.NamedTemporaryFile(mode='w', suffix='.csv') as fh:
        sampler = RedisEvalParallelSamplerServerStarter(
            look_ahead=True, look_ahead_delay_evaluation=False,
            log_file=fh.name)
        args_list = [
            {'eps': pyabc.MedianEpsilon()},
            {'distance_function': pyabc.AdaptivePNormDistance()},
        ]
        for args in args_list:
            if 'distance_function' not in args:
                args['distance_function'] = distance
            try:
                with pytest.raises(AssertionError) as e:
                    abc = pyabc.ABCSMC(
                        model, prior, sampler=sampler, population_size=10,
                        **args)
                    abc.new(pyabc.create_sqlite_db_id(), obs)
                    abc.run(max_nr_populations=3)
                assert "cannot be used in look-ahead mode" in str(e.value)
            finally:
                sampler.shutdown()
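
# Hedged sketch (assumption): ``basic_testcase`` is a helper defined elsewhere
# in this test module that returns a (model, prior, distance, observation)
# tuple. A minimal version consistent with its use above could be:
import numpy as np
import pyabc


def basic_testcase():
    """Return a simple model, prior, distance and observation."""
    def model(p):
        # single noisy summary statistic around the sampled parameter
        return {'s0': p['p0'] + 0.1 * np.random.normal()}

    prior = pyabc.Distribution(p0=pyabc.RV('uniform', -1, 2))
    distance = pyabc.PNormDistance(p=2)
    obs = {'s0': 0.1}
    return model, prior, distance, obs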
def test_pipeline(db_file):
    """Test whole pipeline using a learned summary statistic."""
    rng = np.random.Generator(np.random.PCG64(0))

    def model(p):
        return {"s0": p["p0"] + 1e-2 * rng.normal(size=2),
                "s1": rng.normal()}

    prior = pyabc.Distribution(p0=pyabc.RV("uniform", 0, 1))

    distance = pyabc.AdaptivePNormDistance(
        sumstat=PredictorSumstat(LinearPredictor(), fit_ixs={1, 3}),
    )

    data = {"s0": np.array([0.1, 0.105]), "s1": 0.5}

    # run a little analysis
    abc = pyabc.ABCSMC(model, prior, distance, population_size=100)
    abc.new("sqlite:///" + db_file, data)
    h = abc.run(max_total_nr_simulations=1000)

    # first iteration
    df0, w0 = h.get_distribution(t=0)
    off0 = abs(pyabc.weighted_mean(df0.p0, w0) - 0.1)

    # last iteration
    df, w = h.get_distribution()
    off = abs(pyabc.weighted_mean(df.p0, w) - 0.1)

    assert off0 > off

    # alternative run with simple distance
    distance = pyabc.PNormDistance()
    abc = pyabc.ABCSMC(model, prior, distance, population_size=100)
    abc.new("sqlite:///" + db_file, data)
    h = abc.run(max_total_nr_simulations=1000)

    df_comp, w_comp = h.get_distribution()
    off_comp = abs(pyabc.weighted_mean(df_comp.p0, w_comp) - 0.1)
    assert off_comp > off

    # alternative run with info weighting
    distance = pyabc.InfoWeightedPNormDistance(
        predictor=LinearPredictor(),
        fit_info_ixs={1, 3},
    )
    abc = pyabc.ABCSMC(model, prior, distance, population_size=100)
    abc.new("sqlite:///" + db_file, data)
    h = abc.run(max_total_nr_simulations=1000)

    df_info, w_info = h.get_distribution()
    off_info = abs(pyabc.weighted_mean(df_info.p0, w_info) - 0.1)
    assert off_comp > off_info
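
# Hedged sketch (assumption): ``db_file`` is a pytest fixture defined elsewhere
# in this module that yields a temporary sqlite database file path and removes
# it after the test. A minimal version could be:
import os
import tempfile

import pytest


@pytest.fixture
def db_file():
    """Provide a temporary database file and clean it up afterwards."""
    path = tempfile.mkstemp(suffix=".db")[1]
    try:
        yield path
    finally:
        if os.path.exists(path):
            os.remove(path)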
    lambdaN=pyabc.RV("uniform", lim3.lb, lim3.interval_length),
    kNB=pyabc.RV("uniform", lim3.lb, lim3.interval_length),
    muN=pyabc.RV("uniform", lim2.lb, lim2.interval_length),
    vNM=pyabc.RV("uniform", lim2.lb, lim2.interval_length),
    lambdaM=pyabc.RV("uniform", lim3.lb, lim3.interval_length),
    kMB=pyabc.RV("uniform", lim2.lb, lim2.interval_length),
    muM=pyabc.RV("uniform", lim2.lb, lim2.interval_length),
    sBN=pyabc.RV("uniform", lim3.lb, lim3.interval_length),
    iBM=pyabc.RV("uniform", lim3.lb, lim3.interval_length),
    muB=pyabc.RV("uniform", lim3.lb, lim3.interval_length),
    sAM=pyabc.RV("uniform", lim.lb, lim.interval_length),
    muA=pyabc.RV("uniform", lim.lb, lim.interval_length))

# %% Define ABC-SMC model

distanceP2_adaptive = pyabc.AdaptivePNormDistance(
    p=2, scale_function=pyabc.distance.root_mean_square_deviation)
distanceP2 = pyabc.PNormDistance(p=2)
kernel1 = pyabc.IndependentNormalKernel(var=1.0**2)

# Measure distance and set it as minimum epsilon
min_eps = distanceP2(obs_data_noisy, obs_data_raw)

acceptor1 = pyabc.StochasticAcceptor()
eps0 = pyabc.MedianEpsilon(50)
eps1 = pyabc.Temperature()
sampler0 = pyabc.sampler.MulticoreEvalParallelSampler(n_procs=8)


def non_noisy_model(para):