def test_uniform_acceptor():
    """Check UniformAcceptor acceptance w.r.t. the epsilon schedule."""

    def abs_dist(sample, observed):
        # L1 distance over the keys of the observed data
        return sum(abs(sample[key] - observed[key]) for key in observed)

    distance = pyabc.SimpleFunctionDistance(abs_dist)
    acceptor = pyabc.UniformAcceptor()
    eps = pyabc.ListEpsilon([1, 4, 2])
    x = {'s0': 1.5}
    x_0 = {'s0': 0}

    # at t=2 the threshold is 2 and the distance is 1.5, so accept
    ret = acceptor(
        distance_function=distance, eps=eps, x=x, x_0=x_0, t=2, par=None)
    assert ret.accept

    # now let's test again, including previous time points: with the complete
    # history the sample must also pass eps at t=0 (= 1 < 1.5), so reject
    acceptor = pyabc.UniformAcceptor(use_complete_history=True)
    ret = acceptor(
        distance_function=distance, eps=eps, x=x, x_0=x_0, t=2, par=None)
    assert not ret.accept
def test_redis_look_ahead_basic():
    """Test the redis sampler in look-ahead mode.

    NOTE(review): this function previously shared the name
    ``test_redis_look_ahead`` with a later definition in this file, so it was
    shadowed and never collected by pytest. Renamed so both tests run; the
    ``test_`` prefix keeps it discoverable.
    """
    model, prior, distance, obs = basic_testcase()
    eps = pyabc.ListEpsilon([20, 10, 5])
    # spice things up with an adaptive population size
    pop_size = pyabc.AdaptivePopulationSize(
        start_nr_particles=50, mean_cv=0.5, max_population_size=50)
    with tempfile.NamedTemporaryFile(mode='w', suffix='.csv') as fh:
        sampler = RedisEvalParallelSamplerServerStarter(
            look_ahead=True, look_ahead_delay_evaluation=False,
            log_file=fh.name)
        try:
            abc = pyabc.ABCSMC(
                model, prior, distance, sampler=sampler,
                population_size=pop_size, eps=eps)
            abc.new(pyabc.create_sqlite_db_id(), obs)
            h = abc.run(max_nr_populations=3)
        finally:
            # always release the redis server, even if the run fails
            sampler.shutdown()

        assert h.n_populations == 3

        # read log file: look-ahead samples were produced and accepted, but
        # none were preliminary (delayed evaluation is switched off)
        df = pd.read_csv(fh.name, sep=',')
        assert (df.n_lookahead > 0).any()
        assert (df.n_lookahead_accepted > 0).any()
        assert (df.n_preliminary == 0).all()
def test_redis_look_ahead():
    """Test the redis sampler in look-ahead mode."""
    model, prior, distance, obs = basic_testcase()
    eps = pyabc.ListEpsilon([20, 10, 5])
    # spice things up with an adaptive population size
    adaptive_pop_size = pyabc.AdaptivePopulationSize(
        start_nr_particles=50, mean_cv=0.5, max_population_size=50)

    with tempfile.NamedTemporaryFile(mode='w', suffix='.csv') as log_fh:
        sampler = RedisEvalParallelSamplerServerStarter(
            look_ahead=True,
            look_ahead_delay_evaluation=False,
            log_file=log_fh.name,
        )
        try:
            abc = pyabc.ABCSMC(
                model,
                prior,
                distance,
                sampler=sampler,
                population_size=adaptive_pop_size,
                eps=eps,
            )
            abc.new(pyabc.create_sqlite_db_id(), obs)
            h = abc.run(max_nr_populations=3)
        finally:
            sampler.shutdown()

        assert h.n_populations == 3

        # read log file
        df = pd.read_csv(log_fh.name, sep=',')
        assert (df.n_lookahead > 0).any()
        assert (df.n_lookahead_accepted > 0).any()
        assert (df.n_preliminary == 0).all()

        # check history proposal ids
        for t in range(0, h.max_t + 1):
            pop = h.get_population(t=t)
            n_particles = len(pop)
            # particles stemming from look-ahead proposals carry id -1
            n_lookahead_pop = sum(
                1 for p in pop.particles if p.proposal_id == -1)
            expected = min(
                n_particles,
                int(df.loc[df.t == t, 'n_lookahead_accepted']),
            )
            assert expected == n_lookahead_pop
def test_listepsilon():
    """Querying a ListEpsilon past its defined time points must raise."""
    eps = pyabc.ListEpsilon([3.5, 2.3, 1, 0.3])
    # only t = 0..3 have thresholds, so t = 4 is out of range
    with pytest.raises(Exception):
        eps(4)
matplotlib.rc('font', **font) # In[2]: nodes = 8 noisevar = 0.03 sleepvars = [1, 1.5, 2] basepath = "/p/home/jusers/reck1/juwels/scripts/Batch_pyABC/programs/ODE" pop_sizes = [64, 256, 1024, 4096] psize = pop_sizes[-1] eps_list = [8, 4, 2, 1, 0.7, 0.5, 0.33, 0.25] eps = pyabc.ListEpsilon(eps_list) iters_LA = 25 iters_ORI = 25 iters_STAT = 3 # In[3]: histories_LA = [] for i in range(iters_LA): histories_LA.append( pyabc.History("sqlite:///" + os.path.join( basepath, "results/Var" + str(sleepvars[0]) + "/database", "DYNLA" + str(nodes) + "_" + str(psize) + "_" + str(i) + ".db")))