def test_create_db():
    """Check History creation on fresh, existing, and missing database files.

    Creating a History must succeed both with and without ``create`` on an
    existing (mostly empty) db file, and must raise ``ValueError`` with
    ``create=False`` once the file is gone.
    """
    # temporary sqlite file to play with
    db_file = tempfile.mkstemp(suffix=".db")[1]
    # creating a history on a fresh file works
    pyabc.History("sqlite:///" + db_file)
    # opening without create also works, the file exists (though mostly empty)
    pyabc.History("sqlite:///" + db_file, create=False)
    # once the file is removed, opening without create must fail
    os.remove(db_file)
    with pytest.raises(ValueError):
        pyabc.History("sqlite:///" + db_file, create=False)
def setup_module():
    """Set up module. Called before all tests here.

    Runs a small ABC-SMC analysis twice on the shared db, then loads one
    History per run into the module-level `histories`/`labels` lists.
    """
    # observed summary statistics derived from the true parameters
    observation = {'ss0': p_true['p0'], 'ss1': p_true['p1']}

    # uniform prior on each parameter over its limits
    rvs = {}
    for key in p_true:
        lower, upper = limits[key]
        rvs[key] = pyabc.RV('uniform', lower, upper - lower)
    prior = pyabc.Distribution(**rvs)

    distance = pyabc.PNormDistance(p=2)
    n_history = 2
    sampler = pyabc.sampler.MulticoreEvalParallelSampler(n_procs=2)

    # run the analysis n_history times, each appending a run to the db
    for _ in range(n_history):
        abc = pyabc.ABCSMC(
            model, prior, distance, population_size=100, sampler=sampler)
        abc.new(db_path, observation)
        abc.run(minimum_epsilon=.1, max_nr_populations=3)

    # load each run (ids are 1-based in the db) for later use
    for j in range(n_history):
        history = pyabc.History(db_path)
        history.id = j + 1
        histories.append(history)
        labels.append("Some run " + str(j))
def test_db_import(script_runner):
    """Import an outdated database, assert import raises, and then convert."""
    db_file = os.path.join(tempfile.gettempdir(), "pyabc_test_migrate.db")
    # the database stems from a previous pyabc version, so a direct
    # import is expected to fail
    with pytest.raises(AssertionError):
        pyabc.History("sqlite:///" + db_file)
    # run the migration script in place
    ret = script_runner.run('abc-migrate', '--src', db_file, '--dst', db_file)
    assert ret.success
    # after migration, loading and reading should succeed
    h = pyabc.History("sqlite:///" + db_file)
    h.get_weighted_sum_stats()
    # clean up the migrated file
    os.remove(db_file)
prior_distribution = "loguniform"
print(prior_distribution)

# one prior per candidate model (1..5)
para_prior1 = para_prior(lim, prior_distribution, 1)
para_prior2 = para_prior(lim, prior_distribution, 2)
para_prior3 = para_prior(lim, prior_distribution, 3)
para_prior4 = para_prior(lim, prior_distribution, 4)
para_prior5 = para_prior(lim, prior_distribution, 5)

# %% Load database
# change database name
db_path = "sqlite:///db/model5_super.db"
history = pyabc.History(db_path)
print("ID: %d, generations: %d" % (history.id, history.max_t))

# %% Load population
# NOTE(review): despite the original "last population" comment this loads
# generation max_t - 5, not the final one -- confirm the offset is intended
df, w = history.get_distribution(t=history.max_t - 5)

# PCA on the posterior sample. FIX: iterated_power must be 'auto' or a
# non-negative int -- the previous value 'full' is rejected by
# scikit-learn's parameter validation (and is only used by the
# 'randomized' solver anyway, so 'auto' does not change the result here).
pca = PCA(n_components=12, svd_solver='full', iterated_power='auto')
pca.fit(df)
# plt.plot(pca.explained_variance_ratio_)
# plt.show()
plt.figure(figsize=(11, 5))
def client():
    """A fake server client.

    Wires a History into the Flask app config and yields a test client.
    """
    server.app.config["HISTORY"] = pyabc.History(db_path)
    with server.app.test_client() as test_client:
        yield test_client
prior_distribution = "loguniform"
print(prior_distribution)

# one prior per candidate model (1..5)
(para_prior1, para_prior2, para_prior3,
 para_prior4, para_prior5) = (
    para_prior(lim, prior_distribution, m) for m in range(1, 6))

# %% Load database
# change database name
db_path = "sqlite:///db/model5_24_more.db"
history = pyabc.History(db_path)
print("ID: %d, generations: %d" % (history.id, history.max_t))

# %% Plot
solver = ODESolver()
# change model name
solver.ode_model = solver.ode_model5
result_data(history, solver, nr_population=history.max_t, savefig=True)
# change prior name
result_plot(history, None, para_prior5, history.max_t - 5, savefig=False)
solver = ODESolver()

# simulate noiseless target data on the default and experimental time grids
solver.time_point = solver.time_point_default
obs_data_raw_s = solver.ode_model1(para_true1, flatten=False, add_noise=False)
solver.time_point = solver.time_point_exp
# NOTE(review): this call uses solver.ode_model while the one above uses
# ode_model1 -- confirm the alias is set up as intended before this runs
obs_data_raw_s_less = solver.ode_model(
    para_true1, flatten=False, add_noise=False)
# print("Target data")
# print(obs_data_noisy_s)

# %% Load database
db_path = "sqlite:///db/abcsmc_test.db"
history = pyabc.History(db_path)
print("ID: %d, generations: %d" % (history.id, history.max_t))

# %% Plot
# experimental measurements: first column is time, the rest are observables
raw_data_path = os.path.abspath(os.curdir) + "/data/rawData.csv"
raw_data = pd.read_csv(raw_data_path).astype("float32")
time_points: object = raw_data.iloc[:, 0].to_numpy()
exp_data = arr2d_to_dict(raw_data.iloc[:, 1:].to_numpy())
exp_data_s = {
    key: np.array(values)
    for key, values in raw_data.iloc[:, 1:].to_dict(orient='list').items()
}
psize = pop_sizes[-1] eps_list = [8, 4, 2, 1, 0.7, 0.5, 0.33, 0.25] eps = pyabc.ListEpsilon(eps_list) iters_LA = 25 iters_ORI = 25 iters_STAT = 3 # In[3]: histories_LA = [] for i in range(iters_LA): histories_LA.append( pyabc.History("sqlite:///" + os.path.join( basepath, "results/Var" + str(sleepvars[0]) + "/database", "DYNLA" + str(nodes) + "_" + str(psize) + "_" + str(i) + ".db"))) histories_ORI = [] for i in range(iters_ORI): histories_ORI.append( pyabc.History("sqlite:///" + os.path.join( basepath, "results/Var" + str(sleepvars[0]) + "/database", "ORI" + str(nodes) + "_" + str(psize) + "_" + str(i) + ".db"))) histories_STAT = [] for i in range(iters_STAT): histories_STAT.append( pyabc.History("sqlite:///" + os.path.join( basepath, "results/Var" + str(sleepvars[0]) + "/database", "STATIC" + str(nodes) + "_" + str(psize) + "_" +
import pyabc
import os
import matplotlib.pyplot as plt
from study_abc_noise.model import NonIdAbleModelVars as ModelVars

mv = ModelVars()

# collect every sqlite database produced by previous runs in the cwd
db_files = [f for f in os.listdir('.') if os.path.isfile(f) and "db_" in f]
print(f"Using db files {db_files}")

histories = []
labels = []
for db_file in db_files:
    # run identifier is encoded in the file name after '__'
    id_ = db_file.split('__')[1]
    h = pyabc.History("sqlite:///" + db_file)
    h.id = 1
    histories.append(h)
    labels.append(id_)
    # parameter of the first particle of the last population; overwritten
    # each iteration, so the value after the loop stems from the last file
    gt_par = h.get_population(t=-1).get_list()[0].parameter

# cross-run diagnostics
pyabc.visualization.plot_sample_numbers(histories, labels)
plt.savefig("samples.png")
pyabc.visualization.plot_epsilons(histories, labels, scale="log10")
plt.savefig("epsilons.png")

# per-run posterior plots
for h, label in zip(histories, labels):
    pyabc.visualization.plot_histogram_matrix(h, bins=100)
    plt.savefig("hist_" + label + ".png")
    df, w = h.get_distribution()
    pyabc.visualization.plot_kde_matrix(df, w, refval=gt_par)  # , limits=mv.limits)