def test_simple_admixture_subsampling(folded, n_lins):
    """Subsampling consistency check on the simple admixture demography.

    `folded` and `n_lins` are pytest fixture parameters; `n_lins` is not
    used directly here but keeps the parametrization grid uniform.
    """
    demo = simple_admixture_demo()
    check_subsampling(demo, {"b": 2, "a": 3}, 3, folded=folded)
def test_subconfigs(fold, normalized):
    """Expected SFS on a random subset of configs must match the values
    computed for those same configs within the full config list."""
    demo = simple_admixture_demo()
    num_bases = 1000
    mu = 1.
    n_loci = 1000
    sampled_n_dict = {"a": 4, "b": 5}

    # Simulate data and pull out its (possibly folded) configuration list.
    sfs = demo.simulate_data(
        muts_per_gen=mu / num_bases,
        recoms_per_gen=0,
        length=num_bases,
        num_replicates=n_loci,
        sampled_n_dict=sampled_n_dict)._sfs
    configs = sfs.fold().configs if fold else sfs.configs

    demo = demo._get_demo(sampled_n_dict)

    # Pick a proper, non-empty subset of config indices (about half of them).
    n_configs = len(configs)
    subset_size = int(n_configs / 2) + 1
    subset_indices = np.array(random.sample(list(range(n_configs)), subset_size))
    assert 0 < len(subset_indices) < n_configs

    full_vals = momi.expected_sfs(
        demo, configs, normalized=normalized, folded=fold)[subset_indices]
    subset_configs = momi.data.configurations._ConfigList_Subset(
        configs, subset_indices)
    subset_vals = momi.expected_sfs(
        demo, subset_configs, normalized=normalized, folded=fold)

    assert np.allclose(full_vals, subset_vals)
def test_pseudoinverse():
    """A NoLookdownDemography must reproduce the same SFS tensor products
    as the standard demography, while having strictly fewer lineages at
    at least one node (and never more)."""
    base_demo = simple_admixture_demo()._get_demo({"b": 2, "a": 3})
    no_lookdown = NoLookdownDemography(base_demo)

    n_vecs = 20
    rand_vecs = [np.random.normal(size=(n_vecs, n + 1))
                 for n in base_demo.sampled_n]

    vals_base = expected_sfs_tensor_prod(rand_vecs, base_demo)
    vals_no_lookdown = expected_sfs_tensor_prod(rand_vecs, no_lookdown)
    assert np.allclose(vals_base, vals_no_lookdown)

    # Lineage counts per node: no-lookdown should never exceed the base
    # demography, and should be strictly smaller somewhere.
    lineage_deltas = np.array(
        [base_demo._n_at_node(v) - no_lookdown._n_at_node(v)
         for v in base_demo._G])
    assert all(lineage_deltas <= 0)
    assert any(lineage_deltas < 0)
def test_constructor():
    """Demography built from (graph structure, differentiable part) must
    match the original; omitting the differentiable part must fail.

    Fix: the original used a bare ``except:``, which also swallows
    ``KeyboardInterrupt``/``SystemExit`` and could mask an aborted test
    run as a pass. Narrowed to ``except Exception``.
    """
    pre_demo = simple_admixture_demo()
    demo = pre_demo._get_demo({"b": 2, "a": 3})

    # Reconstructing from the pieces should give identical branch lengths.
    demo2 = momi.demography.Demography(demo._get_graph_structure(),
                                       demo._get_differentiable_part())
    assert np.allclose(expected_total_branch_len(demo),
                       expected_total_branch_len(demo2))

    # make sure it fails if we don't pass in the array values
    demo3 = momi.demography.Demography(demo._get_graph_structure())
    try:
        expected_total_branch_len(demo3)
    except Exception:
        return
    assert False
def test_count_subsets():
    """subsample_probs must agree with the hypergeometric PMF.

    For a random sub-configuration, the probability of drawing it from
    each observed config (sampling without replacement) is a product of
    per-population hypergeometric probabilities.
    """
    demo = simple_admixture_demo()
    num_bases = 1000
    mu = 1.0
    num_replicates = 100
    data = demo.simulate_data(
        muts_per_gen=mu / num_bases,
        recoms_per_gen=0,
        length=num_bases,
        num_replicates=num_replicates,
        sampled_n_dict={"b": 2, "a": 3}).extract_sfs(None)

    # Draw a random sub-configuration: for each population, choose a
    # subsample size and split it into [ancestral, derived] counts.
    subconfig = []
    for n in data.sampled_n:
        sub_n = random.randrange(n + 1)
        derived = random.randrange(sub_n + 1)
        subconfig.append([sub_n - derived, derived])

    # Expected probability per config = product over populations of
    # P(drawing `derived` derived alleles in a+d draws | config counts).
    expected = np.ones(len(data.configs))
    for pop_idx, (anc, der) in enumerate(subconfig):
        pop_counts = data.configs.value[:, pop_idx, :]
        expected *= scipy.stats.hypergeom.pmf(
            der, pop_counts.sum(axis=1), pop_counts[:, 1], anc + der)

    assert np.allclose(expected, data.configs.subsample_probs(subconfig))
def test_admixture_tmrca():
    """TMRCA check on the admixture demography with samples a=3, b=6."""
    sampled = {"a": 3, "b": 6}
    check_tmrca(simple_admixture_demo()._get_demo(sampled))
def test_admixture_demo_numsnps():
    """Expected SNP count check for the admixture demography."""
    sample_sizes = {"a": 1, "b": 2}
    check_num_snps(sample_sizes, simple_admixture_demo(), 1000.0, 1.0)
def test_admixture_demo_normalization():
    """Normalization check (without error matrices), 3 samples per pop."""
    demo = simple_admixture_demo()._get_demo({"a": 3, "b": 3})
    check_demo_normalization(demo, error_matrices=False)
def test_admixture_demo_rank1tensor():
    """Random rank-1 tensor check on the admixture demography."""
    full_demo = simple_admixture_demo()._get_demo({"a": 4, "b": 5})
    check_random_tensor(full_demo)