def test_adaptivepnorm_all_particles():
    """Test using rejected particles or not for weighting."""
    samples = [
        {'s1': -1, 's2': -1, 's3': -1},
        {'s1': -1, 's2': 0, 's3': 1},
        {'s1': -2, 's2': 0.5, 's3': 3},
    ]
    abc = MockABC(samples, accepted=[True, True, False])

    x_0 = {'s1': 0, 's2': 0, 's3': 1}
    x_1 = {'s1': 0.5, 's2': 0.4, 's3': -5}

    # Scale weights computed from all particles vs. only the accepted ones
    # should lead to different distance values.
    dist_all = AdaptivePNormDistance(all_particles_for_scale=True)
    dist_all.initialize(0, abc.sample_from_prior, x_0=x_0, total_sims=0)

    dist_acc = AdaptivePNormDistance(all_particles_for_scale=False)
    dist_acc.initialize(0, abc.sample_from_prior, x_0=x_0, total_sims=0)

    assert dist_all(x_1, x_0, t=0) != dist_acc(x_1, x_0, t=0)
def test_adaptivepnormdistance():
    """Only tests basic running.

    NOTE(review): a later function in this file re-uses this exact name,
    so this definition is shadowed and never collected by pytest —
    consider renaming one of the two.
    """
    abc = MockABC([
        {'s1': -1, 's2': -1, 's3': -1},
        {'s1': -1, 's2': 0, 's3': 1},
    ])
    x_0 = {'s1': 0, 's2': 0, 's3': 1}

    scale_functions = [
        median_absolute_deviation,
        mean_absolute_deviation,
        standard_deviation,
        bias,
        root_mean_square_deviation,
        median_absolute_deviation_to_observation,
        mean_absolute_deviation_to_observation,
        combined_median_absolute_deviation,
        combined_mean_absolute_deviation,
        standard_deviation_to_observation,
    ]

    # basic running with each scale function
    for scale_function in scale_functions:
        dist = AdaptivePNormDistance(scale_function=scale_function)
        dist.initialize(0, abc.sample_from_prior, x_0=x_0)
        dist(abc.sample_from_prior()[0], abc.sample_from_prior()[1], t=0)

    # basic running with a maximum weight ratio in addition
    for scale_function in scale_functions:
        dist = AdaptivePNormDistance(
            scale_function=scale_function, max_weight_ratio=20)
        dist.initialize(0, abc.sample_from_prior, x_0=x_0)
        dist(abc.sample_from_prior()[0], abc.sample_from_prior()[1], t=0)
def test_store_weights():
    """Test whether storing distance weights works.

    For both ``AdaptivePNormDistance`` and ``AdaptiveAggregatedDistance``,
    check that the per-generation weights written to the log file match
    the in-memory weights.
    """
    # local import: only needed for fd/file cleanup in this test
    import os

    abc = MockABC([{'s1': -1, 's2': -1, 's3': -1},
                   {'s1': -1, 's2': 0, 's3': 1}])
    x_0 = {'s1': 0, 's2': 0, 's3': 1}

    # mkstemp returns an OPEN file descriptor alongside the path; close it
    # immediately so the handle is not leaked (previously it was left open).
    fd, weights_file = tempfile.mkstemp(suffix=".json")
    os.close(fd)

    def distance0(x, x_0):
        return abs(x['s1'] - x_0['s1'])

    def distance1(x, x_0):
        return np.sqrt((x['s2'] - x_0['s2'])**2)

    try:
        for distance in [
            AdaptivePNormDistance(log_file=weights_file),
            AdaptiveAggregatedDistance(
                [distance0, distance1], log_file=weights_file),
        ]:
            distance.initialize(0, abc.sample_from_prior, x_0=x_0)
            distance.update(1, abc.sample_from_prior)
            distance.update(2, abc.sample_from_prior)

            weights = load_dict_from_json(weights_file)
            # one entry per generation 0..2
            assert set(weights.keys()) == {0, 1, 2}

            # the logged weights must equal the in-memory ones
            # (arrays are stored as lists in json)
            expected = distance.weights
            for key, val in expected.items():
                if isinstance(val, np.ndarray):
                    expected[key] = val.tolist()
            assert weights == expected
    finally:
        # remove the temporary log file (previously never cleaned up)
        os.remove(weights_file)
def test_adaptivepnormdistance_initial_weights():
    """Check that initial scale weights are used and later updated.

    NOTE(review): a later function in this file re-uses this exact name,
    so this definition is shadowed and never collected by pytest —
    consider renaming one of the two.
    """
    abc = MockABC([
        {'s1': -1, 's2': -1, 's3': -1},
        {'s1': -1, 's2': 0, 's3': 1},
    ])
    x_0 = {'s1': 0, 's2': 0, 's3': 1}

    # the provided initial weights should be taken over unchanged at t=0
    initial_weights = {'s1': 1, 's2': 2, 's3': 3}
    dist = AdaptivePNormDistance(p=2, initial_scale_weights=initial_weights)
    dist.initialize(0, abc.sample_from_prior, x_0=x_0, total_sims=0)
    assert (dist.scale_weights[0] == np.array([1, 2, 3])).all()

    # evaluating the distance applies the initial weights
    d = dist(abc.sumstats[0], abc.sumstats[1], t=0)
    expected = pow(sum([(2 * 1)**2, (3 * 2)**2]), 1 / 2)
    assert expected == d

    # after an update, the weights should have changed
    dist.update(1, abc.sample_from_prior, total_sims=0)
    assert (dist.scale_weights[1] != dist.scale_weights[0]).any()
def test_adaptivepnormdistance_initial_weights():
    """Check that initial weights are used and later updated.

    NOTE(review): this redefines an identically-named test earlier in this
    file, shadowing it — only this version is collected by pytest.
    Consider renaming one of the two.
    """
    abc = MockABC([
        {'s1': -1, 's2': -1, 's3': -1},
        {'s1': -1, 's2': 0, 's3': 1},
    ])
    x_0 = {'s1': 0, 's2': 0, 's3': 1}

    # the provided initial weights should be taken over unchanged at t=0
    initial_weights = {'s1': 1, 's2': 2, 's3': 3}
    dist = AdaptivePNormDistance(initial_weights=initial_weights)
    dist.initialize(0, abc.sample_from_prior, x_0=x_0)
    assert dist.weights[0] == initial_weights

    # evaluating the distance applies the initial weights
    d = dist(abc.sample_from_prior()[0], abc.sample_from_prior()[1], t=0)
    expected = pow(sum([(2 * 1)**2, (3 * 2)**2]), 1 / 2)
    assert expected == d

    # after an update, the weights should have changed
    dist.update(1, abc.sample_from_prior)
    assert dist.weights[1] != dist.weights[0]
def test_adaptivepnormdistance():
    """Only tests basic running.

    NOTE(review): this redefines an identically-named test earlier in this
    file, shadowing it — only this version is collected by pytest.
    Consider renaming one of the two.
    """
    # TODO it could be checked that the scale functions lead to the expected
    # values
    abc = MockABC([
        {'s1': -1, 's2': -1, 's3': -1},
        {'s1': -1, 's2': 0, 's3': 1},
    ])
    x_0 = {'s1': 0, 's2': 0, 's3': 1}

    scale_functions = [
        median_absolute_deviation,
        mean_absolute_deviation,
        standard_deviation,
        bias,
        root_mean_square_deviation,
        std_or_rmsd,
        median_absolute_deviation_to_observation,
        mean_absolute_deviation_to_observation,
        combined_median_absolute_deviation,
        mad_or_cmad,
        combined_mean_absolute_deviation,
        standard_deviation_to_observation,
    ]

    # each scale function should yield non-trivial weights
    for fun in scale_functions:
        dist = AdaptivePNormDistance(scale_function=fun)
        dist.initialize(0, abc.sample_from_prior, x_0=x_0, total_sims=0)
        dist(abc.sumstats[0], abc.sumstats[1], t=0)
        assert (dist.scale_weights[0] != np.ones(3)).any()

    # with a maximum scale weight ratio, the ratio bound must be respected
    for fun in scale_functions:
        dist = AdaptivePNormDistance(
            scale_function=fun, max_scale_weight_ratio=20)
        dist.initialize(0, abc.sample_from_prior, x_0=x_0, total_sims=0)
        dist(abc.sumstats[0], abc.sumstats[1], t=0)

        w = dist.scale_weights[0]
        assert (w != np.ones(3)).any()
        # ratio of largest to smallest non-zero weight stays within bound
        assert np.max(w) / np.min(w[~np.isclose(w, 0)]) <= 20