def test_shuffled_edges():
    d = copy.deepcopy(d_original)
    original_dwell = json.loads(arbplf_dwell(json.dumps(d)))
    d = copy.deepcopy(d_original)
    original_ll = json.loads(arbplf_ll(json.dumps(d)))
    d = copy.deepcopy(d_original)
    d['site_reduction'] = {'aggregation': 'sum'}
    original_em_update = json.loads(arbplf_em_update(json.dumps(d)))
    iter_count = 10
    for i in range(iter_count):
        d_shuffled, perm = _shuffle_edges(d_original)
        # the ll output does not have an edge column
        d = copy.deepcopy(d_shuffled)
        ll = json.loads(arbplf_ll(json.dumps(d)))
        assert_equal(ll, original_ll)
        d = copy.deepcopy(d_shuffled)
        dwell = json.loads(arbplf_dwell(json.dumps(d)))
        dwell_prime = _perm_output_edges(dwell, perm)
        dwell_prime['data'].sort()
        assert_equal(dwell_prime, original_dwell)
        d = copy.deepcopy(d_shuffled)
        d['site_reduction'] = {'aggregation': 'sum'}
        em_update = json.loads(arbplf_em_update(json.dumps(d)))
        em_update_prime = _perm_output_edges(em_update, perm)
        em_update_prime['data'].sort()
        assert_equal(em_update_prime, original_em_update)
def test_shuffled_nodes():
    d = copy.deepcopy(d_original)
    original_dwell = json.loads(arbplf_dwell(json.dumps(d)))
    d = copy.deepcopy(d_original)
    original_ll = json.loads(arbplf_ll(json.dumps(d)))
    d = copy.deepcopy(d_original)
    d['site_reduction'] = {'aggregation': 'sum'}
    original_em_update = json.loads(arbplf_em_update(json.dumps(d)))
    iter_count = 10
    for i in range(iter_count):
        d_shuffled = _shuffle_nodes(d_original)
        d = copy.deepcopy(d_shuffled)
        dwell = json.loads(arbplf_dwell(json.dumps(d)))
        assert_equal(dwell, original_dwell)
        d = copy.deepcopy(d_shuffled)
        ll = json.loads(arbplf_ll(json.dumps(d)))
        assert_equal(ll, original_ll)
        d = copy.deepcopy(d_shuffled)
        d['site_reduction'] = {'aggregation': 'sum'}
        em_update = json.loads(arbplf_em_update(json.dumps(d)))
        assert_equal(em_update, original_em_update)
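
# The two tests above rely on helpers defined elsewhere in this module.  As a
# rough sketch of the behavior they assume (hypothetical, not the actual
# implementations): _shuffle_edges(d) returns a deep copy of d whose 'edges'
# and 'edge_rate_coefficients' rows have been permuted together, plus the
# permutation itself, and _perm_output_edges(out, perm) rewrites the 'edge'
# column of a query output back onto the original edge labels, roughly:
#
#     k = out['columns'].index('edge')
#     for row in out['data']:
#         row[k] = perm[row[k]]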
def mydwell(d):
    """
    Provides a dict -> pandas.DataFrame wrapper of the pure JSON arbplf_dwell.
    """
    s = arbplf_dwell(json.dumps(d))
    df = pd.read_json(StringIO(s), orient='split', precise_float=True)
    return df
def test_heterogeneous_edge_rates():
    # try changing one of the edge rate coefficients
    d = {
        "model_and_data": {
            "edges": [[0, 1], [1, 2]],
            "edge_rate_coefficients": [1, 2],
            "rate_matrix": [[0, 1], [0, 0]],
            "probability_array": [[[1, 0], [1, 1], [1, 0]]]},
        "site_reduction": {"aggregation": "only"}}
    actual_marginal = json.loads(arbplf_marginal(json.dumps(d)))
    assert_equal(actual_marginal, desired_marginal)
    g = copy.deepcopy(d)
    g['trans_reduction'] = dict(selection=[[0, 1], [1, 0]])
    actual_trans = json.loads(arbplf_trans(json.dumps(g)))
    assert_equal(actual_trans, desired_trans)
    actual_ll = json.loads(arbplf_ll(json.dumps(d)))
    desired_ll = {"columns": ["value"], "data": [[-3.0]]}
    assert_equal(actual_ll, desired_ll)
    actual_em_update = json.loads(arbplf_em_update(json.dumps(d)))
    assert_equal(actual_em_update, desired_em_update)
    actual_dwell = json.loads(arbplf_dwell(json.dumps(d)))
    assert_equal(actual_dwell, desired_dwell)
def test_edges_are_not_preordered():
    # Try switching the order of the edges in the input
    # and increasing the birth rate in the rate matrix.
    d = {
        "model_and_data": {
            "edges": [[1, 2], [0, 1]],
            "edge_rate_coefficients": [1, 2],
            "rate_matrix": [[0, 2], [0, 0]],
            "probability_array": [[[1, 0], [1, 1], [1, 0]]]},
        "site_reduction": {"aggregation": "only"}}
    actual_marginal = json.loads(arbplf_marginal(json.dumps(d)))
    assert_equal(actual_marginal, desired_marginal)
    g = copy.deepcopy(d)
    g['trans_reduction'] = dict(selection=[[0, 1], [1, 0]])
    actual_trans = json.loads(arbplf_trans(json.dumps(g)))
    assert_equal(actual_trans, desired_trans)
    actual_ll = json.loads(arbplf_ll(json.dumps(d)))
    desired_ll = {"columns": ["value"], "data": [[-6.0]]}
    assert_equal(actual_ll, desired_ll)
    actual_em_update = json.loads(arbplf_em_update(json.dumps(d)))
    assert_equal(actual_em_update, desired_em_update)
    actual_dwell = json.loads(arbplf_dwell(json.dumps(d)))
    assert_equal(actual_dwell, desired_dwell)
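
# Sanity note on the hard-coded log likelihoods above (an interpretation, not
# part of the original tests): in both cases the only allowed transition is
# 0 -> 1 and every observed node is in state 0, so the likelihood is the
# probability of no event over the total scaled edge length,
# exp(-rate * sum(coefficients)):
#
#     -1 * (1 + 2) == -3.0    # test_heterogeneous_edge_rates
#     -2 * (1 + 2) == -6.0    # test_edges_are_not_preordered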
def test_dwell():
    d = copy.deepcopy(D)
    s = arbplf_dwell(json.dumps(d))
    df = pd.read_json(StringIO(s), orient='split', precise_float=True)
    actual = df.pivot('edge', 'state', 'value').values
    # compute the desired closed form solution
    u = np.cumsum([0] + rates)
    a, b = u[:-1], u[1:]
    v = exprel(b - a) * exp(-b)
    desired = np.vstack([v, 1 - v]).T
    # compare actual and desired result
    assert_allclose(actual, desired)
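
# Note on the closed form above: exprel(x) = expm1(x) / x, so
#     v = exprel(b - a) * exp(-b) = (exp(-a) - exp(-b)) / (b - a),
# the average of exp(-t) over the interval [a, b].  Writing it with exprel
# keeps the expression well behaved as b - a approaches zero.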
def test_truncated_dwell():
    d = copy.deepcopy(D)
    d['model_and_data']['probability_array'][0][-1] = [0, 1]
    s = arbplf_dwell(json.dumps(d))
    df = pd.read_json(StringIO(s), orient='split', precise_float=True)
    actual = df.pivot('edge', 'state', 'value').values
    # compute the desired closed form solution
    u = np.cumsum([0] + rates)
    a, b = u[:-1], u[1:]
    T = u[-1]
    # this way is not robust when a == b
    def F(x):
        return -(exp(T - x) + x) / expm1(T)
    v = (F(b) - F(a)) / (b - a)
    desired = np.vstack([v, 1 - v]).T
    # this way is better
    v = (exprel(b - a) * exp(T - b) - 1) / expm1(T)
    desired = np.vstack([v, 1 - v]).T
    # compare actual and desired result
    assert_allclose(actual, desired)
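
# A small consistency check of the two closed forms above (a sketch with a
# hypothetical helper name, not part of the original tests).  It reuses exp,
# expm1, exprel, and assert_allclose, which this module already imports for
# the tests above; away from a == b both expressions should agree, while only
# the exprel form remains stable as b approaches a.
def _check_truncated_dwell_forms(a=0.25, b=0.75, T=1.0):
    def F(x):
        return -(exp(T - x) + x) / expm1(T)
    v_naive = (F(b) - F(a)) / (b - a)
    v_stable = (exprel(b - a) * exp(T - b) - 1) / expm1(T)
    assert_allclose(v_naive, v_stable)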
def mydwell(d):
    s = arbplf_dwell(json.dumps(d))
    df = pd.read_json(StringIO(s), orient='split', precise_float=True)
    return df