Example #1
    def test_invalid_beta_throws(self):
        distances = dok_matrix(hand_net_dists)
        if issparse(distances):
            hand_net_incidence_local = dok_matrix(hand_net_incidence)
        else:
            hand_net_incidence_local = hand_net_incidence

        data_list = [distances]
        network_struct = ModelDataStruct(data_list,
                                         hand_net_incidence_local,
                                         data_array_names_debug=("distances",
                                                                 "u_turn"))

        beta_vec = np.array([-5])

        model = RecursiveLogitModelPrediction(network_struct,
                                              initial_beta=beta_vec,
                                              mu=1)
        with pytest.raises(ValueError) as e:
            model.generate_observations(origin_indices=[0, 1, 2, 7],
                                        dest_indices=[1, 6, 3],
                                        num_obs_per_pair=4,
                                        iter_cap=15,
                                        rng_seed=1)
        print(str(e.value))
Example #2
    def test_bad_beta_fails(self, struct_bigger):

        model = RecursiveLogitModelPrediction(struct_bigger, initial_beta=-0.1)
        with pytest.raises(ValueError) as e:
            model.generate_observations(origin_indices=[0],
                                        dest_indices=[9],
                                        num_obs_per_pair=10)
        assert "exp(V(s)) contains negative values" in str(e.value)
Example #3
def get_data(beta, seed=None):
    beta_vec_generate = np.array([beta])
    model = RecursiveLogitModelPrediction(network_struct,
                                          initial_beta=beta_vec_generate, mu=1)

    obs_indices = [0]
    obs = model.generate_observations(origin_indices=obs_indices,
                                      dest_indices=[1],
                                      num_obs_per_pair=60, iter_cap=2000, rng_seed=seed,
                                      )
    return obs
Example #4
def get_data(beta_vec, seed=None):

    model = RecursiveLogitModelPrediction(network_struct,
                                          initial_beta=beta_vec,
                                          mu=1)
    obs = model.generate_observations(
        origin_indices=orig_indices,
        dest_indices=dest_indices,
        num_obs_per_pair=obs_per_pair,
        iter_cap=2000,
        rng_seed=seed,
    )
    return obs
Example #5
def _basic_consistency_checks(distances):
    data_list = [distances]
    if issparse(distances):
        hand_net_incidence_local = dok_matrix(hand_net_incidence)
    else:
        hand_net_incidence_local = hand_net_incidence
    network_struct = ModelDataStruct(data_list,
                                     hand_net_incidence_local,
                                     data_array_names_debug=("distances",
                                                             "u_turn"))
    beta_vec = np.array([-1])
    model = RecursiveLogitModelPrediction(network_struct,
                                          initial_beta=beta_vec,
                                          mu=1)
    obs = model.generate_observations(origin_indices=[0, 1, 2, 7],
                                      dest_indices=[1, 6, 3],
                                      num_obs_per_pair=4,
                                      iter_cap=15,
                                      rng_seed=1)
    expected = TestSimulation._get_basic_consistency_expected(
        ALLOW_POSITIVE_VALUE_FUNCTIONS)
    assert obs == expected
Example #6
    def test_bad_index_fails(self, struct_bigger):
        model = RecursiveLogitModelPrediction(struct_bigger, initial_beta=-0.2)
        with pytest.raises(IndexError) as e:
            model.generate_observations(origin_indices=[0],
                                        dest_indices=[100],
                                        num_obs_per_pair=10)
        assert "Can only simulate observations from indexes which are in the model" in str(
            e.value)

        with pytest.raises(IndexError) as e:
            model.generate_observations(origin_indices=[0],
                                        dest_indices=[10],
                                        num_obs_per_pair=10)
        assert "but the final index is reserved for internal dummy sink state" in str(
            e.value)
Example #7
def consistency_test(network_file,
                     orig_indices,
                     dest_indices,
                     obs_per_pair,
                     beta0,
                     test_range=None):
    if test_range is None:
        test_range = np.arange(-0.1, -2.1, -0.1)
    # network_file = "EMA_net.tntp"

    data_list, data_list_names = load_tntp_node_formulation(
        network_file,
        columns_to_extract=[
            "length",
        ],
    )
    distances = data_list[0]

    incidence_mat = (distances > 0).astype(int)

    network_struct = ModelDataStruct(data_list,
                                     incidence_mat,
                                     data_array_names_debug=("distances",
                                                             "u_turn"))

    beta_vec = np.array([-0.1])
    model = RecursiveLogitModelPrediction(network_struct,
                                          initial_beta=beta_vec,
                                          mu=1)
    print("Linear system size", model.get_exponential_utility_matrix().shape)

    print(
        f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} obs total per "
        f"beta sim val")

    def get_data(beta_vec, seed=None):
        beta_vec_generate = np.array([beta_vec])
        model = RecursiveLogitModelPrediction(network_struct,
                                              initial_beta=beta_vec_generate,
                                              mu=1)
        obs = model.generate_observations(
            origin_indices=orig_indices,
            dest_indices=dest_indices,
            num_obs_per_pair=obs_per_pair,
            iter_cap=2000,
            rng_seed=seed,
        )
        return obs

    optimiser = optimisers.ScipyOptimiser(method='l-bfgs-b')  # bfgs, l-bfgs-b

    import time
    a = time.time()
    expected = []
    actual = []
    for n, beta_gen in enumerate(test_range, start=1):
        expected.append(beta_gen)
        try:
            obs = get_data(beta_gen, seed=None)
        except ValueError as e:
            print(f"beta = {beta_gen} failed, {e}")
            actual.append(0.0)
            continue
        # print(obs)
        model = RecursiveLogitModelEstimation(network_struct,
                                              observations_record=obs,
                                              initial_beta=beta0,
                                              mu=1,
                                              optimiser=optimiser)
        beta = model.solve_for_optimal_beta(verbose=False)
        actual.append(float(beta))
        print("beta_expected", beta_gen, "beta actual", beta, "\nOBS:")
        # text_list = wrapper.wrap(str(obs))
        # print("\n".join(text_list))

    b = time.time()
    print("elapsed =", b - a, "s")
    return np.array(expected), np.array(actual)
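

# Illustrative driver for consistency_test (a sketch): the network file path and
# index ranges below are assumptions, borrowed from the Sioux Falls example
# further down in this document.
import os

network_file = os.path.join("tests", "docs", "SiouxFalls_net.tntp")
expected, actual = consistency_test(network_file,
                                    orig_indices=np.arange(0, 24, 4),
                                    dest_indices=np.arange(1, 24, 4),
                                    obs_per_pair=2,
                                    beta0=-5)
for beta_true, beta_hat in zip(expected, actual):
    print(f"simulated beta {beta_true:.2f} -> estimated beta {beta_hat:.3f}")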
Example #8
     [3.5, 3, 4, 0, 2.5, 3, 3, 0],
     [4.5, 4, 5, 0, 0, 0, 4, 3.5],
     [3, 0, 0, 2, 2, 2.5, 0, 2],
     [3, 2.5, 0, 2, 2, 2.5, 2.5, 0],
     [0, 3, 0, 2.5, 2.5, 3, 3, 2.5],
     [0, 3, 4, 0, 2.5, 3, 3, 2.5],
     [0, 0, 3.5, 2, 0, 2.5, 2.5, 2]])

incidence_mat = (distances > 0).astype(int)

data_list = [distances]
network_struct = ModelDataStruct(data_list, incidence_mat,
                                 data_array_names_debug=("distances",))

beta_vec = np.array([-16])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_vec, mu=1)
# obs_indices = [i for i in range(8)]
# obs = model.generate_observations(origin_indices=obs_indices,
#                                   dest_indices=obs_indices,
#                                   num_obs_per_pair=1, iter_cap=2000, rng_seed=1,
#                                   )
obs_indices = [1, 2, 3, 4, 5, 6, 7, 8]
obs = model.generate_observations(origin_indices=obs_indices,
                                  dest_indices=[7, 3],
                                  num_obs_per_pair=20, iter_cap=2000, rng_seed=1,
                                  )

print(obs)

print("\nPath in terms of arcs:")
for path in obs:
Example #9
distances = distances.A
# distances += np.abs(np.min(distances))
# distances +=1
data_list = [distances]
nz_dist = distances.reshape(distances.shape[0] * distances.shape[1], 1)
nz_dist = nz_dist[nz_dist > 0]
print("(max dist, min dist, mean dist) = ",
      (np.max(nz_dist), np.min(nz_dist), np.mean(nz_dist), np.std(nz_dist)))
network_struct = ModelDataStruct(data_list,
                                 incidence_mat,
                                 data_array_names_debug=("distances",
                                                         "u_turn"))

beta_vec = np.array([-1])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_vec,
                                      mu=1)
print("Linear system size", model.get_exponential_utility_matrix().shape)
orig_indices = np.arange(0, arcmaxp1, 30)
dest_indices = (orig_indices + 5) % arcmaxp1
# orig_indices = np.arange(0, 7, 1)
# dest_indices = np.arange(0, 7, 1)
obs_per_pair = 1

print(
    f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} obs total per "
    f"configuration")


def get_data(beta, seed=None):
    beta_vec_generate = np.array([beta])
Example #10
                      [4.5, 4, 5, 0, 0, 0, 4, 3.5], [3, 0, 0, 2, 2, 2.5, 0, 2],
                      [3, 2.5, 0, 2, 2, 2.5, 2.5, 0],
                      [0, 3, 0, 2.5, 2.5, 3, 3, 2.5],
                      [0, 3, 4, 0, 2.5, 3, 3, 2.5],
                      [0, 0, 3.5, 2, 0, 2.5, 2.5, 2]])

incidence_mat = (distances > 0).astype(int)

data_list = [distances]
network_struct = ModelDataStruct(data_list,
                                 incidence_mat,
                                 data_array_names_debug=("distances",))

beta_vec = np.array([-1])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_vec,
                                      mu=1)
# obs_indices = [i for i in range(8)]
# obs = model.generate_observations(origin_indices=obs_indices,
#                                   dest_indices=obs_indices,
#                                   num_obs_per_pair=1, iter_cap=2000, rng_seed=1,
#                                   )
obs_indices = [0, 1, 2, 3, 4, 5, 6, 7]
dest_indices = [0, 1, 2, 3, 4, 5, 6, 7]
obs = model.generate_observations(
    origin_indices=obs_indices,
    dest_indices=dest_indices,
    num_obs_per_pair=2,
    iter_cap=2000,
    rng_seed=1,
)
Example #11
#      [3, 0, 0, 2, 2, 2.5, 0, 2],
#      [3, 2.5, 0, 2, 2, 2.5, 2.5, 0],
#      [0, 3, 0, 2.5, 2.5, 3, 3, 2.5],
#      [0, 3, 4, 0, 2.5, 3, 3, 2.5],
#      [0, 0, 3.5, 2, 0, 2.5, 2.5, 2]])

incidence_mat = (distances > 0).astype(int)

data_list = [distances]
network_struct = ModelDataStruct(data_list,
                                 incidence_mat,
                                 data_array_names_debug=("distances",))
beta_known = -0.4
beta_vec_generate = np.array([beta_known])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_vec_generate,
                                      mu=1)
#
# obs_indices = [0]
# obs = model.generate_observations(origin_indices=obs_indices,
#                                   dest_indices=[1],
#                                   num_obs_per_pair=40, iter_cap=2000, rng_seed=1,
#                                   )

obs_indices = [0, 3]
dest_indices = [1, 2]
obs_per_pair = 15
print(
    f"Generating {obs_per_pair * len(obs_indices) * len(dest_indices)} obs total"
)
Example #12
import os
print("sys path is", os.getcwd(), os.listdir(os.getcwd()))
network_file = os.path.join("tests", "docs", "SiouxFalls_net.tntp")
node_max = 24  # from network file

data_list, data_list_names = load_tntp_node_formulation(
    network_file, columns_to_extract=["length", "capacity"], sparse_format=False)

# Entries are loaded as dense np.arrays (sparse_format=False) since the network is small
# enough that dense storage is more efficient
distances = data_list[0]

incidence_mat = (distances > 0).astype(int)
network_struct = ModelDataStruct(data_list, incidence_mat)

beta_sim = np.array([-0.8, -0.00015])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_sim, mu=1)
print("Linear system size", model.get_exponential_utility_matrix().shape)

# sparse sample for quick running example
orig_indices = np.arange(0, node_max, 2)
dest_indices = (orig_indices + 5) % node_max
# sample every OD pair once
# orig_indices = np.arange(0, node_max, 1)
# dest_indices = np.arange(0, node_max, 1)
obs_per_pair = 1
print(f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} obs total per "
      f"configuration")
seed = 42
obs = model.generate_observations(origin_indices=orig_indices, dest_indices=dest_indices,
                                  num_obs_per_pair=obs_per_pair, iter_cap=2000, rng_seed=seed)
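
# Quick sanity check on the simulated output (sketch; assumes obs is a list of
# index sequences, as it is iterated over in the other examples).
print("number of simulated paths:", len(obs))
path_lengths = [len(path) for path in obs]
print("shortest / mean / longest path (number of states):",
      min(path_lengths), sum(path_lengths) / len(path_lengths), max(path_lengths))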
Example #13
"""
import numpy as np
from recursiveRouteChoice import RecursiveLogitModelPrediction, ModelDataStruct

# DATA
# A trivial network
distances = np.array([[0, 5, 0, 4], [0, 0, 6, 0], [0, 6, 0, 5], [4, 0, 0, 0]])

incidence_mat = (distances > 0).astype(int)

network_attribute_list = [distances]
network_struct = ModelDataStruct(network_attribute_list,
                                 incidence_mat,
                                 data_array_names_debug=("distances", ))
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=[-0.4],
                                      mu=1)

obs_indices = [0, 3]
dest_indices = [1, 2]
obs_per_pair = 15
print(
    f"Generating {obs_per_pair * len(obs_indices) * len(dest_indices)} obs total"
)

obs = model.generate_observations(origin_indices=obs_indices,
                                  dest_indices=dest_indices,
                                  num_obs_per_pair=obs_per_pair,
                                  iter_cap=2000,
                                  rng_seed=1)
print(obs)
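
# The simulated observations can be fed back into estimation to try to recover
# the beta = -0.4 used above. Sketch only: it assumes RecursiveLogitModelEstimation
# and the optimisers module are importable from recursiveRouteChoice, mirroring
# their use in the consistency_test example earlier in this document.
from recursiveRouteChoice import RecursiveLogitModelEstimation, optimisers

optimiser = optimisers.ScipyOptimiser(method='l-bfgs-b')
estimation_model = RecursiveLogitModelEstimation(network_struct,
                                                 observations_record=obs,
                                                 initial_beta=-5,
                                                 mu=1,
                                                 optimiser=optimiser)
beta_estimated = estimation_model.solve_for_optimal_beta(verbose=False)
print("simulated beta: -0.4, estimated beta:", beta_estimated)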
Example #14
#      [4.5, 4, 5, 0, 0, 0, 4, 3.5],
#      [3, 0, 0, 2, 2, 2.5, 0, 2],
#      [3, 2.5, 0, 2, 2, 2.5, 2.5, 0],
#      [0, 3, 0, 2.5, 2.5, 3, 3, 2.5],
#      [0, 3, 4, 0, 2.5, 3, 3, 2.5],
#      [0, 0, 3.5, 2, 0, 2.5, 2.5, 2]])

incidence_mat = (distances > 0).astype(int)


data_list = [distances]
network_struct = ModelDataStruct(data_list, incidence_mat,
                                 data_array_names_debug=("distances",))
beta_known = -0.4
beta_vec_generate = np.array([beta_known])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_vec_generate, mu=1)

obs_indices = [0]
obs = model.generate_observations(origin_indices=obs_indices,
                                  dest_indices=[1],
                                  num_obs_per_pair=20, iter_cap=2000, rng_seed=1,
                                  )

print(obs)

print("\nPath in terms of arcs:")
for path in obs:
    string = "Orig: "
    f = "Empty Path, should not happen"
    for arc_index in path[1:]:
        string += f"-{arc_index + 1}- => "
Example #15
                      [4.5, 4, 5, 0, 0, 0, 4, 3.5], [3, 0, 0, 2, 2, 2.5, 0, 2],
                      [3, 2.5, 0, 2, 2, 2.5, 2.5, 0],
                      [0, 3, 0, 2.5, 2.5, 3, 3, 2.5],
                      [0, 3, 4, 0, 2.5, 3, 3, 2.5],
                      [0, 0, 3.5, 2, 0, 2.5, 2.5, 2]])

incidence_mat = (distances > 0).astype(int)

data_list = [distances]
network_struct = ModelDataStruct(data_list,
                                 incidence_mat,
                                 data_array_names_debug=("distances",))
beta_known = -16
beta_vec_generate = np.array([beta_known])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_vec_generate,
                                      mu=1)
# obs_indices = [i for i in range(8)]
# obs = model.generate_observations(origin_indices=obs_indices,
#                                   dest_indices=obs_indices,
#                                   num_obs_per_pair=1, iter_cap=2000, rng_seed=1,
#                                   )
obs_indices = [1, 2, 3, 4, 5, 6, 7, 8]
obs = model.generate_observations(
    origin_indices=obs_indices,
    dest_indices=[7, 3],
    num_obs_per_pair=20,
    iter_cap=2000,
    rng_seed=1,
)
Example #16
    ],
)
# print(arc_to_index_map)
# print(data_list, data_list_names)
distances = data_list[0].A

incidence_mat = (distances > 0).astype(int)

network_struct = ModelDataStruct(data_list,
                                 incidence_mat,
                                 data_array_names_debug=("distances",
                                                         "u_turn"))

beta_vec = np.array([-0.5])
model = RecursiveLogitModelPrediction(network_struct,
                                      initial_beta=beta_vec,
                                      mu=1)
print("Linear system size", model.get_exponential_utility_matrix().shape)
orig_indices = np.arange(0, 74, 16)
dest_indices = np.arange(0, 74, 8)
obs_per_pair = 8

print(
    f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} obs total per "
    f"configuration")


def get_data(beta_vec, seed=None):
    beta_vec_generate = np.array([beta_vec])
    model = RecursiveLogitModelPrediction(network_struct,
                                          initial_beta=beta_vec_generate,