# Assumes numpy plus the recursive logit package components used below
# (load_tntp_node_formulation, ModelDataStruct, RecursiveLogitModelPrediction,
# RecursiveLogitModelEstimation, optimisers) are already imported in this module.
import time

import numpy as np


def consistency_test(network_file, orig_indices, dest_indices, obs_per_pair, beta0,
                     test_range=None):
    """Simulate observations for each beta in test_range, re-estimate beta from
    those observations, and return the (expected, actual) beta arrays."""
    if test_range is None:
        test_range = np.arange(-0.1, -2.1, -0.1)

    # e.g. network_file = "EMA_net.tntp"
    data_list, data_list_names = load_tntp_node_formulation(
        network_file,
        columns_to_extract=["length"],
    )
    distances = data_list[0]
    incidence_mat = (distances > 0).astype(int)

    network_struct = ModelDataStruct(data_list, incidence_mat,
                                     data_array_names_debug=("distances", "u_turn"))

    beta_vec = np.array([-0.1])
    model = RecursiveLogitModelPrediction(network_struct, initial_beta=beta_vec, mu=1)
    print("Linear system size", model.get_exponential_utility_matrix().shape)
    print(f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} obs total per "
          f"beta sim val")

    def get_data(beta, seed=None):
        """Generate simulated path observations under the supplied beta."""
        beta_vec_generate = np.array([beta])
        sim_model = RecursiveLogitModelPrediction(network_struct,
                                                  initial_beta=beta_vec_generate, mu=1)
        obs = sim_model.generate_observations(
            origin_indices=orig_indices,
            dest_indices=dest_indices,
            num_obs_per_pair=obs_per_pair,
            iter_cap=2000,
            rng_seed=seed,
        )
        return obs

    optimiser = optimisers.ScipyOptimiser(method='l-bfgs-b')  # alternatives: 'bfgs', 'l-bfgs-b'

    start_time = time.time()
    expected = []
    actual = []
    for beta_gen in test_range:
        expected.append(beta_gen)
        try:
            obs = get_data(beta_gen, seed=None)
        except ValueError as e:
            print(f"beta = {beta_gen} failed, {e}")
            actual.append(0.0)
            continue

        model = RecursiveLogitModelEstimation(network_struct, observations_record=obs,
                                              initial_beta=beta0, mu=1,
                                              optimiser=optimiser)
        beta = model.solve_for_optimal_beta(verbose=False)
        actual.append(float(beta))
        print("beta expected", beta_gen, "beta actual", beta)

    print("elapsed =", time.time() - start_time, "s")
    return np.array(expected), np.array(actual)
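
# Example driver for consistency_test (a sketch: the file name comes from the
# "EMA_net.tntp" reference above, while the origin/destination indices and
# test_range below are illustrative choices, not values from the source).
def run_consistency_example():
    orig = np.arange(0, 60, 30)
    dest = (orig + 5) % 60
    return consistency_test("EMA_net.tntp", orig_indices=orig, dest_indices=dest,
                            obs_per_pair=1, beta0=-5,
                            test_range=np.arange(-0.5, -2.0, -0.5))
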
# distances += 1
data_list = [distances]

# Summary statistics of the nonzero arc distances
nz_dist = distances.reshape(distances.shape[0] * distances.shape[1], 1)
nz_dist = nz_dist[nz_dist > 0]
print("(max, min, mean, std) of nonzero distances =",
      (np.max(nz_dist), np.min(nz_dist), np.mean(nz_dist), np.std(nz_dist)))

network_struct = ModelDataStruct(data_list, incidence_mat,
                                 data_array_names_debug=("distances", "u_turn"))

beta_vec = np.array([-1])
model = RecursiveLogitModelPrediction(network_struct, initial_beta=beta_vec, mu=1)
print("Linear system size", model.get_exponential_utility_matrix().shape)

# arcmaxp1 is assumed to be defined earlier in the script (the number of network states)
orig_indices = np.arange(0, arcmaxp1, 30)
dest_indices = (orig_indices + 5) % arcmaxp1
# orig_indices = np.arange(0, 7, 1)
# dest_indices = np.arange(0, 7, 1)
obs_per_pair = 1
print(f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} obs total per "
      f"configuration")


def get_data(beta, seed=None):
    """Generate simulated path observations under the supplied beta
    (body completed to mirror get_data in consistency_test above)."""
    beta_vec_generate = np.array([beta])
    sim_model = RecursiveLogitModelPrediction(network_struct,
                                              initial_beta=beta_vec_generate, mu=1)
    obs = sim_model.generate_observations(
        origin_indices=orig_indices,
        dest_indices=dest_indices,
        num_obs_per_pair=obs_per_pair,
        iter_cap=2000,
        rng_seed=seed,
    )
    return obs
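
# Minimal usage sketch for the simulator above (assumptions: the beta values are
# illustrative, and the observations record returned by generate_observations
# supports len()).
for beta_trial in (-0.4, -0.8):
    try:
        obs_trial = get_data(beta_trial, seed=42)
        print(f"beta = {beta_trial}: generated {len(obs_trial)} observations")
    except ValueError as e:
        print(f"beta = {beta_trial} failed, {e}")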