import time

import numpy as np
import scipy.optimize as scopt

# ModelDataStruct, RecursiveLogitModelPrediction, RecursiveLogitModelEstimation,
# optimisers and load_tntp_node_formulation are assumed to be imported from the
# surrounding recursive logit package; the original listing does not show the
# module path.


def consistency_test(network_file,
                     orig_indices,
                     dest_indices,
                     obs_per_pair,
                     beta0,
                     test_range=None):
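    """Simulate observations for each beta in test_range, re-estimate beta
    from them (starting the optimiser at beta0), and return the simulated
    and estimated values as parallel arrays.
    """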
    if test_range is None:
        test_range = np.arange(-0.1, -2.1, -0.1)
    # e.g. network_file = "EMA_net.tntp"

    data_list, data_list_names = load_tntp_node_formulation(
        network_file,
        columns_to_extract=[
            "length",
        ],
    )
    distances = data_list[0]

    # a link is taken to exist wherever the network records a positive length
    incidence_mat = (distances > 0).astype(int)

    network_struct = ModelDataStruct(data_list,
                                     incidence_mat,
                                     data_array_names_debug=("distances",
                                                             "u_turn"))

    # throwaway model, used only to report the size of the linear system
    beta_vec = np.array([-0.1])
    model = RecursiveLogitModelPrediction(network_struct,
                                          initial_beta=beta_vec,
                                          mu=1)
    print("Linear system size", model.get_exponential_utility_matrix().shape)

    print(f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} "
          f"observations per simulated beta value")

    def get_data(beta, seed=None):
        """Simulate observations from a prediction model with the given scalar beta."""
        beta_vec_generate = np.array([beta])
        model = RecursiveLogitModelPrediction(network_struct,
                                              initial_beta=beta_vec_generate,
                                              mu=1)
        obs = model.generate_observations(
            origin_indices=orig_indices,
            dest_indices=dest_indices,
            num_obs_per_pair=obs_per_pair,
            iter_cap=2000,
            rng_seed=seed,
        )
        return obs

    optimiser = optimisers.ScipyOptimiser(method='l-bfgs-b')  # bfgs, l-bfgs-b

    start_time = time.time()
    expected = []
    actual = []
    for beta_gen in test_range:
        expected.append(beta_gen)
        try:
            obs = get_data(beta_gen, seed=None)
        except ValueError as e:
            print(f"beta = {beta_gen} failed, {e}")
            actual.append(np.nan)  # record the failure rather than a fake estimate
            continue
        # re-estimate beta from the simulated observations, starting from beta0
        model = RecursiveLogitModelEstimation(network_struct,
                                              observations_record=obs,
                                              initial_beta=beta0,
                                              mu=1,
                                              optimiser=optimiser)
        beta = model.solve_for_optimal_beta(verbose=False)
        actual.append(float(beta))
        print("beta_expected", beta_gen, "beta actual", beta, "\nOBS:")
        # text_list = wrapper.wrap(str(obs))
        # print("\n".join(text_list))

    print("elapsed =", time.time() - start_time, "s")
    return np.array(expected), np.array(actual)
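

# A minimal usage sketch. The network file name comes from the commented hint
# inside consistency_test; the origin/destination indices, observation count
# and test range below are illustrative assumptions, not values from the
# original listing.
expected, actual = consistency_test(
    "EMA_net.tntp",
    orig_indices=[1, 2, 3],
    dest_indices=[6, 7],
    obs_per_pair=4,
    beta0=-5,
    test_range=np.arange(-0.5, -1.6, -0.5),
)
print(np.column_stack((expected, actual)))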
#         print("L, U", lower, upper)
#         break

#
#
# test_range = np.array([-26,-26.1, -26.2, -26.3, -26.4, -26.45, -26.5, -27,])
# =======================================================
# Gradient check: compare the analytic gradient of the log-likelihood against
# a finite difference, then solve for the optimal beta. obs_ak is assumed to
# be an observations record generated earlier (not shown in this listing).
beta = -5
beta_vec = np.array([beta])  # 4.96 diverges
optimiser = optimisers.LineSearchOptimiser(optimisers.OptimHessianType.BFGS,
                                           max_iter=40)

model = RecursiveLogitModelEstimation(network_struct, observations_record=obs_ak,
                                      initial_beta=beta_vec, mu=1,
                                      optimiser=optimiser)
log_like_out, grad_out = model.get_log_likelihood()
print("LL1:", log_like_out, grad_out)
# perturb beta slightly and compare the change in log-likelihood against the
# analytic gradient
h = 0.0002
model.update_beta_vec([beta + h])
ll2, grad2 = model.get_log_likelihood()
print("LL2:", ll2, grad2)
print("finite difference:", (ll2 - log_like_out) / h)

ls_out = model.solve_for_optimal_beta()
print("optimal beta from line search:", ls_out)
# value and gradient at the initial beta, for reference
print(model.optim_function_state.val_grad_function(beta))
# =======================================================
print(120 * "=", 'redo with scipy')
optimiser = optimisers.ScipyOptimiser(method='bfgs')

model = RecursiveLogitModelEstimation(network_struct, observations_record=obs_ak,
                                      initial_beta=beta_vec, mu=1,
                                      optimiser=optimiser)
log_like_out, grad_out = model.get_log_likelihood()
print("LL1:", log_like_out, grad_out)
h = 0.0002
model.update_beta_vec([beta + h])
ll2, grad2 = model.get_log_likelihood()
print("LL2:", ll2, grad2)
print("finite difference:", (ll2 - log_like_out) / h)
# =======================================================
# Example #4
# The first line of this example was cut off in the original listing; the
# LineSearchOptimiser call is completed here to match the one used above.
optimiser = optimisers.LineSearchOptimiser(optimisers.OptimHessianType.BFGS,
                                           max_iter=40)

model = RecursiveLogitModelEstimation(network_struct,
                                      observations_record=obs_ak,
                                      initial_beta=beta_vec,
                                      mu=1,
                                      optimiser=optimiser)
log_like_out, grad_out = model.get_log_likelihood()
print("LL1:", log_like_out, grad_out)
h = 0.0002
model.update_beta_vec([beta + h])
ll2, grad2 = model.get_log_likelihood()
print("LL2:", ll2, grad2)
print("finite difference:", (ll2 - log_like_out) / h)

ls_out = model.solve_for_optimal_beta()
print("optimal beta from line search:", ls_out)

print(model.optim_function_state.val_grad_function(beta))

# cross-check with scipy's BFGS, passing the analytic gradient as jac
out_bfgs = scopt.minimize(
    lambda x: model.optim_function_state.val_grad_function(x)[0],
    x0=np.array([-5]),
    method='BFGS',
    jac=lambda x: model.optim_function_state.val_grad_function(x)[1],
)
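# out_bfgs.x should agree with the beta found by the line-search optimiser above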

# print("scipy out")
# print(out_bfgs)