Example #1
    def test_compare_optim_methods(self):
        subfolder = "ExampleTinyModifiedObs"  # big data from classical v2
        folder = join("Datasets", subfolder)

        obs_mat, attrs = load_standard_path_format_csv(folder,
                                                       delim=" ",
                                                       angles_included=True)
        import awkward1 as ak  # legacy distribution name of the awkward package
        # densify the sparse observation matrix and wrap it as an awkward record
        obs_mat = obs_mat.toarray()
        obs_record = ak.from_numpy(obs_mat)
        incidence_mat, travel_times_mat, angle_cts_mat = attrs
        left, _, _, u_turn = AngleProcessor.get_turn_categorical_matrices(
            angle_cts_mat, incidence_mat)
        data_list = [travel_times_mat, left, u_turn]
        network_data_struct = ModelDataStruct(data_list, incidence_mat)

        # network_data_struct.add_second_travel_time_for_testing()
        optimiser = optimisers.LineSearchOptimiser(
            optimisers.OptimHessianType.BFGS, max_iter=4)
        RecursiveLogitModelEstimation.zeros_error_override = False  # disable the zeros-error check for this test (reset below)
        model = RecursiveLogitModelEstimation(network_data_struct,
                                              optimiser,
                                              observations_record=obs_record,
                                              initial_beta=-15)

        m1_ll_out, m1_grad_out = model.get_log_likelihood()

        optimiser2 = optimisers.ScipyOptimiser(method='newton-cg')

        model2 = RecursiveLogitModelEstimation(network_data_struct,
                                               optimiser2,
                                               observations_record=obs_record,
                                               initial_beta=-15)
        m2_ll_out, m2_grad_out = model2.get_log_likelihood()

        # both models start from the same beta, so LL and gradient must match exactly
        assert np.allclose(m2_ll_out, m1_ll_out)
        assert np.allclose(m2_grad_out, m1_grad_out)

        beta1 = model.solve_for_optimal_beta()

        beta2 = model2.solve_for_optimal_beta(verbose=True)
        m1_ll_out, m1_grad_out = model.get_log_likelihood()
        m2_ll_out, m2_grad_out = model2.get_log_likelihood()
        print(m1_ll_out, m2_ll_out)
        print(m1_grad_out, m2_grad_out)

        assert np.allclose(beta1, beta2, rtol=0.34657)  # the two optima only agree loosely

        RecursiveLogitModelEstimation.zeros_error_override = None  # restore the default behaviour
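
# Note on the assertion above: rtol=0.34657 means the two optimisers' betas
# only need to agree to within roughly 35% relative error; np.allclose tests
# |a - b| <= atol + rtol * |b|. A self-contained illustration:
import numpy as np

assert np.allclose(-15.0, -11.5, rtol=0.34657)      # |diff| = 3.5 <= 0.34657 * 11.5
assert not np.allclose(-15.0, -9.0, rtol=0.34657)   # |diff| = 6.0 >  0.34657 * 9.0
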
def consistency_test(network_file,
                     orig_indices,
                     dest_indices,
                     obs_per_pair,
                     beta0,
                     test_range=None):
    if test_range is None:
        test_range = np.arange(-0.1, -2.1, -0.1)
    # network_file = "EMA_net.tntp"

    data_list, data_list_names = load_tntp_node_formulation(
        network_file,
        columns_to_extract=[
            "length",
        ],
    )
    distances = data_list[0]

    incidence_mat = (distances > 0).astype(int)  # an arc exists wherever a positive distance is recorded

    network_struct = ModelDataStruct(data_list,
                                     incidence_mat,
                                     data_array_names_debug=("distances",))

    beta_vec = np.array([-0.1])
    # throwaway model, built only to report the size of the linear system below
    model = RecursiveLogitModelPrediction(network_struct,
                                          initial_beta=beta_vec,
                                          mu=1)
    print("Linear system size", model.get_exponential_utility_matrix().shape)

    print(
        f"Generating {obs_per_pair * len(orig_indices) * len(dest_indices)} observations "
        f"in total per simulated beta value")

    def get_data(beta_vec, seed=None):
        beta_vec_generate = np.array([beta_vec])
        model = RecursiveLogitModelPrediction(network_struct,
                                              initial_beta=beta_vec_generate,
                                              mu=1)
        obs = model.generate_observations(
            origin_indices=orig_indices,
            dest_indices=dest_indices,
            num_obs_per_pair=obs_per_pair,
            iter_cap=2000,
            rng_seed=seed,
        )
        return obs

    optimiser = optimisers.ScipyOptimiser(method='l-bfgs-b')  # bfgs, l-bfgs-b

    import time
    a = time.time()
    expected = []  # beta values used to simulate observations
    actual = []  # beta values recovered by estimation
    for beta_gen in test_range:
        expected.append(beta_gen)
        try:
            obs = get_data(beta_gen, seed=None)
        except ValueError as e:
            print(f"beta = {beta_gen} failed, {e}")
            actual.append(0.0)
            continue
        # print(obs)
        beta0 = -5
        model = RecursiveLogitModelEstimation(network_struct,
                                              observations_record=obs,
                                              initial_beta=beta0,
                                              mu=1,
                                              optimiser=optimiser)
        beta = model.solve_for_optimal_beta(verbose=False)
        actual.append(float(beta))
        print("beta_expected", beta_gen, "beta actual", beta, "\nOBS:")
        # text_list = wrapper.wrap(str(obs))
        # print("\n".join(text_list))

    b = time.time()
    print("elapsed =", b - a, "s")
    return np.array(expected), np.array(actual)
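
# Usage sketch (illustrative only; the network file name and index choices
# below are hypothetical placeholders, not from the original script):
#
#     expected, actual = consistency_test("SiouxFalls_net.tntp",
#                                         orig_indices=[0, 1, 2],
#                                         dest_indices=[10, 11],
#                                         obs_per_pair=5,
#                                         beta0=-5)
#     # a faithful estimator should give actual close to expected
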
# Fragment from a larger script: network_struct, obs_ak, beta_vec and beta are
# assumed to be defined earlier in the original file.
model = RecursiveLogitModelEstimation(network_struct, observations_record=obs_ak,
                                      initial_beta=beta_vec, mu=1,
                                      optimiser=optimiser)
log_like_out, grad_out = model.get_log_likelihood()
print("LL1:", log_like_out, grad_out)
h = 0.0002
model.update_beta_vec([beta + h])  # perturb beta by h for a forward difference
ll2, grad2 = model.get_log_likelihood()
print("LL2:", ll2, grad2)
print("finite difference:", (ll2 - log_like_out) / h)  # should approximate grad_out

ls_out = model.solve_for_optimal_beta()
print(model.optim_function_state.val_grad_function(beta))
# =======================================================
print(120 * "=", 'redo with scipy')
optimiser = optimisers.ScipyOptimiser(method='bfgs')

model = RecursiveLogitModelEstimation(network_struct, observations_record=obs_ak,
                                      initial_beta=beta_vec, mu=1,
                                      optimiser=optimiser)
log_like_out, grad_out = model.get_log_likelihood()
print("LL1:", log_like_out, grad_out)
h = 0.0002
model.update_beta_vec([beta + h])
ll2, grad2 = model.get_log_likelihood()
print("LL2:", ll2, grad2)
print("finite difference:", (ll2 - log_like_out) / h)

ls_out = model.solve_for_optimal_beta(verbose=True)
print(model.optim_function_state.val_grad_function(beta))
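
# Side note (a sketch reusing the model API and names from the fragment above;
# not in the original script): a central difference is a sharper gradient check
# than the forward difference used above, since its truncation error is
# O(h**2) rather than O(h).
model.update_beta_vec([beta - h])
ll_minus, _ = model.get_log_likelihood()
model.update_beta_vec([beta + h])
ll_plus, _ = model.get_log_likelihood()
print("central difference:", (ll_plus - ll_minus) / (2 * h))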


# =======================================================
print(120 * "=", 'redo with scipy')
optimiser = optimisers.ScipyOptimiser(method='l-bfgs-b')  # bfgs, l-bfgs-b

import time
a = time.time()
# test_range = np.arange(-0.1, -5, -0.5)
# test_range = np.arange(-3, -20, -1)
# test_range = np.arange(-0.000, -0.1, -0.03)
# test_range = np.array([-0.3, -0.4, -2, -3, -10, -15])
# test_range = np.array([ -20, -26, -26.5, -27, -28])
# test_range = np.array([-0.01, -0.1, -0.3, -0.4, -0.5, -1, -3, -4, -5, -10, -15, -20, -50, -100,
#                       -200, -250,
#                       -300, -2000, -100000])
# Checking Berlin Mitte beta range based upon ||M||^infty
# Trying to normalise **10
# test_range = np.array([-0.01, -0.02, -0.03, -0.038, -0.0385, -0.039, -0.04, -0.05,  -0.1, ])
# try to normalise **1000
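
# Sketch for the ||M||^infty comment above (an assumption, not original code):
# given a model built as earlier in this file, the infinity norm of the
# exponential utility matrix M indicates how strongly a candidate beta scales
# the utilities, which is one way to choose a sensible test_range.
#
#     M = model.get_exponential_utility_matrix()
#     print("||M||_inf =", abs(M).sum(axis=1).max())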