def get_optimal_lag_exper(p_src_index, src_neighbor_indices,
                          normalized_cells_response_curve):
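    """Select a VAR lag order for each (src, neighbour) cell pair.

    For every neighbour of ``p_src_index``, a bivariate VAR is fitted on the
    two response curves, the smallest lag order suggested by the
    AIC/BIC/FPE/HQIC criteria is chosen, and a ``ValueError`` is raised if the
    residual whiteness (Portmanteau) test rejects.

    Returns a dict mapping each neighbour index to its chosen lag order.
    """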
    from statsmodels.tsa.api import VAR

    optimal_lag_vector = dict()

    for p_dst_index in src_neighbor_indices:
        src_dst_data = None
        try:
            src_dst_data = normalized_cells_response_curve[
                [p_src_index, p_dst_index], :]
            src_dst_data = np.transpose(src_dst_data)
            # Fit a bivariate VAR on the (src, dst) response curves and let
            # statsmodels report the lag order preferred by each information
            # criterion (maxlags=None falls back to the library default).
            model = VAR(src_dst_data)
            lag_order_results = model.select_order(maxlags=None)

            lags = [
                lag_order_results.aic, lag_order_results.bic,
                lag_order_results.fpe, lag_order_results.hqic
            ]

            # Use the most conservative (smallest) of the lag orders
            # suggested by AIC/BIC/FPE/HQIC.
            min_i = np.argmin(lags)

            results = model.fit(maxlags=lags[min_i], ic=None)

            # Portmanteau test: the residuals of an adequately specified VAR
            # should be white noise.
            p_value_whiteness = results.test_whiteness(
                nlags=lags[min_i]).pvalue

            if np.isnan(p_value_whiteness) or p_value_whiteness < 0.05:
                raise ValueError('found autocorrelation in residuals.')

            optimal_lag_vector[p_dst_index] = lags[min_i]
        except Exception:
            print('src index: ' + str(p_src_index) + ' dst index: ' +
                  str(p_dst_index))
            if src_dst_data is not None:
                print(src_dst_data)
            raise

    return optimal_lag_vector


def get_optimal_lag(p_src_index, neighbor_indices,
                    normalized_cells_response_curve):
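    """Variant of ``get_optimal_lag_exper`` that takes the full
    ``neighbor_indices`` mapping and stores the chosen lag order in a dense
    vector indexed by cell.

    Note that only the first neighbour is processed before the loop breaks.
    """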
    from statsmodels.tsa.api import VAR

    number_of_points = len(neighbor_indices)

    # get the neighbours of the source cell
    src_neighbor_indices = neighbor_indices[p_src_index]

    optimal_lag_vector = np.zeros((number_of_points))

    for p_dst_index in src_neighbor_indices:
        # the destination's own neighbours and the pair's disjoint neighbours
        # are collected here but not used further in this function as written
        dst_neighbor_indices = neighbor_indices[p_dst_index]
        disjoint_neighbours = get_disjoint_neighbours(p_src_index, p_dst_index,
                                                      neighbor_indices)

        src_dst_data = normalized_cells_response_curve[
            [p_src_index, p_dst_index], :]
        src_dst_data = np.transpose(src_dst_data)
        # Fit a bivariate VAR on the (src, dst) response curves and let
        # statsmodels report the lag order preferred by each information
        # criterion (maxlags=None falls back to the library default).
        model = VAR(src_dst_data)
        lag_order_results = model.select_order(maxlags=None)

        lags = [
            lag_order_results.aic, lag_order_results.bic,
            lag_order_results.fpe, lag_order_results.hqic
        ]

        # Use the most conservative (smallest) of the lag orders suggested by
        # AIC/BIC/FPE/HQIC.
        min_i = np.argmin(lags)

        results = model.fit(maxlags=lags[min_i], ic=None)

        # Portmanteau test: the residuals should be white noise.
        if results.test_whiteness(nlags=lags[min_i]).pvalue < 0.05:
            raise ValueError('found autocorrelation in residuals.')

        optimal_lag_vector[p_dst_index] = lags[min_i]

        # only the first neighbour is evaluated before leaving the loop
        break

    return optimal_lag_vector
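

# Minimal usage sketch, assuming numpy is available as ``np`` at module level
# (the functions above already require it) and that
# ``normalized_cells_response_curve`` is a 2-D array of shape
# (n_cells, n_timepoints).  The synthetic data is illustrative only; the
# helpers raise when the residual whiteness test rejects, so the call is
# wrapped to report that outcome instead of crashing.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    demo_curve = rng.standard_normal((5, 200))   # 5 cells, 200 time points

    try:
        lags_for_cell_0 = get_optimal_lag_exper(0, [1, 2], demo_curve)
        print('optimal lag per neighbour:', lags_for_cell_0)
    except Exception as err:
        print('lag selection failed:', err)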