def test_f_score_with_covars_and_normalized_design_withcovar(random_state=0):
    """

    This test has a statsmodels dependance. There seems to be no simple,
    alternative way to perform a F-test on a linear model including
    covariates.

    """
    try:
        from statsmodels.regression.linear_model import OLS
    except ImportError:
        warnings.warn("Statsmodels is required to run this test")
        raise nose.SkipTest

    rng = check_random_state(random_state)

    ### Normalized data
    n_samples = 50
    # generate data
    var1 = np.ones((n_samples, 1)) / np.sqrt(n_samples)  # normalized
    var2 = rng.randn(n_samples, 1)
    var2 = var2 / np.sqrt(np.sum(var2**2, 0))  # normalize
    covars = np.eye(n_samples, 3)  # covariate columns are orthogonal to each other
    covars[3] = -1  # each column now sums to zero, i.e. is orthogonal to var1
    covars = orthonormalize_matrix(covars)  # restore orthonormality of the columns
    # own f_score
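    # (F-score for var1 explaining var2, with the covariates included in the model)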
    f_val_own = _f_score_with_covars_and_normalized_design(var1, var2,
                                                           covars)[0]
    # statsmodels f_score
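    # contrast testing the coefficient of var1 (first column of the design)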
    test_matrix = np.array([[1., 0., 0., 0.]])
    statsmodels_ols = OLS(var2, np.hstack((var1, covars))).fit()
    f_val_statsmodels = statsmodels_ols.f_test(test_matrix).fvalue[0]
    assert_array_almost_equal(f_val_own, f_val_statsmodels)
def test_t_score_with_covars_and_normalized_design_withcovar(random_state=0):
    """

    """
    rng = check_random_state(random_state)

    ### Normalized data
    n_samples = 50
    # generate data
    var1 = np.ones((n_samples, 1)) / np.sqrt(n_samples)  # normalized
    var2 = rng.randn(n_samples, 1)
    var2 = var2 / np.sqrt(np.sum(var2 ** 2, 0))  # normalize
    covars = np.eye(n_samples, 3)  # covariate columns are orthogonal to each other
    covars[3] = -1  # each column now sums to zero, i.e. is orthogonal to var1
    covars = orthonormalize_matrix(covars)  # restore orthonormality of the columns
    # nilearn t-score
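    # (t-score for var1 explaining var2, with the covariates included in the model)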
    own_score = _t_score_with_covars_and_normalized_design(var1, var2, covars)
    # reference t-scores, computed with linalg or statsmodels
    ref_score = get_tvalue_with_alternative_library(var1, var2, covars)
    assert_array_almost_equal(own_score, ref_score)