Example #1
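These examples exercise GrowableSparseArray and related helpers from nilearn's randomized parcellation based inference (RPBI) test suite. A minimal sketch of the imports they assume is given below; the module path of GrowableSparseArray and of the private helpers is an assumption (it varied across nilearn versions and the RPBI code was later removed), so adjust it to your installation.

# Assumed imports for the examples below (the nilearn module path is a guess,
# not a documented API; the other imports are standard).
import numpy as np
from scipy import sparse
from numpy.testing import assert_array_equal, assert_raises
from sklearn.utils import check_random_state
from nilearn.mass_univariate.rpbi import (
    GrowableSparseArray,
    _build_parcellations,
    _compute_counting_statistic_from_parcel_level_scores,
)
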
def test_gsarray_append_data():
    """This function tests GrowableSparseArray creation and filling.

    """
    # Simplest example
    gs_array = GrowableSparseArray(n_rows=1)
    gs_array.append(0, np.ones(5))
    assert_array_equal(gs_array.get_data()['row'], np.zeros(5))
    assert_array_equal(gs_array.get_data()['col'], np.arange(5))
    assert_array_equal(gs_array.get_data()['data'], np.ones(5))

    # Append with no structure extension needed
    gs_array = GrowableSparseArray(n_rows=1, max_elts=10)
    gs_array.append(0, np.ones(5))
    assert_array_equal(gs_array.get_data()['row'], np.zeros(5))
    assert_array_equal(gs_array.get_data()['col'], np.arange(5))
    assert_array_equal(gs_array.get_data()['data'], np.ones(5))

    # Void array
    gs_array = GrowableSparseArray(n_rows=1)
    gs_array.append(0, np.zeros(5))
    assert_array_equal(gs_array.get_data()['row'], [])
    assert_array_equal(gs_array.get_data()['col'], [])
    assert_array_equal(gs_array.get_data()['data'], [])

    # Toy example
    gs_array = GrowableSparseArray(n_rows=10)
    for i in range(10):
        data = np.arange(10) - i
        data[data < 8] = 0
        gs_array.append(i, data)
    assert_array_equal(gs_array.get_data()['row'], np.array([0., 0., 1.]))
    assert_array_equal(gs_array.get_data()['col'], [8, 9, 9])
    assert_array_equal(gs_array.get_data()['data'], [8., 9., 8.])
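As the assertions above suggest, get_data() exposes the stored scores as (row, col, data) triplets, i.e. a COO-style encoding. Below is a minimal sketch of turning those triplets back into a dense array with scipy; the to_dense helper is hypothetical and the field names are taken from the assertions above, not from a documented API.

# Hypothetical helper (not part of nilearn): rebuild a dense array from the
# (row, col, data) triplets that the assertions above inspect.
from scipy import sparse

def to_dense(gs_data, n_rows, n_cols):
    coo = sparse.coo_matrix(
        (gs_data['data'],
         (gs_data['row'].astype(int), gs_data['col'].astype(int))),
        shape=(n_rows, n_cols))
    return coo.toarray()

# For the "Simplest example" above, to_dense(gs_array.get_data(), 1, 5)
# returns [[1., 1., 1., 1., 1.]].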
Example #3
def test_gsarray_merge():
    """This function tests GrowableSparseArray merging.

    Because of the specific usage of GrowableSparseArrays, only a reduced
    number of manipulations has been implemented.

    """
    # Basic merge
    gs_array = GrowableSparseArray(n_rows=1)
    gs_array.append(0, np.ones(5))
    gs_array2 = GrowableSparseArray(n_rows=1)
    gs_array2.merge(gs_array)
    assert_array_equal(gs_array.get_data()['row'],
                       gs_array2.get_data()['row'])
    assert_array_equal(gs_array.get_data()['col'],
                       gs_array2.get_data()['col'])
    assert_array_equal(gs_array.get_data()['data'],
                       gs_array2.get_data()['data'])

    # Merge list
    gs_array = GrowableSparseArray(n_rows=2)
    gs_array.append(0, np.ones(5))
    gs_array2 = GrowableSparseArray(n_rows=2)
    gs_array2.append(1, 2 * np.ones(5))
    gs_array3 = GrowableSparseArray(n_rows=2)
    gs_array3.merge([gs_array, gs_array2])
    assert_array_equal(gs_array3.get_data()['row'],
                       np.array([0.] * 5 + [1.] * 5))
    assert_array_equal(gs_array3.get_data()['col'], np.tile(np.arange(5), 2))
    assert_array_equal(gs_array3.get_data()['data'],
                       np.array([1.] * 5 + [2.] * 5))
    # failure case
    assert_raises(TypeError, gs_array3.merge, [gs_array, gs_array2, "foo"])

    # Test failure case (merging arrays with different n_rows)
    gs_array_wrong = GrowableSparseArray(n_rows=2)
    gs_array_wrong.append(0, np.ones(5))
    gs_array_wrong.append(1, np.ones(5))
    gs_array = GrowableSparseArray(n_rows=1)
    assert_raises(ValueError, gs_array.merge, gs_array_wrong)

    # Test failure case (merge a numpy array)
    gs_array = GrowableSparseArray(n_rows=1)
    assert_raises(TypeError, gs_array.merge, np.ones(5))
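A usage pattern suggested by the "Merge list" case above is sketched below: fill one container per block of rows, then merge them into a single GrowableSparseArray. This is only a sketch based on the calls the test exercises (append, merge with a list, matching n_rows), not on documented behaviour.

# Sketch: per-block containers merged into one (mirrors the "Merge list" case).
partial_results = []
for row_idx in range(2):
    part = GrowableSparseArray(n_rows=2)
    part.append(row_idx, (row_idx + 1) * np.ones(5))
    partial_results.append(part)

merged = GrowableSparseArray(n_rows=2)
merged.merge(partial_results)  # n_rows must match, otherwise merge raises ValueError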
Example #5
def test_compute_counting_statistic_from_parcel_level_scores(random_state=1):
    """Test the computation of RPBI's counting statistic.
    """
    # check random state
    rng = check_random_state(random_state)

    # Generate toy data
    # define data structure
    shape = (5, 5, 5)
    n_voxels = np.prod(shape)
    mask = np.ones(shape, dtype=bool)
    # data generation
    data1 = np.ones(shape)
    data1[1:3, 1:3, 1:3] = 2.
    data2 = np.ones(shape)
    data2[3:, 3:, 3:] = 4.
    data = np.ones((4, n_voxels))  # 4 raveled (flattened) images
    data[0] = np.ravel(data1)
    data[1] = np.ravel(data2)

    # Parcellate data and extract signal averages
    n_parcellations = 2
    n_parcels = 3
    parcelled_data, labels = _build_parcellations(
        data, mask, n_parcellations=n_parcellations, n_parcels=n_parcels,
        # make sure we use observations 1 and 2 at least once
        n_bootstrap_samples=6, random_state=rng)
    parcel_level_results = GrowableSparseArray(n_rows=2)
    data_tmp = parcelled_data[0]
    data_tmp[data_tmp < 2] = 0
    parcel_level_results.append(0, data_tmp)
    data_tmp = parcelled_data[1]
    data_tmp[data_tmp < 2] = 0
    parcel_level_results.append(1, data_tmp)
    parcellation_masks = np.zeros((n_parcellations * n_parcels, n_voxels))
    for j in np.arange(n_parcellations):  # loop on parcellations
        label_slice = slice(j * n_voxels, (j + 1) * n_voxels)
        for l in np.unique(labels[label_slice]):
            parcellation_masks[l] = labels[label_slice] == l
    parcellation_masks = sparse.coo_matrix(
        parcellation_masks.astype(np.float32)).tocsr()

    # Transform back data
    # (transformed data should be similar to the original data (up to
    # thresholding and sum across parcellations) since by construction
    # the signal is homogeneous within each parcel for each subject)
    thresholded_data = data.copy()
    thresholded_data[thresholded_data < 2] = 0.
    thresholded_data *= 2.
    res = _compute_counting_statistic_from_parcel_level_scores(
        parcel_level_results.get_data(), slice(0, 2), parcellation_masks,
        n_parcellations, n_parcellations * n_parcels)
    counting_stats_original_data, h0 = res
    assert_array_equal(counting_stats_original_data,
                       thresholded_data[0])
    assert_array_equal(h0, [8])

    # Same thing but only for the permuted data
    res = _compute_counting_statistic_from_parcel_level_scores(
        parcel_level_results.get_data()[2:], slice(1, 2),
        parcellation_masks, n_parcellations, n_parcellations * n_parcels)
    counting_stats_original_data, h0 = res
    assert_array_equal(counting_stats_original_data, [])
    assert_array_equal(h0, [8])
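As the inline comment notes, the signal is homogeneous within each parcel by construction, so back-projecting the thresholded parcel-level scores and summing across the two parcellations simply reproduces the thresholded input multiplied by n_parcellations = 2, which is exactly what thresholded_data holds and what the first assertion checks.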