Example #1
def test_in_polygon():
    polygon = [[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]
    points = np.random.uniform(size=(100, 2), low=-1, high=1)
    idx_expected = np.nonzero((points[:, 0] > 0) & (points[:, 1] > 0)
                              & (points[:, 0] < 1) & (points[:, 1] < 1))[0]
    idx = np.nonzero(_in_polygon(points, polygon))[0]
    ae(idx, idx_expected)
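
A minimal sketch of the behaviour this test exercises: a boolean mask marking which points fall inside a closed polygon. The name in_polygon_sketch is illustrative; matplotlib's Path.contains_points reproduces the expectation here, but it is not necessarily how _in_polygon is implemented.

import numpy as np
from matplotlib.path import Path

def in_polygon_sketch(points, polygon):
    # True for points inside the polygon (boundary handling may differ).
    return Path(np.asarray(polygon)).contains_points(np.asarray(points))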
Example #2
def test_regular_subset():
    spikes = [2, 3, 5, 7, 11, 13, 17]
    ae(regular_subset(spikes), spikes)
    ae(regular_subset(spikes, 100), spikes)
    ae(regular_subset(spikes, 100, offset=2), spikes)
    ae(regular_subset(spikes, 3), [2, 7, 17])
    ae(regular_subset(spikes, 3, offset=1), [3, 11])
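
The assertions above are consistent with a simple stride-based subsampling: return everything when there is no cap (or the cap is not exceeded), otherwise keep every k-th spike starting at offset. A minimal sketch with an illustrative name, inferred from the test rather than taken from the library:

import numpy as np

def regular_subset_sketch(spikes, n_spikes_max=None, offset=0):
    spikes = np.asarray(spikes)
    # No cap, or the cap is not exceeded: keep everything.
    if not n_spikes_max or len(spikes) <= n_spikes_max:
        return spikes
    # Otherwise keep every k-th spike, starting at `offset`.
    step = 1 + len(spikes) // n_spikes_max
    return spikes[offset::step]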
Example #3
def test_select_spikes_2():
    n_spikes = 1000
    n_clusters = 10
    spike_times = artificial_spike_samples(n_spikes)
    spike_times = 10. * spike_times / spike_times.max()
    chunk_bounds = np.linspace(0.0, 10.0, 11)
    n_chunks_kept = 3
    chunks_kept = [0., 1., 4., 5., 8., 9.]
    spike_clusters = artificial_spike_clusters(n_spikes, n_clusters)

    spc = _spikes_per_cluster(spike_clusters)
    ss = SpikeSelector(
        get_spikes_per_cluster=lambda cl: spc.get(cl, np.array([], dtype=np.int64)),
        spike_times=spike_times, chunk_bounds=chunk_bounds, n_chunks_kept=n_chunks_kept)
    ae(ss.chunks_kept, chunks_kept)

    def _check_chunks(sid):
        chunk_ids = np.searchsorted(chunk_bounds, spike_times[sid], 'right') - 1
        ae(np.unique(chunk_ids), [0, 4, 8])

    # Select all spikes belonging to the kept chunks.
    sid = ss(n_spikes, np.arange(n_clusters), subset_chunks=True)
    _check_chunks(sid)

    # Select 10 spikes from each cluster.
    sid = ss(10, np.arange(n_clusters), subset_chunks=True)
    assert np.all(np.diff(sid) > 0)
    _check_chunks(sid)
    ae(np.bincount(spike_clusters[sid]), [10] * 10)
Example #4
def test_select_spikes_2():
    spc = lambda c: {2: [2, 7, 11], 3: [3, 5], 5: []}.get(c, None)

    sel = Selector(spc)
    assert sel.select_spikes() is None
    ae(sel.select_spikes([2, 5]), spc(2))
    ae(sel.select_spikes([2, 5], 2), [2])
Example #5
def test_select_spikes_3():
    s = select_spikes(
        [0],
        max_n_spikes_per_cluster=4,
        spikes_per_cluster=lambda x: np.arange(10),
        batch_size=2,
    )
    ae(s, [0, 1, 8, 9])
Example #6
def test_read_write(tempdir):
    arr = np.arange(10).astype(np.float32)

    path = Path(tempdir) / 'test.npy'

    write_array(path, arr)
    ae(read_array(path), arr)
    ae(read_array(path, mmap_mode='r'), arr)
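
Because the file is a .npy file, the round trip above can be reproduced directly with NumPy. A minimal sketch with illustrative names; phylib's read_array/write_array may do more (e.g. handle other file extensions):

import numpy as np

def write_array_sketch(path, arr):
    # Persist the array in NumPy's .npy format.
    np.save(str(path), arr)

def read_array_sketch(path, mmap_mode=None):
    # Load it back, optionally memory-mapped (mmap_mode='r').
    return np.load(str(path), mmap_mode=mmap_mode)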
Example #7
def test_unique():
    """Test _unique() function"""
    _unique([])

    n_spikes = 300
    n_clusters = 3
    spike_clusters = artificial_spike_clusters(n_spikes, n_clusters)
    ae(_unique(spike_clusters), np.arange(n_clusters))
Example #8
def test_get_padded():
    arr = np.array([1, 2, 3])[:, np.newaxis]

    with raises(RuntimeError):
        ae(_get_padded(arr, -2, 5).ravel(), [1, 2, 3, 0, 0])
    ae(_get_padded(arr, 1, 2).ravel(), [2])
    ae(_get_padded(arr, 0, 5).ravel(), [1, 2, 3, 0, 0])
    ae(_get_padded(arr, -2, 3).ravel(), [0, 0, 1, 2, 3])
Example #9
def test_spikes_in_clusters():
    """Test _spikes_in_clusters()."""

    n_spikes = 100
    n_clusters = 5
    spike_clusters = artificial_spike_clusters(n_spikes, n_clusters)

    ae(_spikes_in_clusters(spike_clusters, []), [])

    for i in range(n_clusters):
        assert np.all(spike_clusters[_spikes_in_clusters(spike_clusters, [i])] == i)

    clusters = [1, 2, 3]
    assert np.all(np.in1d(
        spike_clusters[_spikes_in_clusters(spike_clusters, clusters)], clusters))
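
A minimal sketch of the contract checked here: the sorted indices of all spikes whose cluster id belongs to the requested set (an empty set gives an empty result). The name is illustrative, not phylib's code:

import numpy as np

def spikes_in_clusters_sketch(spike_clusters, clusters):
    # Indices of spikes whose cluster id is in `clusters`.
    return np.nonzero(np.in1d(spike_clusters, clusters))[0]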
Example #10
def test_as_array():
    ae(_as_array(3), [3])
    ae(_as_array([3]), [3])
    ae(_as_array(3.), [3.])
    ae(_as_array([3.]), [3.])

    with raises(ValueError):
        _as_array(map)
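
A minimal sketch consistent with these assertions: numbers and sequences become NumPy arrays, anything else raises ValueError. Illustrative only; the real _as_array may accept more types:

import numpy as np

def as_array_sketch(obj):
    # Wrap scalars into 1D arrays, pass sequences through, reject the rest.
    if isinstance(obj, (int, float, np.generic)):
        return np.atleast_1d(np.asarray(obj))
    if isinstance(obj, (list, tuple, np.ndarray)):
        return np.asarray(obj)
    raise ValueError("Could not convert %r to a NumPy array." % (obj,))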
Example #11
def test_spikes_per_cluster():
    """Test _spikes_per_cluster()."""

    n_spikes = 100
    n_clusters = 3
    spike_clusters = artificial_spike_clusters(n_spikes, n_clusters)

    assert not _spikes_per_cluster([])

    spikes_per_cluster = _spikes_per_cluster(spike_clusters)
    assert list(spikes_per_cluster.keys()) == list(range(n_clusters))

    for i in range(n_clusters):
        ae(spikes_per_cluster[i], np.sort(spikes_per_cluster[i]))
        assert np.all(spike_clusters[spikes_per_cluster[i]] == i)
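
The properties asserted above (keys are the sorted cluster ids, each value is a sorted index array consistent with spike_clusters, empty input gives an empty mapping) are satisfied by a direct dictionary comprehension. A minimal sketch with an illustrative name:

import numpy as np

def spikes_per_cluster_sketch(spike_clusters):
    # Map each cluster id to the sorted array of spike indices assigned to it.
    spike_clusters = np.asarray(spike_clusters)
    return {int(c): np.nonzero(spike_clusters == c)[0]
            for c in np.unique(spike_clusters)}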
Example #12
def test_accumulator_2():
    acc = _accumulate([
        {
            'a': np.arange(3),
            'b': np.arange(3) * 10,
            'c': 0
        },
        {
            'a': np.arange(3, 5),
            'b': np.arange(3, 5) * 10,
            'c': 1
        },
    ])
    ae(acc['a'], np.arange(5))
    ae(acc['b'], np.arange(5) * 10)
    # NOTE: in case of scalars, we take the first one and discard the others.
    # We don't concatenate them.
    assert acc['c'] == 0
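
Following the NOTE above, a minimal sketch of the accumulation rule the test checks: array values are concatenated key by key, while scalar values keep their first occurrence. Illustrative name, not the library's implementation:

import numpy as np

def accumulate_sketch(dicts):
    out = {}
    for d in dicts:
        for key, value in d.items():
            if np.isscalar(value):
                # Scalars: keep the first one, discard the others.
                out.setdefault(key, value)
            else:
                # Arrays: concatenate across the dictionaries.
                out[key] = np.concatenate([out[key], value]) if key in out else np.asarray(value)
    return out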
Example #13
def test_get_excerpts():
    data = np.random.rand(100, 2)
    subdata = get_excerpts(data, n_excerpts=10, excerpt_size=5)
    assert subdata.shape == (50, 2)
    ae(subdata[:5, :], data[:5, :])
    ae(subdata[-5:, :], data[-10:-5, :])

    data = np.random.rand(10, 2)
    subdata = get_excerpts(data, n_excerpts=10, excerpt_size=5)
    ae(subdata, data)

    data = np.random.rand(10, 2)
    subdata = get_excerpts(data, n_excerpts=1, excerpt_size=10)
    ae(subdata, data)

    assert len(get_excerpts(data, n_excerpts=0, excerpt_size=10)) == 0
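
A minimal sketch consistent with these assertions, assuming excerpts are evenly spaced blocks of rows; the step formula is inferred from the expected first and last excerpts, not taken from the library:

import numpy as np

def get_excerpts_sketch(data, n_excerpts, excerpt_size):
    # Take `n_excerpts` evenly spaced blocks of `excerpt_size` rows each;
    # if the blocks would cover the whole array, return it unchanged.
    n = data.shape[0]
    if n_excerpts == 0:
        return data[:0]
    if n_excerpts * excerpt_size >= n:
        return data
    step = (n - excerpt_size) // max(n_excerpts - 1, 1)
    return np.concatenate([data[i * step:i * step + excerpt_size]
                           for i in range(n_excerpts)], axis=0)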
Example #14
def test_chunk():
    data = np.random.randn(200, 4)
    chunks = chunk_bounds(data.shape[0], 100, overlap=20)

    with raises(ValueError):
        data_chunk(data, (0, 0, 0))

    assert data_chunk(data, (0, 0)).shape == (0, 4)

    # Chunk 1.
    ch = next(chunks)
    d = data_chunk(data, ch)
    d_o = data_chunk(data, ch, with_overlap=True)

    ae(d_o, data[0:100])
    ae(d, data[0:90])

    # Chunk 2.
    ch = next(chunks)
    d = data_chunk(data, ch)
    d_o = data_chunk(data, ch, with_overlap=True)

    ae(d_o, data[80:180])
    ae(d, data[90:170])
Example #15
def test_concatenate_virtual_arrays_1():
    arrs = [np.arange(5), np.arange(10, 12), np.array([0])]
    c = _concatenate_virtual_arrays(arrs, scaling=1)
    assert c.shape == (8,)
    assert len(c) == 8
    assert c._get_recording(3) == 0
    assert c._get_recording(5) == 1

    ae(c[:], [0, 1, 2, 3, 4, 10, 11, 0])
    ae(c[0], [0])
    ae(c[4], [4])
    ae(c[5], [10])
    ae(c[6], [11])

    ae(c[4:6], [4, 10])

    ae(c[:6], [0, 1, 2, 3, 4, 10])
    ae(c[4:], [4, 10, 11, 0])
    ae(c[4:-1], [4, 10, 11])
Example #16
def test_select_spikes_1():
    with raises(AssertionError):
        select_spikes()
    spikes = [2, 3, 5, 7, 11]
    spc = lambda c: {2: [2, 7, 11], 3: [3, 5], 5: []}.get(c, None)
    ae(select_spikes([], spikes_per_cluster=spc), [])
    ae(select_spikes([2, 3, 5], spikes_per_cluster=spc), spikes)
    ae(select_spikes([2, 5], spikes_per_cluster=spc), spc(2))

    ae(select_spikes([2, 3, 5], 0, spikes_per_cluster=spc), spikes)
    ae(select_spikes([2, 3, 5], None, spikes_per_cluster=spc), spikes)
    ae(select_spikes([2, 3, 5], 1, spikes_per_cluster=spc), [2, 3])
    ae(select_spikes([2, 5], 2, spikes_per_cluster=spc), [2])
Example #17
def test_grouped_mean():
    spike_clusters = np.array([2, 3, 2, 2, 5])
    arr = [9, -3, 10, 11, -5]
    ae(grouped_mean(arr, spike_clusters), [10, -3, -5])
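
A minimal sketch of what the assertion implies: the mean of `arr` within each cluster, returned in the order of the sorted cluster ids. Illustrative name only:

import numpy as np

def grouped_mean_sketch(arr, spike_clusters):
    arr = np.asarray(arr, dtype=np.float64)
    spike_clusters = np.asarray(spike_clusters)
    # One mean per cluster id, in sorted cluster-id order.
    return np.array([arr[spike_clusters == c].mean()
                     for c in np.unique(spike_clusters)])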
Example #18
def test_flatten_per_cluster():
    spc = {2: [2, 7, 11], 3: [3, 5], 5: []}
    arr = _flatten_per_cluster(spc)
    ae(arr, [2, 3, 5, 7, 11])
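
A minimal sketch matching this expectation: merge the per-cluster spike lists into one sorted array. Illustrative name, not the library's code:

import numpy as np

def flatten_per_cluster_sketch(spikes_per_cluster):
    arrays = [np.asarray(v, dtype=np.int64) for v in spikes_per_cluster.values()]
    # Concatenate all per-cluster lists (some may be empty) and sort the result.
    return np.sort(np.concatenate(arrays)) if arrays else np.array([], dtype=np.int64)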
Example #19
# Nested helper from test_select_spikes_2 (Example #3); `chunk_bounds` and
# `spike_times` are closed over from the enclosing test.
def _check_chunks(sid):
    chunk_ids = np.searchsorted(chunk_bounds, spike_times[sid], 'right') - 1
    ae(np.unique(chunk_ids), [0, 4, 8])
Example #20
def test_range_from_slice():
    """Test '_range_from_slice'."""

    class _SliceTest(object):
        """Utility class to make it more convenient to test slice objects."""
        def __init__(self, **kwargs):
            self._kwargs = kwargs

        def __getitem__(self, item):
            if isinstance(item, slice):
                return _range_from_slice(item, **self._kwargs)

    with raises(ValueError):
        _SliceTest()[:]
    with raises(ValueError):
        _SliceTest()[1:]
    ae(_SliceTest()[:5], [0, 1, 2, 3, 4])
    ae(_SliceTest()[1:5], [1, 2, 3, 4])

    with raises(ValueError):
        _SliceTest()[::2]
    with raises(ValueError):
        _SliceTest()[1::2]
    ae(_SliceTest()[1:5:2], [1, 3])

    with raises(ValueError):
        _SliceTest(start=0)[:]
    with raises(ValueError):
        _SliceTest(start=1)[:]
    with raises(ValueError):
        _SliceTest(step=2)[:]

    ae(_SliceTest(stop=5)[:], [0, 1, 2, 3, 4])
    ae(_SliceTest(start=1, stop=5)[:], [1, 2, 3, 4])
    ae(_SliceTest(stop=5)[1:], [1, 2, 3, 4])
    ae(_SliceTest(start=1)[:5], [1, 2, 3, 4])
    ae(_SliceTest(start=1, step=2)[:5], [1, 3])
    ae(_SliceTest(start=1)[:5:2], [1, 3])

    ae(_SliceTest(length=5)[:], [0, 1, 2, 3, 4])
    with raises(ValueError):
        _SliceTest(length=5)[:3]
    ae(_SliceTest(length=5)[:10], [0, 1, 2, 3, 4])
    ae(_SliceTest(length=5)[:5], [0, 1, 2, 3, 4])
    ae(_SliceTest(start=1, length=5)[:], [1, 2, 3, 4, 5])
    ae(_SliceTest(start=1, length=5)[:6], [1, 2, 3, 4, 5])
    with raises(ValueError):
        _SliceTest(start=1, length=5)[:4]
    ae(_SliceTest(start=1, step=2, stop=5)[:], [1, 3])
    ae(_SliceTest(start=1, stop=5)[::2], [1, 3])
    ae(_SliceTest(stop=5)[1::2], [1, 3])
Example #21
def test_concatenate_virtual_arrays_3():
    arrs = [np.zeros((2, 2)), np.ones((3, 2))]
    c = _concatenate_virtual_arrays(arrs, scaling=2)
    ae(c[3], 2 * np.ones((1, 2)))
Example #22
def test_select_spikes_1():
    spike_times = np.array([0., 1., 2., 3.3, 4.4])
    spike_clusters = np.array([1, 2, 1, 2, 4])
    chunk_bounds = [0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6]
    n_chunks_kept = 2
    cluster_ids = [1, 2, 4]
    spikes_ids_kept = [0, 1, 3]

    spc = _spikes_per_cluster(spike_clusters)
    ss = SpikeSelector(
        get_spikes_per_cluster=lambda cl: spc.get(cl, np.array([], dtype=np.int64)),
        spike_times=spike_times, chunk_bounds=chunk_bounds, n_chunks_kept=n_chunks_kept)
    ae(ss.chunks_kept, [0.0, 1.1, 3.3, 4.4])

    ae(ss(3, [], subset_chunks=True), [])
    ae(ss(3, [0], subset_chunks=True), [])
    ae(ss(3, [1], subset_chunks=True), [0])

    ae(ss(None, cluster_ids, subset_chunks=True), spikes_ids_kept)
    ae(ss(0, cluster_ids, subset_chunks=True), spikes_ids_kept)
    ae(ss(3, cluster_ids, subset_chunks=True), spikes_ids_kept)
    ae(ss(2, cluster_ids, subset_chunks=True), spikes_ids_kept)
    assert list(ss(1, cluster_ids, subset_chunks=True)) in [[0, 1], [0, 3]]

    ae(ss(2, cluster_ids, subset_spikes=[0, 1], subset_chunks=True), [0, 1])
    ae(ss(2, cluster_ids, subset_chunks=False), np.arange(5))
Example #23
def test_spikes_from_chunked():
    chunk_bounds = [0,      4,       9,   12,    20]  # noqa
    spike_times =  [   1, 3, 4, 5, 7,         15]  # noqa

    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 0), [])
    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 1), [4])
    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 2), [4, 5])
    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 3), [4, 5, 7])
    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 4), [1, 4, 5, 7])
    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 5), [1, 3, 4, 5, 7])
    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 6), spike_times)
    ae(select_spikes_from_chunked(spike_times, chunk_bounds, 10), spike_times)
Example #24
def test_index_of():
    """Test _index_of."""
    arr = [36, 42, 42, 36, 36, 2, 42]
    lookup = _unique(arr)
    ae(_index_of(arr, lookup), [1, 2, 2, 1, 1, 0, 2])
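
Since `lookup` comes from _unique() and is therefore sorted and duplicate-free, the mapping tested above can be reproduced with np.searchsorted. A minimal sketch with an illustrative name:

import numpy as np

def index_of_sketch(arr, lookup):
    # Position of each value of `arr` inside the sorted, unique `lookup` array.
    return np.searchsorted(np.asarray(lookup), np.asarray(arr))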
Example #25
def test_pad():
    arr = np.random.rand(10, 3)

    ae(_pad(arr, 0, 'right'), arr[:0, :])
    ae(_pad(arr, 3, 'right'), arr[:3, :])
    ae(_pad(arr, 9), arr[:9, :])
    ae(_pad(arr, 10), arr)

    ae(_pad(arr, 12, 'right')[:10, :], arr)
    ae(_pad(arr, 12)[10:, :], np.zeros((2, 3)))

    ae(_pad(arr, 0, 'left'), arr[:0, :])
    ae(_pad(arr, 3, 'left'), arr[7:, :])
    ae(_pad(arr, 9, 'left'), arr[1:, :])
    ae(_pad(arr, 10, 'left'), arr)

    ae(_pad(arr, 12, 'left')[2:, :], arr)
    ae(_pad(arr, 12, 'left')[:2, :], np.zeros((2, 3)))

    with raises(ValueError):
        _pad(arr, -1)
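
A minimal sketch consistent with the assertions above: truncate or zero-pad the array along the first axis to exactly `n` rows, on the right (default) or on the left, and reject negative lengths. Illustrative name only:

import numpy as np

def pad_sketch(arr, n, side='right'):
    if n < 0:
        raise ValueError("The requested length must be non-negative.")
    rows = arr.shape[0]
    if n <= rows:
        # Truncate: keep the first (right padding) or last (left padding) n rows.
        return arr[:n] if side == 'right' else arr[rows - n:]
    # Pad with zeros on the requested side.
    zeros = np.zeros((n - rows,) + arr.shape[1:], dtype=arr.dtype)
    return np.concatenate([arr, zeros] if side == 'right' else [zeros, arr], axis=0)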
Example #26
def test_concatenate_virtual_arrays_2():
    arrs = [np.zeros((2, 2)), np.ones((3, 2))]
    c = _concatenate_virtual_arrays(arrs)
    assert c.shape == (5, 2)
    ae(c[:, :], np.vstack((np.zeros((2, 2)), np.ones((3, 2)))))
    ae(c[0:4, 0], [0, 0, 1, 1])