Example #1
    def test_isolated_continuous_random_walks(self):
        # Two 2D random walks
        np.random.seed(0)
        N = 30
        Y = 250
        M = 20  # margin, because negative values raise OutOfHash
        a = DataFrame({'x': M + random_walk(N),
                       'y': M + random_walk(N),
                       'frame': np.arange(N)})
        b = DataFrame({'x': M + random_walk(N - 1),
                       'y': M + Y + random_walk(N - 1),
                       'frame': np.arange(1, N)})
        f = pandas_concat([a, b])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate([np.zeros(N), np.ones(N - 1)])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        actual = self.link_df(f, 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(f, 5, hash_size=(2*M, Y + 2*M))
        assert_traj_equal(actual_iter, expected)

        # Many 2D random walks
        np.random.seed(0)
        initial_positions = [(100, 100), (200, 100), (100, 200), (200, 200)]
        import itertools
        c = itertools.count()
        def walk(x, y):
            i = next(c)
            return DataFrame({'x': x + random_walk(N - i),
                              'y': y + random_walk(N - i),
                              'frame': np.arange(i, N)})
        f = pandas_concat([walk(*pos) for pos in initial_positions])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate(
            [i*np.ones(N - i) for i in range(len(initial_positions))])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        actual = self.link_df(f, 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(f, 5, hash_size=(200 + M, 200 + M))
        assert_traj_equal(actual_iter, expected)
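The hash_size arguments above are tied to the margin M: the grid hash must cover every coordinate, and negative values fall outside it (hence OutOfHash). A minimal sketch of deriving a safe size from the data itself, using a hypothetical helper that is not part of the test suite:

def safe_hash_size(f, margin=1):
    # All coordinates must be non-negative and inside the hash extents;
    # negative values raise OutOfHash, which is what the margin M avoids.
    assert (f[['x', 'y']].values >= 0).all()
    return (f['x'].max() + margin, f['y'].max() + margin)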
Example #2
    def test_nearby_continuous_random_walks(self):
        # Two 2D random walks
        np.random.seed(0)
        N = 30
        Y = 250
        M = 20  # margin, because negative values raise OutOfHash
        a = DataFrame({
            'x': M + random_walk(N),
            'y': M + random_walk(N),
            'frame': np.arange(N)
        })
        b = DataFrame({
            'x': M + random_walk(N - 1),
            'y': M + Y + random_walk(N - 1),
            'frame': np.arange(1, N)
        })
        f = pandas_concat([a, b])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate([np.zeros(N), np.ones(N - 1)])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        actual = self.link_df(f, 5)
        assert_traj_equal(actual, expected)
        actual = self.link_df_iter(f, 5, hash_size=(2 * M, 2 * M + Y))
        assert_traj_equal(actual, expected)

        # Several 2D random walks
        np.random.seed(0)
        initial_positions = [(10, 11), (10, 18), (14, 15), (20, 21), (13, 13),
                             (10, 10), (17, 19)]
        import itertools
        c = itertools.count()

        def walk(x, y):
            i = next(c)
            return DataFrame({
                'x': x + random_walk(N - i),
                'y': y + random_walk(N - i),
                'frame': np.arange(i, N)
            })

        f = pandas_concat([walk(*pos) for pos in initial_positions])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate(
            [i * np.ones(N - i) for i in range(len(initial_positions))])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        actual = self.link_df(f, 5)
        assert_traj_equal(actual, expected)
        actual = self.link_df_iter(f, 5, hash_size=(2 * M, 2 * M))
        assert_traj_equal(actual, expected)

        # Shuffle rows (crazy!)
        np.random.seed(0)
        f1 = f.reset_index(drop=True)
        f1 = f1.reindex(np.random.permutation(f1.index))
        actual = self.link_df(f1, 5)
        assert_traj_equal(actual, expected)
        actual = self.link_df_iter(f1, 5, hash_size=(2 * M, 2 * M))
        assert_traj_equal(actual, expected)
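Note that DataFrame.reindex returns a new frame rather than permuting in place, which is why the shuffled result must be assigned back to f1 above. A standalone check (pandas and numpy only, toy data):

import numpy as np
import pandas as pd

df = pd.DataFrame({'x': np.arange(4)})
np.random.seed(0)
shuffled = df.reindex(np.random.permutation(df.index))
print(df['x'].tolist())        # [0, 1, 2, 3] -- df itself is unchanged
print(shuffled['x'].tolist())  # a permutation of [0, 1, 2, 3]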
Example #4
    def test_memory_on_one_gap(self):
        N = 5
        Y = 2
        # Begin second feature one frame later than the first, so the particle labeling (0, 1) is
        # established and not arbitrary.
        a = DataFrame({
            'x': np.arange(N),
            'y': np.ones(N),
            'frame': np.arange(N)
        })
        b = DataFrame({
            'x': np.arange(1, N),
            'y': Y + np.ones(N - 1),
            'frame': np.arange(1, N)
        })
        a = a.drop(3).reset_index(drop=True)
        f = pandas_concat([a, b])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate(
            [np.array([0, 0, 0, 0]), np.ones(N - 1)])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        expected.reset_index(drop=True, inplace=True)
        actual = self.link(f, 5, memory=1)
        assert_traj_equal(actual, expected)

        # Sort rows by frame (normal use)
        actual = self.link(pandas_sort(f, 'frame'), 5, memory=1)
        assert_traj_equal(actual, expected)

        # Shuffle rows (crazy!)
        np.random.seed(0)
        f1 = f.reset_index(drop=True)
        f1 = f1.reindex(np.random.permutation(f1.index))
        actual = self.link(f1, 5, memory=1)
        assert_traj_equal(actual, expected)
Example #5
    def test_two_isolated_steppers(self):
        N = 5
        Y = 25
        # Begin second feature one frame later than the first, so the particle labeling (0, 1) is
        # established and not arbitrary.
        a = DataFrame({'x': np.arange(N), 'y': np.ones(N),
                       'frame': np.arange(N)})
        b = DataFrame({'x': np.arange(1, N), 'y': Y + np.ones(N - 1),
                       'frame': np.arange(1, N)})
        f = pandas_concat([a, b])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate([np.zeros(N), np.ones(N - 1)])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        actual = self.link_df(f, 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(f, 5, hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)

        # Sort rows by frame (normal use)
        actual = self.link_df(pandas_sort(f, 'frame'), 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(pandas_sort(f, 'frame'), 5, hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)

        # Shuffle rows (crazy!)
        np.random.seed(0)
        f1 = f.reset_index(drop=True)
        f1 = f1.reindex(np.random.permutation(f1.index))
        actual = self.link_df(f1, 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(f1, 5, hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)
Example #6
    def setUp(self):
        N = 10
        Y = 1
        a = DataFrame({'x': np.arange(N), 'y': np.zeros(N),
                       'frame': np.arange(N), 'particle': np.zeros(N)})
        b = DataFrame({'x': np.arange(1, N), 'y': Y + np.zeros(N - 1),
                       'frame': np.arange(1, N), 'particle': np.ones(N - 1)})
        self.steppers = conformity(pandas_concat([a, b]))
Example #7
    def link_df_iter(self, *args, **kwargs):
        kwargs.update(self.linker_opts)
        kwargs['diagnostics'] = self.do_diagnostics
        args = list(args)
        features = args.pop(0)
        res = pandas_concat(link_df_iter(
            (df for fr, df in features.groupby('frame')), *args, **kwargs))
        return pandas_sort(res, ['particle', 'frame']).reset_index(drop=True)
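The generator expression handed to link_df_iter above is the idiomatic way to stream per-frame chunks out of a single DataFrame. A standalone illustration of what groupby('frame') yields (pandas only; the toy data is made up):

import pandas as pd

features = pd.DataFrame({'x': [0.0, 1.0, 0.1, 1.1],
                         'y': [0.0, 0.0, 0.0, 0.0],
                         'frame': [0, 0, 1, 1]})
# groupby('frame') yields (frame_number, sub-DataFrame) pairs in frame
# order; the wrapper above keeps only the sub-DataFrames.
for fr, df in features.groupby('frame'):
    print(fr, df[['x', 'y']].values.tolist())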
Example #9
    def setUp(self):
        N = 10
        Y = 1
        a = DataFrame({
            'x': np.zeros(N),
            'y': np.zeros(N),
            'frame': np.arange(N),
            'particle': np.zeros(N)
        })
        b = DataFrame({
            'x': np.zeros(N - 1),
            'y': Y + np.zeros(N - 1),
            'frame': np.arange(1, N),
            'particle': np.ones(N - 1)
        })
        self.dead_still = conformity(pandas_concat([a, b]))
        pandas_sort(self.dead_still, ['frame', 'particle'], inplace=True)

        P = 1000  # particles
        A = 0.00001  # step amplitude
        np.random.seed(0)
        particles = [
            DataFrame({
                'x': A * random_walk(N),
                'y': A * random_walk(N),
                'frame': np.arange(N),
                'particle': i
            }) for i in range(P)
        ]
        self.many_walks = conformity(pandas_concat(particles))

        a = DataFrame({
            'x': np.arange(N),
            'y': np.zeros(N),
            'frame': np.arange(N),
            'particle': np.zeros(N)
        })
        b = DataFrame({
            'x': np.arange(1, N),
            'y': Y + np.zeros(N - 1),
            'frame': np.arange(1, N),
            'particle': np.ones(N - 1)
        })
        self.steppers = conformity(pandas_concat([a, b]))
Example #10
    def test_memory_with_late_appearance(self):
        a = unit_steps()
        b = random_walk_legacy()
        gapped = pandas_concat([a, b[b['frame'].isin([1, 4])]])

        safe_disp = 1 + random_x.max() - random_x.min()  # large enough
        t0 = self.link(gapped, safe_disp, memory=1)
        assert len(np.unique(t0['particle'].values)) == 3
        t2 = self.link(gapped, safe_disp, memory=4)
        assert len(np.unique(t2['particle'].values)) == 2
Example #11
    def link(self, f, search_range, *args, **kwargs):
        def df_iter(f, first_frame, last_frame):
            """ link_df_iter requires a generator of dataframes """
            for t in range(first_frame, last_frame + 1):
                yield f[f['frame'] == t]

        res_iter = link_df_iter(df_iter(f, 0, int(f['frame'].max())),
                                search_range, *args, **kwargs)
        res = pandas_concat(res_iter)
        return pandas_sort(res, ['particle', 'frame']).reset_index(drop=True)
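Unlike the groupby-based wrapper, this helper iterates over every integer frame number, so a frame with no features yields an empty DataFrame; that is what lets gapped trajectories reach the linker as explicit empty frames. A quick standalone check (pandas only, toy data):

import pandas as pd

f = pd.DataFrame({'x': [0.0, 0.0], 'y': [0.0, 0.0], 'frame': [0, 2]})
chunks = [f[f['frame'] == t] for t in range(int(f['frame'].max()) + 1)]
print([len(c) for c in chunks])  # [1, 0, 1] -- frame 1 is present but empty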
Example #12
    def setUp(self):
        np.random.seed(0)
        randn = np.random.randn
        N = 500
        a = DataFrame(randn(N, 2), columns=['x', 'y'])
        b = DataFrame(a[['x', 'y']] + 0.1 * randn(N, 2), columns=['x', 'y'])
        a['particle'] = np.arange(N)
        b['particle'] = np.arange(N)
        a['frame'] = 0
        b['frame'] = 1
        self.random_walk = pandas_concat([a, b])
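With per-axis noise of 0.1, a matched feature moves by roughly 0.1 * sqrt(pi/2) ~ 0.125 between the two frames, which sets the scale that any sensible search_range has to cover. A numpy-only check of that displacement scale:

import numpy as np

np.random.seed(0)
N = 500
a = np.random.randn(N, 2)
b = a + 0.1 * np.random.randn(N, 2)
step = np.hypot(*(b - a).T)
# Mean radial step for 2D Gaussian noise with sigma = 0.1 is
# 0.1 * sqrt(pi/2) ~= 0.125.
print(step.mean())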
Example #14
    def setUp(self):
        N = 10
        Y = 1
        a = DataFrame({'x': np.zeros(N), 'y': np.zeros(N),
                       'frame': np.arange(N), 'particle': np.zeros(N)})
        b = DataFrame({'x': np.zeros(N - 1), 'y': Y + np.zeros(N - 1),
                       'frame': np.arange(1, N), 'particle': np.ones(N - 1)})
        self.dead_still = conformity(pandas_concat([a, b]))

        P = 50  # particles
        A = 1  # step amplitude
        np.random.seed(0)
        particles = [DataFrame({'x': A*random_walk(N), 'y': A*random_walk(N),
                                'frame': np.arange(N), 'particle': i})
                     for i in range(P)]
        self.many_walks = conformity(pandas_concat(particles))

        a = DataFrame({'x': np.arange(N), 'y': np.zeros(N),
                       'frame': np.arange(N), 'particle': np.zeros(N)})
        b = DataFrame({'x': np.arange(1, N), 'y': Y + np.zeros(N - 1),
                       'frame': np.arange(1, N), 'particle': np.ones(N - 1)})
        self.steppers = conformity(pandas_concat([a, b]))
Example #15
    def test_two_nearby_steppers(self):
        N = 5
        Y = 2
        # Begin second feature one frame later than the first, so the particle labeling (0, 1) is
        # established and not arbitrary.
        a = DataFrame({
            'x': np.arange(N),
            'y': np.ones(N),
            'frame': np.arange(N)
        })
        b = DataFrame({
            'x': np.arange(1, N),
            'y': Y + np.ones(N - 1),
            'frame': np.arange(1, N)
        })
        f = pandas_concat([a, b])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate([np.zeros(N), np.ones(N - 1)])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        actual = self.link_df(f, 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(f, 5, hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)

        # Sort rows by frame (normal use)
        actual = self.link_df(pandas_sort(f, 'frame'), 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(pandas_sort(f, 'frame'), 5,
                                        hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)

        # Shuffle rows (crazy!)
        np.random.seed(0)
        f1 = f.reset_index(drop=True)
        f1 = f1.reindex(np.random.permutation(f1.index))
        actual = self.link_df(f1, 5)
        assert_traj_equal(actual, expected)
        actual_iter = self.link_df_iter(f1, 5, hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)

        if self.do_diagnostics:
            assert 'diag_subnet' in self.diag.columns
            assert 'diag_subnet_size' in self.diag.columns
            # Except for frame in which they appear, all particles should have
            # been labeled with a search_range
            assert not any(
                self.diag['diag_search_range'][actual_iter.frame > 1].isnull())
            # The number of loop iterations is reported by the numba linker only
            if self.linker_opts['link_strategy'] == 'numba':
                assert 'diag_subnet_iterations' in self.diag.columns
Example #16
    def test_memory(self):
        """A unit-stepping trajectory and a random walk are observed
        simultaneously. The random walk is missing from one observation."""
        a = unit_steps()
        b = random_walk_legacy()
        # b[2] is intentionally omitted below.
        gapped = pandas_concat([a, b[b['frame'] != 2]])

        safe_disp = 1 + random_x.max() - random_x.min()  # Definitely large enough
        t0 = self.link(gapped, safe_disp, memory=0)
        assert len(np.unique(t0['particle'].values)) == 3
        t2 = self.link(gapped, safe_disp, memory=2)
        assert len(np.unique(t2['particle'].values)) == 2

    def test_memory_on_one_gap(self):
        N = 5
        Y = 2
        # Begin second feature one frame later than the first, so the particle labeling (0, 1) is
        # established and not arbitrary.
        a = DataFrame({'x': np.arange(N), 'y': np.ones(N),
                       'frame': np.arange(N)})
        b = DataFrame({'x': np.arange(1, N), 'y': Y + np.ones(N - 1),
                       'frame': np.arange(1, N)})
        a = a.drop(3).reset_index(drop=True)
        f = pandas_concat([a, b])
        expected = f.copy().reset_index(drop=True)
        expected['particle'] = np.concatenate(
            [np.array([0, 0, 0, 0]), np.ones(N - 1)])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        expected.reset_index(drop=True, inplace=True)
        actual = self.link_df(f, 5, memory=1)
        assert_traj_equal(actual, expected)
        if self.do_diagnostics:
            assert 'diag_remembered' in self.diag.columns
        actual_iter = self.link_df_iter(f, 5, hash_size=(50, 50), memory=1)
        assert_traj_equal(actual_iter, expected)
        if self.do_diagnostics:
            assert 'diag_remembered' in self.diag.columns

        # Sort rows by frame (normal use)
        actual = self.link_df(pandas_sort(f, 'frame'), 5, memory=1)
        assert_traj_equal(actual, expected)
        if self.do_diagnostics:
            assert 'diag_remembered' in self.diag.columns
        actual_iter = self.link_df_iter(pandas_sort(f, 'frame'), 5,
                                        memory=1, hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)
        if self.do_diagnostics:
            assert 'diag_remembered' in self.diag.columns

        # Shuffle rows (crazy!)
        np.random.seed(0)
        f1 = f.reset_index(drop=True)
        f1 = f1.reindex(np.random.permutation(f1.index))
        actual = self.link_df(f1, 5, memory=1)
        assert_traj_equal(actual, expected)
        if self.do_diagnostics:
            assert 'diag_remembered' in self.diag.columns
        actual_iter = self.link_df_iter(f1, 5, memory=1, hash_size=(50, 50))
        assert_traj_equal(actual_iter, expected)
        if self.do_diagnostics:
            assert 'diag_remembered' in self.diag.columns
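Both memory tests above exercise the same rule: a particle absent for up to memory frames can be re-linked to its old label instead of spawning a new one. A minimal end-to-end sketch, assuming the public trackpy.link API rather than the test harness used here:

import pandas as pd
import trackpy as tp  # assumption: trackpy's public link() is available

# Particle A sits at x=0 in frames 0-2; particle B sits at x=10 but is
# missing from frame 1 (frame 1 still exists because A is present).
f = pd.DataFrame({'x': [0., 10., 0., 0., 10.],
                  'y': [0., 0., 0., 0., 0.],
                  'frame': [0, 0, 1, 2, 2]})
t0 = tp.link(f, search_range=1, memory=0)
t1 = tp.link(f, search_range=1, memory=1)
print(t0['particle'].nunique())  # 3: B's reappearance gets a new label
print(t1['particle'].nunique())  # 2: B is remembered across the gap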
Example #18
def contracting_grid():
    """Two frames with a grid of 441 points.

    In the second frame, the points contract, so that the outermost set
    coincides with the second-outermost set in the previous frame.

    This is a way to challenge (and/or stump) a subnet solver.
    """
    pts0x, pts0y = np.mgrid[-10:11, -10:11] * 2.
    pts0 = pd.DataFrame(dict(x=pts0x.flatten(), y=pts0y.flatten(), frame=0))
    pts1 = pts0.copy()
    pts1.frame = 1
    pts1.x = pts1.x * 0.9
    pts1.y = pts1.y * 0.9
    allpts = pandas_concat([pts0, pts1], ignore_index=True)
    allpts.x += 200  # Because BTree doesn't allow negative coordinates
    allpts.y += 200
    return allpts
def contracting_grid():
    """Two frames with a grid of 441 points.

    In the second frame, the points contract, so that the outermost set
    coincides with the second-outermost set in the previous frame.

    This is a way to challenge (and/or stump) a subnet solver.
    """
    pts0x, pts0y = np.mgrid[-10:11, -10:11]
    pts0 = pd.DataFrame(dict(x=pts0x.flatten(), y=pts0y.flatten(),
                             frame=0))
    pts1 = pts0.copy()
    pts1.frame = 1
    pts1.x = pts1.x * 0.9
    pts1.y = pts1.y * 0.9
    allpts = pandas_concat([pts0, pts1], ignore_index=True)
    allpts.x += 100  # Because BTree doesn't allow negative coordinates
    allpts.y += 100
    return allpts
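Either variant of this fixture is meant to be fed straight to a linker: every ring of the contracting grid lands where the next ring in was, so candidate matches are deliberately ambiguous and the subnet solver has to arbitrate. A hedged usage sketch (assuming the public trackpy.link API; the search_range of 2 is an assumption sized to the unit-spaced variant, not a value from the tests):

import trackpy as tp  # assumption: trackpy is installed

allpts = contracting_grid()  # the unit-spaced variant defined above
t = tp.link(allpts, search_range=2)
linked_both = (t.groupby('particle')['frame'].count() == 2).sum()
print(linked_both, "of 441 grid points linked across both frames")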
Example #21
    def test_two_isolated_steppers_one_gapped(self):
        N = 5
        Y = 25
        # Begin second feature one frame later than the first,
        # so the particle labeling (0, 1) is established and not arbitrary.
        a = DataFrame({
            'x': np.arange(N),
            'y': np.ones(N),
            'frame': np.arange(N)
        })
        a = a.drop(3).reset_index(drop=True)
        b = DataFrame({
            'x': np.arange(1, N),
            'y': Y + np.ones(N - 1),
            'frame': np.arange(1, N)
        })
        f = pandas_concat([a, b])
        expected = f.copy()
        expected['particle'] = np.concatenate(
            [np.array([0, 0, 0, 2]), np.ones(N - 1)])
        pandas_sort(expected, ['particle', 'frame'], inplace=True)
        expected.reset_index(drop=True, inplace=True)
        actual = self.link(f, 5)
        assert_traj_equal(actual, expected)
        # link_df_iter() tests not performed, because hash_size is
        # not knowable from the first frame alone.

        # Sort rows by frame (normal use)
        actual = self.link(pandas_sort(f, 'frame'), 5)
        assert_traj_equal(actual, expected)

        # Shuffle rows (crazy!)
        np.random.seed(0)
        f1 = f.reset_index(drop=True)
        f1 = f1.reindex(np.random.permutation(f1.index))
        actual = self.link(f1, 5)
        assert_traj_equal(actual, expected)
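The reasoning for skipping link_df_iter() here can be made concrete: the surviving stepper keeps advancing in x, so a hash sized from frame 0 alone would overflow in later frames. A numpy-only illustration (toy numbers, not taken from the test):

import numpy as np

positions = {t: np.array([[float(t), 1.0]]) for t in range(5)}  # x = t
frame0_extent = positions[0].max(axis=0)                       # [0., 1.]
full_extent = np.vstack(list(positions.values())).max(axis=0)  # [4., 1.]
# Any hash_size chosen from frame 0 misses x coordinates seen later on.
print(frame0_extent, full_extent)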