def test_merge(self):
    """Merging two randomly chosen columns must match the dense reference.

    Builds a multi-row block matrix, merges a random column pair in the
    DBSRMatrix, and reproduces the merge on a dense copy: add the source
    column block into the destination (the smaller index), then delete the
    source column block entirely.
    """
    n_blocks = 100
    n_rows = 5
    n_cols = 10
    col_dim = 3
    np.random.seed(n_blocks)

    data = np.random.random((n_blocks, 2, col_dim))
    indices = np.random.randint(0, n_cols, size=n_blocks)
    # Random interior row boundaries, sorted, framed by 0 and n_blocks.
    cuts = np.sort(np.random.choice(
        np.arange(1, n_blocks - 1), size=n_rows - 1, replace=False))
    indptr = np.array([0] + cuts.tolist() + [n_blocks])

    sbrm = DBSRMatrix()
    for row in range(n_rows):
        lo, hi = indptr[row], indptr[row + 1]
        sbrm.append_row(indices[lo:hi], data[lo:hi])

    dense = bsr_matrix((data, indices, indptr)).todense()
    pair = np.random.choice(np.arange(n_cols), size=2, replace=False)
    sbrm.merge_cols(tuple(pair.tolist()))

    # Reference merge: fold the larger-indexed column into the smaller one.
    dest, src = min(pair), max(pair)
    dense[:, col_dim * dest:col_dim * dest + col_dim] += \
        dense[:, col_dim * src:col_dim * src + col_dim]
    dense = np.delete(
        dense, np.arange(col_dim * src, col_dim * src + col_dim), 1)

    self.assertTrue(np.allclose(dense, sbrm.to_bsr().todense()))
def test_construction(self):
    """Appending one block per row reproduces the equivalent scipy BSR matrix."""
    nblocks = 10
    np.random.seed(nblocks)

    data = np.random.random((nblocks, 2, 3))
    indices = np.random.randint(0, 7, size=nblocks)
    indptr = np.arange(nblocks + 1)  # exactly one block per row

    sbrm = DBSRMatrix()
    for col, block in zip(indices, data):
        sbrm.append_row(col, block)

    expected = bsr_matrix((data, indices, indptr)).todense()
    self.assertTrue(np.allclose(expected, sbrm.to_bsr().todense()))
def test_merge_cols(self):
    """merge_cols((1, 2)) must behave like remapping column 2 onto column 1."""
    nblocks = 13
    np.random.seed(nblocks)

    data = np.random.random((nblocks, 2, 3))
    indices = np.random.randint(0, 6, size=nblocks)
    indptr = np.arange(len(indices) + 1)

    sbrm = DBSRMatrix()
    for col, block in zip(indices, data):
        sbrm.append_row(col, block)

    # Reference behaviour: column 2 folds into column 1 and every higher
    # column shifts left by one to close the gap.
    indices[indices == 2] = 1
    indices[indices > 2] -= 1
    sbrm.merge_cols((1, 2))

    expected = bsr_matrix((data, indices, indptr)).todense()
    self.assertTrue(np.allclose(expected, sbrm.to_bsr().todense()))
def test_remove_row(self):
    """remove_row(3) must match deleting block 3 from the reference arrays."""
    nblocks = 12
    np.random.seed(nblocks)

    data = np.random.random((nblocks, 2, 2))
    indices = np.random.randint(0, 5, size=nblocks)

    sbrm = DBSRMatrix()
    for col, block in zip(indices, data):
        sbrm.append_row(col, block)

    # Drop row 3 from the reference arrays and from the dynamic matrix.
    data = np.delete(data, 3, 0)
    indices = np.delete(indices, 3)
    # One row was removed, so indptr needs len(indices) + 1 == nblocks entries.
    indptr = np.arange(nblocks)
    sbrm.remove_row(3)

    expected = bsr_matrix((data, indices, indptr)).todense()
    self.assertTrue(np.allclose(expected, sbrm.to_bsr().todense()))
def test_remove_col(self):
    """remove_col(3) must drop every block in column 3 and shift later columns."""
    nblocks = 11
    np.random.seed(nblocks)

    data = np.random.random((nblocks, 3, 2))
    indices = np.random.randint(0, 5, size=nblocks)

    sbrm = DBSRMatrix()
    for col, block in zip(indices, data):
        sbrm.append_row(col, block)

    # Reference behaviour: delete all blocks living in column 3, then shift
    # the remaining column indices left to close the gap.
    doomed = np.flatnonzero(indices == 3)
    data = np.delete(data, doomed, 0)
    indices = np.delete(indices, doomed)
    indices[indices > 3] -= 1
    indptr = np.arange(len(indices) + 1)
    sbrm.remove_col(3)

    expected = bsr_matrix((data, indices, indptr)).todense()
    self.assertTrue(np.allclose(expected, sbrm.to_bsr().todense()))
def test_construction_column_repeat(self):
    """Multi-block rows (with repeated column indices) must round-trip to BSR."""
    nblocks = 50
    nrows = 2
    np.random.seed(nblocks)

    data = np.random.random((nblocks, 2, 3))
    indices = np.random.randint(0, 7, size=nblocks)
    # Random interior row boundary, framed by 0 and nblocks.
    cuts = np.sort(np.random.choice(
        np.arange(1, nblocks - 1), size=nrows - 1, replace=False))
    indptr = np.array([0] + cuts.tolist() + [nblocks])

    sbrm = DBSRMatrix()
    for row in range(nrows):
        lo, hi = indptr[row], indptr[row + 1]
        sbrm.append_row(indices[lo:hi], data[lo:hi])

    expected = bsr_matrix((data, indices, indptr)).todense()
    self.assertTrue(np.allclose(expected, sbrm.to_bsr().todense()))
def __init__(self, free_point_window=None): """ A class for dynamically maintaining a set of linear motion and observation measurements with Gaussian noise collected over the course of robot navigation While the name of the class suggests graph-like functionality, which is to be added in the future, its primary purpose is to maintain a set of linear systems constraining robot and observation positions By defining a free_point_window, the class will automatically only maintain measurement factors containing at least one of the last free_point_window number of robot point positions as free variables in the linear systems. Those points which eventually become too old are automatically marginalized using their last known position :param free_point_window: An integer declaring the number of most recent robot point positions, which are included as free variables in the linear system(s) """ if free_point_window is None: self._free_point_window = sys.maxsize else: self._free_point_window = free_point_window self.point_dim = None # Dimensionality of the point variables, which is detected and then enforced # following the first point variable encounter self._points = OrderedDict( ) # Collection of all point variables encountered self._free_point_buffer = OrderedDict( ) # Collection of the last free_point_window point variables. The # index of each PointVariable in the buffer corresponds to its # column index within _Ap and _Bp. # if free_point_window is None, _free_point_buffer == _points. self.landmark_dim = None # Dimensionality of the landmark variables, detected and then enforced following the # first encounter self._landmark_buffer = OrderedDict( ) # Collection of all the unique landmark variables encountered. 
The index # of each LandmarkVariable in the buffer corresponds to its column # index within _Am self._Ap = DBSRMatrix( ) # Observation matrix transforming robot point positions self._Am = DBSRMatrix( ) # Observation matrix transforming landmark positions self._Bp = DBSRMatrix( ) # Odometry matrix relating robot point positions self._array_index = { 'd': {}, 't': {} } # Map of variables to the rows in which they participate in a factor self._d = [ ] # List of range measurements from a robot point to a landmark self._t = [] # List of robot translations self._sigma_d = [ ] # List of estimated standard deviations for each range measurement self._sigma_t = [ ] # List of estimated standard deviations for each translation measurement self.correspondence_map = UnionFind( ) # Map of landmarks to their parent landmark. Landmarks sharing the same