def test_unique_filtered_all(self):
    """all() with a custom uniqueness strategy keyed on the first column."""
    result = self._fixture()

    def first_col(row):
        return row[0]

    eq_(
        result.unique(first_col).all(),
        [(1, 1, 1), (2, 1, 2), (4, 1, 2)],
    )
def test_one_unique_tricky_one(self):
    # one() has to keep consuming rows before it can discover a
    # second distinct row; unique() really slows things down
    result = self._fixture(
        data=[(1, 1, 1), (1, 1, 1), (1, 1, 1), (2, 1, 1)]
    )
    assert_raises(exc.MultipleResultsFound, result.unique().one)
def test_one_unique(self):
    # one() counts rows only after uniqueness is applied; without
    # unique() this fixture would raise MultipleResultsFound
    result = self._fixture(data=[(1, 1, 1), (1, 1, 1)])
    eq_(result.unique().one(), (1, 1, 1))
def test_unique_mappings_all(self):
    """A custom uniqueness strategy also applies to the mappings() facade."""
    result = self._fixture()

    def first_col(row):
        return row[0]

    expected = [
        {"a": 1, "b": 1, "c": 1},
        {"a": 2, "b": 1, "c": 2},
        {"a": 4, "b": 1, "c": 2},
    ]
    eq_(result.unique(first_col).mappings().all(), expected)
def test_unique_scalars_many(self):
    """fetchmany() on a uniquified scalars() view drains, then returns []."""
    scalars = self._fixture().unique().scalars(1)

    eq_(scalars.fetchmany(2), [1, 3])
    eq_(scalars.fetchmany(2), [])
def test_unique_default_filters_rearrange_order(self):
    """Default uniqueness filters follow the columns() rearrangement."""
    filters = [lambda x: x < 4, lambda x: x, lambda x: True]
    result = self._fixture(default_filters=filters)

    eq_(
        result.unique().columns("b", "a", "c").all(),
        [(1, 1, 1), (3, 1, 2), (1, 4, 2)],
    )
def test_partition_unique_yield_per(self):
    """partitions() honors yield_per sizing on a uniquified result."""
    result = self._fixture()

    collected = [
        list(chunk)
        for chunk in result.unique().yield_per(2).partitions()
    ]

    eq_(collected, [[(1, 1, 1), (2, 1, 2)], [(1, 3, 2), (4, 1, 2)]])
    # the partitions() iteration fully consumed the result
    eq_(result.all(), [])
def test_unique_filtered_iterate(self):
    """Plain iteration over a result uniquified on the first column."""
    result = self._fixture()

    def first_col(row):
        return row[0]

    uniqued = result.unique(first_col)
    eq_(list(uniqued), [(1, 1, 1), (2, 1, 2), (4, 1, 2)])
def test_unique_scalars_many_none(self):
    scalars = self._fixture().unique().scalars(1)

    # fetchmany(None) relies on the ListFetchStrategy default of
    # fetching everything, the same as all()
    eq_(scalars.fetchmany(None), [1, 3])
    eq_(scalars.fetchmany(None), [])
def test_merge_unique(self, dupe_fixture):
    """unique() applied across two merged scalar results."""
    r1, r2 = dupe_fixture

    r1.scalars("y")
    r2.scalars("y")
    merged = r1.merge(r2)

    # merged stream is 2, 2, 1, 3; uniquifying drops the duplicate
    eq_(merged.unique().all(), [2, 1, 3])
def test_unique_filtered_many(self):
    """fetchmany() with a custom uniqueness key drains in chunks."""
    result = self._fixture()

    def first_col(row):
        return row[0]

    uniqued = result.unique(first_col)
    eq_(uniqued.fetchmany(2), [(1, 1, 1), (2, 1, 2)])
    eq_(uniqued.fetchmany(2), [(4, 1, 2)])
    eq_(uniqued.fetchmany(2), [])
def test_unique_default_filters_rearrange_twice(self):
    # the default uniqueness filter must be reconfigured each time
    # columns() is invoked
    result = self._fixture(
        default_filters=[lambda x: x < 4, lambda x: x, lambda x: True]
    )
    result = result.unique()

    # 1, 1, 1 -> True, 1, True
    eq_(result.fetchone(), (1, 1, 1))

    # rearranged as b, a, c:
    # 1, 2, 2 -> 1, True, True
    # 3, 1, 2 -> 3, True, True
    result = result.columns("b", "a", "c")
    eq_(result.fetchone(), (3, 1, 2))

    # rearranged as c, a:
    # 2, 4 -> True, False
    result = result.columns("c", "a")
    eq_(result.fetchall(), [(2, 4)])
def go():
    # exhaust the uniquified result in partitions of three,
    # discarding each chunk
    result = s.execute(stmt)
    for _chunk in result.unique().partitions(3):
        pass
def test_unique_scalars_iterate(self):
    """Iterating a uniquified scalars(1) view yields distinct values."""
    scalars = self._fixture().unique().scalars(1)
    eq_(list(scalars), [1, 3])
def test_scalars_one_w_unique(self):
    """scalars().one() works on a uniquified single-row result."""
    uniqued = self._fixture(data=[(1, None, 2)]).unique()
    eq_(uniqued.scalars().one(), 1)
def test_unique_scalars_all(self):
    """all() on a uniquified scalars(1) view returns distinct values."""
    uniqued = self._fixture().unique()
    eq_(uniqued.scalars(1).all(), [1, 3])
def test_unique_default_filters_rearrange_scalar(self):
    """Default uniqueness filters carry over to the scalars() view."""
    filters = [lambda x: x < 4, lambda x: x, lambda x: True]
    result = self._fixture(default_filters=filters)

    eq_(result.unique().scalars(1).all(), [1, 3])
def test_unique_default_filters(self):
    """Per-column default filters drive the uniqueness determination."""
    filters = [lambda x: x < 4, lambda x: x, lambda x: True]
    result = self._fixture(default_filters=filters)

    eq_(result.unique().all(), [(1, 1, 1), (1, 3, 2), (4, 1, 2)])