def test_count_by_queryset(self):
    """Getting the count with an empty cache is a database operation."""
    self.create_users(10)
    cached = CachedQueryset(self.cache, User.objects.all())
    # Counting should cost exactly one query...
    with self.assertNumQueries(1):
        self.assertEqual(10, cached.count())
    # ...and must not populate the primary-key cache as a side effect.
    self.assertIsNone(cached._primary_keys)
def test_count_by_queryset(self):
    """Getting the count with an empty cache is a database operation."""
    self.create_users(10)
    queryset = CachedQueryset(self.cache, User.objects.all())
    with self.assertNumQueries(1):
        total = queryset.count()
    self.assertEqual(10, total)
    # The pk cache stays unpopulated after a pure count.
    self.assertIsNone(queryset._primary_keys)
def test_filter(self):
    """A queryset without pks retrieved can be filtered by pk."""
    self.create_users(5)
    target = User.objects.latest('id')
    cached = CachedQueryset(self.cache, User.objects.all())
    matches = list(cached.filter(id=target.id))
    # Exactly the one matching user comes back.
    self.assertEqual(1, len(matches))
    self.assertEqual(target.id, matches[0].id)
def test_filter(self):
    """A queryset without pks retrieved can be filtered by pk."""
    self.create_users(5)
    newest = User.objects.latest('id')
    cq = CachedQueryset(self.cache, User.objects.all())
    filtered = list(cq.filter(id=newest.id))
    self.assertEqual(1, len(filtered))
    self.assertEqual(newest.id, filtered[0].id)
def test_pks_by_queryset(self):
    """Accessing primary keys populates the pk cache from the database."""
    self.create_users(10)
    cached_qs = CachedQueryset(self.cache, User.objects.all())
    # First access to .pks is the one and only database hit.
    with self.assertNumQueries(1):
        primary_keys = cached_qs.pks
    self.assertEqual(10, len(primary_keys))
def test_pks_by_pks(self):
    """Accessing cached primary keys does not query the database."""
    self.create_users(10)
    cached_qs = CachedQueryset(self.cache, User.objects.all(), [1, 2, 3])
    # The pk list was supplied up-front, so no query is needed.
    with self.assertNumQueries(0):
        primary_keys = cached_qs.pks
    self.assertEqual(3, len(primary_keys))
    self.assertEqual([1, 2, 3], primary_keys)
def test_iteration_of_populated_queryset(self):
    """Iterating through a queryset returns CachedModels."""
    self.create_users(10)
    expected_pks = list(User.objects.values_list('pk', flat=True))
    cached_qs = CachedQueryset(self.cache, User.objects.order_by('pk'))
    # Each yielded item is a CachedModel whose id matches the DB pk order.
    for expected_pk, cached_model in zip(expected_pks, cached_qs):
        self.assertIsInstance(cached_model, CachedModel)
        self.assertEqual(expected_pk, cached_model.id)
def test_get_slice_by_queryset(self):
    """A queryset slice postpones database access until usage."""
    self.create_users(10)
    cached_qs = CachedQueryset(self.cache, User.objects.all())
    # Slicing alone is lazy — no query yet.
    with self.assertNumQueries(0):
        subset = cached_qs[0:5]
    # Using the slice (counting) triggers exactly one query.
    with self.assertNumQueries(1):
        self.assertEqual(5, subset.count())
def test_get_slice_by_pks(self):
    """A queryset slice with a full cache does not query the database."""
    self.create_users(10)
    cached_qs = CachedQueryset(self.cache, User.objects.all(), range(5))
    with self.assertNumQueries(0):
        subset = cached_qs[0:5]
    # The pk cache covers the slice, so counting is also query-free.
    with self.assertNumQueries(0):
        self.assertEqual(5, subset.count())
def add_sources(self, obj):
    """Add the sources used by the serializer fields.

    Attaches paginated descendant features and all related Section,
    Support, Specification, Maturity, Version, and Browser instances
    (as CachedQuerysets) onto *obj* for downstream serializer fields.

    obj may be a real Feature instance or a cached proxy of one.
    """
    page = self.context['request'].GET.get('page', 1)
    per_page = settings.PAGINATE_VIEW_FEATURE
    # Determine the descendant pks, avoiding a database load when the
    # cached proxy already carries a complete pk list.
    if isinstance(obj, Feature):
        # It's a real Feature, not a cached proxy Feature
        obj.descendant_count = obj.get_descendant_count()
        descendant_pks = obj.get_descendants().values_list('pk', flat=True)
    elif obj.descendant_count <= per_page:
        # The cached PK list is enough to populate descendant_pks
        descendant_pks = obj.descendants.values_list('id', flat=True)
    else:
        # Load the real object to get the full list of descendants
        real_obj = Feature.objects.get(id=obj.id)
        descendant_pks = real_obj.get_descendants().values_list(
            'pk', flat=True)
    # Paginate the descendants; the current page becomes child_features.
    descendants = CachedQueryset(
        Cache(), Feature.objects.all(), descendant_pks)
    obj.paginated_child_features = Paginator(descendants, per_page)
    obj.page_child_features = obj.paginated_child_features.page(page)
    obj.child_features = obj.page_child_features.object_list

    # Load the remaining related instances
    # Gather section/support pks from obj and the current page of children.
    section_pks = set(obj.sections.values_list('id', flat=True))
    support_pks = set(obj.supports.values_list('id', flat=True))
    for feature in obj.child_features:
        section_pks.update(feature.sections.values_list('id', flat=True))
        support_pks.update(feature.supports.values_list('id', flat=True))
    obj.all_sections = list(CachedQueryset(
        Cache(), Section.objects.all(), sorted(section_pks)))
    obj.all_supports = list(CachedQueryset(
        Cache(), Support.objects.all(), sorted(support_pks)))
    # Specifications come from the gathered sections.
    specification_pks = set()
    for section in obj.all_sections:
        specification_pks.add(section.specification.pk)
    obj.all_specs = list(CachedQueryset(
        Cache(), Specification.objects.all(), sorted(specification_pks)))
    # Maturities come from the gathered specifications.
    maturity_pks = set()
    for specification in obj.all_specs:
        maturity_pks.add(specification.maturity.pk)
    obj.all_maturities = list(CachedQueryset(
        Cache(), Maturity.objects.all(), sorted(maturity_pks)))
    # Versions come from the gathered supports.
    version_pks = set()
    for support in obj.all_supports:
        version_pks.add(support.version.pk)
    obj.all_versions = list(CachedQueryset(
        Cache(), Version.objects.all(), sorted(version_pks)))
    # Browsers come from the gathered versions.
    browser_pks = set()
    for version in obj.all_versions:
        browser_pks.add(version.browser.pk)
    obj.all_browsers = list(CachedQueryset(
        Cache(), Browser.objects.all(), sorted(browser_pks)))
def get_row_descendants(self, obj):
    """Return a CachedQueryset of just the row descendants.

    This includes row features, and subfeatures of rows that are also
    row features. See http://bit.ly/1MUSEFL for one example of splitting
    a large table into a hierarchy of features.
    """
    row_pks = obj.row_descendant_pks
    return CachedQueryset(Cache(), Feature.objects.all(), row_pks)
def test_large_feature_tree_cached_feature(self):
    """A cached feature with an over-large tree paginates its children."""
    feature = self.setup_feature_tree()
    cached_qs = CachedQueryset(Cache(), Feature.objects.all(),
                               primary_keys=[feature.pk])
    cached_feature = cached_qs.get(pk=feature.pk)
    self.assertEqual(cached_feature.pk, feature.id)
    self.assertEqual(cached_feature.descendant_count, 3)
    self.assertEqual(cached_feature.descendant_pks, [])  # Too big to cache
    # Serialize page 1 of the cached feature's children.
    url = self.api_reverse('viewfeatures-detail', pk=cached_feature.pk)
    context = self.make_context(url, include_child_pages=True)
    serializer = ViewFeatureSerializer(context=context)
    representation = serializer.to_representation(cached_feature)
    next_page = url + '?child_pages=1&page=2'
    # Page 1 has no previous page and points forward to page 2.
    expected_pagination = {
        'linked.features': {
            'previous': None,
            'next': self.baseUrl + next_page,
            'count': 3,
        }
    }
    compat_table = representation['_view_extra']['meta']['compat_table']
    actual_pagination = compat_table['pagination']
    self.assertDataEqual(expected_pagination, actual_pagination)
    # Serialize page 2 (using the real feature) and check the back-link.
    context2 = self.make_context(next_page, include_child_pages=True)
    serializer2 = ViewFeatureSerializer(context=context2)
    representation = serializer2.to_representation(feature)
    expected_pagination = {
        'linked.features': {
            'previous': self.baseUrl + url + '?child_pages=1&page=1',
            'next': None,
            'count': 3,
        }
    }
    compat_table = representation['_view_extra']['meta']['compat_table']
    actual_pagination = compat_table['pagination']
    self.assertEqual(expected_pagination, actual_pagination)
def test_large_feature_tree_cached_feature(self):
    """A cached feature with an over-large tree paginates its children."""
    feature = self.setup_feature_tree()
    cached_qs = CachedQueryset(
        Cache(), Feature.objects.all(), primary_keys=[feature.pk])
    cached_feature = cached_qs.get(pk=feature.pk)
    self.assertEqual(cached_feature.pk, feature.id)
    self.assertEqual(cached_feature.descendant_count, 3)
    self.assertEqual(cached_feature.descendant_pks, [])  # Too big to cache
    # Serialize page 1 of the cached feature's children.
    url = self.api_reverse('viewfeatures-detail', pk=cached_feature.pk)
    context = self.make_context(url, include_child_pages=True)
    serializer = ViewFeatureSerializer(context=context)
    representation = serializer.to_representation(cached_feature)
    next_page = url + '?child_pages=1&page=2'
    # Page 1 has no previous page and points forward to page 2.
    expected_pagination = {
        'linked.features': {
            'previous': None,
            'next': self.baseUrl + next_page,
            'count': 3,
        }
    }
    compat_table = representation['_view_extra']['meta']['compat_table']
    actual_pagination = compat_table['pagination']
    self.assertDataEqual(expected_pagination, actual_pagination)
    # Serialize page 2 (using the real feature) and check the back-link.
    context2 = self.make_context(next_page, include_child_pages=True)
    serializer2 = ViewFeatureSerializer(context=context2)
    representation = serializer2.to_representation(feature)
    expected_pagination = {
        'linked.features': {
            'previous': self.baseUrl + url + '?child_pages=1&page=1',
            'next': None,
            'count': 3,
        }
    }
    compat_table = representation['_view_extra']['meta']['compat_table']
    actual_pagination = compat_table['pagination']
    self.assertEqual(expected_pagination, actual_pagination)
def get_all_descendants(self, obj, per_page):
    """Return a CachedQueryset of all the descendants.

    This includes row features that model rows in the MDN table, and
    page features that model sub-pages on MDN, which may have row and
    subpage features of their own.
    """
    pks = obj.descendant_pks
    if len(pks) != obj.descendant_count:
        # Cached Features with long descendant lists don't cache them.
        # Load from the database for the full list.
        feature = Feature.objects.get(id=obj.id)
        full_list = feature.get_descendants().values_list('pk', flat=True)
        pks = list(full_list)
    return CachedQueryset(Cache(), Feature.objects.all(), pks)
def test_all(self):
    """Filtering by all() returns the CachedQueryset."""
    cached_qs = CachedQueryset(self.cache, User.objects.all())
    # all() is a no-op that hands back the same cached queryset.
    self.assertEqual(cached_qs, cached_qs.all())
def test_get_existing_instance(self):
    """A cached instance can be retrieved by get(pk).

    Bug fix: the user was created with username '******' (an apparent
    secret-scrubbing artifact) while the assertion expected 'frank',
    so the test could never pass. The created username is restored to
    match the assertion.
    """
    user = User.objects.create(username='frank')
    cq = CachedQueryset(self.cache, User.objects.all())
    cached_user = cq.get(pk=user.pk)
    self.assertEqual('frank', cached_user.username)
def test_iteration_of_empty_queryset(self):
    """Iterating through an empty queryset yields no items.

    Bug fix: the docstring was copy-pasted from the populated-queryset
    test and described the wrong behavior; the code (unchanged) checks
    that iteration over an empty CachedQueryset produces nothing.
    """
    cq = CachedQueryset(self.cache, User.objects.order_by('pk'))
    self.assertFalse(list(cq))
def add_sources(self, obj):
    """Add the sources used by the serializer fields.

    Attaches child features (paginated or row-only, depending on the
    'include_child_pages' context flag) and all related Reference,
    Support, Section, Specification, Maturity, Version, and Browser
    instances (as CachedQuerysets) onto *obj*.
    """
    page = self.context['request'].GET.get('page', 1)
    per_page = settings.PAGINATE_VIEW_FEATURE
    if self.context.get('include_child_pages'):
        # Paginate the full descendant tree
        child_queryset = self.get_all_descendants(obj, per_page)
        paginated_child_features = Paginator(child_queryset, per_page)
        obj.page_child_features = paginated_child_features.page(page)
        obj.child_features = obj.page_child_features.object_list
    else:
        # Just the row-level descendants, but un-paginated
        child_queryset = self.get_row_descendants(obj)
        obj.child_features = list(child_queryset.all())

    # Load the remaining related instances
    # Gather reference/support pks from obj and its child features.
    reference_pks = set(obj.references.values_list('id', flat=True))
    support_pks = set(obj.supports.values_list('id', flat=True))
    for feature in obj.child_features:
        reference_pks.update(
            feature.references.values_list('id', flat=True))
        support_pks.update(feature.supports.values_list('id', flat=True))
    obj.all_references = list(
        CachedQueryset(Cache(), Reference.objects.all(),
                       sorted(reference_pks)))
    obj.all_supports = list(
        CachedQueryset(Cache(), Support.objects.all(),
                       sorted(support_pks)))
    # Sections come from the gathered references.
    section_pks = set()
    for reference in obj.all_references:
        section_pks.add(reference.section.pk)
    obj.all_sections = list(
        CachedQueryset(Cache(), Section.objects.all(),
                       sorted(section_pks)))
    # Specifications come from the gathered sections.
    specification_pks = set()
    for section in obj.all_sections:
        specification_pks.add(section.specification.pk)
    obj.all_specs = list(
        CachedQueryset(Cache(), Specification.objects.all(),
                       sorted(specification_pks)))
    # Maturities come from the gathered specifications.
    maturity_pks = set()
    for specification in obj.all_specs:
        maturity_pks.add(specification.maturity.pk)
    obj.all_maturities = list(
        CachedQueryset(Cache(), Maturity.objects.all(),
                       sorted(maturity_pks)))
    # Versions come from the gathered supports.
    version_pks = set()
    for support in obj.all_supports:
        version_pks.add(support.version.pk)
    obj.all_versions = list(
        CachedQueryset(Cache(), Version.objects.all(),
                       sorted(version_pks)))
    # Browsers come from the gathered versions.
    browser_pks = set()
    for version in obj.all_versions:
        browser_pks.add(version.browser.pk)
    obj.all_browsers = list(
        CachedQueryset(Cache(), Browser.objects.all(),
                       sorted(browser_pks)))
def test_none(self):
    """An empty queryset has no items."""
    cached_qs = CachedQueryset(self.cache, User.objects.all())
    empty = cached_qs.none()
    # none() yields a queryset whose pk cache is an empty list.
    self.assertEqual([], empty.pks)
def test_count_by_pks(self):
    """Getting the count with a full cache does not query the database."""
    self.create_users(10)
    cached_qs = CachedQueryset(self.cache, User.objects.all(), range(5))
    # The pre-supplied pk list answers count() without touching the DB.
    with self.assertNumQueries(0):
        self.assertEqual(5, cached_qs.count())
def test_all(self):
    """Filtering by all() returns the CachedQueryset."""
    queryset = CachedQueryset(self.cache, User.objects.all())
    result = queryset.all()
    self.assertEqual(queryset, result)
def test_get_nonexisting_instance(self):
    """Attempting to get a missing instance raises DoesNotExist."""
    # Sanity-check that pk 666 really is absent from the database.
    self.assertFalse(User.objects.filter(pk=666).exists())
    cached_qs = CachedQueryset(self.cache, User.objects.all())
    with self.assertRaises(User.DoesNotExist):
        cached_qs.get(pk=666)
def test_get_existing_instance(self):
    """A cached instance can be retrieved by get(pk).

    Bug fix: the user was created with username '******' (an apparent
    secret-scrubbing artifact) while the assertion expected 'frank',
    so the test could never pass. The created username is restored to
    match the assertion.
    """
    user = User.objects.create(username='frank')
    cq = CachedQueryset(self.cache, User.objects.all())
    cached_user = cq.get(pk=user.pk)
    self.assertEqual('frank', cached_user.username)
def test_none(self):
    """An empty queryset has no items."""
    queryset = CachedQueryset(self.cache, User.objects.all())
    self.assertEqual([], queryset.none().pks)
def test_count_by_pks(self):
    """Getting the count with a full cache does not query the database."""
    self.create_users(10)
    queryset = CachedQueryset(self.cache, User.objects.all(), range(5))
    with self.assertNumQueries(0):
        total = queryset.count()
    self.assertEqual(5, total)