def get(self):
    """Record ``GET`` endpoint: retrieve a record.

    Validates the id, fetches the record, sets timestamp/cache headers,
    honours conditional-request headers, and optionally trims the record
    to the fields requested via ``_fields``.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotFound` if
        the record is not found.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if
        ``If-None-Match`` header is provided and record not
        modified in the interim.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if
        ``If-Match`` header is provided and record modified
        in the interim.
    """
    self._raise_400_if_invalid_id(self.record_id)
    record = self._get_record_or_404(self.record_id)
    timestamp = record[self.model.modified_field]
    # Headers are set before the conditional checks so that 304/412
    # responses still carry the timestamp and cache information.
    self._add_timestamp_header(self.request.response, timestamp=timestamp)
    self._add_cache_header(self.request.response)
    self._raise_304_if_not_modified(record)
    self._raise_412_if_modified(record)

    # Only keep the requested subset of fields, if any were asked for.
    partial_fields = self._extract_partial_fields()
    if partial_fields:
        record = dict_subset(record, partial_fields)

    return self.postprocess(record)
def collection_get(self):
    """Model ``GET`` endpoint: retrieve multiple records.

    Applies querystring filters, sorting, pagination and partial-field
    selection, and exposes ``Next-Page`` / ``Total-Records`` headers.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPNotModified` if
        ``If-None-Match`` header is provided and collection not
        modified in the interim.

    :raises: :exc:`~pyramid:pyramid.httpexceptions.HTTPPreconditionFailed` if
        ``If-Match`` header is provided and collection modified
        in the interim.

    :raises:
        :exc:`~pyramid:pyramid.httpexceptions.HTTPBadRequest`
        if filters or sorting are invalid.
    """
    # Headers are set before the conditional checks so that 304/412
    # responses still carry the timestamp and cache information.
    self._add_timestamp_header(self.request.response)
    self._add_cache_header(self.request.response)
    self._raise_304_if_not_modified()
    self._raise_412_if_modified()

    headers = self.request.response.headers

    filters = self._extract_filters()
    limit = self._extract_limit()
    sorting = self._extract_sorting(limit)
    partial_fields = self._extract_partial_fields()

    # Filtering on the modified field implies the client wants to see
    # tombstones too (e.g. for synchronization), hence include_deleted.
    filter_fields = [f.field for f in filters]
    include_deleted = self.model.modified_field in filter_fields

    pagination_rules, offset = self._extract_pagination_rules_from_token(
        limit, sorting)

    records, total_records = self.model.get_records(
        filters=filters,
        sorting=sorting,
        limit=limit,
        pagination_rules=pagination_rules,
        include_deleted=include_deleted)

    offset = offset + len(records)
    next_page = None

    # Provide a Next-Page link only when this page was full and more
    # records remain beyond the current offset.
    if limit and len(records) == limit and offset < total_records:
        lastrecord = records[-1]
        next_page = self._next_page_url(sorting, limit, lastrecord, offset)
        headers['Next-Page'] = encode_header(next_page)

    if partial_fields:
        records = [
            dict_subset(record, partial_fields)
            for record in records
        ]

    # Bind metric about response size.
    logger.bind(nb_records=len(records), limit=limit)
    headers['Total-Records'] = encode_header('%s' % total_records)

    return self.postprocess(records)
def test_is_noop_if_no_keys(self):
    """An empty key list selects nothing: the result is an empty dict."""
    result = dict_subset({"a": 1, "b": 2}, [])
    self.assertEqual(result, {})
def test_ignores_if_subobject_is_not_dict(self):
    """A dotted key reaching below a non-dict value does not filter:
    the parent sub-object is kept as-is.
    """
    # Renamed from ``input`` to avoid shadowing the builtin.
    source = dict(a=1, b=dict(c=2, d=3))
    obtained = dict_subset(source, ["a", "b.c.d", "b.d"])
    expected = dict(a=1, b=dict(c=2, d=3))
    self.assertEqual(obtained, expected)
def test_can_filter_subobjects_recursively(self):
    """Dotted keys filter nested dicts at arbitrary depth."""
    # Renamed from ``input`` to avoid shadowing the builtin.
    source = dict(a=1, b=dict(c=2, d=dict(e=4, f=5)))
    obtained = dict_subset(source, ["a", "b.d.e"])
    expected = dict(a=1, b=dict(d=dict(e=4)))
    self.assertEqual(obtained, expected)
def test_can_filter_subobjects_keys(self):
    """Several dotted keys on the same sub-object are merged."""
    # Renamed from ``input`` to avoid shadowing the builtin.
    source = dict(a=1, b=dict(c=2, d=3, e=4))
    obtained = dict_subset(source, ["a", "b.d", "b.e"])
    expected = dict(a=1, b=dict(d=3, e=4))
    self.assertEqual(obtained, expected)
def test_can_filter_subobjects(self):
    """A dotted key selects a single field inside a nested dict."""
    result = dict_subset({"a": 1, "b": {"c": 2, "d": 3}}, ["a", "b.c"])
    self.assertEqual(result, {"a": 1, "b": {"c": 2}})
def test_ignores_duplicated_keys(self):
    """Repeating a key in the list has no effect on the result."""
    result = dict_subset({"a": 1, "b": 2}, ["a", "a"])
    self.assertEqual(result, {"a": 1})
def test_extract_by_keys(self):
    """Only the listed top-level keys are kept."""
    result = dict_subset({"a": 1, "b": 2}, ["b"])
    self.assertEqual(result, {"b": 2})
def test_ignores_unknown_keys(self):
    """Keys absent from the source dict are silently skipped."""
    result = dict_subset({"a": 1, "b": 2}, ["a", "c"])
    self.assertEqual(result, {"a": 1})