def get(self, request):
    page_size = 100
    if hasattr(settings, "ACTIVITY_STREAM_PAGE_SIZE"):
        page_size = int(settings.ACTIVITY_STREAM_PAGE_SIZE)

    totalItems = models.EditLog.objects.all().exclude(
        resourceclassid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).count()

    # "last" defaults to the first page; it is overwritten below when the
    # edit log spans more than one page
    uris = {
        "root": request.build_absolute_uri(reverse("as_stream_collection")),
        "first": request.build_absolute_uri(
            reverse("as_stream_page", kwargs={"page": 1})),
        "last": request.build_absolute_uri(
            reverse("as_stream_page", kwargs={"page": 1})),
    }

    if totalItems > page_size:
        uris["last"] = request.build_absolute_uri(
            reverse("as_stream_page",
                    kwargs={"page": int(totalItems / page_size) + 1}))

    collection = ActivityStreamCollection(
        uris, totalItems,
        base_uri_for_arches=request.build_absolute_uri("/").rsplit("/", 1)[0])

    return JsonResponse(collection.to_obj())
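The reverse() calls above assume named URL routes for the collection root and its pages. A minimal URLconf sketch, assuming a Django path()-based urls.py, a hypothetical "history/" prefix, and assumed view class names (only the route names as_stream_collection and as_stream_page come from the view code; everything else here is illustrative):

from django.urls import path

# assumed module and class names for the two views shown in this section
from arches.app.views.activity_stream import (
    ActivityStreamCollectionView,
    ActivityStreamPageView,
)

urlpatterns = [
    # collection root: returns the OrderedCollection summary built by get() above
    path("history/", ActivityStreamCollectionView.as_view(), name="as_stream_collection"),
    # individual pages: <int:page> is passed to the paginated get() as `page`
    path("history/<int:page>", ActivityStreamPageView.as_view(), name="as_stream_page"),
]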
class ActivityStreamCollectionTests(ArchesTestCase):
    """
    Unit tests for ActivityStreamCollection and its JSON-LD serialization.
    """

    @classmethod
    def setUpClass(cls):
        ResourceInstance.objects.all().delete()

        for skospath in [
            "tests/fixtures/data/rdf_export_thesaurus.xml",
            "tests/fixtures/data/rdf_export_collections.xml",
        ]:
            skos = SKOSReader()
            rdf = skos.read_file(skospath)
            ret = skos.save_concepts_from_skos(rdf)

        # Models
        for model_name in ["object_model", "document_model"]:
            with open(
                os.path.join(
                    "tests/fixtures/resource_graphs/rdf_export_{0}.json".format(model_name)),
                    "r") as f:
                archesfile = JSONDeserializer().deserialize(f)
                ResourceGraphImporter(archesfile["graph"])

    def setUp(self):
        # for RDF/JSON-LD export tests
        self.C = ActivityStreamCollection(
            base_uris, totalItems,
            base_uri_for_arches="https://arches.getty.edu")
        self.EF = editlog_fuzzer()

    def test_jsonld_export_function(self):
        jsontxt = self.C.to_jsonld()
        # smoke test: json.loads raises if the serialization is not valid JSON
        doc = json.loads(jsontxt)

    def test_obj(self):
        obj = self.C.to_obj()
        self.assertTrue(obj["id"] == base_uris["root"])
        self.assertTrue("first" in obj)
        self.assertTrue("last" in obj)

    def test_generate_page(self):
        collection_page = self.C.generate_page(
            page_1_uris, reversed([x for x in self.EF.get_events(10)]))
        # smoke test: page serialization should not raise
        outtxt = collection_page.to_jsonld()
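These tests reference module-level fixtures (base_uris, totalItems, page_1_uris) and an editlog_fuzzer helper defined elsewhere in the test module. A minimal sketch of the shape those objects need, inferred from how they are used above; the concrete values and the fuzzer internals are assumptions, not the project's actual fixtures:

import datetime
from arches.app.models import models  # assumed import for EditLog

# illustrative module-level fixture values only
totalItems = 1000
base_uris = {
    "root": "https://arches.getty.edu/history/",
    "first": "https://arches.getty.edu/history/1",
    "last": "https://arches.getty.edu/history/10",
}
page_1_uris = {
    "this": "https://arches.getty.edu/history/1",
    "first": "https://arches.getty.edu/history/1",
    "last": "https://arches.getty.edu/history/10",
}

class editlog_fuzzer(object):
    """Hypothetical stand-in: produces fake EditLog rows for generate_page()."""

    def get_events(self, n):
        # unsaved EditLog instances are enough for serialization-only tests
        return [models.EditLog(timestamp=datetime.datetime.now()) for _ in range(n)]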
def get(self, request, page=None):
    current_page = 1
    page_size = 100
    if hasattr(settings, "ACTIVITY_STREAM_PAGE_SIZE"):
        page_size = int(settings.ACTIVITY_STREAM_PAGE_SIZE)

    st = 0
    end = page_size

    if page is not None:
        try:
            current_page = int(page)
            if current_page <= 0:
                current_page = 1
            st = (current_page - 1) * page_size
            end = current_page * page_size
        except (ValueError, TypeError):
            return HttpResponseBadRequest()

    totalItems = models.EditLog.objects.all().exclude(
        resourceclassid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).count()
    edits = models.EditLog.objects.all().exclude(
        resourceclassid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID).order_by('timestamp')[st:end]

    # setting "last" to be the same as "first"; changed below if there are more pages
    uris = {
        "root": request.build_absolute_uri(reverse("as_stream_collection")),
        "this": request.build_absolute_uri(reverse("as_stream_page", kwargs={'page': current_page})),
        "first": request.build_absolute_uri(reverse("as_stream_page", kwargs={'page': 1})),
        "last": request.build_absolute_uri(reverse("as_stream_page", kwargs={'page': 1})),
    }

    if current_page > 1:
        uris["prev"] = request.build_absolute_uri(
            reverse("as_stream_page", kwargs={'page': current_page - 1}))
    if end < totalItems:
        uris["next"] = request.build_absolute_uri(
            reverse("as_stream_page", kwargs={'page': current_page + 1}))
    if totalItems > page_size:
        uris["last"] = request.build_absolute_uri(
            reverse("as_stream_page", kwargs={'page': int(totalItems / page_size) + 1}))

    collection = ActivityStreamCollection(
        uris, totalItems,
        base_uri_for_arches=request.build_absolute_uri("/").rsplit("/", 1)[0])

    collection_page = collection.generate_page(uris, edits)
    collection_page.startIndex((current_page - 1) * page_size)

    return JsonResponse(collection_page.to_obj())
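The slice bounds and the "last" page number are the load-bearing arithmetic in both views. A small self-contained sketch (assuming the default page_size of 100 used above) that exercises it, including the edge case where totalItems is an exact multiple of page_size:

page_size = 100

def page_bounds(current_page, page_size=100):
    # same slice arithmetic as the paginated get() above
    st = (current_page - 1) * page_size
    end = current_page * page_size
    return st, end

assert page_bounds(1) == (0, 100)    # page 1 serves EditLog rows 0-99
assert page_bounds(3) == (200, 300)  # page 3 serves rows 200-299

# last-page formula used for uris["last"] in both views
assert int(250 / page_size) + 1 == 3  # 250 edits -> 3 pages
assert int(200 / page_size) + 1 == 3  # exact multiples over-count by one (2 pages would suffice)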