Example #1
    def test_bulk_update(self):
        resources = ResourceFactory.build_batch(2)
        self.dataset.resources.extend(resources)
        self.dataset.save()
        now = datetime.now()
        ids = [r.id for r in self.dataset.resources]
        data = [{
            'id': str(id),
            'title': faker.sentence(),
            'description': faker.text(),
        } for id in ids]
        data.append({
            'title': faker.sentence(),
            'description': faker.text(),
            'url': faker.url(),
        })
        with self.api_user():
            response = self.put(url_for('api.resources', dataset=self.dataset),
                                data)
        self.assert200(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 3)
        for idx, id in enumerate(ids):
            resource = self.dataset.resources[idx]
            rdata = data[idx]
            self.assertEqual(str(resource.id), rdata['id'])
            self.assertEqual(resource.title, rdata['title'])
            self.assertEqual(resource.description, rdata['description'])
            self.assertIsNotNone(resource.url)
        new_resource = self.dataset.resources[-1]
        self.assertEqualDates(new_resource.published, now)
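
This bulk update sends a PUT to api.resources with a list of entries: entries carrying an 'id' update the matching resource, while the trailing entry without one is appended as a new resource. A possible follow-up check (a sketch, assuming the appended entry ends up last in dataset.resources, which the published-date assertion already relies on):

        new_payload = data[-1]
        self.assertEqual(new_resource.title, new_payload['title'])
        self.assertEqual(new_resource.url, new_payload['url'])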
Example #2
    def test_all_dataset_fields(self):
        resources = ResourceFactory.build_batch(3)
        dataset = DatasetFactory(tags=faker.words(nb=3), resources=resources,
                                 frequency='daily', acronym='acro')
        d = dataset_to_rdf(dataset)
        g = d.graph

        assert isinstance(d, RdfResource)
        assert len(list(g.subjects(RDF.type, DCAT.Dataset))) == 1

        assert g.value(d.identifier, RDF.type) == DCAT.Dataset

        assert isinstance(d.identifier, URIRef)
        uri = url_for('datasets.show_redirect',
                      dataset=dataset.id, _external=True)
        assert str(d.identifier) == uri
        assert d.value(DCT.identifier) == Literal(dataset.id)
        assert d.value(DCT.title) == Literal(dataset.title)
        assert d.value(SKOS.altLabel) == Literal(dataset.acronym)
        assert d.value(DCT.description) == Literal(dataset.description)
        assert d.value(DCT.issued) == Literal(dataset.created_at)
        assert d.value(DCT.modified) == Literal(dataset.last_modified)
        assert d.value(DCT.accrualPeriodicity).identifier == FREQ.daily
        expected_tags = set(Literal(t) for t in dataset.tags)
        assert set(d.objects(DCAT.keyword)) == expected_tags
        assert len(list(d.objects(DCAT.distribution))) == len(resources)
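
This variant uses plain pytest asserts and also covers the acronym, mapped to SKOS.altLabel. A complementary check one might add (a sketch, assuming dataset_to_rdf only emits SKOS.altLabel when an acronym is set and that DatasetFactory accepts acronym=None):

        dataset = DatasetFactory(acronym=None)
        d = dataset_to_rdf(dataset)
        assert d.value(SKOS.altLabel) is None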
Example #3
    def test_all_dataset_fields(self):
        resources = ResourceFactory.build_batch(3)
        dataset = DatasetFactory(tags=faker.words(nb=3), resources=resources,
                                 frequency='daily')
        d = dataset_to_rdf(dataset)
        g = d.graph

        self.assertIsInstance(d, RdfResource)
        self.assertEqual(len(list(g.subjects(RDF.type, DCAT.Dataset))), 1)

        self.assertEqual(g.value(d.identifier, RDF.type), DCAT.Dataset)

        self.assertIsInstance(d.identifier, URIRef)
        uri = url_for('datasets.show_redirect',
                      dataset=dataset.id, _external=True)
        self.assertEqual(str(d.identifier), uri)
        self.assertEqual(d.value(DCT.identifier), Literal(dataset.id))
        self.assertEqual(d.value(DCT.title), Literal(dataset.title))
        self.assertEqual(d.value(DCT.description),
                         Literal(dataset.description))
        self.assertEqual(d.value(DCT.issued), Literal(dataset.created_at))
        self.assertEqual(d.value(DCT.modified),
                         Literal(dataset.last_modified))
        self.assertEqual(d.value(DCT.accrualPeriodicity).identifier,
                         FREQ.daily)
        expected_tags = set(Literal(t) for t in dataset.tags)
        self.assertEqual(set(d.objects(DCAT.keyword)), expected_tags)

        self.assertEqual(len(list(d.objects(DCAT.distribution))),
                         len(resources))
Example #4
    def test_reorder(self):
        # Register an extra field in order to test
        # https://github.com/opendatateam/udata/issues/1794
        ResourceMixin.extras.register('my:register', db.BooleanField)
        self.dataset.resources = ResourceFactory.build_batch(3)
        self.dataset.resources[0].extras = {
            'my:register': True,
        }
        self.dataset.save()
        self.dataset.reload()  # Otherwise `last_modified` date is inaccurate.
        initial_last_modified = self.dataset.last_modified

        initial_order = [r.id for r in self.dataset.resources]
        expected_order = [{'id': str(id)} for id in reversed(initial_order)]

        with self.api_user():
            response = self.put(url_for('api.resources', dataset=self.dataset),
                                expected_order)
        self.assertStatus(response, 200)
        self.assertEqual([str(r['id']) for r in response.json],
                         [str(r['id']) for r in expected_order])
        self.dataset.reload()
        self.assertEqual([str(r.id) for r in self.dataset.resources],
                         [str(r['id']) for r in expected_order])
        self.assertEqual(self.dataset.last_modified, initial_last_modified)
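
The registered 'my:register' extra guards the regression tracked in the linked issue; presumably an id-only reorder payload must not strip or reject custom resource extras. A possible additional assertion (a sketch, assuming the extra survives the reorder):

        by_id = {str(r.id): r for r in self.dataset.resources}
        self.assertTrue(by_id[str(initial_order[0])].extras.get('my:register'))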
Example #5
    def test_dataset_api_update_without_resources(self):
        '''It should update a dataset from the API without resources'''
        user = self.login()
        dataset = DatasetFactory(owner=user,
                                 resources=ResourceFactory.build_batch(3))
        initial_length = len(dataset.resources)
        data = dataset.to_dict()
        del data['resources']
        data['description'] = faker.sentence()
        response = self.put(url_for('api.dataset', dataset=dataset), data)
        self.assert200(response)
        self.assertEqual(Dataset.objects.count(), 1)

        dataset.reload()
        self.assertEqual(dataset.description, data['description'])
        self.assertEqual(len(dataset.resources), initial_length)
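
Omitting the 'resources' key from the payload leaves the dataset's resources untouched rather than clearing them. To pin that down further, one could record the resource ids before the PUT and compare them afterwards (a sketch; initial_ids is a hypothetical variable captured right after the factory call):

        initial_ids = [str(r.id) for r in dataset.resources]
        # ... PUT request and dataset.reload() as in the example ...
        self.assertEqual([str(r.id) for r in dataset.resources], initial_ids)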
Example #6
    def test_match_existing_resource_by_url(self):
        dataset = DatasetFactory(resources=ResourceFactory.build_batch(3))
        existing_resource = dataset.resources[1]
        node = BNode()
        g = Graph()

        new_title = faker.sentence()
        g.add((node, RDF.type, DCAT.Distribution))
        g.add((node, DCT.title, Literal(new_title)))
        g.add((node, DCAT.downloadURL, Literal(existing_resource.url)))

        resource = resource_from_rdf(g, dataset)
        resource.validate()

        assert isinstance(resource, Resource)
        assert resource.title == new_title
        assert resource.id == existing_resource.id
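
Because the distribution's downloadURL matches an existing resource's url, resource_from_rdf reuses that resource (same id) and only updates its fields. One extra check could confirm nothing was appended (a sketch, assuming resource_from_rdf updates the matched resource in place without adding to dataset.resources):

        assert len(dataset.resources) == 3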
Example #7
    def test_reorder(self):
        self.dataset.resources = ResourceFactory.build_batch(3)
        self.dataset.save()
        self.dataset.reload()  # Otherwise `last_modified` date is inaccurate.
        initial_last_modified = self.dataset.last_modified

        initial_order = [r.id for r in self.dataset.resources]
        expected_order = [{'id': str(id)} for id in reversed(initial_order)]

        with self.api_user():
            response = self.put(url_for('api.resources', dataset=self.dataset),
                                expected_order)
        self.assertStatus(response, 200)
        self.assertEqual([str(r['id']) for r in response.json],
                         [str(r['id']) for r in expected_order])
        self.dataset.reload()
        self.assertEqual([str(r.id) for r in self.dataset.resources],
                         [str(r['id']) for r in expected_order])
        self.assertEqual(self.dataset.last_modified, initial_last_modified)