def test_autoarchive(self, app):
    """Datasets missing from the remote are archived after the grace
    period, left alone within it, and unarchived when they reappear.

    Fix: the grace period was read from the app config twice; it is now
    fetched once and reused for both dangling datasets.
    """
    nb_datasets = 3
    source = HarvestSourceFactory(config={'nb_datasets': nb_datasets})
    backend = FakeBackend(source)

    limit = app.config['HARVEST_AUTOARCHIVE_GRACE_DAYS']

    # A dangling dataset older than the grace period: must be archived.
    last_update = datetime.now() - timedelta(days=limit + 1)
    dataset_arch = DatasetFactory(
        extras={
            'harvest:domain': source.domain,
            'harvest:source_id': str(source.id),
            'harvest:remote_id': 'not-on-remote',
            'harvest:last_update': last_update.isoformat(),
        })

    # A dangling dataset still within the grace period: must NOT be archived.
    last_update = datetime.now() - timedelta(days=limit - 1)
    dataset_no_arch = DatasetFactory(
        extras={
            'harvest:domain': source.domain,
            'harvest:source_id': str(source.id),
            'harvest:remote_id': 'not-on-remote-two',
            'harvest:last_update': last_update.isoformat(),
        })

    job = backend.harvest()

    # all datasets except arch: 3 mocks + 1 manual (no_arch)
    assert len(job.items) == nb_datasets + 1
    # all datasets: 3 mocks + 2 manuals (arch and no_arch)
    assert Dataset.objects.count() == nb_datasets + 2

    archived_items = [i for i in job.items if i.status == 'archived']
    assert len(archived_items) == 1
    assert archived_items[0].dataset == dataset_arch

    dataset_arch.reload()
    assert dataset_arch.archived is not None
    assert 'harvest:archived' in dataset_arch.extras
    assert 'harvest:archived_at' in dataset_arch.extras

    dataset_no_arch.reload()
    assert dataset_no_arch.archived is None
    assert 'harvest:archived' not in dataset_no_arch.extras
    assert 'harvest:archived_at' not in dataset_no_arch.extras

    # test unarchive: archive manually then relaunch harvest
    q = {'extras__harvest:remote_id': 'fake-1'}
    dataset = Dataset.objects.get(**q)
    dataset.archived = datetime.now()
    dataset.extras['harvest:archived'] = 'not-on-remote'
    dataset.extras['harvest:archived_at'] = datetime.now()
    dataset.save()

    backend.harvest()

    dataset.reload()
    assert dataset.archived is None
    assert 'harvest:archived' not in dataset.extras
    assert 'harvest:archived_at' not in dataset.extras
def test_dataset_api_feature_already(self):
    '''It shouldn't do anything to feature an already featured dataset'''
    self.login(AdminFactory())
    featured_dataset = DatasetFactory(featured=True)

    # Feature a dataset that is already featured.
    endpoint = url_for('api.dataset_featured', dataset=featured_dataset)
    response = self.post(endpoint)
    self.assert200(response)

    # The featured flag must still be set.
    featured_dataset.reload()
    self.assertTrue(featured_dataset.featured)
def test_dataset_api_feature(self):
    '''It should mark the dataset featured on POST'''
    self.login(AdminFactory())
    unfeatured_dataset = DatasetFactory(featured=False)

    endpoint = url_for('api.dataset_featured', dataset=unfeatured_dataset)
    response = self.post(endpoint)
    self.assert200(response)

    # The POST must have flipped the flag on.
    unfeatured_dataset.reload()
    self.assertTrue(unfeatured_dataset.featured)
def test_dataset_api_unfeature_already(self):
    '''It shouldn't do anything to unfeature a not featured dataset'''
    self.login(AdminFactory())
    plain_dataset = DatasetFactory(featured=False)

    # DELETE on a dataset that was never featured.
    endpoint = url_for('api.dataset_featured', dataset=plain_dataset)
    response = self.delete(endpoint)
    self.assert200(response)

    # The flag must remain off.
    plain_dataset.reload()
    self.assertFalse(plain_dataset.featured)
def test_dataset_api_update_without_resources(self):
    '''It should update a dataset from the API without resources'''
    user = self.login()
    dataset = DatasetFactory(owner=user,
                             resources=ResourceFactory.build_batch(3))
    initial_length = len(dataset.resources)

    # Build an update payload that omits the resources entirely.
    payload = dataset.to_dict()
    payload.pop('resources')
    payload['description'] = faker.sentence()

    response = self.put(url_for('api.dataset', dataset=dataset), payload)
    self.assert200(response)
    self.assertEqual(Dataset.objects.count(), 1)

    dataset.reload()
    self.assertEqual(dataset.description, payload['description'])
    # Omitting 'resources' from the payload must leave them untouched.
    self.assertEqual(len(dataset.resources), initial_length)
def test_create_with_file(self):
    '''It should create a resource from the API with a file'''
    user = self.login()
    with self.autoindex():
        # Upload permission comes from org admin membership.
        org = OrganizationFactory(
            members=[Member(user=user, role='admin')])
        dataset = DatasetFactory(organization=org)
    # NOTE(review): bytes are passed to StringIO here; under Python 3's
    # io.StringIO this raises — presumably a bytes-compatible StringIO
    # is imported at file level. Verify against the imports.
    response = self.post(
        url_for('api.upload_new_dataset_resource', dataset=dataset),
        {'file': (StringIO(b'aaa'), 'test.txt')},
        json=False)
    self.assert201(response)
    data = json.loads(response.data)
    self.assertEqual(data['title'], 'test.txt')
    # Re-PUT the payload returned by the upload onto the resource endpoint.
    response = self.put(
        url_for('api.resource', dataset=dataset, rid=data['id']),
        data)
    self.assert200(response)
    dataset.reload()
    self.assertEqual(len(dataset.resources), 1)
    self.assertTrue(dataset.resources[0].url.endswith('test.txt'))
def test_clean(self):
    """Cleaning should strip recommendations extras, keeping the rest."""
    first = DatasetFactory(extras={
        'untouched': 'yep',
        'recommendations:sources': ['foo', 'bar'],
        'recommendations': [
            {'id': 'id1', 'source': 'bar', 'score': 50},
            {'id': 'id2', 'source': 'foo', 'score': 50},
        ],
    })
    second = DatasetFactory(extras={
        'wait': 'for it',
        'recommendations:sources': ['baz'],
        'recommendations': [
            {'id': 'id2', 'source': 'baz', 'score': 50},
        ],
    })

    recommendations_clean()

    first.reload()
    second.reload()
    # Only the non-recommendation extras must survive.
    assert first.extras == {'untouched': 'yep'}
    assert second.extras == {'wait': 'for it'}
def test_apigouvfr_load_apis(app, rmock):
    # The dataset whose id appears in datagouv_uuid should receive the
    # API list (minus the datagouv_uuid field) in its extras.
    dataset = DatasetFactory()
    url = current_app.config.get('APIGOUVFR_URL')
    apis = [{
        'title': 'une API',
        'tagline': 'tagline',
        'path': '/path',
        'slug': 'slug',
        'owner': 'owner',
        'openness': 'open',
        'logo': '/logo.png',
    }]
    # Deep copy so `apis` stays pristine for the final equality assert.
    payload = copy.deepcopy(apis)
    payload[0]['datagouv_uuid'] = [str(dataset.id), 'nope']
    # missing fields, won't be processed
    payload.append({
        'title': 'une autre API',
        'datagouv_uuid': [str(dataset.id)],
    })
    rmock.get(url, json=payload)
    apigouvfr_load_apis()
    dataset.reload()
    # Stored extras equal the original list: datagouv_uuid stripped,
    # incomplete entry dropped.
    assert dataset.extras.get(APIGOUVFR_EXTRAS_KEY) == apis
def test_delete(self):
    '''It should delete the connected user'''
    user = self.login()
    self.assertIsNone(user.deleted)
    other_user = UserFactory()
    members = [Member(user=user), Member(user=other_user)]
    organization = OrganizationFactory(members=members)

    # One message from the deleted user and one from another user, in
    # both a discussion and an issue, to check selective anonymization.
    disc_msg_content = faker.sentence()
    disc_msg = DiscMsg(content=disc_msg_content, posted_by=user)
    other_disc_msg_content = faker.sentence()
    other_disc_msg = DiscMsg(content=other_disc_msg_content,
                             posted_by=other_user)
    discussion = DiscussionFactory(user=user,
                                   discussion=[disc_msg, other_disc_msg])
    issue_msg_content = faker.sentence()
    issue_msg = IssueMsg(content=issue_msg_content, posted_by=user)
    other_issue_msg_content = faker.sentence()
    other_issue_msg = IssueMsg(content=other_issue_msg_content,
                               posted_by=other_user)
    issue = IssueFactory(user=user,
                         discussion=[issue_msg, other_issue_msg])

    # Content owned by the user: expected to survive account deletion.
    dataset = DatasetFactory(owner=user)
    reuse = ReuseFactory(owner=user)
    resource = CommunityResourceFactory(owner=user)
    activity = UserCreatedDataset.objects().create(actor=user,
                                                   related_to=dataset)
    following = Follow.objects().create(follower=user,
                                        following=other_user)
    followed = Follow.objects().create(follower=other_user, following=user)

    # Deleting the account must notify the user by mail, exactly once.
    with capture_mails() as mails:
        response = self.delete(url_for('api.me'))
    self.assertEqual(len(mails), 1)
    self.assertEqual(mails[0].send_to, set([user.email]))
    self.assertEqual(mails[0].subject, _('Account deletion'))
    self.assert204(response)

    user.reload()
    organization.reload()
    discussion.reload()
    issue.reload()
    dataset.reload()
    reuse.reload()
    resource.reload()
    activity.reload()

    # The following are deleted
    with self.assertRaises(Follow.DoesNotExist):
        following.reload()
    # The followers are deleted
    with self.assertRaises(Follow.DoesNotExist):
        followed.reload()
    # The personal data of the user are anonymized
    self.assertEqual(user.email, '{}@deleted'.format(user.id))
    self.assertEqual(user.password, None)
    self.assertEqual(user.active, False)
    self.assertEqual(user.first_name, 'DELETED')
    self.assertEqual(user.last_name, 'DELETED')
    self.assertFalse(bool(user.avatar))
    self.assertEqual(user.avatar_url, None)
    self.assertEqual(user.website, None)
    self.assertEqual(user.about, None)
    # The user is marked as deleted
    self.assertIsNotNone(user.deleted)
    # The user is removed from his organizations
    self.assertEqual(len(organization.members), 1)
    self.assertEqual(organization.members[0].user.id, other_user.id)
    # The discussions are kept but the messages are anonymized
    self.assertEqual(len(discussion.discussion), 2)
    self.assertEqual(discussion.discussion[0].content, 'DELETED')
    self.assertEqual(discussion.discussion[1].content,
                     other_disc_msg_content)
    # The issues are kept and the messages are not anonymized
    self.assertEqual(len(issue.discussion), 2)
    self.assertEqual(issue.discussion[0].content, issue_msg_content)
    self.assertEqual(issue.discussion[1].content, other_issue_msg_content)
    # The datasets are unchanged
    self.assertEqual(dataset.owner, user)
    # The reuses are unchanged
    self.assertEqual(reuse.owner, user)
    # The community resources are unchanged
    self.assertEqual(resource.owner, user)
    # The activities are unchanged
    self.assertEqual(activity.actor, user)
class DatasetResourceAPITest(APITestCase):
    # CRUD, reorder, follow and suggest tests for dataset resources.
    # NOTE(review): another class with this exact name appears later in
    # this concatenated source; presumably they come from different test
    # modules — verify before merging into one file.
    modules = None

    def setUp(self):
        # Each test runs logged in as the owner of a fresh dataset.
        self.login()
        self.dataset = DatasetFactory(owner=self.user)

    @attr('create')
    def test_create(self):
        data = ResourceFactory.as_dict()
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert201(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 1)

    @attr('create')
    def test_create_2nd(self):
        # Posting on a dataset that already has a resource appends to it.
        self.dataset.resources.append(ResourceFactory())
        self.dataset.save()
        data = ResourceFactory.as_dict()
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert201(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 2)

    @attr('create')
    def test_create_with_file(self):
        '''It should create a resource from the API with a file'''
        user = self.login()
        with self.autoindex():
            org = OrganizationFactory(
                members=[Member(user=user, role='admin')])
            dataset = DatasetFactory(organization=org)
        response = self.post(
            url_for('api.upload_new_dataset_resource', dataset=dataset),
            {'file': (StringIO(b'aaa'), 'test.txt')},
            json=False)
        self.assert201(response)
        data = json.loads(response.data)
        self.assertEqual(data['title'], 'test.txt')
        response = self.put(
            url_for('api.resource', dataset=dataset, rid=data['id']),
            data)
        self.assert200(response)
        dataset.reload()
        self.assertEqual(len(dataset.resources), 1)
        self.assertTrue(dataset.resources[0].url.endswith('test.txt'))

    @attr('update')
    def test_reorder(self):
        self.dataset.resources = ResourceFactory.build_batch(3)
        self.dataset.save()
        self.dataset.reload()  # Otherwise `last_modified` date is inaccurate.
        initial_last_modified = self.dataset.last_modified
        initial_order = [r.id for r in self.dataset.resources]
        expected_order = [{'id': str(id)} for id in reversed(initial_order)]
        with self.api_user():
            response = self.put(
                url_for('api.resources', dataset=self.dataset),
                expected_order)
        self.assertStatus(response, 200)
        self.assertEqual([str(r['id']) for r in response.json],
                         [str(r['id']) for r in expected_order])
        self.dataset.reload()
        self.assertEqual([str(r.id) for r in self.dataset.resources],
                         [str(r['id']) for r in expected_order])
        # Reordering must not touch the modification date.
        self.assertEqual(self.dataset.last_modified, initial_last_modified)

    @attr('update')
    def test_update(self):
        resource = ResourceFactory()
        self.dataset.resources.append(resource)
        self.dataset.save()
        now = datetime.now()
        data = {
            'title': faker.sentence(),
            'description': faker.text(),
            'url': faker.url(),
            'published': now.isoformat(),
        }
        with self.api_user():
            response = self.put(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(resource.id)), data)
        self.assert200(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 1)
        updated = self.dataset.resources[0]
        self.assertEqual(updated.title, data['title'])
        self.assertEqual(updated.description, data['description'])
        self.assertEqual(updated.url, data['url'])
        self.assertEqualDates(updated.published, now)

    @attr('update')
    def test_bulk_update(self):
        resources = ResourceFactory.build_batch(2)
        self.dataset.resources.extend(resources)
        self.dataset.save()
        now = datetime.now()
        ids = [r.id for r in self.dataset.resources]
        # Update both existing resources and append a brand new one.
        data = [{
            'id': str(id),
            'title': faker.sentence(),
            'description': faker.text(),
        } for id in ids]
        data.append({
            'title': faker.sentence(),
            'description': faker.text(),
            'url': faker.url(),
        })
        with self.api_user():
            response = self.put(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert200(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 3)
        for idx, id in enumerate(ids):
            resource = self.dataset.resources[idx]
            rdata = data[idx]
            self.assertEqual(str(resource.id), rdata['id'])
            self.assertEqual(resource.title, rdata['title'])
            self.assertEqual(resource.description, rdata['description'])
            self.assertIsNotNone(resource.url)
        new_resource = self.dataset.resources[-1]
        self.assertEqualDates(new_resource.published, now)

    @attr('update')
    def test_update_404(self):
        data = {
            'title': faker.sentence(),
            'description': faker.text(),
            'url': faker.url(),
        }
        with self.api_user():
            # rid of a resource that is not attached to the dataset.
            response = self.put(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(ResourceFactory().id)), data)
        self.assert404(response)

    @attr('update')
    def test_update_with_file(self):
        '''It should update a resource from the API with a file'''
        user = self.login()
        with self.autoindex():
            resource = ResourceFactory()
            org = OrganizationFactory(
                members=[Member(user=user, role='admin')])
            dataset = DatasetFactory(resources=[resource],
                                     organization=org)
        response = self.post(
            url_for('api.upload_dataset_resource', dataset=dataset,
                    rid=resource.id),
            {'file': (StringIO(b'aaa'), 'test.txt')},
            json=False)
        self.assert200(response)
        data = json.loads(response.data)
        self.assertEqual(data['title'], 'test.txt')
        response = self.put(
            url_for('api.resource', dataset=dataset, rid=data['id']),
            data)
        self.assert200(response)
        dataset.reload()
        self.assertEqual(len(dataset.resources), 1)
        self.assertTrue(dataset.resources[0].url.endswith('test.txt'))

    def test_delete(self):
        resource = ResourceFactory()
        self.dataset.resources.append(resource)
        self.dataset.save()
        with self.api_user():
            response = self.delete(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(resource.id)))
        self.assertStatus(response, 204)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 0)

    def test_delete_404(self):
        with self.api_user():
            response = self.delete(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(ResourceFactory().id)))
        self.assert404(response)

    def test_follow_dataset(self):
        '''It should follow a dataset on POST'''
        user = self.login()
        to_follow = DatasetFactory()
        response = self.post(
            url_for('api.dataset_followers', id=to_follow.id))
        self.assert201(response)
        self.assertEqual(Follow.objects.following(to_follow).count(), 0)
        self.assertEqual(Follow.objects.followers(to_follow).count(), 1)
        follow = Follow.objects.followers(to_follow).first()
        self.assertIsInstance(follow.following, Dataset)
        self.assertEqual(Follow.objects.following(user).count(), 1)
        self.assertEqual(Follow.objects.followers(user).count(), 0)

    def test_unfollow_dataset(self):
        '''It should unfollow the dataset on DELETE'''
        user = self.login()
        to_follow = DatasetFactory()
        Follow.objects.create(follower=user, following=to_follow)
        response = self.delete(
            url_for('api.dataset_followers', id=to_follow.id))
        self.assert200(response)
        nb_followers = Follow.objects.followers(to_follow).count()
        self.assertEqual(response.json['followers'], nb_followers)
        self.assertEqual(Follow.objects.following(to_follow).count(), 0)
        self.assertEqual(nb_followers, 0)
        self.assertEqual(Follow.objects.following(user).count(), 0)
        self.assertEqual(Follow.objects.followers(user).count(), 0)

    def test_suggest_formats_api(self):
        '''It should suggest formats'''
        with self.autoindex():
            DatasetFactory(resources=[
                ResourceFactory(format=f)
                for f in (faker.word(), faker.word(), 'test', 'test-1')
            ])
        response = self.get(url_for('api.suggest_formats'), qs={
            'q': 'test', 'size': '5'
        })
        self.assert200(response)
        self.assertLessEqual(len(response.json), 5)
        self.assertGreater(len(response.json), 1)
        # Shortest match first.
        self.assertEqual(response.json[0]['text'], 'test')
        for suggestion in response.json:
            self.assertIn('text', suggestion)
            self.assertIn('score', suggestion)
            self.assertTrue(suggestion['text'].startswith('test'))

    def test_suggest_format_api_no_match(self):
        '''It should not provide format suggestion if no match'''
        with self.autoindex():
            DatasetFactory(resources=[
                ResourceFactory(format=faker.word()) for _ in range(3)
            ])
        response = self.get(url_for('api.suggest_formats'), qs={
            'q': 'test', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)

    def test_suggest_format_api_empty(self):
        '''It should not provide format suggestion if no data'''
        self.init_search()
        response = self.get(url_for('api.suggest_formats'), qs={
            'q': 'test', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)

    def test_suggest_datasets_api(self):
        '''It should suggest datasets'''
        with self.autoindex():
            # Half the datasets match the 'tes' prefix.
            for i in range(4):
                DatasetFactory(
                    title='test-{0}'.format(i) if i % 2 else faker.word(),
                    resources=[ResourceFactory()])
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'tes', 'size': '5'
        })
        self.assert200(response)
        self.assertLessEqual(len(response.json), 5)
        self.assertGreater(len(response.json), 1)
        for suggestion in response.json:
            self.assertIn('id', suggestion)
            self.assertIn('title', suggestion)
            self.assertIn('slug', suggestion)
            self.assertIn('score', suggestion)
            self.assertIn('image_url', suggestion)
            self.assertTrue(suggestion['title'].startswith('test'))

    def test_suggest_datasets_api_unicode(self):
        '''It should suggest datasets with special characters'''
        with self.autoindex():
            for i in range(4):
                DatasetFactory(
                    title='testé-{0}'.format(i) if i % 2 else faker.word(),
                    resources=[ResourceFactory()])
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'testé', 'size': '5'
        })
        self.assert200(response)
        self.assertLessEqual(len(response.json), 5)
        self.assertGreater(len(response.json), 1)
        for suggestion in response.json:
            self.assertIn('id', suggestion)
            self.assertIn('title', suggestion)
            self.assertIn('slug', suggestion)
            self.assertIn('score', suggestion)
            self.assertIn('image_url', suggestion)
            self.assertTrue(suggestion['title'].startswith('test'))

    def test_suggest_datasets_api_no_match(self):
        '''It should not provide dataset suggestion if no match'''
        with self.autoindex():
            for i in range(3):
                DatasetFactory(resources=[ResourceFactory()])
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'xxxxxx', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)

    def test_suggest_datasets_api_empty(self):
        '''It should not provide dataset suggestion if no data'''
        self.init_search()
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'xxxxxx', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)
class DatasetBadgeAPITest(APITestCase):
    """Badge listing, creation and deletion on datasets.

    Fix: ``Dataset.__badges__.keys()`` was indexed directly
    (``kinds_keys[0]``); dict views are not subscriptable in Python 3,
    so the keys are now materialized with ``list()`` (a no-op on
    Python 2) before indexing.
    """

    @classmethod
    def setUpClass(cls):
        # Register at least two badges
        Dataset.__badges__['test-1'] = 'Test 1'
        Dataset.__badges__['test-2'] = 'Test 2'
        cls.factory = badge_factory(Dataset)

    def setUp(self):
        # Admin rights are required to manage badges.
        self.login(AdminFactory())
        self.dataset = DatasetFactory(owner=UserFactory())

    def test_list(self):
        # The endpoint exposes every registered badge kind with its label.
        response = self.get(url_for('api.available_dataset_badges'))
        self.assertStatus(response, 200)
        self.assertEqual(len(response.json), len(Dataset.__badges__))
        for kind, label in Dataset.__badges__.items():
            self.assertIn(kind, response.json)
            self.assertEqual(response.json[kind], label)

    def test_create(self):
        data = self.factory.as_dict()
        with self.api_user():
            response = self.post(
                url_for('api.dataset_badges', dataset=self.dataset), data)
        self.assert201(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.badges), 1)

    def test_create_same(self):
        # Posting the same badge twice must not duplicate it (200, not 201).
        data = self.factory.as_dict()
        with self.api_user():
            self.post(
                url_for('api.dataset_badges', dataset=self.dataset), data)
            response = self.post(
                url_for('api.dataset_badges', dataset=self.dataset), data)
        self.assertStatus(response, 200)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.badges), 1)

    def test_create_2nd(self):
        # Explicitely setting the kind to avoid collisions given the
        # small number of choices for kinds.
        kinds_keys = list(Dataset.__badges__)  # views aren't indexable
        self.dataset.badges.append(self.factory(kind=kinds_keys[0]))
        self.dataset.save()
        data = self.factory.as_dict()
        data['kind'] = kinds_keys[1]
        with self.api_user():
            response = self.post(
                url_for('api.dataset_badges', dataset=self.dataset), data)
        self.assert201(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.badges), 2)

    def test_delete(self):
        badge = self.factory()
        self.dataset.badges.append(badge)
        self.dataset.save()
        with self.api_user():
            response = self.delete(
                url_for('api.dataset_badge', dataset=self.dataset,
                        badge_kind=str(badge.kind)))
        self.assertStatus(response, 204)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.badges), 0)

    def test_delete_404(self):
        # Deleting a badge kind the dataset does not carry is a 404.
        with self.api_user():
            response = self.delete(
                url_for('api.dataset_badge', dataset=self.dataset,
                        badge_kind=str(self.factory().kind)))
        self.assert404(response)
class DatasetResourceAPITest(APITestCase):
    # Resource CRUD (including chunked uploads and filetype/domain
    # restrictions), follow and suggest tests.
    # NOTE(review): same class name as an earlier class in this
    # concatenated source; presumably they come from different test
    # modules — verify before merging into one file.
    modules = None

    def setUp(self):
        # Each test runs logged in as the owner of a fresh dataset.
        self.login()
        self.dataset = DatasetFactory(owner=self.user)

    def test_get(self):
        '''It should fetch a resource from the API'''
        resource = ResourceFactory()
        dataset = DatasetFactory(resources=[resource])
        response = self.get(
            url_for('api.resource', dataset=dataset, rid=resource.id))
        self.assert200(response)
        data = json.loads(response.data)
        assert data['title'] == resource.title
        assert data['latest'] == resource.latest
        assert data['url'] == resource.url

    def test_create(self):
        data = ResourceFactory.as_dict()
        data['extras'] = {'extra:id': 'id'}
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert201(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 1)
        # Extras provided at creation time must be persisted verbatim.
        self.assertEqual(self.dataset.resources[0].extras,
                         {'extra:id': 'id'})

    def test_create_normalize_format(self):
        # The format field is stripped and lowercased on creation.
        _format = ' FORMAT '
        data = ResourceFactory.as_dict()
        data['format'] = _format
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert201(response)
        self.dataset.reload()
        self.assertEqual(self.dataset.resources[0].format,
                         _format.strip().lower())

    def test_create_2nd(self):
        # Posting on a dataset that already has a resource appends to it.
        self.dataset.resources.append(ResourceFactory())
        self.dataset.save()
        data = ResourceFactory.as_dict()
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert201(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 2)

    def test_create_with_file(self):
        '''It should create a resource from the API with a file'''
        user = self.login()
        with self.autoindex():
            org = OrganizationFactory(
                members=[Member(user=user, role='admin')])
            dataset = DatasetFactory(organization=org)
        response = self.post(
            url_for('api.upload_new_dataset_resource', dataset=dataset),
            {'file': (StringIO(b'aaa'), 'test.txt')},
            json=False)
        self.assert201(response)
        data = json.loads(response.data)
        self.assertEqual(data['title'], 'test.txt')
        response = self.put(
            url_for('api.resource', dataset=dataset, rid=data['id']),
            data)
        self.assert200(response)
        dataset.reload()
        self.assertEqual(len(dataset.resources), 1)
        self.assertTrue(dataset.resources[0].url.endswith('test.txt'))

    def test_create_with_file_chunks(self):
        '''It should create a resource from the API with a chunked file'''
        user = self.login()
        with self.autoindex():
            org = OrganizationFactory(
                members=[Member(user=user, role='admin')])
            dataset = DatasetFactory(organization=org)
        uuid = str(uuid4())
        parts = 4
        url = url_for('api.upload_new_dataset_resource', dataset=dataset)
        # Upload each 1-byte chunk; intermediate responses must not leak
        # file metadata before the final combine request.
        for i in range(parts):
            response = self.post(url, {
                'file': (StringIO(b'a'), 'blob'),
                'uuid': uuid,
                'filename': 'test.txt',
                'partindex': i,
                'partbyteoffset': 0,
                'totalfilesize': parts,
                'totalparts': parts,
                'chunksize': 1,
            }, json=False)
            self.assert200(response)
            assert response.json['success']
            assert 'filename' not in response.json
            assert 'url' not in response.json
            assert 'size' not in response.json
            assert 'sha1' not in response.json
            assert 'url' not in response.json
        # Final request (no file part) combines the uploaded chunks.
        response = self.post(url, {
            'uuid': uuid,
            'filename': 'test.txt',
            'totalfilesize': parts,
            'totalparts': parts,
        }, json=False)
        self.assert201(response)
        data = json.loads(response.data)
        self.assertEqual(data['title'], 'test.txt')

    def test_create_filetype_file_unallowed_domain(self):
        # With no allowed domains, a 'file' filetype URL must be rejected.
        self.app.config['RESOURCES_FILE_ALLOWED_DOMAINS'] = []
        data = ResourceFactory.as_dict()
        data['filetype'] = RESOURCE_FILETYPE_FILE
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert400(response)

    def test_create_filetype_file_allowed_domain(self):
        self.app.config['RESOURCES_FILE_ALLOWED_DOMAINS'] = [
            'udata.gouv.fr',
        ]
        data = ResourceFactory.as_dict()
        data['filetype'] = RESOURCE_FILETYPE_FILE
        data['url'] = 'http://udata.gouv.fr/resource'
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert201(response)

    def test_create_filetype_file_server_name(self):
        # The current server name is implicitly an allowed domain.
        self.app.config['RESOURCES_FILE_ALLOWED_DOMAINS'] = []
        data = ResourceFactory.as_dict()
        data['filetype'] = RESOURCE_FILETYPE_FILE
        data['url'] = 'http://%s/resource' % self.app.config['SERVER_NAME']
        with self.api_user():
            response = self.post(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert201(response)

    def test_reorder(self):
        self.dataset.resources = ResourceFactory.build_batch(3)
        self.dataset.save()
        self.dataset.reload()  # Otherwise `last_modified` date is inaccurate.
        initial_last_modified = self.dataset.last_modified
        initial_order = [r.id for r in self.dataset.resources]
        expected_order = [{'id': str(id)} for id in reversed(initial_order)]
        with self.api_user():
            response = self.put(
                url_for('api.resources', dataset=self.dataset),
                expected_order)
        self.assertStatus(response, 200)
        self.assertEqual([str(r['id']) for r in response.json],
                         [str(r['id']) for r in expected_order])
        self.dataset.reload()
        self.assertEqual([str(r.id) for r in self.dataset.resources],
                         [str(r['id']) for r in expected_order])
        # Reordering must not touch the modification date.
        self.assertEqual(self.dataset.last_modified, initial_last_modified)

    def test_update(self):
        resource = ResourceFactory()
        self.dataset.resources.append(resource)
        self.dataset.save()
        now = datetime.now()
        data = {
            'title': faker.sentence(),
            'description': faker.text(),
            'url': faker.url(),
            'published': now.isoformat(),
            'extras': {
                'extra:id': 'id',
            }
        }
        with self.api_user():
            response = self.put(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(resource.id)), data)
        self.assert200(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 1)
        updated = self.dataset.resources[0]
        self.assertEqual(updated.title, data['title'])
        self.assertEqual(updated.description, data['description'])
        self.assertEqual(updated.url, data['url'])
        self.assertEqual(updated.extras, {'extra:id': 'id'})
        self.assertEqualDates(updated.published, now)

    def test_bulk_update(self):
        resources = ResourceFactory.build_batch(2)
        self.dataset.resources.extend(resources)
        self.dataset.save()
        now = datetime.now()
        ids = [r.id for r in self.dataset.resources]
        # Update both existing resources and append a brand new one.
        data = [{
            'id': str(id),
            'title': faker.sentence(),
            'description': faker.text(),
        } for id in ids]
        data.append({
            'title': faker.sentence(),
            'description': faker.text(),
            'url': faker.url(),
        })
        with self.api_user():
            response = self.put(
                url_for('api.resources', dataset=self.dataset), data)
        self.assert200(response)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 3)
        for idx, id in enumerate(ids):
            resource = self.dataset.resources[idx]
            rdata = data[idx]
            self.assertEqual(str(resource.id), rdata['id'])
            self.assertEqual(resource.title, rdata['title'])
            self.assertEqual(resource.description, rdata['description'])
            self.assertIsNotNone(resource.url)
        new_resource = self.dataset.resources[-1]
        self.assertEqualDates(new_resource.published, now)

    def test_update_404(self):
        data = {
            'title': faker.sentence(),
            'description': faker.text(),
            'url': faker.url(),
        }
        with self.api_user():
            # rid of a resource that is not attached to the dataset.
            response = self.put(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(ResourceFactory().id)), data)
        self.assert404(response)

    def test_update_with_file(self):
        '''It should update a resource from the API with a file'''
        user = self.login()
        with self.autoindex():
            resource = ResourceFactory()
            org = OrganizationFactory(
                members=[Member(user=user, role='admin')])
            dataset = DatasetFactory(resources=[resource],
                                     organization=org)
        response = self.post(
            url_for('api.upload_dataset_resource', dataset=dataset,
                    rid=resource.id),
            {'file': (StringIO(b'aaa'), 'test.txt')},
            json=False)
        self.assert200(response)
        data = json.loads(response.data)
        self.assertEqual(data['title'], 'test.txt')
        response = self.put(
            url_for('api.resource', dataset=dataset, rid=data['id']),
            data)
        self.assert200(response)
        dataset.reload()
        self.assertEqual(len(dataset.resources), 1)
        self.assertTrue(dataset.resources[0].url.endswith('test.txt'))

    def test_delete(self):
        resource = ResourceFactory()
        self.dataset.resources.append(resource)
        self.dataset.save()
        with self.api_user():
            response = self.delete(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(resource.id)))
        self.assertStatus(response, 204)
        self.dataset.reload()
        self.assertEqual(len(self.dataset.resources), 0)

    def test_delete_404(self):
        with self.api_user():
            response = self.delete(
                url_for('api.resource', dataset=self.dataset,
                        rid=str(ResourceFactory().id)))
        self.assert404(response)

    def test_follow_dataset(self):
        '''It should follow a dataset on POST'''
        user = self.login()
        to_follow = DatasetFactory()
        response = self.post(
            url_for('api.dataset_followers', id=to_follow.id))
        self.assert201(response)
        self.assertEqual(Follow.objects.following(to_follow).count(), 0)
        self.assertEqual(Follow.objects.followers(to_follow).count(), 1)
        follow = Follow.objects.followers(to_follow).first()
        self.assertIsInstance(follow.following, Dataset)
        self.assertEqual(Follow.objects.following(user).count(), 1)
        self.assertEqual(Follow.objects.followers(user).count(), 0)

    def test_unfollow_dataset(self):
        '''It should unfollow the dataset on DELETE'''
        user = self.login()
        to_follow = DatasetFactory()
        Follow.objects.create(follower=user, following=to_follow)
        response = self.delete(
            url_for('api.dataset_followers', id=to_follow.id))
        self.assert200(response)
        nb_followers = Follow.objects.followers(to_follow).count()
        self.assertEqual(response.json['followers'], nb_followers)
        self.assertEqual(Follow.objects.following(to_follow).count(), 0)
        self.assertEqual(nb_followers, 0)
        self.assertEqual(Follow.objects.following(user).count(), 0)
        self.assertEqual(Follow.objects.followers(user).count(), 0)

    def test_suggest_formats_api(self):
        '''It should suggest formats'''
        with self.autoindex():
            DatasetFactory(resources=[
                ResourceFactory(format=f)
                for f in (faker.word(), faker.word(), 'test', 'test-1')
            ])
        response = self.get(url_for('api.suggest_formats'), qs={
            'q': 'test', 'size': '5'
        })
        self.assert200(response)
        self.assertLessEqual(len(response.json), 5)
        self.assertGreater(len(response.json), 1)
        # Shortest match first.
        self.assertEqual(response.json[0]['text'], 'test')
        for suggestion in response.json:
            self.assertIn('text', suggestion)
            self.assertIn('score', suggestion)
            self.assertTrue(suggestion['text'].startswith('test'))

    def test_suggest_format_api_no_match(self):
        '''It should not provide format suggestion if no match'''
        with self.autoindex():
            DatasetFactory(resources=[
                ResourceFactory(format=faker.word()) for _ in range(3)
            ])
        response = self.get(url_for('api.suggest_formats'), qs={
            'q': 'test', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)

    def test_suggest_format_api_empty(self):
        '''It should not provide format suggestion if no data'''
        self.init_search()
        response = self.get(url_for('api.suggest_formats'), qs={
            'q': 'test', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)

    def test_suggest_datasets_api(self):
        '''It should suggest datasets'''
        with self.autoindex():
            # Half the datasets match the 'tes' prefix.
            for i in range(4):
                DatasetFactory(
                    title='test-{0}'.format(i) if i % 2 else faker.word(),
                    visible=True)
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'tes', 'size': '5'
        })
        self.assert200(response)
        self.assertLessEqual(len(response.json), 5)
        self.assertGreater(len(response.json), 1)
        for suggestion in response.json:
            self.assertIn('id', suggestion)
            self.assertIn('title', suggestion)
            self.assertIn('slug', suggestion)
            self.assertIn('score', suggestion)
            self.assertIn('image_url', suggestion)
            self.assertTrue(suggestion['title'].startswith('test'))

    def test_suggest_datasets_acronym_api(self):
        '''It should suggest datasets from their acronyms'''
        with self.autoindex():
            for i in range(4):
                DatasetFactory(
                    # Ensure title does not contains 'tes'
                    title=faker.unique_string(),
                    acronym='test-{0}'.format(i) if i % 2 else None,
                    visible=True)
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'tes', 'size': '5'
        })
        self.assert200(response)
        self.assertLessEqual(len(response.json), 5)
        self.assertGreater(len(response.json), 1)
        for suggestion in response.json:
            self.assertIn('id', suggestion)
            self.assertIn('title', suggestion)
            self.assertIn('slug', suggestion)
            self.assertIn('score', suggestion)
            self.assertIn('image_url', suggestion)
            # Matches must come from the acronym, never the title.
            self.assertNotIn('tes', suggestion['title'])
            self.assertTrue(suggestion['acronym'].startswith('test'))

    def test_suggest_datasets_api_unicode(self):
        '''It should suggest datasets with special characters'''
        with self.autoindex():
            for i in range(4):
                DatasetFactory(
                    title='testé-{0}'.format(i) if i % 2 else faker.word(),
                    resources=[ResourceFactory()])
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'testé', 'size': '5'
        })
        self.assert200(response)
        self.assertLessEqual(len(response.json), 5)
        self.assertGreater(len(response.json), 1)
        for suggestion in response.json:
            self.assertIn('id', suggestion)
            self.assertIn('title', suggestion)
            self.assertIn('slug', suggestion)
            self.assertIn('score', suggestion)
            self.assertIn('image_url', suggestion)
            self.assertTrue(suggestion['title'].startswith('test'))

    def test_suggest_datasets_api_no_match(self):
        '''It should not provide dataset suggestion if no match'''
        with self.autoindex():
            for i in range(3):
                DatasetFactory(resources=[ResourceFactory()])
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'xxxxxx', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)

    def test_suggest_datasets_api_empty(self):
        '''It should not provide dataset suggestion if no data'''
        self.init_search()
        response = self.get(url_for('api.suggest_datasets'), qs={
            'q': 'xxxxxx', 'size': '5'
        })
        self.assert200(response)
        self.assertEqual(len(response.json), 0)