def test_blogpost_with_thumbnail_as_enclosure(self, blogpost, mime):
    """An image enclosure should be exposed as the post thumbnail."""
    expected_title = faker.sentence()
    expected_url = faker.uri()
    thumbnail = faker.image_url()
    zone = pytz.timezone(faker.timezone())
    published = faker.date_time(tzinfo=zone)
    body = faker.sentence()
    feed = self.feed('Some blog', expected_title,
                     '<div>{0}</div>'.format(body), expected_url,
                     published=published,
                     enclosure={'type': mime, 'url': thumbnail})

    post = blogpost(feed)

    assert post['title'] == expected_title
    assert post['link'] == expected_url
    assert post['summary'] == body
    assert_equal_dates(post['date'], published)
    assert post['image_url'] == thumbnail
    # No responsive image metadata expected from a plain enclosure.
    assert 'srcset' not in post
    assert 'sizes' not in post
def test_blogpost_with_first_image_as_thumbnail_as_src_set(
        self, blogpost, tpl):
    """A responsive first image should provide srcset/sizes metadata."""
    expected_title = faker.sentence()
    expected_url = faker.uri()
    thumbnail = faker.image_url()
    abstract = faker.sentence()
    zone = pytz.timezone(faker.timezone())
    published = faker.date_time(tzinfo=zone)
    # Build a "url widthw" descriptor for each candidate width.
    candidates = []
    for width in ('1200', '1024', '300'):
        candidates.append(
            ' '.join((faker.image_url(width=width), '{0}w'.format(width))))
    srcset = ', '.join(candidates)
    sizes = "(max-width: 1200px) 100vw, 1200px"
    feed = self.feed('Some blog', expected_title,
                     tpl.format(thumbnail, srcset, sizes), expected_url,
                     published=published, summary=abstract)

    post = blogpost(feed)

    assert post['title'] == expected_title
    assert post['link'] == expected_url
    assert post['summary'] == abstract
    assert_equal_dates(post['date'], published)
    assert post['image_url'] == thumbnail
    assert post['srcset'] == srcset
    assert post['sizes'] == sizes
def test_issues_for_user_with_closed(self):
    """With only_open=False, issues_for includes closed issues too."""
    owner = UserFactory()
    dataset = DatasetFactory(owner=owner)
    reuse = ReuseFactory(owner=owner)
    open_issues = []
    for i in range(3):
        user = UserFactory()
        message = Message(content=faker.sentence(), posted_by=user)
        # One issue per subject type (dataset and reuse) on each iteration.
        open_issues.append(
            Issue.objects.create(subject=dataset, user=user,
                                 title=faker.sentence(),
                                 discussion=[message]))
        open_issues.append(
            Issue.objects.create(subject=reuse, user=user,
                                 title=faker.sentence(),
                                 discussion=[message]))
    # A closed issue: because only_open=False below, it IS counted
    # (hence the `+ 1` in the final assertion).
    user = UserFactory()
    message = Message(content=faker.sentence(), posted_by=user)
    Issue.objects.create(subject=dataset, user=user,
                         title=faker.sentence(),
                         discussion=[message],
                         closed=datetime.now(), closed_by=user)
    issues = issues_for(owner, only_open=False)
    self.assertEqual(len(issues), len(open_issues) + 1)
def test_list_issues_for(self):
    """Filtering issues with ?for=<dataset id> only returns that subject's issues."""
    dataset = Dataset.objects.create(title='Test dataset')
    issues = []
    for i in range(3):
        user = UserFactory()
        message = Message(content=faker.sentence(), posted_by=user)
        issues.append(Issue.objects.create(
            subject=dataset,
            user=user,
            title='test issue {}'.format(i),
            discussion=[message]
        ))
    # An issue on another dataset must not show up in the response.
    # (Fixed: the previous comment wrongly called this a "closed" issue,
    # the unused `issue = ` rebinding is dropped, and the title no longer
    # relies on the loop variable leaking out of the for-loop.)
    user = UserFactory()
    other_dataset = Dataset.objects.create(title='Other Test dataset')
    message = Message(content=faker.sentence(), posted_by=user)
    Issue.objects.create(
        subject=other_dataset,
        user=user,
        title='test issue on another dataset',
        discussion=[message]
    )
    kwargs = {'for': str(dataset.id)}
    response = self.get(url_for('api.issues', **kwargs))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(issues))
def test_list_discussions_for(self):
    """Filtering discussions with ?for=<dataset id> only returns that subject's discussions."""
    dataset = DatasetFactory()
    discussions = []
    for i in range(3):
        user = UserFactory()
        message = Message(content=faker.sentence(), posted_by=user)
        discussions.append(Discussion.objects.create(
            subject=dataset,
            user=user,
            title='test discussion {}'.format(i),
            discussion=[message]
        ))
    # A discussion on another dataset must not show up in the response.
    # (Fixed: the title no longer relies on the loop variable `i`
    # leaking out of the for-loop above.)
    user = UserFactory()
    message = Message(content=faker.sentence(), posted_by=user)
    Discussion.objects.create(
        subject=DatasetFactory(),
        user=user,
        title='test discussion on another dataset',
        discussion=[message]
    )
    kwargs = {'for': str(dataset.id)}
    response = self.get(url_for('api.discussions', **kwargs))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(discussions))
def test_bulk_update(self):
    """PUT on the resources collection updates existing items and appends new ones."""
    extra_resources = ResourceFactory.build_batch(2)
    self.dataset.resources.extend(extra_resources)
    self.dataset.save()
    now = datetime.now()
    resource_ids = [resource.id for resource in self.dataset.resources]
    # Payload: one update entry per existing resource...
    payload = []
    for rid in resource_ids:
        payload.append({
            'id': str(rid),
            'title': faker.sentence(),
            'description': faker.text(),
        })
    # ...plus one brand new resource (no id).
    payload.append({
        'title': faker.sentence(),
        'description': faker.text(),
        'url': faker.url(),
    })
    with self.api_user():
        response = self.put(
            url_for('api.resources', dataset=self.dataset), payload)
    self.assert200(response)
    self.dataset.reload()
    self.assertEqual(len(self.dataset.resources), 3)
    for position, rid in enumerate(resource_ids):
        resource = self.dataset.resources[position]
        expected = payload[position]
        self.assertEqual(str(resource.id), expected['id'])
        self.assertEqual(resource.title, expected['title'])
        self.assertEqual(resource.description, expected['description'])
        # Existing URLs must be preserved when absent from the payload.
        self.assertIsNotNone(resource.url)
    new_resource = self.dataset.resources[-1]
    self.assertEqualDates(new_resource.published, now)
def test_list_discussions_closed_filter(self):
    """?closed=true should only list closed discussions."""
    dataset = Dataset.objects.create(title='Test dataset')
    open_discussions = []
    closed_discussions = []
    for index in range(2):
        author = UserFactory()
        first_message = Message(content=faker.sentence(), posted_by=author)
        open_discussions.append(Discussion.objects.create(
            subject=dataset,
            user=author,
            title='test discussion {}'.format(index),
            discussion=[first_message]
        ))
    for index in range(3):
        author = UserFactory()
        first_message = Message(content=faker.sentence(), posted_by=author)
        closed_discussions.append(Discussion.objects.create(
            subject=dataset,
            user=author,
            title='test discussion {}'.format(index),
            discussion=[first_message],
            closed=datetime.now(),
            closed_by=author
        ))
    response = self.get(url_for('api.discussions', closed=True))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(closed_discussions))
    for discussion in response.json['data']:
        self.assertIsNotNone(discussion['closed'])
def test_list_issues_for(self):
    """Filtering issues with ?for=<dataset id> only returns that subject's issues."""
    dataset = Dataset.objects.create(title='Test dataset')
    issues = []
    for i in range(3):
        user = UserFactory()
        message = Message(content=faker.sentence(), posted_by=user)
        issues.append(Issue.objects.create(
            subject=dataset, user=user,
            title='test issue {}'.format(i),
            discussion=[message]))
    # An issue on another dataset must not show up in the response.
    # (Fixed: the previous comment wrongly called this a "closed" issue,
    # the unused `issue = ` rebinding is dropped, and the title no longer
    # relies on the loop variable leaking out of the for-loop.)
    user = UserFactory()
    other_dataset = Dataset.objects.create(title='Other Test dataset')
    message = Message(content=faker.sentence(), posted_by=user)
    Issue.objects.create(
        subject=other_dataset, user=user,
        title='test issue on another dataset',
        discussion=[message])
    kwargs = {'for': str(dataset.id)}
    response = self.get(url_for('api.issues', **kwargs))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(issues))
def test_closed_issue_mail(self):
    """Closing an issue mails every participant except the closer."""
    owner = UserFactory()
    poster = UserFactory()
    commenter = UserFactory()
    message = Message(content=faker.sentence(), posted_by=poster)
    second_message = Message(content=faker.sentence(), posted_by=commenter)
    closing_message = Message(content=faker.sentence(), posted_by=owner)
    issue = Issue.objects.create(
        subject=DatasetFactory(owner=owner),
        user=poster,
        title=faker.sentence(),
        discussion=[message, second_message, closing_message]
    )
    # (Removed a leftover commented-out `issue = IssueFactory()` line.)
    with capture_mails() as mails:
        # The closing message is referenced by its index in the discussion.
        notify_issue_closed(issue.id, message=len(issue.discussion) - 1)
    # One mail per participant, none to the closer (the owner).
    expected_recipients = (poster.email, commenter.email)
    self.assertEqual(len(mails), len(expected_recipients))
    for mail in mails:
        self.assertIn(mail.recipients[0], expected_recipients)
        self.assertNotIn(owner.email, mail.recipients)
def test_list_discussions_closed_filter(self):
    """?closed=true should only list closed discussions."""
    dataset = Dataset.objects.create(title='Test dataset')
    open_discussions = [self._make_discussion(dataset, index)
                        if False else None
                        for index in ()]  # placeholder removed below
    open_discussions = []
    closed_discussions = []
    # Two open discussions.
    for index in range(2):
        participant = UserFactory()
        opening = Message(content=faker.sentence(), posted_by=participant)
        open_discussions.append(Discussion.objects.create(
            subject=dataset, user=participant,
            title='test discussion {}'.format(index),
            discussion=[opening]))
    # Three closed discussions, closed by their own author.
    for index in range(3):
        participant = UserFactory()
        opening = Message(content=faker.sentence(), posted_by=participant)
        closed_discussions.append(Discussion.objects.create(
            subject=dataset, user=participant,
            title='test discussion {}'.format(index),
            discussion=[opening],
            closed=datetime.now(), closed_by=participant))
    response = self.get(url_for('api.discussions', closed=True))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(closed_discussions))
    for discussion in response.json['data']:
        self.assertIsNotNone(discussion['closed'])
def test_blogpost_with_first_image_as_thumbnail_and_summary(
        self, blogpost):
    """An explicit summary wins and the first content image becomes the thumbnail."""
    expected_title = faker.sentence()
    expected_url = faker.uri()
    thumbnail = faker.image_url()
    abstract = faker.sentence()
    zone = pytz.timezone(faker.timezone())
    published = faker.date_time(tzinfo=zone)
    body = '<p><img class="whatever" src="{0}" /> Whatever whatever</p>'.format(
        thumbnail)
    feed = self.feed('Some blog', expected_title, body, expected_url,
                     published=published, summary=abstract)

    post = blogpost(feed)

    assert post['title'] == expected_title
    assert post['link'] == expected_url
    assert post['summary'] == abstract
    assert_equal_dates(post['date'], published)
    assert post['image_url'] == thumbnail
    # A plain <img> carries no responsive metadata.
    assert 'srcset' not in post
    assert 'sizes' not in post
def test_notify_user_discussions(self):
    """An owner is notified about open discussions only, with correct details."""
    owner = UserFactory()
    dataset = DatasetFactory(owner=owner)
    open_discussions = {}
    for _ in range(3):
        participant = UserFactory()
        opening = Message(content=faker.sentence(), posted_by=participant)
        created = Discussion.objects.create(subject=dataset,
                                            user=participant,
                                            title=faker.sentence(),
                                            discussion=[opening])
        open_discussions[created.id] = created
    # A closed discussion must not produce a notification.
    participant = UserFactory()
    opening = Message(content=faker.sentence(), posted_by=participant)
    Discussion.objects.create(subject=dataset,
                              user=participant,
                              title=faker.sentence(),
                              discussion=[opening],
                              closed=datetime.now(),
                              closed_by=participant)
    notifications = discussions_notifications(owner)
    self.assertEqual(len(notifications), len(open_discussions))
    for dt, details in notifications:
        matching = open_discussions[details['id']]
        self.assertEqual(details['title'], matching.title)
        self.assertEqual(details['subject']['id'], matching.subject.id)
        self.assertEqual(details['subject']['type'], 'dataset')
def test_closed_issue_mail(self):
    """Closing an issue mails every participant except the closer."""
    owner = UserFactory()
    poster = UserFactory()
    commenter = UserFactory()
    message = Message(content=faker.sentence(), posted_by=poster)
    second_message = Message(content=faker.sentence(), posted_by=commenter)
    closing_message = Message(content=faker.sentence(), posted_by=owner)
    issue = Issue.objects.create(
        subject=DatasetFactory(owner=owner),
        user=poster,
        title=faker.sentence(),
        discussion=[message, second_message, closing_message]
    )
    # (Removed a leftover commented-out `issue = IssueFactory()` line.)
    with capture_mails() as mails:
        notify_issue_closed(issue, message=closing_message)
    # One mail per participant, none to the closer (the owner).
    expected_recipients = (poster.email, commenter.email)
    self.assertEqual(len(mails), len(expected_recipients))
    for mail in mails:
        self.assertIn(mail.recipients[0], expected_recipients)
        self.assertNotIn(owner.email, mail.recipients)
def test_list_discussions_for(self):
    """Filtering discussions with ?for=<dataset id> only returns that subject's discussions."""
    dataset = DatasetFactory()
    discussions = []
    for i in range(3):
        user = UserFactory()
        message = Message(content=faker.sentence(), posted_by=user)
        discussions.append(Discussion.objects.create(
            subject=dataset,
            user=user,
            title='test discussion {}'.format(i),
            discussion=[message]
        ))
    # A discussion on another dataset must not show up in the response.
    # (Fixed: the title no longer relies on the loop variable `i`
    # leaking out of the for-loop above.)
    user = UserFactory()
    message = Message(content=faker.sentence(), posted_by=user)
    Discussion.objects.create(
        subject=DatasetFactory(),
        user=user,
        title='test discussion on another dataset',
        discussion=[message]
    )
    kwargs = {'for': str(dataset.id)}
    response = self.get(url_for('api.discussions', **kwargs))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(discussions))
def test_blogpost_with_first_image_as_thumbnail_as_src_set(self, blogpost,
                                                           tpl):
    """A responsive first image should expose srcset and sizes on the post."""
    expected_title = faker.sentence()
    expected_url = faker.uri()
    thumbnail = faker.image_url()
    abstract = faker.sentence()
    zone = pytz.timezone(faker.timezone())
    published = faker.date_time(tzinfo=zone)
    widths = ('1200', '1024', '300')
    srcset = ', '.join('{0} {1}w'.format(faker.image_url(width=width), width)
                       for width in widths)
    sizes = "(max-width: 1200px) 100vw, 1200px"
    body = tpl.format(thumbnail, srcset, sizes)
    feed = self.feed('Some blog', expected_title, body, expected_url,
                     published=published, summary=abstract)

    post = blogpost(feed)

    assert post['title'] == expected_title
    assert post['link'] == expected_url
    assert post['summary'] == abstract
    assert_equal_dates(post['date'], published)
    assert post['image_url'] == thumbnail
    assert post['srcset'] == srcset
    assert post['sizes'] == sizes
def test_list_issues(self):
    """The unfiltered issue listing returns both open and closed issues."""
    dataset = Dataset.objects.create(title='Test dataset')
    open_issues = []
    for i in range(3):
        user = UserFactory()
        message = Message(content=faker.sentence(), posted_by=user)
        issue = Issue.objects.create(
            subject=dataset,
            user=user,
            title='test issue {}'.format(i),
            discussion=[message]
        )
        open_issues.append(issue)
    # A closed issue: the unfiltered listing DOES include it
    # (the expected length below counts open + closed).
    user = UserFactory()
    message = Message(content=faker.sentence(), posted_by=user)
    closed_issues = [Issue.objects.create(
        subject=dataset,
        user=user,
        title='test issue {}'.format(i),
        discussion=[message],
        closed=datetime.now(),
        closed_by=user
    )]
    response = self.get(url_for('api.issues'))
    self.assert200(response)
    expected_length = len(open_issues + closed_issues)
    self.assertEqual(len(response.json['data']), expected_length)
def test_notify_org_issues(self):
    """Org members are notified about open issues only, with correct details."""
    recipient = UserFactory()
    membership = Member(user=recipient, role='editor')
    org = OrganizationFactory(members=[membership])
    dataset = DatasetFactory(organization=org)
    open_issues = {}
    for _ in range(3):
        reporter = UserFactory()
        opening = Message(content=faker.sentence(), posted_by=reporter)
        created = Issue.objects.create(subject=dataset, user=reporter,
                                       title=faker.sentence(),
                                       discussion=[opening])
        open_issues[created.id] = created
    # A closed issue must not produce a notification.
    reporter = UserFactory()
    opening = Message(content=faker.sentence(), posted_by=reporter)
    Issue.objects.create(subject=dataset, user=reporter,
                         title=faker.sentence(),
                         discussion=[opening],
                         closed=datetime.now(), closed_by=reporter)
    notifications = issues_notifications(recipient)
    self.assertEqual(len(notifications), len(open_issues))
    for dt, details in notifications:
        matching = open_issues[details['id']]
        self.assertEqual(details['title'], matching.title)
        self.assertEqual(details['subject']['id'], matching.subject.id)
        self.assertEqual(details['subject']['type'], 'dataset')
def process(self, item): '''Generate a random dataset from a fake identifier''' # Get or create a harvested dataset with this identifier. # Harvest metadata are already filled on creation. dataset = self.get_dataset(item.remote_id) # Here you comes your implementation. You should : # - fetch the remote dataset (if necessary) # - validate the fetched payload # - map its content to the dataset fields # - store extra significant data in the `extra` attribute # - map resources data dataset.title = faker.sentence() dataset.description = faker.text() dataset.tags = list(set(faker.words(nb=faker.pyint()))) # Resources for i in range(faker.pyint()): dataset.resources.append( Resource(title=faker.sentence(), description=faker.text(), url=faker.url(), filetype='remote', mime=faker.mime_type(category='text'), format=faker.file_extension(category='text'), filesize=faker.pyint())) return dataset
def test_list_issues(self):
    """The unfiltered issue listing returns both open and closed issues."""
    dataset = Dataset.objects.create(title='Test dataset')
    open_issues = []
    for i in range(3):
        user = UserFactory()
        message = Message(content=faker.sentence(), posted_by=user)
        issue = Issue.objects.create(subject=dataset, user=user,
                                     title='test issue {}'.format(i),
                                     discussion=[message])
        open_issues.append(issue)
    # A closed issue: the unfiltered listing DOES include it
    # (the expected length below counts open + closed).
    user = UserFactory()
    message = Message(content=faker.sentence(), posted_by=user)
    closed_issues = [
        Issue.objects.create(subject=dataset, user=user,
                             title='test issue {}'.format(i),
                             discussion=[message],
                             closed=datetime.now(), closed_by=user)
    ]
    response = self.get(url_for('api.issues'))
    self.assert200(response)
    expected_length = len(open_issues + closed_issues)
    self.assertEqual(len(response.json['data']), expected_length)
def ckan_url_is_a_string():
    """Build a CKAN payload whose `url` is an arbitrary (non-URL) string."""
    arbitrary_url = faker.sentence()
    payload = {
        'name': faker.unique_string(),
        'title': faker.sentence(),
        'notes': faker.paragraph(),
        'resources': [{'url': faker.unique_url()}],
        'url': arbitrary_url,
    }
    # Second element: the expected extracted values.
    return payload, {'url': arbitrary_url}
def test_new_issue_mail(self):
    """A new issue triggers exactly one mail, addressed to the subject owner."""
    reporter = UserFactory()
    owner = UserFactory()
    opening = Message(content=faker.sentence(), posted_by=reporter)
    issue = Issue.objects.create(subject=DatasetFactory(owner=owner),
                                 user=reporter,
                                 title=faker.sentence(),
                                 discussion=[opening])
    with capture_mails() as mails:
        notify_new_issue(issue)
    self.assertEqual(len(mails), 1)
    self.assertEqual(mails[0].recipients[0], owner.email)
def empty_extras():
    """Build a CKAN payload whose extras only hold empty-ish values."""
    blank_extras = [
        {'key': 'none', 'value': None},
        {'key': 'blank', 'value': ''},
        {'key': 'spaces', 'value': ' '},
    ]
    return {
        'name': faker.unique_string(),
        'title': faker.sentence(),
        'notes': faker.paragraph(),
        'resources': [{'url': faker.unique_url()}],
        'extras': blank_extras,
    }
def resource_factory():
    """Build a realistic CKAN resource payload with randomized identifiers."""
    payload = {
        "resource_group_id": str(uuid4()),
        "cache_last_updated": None,
        "revision_timestamp": "2013-10-01T15:59:56.322481",
        "webstore_last_updated": "2013-10-01T17:59:56.238951",
        "id": str(uuid4()),
        "size": "1375",
        "state": "active",
        "hash": "689afc083c6316259955f499580bdf41bfc5e495",
        "description": faker.paragraph(),
        "format": "CSV",
        "tracking_summary": {"total": 0, "recent": 0},
        "mimetype_inner": None,
        "mimetype": "text/csv",
        "cache_url": None,
        "name": faker.sentence(),
        "created": "2013-08-01T09:43:09.031465",
        "url": faker.url(),
        "webstore_url": "active",
        "last_modified": "2013-10-01T17:59:55.552785",
        "position": 0,
        "revision_id": str(uuid4()),
        "resource_type": "file.upload",
    }
    return payload
def test_update(self):
    """PUT on a single resource updates its fields, extras and publish date."""
    resource = ResourceFactory()
    self.dataset.resources.append(resource)
    self.dataset.save()
    now = datetime.now()
    payload = {
        'title': faker.sentence(),
        'description': faker.text(),
        'url': faker.url(),
        'published': now.isoformat(),
        'extras': {'extra:id': 'id'},
    }
    with self.api_user():
        response = self.put(
            url_for('api.resource',
                    dataset=self.dataset,
                    rid=str(resource.id)),
            payload)
    self.assert200(response)
    self.dataset.reload()
    self.assertEqual(len(self.dataset.resources), 1)
    updated = self.dataset.resources[0]
    self.assertEqual(updated.title, payload['title'])
    self.assertEqual(updated.description, payload['description'])
    self.assertEqual(updated.url, payload['url'])
    self.assertEqual(updated.extras, {'extra:id': 'id'})
    self.assertEqualDates(updated.published, now)
def test_update_job(self):
    """PUT on a job updates its description while keeping its crontab schedule."""
    @job('a-job')
    def test_job():
        pass

    task = PeriodicTask.objects.create(
        name=faker.name(),
        description=faker.sentence(),
        task='a-job',
        crontab=PeriodicTask.Crontab(minute='5'))
    self.login(AdminFactory())
    payload = {
        'name': task.name,
        'description': 'New description',
        'task': task.task,
        'crontab': task.crontab.to_json(),
    }
    response = self.put(url_for('api.job', id=task.id), payload)
    self.assert200(response)
    body = response.json
    self.assertEqual(body['id'], str(task.id))
    self.assertEqual(body['name'], task.name)
    self.assertEqual(body['task'], task.task)
    self.assertEqual(body['description'], 'New description')
    # Still crontab-scheduled: no interval must appear.
    self.assertIsNotNone(body['crontab'])
    self.assertIsNone(body['interval'])
def test_update_job_change_type(self):
    """Submitting an interval converts a crontab-scheduled job to interval-scheduled."""
    @job('a-job')
    def test_job():
        pass

    task = PeriodicTask.objects.create(
        name=faker.name(),
        description=faker.sentence(),
        task='a-job',
        crontab=PeriodicTask.Crontab(minute='5'))
    self.login(AdminFactory())
    payload = {
        'name': task.name,
        'description': task.description,
        'task': task.task,
        'interval': {'every': 5, 'period': 'minutes'},
    }
    response = self.put(url_for('api.job', id=task.id), payload)
    self.assert200(response)
    body = response.json
    self.assertEqual(body['id'], str(task.id))
    self.assertEqual(body['name'], task.name)
    self.assertEqual(body['task'], task.task)
    self.assertEqual(body['description'], task.description)
    self.assertEqual(body['interval']['every'], 5)
    self.assertEqual(body['interval']['period'], 'minutes')
    # The previous crontab schedule must be cleared.
    self.assertIsNone(body['crontab'])
def test_prioritize_title_over_alternate_title(self):
    """License.guess must prefer an exact title match over an alternate title."""
    shared_title = faker.sentence()
    expected = LicenseFactory(title=shared_title)
    # A competing license carrying the same string as an alternate title.
    LicenseFactory(alternate_titles=[shared_title])

    guessed = License.guess(shared_title)

    assert isinstance(guessed, License)
    assert guessed.id == expected.id
def feed(self, feed_title, title, content, url, published=None, summary=None,
         enclosure=None, media_thumbnail=None):
    """Build an RSS feed string with a single entry.

    Optional parts (summary, enclosure, media thumbnail) are only added
    when the corresponding argument is provided.
    """
    generator = FeedGenerator()
    generator.title(feed_title)
    generator.description(faker.sentence())
    generator.link({'href': WP_FEED_URL})

    entry = generator.add_entry()
    entry.title(title)
    entry.link({'href': url})
    entry.author(name=faker.name())
    entry.content(content, type="cdata")
    if summary:
        entry.description(summary)
    if enclosure:
        entry.enclosure(url=enclosure['url'],
                        type=enclosure['type'],
                        length=str(faker.pyint()))
    if media_thumbnail:
        # The media extension is required for media:thumbnail elements.
        generator.load_extension('media')
        entry.media.thumbnail({'url': media_thumbnail})

    tz = pytz.timezone(faker.timezone())
    if published is None:
        published = faker.date_time(tzinfo=tz)
    entry.published(published)
    entry.updated(faker.date_time_between(start_date=published, tzinfo=tz))
    return generator.rss_str().decode('utf8')
def test_prioritize_title_over_alternate_title(self):
    """Guessing by title returns the license titled that way, not the one
    merely listing it as an alternate title."""
    candidate = faker.sentence()
    titled = LicenseFactory(title=candidate)
    LicenseFactory(alternate_titles=[candidate])
    result = License.guess(candidate)
    assert isinstance(result, License)
    assert titled.id == result.id
def test_update_job_change_type(self):
    """Switching a job from crontab to interval scheduling via PUT."""
    @job('a-job')
    def test_job():
        pass

    task = PeriodicTask.objects.create(
        name=faker.name(),
        description=faker.sentence(),
        task='a-job',
        crontab=PeriodicTask.Crontab(minute='5')
    )
    self.login(AdminFactory())
    response = self.put(url_for('api.job', id=task.id), {
        'name': task.name,
        'description': task.description,
        'task': task.task,
        'interval': {
            'every': 5,
            'period': 'minutes',
        },
    })
    self.assert200(response)
    returned = response.json
    self.assertEqual(returned['id'], str(task.id))
    self.assertEqual(returned['name'], task.name)
    self.assertEqual(returned['task'], task.task)
    self.assertEqual(returned['description'], task.description)
    self.assertEqual(returned['interval']['every'], 5)
    self.assertEqual(returned['interval']['period'], 'minutes')
    # Crontab must be dropped once the interval takes over.
    self.assertIsNone(returned['crontab'])
def test_update_job(self):
    """Updating a job's description keeps the crontab schedule intact."""
    @job('a-job')
    def test_job():
        pass

    task = PeriodicTask.objects.create(
        name=faker.name(),
        description=faker.sentence(),
        task='a-job',
        crontab=PeriodicTask.Crontab(minute='5')
    )
    self.login(AdminFactory())
    response = self.put(url_for('api.job', id=task.id), {
        'name': task.name,
        'description': 'New description',
        'task': task.task,
        'crontab': task.crontab.to_json(),
    })
    self.assert200(response)
    returned = response.json
    self.assertEqual(returned['id'], str(task.id))
    self.assertEqual(returned['name'], task.name)
    self.assertEqual(returned['task'], task.task)
    self.assertEqual(returned['description'], 'New description')
    self.assertIsNotNone(returned['crontab'])
    self.assertIsNone(returned['interval'])
def feed(self, feed_title, title, content, url, published=None, summary=None,
         enclosure=None, media_thumbnail=None):
    """Serialize a one-entry RSS feed, wiring in the optional extras."""
    rss = FeedGenerator()
    rss.title(feed_title)
    rss.description(faker.sentence())
    rss.link({'href': WP_FEED_URL})
    item = rss.add_entry()
    item.title(title)
    item.link({'href': url})
    item.author(name=faker.name())
    item.content(content, type="cdata")
    if summary:
        item.description(summary)
    if enclosure:
        item.enclosure(url=enclosure['url'], type=enclosure['type'],
                       length=str(faker.pyint()))
    if media_thumbnail:
        # media:thumbnail needs feedgen's media extension loaded first.
        rss.load_extension('media')
        item.media.thumbnail({'url': media_thumbnail})
    tz = pytz.timezone(faker.timezone())
    published = published or faker.date_time(tzinfo=tz)
    item.published(published)
    # The update timestamp is always at or after publication.
    item.updated(faker.date_time_between(start_date=published, tzinfo=tz))
    return rss.rss_str().decode('utf8')
def test_request_transfer_user_to_organization(self):
    """A user can request transferring their dataset to an organization."""
    owner = UserFactory()
    dataset = VisibleDatasetFactory(owner=owner)
    target_org = OrganizationFactory()
    reason = faker.sentence()
    login_user(owner)
    self.assert_transfer_started(dataset, owner, target_org, reason)
def test_request_transfer_user_to_organization(self):
    """Transferring a user-owned dataset to an organization starts a transfer."""
    current_owner = UserFactory()
    dataset = VisibleDatasetFactory(owner=current_owner)
    destination = OrganizationFactory()
    justification = faker.sentence()
    login_user(current_owner)
    self.assert_transfer_started(
        dataset, current_owner, destination, justification)
def test_request_transfer_to_self(self):
    """Requesting a transfer to the current owner must be rejected."""
    owner = UserFactory()
    dataset = VisibleDatasetFactory(owner=owner)
    reason = faker.sentence()
    login_user(owner)
    with pytest.raises(ValueError):
        self.assert_transfer_started(dataset, owner, owner, reason)
def test_request_transfer_to_self(self):
    """Transferring a dataset to its own owner raises ValueError."""
    current_owner = UserFactory()
    dataset = VisibleDatasetFactory(owner=current_owner)
    justification = faker.sentence()
    login_user(current_owner)
    with pytest.raises(ValueError):
        self.assert_transfer_started(
            dataset, current_owner, current_owner, justification)
class TestDataset(TerritoryDataset):
    """Territory dataset stub with randomized metadata for tests."""
    order = 1
    id = faker.word()
    title = faker.sentence()
    # NOTE(review): `org` comes from an enclosing scope not visible here —
    # presumably an OrganizationFactory instance; verify at the definition site.
    organization_id = str(org.id)
    description = faker.paragraph()
    # Fixed coverage window used by assertions elsewhere.
    temporal_coverage = {'start': 2007, 'end': 2012}
    # `{code}` is substituted with the territory code at render time
    # (inferred from the placeholder name — confirm against TerritoryDataset).
    url_template = 'http://somehere.com/{code}'
def test_new_issue_mail(self):
    """Opening an issue sends a single notification mail to the dataset owner."""
    author = UserFactory()
    dataset_owner = UserFactory()
    first_message = Message(content=faker.sentence(), posted_by=author)
    issue = Issue.objects.create(
        subject=DatasetFactory(owner=dataset_owner),
        user=author,
        title=faker.sentence(),
        discussion=[first_message]
    )
    with capture_mails() as mails:
        notify_new_issue(issue)
    self.assertEqual(len(mails), 1)
    self.assertEqual(mails[0].recipients[0], dataset_owner.email)
def test_request_transfer_not_authorized_not_owner(self):
    """A user who does not own the dataset cannot request its transfer."""
    stranger = UserFactory()
    dataset = VisibleDatasetFactory(owner=UserFactory())
    target = UserFactory()
    reason = faker.sentence()
    login_user(stranger)
    with pytest.raises(PermissionDenied):
        request_transfer(dataset, target, reason)
def test_minimal_from_multiple(self):
    """When several dcat:Dataset nodes exist, the explicit node wins."""
    target = BNode()
    graph = Graph()
    expected_title = faker.sentence()
    graph.add((target, RDF.type, DCAT.Dataset))
    graph.add((target, DCT.title, Literal(expected_title)))
    # Add decoy datasets that must be ignored.
    for _ in range(3):
        decoy = BNode()
        graph.add((decoy, RDF.type, DCAT.Dataset))
        graph.add((decoy, DCT.title, Literal(faker.sentence())))

    dataset = dataset_from_rdf(graph, node=target)
    dataset.validate()

    assert isinstance(dataset, Dataset)
    assert dataset.title == expected_title
def all_metadata():
    """Build a fully-populated CKAN payload along with its resource data."""
    resource_data = {
        'name': faker.sentence(),
        'description': faker.paragraph(),
        'url': faker.unique_url(),
        'mimetype': faker.mime_type(),
        'format': faker.file_extension(),
    }
    payload = {
        'name': faker.unique_string(),
        'title': faker.sentence(),
        'notes': faker.paragraph(),
        'tags': [{'name': faker.unique_string()} for _ in range(3)],
        'resources': [resource_data],
    }
    # The second element exposes the resource for later assertions.
    return payload, {'resource_data': resource_data}
def test_minimal_from_multiple(self):
    """dataset_from_rdf picks the requested node among several datasets."""
    wanted = BNode()
    graph = Graph()
    wanted_title = faker.sentence()
    graph.add((wanted, RDF.type, DCAT.Dataset))
    graph.add((wanted, DCT.title, Literal(wanted_title)))
    for _ in range(3):
        # Extra dataset nodes with random titles — should be skipped.
        extra = BNode()
        graph.add((extra, RDF.type, DCAT.Dataset))
        graph.add((extra, DCT.title, Literal(faker.sentence())))
    result = dataset_from_rdf(graph, node=wanted)
    result.validate()
    assert isinstance(result, Dataset)
    assert result.title == wanted_title
def skipped_no_resources():
    """Build a CKAN payload that deliberately omits the resources key."""
    payload = {
        'name': faker.unique_string(),
        'title': faker.sentence(),
        'notes': faker.paragraph(),
        'tags': [{'name': faker.unique_string()} for _ in range(3)],
    }
    return payload
class FakeFactory(MongoEngineFactory):
    """Factory producing Fake documents with randomized metadata."""
    class Meta:
        model = Fake

    title = factory.LazyAttribute(lambda o: faker.sentence())
    description = factory.LazyAttribute(lambda o: faker.paragraph())
    # Fixed off-by-one: `range(1, randint(1, 4))` produced 0 to 2 tags
    # (empty when randint returned 1); `randint(1, 4)` clearly intends
    # 1 to 4 tags, so iterate exactly that many times.
    tags = factory.LazyAttribute(
        lambda o: [faker.word() for _ in range(randint(1, 4))])
    sub = factory.SubFactory(NestedFactory)
def test_request_transfer_not_authorized_not_owner(self):
    """Requesting a transfer of someone else's dataset raises PermissionDenied."""
    requester = UserFactory()
    someone_elses_dataset = VisibleDatasetFactory(owner=UserFactory())
    destination = UserFactory()
    justification = faker.sentence()
    login_user(requester)
    with pytest.raises(PermissionDenied):
        request_transfer(someone_elses_dataset, destination, justification)
def test_basic_blogpost(self, blogpost):
    """A minimal feed entry yields a post without any image metadata."""
    expected_title = faker.sentence()
    expected_url = faker.uri()
    zone = pytz.timezone(faker.timezone())
    published = faker.date_time(tzinfo=zone)
    body = faker.sentence()
    feed = self.feed('Some blog', expected_title,
                     '<div>{0}</div>'.format(body), expected_url,
                     published=published)

    post = blogpost(feed)

    assert post['title'] == expected_title
    assert post['link'] == expected_url
    assert post['summary'] == body
    assert_equal_dates(post['date'], published)
    # No image in the content means no thumbnail keys at all.
    assert 'image_url' not in post
    assert 'srcset' not in post
    assert 'sizes' not in post
def test_request_transfer_organization_to_user(self):
    """An org admin can request transferring an org dataset to a user."""
    admin = UserFactory()
    membership = Member(user=admin, role='admin')
    org = OrganizationFactory(members=[membership])
    dataset = VisibleDatasetFactory(owner=admin, organization=org)
    destination = UserFactory()
    reason = faker.sentence()
    login_user(admin)
    self.assert_transfer_started(dataset, org, destination, reason)
def test_issues_for_user_with_org(self):
    """An org editor sees the open issues on the org's datasets and reuses."""
    user = UserFactory()
    membership = Member(user=user, role='editor')
    org = OrganizationFactory(members=[membership])
    dataset = DatasetFactory(organization=org)
    reuse = ReuseFactory(organization=org)
    open_issues = []
    for _ in range(3):
        sender = UserFactory()
        # The same opening message is shared by both issues on purpose.
        message = Message(content=faker.sentence(), posted_by=sender)
        for subject in (dataset, reuse):
            open_issues.append(Issue.objects.create(
                subject=subject,
                user=sender,
                title=faker.sentence(),
                discussion=[message]
            ))
    # A closed issue must be excluded from the default listing.
    other_user = UserFactory()
    message = Message(content=faker.sentence(), posted_by=other_user)
    Issue.objects.create(
        subject=dataset,
        user=other_user,
        title=faker.sentence(),
        discussion=[message],
        closed=datetime.now(),
        closed_by=user
    )
    issues = issues_for(user)
    self.assertIsInstance(issues, db.BaseQuerySet)
    self.assertEqual(len(issues), len(open_issues))
    for issue in issues:
        self.assertIn(issue, open_issues)
def test_request_transfer_to_same_organization(self):
    """Transferring an org dataset back to the same org must be rejected."""
    admin = UserFactory()
    membership = Member(user=admin, role='admin')
    org = OrganizationFactory(members=[membership])
    dataset = VisibleDatasetFactory(owner=admin, organization=org)
    reason = faker.sentence()
    login_user(admin)
    with pytest.raises(ValueError):
        self.assert_transfer_started(dataset, org, org, reason)
def hit_factory():
    """Build a randomized Elasticsearch-style hit document."""
    source = {
        "title": faker.sentence(),
        "tags": [faker.word() for _ in range(faker.random_digit())],
    }
    return {
        "_score": float(faker.random_number(2)),
        "_type": "fake",
        "_id": faker.md5(),
        "_source": source,
        "_index": "udata-test",
    }
def test_request_transfer_not_authorized_not_admin(self):
    """An org editor (non-admin) cannot request a transfer of an org dataset."""
    editor = UserFactory()
    membership = Member(user=editor, role='editor')
    org = OrganizationFactory(members=[membership])
    dataset = VisibleDatasetFactory(organization=org)
    destination = UserFactory()
    reason = faker.sentence()
    login_user(editor)
    with pytest.raises(PermissionDenied):
        request_transfer(dataset, destination, reason)
def test_blogpost_with_first_image_as_thumbnail(self, blogpost):
    """Without an explicit summary, the first content image is the thumbnail
    and the remaining text becomes the summary."""
    expected_title = faker.sentence()
    expected_url = faker.uri()
    thumbnail = faker.image_url()
    remaining_text = faker.sentence()
    zone = pytz.timezone(faker.timezone())
    published = faker.date_time(tzinfo=zone)
    body = '<p><img class="whatever" src="{0}" /> {1}</p>'.format(
        thumbnail, remaining_text)
    feed = self.feed('Some blog', expected_title, body, expected_url,
                     published=published)

    post = blogpost(feed)

    assert post['title'] == expected_title
    assert post['link'] == expected_url
    assert post['summary'] == remaining_text
    assert_equal_dates(post['date'], published)
    assert post['image_url'] == thumbnail
    assert 'srcset' not in post
    assert 'sizes' not in post
def test_blogpost_with_thumbnail_as_enclosure(self, blogpost, mime):
    """The enclosure URL becomes the post image for each supported mime type."""
    heading = faker.sentence()
    link = faker.uri()
    picture = faker.image_url()
    zone = pytz.timezone(faker.timezone())
    date_published = faker.date_time(tzinfo=zone)
    text = faker.sentence()
    markup = '<div>{0}</div>'.format(text)
    attachment = {'type': mime, 'url': picture}
    feed = self.feed('Some blog', heading, markup, link,
                     published=date_published, enclosure=attachment)
    post = blogpost(feed)
    assert post['title'] == heading
    assert post['link'] == link
    assert post['summary'] == text
    assert_equal_dates(post['date'], date_published)
    assert post['image_url'] == picture
    assert 'srcset' not in post
    assert 'sizes' not in post
def test_update_from_multiple(self):
    """When the graph holds several datasets, only the targeted node is used."""
    original = DatasetFactory()
    node = URIRef('https://test.org/dataset')
    graph = Graph()
    new_title = faker.sentence()
    graph.add((node, RDF.type, DCAT.Dataset))
    graph.add((node, DCT.title, Literal(new_title)))
    # Decoy datasets in the same graph that the update must ignore.
    for _ in range(3):
        decoy = BNode()
        graph.add((decoy, RDF.type, DCAT.Dataset))
        graph.add((decoy, DCT.title, Literal(faker.sentence())))
    dataset = dataset_from_rdf(graph, dataset=original, node=node)
    dataset.validate()
    assert isinstance(dataset, Dataset)
    assert dataset.id == original.id
    assert dataset.title == new_title
def test_list_issues_closed_filter(self):
    """The `closed` query parameter splits results into open vs closed issues."""
    dataset = Dataset.objects.create(title='Test dataset')
    open_issues = []
    closed_issues = []
    for index in range(2):
        author = UserFactory()
        msg = Message(content=faker.sentence(), posted_by=author)
        open_issues.append(Issue.objects.create(
            subject=dataset,
            user=author,
            title='test issue {}'.format(index),
            discussion=[msg],
        ))
    for index in range(3):
        author = UserFactory()
        msg = Message(content=faker.sentence(), posted_by=author)
        closed_issues.append(Issue.objects.create(
            subject=dataset,
            user=author,
            title='test issue {}'.format(index),
            discussion=[msg],
            closed=datetime.now(),
            closed_by=author,
        ))
    # closed=True returns only the closed issues.
    response = self.get(url_for('api.issues', closed=True))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(closed_issues))
    for issue in response.json['data']:
        self.assertIsNotNone(issue['closed'])
    # closed=False returns only the open issues.
    response = self.get(url_for('api.issues', id=dataset.id, closed=False))
    self.assert200(response)
    self.assertEqual(len(response.json['data']), len(open_issues))
    for issue in response.json['data']:
        self.assertIsNone(issue['closed'])
def test_minimal(self):
    """A graph with just a typed node and a title parses into a valid Dataset."""
    title = faker.sentence()
    graph = Graph()
    node = BNode()
    graph.add((node, RDF.type, DCAT.Dataset))
    graph.add((node, DCT.title, Literal(title)))
    dataset = dataset_from_rdf(graph)
    dataset.validate()
    assert isinstance(dataset, Dataset)
    assert dataset.title == title
def test_notify_org_issues(self):
    """Org members are notified of open issues on org datasets, not closed ones."""
    recipient = UserFactory()
    organization = OrganizationFactory(
        members=[Member(user=recipient, role='editor')])
    dataset = DatasetFactory(organization=organization)
    open_issues = {}
    for _ in range(3):
        author = UserFactory()
        msg = Message(content=faker.sentence(), posted_by=author)
        issue = Issue.objects.create(
            subject=dataset,
            user=author,
            title=faker.sentence(),
            discussion=[msg],
        )
        open_issues[issue.id] = issue
    # A closed issue must not produce a notification.
    author = UserFactory()
    msg = Message(content=faker.sentence(), posted_by=author)
    Issue.objects.create(
        subject=dataset,
        user=author,
        title=faker.sentence(),
        discussion=[msg],
        closed=datetime.now(),
        closed_by=author,
    )
    notifications = issues_notifications(recipient)
    self.assertEqual(len(notifications), len(open_issues))
    for dt, details in notifications:
        issue = open_issues[details['id']]
        self.assertEqual(details['title'], issue.title)
        self.assertEqual(details['subject']['id'], issue.subject.id)
        self.assertEqual(details['subject']['type'], 'dataset')
def test_html_description(self):
    """HTML markup in dct:description is stripped down to plain text."""
    node = BNode()
    graph = Graph()
    graph.add((node, RDF.type, DCAT.Dataset))
    graph.add((node, DCT.identifier, Literal(faker.uuid4())))
    graph.add((node, DCT.title, Literal(faker.sentence())))
    graph.add((node, DCT.description, Literal('<div>a description</div>')))
    dataset = dataset_from_rdf(graph)
    dataset.validate()
    assert isinstance(dataset, Dataset)
    assert dataset.description == 'a description'
def test_new_discussion_comment_mail(self):
    """A new comment mails every participant except the comment's author."""
    owner = UserFactory()
    poster = UserFactory()
    commenter = UserFactory()
    first_message = Message(content=faker.sentence(), posted_by=poster)
    new_message = Message(content=faker.sentence(), posted_by=commenter)
    discussion = Discussion.objects.create(
        subject=DatasetFactory(owner=owner),
        user=poster,
        title=faker.sentence(),
        discussion=[first_message, new_message],
    )
    with capture_mails() as mails:
        notify_new_discussion_comment(discussion, message=new_message)
    # One mail each for the owner and the original poster; none for the commenter.
    expected_recipients = (owner.email, poster.email)
    self.assertEqual(len(mails), len(expected_recipients))
    for mail in mails:
        self.assertIn(mail.recipients[0], expected_recipients)
        self.assertNotIn(commenter.email, mail.recipients)
def test_issues_for_user_with_closed(self):
    """With only_open=False, issues_for includes closed issues in the results."""
    owner = UserFactory()
    dataset = DatasetFactory(owner=owner)
    reuse = ReuseFactory(owner=owner)
    open_issues = []
    for _ in range(3):
        author = UserFactory()
        msg = Message(content=faker.sentence(), posted_by=author)
        open_issues.append(Issue.objects.create(
            subject=dataset,
            user=author,
            title=faker.sentence(),
            discussion=[msg],
        ))
        open_issues.append(Issue.objects.create(
            subject=reuse,
            user=author,
            title=faker.sentence(),
            discussion=[msg],
        ))
    # A closed issue: counted as well, since only_open=False below.
    author = UserFactory()
    msg = Message(content=faker.sentence(), posted_by=author)
    Issue.objects.create(
        subject=dataset,
        user=author,
        title=faker.sentence(),
        discussion=[msg],
        closed=datetime.now(),
        closed_by=author,
    )
    issues = issues_for(owner, only_open=False)
    self.assertEqual(len(issues), len(open_issues) + 1)