def test_regions_with_other_datasets_and_pertinent_ones(self):
    """An org admin sees pertinent datasets flagged on the region page."""
    admin = self.login()
    with self.autoindex():
        membership = Member(user=admin, role='admin')
        org = OrganizationFactory(members=[membership])
        coverage = SpatialCoverageFactory(zones=[self.paca.id])
        VisibleDatasetFactory.create_batch(3, organization=org,
                                           spatial=coverage)
    territory_url = url_for('territories.territory', territory=self.paca)
    response = self.client.get(territory_url)
    self.assert200(response)
    page = response.data.decode('utf-8')
    self.assertIn(self.paca.name, page)
    self.assertEqual(len(self.get_context_variable('base_datasets')), 0)
    other_datasets = self.get_context_variable('other_datasets')
    self.assertEqual(len(other_datasets), 3)
    for dataset in other_datasets:
        # Every related dataset is embedded as a udata widget.
        marker = '<div data-udata-dataset-id="{dataset.id}"'.format(
            dataset=dataset)
        self.assertIn(marker, page)
    self.assertTrue(self.get_context_variable('has_pertinent_datasets'))
    self.assertEqual(self.get_context_variable('territory_datasets'), [])
    self.assertIn('Some of your datasets have an exact match!', page)
def test_pagination(self):
    """The API site catalog exposes HYDRA pagination over two pages."""
    site = SiteFactory()
    page_size, total = 3, 4
    uri = url_for('api.site_rdf_catalog', _external=True)
    uri_first = url_for('api.site_rdf_catalog_format', format='json',
                        page=1, page_size=page_size, _external=True)
    uri_last = url_for('api.site_rdf_catalog_format', format='json',
                       page=2, page_size=page_size, _external=True)
    VisibleDatasetFactory.create_batch(total)

    def catalog_for(page_number):
        # Build the catalog resource for one page of datasets.
        page = Dataset.objects.paginate(page_number, page_size)
        return build_catalog(site, page, format='json')

    def check_page(catalog, expected_count):
        # Assertions common to every page; returns the pagination view.
        assert isinstance(catalog, Resource)
        assert catalog.identifier == URIRef(uri)
        type_ids = [o.identifier for o in catalog.objects(RDF.type)]
        assert DCAT.Catalog in type_ids
        assert HYDRA.Collection in type_ids
        assert catalog.value(HYDRA.totalItems) == Literal(total)
        assert len(list(catalog.objects(DCAT.dataset))) == expected_count
        views = list(
            catalog.graph.subjects(RDF.type, HYDRA.PartialCollectionView))
        assert len(views) == 1
        return catalog.graph.resource(views[0])

    # First page: full page of datasets, no previous link.
    pagination = check_page(catalog_for(1), page_size)
    assert pagination.identifier == URIRef(uri_first)
    assert pagination.value(HYDRA.first).identifier == URIRef(uri_first)
    assert pagination.value(HYDRA.next).identifier == URIRef(uri_last)
    assert pagination.value(HYDRA.last).identifier == URIRef(uri_last)
    assert HYDRA.previous not in pagination

    # Second page: the single remaining dataset, no next link.
    pagination = check_page(catalog_for(2), total - page_size)
    assert pagination.identifier == URIRef(uri_last)
    assert pagination.value(HYDRA.first).identifier == URIRef(uri_first)
    assert pagination.value(HYDRA.previous).identifier == URIRef(uri_first)
    assert pagination.value(HYDRA.last).identifier == URIRef(uri_last)
    assert HYDRA.next not in pagination
def test_catalog_pagination(self):
    """An organization catalog exposes HYDRA pagination metadata."""
    origin_org = OrganizationFactory()
    page_size, total = 3, 4
    uri = url_for('api.organization_rdf', org=origin_org.id,
                  _external=True)
    uri_first = url_for('api.organization_rdf_format', org=origin_org.id,
                        format='json', page=1, page_size=page_size,
                        _external=True)
    uri_last = url_for('api.organization_rdf_format', org=origin_org.id,
                       format='json', page=2, page_size=page_size,
                       _external=True)
    VisibleDatasetFactory.create_batch(total, organization=origin_org)

    def catalog_for(page_number):
        # Build the org catalog for one page of its datasets.
        page = Dataset.objects.paginate(page_number, page_size)
        return build_org_catalog(origin_org, page, format='json')

    def check_page(catalog, expected_count):
        # Assertions common to both pages; returns the pagination view.
        self.assertIsInstance(catalog, RdfResource)
        self.assertEqual(catalog.identifier, URIRef(uri))
        type_ids = [o.identifier for o in catalog.objects(RDF.type)]
        self.assertIn(DCAT.Catalog, type_ids)
        self.assertIn(HYDRA.Collection, type_ids)
        self.assertEqual(catalog.value(HYDRA.totalItems), Literal(total))
        self.assertEqual(len(list(catalog.objects(DCAT.dataset))),
                         expected_count)
        views = list(catalog.graph.subjects(RDF.type,
                                            HYDRA.PartialCollectionView))
        self.assertEqual(len(views), 1)
        return catalog.graph.resource(views[0])

    # First page: full page, no previous link.
    pagination = check_page(catalog_for(1), page_size)
    self.assertEqual(pagination.identifier, URIRef(uri_first))
    self.assertEqual(pagination.value(HYDRA.first).identifier,
                     URIRef(uri_first))
    self.assertEqual(pagination.value(HYDRA.next).identifier,
                     URIRef(uri_last))
    self.assertEqual(pagination.value(HYDRA.last).identifier,
                     URIRef(uri_last))
    self.assertNotIn(HYDRA.previous, pagination)

    # Second page: single remaining dataset, no next link.
    pagination = check_page(catalog_for(2), total - page_size)
    self.assertEqual(pagination.identifier, URIRef(uri_last))
    self.assertEqual(pagination.value(HYDRA.first).identifier,
                     URIRef(uri_first))
    self.assertEqual(pagination.value(HYDRA.previous).identifier,
                     URIRef(uri_first))
    self.assertEqual(pagination.value(HYDRA.last).identifier,
                     URIRef(uri_last))
    self.assertNotIn(HYDRA.next, pagination)
def test_pagination(self):
    """The site catalog paginates via HYDRA partial collection views."""
    site = SiteFactory()
    page_size, total = 3, 4
    uri = url_for('site.rdf_catalog', _external=True)
    uri_first = url_for('site.rdf_catalog_format', format='json',
                        page=1, page_size=page_size, _external=True)
    uri_last = url_for('site.rdf_catalog_format', format='json',
                       page=2, page_size=page_size, _external=True)
    VisibleDatasetFactory.create_batch(total)

    # (page number, datasets on page, own uri, previous uri, next uri)
    expectations = [
        (1, page_size, uri_first, None, uri_last),
        (2, total - page_size, uri_last, uri_first, None),
    ]
    for number, count, own_uri, prev_uri, next_uri in expectations:
        page = Dataset.objects.paginate(number, page_size)
        catalog = build_catalog(site, page, format='json')
        graph = catalog.graph
        assert isinstance(catalog, Resource)
        assert catalog.identifier == URIRef(uri)
        type_ids = [o.identifier for o in catalog.objects(RDF.type)]
        assert DCAT.Catalog in type_ids
        assert HYDRA.Collection in type_ids
        assert catalog.value(HYDRA.totalItems) == Literal(total)
        assert len(list(catalog.objects(DCAT.dataset))) == count
        views = list(graph.subjects(RDF.type, HYDRA.PartialCollectionView))
        assert len(views) == 1
        pagination = graph.resource(views[0])
        assert pagination.identifier == URIRef(own_uri)
        assert pagination.value(HYDRA.first).identifier == URIRef(uri_first)
        assert pagination.value(HYDRA.last).identifier == URIRef(uri_last)
        if next_uri is None:
            # Last page never links forward.
            assert HYDRA.next not in pagination
        else:
            assert pagination.value(HYDRA.next).identifier == \
                URIRef(next_uri)
        if prev_uri is None:
            # First page never links backward.
            assert HYDRA.previous not in pagination
        else:
            assert pagination.value(HYDRA.previous).identifier == \
                URIRef(prev_uri)
def test_empty_search_with_filter_and_match(self):
    '''Should match both the topic criteria and the query'''
    with self.autoindex():
        # Match both the topic condition and the queried tag
        match = VisibleDatasetFactory.create_batch(2, tags=[
            'in', 'filtered'
        ])
        # Match the topic condition but not the queried tag
        no_match = VisibleDatasetFactory.create_batch(2, tags=['in'])
        # Excluded because not matching any of the topic tags
        excluded = VisibleDatasetFactory.create_batch(2, tags=[
            'out', 'filtered'
        ])

    topic = TopicFactory(tags=['in', 'no-match'])

    query = topic_search_for(topic, DatasetSearch, tag='filtered')
    result = search.query(query)

    found = [d.id for d in result]

    self.assertEqual(len(found), 2)

    for dataset in match:
        self.assertIn(dataset.id, found)
    for dataset in no_match + excluded:
        self.assertNotIn(dataset.id, found)
def test_dataset_metric(self, app):
    """Only visible datasets are counted by the site dataset metric."""
    the_site = SiteFactory.create(id=app.config['SITE_ID'])
    VisibleDatasetFactory.create_batch(3)
    # These are not visible — presumably excluded from the count; the
    # assertion below only accounts for the 3 visible ones.
    DatasetFactory.create_batch(2)
    the_site.count_datasets()
    assert the_site.get_metrics()['datasets'] == 3
def test_empty_search_no_match(self):
    '''Should return no result if no data match the tags'''
    with self.autoindex():
        VisibleDatasetFactory.create_batch(2, tags=['whatever'])

    topic = TopicFactory(tags=['no-match'])
    result = search.query(topic_search_for(topic, DatasetSearch))

    self.assertEqual(len(result), 0)
def test_minimal(self, app):
    """A site catalog carries title, language, homepage and publisher."""
    site = SiteFactory()
    home_url = url_for('site.home_redirect', _external=True)
    catalog_uri = url_for('site.rdf_catalog', _external=True)
    datasets = VisibleDatasetFactory.create_batch(3)

    catalog = build_catalog(site, datasets)
    graph = catalog.graph

    assert isinstance(catalog, Resource)
    # Exactly one catalog node in the whole graph.
    assert len(list(graph.subjects(RDF.type, DCAT.Catalog))) == 1
    assert catalog.value(RDF.type).identifier == DCAT.Catalog
    assert isinstance(catalog.identifier, URIRef)
    assert str(catalog.identifier) == catalog_uri
    assert catalog.value(DCT.title) == Literal(site.title)
    assert catalog.value(DCT.language) == \
        Literal(app.config['DEFAULT_LANGUAGE'])
    assert len(list(catalog.objects(DCAT.dataset))) == len(datasets)
    assert catalog.value(FOAF.homepage).identifier == URIRef(home_url)
    publisher = catalog.value(DCT.publisher)
    assert publisher.value(RDF.type).identifier == FOAF.Organization
    assert publisher.value(FOAF.name) == Literal(app.config['SITE_AUTHOR'])
    # Every dataset shows up as a DCAT.Dataset node.
    assert len(list(graph.subjects(RDF.type, DCAT.Dataset))) == \
        len(datasets)
def test_minimal(self, app):
    """The generated catalog exposes the expected site-level metadata."""
    site = SiteFactory()
    datasets = VisibleDatasetFactory.create_batch(3)
    home_url = url_for('site.home_redirect', _external=True)
    expected_uri = url_for('site.rdf_catalog', _external=True)

    catalog = build_catalog(site, datasets)
    graph = catalog.graph

    assert isinstance(catalog, Resource)
    catalog_nodes = list(graph.subjects(RDF.type, DCAT.Catalog))
    assert len(catalog_nodes) == 1
    assert catalog.value(RDF.type).identifier == DCAT.Catalog
    assert isinstance(catalog.identifier, URIRef)
    assert str(catalog.identifier) == expected_uri
    assert catalog.value(DCT.title) == Literal(site.title)
    default_language = app.config['DEFAULT_LANGUAGE']
    assert catalog.value(DCT.language) == Literal(default_language)
    assert len(list(catalog.objects(DCAT.dataset))) == len(datasets)
    assert catalog.value(FOAF.homepage).identifier == URIRef(home_url)
    publisher = catalog.value(DCT.publisher)
    assert publisher.value(RDF.type).identifier == FOAF.Organization
    assert publisher.value(FOAF.name) == Literal(app.config['SITE_AUTHOR'])
    dataset_nodes = list(graph.subjects(RDF.type, DCAT.Dataset))
    assert len(dataset_nodes) == len(datasets)
def test_catalog(self):
    """An org catalog lists the org datasets with the org as publisher."""
    origin_org = OrganizationFactory()
    uri = url_for('organizations.rdf_catalog', org=origin_org.id,
                  _external=True)
    datasets = VisibleDatasetFactory.create_batch(
        3, organization=origin_org)

    catalog = build_org_catalog(origin_org, datasets)
    graph = catalog.graph

    self.assertIsInstance(catalog, RdfResource)
    # Exactly one catalog node in the whole graph.
    self.assertEqual(len(list(graph.subjects(RDF.type, DCAT.Catalog))), 1)
    self.assertEqual(catalog.value(RDF.type).identifier, DCAT.Catalog)
    self.assertIsInstance(catalog.identifier, URIRef)
    self.assertEqual(str(catalog.identifier), uri)
    self.assertEqual(len(list(catalog.objects(DCAT.dataset))),
                     len(datasets))
    publisher = catalog.value(DCT.publisher)
    self.assertEqual(publisher.value(RDF.type).identifier,
                     FOAF.Organization)
    self.assertEqual(publisher.value(FOAF.name), Literal(origin_org.name))
    self.assertEqual(len(list(graph.subjects(RDF.type, DCAT.Dataset))),
                     len(datasets))
def test_minimal(self):
    """The site catalog exposes title, language, homepage and publisher."""
    site = SiteFactory()
    home_url = url_for('site.home_redirect', _external=True)
    catalog_uri = url_for('site.rdf_catalog', _external=True)
    datasets = VisibleDatasetFactory.create_batch(3)

    catalog = build_catalog(site, datasets)
    graph = catalog.graph

    self.assertIsInstance(catalog, Resource)
    self.assertEqual(len(list(graph.subjects(RDF.type, DCAT.Catalog))), 1)
    self.assertEqual(catalog.value(RDF.type).identifier, DCAT.Catalog)
    self.assertIsInstance(catalog.identifier, URIRef)
    self.assertEqual(str(catalog.identifier), catalog_uri)
    self.assertEqual(catalog.value(DCT.title), Literal(site.title))
    self.assertEqual(catalog.value(DCT.language),
                     Literal(self.app.config['DEFAULT_LANGUAGE']))
    self.assertEqual(len(list(catalog.objects(DCAT.dataset))),
                     len(datasets))
    self.assertEqual(catalog.value(FOAF.homepage).identifier,
                     URIRef(home_url))
    publisher = catalog.value(DCT.publisher)
    self.assertEqual(publisher.value(RDF.type).identifier,
                     FOAF.Organization)
    self.assertEqual(publisher.value(FOAF.name),
                     Literal(current_app.config['SITE_AUTHOR']))
    self.assertEqual(len(list(graph.subjects(RDF.type, DCAT.Dataset))),
                     len(datasets))
def test_catalog_rdf_paginate(self, client):
    """The first N3 catalog page links forward but has no previous."""
    VisibleDatasetFactory.create_batch(4)
    url = url_for('api.site_rdf_catalog_format', format='n3', page_size=3)
    next_url = url_for('api.site_rdf_catalog_format', format='n3',
                       page=2, page_size=3, _external=True)

    response = client.get(url, headers={'Accept': 'text/n3'})
    assert200(response)

    graph = Graph().parse(data=response.data, format='n3')
    view = graph.value(predicate=RDF.type,
                       object=HYDRA.PartialCollectionView)
    assert view is not None
    view = graph.resource(view)
    assert not view.value(HYDRA.previous)
    assert view.value(HYDRA.next).identifier == URIRef(next_url)
def test_catalog_rdf_paginate(self, client):
    """The first page of the site N3 catalog only links to the next one."""
    VisibleDatasetFactory.create_batch(4)
    url = url_for('site.rdf_catalog_format', format='n3', page_size=3)
    next_url = url_for('site.rdf_catalog_format', format='n3',
                       page=2, page_size=3, _external=True)

    response = client.get(url)
    assert200(response)

    graph = Graph().parse(data=response.data, format='n3')
    view = graph.value(predicate=RDF.type,
                       object=HYDRA.PartialCollectionView)
    assert view is not None
    view = graph.resource(view)
    assert not view.value(HYDRA.previous)
    assert view.value(HYDRA.next).identifier == URIRef(next_url)
def test_no_duplicate(self):
    """Publishers shared by several datasets are emitted only once."""
    site = SiteFactory()
    org = OrganizationFactory()
    user = UserFactory()
    datasets = (VisibleDatasetFactory.create_batch(2, owner=user) +
                VisibleDatasetFactory.create_batch(2, organization=org))

    graph = build_catalog(site, datasets).graph

    orgs = list(graph.subjects(RDF.type, FOAF.Organization))
    # One dataset org plus the site publisher itself.
    self.assertEqual(len(orgs), 2)
    users = list(graph.subjects(RDF.type, FOAF.Person))
    self.assertEqual(len(users), 1)
    self.assertEqual(len(list(graph.objects(orgs[0], FOAF.name))), 1)
    self.assertEqual(len(list(graph.objects(users[0], FOAF.name))), 1)
def test_no_duplicate(self):
    """Each publisher (org or person) appears exactly once in the graph."""
    site = SiteFactory()
    org = OrganizationFactory()
    user = UserFactory()
    datasets = (VisibleDatasetFactory.create_batch(2, owner=user) +
                VisibleDatasetFactory.create_batch(2, organization=org))

    graph = build_catalog(site, datasets).graph

    org_nodes = list(graph.subjects(RDF.type, FOAF.Organization))
    # One dataset org plus the site publisher itself.
    assert len(org_nodes) == 2
    person_nodes = list(graph.subjects(RDF.type, FOAF.Person))
    assert len(person_nodes) == 1
    assert len(list(graph.objects(org_nodes[0], FOAF.name))) == 1
    assert len(list(graph.objects(person_nodes[0], FOAF.name))) == 1
def test_empty_search_with_match(self):
    '''Should only return data with at least one tag'''
    with self.autoindex():
        included = VisibleDatasetFactory.create_batch(2, tags=['in'])
        excluded = VisibleDatasetFactory.create_batch(2, tags=['out'])

    topic = TopicFactory(tags=['in', 'no-match'])
    query = topic_search_for(topic, DatasetSearch)
    found = [dataset.id for dataset in search.query(query)]

    self.assertEqual(len(found), 2)
    for dataset in included:
        self.assertIn(dataset.id, found)
    for dataset in excluded:
        self.assertNotIn(dataset.id, found)
def test_regions_with_other_datasets_logged_in(self):
    """A logged-in visitor sees the call to action on related datasets."""
    self.login()
    with self.autoindex():
        org = OrganizationFactory()
        VisibleDatasetFactory.create_batch(
            3, organization=org,
            spatial=SpatialCoverageFactory(zones=[self.paca.id]))

    response = self.client.get(
        url_for('territories.territory', territory=self.paca))
    self.assert200(response)
    page = response.data.decode('utf-8')

    self.assertEqual(len(self.get_context_variable('base_datasets')), 0)
    self.assertEqual(len(self.get_context_variable('other_datasets')), 3)
    self.assertEqual(self.get_context_variable('territory_datasets'), [])
    self.assertIn('If you want your datasets to appear in that list', page)
def test_datasets_within_sitemap(self, sitemap):
    '''Every visible dataset should be listed in the sitemap.'''
    datasets = VisibleDatasetFactory.create_batch(3)
    sitemap.fetch()
    for dataset in datasets:
        entry = sitemap.get_by_url('datasets.show_redirect',
                                   dataset=dataset)
        assert entry is not None
        # Priority 0.8, weekly change frequency.
        sitemap.assert_url(entry, 0.8, 'weekly')
def test_datasets_within_sitemap(self):
    '''Every visible dataset should be listed in the sitemap.'''
    datasets = VisibleDatasetFactory.create_batch(3)
    self.get_sitemap_tree()
    for dataset in datasets:
        entry = self.get_by_url('datasets.show_redirect', dataset=dataset)
        self.assertIsNotNone(entry)
        # Priority 0.8, weekly change frequency.
        self.assert_url(entry, 0.8, 'weekly')
def test_datasets_within_sitemap(self, sitemap):
    '''It should list each visible dataset in the sitemap.'''
    created = VisibleDatasetFactory.create_batch(3)
    sitemap.fetch()
    for dataset in created:
        url = sitemap.get_by_url('datasets.show_redirect', dataset=dataset)
        assert url is not None
        # Expected priority and change frequency for dataset pages.
        sitemap.assert_url(url, 0.8, 'weekly')
def test_with_old_region_datasets(self):
    """Datasets of merged (old) regions show up on the new region page.

    Creates datasets covering both the old region (``lr``) and the new
    one (``occitanie``) and checks both context variables on the new
    region's page.
    """
    lr, occitanie = create_old_new_regions_fixtures()
    with self.autoindex():
        for region in [lr, occitanie]:
            organization = OrganizationFactory(zone=region.id)
            VisibleDatasetFactory.create_batch(
                3, organization=organization,
                spatial=SpatialCoverageFactory(zones=[region.id]))

    response = self.client.get(
        url_for('territories.territory', territory=occitanie))
    self.assert200(response)
    data = response.data.decode('utf-8')
    self.assertIn(occitanie.name, data)

    base_datasets = self.get_context_variable('base_datasets')
    self.assertEqual(len(base_datasets), 0)
    territory_datasets = self.get_context_variable('territory_datasets')
    self.assertEqual(len(territory_datasets), 3)
    # Fix: the previous version rebound `territory_datasets` to the
    # `other_datasets` context variable, which was misleading.
    other_datasets = self.get_context_variable('other_datasets')
    self.assertEqual(len(other_datasets), 3)
def test_catalog_rdf_paginate(self):
    """The first N3 catalog page links forward but has no previous."""
    VisibleDatasetFactory.create_batch(4)
    url = url_for('site.rdf_catalog_format', format='n3', page_size=3)
    next_url = url_for('site.rdf_catalog_format', format='n3',
                       page=2, page_size=3, _external=True)

    response = self.get(url)
    self.assert200(response)

    graph = Graph().parse(data=response.data, format='n3')
    view = graph.value(predicate=RDF.type,
                       object=HYDRA.PartialCollectionView)
    self.assertIsNotNone(view)
    view = graph.resource(view)
    self.assertFalse(view.value(HYDRA.previous))
    self.assertEqual(view.value(HYDRA.next).identifier, URIRef(next_url))
def test_get_home_datasets(self):
    """The home datasets endpoint returns the configured datasets."""
    homepage_datasets = VisibleDatasetFactory.create_batch(3)
    site = SiteFactory.create(
        id=self.app.config['SITE_ID'],
        settings__home_datasets=homepage_datasets)
    current_site.reload()
    self.login(AdminFactory())

    response = self.get(url_for('api.home_datasets'))

    self.assert200(response)
    self.assertEqual(len(response.json), len(site.settings.home_datasets))
def test_set_home_datasets(self):
    """PUTting dataset ids persists them as the site home datasets."""
    ids = [dataset.id
           for dataset in VisibleDatasetFactory.create_batch(3)]
    self.login(AdminFactory())

    response = self.put(url_for('api.home_datasets'), ids)

    self.assert200(response)
    self.assertEqual(len(response.json), len(ids))
    site = Site.objects.get(id=self.app.config['SITE_ID'])
    stored_ids = [dataset.id for dataset in site.settings.home_datasets]
    self.assertEqual(stored_ids, ids)
def test_counties_with_other_datasets(self):
    """County pages list related datasets with a call to action."""
    with self.autoindex():
        org = OrganizationFactory()
        VisibleDatasetFactory.create_batch(
            3, organization=org,
            spatial=SpatialCoverageFactory(zones=[self.bdr.id]))

    response = self.client.get(
        url_for('territories.territory', territory=self.bdr))
    self.assert200(response)
    page = response.data.decode('utf-8')
    self.assertIn(self.bdr.name, page)

    self.assertEqual(len(self.get_context_variable('base_datasets')), 0)
    other_datasets = self.get_context_variable('other_datasets')
    self.assertEqual(len(other_datasets), 3)
    for dataset in other_datasets:
        # Every related dataset is embedded as a udata widget.
        marker = '<div data-udata-dataset-id="{dataset.id}"'.format(
            dataset=dataset)
        self.assertIn(marker, page)
    self.assertEqual(self.get_context_variable('territory_datasets'), [])
    self.assertIn('You want to add your own datasets to that list?', page)
def test_with_region_datasets(self):
    """Datasets of the region's own org appear as territory datasets."""
    with self.autoindex():
        org = OrganizationFactory(zone=self.paca.id)
        VisibleDatasetFactory.create_batch(
            3, organization=org,
            spatial=SpatialCoverageFactory(zones=[self.paca.id]))

    response = self.client.get(
        url_for('territories.territory', territory=self.paca))
    self.assert200(response)
    page = response.data.decode('utf-8')
    self.assertIn(self.paca.name, page)

    self.assertEqual(len(self.get_context_variable('base_datasets')), 0)
    territory_datasets = self.get_context_variable('territory_datasets')
    self.assertEqual(len(territory_datasets), 3)
    for dataset in territory_datasets:
        # Every territory dataset is embedded as a udata widget.
        marker = '<div data-udata-dataset-id="{dataset.id}"'.format(
            dataset=dataset)
        self.assertIn(marker, page)
    self.assertEqual(self.get_context_variable('other_datasets'), [])
    # No call-to-action expected when the org covers the territory.
    self.assertNotIn('dataset-item--cta', page)
def test_pending_transfer_request_for_user(self):
    """Pending transfer requests notify the recipient, not the requester."""
    owner = UserFactory()
    datasets = VisibleDatasetFactory.create_batch(2, owner=owner)
    recipient = UserFactory()
    message = faker.sentence()

    login_user(owner)
    transfers = {}
    for dataset in datasets:
        transfer = request_transfer(dataset, recipient, message)
        transfers[transfer.id] = transfer

    # The requester gets no notification.
    assert len(transfer_request_notifications(owner)) == 0
    notifications = transfer_request_notifications(recipient)
    assert len(notifications) == len(datasets)
    for _, details in notifications:
        transfer = transfers[details['id']]
        assert details['subject']['class'] == 'dataset'
        assert details['subject']['id'] == transfer.subject.id
def test_pending_transfer_request_for_user(self):
    """Only the transfer recipient receives pending-request notifications."""
    requester = UserFactory()
    owned = VisibleDatasetFactory.create_batch(2, owner=requester)
    recipient = UserFactory()
    note = faker.sentence()

    login_user(requester)
    by_id = {}
    for dataset in owned:
        transfer = request_transfer(dataset, recipient, note)
        by_id[transfer.id] = transfer

    # No self-notification for the requester.
    assert len(transfer_request_notifications(requester)) == 0
    notifications = transfer_request_notifications(recipient)
    assert len(notifications) == len(owned)
    for _, details in notifications:
        transfer = by_id[details['id']]
        assert details['subject']['class'] == 'dataset'
        assert details['subject']['id'] == transfer.subject.id
def test_pending_transfer_request_for_org(self):
    """Org members are notified of transfers requested to their org."""
    requester = UserFactory()
    datasets = VisibleDatasetFactory.create_batch(2, owner=requester)
    recipient = UserFactory()
    org = OrganizationFactory(
        members=[Member(user=recipient, role='editor')])
    message = faker.sentence()

    login_user(requester)
    transfers = {}
    for dataset in datasets:
        transfer = request_transfer(dataset, org, message)
        transfers[transfer.id] = transfer

    # The requester gets no notification.
    self.assertEqual(len(transfer_request_notifications(requester)), 0)
    notifications = transfer_request_notifications(recipient)
    self.assertEqual(len(notifications), len(datasets))
    for _, details in notifications:
        transfer = transfers[details['id']]
        self.assertEqual(details['subject']['class'], 'dataset')
        self.assertEqual(details['subject']['id'], transfer.subject.id)