def test_publish_message_resource_modified(self):
    '''Updating a resource should publish a MODIFIED message to Kafka.'''
    producer_mock = Mock()
    # Patch the singleton accessor so the test can observe producer calls.
    # NOTE(review): this is not restored afterwards — presumably fine for
    # this suite, but mock.patch would give better isolation; verify.
    KafkaProducerSingleton.get_instance = lambda: producer_mock
    resource = ResourceFactory()
    dataset = DatasetFactory(resources=[resource])

    resource.description = 'New description'
    with assert_emit(Dataset.on_resource_updated):
        dataset.update_resource(resource)

    producer = KafkaProducerSingleton.get_instance()
    message_type = f'resource.{KafkaMessageType.MODIFIED.value}'
    expected_value = {
        'service': 'udata',
        'data': serialize_resource_for_event(resource),
        'meta': {
            'message_type': message_type,
            'dataset_id': str(dataset.id)
        }
    }
    expected_topic = f"{current_app.config['UDATA_INSTANCE_NAME']}.{message_type}"
    producer.send.assert_called_with(
        expected_topic,
        value=expected_value,
        key=str(resource.id).encode("utf-8"))
def test_get_specific(self):
    '''Should fetch serialized resource from the API based on rid'''
    resources = [ResourceFactory() for _ in range(7)]
    specific_resource = ResourceFactory(
        id='817204ac-2202-8b4a-98e7-4284d154d10c',
        title='my-resource')
    resources.append(specific_resource)
    dataset = DatasetFactory(resources=resources)

    # Known rid: the resource is returned along with its dataset id.
    response = self.get(url_for('apiv2.resource', rid=specific_resource.id))
    self.assert200(response)
    payload = response.json
    assert payload['dataset_id'] == str(dataset.id)
    assert payload['resource']['id'] == str(specific_resource.id)
    assert payload['resource']['title'] == specific_resource.title

    # Unknown rid: 404.
    response = self.get(
        url_for('apiv2.resource', rid='111111ac-1111-1b1a-11e1-1111d111d11c'))
    self.assert404(response)

    # Community resources are reachable too, with no dataset attached.
    com_resource = CommunityResourceFactory()
    response = self.get(url_for('apiv2.resource', rid=com_resource.id))
    self.assert200(response)
    payload = response.json
    assert payload['dataset_id'] is None
    assert payload['resource']['id'] == str(com_resource.id)
    assert payload['resource']['title'] == com_resource.title
def two_datasets_one_resource_url(app):
    '''Fixture: two datasets whose single resources share the same URL.

    The second resource gets a `latest` download hit as well.
    Returns ((dataset_1, dataset_2), (resource_1, resource_2)).
    '''
    first_resource = ResourceFactory(url='http://udata.world')
    second_resource = ResourceFactory(url='http://udata.world')
    first_dataset = DatasetFactory(resources=[first_resource])
    second_dataset = DatasetFactory(resources=[second_resource])
    download(first_resource)
    download(second_resource, latest=True)
    return (first_dataset, second_dataset), (first_resource, second_resource)
def test_default_allow_remote_preview():
    '''Both file-backed and remote resources expose a preview URL by default.'''
    candidates = (
        ResourceFactory(mime=MIME_TYPE),
        ResourceFactory(filetype='remote', mime=MIME_TYPE),
    )
    for resource in candidates:
        assert resource.preview_url == expected_url(resource.url)
def test_ignore_post_save_signal(self):
    '''Saving with ignores=['post_save'] suppresses dataset-level signals.

    post_save itself must still fire on the resource save.
    '''
    resource = ResourceFactory()
    # Keep a reference so the dataset is not garbage collected during the
    # test (same precaution as the sibling version of this test).
    dataset = DatasetFactory(resources=[resource])  # noqa: F841
    unexpected_signals = Dataset.after_save, Dataset.on_update
    with assert_not_emit(*unexpected_signals), assert_emit(post_save):
        resource.title = 'New title'
        resource.save(signal_kwargs={'ignores': ['post_save']})
def test_ignore_post_save_signal(self):
    '''Dataset signals must not fire when post_save is ignored on save.'''
    resource = ResourceFactory()
    # assigning to a variable to avoid garbage collection issue
    dataset = DatasetFactory(resources=[resource])  # noqa: F841
    with assert_not_emit(Dataset.after_save, Dataset.on_update), \
            assert_emit(post_save):
        resource.title = 'New title'
        resource.save(signal_kwargs={'ignores': ['post_save']})
def test_render_profile_datasets(self):
    '''It should render the user profile datasets page'''
    user = UserFactory()
    owned_datasets = [
        DatasetFactory(owner=user, resources=[ResourceFactory()])
        for _ in range(3)
    ]
    # Datasets not owned by this user must not appear on the profile page.
    for _ in range(2):
        DatasetFactory(resources=[ResourceFactory()])

    response = self.get(url_for('users.datasets', user=user))
    self.assert200(response)
    rendered_datasets = self.get_context_variable('datasets')
    self.assertEqual(len(rendered_datasets), len(owned_datasets))
def test_create_2nd(self):
    '''Posting a resource to a dataset that already has one appends it.'''
    self.dataset.resources.append(ResourceFactory())
    self.dataset.save()
    payload = ResourceFactory.as_dict()
    with self.api_user():
        response = self.post(
            url_for('api.resources', dataset=self.dataset), payload)
    self.assert201(response)
    self.dataset.reload()
    self.assertEqual(len(self.dataset.resources), 2)
def test_base_modals_dataset_w_schemas(self):
    '''Each resource schema should yield its own modal in the rendering.'''
    dataset = DatasetFactory(resources=[
        ResourceFactory(schema='etalab/irve'),
        ResourceFactory(schema='etalab/covoiturage'),
    ])
    rendered = render_base_modals(dataset=dataset)
    assert 'etalab/irve' in rendered
    assert 'etalab/covoiturage' in rendered
    # One modal per schema.
    assert rendered.count('</modal>') == 2
def test_update_resource(self):
    '''update_resource modifies the resource in place and fires save signals.'''
    owner = UserFactory()
    resource = ResourceFactory()
    dataset = DatasetFactory(owner=owner, resources=[resource])

    resource.description = 'New description'
    with assert_emit(post_save, Dataset.after_save, Dataset.on_update):
        dataset.update_resource(resource)

    self.assertEqual(len(dataset.resources), 1)
    updated = dataset.resources[0]
    self.assertEqual(updated.id, resource.id)
    self.assertEqual(updated.description, 'New description')
def test_update_resource(self):
    '''update_resource emits on_resource_updated and keeps a single resource.'''
    owner = UserFactory()
    resource = ResourceFactory()
    dataset = DatasetFactory(owner=owner, resources=[resource])

    resource.description = 'New description'
    with assert_emit(Dataset.on_resource_updated):
        dataset.update_resource(resource)

    assert len(dataset.resources) == 1
    updated = dataset.resources[0]
    assert updated.id == resource.id
    assert updated.description == 'New description'
def test_update_resource(self):
    '''Updating a resource through the dataset fires the save signal chain.'''
    owner = UserFactory()
    resource = ResourceFactory()
    dataset = DatasetFactory(owner=owner, resources=[resource])

    resource.description = 'New description'
    with assert_emit(post_save, Dataset.after_save, Dataset.on_update):
        dataset.update_resource(resource)

    assert len(dataset.resources) == 1
    assert dataset.resources[0].id == resource.id
    assert dataset.resources[0].description == 'New description'
def test_add_resource_without_checksum(self):
    '''Resources without a checksum can be added; newest comes first.'''
    owner = UserFactory()
    dataset = DatasetFactory(owner=owner)
    resource = ResourceFactory(checksum=None)
    signals = post_save, Dataset.after_save, Dataset.on_update

    with assert_emit(*signals):
        dataset.add_resource(ResourceFactory(checksum=None))
    self.assertEqual(len(dataset.resources), 1)

    with assert_emit(*signals):
        dataset.add_resource(resource)
    self.assertEqual(len(dataset.resources), 2)
    # The most recently added resource is placed at the head of the list.
    self.assertEqual(dataset.resources[0].id, resource.id)
def test_add_resource_without_checksum(self):
    '''Adding checksum-less resources works and prepends the newest one.'''
    owner = UserFactory()
    dataset = DatasetFactory(owner=owner)
    resource = ResourceFactory(checksum=None)
    signals = post_save, Dataset.after_save, Dataset.on_update

    with assert_emit(*signals):
        dataset.add_resource(ResourceFactory(checksum=None))
    assert len(dataset.resources) == 1

    with assert_emit(*signals):
        dataset.add_resource(resource)
    assert len(dataset.resources) == 2
    # Latest addition is first.
    assert dataset.resources[0].id == resource.id
def test_add_resource(self):
    '''add_resource prepends the resource and emits on_resource_added.'''
    owner = UserFactory()
    dataset = DatasetFactory(owner=owner)
    resource = ResourceFactory()

    with assert_emit(Dataset.on_resource_added):
        dataset.add_resource(ResourceFactory())
    assert len(dataset.resources) == 1

    with assert_emit(Dataset.on_resource_added):
        dataset.add_resource(resource)
    assert len(dataset.resources) == 2
    # Newest resource first, with a back-reference to its dataset.
    assert dataset.resources[0].id == resource.id
    assert dataset.resources[0].dataset == dataset
def test_get_dataset(self):
    '''The dataset payload exposes paginated subsection links.'''
    resources = [ResourceFactory() for _ in range(2)]
    dataset = DatasetFactory(resources=resources)

    response = self.get(url_for('apiv2.dataset', dataset=dataset))
    self.assert200(response)
    payload = response.json

    resources_link = payload['resources']
    assert resources_link['rel'] == 'subsection'
    assert resources_link['href'] == url_for(
        'apiv2.resources', dataset=dataset.id, page=1,
        page_size=DEFAULT_PAGE_SIZE, _external=True)
    assert resources_link['type'] == 'GET'
    assert resources_link['total'] == len(resources)

    community_link = payload['community_resources']
    assert community_link['rel'] == 'subsection'
    assert community_link['href'] == url_for(
        'api.community_resources', dataset=dataset.id, page=1,
        page_size=DEFAULT_PAGE_SIZE, _external=True)
    assert community_link['type'] == 'GET'
    assert community_link['total'] == 0
def test_update(self):
    '''PUT on a resource should update every writable field.'''
    resource = ResourceFactory()
    self.dataset.resources.append(resource)
    self.dataset.save()
    now = datetime.now()
    payload = {
        'title': faker.sentence(),
        'description': faker.text(),
        'url': faker.url(),
        'published': now.isoformat(),
        'extras': {
            'extra:id': 'id',
        }
    }
    with self.api_user():
        response = self.put(
            url_for('api.resource', dataset=self.dataset,
                    rid=str(resource.id)),
            payload)
    self.assert200(response)
    self.dataset.reload()
    self.assertEqual(len(self.dataset.resources), 1)
    updated = self.dataset.resources[0]
    self.assertEqual(updated.title, payload['title'])
    self.assertEqual(updated.description, payload['description'])
    self.assertEqual(updated.url, payload['url'])
    self.assertEqual(updated.extras, {'extra:id': 'id'})
    self.assertEqualDates(updated.published, now)
def test_fallback_to_default_locale():
    '''The preview URL falls back to the French locale when none is set.'''
    resource = ResourceFactory(extras={'geop:resource_id': 'RID'})
    DatasetFactory(resources=[resource], extras={'geop:dataset_id': 'DID'})
    expected = 'https://geo.data.gouv.fr/embed/datasets/DID/resources/RID?lang=fr'  # noqa
    assert resource.preview_url == expected
def test_all_dataset_fields(self):
    '''Every dataset field should be mapped to its RDF counterpart.'''
    resources = ResourceFactory.build_batch(3)
    dataset = DatasetFactory(tags=faker.words(nb=3), resources=resources,
                             frequency='daily')

    d = dataset_to_rdf(dataset)
    g = d.graph

    self.assertIsInstance(d, RdfResource)
    self.assertEqual(len(list(g.subjects(RDF.type, DCAT.Dataset))), 1)
    self.assertEqual(g.value(d.identifier, RDF.type), DCAT.Dataset)
    self.assertIsInstance(d.identifier, URIRef)
    expected_uri = url_for('datasets.show_redirect', dataset=dataset.id,
                           _external=True)
    self.assertEqual(str(d.identifier), expected_uri)
    self.assertEqual(d.value(DCT.identifier), Literal(dataset.id))
    self.assertEqual(d.value(DCT.title), Literal(dataset.title))
    self.assertEqual(d.value(DCT.description), Literal(dataset.description))
    self.assertEqual(d.value(DCT.issued), Literal(dataset.created_at))
    self.assertEqual(d.value(DCT.modified), Literal(dataset.last_modified))
    self.assertEqual(d.value(DCT.accrualPeriodicity).identifier, FREQ.daily)
    expected_tags = set(Literal(tag) for tag in dataset.tags)
    self.assertEqual(set(d.objects(DCAT.keyword)), expected_tags)
    self.assertEqual(len(list(d.objects(DCAT.distribution))), len(resources))
def test_resource_latest_url(self):
    '''It should redirect to the real resource URL'''
    resource = ResourceFactory()
    DatasetFactory(resources=[resource])
    response = self.get(url_for('datasets.resource', id=resource.id))
    # Permanent-link endpoint answers with a redirect to the actual URL.
    self.assertStatus(response, 302)
    self.assertEqual(response.location, resource.url)
def test_reorder(self):
    '''Reordering resources must persist and not touch last_modified.'''
    # Register an extra field in order to test
    # https://github.com/opendatateam/udata/issues/1794
    ResourceMixin.extras.register('my:register', db.BooleanField)
    self.dataset.resources = ResourceFactory.build_batch(3)
    self.dataset.resources[0].extras = {
        'my:register': True,
    }
    self.dataset.save()
    self.dataset.reload()  # Otherwise `last_modified` date is inaccurate.
    initial_last_modified = self.dataset.last_modified

    initial_order = [resource.id for resource in self.dataset.resources]
    expected_order = [{'id': str(rid)} for rid in reversed(initial_order)]

    with self.api_user():
        response = self.put(url_for('api.resources', dataset=self.dataset),
                            expected_order)
    self.assertStatus(response, 200)
    self.assertEqual([str(r['id']) for r in response.json],
                     [str(r['id']) for r in expected_order])
    self.dataset.reload()
    self.assertEqual([str(r.id) for r in self.dataset.resources],
                     [str(r['id']) for r in expected_order])
    # Reordering is not a content modification.
    self.assertEqual(self.dataset.last_modified, initial_last_modified)
def test_suggest_tags_api(self):
    '''It should suggest tags'''
    with self.autoindex():
        for i in range(3):
            tags = [
                faker.word(),
                faker.word(),
                'test',
                'test-{0}'.format(i)
            ]
            ReuseFactory(tags=tags, datasets=[DatasetFactory()])
            DatasetFactory(tags=tags, resources=[ResourceFactory()])

    response = self.get(url_for('api.suggest_tags'),
                        qs={'q': 'tes', 'size': '5'})
    self.assert200(response)

    suggestions = response.json
    self.assertLessEqual(len(suggestions), 5)
    self.assertGreater(len(suggestions), 1)
    # Exact match ranks first.
    self.assertEqual(suggestions[0]['text'], 'test')
    for suggestion in suggestions:
        self.assertIn('text', suggestion)
        self.assertIn('score', suggestion)
        self.assertTrue(suggestion['text'].startswith('test'))
def test_get_next_page(self):
    '''Should fetch 2 pages of resources from the API'''
    resources = [ResourceFactory() for _ in range(80)]
    dataset = DatasetFactory(resources=resources)

    # First page: full page plus a link to page 2, no previous page.
    response = self.get(
        url_for('apiv2.resources', dataset=dataset.id, page=1,
                page_size=DEFAULT_PAGE_SIZE))
    self.assert200(response)
    data = response.json
    assert len(data['data']) == DEFAULT_PAGE_SIZE
    assert data['total'] == len(resources)
    assert data['page'] == 1
    assert data['page_size'] == DEFAULT_PAGE_SIZE
    assert data['next_page'] == url_for(
        'apiv2.resources', dataset=dataset.id, page=2,
        page_size=DEFAULT_PAGE_SIZE, _external=True)
    assert data['previous_page'] is None

    # Second (last) page: remainder of the resources, no next page.
    response = self.get(data['next_page'])
    self.assert200(response)
    data = response.json
    assert len(data['data']) == len(resources) - DEFAULT_PAGE_SIZE
    assert data['total'] == len(resources)
    assert data['page'] == 2
    assert data['page_size'] == DEFAULT_PAGE_SIZE
    # Fixed: compare to None with `is`, not `==` (PEP 8 / E711).
    assert data['next_page'] is None
    assert data['previous_page'] == url_for(
        'apiv2.resources', dataset=dataset.id, page=1,
        page_size=DEFAULT_PAGE_SIZE, _external=True)
def test_datasets_csv(self):
    '''The CSV export lists visible datasets with the expected columns.'''
    with self.autoindex():
        datasets = [
            DatasetFactory(resources=[ResourceFactory()])
            for _ in range(5)
        ]
        hidden_dataset = DatasetFactory()

    response = self.get(url_for('site.datasets_csv'))
    self.assert200(response)
    self.assertEqual(response.mimetype, 'text/csv')
    self.assertEqual(response.charset, 'utf-8')

    csvfile = StringIO(response.data.decode('utf8'))
    reader = csv.get_reader(csvfile)
    header = next(reader)
    self.assertEqual(header[0], 'id')
    for column in ('title', 'description', 'created_at', 'last_modified',
                   'tags', 'metric.reuses'):
        self.assertIn(column, header)

    rows = list(reader)
    ids = [row[0] for row in rows]
    self.assertEqual(len(rows), len(datasets))
    for dataset in datasets:
        self.assertIn(str(dataset.id), ids)
    # A dataset without resources must be excluded from the export.
    self.assertNotIn(str(hidden_dataset.id), ids)
def test_purge_organizations(self):
    '''Purging removes the org, its logo, and dataset/index references.'''
    with self.autoindex():
        org = Organization.objects.create(name='delete me',
                                          description='XXX')
        resources = [ResourceFactory() for _ in range(2)]
        dataset = DatasetFactory(resources=resources, organization=org)

    # Upload organization's logo
    file = create_test_image()
    user = AdminFactory()
    self.login(user)
    response = self.post(
        url_for('api.organization_logo', org=org),
        {'file': (file, 'test.png')}, json=False)
    self.assert200(response)

    # Delete organization
    response = self.delete(url_for('api.organization', org=org))
    self.assert204(response)

    tasks.purge_organizations()

    # Check organization's logo is deleted
    self.assertEqual(list(storages.avatars.list_files()), [])
    dataset = Dataset.objects(id=dataset.id).first()
    self.assertIsNone(dataset.organization)
    organization = Organization.objects(name='delete me').first()
    self.assertIsNone(organization)
    indexed_dataset = DatasetSearch.get(id=dataset.id, using=es.client,
                                        index=es.index_name)
    self.assertIsNone(indexed_dataset.organization)
def test_render_list_with_query(self):
    '''It should render the dataset list page with a query string'''
    with self.autoindex():
        datasets = [DatasetFactory(resources=[ResourceFactory()])
                    for i in range(3)]
        expected_dataset = DatasetFactory(
            title='test for query', resources=[ResourceFactory()])
        datasets.append(expected_dataset)

    response = self.get(url_for('datasets.list'),
                        qs={'q': 'test for query'})
    self.assert200(response)
    rendered_datasets = self.get_context_variable('datasets')
    # Only the dataset matching the query should be rendered.
    self.assertEqual(len(rendered_datasets), 1)
    self.assertEqual(rendered_datasets[0].id, expected_dataset.id)
def test_all_dataset_fields(self):
    '''Every dataset field (incl. acronym) maps to its RDF counterpart.'''
    resources = ResourceFactory.build_batch(3)
    dataset = DatasetFactory(tags=faker.words(nb=3), resources=resources,
                             frequency='daily', acronym='acro')

    d = dataset_to_rdf(dataset)
    g = d.graph

    assert isinstance(d, RdfResource)
    # Fixed: `is 1` compared identity with an int literal (SyntaxWarning on
    # modern CPython, implementation-dependent); use equality instead.
    assert len(list(g.subjects(RDF.type, DCAT.Dataset))) == 1
    assert g.value(d.identifier, RDF.type) == DCAT.Dataset
    assert isinstance(d.identifier, URIRef)
    uri = url_for('datasets.show_redirect', dataset=dataset.id,
                  _external=True)
    assert str(d.identifier) == uri
    assert d.value(DCT.identifier) == Literal(dataset.id)
    assert d.value(DCT.title) == Literal(dataset.title)
    assert d.value(SKOS.altLabel) == Literal(dataset.acronym)
    assert d.value(DCT.description) == Literal(dataset.description)
    assert d.value(DCT.issued) == Literal(dataset.created_at)
    assert d.value(DCT.modified) == Literal(dataset.last_modified)
    assert d.value(DCT.accrualPeriodicity).identifier == FREQ.daily
    expected_tags = set(Literal(t) for t in dataset.tags)
    assert set(d.objects(DCAT.keyword)) == expected_tags
    assert len(list(d.objects(DCAT.distribution))) == len(resources)
def test_all_resource_fields(self):
    '''Every resource field should be mapped to its RDF counterpart.'''
    license = LicenseFactory()
    resource = ResourceFactory(format='csv')
    dataset = DatasetFactory(resources=[resource], license=license)
    permalink = url_for('datasets.resource', id=resource.id, _external=True)

    r = resource_to_rdf(resource, dataset)

    assert r.value(DCT.title) == Literal(resource.title)
    assert r.value(DCT.description) == Literal(resource.description)
    assert r.value(DCT.issued) == Literal(resource.published)
    assert r.value(DCT.modified) == Literal(resource.modified)
    assert r.value(DCT.license).identifier == URIRef(license.url)
    assert r.value(DCT.rights) == Literal(license.title)
    assert r.value(DCAT.downloadURL).identifier == URIRef(resource.url)
    assert r.value(DCAT.accessURL).identifier == URIRef(permalink)
    assert r.value(DCAT.bytesSize) == Literal(resource.filesize)
    assert r.value(DCAT.mediaType) == Literal(resource.mime)
    assert r.value(DCT.term('format')) == Literal(resource.format)

    # The checksum is serialized as a SPDX Checksum node.
    checksum = r.value(SPDX.checksum)
    assert r.graph.value(checksum.identifier, RDF.type) == SPDX.Checksum
    assert (r.graph.value(checksum.identifier, SPDX.algorithm)
            == SPDX.checksumAlgorithm_sha1)
    assert (checksum.value(SPDX.checksumValue)
            == Literal(resource.checksum.value))
def test_suggest_datasets_api_unicode(self):
    '''It should suggest datasets with special characters'''
    with self.autoindex():
        for i in range(4):
            title = 'testé-{0}'.format(i) if i % 2 else faker.word()
            DatasetFactory(title=title, resources=[ResourceFactory()])

    response = self.get(url_for('api.suggest_datasets'),
                        qs={'q': 'testé', 'size': '5'})
    self.assert200(response)

    suggestions = response.json
    self.assertLessEqual(len(suggestions), 5)
    self.assertGreater(len(suggestions), 1)
    for suggestion in suggestions:
        for key in ('id', 'title', 'slug', 'score', 'image_url'):
            self.assertIn(key, suggestion)
        self.assertTrue(suggestion['title'].startswith('test'))
def test_delete_404(self):
    '''Deleting a resource that does not belong to the dataset should 404.'''
    with self.api_user():
        response = self.delete(
            url_for('api.resource', dataset=self.dataset,
                    rid=str(ResourceFactory().id)))
    self.assert404(response)
def test_bulk_update(self):
    '''Bulk PUT updates existing resources and creates entries without id.'''
    resources = ResourceFactory.build_batch(2)
    self.dataset.resources.extend(resources)
    self.dataset.save()
    now = datetime.now()
    ids = [resource.id for resource in self.dataset.resources]
    payload = [{
        'id': str(rid),
        'title': faker.sentence(),
        'description': faker.text(),
    } for rid in ids]
    # An entry without an id should create a brand new resource.
    payload.append({
        'title': faker.sentence(),
        'description': faker.text(),
        'url': faker.url(),
    })
    with self.api_user():
        response = self.put(url_for('api.resources', dataset=self.dataset),
                            payload)
    self.assert200(response)
    self.dataset.reload()
    self.assertEqual(len(self.dataset.resources), 3)
    for index, rid in enumerate(ids):
        resource = self.dataset.resources[index]
        rdata = payload[index]
        self.assertEqual(str(resource.id), rdata['id'])
        self.assertEqual(resource.title, rdata['title'])
        self.assertEqual(resource.description, rdata['description'])
        self.assertIsNotNone(resource.url)
    new_resource = self.dataset.resources[-1]
    self.assertEqualDates(new_resource.published, now)
def test_match_existing_resource_by_url(self):
    '''A distribution with a known downloadURL updates that resource.'''
    dataset = DatasetFactory(resources=ResourceFactory.build_batch(3))
    existing_resource = dataset.resources[1]

    graph = Graph()
    node = BNode()
    new_title = faker.sentence()
    graph.add((node, RDF.type, DCAT.Distribution))
    graph.add((node, DCT.title, Literal(new_title)))
    graph.add((node, DCAT.downloadURL, Literal(existing_resource.url)))

    resource = resource_from_rdf(graph, dataset)
    resource.validate()

    assert isinstance(resource, Resource)
    assert resource.title == new_title
    # Matched by URL: the existing resource is updated, not duplicated.
    assert resource.id == existing_resource.id
class LinkcheckerTest(TestCase):
    '''Tests for `check_resource` and `Resource.need_check`.

    Uses the deprecated-free `assertEqual` (the `assertEquals` alias was
    removed in Python 3.12).
    '''
    settings = LinkcheckerTestSettings

    def setUp(self):
        # One dataset holding the single resource under test.
        self.resource = ResourceFactory()
        self.dataset = DatasetFactory(resources=[self.resource])

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_check_resource_no_linkchecker(self, mock_fn):
        mock_fn.return_value = None
        res = check_resource(self.resource)
        self.assertEqual(res, ({'error': 'No linkchecker configured.'}, 503))

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_check_resource_linkchecker_ok(self, mock_fn):
        check_res = {'check:status': 200, 'check:available': True,
                     'check:date': datetime.now()}

        class DummyLinkchecker:
            def check(self, _):
                return check_res

        mock_fn.return_value = DummyLinkchecker
        res = check_resource(self.resource)
        self.assertEqual(res, check_res)
        # The availability counter is added when storing in extras.
        check_res.update({'check:count-availability': 1})
        self.assertEqual(self.resource.extras, check_res)

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_check_resource_filter_result(self, mock_fn):
        check_res = {'check:status': 200, 'dummy': 'dummy'}

        class DummyLinkchecker:
            def check(self, _):
                return check_res

        mock_fn.return_value = DummyLinkchecker
        res = check_resource(self.resource)
        self.assertEqual(res, check_res)
        # Non `check:`-prefixed keys are not persisted in the extras.
        self.assertNotIn('dummy', self.resource.extras)

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_check_resource_linkchecker_no_status(self, mock_fn):
        class DummyLinkchecker:
            def check(self, _):
                return {'check:available': True}

        mock_fn.return_value = DummyLinkchecker
        res = check_resource(self.resource)
        self.assertEqual(
            res, ({'error': 'No status in response from linkchecker'}, 503))

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_check_resource_linkchecker_check_error(self, mock_fn):
        class DummyLinkchecker:
            def check(self, _):
                return {'check:error': 'ERROR'}

        mock_fn.return_value = DummyLinkchecker
        res = check_resource(self.resource)
        self.assertEqual(res, ({'error': 'ERROR'}, 500))

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_check_resource_linkchecker_in_resource(self, mock_fn):
        # A checker declared on the resource overrides the default one.
        self.resource.extras['check:checker'] = 'another_linkchecker'
        self.resource.save()
        check_resource(self.resource)
        args, kwargs = mock_fn.call_args
        self.assertEqual(args, ('another_linkchecker', ))

    def test_check_resource_linkchecker_no_check(self):
        self.resource.extras['check:checker'] = 'no_check'
        self.resource.save()
        res = check_resource(self.resource)
        self.assertEqual(res.get('check:status'), 204)
        self.assertEqual(res.get('check:available'), True)

    def test_check_resource_ignored_domain(self):
        self.resource.extras = {}
        self.resource.url = 'http://example-ignore.com/url'
        self.resource.save()
        res = check_resource(self.resource)
        self.assertEqual(res.get('check:status'), 204)
        self.assertEqual(res.get('check:available'), True)

    def test_is_need_check(self):
        self.resource.extras = {'check:available': True,
                                'check:date': datetime.now(),
                                'check:status': 42}
        self.assertFalse(self.resource.need_check())

    def test_is_need_check_unknown_status(self):
        self.resource.extras = {}
        self.assertTrue(self.resource.need_check())

    def test_is_need_check_cache_expired(self):
        self.resource.extras = {
            'check:available': True,
            'check:date': datetime.now() - timedelta(seconds=3600),
            'check:status': 42
        }
        self.assertTrue(self.resource.need_check())

    def test_is_need_check_date_string(self):
        check_date = (datetime.now() - timedelta(seconds=3600)).isoformat()
        self.resource.extras = {
            'check:available': True,
            'check:date': check_date,
            'check:status': 42
        }
        self.assertTrue(self.resource.need_check())

    def test_is_need_check_wrong_check_date(self):
        check_date = '123azerty'
        self.resource.extras = {
            'check:available': True,
            'check:date': check_date,
            'check:status': 42
        }
        self.assertTrue(self.resource.need_check())

    def test_is_need_check_wrong_check_date_int(self):
        check_date = 42
        self.resource.extras = {
            'check:available': True,
            'check:date': check_date,
            'check:status': 42
        }
        self.assertTrue(self.resource.need_check())

    def test_is_need_check_count_availability(self):
        self.resource.extras = {
            # should need a new check after 100 * 30s = 3000s < 3600s
            'check:count-availability': 100,
            'check:available': True,
            'check:date': datetime.now() - timedelta(seconds=3600),
            'check:status': 42
        }
        self.assertTrue(self.resource.need_check())

    def test_is_need_check_count_availability_expired(self):
        self.resource.extras = {
            # should need a new check after 150 * 30s = 4500s > 3600s
            'check:count-availability': 150,
            'check:available': True,
            'check:date': datetime.now() - timedelta(seconds=3600),
            'check:status': 42
        }
        self.assertFalse(self.resource.need_check())

    def test_is_need_check_count_availability_unavailable(self):
        self.resource.extras = {
            # should need a new check after 30s < 3600S
            # count-availability is below threshold
            'check:count-availability': 95,
            'check:available': False,
            'check:date': datetime.now() - timedelta(seconds=3600),
            'check:status': 42
        }
        self.assertTrue(self.resource.need_check())

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_count_availability_increment(self, mock_fn):
        check_res = {'check:status': 200, 'check:available': True,
                     'check:date': datetime.now()}

        class DummyLinkchecker:
            def check(self, _):
                return check_res

        mock_fn.return_value = DummyLinkchecker
        check_resource(self.resource)
        self.assertEqual(self.resource.extras['check:count-availability'], 1)
        check_resource(self.resource)
        self.assertEqual(self.resource.extras['check:count-availability'], 2)

    @mock.patch('udata.linkchecker.checker.get_linkchecker')
    def test_count_availability_reset(self, mock_fn):
        self.resource.extras = {'check:status': 200,
                                'check:available': True,
                                'check:date': datetime.now(),
                                'check:count-availability': 2}
        check_res = {'check:status': 200, 'check:available': False,
                     'check:date': datetime.now()}

        class DummyLinkchecker:
            def check(self, _):
                return check_res

        mock_fn.return_value = DummyLinkchecker
        check_resource(self.resource)
        # Availability flipped, so the counter restarts at 1.
        self.assertEqual(self.resource.extras['check:count-availability'], 1)

    def test_count_availability_threshold(self):
        self.resource.extras = {
            'check:status': 404,
            'check:available': False,
            # if it weren't above threshold, should need check (>30s)
            # and we're still below max_cache 101 * 0.5 < 100
            'check:date': datetime.now() - timedelta(seconds=60),
            'check:count-availability': 101
        }
        self.assertFalse(self.resource.need_check())

    def test_count_availability_max_cache_duration(self):
        self.resource.extras = {
            'check:status': 200,
            'check:available': True,
            # next check should be at 300 * 0.5 = 150min
            # but we are above max cache duration 150min > 100min
            # and 120m > 100 min so we should need a new check
            'check:date': datetime.now() - timedelta(minutes=120),
            'check:count-availability': 300
        }
        self.assertTrue(self.resource.need_check())
def setUp(self):
    '''Provision one resource wrapped in a fresh dataset for each test.'''
    resource = ResourceFactory()
    self.resource = resource
    self.dataset = DatasetFactory(resources=[resource])