def test_dataset_metric(self, app):
    """Only visible datasets should be counted in the site metric."""
    the_site = SiteFactory.create(id=app.config['SITE_ID'])
    # Three visible datasets and two hidden ones.
    VisibleDatasetFactory.create_batch(3)
    DatasetFactory.create_batch(2)
    the_site.count_datasets()
    metrics = the_site.get_metrics()
    assert metrics['datasets'] == 3
def test_resources_metric(self, app):
    """The resources metric should total resources across all datasets."""
    the_site = SiteFactory.create(id=app.config['SITE_ID'])
    DatasetFactory.create_batch(3, nb_resources=3)
    the_site.count_datasets()
    the_site.count_resources()
    # 3 datasets with 3 resources each.
    assert the_site.get_metrics()['resources'] == 3 * 3
def test_list_org_datasets_with_size(self, api):
    """Should list organization datasets honoring the page size."""
    org = OrganizationFactory()
    DatasetFactory.create_batch(3, organization=org)
    response = api.get(url_for('api.org_datasets', org=org),
                       qs={'page_size': 2})
    assert200(response)
    # Use `==` for value comparison: the original `is 2` checks object
    # identity and only passed by accident of CPython's small-int caching.
    assert len(response.json['data']) == 2
def test_list_org_datasets_hide_private(self, api):
    """Private datasets must not be listed for non-members."""
    org = OrganizationFactory()
    public_datasets = DatasetFactory.create_batch(3, organization=org)
    # These private ones should be filtered out of the listing.
    DatasetFactory.create_batch(2, organization=org, private=True)
    response = api.get(url_for('api.org_datasets', org=org))
    assert200(response)
    expected = len(public_datasets)
    assert len(response.json['data']) == expected
def test_list_org_datasets_with_size(self):
    """Should list organization datasets limited by the size parameter."""
    organization = OrganizationFactory()
    DatasetFactory.create_batch(3, organization=organization)
    url = url_for('api.org_datasets', org=organization)
    response = self.get(url, qs={'size': 2})
    self.assert200(response)
    self.assertEqual(len(response.json), 2)
def test_attach(self):
    """Attaching should bind every dataset to its remote id without errors."""
    datasets = DatasetFactory.create_batch(3)
    # Open in text mode: Python 3's csv module requires a str (not bytes)
    # delimiter/quotechar and a text-mode file object.
    with NamedTemporaryFile(mode='w') as csvfile:
        writer = csv.DictWriter(csvfile,
                                fieldnames=['local', 'remote'],
                                delimiter=';',
                                quotechar='"')
        writer.writeheader()
        for index, dataset in enumerate(datasets):
            writer.writerow({
                'local': str(dataset.id),
                'remote': str(index)
            })
        # Flush before attach reads the file back by name.
        csvfile.flush()

        result = actions.attach('test.org', csvfile.name)

        assert result.success == len(datasets)
        assert result.errors == 0

        for index, dataset in enumerate(datasets):
            dataset.reload()
            assert dataset.extras['harvest:domain'] == 'test.org'
            assert dataset.extras['harvest:remote_id'] == str(index)
def test_attach_skip_not_found(self):
    """Unknown local ids are counted as errors instead of aborting."""
    datasets = DatasetFactory.create_batch(3)
    with NamedTemporaryFile(mode='w') as csvfile:
        fields = ['local', 'remote']
        writer = csv.DictWriter(csvfile, fieldnames=fields,
                                delimiter=';', quotechar='"')
        writer.writeheader()
        # One row pointing at a dataset that does not exist.
        writer.writerow({'local': 'not-found', 'remote': '42'})
        for position, dataset in enumerate(datasets):
            writer.writerow({'local': str(dataset.id),
                             'remote': str(position)})
        # Make the rows visible to the reader before attaching.
        csvfile.flush()

        result = actions.attach('test.org', csvfile.name)

        assert result.success == len(datasets)
        assert result.errors == 1
def test_attach_skip_not_found(self):
    """Unknown local ids should be counted as errors, not aborted on."""
    datasets = DatasetFactory.create_batch(3)
    # Open in text mode with str delimiter/quotechar: Python 3's csv
    # module rejects bytes parameters and binary file objects.
    with NamedTemporaryFile(mode='w') as csvfile:
        writer = csv.DictWriter(csvfile,
                                fieldnames=['local', 'remote'],
                                delimiter=';',
                                quotechar='"')
        writer.writeheader()
        # A row referencing a dataset that does not exist.
        writer.writerow({
            'local': 'not-found',
            'remote': '42'
        })
        for index, dataset in enumerate(datasets):
            writer.writerow({
                'local': str(dataset.id),
                'remote': str(index)
            })
        # Flush before attach reads the file back by name.
        csvfile.flush()

        result = actions.attach('test.org', csvfile.name)

        assert result.success == len(datasets)
        assert result.errors == 1
def test_attach(self):
    """Attaching should bind every dataset to its remote id without errors."""
    datasets = DatasetFactory.create_batch(3)
    # Open in text mode with str delimiter/quotechar: Python 3's csv
    # module rejects bytes parameters and binary file objects.
    with NamedTemporaryFile(mode='w') as csvfile:
        writer = csv.DictWriter(csvfile,
                                fieldnames=['local', 'remote'],
                                delimiter=';',
                                quotechar='"')
        writer.writeheader()
        for index, dataset in enumerate(datasets):
            writer.writerow({
                'local': str(dataset.id),
                'remote': str(index)
            })
        # Flush before attach reads the file back by name.
        csvfile.flush()

        result = actions.attach('test.org', csvfile.name)

        self.assertEqual(result.success, len(datasets))
        self.assertEqual(result.errors, 0)

        for index, dataset in enumerate(datasets):
            dataset.reload()
            self.assertEqual(dataset.extras['harvest:domain'], 'test.org')
            self.assertEqual(dataset.extras['harvest:remote_id'], str(index))
def test_list_org_datasets(self, api):
    """Every dataset belonging to the organization should be listed."""
    org = OrganizationFactory()
    created = DatasetFactory.create_batch(3, organization=org)
    response = api.get(url_for('api.org_datasets', org=org))
    assert200(response)
    listed = response.json['data']
    assert len(listed) == len(created)
def test_list_org_datasets(self):
    """Every dataset belonging to the organization should be listed."""
    organization = OrganizationFactory()
    created = DatasetFactory.create_batch(3, organization=organization)
    url = url_for('api.org_datasets', org=organization)
    response = self.get(url)
    self.assert200(response)
    self.assertEqual(len(response.json), len(created))
def test_render_list_with_facets(self):
    """It should render the dataset list page with facets."""
    # Create the fixtures inside the auto-indexing context so they are
    # searchable when the page is rendered.
    with self.autoindex():
        created = DatasetFactory.create_batch(3, visible=True,
                                              org=True, geo=True)
        response = self.get(url_for('datasets.list'))
        self.assert200(response)
        rendered = self.get_context_variable('datasets')
        self.assertEqual(len(rendered), len(created))
def test_list_org_datasets_private(self, api):
    """Private datasets should be listed for an organization member."""
    user = api.login()
    # The logged-in user is an admin member of the organization.
    org = OrganizationFactory(members=[Member(user=user, role='admin')])
    private_datasets = DatasetFactory.create_batch(3, organization=org,
                                                   private=True)
    response = api.get(url_for('api.org_datasets', org=org))
    assert200(response)
    assert len(response.json['data']) == len(private_datasets)
def test_delete_home_dataset(self):
    """A deleted dataset should be pulled out of the home datasets."""
    current_site.settings.home_datasets = DatasetFactory.create_batch(3)
    current_site.save()

    # Soft-delete the middle dataset.
    victim = current_site.settings.home_datasets[1]
    victim.deleted = datetime.now()
    victim.save()

    current_site.reload()
    remaining_ids = [d.id for d in current_site.settings.home_datasets]
    self.assertEqual(len(remaining_ids), 2)
    self.assertNotIn(victim.id, remaining_ids)
def test_list_org_datasets_private(self):
    """Private datasets should be listed for an organization member."""
    self.login()
    # The logged-in user is an admin member of the organization.
    admin = Member(user=self.user, role='admin')
    organization = OrganizationFactory(members=[admin])
    private_datasets = DatasetFactory.create_batch(3,
                                                   organization=organization,
                                                   private=True)
    response = self.get(url_for('api.org_datasets', org=organization))
    self.assert200(response)
    self.assertEqual(len(response.json), len(private_datasets))
def test_attach_does_not_duplicate(self):
    """Attaching should reassign harvest extras, never duplicate them."""
    attached_datasets = []
    for i in range(2):
        dataset = DatasetFactory.build()
        dataset.extras['harvest:domain'] = 'test.org'
        dataset.extras['harvest:remote_id'] = str(i)
        dataset.last_modified = datetime.now()
        dataset.save()
        attached_datasets.append(dataset)

    datasets = DatasetFactory.create_batch(3)

    # Open in text mode with str delimiter/quotechar: Python 3's csv
    # module rejects bytes parameters and binary file objects.
    with NamedTemporaryFile(mode='w') as csvfile:
        writer = csv.DictWriter(csvfile,
                                fieldnames=['local', 'remote'],
                                delimiter=';',
                                quotechar='"')
        writer.writeheader()
        for index, dataset in enumerate(datasets):
            writer.writerow({
                'local': str(dataset.id),
                'remote': str(index)
            })
        # Flush before attach reads the file back by name.
        csvfile.flush()

        result = actions.attach('test.org', csvfile.name)

        dbcount = Dataset.objects(**{
            'extras__harvest:remote_id__exists': True
        }).count()
        assert result.success == len(datasets)
        # Only the newly attached datasets keep a remote id: the
        # previously attached ones were reassigned, not duplicated.
        assert dbcount == result.success

        for index, dataset in enumerate(datasets):
            dataset.reload()
            assert dataset.extras['harvest:domain'] == 'test.org'
            assert dataset.extras['harvest:remote_id'] == str(index)
def test_attach_does_not_duplicate(self):
    """Attaching should reassign harvest extras, never duplicate them."""
    attached_datasets = []
    for i in range(2):
        dataset = DatasetFactory.build()
        dataset.extras['harvest:domain'] = 'test.org'
        dataset.extras['harvest:remote_id'] = str(i)
        dataset.last_modified = datetime.now()
        dataset.save()
        attached_datasets.append(dataset)

    datasets = DatasetFactory.create_batch(3)

    # Open in text mode with str delimiter/quotechar: Python 3's csv
    # module rejects bytes parameters and binary file objects.
    with NamedTemporaryFile(mode='w') as csvfile:
        writer = csv.DictWriter(csvfile,
                                fieldnames=['local', 'remote'],
                                delimiter=';',
                                quotechar='"')
        writer.writeheader()
        for index, dataset in enumerate(datasets):
            writer.writerow({
                'local': str(dataset.id),
                'remote': str(index)
            })
        # Flush before attach reads the file back by name.
        csvfile.flush()

        result = actions.attach('test.org', csvfile.name)

        dbcount = Dataset.objects(**{
            'extras__harvest:remote_id__exists': True
        }).count()
        self.assertEqual(result.success, len(datasets))
        # Only the newly attached datasets keep a remote id: the
        # previously attached ones were reassigned, not duplicated.
        self.assertEqual(dbcount, result.success)

        for index, dataset in enumerate(datasets):
            dataset.reload()
            self.assertEqual(dataset.extras['harvest:domain'], 'test.org')
            self.assertEqual(dataset.extras['harvest:remote_id'], str(index))
def test_resources_metric(self, app):
    """The site resources metric should equal the total resource count."""
    DatasetFactory.create_batch(3, nb_resources=3)
    current = Site.objects.get(id=app.config['SITE_ID'])
    # 3 datasets with 3 resources each.
    expected = 3 * 3
    assert current.metrics['resources'] == expected
def datasets(self):
    """Provide a batch of three freshly created datasets."""
    batch = DatasetFactory.create_batch(3)
    return batch