def test_increment_downloads_for_resource(self):
    """The increment-downloads action should acknowledge with 'success'."""
    dataset = create_dataset()
    resource = factories.Resource(package_id=dataset.get('id'),
                                  url='http://www.google.com')
    outcome = actions.increment_downloads_for_resource(
        {}, {'resource_id': resource['id']})
    assert outcome == 'success'
def test_get_dataset_stats(self):
    """Stored visit counters should be returned for the dataset."""
    dataset = create_dataset()
    package_id = dataset.get('id')
    update_package_visits(package_id, 10, 28)
    stats = helpers.get_dataset_stats(package_id)
    assert stats.get('id') == package_id
    assert stats.get('visits_recently') == 10
    assert stats.get('visits_ever') == 28
def test_resource_update_valid_url(self):
    """resource_update should accept a reachable URL.

    Fix: the original test performed the update but never asserted on
    the returned value, so a wrong result would have gone unnoticed.
    """
    dataset = create_dataset()
    resource = factories.Resource(package_id=dataset['id'],
                                  url='http://google.com')
    data_dict = {'id': resource['id'], 'url': "http://yahoo.com"}
    user = factories.Sysadmin()
    result = actions.resource_update(
        {'user': user.get('name'), 'model': model}, data_dict)
    # Previously `result` was unused; verify the URL actually changed.
    assert result['url'] == 'http://yahoo.com'
def test_resource_delete(self):
    """Deleting an existing resource should return None."""
    dataset = create_dataset()
    resource = factories.Resource(package_id=dataset['id'],
                                  url="http://yahoo.com")
    # The core action is passed as the first argument — presumably
    # actions.resource_delete follows CKAN's chained-action signature
    # (original_action, context, data_dict); confirm against the action.
    core_action = toolkit.get_action('resource_delete')
    sysadmin = factories.Sysadmin()
    outcome = actions.resource_delete(core_action,
                                      {'user': sysadmin.get('name')},
                                      {'id': resource['id']})
    assert outcome is None
def test_prepare_zip_resources(self):
    """Zipping a list of resources should return a zip identifier."""
    dataset = create_dataset()
    first = factories.Resource(
        package_id=dataset['id'],
        url='https://jsonplaceholder.typicode.com/posts')
    second = factories.Resource(
        package_id=dataset['id'],
        url='https://jsonplaceholder.typicode.com/comments')
    payload = {'resources': [first.get('id'), second.get('id')]}
    outcome = actions.prepare_zip_resources({}, payload)
    assert 'zip_id' in outcome
def test_get_resource_stats(self):
    """Each increment_downloads call should add one to the counter."""
    dataset = create_dataset()
    resource = factories.Resource(package_id=dataset['id'],
                                  url='http://google.com')
    resource_id = resource['id']
    for _ in range(3):
        increment_downloads(resource_id)
    stats = helpers.get_resource_stats(resource_id)
    assert stats['downloads'] == 3
def test_get_related_datasets(self):
    """Related datasets (shared tag or group) come back newest-first,
    and the 'limit' parameter caps the result length."""
    dataset = create_dataset(tags=[{'name': 'cat'}])
    dataset2 = create_dataset(tags=[{'name': 'cat'}])
    related = actions.get_related_datasets({}, {'id': dataset['id']})
    assert related[0]['id'] == dataset2['id']

    group = factories.Group()
    dataset = create_dataset(groups=[{'id': group['id']}])
    create_dataset(groups=[{'id': group['id']}])
    create_dataset(groups=[{'id': group['id']}])
    newest = create_dataset(groups=[{'id': group['id']}])
    related = actions.get_related_datasets({}, {'id': dataset['id']})
    assert related[0]['id'] == newest['id']
    related = actions.get_related_datasets(
        {}, {'id': dataset['id'], 'limit': 2})
    assert len(related) == 2
def test_get_related_datasets(self):
    """helpers.get_related_datasets mirrors the action: newest related
    dataset first, 'limit' caps the result length.

    NOTE(review): a test with this same name exists earlier in the file;
    if both live in the same test class the later definition shadows the
    earlier one — confirm they belong to separate classes.
    """
    dataset = create_dataset(tags=[{'name': 'cat'}])
    dataset2 = create_dataset(tags=[{'name': 'cat'}])
    related = helpers.get_related_datasets(dataset['id'])
    assert related[0]['id'] == dataset2['id']

    group = factories.Group()
    dataset = create_dataset(groups=[{'id': group['id']}])
    create_dataset(groups=[{'id': group['id']}])
    create_dataset(groups=[{'id': group['id']}])
    newest = create_dataset(groups=[{'id': group['id']}])
    related = helpers.get_related_datasets(dataset['id'])
    assert related[0]['id'] == newest['id']
    related = helpers.get_related_datasets(dataset['id'], limit=2)
    assert len(related) == 2
def test_get_package_total_downloads(self):
    """Downloads are summed across all of the dataset's resources."""
    dataset = create_dataset()
    dataset_id = dataset['id']
    first = factories.Resource(package_id=dataset_id,
                               url='http://google.com')
    second = factories.Resource(package_id=dataset_id,
                                url='http://google.com')
    increment_downloads(first['id'])
    increment_downloads(second['id'])
    assert helpers.get_package_total_downloads(dataset_id) == 2
def test_resource_update_invalid_url(self):
    """Updating a resource to an unreachable URL raises ValidationError."""
    dataset = create_dataset()
    resource = factories.Resource(package_id=dataset['id'],
                                  url='http://google.com')
    sysadmin = factories.Sysadmin()
    with assert_raises(toolkit.ValidationError) as cm:
        actions.resource_update(
            {'user': sysadmin.get('name'), 'model': model},
            {'id': resource['id'], 'url': "http://brmbrm.com"})
    assert cm.exception.error_summary == {u'Message': u'Invalid URL'}
def test_package_search(self):
    """package_search should return the title in the active language."""
    title_translated = {
        'en': 'title on english',
        'mk': u'наслов на македонски',
        'sq': 'titulli i shqiptar'
    }
    create_dataset(title_translated=title_translated,
                   name='testtranstitle')
    set_lang('mk')
    found = test_helpers.call_action('package_search',
                                     fq='name:testtranstitle')
    assert found.get('results')[0]['title'] == u'наслов на македонски'
def test_resource_update_uploaded_file(self):
    """Uploading an identical file to the same package twice is rejected.

    Fix: the original test duplicated the entire file body inline twice;
    the content is now built once and reused for both upload attempts.
    """
    dataset = create_dataset()
    file_content = '''
    "info": {
        "title": "BC Data Catalogue API",
        "description": "This API provides information about datasets in the BC Data Catalogue.",
        "termsOfService": "http://www.data.gov.bc.ca/local/dbc/docs/license/API_Terms_of_Use.pdf",
        "contact": {
            "name": "Data BC",
            "url": "http://data.gov.bc.ca/",
            "email": ""
        },
        "license": {
            "name": "Open Government License - British Columbia",
            "url": "http://www.data.gov.bc.ca/local/dbc/docs/license/OGL-vbc2.0.pdf"
        },
        "version": "3.0.0"
    }
    '''

    def make_upload():
        # Fresh in-memory file object per upload attempt.
        buf = StringIO.StringIO()
        buf.write(file_content)
        return FakeFileStorage(buf, 'test.json')

    factories.Resource(package_id=dataset['id'], upload=make_upload())
    # The second upload of the same content must be rejected.
    with assert_raises(toolkit.ValidationError) as cm:
        factories.Resource(package_id=dataset['id'],
                           upload=make_upload())
    assert cm.exception.error_summary == {
        u'Message': u'Resource already exists'
    }
def test_get_recently_updated_datasets(self):
    """Most recently created dataset comes first; 'limit' is honored."""
    for _ in range(3):
        create_dataset()
    newest = create_dataset()
    recent = helpers.get_recently_updated_datasets()
    assert len(recent) == 4
    assert recent[0]['id'] == newest['id']
    limited = helpers.get_recently_updated_datasets(limit=2)
    assert len(limited) == 2
    assert limited[0]['id'] == newest['id']
def test_update_package_stats(self):
    """update_package_stats should push download/size extras into Solr."""
    dataset = create_dataset()
    upload_file = StringIO.StringIO()
    upload_file.write('''
    "info": {
        "title": "BC Data Catalogue API",
        "description": "This API provides information about datasets in the BC Data Catalogue.",
        "termsOfService": "http://www.data.gov.bc.ca/local/dbc/docs/license/API_Terms_of_Use.pdf",
        "contact": {
            "name": "Data BC",
            "url": "http://data.gov.bc.ca/",
            "email": ""
        },
        "license": {
            "name": "Open Government License - British Columbia",
            "url": "http://www.data.gov.bc.ca/local/dbc/docs/license/OGL-vbc2.0.pdf"
        },
        "version": "3.0.0"
    }
    ''')
    uploaded = FakeFileStorage(upload_file, 'test.json')
    resource = factories.Resource(package_id=dataset['id'],
                                  url='http://www.google.com',
                                  upload=uploaded)
    increment_downloads(resource['id'])
    increment_downloads(resource['id'])
    actions.update_package_stats(dataset['id'])

    # Query Solr directly to check the indexed extras.
    query_url = '{0}/select?q=*:*&fq=id:{1}&wt=json'.format(
        config['solr_url'], dataset['id'])
    response = requests.get(query_url).json().get('response')
    assert response.get('numFound') == 1
    docs = response.get('docs')
    # Values are indexed as zero-padded 24-digit strings.
    assert docs[0].get(
        'extras_total_downloads') == '000000000000000000000002'
    # NOTE(review): 669 is the byte size of the uploaded literal above —
    # it depends on the literal's exact whitespace; confirm after any
    # reformatting of that string.
    assert docs[0].get(
        'extras_file_size') == '000000000000000000000669'
def test_resource_show_sq(self):
    """resource_show returns name/description in the active language."""
    dataset = create_dataset()
    resource = factories.Resource(
        package_id=dataset.get('id'),
        url='http://www.google.com',
        name_translated={
            'mk': 'name mk',
            'en': 'name en',
            'sq': 'name sq'
        },
        description_translated={
            'mk': 'description mk',
            'en': 'description en',
            'sq': 'description sq'
        })
    set_lang('sq')
    shown = test_helpers.call_action('resource_show',
                                     id=resource.get('id'))
    assert shown.get('name') == 'name sq'
    assert shown.get('description') == 'description sq'
def test_get_package_stats(self):
    """Package stats aggregate downloads across resources and report
    the uploaded file size."""
    dataset = create_dataset()
    upload_file = StringIO.StringIO()
    upload_file.write('''
    "info": {
        "title": "BC Data Catalogue API",
        "description": "This API provides information about datasets in the BC Data Catalogue.",
        "termsOfService": "http://www.data.gov.bc.ca/local/dbc/docs/license/API_Terms_of_Use.pdf",
        "contact": {
            "name": "Data BC",
            "url": "http://data.gov.bc.ca/",
            "email": ""
        },
        "license": {
            "name": "Open Government License - British Columbia",
            "url": "http://www.data.gov.bc.ca/local/dbc/docs/license/OGL-vbc2.0.pdf"
        },
        "version": "3.0.0"
    }
    ''')
    uploaded = FakeFileStorage(upload_file, 'test.json')
    with_file = factories.Resource(package_id=dataset['id'],
                                   url='http://www.google.com',
                                   upload=uploaded)
    link_only = factories.Resource(package_id=dataset['id'],
                                   url='http://www.yahoo.com')
    increment_downloads(with_file['id'])
    increment_downloads(with_file['id'])
    increment_downloads(link_only['id'])

    stats = actions.get_package_stats(dataset['id'])
    # NOTE(review): 669 is the byte size of the uploaded literal above —
    # it depends on the literal's exact whitespace; confirm after any
    # reformatting of that string.
    assert stats['file_size'] == 669
    assert stats['total_downloads'] == 3
def test_get_groups(self):
    """Groups with no datasets are excluded; the group with the most
    datasets comes first."""
    factories.Group()  # empty group — must not appear in the result
    assert len(helpers.get_groups()) == 0

    group1 = factories.Group()
    create_dataset(groups=[{'id': group1['id']}])
    group2 = factories.Group()
    create_dataset(groups=[{'id': group2['id']}])
    create_dataset(groups=[{'id': group2['id']}])
    group3 = factories.Group()
    create_dataset(groups=[{'id': group3['id']}])
    create_dataset(groups=[{'id': group3['id']}])
    create_dataset(groups=[{'id': group3['id']}])

    groups = helpers.get_groups()
    assert groups[0]['id'] == group3['id']
    assert len(groups) == 3
def test_get_most_active_organizations(self):
    """Organizations are ranked by fetch_most_active_orgs; fresh
    activity changes the ranking, and 'limit' caps the list."""
    org_small = factories.Organization()
    small_dataset = factories.Dataset(owner_org=org_small['id'])
    factories.Resource(package_id=small_dataset['id'],
                       url='http://google.com',
                       skip_update_package_stats=True)

    org_medium = factories.Organization()
    medium_a = factories.Dataset(owner_org=org_medium['id'])
    factories.Resource(package_id=medium_a['id'],
                       url='http://google.com',
                       skip_update_package_stats=True)
    medium_b = factories.Dataset(owner_org=org_medium['id'])
    for _ in range(2):
        factories.Resource(package_id=medium_b['id'],
                           url='http://google.com',
                           skip_update_package_stats=True)

    org_large = factories.Organization()
    large_a = factories.Dataset(owner_org=org_large['id'])
    for _ in range(4):
        factories.Resource(package_id=large_a['id'],
                           url='http://google.com',
                           skip_update_package_stats=True)
    large_b = factories.Dataset(owner_org=org_large['id'])
    factories.Resource(package_id=large_b['id'],
                       url='http://google.com',
                       skip_update_package_stats=True)

    fetch_most_active_orgs()
    ranked = helpers.get_most_active_organizations()
    assert len(ranked) == 3
    assert ranked[0].org_id == org_large['id']

    # New activity on the small org moves it to the top of the ranking.
    factories.Resource(package_id=small_dataset['id'],
                       url='http://google.com',
                       skip_update_package_stats=True)
    fetch_most_active_orgs()
    ranked = helpers.get_most_active_organizations()
    assert ranked[0].org_id == org_small['id']

    for _ in range(10):
        extra_org = factories.Organization()
        create_dataset(owner_org=extra_org['id'])
    fetch_most_active_orgs()
    assert len(helpers.get_most_active_organizations(limit=7)) == 7
def test_resource_create_invalid_url(self):
    """Creating a resource with an unreachable URL raises ValidationError."""
    dataset = create_dataset()
    with assert_raises(toolkit.ValidationError) as cm:
        factories.Resource(package_id=dataset['id'],
                           url='http://brmbrm.com')
    assert cm.exception.error_summary == {u'Message': u'Invalid URL'}
def test_resource_create_valid_url(self):
    """Creating a resource with a reachable URL must not raise."""
    dataset = create_dataset()
    factories.Resource(package_id=dataset['id'],
                       url='http://google.com')