# Module-level imports assumed by these tests (import paths follow the
# 2019-era hdx libraries and may differ in newer releases).
import copy
from datetime import datetime

from hdx.scraper.geonode.geonodetohdx import GeoNodeToHDX


def test_delete_other_datasets(self, search_datasets, configuration, downloader):
    datasets = list()

    def delete_from_hdx(dataset):
        # Record datasets that would be deleted instead of calling out to HDX
        datasets.append(dataset)

    geonodetohdx = GeoNodeToHDX('http://xxx', downloader)
    geonodetohdx.geonode_urls.append('https://ogcserver.gis.wfp.org')
    geonodetohdx.delete_other_datasets(self.wfpnames, self.wfpmetadata,
                                       delete_from_hdx=delete_from_hdx)
    assert len(datasets) == 0
    geonodetohdx.delete_other_datasets(self.mimunames, self.mimumetadata,
                                       delete_from_hdx=delete_from_hdx)
    assert datasets[0]['name'] == self.wfpdatasets[0]['name']
    assert datasets[1]['name'] == self.wfpdatasets[1]['name']
    geonodetohdx = GeoNodeToHDX('http://yyy', downloader)
    datasets = list()
    geonodetohdx.delete_other_datasets(self.mimunames, self.mimumetadata,
                                       delete_from_hdx=delete_from_hdx)
    assert len(datasets) == 0
def test_generate_dataset_and_showcase(self, configuration, downloader):
    geonodetohdx = GeoNodeToHDX('http://xxx', downloader)
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'SDN', TestGeoNodeToHDX.wfplayersdata[0], self.wfpmetadata,
        get_date_from_title=True)
    assert dataset == self.wfpdatasets[0]
    resources = dataset.get_resources()
    assert resources == self.wfpresources[0]
    assert ranges == [(datetime(2001, 1, 1, 0, 0), datetime(2013, 12, 31, 0, 0)),
                      (datetime(2018, 1, 1, 0, 0), datetime(2018, 12, 31, 0, 0))]
    assert showcase == self.wfpshowcases[0]

    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'SDN', TestGeoNodeToHDX.wfplayersdata[1], self.wfpmetadata,
        get_date_from_title=True)
    assert dataset == self.wfpdatasets[1]
    resources = dataset.get_resources()
    assert resources == self.wfpresources[1]
    assert ranges == [(datetime(2014, 1, 1, 0, 0), datetime(2014, 12, 31, 0, 0)),
                      (datetime(2018, 1, 1, 0, 0), datetime(2018, 12, 31, 0, 0))]
    assert showcase == self.wfpshowcases[1]
    assert geonodetohdx.geonode_urls[1] == 'https://ogcserver.gis.wfp.org'

    geonodetohdx = GeoNodeToHDX('http://yyy', downloader)
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', TestGeoNodeToHDX.mimulayersdata[0], self.mimumetadata,
        get_date_from_title=True,
        dataset_tags_mapping=self.dataset_tags_mapping)
    assert dataset == self.mimudatasets[0]
    resources = dataset.get_resources()
    assert resources == self.mimuresources[0]
    assert ranges == [(datetime(2019, 7, 1, 0, 0), datetime(2019, 7, 31, 0, 0))]
    assert showcase == self.mimushowcases[0]

    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', TestGeoNodeToHDX.mimulayersdata[1], self.mimumetadata,
        get_date_from_title=True)
    assert dataset == self.mimudatasets[1]
    resources = dataset.get_resources()
    assert resources == self.mimuresources[1]
    assert ranges == [(datetime(2002, 1, 1, 0, 0), datetime(2014, 12, 31, 0, 0))]
    assert showcase == self.mimushowcases[1]
def test_get_countries(self, configuration, downloader):
    geonodetohdx = GeoNodeToHDX('http://xxx', downloader)
    countries = geonodetohdx.get_countries()
    assert countries == [{'iso3': 'SDN', 'name': 'Sudan', 'layers': 'SDN'}]
    countries = geonodetohdx.get_countries(use_count=False)
    assert countries == [{'iso3': 'SDN', 'name': 'Sudan', 'layers': 'SDN'},
                         {'iso3': 'ALB', 'name': 'Albania', 'layers': 'ALB'},
                         {'iso3': 'YEM', 'name': 'Yemen', 'layers': 'YEM'}]
def test_get_orgname(self):
    metadata = {'orgid': '12345'}

    class MyOrg:
        # Stand-in for the organization class so the test does not need to
        # contact HDX
        @staticmethod
        def read_from_hdx(id):
            return {'name': 'abc'}

    assert GeoNodeToHDX.get_orgname(metadata, orgclass=MyOrg) == 'abc'
def main():
    """Generate dataset and create it in HDX"""
    with Download() as downloader:
        configuration = Configuration.read()
        base_url = configuration['base_url']
        dataset_tags_mapping = configuration['dataset_tags_mapping']
        geonodetohdx = GeoNodeToHDX(base_url, downloader)
        countrydata = {'iso3': 'MMR', 'name': 'Myanmar', 'layers': None}
        metadata = {'maintainerid': '196196be-6037-4488-8b71-d786adf4c081',
                    'orgid': 'bde18602-2e92-462a-8e88-a0018a7b13f9'}
        datasets = geonodetohdx.generate_datasets_and_showcases(
            metadata, countrydata=countrydata, get_date_from_title=True,
            process_dataset_name=process_dataset_name,
            dataset_tags_mapping=dataset_tags_mapping,
            updated_by_script='HDX Scraper: MIMU GeoNode')
        geonodetohdx.delete_other_datasets(datasets, metadata)
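
# main() above depends on pieces defined elsewhere in the scraper. Below is a
# minimal sketch of those dependencies, assuming the 2019-era hdx-python-api
# entry points (newer releases moved Configuration to hdx.api.configuration).
# The body of process_dataset_name is illustrative only, not the scraper's
# actual renaming rule.
from hdx.facade.simple import facade
from hdx.hdx_configuration import Configuration
from hdx.utilities.downloader import Download


def process_dataset_name(name):
    # Hypothetical cleanup: collapse double separators left over from
    # slugifying the layer title
    while '--' in name:
        name = name.replace('--', '-')
    return name


if __name__ == '__main__':
    facade(main, user_agent='hdx-scraper-mimu-geonode',
           project_config_yaml='config/project_configuration.yml')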
def test_get_layers(self, downloader):
    geonodetohdx = GeoNodeToHDX('http://xxx', downloader)
    layers = geonodetohdx.get_layers(countryiso='SDN')
    assert layers == TestGeoNodeToHDX.wfplayersdata
    geonodetohdx = GeoNodeToHDX('http://yyy', downloader)
    layers = geonodetohdx.get_layers()
    assert layers == TestGeoNodeToHDX.mimulayersdata
def test_mappings(self, configuration, downloader, yaml_config):
    def tag(name):
        # All tags in these fixtures share the same HDX vocabulary id
        return {'name': name,
                'vocabulary_id': '4e61d464-4943-4e97-973a-84673c1aaa87'}

    geonodetohdx = GeoNodeToHDX('http://yyy', downloader)
    layersdata = copy.deepcopy(TestGeoNodeToHDX.mimulayersdata[0])
    abstract = layersdata['abstract']
    # 'deprecated' is ignored by default, so nothing is generated
    layersdata['abstract'] = '%s deprecated' % abstract
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', layersdata, self.mimumetadata, get_date_from_title=True)
    assert dataset is None
    assert showcase is None
    # A custom YAML configuration can override the default ignore list
    geonodetohdx = GeoNodeToHDX('http://yyy', downloader, yaml_config)
    layersdata['abstract'] = '%s deprecated' % abstract
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', layersdata, self.mimumetadata, get_date_from_title=True)
    assert dataset is not None
    assert showcase is not None
    layersdata['abstract'] = '%s abcd' % abstract
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', layersdata, self.mimumetadata, get_date_from_title=True)
    assert dataset is None
    assert showcase is None
    layersdata['abstract'] = '%s hdx' % abstract
    geonodetohdx.get_ignore_data().append('hdx')
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', layersdata, self.mimumetadata, get_date_from_title=True)
    assert dataset is None
    assert showcase is None
    # Category and title/abstract mappings are also customisable
    geonodetohdx.get_category_mapping()['Location'] = 'acronyms'
    geonodetohdx.get_titleabstract_mapping()['ffa'] = ['cash assistance']
    layersdata['abstract'] = (
        '%s landslide flood drought ffa emergency levels admin boundaries '
        'food security refugee camp idp malnutrition food distribution '
        'streets airport bridges frost erosion' % abstract)
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', layersdata, self.mimumetadata, get_date_from_title=True)
    assert dataset == {
        'name': 'mimu-geonode-myanmar-town',
        'title': 'Myanmar Town',
        'notes': 'Towns are urban areas divided into wards. landslide flood '
                 'drought ffa emergency levels admin boundaries food security '
                 'refugee camp idp malnutrition food distribution streets '
                 'airport bridges frost erosion\n\nPlace name from GAD, '
                 'transliteration by MIMU. Names available in Myanmar Unicode '
                 '3 and Roman script.\n\nOriginal dataset title: Myanmar Town '
                 '2019 July',
        'maintainer': '196196be-6037-4488-8b71-d786adf4c081',
        'owner_org': 'bde18602-2e92-462a-8e88-a0018a7b13f9',
        'dataset_date': '07/01/2019-07/31/2019',
        'data_update_frequency': '-2',
        'subnational': '1',
        'groups': [{'name': 'mmr'}],
        'tags': [tag('geodata'), tag('acronyms'),
                 tag('landslides - mudslides'), tag('floods - storm surges'),
                 tag('droughts'), tag('hazards and risk'),
                 tag('food assistance'), tag('cold waves'), tag('erosion'),
                 tag('roads'), tag('transportation'), tag('aviation'),
                 tag('facilities and infrastructure'), tag('bridges'),
                 tag('administrative divisions'), tag('food security'),
                 tag('displaced persons locations - camps - shelters'),
                 tag('refugees'), tag('internally displaced persons - idp'),
                 tag('malnutrition'), tag('cash assistance')]}
    layersdata['abstract'] = '%s security nutrition' % abstract
    dataset, ranges, showcase = geonodetohdx.generate_dataset_and_showcase(
        'MMR', layersdata, self.mimumetadata, get_date_from_title=True)
    assert dataset == {
        'name': 'mimu-geonode-myanmar-town',
        'title': 'Myanmar Town',
        'notes': 'Towns are urban areas divided into wards. security '
                 'nutrition\n\nPlace name from GAD, transliteration by MIMU. '
                 'Names available in Myanmar Unicode 3 and Roman script.\n\n'
                 'Original dataset title: Myanmar Town 2019 July',
        'maintainer': '196196be-6037-4488-8b71-d786adf4c081',
        'owner_org': 'bde18602-2e92-462a-8e88-a0018a7b13f9',
        'dataset_date': '07/01/2019-07/31/2019',
        'data_update_frequency': '-2',
        'subnational': '1',
        'groups': [{'name': 'mmr'}],
        'tags': [tag('geodata'), tag('acronyms'), tag('security'),
                 tag('nutrition')]}
def test_generate_datasets_and_showcases(self, configuration, downloader):
    geonodetohdx = GeoNodeToHDX('http://xxx', downloader)
    datasets = list()
    showcases = list()

    def create_dataset_showcase(dataset, showcase, batch):
        # Capture what would be created in HDX
        datasets.append(dataset)
        showcases.append(showcase)

    datasets_to_keep = geonodetohdx.generate_datasets_and_showcases(
        self.wfpmetadata, create_dataset_showcase=create_dataset_showcase,
        get_date_from_title=True)
    assert datasets == self.wfpdatasets
    assert showcases == self.wfpshowcases
    assert datasets_to_keep == self.wfpnames

    geonodetohdx = GeoNodeToHDX('http://yyy', downloader)
    datasets = list()
    showcases = list()
    datasets_to_keep = geonodetohdx.generate_datasets_and_showcases(
        self.mimumetadata, create_dataset_showcase=create_dataset_showcase,
        countrydata={'iso3': 'MMR', 'name': 'Myanmar', 'layers': None},
        get_date_from_title=True,
        dataset_tags_mapping=self.dataset_tags_mapping)
    assert datasets == self.mimudatasets
    assert showcases == self.mimushowcases
    assert datasets_to_keep == self.mimunames

    geonodetohdx = GeoNodeToHDX('http://zzz', downloader)
    datasets = list()
    showcases = list()
    datasets_to_keep = geonodetohdx.generate_datasets_and_showcases(
        self.mimumetadata, create_dataset_showcase=create_dataset_showcase,
        countrydata={'iso3': 'MMR', 'name': 'Myanmar', 'layers': None},
        get_date_from_title=True,
        dataset_tags_mapping=self.dataset_tags_mapping)
    assert datasets == self.mimudatasets
    # Showcase URLs point at the GeoNode server, so they track the base url
    mimushowcases = copy.deepcopy(self.mimushowcases)
    mimushowcases[0]['url'] = mimushowcases[0]['url'].replace('yyy', 'zzz')
    mimushowcases[1]['url'] = mimushowcases[1]['url'].replace('yyy', 'zzz')
    assert showcases == mimushowcases
    assert datasets_to_keep == self.mimunames

    geonodetohdx = GeoNodeToHDX('http://aaa', downloader)
    datasets = list()
    showcases = list()
    datasets_to_keep = geonodetohdx.generate_datasets_and_showcases(
        self.mimumetadata, create_dataset_showcase=create_dataset_showcase,
        countrydata={'iso3': 'MMR', 'name': 'Myanmar', 'layers': None},
        get_date_from_title=False)
    assert datasets == self.mimudatasets_withdates
    assert showcases == self.mimushowcases_withdates
    assert datasets_to_keep == self.mimunames_withdates
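
# The fixtures used throughout (configuration, downloader, yaml_config,
# search_datasets) are defined elsewhere in the test suite. A minimal,
# hypothetical sketch of the downloader fixture, assuming the real one stubs
# out Download and serves canned GeoNode API responses keyed by URL:
import pytest


@pytest.fixture(scope='function')
def downloader():
    class Response:
        @staticmethod
        def json():
            # A canned GeoNode API payload would go here
            return {'objects': []}

    class Download:
        @staticmethod
        def download(url):
            # The real fixture returns different payloads per GeoNode URL
            return Response()

    return Download()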