def configuration(self):
    """Create a read-only test Configuration and register fixture data."""
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([{'name': 'afg', 'title': 'Afghanistan'}])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [{'name': tag} for tag in ('hxl', 'commodities', 'prices', 'markets')],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
def configuration(self):
    """Create a read-only test Configuration, register fixture data and return it."""
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the location used in the tests.
    Locations.set_validlocations([{'name': 'afg', 'title': 'Afghanistan'}])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [
            {'name': tag}
            for tag in (
                'hxl',
                'indicators',
                'health',
                'education',
                'socioeconomic',
                'demographics',
                'development',
            )
        ],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
    return Configuration.read()
def configuration(self):
    """Create a read-only test Configuration and register fixture data."""
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the location used in the tests.
    Locations.set_validlocations([{'name': 'arg', 'title': 'Argentina'}])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [
            {'name': tag}
            for tag in (
                'sustainable development',
                'demographics',
                'socioeconomics',
                'education',
            )
        ],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
def configuration(self):
    """Create a read-only test Configuration, register fixture data and return it."""
    Configuration._create(
        hdx_read_only=True,
        user_agent="test",
        project_config_yaml=join("tests", "config", "project_configuration.yml"),
    )
    # Register the location used in the tests.
    Locations.set_validlocations([{"name": "afg", "title": "Afghanistan"}])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        "tags": [{"name": tag} for tag in ("hxl", "food security", "indicators")],
        "id": "4e61d464-4943-4e97-973a-84673c1aaa87",
        "name": "approved",
    }
    return Configuration.read()
def configuration(self):
    """Create a read-only test Configuration and register fixture data."""
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the location used in the tests.
    Locations.set_validlocations([{'name': 'gin', 'title': 'Guinea'}])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [{'name': tag} for tag in ('hxl', 'food security', 'indicators')],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
def configuration(self):
    """Create a test Configuration (with a dummy API key) and register fixture data."""
    Configuration._create(
        user_agent='test',
        hdx_key='12345',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([
        {'name': 'afg', 'title': 'Afghanistan'},
        {'name': 'cmr', 'title': 'Cameroon'},
    ])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [
            {'name': tag}
            for tag in ('hxl', 'violence and conflict', 'protests', 'security incidents')
        ],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
def configuration(self):
    """Create a read-only test Configuration and register fixture data."""
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([
        {'name': 'afg', 'title': 'Afghanistan'},
        {'name': 'tza', 'title': 'Tanzania'},
        {'name': 'world', 'title': 'World'},
    ])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [
            {'name': tag}
            for tag in (
                'hxl',
                'violence and conflict',
                'displacement',
                'internally displaced persons - idp',
            )
        ],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
def configuration(self):
    """Create a test Configuration (with a dummy API key), register fixture data and return it."""
    Configuration._create(
        user_agent="test",
        hdx_key="12345",
        project_config_yaml=join("tests", "config", "project_configuration.yml"),
    )
    # Register the location used in the tests.
    Locations.set_validlocations([{"name": "bgd", "title": "Bangladesh"}])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        "tags": [{"name": tag} for tag in ("hxl", "refugees", "asylum", "population")],
        "id": "4e61d464-4943-4e97-973a-84673c1aaa87",
        "name": "approved",
    }
    return Configuration.read()
def configuration(self):
    """Create a test Configuration against the 'feature' HDX site, register fixture data and return it."""
    Configuration._create(
        hdx_site='feature',
        user_agent='test',
        hdx_key='12345',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([
        {'name': 'afg', 'title': 'Afghanistan'},
        {'name': 'cmr', 'title': 'Cameroon'},
    ])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [{'name': tag} for tag in ('hxl', 'health', 'demographics')],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
    return Configuration.read()
def configuration(self):
    """Create a read-only test Configuration and register fixture locations."""
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the location used in the tests.
    Locations.set_validlocations([{'name': 'arg', 'title': 'Argentina'}])
    Country.countriesdata(use_live=False)
def configuration(self):
    """Create a read-only test Configuration and register fixture locations."""
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([
        {'name': 'afg', 'title': 'Afghanistan'},
        {'name': 'pse', 'title': 'State of Palestine'},
    ])
    Country.countriesdata(use_live=False)
def test_validlocations(self, project_config_yaml):
    """Exercise Locations lookups both with explicit location lists and via Configuration."""
    Country.countriesdata(use_live=False)

    # Lookups against an explicitly passed locations list.
    locs = [{'name': 'shn', 'title': 'St. Helena'}]
    assert Locations.get_HDX_code_from_location('sh', locations=locs) is None
    assert Locations.get_HDX_code_from_location_partial('sh', locations=locs) == (None, False)
    assert Locations.get_location_from_HDX_code('shn', locations=locs) == 'St. Helena'

    # Lookups against the globally registered locations.
    locs = [{'name': 'zmb', 'title': 'Zambia'}, {'name': 'pry', 'title': 'Paraguay'}]
    Locations.set_validlocations(locs)
    assert Locations.validlocations() == locs
    assert Locations.get_HDX_code_from_location_partial('NOT') == (None, False)
    assert Locations.get_location_from_HDX_code('pr') is None
    assert Locations.get_HDX_code_from_location('zmb') == 'ZMB'
    assert Locations.get_HDX_code_from_location_partial('zmb') == ('ZMB', True)
    assert Locations.get_HDX_code_from_location('Z') is None
    assert Locations.get_HDX_code_from_location_partial('Z') == ('ZMB', False)
    assert Locations.get_HDX_code_from_location_partial('Zambia') == ('ZMB', True)
    assert Locations.get_HDX_code_from_location_partial('ZAM') == ('ZMB', False)
    assert Locations.get_location_from_HDX_code('zmb', locations=locs) == 'Zambia'

    # An explicit locations argument takes precedence over the registered ones.
    locs = [{'name': 'shn', 'title': 'St. Helena'}]
    assert Locations.get_HDX_code_from_location('sh', locations=locs) is None
    assert Locations.get_HDX_code_from_location_partial('sh', locations=locs) == (None, False)
    assert Locations.get_location_from_HDX_code('shn', locations=locs) == 'St. Helena'

    # With no registered locations, lookups fall back to the Configuration.
    Configuration.setup(MyConfiguration())
    Locations.set_validlocations(None)
    assert Locations.get_HDX_code_from_location('zaf') == 'ZAF'
    assert Locations.get_HDX_code_from_location_partial('zaf') == ('ZAF', True)
    assert Locations.get_location_from_HDX_code('zaf') == 'South Africa'
def configuration(self):
    """Create a test Configuration (with a dummy API key), register fixture locations and return it."""
    Configuration._create(
        user_agent="test",
        hdx_key="12345",
        project_config_yaml=join("tests", "config", "project_configuration.yml"),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([
        {"name": "afg", "title": "Afghanistan"},
        {"name": "phl", "title": "Philippines"},
    ])
    Country.countriesdata(use_live=False)
    return Configuration.read()
def configuration():
    """Create a read-only test Configuration and register fixture data."""
    Configuration._create(hdx_read_only=True, user_agent='test')
    # Register the location used in the tests.
    Locations.set_validlocations([{'name': 'world', 'title': 'World'}])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        'tags': [
            {'name': tag}
            for tag in ('hxl', 'energy', 'refugees', 'internally displaced persons - idp')
        ],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
def get_country_df(columns=None, use_live=False):
    """
    Convert the HDX country database into a dataframe, indexed by ISO2.

    If columns is not None, select the specified list of columns only.

    Args:
        columns: Optional iterable of column names to keep; None keeps all columns.
        use_live: Pass True to fetch live country data instead of the bundled
            snapshot. Defaults to False, preserving the previous behavior.

    Returns:
        pandas.DataFrame of country records indexed by '#country+code+v_iso2'.
    """
    import pandas as pd

    country_data = Country.countriesdata(use_live=use_live)["countries"]
    df = pd.DataFrame.from_records(list(country_data.values()))
    df.set_index("#country+code+v_iso2", inplace=True)
    if columns is not None:
        # list() accepts any iterable of column names, not just a list.
        df = df[list(columns)]
    return df
def configuration(self):
    """Create a test Configuration (with a dummy API key), register fixture data and return it."""
    Configuration._create(
        user_agent="test",
        hdx_key="12345",
        project_config_yaml=join("tests", "config", "project_configuration.yml"),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([
        {"name": "afg", "title": "Afghanistan"},
        {"name": "phl", "title": "Philippines"},
    ])
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    Vocabulary._approved_vocabulary = {
        "tags": [
            {"name": tag}
            for tag in (
                "common operational dataset - cod",
                "administrative divisions",
                "geodata",
                "gazetteer",
            )
        ],
        "id": "4e61d464-4943-4e97-973a-84673c1aaa87",
        "name": "approved",
    }
    return Configuration.read()
def configuration(self):
    """Create a read-only test Configuration and register fixture data.

    Registers the locations and the stubbed approved-vocabulary tags that
    the tests rely on.
    """
    Configuration._create(
        hdx_read_only=True,
        user_agent='test',
        project_config_yaml=join('tests', 'config', 'project_configuration.yml'),
    )
    # Register the locations used in the tests.
    Locations.set_validlocations([
        {'name': 'mmr', 'title': 'Myanmar'},
        {'name': 'sdn', 'title': 'Sudan'},
        {'name': 'alb', 'title': 'Albania'},
        {'name': 'yem', 'title': 'Yemen'},
    ])
    # Was Country.countriesdata(False): pass use_live by keyword for clarity and
    # consistency with the other fixtures in this file.
    Country.countriesdata(use_live=False)
    # Pretend the tags dictionary is already loaded and stub the approved vocabulary.
    Vocabulary._tags_dict = True
    # NOTE(review): 'food assistance', 'transportation' and
    # 'facilities and infrastructure' appear more than once in the original
    # list; duplicates are preserved here in case a test depends on them.
    Vocabulary._approved_vocabulary = {
        'tags': [
            {'name': tag}
            for tag in (
                'geodata',
                'populated places - settlements',
                'land use and land cover',
                'erosion',
                'landslides - mudslides',
                'floods - storm surges',
                'droughts',
                'food assistance',
                'hazards and risk',
                'administrative divisions',
                'food security',
                'security',
                'displaced persons locations - camps - shelters',
                'refugees',
                'internally displaced persons - idp',
                'malnutrition',
                'nutrition',
                'food assistance',
                'roads',
                'transportation',
                'aviation',
                'facilities and infrastructure',
                'bridges',
                'transportation',
                'facilities and infrastructure',
                'cold waves',
                'cash assistance',
                'acronyms',
                'common operational dataset - cod',
            )
        ],
        'id': '4e61d464-4943-4e97-973a-84673c1aaa87',
        'name': 'approved',
    }
def get_indicators(configuration, today, retriever, outputs, tabs, scrapers=None, basic_auths=dict(), other_auths=dict(), countries_override=None, use_live=True):
    """Run all the scrapers and push world/regional/national/subnational tabs
    (plus a 'sources' tab) to the given outputs.

    NOTE(review): the mutable default arguments (basic_auths=dict(),
    other_auths=dict()) are shared across calls — safe only if never mutated
    here; they are only read, so this holds, but flagging for a future fix.

    Args:
        configuration: project configuration mapping (mutated: 'countries_fuzzy_try'
            and 'admin1_info' keys are written back into it).
        today: datetime used for date stamping and date-range scrapers.
        retriever: object exposing a .downloader used by the scrapers.
        outputs: mapping of output objects, each with an update_tab(name, data) method.
        tabs: collection of tab names to produce ('national', 'regional', 'world',
            'subnational').
        scrapers: optional list restricting which configured scrapers run.
        basic_auths: per-scraper basic-auth credentials.
        other_auths: other per-scraper credentials (used by get_inform).
        countries_override: optional country list replacing both the GHO and HRP lists.
        use_live: passed through to Country.countriesdata.

    Returns:
        The list of HRP countries used.
    """
    # Each tab is a pair [header-rows, value-rows]; second header row holds HXL tags.
    world = [list(), list()]
    regional = [['regionnames'], ['#region+name']]
    national = [['iso3', 'countryname', 'ishrp', 'region'], [
        '#country+code', '#country+name', '#meta+ishrp', '#region+name'
    ]]
    subnational = [['iso3', 'countryname', 'adm1_pcode', 'adm1_name'], [
        '#country+code', '#country+name', '#adm1+code', '#adm1+name'
    ]]
    sources = [('Indicator', 'Date', 'Source', 'Url'),
               ('#indicator+name', '#date', '#meta+source', '#meta+url')]
    # Prime the country data with project-specific name overrides/mappings.
    Country.countriesdata(
        use_live=use_live,
        country_name_overrides=configuration['country_name_overrides'],
        country_name_mappings=configuration['country_name_mappings'])
    today_str = today.strftime('%Y-%m-%d')
    # An override replaces both country lists (used for partial test runs).
    if countries_override:
        gho_countries = countries_override
        hrp_countries = countries_override
    else:
        gho_countries = configuration['gho']
        hrp_countries = configuration['HRPs']
    configuration['countries_fuzzy_try'] = hrp_countries
    downloader = retriever.downloader
    region = Region(configuration['regional'], today, downloader, gho_countries,
                    hrp_countries)
    # Rewrite admin1 rows into the key names AdminOne expects.
    admin1_info = list()
    for row in configuration['admin1_info']:
        newrow = {
            'pcode': row['ADM1_PCODE'],
            'name': row['ADM1_REF'],
            'iso3': row['alpha_3']
        }
        admin1_info.append(newrow)
    configuration['admin1_info'] = admin1_info
    adminone = AdminOne(configuration)
    pcodes = adminone.pcodes
    population_lookup = dict()

    def update_tab(name, data):
        # Push one tab to every configured output (e.g. gsheets, excel, json).
        logger.info('Updating tab: %s' % name)
        for output in outputs.values():
            output.update_tab(name, data)

    # --- First pass: population only, to fill population_lookup ---
    level = 'national'
    scraper_configuration = configuration[f'scraper_{level}']
    results = run_scrapers(scraper_configuration, gho_countries, adminone, level,
                           downloader, basic_auths, today=today,
                           today_str=today_str, scrapers=['population'],
                           population_lookup=population_lookup)
    national_headers = extend_headers(national, results['headers'])
    national_columns = extend_columns('national', national, gho_countries,
                                      hrp_countries, region, None,
                                      national_headers, results['values'])
    extend_sources(sources, results['sources'])
    # Global population is the sum of the per-country populations gathered above.
    population_lookup['GHO'] = sum(population_lookup.values())
    population_headers, population_columns = region.get_regional(
        region, national_headers, national_columns,
        population_lookup=population_lookup)
    regional_headers = extend_headers(regional, population_headers)
    extend_columns('regional', regional, region.regions, None, region, None,
                   regional_headers, population_columns)
    level = 'subnational'
    scraper_configuration = configuration[f'scraper_{level}']
    results = run_scrapers(scraper_configuration, gho_countries, adminone, level,
                           downloader, basic_auths, today=today,
                           today_str=today_str, scrapers=['population'],
                           population_lookup=population_lookup)
    subnational_headers = extend_headers(subnational, results['headers'])
    extend_columns('subnational', subnational, pcodes, None, None, adminone,
                   subnational_headers, results['values'])
    # COVID and IPC data are needed by several tabs, so fetch them up front.
    covid_wheaders, covid_wcolumns, covid_ghocolumns, covid_headers, covid_columns, covid_sources = get_who_covid(
        configuration, today, outputs, hrp_countries, gho_countries, region,
        population_lookup, scrapers)
    extend_sources(sources, covid_sources)
    ipc_headers, ipc_columns, ipc_sheaders, ipc_scolumns, ipc_sources = get_ipc(
        configuration, today, gho_countries, adminone, downloader, scrapers)
    if 'national' in tabs:
        # --- Second pass: all configured national scrapers plus custom sources ---
        fts_wheaders, fts_wcolumns, fts_wsources, fts_headers, fts_columns, fts_sources = get_fts(
            configuration, today, today_str, gho_countries, basic_auths, scrapers)
        food_headers, food_columns, food_sources = add_food_prices(
            configuration, today, gho_countries, retriever, basic_auths, scrapers)
        campaign_headers, campaign_columns, campaign_sources = add_vaccination_campaigns(
            configuration, today, gho_countries, downloader, outputs, scrapers)
        unhcr_headers, unhcr_columns, unhcr_sources = get_unhcr(
            configuration, today, today_str, gho_countries, downloader, scrapers)
        inform_headers, inform_columns, inform_sources = get_inform(
            configuration, today, gho_countries, other_auths, scrapers)
        covax_headers, covax_columns, covax_sources = get_covax_deliveries(
            configuration, today, gho_countries, downloader, scrapers)
        education_rheaders, education_rcolumns, education_rsources, education_headers, education_columns, education_sources = get_education(
            configuration, today, gho_countries, region, downloader, scrapers)
        level = 'national'
        scraper_configuration = configuration[f'scraper_{level}']
        results = run_scrapers(scraper_configuration, gho_countries, adminone,
                               level, downloader, basic_auths, today=today,
                               today_str=today_str, scrapers=scrapers,
                               population_lookup=population_lookup)
        national_headers = extend_headers(national, covid_headers,
                                          results['headers'], food_headers,
                                          campaign_headers, fts_headers,
                                          unhcr_headers, inform_headers,
                                          ipc_headers, covax_headers,
                                          education_headers)
        national_columns = extend_columns(
            'national', national, gho_countries, hrp_countries, region, None,
            national_headers, covid_columns, results['values'], food_columns,
            campaign_columns, fts_columns, unhcr_columns, inform_columns,
            ipc_columns, covax_columns, education_columns)
        extend_sources(sources, results['sources'], food_sources,
                       campaign_sources, fts_sources, unhcr_sources,
                       inform_sources, covax_sources, education_sources)
        update_tab('national', national)
        # NOTE(review): the 'regional' and 'world' branches use locals defined
        # only in this 'national' branch (covid_w*, fts_w*), so they must be
        # nested inside it — confirm against the original formatting.
        if 'regional' in tabs:
            regional_headers, regional_columns = region.get_regional(
                region, national_headers, national_columns, None,
                (covid_wheaders, covid_wcolumns), (fts_wheaders, fts_wcolumns))
            regional_headers = extend_headers(regional, regional_headers,
                                              education_rheaders)
            regional_columns = extend_columns('regional', regional,
                                              region.regions + ['global'], None,
                                              region, None, regional_headers,
                                              regional_columns,
                                              education_rcolumns)
            update_tab('regional', regional)
            extend_sources(sources, education_rsources)
        if 'world' in tabs:
            rgheaders, rgcolumns = region.get_world(regional_headers,
                                                    regional_columns)
            level = 'global'
            scraper_configuration = configuration[f'scraper_{level}']
            results = run_scrapers(scraper_configuration, gho_countries,
                                   adminone, level, downloader, basic_auths,
                                   today=today, today_str=today_str,
                                   scrapers=scrapers,
                                   population_lookup=population_lookup)
            world_headers = extend_headers(world, covid_wheaders, fts_wheaders,
                                           results['headers'], rgheaders)
            extend_columns('global', world, None, None, None, None,
                           world_headers, covid_ghocolumns, fts_wcolumns,
                           results['values'], rgcolumns)
            extend_sources(sources, fts_wsources, results['sources'])
            update_tab('world', world)
    if 'subnational' in tabs:
        whowhatwhere_headers, whowhatwhere_columns, whowhatwhere_sources = get_whowhatwhere(
            configuration, today_str, adminone, downloader, scrapers)
        iomdtm_headers, iomdtm_columns, iomdtm_sources = get_iom_dtm(
            configuration, today_str, adminone, downloader, scrapers)
        level = 'subnational'
        scraper_configuration = configuration[f'scraper_{level}']
        results = run_scrapers(scraper_configuration, gho_countries, adminone,
                               level, downloader, basic_auths, today=today,
                               today_str=today_str, scrapers=scrapers,
                               population_lookup=population_lookup)
        subnational_headers = extend_headers(subnational, ipc_sheaders,
                                             results['headers'],
                                             whowhatwhere_headers,
                                             iomdtm_headers)
        extend_columns('subnational', subnational, pcodes, None, None, adminone,
                       subnational_headers, ipc_scolumns, results['values'],
                       whowhatwhere_columns, iomdtm_columns)
        extend_sources(sources, results['sources'], whowhatwhere_sources,
                       iomdtm_sources)
        update_tab('subnational', subnational)
    extend_sources(sources, ipc_sources)
    # Emit AdminOne's p-code matching diagnostics to the logs.
    adminone.output_matches()
    adminone.output_ignored()
    adminone.output_errors()
    # Append hand-maintained sources, filling gaps from the referenced datasets.
    for sourceinfo in configuration['additional_sources']:
        date = sourceinfo.get('date')
        if date is None:
            if sourceinfo.get('force_date_today', False):
                date = today_str
        source = sourceinfo.get('source')
        source_url = sourceinfo.get('source_url')
        dataset_name = sourceinfo.get('dataset')
        if dataset_name:
            dataset = Dataset.read_from_hdx(dataset_name)
            if date is None:
                date = get_date_from_dataset_date(dataset, today=today)
            if source is None:
                source = dataset['dataset_source']
            if source_url is None:
                source_url = dataset.get_hdx_url()
        sources.append((sourceinfo['indicator'], date, source, source_url))
    sources.append(get_monthly_report_source(configuration))
    # Deduplicate while preserving order (dict.fromkeys keeps first occurrence).
    sources = [list(elem) for elem in dict.fromkeys(sources)]
    update_tab('sources', sources)
    return hrp_countries
import pandas as pd
from tabulate import tabulate
import seaborn as sb
import matplotlib.pyplot as plt
import numpy as np
from hdx.location.country import Country
import math

# Build a country -> region lookup table from the HDX country database.
allCountryData = Country.countriesdata(use_live=False)
detailCountryData = allCountryData['countries']
# DataFrame.append was deprecated in pandas 1.4 and removed in 2.0; collect
# the rows first and build the frame in a single call instead of appending
# (which also avoids creating a new DataFrame per row).
_rows = [
    {
        'country': info['#country+name+preferred'],
        'region': info['#region+main+name+preferred']
    }
    for info in detailCountryData.values()
]
countryRegion = pd.DataFrame(_rows, columns=['country', 'region'])

# Read the data and normalize the column names.
data = pd.read_csv(r'./suicides.csv')
data.columns = [
    'country', 'year', 'sex', 'age', 'suicides_no', 'population',
    'suicides_per_100k', 'country_year', 'HDI_year', 'gdp_year',
    'gdp_per_capita', 'generation'
]
# print(tabulate(data.head(20), headers='keys', tablefmt='github'))
# making sorting by age easier and dropping rows with NaN
def country():
    # Load the bundled (offline) HDX country data so tests don't hit the network.
    Country.countriesdata(use_live=False)
from datetime import datetime
from pathlib import Path
from hdx.location.country import Country

# Target module that will hold the generated country tables.
out_path = Path(__file__).parent / ".." / "mtoolbox" / "countries.py"
stamp = datetime.now().isoformat()
tables = Country.countriesdata()
with out_path.open("w") as dest:
    # Header records when and from where the data was generated.
    dest.write(
        f"# Country list generated on {stamp} from OCHA-DAP/hdx-python-country")
    # Emit each table as a module-level assignment.
    for key in ["countries", "iso2iso3"]:
        dest.write(f"\n{key} = ")
        dest.write(str(tables[key]))
# -*- coding: UTF-8 -*- from hdx.location.country import Country Country.countriesdata(use_live=False, country_name_overrides={'PSE': 'oPt'}, country_name_mappings={'Congo DR': 'COD'})