def update_country_data():
    """Refresh all country-level geodata: centers, polygons, regions, alt names."""
    from geodata.importer.country import CountryImport

    importer = CountryImport()
    importer.update_country_center()
    importer.update_polygon()
    importer.update_regions()
    importer.update_alt_name()
def setUp(self):
    """Load country geodata, then build the dummy source/geolocation/file fixtures."""
    country_importer = CountryImport()
    country_importer.update_polygon()
    country_importer.update_alt_name()

    # Fixture objects shared by the tests in this case.
    self.dummy_file_source = factory.FileSourceFactory(
        name='dummy_file_source')
    self.dummy_geolocation = factory.GeolocationFactory(
        tag='Albania',
        iso2='al',
        iso3='alb',
        object_id=4,
        content_type_id=15,
        type='country')
    self.dummy_file = factory.FileFactory(
        title="Region",
        description="test",
        contains_subnational_data=True,
        organisation="test",
        maintainer="test",
        date_of_dataset="2009-01-01",
        methodology="test",
        define_methodology="test",
        update_frequency="test",
        comments="test",
        accessibility="p",
        data_quality="test",
        number_of_rows=1,
        file_types="csv",
        location=self.dummy_geolocation,
        source=self.dummy_file_source,
        file=os.path.abspath("samples/multiple_ind_cat.csv"))
def setUp(self):
    """Load country geodata, then build the dummy source/geolocation/file fixtures."""
    country_importer = CountryImport()
    country_importer.update_polygon()
    country_importer.update_alt_name()

    # Fixture objects shared by the tests in this case.
    # NOTE(review): tag 'Austrlia' is reproduced as-is from the original.
    self.dummy_file_source = factory.FileSourceFactory(name='hello')
    self.dummy_geolocation = factory.GeolocationFactory(
        tag='Austrlia',
        iso2='au',
        iso3='aus',
        object_id=4,
        content_type_id=15,
        type='country')
    self.dummy_file = factory.FileFactory(
        title="test",
        description="test",
        contains_subnational_data=True,
        organisation="test",
        maintainer="test",
        date_of_dataset="2009-01-01",
        methodology="test",
        define_methodology="test",
        update_frequency="test",
        comments="test",
        accessibility="p",
        data_quality="test",
        number_of_rows=1,
        file_types="csv",
        location=self.dummy_geolocation,
        source=self.dummy_file_source,
        file=os.path.abspath('samples/two_mesure_values_num_date.csv'))
class CountryImportTestCase(TestCase):
    """Unit tests for CountryImport's JSON-driven update methods."""

    def setUp(self):
        self.country_import = CountryImport()
        self.country = CountryFactory.create(code="AF")

    def test_update_polygon(self):
        """A mocked GeoJSON feed should attach a polygon to the country."""
        geojson = {
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                "id": "AFG",
                "properties": {"name": "Afghanistan", "iso2": "AF"},
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [
                        [[61.210817, 35.650072], [62.230651, 35.270664]]
                    ],
                },
            }],
        }
        self.country_import.get_json_data = MagicMock(return_value=geojson)

        self.country_import.update_polygon()

        self.assertEqual(1, self.country_import.get_json_data.call_count)
        self.assertEqual(1, Country.objects.all().count())
        updated = Country.objects.all()[0]
        self.assertIsNotNone(updated.polygon)
        self.assertEqual(updated.code, self.country.code)

    def test_update_country_center(self):
        """Latitude/longitude strings should become the center point (y/x)."""
        centers = {"AF": {"latitude": "42.30", "longitude": "1.30"}, }
        self.country_import.get_json_data = MagicMock(return_value=centers)

        self.country_import.update_country_center()

        updated = Country.objects.all()[0]
        self.assertEqual(updated.center_longlat.y, 42.3)
        self.assertEqual(updated.center_longlat.x, 1.3)

    def test_update_regions(self):
        """A DAC region code in the feed should link the country to the region."""
        vocabulary = vocabulary_factory.RegionVocabularyFactory()
        region = RegionFactory.create(code='689', region_vocabulary=vocabulary)
        feed = [{
            "country_name": "Afghanistan",
            "iso2": "AF",
            "dac_region_code": "689",
        }, ]
        self.country_import.get_json_data = MagicMock(return_value=feed)

        self.country_import.update_regions()

        updated = Country.objects.all()[0]
        self.assertEqual(updated.region, region)
class CountryImportTestCase(TestCase):
    """Unit tests for CountryImport's JSON-driven update methods."""

    def setUp(self):
        self.country_import = CountryImport()
        self.country = CountryFactory.create(code="AF")

    def test_update_polygon(self):
        """A mocked GeoJSON feed should attach a polygon to the country."""
        geojson = {
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                "id": "AFG",
                "properties": {"name": "Afghanistan", "iso2": "AF"},
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [
                        [[61.210817, 35.650072], [62.230651, 35.270664]]
                    ],
                },
            }],
        }
        self.country_import.get_json_data = MagicMock(return_value=geojson)

        self.country_import.update_polygon()

        self.assertEqual(1, self.country_import.get_json_data.call_count)
        self.assertEqual(1, Country.objects.all().count())
        updated = Country.objects.all()[0]
        self.assertIsNotNone(updated.polygon)
        self.assertEqual(updated.code, self.country.code)

    def test_update_country_center(self):
        """Latitude/longitude strings should become the center point (y/x)."""
        centers = {"AF": {"latitude": "42.30", "longitude": "1.30"}, }
        self.country_import.get_json_data = MagicMock(return_value=centers)

        self.country_import.update_country_center()

        updated = Country.objects.all()[0]
        self.assertEqual(updated.center_longlat.y, 42.3)
        self.assertEqual(updated.center_longlat.x, 1.3)

    def test_update_regions(self):
        """A DAC region code in the feed should link the country to the region."""
        region = RegionFactory.create(code='689')
        feed = [{
            "country_name": "Afghanistan",
            "iso2": "AF",
            "dac_region_code": "689",
        }, ]
        self.country_import.get_json_data = MagicMock(return_value=feed)

        self.country_import.update_regions()

        updated = Country.objects.all()[0]
        self.assertEqual(updated.region, region)
def test_post_file(self):
    """Upload a source, POST a CSV metadata file, then DELETE the file.

    Fixes: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    is replaced with ``assertEqual``, and the local ``id`` no longer
    shadows the builtin.
    """
    # Country geodata must exist so the uploaded file's location resolves.
    ci = CountryImport()
    ci.update_polygon()
    ci.update_alt_name()

    ''' Test 0: Upload Source '''
    res = self.c.post('/api/metadata/sources/?format=json', {
        'name': 'The one',
    })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])
    source_id = res.json()['id']

    ''' Test 1: Upload file '''
    with open('samples/successful_upload_test.csv') as fp:
        res = self.c.post(
            '/api/metadata/?format=json', {
                'file': fp,
                'title': 'temp title',
                'description': 'temp description',
                'contains_subnational_data': True,
                'organisation': 'ZZ',
                'maintainer': 'kieran',
                'date_of_dataset': '2009-08-06',
                'methodology': 'Testing',
                'define_methodology': 'Really tesring',
                'update_frequency': 'All the time',
                'comments': 'Good stuff',
                'accessibility': 'p',
                'data_quality': 'good',
                'number_of_rows': 200,
                'file_types': 'csv',
                'location': 1,
                'source': source_id,
            })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])

    ''' Test 2: delete File '''
    res_delete = self.c.delete('/api/metadata/{}/?format=json'.format(
        res.json()['id']))
    self.assertEqual(res_delete.status_code, 204)
def setUp(self):
    """Create the importer under test and a baseline country record."""
    self.country = CountryFactory.create(code="AF")
    self.country_import = CountryImport()
def test_check_file_error_correction(self):
    """Exercise the error-correction API: find/replace, row delete, cell
    update, and error retrieval on an uploaded CSV file.

    Fixes: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    is replaced with ``assertEqual``; the local ``id`` no longer shadows
    the builtin; the repeated post+assert sequence is factored into a
    local helper.
    """

    def post_correction():
        # One round-trip against the error-correction endpoint plus the
        # assertions every step repeats.
        response = self.c.post(
            '/api/error-correction/?format=json',
            ERROR_CORRECTION_DICT,
            format='json')
        self.assertEqual(response.status_code, 200, response.json())
        self.assertIsNotNone(response.json()['data_table'])
        self.assertIsNotNone(response.json()['total_amount'])
        return response

    # Country geodata must exist before files can be located.
    ci = CountryImport()
    ci.update_polygon()
    ci.update_alt_name()

    ''' Test 0: Upload Source '''
    res = self.c.post('/api/metadata/sources/?format=json', {
        'name': 'The one',
    })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])
    source_id = res.json()['id']

    ''' Test 1: Upload file '''
    with open('samples/check_file_valid_test_success.csv') as fp:
        res_file_upload = self.c.post(
            '/api/metadata/?format=json', {
                'file': fp,
                'title': 'temp title',
                'description': 'temp description',
                'contains_subnational_data': True,
                'organisation': 'ZZ',
                'maintainer': 'kieran',
                'date_of_dataset': '2009-08-06',
                'methodology': 'Testing',
                'define_methodology': 'Really tesring',
                'update_frequency': 'All the time',
                'comments': 'Good stuff',
                'accessibility': 'p',
                'data_quality': 'good',
                'number_of_rows': 200,
                'file_types': 'csv',
                'location': 1,
                'source': source_id,
            })
    self.assertEqual(res_file_upload.status_code, 201,
                     res_file_upload.json())
    self.assertIsNotNone(res_file_upload.json()['id'])

    res_check_file_valid = self.c.post(
        '/api/validate/check_file_valid/?format=json', {
            'id': res_file_upload.json()['id'],
        },
        format='json')
    self.assertEqual(res_check_file_valid.status_code, 200,
                     res_check_file_valid.json())
    self.assertEqual(res_check_file_valid.json()['success'], 1)

    # Baseline error-correction request for the uploaded file.
    ERROR_CORRECTION_DICT['file_id'] = res_file_upload.json()['id']
    post_correction()

    # Find and replace: '31' -> '41' in the 'Element Code' column.
    ERROR_CORRECTION_DICT['filter_column_heading'] = 'Element Code'
    ERROR_CORRECTION_DICT['find_value'] = '31'
    ERROR_CORRECTION_DICT['replace_value'] = '41'
    ERROR_CORRECTION_DICT['filter_toggle'] = True
    ERROR_CORRECTION_DICT['replace_pressed'] = True
    res_file_error_correction = post_correction()
    self.assertEqual(
        json.loads(res_file_error_correction.json()['data_table'])[0]
        ['Element Code'], '41')

    # Delete row with key 2; three rows should remain.
    ERROR_CORRECTION_DICT['delete'] = True
    ERROR_CORRECTION_DICT['delete_data']['row_keys'] = [2]
    ERROR_CORRECTION_DICT['filter_toggle'] = False
    ERROR_CORRECTION_DICT['replace_pressed'] = False
    res_file_error_correction = post_correction()
    self.assertEqual(
        len(json.loads(res_file_error_correction.json()['data_table'])), 3)

    # Update a single cell in 'Country or Area' on line 2.
    ERROR_CORRECTION_DICT['delete'] = False
    ERROR_CORRECTION_DICT['update'] = True
    ERROR_CORRECTION_DICT['update_data']['column'] = 'Country or Area'
    ERROR_CORRECTION_DICT['update_data']['line_no'] = 2
    ERROR_CORRECTION_DICT['update_data']['cell_value'] = 'azil'
    res_file_error_correction = post_correction()
    self.assertEqual(
        json.loads(res_file_error_correction.json()['data_table'])[2]
        ['Country or Area'], 'azil')

    # Get errors.
    # TODO: assert on the actual error message contents.
    ERROR_CORRECTION_DICT['error_toggle'] = True
    ERROR_CORRECTION_DICT['update'] = False
    res_file_error_correction = post_correction()
    self.assertIsNotNone(
        res_file_error_correction.json()['error_data']['error_messages'])
"""Seed geodata: country importers first, then subnational (order matters)."""
from geodata.importer.country import CountryImport
# from geodata.importer.region import RegionImport
from geodata.importer.subnational import SubnationalImport

# print('Region data')
# region_importer = RegionImport()
# region_importer.update_region_center()

print('Country data')
country_importer = CountryImport()
country_importer.update_polygon()
country_importer.update_alt_name()
country_importer.update_country_center()
country_importer.update_regions()
country_importer.update_hd_polygons()
country_importer.update_region_polygons_centers()

print('Subnational data')
subnational_importer = SubnationalImport()
subnational_importer.update_polygon()
subnational_importer.update_kenya()
subnational_importer.update_kenya_county_centers()
def update_country_data():
    """Refresh country centers, polygons, and region links.

    NOTE(review): unlike the other variant of this function, this one does
    not call ``update_alt_name`` — preserved as-is; confirm intentional.
    """
    from geodata.importer.country import CountryImport

    importer = CountryImport()
    importer.update_country_center()
    importer.update_polygon()
    importer.update_regions()
def test_file_manual_mapping(self):
    """Upload a subnational CSV, validate it, and manually map its columns.

    Fixes: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    is replaced with ``assertEqual``, and the local ``id`` no longer
    shadows the builtin.
    """
    # Initialise country and subnational geodata.
    ci = CountryImport()
    ci.update_polygon()
    ci.update_alt_name()
    si = SubnationalImport()
    si.update_polygon()

    ''' Test 0: Upload file '''
    res = self.c.post('/api/metadata/sources/?format=json', {
        'name': 'The one',
    })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])
    source_id = res.json()['id']

    ''' Test 1: Upload file '''
    with open('samples/subnational.csv') as fp:
        res = self.c.post(
            '/api/metadata/?format=json', {
                'file': fp,
                'description': 'temp description',
                'title': 'AIDSinfotest.csv',
                'contains_subnational_data': True,
                'organisation': 'ZZ',
                'maintainer': 'kieran',
                'date_of_dataset': '2009-08-06',
                'methodology': 'Testing',
                'define_methodology': 'Really tesring',
                'update_frequency': 'All the time',
                'comments': 'Good stuff',
                'accessibility': 'p',
                'data_quality': 'good',
                'number_of_rows': 200,
                'file_types': 'csv',
                'location': 1,
                'source': source_id,
            })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])

    ''' Test 2: Validate '''
    res_file_validate = self.c.post('/api/validate/?format=json', {
        'id': res.json()['id'],
    }, format='json')
    self.assertEqual(res_file_validate.status_code, 200,
                     res_file_validate.json())
    self.assertIsNotNone(res_file_validate.json()['found_list'])
    self.assertIsNotNone(res_file_validate.json()['missing_list'])
    self.assertIsNotNone(res_file_validate.json()['summary'])

    ''' Test 3: File Manual Mapping '''
    MAPPING_DICT['metadata_id'] = res.json()['id']
    MAPPING_DICT['mapping_dict']['geolocation'] = ['Subnational']
    MAPPING_DICT['mapping_dict']['value'] = ['new infections']
    MAPPING_DICT['filter_headings'] = {'filters': 'filters'}
    # Defaults supplied for columns absent from the CSV.
    MAPPING_DICT['extra_information']['empty_entries'][
        'empty_indicator'] = 'Test subnational'
    MAPPING_DICT['extra_information']['empty_entries'][
        'empty_filter'] = 'Default'
    MAPPING_DICT['extra_information']['empty_entries'][
        'empty_value_format'] = {
            'value_format': 'Numeric'
        }
    MAPPING_DICT['extra_information']['empty_entries'][
        'empty_date'] = '2016'
    res_file_manual_mapping = self.c.post('/api/mapping/?format=json',
                                          MAPPING_DICT,
                                          format='json')
    self.assertEqual(res_file_manual_mapping.status_code, 200,
                     res_file_manual_mapping.json())
    self.assertEqual(res_file_manual_mapping.json()['success'], 1)
def setUp(self):
    """Load country geodata, build fixtures, and run the mapping mutation once."""
    country_importer = CountryImport()
    country_importer.update_polygon()
    country_importer.update_alt_name()

    self.dummy_file_source = factory.FileSourceFactory(
        name='dummy_file_source'
    )
    self.dummy_geolocation = factory.GeolocationFactory(
        tag='Albania',
        iso2='al',
        iso3='alb',
        object_id=4,
        content_type_id=15,
        type='country'
    )
    self.dummy_file = factory.FileFactory(
        title="test",
        description="test",
        contains_subnational_data=True,
        organisation="test",
        maintainer="test",
        date_of_dataset="2009-01-01",
        methodology="test",
        define_methodology="test",
        update_frequency="test",
        comments="test",
        accessibility="p",
        data_quality="test",
        number_of_rows=1,
        file_types="csv",
        location=self.dummy_geolocation,
        source=self.dummy_file_source,
        file=os.path.abspath("samples/AIDSinfotest.csv")
    )

    # Column-to-model mapping for the uploaded test file, serialised the
    # way the GraphQL mutation expects (JSON string inside the input).
    mapping_payload = json.dumps({
        'metadata_id': self.dummy_file.id,
        'mapping_dict': {
            "indicator": ["Indicator"],
            "value_format": ["Unit"],
            "geolocation": ["Area"],
            "value": ["Data Value"],
            "date": ["Time Period"],
            "comment": ["Source"],
            "filters": ["Subgroup"]
        },
        'filter_headings': {"Subgroup": "Subgroup"},
    })
    mutation = """
        mutation mapping($input: MappingMutationInput!) {
            mapping(input: $input) {
                id
                data
            }
        }"""
    schema.execute(
        mutation,
        variable_values={"input": {"data": mapping_payload}})
"""Seed country, region, and subnational geodata, in that order."""
from geodata.importer.country import CountryImport
from geodata.importer.region import RegionImport
from geodata.importer.subnational import SubnationalImport

print('Country data')
country_importer = CountryImport()
country_importer.update_polygon()
country_importer.update_alt_name()

print('Region data')
region_importer = RegionImport()
region_importer.update_region_center()

print('Subnational data')
subnational_importer = SubnationalImport()
subnational_importer.update_polygon()
def test_file_manual_mapping_two_measure_value(self):
    """Map a CSV where the year columns '2018' and '2017' each carry both
    a value and a date (multi-mapped columns).

    Fixes: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    is replaced with ``assertEqual``, and the local ``id`` no longer
    shadows the builtin.
    """
    # Initialise countries.
    ci = CountryImport()
    ci.update_polygon()
    ci.update_alt_name()

    ''' Test 0: Upload file '''
    res = self.c.post('/api/metadata/sources/?format=json', {
        'name': 'The one',
    })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])
    source_id = res.json()['id']

    ''' Test 1: Upload file '''
    with open('samples/two_mesure_values_num_date.csv') as fp:
        res = self.c.post(
            '/api/metadata/?format=json', {
                'file': fp,
                'description': 'temp description',
                'title': 'AIDSinfotest.csv',
                'contains_subnational_data': True,
                'organisation': 'ZZ',
                'maintainer': 'kieran',
                'date_of_dataset': '2009-08-06',
                'methodology': 'Testing',
                'define_methodology': 'Really tesring',
                'update_frequency': 'All the time',
                'comments': 'Good stuff',
                'accessibility': 'p',
                'data_quality': 'good',
                'number_of_rows': 200,
                'file_types': 'csv',
                'location': 1,
                'source': source_id,
            })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])

    ''' Test 2: Validate '''
    res_file_validate = self.c.post('/api/validate/?format=json', {
        'id': res.json()['id'],
    }, format='json')
    self.assertEqual(res_file_validate.status_code, 200,
                     res_file_validate.json())
    self.assertIsNotNone(res_file_validate.json()['found_list'])
    self.assertIsNotNone(res_file_validate.json()['missing_list'])
    self.assertIsNotNone(res_file_validate.json()['summary'])

    ''' Test 3: File Manual Mapping '''
    MAPPING_DICT['metadata_id'] = res.json()['id']
    MAPPING_DICT['mapping_dict']['value'] = ['2018', '2017']
    MAPPING_DICT['mapping_dict']['date'] = ['2018', '2017']
    MAPPING_DICT['mapping_dict']['filters'] = ['Source Type', 'Source']
    MAPPING_DICT['mapping_dict']['geolocation'] = ['Country or Area']
    MAPPING_DICT['filter_headings'] = {
        'Source Type': 'Source',
        'Source': 'Abbrv source'
    }
    # Both year columns double as date and value columns.
    MAPPING_DICT['extra_information']['multi_mapped']['column_heading'] = {
        '2018': 'date',
        '2017': 'date'
    }
    MAPPING_DICT['extra_information']['multi_mapped']['column_values'] = {
        '2018': 'value',
        '2017': 'value'
    }
    MAPPING_DICT['extra_information']['empty_entries'][
        'empty_indicator'] = 'Indicator value'
    MAPPING_DICT['extra_information']['empty_entries'][
        'empty_value_format'] = {
            "2018": "Number",
            "2017": "Rate"
        }
    res_file_manual_mapping = self.c.post('/api/mapping/?format=json',
                                          MAPPING_DICT,
                                          format='json')
    self.assertEqual(res_file_manual_mapping.status_code, 200,
                     res_file_manual_mapping.json())
    self.assertEqual(res_file_manual_mapping.json()['success'], 1)
def test_patch_file_valid(self):
    """Upload a file, then PATCH its metadata twice and verify each change.

    Fixes: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    is replaced with ``assertEqual``; locals ``id``/``id2`` no longer
    shadow the builtin.
    """
    ci = CountryImport()
    ci.update_polygon()
    ci.update_alt_name()

    ''' Test 0: Upload Source '''
    res = self.c.post('/api/metadata/sources/?format=json', {
        'name': 'The one',
    })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])
    source_id = res.json()['id']

    # A second source so the PATCH can re-point the file's source.
    res = self.c.post('/api/metadata/sources/?format=json', {
        'name': 'The none',
    })
    self.assertEqual(res.status_code, 201, res.json())
    self.assertIsNotNone(res.json()['id'])
    second_source_id = res.json()['id']

    ''' Test 1: Upload file '''
    with open('samples/successful_upload_test.csv') as fp:
        res_file_upload = self.c.post(
            '/api/metadata/?format=json', {
                'file': fp,
                'title': 'temp title',
                'description': 'temp description',
                'contains_subnational_data': True,
                'organisation': 'ZZ',
                'maintainer': 'kieran',
                'date_of_dataset': '2009-08-06',
                'methodology': 'Testing',
                'define_methodology': 'Really tesring',
                'update_frequency': 'All the time',
                'comments': 'Good stuff',
                'accessibility': 'p',
                'data_quality': 'good',
                'number_of_rows': 200,
                'file_types': 'csv',
                'location': 1,
                'source': source_id,
            })
    self.assertEqual(res_file_upload.status_code, 201,
                     res_file_upload.json())
    self.assertIsNotNone(res_file_upload.json()['id'])

    res_check_file_valid = self.c.post(
        '/api/validate/check_file_valid/?format=json', {
            'id': res_file_upload.json()['id'],
        },
        format='json')
    self.assertEqual(res_check_file_valid.status_code, 200,
                     res_check_file_valid.json())
    self.assertEqual(res_check_file_valid.json()['success'], 1)

    ''' Step 2: Patch file data '''
    patch_data = {
        "title": "Test1",
        "description": "Description",
        "source": second_source_id,
    }
    res_file_patch = self.c.patch(
        '/api/metadata/{}/?format=json'.format(res_file_upload.json()['id']),
        patch_data,
        format='json')
    self.assertEqual(res_file_patch.status_code, 200,
                     res_file_patch.json())

    ''' Step2: Verify Patch file data '''
    res_file_patch_updated = self.c.get(
        '/api/metadata/{}/?format=json'.format(res_file_upload.json()['id']),
        format='json')
    self.assertEqual(res_file_patch_updated.status_code, 200,
                     res_file_patch_updated.json())
    self.assertEqual(res_file_patch_updated.json()['title'],
                     patch_data['title'])
    self.assertEqual(res_file_patch_updated.json()['description'],
                     patch_data['description'])
    self.assertEqual(res_file_patch_updated.json()['source']['id'],
                     patch_data['source'])

    ''' Step 3: Patch file data Update '''
    patch_data = {
        "title": "Test2",
        "description": "Description2",
        "location": 2
    }
    res_file_patch = self.c.patch(
        '/api/metadata/{}/?format=json'.format(res_file_upload.json()['id']),
        patch_data,
        format='json')
    self.assertEqual(res_file_patch.status_code, 200,
                     res_file_patch.json())

    ''' Step4: Verify Patch file data update '''
    res_file_patch_updated = self.c.get(
        '/api/metadata/{}/?format=json'.format(res_file_upload.json()['id']),
        format='json')
    self.assertEqual(res_file_patch_updated.status_code, 200,
                     res_file_patch_updated.json())
    self.assertEqual(res_file_patch_updated.json()['title'],
                     patch_data['title'])
    self.assertEqual(res_file_patch_updated.json()['description'],
                     patch_data['description'])
    self.assertEqual(res_file_patch_updated.json()['location'],
                     patch_data['location'])
def setUp(self):
    """Import country geodata, build fixtures, and run the mapping pipeline."""
    country_importer = CountryImport()
    country_importer.update_polygon()
    country_importer.update_alt_name()

    self.dummy_file_source = factory.FileSourceFactory(
        name='dummy_file_source')
    self.dummy_geolocation = factory.GeolocationFactory(
        tag='Albania',
        iso2='al',
        iso3='alb',
        object_id=4,
        content_type_id=15,
        type='country')
    self.dummy_file = factory.FileFactory(
        title="test",
        description="test",
        contains_subnational_data=True,
        organisation="test",
        maintainer="test",
        date_of_dataset="2009-01-01",
        methodology="test",
        define_methodology="test",
        update_frequency="test",
        comments="test",
        accessibility="p",
        data_quality="test",
        number_of_rows=1,
        file_types="csv",
        location=self.dummy_geolocation,
        source=self.dummy_file_source,
        file=os.path.abspath("samples/AIDSinfotest.csv"))

    # No multi-mapped columns and no empty-entry overrides for this file.
    extra_information = {
        "empty_entries": {
            "empty_indicator": '',
            "empty_geolocation": {"value": '', "type": ''},
            "empty_filter": '',
            "empty_value_format": {},
            "empty_date": ''
        },
        "multi_mapped": {
            "column_heading": {},
            "column_values": {}
        },
        "point_based_info": {
            "coord": {"lat": '', "lon": ''},
            "subnational": '',
            "country": '',
            "type": ''
        }
    }
    begin_mapping({
        'metadata_id': self.dummy_file.id,
        'filter_headings': {"Subgroup": "Subgroup"},
        "extra_information": extra_information,
        'mapping_dict': {
            "indicator": ["Indicator"],
            "value_format": ["Unit"],
            "geolocation": ["Area"],
            "value": ["Data Value"],
            "date": ["Time Period"],
            "comment": ["Source"],
            "filters": ["Subgroup"]
        }
    })