def test_missing_shx(self):
    """A zip that lacks its .shx companion file raises MissingFiles."""
    fixture = os.path.join(BASE_DIR, "tests", "fixtures", "missing_shx.zip")
    archive = zipfile.ZipFile(fixture)
    with self.assertRaises(MissingFiles) as context:
        get_shapefile(archive)
    # The raised error should carry the canonical MISSING_FILE message.
    self.assertEqual(MISSING_FILE, context.exception.message)
def test_shape_file_not_found(self):
    """ShapeFileNotFound carries the expected NO_SHAPEFILE message."""
    fixture = os.path.join(BASE_DIR, "tests", "fixtures", "missing_shp.zip")
    archive = zipfile.ZipFile(fixture)
    with self.assertRaises(ShapeFileNotFound) as context:
        get_shapefile(archive)
    # The raised error should carry the canonical NO_SHAPEFILE message.
    self.assertEqual(NO_SHAPEFILE, context.exception.message)
def test_unnecessary_files(self):
    """UnnecessaryFiles carries the expected UNNECESSARY_FILE message."""
    fixture = os.path.join(
        BASE_DIR, "tests", "fixtures", "test_unnecessary_files.zip")
    archive = zipfile.ZipFile(fixture)
    with self.assertRaises(UnnecessaryFiles) as context:
        get_shapefile(archive)
    # The raised error should carry the canonical UNNECESSARY_FILE message.
    self.assertEqual(UNNECESSARY_FILE, context.exception.message)
def test_missing_files(self):
    """MissingFiles carries the expected MISSING_FILE message."""
    fixture = os.path.join(
        BASE_DIR, 'tests', 'fixtures', 'test_missing_files.zip')
    archive = zipfile.ZipFile(fixture)
    with self.assertRaises(MissingFiles) as context:
        get_shapefile(archive)
    # The raised error should carry the canonical MISSING_FILE message.
    self.assertEqual(MISSING_FILE, context.exception.message)
def test_get_polygons_ignore_invalid(self):
    """get_polygons skips invalid geometries in the Kenya fixture."""
    fixture = os.path.join(BASE_DIR, "tests", "fixtures", "kenya.zip")
    archive = zipfile.ZipFile(fixture)
    shapefile = get_shapefile(archive)
    with TemporaryDirectory() as temp_dir:
        # Unpack the archive so the shapefile exists on disk, then turn
        # the extracted .shp into a GDAL DataSource.
        archive.extractall(temp_dir)
        data_source = DataSource(os.path.join(temp_dir, shapefile))
        layer = data_source[0]
        # Collect geometries for every polygon feature in the layer.
        geom_object_list = layer.get_geoms()
        polygons = get_polygons(geom_object_list)
        # 379 polygons are expected once invalid ones are dropped.
        self.assertEqual(379, len(polygons))
        for polygon in polygons:
            self.assertTrue(isinstance(polygon, Polygon))
def test_get_polygons(self):
    """get_polygons returns the single polygon in the Samburu fixture."""
    fixture = os.path.join(
        BASE_DIR, "tests", "fixtures", "SamburuCentralPolygon.zip")
    archive = zipfile.ZipFile(fixture)
    shapefile = get_shapefile(archive)
    with TemporaryDirectory() as temp_dir:
        # Unpack the archive so the shapefile exists on disk, then turn
        # the extracted .shp into a GDAL DataSource.
        archive.extractall(temp_dir)
        data_source = DataSource(os.path.join(temp_dir, shapefile))
        layer = data_source[0]
        # Collect geometries for every polygon feature in the layer.
        geom_object_list = layer.get_geoms()
        polygons = get_polygons(geom_object_list)
        self.assertEqual(1, len(polygons))
        for polygon in polygons:
            self.assertTrue(isinstance(polygon, Polygon))
def test_get_shapefile(self):
    """get_shapefile resolves valid archives and rejects malformed ones."""
    def open_fixture(name):
        # Helper: open a zip archive from the test fixtures directory.
        return zipfile.ZipFile(
            os.path.join(BASE_DIR, "tests", "fixtures", name))

    # Valid archives resolve to the path of the contained .shp entry,
    # including nested directories and dotted base names.
    self.assertEqual(
        get_shapefile(open_fixture("test_shapefile.zip")),
        "test_shapefile.shp")
    self.assertEqual(
        get_shapefile(open_fixture("kenya.zip")),
        "KEN/KEN.Divisions.shp")
    self.assertEqual(
        get_shapefile(open_fixture("dotted_names.zip")),
        "example.test.shp")
    self.assertEqual(
        get_shapefile(open_fixture("SamburuCentralPolygon.zip")),
        "SamburuCentralPolygon/mytestfile.shp")
    # No .shp entry at all -> ShapeFileNotFound.
    with self.assertRaises(ShapeFileNotFound):
        get_shapefile(open_fixture("missing_shp.zip"))
    # Required companion files absent -> MissingFiles.
    with self.assertRaises(MissingFiles):
        get_shapefile(open_fixture("test_missing_files.zip"))
    # Surplus entries are only rejected when the strict file-count
    # setting is switched on.
    with self.settings(
            TASKING_CHECK_NUMBER_OF_FILES_IN_SHAPEFILES_DIR=True):
        with self.assertRaises(UnnecessaryFiles):
            get_shapefile(open_fixture("test_unnecessary_files.zip"))
def test_get_shapefile(self):
    """get_shapefile resolves a valid archive and rejects malformed ones."""
    def open_fixture(name):
        # Helper: open a zip archive from the test fixtures directory.
        return zipfile.ZipFile(
            os.path.join(BASE_DIR, 'tests', 'fixtures', name))

    # A valid archive resolves to the name of the contained .shp entry.
    self.assertEqual(
        get_shapefile(open_fixture('test_shapefile.zip')),
        'test_shapefile.shp')
    # No .shp entry at all -> ShapeFileNotFound.
    with self.assertRaises(ShapeFileNotFound):
        get_shapefile(open_fixture('test_shapefile_not_found.zip'))
    # Required companion files absent -> MissingFiles.
    with self.assertRaises(MissingFiles):
        get_shapefile(open_fixture('test_missing_files.zip'))
    # More entries than a shapefile needs -> UnnecessaryFiles.
    with self.assertRaises(UnnecessaryFiles):
        get_shapefile(open_fixture('test_unnecessary_files.zip'))
def to_internal_value(self, value):  # pylint: disable=too-many-locals
    """
    Convert an uploaded shapefile zip archive into a MultiPolygon.

    Accepts an uploaded file object, a file-like object, or a dict of
    raw bytes (e.g. an ArrayBuffer payload). Returns a MultiPolygon
    built from every polygon found in the archive's shapefile, or the
    input unchanged when it is None.

    Raises serializers.ValidationError when the archive is malformed,
    contains no valid polygons, or cannot be assembled into a
    MultiPolygon.
    """
    if isinstance(value, dict):
        # Raw binary data buffer (i.e. an ArrayBuffer): the dict should
        # be ordered, and its values are the bytes of the zip archive.
        value = BytesIO(bytes(value.values()))
    multipolygon = value
    if multipolygon is not None:
        # zipfile accepts a path string, a file-like object or a
        # path-like object. Disk-backed uploads expose
        # temporary_file_path(); in-memory objects do not.
        try:
            zip_file = zipfile.ZipFile(value.temporary_file_path())
        except AttributeError:
            zip_file = zipfile.ZipFile(value)
        # Locate the .shp entry inside the archive.
        try:
            shpfile = get_shapefile(zip_file)
        except (ShapeFileNotFound, MissingFiles, UnnecessaryFiles) as exp:
            # pylint: disable=no-member
            # Chain the original error so the cause survives debugging.
            raise serializers.ValidationError(exp.message) from exp
        # Extract into a temporary directory so GDAL can read the
        # shapefile (and its companion files) from disk.
        with TemporaryDirectory() as temp_dir:
            tpath = temp_dir
            zip_file.extractall(tpath)
            # Concatenate the shapefile path and open it as a DataSource.
            shp_path = path.join(tpath, shpfile)
            data_source = DataSource(shp_path)
            layer = data_source[0]
            # Collect geometries for all polygons in the DataSource.
            polygon_data = layer.get_geoms()
            polygons = get_polygons(polygon_data)
            if not polygons:
                # Not inside an except block, so use error(), not
                # exception(), to avoid logging a bogus traceback.
                LOGGER.error(NO_VALID_POLYGONS)
                raise serializers.ValidationError(NO_VALID_POLYGONS)
            try:
                multipolygon = MultiPolygon(polygons)
            except TypeError as exc:
                # This shapefile is just not valid for some reason.
                LOGGER.exception(exc)
                raise serializers.ValidationError(INVALID_SHAPEFILE) from exc
    return multipolygon
def to_internal_value(self, value):
    """
    Convert an uploaded shapefile zip archive into a MultiPolygon.

    Accepts an uploaded file object, a file-like object, or a dict of
    raw bytes (e.g. an ArrayBuffer payload). Returns None unchanged.
    """
    if isinstance(value, dict):
        # Raw binary data buffer (i.e. an ArrayBuffer): the dict should
        # be ordered, and its values are the bytes of the zip archive.
        value = BytesIO(bytes(value.values()))
    multipolygon = value
    if multipolygon is None:
        return multipolygon
    # zipfile accepts a path string, a file-like object or a path-like
    # object. Disk-backed uploads expose temporary_file_path();
    # in-memory objects do not.
    try:
        zip_file = zipfile.ZipFile(value.temporary_file_path())
    except AttributeError:
        zip_file = zipfile.ZipFile(value)
    # Name of the .shp entry inside the archive.
    shpfile = get_shapefile(zip_file)
    # Extract into a temporary directory so GDAL can read the shapefile
    # (and its companion files) from disk.
    with TemporaryDirectory() as temp_dir:
        zip_file.extractall(temp_dir)
        shp_path = "{tpath}/{shp}".format(tpath=temp_dir, shp=shpfile)
        layer = DataSource(shp_path)[0]
        # Collect the GEOS geometry of every polygon in the layer.
        polygons = [geom.geos for geom in layer.get_geoms()]
        multipolygon = MultiPolygon(polygons)
    return multipolygon
def test_get_polygons_nested(self):
    """get_polygons unpacks polygons nested inside multipolygons."""
    fixture = os.path.join(BASE_DIR, "tests", "fixtures", "kenya.zip")
    archive = zipfile.ZipFile(fixture)
    shapefile = get_shapefile(archive)
    with TemporaryDirectory() as temp_dir:
        # Unpack the archive so the shapefile exists on disk, then turn
        # the extracted .shp into a GDAL DataSource.
        archive.extractall(temp_dir)
        data_source = DataSource(os.path.join(temp_dir, shapefile))
        layer = data_source[0]
        # Collect geometries for every polygon feature in the layer.
        geom_object_list = layer.get_geoms()
        polygons = get_polygons(geom_object_list)
        # 431 polygons are expected in total.
        self.assertEqual(431, len(polygons))
        for polygon in polygons:
            self.assertTrue(isinstance(polygon, Polygon))
        # Now exercise get_polygons on just the nested multipolygons.
        multipolygon_list = [
            geom for geom in geom_object_list
            if isinstance(geom, geometries.MultiPolygon)
        ]
        self.assertEqual(12, len(multipolygon_list))
        other_polygons = get_polygons(multipolygon_list)
        # 52 polygons are expected from the nested multipolygons alone.
        self.assertEqual(52, len(other_polygons))
        for polygon in other_polygons:
            self.assertTrue(isinstance(polygon, Polygon))
        # Sanity check: 379 top-level polygons (excluding nested
        # multipolygons — see `test_get_polygons_ignore_invalid`) plus
        # the nested ones should add up to the grand total.
        self.assertEqual(431, 379 + len(other_polygons))
def to_internal_value(self, value):
    """
    Convert an uploaded shapefile zip archive into a MultiPolygon.

    Returns None unchanged when no value is supplied.
    """
    multipolygon = value
    if multipolygon is None:
        return multipolygon
    # zipfile accepts a path string, a file-like object or a path-like
    # object. Disk-backed uploads expose temporary_file_path();
    # in-memory objects do not.
    try:
        zip_file = zipfile.ZipFile(value.temporary_file_path())
    except AttributeError:
        zip_file = zipfile.ZipFile(value)
    # Name of the .shp entry inside the archive.
    shpfile = get_shapefile(zip_file)
    # Extract into a temporary directory so GDAL can read the shapefile
    # (and its companion files) from disk.
    with TemporaryDirectory() as temp_dir:
        zip_file.extractall(temp_dir)
        shp_path = "{tpath}/{shp}".format(tpath=temp_dir, shp=shpfile)
        layer = DataSource(shp_path)[0]
        # Collect the GEOS geometry of every polygon in the layer.
        polygons = [geom.geos for geom in layer.get_geoms()]
        multipolygon = MultiPolygon(polygons)
    return multipolygon