def test_duplicate_imports(self):
    """Re-import the same layer and verify the generated name increments."""
    path = test_file('boxes_with_date_iso_date.zip')
    importer = OGRImport(path)
    # Import the identical configuration twice; the second import must not
    # clash with the first, so the importer appends a numeric suffix.
    first_layers = importer.handle({'index': 0, 'name': 'test'})
    second_layers = importer.handle({'index': 0, 'name': 'test'})
    self.assertEqual(first_layers[0][0], 'test')
    self.assertEqual(second_layers[0][0], 'test0')
def import_file(self, in_file, configuration_options=None):
    """Import ``in_file`` via ogr2ogr and return the resulting layers.

    Args:
        in_file: Path to the file to import; asserted to exist.
        configuration_options: Optional list of per-layer configuration
            dicts forwarded to ``OGRImport.handle``.

    Returns:
        The list of layers produced by the import.
    """
    # Use a None sentinel instead of a mutable default argument: a
    # shared `[]` default would be mutated across calls if a callee
    # ever appended to it.
    if configuration_options is None:
        configuration_options = []
    self.assertTrue(os.path.exists(in_file))
    # run ogr2ogr
    gi = OGRImport(in_file)
    layers = gi.handle(configuration_options=configuration_options)
    return layers
def test_arcgisjson(self):
    """Tests the import from an ArcGIS REST (GeoServices JSON) endpoint.

    The previous docstring incorrectly described this as a WFS endpoint;
    the URL is an ArcGIS FeatureServer query returning f=json.
    """
    endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
    gi = OGRImport(endpoint)
    layers = gi.handle(configuration_options=[{'index': 0}])
    for result in layers:
        layer = Layer.objects.get(name=result[0])
        # Imported layers are expected to land in the configured PostGIS
        # datastore, reprojected to EPSG:4326.
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
def import_file(self, path, configs=None):
    """Run an ogr2ogr import of ``path`` and return the resulting layers."""
    # Guard against the shared-mutable-default pitfall.
    configs = [] if configs is None else configs
    self.assertTrue(os.path.exists(path), path)
    # Delegate the actual conversion to OGRImport (ogr2ogr wrapper).
    importer = OGRImport(path)
    return importer.handle(configuration_options=configs)
def test_utf8(self):
    """Verify UTF-8 attribute values survive the import round-trip."""
    shapefile = test_file('china_provinces.shp')
    imported = self.generic_import(shapefile)
    importer = OGRImport(shapefile)
    target, _ = importer.open_target_datastore(importer.target_store)
    query = "select NAME_CH from {0} where NAME_PY = 'An Zhou'".format(imported.name)
    rows = target.ExecuteSQL(query)
    row = rows.GetFeature(0)
    # The Chinese province name must come back intact from the datastore.
    self.assertEqual(row.GetField('name_ch'), '安州')
def test_arcgisjson(self):
    """Tests the import from an ArcGIS REST (GeoServices JSON) endpoint.

    The previous docstring incorrectly described this as a WFS endpoint;
    the URL is an ArcGIS FeatureServer query returning f=json.
    """
    endpoint = 'http://sampleserver6.arcgisonline.com/arcgis/rest/services/Water_Network/FeatureServer/16/query'\
        '?where=objectid=326&outfields=*&f=json'
    ogr = OGRImport(endpoint)
    configs = [{'index': 0}]
    layers = ogr.handle(configuration_options=configs)
    for result in layers:
        layer = Layer.objects.get(name=result[0])
        # Imported layers are expected to land in the configured PostGIS
        # datastore, reprojected to EPSG:4326.
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
def test_duplicate_imports(self):
    """Import one layer twice; the second import must get a suffixed name."""
    zip_path = os.path.join(
        os.path.dirname(__file__),
        '..',
        'importer-test-files',
        'boxes_with_date_iso_date.zip',
    )
    importer = OGRImport(zip_path)
    # Identical configuration both times; the importer must deduplicate
    # the layer name on the second run.
    run_one = importer.handle({'index': 0, 'name': 'test'})
    run_two = importer.handle({'index': 0, 'name': 'test'})
    self.assertEqual(run_one[0][0], 'test')
    self.assertEqual(run_two[0][0], 'test0')
def test_utf8(self):
    """Tests that UTF-8 characters in attributes survive the import.

    Imports a shapefile containing Chinese province names and checks
    that a non-ASCII attribute value is returned intact by the target
    datastore.
    """
    filename = os.path.join(
        os.path.dirname(__file__),
        '..',
        'importer-test-files',
        'china_provinces.shp',
    )
    layer = self.generic_import('china_provinces.shp')
    gi = OGRImport(filename)
    ds, insp = gi.open_target_datastore(gi.target_store)
    # The redundant str() wrapper around an already-str expression was
    # removed; the resulting SQL text is byte-identical.
    sql = "select NAME_CH from {0} where NAME_PY = 'An Zhou'".format(layer.name)
    res = ds.ExecuteSQL(sql)
    feat = res.GetFeature(0)
    self.assertEqual(feat.GetField('name_ch'), "安州")
def test_import_file_uses_configured_layername(self):
    """Check that a custom layer name overrides the UploadLayer name.

    When a custom layer name is supplied, it should replace the
    UploadLayer name and be used as the PostGIS table name.
    """
    # --- Prerequisites for importing layers.
    # my_states.gpkg is a straightforward 1-layer vector package.
    source_name = 'my_states.gpkg'
    source_path = os.path.join(_TEST_FILES_DIR, source_name)
    # Work on a temporary copy: the upload/configure process removes the
    # file and we want to keep our test fixture.
    temp_path = os.path.join('/tmp', source_name)
    shutil.copyfile(source_path, temp_path)
    # upload & configure_upload expect closed file objects — heritage
    # from originally being closely tied to a view passing request.Files.
    handle = open(temp_path, 'rb')
    handle.close()
    file_objects = [handle]
    upload = self.upload(file_objects, self.admin_user)
    self.configure_upload(upload, file_objects)
    # configure_upload() should yield a single UploadFile, which in turn
    # has a single related UploadLayer.
    upload_file = upload.uploadfile_set.first()
    upload_layer = upload_file.uploadlayer_set.first()
    # --- Actually do the import (just import_file(), not handlers).
    custom_layername = 'my_custom_layer'
    configuration_options = {
        'upload_layer_id': upload_layer.id,
        'index': 0,
        'layer_name': custom_layername,
    }
    importer = OGRImport(upload_file.file.name, upload_file=upload_file)
    importer.import_file(configuration_options=configuration_options)
    # --- Verify PostGIS has a table named after the custom layer name
    # and none named after the default set during configure_upload().
    expected_tablename = custom_layername
    default_tablename = upload_layer.layer_name
    with connections['datastore'].cursor() as cursor:
        sql = """
            SELECT tablename FROM pg_catalog.pg_tables
            WHERE schemaname != 'pg_catalog'
            AND schemaname != 'information_schema';
        """
        cursor.execute(sql)
        tables = [row[0] for row in cursor.fetchall()]
    self.assertIn(expected_tablename, tables)
    self.assertNotIn(default_tablename, tables)
def test_wfs(self):
    """Verify layers imported from a WFS endpoint register correctly."""
    wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
    importer = OGRImport(wfs)
    layer_configs = [
        {'layer_name': 'og:bugsites'},
        {'layer_name': 'topp:states'},
    ]
    imported = importer.handle(configuration_options=layer_configs)
    for entry in imported:
        layer = Layer.objects.get(name=entry[0])
        # Each imported layer must land in the configured datastore
        # with the expected projection and store type.
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
def test_wfs(self):
    """Verify layers imported from a WFS endpoint register correctly."""
    wfs = 'WFS:http://demo.geo-solutions.it/geoserver/tiger/wfs'
    importer = OGRImport(wfs)
    layer_configs = [
        {'layer_name': 'tiger:giant_polygon'},
        {'layer_name': 'tiger:poi'},
    ]
    imported = importer.handle(configuration_options=layer_configs)
    for entry in imported:
        layer = Layer.objects.get(name=entry[0])
        # Each imported layer must land in the configured datastore
        # with the expected projection and store type.
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
def test_import_file_skips_duplicate_configured_layername(self):
    """Check that a duplicate custom layer name falls back to the default.

    If the requested custom layer name is already in use, the unique
    layer name created by configure_upload() & stored in UploadLayer is
    used as the PostGIS table name instead.
    """
    # --- Prerequisites for importing layers.
    # my_states.gpkg is a straightforward 1-layer vector package.
    source_name = 'my_states.gpkg'
    source_path = os.path.join(_TEST_FILES_DIR, source_name)
    # Upload two copies so the second can collide with the first.
    # Work on temporary copies: the upload/configure process removes the
    # file and we want to keep our test fixture.
    stem, extension = source_name.split('.')
    temp_path1 = os.path.join('/tmp', '{}-1.{}'.format(stem, extension))
    temp_path2 = os.path.join('/tmp', '{}-2.{}'.format(stem, extension))
    shutil.copyfile(source_path, temp_path1)
    shutil.copyfile(source_path, temp_path2)
    # upload & configure_upload expect closed file objects — heritage
    # from originally being closely tied to a view passing request.Files.
    handle1 = open(temp_path1, 'rb')
    handle1.close()
    file_objects = [handle1]
    upload1 = self.upload(file_objects, self.admin_user)
    self.configure_upload(upload1, file_objects)
    handle2 = open(temp_path2, 'rb')
    handle2.close()
    file_objects = [handle2]
    upload2 = self.upload(file_objects, self.admin_user)
    self.configure_upload(upload2, file_objects)
    upload_file1 = upload1.uploadfile_set.first()
    upload_layer1 = upload_file1.uploadlayer_set.first()
    upload_file2 = upload2.uploadfile_set.first()
    upload_layer2 = upload_file2.uploadlayer_set.first()
    # --- Import the first file's layer under its default name.
    configuration_options = {'upload_layer_id': upload_layer1.id, 'index': 0}
    importer = OGRImport(upload_file1.file.name, upload_file=upload_file1)
    importer.import_file(configuration_options=configuration_options)
    # --- Try importing the second file's layer under the SAME name as
    # the first file's layer.
    configuration_options = {
        'upload_layer_id': upload_layer2.id,
        'index': 0,
        'layer_name': upload_layer1.layer_name,
    }
    importer = OGRImport(upload_file2.file.name, upload_file=upload_file2)
    importer.import_file(configuration_options=configuration_options)
    # --- PostGIS should contain a table matching upload_layer2's
    # default (unique) layer name, proving the duplicate was skipped.
    expected_tablename = upload_layer2.layer_name
    with connections['datastore'].cursor() as cursor:
        sql = """
            SELECT tablename FROM pg_catalog.pg_tables
            WHERE schemaname != 'pg_catalog'
            AND schemaname != 'information_schema';
        """
        cursor.execute(sql)
        tables = [row[0] for row in cursor.fetchall()]
    self.assertIn(expected_tablename, tables)