def create_geogig_datastore(store_name):
    """
    Args:
        store_name: name of geogig repo
    Returns: None
    """
    ogc_server = get_ogc_server()
    url = "{}/rest".format(ogc_server.get('LOCATION').rstrip('/'))
    workspace_name = "geonode"
    workspace_uri = "http://www.geonode.org/"
    cat = Catalog(url)

    # Check if the local workspace exists and create it if not
    workspace = cat.get_workspace(workspace_name)
    if workspace is None:
        cat.create_workspace(workspace_name, workspace_uri)
        print "Workspace " + workspace_name + " created."

    # Get the list of datastores
    datastores = cat.get_stores()
    datastore = None

    # Check if the remote datastore exists on the local system
    for ds in datastores:
        if ds.name.lower() == store_name.lower():
            datastore = ds

    if not datastore:
        datastore = cat.create_datastore(store_name, workspace_name)
        datastore.connection_parameters.update(
            geogig_repository=os.path.join(
                ogc_server.get('GEOGIG_DATASTORE_DIR'), store_name))
        cat.save(datastore)
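A minimal usage sketch for the helper above; the repository name is a hypothetical placeholder, and a GeoNode-style get_ogc_server() settings dict (with LOCATION and GEOGIG_DATASTORE_DIR keys) is assumed to be configured.

# Hypothetical example: create (or reuse) a GeoGig datastore named "my_repo"
# in the "geonode" workspace, using the settings returned by get_ogc_server().
create_geogig_datastore("my_repo")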
def setup_data():
    try:
        cat = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)

        # Create the workspaces in geoserver
        exp_ws = cat.create_workspace(GS_EXPOSURE_WS, 'exposure')
        haz_ws = cat.create_workspace(GS_HAZARD_WS, 'hazard')
        impact_ws = cat.create_workspace(GS_IMPACT_WS, 'impact')
        cat.set_default_workspace(GS_IMPACT_WS)

        exposure_dir = os.path.join(DATA_PATH, 'exposure')
        hazard_dir = os.path.join(DATA_PATH, 'hazard')

        upload_vector(str(os.path.join(exposure_dir, 'quiapo_buildings.shp')), exp_ws)
        upload_vector(str(os.path.join(exposure_dir, 'tacloban_buildings.shp')), exp_ws)
        upload_raster(str(os.path.join(exposure_dir, 'tacloban_pop.tif')), exp_ws)

        upload_vector(str(os.path.join(hazard_dir, 'tacloban_100.shp')), haz_ws)
        upload_vector(str(os.path.join(hazard_dir, 'quiapo_100.shp')), haz_ws)
        upload_vector(str(os.path.join(hazard_dir, 'tacloban_stormsurge.shp')), haz_ws)

        set_style('tacloban_pop', "Population Exposure")
        set_style('quiapo_100', "Flood Hazard Quiapo")
        set_style('tacloban_100', "Flood Hazard")
        set_style('tacloban_stormsurge', "Flood Hazard")
        set_style('quiapo_buildings', "Building Footprints")
        set_style('tacloban_buildings', "Building Footprints")
    except:
        print 'Error in setting up geoserver test environment.'
        raise
class UploaderTests(MapStoryTestMixin): """ Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): settings = connection.settings_dict params = {'database': settings['NAME'], 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': "True"} store = catalog.create_datastore( settings['NAME'], workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(settings['NAME']) def setUp(self): if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..', 'importer-test-files')): self.skipTest('Skipping test due to missing test data.') # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.username, self.password = self.create_user( 'admin', 'admin', is_superuser=True) self.non_admin_username, self.non_admin_password = self.create_user( 'non_admin', 'non_admin') self.cat = Catalog( ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.cat.get_workspace('geonode') is None: self.cat.create_workspace('geonode', 'http://geonode.org') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.cat) def tearDown(self): """ Clean up geoserver. """ self.cat.delete(self.datastore, recurse=True)
class GeoServerRepository(Repository):

    def __init__(self, protocol, host, port, path='geoserver/rest',
                 username='******', password='******', workspace='lycheepy'):
        self.url = '{}://{}:{}'.format(protocol, host, port)
        self.catalog = Catalog('{}/{}'.format(self.url, path), username=username, password=password)
        self.workspace = self._get_workspace(workspace)

    def _get_workspace(self, workspace_name):
        workspace = self.catalog.get_workspace(workspace_name)
        if not workspace:
            workspace = self.catalog.create_workspace(workspace_name, workspace_name)
        return workspace

    def publish(self, name, raster_file):
        self.catalog.create_coveragestore(name, raster_file, self.workspace, True)
        return '{}/geoserver/wcs?SERVICE=WCS&REQUEST=GetCoverage&VERSION=2.0.1&CoverageId={}'.format(
            self.url, name)
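A brief usage sketch for the repository class above; the host, credentials, coverage name, and GeoTIFF path are hypothetical placeholders, and the `Repository` base class is assumed to come from the surrounding project.

# Hypothetical example: publish a local GeoTIFF and get back its WCS GetCoverage URL.
repo = GeoServerRepository('http', 'localhost', 8080, username='admin', password='geoserver')
coverage_url = repo.publish('elevation', '/tmp/elevation.tif')
print(coverage_url)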
def wrapper(self, *args, **kwargs):
    workspace_name = 'geonode'
    django_datastore = db.connections['datastore']
    catalog = Catalog(
        ogc_server_settings.internal_rest,
        *ogc_server_settings.credentials
    )

    # Set up workspace/datastore as appropriate
    ws = catalog.get_workspace(workspace_name)
    delete_ws = False
    if ws is None:
        ws = catalog.create_workspace(workspace_name, 'http://www.geonode.org/')
        delete_ws = True
    datastore = create_datastore(workspace_name, django_datastore, catalog)

    # test method called here
    try:
        ret = wrapped_func(self, *args, **kwargs)
    finally:
        # Tear down workspace/datastore as appropriate
        if delete_ws:
            catalog.delete(ws, recurse=True)
        else:
            catalog.delete(datastore, recurse=True)

    return ret
class TestCatalogBase(unittest.TestCase): @classmethod def setUpClass(cls): pass @classmethod def tearDownClass(cls): sql = """SELECT relname FROM pg_class WHERE relkind = 'r' AND relname !~ '^(pg_|sql_)' AND relname != 'spatial_ref_sys';""" conn = psycopg2.connect( database=_DATABASE, user=_USER, password=_PASSWORD, host=_HOST, port=_PORT ) cur = conn.cursor() cur.execute(sql) tables = [r[0] for r in cur.fetchall()] if len(tables) == 0: return cur.execute("DROP TABLE {};".format(','.join(tables))) conn.commit() def setUp(self): self.catalog = Catalog(settings.SERVICE_URL) self.workspace = self.catalog.create_workspace("gsconfig_test") def tearDown(self): self.catalog.delete(self.workspace, recurse=True)
def createDataStore(self, options):
    try:
        cat = Catalog(self.geoserver_rest_url, options["geoserveradmin"], options["gpw"])

        # create datastore for URB schema
        try:
            ws = cat.create_workspace(options["alias"], options["uri"])
        except Exception as e:
            raise Exception("The workspace name or alias is already in use")
        try:
            ds = cat.create_datastore(options["alias"], ws)
            ds.connection_parameters.update(
                host=options["urbanUrl"],
                port=options["dbport"],
                database=options["database"],
                user=options["postuser"],
                passwd=options["ropw"],
                dbtype="postgis",
            )
            cat.save(ds)
        except Exception as e:
            print(str(e))
            raise Exception("Error connecting to GeoServer while creating the DataStore")
    except Exception as e:
        raise Exception(str(e))
    return ws.name, ds.name, ds.resource_type
def export_layer(db_info, gs_info, data_info, layer_name):
    """\
    This takes the parameters and performs the export. This can be run as a
    module or called from other code.
    """
    geoValidate(layer_name)
    layer = explodeGeo(layer_name)
    (spaRef, native_crs) = get_srs(layer)

    cat = Catalog(*tuple(gs_info))
    wspace = cat.create_workspace(data_info.workspace, data_info.namespace)
    datastore = create_datastore(cat, wspace, db_info, data_info)

    with closing(createConnObj(db_info._replace(database='postgres'))) as cxn:
        createNewDb(cxn, db_info.database)

    quick_export(layer, db_info)
    create_postgis_layer(
        cat, wspace, datastore, layer, spaRef, native_crs, db_info,
    )

    try:
        arcpy.DeleteFeatures_management(layer)
    except:
        log(arcpy.GetMessages())
class TestCatalogBase(unittest.TestCase): @classmethod def setUpClass(cls): pass # @classmethod # def tearDownClass(cls): # sql = """SELECT relname FROM pg_class # WHERE relkind = 'r' AND relname !~ '^(pg_|sql_)' # AND relname != 'spatial_ref_sys';""" # conn = psycopg2.connect( # database=_DATABASE, # user=_USER, # password=_PASSWORD, # host=_HOST, # port=_PORT # ) # cur = conn.cursor() # cur.execute(sql) # tables = [r[0] for r in cur.fetchall()] # if len(tables) == 0: # return # cur.execute("DROP TABLE {};".format(','.join(tables))) # conn.commit() def setUp(self): self.catalog = Catalog(settings.SERVICE_URL) self.workspace = self.catalog.create_workspace("gsconfig_test") def tearDown(self): self.catalog.delete(self.workspace, recurse=True)
def main(options):
    # connect to geoserver
    cat = Catalog("http://localhost:8080/geoserver/rest", "admin", options.gpw)

    # create datastore for URB schema
    ws = cat.create_workspace(options.alias, 'imio.be')
    ds = cat.create_datastore(options.alias, ws)
    ds.connection_parameters.update(
        host=options.urbanUrl,
        port="5432",
        database=options.database,
        user="******",
        passwd=options.ropw,
        dbtype="postgis")
    cat.save(ds)
    ds = cat.get_store(options.alias)

    # config object mapping each table to its urban style
    urb = {
        "capa": "Parcelles",
        "toli": "cadastre_ln_toponymiques",
        "canu": "cadastre_pt_num",
        "cabu": "Batiments",
        "gept": "cadastre_points_generaux",
        "gepn": "cadastre_pol_gen",
        "inpt": "point",
        "geli": "cadastre_ln_generales",
        "inli": "cadastre_ln_informations",
        "topt": "point",
    }

    # connect to tables, create layers and apply the matching urban styles
    for table in urb:
        style = urb[table]
        ft = cat.publish_featuretype(table, ds, 'EPSG:31370', srs='EPSG:31370')
        ft.default_style = style
        cat.save(ft)
        resource = ft.resource
        resource.title = options.alias + "_" + table
        resource.save()
        layer, created = Layer.objects.get_or_create(name=resource.name, defaults={
            "workspace": ws.name,
            "store": ds.name,
            "storeType": ds.resource_type,
            "typename": "%s:%s" % (ws.name.encode('utf-8'), resource.name.encode('utf-8')),
            "title": resource.title or 'No title provided',
            "abstract": resource.abstract or 'No abstract provided',
            # "owner": owner,
            "uuid": str(uuid.uuid4()),
            "bbox_x0": Decimal(resource.latlon_bbox[0]),
            "bbox_x1": Decimal(resource.latlon_bbox[1]),
            "bbox_y0": Decimal(resource.latlon_bbox[2]),
            "bbox_y1": Decimal(resource.latlon_bbox[3])
        })
        set_attributes(layer, overwrite=True)
        if created:
            layer.set_default_permissions()
def CraftGs(self):
    # Prompt for creating a new WorkSpace [WS]
    if (raw_input("Input [Y/N] to open a workspace: ")) == 'Y':
        self.workspace_name = (raw_input("Input your workspace's name: "))
        self.uri = (raw_input("Input your uri's name: "))
        self.cat.create_workspace(self.workspace_name, self.uri)
        print '---- WorkSpace created ----'
    else:
        pass

    # Prompt for creating a DataStore [PSQL]
    self.choice = (raw_input("Input [PSQL/SHP] to open a PostgreSQL or a Shapefile datastore: "))
    if self.choice == 'PSQL':
        print 'Pay attention ! The workspace must be existing !'
        self.workspace_name_psql = (raw_input("Input your workspace's name: "))
        self.datastore_name_psql = (raw_input("Input your datastore's name: "))
        ds = self.cat.create_datastore(self.datastore_name_psql, self.workspace_name_psql)
        ds.connection_parameters.update(host=self.param['cnx_psql']['host'],
                                        port=self.param['cnx_psql']['port'],
                                        database=self.param['cnx_psql']['database'],
                                        user=self.param['cnx_psql']['user'],
                                        passwd=self.param['cnx_psql']['passwd'],
                                        dbtype=self.param['cnx_psql']['dbtype'],
                                        schema=self.param['cnx_psql']['schema'])
        self.cat.save(ds)
        print '---- PGSQL DataStore created ----'
    # Prompt for creating a DataStore [SHP]
    elif self.choice == 'SHP':
        print 'Pay attention ! The workspace must be existing !'
        self.data_shp = self.param['cnx_globales']['datashp']
        self.workspace_name_shp = (raw_input("Input your workspace's name: "))
        self.datastore_name_shp = (raw_input("Input your datastore's name: "))
        self.url_shp = (self.param['cnx_globales']['urlrest'] + self.workspace_name_shp +
                        "/datastores/" + self.datastore_name_shp + "/external.shp?configure=all")
        self.headers_shp = {'Content-type': 'text/plain'}
        self.response1 = requests.put(self.url_shp, data=self.data_shp,
                                      auth=self.auth, headers=self.headers_shp)
    else:
        pass

    # Call the DataGs method next
    self.DataGs()
def main():
    # validate args
    if len(sys.argv) != 2:
        get_usage()
    else:
        configfile = sys.argv[1]

    # read the config file
    config = ConfigParser()
    config.read(configfile)
    if config.sections() != ['PostGIS', 'Geoserver', 'Styles']:
        print('Invalid config file...')
        sys.exit(1)

    # connect to postgis
    pginfo = dict(config.items('PostGIS'))
    pgdb = PostGIS(**pginfo)
    pgdb.info()
    pgdb.connect()

    # connect to geoserver
    gsinfo = Struct(**dict(config.items('Geoserver')))
    gscat = Catalog(gsinfo.url, gsinfo.user, gsinfo.password)
    print('\nGeoserver info:')
    print(' Url:', gsinfo.url)
    print(' User:', gsinfo.user)
    print(' Password:', gsinfo.password)

    print('\nConnect to Geoserver...')
    gsws = gscat.get_workspace(gsinfo.workspace)
    if gsws is None:
        wsuri = '{0}/{1}'.format(gsinfo.url, gsinfo.workspace)
        gsws = gscat.create_workspace(gsinfo.workspace, wsuri)
        print('Workspace created...')

    # sldfile info
    sldinfo = Struct(**dict(config.items('Styles')))
    sldinfo.folder = config.get('Styles', 'folder')
    sldinfo.overwrite = config.getboolean('Styles', 'overwrite')
    print('\nStyles info:')
    print(' Folder: ', sldinfo.folder)
    print(' Overwrite: ', sldinfo.overwrite)

    # publish layers from postgis to geoserver
    publish_layers(pgdb, gscat, gsws, pginfo, gsinfo, sldinfo)

    # close postgis connection
    pgdb.conn.close()
    print("\nCompleted!")
def main():
    # Note: for now the workspace does not need to be included in the url
    # See gsconfig/examples/same_srs

    # 1 - Create the specified workspace (namespace)
    cat = Catalog(url, username=USER_NAME, password=PWD)
    # ws = cat.get_workspace(WORK_SPACE)
    ws = cat.create_workspace(WORK_SPACE, WORK_SPACE)

    # 2 - Submit the coverage store
    coveragestore = 'st_2'
    headers_xml = {'content-type': 'text/xml'}
    # TODO: [*] 20-03-04 Known issue: submitting data that is not under the
    # GeoServer service's own data path raises an error
    file_path = r'nmefc/waterwind/nmefc_2016072112_opdr.nc'
    r_create_coveragestore = requests.post(
        f'http://localhost:8082/geoserver/rest/workspaces/{WORK_SPACE}/coveragestores?configure=all',
        auth=('admin', 'geoserver'),
        data='<coverageStore><name>' + coveragestore + '</name><workspace>' + WORK_SPACE +
             "</workspace><enabled>true</enabled><type>NetCDF</type><url>" + file_path +
             '</url></coverageStore>',
        headers=headers_xml)

    # 3 - Assign a style
    # old_name_layer = "Band1"
    # new_name_layer = "newBand"
    # stylename = 'test'
    # stylefilename = stylename + '.sld'
    # styleallname = 'd:\\ROURPATH\\styles\\' + stylefilename
    #
    # # creates new style
    # r_create_new_style = requests.post("http://localhost:8090/geoserver/rest/styles",
    #                                    auth=('admin', 'geoserver'),
    #                                    data='<style><name>' + stylename + '</name><filename>' + stylefilename + '</filename></style>',
    #                                    headers=headers_xml)
    #
    # # upload new style
    # with open(styleallname, 'rb') as sld_file:
    #     r_upload_new_style = requests.put("http://localhost:8090/geoserver/rest/styles/" + stylename,
    #                                       auth=('admin', 'geoserver'),
    #                                       data=sld_file,
    #                                       headers=headers_sld)
    #
    # # assign it to a layer
    # r_assign_new_style = requests.put("http://localhost:8090/geoserver/rest/layers/" + workspace + ':' + new_name_layer,
    #                                   auth=('admin', 'geoserver'),
    #                                   data='<layer><defaultStyle><name>' + stylename + '</name></defaultStyle></layer>',
    #                                   headers=headers_xml)

    # 4 - Publish the service
    # For now, use the existing style SearchRescue/wind_dir_style
    old_name_layer = "Band1"
    new_name_layer = "newBand"
def get_geoserver_cascading_workspace(create=True):
    """Return the geoserver workspace used for cascaded services

    The workspace can be created if needed.
    """
    catalog = Catalog(
        service_url=settings.OGC_SERVER["default"]["LOCATION"] + "rest",
        username=settings.OGC_SERVER["default"]["USER"],
        password=settings.OGC_SERVER["default"]["PASSWORD"]
    )
    name = getattr(settings, "CASCADE_WORKSPACE", "cascaded-services")
    workspace = catalog.get_workspace(name)
    if workspace is None and create:
        uri = "http://www.geonode.org/{}".format(name)
        workspace = catalog.create_workspace(name, uri)
    return workspace
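A short usage sketch for the helper above, assuming a GeoNode-style Django `settings.OGC_SERVER` configuration is available in scope.

# Fetch (or create) the workspace that cascaded services are published into.
workspace = get_geoserver_cascading_workspace(create=True)
print(workspace.name)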
class catalog:

    def __init__(self, path):
        self.cat = Catalog(path)

    def createWkspace(self, name):
        vk = self.cat.create_workspace(name, name)
        vk.enabled = True
        return vk

    def addImage(self, name, workspace, data):
        pdb.set_trace()
        self.cat.create_coveragestore(
            name,
            "/usr/share/tomcat/webapps/geoserver/data/data/DE2_MS__L1CT___20140220T104454.tif",
            workspace)
        inf = CoverageStore(self.cat, workspace, name)
        inf.fetch()
        self.cat.save(inf)
def createDataStore(self, options):
    try:
        cat = Catalog(self.geoserver_rest_url, options['geoserveradmin'], options['gpw'])

        # create datastore for URB schema
        ws = cat.create_workspace(options['alias'], options['uri'])
        try:
            ds = cat.create_datastore(options['alias'], ws)
            ds.connection_parameters.update(
                host=options['dbadr'],
                port=options['dbport'],
                database=options['database'],
                user=options['postuser'],
                passwd=options['ropw'],
                dbtype="postgis")
            cat.save(ds)
        except Exception as e:
            print(str(e))
            raise Exception('Error connecting to GeoServer while creating the DataStore')
    except Exception as e:
        raise Exception(str(e))
    return ws.name, ds.name, ds.resource_type
def connect_database(database_name):
    # connect catalog
    cat = Catalog(GEOSERVER_URL + '/rest')

    # create workspace
    ws = cat.create_workspace(LAB_NAME, LAB_URI)

    # create datastore
    ds = cat.create_datastore(LAB_NAME + '_Datastore', LAB_NAME)

    # connect database
    ds.connection_parameters.update(host=POSTGRES_URL,
                                    port=POSTGRES_PORT,
                                    database=database_name,
                                    user=POSTGRES_USER,
                                    passwd=POSTGRES_PW,
                                    dbtype='postgis',
                                    schema='public')

    # save
    cat.save(ds)
def create_workspace(server, username, password, workspace):
    cat = Catalog(server, username, password)
    if workspace not in [w.name for w in cat.get_workspaces()]:
        print 'Creating workspace: %s' % workspace
        cat.create_workspace(workspace, 'api.npolar.no/' + workspace)
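A one-line usage sketch for the helper above; the server URL, credentials, and workspace name are hypothetical placeholders.

# Hypothetical call: create the "tracking" workspace if it does not exist yet.
create_workspace('http://localhost:8080/geoserver/rest', 'admin', 'geoserver', 'tracking')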
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD']) def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) def testPublishFeatureType(self): # Use the other test and store creation to load vector data into a database # @todo maybe load directly to database? try: self.testDataStoreCreateAndThenAlsoImportData() except FailedRequestError: pass try: lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. self.cat.delete(lyr) self.cat.delete(lyr.resource) ds = self.cat.get_store("gsconfig_import_test") # make sure it's gone self.assert_(self.cat.get_layer('import') is None) self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326") # and now it's not self.assert_(self.cat.get_layer('import') is not None) finally: # tear stuff down to allow the other test to pass if we run first ds = self.cat.get_store("gsconfig_import_test") lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. 
try: if lyr: self.cat.delete(lyr) self.cat.delete(lyr.resource) if ds: self.cat.delete(ds) except: pass def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) @drop_table('import') def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store(ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) @drop_table('import2') def testVirtualTables(self): ds = self.cat.create_datastore("gsconfig_import_test2") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test2") self.cat.add_data_to_store(ds, "import2", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) store = self.cat.get_store("gsconfig_import_test2") geom = JDBCVirtualTableGeometry('the_geom','MultiPolygon','4326') ft_name = 'my_jdbc_vt_test' epsg_code = 'EPSG:4326' sql = "select * from import2 where 'STATE_NAME' = 'Illinois'" keyColumn = None parameters = None jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn, parameters) ft = self.cat.publish_featuretype(ft_name, store, epsg_code, jdbc_virtual_table=jdbc_vt) # DISABLED; this test works only in the very particular case # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR # def testCoverageStoreCreate(self): # ds = self.cat.create_coveragestore2("coverage_gsconfig") # ds.data_url = "file:test/data/mytiff.tiff" # self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEquals(set(rs.request_srs_list), srs_before, 
str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testWmsStoreCreate(self): ws = self.cat.create_wmsstore("wmsstore_gsconfig") ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" ws.type = "WMS" self.cat.save(ws) def testWmsLayer(self): self.cat.create_workspace("wmstest", "http://example.com/wmstest") wmstest = self.cat.get_workspace("wmstest") wmsstore = self.cat.create_wmsstore("wmsstore", wmstest) wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" wmsstore.type = "WMS" self.cat.save(wmsstore) wmsstore = self.cat.get_store("wmsstore") self.assertEqual(1, len(self.cat.get_stores(wmstest))) available_layers = wmsstore.get_resources(available=True) for layer in available_layers: # sanitize the layer name - validation will fail on newer geoservers name = layer.replace(':', '_') new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer) added_layers = wmsstore.get_resources() self.assertEqual(len(available_layers), len(added_layers)) changed_layer = added_layers[0] self.assertEqual(True, changed_layer.advertised) self.assertEqual(True, changed_layer.enabled) changed_layer.advertised = False changed_layer.enabled = False self.cat.save(changed_layer) self.cat._cache.clear() changed_layer = wmsstore.get_resources()[0] changed_layer.fetch() self.assertEqual(False, changed_layer.advertised) self.assertEqual(False, changed_layer.enabled) # Testing projection and projection policy changes changed_layer.projection = "EPSG:900913" changed_layer.projection_policy = "REPROJECT_TO_DECLARED" self.cat.save(changed_layer) self.cat._cache.clear() layer = self.cat.get_layer(changed_layer.name) self.assertEqual(layer.resource.projection_policy, changed_layer.projection_policy) self.assertEqual(layer.resource.projection, changed_layer.projection) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.iteritems(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) ) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf) ) bogus_shp = { 'shp': 
'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf) ) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assert_(self.cat.get_layer("states_test") is None) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf) self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None) self.assertRaises( ConflictingDataError, lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf) ) self.assertRaises( UploadError, lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf) ) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff) ) ft_ext = self.cat.create_coveragestore_external_geotiff("Pk50095_ext", 'file:test/data/Pk50095.tif', sf) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = { 'title': 'Not the original attribution', 'width': '123', 'height': '321', 'href': 'http://www.georchestra.org', 'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg', 'type': 'image/jpeg' } # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next() lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # check count before tests (upload) count = len(self.cat.get_styles()) # upload new style, verify existence self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assert_(self.cat.get_style("fred") is None) # attempt creating new style self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) # verify it can be found via URL and check the name f = self.cat.get_style_by_url(fred.href) self.assert_(f is not None) self.assertEqual(f.name, fred.name) # compare count after upload self.assertEqual(count +1, len(self.cat.get_styles())) # attempt creating a new style without "title" self.cat.create_style("notitle", open("test/notitle.sld").read()) notitle = self.cat.get_style("notitle") self.assertEqual(None, notitle.sld_title) def testWorkspaceStyles(self): # upload new style, verify existence 
self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="blarny") self.assert_(jed is None) jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) jed = self.cat.get_style("topp:jed") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) # replace style, verify changes self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Ted", jed.sld_title) # delete style, verify non-existence self.cat.delete(jed, purge=True) self.assert_(self.cat.get_style("jed", workspace="topp") is None) # attempt creating new style self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertEqual("Fred", jed.sld_title) # verify it can be found via URL and check the full name f = self.cat.get_style_by_url(jed.href) self.assert_(f is not None) self.assertEqual(f.fqn, jed.fqn) def testLayerWorkspaceStyles(self): # upload new style, verify existence self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp") self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp") ned = self.cat.get_style("ned", workspace="topp") zed = self.cat.get_style("zed", workspace="topp") self.assert_(ned is not None) self.assert_(zed is not None) lyr = self.cat.get_layer("states") lyr.default_style = ned lyr.styles = [zed] self.cat.save(lyr) self.assertEqual("topp:ned", lyr.default_style) self.assertEqual([zed], lyr.styles) lyr.refresh() self.assertEqual("topp:ned", lyr.default_style.fqn) self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles]) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assert_(ws is None) def testWorkspaceDefault(self): # save orig orig = self.cat.get_default_workspace() neu = self.cat.create_workspace("neu", "http://example.com/neu") try: # make sure setting it works self.cat.set_default_workspace("neu") ws = self.cat.get_default_workspace() self.assertEqual('neu', ws.name) finally: # cleanup and reset to the way things were self.cat.delete(neu) self.cat.set_default_workspace(orig.name) ws = self.cat.get_default_workspace() self.assertEqual(orig.name, ws.name) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) states.enabled = False self.assert_(states.enabled == False) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == False) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) def testLayerGroupSave(self): tas = self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = 
tas.styles[:-1] self.cat.save(tas) # this verifies the local state self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) # force a refresh to check the remote state tas.refresh() self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) def testImageMosaic(self): """ Test case for Issue #110 """ # testing the mosaic creation name = 'cea_mosaic' data = open('test/data/mosaic/cea.zip', 'rb') self.cat.create_imagemosaic(name, data) # get the layer resource back self.cat._cache.clear() resource = self.cat.get_layer(name).resource self.assert_(resource is not None) # delete granule from mosaic coverage = name store = self.cat.get_store(name) granule_id = name + '.1' self.cat.mosaic_delete_granule(coverage, store, granule_id) def testTimeDimension(self): sf = self.cat.get_workspace("sf") files = shapefile_and_friends(os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date")) self.cat.create_featurestore("boxes_with_end_date", files, sf) get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer('boxes_with_end_date').resource # configure time as LIST resource = get_resource() timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date") resource.metadata = {'time':timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) self.assertEqual("date", timeInfo.attribute) self.assertEqual("ISO8601", timeInfo.units) # disable time dimension timeInfo = resource.metadata['time'] timeInfo.enabled = False # since this is an xml property, it won't get written unless we modify it resource.metadata = {'time' : timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(False, timeInfo.enabled) # configure with interval, end_attribute and enable again timeInfo.enabled = True timeInfo.presentation = 'DISCRETE_INTERVAL' timeInfo.resolution = '3 days' timeInfo.end_attribute = 'enddate' resource.metadata = {'time' : timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(True, timeInfo.enabled) self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation) self.assertEqual('3 days', timeInfo.resolution_str()) self.assertEqual('enddate', timeInfo.end_attribute)
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog("http://localhost:8080/geoserver/rest") def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update( host="localhost", port="5432", database="db", user="******", passwd="password", dbtype="postgis") self.cat.save(ds) def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update( host="localhost", port="5432", database="db", user="******", passwd="password", dbtype="postgis") self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store(ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) def testCoverageStoreCreate(self): ds = self.cat.create_coveragestore2("coverage_gsconfig") ds.data_url = "file:data/mytiff.tiff" self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = 
self.cat.get_resource("Arc_Sample") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.iteritems(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) ) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf) ) bogus_shp = { 'shp': 'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf) ) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assert_(self.cat.get_layer("states_test") is None) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") # TODO: Uploading WorldImage file no longer works??? 
# ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf) # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None) # self.assertRaises( # ConflictingDataError, # lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf) # ) self.assertRaises( UploadError, lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf) ) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff) ) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = "Not the original attribution" # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next() lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # upload new style, verify existence self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assert_(self.cat.get_style("fred") is None) # attempt creating new style self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assert_(ws is None) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) states.enabled = False self.assert_(states.enabled == False) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == False) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) def testLayerGroupSave(self): tas = self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = tas.styles[:-1] self.cat.save(tas) self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers) 
self.assertEqual(tas.styles, [None, None, None], tas.styles)
appserver = sys.argv[1]
dbserver = sys.argv[2]

ws_name = 'naturalearth'
ds_name = 'ne_pg'
lg_name = "ne_pg"

cat = Catalog(appserver + '/rest')

# check if workspace exists, bail if it does (safer than deleting it)
if any(ws.name == ws_name for ws in cat.get_workspaces()):
    print 'workspace already exists...'
    ws = cat.get_workspace(ws_name)
else:
    print 'creating workspace...'
    ws = cat.create_workspace(ws_name, 'http://www.naturalearth.org')

if any(ds.workspace.name == ws.name and ds.name == ds_name for ds in cat.get_stores()):
    print 'datastore already exists...'
    ds = cat.get_store(ds_name, ws_name)
else:
    print 'creating datastore'
    ds = cat.create_datastore(ds_name, ws.name)
    ds.connection_parameters.update(
        host=dbserver,
        port='5432',
        database='ne_tmp',
        user='******',
        password='******',
        dbtype='postgis')
    cat.save(ds)
from geoserver.catalog import Catalog

# Connect to GeoServer
cat = Catalog("http://localhost:8082/geoserver/rest", username="******", password="******")

# Create a workspace
ws = cat.create_workspace('gsc_test', 'http://localhost:8082/geoserver/testWs')

# Create a store
ds = cat.create_datastore('bdg_gsc', 'gsc_test')
ds.connection_parameters.update(host='localhost', port='5432', database='bdg_gsc',
                                user='******', passwd='user', dbtype='postgis', schema='public')
cat.save(ds)

# Add layers
ft = cat.publish_featuretype('mun_pr', ds, 'EPSG:4674', srs='EPSG:4674')
ft1 = cat.publish_featuretype('escolas_pr', ds, 'EPSG:4674', srs='EPSG:4674')

# Add styles - does not upload the full SLD
# with open("/home/user/gsc_style.sld") as f:
#     cat.create_style('gsc_style', f.read())

# Assign the style to the layer
# SLD already added in GeoServer
style1 = cat.get_style("gsc_test:gsc_style_mun")
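The snippet above retrieves a style but stops short of attaching it to a layer; a minimal sketch of that last step, assuming the published layer name is 'mun_pr' and using the same default-style pattern seen in other snippets in this section.

# Assumed continuation: set the retrieved style as the layer's default style.
layer = cat.get_layer('mun_pr')
layer.default_style = style1
cat.save(layer)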
class UploaderTests(TestCase): """Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): """Convenience method for creating a datastore. """ settings = connection.settings_dict ds_name = settings['NAME'] params = { 'database': ds_name, 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': 'True' } store = catalog.create_datastore(ds_name, workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(ds_name) def create_user(self, username, password, **kwargs): """Convenience method for creating users. """ user, created = User.objects.get_or_create(username=username, **kwargs) if created: user.set_password(password) user.save() return user def setUp(self): self.assertTrue( os.path.exists(_TEST_FILES_DIR), 'Test could not run due to missing test data at {0!r}' .format(_TEST_FILES_DIR) ) # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.admin_user = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_user = self.create_user('non_admin', 'non_admin') self.catalog = Catalog( ogc_server_settings.internal_rest, *ogc_server_settings.credentials ) if self.catalog.get_workspace('geonode') is None: self.catalog.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.catalog) def tearDown(self): """Clean up geoserver. """ self.catalog.delete(self.datastore, recurse=True) def import_file(self, path, configs=None): """Imports the file. """ if configs is None: configs = [] self.assertTrue(os.path.exists(path)) # run ogr2ogr ogr = OGRImport(path) layers = ogr.handle(configuration_options=configs) return layers def generic_import(self, filename, configs=None): if configs is None: configs = [{'index': 0}] path = test_file(filename) results = self.import_file(path, configs=configs) layer_results = [] for result in results: if result[1].get('raster'): layer_path = result[0] layer_name = os.path.splitext(os.path.basename(layer_path))[0] layer = Layer.objects.get(name=layer_name) self.assertTrue(layer_path.endswith('.tif')) self.assertTrue(os.path.exists(layer_path)) gdal_layer = gdal.OpenEx(layer_path) self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff') layer_results.append(layer) else: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not path.endswith('zip'): self.assertGreaterEqual( layer.attributes.count(), DataSource(path)[0].num_fields ) layer_results.append(layer) return layer_results[0] def generic_api_upload(self, filenames, configs=None): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o']. 
self.assertNotIsInstance(filenames, str) # Upload Files outfiles = [] for filename in filenames: path = test_file(filename) with open(path) as stream: data = stream.read() upload = SimpleUploadedFile(filename, data) outfiles.append(upload) response = client.post( reverse('uploads-new-json'), {'file': outfiles, 'json': json.dumps(configs)}, follow=True) content = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertEqual(content['id'], 1) # Configure Uploaded Files upload_id = content['id'] upload_layers = UploadLayer.objects.filter(upload_id=upload_id) for upload_layer in upload_layers: for config in configs: if config['upload_file_name'] == upload_layer.name: payload = config['config'] url = '/importer-api/data-layers/{0}/configure/'.format(upload_layer.id) response = client.post( url, data=json.dumps(payload), content_type='application/json' ) self.assertTrue(response.status_code, 200) url = '/importer-api/data-layers/{0}/'.format(upload_layer.id) response = client.get(url, content_type='application/json') self.assertTrue(response.status_code, 200) return content def generic_raster_import(self, filename, configs=None): if configs is None: configs = [{'index': 0}] path = test_file(filename) results = self.import_file(path, configs=configs) layer_path = results[0][0] layer_name = os.path.splitext(os.path.basename(layer_path))[0] layer = Layer.objects.get(name=layer_name) self.assertTrue(layer_path.endswith('.tif')) self.assertTrue(os.path.exists(layer_path)) gdal_layer = gdal.OpenEx(layer_path) self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff') return layer def test_multi_upload(self): """Tests Uploading Multiple Files """ upload = self.generic_api_upload( filenames=[ 'boxes_with_year_field.zip', 'boxes_with_date.zip', 'point_with_date.geojson' ], configs=[ { 'upload_file_name': 'boxes_with_year_field.shp', 'config': [{'index': 0}] }, { 'upload_file_name': 'boxes_with_date.shp', 'config': [{'index': 0}] }, { 'upload_file_name': 'point_with_date.geojson', 'config': [{'index': 0}] } ] ) self.assertEqual(9, upload['count']) def test_upload_with_slds(self): """Tests Uploading sld """ upload = self.generic_api_upload( filenames=[ 'boxes_with_date.zip', 'boxes.sld', 'boxes1.sld' ], configs=[ { 'upload_file_name': 'boxes_with_date.shp', 'config': [ { 'index': 0, 'default_style': 'boxes.sld', 'styles': ['boxes.sld', 'boxes1.sld'] } ] } ] ) self.assertEqual(6, upload['count']) upload_id = upload['id'] upload_obj = UploadedData.objects.get(pk=upload_id) uplayers = UploadLayer.objects.filter(upload=upload_id) layer_id = uplayers[0].pk upfiles_count = UploadFile.objects.filter(upload=upload_id).count() self.assertEqual(6, upfiles_count) # Warning: this assumes that Layer pks equal UploadLayer pks layer = Layer.objects.get(pk=layer_id) gslayer = self.catalog.get_layer(layer.name) default_style = gslayer.default_style # TODO: can we use public API or omit this? self.catalog._cache.clear() self.assertEqual('boxes.sld', default_style.filename) def test_upload_with_metadata(self): """Tests Uploading metadata """ upload = self.generic_api_upload( filenames=[ 'boxes_with_date.zip', 'samplemetadata.xml', ], configs=[ { 'upload_file_name': 'boxes_with_date.shp', 'config': [ { 'index': 0, 'metadata': 'samplemetadata.xml' } ] } ] ) self.assertEqual(5, upload['count']) upload_id = upload['id'] upload_obj = UploadedData.objects.get(pk=upload_id) # TODO: why did we get upload_obj? 
uplayers = UploadLayer.objects.filter(upload=upload_id) layer_id = uplayers[0].pk upfiles_count = UploadFile.objects.filter(upload=upload_id).count() self.assertEqual(5, upfiles_count) layer = Layer.objects.get(pk=layer_id) self.assertEqual(layer.language, 'eng') self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar') def test_raster(self): """Exercise raster import. """ layer = self.generic_raster_import( 'test_grid.tif', configs=[ { 'index': 0 } ] ) self.assertTrue(layer.name.startswith('test_grid')) def test_box_with_year_field(self): """Tests the import of test_box_with_year_field. """ layer = self.generic_import( 'boxes_with_year_field.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """Tests the import of test_boxes_with_date. """ layer = self.generic_import( 'boxes_with_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_gpkg(self): """Tests the import of test_boxes_with_date.gpkg. """ layer = self.generic_import( 'boxes_with_date.gpkg', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_plus_raster_gpkg_by_index(self): """Tests the import of multilayer vector + raster geopackage using index """ layer = self.generic_import( 'boxes_plus_raster.gpkg', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }, {'index': 1}, {'index': 2}, {'index': 3}, {'index': 4}, {'index': 5}, {'index': 6}, {'index': 7}, ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """Tests a CSV with WKT polygon. """ layer = self.generic_import( 'boxes_with_date.csv', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_csv_missing_features(self): """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding. """ self.generic_import( 'missing-features.csv', configs=[ {'index': 0} ] ) def test_boxes_with_iso_date(self): """Tests the import of test_boxes_with_iso_date. 
""" layer = self.generic_import( 'boxes_with_date_iso_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """Import the same layer twice to ensure file names increment properly. """ path = test_file('boxes_with_date_iso_date.zip') ogr = OGRImport(path) layers1 = ogr.handle({'index': 0, 'name': 'test'}) layers2 = ogr.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_launder(self): """Ensure the launder function works as expected. """ self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3') self.assertEqual(launder('Testing#'), 'testing_') self.assertEqual(launder(' '), '_') def test_boxes_with_date_iso_date_zip(self): """Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import( 'boxes_with_date_iso_date.zip', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import( 'boxes_with_dates_bc.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """Tests the import of point_with_date.geojson """ layer = self.generic_import( 'point_with_date.geojson', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import( 'boxes_with_end_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date', 'enddate'], 'start_date': 'date', 'end_date': 'enddate', 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'date_as_date') end_date_attr = get_layer_attr(layer, 'enddate_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute ) self.generic_time_check( layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute ) def test_us_states_kml(self): """Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. 
layer = self.generic_import( 'us_states.kml', configs=[ { 'index': 0 } ] ) self.assertEqual(layer.name.lower(), "us_states") def test_mojstrovka_gpx(self): """Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import( 'mojstrovka.gpx', configs=[ { 'index': 0, 'convert_to_date': ['time'], 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'time_as_date') self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute ) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """Convenience method to run generic tests on time layers. """ # TODO: can we use public API or omit this? self.catalog._cache.clear() resource = self.catalog.get_resource( layer.name, store=layer.store, workspace=self.workspace ) time_info = resource.metadata['time'] self.assertEqual('LIST', time_info.presentation) self.assertEqual(True, time_info.enabled) self.assertEqual(attribute, time_info.attribute) self.assertEqual(end_attribute, time_info.end_attribute) def test_us_shootings_csv(self): """Tests the import of US_Shootings.csv. """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') path = test_file('US_Shootings.csv') layer = self.generic_import( path, configs=[ { 'index': 0, 'convert_to_date': ['Date'] } ] ) self.assertTrue(layer.name.startswith('us_shootings')) date_field = 'date' configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_field ) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """Tests the import of US_Civil_Rights_Sitins0.csv """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') layer = self.generic_import( 'US_Civil_Rights_Sitins0.csv', configs=[ { 'index': 0, 'convert_to_date': ['Date'] } ] ) self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0') def get_layer_names(self, path): """Gets layer names from a data source. """ data_source = DataSource(path) return [layer.name for layer in data_source] def test_gdal_import(self): path = test_file('point_with_date.geojson') self.generic_import( path, configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) def test_wfs(self): """Tests the import from a WFS Endpoint """ wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs' ogr = OGRImport(wfs) configs = [ {'layer_name': 'og:bugsites'}, {'layer_name': 'topp:states'} ] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_arcgisjson(self): """Tests the import from a WFS Endpoint """ endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json' ogr = OGRImport(endpoint) configs = [{'index':0}] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_file_add_view(self): """Tests the file_add_view. 
""" client = AdminClient() # test login required for this view request = client.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertEqual(len(response.context['object_list']), 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) with open(test_file('empty_file.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """Tests the file_add_view. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new-json'), {'file': stream}, follow=True ) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """Tests the describe fields functionality. """ path = test_file('US_Shootings.csv') with GDALInspector(path) as inspector: layers = inspector.describe_fields() self.assertTrue(layers[0]['layer_name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude']) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """Tests the describe fields functionality. """ filenames = { 'US_Shootings.csv': 'CSV', 'point_with_date.geojson': 'GeoJSON', 'mojstrovka.gpx': 'GPX', 'us_states.kml': 'KML', 'boxes_with_year_field.shp': 'ESRI Shapefile', 'boxes_with_date_iso_date.zip': 'ESRI Shapefile' } from .models import NoDataSourceFound try: for filename, file_type in sorted(filenames.items()): path = test_file(filename) with GDALInspector(path) as inspector: self.assertEqual(inspector.file_type(), file_type) except NoDataSourceFound: logging.exception('No data source found in: {0}'.format(path)) raise def test_configure_view(self): """Tests the configuration view. 
""" path = test_file('point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] client = AdminClient() client.login_as_non_admin() with open(path) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) upload = response.context['object_list'][0] payload = [ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': { 'users': { 'test': new_user_perms, 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } } } ] response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json' ) self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual( Layer.objects.all()[0].owner.username, self.non_admin_user.username ) perms = first_layer.get_all_level_info() user = User.objects.get(username=self.non_admin_user.username) # check user permissions expected_perms = [ u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data' ] for perm in expected_perms: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn( 'change_resourcebase_permissions', perms['users'][new_user] ) self.assertIn( 'change_layer_data', perms['users'][User.objects.get(username='******')] ) catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, first_layer) def test_configure_view_convert_date(self): """Tests the configure view with a dataset that needs to be converted to a date. """ client = AdminClient() client.login_as_non_admin() with open(test_file('US_Shootings.csv')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) upload = response.context['object_list'][0] payload = [ { 'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True } ] response = client.get( '/importer-api/data-layers/{0}/configure/'.format(upload.id) ) self.assertEqual(response.status_code, 405) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id) ) self.assertEqual(response.status_code, 400) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json' ) self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(first_layer.attributes.filter(attribute='date'), 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. 
client.logout() client.login_as_admin() response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id) ) self.assertEqual(response.status_code, 404) def test_list_api(self): client = AdminClient() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 401) client.login_as_non_admin() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.admin_user.username) non_admin = User.objects.get(username=self.non_admin_user.username) path = test_file('US_Shootings.csv') with open(path, 'rb') as stream: uploaded_file = SimpleUploadedFile('test_data', stream.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) client.login_as_admin() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = client.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): client = AdminClient() response = client.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) client.login_as_non_admin() response = client.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """Ensure users can delete their data. """ client = AdminClient() client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) self.assertEqual(UploadedData.objects.all().count(), 1) upload_id = UploadedData.objects.first().id response = client.delete('/importer-api/data/{0}/'.format(upload_id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """Ensure that administrators can delete data that isn't theirs. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) self.assertEqual(UploadedData.objects.all().count(), 1) client.logout() client.login_as_admin() upload_id = UploadedData.objects.first().id response = client.delete('/importer-api/data/{0}/'.format(upload_id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """Tests providing a name in the configuration options. 
""" client = AdminClient() client.login_as_non_admin() name = 'point-with-a-date' with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True } response = client.post( '/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json' ) self.assertEqual(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.title, name.replace('-', '_')) def test_api_import(self): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True } self.assertIsInstance( UploadLayer.objects.first().configuration_options, dict ) response = client.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = client.post( '/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json' ) self.assertEqual(response.status_code, 200) self.assertIn('task', response.content) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, first_layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """Test the file extension validator. """ for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions: filename = 'test.{0}'.format(extension) upload = SimpleUploadedFile(filename, '') self.assertIsNone(validate_file_extension(upload)) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """Test the file extension validator. """ with self.assertRaises(ValidationError): validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import( 'Walmart.zip', configs=[ { 'configureTime':False, 'convert_to_date': ['W1_OPENDAT'], 'editable': True, 'index':0, 'name': 'Walmart', 'start_date': 'W1_OPENDAT' } ] ) def test_multipolygon_shapefile(self): """Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configs=[{'index': 0}]) def test_istanbul(self): """Tests shapefile with multipart polygons and non-WGS84 SR. 
""" result = self.generic_import( 'Istanbul.zip', configs=[ {'index': 0} ] ) feature_type = self.catalog.get_resource(result.name) self.assertEqual(feature_type.projection, 'EPSG:32635') def test_gwc_handler(self): """Tests the GeoWebCache handler """ layer = self.generic_import( 'boxes_with_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True } ] ) gwc = GeoWebCacheHandler(None) gs_layer = self.catalog.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import( 'boxes_with_date_iso_date.shp', configs=[ { 'index': 0 } ] ) gs_layer = self.catalog.get_layer(layer.name) self.catalog._cache.clear() gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertNotIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) def test_utf8(self): """Tests utf8 characters in attributes """ path = test_file('china_provinces.shp') layer = self.generic_import(path) ogr = OGRImport(path) datastore, _ = ogr.open_target_datastore(ogr.target_store) sql = ( "select NAME_CH from {0} where NAME_PY = 'An Zhou'" .format(layer.name) ) result = datastore.ExecuteSQL(sql) feature = result.GetFeature(0) self.assertEqual(feature.GetField('name_ch'), '安州') def test_non_converted_date(self): """Test converting a field as date. """ results = self.generic_import( 'TM_WORLD_BORDERS_2005.zip', configs=[ { 'index': 0, 'start_date': 'Year', 'configureTime': True } ] ) layer = self.catalog.get_layer(results.typename) self.assertIn('time', layer.resource.metadata) self.assertEqual('year', layer.resource.metadata['time'].attribute) def test_fid_field(self): """ Regression test for preserving an FID field when target layer supports it but source does not. """ self.generic_import( 'noaa_paleoclimate.zip', configs=[ { 'index': 0 } ] ) def test_csv_with_wkb_geometry(self): """Exercise import of CSV files with multiple geometries. """ filenames = [ 'police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv', 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv', 'police_csv_noWKB.csv' ] for filename in filenames: self.generic_import( filename, { 'configureTime': True, 'convert_to_date': ['date_time'], 'editable': True, 'index': 0, 'name': filename.lower(), 'permissions': { 'users':{ 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } }, 'start_date':'date_time', } )
remoteHost="192.168.99.111" remotePort="5432" remoteDatabase="geoshape_data" remoteUser="******" remotePassword="******" remoteDBType="postgis" remoteLayerName="virginia_natural" WGS84="EPSG:4326" cat = Catalog(localURL) # Check if local workspace exists and if not create it localWSObj = cat.get_workspace (localWSName) if localWSObj is None: localWSObj = cat.create_workspace(localWSName, localWSURI) print "Workspace " + localWSName + " created" else: print "Workspace " + localWSName + " already exists" # Get list of datastores dataStoresObj = cat.get_stores() # Check if remote datastore exists on local system for dataStoreObj in dataStoresObj: if dataStoreObj.name == remoteDSName: localDSObj = dataStoreObj print "Datastore " + localDSObj.name + " already exists" if not localDSObj: localDSObj = cat.create_datastore(remoteDSName, localWSName)
class UploaderTests(TestCase): """Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): """Convenience method for creating a datastore. """ settings = connection.settings_dict ds_name = settings['NAME'] params = { 'database': ds_name, 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': 'True' } store = catalog.create_datastore(ds_name, workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(ds_name) def create_user(self, username, password, **kwargs): """Convenience method for creating users. """ user, created = User.objects.get_or_create(username=username, **kwargs) if created: user.set_password(password) user.save() return user def setUp(self): self.assertTrue( os.path.exists(_TEST_FILES_DIR), 'Test could not run due to missing test data at {0!r}'.format( _TEST_FILES_DIR)) # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.admin_user = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_user = self.create_user('non_admin', 'non_admin') self.catalog = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.catalog.get_workspace('geonode') is None: self.catalog.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.catalog) def tearDown(self): """Clean up geoserver. """ self.catalog.delete(self.datastore, recurse=True) def import_file(self, path, configs=None): """Imports the file. """ if configs is None: configs = [] self.assertTrue(os.path.exists(path)) # run ogr2ogr ogr = OGRImport(path) layers = ogr.handle(configuration_options=configs) return layers def generic_import(self, filename, configs=None): if configs is None: configs = [{'index': 0}] path = test_file(filename) results = self.import_file(path, configs=configs) layer_results = [] for result in results: if result[1].get('raster'): layer_path = result[0] layer_name = os.path.splitext(os.path.basename(layer_path))[0] layer = Layer.objects.get(name=layer_name) self.assertTrue(layer_path.endswith('.tif')) self.assertTrue(os.path.exists(layer_path)) gdal_layer = gdal.OpenEx(layer_path) self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff') layer_results.append(layer) else: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not path.endswith('zip'): self.assertGreaterEqual(layer.attributes.count(), DataSource(path)[0].num_fields) layer_results.append(layer) return layer_results[0] def generic_api_upload(self, filenames, configs=None): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o']. 
self.assertNotIsInstance(filenames, str) # Upload Files outfiles = [] for filename in filenames: path = test_file(filename) with open(path) as stream: data = stream.read() upload = SimpleUploadedFile(filename, data) outfiles.append(upload) response = client.post(reverse('uploads-new-json'), { 'file': outfiles, 'json': json.dumps(configs) }, follow=True) content = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertEqual(content['id'], 1) # Configure Uploaded Files upload_id = content['id'] upload_layers = UploadLayer.objects.filter(upload_id=upload_id) for upload_layer in upload_layers: for config in configs: if config['upload_file_name'] == os.path.basename( upload_layer.name): payload = config['config'] url = '/importer-api/data-layers/{0}/configure/'.format( upload_layer.id) response = client.post(url, data=json.dumps(payload), content_type='application/json') self.assertEqual(response.status_code, 200) url = '/importer-api/data-layers/{0}/'.format( upload_layer.id) response = client.get(url, content_type='application/json') self.assertEqual(response.status_code, 200) return content def generic_raster_import(self, filename, configs=None): if configs is None: configs = [{'index': 0}] path = test_file(filename) results = self.import_file(path, configs=configs) layer_path = results[0][0] layer_name = os.path.splitext(os.path.basename(layer_path))[0] layer = Layer.objects.get(name=layer_name) self.assertTrue(layer_path.endswith('.tif')) self.assertTrue(os.path.exists(layer_path)) gdal_layer = gdal.OpenEx(layer_path) self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff') return layer def test_multi_upload(self): """Tests Uploading Multiple Files """ # Number of layers in each file upload_layer_counts = [1, 1, 1] upload = self.generic_api_upload(filenames=[ 'boxes_with_year_field.zip', 'boxes_with_date.zip', 'point_with_date.geojson' ], configs=[{ 'upload_file_name': 'boxes_with_year_field.shp', 'config': [{ 'index': 0 }] }, { 'upload_file_name': 'boxes_with_date.shp', 'config': [{ 'index': 0 }] }, { 'upload_file_name': 'point_with_date.geojson', 'config': [{ 'index': 0 }] }]) self.assertEqual(Layer.objects.count(), sum(upload_layer_counts)) self.assertEqual(9, upload['count']) def test_upload_with_slds(self): """Tests Uploading sld """ upload = self.generic_api_upload( filenames=['boxes_with_date.zip', 'boxes.sld', 'boxes1.sld'], configs=[{ 'upload_file_name': 'boxes_with_date.shp', 'config': [{ 'index': 0, 'default_style': 'boxes.sld', 'styles': ['boxes.sld', 'boxes1.sld'] }] }]) self.assertEqual(6, upload['count']) upload_id = upload['id'] uplayers = UploadLayer.objects.filter(upload=upload_id) layer_id = uplayers[0].pk upfiles_count = UploadFile.objects.filter(upload=upload_id).count() self.assertEqual(6, upfiles_count) # Warning: this assumes that Layer pks equal UploadLayer pks layer = Layer.objects.get(pk=layer_id) gslayer = self.catalog.get_layer(layer.name) default_style = gslayer.default_style # TODO: can we use public API or omit this? 
self.catalog._cache.clear() self.assertEqual('boxes.sld', default_style.filename) def test_upload_with_metadata(self): """Tests Uploading metadata """ upload = self.generic_api_upload(filenames=[ 'boxes_with_date.zip', 'samplemetadata.xml', ], configs=[{ 'upload_file_name': 'boxes_with_date.shp', 'config': [{ 'index': 0, 'metadata': 'samplemetadata.xml' }] }]) self.assertEqual(5, upload['count']) upload_id = upload['id'] uplayers = UploadLayer.objects.filter(upload=upload_id) layer_id = uplayers[0].pk upfiles_count = UploadFile.objects.filter(upload=upload_id).count() self.assertEqual(5, upfiles_count) layer = Layer.objects.get(pk=layer_id) self.assertEqual(layer.language, 'eng') self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar') def test_geotiff_raster(self): """Exercise GeoTIFF raster import. """ layer = self.generic_raster_import('test_grid.tif', configs=[{ 'index': 0 }]) self.assertTrue(layer.name.startswith('test_grid')) def test_nitf_raster(self): """Tests NITF raster import """ layer = self.generic_raster_import('test_nitf.nitf') self.assertTrue(layer.name.startswith('test_nitf')) def test_box_with_year_field(self): """Tests the import of test_box_with_year_field. """ layer = self.generic_import('boxes_with_year_field.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """Tests the import of test_boxes_with_date. """ layer = self.generic_import('boxes_with_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_gpkg(self): """Tests the import of test_boxes_with_date.gpkg. """ layer = self.generic_import('boxes_with_date.gpkg', configs=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_plus_raster_gpkg_by_index(self): """ Tests the import of multilayer vector + tile geopackage using index, treating tile layers as rasters. Tile layers are now treated by default as a distinct layer type. This test forces them to still be treated as rasters and should be removed once tests for vector/tile geopackage files are in place. 
""" layer = self.generic_import('boxes_plus_raster.gpkg', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }, { 'index': 1 }, { 'index': 2 }, { 'index': 3 }, { 'index': 4 }, { 'index': 5 }, { 'index': 6, 'layer_type': 'raster' }, { 'index': 7, 'layer_type': 'raster' }, ]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """Tests a CSV with WKT polygon. """ layer = self.generic_import('boxes_with_date.csv', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_csv_missing_features(self): """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding. """ self.generic_import('missing-features.csv', configs=[{'index': 0}]) def test_boxes_with_iso_date(self): """Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """Import the same layer twice to ensure file names increment properly. """ path = test_file('boxes_with_date_iso_date.zip') ogr = OGRImport(path) layers1 = ogr.handle({'index': 0, 'name': 'test'}) layers2 = ogr.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_launder(self): """Ensure the launder function works as expected. """ self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3') self.assertEqual(launder('Testing#'), 'testing_') self.assertEqual(launder(' '), '_') def test_boxes_with_date_iso_date_zip(self): """Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.zip', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """Tests the import of test_boxes_with_dates_bc. 
""" layer = self.generic_import('boxes_with_dates_bc.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """Tests the import of point_with_date.geojson """ layer = self.generic_import('point_with_date.geojson', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('boxes_with_end_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date', 'enddate'], 'start_date': 'date', 'end_date': 'enddate', 'configureTime': True }]) date_attr = get_layer_attr(layer, 'date_as_date') end_date_attr = get_layer_attr(layer, 'enddate_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime') configure_time(self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) def test_us_states_kml(self): """Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. layer = self.generic_import('us_states.kml', configs=[{'index': 0}]) self.assertEqual(layer.name.lower(), "us_states") def test_mojstrovka_gpx(self): """Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('mojstrovka.gpx', configs=[{ 'index': 0, 'convert_to_date': ['time'], 'configureTime': True }]) date_attr = get_layer_attr(layer, 'time_as_date') self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time(self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """Convenience method to run generic tests on time layers. """ # TODO: can we use public API or omit this? self.catalog._cache.clear() resource = self.catalog.get_resource(layer.name, store=layer.store, workspace=self.workspace) time_info = resource.metadata['time'] self.assertEqual('LIST', time_info.presentation) self.assertEqual(True, time_info.enabled) self.assertEqual(attribute, time_info.attribute) self.assertEqual(end_attribute, time_info.end_attribute) def test_us_shootings_csv(self): """Tests the import of US_Shootings.csv. 
""" if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') path = test_file('US_Shootings.csv') layer = self.generic_import(path, configs=[{ 'index': 0, 'convert_to_date': ['Date'] }]) self.assertTrue(layer.name.startswith('us_shootings')) date_field = 'date' configure_time(self.catalog.get_layer(layer.name).resource, attribute=date_field) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """Tests the import of US_Civil_Rights_Sitins0.csv """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') layer = self.generic_import('US_Civil_Rights_Sitins0.csv', configs=[{ 'index': 0, 'convert_to_date': ['Date'] }]) self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0') def get_layer_names(self, path): """Gets layer names from a data source. """ data_source = DataSource(path) return [layer.name for layer in data_source] def test_gdal_import(self): path = test_file('point_with_date.geojson') self.generic_import(path, configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) def test_wfs(self): """Tests the import from a WFS Endpoint """ wfs = 'WFS:http://demo.geo-solutions.it/geoserver/tiger/wfs' ogr = OGRImport(wfs) configs = [ { 'layer_name': 'tiger:giant_polygon' }, { 'layer_name': 'tiger:poi' }, ] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_arcgisjson(self): """Tests the import from a WFS Endpoint """ endpoint = 'http://sampleserver6.arcgisonline.com/arcgis/rest/services/Water_Network/FeatureServer/16/query'\ '?where=objectid=326&outfields=*&f=json' ogr = OGRImport(endpoint) configs = [{'index': 0}] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_file_add_view(self): """Tests the file_add_view. """ client = AdminClient() # test login required for this view request = client.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertEqual(len(response.context['object_list']), 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) with open(test_file('empty_file.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """Tests the file_add_view. 
""" client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new-json'), {'file': stream}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """Tests the describe fields functionality. """ path = test_file('US_Shootings.csv') with GDALInspector(path) as inspector: layers = inspector.describe_fields() self.assertTrue(layers[0]['layer_name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], [ 'Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude' ]) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """Tests the describe fields functionality. """ filenames = { 'US_Shootings.csv': {'CSV'}, 'point_with_date.geojson': {'GeoJSON'}, 'mojstrovka.gpx': {'GPX'}, 'us_states.kml': {'LIBKML', 'KML'}, 'boxes_with_year_field.shp': {'ESRI Shapefile'}, 'boxes_with_date_iso_date.zip': {'ESRI Shapefile'} } from osgeo_importer.models import NoDataSourceFound try: for filename, file_type in sorted(filenames.items()): path = test_file(filename) with GDALInspector(path) as inspector: self.assertIn(inspector.file_type(), file_type) except NoDataSourceFound: logging.exception('No data source found in: {0}'.format(path)) raise def test_configure_view(self): """Tests the configuration view. """ path = test_file('point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] client = AdminClient() client.login_as_non_admin() with open(path) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': { 'users': { 'test': new_user_perms, 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } } }] response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_user.username) perms = first_layer.get_all_level_info() user = User.objects.get(username=self.non_admin_user.username) # check user permissions expected_perms = [ u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data' ] for perm in expected_perms: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn('change_resourcebase_permissions', perms['users'][new_user]) self.assertIn( 'change_layer_data', perms['users'][User.objects.get(username='******')]) catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, 
first_layer) def test_configure_view_convert_date(self): """Tests the configure view with a dataset that needs to be converted to a date. """ client = AdminClient() client.login_as_non_admin() with open(test_file('US_Shootings.csv')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True }] response = client.get( '/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 405) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 400) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(first_layer.attributes.filter(attribute='date'), 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. client.logout() client.login_as_admin() response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 404) def test_list_api(self): client = AdminClient() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 401) client.login_as_non_admin() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.admin_user.username) non_admin = User.objects.get(username=self.non_admin_user.username) path = test_file('US_Shootings.csv') with open(path, 'rb') as stream: uploaded_file = SimpleUploadedFile('test_data', stream.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create( state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) client.login_as_admin() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = client.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): client = AdminClient() response = client.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) client.login_as_non_admin() response = client.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """Ensure users can delete their data. 
""" client = AdminClient() client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) upload_id = UploadedData.objects.first().id response = client.delete('/importer-api/data/{0}/'.format(upload_id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """Ensure that administrators can delete data that isn't theirs. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) client.logout() client.login_as_admin() upload_id = UploadedData.objects.first().id response = client.delete('/importer-api/data/{0}/'.format(upload_id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """Tests providing a name in the configuration options. """ client = AdminClient() client.login_as_non_admin() name = 'point-with-a-date' with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True } response = client.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json') self.assertEqual(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.title, name.replace('-', '_')) def test_api_import(self): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True } self.assertIsInstance( UploadLayer.objects.first().configuration_options, dict) response = client.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = client.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertIn('task', response.content) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, first_layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """Test the file extension validator. """ for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions: filename = 'test.{0}'.format(extension) upload = SimpleUploadedFile(filename, '') self.assertIsNone(validate_file_extension(upload)) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """Test the file extension validator. 
""" with self.assertRaises(ValidationError): validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configs=[{ 'configureTime': False, 'convert_to_date': ['W1_OPENDAT'], 'editable': True, 'index': 0, 'name': 'Walmart', 'start_date': 'W1_OPENDAT' }]) def test_multipolygon_shapefile(self): """Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configs=[{'index': 0}]) def test_istanbul(self): """Tests shapefile with multipart polygons and non-WGS84 SR. """ result = self.generic_import('Istanbul.zip', configs=[{'index': 0}]) feature_type = self.catalog.get_resource(result.name) self.assertEqual(feature_type.projection, 'EPSG:32635') def test_houston_tx_annexations(self): """Tests Shapefile with originally unsupported EPSG Code. """ result = self.generic_import('HoustonTXAnnexations.zip', configs=[{ 'index': 0 }]) feature_type = self.catalog.get_resource(result.name) self.assertEqual(feature_type.projection, 'EPSG:2278') def test_gwc_handler(self): """Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.catalog.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configs=[{ 'index': 0 }]) gs_layer = self.catalog.get_layer(layer.name) self.catalog._cache.clear() gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertNotIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) def test_utf8(self): """Tests utf8 characters in attributes """ path = test_file('china_provinces.shp') layer = self.generic_import(path) ogr = OGRImport(path) datastore, _ = ogr.open_target_datastore(ogr.target_store) sql = ("select NAME_CH from {0} where NAME_PY = 'An Zhou'".format( layer.name)) result = datastore.ExecuteSQL(sql) feature = result.GetFeature(0) self.assertEqual(feature.GetField('name_ch'), '安州') def test_non_converted_date(self): """Test converting a field as date. """ results = self.generic_import('TM_WORLD_BORDERS_2005.zip', configs=[{ 'index': 0, 'start_date': 'Year', 'configureTime': True }]) layer = self.catalog.get_layer(results.typename) self.assertIn('time', layer.resource.metadata) self.assertEqual('year', layer.resource.metadata['time'].attribute) def test_fid_field(self): """ Regression test for preserving an FID field when target layer supports it but source does not. """ self.generic_import('noaa_paleoclimate.zip', configs=[{'index': 0}]) def test_csv_with_wkb_geometry(self): """Exercise import of CSV files with multiple geometries. 
""" filenames = [ 'police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv', 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv', 'police_csv_noWKB.csv' ] for filename in filenames: self.generic_import( filename, { 'configureTime': True, 'convert_to_date': ['date_time'], 'editable': True, 'index': 0, 'name': filename.lower(), 'permissions': { 'users': { 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } }, 'start_date': 'date_time', })
r_set_wms_options = session.put(
    geoserver_url + '/services/wms/settings',
    data='<wms><metadata><entry key="advancedProjectionHandling">false</entry></metadata></wms>',
    headers=headers_xml)

# Delete old workspace and create new one
if cat.get_workspace(workspace):
    if cat.get_store(projectName, workspace):
        test = cat.get_store(projectName, workspace)
        cat.delete(cat.get_store(projectName, workspace=workspace),
                   purge="all", recurse=True)
    cat.delete(cat.get_workspace(workspace), purge="all", recurse=True)
    for layer in cat.get_layers():
        if layer.name.startswith(workspace):
            cat.delete(layer, recurse=True)
    for style in cat.get_styles(workspaces=[workspace]):
        cat.delete(style, recurse=True)
cat.create_workspace(workspace, geoserver_url + '/' + workspace)

# zip the ncFile
zfile = cfg['general']['workdir'] + '/outputFiles/data.zip'
logging.info('Writing Zipfile ' + zfile)
output = zipfile.ZipFile(zfile, 'w')
output.write(netcdfFile, projectName + '.nc', zipfile.ZIP_DEFLATED)
output.close()

# upload zip file (creating coveragestore and layers automatically)
logging.info('Uploading ' + zfile)
with open(output.filename, 'rb') as zip_file:
    r_create_layer = session.put(
        geoserver_url + '/workspaces/' + workspace +
        '/coveragestores/' + projectName + '/file.netcdf',
        data=zip_file,
        headers=headers_zip)
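# A follow-up sketch, not part of the original script: session.put() returns
# a requests.Response, so the two calls above can be checked before moving on.
# raise_for_status() raises on any 4xx/5xx answer; the labels are only for
# the log message.
for label, resp in (('wms settings', r_set_wms_options),
                    ('coverage upload', r_create_layer)):
    logging.info(label + ' returned HTTP ' + str(resp.status_code))
    resp.raise_for_status()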
class UploaderTests(DjagnoOsgeoMixin): """ Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): settings = connection.settings_dict params = {'database': settings['NAME'], 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': "True"} store = catalog.create_datastore(settings['NAME'], workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(settings['NAME']) def setUp(self): if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..', 'importer-test-files')): self.skipTest('Skipping test due to missing test data.') # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.username, self.password = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin') self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.cat.get_workspace('geonode') == None: self.cat.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.cat) def tearDown(self): """ Clean up geoserver. """ self.cat.delete(self.datastore, recurse=True) def generic_import(self, file, configuration_options=[{'index': 0}]): f = file filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f) res = self.import_file(filename, configuration_options=configuration_options) layer_results=[] for result in res: if result[1].get('raster'): layerfile = result[0] layername = os.path.splitext(os.path.basename(layerfile))[0] layer = Layer.objects.get(name=layername) self.assertTrue(layerfile.endswith('.tif')) self.assertTrue(os.path.exists(layerfile)) l = gdal.OpenEx(layerfile) self.assertTrue(l.GetDriver().ShortName, 'GTiff') layer_results.append(layer) else: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not filename.endswith('zip'): self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields) layer_results.append(layer) return layer_results[0] def generic_raster_import(self, file, configuration_options=[{'index': 0}]): f = file filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f) res = self.import_file(filename, configuration_options=configuration_options) layerfile = res[0][0] layername = os.path.splitext(os.path.basename(layerfile))[0] layer = Layer.objects.get(name=layername) self.assertTrue(layerfile.endswith('.tif')) self.assertTrue(os.path.exists(layerfile)) l = gdal.OpenEx(layerfile) self.assertTrue(l.GetDriver().ShortName, 'GTiff') return layer def test_raster(self): """ Tests raster import """ layer = self.generic_raster_import('test_grid.tif', configuration_options=[{'index': 0}]) def test_box_with_year_field(self): """ Tests the import of test_box_with_year_field. 
""" layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """ Tests the import of test_boxes_with_date. """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_gpkg(self): """ Tests the import of test_boxes_with_date.gpkg. """ layer = self.generic_import('boxes_with_date.gpkg', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_plus_raster_gpkg_by_index(self): """ Tests the import of multilayer vector + raster geopackage using index """ layer = self.generic_import('boxes_plus_raster.gpkg', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }, {'index':1}, {'index':2}, {'index':3}, {'index':4}, {'index':5}, {'index':6}, {'index':7},]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """ Tests a CSV with WKT polygon. """ layer = self.generic_import('boxes_with_date.csv', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_csv_missing_features(self): """ Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding. """ self.generic_import('missing-features.csv', configuration_options=[{'index': 0}]) def test_boxes_with_iso_date(self): """ Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """ Tests importing the same layer twice to ensure incrementing file names is properly handled. 
""" filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip') gi = OGRImport(filename) layers1 = gi.handle({'index': 0, 'name': 'test'}) layers2 = gi.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_launder(self): """ Ensure the launder function works as expected. """ self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3') self.assertEqual(launder('Testing#'), 'testing_') self.assertEqual(launder(' '), '_') def test_boxes_with_date_iso_date_zip(self): """ Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """ Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """ Tests the import of point_with_date.geojson """ layer = self.generic_import('point_with_date.geojson', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """ Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('boxes_with_end_date.shp',configuration_options=[{'index': 0, 'convert_to_date': ['date','enddate'], 'start_date': 'date', 'end_date': 'enddate', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] end_date_attr = filter(lambda attr: attr.attribute == 'enddate_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) def test_us_states_kml(self): """ Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. layer = self.generic_import('us_states.kml',configuration_options=[{'index': 0}]) def test_mojstrovka_gpx(self): """ Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. 
""" layer = self.generic_import('mojstrovka.gpx',configuration_options=[{'index': 0, 'convert_to_date': ['time'], 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'time_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """ Convenience method to run generic tests on time layers. """ self.cat._cache.clear() resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace) timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) self.assertEqual(attribute, timeInfo.attribute) self.assertEqual(end_attribute, timeInfo.end_attribute) def test_us_shootings_csv(self): """ Tests the import of US_Shootings.csv. """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') filename = 'US_Shootings.csv' f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename) layer = self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}]) self.assertTrue(layer.name.startswith('us_shootings')) date_field = 'date' configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """ Tests the import of US_Civil_Rights_Sitins0.csv """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') layer = self.generic_import("US_Civil_Rights_Sitins0.csv", configuration_options=[{'index': 0, 'convert_to_date': ['Date']}]) def get_layer_names(self, in_file): """ Gets layer names from a data source. """ ds = DataSource(in_file) return map(lambda layer: layer.name, ds) def test_gdal_import(self): filename = 'point_with_date.geojson' f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename) self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) def test_wfs(self): """ Tests the import from a WFS Endpoint """ wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs' gi = OGRImport(wfs) layers = gi.handle(configuration_options=[{'layer_name':'og:bugsites'}, {'layer_name':'topp:states'}]) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_arcgisjson(self): """ Tests the import from a WFS Endpoint """ endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json' gi = OGRImport(endpoint) layers = gi.handle(configuration_options=[{'index':0}]) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def import_file(self, in_file, configuration_options=[]): """ Imports the file. """ self.assertTrue(os.path.exists(in_file)) # run ogr2ogr gi = OGRImport(in_file) layers = gi.handle(configuration_options=configuration_options) return layers @staticmethod def createFeatureType(catalog, datastore, name): """ Exposes a PostGIS feature type in geoserver. 
""" headers = {"Content-type": "application/xml"} data = "<featureType><name>{name}</name></featureType>".format(name=name) url = datastore.href.replace(".xml", '/featuretypes.xml'.format(name=name)) headers, response = catalog.http.request(url, "POST ", data, headers) return response def test_file_add_view(self): """ Tests the file_add_view. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() # test login required for this view request = c.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertTrue(len(response.context['object_list']) == 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'empty_file.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """ Tests the file_add_view. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """ Tests the describe fields functionality. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') with GDALInspector(f) as f: layers = f.describe_fields() self.assertTrue(layers[0]['layer_name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude']) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """ Tests the describe fields functionality. 
""" files = [ (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv'), 'CSV'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson'), 'GeoJSON'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'mojstrovka.gpx'), 'GPX'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'us_states.kml'), 'KML'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_year_field.shp'), 'ESRI Shapefile'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'), ] from .models import NoDataSourceFound try: for path, file_type in files: with GDALInspector(path) as f: self.assertEqual(f.file_type(), file_type) except NoDataSourceFound as e: print 'No data source found in: {0}'.format(path) raise e def test_configure_view(self): """ Tests the configuration view. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username) perms = layer.get_all_level_info() user = User.objects.get(username=self.non_admin_username) # check user permissions for perm in [u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data']: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn('change_resourcebase_permissions', perms['users'][new_user]) self.assertIn("change_layer_data", perms['users'][User.objects.get(username='******')]) lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) def test_configure_view_convert_date(self): """ Tests the configure view with a dataset that needs to be converted to a date. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True}] response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 405) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 400) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(layer.attributes.filter(attribute='date'), 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. c.logout() c.login_as_admin() response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 404) def test_list_api(self): c = AdminClient() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.username) non_admin = User.objects.get(username=self.non_admin_username) from osgeo_importer.models import UploadFile f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') with open(f, 'rb') as f: uploaded_file = SimpleUploadedFile('test_data', f.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) c.login_as_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = c.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): c = AdminClient() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """ Ensure users can delete their data. 
""" c = AdminClient() f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """ Ensure that administrators can delete data that isn't theirs. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) c.logout() c.login_as_admin() id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """ Tests providing a name in the configuration options. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() name = 'point-with-a-date' with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = {'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True} response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json') layer = Layer.objects.all()[0] self.assertEqual(layer.title, name.replace('-', '_')) def test_api_import(self): """ Tests the import api. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = {'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True} self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict)) response = c.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertTrue('task' in response.content) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """ Test the file extension validator. """ for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions: self.assertIsNone(validate_file_extension(SimpleUploadedFile('test.{0}'.format(extension), ''))) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """ Test the file extension validator. 
""" with self.assertRaises(ValidationError): validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """ Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configuration_options=[{"configureTime":False,"convert_to_date":["W1_OPENDAT"],"editable":True,"index":0,"name":"Walmart","start_date":"W1_OPENDAT"}]) def test_multipolygon_shapefile(self): """ Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configuration_options=[{'index': 0}]) def test_non_4326_SR(self): """ Tests shapefile with multipart polygons. """ res = self.generic_import('Istanbul.zip', configuration_options=[{'index': 0}]) featuretype = self.cat.get_resource(res.name) self.assertEqual(featuretype.projection, 'EPSG:32635') def test_gwc_handler(self): """ Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.cat.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertTrue('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0}]) gs_layer = self.cat.get_layer(layer.name) self.cat._cache.clear() gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertFalse('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) def test_utf8(self): """ Tests utf8 characters in attributes """ filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'china_provinces.shp') layer = self.generic_import('china_provinces.shp') gi = OGRImport(filename) ds, insp = gi.open_target_datastore(gi.target_store) sql = str("select NAME_CH from %s where NAME_PY = 'An Zhou'" % (layer.name)) res = ds.ExecuteSQL(sql) feat = res.GetFeature(0) self.assertEqual(feat.GetField('name_ch'), "安州") def test_non_converted_date(self): """ Test converting a field as date. """ results = self.generic_import('TM_WORLD_BORDERS_2005.zip', configuration_options=[{'index': 0, 'start_date': 'Year', 'configureTime': True}]) layer = self.cat.get_layer(results.typename) self.assertTrue('time' in layer.resource.metadata) self.assertEqual('year', layer.resource.metadata['time'].attribute) def test_fid_field(self): """ Regression test for preserving an FID field when target layer supports it but source does not. """ self.generic_import('noaa_paleoclimate.zip', configuration_options=[{'index': 0}]) def test_csv_with_wkb_geometry(self): """ Tests problems with the CSV files with multiple geometries. """ files = ['police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv', 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv', 'police_csv_noWKB.csv'] for i in files: self.generic_import(i, {"configureTime":True,"convert_to_date":["date_time"],"editable":True,"index":0,"name":i.lower(),"permissions":{"users":{"AnonymousUser":["change_layer_data","download_resourcebase","view_resourcebase"]}},"start_date":"date_time",})
class ArchiveAndCatalogueI(geocloud.ArchiveAndCatalogue):
    def __init__(self, com):
        if not com:
            raise RuntimeError("No communicator")
        self.com = com
        q = com.stringToProxy('IceGrid/Query')
        self.query = IceGrid.QueryPrx.checkedCast(q)
        if not self.query:
            raise RuntimeError("Invalid proxy")
        self.broker = geocloud.BrokerPrx.checkedCast(com.stringToProxy("broker"))
        self.sem = Ice.threading.RLock()  # lock guarding the catalog and workspace state
        geoserver_path = "http://localhost:80/geoserver/rest"
        self.catalog = Catalog(geoserver_path)
        self.workspace = None

    def setBroker(self, broker, current=None):
        self.broker = broker

    def log(self, log):
        if self.broker:
            self.broker.begin_appendLog("<ArchiveAndCatalogue> " + str(log))
        else:
            print log

    def createScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            if self.catalog is None:
                self.log("No catalog available")
                raise geocloud.CreationScenarioException()
            try:
                self.workspace = self.catalog.create_workspace(scenario, scenario)
                self.workspace.enabled = True
                self.log("Created scenario %s" % scenario)
            except Exception:
                self.log("Workspace could not be created")
        except FailedRequestError:
            self.log("Request failed")
            raise geocloud.CreationScenarioException()
        finally:
            self.sem.release()

    def catalogue(self, path, store, scenario, current=None):
        if self.workspace is None:
            raise geocloud.CataloguingException()
        if self.catalog is None:
            raise geocloud.CataloguingException()
        self.sem.acquire()
        try:
            # store = path.split('/')[-1]
            self.catalog.create_coveragestore(store, path, self.workspace)
            self.log("%s catalogued!" % path)
        except ConflictingDataError as e:
            self.log(e)
        except UploadError as e:
            self.log(e)
        except FailedRequestError:
            pass
        except Exception as e:
            self.log(e)
        finally:
            self.sem.release()

    def deleteScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            for layer in self.catalog.get_layers():
                self.catalog.delete(layer)
            for store in self.catalog.get_stores():
                for resource in store.get_resources():
                    self.catalog.delete(resource)
                self.catalog.delete(store)
            for workspace in self.catalog.get_workspaces():
                self.catalog.delete(workspace)
        except Exception:
            self.log("Exception while cleaning scenario")
        finally:
            self.sem.release()
if __name__ == "__main__": if (len(sys.argv) != 4): print "Error with arguments. Must enter almost three" sys.exit(-1) print sys.argv[1] name_file = sys.argv[1].split("/")[-1] scenario = sys.argv[2] nameStore = sys.argv[3] cat = Catalog(geoserver_path) #pdb.set_trace() try: print name_file, sys.argv[1] vk = cat.get_workspace(scenario) if vk == None: vk = cat.create_workspace(scenario, scenario) vk.enabled = True try: cv = cat.create_coveragestore(nameStore, sys.argv[1], vk) inf = CoverageStore(cat, vk, scenario) inf.fetch() cat.save(inf) f.write("Processed %s" % (name_file)) except Exception: print "Catalogued %s!" % (name_file) print nameStore, name_file, scenario comman = 'curl -i --data "layerName=%s&coverageStore=%s&imageurl=ftp://131.254.204.143:21/../../usr/share/tomcat7/apache-tomcat-7.0.53/webapps/geoserver/data/data/%s/%s/%s.geotiff" http://172.18.242.41:8043/IDV' % ( name_file, nameStore, scenario, nameStore, nameStore) print comman print "Sending to IDV module!" #os.system(comman)
class GeoGigUploaderBase(ImportHelper): def __init__(self, *args, **kwargs): super(GeoGigUploaderBase, self).__init__(*args, **kwargs) setUpModule() # this isn't available when being used in other module def setUp(self): self.admin_user = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_user = self.create_user('non_admin', 'non_admin') self.catalog = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.catalog.get_workspace('geonode') is None: self.catalog.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastoreNames = [] def tearDown(self): """Clean up geoserver/geogig catalog. """ # delete stores (will cascade to delete layers) for store_name in self.datastoreNames: self.catalog.delete(self.catalog.get_store(store_name), recurse=True) # delete repository reference in geoserver for store_name in self.datastoreNames: self.remove_geogig_repo(store_name) # geoserver can leave connections open - HACK HACK HACK self.free_geogig_connections() # HACK HACK HACK -- sometimes connections from geoserver to geogig are left open. This kills the postgresql backend! # this is a major hammer. Once geoserver/geogig are better at cleaning up, remove this. def free_geogig_connections(self): with db.connections["geogig"].cursor() as c: c.execute( "select pg_terminate_backend(pid) from pg_stat_activity where application_name = 'PostgreSQL JDBC Driver' or application_name='geogig'" ) # aggressive delete of the repo (mostly cleans up after itself) # call the geogig rest API DELETE def remove_geogig_repo(self, ref_name): username = ogc_server_settings.credentials.username password = ogc_server_settings.credentials.password url = ogc_server_settings.rest http = httplib2.Http(disable_ssl_certificate_validation=False) http.add_credentials(username, password) netloc = urlparse(url).netloc http.authorizations.append( httplib2.BasicAuthentication((username, password), netloc, url, {}, None, None, http)) rest_url = ogc_server_settings.LOCATION + \ "geogig/repos/" + ref_name + "/delete.json" resp, content = http.request(rest_url, 'GET') response = json.loads(content) token = response["response"]["token"] rest_url = ogc_server_settings.LOCATION + \ "geogig/repos/" + ref_name + "?token=" + token resp, content = http.request(rest_url, 'DELETE') # convenience method to load in the test dataset # return a (geonode) layer # the layer will be in Geoserver and Geonode # self.catalog.get_layer(layer.name) -- to get the Geoserver Layer def fully_import_file(self, path, fname, start_time_column, end_time_column=None): # setup time if end_time_column is None: time_config = { 'convert_to_date': [start_time_column], 'start_date': start_time_column, 'configureTime': True } else: time_config = { 'convert_to_date': [start_time_column, end_time_column], 'start_date': start_time_column, 'end_date': end_time_column, 'configureTime': True } name = os.path.splitext(fname)[0] + "_" + str(uuid.uuid1())[:8] self.datastoreNames.append(name) # remember for future deletion full_fname = os.path.join(path, fname) configs = self.prepare_file_for_import(full_fname) configs[0].update({'name': name}) configs[0].update({'layer_name': name}) configs[0].update(time_config) # configure the datastore/repo configs[0]['geoserver_store'] = {} configs[0]['geoserver_store']['type'] = 'geogig' configs[0]['geoserver_store']['name'] = name configs[0]['geoserver_store']['create'] = 'true' configs[0]['geoserver_store']['branch'] = 'master' configs[0]['geoserver_store'][ 
'geogig_repository'] = "geoserver://" + name result = self.generic_import(fname, path=path, configs=configs) return result def import_file(self, path, configs=None): """Imports the file. """ if configs is None: configs = [] self.assertTrue(os.path.exists(path), path) # run ogr2ogr ogr = OGRImport(path) layers = ogr.handle(configuration_options=configs) return layers def generic_import(self, filename, path, configs=None): if configs is None: configs = [{'index': 0}] path = os.path.join(path, filename) results = self.import_file(path, configs=configs) layer_results = [] for result in results: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertTrue(layer.store in self.datastoreNames) self.assertEqual(layer.storeType, 'dataStore') if not path.endswith('zip'): self.assertGreaterEqual(layer.attributes.count(), DataSource(path)[0].num_fields) layer_results.append(layer) return layer_results[0] def prepare_file_for_import(self, filepath): """ Prepares the file path provided for import; performs some housekeeping, uploads & configures the file. Returns a list of dicts of the form {'index': <layer_index>, 'upload_layer_id': <upload_layer_id>} these may be used as configuration options for importing all of the layers in the file. """ # Make a copy of the test file, as it's removed in configure_upload() filename = os.path.basename(filepath) tmppath = os.path.join('/tmp', filename) shutil.copy(filepath, tmppath) # upload & configure_upload expect closed file objects # This is heritage from originally being closely tied to a view passing request.Files of = open(tmppath, 'rb') of.close() files = [of] uploaded_data = self.upload(files, self.admin_user) self.configure_upload(uploaded_data, files) configs = [{ 'index': l.index, 'upload_layer_id': l.id } for l in uploaded_data.uploadlayer_set.all()] return configs def create_user(self, username, password, **kwargs): """Convenience method for creating users. """ user, created = User.objects.get_or_create(username=username, **kwargs) if created: user.set_password(password) user.save() return user
def _register_cascaded_service(url, type, name, username, password, wms=None, owner=None, parent=None): """ Register a service as cascading WMS """ try: service = Service.objects.get(base_url=url) return_dict = {} return_dict["service_id"] = service.pk return_dict["msg"] = "This is an existing Service" return HttpResponse(json.dumps(return_dict), mimetype="application/json", status=200) except: # TODO: Handle this error properly pass if wms is None: wms = WebMapService(url) # TODO: Make sure we are parsing all service level metadata # TODO: Handle for setting ServiceProfiletRole service = Service.objects.create( base_url=url, type=type, method="C", name=name, version=wms.identification.version, title=wms.identification.title, abstract=wms.identification.abstract, online_resource=wms.provider.url, owner=owner, parent=parent, ) service.keywords = ",".join(wms.identification.keywords) service.save() service.set_default_permissions() if type in ["WMS", "OWS"]: # Register the Service with GeoServer to be cascaded cat = Catalog(settings.OGC_SERVER["default"]["LOCATION"] + "rest", _user, _password) cascade_ws = cat.get_workspace(name) if cascade_ws is None: cascade_ws = cat.create_workspace(name, "http://geonode.org/" + name) # TODO: Make sure there isn't an existing store with that name, and # deal with it if there is try: cascade_store = cat.get_store(name, cascade_ws) except: cascade_store = cat.create_wmsstore(name, cascade_ws, username, password) cascade_store.capabilitiesURL = url cascade_store.type = "WMS" cat.save(cascade_store) available_resources = cascade_store.get_resources(available=True) elif type == "WFS": # Register the Service with GeoServer to be cascaded cat = Catalog(settings.OGC_SERVER["default"]["LOCATION"] + "rest", _user, _password) # Can we always assume that it is geonode? 
cascade_ws = cat.get_workspace(settings.CASCADE_WORKSPACE) if cascade_ws is None: cascade_ws = cat.create_workspace(settings.CASCADE_WORKSPACE, "http://geonode.org/cascade") try: wfs_ds = cat.get_store(name, cascade_ws) except: wfs_ds = cat.create_datastore(name, cascade_ws) connection_params = { "WFSDataStoreFactory:MAXFEATURES": "0", "WFSDataStoreFactory:TRY_GZIP": "true", "WFSDataStoreFactory:PROTOCOL": "false", "WFSDataStoreFactory:LENIENT": "true", "WFSDataStoreFactory:TIMEOUT": "3000", "WFSDataStoreFactory:BUFFER_SIZE": "10", "WFSDataStoreFactory:ENCODING": "UTF-8", "WFSDataStoreFactory:WFS_STRATEGY": "nonstrict", "WFSDataStoreFactory:GET_CAPABILITIES_URL": url, } if username and password: connection_params["WFSDataStoreFactory:USERNAME"] = username connection_params["WFSDataStoreFactory:PASSWORD"] = password wfs_ds.connection_parameters = connection_params cat.save(wfs_ds) available_resources = wfs_ds.get_resources(available=True) # Save the Service record service, created = Service.objects.get_or_create(type=type, method="C", base_url=url, name=name, owner=owner) service.save() service.set_default_permissions() elif type == "WCS": return HttpResponse("Not Implemented (Yet)", status=501) else: return HttpResponse( "Invalid Method / Type combo: " + "Only Cascaded WMS, WFS and WCS supported", mimetype="text/plain", status=400, ) message = "Service %s registered" % service.name return_dict = [ { "status": "ok", "msg": message, "service_id": service.pk, "service_name": service.name, "service_title": service.title, "available_layers": available_resources, } ] if settings.USE_QUEUE: # Create a layer import job WebServiceHarvestLayersJob.objects.get_or_create(service=service) else: _register_cascaded_layers(service) return HttpResponse(json.dumps(return_dict), mimetype="application/json", status=200)
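The WMS/OWS branch above follows the usual gsconfig cascade pattern: ensure a workspace exists, create a WMS store pointing at the remote capabilities URL, save it, and list the remote layers that are not yet published locally. A condensed sketch, with cat a gsconfig Catalog and name/url placeholders for the remote service being cascaded:

# Condensed sketch of the cascaded-WMS registration shown above; names are placeholders.
def register_cascaded_wms(cat, name, url, username=None, password=None):
    workspace = cat.get_workspace(name)
    if workspace is None:
        workspace = cat.create_workspace(name, 'http://geonode.org/' + name)
    store = cat.create_wmsstore(name, workspace, username, password)
    store.capabilitiesURL = url
    store.type = 'WMS'
    cat.save(store)
    # layers the remote WMS advertises but that are not yet published locally
    return store.get_resources(available=True)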
if __name__ == "__main__": if (len(sys.argv) != 4): print "Error with arguments. Must enter almost three" sys.exit(-1) print sys.argv[1] name_file=sys.argv[1].split("/")[-1] scenario=sys.argv[2] nameStore=sys.argv[3] cat = Catalog(geoserver_path) #pdb.set_trace() try: print name_file,sys.argv[1] vk = cat.get_workspace(scenario) if vk ==None: vk=cat.create_workspace(scenario,scenario) vk.enabled=True try: cv = cat.create_coveragestore(nameStore,sys.argv[1],vk) inf=CoverageStore(cat,vk,scenario) inf.fetch() cat.save(inf) f.write("Processed %s"%(name_file)) except Exception: print "Catalogued %s!"%(name_file) print nameStore,name_file,scenario comman ='curl -i --data "layerName=%s&coverageStore=%s&imageurl=ftp://131.254.204.143:21/../../usr/share/tomcat7/apache-tomcat-7.0.53/webapps/geoserver/data/data/%s/%s/%s.geotiff" http://172.18.242.41:8043/IDV'%(name_file,nameStore,scenario,nameStore,nameStore) print comman print "Sending to IDV module!" #os.system(comman)
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog("http://localhost:8080/geoserver/rest") def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) def testPublishFeatureType(self): # Use the other test and store creation to load vector data into a database # @todo maybe load directly to database? try: self.testDataStoreCreateAndThenAlsoImportData() except FailedRequestError: pass try: lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. self.cat.delete(lyr) self.cat.delete(lyr.resource) ds = self.cat.get_store("gsconfig_import_test") # make sure it's gone self.assert_(self.cat.get_layer('import') is None) self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326") # and now it's not self.assert_(self.cat.get_layer('import') is not None) finally: # tear stuff down to allow the other test to pass if we run first ds = self.cat.get_store("gsconfig_import_test") lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. 
self.cat.delete(lyr) self.cat.delete(lyr.resource) self.cat.delete(ds) def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) @drop_table('import') def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store( ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) def testCoverageStoreCreate(self): ds = self.cat.create_coveragestore2("coverage_gsconfig") ds.data_url = "file:data/mytiff.tiff" self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set( ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testWmsStoreCreate(self): ws = self.cat.create_wmsstore("wmsstore_gsconfig") ws.capabilitiesURL = 
"http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" ws.type = "WMS" self.cat.save(ws) def testWmsLayer(self): self.cat.create_workspace("wmstest", "http://example.com/wmstest") wmstest = self.cat.get_workspace("wmstest") wmsstore = self.cat.create_wmsstore("wmsstore", wmstest) wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" wmsstore.type = "WMS" self.cat.save(wmsstore) wmsstore = self.cat.get_store("wmsstore") self.assertEqual(1, len(self.cat.get_stores(wmstest))) available_layers = wmsstore.get_resources(available=True) for layer in available_layers: # sanitize the layer name - validation will fail on newer geoservers name = layer.replace(':', '_') new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer) added_layers = wmsstore.get_resources() self.assertEqual(len(available_layers), len(added_layers)) changed_layer = added_layers[0] self.assertEqual(True, changed_layer.advertised) self.assertEqual(True, changed_layer.enabled) changed_layer.advertised = False changed_layer.enabled = False self.cat.save(changed_layer) self.cat._cache.clear() changed_layer = wmsstore.get_resources()[0] changed_layer.fetch() self.assertEqual(False, changed_layer.advertised) self.assertEqual(False, changed_layer.enabled) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.iteritems(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assert_( self.cat.get_resource("states_test", workspace=sf) is not None) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore( "states_test", shapefile_plus_sidecars, sf)) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore( "states_raster_test", shapefile_plus_sidecars, sf)) bogus_shp = { 'shp': 'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assert_(self.cat.get_layer("states_test") is None) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") # TODO: Uploading WorldImage file no longer works??? 
# ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf) # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None) # self.assertRaises( # ConflictingDataError, # lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf) # ) self.assertRaises( UploadError, lambda: self.cat.create_featurestore( "Pk50095_vector", tiffdata, sf)) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = "Not the original attribution" # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next() lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # upload new style, verify existence self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assert_(self.cat.get_style("fred") is None) # attempt creating new style self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) # verify it can be found via URL and check the name f = self.cat.get_style_by_url(fred.href) self.assert_(f is not None) self.assertEqual(f.name, fred.name) def testWorkspaceStyles(self): # upload new style, verify existence self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="blarny") self.assert_(jed is None) jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) jed = self.cat.get_style("topp:jed") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) # replace style, verify changes self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Ted", jed.sld_title) # delete style, verify non-existence self.cat.delete(jed, purge=True) self.assert_(self.cat.get_style("jed", workspace="topp") is None) # attempt creating new style self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertEqual("Fred", jed.sld_title) # verify it can be found via URL and check the full name f = self.cat.get_style_by_url(jed.href) self.assert_(f is not None) self.assertEqual(f.fqn, jed.fqn) def testLayerWorkspaceStyles(self): # upload new style, 
verify existence self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp") self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp") ned = self.cat.get_style("ned", workspace="topp") zed = self.cat.get_style("zed", workspace="topp") self.assert_(ned is not None) self.assert_(zed is not None) lyr = self.cat.get_layer("states") lyr.default_style = ned lyr.styles = [zed] self.cat.save(lyr) self.assertEqual("topp:ned", lyr.default_style) self.assertEqual([zed], lyr.styles) lyr.refresh() self.assertEqual("topp:ned", lyr.default_style.fqn) self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles]) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assert_(ws is None) def testWorkspaceDefault(self): # save orig orig = self.cat.get_default_workspace() neu = self.cat.create_workspace("neu", "http://example.com/neu") try: # make sure setting it works self.cat.set_default_workspace("neu") ws = self.cat.get_default_workspace() self.assertEqual('neu', ws.name) finally: # cleanup and reset to the way things were self.cat.delete(neu) self.cat.set_default_workspace(orig.name) ws = self.cat.get_default_workspace() self.assertEqual(orig.name, ws.name) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) states.enabled = False self.assert_(states.enabled == False) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == False) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) def testLayerGroupSave(self): tas = self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities' ], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = tas.styles[:-1] self.cat.save(tas) # this verifies the local state self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) # force a refresh to check the remote state tas.refresh() self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) def testTimeDimension(self): sf = self.cat.get_workspace("sf") files = shapefile_and_friends( os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date")) self.cat.create_featurestore("boxes_with_end_date", files, sf) get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer( 'boxes_with_end_date').resource # configure time as LIST resource = get_resource() timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date") resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) 
self.assertEqual("date", timeInfo.attribute) self.assertEqual("ISO8601", timeInfo.units) # disable time dimension timeInfo = resource.metadata['time'] timeInfo.enabled = False # since this is an xml property, it won't get written unless we modify it resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(False, timeInfo.enabled) # configure with interval, end_attribute and enable again timeInfo.enabled = True timeInfo.presentation = 'DISCRETE_INTERVAL' timeInfo.resolution = '3 days' timeInfo.end_attribute = 'enddate' resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(True, timeInfo.enabled) self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation) self.assertEqual('3 days', timeInfo.resolution_str()) self.assertEqual('enddate', timeInfo.end_attribute) def testImageMosaic(self): # testing the mosaic creation name = 'cea_mosaic' data = open('test/data/mosaic/cea.zip', 'rb') self.cat.create_imagemosaic(name, data) # get the layer resource back self.cat._cache.clear() resource = self.cat.get_layer(name).resource self.assert_(resource is not None) # delete granule from mosaic coverage = name store = name granule_id = name + '.1' self.cat.mosaic_delete_granule(coverage, store, granule_id)
class Extractor:
    def __init__(self, input_dir):
        logging.basicConfig(filename='extractor.log', level=logging.INFO)
        self.table_count = 0
        self.working_dir = input_dir
        self.schemas = []
        self.tables = {}
        # pg connection
        self.conn = psycopg2.connect(
            dbname=config.PGDATABASE, user=config.PGUSER,
            password=config.PGPASSWORD, host=config.PGHOST)
        self.cur = self.conn.cursor()
        self.sql_commands = []
        # gs config
        self.cat = None

    def get_files_by_format(self, format):
        logging.info('Started')
        for subdir, dirs, files in os.walk(self.working_dir):
            # lower-case the directory name and strip spaces to build the schema name
            schema = ''.join(s.lower() for s in os.path.basename(subdir)
                             if os.path.basename(subdir) and not s.isspace())
            if schema != '':
                self.schemas.append(schema)
            for file in files:
                if file.endswith(".{}".format(format)):
                    table_name = ''.join(s.lower() for s in file if not s.isspace())
                    self.tables[Path(table_name).resolve().stem] = {
                        'schema': schema,
                        'path': '{}/{}'.format(subdir, file)
                    }

    def bulk_create(self):
        for schema in self.schemas:
            if schema == '':
                continue
            self._push_to_database(self._build_create_schema_query(schema))
        self.commit_to_database()

    def gs_connect_geoserver(self):
        self.cat = Catalog(config.GSREST, username=config.GSUSER, password=config.GSPASSWORD)

    def gs_create_workspace(self, ws):
        if self.cat.get_workspace(ws):
            logging.info('workspace {} exists, skipping...'.format(ws))
        else:
            # gsconfig requires a namespace URI as well as a name
            self.cat.create_workspace(ws, ws)

    def gs_get_data_store(self, store):
        return self.cat.get_store(store)

    def gs_create_store(self, schema, workspace_name):
        try:
            self.cat.get_store(schema)
        except Exception:
            # store does not exist yet: create a PostGIS datastore pointing at the schema
            ds = self.cat.create_datastore(schema, workspace_name)
            ds.connection_parameters.update({
                'host': config.PGHOST,
                'port': config.PGPORT,
                'database': config.PGDATABASE,
                'user': config.PGUSER,
                'passwd': config.PGPASSWORD,
                'dbtype': 'postgis',
                'schema': schema,
                'Expose primary keys': 'true'
            })
            self.cat.save(ds)

    def gs_publish_feature_type(self, table, store_name):
        data_store = self.gs_get_data_store(store_name)
        self.cat.publish_featuretype(table, data_store, 'EPSG:2154', srs='EPSG:2154')

    @staticmethod
    def insert_data_from_shapefile(table, **kwargs):
        schema = kwargs['schema']
        path = kwargs['path']
        cmd = 'shp2pgsql -s {} {} "{}"."{}" | ' \
              'PGPASSWORD={} psql -h {} -d {} -U {}'.format(
                  EPSG, path, schema, table, config.PGPASSWORD,
                  config.PGHOST, config.PGDATABASE, config.PGUSER)
        subprocess.call(cmd, shell=True)

    @staticmethod
    def _build_create_table_query(dirname, shapefile):
        return 'CREATE TABLE {}.{};'.format(dirname, shapefile)

    @staticmethod
    def _build_create_schema_query(schema):
        return 'CREATE SCHEMA {};'.format(schema)

    def _push_to_database(self, query):
        self.cur.execute(query)

    def commit_to_database(self):
        self.conn.commit()
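End to end, the Extractor above is meant to be driven roughly as follows; the input directory and workspace name are placeholders, and each folder of shapefiles becomes a PostGIS schema that is then exposed through GeoServer:

# Usage sketch for the Extractor above; '/data/shapefiles' and 'my_workspace' are placeholders.
extractor = Extractor('/data/shapefiles')
extractor.get_files_by_format('shp')           # index shapefiles by schema
extractor.bulk_create()                         # create one PostGIS schema per folder
extractor.gs_connect_geoserver()
extractor.gs_create_workspace('my_workspace')
for table, info in extractor.tables.items():
    Extractor.insert_data_from_shapefile(table, **info)   # shp2pgsql load
    extractor.gs_create_store(info['schema'], 'my_workspace')
    extractor.gs_publish_feature_type(table, info['schema'])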
def _register_cascaded_layers(service, owner=None):
    """
    Register layers for a cascading WMS
    """
    if service.type == 'WMS' or service.type == "OWS":
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      _user, _password)
        # Can we always assume that it is geonode?
        # Should cascading layers have a separate workspace?
        cascade_ws = cat.get_workspace(settings.CASCADE_WORKSPACE)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(settings.CASCADE_WORKSPACE, 'cascade')
        try:
            store = cat.get_store(service.name, cascade_ws)
        except Exception:
            store = cat.create_wmsstore(service.name, cascade_ws)
        wms = WebMapService(service.base_url)
        layers = list(wms.contents)
        count = 0
        for layer in layers:
            lyr = cat.get_resource(layer, store, cascade_ws)
            if lyr is None:
                if service.type in ["WMS", "OWS"]:
                    resource = cat.create_wmslayer(cascade_ws, store, layer)
                elif service.type == "WFS":
                    resource = cat.create_wfslayer(cascade_ws, store, layer)
                if resource:
                    cascaded_layer, created = Layer.objects.get_or_create(
                        typename="%s:%s" % (cascade_ws.name, resource.name),
                        defaults={
                            "name": resource.name,
                            "workspace": cascade_ws.name,
                            "store": store.name,
                            "storeType": store.resource_type,
                            "title": resource.title or 'No title provided',
                            "abstract": resource.abstract or 'No abstract provided',
                            "owner": None,
                            "uuid": str(uuid.uuid4())
                        })
                    if created:
                        cascaded_layer.save()
                        if cascaded_layer is not None and cascaded_layer.bbox is None:
                            cascaded_layer._populate_from_gs(gs_resource=resource)
                        cascaded_layer.set_default_permissions()
                        service_layer, created = ServiceLayer.objects.get_or_create(
                            service=service,
                            typename=cascaded_layer.name
                        )
                        service_layer.layer = cascaded_layer
                        service_layer.title = cascaded_layer.title
                        service_layer.description = cascaded_layer.abstract
                        service_layer.styles = cascaded_layer.styles
                        service_layer.save()
                    count += 1
                else:
                    logger.error("Resource %s from store %s could not be saved as layer"
                                 % (layer, store.name))
        message = "%d Layers Registered" % count
        return_dict = {'status': 'ok', 'msg': message}
        return HttpResponse(json.dumps(return_dict),
                            mimetype='application/json',
                            status=200)
    elif service.type == 'WCS':
        return HttpResponse('Not Implemented (Yet)', status=501)
    else:
        return HttpResponse('Invalid Service Type', status=400)
class wrap_geoserver: """ Geoserver (gsconfig) wrapper """ def __init__(self, geoserver_name, username=username, password=password, easy=False): if geoserver_name in list(REST.keys()): self.path = REST[geoserver_name] else: self.path = geoserver_name self.wms = self.path.replace("rest/", "wms") self.name = geoserver_name self.catalog = Catalog(self.path, username, password) if not easy: self.layers = [] self.layer_names = [] for layer in self.catalog.get_layers(): self.layers.append(layer) self.layer_names.append(layer.name) self.stores = [store for store in self.catalog.get_stores()] self.store_names = [store.name for store in self.stores] styles = [] self.workspaces = [] self.workspace_names = [] for workspace in self.catalog.get_workspaces(): styles = styles + self.catalog.get_styles(workspace) self.workspace_names.append(workspace._name) self.workspaces.append(workspace) self.styles = styles + [ style for style in self.catalog.get_styles() ] self.style_names = [style.name for style in self.styles] def unpack(self, workspace_name, store_type="datastore"): layers_and_styles = {} features = [] workspace = self.get_workspace(workspace_name) if store_type == "datastore": store_url = workspace.datastore_url elif store_type == "coveragestore": store_url = workspace.coveragestore_url else: print("No correct store given") for datastore in tqdm(get(store_url, "name")): url = "{}workspaces/{}/datastores/{}".format( self.path, workspace.name, datastore) features = features + get(url, between_quotes=True) for feature in features: layer_name = os.path.basename(feature).split(".")[0] self.get_layer(self.get_slug(workspace.name, layer_name)) layers_and_styles[layer_name] = self.layer.default_style setattr(self, workspace_name + "_data", layers_and_styles) return layers_and_styles def get_layer(self, layer, easy=False): self.layer = self.catalog.get_layer(layer) if not easy: self.resource = self.layer.resource self.layer_name = self.layer.resource.name self.sld_name = self.layer.default_style.name self.sld_body = self.layer.default_style.sld_body self.layer_latlon_bbox = self.layer.resource.latlon_bbox self.layer_title = self.layer.resource.title self.layer_abstract = self.layer.resource.abstract def get_store(self, layer): self.store = self.layer.resource._store def get_resource(self): self.resource = self.catalog.get_resource(self.layer.name, self.store) def get_workspace(self, workspace_name): self.workspace = self.catalog.get_workspace(workspace_name) self.workspace_name = self.workspace._name return self.workspace def write_abstract(self, data, load_resource=True): if load_resource: self.get_resource() self.resource.abstract = data self.catalog.save(self.resource) def write_title(self, title): self.resource.title = title self.catalog.save(self.resource) def get_connection_parameters(self): self.get_resource() return self.resource.store.connection_parameters def create_workspace(self, workspace_name): workspace_exists = workspace_name in self.workspace_names if not workspace_exists: self.workspace = self.catalog.create_workspace(workspace_name) else: print("workspace already exists, using existing workspace") self.workspace = self.catalog.get_workspace(workspace_name) self.workspace_name = workspace_name def create_postgis_datastore(self, store_name, workspace_name, pg_data): try: self.store = self.catalog.get_store(store_name, self.workspace_name) print("store within workspace exists, using existing store") except Exception as e: print(e) ds = self.catalog.create_datastore(store_name, 
workspace_name) ds.connection_parameters.update( host=pg_data["host"], port=pg_data["port"], database=pg_data["database"], user=pg_data["username"], passwd=pg_data["password"], dbtype="postgis", schema="public", ) self.save(ds) self.store = self.catalog.get_store(store_name, self.workspace_name) self.store_name = store_name def publish_layer(self, layer_name, workspace_name, overwrite=False, epsg="3857", reload=False): layer_exists = layer_name in self.layer_names # if layer_name in self.workspace_layers[workspace_name]: slug = self.get_slug(workspace_name, layer_name) if overwrite and layer_exists: print("Layer exists, deleting layer") try: self.layer = self.catalog.get_layer(slug) self.delete(self.layer) self.reload() layer_exists = False except Exception as e: print(e) print("Layer does not exist in workspace") layer_exists = False if not layer_exists: feature_type = self.catalog.publish_featuretype( layer_name, self.store, "EPSG:{}".format(str(epsg)), srs="EPSG:{}".format(str(epsg)), ) self.save(feature_type) self.feature_type = feature_type else: print("layer already exists, using existing layer") if reload: self.get_layer(slug) self.layer_name = layer_name def publish_layergroup(self, name, layers, styles=(), bounds=None, workspace=None): layer_group = self.catalog.create_layergroup(name, layers, styles, bounds, workspace) self.save(layer_group) def save(self, save_object): return self.catalog.save(save_object) def close(self): self.catalog = None def delete(self, delete_object): self.catalog.delete(delete_object) def reload(self): self.catalog.reload() def upload_shapefile(self, layer_name, shapefile_path): path = shapefile_path.split(".shp")[0] shapefile = shapefile_and_friends(path) ft = self.catalog.create_featurestore(layer_name, shapefile, self.workspace) self.save(ft) def upload_sld(self, sld_name, workspace_name, sld, overwrite=True): style_exists = sld_name in self.style_names if overwrite and style_exists: print("Overwriting style") style = self.catalog.get_style(sld_name, workspace_name) self.delete(style) self.reload() style_exists = False if not style_exists: try: self.catalog.create_style(sld_name, sld, False, workspace_name, "sld11") except Exception as e: print(e) style = self.catalog.get_style(sld_name, workspace_name) self.delete(style) self.reload() self.catalog.create_style(sld_name, sld, False, workspace_name, "sld10") self.style_name = sld_name else: if style_exists: print("Style already exists, using current style") self.style_name = sld_name def set_sld_for_layer(self, workspace_name=None, style_name=None, use_custom=False): if not use_custom: workspace_name = self.workspace_name style_name = self.style_name self.style_slug = self.get_slug(workspace_name, style_name) else: if workspace_name is None: self.style_slug = style_name else: self.style_slug = self.get_slug(workspace_name, style_name) self.style = self.catalog.get_style(self.style_slug) print("Setting {} for {}".format(self.style.name, self.layer.name)) self.layer.default_style = self.style self.save(self.layer) def get_slug(self, workspace, name): return "{}:{}".format(workspace, name) def get_slug_data(self, slug): workspace_name = slug.split(":")[0] layer_name = slug.split(":")[1] return workspace_name, layer_name def get_sld(self, layer_slug=None): if layer_slug is None: self.style = self.catalog.get_style(self.layer_slug) else: self.style = self.catalog.get_style(layer_slug) self.sld_body = self.style.sld_body return self.sld_body def get_layer_workspace(self, layer_name): return 
self.catalog.get_layer(layer_name).resource.workspace.name
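# Hypothetical usage sketch for the wrap_geoserver class above, not part of the
# original. The REST URL, credentials, the `pg_data` connection values and the
# layer/style names are all placeholders.
if __name__ == '__main__':
    gs = wrap_geoserver('http://localhost:8080/geoserver/rest/', 'admin', 'geoserver')
    gs.create_workspace('demo_ws')
    pg_data = {
        'host': 'localhost', 'port': '5432', 'database': 'gis',
        'username': 'postgres', 'password': 'secret',
    }
    gs.create_postgis_datastore('demo_store', 'demo_ws', pg_data)
    # reload=True so self.layer is populated for set_sld_for_layer below
    gs.publish_layer('parcels', 'demo_ws', overwrite=True, epsg='3857', reload=True)
    with open('parcels.sld') as f:
        gs.upload_sld('parcels_style', 'demo_ws', f.read())
    gs.set_sld_for_layer()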
class GsConn:
    def __init__(self, host, login, password, debug=False):
        """ Geoserver connection """
        self.host = host
        self.login = login
        self.password = password
        self.debug = debug
        # Connect to server
        self.cat = Catalog("http://%s/geoserver/rest" % host, login, password)
        if self.debug is True:
            print "Connected to geoserver"

    def create_workspace(self, name, overwrite=False):
        """
        Creates a workspace.
        :param name: Workspace name.
        :param overwrite: If True, delete existing workspace.
        :return: None
        """
        workspaces = [workspace.name for workspace in self.cat.get_workspaces()]
        if name in workspaces and overwrite is True:
            # ws2del = self.cat.get_workspace(name)
            # self.cat.delete(ws2del, purge=True, recurse=True)
            return None  # NOTE: If we delete the workspace then all associated layers are lost.
        elif name in workspaces and overwrite is False:
            print "ERROR: Workspace %s already exists (use overwrite=True)." % name
            return None
        self.cat.create_workspace(name, "http://%s/%s" % (self.host, name))
        if self.debug is True:
            print "Workspace %s available." % name
        ws = self.cat.get_workspace(name)
        ws.enabled = True

    def create_pg_store(self, name, workspace, host, port, login, password, dbname, schema, overwrite=False):
        """
        Creates a datastore.
        :param name: Name of the datastore.
        :param workspace: Name of the workspace to use.
        :param overwrite: If True, replace the datastore.
        :return: None
        """
        stores = [store.name for store in self.cat.get_stores()]
        if name in stores and overwrite is True:
            # st2del = self.cat.get_store(name)
            # self.cat.delete(st2del, purge=True, recurse=True)
            # self.cat.reload()
            return None  # NOTE: If we delete the store, every layer associated with it is lost.
        elif name in stores and overwrite is False:
            print "ERROR: Store %s already exists (use overwrite=True)." % name
            return None
        ds = self.cat.create_datastore(name, workspace)
        ds.connection_parameters.update(
            host=host, port=port, user=login, passwd=password,
            dbtype="postgis", database=dbname, schema=schema
        )
        self.cat.save(ds)
        ds = self.cat.get_store(name)
        if ds.enabled is False:
            print "ERROR: Geoserver store %s not enabled" % name
        if self.debug is True:
            print "Datastore %s created." % name

    def publish_pg_layer(self, layer_table, layer_name, store, srid, overwrite=True):
        """ Publishes a PostGIS table as a layer. """
        existing_lyr = self.cat.get_layer("ma_carte:%s" % layer_table)
        if existing_lyr is not None:
            print "Layer ma_carte:%s already exists, deleting it." % layer_table
            self.cat.delete(existing_lyr)
            self.cat.reload()
        ds = self.cat.get_store(store)
        ft = self.cat.publish_featuretype(layer_table, ds, "EPSG:%s" % srid, srs="EPSG:4326")
        ft.projection_policy = "REPROJECT_TO_DECLARED"
        ft.title = layer_name
        self.cat.save(ft)
        if ft.enabled is False:
            print "ERROR: Layer %s %s %s is not enabled." % (ft.workspace.name, ft.store.name, ft.title)
        if self.debug is True:
            print "Layer %s>%s>%s published." % (ft.workspace.name, ft.store.name, ft.title)

    def create_style_from_sld(self, style_name, sld_file, workspace, overwrite=True):
        """ Creates a style from an SLD file. """
        if self.cat.get_style(style_name) is not None:
            print "Style %s already exists, deleting it." % style_name
            style2del = self.cat.get_style(style_name)
            self.cat.delete(style2del)
        self.cat.create_style(
            style_name, open(sld_file).read(), overwrite=overwrite
        )  # FIXME: if ", workspace=workspace" specified can't delete style
        if self.debug is True:
            print "Style %s created in Geoserver" % style_name

    def apply_style_to_layer(self, layer_name, style_name):
        """ Apply a geoserver style to a layer. """
        gs_layer = self.cat.get_layer(layer_name)
        gs_style = self.cat.get_style(style_name)
        # FIXME: Which works better?
        # gs_layer.default_style = gs_style / gs_layer._set_default_style(gs_style)
        # FIXME: Maybe indicate workspace when saving style then name the style as "workspace:style"
        gs_layer._set_default_style(gs_style)
        self.cat.save(gs_layer)
        if self.debug is True:
            print "Style applied to %s" % layer_name
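# Hypothetical usage sketch for the GsConn wrapper above, not part of the
# original. Host, credentials, database settings, table and style names are
# placeholders; the "ma_carte" workspace matches the prefix hard-coded in
# publish_pg_layer.
if __name__ == '__main__':
    gs = GsConn('localhost:8080', 'admin', 'geoserver', debug=True)
    gs.create_workspace('ma_carte')
    gs.create_pg_store('ma_carte_pg', 'ma_carte', 'localhost', '5432',
                       'postgres', 'secret', 'gis', 'public')
    gs.publish_pg_layer('communes', 'Communes', 'ma_carte_pg', 2154)
    gs.create_style_from_sld('communes_style', 'communes.sld', 'ma_carte')
    gs.apply_style_to_layer('ma_carte:communes', 'communes_style')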
def processAlgorithm(self, parameters, context, model_feedback): """ Process the algorithm :param parameters: parameters of the process :param context: context of the process :param model_feedback: feedback instance for the process :return: """ # Use a multi-step feedback, so that individual child algorithm progress reports are adjusted for the # overall progress through the model self.xml_path = parameters["XML_PATH"] if not self.xml_path.lower().endswith(".xml"): feedback = QgsProcessingMultiStepFeedback(0, model_feedback) feedback.reportError( "XML Workspace Definition is not an XML file!", True) return {} db_name = parameters["DB_NAME"] db_params = self.get_db_params(db_name) store_name = parameters["GS_STORE"] wrk_name = parameters["GS_WORKSPACE"] dataset_list = [] datasets = self.getDatasets() for dataset in datasets: type = dataset.getElementsByTagName( TAG_DE_TYPE)[0].childNodes[0].data if type == "esriDTFeatureClass": ds_name = dataset.getElementsByTagName( TAG_DE_NAME)[0].childNodes[0].data dataset_list.append({ "name": ds_name.lower(), "srs": "EPSG:4326" }) feedback = QgsProcessingMultiStepFeedback(2 + len(dataset_list), model_feedback) feedback.pushInfo("Get GeoServer Catalog: " + parameters["GS_REST_URL"]) gs_catalogue = Catalog(parameters["GS_REST_URL"], parameters["GS_ADMIN"], parameters["GS_PASSWORD"]) # workspace if wrk_name == "" or wrk_name is None: wrk_name = db_name.lower() + "_ws" wrk_uri = "http://" + wrk_name feedback.pushInfo("GeoServer Workspace: " + wrk_name + " (" + wrk_uri + ")") workspace = gs_catalogue.get_workspace(wrk_name) if workspace is None: workspace = gs_catalogue.create_workspace(wrk_name, wrk_uri) feedback.setCurrentStep(1) # store if store_name == "" or store_name is None: store_name = db_name.lower() + "_ds" feedback.pushInfo("GeoServer Data Store: " + store_name) store = gs_catalogue.get_store(store_name, workspace) if store is None: store = gs_catalogue.create_datastore(store_name, workspace) store.connection_parameters.update(**db_params) gs_catalogue.save(store) feedback.setCurrentStep(2) step = 2 published_count = 0 layer_prefix = "v_" for ds_cur in dataset_list: layer_name = layer_prefix + ds_cur["name"] feedback.pushInfo("GeoServer Publish: " + layer_name + " (" + ds_cur["srs"] + ")") try: layer = gs_catalogue.get_layer(layer_name) if layer is not None: gs_catalogue.delete(layer) gs_catalogue.save() gs_catalogue.publish_featuretype(layer_name, store, ds_cur["srs"]) published_count += 1 except Exception as e: feedback.reportError("Error: " + str(e), False) step += 1 feedback.setCurrentStep(step) gs_catalogue.reload() layers = gs_catalogue.get_layers(store) feedback.pushInfo("-" * 80) feedback.pushInfo("Published layers: " + str(published_count)) for layer in layers: feedback.pushInfo(layer.name + " is published!") feedback.pushInfo("-" * 80) results = {} outputs = {} return results
def create_workspace(server, username, password, workspace):
    cat = Catalog(server, username, password)
    if workspace not in [w.name for w in cat.get_workspaces()]:
        print 'Creating workspace: %s' % workspace
        cat.create_workspace(workspace, 'api.npolar.no/' + workspace)
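# Hypothetical call of the helper above against a local GeoServer; the URL,
# credentials and workspace name are placeholders.
create_workspace('http://localhost:8080/geoserver/rest', 'admin', 'geoserver', 'npolar')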
logger.error("[x] File:--%r-- failed to be imported", src) data_move(src, ERROR_file) raise Exception( "[x] %r _FAILED to be imported" % src + ' reason--%r' % e) try: logger.debug("[x] Publishing %r to GeoServer", src) cat = Catalog( gs_setting['geoserver_url'] + '/rest', gs_setting['geoserver_username'], gs_setting['geoserver_password'], True) workspace = cat.get_workspace(gs_setting['geoserver_workspace']) if workspace is None: cat.create_workspace( gs_setting['geoserver_workspace'], gs_setting['geoserver_workspace']) str_time = "{:02d}:{:02d}:{:02d}".format( timestamp.hour[0].item(), timestamp.minute[0].item(), timestamp.second[0].item() ) store_name = "{uuid}_{date}T{time}Z".format( uuid=ts.uuid, date=str_date, time=str_time, ) create_geotiff(
class ArchiveAndCatalogueI(geocloud.ArchiveAndCatalogue):
    def __init__(self, com):
        if not com:
            raise RuntimeError("Not communicator")
        self.com = com
        q = com.stringToProxy('IceGrid/Query')
        self.query = IceGrid.QueryPrx.checkedCast(q)
        if not self.query:
            raise RuntimeError("Invalid proxy")
        self.broker = geocloud.BrokerPrx.checkedCast(com.stringToProxy("broker"))
        self.sem = Ice.threading.RLock()  # Lock for managing the data structures
        geoserver_path = "http://localhost:80/geoserver/rest"
        self.catalog = Catalog(geoserver_path)
        self.workspace = None

    def setBroker(self, broker, current=None):
        self.broker = broker

    def log(self, log):
        if self.broker:
            self.broker.begin_appendLog("<ArchiveAndCatalogue> " + str(log))
        else:
            print log

    def createScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            if self.catalog is None:
                self.log("Catalog")
                raise geocloud.CreationScenarioException()
            try:
                self.workspace = self.catalog.create_workspace(scenario, scenario)
                self.workspace.enabled = True
                self.log("Created Scenario %s" % (scenario))
            except Exception:
                print("workspace was not created")
                self.log("Workspace was not created")
        except FailedRequestError:
            self.log("Request failed")
            raise geocloud.CreationScenarioException()
        finally:
            self.sem.release()

    def catalogue(self, path, store, scenario, current=None):
        if self.workspace is None:
            raise geocloud.CataloguingException()
        else:
            try:
                if self.catalog is None:
                    raise geocloud.CataloguingException()
                # store = path.split('/')[-1]
                self.sem.acquire()
                cv = self.catalog.create_coveragestore(store, path, self.workspace)
                self.log("%s Catalogued!" % (path))
            except ConflictingDataError as e:
                self.log(e)
            except UploadError as e:
                self.log(e)
            except FailedRequestError as e:
                pass
            except Exception as e:
                self.log(e)
            finally:
                self.sem.release()

    def deleteScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            for layer in self.catalog.get_layers():
                self.catalog.delete(layer)
            for coverage in self.catalog.get_stores():
                for resource in coverage.get_resources():
                    self.catalog.delete(resource)
                self.catalog.delete(coverage)
            for wk in self.catalog.get_workspaces():
                self.catalog.delete(wk)
        except Exception:
            self.log("Exception while cleaning scenario")
        finally:
            self.sem.release()
def _register_cascaded_service(url, type, name, username, password, wms=None, owner=None, parent=None): """ Register a service as cascading WMS """ try: service = Service.objects.get(base_url=url) return_dict = {} return_dict['service_id'] = service.pk return_dict['msg'] = "This is an existing Service" return HttpResponse(json.dumps(return_dict), mimetype='application/json', status=200) except: # TODO: Handle this error properly pass if wms is None: wms = WebMapService(url) # TODO: Make sure we are parsing all service level metadata # TODO: Handle for setting ServiceProfiletRole service = Service.objects.create(base_url=url, type=type, method='C', name=name, version=wms.identification.version, title=wms.identification.title, abstract=wms.identification.abstract, online_resource=wms.provider.url, owner=owner, parent=parent) service.keywords = ','.join(wms.identification.keywords) service.save() service.set_default_permissions() if type in ['WMS', 'OWS']: # Register the Service with GeoServer to be cascaded cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest", _user, _password) cascade_ws = cat.get_workspace(name) if cascade_ws is None: cascade_ws = cat.create_workspace(name, "http://geonode.org/cascade") # TODO: Make sure there isn't an existing store with that name, and # deal with it if there is try: cascade_store = cat.get_store(name, cascade_ws) except: cascade_store = cat.create_wmsstore(name, cascade_ws, username, password) cascade_store.capabilitiesURL = url cascade_store.type = "WMS" cat.save(cascade_store) available_resources = cascade_store.get_resources(available=True) elif type == 'WFS': # Register the Service with GeoServer to be cascaded cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest", _user, _password) # Can we always assume that it is geonode? 
cascade_ws = cat.get_workspace(settings.CASCADE_WORKSPACE) if cascade_ws is None: cascade_ws = cat.create_workspace(settings.CASCADE_WORKSPACE, "http://geonode.org/cascade") try: wfs_ds = cat.get_store(name, cascade_ws) except: wfs_ds = cat.create_datastore(name, cascade_ws) connection_params = { "WFSDataStoreFactory:MAXFEATURES": "0", "WFSDataStoreFactory:TRY_GZIP": "true", "WFSDataStoreFactory:PROTOCOL": "false", "WFSDataStoreFactory:LENIENT": "true", "WFSDataStoreFactory:TIMEOUT": "3000", "WFSDataStoreFactory:BUFFER_SIZE": "10", "WFSDataStoreFactory:ENCODING": "UTF-8", "WFSDataStoreFactory:WFS_STRATEGY": "nonstrict", "WFSDataStoreFactory:GET_CAPABILITIES_URL": url, } if username and password: connection_params["WFSDataStoreFactory:USERNAME"] = username connection_params["WFSDataStoreFactory:PASSWORD"] = password wfs_ds.connection_parameters = connection_params cat.save(wfs_ds) available_resources = wfs_ds.get_resources(available=True) # Save the Service record service, created = Service.objects.get_or_create(type=type, method='C', base_url=url, name=name, owner=owner) service.save() service.set_default_permissions() elif type == 'WCS': return HttpResponse('Not Implemented (Yet)', status=501) else: return HttpResponse('Invalid Method / Type combo: ' + 'Only Cascaded WMS, WFS and WCS supported', mimetype="text/plain", status=400) message = "Service %s registered" % service.name return_dict = [{ 'status': 'ok', 'msg': message, 'service_id': service.pk, 'service_name': service.name, 'service_title': service.title, 'available_layers': available_resources }] if settings.USE_QUEUE: # Create a layer import job WebServiceHarvestLayersJob.objects.get_or_create(service=service) else: _register_cascaded_layers(service) return HttpResponse(json.dumps(return_dict), mimetype='application/json', status=200)
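# A stripped-down sketch of the GeoServer side of the WMS cascading shown in
# _register_cascaded_service above, without the Django Service bookkeeping.
# Not part of the original; the function name, workspace name "cascade" and
# the arguments are placeholders, and `cat` is assumed to be a gsconfig Catalog.
def cascade_remote_wms(cat, name, capabilities_url, username=None, password=None):
    ws = cat.get_workspace('cascade')
    if ws is None:
        ws = cat.create_workspace('cascade', 'http://geonode.org/cascade')
    try:
        store = cat.get_store(name, ws)
    except Exception:
        store = cat.create_wmsstore(name, ws, username, password)
        store.capabilitiesURL = capabilities_url
        store.type = "WMS"
        cat.save(store)
    # layers advertised by the remote server but not yet cascaded locally
    return store.get_resources(available=True)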
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD']) def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) def testPublishFeatureType(self): # Use the other test and store creation to load vector data into a database # @todo maybe load directly to database? try: self.testDataStoreCreateAndThenAlsoImportData() except FailedRequestError: pass try: lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. self.cat.delete(lyr) self.cat.delete(lyr.resource) ds = self.cat.get_store("gsconfig_import_test") # make sure it's gone self.assertIsNone(self.cat.get_layer('import')) self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326") # and now it's not self.assertIsNotNone(self.cat.get_layer('import')) finally: # tear stuff down to allow the other test to pass if we run first ds = self.cat.get_store("gsconfig_import_test") lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. 
try: if lyr: self.cat.delete(lyr) self.cat.delete(lyr.resource) if ds: self.cat.delete(ds) except Exception: pass def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) @drop_table('import') def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store( ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) @drop_table('import2') def testVirtualTables(self): ds = self.cat.create_datastore("gsconfig_import_test2") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test2") self.cat.add_data_to_store( ds, "import2", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) store = self.cat.get_store("gsconfig_import_test2") geom = JDBCVirtualTableGeometry('the_geom', 'MultiPolygon', '4326') ft_name = 'my_jdbc_vt_test' epsg_code = 'EPSG:4326' sql = "select * from import2 where 'STATE_NAME' = 'Illinois'" keyColumn = None parameters = None jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn, parameters) self.cat.publish_featuretype(ft_name, store, epsg_code, jdbc_virtual_table=jdbc_vt) # DISABLED; this test works only in the very particular case # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR # def testCoverageStoreCreate(self): # ds = self.cat.create_coveragestore2("coverage_gsconfig") # ds.data_url = "file:test/data/mytiff.tiff" # self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set( ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEqual(set(rs.request_srs_list), srs_before, 
str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEqual(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEqual(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testWmsStoreCreate(self): ws = self.cat.create_wmsstore("wmsstore_gsconfig") ws.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&" # noqa: E501 ws.type = "WMS" self.cat.save(ws) def testWmsLayer(self): self.cat.create_workspace("wmstest", "http://example.com/wmstest") wmstest = self.cat.get_workspace("wmstest") wmsstore = self.cat.create_wmsstore("wmsstore", wmstest) wmsstore.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&" # noqa: E501 wmsstore.type = "WMS" self.cat.save(wmsstore) wmsstore = self.cat.get_store("wmsstore") self.assertEqual(1, len(self.cat.get_stores(workspace=wmstest))) available_layers = wmsstore.get_resources(available=True) for layer in available_layers: # sanitize the layer name - validation will fail on newer geoservers name = layer.replace(':', '_') self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer) added_layers = wmsstore.get_resources() self.assertEqual(len(available_layers), len(added_layers)) changed_layer = added_layers[0] self.assertEqual(True, changed_layer.advertised) self.assertEqual(True, changed_layer.enabled) changed_layer.advertised = False changed_layer.enabled = False self.cat.save(changed_layer) self.cat._cache.clear() changed_layer = wmsstore.get_resources()[0] changed_layer.fetch() self.assertEqual(False, changed_layer.advertised) self.assertEqual(False, changed_layer.enabled) # Testing projection and projection policy changes changed_layer.projection = "EPSG:900913" changed_layer.projection_policy = "REPROJECT_TO_DECLARED" self.cat.save(changed_layer) self.cat._cache.clear() layer = self.cat.get_layer(changed_layer.name) self.assertEqual(layer.resource.projection_policy, changed_layer.projection_policy) self.assertEqual(layer.resource.projection, changed_layer.projection) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.items(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assertIsNotNone(self.cat.get_resource("states_test", workspace=sf)) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore( "states_test", shapefile_plus_sidecars, sf)) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore( "states_raster_test", 
shapefile_plus_sidecars, sf)) bogus_shp = { 'shp': 'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assertIsNone(self.cat.get_layer("states_test")) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") self.cat.create_coveragestore("Pk50095", tiffdata, sf) self.assertIsNotNone(self.cat.get_resource("Pk50095", workspace=sf)) self.assertRaises( ConflictingDataError, lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)) self.assertRaises( UploadError, lambda: self.cat.create_featurestore( "Pk50095_vector", tiffdata, sf)) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)) self.cat.create_coveragestore_external_geotiff( "Pk50095_ext", 'file:test/data/Pk50095.tif', sf) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = { 'title': 'Not the original attribution', 'width': '123', 'height': '321', 'href': 'http://www.georchestra.org', 'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg', 'type': 'image/jpeg' } # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = next( (s for s in lyr.styles if s.name == "pophatch")) lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # check count before tests (upload) count = len(self.cat.get_styles()) # upload new style, verify existence with open("test/fred.sld") as fred_sld: self.cat.create_style("fred", fred_sld.read()) fred = self.cat.get_style("fred") self.assertIsNotNone(fred) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes with open("test/ted.sld") as ted_sld: self.cat.create_style("fred", ted_sld.read(), overwrite=True) fred = self.cat.get_style("fred") self.assertIsNotNone(fred) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assertIsNone(self.cat.get_style("fred")) # attempt creating new style with open("test/fred.sld") as fred_sld: self.cat.create_style("fred", fred_sld.read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) # verify it can be found via URL and check the name f = self.cat.get_style_by_url(fred.href) self.assertIsNotNone(f) self.assertEqual(f.name, fred.name) # compare count after upload self.assertEqual(count + 1, len(self.cat.get_styles())) # attempt creating a new style without "title" with open("test/notitle.sld") as notitle_sld: self.cat.create_style("notitle", notitle_sld.read()) notitle = 
self.cat.get_style("notitle") self.assertEqual(None, notitle.sld_title) def testWorkspaceStyles(self): # upload new style, verify existence with open("test/fred.sld") as fred_sld: self.cat.create_style("jed", fred_sld.read(), workspace="topp") jed = self.cat.get_style("jed", workspace="blarny") self.assertIsNone(jed) jed = self.cat.get_style("jed", workspace="topp") self.assertIsNotNone(jed) self.assertEqual("Fred", jed.sld_title) jed = self.cat.get_style("topp:jed") self.assertIsNotNone(jed) self.assertEqual("Fred", jed.sld_title) # replace style, verify changes with open("test/ted.sld") as ted_sld: self.cat.create_style("jed", ted_sld.read(), overwrite=True, workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertIsNotNone(jed) self.assertEqual("Ted", jed.sld_title) # delete style, verify non-existence self.cat.delete(jed, purge=True) self.assertIsNone(self.cat.get_style("jed", workspace="topp")) # attempt creating new style with open("test/fred.sld") as fred_sld: self.cat.create_style("jed", fred_sld.read(), workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertEqual("Fred", jed.sld_title) # verify it can be found via URL and check the full name f = self.cat.get_style_by_url(jed.href) self.assertIsNotNone(f) self.assertEqual(f.fqn, jed.fqn) def testLayerWorkspaceStyles(self): # upload new style, verify existence with open("test/fred.sld") as fred_sld: self.cat.create_style("ned", fred_sld.read(), overwrite=True, workspace="topp") with open("test/fred.sld") as ted_sld: self.cat.create_style("zed", ted_sld.read(), overwrite=True, workspace="topp") ned = self.cat.get_style("ned", workspace="topp") zed = self.cat.get_style("zed", workspace="topp") self.assertIsNotNone(ned) self.assertIsNotNone(zed) lyr = self.cat.get_layer("states") lyr.default_style = ned lyr.styles = [zed] self.cat.save(lyr) self.assertEqual("topp:ned", lyr.default_style) self.assertEqual([zed], lyr.styles) lyr.refresh() if lyr.default_style is not None: self.assertEqual("topp:ned", lyr.default_style.fqn) self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles]) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assertIsNone(ws) def testWorkspaceDefault(self): # save orig orig = self.cat.get_default_workspace() neu = self.cat.create_workspace("neu", "http://example.com/neu") try: # make sure setting it works self.cat.set_default_workspace("neu") ws = self.cat.get_default_workspace() self.assertEqual('neu', ws.name) finally: # cleanup and reset to the way things were self.cat.delete(neu) self.cat.set_default_workspace(orig.name) ws = self.cat.get_default_workspace() self.assertEqual(orig.name, ws.name) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assertTrue(states.enabled) states.enabled = False self.assertFalse(states.enabled) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assertFalse(states.enabled) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assertTrue(states.enabled) def testLayerGroupSave(self): tas = 
self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities' ], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = tas.styles[:-1] self.cat.save(tas) # this verifies the local state self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) # force a refresh to check the remote state tas.refresh() self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) def testImageMosaic(self): """ Test case for Issue #110 """ # testing the mosaic creation name = 'cea_mosaic' with open('test/data/mosaic/cea.zip', 'rb') as data: self.cat.create_imagemosaic(name, data) # get the layer resource back self.cat._cache.clear() resource = self.cat.get_layer(name).resource self.assertIsNotNone(resource) # delete granule from mosaic coverage = name store = self.cat.get_store(name) granules = self.cat.list_granules(coverage, store) self.assertEqual(1, len(granules['features'])) granule_id = name + '.1' self.cat.mosaic_delete_granule(coverage, store, granule_id) granules = self.cat.list_granules(coverage, store) self.assertEqual(0, len(granules['features'])) """ testing external Image mosaic creation """ name = 'cea_mosaic_external' path = 'test/data/mosaic/external' self.cat.create_imagemosaic(name, path, workspace='topp') self.cat._cache.clear() resource = self.cat.get_layer("external").resource self.assertIsNotNone(resource) # add granule to mosaic granule_path = 'test/data/mosaic/granules/cea_20150102.tif' self.cat.add_granule(granule_path, name, workspace='topp') granules = self.cat.list_granules("external", name, 'topp') self.assertEqual(2, len(granules['features'])) # add external granule to mosaic granule_path = os.path.join( os.getcwd(), 'test/data/mosaic/granules/cea_20150103.zip') self.cat.add_granule(granule_path, name, workspace='topp') granules = self.cat.list_granules("external", name, 'topp') self.assertEqual(3, len(granules['features'])) # Delete store store = self.cat.get_store(name) self.cat.delete(store, purge=True, recurse=True) self.cat._cache.clear() def testTimeDimension(self): sf = self.cat.get_workspace("sf") files = shapefile_and_friends( os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date")) self.cat.create_featurestore("boxes_with_end_date", files, sf) get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer( 'boxes_with_end_date').resource # noqa: E501 # configure time as LIST resource = get_resource() timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date") resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) self.assertEqual("date", timeInfo.attribute) self.assertEqual("ISO8601", timeInfo.units) # disable time dimension timeInfo = resource.metadata['time'] timeInfo.enabled = False # since this is an xml property, it won't get written unless we modify it resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(False, timeInfo.enabled) # configure with interval, 
end_attribute and enable again timeInfo.enabled = True timeInfo.presentation = 'DISCRETE_INTERVAL' timeInfo.resolution = '3 days' timeInfo.end_attribute = 'enddate' resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(True, timeInfo.enabled) self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation) self.assertEqual('3 days', timeInfo.resolution_str()) self.assertEqual('enddate', timeInfo.end_attribute)
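# Condensed sketch of the time-dimension round trip exercised by
# testTimeDimension above, applied outside the test harness. Not part of the
# original; it assumes DimensionInfo is importable as in the test module, and
# the attribute names "date"/"enddate" mirror the boxes_with_end_date fixture.
def enable_time_dimension(cat, layer_name):
    resource = cat.get_layer(layer_name).resource
    info = DimensionInfo("time", "true", "DISCRETE_INTERVAL", None,
                         "ISO8601", None, attribute="date")
    info.resolution = '3 days'
    info.end_attribute = 'enddate'
    # reassign the metadata dict so the XML property is marked dirty and written back
    resource.metadata = {'time': info}
    cat.save(resource)
    return cat.get_layer(layer_name).resource.metadata['time']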
def _register_cascaded_layers(service, owner=None):
    """
    Register layers for a cascading WMS
    """
    if service.type == 'WMS' or service.type == "OWS":
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      _user, _password)
        # Can we always assume that it is geonode?
        # Should cascading layers have a separate workspace?
        cascade_ws = cat.get_workspace(service.name)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(service.name, 'cascade')
        try:
            store = cat.get_store(service.name, cascade_ws)
        except Exception:
            store = cat.create_wmsstore(service.name, cascade_ws)
            cat.save(store)
        wms = WebMapService(service.base_url)
        layers = list(wms.contents)
        count = 0
        for layer in layers:
            lyr = cat.get_resource(layer, store, cascade_ws)
            if lyr is None:
                if service.type in ["WMS", "OWS"]:
                    resource = cat.create_wmslayer(cascade_ws, store, layer)
                elif service.type == "WFS":
                    resource = cat.create_wfslayer(cascade_ws, store, layer)
                if resource:
                    bbox = resource.latlon_bbox
                    cascaded_layer, created = Layer.objects.get_or_create(
                        typename="%s:%s" % (cascade_ws.name, resource.name),
                        service=service,
                        defaults={
                            "name": resource.name,
                            "workspace": cascade_ws.name,
                            "store": store.name,
                            "storeType": store.resource_type,
                            "title": resource.title or 'No title provided',
                            "abstract": resource.abstract or 'No abstract provided',
                            "owner": None,
                            "uuid": str(uuid.uuid4()),
                            "bbox_x0": bbox[0],
                            "bbox_x1": bbox[1],
                            "bbox_y0": bbox[2],
                            "bbox_y1": bbox[3],
                        })
                    if created:
                        cascaded_layer.save()
                        if cascaded_layer is not None and cascaded_layer.bbox is None:
                            cascaded_layer._populate_from_gs(gs_resource=resource)
                        cascaded_layer.set_default_permissions()
                        service_layer, created = ServiceLayer.objects.get_or_create(
                            service=service, typename=cascaded_layer.name)
                        service_layer.layer = cascaded_layer
                        service_layer.title = cascaded_layer.title
                        service_layer.description = cascaded_layer.abstract
                        service_layer.styles = cascaded_layer.styles
                        service_layer.save()
                    count += 1
                else:
                    logger.error(
                        "Resource %s from store %s could not be saved as layer"
                        % (layer, store.name))
        message = "%d Layers Registered" % count
        return_dict = {'status': 'ok', 'msg': message}
        return HttpResponse(json.dumps(return_dict),
                            mimetype='application/json',
                            status=200)
    elif service.type == 'WCS':
        return HttpResponse('Not Implemented (Yet)', status=501)
    else:
        return HttpResponse('Invalid Service Type', status=400)
def publishGeoserver(appdef, progress): viewCrs = appdef["Settings"]["App view CRS"] usesGeoServer = False for applayer in appdef["Layers"]: if applayer.method != METHOD_FILE: if applayer.layer.type() == applayer.layer.VectorLayer and applayer.layer.providerType().lower() != "wfs": usesGeoServer = True if not usesGeoServer: return progress.setText("Publishing to GeoServer") progress.setProgress(0) geoserverUrl = appdef["Deploy"]["GeoServer url"] + "/rest" geoserverPassword = appdef["Deploy"]["GeoServer password"] geoserverUsername = appdef["Deploy"]["GeoServer username"] workspaceName = appdef["Deploy"]["GeoServer workspace"] dsName = "ds_" + workspaceName host = appdef["Deploy"]["PostGIS host"] port = appdef["Deploy"]["PostGIS port"] postgisUsername = appdef["Deploy"]["PostGIS username"] postgisPassword = appdef["Deploy"]["PostGIS password"] database = appdef["Deploy"]["PostGIS database"] schema = appdef["Deploy"]["PostGIS schema"] catalog = Catalog(geoserverUrl, geoserverUsername, geoserverPassword) workspace = catalog.get_workspace(workspaceName) if workspace is None: workspace = catalog.create_workspace(workspaceName, workspaceName) try: store = catalog.get_store(dsName, workspace) resources = store.get_resources() for resource in resources: layers = catalog.get_layers(resource) for layer in layers: catalog.delete(layer) catalog.delete(resource) catalog.delete(store) except Exception: pass try: store = catalog.get_store(dsName, workspace) except FailedRequestError: store = None for i, applayer in enumerate(appdef["Layers"]): layer = applayer.layer if applayer.method != METHOD_FILE and applayer.method != METHOD_DIRECT: name = safeName(layer.name()) sld, icons = getGsCompatibleSld(layer) if sld is not None: catalog.create_style(name, sld, True) uploadIcons(icons, geoserverUsername, geoserverPassword, catalog.gs_base_url) if layer.type() == layer.VectorLayer: if applayer.method == METHOD_WFS_POSTGIS or applayer.method == METHOD_WMS_POSTGIS: if store is None: store = catalog.create_datastore(dsName, workspace) store.connection_parameters.update( host=host, port=str(port), database=database, user=postgisUsername, schema=schema, passwd=postgisPassword, dbtype="postgis") catalog.save(store) catalog.publish_featuretype(name, store, layer.crs().authid()) else: path = getDataFromLayer(layer, viewCrs) catalog.create_featurestore(name, path, workspace=workspace, overwrite=True) gslayer = catalog.get_layer(name) r = gslayer.resource r.dirty['srs'] = viewCrs catalog.save(r) elif layer.type() == layer.RasterLayer: path = getDataFromLayer(layer, viewCrs) catalog.create_coveragestore(name, path, workspace=workspace, overwrite=True) if sld is not None: publishing = catalog.get_layer(name) publishing.default_style = catalog.get_style(name) catalog.save(publishing) progress.setProgress(int((i+1)*100.0/len(appdef["Layers"])))
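# Illustrative, partial "appdef" structure consumed by publishGeoserver above,
# limited to the keys the function actually reads. Not part of the original;
# all values are placeholders, and "Layers" would be filled with the
# application's applayer objects (each exposing .method and .layer).
appdef = {
    "Settings": {"App view CRS": "EPSG:3857"},
    "Deploy": {
        "GeoServer url": "http://localhost:8080/geoserver",
        "GeoServer username": "admin",
        "GeoServer password": "geoserver",
        "GeoServer workspace": "webapp",
        "PostGIS host": "localhost",
        "PostGIS port": 5432,
        "PostGIS username": "postgres",
        "PostGIS password": "secret",
        "PostGIS database": "gis",
        "PostGIS schema": "public",
    },
    "Layers": [],
}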
class UploaderTests(DjagnoOsgeoMixin): """ Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): settings = connection.settings_dict params = { 'database': settings['NAME'], 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': "True" } store = catalog.create_datastore(settings['NAME'], workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(settings['NAME']) def setUp(self): if not os.path.exists( os.path.join( os.path.split(__file__)[0], '..', 'importer-test-files')): self.skipTest('Skipping test due to missing test data.') # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.username, self.password = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_username, self.non_admin_password = self.create_user( 'non_admin', 'non_admin') self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.cat.get_workspace('geonode') == None: self.cat.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.cat) def tearDown(self): """ Clean up geoserver. """ self.cat.delete(self.datastore, recurse=True) def generic_import(self, file, configuration_options=[{'index': 0}]): f = file filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f) res = self.import_file(filename, configuration_options=configuration_options) layer_results = [] for result in res: if result[1].get('raster'): layerfile = result[0] layername = os.path.splitext(os.path.basename(layerfile))[0] layer = Layer.objects.get(name=layername) self.assertTrue(layerfile.endswith('.tif')) self.assertTrue(os.path.exists(layerfile)) l = gdal.OpenEx(layerfile) self.assertTrue(l.GetDriver().ShortName, 'GTiff') layer_results.append(layer) else: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not filename.endswith('zip'): self.assertTrue(layer.attributes.count() >= DataSource( filename)[0].num_fields) layer_results.append(layer) return layer_results[0] def generic_raster_import(self, file, configuration_options=[{ 'index': 0 }]): f = file filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f) res = self.import_file(filename, configuration_options=configuration_options) layerfile = res[0][0] layername = os.path.splitext(os.path.basename(layerfile))[0] layer = Layer.objects.get(name=layername) self.assertTrue(layerfile.endswith('.tif')) self.assertTrue(os.path.exists(layerfile)) l = gdal.OpenEx(layerfile) self.assertTrue(l.GetDriver().ShortName, 'GTiff') return layer def test_raster(self): """ Tests raster import """ layer = self.generic_raster_import('test_grid.tif', configuration_options=[{ 'index': 0 }]) def test_box_with_year_field(self): """ Tests the import of test_box_with_year_field. 
""" layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """ Tests the import of test_boxes_with_date. """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_gpkg(self): """ Tests the import of test_boxes_with_date.gpkg. """ layer = self.generic_import('boxes_with_date.gpkg', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_plus_raster_gpkg_by_index(self): """ Tests the import of multilayer vector + raster geopackage using index """ layer = self.generic_import('boxes_plus_raster.gpkg', configuration_options=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }, { 'index': 1 }, { 'index': 2 }, { 'index': 3 }, { 'index': 4 }, { 'index': 5 }, { 'index': 6 }, { 'index': 7 }, ]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """ Tests a CSV with WKT polygon. """ layer = self.generic_import('boxes_with_date.csv', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_csv_missing_features(self): """ Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding. """ self.generic_import('missing-features.csv', configuration_options=[{ 'index': 0 }]) def test_boxes_with_iso_date(self): """ Tests the import of test_boxes_with_iso_date. 
""" layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """ Tests importing the same layer twice to ensure incrementing file names is properly handled. """ filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip') gi = OGRImport(filename) layers1 = gi.handle({'index': 0, 'name': 'test'}) layers2 = gi.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_launder(self): """ Ensure the launder function works as expected. """ self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3') self.assertEqual(launder('Testing#'), 'testing_') self.assertEqual(launder(' '), '_') def test_boxes_with_date_iso_date_zip(self): """ Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """ Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """ Tests the import of point_with_date.geojson """ layer = self.generic_import('point_with_date.geojson', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """ Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. 
""" layer = self.generic_import('boxes_with_end_date.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date', 'enddate'], 'start_date': 'date', 'end_date': 'enddate', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] end_date_attr = filter( lambda attr: attr.attribute == 'enddate_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) def test_us_states_kml(self): """ Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. layer = self.generic_import('us_states.kml', configuration_options=[{ 'index': 0 }]) def test_mojstrovka_gpx(self): """ Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('mojstrovka.gpx', configuration_options=[{ 'index': 0, 'convert_to_date': ['time'], 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'time_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """ Convenience method to run generic tests on time layers. """ self.cat._cache.clear() resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace) timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) self.assertEqual(attribute, timeInfo.attribute) self.assertEqual(end_attribute, timeInfo.end_attribute) def test_us_shootings_csv(self): """ Tests the import of US_Shootings.csv. """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') filename = 'US_Shootings.csv' f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename) layer = self.generic_import(filename, configuration_options=[{ 'index': 0, 'convert_to_date': ['Date'] }]) self.assertTrue(layer.name.startswith('us_shootings')) date_field = 'date' configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """ Tests the import of US_Civil_Rights_Sitins0.csv """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') layer = self.generic_import("US_Civil_Rights_Sitins0.csv", configuration_options=[{ 'index': 0, 'convert_to_date': ['Date'] }]) def get_layer_names(self, in_file): """ Gets layer names from a data source. 
""" ds = DataSource(in_file) return map(lambda layer: layer.name, ds) def test_gdal_import(self): filename = 'point_with_date.geojson' f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename) self.generic_import(filename, configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) def test_wfs(self): """ Tests the import from a WFS Endpoint """ wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs' gi = OGRImport(wfs) layers = gi.handle(configuration_options=[{ 'layer_name': 'og:bugsites' }, { 'layer_name': 'topp:states' }]) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_arcgisjson(self): """ Tests the import from a WFS Endpoint """ endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json' gi = OGRImport(endpoint) layers = gi.handle(configuration_options=[{'index': 0}]) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def import_file(self, in_file, configuration_options=[]): """ Imports the file. """ self.assertTrue(os.path.exists(in_file)) # run ogr2ogr gi = OGRImport(in_file) layers = gi.handle(configuration_options=configuration_options) return layers @staticmethod def createFeatureType(catalog, datastore, name): """ Exposes a PostGIS feature type in geoserver. """ headers = {"Content-type": "application/xml"} data = "<featureType><name>{name}</name></featureType>".format( name=name) url = datastore.href.replace(".xml", '/featuretypes.xml'.format(name=name)) headers, response = catalog.http.request(url, "POST ", data, headers) return response def test_file_add_view(self): """ Tests the file_add_view. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() # test login required for this view request = c.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertTrue(len(response.context['object_list']) == 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'empty_file.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """ Tests the file_add_view. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """ Tests the describe fields functionality. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') with GDALInspector(f) as f: layers = f.describe_fields() self.assertTrue(layers[0]['layer_name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], [ 'Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude' ]) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """ Tests the describe fields functionality. """ files = [ (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv'), 'CSV'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson'), 'GeoJSON'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'mojstrovka.gpx'), 'GPX'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'us_states.kml'), 'KML'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_year_field.shp'), 'ESRI Shapefile'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'), ] from .models import NoDataSourceFound try: for path, file_type in files: with GDALInspector(path) as f: self.assertEqual(f.file_type(), file_type) except NoDataSourceFound as e: print 'No data source found in: {0}'.format(path) raise e def test_configure_view(self): """ Tests the configuration view. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': { 'users': { 'test': new_user_perms, 'AnonymousUser': [ "change_layer_data", "download_resourcebase", "view_resourcebase" ] } } }] response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username) perms = layer.get_all_level_info() user = User.objects.get(username=self.non_admin_username) # check user permissions for perm in [ u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data' ]: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn('change_resourcebase_permissions', perms['users'][new_user]) self.assertIn( "change_layer_data", perms['users'][User.objects.get(username='******')]) lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) def test_configure_view_convert_date(self): """ Tests the configure view with a dataset that needs to be converted to a date. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True }] response = c.get('/importer-api/data-layers/{0}/configure/'.format( upload.id)) self.assertEqual(response.status_code, 405) response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id)) self.assertEqual(response.status_code, 400) response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(layer.attributes.filter(attribute='date'), 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. 
c.logout() c.login_as_admin() response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id)) self.assertEqual(response.status_code, 404) def test_list_api(self): c = AdminClient() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.username) non_admin = User.objects.get(username=self.non_admin_username) from osgeo_importer.models import UploadFile f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') with open(f, 'rb') as f: uploaded_file = SimpleUploadedFile('test_data', f.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create( state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) c.login_as_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = c.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): c = AdminClient() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """ Ensure users can delete their data. """ c = AdminClient() f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """ Ensure that administrators can delete data that isn't theirs. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) c.logout() c.login_as_admin() id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """ Tests providing a name in the configuration options. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() name = 'point-with-a-date' with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True } response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json') layer = Layer.objects.all()[0] self.assertEqual(layer.title, name.replace('-', '_')) def test_api_import(self): """ Tests the import api. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True } self.assertTrue( isinstance(UploadLayer.objects.first().configuration_options, dict)) response = c.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertTrue('task' in response.content) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """ Test the file extension validator. """ for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions: self.assertIsNone( validate_file_extension( SimpleUploadedFile('test.{0}'.format(extension), ''))) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """ Test the file extension validator. """ with self.assertRaises(ValidationError): validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """ Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configuration_options=[{ "configureTime": False, "convert_to_date": ["W1_OPENDAT"], "editable": True, "index": 0, "name": "Walmart", "start_date": "W1_OPENDAT" }]) def test_multipolygon_shapefile(self): """ Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configuration_options=[{ 'index': 0 }]) def test_non_4326_SR(self): """ Tests shapefile with multipart polygons. 
""" res = self.generic_import('Istanbul.zip', configuration_options=[{ 'index': 0 }]) featuretype = self.cat.get_resource(res.name) self.assertEqual(featuretype.projection, 'EPSG:32635') def test_gwc_handler(self): """ Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.cat.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertTrue('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{ 'index': 0 }]) gs_layer = self.cat.get_layer(layer.name) self.cat._cache.clear() gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertFalse('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) def test_utf8(self): """ Tests utf8 characters in attributes """ filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'china_provinces.shp') layer = self.generic_import('china_provinces.shp') gi = OGRImport(filename) ds, insp = gi.open_target_datastore(gi.target_store) sql = str("select NAME_CH from %s where NAME_PY = 'An Zhou'" % (layer.name)) res = ds.ExecuteSQL(sql) feat = res.GetFeature(0) self.assertEqual(feat.GetField('name_ch'), "安州") def test_non_converted_date(self): """ Test converting a field as date. """ results = self.generic_import('TM_WORLD_BORDERS_2005.zip', configuration_options=[{ 'index': 0, 'start_date': 'Year', 'configureTime': True }]) layer = self.cat.get_layer(results.typename) self.assertTrue('time' in layer.resource.metadata) self.assertEqual('year', layer.resource.metadata['time'].attribute) def test_fid_field(self): """ Regression test for preserving an FID field when target layer supports it but source does not. """ self.generic_import('noaa_paleoclimate.zip', configuration_options=[{ 'index': 0 }]) def test_csv_with_wkb_geometry(self): """ Tests problems with the CSV files with multiple geometries. """ files = [ 'police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv', 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv', 'police_csv_noWKB.csv' ] for i in files: self.generic_import( i, { "configureTime": True, "convert_to_date": ["date_time"], "editable": True, "index": 0, "name": i.lower(), "permissions": { "users": { "AnonymousUser": [ "change_layer_data", "download_resourcebase", "view_resourcebase" ] } }, "start_date": "date_time", })
#!/usr/bin/python
import sys, os

from geoserver.catalog import Catalog
from geoserver.support import JDBCVirtualTable, JDBCVirtualTableGeometry, JDBCVirtualTableParam
import geoserver.util

cat = Catalog("http://localhost:8080/geoserver/rest/", "admin", "geoserver")

namespace = 'user'
workspace = cat.get_workspace(namespace)
if workspace is None:
    workspace = cat.create_workspace(namespace, 'http://mesasadc.org/' + namespace)

namespace = 'mesa'
workspace = cat.get_workspace(namespace)
if workspace is None:
    workspace = cat.create_workspace(namespace, 'http://mesasadc.org/' + namespace)

shapefile_plus_sidecars = geoserver.util.shapefile_and_friends("mesa_shapefiles/MESASADC")
# shapefile_and_friends should look on the filesystem to find a shapefile
# and related files based on the base path passed in
#
# shapefile_plus_sidecars == {
#     'shp': 'states.shp',
#     'shx': 'states.shx',
#     'prj': 'states.prj',
#     'dbf': 'states.dbf'
# }
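# A minimal sketch of a sidecar-collecting helper like the one described in the
# comment above, assuming the base path is passed without an extension. This is
# an illustration of the expected behaviour, not geoserver.util's actual code.
def collect_shapefile_sidecars(base_path):
    """Return a dict mapping extension -> path for a shapefile and its sidecar files."""
    sidecars = {}
    for ext in ('shp', 'shx', 'prj', 'dbf'):
        candidate = "{0}.{1}".format(base_path, ext)
        if os.path.exists(candidate):
            sidecars[ext] = candidate
    return sidecars

# e.g. collect_shapefile_sidecars("mesa_shapefiles/MESASADC")
# -> {'shp': '.../MESASADC.shp', 'shx': '.../MESASADC.shx', ...}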
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog("http://localhost:8080/geoserver/rest") def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) def testPublishFeatureType(self): # Use the other test and store creation to load vector data into a database # @todo maybe load directly to database? try: self.testDataStoreCreateAndThenAlsoImportData() except FailedRequestError: pass try: lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. self.cat.delete(lyr) self.cat.delete(lyr.resource) ds = self.cat.get_store("gsconfig_import_test") # make sure it's gone self.assert_(self.cat.get_layer('import') is None) self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326") # and now it's not self.assert_(self.cat.get_layer('import') is not None) finally: # tear stuff down to allow the other test to pass if we run first ds = self.cat.get_store("gsconfig_import_test") lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. 
self.cat.delete(lyr) self.cat.delete(lyr.resource) self.cat.delete(ds) def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) @drop_table('import') def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store(ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) def testCoverageStoreCreate(self): ds = self.cat.create_coveragestore2("coverage_gsconfig") ds.data_url = "file:data/mytiff.tiff" self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testWmsStoreCreate(self): ws = self.cat.create_wmsstore("wmsstore_gsconfig") ws.capabilitiesURL = 
"http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" ws.type = "WMS" self.cat.save(ws) def testWmsLayer(self): self.cat.create_workspace("wmstest", "http://example.com/wmstest") wmstest = self.cat.get_workspace("wmstest") wmsstore = self.cat.create_wmsstore("wmsstore", wmstest) wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" wmsstore.type = "WMS" self.cat.save(wmsstore) wmsstore = self.cat.get_store("wmsstore") self.assertEqual(1, len(self.cat.get_stores(wmstest))) available_layers = wmsstore.get_resources(available=True) for layer in available_layers: new_layer = self.cat.create_wmslayer(wmstest, wmsstore, layer) added_layers = wmsstore.get_resources() self.assertEqual(len(available_layers), len(added_layers)) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.iteritems(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) ) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf) ) bogus_shp = { 'shp': 'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf) ) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assert_(self.cat.get_layer("states_test") is None) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") # TODO: Uploading WorldImage file no longer works??? 
# ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf) # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None) # self.assertRaises( # ConflictingDataError, # lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf) # ) self.assertRaises( UploadError, lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf) ) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff) ) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = "Not the original attribution" # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next() lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # upload new style, verify existence self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assert_(self.cat.get_style("fred") is None) # attempt creating new style self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) # verify it can be found via URL and check the name f = self.cat.get_style_by_url(fred.href) self.assert_(f is not None) self.assertEqual(f.name, fred.name) def testWorkspaceStyles(self): # upload new style, verify existence self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="blarny") self.assert_(jed is None) jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) jed = self.cat.get_style("topp:jed") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) # replace style, verify changes self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Ted", jed.sld_title) # delete style, verify non-existence self.cat.delete(jed, purge=True) self.assert_(self.cat.get_style("jed", workspace="topp") is None) # attempt creating new style self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertEqual("Fred", jed.sld_title) # verify it can be found via URL and check the full name f = self.cat.get_style_by_url(jed.href) self.assert_(f is not None) self.assertEqual(f.fqn, jed.fqn) def testLayerWorkspaceStyles(self): # upload new style, 
verify existence self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp") self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp") ned = self.cat.get_style("ned", workspace="topp") zed = self.cat.get_style("zed", workspace="topp") self.assert_(ned is not None) self.assert_(zed is not None) lyr = self.cat.get_layer("states") lyr.default_style = ned lyr.styles = [zed] self.cat.save(lyr) self.assertEqual("topp:ned", lyr.default_style) self.assertEqual([zed], lyr.styles) lyr.refresh() self.assertEqual("topp:ned", lyr.default_style.fqn) self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles]) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assert_(ws is None) def testWorkspaceDefault(self): # save orig orig = self.cat.get_default_workspace() neu = self.cat.create_workspace("neu", "http://example.com/neu") try: # make sure setting it works self.cat.set_default_workspace("neu") ws = self.cat.get_default_workspace() self.assertEqual('neu', ws.name) finally: # cleanup and reset to the way things were self.cat.delete(neu) self.cat.set_default_workspace(orig.name) ws = self.cat.get_default_workspace() self.assertEqual(orig.name, ws.name) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) states.enabled = False self.assert_(states.enabled == False) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == False) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) def testLayerGroupSave(self): tas = self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = tas.styles[:-1] self.cat.save(tas) # this verifies the local state self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) # force a refresh to check the remote state tas.refresh() self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles)
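# Note: DBPARAMS and the @drop_table decorator used by the tests above belong to
# the gsconfig test harness and are not shown in this excerpt. A rough,
# hypothetical sketch of what they provide (PostGIS connection parameters for the
# database backing the tests, and post-test table cleanup); the real values and
# implementation live in the test configuration.
DBPARAMS_EXAMPLE = {
    'host': 'localhost',
    'port': '5432',
    'database': 'gsconfig_test',
    'user': 'postgres',
    'passwd': 'postgres',
    'dbtype': 'postgis'
}

def drop_table_example(table_name):
    """Hypothetical stand-in for drop_table: clean up the named table after the test."""
    def decorator(test_func):
        def wrapper(*args, **kwargs):
            try:
                return test_func(*args, **kwargs)
            finally:
                # the real decorator issues DROP TABLE IF EXISTS <table_name>
                # against the test database here
                pass
        return wrapper
    return decorator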
def _register_cascaded_layers(service, owner=None): """ Register layers for a cascading WMS """ if service.type == "WMS" or service.type == "OWS": cat = Catalog(settings.OGC_SERVER["default"]["LOCATION"] + "rest", _user, _password) # Can we always assume that it is geonode? # Should cascading layers have a separate workspace? cascade_ws = cat.get_workspace(service.name) if cascade_ws is None: cascade_ws = cat.create_workspace(service.name, "cascade") try: store = cat.get_store(service.name, cascade_ws) except Exception: store = cat.create_wmsstore(service.name, cascade_ws) cat.save(store) wms = WebMapService(service.base_url) layers = list(wms.contents) count = 0 for layer in layers: lyr = cat.get_resource(layer, store, cascade_ws) if lyr is None: if service.type in ["WMS", "OWS"]: resource = cat.create_wmslayer(cascade_ws, store, layer) elif service.type == "WFS": resource = cat.create_wfslayer(cascade_ws, store, layer) if resource: bbox = resource.latlon_bbox cascaded_layer, created = Layer.objects.get_or_create( typename="%s:%s" % (cascade_ws.name, resource.name), service=service, defaults={ "name": resource.name, "workspace": cascade_ws.name, "store": store.name, "storeType": store.resource_type, "title": resource.title or "No title provided", "abstract": resource.abstract or "No abstract provided", "owner": None, "uuid": str(uuid.uuid4()), "bbox_x0": bbox[0], "bbox_x1": bbox[1], "bbox_y0": bbox[2], "bbox_y1": bbox[3], }, ) if created: cascaded_layer.save() if cascaded_layer is not None and cascaded_layer.bbox is None: cascaded_layer._populate_from_gs(gs_resource=resource) cascaded_layer.set_default_permissions() service_layer, created = ServiceLayer.objects.get_or_create( service=service, typename=cascaded_layer.name ) service_layer.layer = cascaded_layer service_layer.title = (cascaded_layer.title,) service_layer.description = (cascaded_layer.abstract,) service_layer.styles = cascaded_layer.styles service_layer.save() count += 1 else: logger.error("Resource %s from store %s could not be saved as layer" % (layer, store.name)) message = "%d Layers Registered" % count return_dict = {"status": "ok", "msg": message} return HttpResponse(json.dumps(return_dict), mimetype="application/json", status=200) elif service.type == "WCS": return HttpResponse("Not Implemented (Yet)", status=501) else: return HttpResponse("Invalid Service Type", status=400)
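# Rough usage sketch for the helper above: it expects a geonode Service record
# whose type is "WMS" or "OWS" and whose base_url points at the remote server's
# OWS endpoint. The lookup and the surrounding view wiring below are assumptions
# for illustration only.
def register_remote_wms_example(request):
    service = Service.objects.get(name='remote_wms')  # hypothetical service record
    # returns an HttpResponse with {"status": "ok", "msg": "<n> Layers Registered"}
    return _register_cascaded_layers(service, owner=request.user)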
# the last "/" is very important
import os
import sys
import traceback

from geoserver.catalog import Catalog

try:
    GEOSERVER_SERVICE_URL = os.environ["GEOSERVER_SERVICE_URL"]
except KeyError:
    print("Please set the environment variable GEOSERVER_SERVICE_URL")
    sys.exit(1)
try:
    GEOSERVER_USERNAME = os.environ["GEOSERVER_USERNAME"]
except KeyError:
    print("Please set the environment variable GEOSERVER_USERNAME")
    sys.exit(1)
try:
    GEOSERVER_PASSWORD = os.environ["GEOSERVER_PASSWORD"]
except KeyError:
    print("Please set the environment variable GEOSERVER_PASSWORD")
    sys.exit(1)

geoserver_catalog = Catalog(service_url=GEOSERVER_SERVICE_URL,
                            username=GEOSERVER_USERNAME,
                            password=GEOSERVER_PASSWORD)

try:
    results = geoserver_catalog.create_workspace(name="TOTO")
except AssertionError:
    _, _, tb = sys.exc_info()
    traceback.print_tb(tb)
    # Fixed format
    tb_info = traceback.extract_tb(tb)
    filename, line, func, text = tb_info[-1]
    print(f"An error occurred on line {line} in statement {text}")
    sys.exit(1)
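# Example of the environment this script expects before it is run (the values
# below are placeholders; note the trailing "/" on the service URL called out
# in the comment above):
#
#   export GEOSERVER_SERVICE_URL="http://localhost:8080/geoserver/rest/"
#   export GEOSERVER_USERNAME="admin"
#   export GEOSERVER_PASSWORD="geoserver"
#
# or, equivalently, from Python before importing this module:
import os
os.environ.setdefault("GEOSERVER_SERVICE_URL", "http://localhost:8080/geoserver/rest/")
os.environ.setdefault("GEOSERVER_USERNAME", "admin")
os.environ.setdefault("GEOSERVER_PASSWORD", "geoserver")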
class GeoGigUploaderBase(ImportHelper): def __init__(self, *args, **kwargs): super(GeoGigUploaderBase, self).__init__(*args, **kwargs) setUpModule() # this isn't available when being used in other module def setUp(self): self.admin_user = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_user = self.create_user('non_admin', 'non_admin') self.catalog = Catalog( ogc_server_settings.internal_rest, *ogc_server_settings.credentials ) if self.catalog.get_workspace('geonode') is None: self.catalog.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastoreNames = [] def tearDown(self): """Clean up geoserver/geogig catalog. """ # delete stores (will cascade to delete layers) for store_name in self.datastoreNames: self.catalog.delete( self.catalog.get_store(store_name), recurse=True) # delete repository reference in geoserver for store_name in self.datastoreNames: self.remove_geogig_repo(store_name) # geoserver can leave connections open - HACK HACK HACK self.free_geogig_connections() # HACK HACK HACK -- sometimes connections from geoserver to geogig are left open. This kills the postgresql backend! # this is a major hammer. Once geoserver/geogig are better at cleaning up, remove this. def free_geogig_connections(self): with db.connections["geogig"].cursor() as c: c.execute( "select pg_terminate_backend(pid) from pg_stat_activity where application_name = 'PostgreSQL JDBC Driver' or application_name='geogig'") # aggressive delete of the repo (mostly cleans up after itself) # call the geogig rest API DELETE def remove_geogig_repo(self, ref_name): username = ogc_server_settings.credentials.username password = ogc_server_settings.credentials.password url = ogc_server_settings.rest http = httplib2.Http(disable_ssl_certificate_validation=False) http.add_credentials(username, password) netloc = urlparse(url).netloc http.authorizations.append( httplib2.BasicAuthentication( (username, password), netloc, url, {}, None, None, http )) rest_url = ogc_server_settings.LOCATION + \ "geogig/repos/" + ref_name + "/delete.json" resp, content = http.request(rest_url, 'GET') response = json.loads(content) token = response["response"]["token"] rest_url = ogc_server_settings.LOCATION + \ "geogig/repos/" + ref_name + "?token=" + token resp, content = http.request(rest_url, 'DELETE') # convenience method to load in the test dataset # return a (geonode) layer # the layer will be in Geoserver and Geonode # self.catalog.get_layer(layer.name) -- to get the Geoserver Layer def fully_import_file(self, path, fname, start_time_column, end_time_column=None): # setup time if end_time_column is None: time_config = {'convert_to_date': [start_time_column], 'start_date': start_time_column, 'configureTime': True} else: time_config = {'convert_to_date': [start_time_column, end_time_column], 'start_date': start_time_column, 'end_date': end_time_column, 'configureTime': True} name = os.path.splitext(fname)[0] + "_" + str(uuid.uuid1())[:8] self.datastoreNames.append(name) # remember for future deletion full_fname = os.path.join(path, fname) configs = self.prepare_file_for_import(full_fname) configs[0].update({'name': name}) configs[0].update({'layer_name': name}) configs[0].update(time_config) # configure the datastore/repo configs[0]['geoserver_store'] = {} configs[0]['geoserver_store']['type'] = 'geogig' configs[0]['geoserver_store']['name'] = name configs[0]['geoserver_store']['create'] = 'true' configs[0]['geoserver_store']['branch'] = 'master' 
configs[0]['geoserver_store']['geogig_repository'] = "geoserver://" + name result = self.generic_import(fname, path=path, configs=configs) return result def import_file(self, path, configs=None): """Imports the file. """ if configs is None: configs = [] self.assertTrue(os.path.exists(path), path) # run ogr2ogr ogr = OGRImport(path) layers = ogr.handle(configuration_options=configs) return layers def generic_import(self, filename, path, configs=None): if configs is None: configs = [{'index': 0}] path = os.path.join(path, filename) results = self.import_file(path, configs=configs) layer_results = [] for result in results: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertTrue(layer.store in self.datastoreNames) self.assertEqual(layer.storeType, 'dataStore') if not path.endswith('zip'): self.assertGreaterEqual( layer.attributes.count(), DataSource(path)[0].num_fields ) layer_results.append(layer) return layer_results[0] def prepare_file_for_import(self, filepath): """ Prepares the file path provided for import; performs some housekeeping, uploads & configures the file. Returns a list of dicts of the form {'index': <layer_index>, 'upload_layer_id': <upload_layer_id>} these may be used as configuration options for importing all of the layers in the file. """ # Make a copy of the test file, as it's removed in configure_upload() filename = os.path.basename(filepath) tmppath = os.path.join('/tmp', filename) shutil.copy(filepath, tmppath) # upload & configure_upload expect closed file objects # This is heritage from originally being closely tied to a view passing request.Files of = open(tmppath, 'rb') of.close() files = [of] uploaded_data = self.upload(files, self.admin_user) self.configure_upload(uploaded_data, files) configs = [{'index': l.index, 'upload_layer_id': l.id} for l in uploaded_data.uploadlayer_set.all()] return configs def create_user(self, username, password, **kwargs): """Convenience method for creating users. """ user, created = User.objects.get_or_create(username=username, **kwargs) if created: user.set_password(password) user.save() return user
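# Example of how a test built on GeoGigUploaderBase is expected to call
# fully_import_file(); the data directory, file name, and time column below are
# placeholders drawn from the importer test fixtures referenced earlier in this
# document, not part of this class.
def test_geogig_import_example(self):
    test_data_dir = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files')
    # imports the shapefile into a fresh geogig-backed datastore and returns the geonode Layer
    layer = self.fully_import_file(test_data_dir, 'boxes_with_date.shp', 'date')
    self.assertEqual(layer.storeType, 'dataStore')
    # the same layer is also published in geoserver, backed by the geogig repository
    self.assertIsNotNone(self.catalog.get_layer(layer.name))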
class GeoServerDatasetEngineEnd2EndTests(unittest.TestCase): def setUp(self): # Files self.tests_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) self.files_root = os.path.join(self.tests_root, 'files') # GeoServer self.gs_endpoint = TEST_GEOSERVER_DATASET_SERVICE['ENDPOINT'] self.gs_username = TEST_GEOSERVER_DATASET_SERVICE['USERNAME'] self.gs_password = TEST_GEOSERVER_DATASET_SERVICE['PASSWORD'] self.catalog = GeoServerCatalog(self.gs_endpoint, username=self.gs_username, password=self.gs_password) # Postgis self.pg_username = TEST_POSTGIS_SERVICE['USERNAME'] self.pg_password = TEST_POSTGIS_SERVICE['PASSWORD'] self.pg_database = TEST_POSTGIS_SERVICE['DATABASE'] self.pg_table_name = 'points' self.pg_host = TEST_POSTGIS_SERVICE['HOST'] self.pg_port = TEST_POSTGIS_SERVICE['PORT'] self.pg_url = TEST_POSTGIS_SERVICE['URL'] self.pg_public_url = TEST_POSTGIS_SERVICE['PUBLIC_URL'] # Setup a testing workspace self.workspace_name = random_string_generator(10) self.workspace_uri = 'http://www.tethysplatform.org/{}'.format(self.workspace_name) retries = 5 while retries > 0: try: self.catalog.create_workspace(self.workspace_name, self.workspace_uri) break except AssertionError as e: if 'Error persisting' in str(e) and retries > 0: print("WARNING: FAILED TO PERSIST WORKSPACE.") retries -= 1 else: raise # Setup Postgis database connection self.public_engine = create_engine(self.pg_public_url) self.connection = self.public_engine.connect() self.transaction = self.connection.begin() # Create GeoServer Engine self.endpoint = TEST_GEOSERVER_DATASET_SERVICE['ENDPOINT'] self.geoserver_engine = GeoServerSpatialDatasetEngine(endpoint=self.endpoint, username=TEST_GEOSERVER_DATASET_SERVICE['USERNAME'], password=TEST_GEOSERVER_DATASET_SERVICE['PASSWORD']) self.geometry_column = 'geometry' self.geometry_type = 'Point' self.srid = 4326 def assert_valid_response_object(self, response_object): # Response object should be a dictionary with the keys 'success' and either 'result' if success is True # or 'error' if success is False self.assertIsInstance(response_object, dict) self.assertIn('success', response_object) if isinstance(response_object, dict) and 'success' in response_object: if response_object['success'] is True: self.assertIn('result', response_object) elif response_object['success'] is False: self.assertIn('error', response_object) def tearDown(self): # Clean up GeoServer workspace = self.catalog.get_workspace(self.workspace_name) self.catalog.delete(workspace, recurse=True, purge=True) # Clean up Postgis database self.transaction.rollback() self.connection.close() self.public_engine.dispose() def setup_postgis_table(self): """ Creates table in the database named "points" with two entries. The table has three columns: "id", "name", and "geometry." Use this table for the tests that require a database. """ # Clean up delete_sql = "DROP TABLE IF EXISTS {table}".\ format(table=self.pg_table_name) self.connection.execute(delete_sql) # Create table geom_table_sql = "CREATE TABLE IF NOT EXISTS {table} (" \ "id integer CONSTRAINT points_primary_key PRIMARY KEY, " \ "name varchar(20)" \ "); " \ "SELECT AddGeometryColumn('public', '{table}', 'geometry', 4326, 'POINT', 2);". 
\ format(table=self.pg_table_name) self.connection.execute(geom_table_sql) insert_sql = "INSERT INTO {table} VALUES ({id}, '{name}', ST_GeomFromText('POINT({lon} {lat})', 4326));" rows = [ {"id": 1, "name": "Aquaveo", "lat": 40.276039, "lon": -111.651120}, {"id": 2, "name": "BYU", "lat": 40.252335, "lon": -111.649326}, ] for r in rows: sql = insert_sql.format( table=self.pg_table_name, id=r['id'], name=r['name'], lat=r['lat'], lon=r['lon'] ) self.connection.execute(sql) self.transaction.commit() def test_create_shapefile_resource_base(self): # call methods: create_shapefile_resource, list_resources, get_resource, delete_resource # TEST create shapefile # Setup filename = 'test' shapefile_name = os.path.join(self.files_root, 'shapefile', filename) workspace = self.workspace_name store_id = random_string_generator(10) store_id_name = '{}:{}'.format(workspace, store_id) # store_id_name = store_id # Execute response = self.geoserver_engine.create_shapefile_resource(store_id=store_id_name, shapefile_base=shapefile_name, overwrite=True) # Validate response object self.assert_valid_response_object(response) # Should succeed self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) self.assertIn(store_id, r['name']) self.assertIn(store_id, r['store']) # TEST list_resources # Execute response = self.geoserver_engine.list_resources() # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result result = response['result'] # Returns list self.assertIsInstance(result, list) # layer listed self.assertIn(store_id, result) # TEST get_resources # Execute # Geoserver uses the store_id as the layer/resource name (not the filename) resource_id_name = '{}:{}'.format(workspace, store_id) response = self.geoserver_engine.get_resource(resource_id=resource_id_name) # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) # Properties self.assertIn('name', r) self.assertEquals(store_id, r['name']) self.assertIn(store_id, r['wfs']['shapefile']) # TEST delete_resource # Execute # This case the resource id is the same as the store id. response = self.geoserver_engine.delete_resource(resource_id=resource_id_name, store_id=store_id) # Validate response object self.assert_valid_response_object(response) # Success # TODO: delete_resource is returning a 403 error: not authorized. 
# self.assertTrue(response['success']) def test_create_shapefile_resource_zip(self): # call methods: create_shapefile_resource, list_layers, get_layer, delete_layer # TEST create_shapefile_resource # Test1.zip # Setup shapefile_zip = os.path.join(self.files_root, 'shapefile', "test1.zip") shapefile = "test1" workspace = self.workspace_name store_id = random_string_generator(10) store_id_name = '{}:{}'.format(workspace, store_id) # Execute response = self.geoserver_engine.create_shapefile_resource(store_id=store_id_name, shapefile_zip=shapefile_zip, overwrite=True) # Validate response object self.assert_valid_response_object(response) # Should succeed self.assertTrue(response['success']) # Extract Result r = response['result'] # Type filename = os.path.splitext(os.path.basename(shapefile_zip))[0] self.assertIsInstance(r, dict) self.assertIn(filename, r['name']) self.assertIn(store_id, r['store']) # TEST list_layers test # Execute response = self.geoserver_engine.list_layers() # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result result = response['result'] # Returns list self.assertIsInstance(result, list) # Get the last item from result layer_id = '{}:{}'.format(workspace, shapefile) # TEST get layers test # Execute response = self.geoserver_engine.get_layer(layer_id=layer_id, store_id=store_id) # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) self.assertIn(filename, r['name']) self.assertIn(self.workspace_name, r['name']) # TEST delete_layer self.geoserver_engine.delete_layer(layer_id=layer_id, store_id=store_id) self.assert_valid_response_object(response) self.assertTrue(response['success']) def test_create_shapefile_resource_upload(self): # call methods: create_shapefile_resource, list_stores, get_store, delete_store # TEST create_shapefile_resource # Use in memory file list: test.shp and friends # Setup shapefile_cst = os.path.join(self.files_root, 'shapefile', 'test.cst') shapefile_dbf = os.path.join(self.files_root, 'shapefile', 'test.dbf') shapefile_prj = os.path.join(self.files_root, 'shapefile', 'test.prj') shapefile_shp = os.path.join(self.files_root, 'shapefile', 'test.shp') shapefile_shx = os.path.join(self.files_root, 'shapefile', 'test.shx') # Workspace is given store_rand = random_string_generator(10) store_id = '{}:{}'.format(self.workspace_name, store_rand) with open(shapefile_cst, 'rb') as cst_upload,\ open(shapefile_dbf, 'rb') as dbf_upload,\ open(shapefile_prj, 'rb') as prj_upload,\ open(shapefile_shp, 'rb') as shp_upload,\ open(shapefile_shx, 'rb') as shx_upload: upload_list = [cst_upload, dbf_upload, prj_upload, shp_upload, shx_upload] response = self.geoserver_engine.create_shapefile_resource(store_id=store_id, shapefile_upload=upload_list, overwrite=True) # Should succeed self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) self.assertIn(store_rand, r['name']) self.assertIn(store_rand, r['store']) # TEST list_stores # Execute response = self.geoserver_engine.list_stores() # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result result = response['result'] # layer group listed self.assertIn(store_rand, result) # TEST get store # Execute response = self.geoserver_engine.get_store(store_id=store_id) # 
Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) # Properties self.assertIn('name', r) self.assertIn(r['name'], store_rand) self.assertIn('workspace', r) self.assertEqual(self.workspace_name, r['workspace']) # TEST delete_store response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True) # Failure Check self.assert_valid_response_object(response) self.assertTrue(response['success']) def test_create_coverage_resource_arcgrid(self): # call methods: create_coverage_resource, list_resources, get_resource, delete_resource # TEST create_coverage_resource # precip30min.zip store_name = random_string_generator(10) expected_store_id = '{}:{}'.format(self.workspace_name, store_name) expected_coverage_type = 'arcgrid' coverage_file_name = 'precip30min.zip' coverage_name = coverage_file_name.split('.')[0] coverage_file = os.path.join(self.files_root, "arc_sample", coverage_file_name) with open(coverage_file, 'rb') as coverage_upload: # Execute response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id, coverage_type=expected_coverage_type, coverage_upload=coverage_upload, overwrite=True) # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) # Values self.assertEqual(coverage_name, r['name']) self.assertEqual(self.workspace_name, r['workspace']) # TEST list_resources # Execute response = self.geoserver_engine.list_resources() # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result result = response['result'] # Returns list self.assertIsInstance(result, list) # layer listed self.assertIn(coverage_name, result) # TEST get_resource # Execute resource_id = '{}:{}'.format(self.workspace_name, coverage_name) response = self.geoserver_engine.get_resource(resource_id=resource_id, store_id=store_name) # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] self.assertIn('ArcGrid', r['keywords']) self.assertEqual(coverage_name, r['title']) self.assertEqual('coverage', r['resource_type']) # delete_resource # TODO: delete_resource is returning a 403 error: not authorized. 
# Execute resource_id = '{}:{}'.format(self.workspace_name, coverage_name) response = self.geoserver_engine.delete_resource(resource_id=resource_id, store_id=store_name) # # Validate response object self.assert_valid_response_object(response) # # Success # self.assertTrue(response['success']) def test_create_coverage_resource_grassgrid(self): # call methods: create_coverage_resource, list_layers, get_layer, delete_layer # TEST create_coverage resource # my_grass.zip store_name = random_string_generator(10) expected_store_id = '{}:{}'.format(self.workspace_name, store_name) expected_coverage_type = 'grassgrid' coverage_file_name = 'my_grass.zip' coverage_name = coverage_file_name.split('.')[0] coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name) # Execute response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id, coverage_type=expected_coverage_type, coverage_file=coverage_file, overwrite=True) # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) # Tests self.assertIn(coverage_name, r['name']) self.assertEqual(self.workspace_name, r['workspace']) # TEST list_layers # Execute response = self.geoserver_engine.list_layers() # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result result = response['result'] # Returns list self.assertIsInstance(result, list) # Check if layer is in list self.assertIn(coverage_name, result) # TEST get_layer # Execute layer_id = '{}:{}'.format(self.workspace_name, coverage_name) response = self.geoserver_engine.get_layer(layer_id=layer_id, store_id=store_name) # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) self.assertIn(store_name, r['store']) self.assertIn(self.workspace_name, r['name']) # TEST delete_layer self.geoserver_engine.delete_layer(layer_id=layer_id, store_id=store_name) self.assert_valid_response_object(response) self.assertTrue(response['success']) def test_create_coverage_resource_geotiff(self): # adem.tif # call methods: create_coverage_resource, list_stores, get_store, delete_store # TEST create_coverage_resource store_name = random_string_generator(10) expected_store_id = '{}:{}'.format(self.workspace_name, store_name) expected_coverage_type = 'geotiff' coverage_file_name = 'adem.tif' coverage_name = coverage_file_name.split('.')[0] coverage_file = os.path.join(self.files_root, coverage_file_name) with open(coverage_file, 'rb') as coverage_upload: # Execute response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id, coverage_type=expected_coverage_type, coverage_upload=coverage_upload, overwrite=True) # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result r = response['result'] # Type self.assertIsInstance(r, dict) # Values self.assertEqual(coverage_name, r['name']) self.assertEqual(self.workspace_name, r['workspace']) # TEST list_stores # Execute response = self.geoserver_engine.list_stores() # Validate response object self.assert_valid_response_object(response) # Success self.assertTrue(response['success']) # Extract Result result = response['result'] # TEST layer group listed self.assertIn(store_name, result) # TEST get 
    def test_create_coverage_resource_geotiff(self):
        # adem.tif
        # call methods: create_coverage_resource, list_stores, get_store, delete_store

        # TEST create_coverage_resource
        store_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_name)
        expected_coverage_type = 'geotiff'
        coverage_file_name = 'adem.tif'
        coverage_name = coverage_file_name.split('.')[0]
        coverage_file = os.path.join(self.files_root, coverage_file_name)

        with open(coverage_file, 'rb') as coverage_upload:
            # Execute
            response = self.geoserver_engine.create_coverage_resource(
                store_id=expected_store_id,
                coverage_type=expected_coverage_type,
                coverage_upload=coverage_upload,
                overwrite=True
            )

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Values
        self.assertEqual(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_stores
        # Execute
        response = self.geoserver_engine.list_stores()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Store listed
        self.assertIn(store_name, result)

        # TEST get_store
        # Execute
        response = self.geoserver_engine.get_store(store_id=expected_store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(r['name'], store_name)
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_store
        response = self.geoserver_engine.delete_store(store_id=expected_store_id, purge=True, recurse=True)

        # Success check
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_coverage_resource_world_file_tif(self):
        # pk50095.zip
        # call methods: create_coverage_resource, list_layers, get_layer, delete_layer

        # TEST create_coverage_resource
        store_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_name)
        expected_coverage_type = 'worldimage'
        coverage_file_name = 'Pk50095.zip'
        coverage_name = coverage_file_name.split('.')[0]
        coverage_file = os.path.join(self.files_root, "img_sample", coverage_file_name)

        # Execute
        response = self.geoserver_engine.create_coverage_resource(
            store_id=expected_store_id,
            coverage_type=expected_coverage_type,
            coverage_file=coverage_file,
            overwrite=True
        )

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Tests
        self.assertIn(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_layers
        # Execute
        response = self.geoserver_engine.list_layers()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # Check if layer is in list
        self.assertIn(coverage_name, result)

        # TEST get_layer
        # Execute
        layer_id = '{}:{}'.format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.get_layer(layer_id=layer_id, store_id=store_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn(store_name, r['store'])
        self.assertIn(self.workspace_name, r['name'])

        # TEST delete_layer
        response = self.geoserver_engine.delete_layer(layer_id=coverage_name, store_id=store_name)
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
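    # Note on the 'worldimage' coverage type exercised above: the zip archive is assumed to
    # bundle the raster together with its world file (e.g. .tfw) and .prj sidecars, which is
    # what allows GeoServer to georeference the image. (Assumption about the contents of
    # Pk50095.zip, stated here for context only.)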
    def test_create_coverage_resource_upload(self):
        # DO NOT MOCK
        # Use in memory file list: precip30min.prj & precip30min.asc
        # call methods: create_coverage_resource, list_resources, get_resource, delete_resource
        store_id_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        expected_coverage_type = 'arcgrid'
        coverage_file_name = 'precip30min.asc'
        prj_file_name = 'precip30min.prj'
        coverage_name = coverage_file_name.split('.')[0]
        arc_sample = os.path.join(self.files_root, "arc_sample")
        coverage_file = os.path.join(arc_sample, coverage_file_name)
        prj_file = os.path.join(arc_sample, prj_file_name)

        with open(coverage_file, 'rb') as coverage_upload:
            with open(prj_file, 'rb') as prj_upload:
                upload_list = [coverage_upload, prj_upload]

                # Execute
                response = self.geoserver_engine.create_coverage_resource(
                    store_id=expected_store_id,
                    coverage_type=expected_coverage_type,
                    coverage_upload=upload_list,
                    overwrite=True
                )

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Values
        self.assertEqual(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_resources
        # Execute
        response = self.geoserver_engine.list_resources()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # layer listed
        self.assertIn(coverage_name, result)

        # TEST get_resource
        # Execute
        resource_id = "{}:{}".format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.get_resource(resource_id=resource_id, store_id=store_id_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertEqual(coverage_name, r['name'])
        self.assertIn(coverage_name, r['wcs']['arcgrid'])

        # TEST delete_resource
        # TODO: delete_resource is returning a 403 error: not authorized.
        # Execute
        # In this case the resource id is the same as the filename.
        resource_id = '{}:{}'.format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.delete_resource(resource_id=resource_id, store_id=store_id_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        # self.assertTrue(response['success'])
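    # Illustrative sketch (not used by the tests above): when a coverage needs several sidecar
    # files, the nested `with open(...)` blocks can be replaced with an ExitStack. Assumes
    # Python 3's contextlib.ExitStack (or the contextlib2 backport); the helper name below is
    # hypothetical and only mirrors the upload pattern of the test above.
    def _upload_coverage_with_sidecars(self, store_id, coverage_type, *paths):
        from contextlib import ExitStack  # assumed Python 3

        with ExitStack() as stack:
            # Open every file and keep the handles alive for the duration of the upload
            upload_list = [stack.enter_context(open(path, 'rb')) for path in paths]
            return self.geoserver_engine.create_coverage_resource(
                store_id=store_id,
                coverage_type=coverage_type,
                coverage_upload=upload_list,
                overwrite=True
            )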
    def test_create_layer_group(self):
        # call methods: create_layer_group, list_layer_groups, get_layer_group, delete_layer_group

        # TEST create_layer_group
        # Use existing layers and styles in geoserver:
        #   layers: sf:roads, sf:bugsites, sf:streams;
        #   styles: simple_roads, capitals, simple_streams

        # Do create
        # expected_layer_group_id = '{}:{}'.format(self.workspace_name, random_string_generator(10))
        expected_layer_group_id = random_string_generator(10)
        expected_layers = ['roads', 'bugsites', 'streams']
        expected_styles = ['simple_roads', 'capitals', 'simple_streams']

        # TODO: create_layer_group: fails on catalog.save() when workspace is given.
        response = self.geoserver_engine.create_layer_group(
            layer_group_id=expected_layer_group_id,
            layers=expected_layers,
            styles=expected_styles
        )

        # Should succeed
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

        # Validate
        result = response['result']
        self.assertEqual(result['name'], expected_layer_group_id)
        self.assertEqual(result['layers'], expected_layers)
        self.assertEqual(result['styles'], expected_styles)

        # TEST list_layer_groups
        # Execute
        response = self.geoserver_engine.list_layer_groups()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # layer group listed
        self.assertIn(expected_layer_group_id, result)

        # TEST get_layer_group
        # Execute
        response = self.geoserver_engine.get_layer_group(layer_group_id=expected_layer_group_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('workspace', r)
        self.assertEqual(None, r['workspace'])
        self.assertIn('layers', r)
        self.assertEqual(expected_layers, r['layers'])
        self.assertIn('styles', r)
        self.assertEqual(expected_styles, r['styles'])
        self.assertNotIn('dom', r)

        # TEST delete_layer_group
        # Clean up
        response = self.geoserver_engine.delete_layer_group(layer_group_id=expected_layer_group_id)
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
        # self.assertIsNone(response['result'])

    def test_create_workspace(self):
        # call methods: create_workspace, list_workspaces, get_workspace, delete_workspace

        # TEST create_workspace
        expected_workspace_id = random_string_generator(10)
        expected_uri = 'http://www.tethysplatform.org/{}'.format(expected_workspace_id)

        # create workspace test
        response = self.geoserver_engine.create_workspace(workspace_id=expected_workspace_id, uri=expected_uri)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn('name', r)
        self.assertEqual(expected_workspace_id, r['name'])

        # TEST list_workspaces
        # Execute
        response = self.geoserver_engine.list_workspaces()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Workspace listed
        self.assertIn(expected_workspace_id, result)

        # TEST get_workspace
        # Execute
        response = self.geoserver_engine.get_workspace(workspace_id=expected_workspace_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(r['name'], expected_workspace_id)

        # TEST delete_workspace
        # Do delete
        response = self.geoserver_engine.delete_workspace(workspace_id=expected_workspace_id)

        # Should succeed
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
        self.assertIsNone(response['result'])
    def test_create_style(self):
        # call methods: create_style, list_styles, get_style, delete_style

        # TEST create_style
        expected_style_id_name = random_string_generator(10)
        expected_style_id = '{}:{}'.format(self.workspace_name, expected_style_id_name)
        style_file_name = 'point.sld'
        expected_sld = os.path.join(self.files_root, style_file_name)

        # Execute
        with open(expected_sld, 'r') as sld_file:
            sld_string = sld_file.read()

        # TODO: create_style: fails when overwrite is False.
        response = self.geoserver_engine.create_style(style_id=expected_style_id, sld=sld_string, overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # TEST list_styles
        # Execute
        response = self.geoserver_engine.list_styles(workspace=self.workspace_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # Style listed
        self.assertIn(expected_style_id_name, result)

        # TEST get_style
        # Execute
        response = self.geoserver_engine.get_style(style_id=expected_style_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(r['name'], expected_style_id)
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_style
        # Do delete
        response = self.geoserver_engine.delete_style(style_id=expected_style_id)

        # Should succeed
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
        self.assertIsNone(response['result'])

    def test_link_and_add_table(self):
        # call methods: link_sqlalchemy_db_to_geoserver, add_table_to_postgis_store, list_stores, get_store,
        # delete_store
        self.setup_postgis_table()

        # TEST link_sqlalchemy_db_to_geoserver
        store_id_name = random_string_generator(10)
        store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        sqlalchemy_engine = create_engine(self.pg_url)

        response = self.geoserver_engine.link_sqlalchemy_db_to_geoserver(
            store_id=store_id,
            sqlalchemy_engine=sqlalchemy_engine,
            docker=True
        )

        # Check for success response
        self.assertTrue(response['success'])
        sqlalchemy_engine.dispose()

        # TEST add_table_to_postgis_store
        # Execute
        response = self.geoserver_engine.add_table_to_postgis_store(store_id=store_id, table=self.pg_table_name)

        # Check for success response
        self.assertTrue(response['success'])

        # TEST list_stores
        # Execute
        response = self.geoserver_engine.list_stores()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Store listed
        self.assertIn(store_id_name, result)

        # TEST get_store
        # Execute
        response = self.geoserver_engine.get_store(store_id=store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(store_id_name, r['name'])
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_store
        response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True)

        # Success check
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
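    # Illustrative sketch (not used by the tests above): the SQLAlchemy engine handed to
    # link_sqlalchemy_db_to_geoserver is expected to point at the same PostGIS database
    # described by the pg_* attributes. A URL of this shape is an assumption about how
    # self.pg_url is composed, shown here only as an example.
    def _make_pg_url(self):
        return 'postgresql://{user}:{password}@{host}:{port}/{database}'.format(
            user=self.pg_username,
            password=self.pg_password,
            host=self.pg_host,
            port=self.pg_port,
            database=self.pg_database
        )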
    def test_create_postgis_feature_resource(self):
        # call methods: create_postgis_feature_resource (with table), list_stores, get_store, delete_store
        self.setup_postgis_table()

        # TEST create_postgis_feature_resource (with table)
        store_id_name = random_string_generator(10)
        store_id = '{}:{}'.format(self.workspace_name, store_id_name)

        response = self.geoserver_engine.create_postgis_feature_resource(
            store_id=store_id,
            host=self.pg_host,
            port=self.pg_port,
            database=self.pg_database,
            user=self.pg_username,
            password=self.pg_password,
            table=self.pg_table_name
        )
        self.assertTrue(response['success'])

        # TEST list_stores
        # Execute
        response = self.geoserver_engine.list_stores()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Store listed
        self.assertIn(store_id_name, result)

        # TEST get_store
        # Execute
        response = self.geoserver_engine.get_store(store_id=store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(store_id_name, r['name'])
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_store
        response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True)

        # Success check
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_sql_view(self):
        # call methods: create_sql_view, list_resources, list_stores, list_layers
        self.setup_postgis_table()

        # Setup: create_postgis_feature_resource (with table)
        store_id_name = random_string_generator(10)
        store_id = '{}:{}'.format(self.workspace_name, store_id_name)

        response = self.geoserver_engine.create_postgis_feature_resource(
            store_id=store_id,
            host=self.pg_host,
            port=self.pg_port,
            database=self.pg_database,
            user=self.pg_username,
            password=self.pg_password,
            table=self.pg_table_name
        )
        self.assertTrue(response['success'])

        feature_type_name = random_string_generator(10)
        postgis_store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        sql = "SELECT * FROM {}".format(self.pg_table_name)
        geometry_column = self.geometry_column
        geometry_type = self.geometry_type

        response = self.geoserver_engine.create_sql_view(
            feature_type_name=feature_type_name,
            postgis_store_id=postgis_store_id,
            sql=sql,
            geometry_column=geometry_column,
            geometry_type=geometry_type
        )
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn('name', r)
        self.assertIn(feature_type_name, r['name'])

        # TEST list_resources
        # Execute
        response = self.geoserver_engine.list_resources()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # layer listed
        self.assertIn(feature_type_name, result)

        # TEST get_resource
        # Execute
        # Geoserver uses the store_id as the layer/resource name (not the filename)
        resource_id_name = '{}:{}'.format(self.workspace_name, feature_type_name)
        response = self.geoserver_engine.get_resource(resource_id=resource_id_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertEqual(feature_type_name, r['name'])
        self.assertIn(feature_type_name, r['wfs']['shapefile'])

        # TEST delete_resource
        # Execute
        # In this case the resource id is the same as the store id.
        response = self.geoserver_engine.delete_resource(resource_id=resource_id_name, store_id=store_id_name)

        # Validate response object
        self.assert_valid_response_object(response)