def wrapper(self, *args, **kwargs):
    workspace_name = 'geonode'
    django_datastore = db.connections['datastore']
    catalog = Catalog(
        ogc_server_settings.internal_rest,
        *ogc_server_settings.credentials
    )
    # Set up workspace/datastore as appropriate
    ws = catalog.get_workspace(workspace_name)
    delete_ws = False
    if ws is None:
        ws = catalog.create_workspace(workspace_name, 'http://www.geonode.org/')
        delete_ws = True

    datastore = create_datastore(workspace_name, django_datastore, catalog)

    # test method called here
    try:
        ret = wrapped_func(self, *args, **kwargs)
    finally:
        # Tear down workspace/datastore as appropriate
        if delete_ws:
            catalog.delete(ws, recurse=True)
        else:
            catalog.delete(datastore, recurse=True)

    return ret
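The wrapper above closes over `wrapped_func`, so it is the inner function of a decorator. A minimal sketch of the enclosing decorator, assuming it is applied to test methods; the decorator name `with_geonode_workspace` is hypothetical.

# Sketch of the enclosing decorator (name is hypothetical); `wrapped_func`
# must be the closed-over test method, as the wrapper body above assumes.
import functools

def with_geonode_workspace(wrapped_func):
    @functools.wraps(wrapped_func)
    def wrapper(self, *args, **kwargs):
        # ... set up workspace/datastore, as in the body above ...
        ret = wrapped_func(self, *args, **kwargs)
        # ... tear down workspace/datastore, as in the body above ...
        return ret
    return wrapper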
def _unpublish_from_geoserver(resource, geoserver_context, logger):
    '''Contact Geoserver and unpublish a layer previously created as an
    ingested resource.
    '''
    from geoserver.catalog import Catalog, FailedRequestError

    if resource['format'] == WMSResource.FORMAT:
        layer_name = resource['wms_layer']
    else:
        layer_name = resource['wfs_layer']

    try:
        api_url = geoserver_context['api_url']
        cat = Catalog(
            api_url + '/rest',
            username=geoserver_context['username'],
            password=geoserver_context['password'])
        layer = cat.get_layer(resource['parent_resource_id'].lower())
        cat.delete(layer)
        cat.reload()
        logger.info('Unpublished layer %s from Geoserver' % (layer_name))
    except (AttributeError, FailedRequestError) as ex:
        logger.error('Failed to unpublish layer %s: %s' % (layer_name, ex))
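A hypothetical invocation of the function above; the URL, credentials, and resource values are illustrative, not real settings, and `WMSResource` is assumed to be importable from the surrounding module.

# Hypothetical usage of _unpublish_from_geoserver; all values illustrative.
import logging

geoserver_context = {
    'api_url': 'http://localhost:8080/geoserver',   # illustrative
    'username': 'admin',                            # illustrative
    'password': 'geoserver',                        # illustrative
}
resource = {
    'format': WMSResource.FORMAT,
    'wms_layer': 'example_wms_layer',               # illustrative
    'wfs_layer': 'example_wfs_layer',               # illustrative
    'parent_resource_id': 'Example-Resource-Id',    # illustrative
}
_unpublish_from_geoserver(resource, geoserver_context,
                          logging.getLogger(__name__))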
class TestCatalogBase(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        pass

    # @classmethod
    # def tearDownClass(cls):
    #     sql = """SELECT relname FROM pg_class
    #              WHERE relkind = 'r' AND relname !~ '^(pg_|sql_)'
    #              AND relname != 'spatial_ref_sys';"""
    #     conn = psycopg2.connect(
    #         database=_DATABASE,
    #         user=_USER,
    #         password=_PASSWORD,
    #         host=_HOST,
    #         port=_PORT
    #     )
    #     cur = conn.cursor()
    #     cur.execute(sql)
    #     tables = [r[0] for r in cur.fetchall()]
    #     if len(tables) == 0:
    #         return
    #     cur.execute("DROP TABLE {};".format(','.join(tables)))
    #     conn.commit()

    def setUp(self):
        self.catalog = Catalog(settings.SERVICE_URL)
        self.workspace = self.catalog.create_workspace("gsconfig_test")

    def tearDown(self):
        self.catalog.delete(self.workspace, recurse=True)
class TestCatalogBase(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        sql = """SELECT relname FROM pg_class
                 WHERE relkind = 'r' AND relname !~ '^(pg_|sql_)'
                 AND relname != 'spatial_ref_sys';"""
        conn = psycopg2.connect(
            database=_DATABASE,
            user=_USER,
            password=_PASSWORD,
            host=_HOST,
            port=_PORT
        )
        cur = conn.cursor()
        cur.execute(sql)
        tables = [r[0] for r in cur.fetchall()]
        if len(tables) == 0:
            return
        cur.execute("DROP TABLE {};".format(','.join(tables)))
        conn.commit()

    def setUp(self):
        self.catalog = Catalog(settings.SERVICE_URL)
        self.workspace = self.catalog.create_workspace("gsconfig_test")

    def tearDown(self):
        self.catalog.delete(self.workspace, recurse=True)
def _unpublish_from_geoserver(resource_id, geoserver_context):
    geoserver_url = geoserver_context['url']
    cat = Catalog(geoserver_url.rstrip('/') + '/rest',
                  username=geoserver_context['username'],
                  password=geoserver_context['password'])
    layer = cat.get_layer(resource_id.lower())
    cat.delete(layer)
    cat.reload()
def _unpublish_from_geoserver(resource_id, geoserver_context):
    geoserver_url = geoserver_context['geoserver_url']
    geoserver_admin = geoserver_context['geoserver_admin']
    geoserver_password = geoserver_context['geoserver_password']
    cat = Catalog(geoserver_url + '/rest',
                  username=geoserver_admin,
                  password=geoserver_password)
    layer = cat.get_layer(resource_id.lower())
    cat.delete(layer)
    cat.reload()
class UploaderTests(MapStoryTestMixin):
    """
    Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {'database': settings['NAME'],
                  'passwd': settings['PASSWORD'],
                  'namespace': 'http://www.geonode.org/',
                  'type': 'PostGIS',
                  'dbtype': 'postgis',
                  'host': settings['HOST'],
                  'user': settings['USER'],
                  'port': settings['PORT'],
                  'enabled': "True"}

        store = catalog.create_datastore(settings['NAME'],
                                         workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):
        if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..',
                                           'importer-test-files')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user(
            'admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user(
            'non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest,
                           *ogc_server_settings.credentials)
        if self.cat.get_workspace('geonode') is None:
            self.cat.create_workspace('geonode', 'http://geonode.org')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)
def remove_service(request, service_id):
    """
    Delete a service, and its constituent layers.
    """
    service_obj = get_object_or_404(Service, pk=service_id)

    if not request.user.has_perm("maps.delete_service", obj=service_obj):
        return HttpResponse(
            loader.render_to_string(
                "401.html",
                RequestContext(request, {
                    "error_message":
                        _("You are not permitted to remove this service.")}),
            ),
            status=401,
        )

    if request.method == "GET":
        return render_to_response(
            "services/service_remove.html",
            RequestContext(request, {"service": service_obj}))
    elif request.method == "POST":
        # Retrieve this service's workspace from the GeoServer catalog.
        cat = Catalog(settings.OGC_SERVER["default"]["LOCATION"] + "rest",
                      _user, _password)
        workspace = cat.get_workspace(service_obj.name)

        # Delete nested workspace structure from GeoServer for this service.
        if workspace:
            for store in cat.get_stores(workspace):
                for resource in cat.get_resources(store):
                    for layer in cat.get_layers(resource):
                        cat.delete(layer)
                    cat.delete(resource)
                cat.delete(store)
            cat.delete(workspace)

        # Delete service from GeoNode.
        service_obj.delete()
        return HttpResponseRedirect(reverse("services"))
def geoserver_upload(self, input_file):
    print "upload", input_file
    path, file_ext = os.path.split(input_file)
    f = os.path.splitext(file_ext)[0]
    cat = Catalog(self.GEOSERVER_HOST, self.GEOSERVER_USER,
                  self.GEOSERVER_PASSWORD)
    try:
        cat.create_coveragestore(
            name=f,
            data=input_file,
            workspace=cat.get_workspace(self.GEOSERVER_WORKSPACE),
            overwrite=True)
    except UploadError:
        # The coverage already exists; delete the layer and resource,
        # then retry the upload.
        print "map already exists"
        cat.delete(cat.get_layer(f))
        cat.delete(cat.get_resources(f, self.GEOSERVER_WORKSPACE)[0])
        cat.create_coveragestore(
            name=f,
            data=input_file,
            workspace=cat.get_workspace(self.GEOSERVER_WORKSPACE),
            overwrite=True)
def style_update(layer, style_template):
    # Get geoserver catalog
    cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + 'rest',
                  username=settings.OGC_SERVER['default']['USER'],
                  password=settings.OGC_SERVER['default']['PASSWORD'])

    # Get equivalent geoserver layer
    gs_layer = cat.get_layer(layer.name)
    print layer.name, ': gs_layer:', gs_layer.name

    # Get current style
    # pprint(dir(gs_layer))
    cur_def_gs_style = gs_layer._get_default_style()
    # pprint(dir(cur_def_gs_style))
    if cur_def_gs_style is not None:
        print layer.name, ': cur_def_gs_style.name:', cur_def_gs_style.name

    # Get proper style
    attributes = [a.attribute for a in layer.attributes]
    gs_style = None
    if '_fh' in layer.name:
        if 'Var' in attributes:
            gs_style = cat.get_style(style_template)
        elif 'Merge' in attributes:
            gs_style = cat.get_style("fhm_merge")
        elif 'UVar' in attributes:
            gs_style = cat.get_style('fhm_uvar')
    else:
        gs_style = cat.get_style(style_template)

    # has_layer_changes = False
    try:
        if gs_style is not None:
            print layer.name, ': gs_style.name:', gs_style.name
            if cur_def_gs_style and cur_def_gs_style.name != gs_style.name:
                print layer.name, ': Setting default style...'
                gs_layer._set_default_style(gs_style)
                cat.save(gs_layer)
                print layer.name, ': Deleting old default style from geoserver...'
                cat.delete(cur_def_gs_style)
                print layer.name, ': Deleting old default style from geonode...'
                gn_style = Style.objects.get(name=layer.name)
                gn_style.delete()
    except Exception:
        logger.exception("Error setting style")
def cleanup():
    # Delete any existing folders
    existingDirs = os.path.join(appWorkspace.path, '')
    for dir in os.listdir(existingDirs):
        shutil.rmtree(os.path.join(existingDirs, dir), ignore_errors=True)

    # Delete stores from geoserver
    cat = Catalog(cfg.geoserver['rest_url'],
                  username=cfg.geoserver['user'],
                  password=cfg.geoserver['password'])
    stores = cat.get_stores(workspace=cfg.geoserver['workspace'])
    for store in stores:
        cat.delete(store, recurse=True)
    cat.reload()
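cleanup() reads its connection details from a cfg module. A minimal sketch of the cfg.geoserver mapping it assumes; the URL, credentials, and workspace name below are illustrative values, not the real configuration.

# Sketch of the cfg.geoserver mapping read by cleanup(); values illustrative.
geoserver = {
    'rest_url': 'http://localhost:8080/geoserver/rest',  # illustrative
    'user': 'admin',                                     # illustrative
    'password': 'geoserver',                             # illustrative
    'workspace': 'my_workspace',                         # hypothetical name
}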
def pre_delete_service(instance, sender, **kwargs):
    for layer in instance.layer_set.all():
        layer.delete()
    # if instance.method == 'H':
    #     gn = Layer.objects.gn_catalog
    #     gn.control_harvesting_task('stop', [instance.external_id])
    #     gn.control_harvesting_task('remove', [instance.external_id])
    if instance.method == 'C':
        try:
            _user = settings.OGC_SERVER['default']['USER']
            _password = settings.OGC_SERVER['default']['PASSWORD']
            gs = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                         _user, _password)
            cascade_store = gs.get_store(instance.name,
                                         settings.CASCADE_WORKSPACE)
            gs.delete(cascade_store, recurse=True)
        except FailedRequestError:
            logger.error(
                "Could not delete cascading WMS Store for %s - maybe already gone"
                % instance.name)
def deleteTempLayersGeoserver(cf):
    cat = Catalog(
        "{}/rest".format(cf.get("GeoServer", "host")),
        username=cf.get("GeoServer", "user"),
        password=cf.get("GeoServer", "pass"),
    )
    my_resources = cat.get_resources(stores=["temp"], workspaces=["ra2ce-dr"])
    counter = 0
    for resource in my_resources:
        name = resource.name
        layer = cat.get_layer(name)
        # check if the layer exists
        if layer is not None:
            logging.info("deleted: %s", name)
            cat.delete(layer)
            counter += 1
    return counter
def fh_style_update(layer, filename):
    cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + 'rest',
                  username=settings.OGC_SERVER['default']['USER'],
                  password=settings.OGC_SERVER['default']['PASSWORD'])
    # initial run of script includes all fhm layers for cleaning of styles in GN + GS
    # layer_list = Layer.objects.filter(name__icontains='fh')
    # layer_list = Layer.objects.filter(name__icontains='fh').exclude(styles__name__icontains='fhm')
    # total_layers = len(layer_list)
    fhm_style = cat.get_style("fhm")
    ctr = 0
    # for layer in layer_list:
    #     print "[FH STYLE] {0}/{1} : {2} ".format(ctr, total_layers, layer.name)

    # delete thumbnail first because of permissions
    try:
        print "Layer thumbnail url: %s " % layer.thumbnail_url
        if "192" in settings.BASEURL:
            # if on local
            url = "geonode/uploaded/thumbs/layer-" + layer.uuid + "-thumb.png"
            os.remove(url)
        else:
            # if on lipad
            url = "/var/www/geonode/uploaded/thumbs/layer-" + layer.uuid + "-thumb.png"
            os.remove(url)

        gs_layer = cat.get_layer(layer.name)
        print "GS LAYER: %s " % gs_layer.name
        gs_layer._set_default_style(fhm_style)
        cat.save(gs_layer)  # save in geoserver
        ctr += 1

        gs_style = cat.get_style(layer.name)
        print "GS STYLE: %s " % gs_style.name
        print "Geoserver: Will delete style %s " % gs_style.name
        cat.delete(gs_style)  # erase in geoserver the default layer_list
        gn_style = Style.objects.get(name=layer.name)
        print "Geonode: Will delete style %s " % gn_style.name
        gn_style.delete()  # erase in geonode

        layer.sld_body = fhm_style.sld_body
        layer.save()  # save in geonode
    except Exception as e:
        print "%s" % e
        pass
def delete_map(request):
    map_id = request.POST.get('map_id', False)
    cat = Catalog(map_url, 'admin', 'geoserver')
    if cat.get_layer('Map:' + map_id):
        cat.delete(cat.get_layer('Map:' + map_id))
        cat.reload()
    for label_type in range(1, 8):
        if cat.get_layer('Mask:' + map_id + '_' + str(label_type)):
            cat.delete(cat.get_layer('Mask:' + map_id + '_' + str(label_type)))
            cat.reload()
    try:
        if cat.get_store(map_id):
            st = cat.get_store(map_id)
            cat.delete(st)
            cat.reload()
    except Exception:
        pass
    if Bmap.objects.filter(id=map_id):
        map_name = Bmap.objects.get(id=map_id).name
        dir_root = os.path.join(MAPBASEPATH, map_name)
        delete_files = (map_id + ".jpg", 'chaneltransform.tif',
                        'chaneltransform_rpc.txt', 'chaneltransformRPC.txt',
                        'chaneltransformRPC.tif.ovr', 'label.tif',
                        'label_rpc.txt', 'labelRPC.tif')
        for file in delete_files:
            # check the joined path (not the bare filename) before removing
            path = os.path.join(dir_root, file)
            if os.path.exists(path):
                os.remove(path)
        Bmap.objects.filter(id=map_id).delete()
    return HttpResponse("success")
def style_update(layer, style_template):
    cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + 'rest',
                  username=settings.OGC_SERVER['default']['USER'],
                  password=settings.OGC_SERVER['default']['PASSWORD'])
    try:
        layer_attrib = layer.attributes[0].attribute.encode("utf-8")
    except Exception as e:
        print "No layer attribute %s" % e

    ctr = 0
    if 'fh' in layer.name:
        style = None
        if layer_attrib == "Var":
            style = cat.get_style(style_template)
        else:
            style = cat.get_style("fhm_merge")
    else:
        style = cat.get_style(style_template)

    if style is not None:
        try:
            gs_layer = cat.get_layer(layer.name)
            print "GS LAYER: %s " % gs_layer.name
            gs_layer._set_default_style(style)
            cat.save(gs_layer)
            ctr += 1

            gs_style = cat.get_style(layer.name)
            if gs_style:
                print "GS STYLE: %s " % gs_style.name
                print "Geoserver: Will delete style %s " % gs_style.name
                cat.delete(gs_style)
                gn_style = Style.objects.get(name=layer.name)
                print "Geonode: Will delete style %s " % gn_style.name
                gn_style.delete()

            layer.sld_body = style.sld_body
            layer.save()
        except Exception as e:
            # print "%s" % e
            traceback.print_exc()
def deleteGeoserverStore(storeName, workspace=workspace, purge=None, recurse=True):
    """
    Delete a GeoServer data store.

    Args:
        storeName: name of the data store to delete.

    Kwargs (defaults):
        workspace (geocolorado): workspace the store belongs to.
        purge (None): if purge is True, the underlying data files are
            deleted as well.
        recurse (True): the store and all metadata items it contains
            will be deleted.
    """
    cat = Catalog("{0}/rest/".format(geoserver_connection),
                  geoserver_username, geoserver_password)
    ws = cat.get_workspace(workspace)
    ds = cat.get_store(storeName, workspace=ws)
    cat.delete(ds, purge=purge, recurse=recurse)
    msg = "metadata and data files removed." if purge else "only metadata items removed."
    return "DataStore: {0} deleted from geoServer with {1}".format(storeName, msg)
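A hypothetical call, assuming the module-level `geoserver_connection`, `geoserver_username`, `geoserver_password`, and default `workspace` referenced above are defined elsewhere in the module; the store name is illustrative.

# Hypothetical usage; 'roads_store' is an illustrative store name.
result = deleteGeoserverStore('roads_store', purge=True)
# result: "DataStore: roads_store deleted from geoServer with metadata and data files removed."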
def remove_geoserver_layer(self, storename, layername):
    self.logger.debug("start removing the geoserver layer")
    if self.geoServer.endswith('/'):
        geoserver_rest = self.geoServer + 'rest'
    else:
        geoserver_rest = self.geoServer + '/rest'
    cat = Catalog(geoserver_rest, username=self.gs_username,
                  password=self.gs_password)
    # worksp = cat.get_workspace(gs_workspace)
    store = cat.get_store(storename)
    self.logger.debug("store name %s" % store)
    layer = cat.get_layer(layername)
    self.logger.debug("layer name %s" % layer)
    try:
        self.logger.debug("deleting the layer...")
        cat.delete(layer)
        cat.reload()
        self.logger.debug("deleting the store...")
        cat.delete(store)
        cat.reload()
    except Exception:
        self.logger.error("Failed to remove from geoserver")
class UploaderTests(DjagnoOsgeoMixin):
    """
    Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {
            'database': settings['NAME'],
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': "True"
        }

        store = catalog.create_datastore(settings['NAME'],
                                         workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):
        if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..',
                                           'importer-test-files')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user(
            'admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user(
            'non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest,
                           *ogc_server_settings.credentials)
        if self.cat.get_workspace('geonode') is None:
            self.cat.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):
        f = file
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files', f)
        res = self.import_file(filename,
                               configuration_options=configuration_options)
        layer_results = []

        for result in res:
            if result[1].get('raster'):
                layerfile = result[0]
                layername = os.path.splitext(os.path.basename(layerfile))[0]
                layer = Layer.objects.get(name=layername)
                self.assertTrue(layerfile.endswith('.tif'))
                self.assertTrue(os.path.exists(layerfile))
                l = gdal.OpenEx(layerfile)
                self.assertTrue(l.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')

                if not filename.endswith('zip'):
                    self.assertTrue(layer.attributes.count() >=
                                    DataSource(filename)[0].num_fields)

                layer_results.append(layer)

        return layer_results[0]

    def generic_raster_import(self, file, configuration_options=[{'index': 0}]):
        f = file
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files', f)
        res = self.import_file(filename,
                               configuration_options=configuration_options)
        layerfile = res[0][0]
        layername = os.path.splitext(os.path.basename(layerfile))[0]
        layer = Layer.objects.get(name=layername)
        self.assertTrue(layerfile.endswith('.tif'))
        self.assertTrue(os.path.exists(layerfile))
        l = gdal.OpenEx(layerfile)
        self.assertTrue(l.GetDriver().ShortName, 'GTiff')
        return layer

    def test_raster(self):
        """
        Tests raster import
        """
        layer = self.generic_raster_import(
            'test_grid.tif', configuration_options=[{'index': 0}])

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """
        layer = self.generic_import(
            'boxes_with_year_field.shp',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """
        layer = self.generic_import(
            'boxes_with_date.shp',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date'],
                                    'start_date': 'date',
                                    'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """
        Tests the import of test_boxes_with_date.gpkg.
        """
        layer = self.generic_import(
            'boxes_with_date.gpkg',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date'],
                                    'start_date': 'date',
                                    'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """
        Tests the import of a multilayer vector + raster geopackage using index
        """
        layer = self.generic_import(
            'boxes_plus_raster.gpkg',
            configuration_options=[
                {'index': 0,
                 'convert_to_date': ['date'],
                 'start_date': 'date',
                 'configureTime': True},
                {'index': 1},
                {'index': 2},
                {'index': 3},
                {'index': 4},
                {'index': 5},
                {'index': 6},
                {'index': 7},
            ])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """
        layer = self.generic_import(
            'boxes_with_date.csv',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """
        Test csv that has some rows without geoms and uses ISO-8859
        (Latin 4) encoding.
        """
        self.generic_import('missing-features.csv',
                            configuration_options=[{'index': 0}])

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import(
            'boxes_with_date_iso_date.shp',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """
        Tests importing the same layer twice to ensure incrementing file
        names is properly handled.
        """
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files',
                                'boxes_with_date_iso_date.zip')

        gi = OGRImport(filename)
        layers1 = gi.handle({'index': 0, 'name': 'test'})
        layers2 = gi.handle({'index': 0, 'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """
        Ensure the launder function works as expected.
        """
        self.assertEqual(launder('tm_world_borders_simpl_0.3'),
                         'tm_world_borders_simpl_0_3')
        self.assertEqual(launder('Testing#'), 'testing_')
        self.assertEqual(launder(' '), '_')

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import(
            'boxes_with_date_iso_date.zip',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """
        layer = self.generic_import(
            'boxes_with_dates_bc.shp',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson
        """
        layer = self.generic_import(
            'point_with_date.geojson',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date']}])

        # OGR will name geojson layers 'ogrgeojson'; we rename to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import(
            'boxes_with_end_date.shp',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date', 'enddate'],
                                    'start_date': 'date',
                                    'end_date': 'enddate',
                                    'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        end_date_attr = filter(lambda attr: attr.attribute == 'enddate_as_date',
                               layer.attributes)[0]

        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute,
                                end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """
        Tests the import of us_states_kml.
        This layer has a date and an end date field that are typed correctly.
        """
        # TODO: Support time in kmls.
        layer = self.generic_import('us_states.kml',
                                    configuration_options=[{'index': 0}])

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import(
            'mojstrovka.gpx',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['time'],
                                    'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'time_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name, store=layer.store,
                                         workspace=self.workspace)

        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)

    def test_us_shootings_csv(self):
        """
        Tests the import of US_Shootings.csv.
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')

        filename = 'US_Shootings.csv'
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', filename)
        layer = self.generic_import(
            filename,
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['Date']}])
        self.assertTrue(layer.name.startswith('us_shootings'))

        date_field = 'date'
        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import(
            "US_Civil_Rights_Sitins0.csv",
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['Date']}])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', filename)
        self.generic_import(
            filename,
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date']}])

    def test_wfs(self):
        """
        Tests the import from a WFS Endpoint
        """
        wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
        gi = OGRImport(wfs)
        layers = gi.handle(configuration_options=[
            {'layer_name': 'og:bugsites'},
            {'layer_name': 'topp:states'}
        ])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """
        Tests the import from an ArcGIS JSON endpoint
        """
        endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
        gi = OGRImport(endpoint)
        layers = gi.handle(configuration_options=[{'index': 0}])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = OGRImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)

        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(name=name)
        url = datastore.href.replace(".xml", '/featuretypes.xml')
        headers, response = catalog.http.request(url, "POST", data, headers)
        return response

    def test_file_add_view(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()

        # test login required for this view
        request = c.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path,
                         reverse('uploads-list'))
        self.assertTrue(len(response.context['object_list']) == 1)

        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'empty_file.geojson')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new-json'), {'file': fp},
                              follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """
        Tests the describe fields functionality.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'US_Shootings.csv')

        with GDALInspector(f) as f:
            layers = f.describe_fields()

        self.assertTrue(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']],
                         ['Date', 'Shooter', 'Killed', 'Wounded', 'Location',
                          'City', 'Longitude', 'Latitude'])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """
        Tests the GDAL file type detection.
        """
        files = [
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'US_Shootings.csv'), 'CSV'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'point_with_date.geojson'),
             'GeoJSON'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'mojstrovka.gpx'), 'GPX'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'us_states.kml'), 'KML'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'boxes_with_year_field.shp'),
             'ESRI Shapefile'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files',
                          'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'),
        ]
        from .models import NoDataSourceFound
        try:
            for path, file_type in files:
                with GDALInspector(path) as f:
                    self.assertEqual(f.file_type(), file_type)
        except NoDataSourceFound as e:
            print 'No data source found in: {0}'.format(path)
            raise e

    def test_configure_view(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True,
            'permissions': {
                'users': {
                    'test': new_user_perms,
                    'AnonymousUser': ["change_layer_data",
                                      "download_resourcebase",
                                      "view_resourcebase"]
                }
            }
        }]

        response = c.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json')

        self.assertTrue(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username,
                         self.non_admin_username)

        perms = layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_username)

        # check user permissions
        for perm in [u'publish_resourcebase',
                     u'change_resourcebase_permissions',
                     u'delete_resourcebase', u'change_resourcebase',
                     u'change_resourcebase_metadata',
                     u'download_resourcebase', u'view_resourcebase',
                     u'change_layer_style', u'change_layer_data']:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn('change_resourcebase_permissions',
                      perms['users'][new_user])
        self.assertIn("change_layer_data",
                      perms['users'][User.objects.get(username='******')])

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)

    def test_configure_view_convert_date(self):
        """
        Tests the configure view with a dataset that needs to be converted
        to a date.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'US_Shootings.csv')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['Date'],
            'start_date': 'Date',
            'configureTime': True,
            'editable': True
        }]

        response = c.get(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 405)

        response = c.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 400)

        response = c.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json')
        self.assertTrue(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(layer.attributes.filter(attribute='date'),
                        'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)

        # ensure a user who does not own the upload cannot configure an
        # import from it.
        c.logout()
        c.login_as_admin()
        response = c.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from osgeo_importer.models import UploadFile

        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'US_Shootings.csv')

        with open(f, 'rb') as f:
            uploaded_file = SimpleUploadedFile('test_data', f.read())

        admin_upload = UploadedData.objects.create(state='Admin test',
                                                   user=admin)
        admin_upload.uploadfile_set.add(
            UploadFile.objects.create(file=uploaded_file))

        non_admin_upload = UploadedData.objects.create(state='Non admin test',
                                                       user=non_admin)
        non_admin_upload.uploadfile_set.add(
            UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """
        Ensure users can delete their data.
        """
        c = AdminClient()

        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """
        Ensure that administrators can delete data that isn't theirs.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))

        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """
        Tests providing a name in the configuration options.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'name': name,
            'editable': True
        }
        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """
        Tests the import api.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }

        self.assertTrue(isinstance(
            UploadLayer.objects.first().configuration_options, dict))

        response = c.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps([payload]),
                          content_type='application/json')
        self.assertEqual(response.status_code, 200)
        self.assertTrue('task' in response.content)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """
        Test the file extension validator.
        """
        for extension in load_handler(OSGEO_IMPORTER,
                                      'test.txt').valid_extensions:
            self.assertIsNone(validate_file_extension(
                SimpleUploadedFile('test.{0}'.format(extension), '')))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """
        Test that input without a recognizable geometry column is rejected.
        """
        with self.assertRaises(ValidationError):
            validate_inspector_can_read(
                SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(
            SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """
        Regression test for numeric field overflows in shapefiles.
        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import(
            'Walmart.zip',
            configuration_options=[{"configureTime": False,
                                    "convert_to_date": ["W1_OPENDAT"],
                                    "editable": True,
                                    "index": 0,
                                    "name": "Walmart",
                                    "start_date": "W1_OPENDAT"}])

    def test_multipolygon_shapefile(self):
        """
        Tests shapefile with multipart polygons.
        """
        self.generic_import('PhoenixFirstDues.zip',
                            configuration_options=[{'index': 0}])

    def test_non_4326_SR(self):
        """
        Tests a shapefile with a non-EPSG:4326 spatial reference.
        """
        res = self.generic_import('Istanbul.zip',
                                  configuration_options=[{'index': 0}])
        featuretype = self.cat.get_resource(res.name)
        self.assertEqual(featuretype.projection, 'EPSG:32635')

    def test_gwc_handler(self):
        """
        Tests the GeoWebCache handler
        """
        layer = self.generic_import(
            'boxes_with_date.shp',
            configuration_options=[{'index': 0,
                                    'convert_to_date': ['date'],
                                    'start_date': 'date',
                                    'configureTime': True}])

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.cat.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertTrue('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{'index': 0}])
        gs_layer = self.cat.get_layer(layer.name)
        self.cat._cache.clear()
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertFalse('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """
        Tests utf8 characters in attributes
        """
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files', 'china_provinces.shp')
        layer = self.generic_import('china_provinces.shp')
        gi = OGRImport(filename)
        ds, insp = gi.open_target_datastore(gi.target_store)
        sql = str("select NAME_CH from %s where NAME_PY = 'An Zhou'"
                  % (layer.name))
        res = ds.ExecuteSQL(sql)
        feat = res.GetFeature(0)
        self.assertEqual(feat.GetField('name_ch'), "安州")

    def test_non_converted_date(self):
        """
        Test converting a field as date.
        """
        results = self.generic_import(
            'TM_WORLD_BORDERS_2005.zip',
            configuration_options=[{'index': 0,
                                    'start_date': 'Year',
                                    'configureTime': True}])
        layer = self.cat.get_layer(results.typename)
        self.assertTrue('time' in layer.resource.metadata)
        self.assertEqual('year', layer.resource.metadata['time'].attribute)

    def test_fid_field(self):
        """
        Regression test for preserving an FID field when target layer
        supports it but source does not.
        """
        self.generic_import('noaa_paleoclimate.zip',
                            configuration_options=[{'index': 0}])

    def test_csv_with_wkb_geometry(self):
        """
        Tests problems with CSV files with multiple geometries.
        """
        files = ['police_csv.csv', 'police_csv_nOGC.csv',
                 'police_csv_noLatLon.csv', 'police_csv_WKR.csv',
                 'police_csv_nFID.csv', 'police_csv_nOGFID.csv',
                 'police_csv_noWKB.csv']

        for i in files:
            self.generic_import(i, {
                "configureTime": True,
                "convert_to_date": ["date_time"],
                "editable": True,
                "index": 0,
                "name": i.lower(),
                "permissions": {
                    "users": {
                        "AnonymousUser": ["change_layer_data",
                                          "download_resourcebase",
                                          "view_resourcebase"]
                    }
                },
                "start_date": "date_time",
            })
def processAlgorithm(self, parameters, context, model_feedback):
    """
    Process the algorithm

    :param parameters: parameters of the process
    :param context: context of the process
    :param model_feedback: feedback instance for the process
    :return: dictionary of algorithm results (empty on failure)
    """
    # Use a multi-step feedback, so that individual child algorithm progress
    # reports are adjusted for the overall progress through the model
    self.xml_path = parameters["XML_PATH"]
    if not self.xml_path.lower().endswith(".xml"):
        feedback = QgsProcessingMultiStepFeedback(0, model_feedback)
        feedback.reportError("XML Workspace Definition is not an XML file!",
                             True)
        return {}

    db_name = parameters["DB_NAME"]
    db_params = self.get_db_params(db_name)
    store_name = parameters["GS_STORE"]
    wrk_name = parameters["GS_WORKSPACE"]

    dataset_list = []
    datasets = self.getDatasets()
    for dataset in datasets:
        type = dataset.getElementsByTagName(TAG_DE_TYPE)[0].childNodes[0].data
        if type == "esriDTFeatureClass":
            ds_name = dataset.getElementsByTagName(
                TAG_DE_NAME)[0].childNodes[0].data
            dataset_list.append({"name": ds_name.lower(), "srs": "EPSG:4326"})

    feedback = QgsProcessingMultiStepFeedback(2 + len(dataset_list),
                                              model_feedback)
    feedback.pushInfo("Get GeoServer Catalog: " + parameters["GS_REST_URL"])
    gs_catalogue = Catalog(parameters["GS_REST_URL"],
                           parameters["GS_ADMIN"],
                           parameters["GS_PASSWORD"])

    # workspace
    if wrk_name == "" or wrk_name is None:
        wrk_name = db_name.lower() + "_ws"
    wrk_uri = "http://" + wrk_name
    feedback.pushInfo("GeoServer Workspace: " + wrk_name + " (" + wrk_uri + ")")
    workspace = gs_catalogue.get_workspace(wrk_name)
    if workspace is None:
        workspace = gs_catalogue.create_workspace(wrk_name, wrk_uri)
    feedback.setCurrentStep(1)

    # store
    if store_name == "" or store_name is None:
        store_name = db_name.lower() + "_ds"
    feedback.pushInfo("GeoServer Data Store: " + store_name)
    store = gs_catalogue.get_store(store_name, workspace)
    if store is None:
        store = gs_catalogue.create_datastore(store_name, workspace)
        store.connection_parameters.update(**db_params)
        gs_catalogue.save(store)
    feedback.setCurrentStep(2)

    step = 2
    published_count = 0
    layer_prefix = "v_"
    for ds_cur in dataset_list:
        layer_name = layer_prefix + ds_cur["name"]
        feedback.pushInfo("GeoServer Publish: " + layer_name +
                          " (" + ds_cur["srs"] + ")")
        try:
            # delete any previously published layer before republishing;
            # Catalog.delete() commits the change immediately
            layer = gs_catalogue.get_layer(layer_name)
            if layer is not None:
                gs_catalogue.delete(layer)
            gs_catalogue.publish_featuretype(layer_name, store, ds_cur["srs"])
            published_count += 1
        except Exception as e:
            feedback.reportError("Error: " + str(e), False)
        step += 1
        feedback.setCurrentStep(step)

    gs_catalogue.reload()
    layers = gs_catalogue.get_layers(store)
    feedback.pushInfo("-" * 80)
    feedback.pushInfo("Published layers: " + str(published_count))
    for layer in layers:
        feedback.pushInfo(layer.name + " is published!")
    feedback.pushInfo("-" * 80)

    results = {}
    outputs = {}
    return results
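get_db_params is not shown above; judging from store.connection_parameters.update(**db_params), it presumably returns a PostGIS connection mapping. A minimal sketch with illustrative values, modeled on the create_datastore params used elsewhere in this collection; every key and value below is an assumption.

# Hypothetical sketch of get_db_params; values are illustrative only.
def get_db_params(self, db_name):
    return {
        'host': 'localhost',   # illustrative
        'port': '5432',        # illustrative
        'database': db_name,
        'user': 'postgres',    # illustrative
        'passwd': 'postgres',  # illustrative
        'dbtype': 'postgis',
    }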
class UploaderTests(MapStoryTestMixin): """ Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): settings = connection.settings_dict params = { 'database': settings['NAME'], 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': "True" } store = catalog.create_datastore(settings['NAME'], workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(settings['NAME']) def setUp(self): if not os.path.exists( os.path.join(os.path.split(__file__)[0], 'test_ogr')): self.skipTest('Skipping test due to missing test data.') # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.username, self.password = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_username, self.non_admin_password = self.create_user( 'non_admin', 'non_admin') self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.cat) def tearDown(self): """ Clean up geoserver. """ self.cat.delete(self.datastore, recurse=True) def generic_import(self, file, configuration_options=[{'index': 0}]): f = file filename = os.path.join(os.path.dirname(__file__), 'test_ogr', f) res = self.import_file(filename, configuration_options=configuration_options) layer = Layer.objects.get(name=res[0][0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not filename.endswith('zip'): self.assertTrue( layer.attributes.count() >= DataSource(filename)[0].num_fields) # make sure we have at least one dateTime attribute self.assertTrue('xsd:dateTime' or 'xsd:date' in [n.attribute_type for n in layer.attributes.all()]) return layer def test_box_with_year_field(self): """ Tests the import of test_box_with_year_field. """ layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """ Tests the import of test_boxes_with_date. """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """ Tests a CSV with WKT polygon. 
""" layer = self.generic_import('boxes_with_date.csv', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_iso_date(self): """ Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_iso_date_zip(self): """ Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """ Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """ Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import('point_with_date.geojson', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'] }]) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """ Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('boxes_with_end_date.shp') date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] end_date_attr = filter(lambda attr: attr.attribute == 'enddate', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:date') self.assertEqual(end_date_attr.attribute_type, 'xsd:date') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) def test_us_states_kml(self): """ Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. 
        layer = self.generic_import('us_states.kml')

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        This layer has a time field that is typed correctly.
        """
        layer = self.generic_import('mojstrovka.gpx')
        date_attr = filter(lambda attr: attr.attribute == 'time',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name, store=layer.store,
                                         workspace=self.workspace)
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)

    def test_us_shootings_csv(self):
        """
        Tests the import of US_shootings.csv.
        """
        filename = 'US_shootings.csv'
        layer = self.generic_import(
            filename,
            configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])
        self.assertEqual(layer.name, 'us_shootings')

        date_field = 'date_as_date'
        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv.
        """
        layer = self.generic_import(
            'US_Civil_Rights_Sitins0.csv',
            configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        self.generic_import(
            filename,
            configuration_options=[{'index': 0, 'convert_to_date': ['date']}])

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = GDALImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)
        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(
            name=name)
        url = datastore.href.replace(".xml", '/featuretypes.xml')
        # the HTTP verb must be "POST" (no trailing space)
        response, content = catalog.http.request(url, "POST", data, headers)
        return content

    def test_create_vrt(self):
        """
        Tests the create_vrt function.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'US_shootings.csv')

        vrt = create_vrt(f)
        vrt.seek(0)
        output = vrt.read()

        self.assertTrue('name="US_shootings"' in output)
        self.assertTrue(
            '<SrcDataSource>{0}</SrcDataSource>'.format(f) in output)
        self.assertTrue(
            '<GeometryField encoding="PointFromColumns" x="Longitude" y="Latitude" />'
            in output)
        self.assertEqual(os.path.splitext(vrt.name)[1], '.vrt')

    def test_file_add_view(self):
        """
        Tests the file_add_view.
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() # test login required for this view request = c.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertTrue(len(response.context['object_list']) == 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'empty_file.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_describe_fields(self): """ Tests the describe fields functionality. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv') fields = None with GDALInspector(f) as f: layers = f.describe_fields() self.assertTrue(layers[0]['name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], [ 'Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude' ]) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """ Tests the describe fields functionality. """ files = ( (os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv'), 'CSV'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson'), 'GeoJSON'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'mojstrovka.gpx'), 'GPX'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_states.kml'), 'KML'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_year_field.shp'), 'ESRI Shapefile'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'), ) for path, file_type in files: with GDALInspector(path) as f: self.assertEqual(f.file_type(), file_type) def test_configure_view(self): """ Tests the configuration view. 
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True }] response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username) lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) def test_configure_view_convert_date(self): """ Tests the configure view with a dataset that needs to be converted to a date. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True }] response = c.get('/importer-api/data-layers/{0}/configure/'.format( upload.id)) self.assertEqual(response.status_code, 405) response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id)) self.assertEqual(response.status_code, 400) response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(layer.attributes.filter(attribute='date_as_date'), 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. 
c.logout() c.login_as_admin() response = c.post('/importer-api/data-layers/{0}/configure/'.format( upload.id)) self.assertEqual(response.status_code, 404) def test_list_api(self): c = AdminClient() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.username) non_admin = User.objects.get(username=self.non_admin_username) from .models import UploadFile f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv') with open(f, 'rb') as f: uploaded_file = SimpleUploadedFile('test_data', f.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create( state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) c.login_as_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = c.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): c = AdminClient() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """ Ensure users can delete their data. """ c = AdminClient() f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """ Ensure that administrators can delete data that isn't theirs. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) c.logout() c.login_as_admin() id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """ Tests providing a name in the configuration options. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() name = 'point-with-a-date' with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True } response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json') layer = Layer.objects.all()[0] self.assertEqual(layer.title, name.replace('-', '_')) def test_api_import(self): """ Tests the import api. 
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True } self.assertTrue( isinstance(UploadLayer.objects.first().configuration_options, dict)) response = c.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertTrue('task' in response.content) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """ Test the file extension validator. """ for extension in IMPORTER_VALID_EXTENSIONS: self.assertIsNone( validate_file_extension( SimpleUploadedFile('test.{0}'.format(extension), ''))) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """ Test the file extension validator. """ with self.assertRaises(ValidationError): validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """ Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configuration_options=[{ 'index': 0, 'convert_to_date': [] }]) def test_outside_bounds_regression(self): """ Regression where layers with features outside projection bounds fail. """ self.generic_import('Spring_2015.zip', configuration_options=[{ 'index': 0 }]) resource = self.cat.get_layer('spring_2015').resource self.assertEqual(resource.latlon_bbox, ('-180.0', '180.0', '-90.0', '90.0', 'EPSG:4326')) def test_gwc_handler(self): """ Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.cat.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertTrue('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{ 'index': 0 }]) gs_layer = self.cat.get_layer(layer.name) self.cat._cache.clear() gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertFalse('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200)
project = cfg['general']['projectName'] offline = False error, geoserver_url = utils.checkConnection( cfg['geoserver']['url'] + "/rest/", cfg['geoserver']['user'], cfg['geoserver']['password']) if error: logging.warning("Could not connect to geoserver, will only delete folders") offline = True logging.info("Deleting Project " + project) # Delete Project Folders if os.path.isdir(cfg['frontend']['path'] + '/projects/' + project + '/'): shutil.rmtree(cfg['frontend']['path'] + '/projects/' + project) logging.info("Deleted Frontend folder " + project) makeMap.createProjectHandling() logging.info("Project Handling Updated") else: logging.warning("Could not find Frontend folder " + cfg['frontend']['path'] + '/projects/' + project) # Delete Geoserver Workspaces if not offline: cat = Catalog(geoserver_url, cfg['geoserver']['user'], cfg['geoserver']['password']) if cat.get_workspace(project): cat.delete(cat.get_workspace(project), purge="all", recurse=True) logging.info("Deleted workspace " + project) else: logging.warning("Could not find workspace " + project)
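# The script above relies on utils.checkConnection(), which is defined
# elsewhere. A minimal sketch of such a helper, assuming the (error, url)
# return shape inferred from the call site; the use of `requests` here is an
# illustration, not the project's actual implementation.
import requests

def check_connection(url, user, password, timeout=5):
    """Return (error, url); error is True when the GeoServer REST API is unreachable."""
    try:
        resp = requests.get(url, auth=(user, password), timeout=timeout)
        return resp.status_code != 200, url
    except requests.RequestException:
        return True, url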
class wrap_geoserver: """ Geoserver (gsconfig) wrapper """ def __init__(self, geoserver_name, username=username, password=password, easy=False): if geoserver_name in list(REST.keys()): self.path = REST[geoserver_name] else: self.path = geoserver_name self.wms = self.path.replace("rest/", "wms") self.name = geoserver_name self.catalog = Catalog(self.path, username, password) if not easy: self.layers = [] self.layer_names = [] for layer in self.catalog.get_layers(): self.layers.append(layer) self.layer_names.append(layer.name) self.stores = [store for store in self.catalog.get_stores()] self.store_names = [store.name for store in self.stores] styles = [] self.workspaces = [] self.workspace_names = [] for workspace in self.catalog.get_workspaces(): styles = styles + self.catalog.get_styles(workspace) self.workspace_names.append(workspace._name) self.workspaces.append(workspace) self.styles = styles + [ style for style in self.catalog.get_styles() ] self.style_names = [style.name for style in self.styles] def unpack(self, workspace_name, store_type="datastore"): layers_and_styles = {} features = [] workspace = self.get_workspace(workspace_name) if store_type == "datastore": store_url = workspace.datastore_url elif store_type == "coveragestore": store_url = workspace.coveragestore_url else: print("No correct store given") for datastore in tqdm(get(store_url, "name")): url = "{}workspaces/{}/datastores/{}".format( self.path, workspace.name, datastore) features = features + get(url, between_quotes=True) for feature in features: layer_name = os.path.basename(feature).split(".")[0] self.get_layer(self.get_slug(workspace.name, layer_name)) layers_and_styles[layer_name] = self.layer.default_style setattr(self, workspace_name + "_data", layers_and_styles) return layers_and_styles def get_layer(self, layer, easy=False): self.layer = self.catalog.get_layer(layer) if not easy: self.resource = self.layer.resource self.layer_name = self.layer.resource.name self.sld_name = self.layer.default_style.name self.sld_body = self.layer.default_style.sld_body self.layer_latlon_bbox = self.layer.resource.latlon_bbox self.layer_title = self.layer.resource.title self.layer_abstract = self.layer.resource.abstract def get_store(self, layer): self.store = self.layer.resource._store def get_resource(self): self.resource = self.catalog.get_resource(self.layer.name, self.store) def get_workspace(self, workspace_name): self.workspace = self.catalog.get_workspace(workspace_name) self.workspace_name = self.workspace._name return self.workspace def write_abstract(self, data, load_resource=True): if load_resource: self.get_resource() self.resource.abstract = data self.catalog.save(self.resource) def write_title(self, title): self.resource.title = title self.catalog.save(self.resource) def get_connection_parameters(self): self.get_resource() return self.resource.store.connection_parameters def create_workspace(self, workspace_name): workspace_exists = workspace_name in self.workspace_names if not workspace_exists: self.workspace = self.catalog.create_workspace(workspace_name) else: print("workspace already exists, using existing workspace") self.workspace = self.catalog.get_workspace(workspace_name) self.workspace_name = workspace_name def create_postgis_datastore(self, store_name, workspace_name, pg_data): try: self.store = self.catalog.get_store(store_name, self.workspace_name) print("store within workspace exists, using existing store") except Exception as e: print(e) ds = self.catalog.create_datastore(store_name, 
workspace_name) ds.connection_parameters.update( host=pg_data["host"], port=pg_data["port"], database=pg_data["database"], user=pg_data["username"], passwd=pg_data["password"], dbtype="postgis", schema="public", ) self.save(ds) self.store = self.catalog.get_store(store_name, self.workspace_name) self.store_name = store_name def publish_layer(self, layer_name, workspace_name, overwrite=False, epsg="3857", reload=False): layer_exists = layer_name in self.layer_names # if layer_name in self.workspace_layers[workspace_name]: slug = self.get_slug(workspace_name, layer_name) if overwrite and layer_exists: print("Layer exists, deleting layer") try: self.layer = self.catalog.get_layer(slug) self.delete(self.layer) self.reload() layer_exists = False except Exception as e: print(e) print("Layer does not exist in workspace") layer_exists = False if not layer_exists: feature_type = self.catalog.publish_featuretype( layer_name, self.store, "EPSG:{}".format(str(epsg)), srs="EPSG:{}".format(str(epsg)), ) self.save(feature_type) self.feature_type = feature_type else: print("layer already exists, using existing layer") if reload: self.get_layer(slug) self.layer_name = layer_name def publish_layergroup(self, name, layers, styles=(), bounds=None, workspace=None): layer_group = self.catalog.create_layergroup(name, layers, styles, bounds, workspace) self.save(layer_group) def save(self, save_object): return self.catalog.save(save_object) def close(self): self.catalog = None def delete(self, delete_object): self.catalog.delete(delete_object) def reload(self): self.catalog.reload() def upload_shapefile(self, layer_name, shapefile_path): path = shapefile_path.split(".shp")[0] shapefile = shapefile_and_friends(path) ft = self.catalog.create_featurestore(layer_name, shapefile, self.workspace) self.save(ft) def upload_sld(self, sld_name, workspace_name, sld, overwrite=True): style_exists = sld_name in self.style_names if overwrite and style_exists: print("Overwriting style") style = self.catalog.get_style(sld_name, workspace_name) self.delete(style) self.reload() style_exists = False if not style_exists: try: self.catalog.create_style(sld_name, sld, False, workspace_name, "sld11") except Exception as e: print(e) style = self.catalog.get_style(sld_name, workspace_name) self.delete(style) self.reload() self.catalog.create_style(sld_name, sld, False, workspace_name, "sld10") self.style_name = sld_name else: if style_exists: print("Style already exists, using current style") self.style_name = sld_name def set_sld_for_layer(self, workspace_name=None, style_name=None, use_custom=False): if not use_custom: workspace_name = self.workspace_name style_name = self.style_name self.style_slug = self.get_slug(workspace_name, style_name) else: if workspace_name is None: self.style_slug = style_name else: self.style_slug = self.get_slug(workspace_name, style_name) self.style = self.catalog.get_style(self.style_slug) print("Setting {} for {}".format(self.style.name, self.layer.name)) self.layer.default_style = self.style self.save(self.layer) def get_slug(self, workspace, name): return "{}:{}".format(workspace, name) def get_slug_data(self, slug): workspace_name = slug.split(":")[0] layer_name = slug.split(":")[1] return workspace_name, layer_name def get_sld(self, layer_slug=None): if layer_slug is None: self.style = self.catalog.get_style(self.layer_slug) else: self.style = self.catalog.get_style(layer_slug) self.sld_body = self.style.sld_body return self.sld_body def get_layer_workspace(self, layer_name): return 
self.catalog.get_layer(layer_name).resource.workspace.name
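# Hypothetical usage of wrap_geoserver, sketched under the assumption of a
# reachable local GeoServer; the URL, credentials, workspace/layer names and
# pg_data values are placeholders. pg_data mirrors the keys read by
# create_postgis_datastore() above.
server = wrap_geoserver("http://localhost:8080/geoserver/rest/",
                        username="admin", password="geoserver")
server.create_workspace("demo")
pg_data = {"host": "localhost", "port": "5432", "database": "gisdb",
           "username": "postgres", "password": "secret"}
server.create_postgis_datastore("demo_store", "demo", pg_data)
server.publish_layer("roads", "demo", overwrite=True, epsg="4326")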
class GeoGigUploaderBase(ImportHelper): def __init__(self, *args, **kwargs): super(GeoGigUploaderBase, self).__init__(*args, **kwargs) setUpModule() # this isn't available when being used in other module def setUp(self): self.admin_user = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_user = self.create_user('non_admin', 'non_admin') self.catalog = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.catalog.get_workspace('geonode') is None: self.catalog.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastoreNames = [] def tearDown(self): """Clean up geoserver/geogig catalog. """ # delete stores (will cascade to delete layers) for store_name in self.datastoreNames: self.catalog.delete(self.catalog.get_store(store_name), recurse=True) # delete repository reference in geoserver for store_name in self.datastoreNames: self.remove_geogig_repo(store_name) # geoserver can leave connections open - HACK HACK HACK self.free_geogig_connections() # HACK HACK HACK -- sometimes connections from geoserver to geogig are left open. This kills the postgresql backend! # this is a major hammer. Once geoserver/geogig are better at cleaning up, remove this. def free_geogig_connections(self): with db.connections["geogig"].cursor() as c: c.execute( "select pg_terminate_backend(pid) from pg_stat_activity where application_name = 'PostgreSQL JDBC Driver' or application_name='geogig'" ) # aggressive delete of the repo (mostly cleans up after itself) # call the geogig rest API DELETE def remove_geogig_repo(self, ref_name): username = ogc_server_settings.credentials.username password = ogc_server_settings.credentials.password url = ogc_server_settings.rest http = httplib2.Http(disable_ssl_certificate_validation=False) http.add_credentials(username, password) netloc = urlparse(url).netloc http.authorizations.append( httplib2.BasicAuthentication((username, password), netloc, url, {}, None, None, http)) rest_url = ogc_server_settings.LOCATION + \ "geogig/repos/" + ref_name + "/delete.json" resp, content = http.request(rest_url, 'GET') response = json.loads(content) token = response["response"]["token"] rest_url = ogc_server_settings.LOCATION + \ "geogig/repos/" + ref_name + "?token=" + token resp, content = http.request(rest_url, 'DELETE') # convenience method to load in the test dataset # return a (geonode) layer # the layer will be in Geoserver and Geonode # self.catalog.get_layer(layer.name) -- to get the Geoserver Layer def fully_import_file(self, path, fname, start_time_column, end_time_column=None): # setup time if end_time_column is None: time_config = { 'convert_to_date': [start_time_column], 'start_date': start_time_column, 'configureTime': True } else: time_config = { 'convert_to_date': [start_time_column, end_time_column], 'start_date': start_time_column, 'end_date': end_time_column, 'configureTime': True } name = os.path.splitext(fname)[0] + "_" + str(uuid.uuid1())[:8] self.datastoreNames.append(name) # remember for future deletion full_fname = os.path.join(path, fname) configs = self.prepare_file_for_import(full_fname) configs[0].update({'name': name}) configs[0].update({'layer_name': name}) configs[0].update(time_config) # configure the datastore/repo configs[0]['geoserver_store'] = {} configs[0]['geoserver_store']['type'] = 'geogig' configs[0]['geoserver_store']['name'] = name configs[0]['geoserver_store']['create'] = 'true' configs[0]['geoserver_store']['branch'] = 'master' configs[0]['geoserver_store'][ 
'geogig_repository'] = "geoserver://" + name result = self.generic_import(fname, path=path, configs=configs) return result def import_file(self, path, configs=None): """Imports the file. """ if configs is None: configs = [] self.assertTrue(os.path.exists(path), path) # run ogr2ogr ogr = OGRImport(path) layers = ogr.handle(configuration_options=configs) return layers def generic_import(self, filename, path, configs=None): if configs is None: configs = [{'index': 0}] path = os.path.join(path, filename) results = self.import_file(path, configs=configs) layer_results = [] for result in results: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertTrue(layer.store in self.datastoreNames) self.assertEqual(layer.storeType, 'dataStore') if not path.endswith('zip'): self.assertGreaterEqual(layer.attributes.count(), DataSource(path)[0].num_fields) layer_results.append(layer) return layer_results[0] def prepare_file_for_import(self, filepath): """ Prepares the file path provided for import; performs some housekeeping, uploads & configures the file. Returns a list of dicts of the form {'index': <layer_index>, 'upload_layer_id': <upload_layer_id>} these may be used as configuration options for importing all of the layers in the file. """ # Make a copy of the test file, as it's removed in configure_upload() filename = os.path.basename(filepath) tmppath = os.path.join('/tmp', filename) shutil.copy(filepath, tmppath) # upload & configure_upload expect closed file objects # This is heritage from originally being closely tied to a view passing request.Files of = open(tmppath, 'rb') of.close() files = [of] uploaded_data = self.upload(files, self.admin_user) self.configure_upload(uploaded_data, files) configs = [{ 'index': l.index, 'upload_layer_id': l.id } for l in uploaded_data.uploadlayer_set.all()] return configs def create_user(self, username, password, **kwargs): """Convenience method for creating users. """ user, created = User.objects.get_or_create(username=username, **kwargs) if created: user.set_password(password) user.save() return user
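# A sketch of how a concrete test case might build on GeoGigUploaderBase;
# the mix-in order with TestCase, the data directory and the time column are
# assumptions for illustration only (os and TestCase come from the
# surrounding test module's imports).
class ExampleGeoGigImportTest(GeoGigUploaderBase, TestCase):
    def test_point_with_date(self):
        layer = self.fully_import_file(
            os.path.join(os.path.dirname(__file__), 'test_ogr'),
            'point_with_date.geojson',
            'date')
        self.assertIsNotNone(self.catalog.get_layer(layer.name))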
def preprogress(id):
    def getUploadFile(uploadfolder):
        try:
            uploadfiles = os.listdir(uploadfolder)  # contents of the upload folder
            # descend into single-child folders until the real files appear
            while len(uploadfiles) == 1:
                uploadfolder = os.path.join(uploadfolder, uploadfiles[0])
                uploadfiles = os.listdir(uploadfolder)
            for file in uploadfiles:
                if file[-11:] == '_fusion.tif':  # fused (pan-sharpened) image
                    fusionname = file
                    capture_time = re.match(r'[\w\_\.]+_(\d{8})_', fusionname).group(1)
                    Bmap.objects.filter(id=id).update(
                        capture_time=time.strftime('%Y-%m-%d',
                                                   time.strptime(capture_time, '%Y%m%d')))
                # elif re.search(r'PAN\d.jpg', file):  # thumbnail
                #     thumbnailname = file
                # elif re.search(r'PAN\d.xml', file):  # XML
                #     XMLname = file
                elif re.search(r'_rpc.txt', file):  # RPC
                    rpcfile = file
                # elif re.search(r'MSS\d.rpb', file):
                #     rpbfile = file
            return uploadfolder, fusionname, rpcfile
        except Exception as e:
            return Exception("Upload failed, please check the map name!")

    #### Convert to an 8-bit, 3-band image and generate a thumbnail
    def chaneltransform():
        try:
            fusionimage = os.path.join(uploadfiles[0], uploadfiles[1])
            gdal.AllRegister()
            driver = gdal.GetDriverByName("GTiff")
            fusionimage = gdal.Open(fusionimage.encode('utf-8').decode(), gdal.GA_ReadOnly)
            im_width = fusionimage.RasterXSize
            im_height = fusionimage.RasterYSize
            transformimage = os.path.join(uploadfiles[0], "chaneltransform.tif")
            dstDS = driver.Create(transformimage, xsize=im_width, ysize=im_height,
                                  bands=3, eType=gdal.GDT_Byte)
            thumbnail = np.zeros(shape=(int(im_height * 0.02), int(im_width * 0.02), 3))
            for iband in range(1, 4):
                imgMatrix = fusionimage.GetRasterBand(iband).ReadAsArray(0, 0, im_width, im_height)
                # percentile stretch: clip just above the nodata fraction and at the 99th percentile
                zeros = np.size(imgMatrix) - np.count_nonzero(imgMatrix)
                minVal = np.percentile(imgMatrix, float(zeros / np.size(imgMatrix) * 100 + 0.15))
                maxVal = np.percentile(imgMatrix, 99)
                idx1 = imgMatrix < minVal
                idx2 = imgMatrix > maxVal
                idx3 = ~idx1 & ~idx2
                imgMatrix[idx1] = imgMatrix[idx1] * 20 / minVal
                imgMatrix[idx2] = 255
                idx1 = None
                idx2 = None
                imgMatrix[idx3] = pow((imgMatrix[idx3] - minVal) / (maxVal - minVal), 0.9) * 255
                # band order is reversed between the source image and the RGB output
                if iband == 1:
                    dstDS.GetRasterBand(3).WriteArray(imgMatrix)
                    dstDS.FlushCache()
                    thumbnail[:, :, 2] = cv2.resize(src=imgMatrix,
                                                    dsize=(thumbnail.shape[1], thumbnail.shape[0]))
                    imgMatrix = None
                elif iband == 2:
                    dstDS.GetRasterBand(2).WriteArray(imgMatrix)
                    dstDS.FlushCache()
                    thumbnail[:, :, 1] = cv2.resize(src=imgMatrix,
                                                    dsize=(thumbnail.shape[1], thumbnail.shape[0]))
                    imgMatrix = None
                else:
                    dstDS.GetRasterBand(1).WriteArray(imgMatrix)
                    dstDS.FlushCache()
                    thumbnail[:, :, 0] = cv2.resize(src=imgMatrix,
                                                    dsize=(thumbnail.shape[1], thumbnail.shape[0]))
                    imgMatrix = None
            fusionimage = None
            dstDS = None
            cv2.imwrite(os.path.join(uploadfiles[0], str(id) + '.jpg'), thumbnail)
            return transformimage
        except Exception as e:
            return Exception("Upload failed, image conversion error: " + str(e))

    def RPCOrthorectification(Alpha=True, is_label=False):
        try:
            if not is_label:
                orginalimage = os.path.join(uploadfiles[0], 'chaneltransform.tif')
                transform_rpc = os.path.join(uploadfiles[0], 'chaneltransform_rpc.txt')
            else:
                orginalimage = os.path.join(uploadfiles[0], 'label.tif')
                transform_rpc = os.path.join(uploadfiles[0], 'label_rpc.txt')
            origin_rpc = os.path.join(uploadfiles[0], uploadfiles[2])
            shutil.copyfile(origin_rpc, transform_rpc)
            # with open(rpbfile, 'r') as f:
            #     for line in f.readlines():
            #         hoffLine = re.search(r'heightOffset = ([\+|\-|\d]\d+\.?\d+)', line)
            #         if hoffLine:
            #             hoff = hoffLine.group(1)
            #             break
            #     f.close()
            # RpcHeight = "['RPC_HEIGHT=" + str(hoff) + "]'"
            # transformerOptions = RpcHeight
            if Alpha:
                warpOP = gdal.WarpOptions(dstSRS='WGS84', rpc=True, multithread=True,
                                          errorThreshold=0.0, creationOptions=['Tiled=yes'],
                                          resampleAlg=gdal.gdalconst.GRIORA_Bilinear,
                                          dstAlpha=True)
            else:
                warpOP = gdal.WarpOptions(dstSRS='WGS84',
                                          rpc=True, multithread=True,
                                          errorThreshold=0.0, creationOptions=['Tiled=yes'],
                                          resampleAlg=gdal.gdalconst.GRIORA_Bilinear,
                                          dstNodata=0)
            image = gdal.Open(orginalimage.encode('utf-8'), gdal.GA_ReadOnly)
            RPCOrthImage = os.path.join(uploadfiles[0],
                                        os.path.basename(orginalimage).replace(".tif", "RPC.tif"))
            srcDS = gdal.Warp(RPCOrthImage.encode('utf-8').decode(), image, options=warpOP)
            image = None
            srcDS = None
            return RPCOrthImage
        except Exception as e:
            return Exception("Upload failed, RPC orthorectification error: " + str(e))

    def buildOverviews():
        try:
            image = os.path.join(uploadfiles[0], 'chaneltransformRPC.tif')
            gdal.AllRegister()
            TransformDS = gdal.Open(image.encode('utf-8').decode(), gdal.GA_ReadOnly)
            Width = TransformDS.RasterXSize
            Height = TransformDS.RasterYSize
            PixelNum = Width * Height
            TopNum = 4096
            CurNum = PixelNum / 4
            anLevels = []
            nLevelCount = 0
            while CurNum > TopNum:
                anLevels.append(pow(2, nLevelCount + 2))
                nLevelCount += 1
                CurNum /= 4
            TransformDS.BuildOverviews(overviewlist=anLevels)
            cat = Catalog(map_url, 'admin', 'geoserver')
            wkspce = cat.get_workspace('Map')
            cat.create_coveragestore_external_geotiff(
                name=id,
                data='file://' + image.encode('utf-8').decode('utf-8'),
                workspace=wkspce)
            cat.reload()
            TransformDS = None
        except Exception as e:
            return Exception("Upload failed, error building image pyramids: " + str(e))

    def fit_by_contours(img, geotransfrom):
        geo = np.array([[geotransfrom[1], geotransfrom[4]],
                        [geotransfrom[2], geotransfrom[5]]])
        off = np.array([geotransfrom[0], geotransfrom[3]])
        kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 5))
        img = cv2.morphologyEx(img, cv2.MORPH_OPEN, kernel)
        img = cv2.morphologyEx(img, cv2.MORPH_CLOSE, kernel)
        img = cv2.morphologyEx(img, cv2.MORPH_OPEN, kernel)
        img = cv2.morphologyEx(img, cv2.MORPH_CLOSE, kernel)
        contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP,
                                               cv2.CHAIN_APPROX_NONE)[1:]
        contours = list(map(np.squeeze, contours))
        hierarchy = np.squeeze(hierarchy)
        contours = [LinearRing(np.dot(np.row_stack((single_contour, single_contour[0, :])),
                                      geo) + off)
                    for single_contour in contours]
        hole_exclude_linering = defaultdict(list)
        # exterior rings first, then any holes they contain
        for idx in np.argwhere(hierarchy[:, -1] == -1)[:, 0]:
            hole_exclude_linering[idx].append(contours[idx])
        # external_contours = [(idx, contours[idx]) for idx in np.argwhere(hierarchy[:, -1] == -1)[:, 0]]
        extern_linering_idx = np.argwhere(hierarchy[:, 2] != -1)[:, 0]
        hole_idx = [np.argwhere(hierarchy[:, -1] == idx)[:, 0]
                    for idx in extern_linering_idx]
        for e_id, h_id in zip(extern_linering_idx, hole_idx):
            holes = [contours[h] for h in h_id]
            hole_exclude_linering[e_id].extend(holes)
        return MultiPolygon([Polygon(*linering)
                             for linering in hole_exclude_linering.values()])

    def save_mask():
        try:
            ct = CoordTransform(SpatialReference('WGS84'), SpatialReference('4527'))
            label_path = os.path.join(uploadfiles[0], 'labelRPC.tif')
            dataset = gdal.Open(label_path)
            if dataset is None:  # check before touching the dataset (was checked after)
                return
            GeoTransform = dataset.GetGeoTransform()
            im_width = dataset.RasterXSize   # number of raster columns
            im_height = dataset.RasterYSize  # number of raster rows
            cood_trans = lambda L, C: (
                GeoTransform[0] + C * GeoTransform[1] + L * GeoTransform[2],
                GeoTransform[3] + C * GeoTransform[4] + L * GeoTransform[5])
            map_polygon = Polygon(LinearRing(
                cood_trans(0, 0), cood_trans(0, im_width),
                cood_trans(im_height, im_width), cood_trans(im_height, 0),
                cood_trans(0, 0)))
            Bmap.objects.filter(id=id).update(polygon=map_polygon)
            im_data = dataset.ReadAsArray(0, 0, im_width, im_height)  # read the raster data
            dataset = None
            types = np.unique(im_data)
            for label_type in types:
                # if label_type in (0,):
                #     continue
                mp = fit_by_contours((im_data == label_type).astype(np.uint8), GeoTransform)
                m = Mask(map=Bmap.objects.get(id=id), type_id=int(label_type), mask=mp,
                         area=round(mp.transform(ct, clone=True).area / 1000000, 2))
                m.save()
                # img[im_data == label_type] = 127
                # cv2.imwrite(str(label_type) + ".jpg", img)
                if label_type != 0:
                    payload = "<featureType><name>" + str(id) + '_' + str(m.type_id) + \
                              "</name><nativeName>myweb_mask</nativeName>" \
                              "<cqlFilter>type_id=" + str(m.type_id) + \
                              " and map_id=" + str(id) + "</cqlFilter></featureType>"
                    headers = {'Content-type': 'text/xml'}
                    resp = requests.post(mask_url, auth=('admin', 'geoserver'),
                                         data=payload, headers=headers)
                    if resp.status_code != 201:
                        raise Exception('Upload to geoserver error')
                    else:
                        cat = Catalog(map_url, 'admin', 'geoserver')
                        layer = cat.get_layer('Mask:' + str(id) + '_' + str(m.type_id))
                        layer.default_style = cat.get_style(str(label_type), 'Mask')
                        cat.save(layer)
                        cat.reload()
            return "Upload succeeded"
        except Exception as e:
            return Exception("Upload failed, error fitting mask polygons: " + str(e))

    # def makeDownload():
    #     try:
    #         cwd = os.getcwd()
    #         downloadpath = os.path.join(baseurl, str(id) + '.tar.gz')  # front-end download path
    #         downloadfile = tarfile.open(downloadpath, "w:gz")
    #         os.chdir(uploadfiles[0])
    #         downloadfile.add(uploadfiles[4], recursive=False)
    #         downloadfile.add(uploadfiles[5], recursive=False)
    #         os.chdir(tempfolder)
    #         for file in os.listdir(tempfolder):
    #             if ".json" in file:
    #                 downloadfile.add(file, recursive=False)
    #             if file == "chaneltransformRPC.tif":
    #                 downloadfile.add(file)
    #         downloadfile.close()
    #         os.chdir(cwd)
    #         return downloadpath
    #     except Exception:
    #         # if os.path.exists(downloadpath):
    #         #     os.remove(downloadpath)
    #         return "Upload failed, could not create the archive"

    # if not os.path.exists(baseurl):
    #     os.makedirs(baseurl)
    # uploadfiles = (os.path.join(MAPBASEPATH, 'GF2_PMS2_E117.4_N39.1_20170510_L1A0002351826'),)
    # result = detection.detection(uploadfiles[0] + "/", uploadfiles[1], uploadfiles[0])
    # if not isinstance(result, Exception):
    #     result = save_mask()
    # if not isinstance(result, Exception):
    #     result = save_mask()
    # if not isinstance(result, Exception):
    #     # result = makeDownload()
    #     return 'Upload succeeded'
    # buildOverviews()
    # RPCOrthorectification()

    #### production code
    # uploadfolder = os.path.join(MAPBASEPATH, 'GF2_PMS2_E117.4_N39.1_20170510_L1A0002351826')
    # uploadfiles = getUploadFile(uploadfolder)
    # RPCOrthorectification(Alpha=False, is_label=True)
    uploadfolder = os.path.join(MAPBASEPATH, Bmap.objects.get(id=id).name)
    uploadfiles = getUploadFile(uploadfolder)
    result = uploadfiles
    if not isinstance(result, Exception):
        result = chaneltransform()
    if not isinstance(result, Exception):
        result = RPCOrthorectification()
    if not isinstance(result, Exception):
        result = buildOverviews()
    if not isinstance(result, Exception):
        result = detection.detection(uploadfiles[0] + "/", uploadfiles[1], uploadfiles[0])
    if not isinstance(result, Exception):
        result = RPCOrthorectification(Alpha=False, is_label=True)
    if not isinstance(result, Exception):
        result = save_mask()
    if not isinstance(result, Exception):
        shutil.rmtree('./myweb/Detector/temp')
        return 'Upload succeeded'
    #### end of production code

    # Something failed: clean up GeoServer layers/stores and the database record.
    cat = Catalog(map_url, 'admin', 'geoserver')
    if cat.get_layer('Mask:' + str(id)):
        cat.delete(cat.get_layer('Mask:' + str(id)))
        cat.reload()
    for label_type in range(1, 8):
        if cat.get_layer('Mask:' + str(id) + '_' + str(label_type)):
            cat.delete(cat.get_layer('Mask:' + str(id) + '_' + str(label_type)))
            cat.reload()
    try:
        if cat.get_store(name=str(id), workspace='Map'):
            cat.delete(cat.get_store(name=str(id), workspace='Map'))
            cat.reload()
    except Exception:
        pass
    if os.path.exists('./myweb/Detector/temp'):
        shutil.rmtree('./myweb/Detector/temp')
    Bmap.objects.filter(id=id).delete()
    return str(result)
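# The per-band loop in chaneltransform() above implements a percentile
# contrast stretch. The same logic, isolated as a pure NumPy function for
# clarity; the 0.15 offset above the nodata fraction, the 99th-percentile
# ceiling and the 0.9 gamma all follow the original code.
import numpy as np

def stretch_band(band):
    """Stretch a single band to 8-bit using percentile clipping."""
    zeros = band.size - np.count_nonzero(band)
    min_val = np.percentile(band, zeros / band.size * 100 + 0.15)
    max_val = np.percentile(band, 99)
    low, high = band < min_val, band > max_val
    mid = ~low & ~high
    out = np.empty(band.shape, dtype=np.float64)
    out[low] = band[low] * 20.0 / min_val       # compress values below the floor
    out[high] = 255                             # saturate values above the ceiling
    out[mid] = ((band[mid] - min_val) / (max_val - min_val)) ** 0.9 * 255
    return out.astype(np.uint8)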
class GsConn:
    def __init__(self, host, login, password, debug=False):
        """
        Geoserver connection
        """
        self.host = host
        self.login = login
        self.password = password
        self.debug = debug

        # Connect to server
        self.cat = Catalog("http://%s/geoserver/rest" % host, login, password)
        if self.debug is True:
            print "Connected to geoserver"

    def create_workspace(self, name, overwrite=False):  # fixed typo: was crate_workspace
        """
        Creates a workspace
        :param name: Workspace name.
        :param overwrite: If True, delete existing workspace.
        :return: None
        """
        workspaces = [workspace.name for workspace in self.cat.get_workspaces()]

        if name in workspaces and overwrite is True:
            # ws2del = self.cat.get_workspace(name)
            # self.cat.delete(ws2del, purge=True, recurse=True)
            return None  # NOTE: If we delete the workspace then all associated layers are lost.
        elif name in workspaces and overwrite is False:
            print "ERROR: Workspace %s already exists (use overwrite=True)." % name
            return None  # don't try to re-create an existing workspace

        self.cat.create_workspace(name, "http://%s/%s" % (self.host, name))
        if self.debug is True:
            print "Workspace %s available." % name

        ws = self.cat.get_workspace(name)
        ws.enabled = True

    def create_pg_store(self, name, workspace, host, port, login, password,
                        dbname, schema, overwrite=False):
        """
        Creates datastore.
        :param name: Name of the datastore.
        :param workspace: Name of the workspace to use.
        :param overwrite: If True replace datastore.
        :return: None
        """
        stores = [store.name for store in self.cat.get_stores()]

        if name in stores and overwrite is True:
            # st2del = self.cat.get_store(name)
            # self.cat.delete(st2del, purge=True, recurse=True)
            # self.cat.reload()
            return None  # NOTE: If we delete the store, every layer associated with it is lost.
        elif name in stores and overwrite is False:
            print "ERROR: Store %s already exists (use overwrite=True)." % name
            return None  # don't try to re-create an existing store

        ds = self.cat.create_datastore(name, workspace)
        ds.connection_parameters.update(
            host=host, port=port, user=login, passwd=password,
            dbtype="postgis", database=dbname, schema=schema
        )
        self.cat.save(ds)

        ds = self.cat.get_store(name)
        if ds.enabled is False:
            print "ERROR: Geoserver store %s not enabled" % name

        if self.debug is True:
            print "Datastore %s created." % name

    def publish_pg_layer(self, layer_table, layer_name, store, srid, overwrite=True):
        """
        """
        existing_lyr = self.cat.get_layer("ma_carte:%s" % layer_table)
        if existing_lyr is not None:
            print "Layer ma_carte:%s already exists, deleting it." % layer_table
            self.cat.delete(existing_lyr)
            self.cat.reload()

        ds = self.cat.get_store(store)
        ft = self.cat.publish_featuretype(layer_table, ds, "EPSG:%s" % srid,
                                          srs="EPSG:4326")
        ft.projection_policy = "REPROJECT_TO_DECLARED"
        ft.title = layer_name
        self.cat.save(ft)

        if ft.enabled is False:
            # fixed: the original was missing the % operator
            print "ERROR: Layer %s %s %s is not enabled." % (
                ft.workspace.name, ft.store.name, ft.title)

        if self.debug is True:
            print "Layer %s>%s>%s published." % (ft.workspace.name,
                                                 ft.store.name, ft.title)

    def create_style_from_sld(self, style_name, sld_file, workspace, overwrite=True):
        """
        """
        if self.cat.get_style(style_name) is not None:
            print "Style %s already exists, deleting it." % style_name
            style2del = self.cat.get_style(style_name)
            self.cat.delete(style2del)

        self.cat.create_style(
            style_name, open(sld_file).read(), overwrite=overwrite
        )  # FIXME: if ", workspace=workspace" specified can't delete style

        if self.debug is True:
            print "Style %s created in Geoserver" % style_name

    def apply_style_to_layer(self, layer_name, style_name):
        """
        Apply a geoserver style to a layer
        """
        gs_layer = self.cat.get_layer(layer_name)
        gs_style = self.cat.get_style(style_name)

        # FIXME: Which works better?
        # gs_layer.default_style = gs_style / gs_layer._set_default_style(gs_style)
        # FIXME: Maybe indicate workspace when saving style, then name the style as "workspace:style"
        gs_layer._set_default_style(gs_style)
        self.cat.save(gs_layer)

        if self.debug is True:
            print "Style applied to %s" % layer_name
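# Hypothetical end-to-end use of GsConn, assuming a reachable GeoServer and
# PostGIS; every host name, credential, table and style path below is a
# placeholder rather than part of the original module.
conn = GsConn("localhost:8080", "admin", "geoserver", debug=True)
conn.create_workspace("ma_carte")
conn.create_pg_store("pg_store", "ma_carte", "localhost", 5432,
                     "postgres", "secret", "gisdb", "public")
conn.publish_pg_layer("roads", "Roads", "pg_store", 2154)
conn.create_style_from_sld("roads_style", "styles/roads.sld", "ma_carte")
conn.apply_style_to_layer("roads", "roads_style")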
class ArchiveAndCatalogueI(geocloud.ArchiveAndCatalogue):
    def __init__(self, com):
        if not com:
            raise RuntimeError("No communicator")
        self.com = com
        q = com.stringToProxy('IceGrid/Query')
        self.query = IceGrid.QueryPrx.checkedCast(q)
        if not self.query:
            raise RuntimeError("Invalid proxy")
        self.broker = geocloud.BrokerPrx.checkedCast(com.stringToProxy("broker"))
        self.sem = Ice.threading.RLock()  # Lock for managing the data structures
        geoserver_path = "http://localhost:80/geoserver/rest"
        self.catalog = Catalog(geoserver_path)
        self.workspace = None

    def setBroker(self, broker, current=None):
        self.broker = broker

    def log(self, log):
        if self.broker:
            self.broker.begin_appendLog("<ArchiveAndCatalogue> " + str(log))
        else:
            print log

    def createScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            if self.catalog == None:
                self.log("Catalog")
                raise geocloud.CreationScenarioException()
            try:
                self.workspace = self.catalog.create_workspace(scenario, scenario)
                self.workspace.enabled = True
                self.log("Created Scenario %s" % (scenario))
            except Exception:
                print("Workspace was not created")
                self.log("Workspace was not created")
        except FailedRequestError:
            self.log("Request failed")
            raise geocloud.CreationScenarioException()
        finally:
            self.sem.release()

    def catalogue(self, path, store, scenario, current=None):
        if self.workspace == None:
            raise geocloud.CataloguingException()
        else:
            try:
                if self.catalog == None:
                    raise geocloud.CataloguingException()
                # store = path.split('/')[-1]
                self.sem.acquire()
                cv = self.catalog.create_coveragestore(store, path, self.workspace)
                self.log("%s Catalogued!" % (path))
            except ConflictingDataError as e:
                self.log(e)
            except UploadError as e:
                self.log(e)
            except FailedRequestError as e:
                pass  # was a bare `None` expression, which did nothing
            except Exception as e:
                self.log(e)
            finally:
                self.sem.release()

    def deleteScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            for layer in self.catalog.get_layers():
                self.catalog.delete(layer)
            for coverage in self.catalog.get_stores():
                for resource in coverage.get_resources():
                    self.catalog.delete(resource)  # was bare `catalog`, a NameError
                self.catalog.delete(coverage)
            for wk in self.catalog.get_workspaces():  # was `for vk ...` deleting `wk`
                self.catalog.delete(wk)
        except Exception:
            self.log("Exception while cleaning scenario")
        finally:
            self.sem.release()  # added: the lock was never released here
class GeoserverLayers(object): def __init__(self, gs_apirest, username, password): try: self.__catalog = Catalog(gs_apirest, username=username, password=password) except Exception as err: print("Geoserver API Rest Error: {0}".format(err)) def __createSldStyle(self, sldpath, stylename, ovwrt=False): try: styles_list = [st.name for st in self.__catalog.get_styles()] if ovwrt or not stylename in styles_list: with open(sldpath) as f: self.__catalog.create_style(stylename, f.read(), overwrite=ovwrt) else: print("This style already exists...") except Exception as err: print("Geoserver API Rest Error creating new style: {0}".format(err)) def __rmvResources(self, rsrc): """ Removing resources """ self.__catalog.delete(rsrc) self.__catalog.reload() def rmvStyle(self, stylename): """ Remove style """ try: style = self.__catalog.get_style(stylename) self.__rmvResources(style) except Exception as err: print("Geoserver API Rest Error removing style: {0}".format(err)) def rmvDataStore(self, ds_store): """ Remove DataStore (with his layer) """ try: lay_rm = self.__catalog.get_layer(ds_store) self.__rmvResources(lay_rm) str_rm = self.__catalog.get_store(ds_store) self.__rmvResources(str_rm) except Exception as err: print("Geoserver API Rest Error removing data store: {0}".format(err)) def createGeoserverWMSLayer(self, data, ws_name, ds_name, stylename, sldpath, debug=False): """ Create Geoserver WMS layer Status codes: 2 : "Geoserver layer successfully created" -1 : "Workspace does not exist" -2 : "Datastore already exists" -3 : "Error creating Geoserver layer" -4 : "File missing" """ try: if not self.__catalog.get_workspace(ws_name): print("Workspace does not exist") return -1 ds_list = [i.name for i in self.__catalog.get_stores()] if ds_name in ds_list: print("Datastore already exists") return -2 ds = self.__catalog.create_datastore(ds_name, ws_name) ft = self.__catalog.create_featurestore(ds_name, workspace=ws_name,data=data) print("Geoserver layer successfully created...") self.__createSldStyle(sldpath, stylename) lyr = self.__catalog.get_layer(ds_name) lyr.enabled = True lyr.default_style = stylename self.__catalog.save(lyr) if debug: rsrc = self.__catalog.get_resource(ds_name, workspace=ws_name) print("Data Store XML: {}".format(rsrc.href)) return 2 except Exception as err: print("Geoserver API Rest Error creating new layer: {0}".format(err)) return -3
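# Hypothetical call mirroring the status codes documented in
# createGeoserverWMSLayer() above; the REST URL, workspace name and file
# paths are placeholders.
gs = GeoserverLayers("http://localhost:8080/geoserver/rest",
                     "admin", "geoserver")
status = gs.createGeoserverWMSLayer(
    data="/tmp/parcels.shp", ws_name="demo", ds_name="parcels",
    stylename="parcels_style", sldpath="/tmp/parcels.sld", debug=True)
print("createGeoserverWMSLayer returned {0}".format(status))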
def publishGeoserver(appdef, progress): viewCrs = appdef["Settings"]["App view CRS"] usesGeoServer = False for applayer in appdef["Layers"]: if applayer.method != METHOD_FILE: if applayer.layer.type() == applayer.layer.VectorLayer and applayer.layer.providerType().lower() != "wfs": usesGeoServer = True if not usesGeoServer: return progress.setText("Publishing to GeoServer") progress.setProgress(0) geoserverUrl = appdef["Deploy"]["GeoServer url"] + "/rest" geoserverPassword = appdef["Deploy"]["GeoServer password"] geoserverUsername = appdef["Deploy"]["GeoServer username"] workspaceName = appdef["Deploy"]["GeoServer workspace"] dsName = "ds_" + workspaceName host = appdef["Deploy"]["PostGIS host"] port = appdef["Deploy"]["PostGIS port"] postgisUsername = appdef["Deploy"]["PostGIS username"] postgisPassword = appdef["Deploy"]["PostGIS password"] database = appdef["Deploy"]["PostGIS database"] schema = appdef["Deploy"]["PostGIS schema"] catalog = Catalog(geoserverUrl, geoserverUsername, geoserverPassword) workspace = catalog.get_workspace(workspaceName) if workspace is None: workspace = catalog.create_workspace(workspaceName, workspaceName) try: store = catalog.get_store(dsName, workspace) resources = store.get_resources() for resource in resources: layers = catalog.get_layers(resource) for layer in layers: catalog.delete(layer) catalog.delete(resource) catalog.delete(store) except Exception: pass try: store = catalog.get_store(dsName, workspace) except FailedRequestError: store = None for i, applayer in enumerate(appdef["Layers"]): layer = applayer.layer if applayer.method != METHOD_FILE and applayer.method != METHOD_DIRECT: name = safeName(layer.name()) sld, icons = getGsCompatibleSld(layer) if sld is not None: catalog.create_style(name, sld, True) uploadIcons(icons, geoserverUsername, geoserverPassword, catalog.gs_base_url) if layer.type() == layer.VectorLayer: if applayer.method == METHOD_WFS_POSTGIS or applayer.method == METHOD_WMS_POSTGIS: if store is None: store = catalog.create_datastore(dsName, workspace) store.connection_parameters.update( host=host, port=str(port), database=database, user=postgisUsername, schema=schema, passwd=postgisPassword, dbtype="postgis") catalog.save(store) catalog.publish_featuretype(name, store, layer.crs().authid()) else: path = getDataFromLayer(layer, viewCrs) catalog.create_featurestore(name, path, workspace=workspace, overwrite=True) gslayer = catalog.get_layer(name) r = gslayer.resource r.dirty['srs'] = viewCrs catalog.save(r) elif layer.type() == layer.RasterLayer: path = getDataFromLayer(layer, viewCrs) catalog.create_coveragestore(name, path, workspace=workspace, overwrite=True) if sld is not None: publishing = catalog.get_layer(name) publishing.default_style = catalog.get_style(name) catalog.save(publishing) progress.setProgress(int((i+1)*100.0/len(appdef["Layers"])))
class GsConfigTestCase(unittest.TestCase):
    """Tests of the GsConfig wrapper"""
    gs = None
    workdir = None

    def setUp(self):
        cfg = ConfigParser.SafeConfigParser()
        cfg.read((os.path.join(TEST_DIR, "tests.cfg")))
        self.gs = GsConfig(cfg)
        self.workdir = os.path.abspath(os.path.join(TEST_DIR, "workdir", "data"))

        gsUrl = self.gs.config.get("GeoServer", "url")
        gsUser = self.gs.config.get("GeoServer", "user")
        gsPassword = self.gs.config.get("GeoServer", "password")
        self.direct_gs = Catalog(gsUrl, gsUser, gsPassword)

    def tearDown(self):
        # minimal stand-in object: Catalog.delete() only needs an href attribute
        class Req:
            href = self.gs.config.get("GeoServer", "url") + "/styles/tmp_polygon"
        if self.direct_gs.get_style("tmp_polygon"):
            self.direct_gs.delete(Req())

    ### LAYERS ###
    def test_getLayers(self):
        layers = self.gs.getLayers()
        print layers
        # TODO: Add test

    def test_getLayer(self):
        layers = self.gs.getLayers()
        layer = self.gs.getLayer(layers[0].name)
        print layer
        # TODO: Add test

    ### FEATURE STORE ###
    def test_createFeatureStore(self):  # fixed typo: was test_createFeatrueStore
        # TODO: make sure that source files exist

        # check / delete layer
        line_layer = self.direct_gs.get_layer("line_crs")
        if line_layer is not None:
            self.direct_gs.delete(line_layer)

        # check / delete feature type
        #
        # sorry - it is not possible to delete feature type with gsconfig.py
        #
        # example in test
        # https://github.com/dwins/gsconfig.py/blob/master/test/catalogtests.py#L424
        # only shows how to disable it
        #
        # FOR NOW, PLEASE REMOVE THE FEATURE TYPE "line_crs0" MANUALLY
        #
        # line_feature = self.direct_gs.get_featuretype("line_crs0") # this function does not exist
        # if line_feature is not None:
        #     self.direct_gs.delete(line_feature)
        # TODO: remove the feature type using the gs rest api itself

        # check / delete data store
        # line_store = self.direct_gs.get_store("line_crs")
        # if line_store is not None:
        #     self.direct_gs.delete(line_store)

        self.assertEquals(
            True, None == self.direct_gs.get_layer("line_crs"),
            "The layer line_crs already exists. Please, remove it manually.")
        self.assertEquals(
            True, None == self.direct_gs.get_store("line_crs"),
            "The store line_crs already exists. Please, remove it manually.")

        self.gs.createFeatureStore(self.workdir + "/line_crs", "TestWS", "line_crs")

        # Another problem - gs.createFeatureStore DOES NOT return the resource URI
        # - hence WE DON'T know the name of the created layer!
        # TODO: use the gs rest api itself and find out the name of the created layer
        # - whether it is line_crs0, line_crs1 or what...
        self.assertEquals(
            False, None == self.direct_gs.get_layer("line_crs"),
            "The layer line_crs is not there. Was it created under another name?")

    ### STYLES ###
    def test_getStyle(self):
        """Test get style"""
        style = self.gs.getStyle("line")
        self.assertTrue(style.find("<sld:Name>line</sld:Name>") > 0)

    def test_postStyle(self):
        """Test posting a style"""
        global style
        self.gs.postStyle("tmp_polygon", style)
        # TODO: add test
        # FIXME: UploadError: SLD file styles/tmp_polygon.sld.sld already exists.

    def test_putStyle(self):
        """Test updating a style"""
        global style
        self.gs.postStyle("tmp_polygon", style)
        self.gs.putStyle("tmp_polygon", style)
class UploaderTests(TestCase): """Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): """Convenience method for creating a datastore. """ settings = connection.settings_dict ds_name = settings['NAME'] params = { 'database': ds_name, 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': 'True' } store = catalog.create_datastore(ds_name, workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(ds_name) def create_user(self, username, password, **kwargs): """Convenience method for creating users. """ user, created = User.objects.get_or_create(username=username, **kwargs) if created: user.set_password(password) user.save() return user def setUp(self): self.assertTrue( os.path.exists(_TEST_FILES_DIR), 'Test could not run due to missing test data at {0!r}' .format(_TEST_FILES_DIR) ) # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.admin_user = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_user = self.create_user('non_admin', 'non_admin') self.catalog = Catalog( ogc_server_settings.internal_rest, *ogc_server_settings.credentials ) if self.catalog.get_workspace('geonode') is None: self.catalog.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.catalog) def tearDown(self): """Clean up geoserver. """ self.catalog.delete(self.datastore, recurse=True) def import_file(self, path, configs=None): """Imports the file. """ if configs is None: configs = [] self.assertTrue(os.path.exists(path)) # run ogr2ogr ogr = OGRImport(path) layers = ogr.handle(configuration_options=configs) return layers def generic_import(self, filename, configs=None): if configs is None: configs = [{'index': 0}] path = test_file(filename) results = self.import_file(path, configs=configs) layer_results = [] for result in results: if result[1].get('raster'): layer_path = result[0] layer_name = os.path.splitext(os.path.basename(layer_path))[0] layer = Layer.objects.get(name=layer_name) self.assertTrue(layer_path.endswith('.tif')) self.assertTrue(os.path.exists(layer_path)) gdal_layer = gdal.OpenEx(layer_path) self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff') layer_results.append(layer) else: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not path.endswith('zip'): self.assertGreaterEqual( layer.attributes.count(), DataSource(path)[0].num_fields ) layer_results.append(layer) return layer_results[0] def generic_api_upload(self, filenames, configs=None): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o']. 
        self.assertNotIsInstance(filenames, str)
        # Upload Files
        outfiles = []
        for filename in filenames:
            path = test_file(filename)
            with open(path) as stream:
                data = stream.read()
            upload = SimpleUploadedFile(filename, data)
            outfiles.append(upload)
        response = client.post(
            reverse('uploads-new-json'),
            {'file': outfiles, 'json': json.dumps(configs)},
            follow=True)
        content = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(content['id'], 1)
        # Configure Uploaded Files
        upload_id = content['id']
        upload_layers = UploadLayer.objects.filter(upload_id=upload_id)
        for upload_layer in upload_layers:
            for config in configs:
                if config['upload_file_name'] == upload_layer.name:
                    payload = config['config']
                    url = '/importer-api/data-layers/{0}/configure/'.format(upload_layer.id)
                    response = client.post(
                        url, data=json.dumps(payload),
                        content_type='application/json')
                    self.assertEqual(response.status_code, 200)
                    url = '/importer-api/data-layers/{0}/'.format(upload_layer.id)
                    response = client.get(url, content_type='application/json')
                    self.assertEqual(response.status_code, 200)
        return content

    def generic_raster_import(self, filename, configs=None):
        if configs is None:
            configs = [{'index': 0}]
        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_path = results[0][0]
        layer_name = os.path.splitext(os.path.basename(layer_path))[0]
        layer = Layer.objects.get(name=layer_name)
        self.assertTrue(layer_path.endswith('.tif'))
        self.assertTrue(os.path.exists(layer_path))
        gdal_layer = gdal.OpenEx(layer_path)
        self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
        return layer

    def test_multi_upload(self):
        """Tests Uploading Multiple Files
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_year_field.zip',
                'boxes_with_date.zip',
                'point_with_date.geojson'
            ],
            configs=[
                {'upload_file_name': 'boxes_with_year_field.shp',
                 'config': [{'index': 0}]},
                {'upload_file_name': 'boxes_with_date.shp',
                 'config': [{'index': 0}]},
                {'upload_file_name': 'point_with_date.geojson',
                 'config': [{'index': 0}]}
            ]
        )
        self.assertEqual(9, upload['count'])

    def test_upload_with_slds(self):
        """Tests Uploading sld
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'boxes.sld',
                'boxes1.sld'
            ],
            configs=[
                {'upload_file_name': 'boxes_with_date.shp',
                 'config': [
                     {'index': 0,
                      'default_style': 'boxes.sld',
                      'styles': ['boxes.sld', 'boxes1.sld']}
                 ]}
            ]
        )
        self.assertEqual(6, upload['count'])
        upload_id = upload['id']
        upload_obj = UploadedData.objects.get(pk=upload_id)
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk
        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(6, upfiles_count)
        # Warning: this assumes that Layer pks equal UploadLayer pks
        layer = Layer.objects.get(pk=layer_id)
        gslayer = self.catalog.get_layer(layer.name)
        default_style = gslayer.default_style
        # TODO: can we use public API or omit this?
        self.catalog._cache.clear()
        self.assertEqual('boxes.sld', default_style.filename)

    def test_upload_with_metadata(self):
        """Tests Uploading metadata
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'samplemetadata.xml',
            ],
            configs=[
                {'upload_file_name': 'boxes_with_date.shp',
                 'config': [
                     {'index': 0, 'metadata': 'samplemetadata.xml'}
                 ]}
            ]
        )
        self.assertEqual(5, upload['count'])
        upload_id = upload['id']
        upload_obj = UploadedData.objects.get(pk=upload_id)  # TODO: why did we get upload_obj?
uplayers = UploadLayer.objects.filter(upload=upload_id) layer_id = uplayers[0].pk upfiles_count = UploadFile.objects.filter(upload=upload_id).count() self.assertEqual(5, upfiles_count) layer = Layer.objects.get(pk=layer_id) self.assertEqual(layer.language, 'eng') self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar') def test_raster(self): """Exercise raster import. """ layer = self.generic_raster_import( 'test_grid.tif', configs=[ { 'index': 0 } ] ) self.assertTrue(layer.name.startswith('test_grid')) def test_box_with_year_field(self): """Tests the import of test_box_with_year_field. """ layer = self.generic_import( 'boxes_with_year_field.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """Tests the import of test_boxes_with_date. """ layer = self.generic_import( 'boxes_with_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_gpkg(self): """Tests the import of test_boxes_with_date.gpkg. """ layer = self.generic_import( 'boxes_with_date.gpkg', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_plus_raster_gpkg_by_index(self): """Tests the import of multilayer vector + raster geopackage using index """ layer = self.generic_import( 'boxes_plus_raster.gpkg', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }, {'index': 1}, {'index': 2}, {'index': 3}, {'index': 4}, {'index': 5}, {'index': 6}, {'index': 7}, ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """Tests a CSV with WKT polygon. """ layer = self.generic_import( 'boxes_with_date.csv', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_csv_missing_features(self): """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding. """ self.generic_import( 'missing-features.csv', configs=[ {'index': 0} ] ) def test_boxes_with_iso_date(self): """Tests the import of test_boxes_with_iso_date. 
""" layer = self.generic_import( 'boxes_with_date_iso_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """Import the same layer twice to ensure file names increment properly. """ path = test_file('boxes_with_date_iso_date.zip') ogr = OGRImport(path) layers1 = ogr.handle({'index': 0, 'name': 'test'}) layers2 = ogr.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_launder(self): """Ensure the launder function works as expected. """ self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3') self.assertEqual(launder('Testing#'), 'testing_') self.assertEqual(launder(' '), '_') def test_boxes_with_date_iso_date_zip(self): """Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import( 'boxes_with_date_iso_date.zip', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import( 'boxes_with_dates_bc.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """Tests the import of point_with_date.geojson """ layer = self.generic_import( 'point_with_date.geojson', configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import( 'boxes_with_end_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date', 'enddate'], 'start_date': 'date', 'end_date': 'enddate', 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'date_as_date') end_date_attr = get_layer_attr(layer, 'enddate_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute ) self.generic_time_check( layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute ) def test_us_states_kml(self): """Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. 
layer = self.generic_import( 'us_states.kml', configs=[ { 'index': 0 } ] ) self.assertEqual(layer.name.lower(), "us_states") def test_mojstrovka_gpx(self): """Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import( 'mojstrovka.gpx', configs=[ { 'index': 0, 'convert_to_date': ['time'], 'configureTime': True } ] ) date_attr = get_layer_attr(layer, 'time_as_date') self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute ) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """Convenience method to run generic tests on time layers. """ # TODO: can we use public API or omit this? self.catalog._cache.clear() resource = self.catalog.get_resource( layer.name, store=layer.store, workspace=self.workspace ) time_info = resource.metadata['time'] self.assertEqual('LIST', time_info.presentation) self.assertEqual(True, time_info.enabled) self.assertEqual(attribute, time_info.attribute) self.assertEqual(end_attribute, time_info.end_attribute) def test_us_shootings_csv(self): """Tests the import of US_Shootings.csv. """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') path = test_file('US_Shootings.csv') layer = self.generic_import( path, configs=[ { 'index': 0, 'convert_to_date': ['Date'] } ] ) self.assertTrue(layer.name.startswith('us_shootings')) date_field = 'date' configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_field ) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """Tests the import of US_Civil_Rights_Sitins0.csv """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') layer = self.generic_import( 'US_Civil_Rights_Sitins0.csv', configs=[ { 'index': 0, 'convert_to_date': ['Date'] } ] ) self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0') def get_layer_names(self, path): """Gets layer names from a data source. """ data_source = DataSource(path) return [layer.name for layer in data_source] def test_gdal_import(self): path = test_file('point_with_date.geojson') self.generic_import( path, configs=[ { 'index': 0, 'convert_to_date': ['date'] } ] ) def test_wfs(self): """Tests the import from a WFS Endpoint """ wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs' ogr = OGRImport(wfs) configs = [ {'layer_name': 'og:bugsites'}, {'layer_name': 'topp:states'} ] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_arcgisjson(self): """Tests the import from a WFS Endpoint """ endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json' ogr = OGRImport(endpoint) configs = [{'index':0}] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_file_add_view(self): """Tests the file_add_view. 
""" client = AdminClient() # test login required for this view request = client.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertEqual(len(response.context['object_list']), 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) with open(test_file('empty_file.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """Tests the file_add_view. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new-json'), {'file': stream}, follow=True ) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """Tests the describe fields functionality. """ path = test_file('US_Shootings.csv') with GDALInspector(path) as inspector: layers = inspector.describe_fields() self.assertTrue(layers[0]['layer_name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude']) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """Tests the describe fields functionality. """ filenames = { 'US_Shootings.csv': 'CSV', 'point_with_date.geojson': 'GeoJSON', 'mojstrovka.gpx': 'GPX', 'us_states.kml': 'KML', 'boxes_with_year_field.shp': 'ESRI Shapefile', 'boxes_with_date_iso_date.zip': 'ESRI Shapefile' } from .models import NoDataSourceFound try: for filename, file_type in sorted(filenames.items()): path = test_file(filename) with GDALInspector(path) as inspector: self.assertEqual(inspector.file_type(), file_type) except NoDataSourceFound: logging.exception('No data source found in: {0}'.format(path)) raise def test_configure_view(self): """Tests the configuration view. 
""" path = test_file('point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] client = AdminClient() client.login_as_non_admin() with open(path) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) upload = response.context['object_list'][0] payload = [ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': { 'users': { 'test': new_user_perms, 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } } } ] response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json' ) self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual( Layer.objects.all()[0].owner.username, self.non_admin_user.username ) perms = first_layer.get_all_level_info() user = User.objects.get(username=self.non_admin_user.username) # check user permissions expected_perms = [ u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data' ] for perm in expected_perms: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn( 'change_resourcebase_permissions', perms['users'][new_user] ) self.assertIn( 'change_layer_data', perms['users'][User.objects.get(username='******')] ) catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, first_layer) def test_configure_view_convert_date(self): """Tests the configure view with a dataset that needs to be converted to a date. """ client = AdminClient() client.login_as_non_admin() with open(test_file('US_Shootings.csv')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) upload = response.context['object_list'][0] payload = [ { 'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True } ] response = client.get( '/importer-api/data-layers/{0}/configure/'.format(upload.id) ) self.assertEqual(response.status_code, 405) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id) ) self.assertEqual(response.status_code, 400) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json' ) self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(first_layer.attributes.filter(attribute='date'), 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. 
        client.logout()
        client.login_as_admin()
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        client = AdminClient()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)
        client.login_as_non_admin()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        admin = User.objects.get(username=self.admin_user.username)
        non_admin = User.objects.get(username=self.non_admin_user.username)
        path = test_file('US_Shootings.csv')
        with open(path, 'rb') as stream:
            uploaded_file = SimpleUploadedFile('test_data', stream.read())
        admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
        admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))
        non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
        non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))
        client.login_as_admin()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)
        response = client.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        client = AdminClient()
        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)
        client.login_as_non_admin()
        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """Ensure users can delete their data.
        """
        client = AdminClient()
        client.login_as_non_admin()
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'), {'file': stream}, follow=True)
        self.assertEqual(UploadedData.objects.all().count(), 1)
        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """Ensure that administrators can delete data that isn't theirs.
        """
        client = AdminClient()
        client.login_as_non_admin()
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'), {'file': stream}, follow=True)
        self.assertEqual(UploadedData.objects.all().count(), 1)
        client.logout()
        client.login_as_admin()
        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """Tests providing a name in the configuration options.
""" client = AdminClient() client.login_as_non_admin() name = 'point-with-a-date' with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True } response = client.post( '/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json' ) self.assertEqual(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.title, name.replace('-', '_')) def test_api_import(self): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post( reverse('uploads-new'), {'file': stream}, follow=True ) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True } self.assertIsInstance( UploadLayer.objects.first().configuration_options, dict ) response = client.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = client.post( '/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json' ) self.assertEqual(response.status_code, 200) self.assertIn('task', response.content) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, first_layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """Test the file extension validator. """ for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions: filename = 'test.{0}'.format(extension) upload = SimpleUploadedFile(filename, '') self.assertIsNone(validate_file_extension(upload)) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """Test the file extension validator. """ with self.assertRaises(ValidationError): validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import( 'Walmart.zip', configs=[ { 'configureTime':False, 'convert_to_date': ['W1_OPENDAT'], 'editable': True, 'index':0, 'name': 'Walmart', 'start_date': 'W1_OPENDAT' } ] ) def test_multipolygon_shapefile(self): """Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configs=[{'index': 0}]) def test_istanbul(self): """Tests shapefile with multipart polygons and non-WGS84 SR. 
""" result = self.generic_import( 'Istanbul.zip', configs=[ {'index': 0} ] ) feature_type = self.catalog.get_resource(result.name) self.assertEqual(feature_type.projection, 'EPSG:32635') def test_gwc_handler(self): """Tests the GeoWebCache handler """ layer = self.generic_import( 'boxes_with_date.shp', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True } ] ) gwc = GeoWebCacheHandler(None) gs_layer = self.catalog.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import( 'boxes_with_date_iso_date.shp', configs=[ { 'index': 0 } ] ) gs_layer = self.catalog.get_layer(layer.name) self.catalog._cache.clear() gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertNotIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) def test_utf8(self): """Tests utf8 characters in attributes """ path = test_file('china_provinces.shp') layer = self.generic_import(path) ogr = OGRImport(path) datastore, _ = ogr.open_target_datastore(ogr.target_store) sql = ( "select NAME_CH from {0} where NAME_PY = 'An Zhou'" .format(layer.name) ) result = datastore.ExecuteSQL(sql) feature = result.GetFeature(0) self.assertEqual(feature.GetField('name_ch'), '安州') def test_non_converted_date(self): """Test converting a field as date. """ results = self.generic_import( 'TM_WORLD_BORDERS_2005.zip', configs=[ { 'index': 0, 'start_date': 'Year', 'configureTime': True } ] ) layer = self.catalog.get_layer(results.typename) self.assertIn('time', layer.resource.metadata) self.assertEqual('year', layer.resource.metadata['time'].attribute) def test_fid_field(self): """ Regression test for preserving an FID field when target layer supports it but source does not. """ self.generic_import( 'noaa_paleoclimate.zip', configs=[ { 'index': 0 } ] ) def test_csv_with_wkb_geometry(self): """Exercise import of CSV files with multiple geometries. """ filenames = [ 'police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv', 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv', 'police_csv_noWKB.csv' ] for filename in filenames: self.generic_import( filename, { 'configureTime': True, 'convert_to_date': ['date_time'], 'editable': True, 'index': 0, 'name': filename.lower(), 'permissions': { 'users':{ 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } }, 'start_date':'date_time', } )
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog("http://localhost:8080/geoserver/rest") def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) def testPublishFeatureType(self): # Use the other test and store creation to load vector data into a database # @todo maybe load directly to database? try: self.testDataStoreCreateAndThenAlsoImportData() except FailedRequestError: pass try: lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. self.cat.delete(lyr) self.cat.delete(lyr.resource) ds = self.cat.get_store("gsconfig_import_test") # make sure it's gone self.assert_(self.cat.get_layer('import') is None) self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326") # and now it's not self.assert_(self.cat.get_layer('import') is not None) finally: # tear stuff down to allow the other test to pass if we run first ds = self.cat.get_store("gsconfig_import_test") lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. 
self.cat.delete(lyr) self.cat.delete(lyr.resource) self.cat.delete(ds) def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) @drop_table('import') def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store( ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) def testCoverageStoreCreate(self): ds = self.cat.create_coveragestore2("coverage_gsconfig") ds.data_url = "file:data/mytiff.tiff" self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set( ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testWmsStoreCreate(self): ws = self.cat.create_wmsstore("wmsstore_gsconfig") ws.capabilitiesURL = 
"http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" ws.type = "WMS" self.cat.save(ws) def testWmsLayer(self): self.cat.create_workspace("wmstest", "http://example.com/wmstest") wmstest = self.cat.get_workspace("wmstest") wmsstore = self.cat.create_wmsstore("wmsstore", wmstest) wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" wmsstore.type = "WMS" self.cat.save(wmsstore) wmsstore = self.cat.get_store("wmsstore") self.assertEqual(1, len(self.cat.get_stores(wmstest))) available_layers = wmsstore.get_resources(available=True) for layer in available_layers: # sanitize the layer name - validation will fail on newer geoservers name = layer.replace(':', '_') new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer) added_layers = wmsstore.get_resources() self.assertEqual(len(available_layers), len(added_layers)) changed_layer = added_layers[0] self.assertEqual(True, changed_layer.advertised) self.assertEqual(True, changed_layer.enabled) changed_layer.advertised = False changed_layer.enabled = False self.cat.save(changed_layer) self.cat._cache.clear() changed_layer = wmsstore.get_resources()[0] changed_layer.fetch() self.assertEqual(False, changed_layer.advertised) self.assertEqual(False, changed_layer.enabled) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.iteritems(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assert_( self.cat.get_resource("states_test", workspace=sf) is not None) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore( "states_test", shapefile_plus_sidecars, sf)) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore( "states_raster_test", shapefile_plus_sidecars, sf)) bogus_shp = { 'shp': 'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assert_(self.cat.get_layer("states_test") is None) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") # TODO: Uploading WorldImage file no longer works??? 
# ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf) # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None) # self.assertRaises( # ConflictingDataError, # lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf) # ) self.assertRaises( UploadError, lambda: self.cat.create_featurestore( "Pk50095_vector", tiffdata, sf)) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = "Not the original attribution" # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next() lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # upload new style, verify existence self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assert_(self.cat.get_style("fred") is None) # attempt creating new style self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) # verify it can be found via URL and check the name f = self.cat.get_style_by_url(fred.href) self.assert_(f is not None) self.assertEqual(f.name, fred.name) def testWorkspaceStyles(self): # upload new style, verify existence self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="blarny") self.assert_(jed is None) jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) jed = self.cat.get_style("topp:jed") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) # replace style, verify changes self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Ted", jed.sld_title) # delete style, verify non-existence self.cat.delete(jed, purge=True) self.assert_(self.cat.get_style("jed", workspace="topp") is None) # attempt creating new style self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertEqual("Fred", jed.sld_title) # verify it can be found via URL and check the full name f = self.cat.get_style_by_url(jed.href) self.assert_(f is not None) self.assertEqual(f.fqn, jed.fqn) def testLayerWorkspaceStyles(self): # upload new style, 
verify existence self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp") self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp") ned = self.cat.get_style("ned", workspace="topp") zed = self.cat.get_style("zed", workspace="topp") self.assert_(ned is not None) self.assert_(zed is not None) lyr = self.cat.get_layer("states") lyr.default_style = ned lyr.styles = [zed] self.cat.save(lyr) self.assertEqual("topp:ned", lyr.default_style) self.assertEqual([zed], lyr.styles) lyr.refresh() self.assertEqual("topp:ned", lyr.default_style.fqn) self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles]) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assert_(ws is None) def testWorkspaceDefault(self): # save orig orig = self.cat.get_default_workspace() neu = self.cat.create_workspace("neu", "http://example.com/neu") try: # make sure setting it works self.cat.set_default_workspace("neu") ws = self.cat.get_default_workspace() self.assertEqual('neu', ws.name) finally: # cleanup and reset to the way things were self.cat.delete(neu) self.cat.set_default_workspace(orig.name) ws = self.cat.get_default_workspace() self.assertEqual(orig.name, ws.name) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) states.enabled = False self.assert_(states.enabled == False) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == False) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) def testLayerGroupSave(self): tas = self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities' ], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = tas.styles[:-1] self.cat.save(tas) # this verifies the local state self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) # force a refresh to check the remote state tas.refresh() self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) def testTimeDimension(self): sf = self.cat.get_workspace("sf") files = shapefile_and_friends( os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date")) self.cat.create_featurestore("boxes_with_end_date", files, sf) get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer( 'boxes_with_end_date').resource # configure time as LIST resource = get_resource() timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date") resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) 
self.assertEqual("date", timeInfo.attribute) self.assertEqual("ISO8601", timeInfo.units) # disable time dimension timeInfo = resource.metadata['time'] timeInfo.enabled = False # since this is an xml property, it won't get written unless we modify it resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(False, timeInfo.enabled) # configure with interval, end_attribute and enable again timeInfo.enabled = True timeInfo.presentation = 'DISCRETE_INTERVAL' timeInfo.resolution = '3 days' timeInfo.end_attribute = 'enddate' resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(True, timeInfo.enabled) self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation) self.assertEqual('3 days', timeInfo.resolution_str()) self.assertEqual('enddate', timeInfo.end_attribute) def testImageMosaic(self): # testing the mosaic creation name = 'cea_mosaic' data = open('test/data/mosaic/cea.zip', 'rb') self.cat.create_imagemosaic(name, data) # get the layer resource back self.cat._cache.clear() resource = self.cat.get_layer(name).resource self.assert_(resource is not None) # delete granule from mosaic coverage = name store = name granule_id = name + '.1' self.cat.mosaic_delete_granule(coverage, store, granule_id)
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog("http://localhost:8080/geoserver/rest") def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) def testPublishFeatureType(self): # Use the other test and store creation to load vector data into a database # @todo maybe load directly to database? try: self.testDataStoreCreateAndThenAlsoImportData() except FailedRequestError: pass try: lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. self.cat.delete(lyr) self.cat.delete(lyr.resource) ds = self.cat.get_store("gsconfig_import_test") # make sure it's gone self.assert_(self.cat.get_layer('import') is None) self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326") # and now it's not self.assert_(self.cat.get_layer('import') is not None) finally: # tear stuff down to allow the other test to pass if we run first ds = self.cat.get_store("gsconfig_import_test") lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. 
self.cat.delete(lyr) self.cat.delete(lyr.resource) self.cat.delete(ds) def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) @drop_table('import') def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store(ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) def testCoverageStoreCreate(self): ds = self.cat.create_coveragestore2("coverage_gsconfig") ds.data_url = "file:data/mytiff.tiff" self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testWmsStoreCreate(self): ws = self.cat.create_wmsstore("wmsstore_gsconfig") ws.capabilitiesURL = 
"http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" ws.type = "WMS" self.cat.save(ws) def testWmsLayer(self): self.cat.create_workspace("wmstest", "http://example.com/wmstest") wmstest = self.cat.get_workspace("wmstest") wmsstore = self.cat.create_wmsstore("wmsstore", wmstest) wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities" wmsstore.type = "WMS" self.cat.save(wmsstore) wmsstore = self.cat.get_store("wmsstore") self.assertEqual(1, len(self.cat.get_stores(wmstest))) available_layers = wmsstore.get_resources(available=True) for layer in available_layers: new_layer = self.cat.create_wmslayer(wmstest, wmsstore, layer) added_layers = wmsstore.get_resources() self.assertEqual(len(available_layers), len(added_layers)) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.iteritems(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) ) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf) ) bogus_shp = { 'shp': 'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf) ) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assert_(self.cat.get_layer("states_test") is None) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") # TODO: Uploading WorldImage file no longer works??? 
# ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf) # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None) # self.assertRaises( # ConflictingDataError, # lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf) # ) self.assertRaises( UploadError, lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf) ) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff) ) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = "Not the original attribution" # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next() lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # upload new style, verify existence self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True) fred = self.cat.get_style("fred") self.assert_(fred is not None) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assert_(self.cat.get_style("fred") is None) # attempt creating new style self.cat.create_style("fred", open("test/fred.sld").read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) # verify it can be found via URL and check the name f = self.cat.get_style_by_url(fred.href) self.assert_(f is not None) self.assertEqual(f.name, fred.name) def testWorkspaceStyles(self): # upload new style, verify existence self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="blarny") self.assert_(jed is None) jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) jed = self.cat.get_style("topp:jed") self.assert_(jed is not None) self.assertEqual("Fred", jed.sld_title) # replace style, verify changes self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assert_(jed is not None) self.assertEqual("Ted", jed.sld_title) # delete style, verify non-existence self.cat.delete(jed, purge=True) self.assert_(self.cat.get_style("jed", workspace="topp") is None) # attempt creating new style self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertEqual("Fred", jed.sld_title) # verify it can be found via URL and check the full name f = self.cat.get_style_by_url(jed.href) self.assert_(f is not None) self.assertEqual(f.fqn, jed.fqn) def testLayerWorkspaceStyles(self): # upload new style, 
verify existence self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp") self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp") ned = self.cat.get_style("ned", workspace="topp") zed = self.cat.get_style("zed", workspace="topp") self.assert_(ned is not None) self.assert_(zed is not None) lyr = self.cat.get_layer("states") lyr.default_style = ned lyr.styles = [zed] self.cat.save(lyr) self.assertEqual("topp:ned", lyr.default_style) self.assertEqual([zed], lyr.styles) lyr.refresh() self.assertEqual("topp:ned", lyr.default_style.fqn) self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles]) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assert_(ws is None) def testWorkspaceDefault(self): # save orig orig = self.cat.get_default_workspace() neu = self.cat.create_workspace("neu", "http://example.com/neu") try: # make sure setting it works self.cat.set_default_workspace("neu") ws = self.cat.get_default_workspace() self.assertEqual('neu', ws.name) finally: # cleanup and reset to the way things were self.cat.delete(neu) self.cat.set_default_workspace(orig.name) ws = self.cat.get_default_workspace() self.assertEqual(orig.name, ws.name) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) states.enabled = False self.assert_(states.enabled == False) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == False) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assert_(states.enabled == True) def testLayerGroupSave(self): tas = self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = tas.styles[:-1] self.cat.save(tas) # this verifies the local state self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) # force a refresh to check the remote state tas.refresh() self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles)
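# The featurestore tests in the suite above rely on shapefile_and_friends()
# to gather a shapefile together with its sidecar files. A minimal sketch of
# such a helper (hypothetical name, assuming the four standard sidecars sit
# next to each other on disk), matching the dict checked in
# testFeatureTypeCreate:
def shapefile_and_friends_sketch(base_path):
    """Map extension -> path for a shapefile and its sidecar files."""
    return dict((ext, base_path + "." + ext)
                for ext in ("shp", "shx", "dbf", "prj"))

# Example: shapefile_and_friends_sketch("test/data/states")["shp"] evaluates
# to "test/data/states.shp".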
class UploaderTests(TestCase): """Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): """Convenience method for creating a datastore. """ settings = connection.settings_dict ds_name = settings['NAME'] params = { 'database': ds_name, 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': 'True' } store = catalog.create_datastore(ds_name, workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(ds_name) def create_user(self, username, password, **kwargs): """Convenience method for creating users. """ user, created = User.objects.get_or_create(username=username, **kwargs) if created: user.set_password(password) user.save() return user def setUp(self): self.assertTrue( os.path.exists(_TEST_FILES_DIR), 'Test could not run due to missing test data at {0!r}'.format( _TEST_FILES_DIR)) # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.admin_user = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_user = self.create_user('non_admin', 'non_admin') self.catalog = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.catalog.get_workspace('geonode') is None: self.catalog.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.catalog) def tearDown(self): """Clean up geoserver. """ self.catalog.delete(self.datastore, recurse=True) def import_file(self, path, configs=None): """Imports the file. """ if configs is None: configs = [] self.assertTrue(os.path.exists(path)) # run ogr2ogr ogr = OGRImport(path) layers = ogr.handle(configuration_options=configs) return layers def generic_import(self, filename, configs=None): if configs is None: configs = [{'index': 0}] path = test_file(filename) results = self.import_file(path, configs=configs) layer_results = [] for result in results: if result[1].get('raster'): layer_path = result[0] layer_name = os.path.splitext(os.path.basename(layer_path))[0] layer = Layer.objects.get(name=layer_name) self.assertTrue(layer_path.endswith('.tif')) self.assertTrue(os.path.exists(layer_path)) gdal_layer = gdal.OpenEx(layer_path) self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff') layer_results.append(layer) else: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not path.endswith('zip'): self.assertGreaterEqual(layer.attributes.count(), DataSource(path)[0].num_fields) layer_results.append(layer) return layer_results[0] def generic_api_upload(self, filenames, configs=None): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o']. 
self.assertNotIsInstance(filenames, str) # Upload Files outfiles = [] for filename in filenames: path = test_file(filename) with open(path) as stream: data = stream.read() upload = SimpleUploadedFile(filename, data) outfiles.append(upload) response = client.post(reverse('uploads-new-json'), { 'file': outfiles, 'json': json.dumps(configs) }, follow=True) content = json.loads(response.content) self.assertEqual(response.status_code, 200) self.assertEqual(content['id'], 1) # Configure Uploaded Files upload_id = content['id'] upload_layers = UploadLayer.objects.filter(upload_id=upload_id) for upload_layer in upload_layers: for config in configs: if config['upload_file_name'] == os.path.basename( upload_layer.name): payload = config['config'] url = '/importer-api/data-layers/{0}/configure/'.format( upload_layer.id) response = client.post(url, data=json.dumps(payload), content_type='application/json') self.assertEqual(response.status_code, 200) url = '/importer-api/data-layers/{0}/'.format( upload_layer.id) response = client.get(url, content_type='application/json') self.assertEqual(response.status_code, 200) return content def generic_raster_import(self, filename, configs=None): if configs is None: configs = [{'index': 0}] path = test_file(filename) results = self.import_file(path, configs=configs) layer_path = results[0][0] layer_name = os.path.splitext(os.path.basename(layer_path))[0] layer = Layer.objects.get(name=layer_name) self.assertTrue(layer_path.endswith('.tif')) self.assertTrue(os.path.exists(layer_path)) gdal_layer = gdal.OpenEx(layer_path) self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff') return layer def test_multi_upload(self): """Tests Uploading Multiple Files """ # Number of layers in each file upload_layer_counts = [1, 1, 1] upload = self.generic_api_upload(filenames=[ 'boxes_with_year_field.zip', 'boxes_with_date.zip', 'point_with_date.geojson' ], configs=[{ 'upload_file_name': 'boxes_with_year_field.shp', 'config': [{ 'index': 0 }] }, { 'upload_file_name': 'boxes_with_date.shp', 'config': [{ 'index': 0 }] }, { 'upload_file_name': 'point_with_date.geojson', 'config': [{ 'index': 0 }] }]) self.assertEqual(Layer.objects.count(), sum(upload_layer_counts)) self.assertEqual(9, upload['count']) def test_upload_with_slds(self): """Tests Uploading sld """ upload = self.generic_api_upload( filenames=['boxes_with_date.zip', 'boxes.sld', 'boxes1.sld'], configs=[{ 'upload_file_name': 'boxes_with_date.shp', 'config': [{ 'index': 0, 'default_style': 'boxes.sld', 'styles': ['boxes.sld', 'boxes1.sld'] }] }]) self.assertEqual(6, upload['count']) upload_id = upload['id'] uplayers = UploadLayer.objects.filter(upload=upload_id) layer_id = uplayers[0].pk upfiles_count = UploadFile.objects.filter(upload=upload_id).count() self.assertEqual(6, upfiles_count) # Warning: this assumes that Layer pks equal UploadLayer pks layer = Layer.objects.get(pk=layer_id) gslayer = self.catalog.get_layer(layer.name) default_style = gslayer.default_style # TODO: can we use public API or omit this? 
self.catalog._cache.clear() self.assertEqual('boxes.sld', default_style.filename) def test_upload_with_metadata(self): """Tests Uploading metadata """ upload = self.generic_api_upload(filenames=[ 'boxes_with_date.zip', 'samplemetadata.xml', ], configs=[{ 'upload_file_name': 'boxes_with_date.shp', 'config': [{ 'index': 0, 'metadata': 'samplemetadata.xml' }] }]) self.assertEqual(5, upload['count']) upload_id = upload['id'] uplayers = UploadLayer.objects.filter(upload=upload_id) layer_id = uplayers[0].pk upfiles_count = UploadFile.objects.filter(upload=upload_id).count() self.assertEqual(5, upfiles_count) layer = Layer.objects.get(pk=layer_id) self.assertEqual(layer.language, 'eng') self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar') def test_geotiff_raster(self): """Exercise GeoTIFF raster import. """ layer = self.generic_raster_import('test_grid.tif', configs=[{ 'index': 0 }]) self.assertTrue(layer.name.startswith('test_grid')) def test_nitf_raster(self): """Tests NITF raster import """ layer = self.generic_raster_import('test_nitf.nitf') self.assertTrue(layer.name.startswith('test_nitf')) def test_box_with_year_field(self): """Tests the import of test_box_with_year_field. """ layer = self.generic_import('boxes_with_year_field.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """Tests the import of test_boxes_with_date. """ layer = self.generic_import('boxes_with_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_gpkg(self): """Tests the import of test_boxes_with_date.gpkg. """ layer = self.generic_import('boxes_with_date.gpkg', configs=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_plus_raster_gpkg_by_index(self): """ Tests the import of multilayer vector + tile geopackage using index, treating tile layers as rasters. Tile layers are now treated by default as a distinct layer type. This test forces them to still be treated as rasters and should be removed once tests for vector/tile geopackage files are in place. 
""" layer = self.generic_import('boxes_plus_raster.gpkg', configs=[ { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }, { 'index': 1 }, { 'index': 2 }, { 'index': 3 }, { 'index': 4 }, { 'index': 5 }, { 'index': 6, 'layer_type': 'raster' }, { 'index': 7, 'layer_type': 'raster' }, ]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """Tests a CSV with WKT polygon. """ layer = self.generic_import('boxes_with_date.csv', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_csv_missing_features(self): """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding. """ self.generic_import('missing-features.csv', configs=[{'index': 0}]) def test_boxes_with_iso_date(self): """Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """Import the same layer twice to ensure file names increment properly. """ path = test_file('boxes_with_date_iso_date.zip') ogr = OGRImport(path) layers1 = ogr.handle({'index': 0, 'name': 'test'}) layers2 = ogr.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_launder(self): """Ensure the launder function works as expected. """ self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3') self.assertEqual(launder('Testing#'), 'testing_') self.assertEqual(launder(' '), '_') def test_boxes_with_date_iso_date_zip(self): """Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.zip', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """Tests the import of test_boxes_with_dates_bc. 
""" layer = self.generic_import('boxes_with_dates_bc.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """Tests the import of point_with_date.geojson """ layer = self.generic_import('point_with_date.geojson', configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = get_layer_attr(layer, 'date_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time( self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, ) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('boxes_with_end_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date', 'enddate'], 'start_date': 'date', 'end_date': 'enddate', 'configureTime': True }]) date_attr = get_layer_attr(layer, 'date_as_date') end_date_attr = get_layer_attr(layer, 'enddate_as_date') self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime') configure_time(self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) def test_us_states_kml(self): """Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. layer = self.generic_import('us_states.kml', configs=[{'index': 0}]) self.assertEqual(layer.name.lower(), "us_states") def test_mojstrovka_gpx(self): """Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('mojstrovka.gpx', configs=[{ 'index': 0, 'convert_to_date': ['time'], 'configureTime': True }]) date_attr = get_layer_attr(layer, 'time_as_date') self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time(self.catalog.get_layer(layer.name).resource, attribute=date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """Convenience method to run generic tests on time layers. """ # TODO: can we use public API or omit this? self.catalog._cache.clear() resource = self.catalog.get_resource(layer.name, store=layer.store, workspace=self.workspace) time_info = resource.metadata['time'] self.assertEqual('LIST', time_info.presentation) self.assertEqual(True, time_info.enabled) self.assertEqual(attribute, time_info.attribute) self.assertEqual(end_attribute, time_info.end_attribute) def test_us_shootings_csv(self): """Tests the import of US_Shootings.csv. 
""" if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') path = test_file('US_Shootings.csv') layer = self.generic_import(path, configs=[{ 'index': 0, 'convert_to_date': ['Date'] }]) self.assertTrue(layer.name.startswith('us_shootings')) date_field = 'date' configure_time(self.catalog.get_layer(layer.name).resource, attribute=date_field) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """Tests the import of US_Civil_Rights_Sitins0.csv """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') layer = self.generic_import('US_Civil_Rights_Sitins0.csv', configs=[{ 'index': 0, 'convert_to_date': ['Date'] }]) self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0') def get_layer_names(self, path): """Gets layer names from a data source. """ data_source = DataSource(path) return [layer.name for layer in data_source] def test_gdal_import(self): path = test_file('point_with_date.geojson') self.generic_import(path, configs=[{ 'index': 0, 'convert_to_date': ['date'] }]) def test_wfs(self): """Tests the import from a WFS Endpoint """ wfs = 'WFS:http://demo.geo-solutions.it/geoserver/tiger/wfs' ogr = OGRImport(wfs) configs = [ { 'layer_name': 'tiger:giant_polygon' }, { 'layer_name': 'tiger:poi' }, ] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_arcgisjson(self): """Tests the import from a WFS Endpoint """ endpoint = 'http://sampleserver6.arcgisonline.com/arcgis/rest/services/Water_Network/FeatureServer/16/query'\ '?where=objectid=326&outfields=*&f=json' ogr = OGRImport(endpoint) configs = [{'index': 0}] layers = ogr.handle(configuration_options=configs) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_file_add_view(self): """Tests the file_add_view. """ client = AdminClient() # test login required for this view request = client.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertEqual(len(response.context['object_list']), 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) with open(test_file('empty_file.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """Tests the file_add_view. 
""" client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new-json'), {'file': stream}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """Tests the describe fields functionality. """ path = test_file('US_Shootings.csv') with GDALInspector(path) as inspector: layers = inspector.describe_fields() self.assertTrue(layers[0]['layer_name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], [ 'Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude' ]) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """Tests the describe fields functionality. """ filenames = { 'US_Shootings.csv': {'CSV'}, 'point_with_date.geojson': {'GeoJSON'}, 'mojstrovka.gpx': {'GPX'}, 'us_states.kml': {'LIBKML', 'KML'}, 'boxes_with_year_field.shp': {'ESRI Shapefile'}, 'boxes_with_date_iso_date.zip': {'ESRI Shapefile'} } from osgeo_importer.models import NoDataSourceFound try: for filename, file_type in sorted(filenames.items()): path = test_file(filename) with GDALInspector(path) as inspector: self.assertIn(inspector.file_type(), file_type) except NoDataSourceFound: logging.exception('No data source found in: {0}'.format(path)) raise def test_configure_view(self): """Tests the configuration view. """ path = test_file('point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] client = AdminClient() client.login_as_non_admin() with open(path) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': { 'users': { 'test': new_user_perms, 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } } }] response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_user.username) perms = first_layer.get_all_level_info() user = User.objects.get(username=self.non_admin_user.username) # check user permissions expected_perms = [ u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data' ] for perm in expected_perms: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn('change_resourcebase_permissions', perms['users'][new_user]) self.assertIn( 'change_layer_data', perms['users'][User.objects.get(username='******')]) catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, 
first_layer) def test_configure_view_convert_date(self): """Tests the configure view with a dataset that needs to be converted to a date. """ client = AdminClient() client.login_as_non_admin() with open(test_file('US_Shootings.csv')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) upload = response.context['object_list'][0] payload = [{ 'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True }] response = client.get( '/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 405) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 400) response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(first_layer.attributes.filter(attribute='date'), 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. client.logout() client.login_as_admin() response = client.post( '/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 404) def test_list_api(self): client = AdminClient() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 401) client.login_as_non_admin() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.admin_user.username) non_admin = User.objects.get(username=self.non_admin_user.username) path = test_file('US_Shootings.csv') with open(path, 'rb') as stream: uploaded_file = SimpleUploadedFile('test_data', stream.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create( state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add( UploadFile.objects.create(file=uploaded_file)) client.login_as_admin() response = client.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = client.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): client = AdminClient() response = client.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) client.login_as_non_admin() response = client.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """Ensure users can delete their data. 
""" client = AdminClient() client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) upload_id = UploadedData.objects.first().id response = client.delete('/importer-api/data/{0}/'.format(upload_id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """Ensure that administrators can delete data that isn't theirs. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) client.logout() client.login_as_admin() upload_id = UploadedData.objects.first().id response = client.delete('/importer-api/data/{0}/'.format(upload_id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """Tests providing a name in the configuration options. """ client = AdminClient() client.login_as_non_admin() name = 'point-with-a-date' with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True } response = client.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json') self.assertEqual(response.status_code, 200) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.title, name.replace('-', '_')) def test_api_import(self): """Tests the import api. """ client = AdminClient() client.login_as_non_admin() with open(test_file('point_with_date.geojson')) as stream: response = client.post(reverse('uploads-new'), {'file': stream}, follow=True) payload = { 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True } self.assertIsInstance( UploadLayer.objects.first().configuration_options, dict) response = client.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = client.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertIn('task', response.content) first_layer = Layer.objects.all()[0] self.assertEqual(first_layer.srid, 'EPSG:4326') self.assertEqual(first_layer.store, self.datastore.name) self.assertEqual(first_layer.storeType, 'dataStore') self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime') catalog_layer = self.catalog.get_layer(first_layer.name) self.assertIn('time', catalog_layer.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, first_layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """Test the file extension validator. """ for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions: filename = 'test.{0}'.format(extension) upload = SimpleUploadedFile(filename, '') self.assertIsNone(validate_file_extension(upload)) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """Test the file extension validator. 
""" with self.assertRaises(ValidationError): validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read( SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configs=[{ 'configureTime': False, 'convert_to_date': ['W1_OPENDAT'], 'editable': True, 'index': 0, 'name': 'Walmart', 'start_date': 'W1_OPENDAT' }]) def test_multipolygon_shapefile(self): """Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configs=[{'index': 0}]) def test_istanbul(self): """Tests shapefile with multipart polygons and non-WGS84 SR. """ result = self.generic_import('Istanbul.zip', configs=[{'index': 0}]) feature_type = self.catalog.get_resource(result.name) self.assertEqual(feature_type.projection, 'EPSG:32635') def test_houston_tx_annexations(self): """Tests Shapefile with originally unsupported EPSG Code. """ result = self.generic_import('HoustonTXAnnexations.zip', configs=[{ 'index': 0 }]) feature_type = self.catalog.get_resource(result.name) self.assertEqual(feature_type.projection, 'EPSG:2278') def test_gwc_handler(self): """Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configs=[{ 'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.catalog.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configs=[{ 'index': 0 }]) gs_layer = self.catalog.get_layer(layer.name) self.catalog._cache.clear() gs_layer.fetch() payload = self.catalog.http.request(gwc.gwc_url(gs_layer)) self.assertNotIn('regexParameterFilter', payload[1]) self.assertEqual(int(payload[0]['status']), 200) def test_utf8(self): """Tests utf8 characters in attributes """ path = test_file('china_provinces.shp') layer = self.generic_import(path) ogr = OGRImport(path) datastore, _ = ogr.open_target_datastore(ogr.target_store) sql = ("select NAME_CH from {0} where NAME_PY = 'An Zhou'".format( layer.name)) result = datastore.ExecuteSQL(sql) feature = result.GetFeature(0) self.assertEqual(feature.GetField('name_ch'), '安州') def test_non_converted_date(self): """Test converting a field as date. """ results = self.generic_import('TM_WORLD_BORDERS_2005.zip', configs=[{ 'index': 0, 'start_date': 'Year', 'configureTime': True }]) layer = self.catalog.get_layer(results.typename) self.assertIn('time', layer.resource.metadata) self.assertEqual('year', layer.resource.metadata['time'].attribute) def test_fid_field(self): """ Regression test for preserving an FID field when target layer supports it but source does not. """ self.generic_import('noaa_paleoclimate.zip', configs=[{'index': 0}]) def test_csv_with_wkb_geometry(self): """Exercise import of CSV files with multiple geometries. 
""" filenames = [ 'police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv', 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv', 'police_csv_noWKB.csv' ] for filename in filenames: self.generic_import( filename, { 'configureTime': True, 'convert_to_date': ['date_time'], 'editable': True, 'index': 0, 'name': filename.lower(), 'permissions': { 'users': { 'AnonymousUser': [ 'change_layer_data', 'download_resourcebase', 'view_resourcebase' ] } }, 'start_date': 'date_time', })
def delete(uid):
    """A function to delete a deviation map, all its tables, files and GeoServer layers.

    GET request: Renders and returns a site showing delete options.
    POST request: Gets the delete options chosen by the user and uses them to
    delete the chosen parts of the deviation map. To delete GeoServer layers
    the GeoServer configuration client library is used.
    """
    uid = uid.encode("ISO-8859-1")
    if request.method == "POST":
        dm = DevMap.query.filter_by(uid=uid).first()
        is_owner = current_user.is_authenticated() and dm.owner == current_user
        if is_owner or dm.owner == User.query.filter_by(username="******").first():
            if (dm.wmsposdevlines or dm.wmsmaxdevgrid or dm.wmsabsdevgrid
                    or dm.wmsmatchingrategrid or dm.wmsunmatchedref
                    or dm.wmsunmatchedosm or dm.wmsmatchedref
                    or dm.wmsmatchedosm or dm.wmsminlevenshtein
                    or dm.wmsmaxlevenshtein):
                cat = Catalog(gs_url + "rest")
                cat.username = gs_user
                cat.password = gs_password
                try:
                    ws = cat.get_workspace(gs_workspace)
                except socket.error as e:
                    detail = ("GeoServer is not available. Make sure that it "
                              "is running and the connection is ok.")
                    return render_template("error.html", err=e, detail=detail)
                st = cat.get_store(gs_store, ws)
                # Each tuple maps a delete checkbox to the DevMap flag that
                # records the export state; the GeoServer feature type is
                # always named odf_<uid>_<suffix>.
                targets = [
                    ("deletemaxdevgrid", "wmsmaxdevgrid", "maxdevgrid"),
                    ("deleteposdevlines", "wmsposdevlines", "posdevlines"),
                    ("deleteabsdevgrid", "wmsabsdevgrid", "absdevgrid"),
                    ("deletematchingrategrid", "wmsmatchingrategrid", "matchingrategrid"),
                    ("deleteunmatchedref", "wmsunmatchedref", "unmatchedref"),
                    ("deleteunmatchedosm", "wmsunmatchedosm", "unmatchedosm"),
                    ("deletematchedref", "wmsmatchedref", "matchedref"),
                    ("deletematchedosm", "wmsmatchedosm", "matchedosm"),
                    ("deleteminlevenshtein", "wmsminlevenshtein", "minlevenshtein"),
                    ("deletemaxlevenshtein", "wmsmaxlevenshtein", "maxlevenshtein"),
                ]
                for form_key, flag, suffix in targets:
                    if form_key in request.form or "deleteall" in request.form:
                        if getattr(dm, flag):
                            layer = cat.get_layer("odf_" + uid + "_" + suffix)
                            if layer is not None:
                                cat.delete(layer)
                            setattr(dm, flag, False)
            if "deleteall" in request.form:
                folder = os.path.join(app.config["UPLOAD_FOLDER"],
                                      secure_filename(uid))
                shutil.rmtree(folder, True)
                # Drop every table that belongs to this deviation map.
                table_suffixes = [
                    "ref", "ref_presplitted", "ref_splitted", "found",
                    "ref_junctions", "ref_points", "ref_cutpoints",
                    "ref_cutcheckpoints", "osm", "osm_presplitted",
                    "osm_splitted", "osm_junctions", "osm_points",
                    "osm_cutpoints", "osm_cutcheckpoints", "unmatchedref",
                    "unmatchedosm", "minlevenshtein", "maxlevenshtein",
                    "grid", "maxdevgrid", "matchingrategrid",
                    "deviationlines", "junction_deviationlines",
                ]
                if DEBUG:
                    table_suffixes += [
                        "osm_presplitted_cutcheckpoints",
                        "osm_presplitted_cutpoints",
                        "osm_presplitted_junctions",
                        "osm_presplitted_points",
                        "ref_corrected",
                        "ref_corrected_presplitted",
                        "ref_corrected_presplitted_cutcheckpoints",
                        "ref_corrected_presplitted_cutpoints",
                        "ref_corrected_presplitted_junction_devvec",
                        "ref_corrected_presplitted_junctions",
                        "ref_corrected_presplitted_points",
                        "result",
                    ]
                for suffix in table_suffixes:
                    db.engine.execute("drop table if exists odf_" + uid + "_" + suffix)
                db.session.delete(dm)
                db.session.commit()
                return redirect(url_for("basic.index"))
            else:
                db.session.add(dm)
                db.session.commit()
                return render_template("delete.html", uid=uid, dm=dm, error=None)
        else:
            return render_template("error.html",
                                   err="You are not allowed to delete this map!")
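# The drop-table loop above interpolates uid straight into SQL. Identifiers
# cannot be bound as query parameters, so a common safeguard is to validate
# the uid against a strict pattern first. A minimal sketch (hypothetical
# helper name):
import re

def safe_table_name(uid, suffix):
    """Build an odf_<uid>_<suffix> table name, rejecting suspicious uids."""
    if not re.match(r"^[A-Za-z0-9_]+$", uid):
        raise ValueError("invalid uid: %r" % uid)
    return "odf_%s_%s" % (uid, suffix)

# Usage: db.engine.execute("drop table if exists " + safe_table_name(uid, "ref"))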
class GsConfigTestCase(unittest.TestCase):
    """Test of the file manager"""
    gs = None
    workdir = None

    def setUp(self):
        cfg = ConfigParser.SafeConfigParser()
        cfg.read(os.path.join(TEST_DIR, "tests.cfg"))
        self.gs = GsConfig(cfg)
        self.workdir = os.path.abspath(
            os.path.join(TEST_DIR, "workdir", "data"))
        gsUrl = self.gs.config.get("GeoServer", "url")
        gsUser = self.gs.config.get("GeoServer", "user")
        gsPassword = self.gs.config.get("GeoServer", "password")
        self.direct_gs = Catalog(gsUrl, gsUser, gsPassword)

    def tearDown(self):
        # Catalog.delete() only needs an object exposing an href, so fake one
        # for the temporary style that the style tests create.
        class Req:
            href = self.gs.config.get("GeoServer", "url") + "/styles/tmp_polygon"
        if self.direct_gs.get_style("tmp_polygon"):
            self.direct_gs.delete(Req())

    ### LAYERS ###
    def test_getLayers(self):
        layers = self.gs.getLayers()
        print layers
        # TODO: Add test

    def test_getLayer(self):
        layers = self.gs.getLayers()
        layer = self.gs.getLayer(layers[0].name)
        print layer
        # TODO: Add test

    ### FEATURE STORE ###
    def test_createFeatureStore(self):
        # TODO: make sure that the source files exist

        # check / delete layer
        line_layer = self.direct_gs.get_layer("line_crs")
        if line_layer is not None:
            self.direct_gs.delete(line_layer)

        # check / delete feature type
        #
        # Unfortunately it is not possible to delete a feature type with
        # gsconfig.py. The example in
        # https://github.com/dwins/gsconfig.py/blob/master/test/catalogtests.py#L424
        # only shows how to disable it.
        #
        # FOR NOW, PLEASE REMOVE THE FEATURE TYPE "line_crs0" MANUALLY
        #
        # line_feature = self.direct_gs.get_featuretype("line_crs0")  # this function does not exist
        # if line_feature is not None:
        #     self.direct_gs.delete(line_feature)
        # TODO: remove the feature type using the gs rest api itself

        # check / delete data store
        # line_store = self.direct_gs.get_store("line_crs")
        # if line_store is not None:
        #     self.direct_gs.delete(line_store)

        self.assertIsNone(
            self.direct_gs.get_layer("line_crs"),
            "The layer line_crs already exists. Please remove it manually.")
        self.assertIsNone(
            self.direct_gs.get_store("line_crs"),
            "The store line_crs already exists. Please remove it manually.")

        self.gs.createFeatureStore(self.workdir + "/line_crs", "TestWS", "line_crs")

        # Another problem: gs.createFeatureStore DOES NOT return the resource
        # URI, hence WE DON'T know the name of the created layer!
        # TODO: use the gs rest api itself and find out the name of the
        # created layer - whether it is line_crs0, line_crs1 or what...
        self.assertIsNotNone(
            self.direct_gs.get_layer("line_crs"),
            "The layer line_crs is not there. Was it created under another name?")

    ### STYLES ###
    def test_getStyle(self):
        """Test getting a style"""
        style = self.gs.getStyle("line")
        self.assertTrue(style.find("<sld:Name>line</sld:Name>") > 0)

    def test_postStyle(self):
        """Test posting a style"""
        global style
        self.gs.postStyle("tmp_polygon", style)
        # TODO: add test
        # FIXME: UploadError: SLD file styles/tmp_polygon.sld.sld already exists.

    def test_putStyle(self):
        """Test updating a style"""
        global style
        self.gs.postStyle("tmp_polygon", style)
        self.gs.putStyle("tmp_polygon", style)
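# The tearDown above fakes an object with an href. The style handle returned
# by get_style() can also be passed to Catalog.delete() directly, as the
# catalog tests elsewhere in this collection do. A sketch (hypothetical
# helper, assuming the same direct_gs catalog):
def drop_tmp_style(catalog, name="tmp_polygon"):
    """Delete a style through the catalog if it exists."""
    style = catalog.get_style(name)
    if style is not None:
        catalog.delete(style, purge=True)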
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog("http://localhost:8080/geoserver/rest") def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual( [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update( host="localhost", port="5432", database="db", user="******", passwd="password", dbtype="postgis") self.cat.save(ds) def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update( host="localhost", port="5432", database="db", user="******", passwd="password", dbtype="postgis") self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store(ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) def testCoverageStoreCreate(self): ds = self.cat.create_coveragestore2("coverage_gsconfig") ds.data_url = "file:data/mytiff.tiff" self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = 
self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))
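# Both catalog test suites above assume a writable GeoServer with the demo
# data at http://localhost:8080/geoserver. A minimal sketch of a stand-alone
# harness for running them, assuming the module is executed directly:
if __name__ == "__main__":
    import unittest
    unittest.main()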
def export(uid):
    """Show and handle the export options, using the GeoServer REST API to
    export results.

    This function uses the GeoServer configuration client library by
    boundlessgeo, see: https://github.com/boundlessgeo/gsconfig

    To export the results, the GeoServer application has to be running and
    must be correctly set up to have access to the database; the styles must
    already be imported (see the geoserver styles folder).

    GET request: Renders and returns a page showing the export options.

    POST request: Reads the export options chosen by the user and exports the
    selected results via the GeoServer configuration client library. After
    the export, the newly created layers should be visible in the GeoServer
    web interface and the WM(T)S links should work as well. These links can
    then be used to display the WM(T)S layers in JOSM or a GIS.

    Remarks: This function will be redesigned in future versions.
    """
    uid = uid.encode("ISO-8859-1")
    if request.method == "POST":
        dm = DevMap.query.filter_by(uid=uid).first()
        if dm.owner == current_user or dm.owner == User.query.filter_by(username="******").first():
            dm.title = request.form["title"]
            dm.listed = "listed" in request.form
            cat = Catalog(gs_url + "rest")
            cat.username = gs_user
            cat.password = gs_password
            try:
                ws = cat.get_workspace(gs_workspace)
            except socket.error as e:  # was Python 2 "except socket.error, e" syntax
                db.session.add(dm)
                db.session.commit()
                return render_template("export.html", uid=uid, error=e, dm=dm)
            st = cat.get_store(gs_store, ws)

            def republish(feattype, style):
                # Delete any stale layer/feature type, publish it again and
                # attach the given default style. Helper extracted from the
                # ten copy-pasted branches of the original function; the
                # publish/delete/publish dance is kept exactly as it was.
                l = cat.get_layer(feattype)
                if l is not None:
                    cat.delete(l)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                if ft is not None:
                    cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":" + style)
                cat.save(l)

            # One entry per export option: (form field, flag attribute that is
            # read, default style, flag attribute that is written, value
            # written when the layer is *not* republished). The flag handling
            # of the original branches is preserved verbatim, including its
            # apparent copy-paste bugs: "maxdevgrid" writes True in both
            # branches, and the "unmatchedref"/"unmatchedosm" branches mix the
            # wms-prefixed and unprefixed attribute names.
            options = [
                ("maxdevgrid", "wmsmaxdevgrid", "maxdevgrid", "wmsmaxdevgrid", True),
                ("posdevlines", "wmsposdevlines", "posdevlines", "wmsposdevlines", False),
                ("absdevgrid", "wmsabsdevgrid", "absdevgrid", "wmsabsdevgrid", False),
                ("matchingrategrid", "wmsmatchingrategrid", "matchingrategrid", "wmsmatchingrategrid", False),
                ("unmatchedref", "wmsunmatchedref", "ReferenceLines", "unmatchedref", False),
                ("unmatchedosm", "unmatchedosm", "OSMLines", "wmsunmatchedosm", False),
                ("matchedref", "wmsmatchedref", "ReferenceLines", "wmsmatchedref", False),
                ("matchedosm", "wmsmatchedosm", "OSMLines", "wmsmatchedosm", False),
                ("minlevenshtein", "wmsminlevenshtein", "ReferenceLines", "wmsminlevenshtein", False),
                ("maxlevenshtein", "wmsmaxlevenshtein", "ReferenceLines", "wmsmaxlevenshtein", False),
            ]
            for field, read_attr, style, write_attr, skip_value in options:
                if field in request.form:
                    feattype = "odf_" + uid + "_" + field
                    if getattr(dm, read_attr):
                        republish(feattype, style)
                        setattr(dm, write_attr, True)
                    else:
                        setattr(dm, write_attr, skip_value)

            db.session.add(dm)
            db.session.commit()
            return render_template("finished.html", uid=uid)
        else:
            return render_template("export.html", uid=uid, error="No User", dm=dm)
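# NOTE (sketch, not part of the app above): the three-step Catalog setup in
# export() can be collapsed, since gsconfig's Catalog accepts credentials as
# constructor keywords -- the same pattern other snippets in this file use.
# gs_url, gs_user and gs_password stand for the app's config values.
def make_catalog(gs_url, gs_user, gs_password):
    from geoserver.catalog import Catalog
    # Equivalent to: cat = Catalog(gs_url + "rest"); cat.username = ...; cat.password = ...
    return Catalog(gs_url + "rest", username=gs_user, password=gs_password)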
class ArchiveAndCatalogueI(geocloud.ArchiveAndCatalogue):
    def __init__(self, com):
        if not com:
            raise RuntimeError("No communicator")
        self.com = com
        q = com.stringToProxy('IceGrid/Query')
        self.query = IceGrid.QueryPrx.checkedCast(q)
        if not self.query:
            raise RuntimeError("Invalid proxy")
        self.broker = geocloud.BrokerPrx.checkedCast(com.stringToProxy("broker"))
        self.sem = Ice.threading.RLock()  # Lock for managing the data structures
        geoserver_path = "http://localhost:80/geoserver/rest"
        self.catalog = Catalog(geoserver_path)
        self.workspace = None

    def setBroker(self, broker, current=None):
        self.broker = broker

    def log(self, log):
        if self.broker:
            self.broker.begin_appendLog("<ArchiveAndCatalogue> " + str(log))
        else:
            print(log)

    def createScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            if self.catalog is None:
                self.log("No catalog")
                raise geocloud.CreationScenarioException()
            try:
                self.workspace = self.catalog.create_workspace(scenario, scenario)
                self.workspace.enabled = True
                self.log("Created scenario %s" % (scenario))
            except Exception:
                print("Workspace was not created")
                self.log("Workspace was not created")
        except FailedRequestError:
            self.log("Request failed")
            raise geocloud.CreationScenarioException()
        finally:
            self.sem.release()

    def catalogue(self, path, store, scenario, current=None):
        if self.workspace is None:
            raise geocloud.CataloguingException()
        if self.catalog is None:
            raise geocloud.CataloguingException()
        # Acquire the lock only after the sanity checks, so the finally block
        # never releases a lock that was not taken (the original acquired
        # inside the try, after a possible raise).
        self.sem.acquire()
        try:
            # store = path.split('/')[-1]
            cv = self.catalog.create_coveragestore(store, path, self.workspace)
            self.log("%s catalogued!" % (path))
        except ConflictingDataError as e:
            self.log(e)
        except UploadError as e:
            self.log(e)
        except FailedRequestError:
            pass
        except Exception as e:
            self.log(e)
        finally:
            self.sem.release()

    def deleteScenario(self, scenario, current=None):
        self.sem.acquire()
        try:
            for layer in self.catalog.get_layers():
                self.catalog.delete(layer)
            for coverage in self.catalog.get_stores():
                for resource in coverage.get_resources():
                    self.catalog.delete(resource)  # was bare "catalog", a NameError
                self.catalog.delete(coverage)
            for ws in self.catalog.get_workspaces():
                self.catalog.delete(ws)  # was "for vk ...: catalog.delete(wk)", both names undefined
        except Exception:
            self.log("Exception while cleaning scenario")
        finally:
            self.sem.release()
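# Illustrative only: a rough driver for the servant above, assuming an Ice
# communicator is already initialized. The scenario name and file path are
# made-up example values, not part of the original code.
def demo_scenario_lifecycle(communicator):
    servant = ArchiveAndCatalogueI(communicator)
    servant.createScenario("demo_scenario")                          # one workspace per scenario
    servant.catalogue("/tmp/dem.tif", "dem_store", "demo_scenario")  # coverage store in that workspace
    servant.deleteScenario("demo_scenario")                          # wipe layers, stores and workspaces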
class GeoServerDatasetEngineEnd2EndTests(unittest.TestCase):
    def setUp(self):
        # Files
        self.tests_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        self.files_root = os.path.join(self.tests_root, 'files')

        # GeoServer
        self.gs_endpoint = TEST_GEOSERVER_DATASET_SERVICE['ENDPOINT']
        self.gs_username = TEST_GEOSERVER_DATASET_SERVICE['USERNAME']
        self.gs_password = TEST_GEOSERVER_DATASET_SERVICE['PASSWORD']
        self.catalog = GeoServerCatalog(self.gs_endpoint,
                                        username=self.gs_username,
                                        password=self.gs_password)

        # Postgis
        self.pg_username = TEST_POSTGIS_SERVICE['USERNAME']
        self.pg_password = TEST_POSTGIS_SERVICE['PASSWORD']
        self.pg_database = TEST_POSTGIS_SERVICE['DATABASE']
        self.pg_table_name = 'points'
        self.pg_host = TEST_POSTGIS_SERVICE['HOST']
        self.pg_port = TEST_POSTGIS_SERVICE['PORT']
        self.pg_url = TEST_POSTGIS_SERVICE['URL']
        self.pg_public_url = TEST_POSTGIS_SERVICE['PUBLIC_URL']

        # Setup a testing workspace
        self.workspace_name = random_string_generator(10)
        self.workspace_uri = 'http://www.tethysplatform.org/{}'.format(self.workspace_name)

        retries = 5
        while retries > 0:
            try:
                self.catalog.create_workspace(self.workspace_name, self.workspace_uri)
                break
            except AssertionError as e:
                if 'Error persisting' in str(e) and retries > 0:
                    print("WARNING: FAILED TO PERSIST WORKSPACE.")
                    retries -= 1
                else:
                    raise

        # Setup Postgis database connection
        self.public_engine = create_engine(self.pg_public_url)
        self.connection = self.public_engine.connect()
        self.transaction = self.connection.begin()

        # Create GeoServer Engine
        self.endpoint = TEST_GEOSERVER_DATASET_SERVICE['ENDPOINT']
        self.geoserver_engine = GeoServerSpatialDatasetEngine(
            endpoint=self.endpoint,
            username=TEST_GEOSERVER_DATASET_SERVICE['USERNAME'],
            password=TEST_GEOSERVER_DATASET_SERVICE['PASSWORD'])

        self.geometry_column = 'geometry'
        self.geometry_type = 'Point'
        self.srid = 4326

    def assert_valid_response_object(self, response_object):
        # Response object should be a dictionary with the keys 'success' and either
        # 'result' if success is True or 'error' if success is False
        self.assertIsInstance(response_object, dict)
        self.assertIn('success', response_object)
        if isinstance(response_object, dict) and 'success' in response_object:
            if response_object['success'] is True:
                self.assertIn('result', response_object)
            elif response_object['success'] is False:
                self.assertIn('error', response_object)

    def tearDown(self):
        # Clean up GeoServer
        workspace = self.catalog.get_workspace(self.workspace_name)
        self.catalog.delete(workspace, recurse=True, purge=True)

        # Clean up Postgis database
        self.transaction.rollback()
        self.connection.close()
        self.public_engine.dispose()

    def setup_postgis_table(self):
        """
        Creates a table in the database named "points" with two entries. The table
        has three columns: "id", "name", and "geometry". Use this table for the
        tests that require a database.
        """
        # Clean up
        delete_sql = "DROP TABLE IF EXISTS {table}".format(table=self.pg_table_name)
        self.connection.execute(delete_sql)

        # Create table
        geom_table_sql = ("CREATE TABLE IF NOT EXISTS {table} ("
                          "id integer CONSTRAINT points_primary_key PRIMARY KEY, "
                          "name varchar(20)"
                          "); "
                          "SELECT AddGeometryColumn('public', '{table}', 'geometry', 4326, 'POINT', 2);"
                          ).format(table=self.pg_table_name)
        self.connection.execute(geom_table_sql)

        insert_sql = "INSERT INTO {table} VALUES ({id}, '{name}', ST_GeomFromText('POINT({lon} {lat})', 4326));"
        rows = [
            {"id": 1, "name": "Aquaveo", "lat": 40.276039, "lon": -111.651120},
            {"id": 2, "name": "BYU", "lat": 40.252335, "lon": -111.649326},
        ]
        for r in rows:
            sql = insert_sql.format(
                table=self.pg_table_name, id=r['id'], name=r['name'], lat=r['lat'], lon=r['lon']
            )
            self.connection.execute(sql)
        self.transaction.commit()

    def test_create_shapefile_resource_base(self):
        # call methods: create_shapefile_resource, list_resources, get_resource, delete_resource

        # TEST create shapefile
        # Setup
        filename = 'test'
        shapefile_name = os.path.join(self.files_root, 'shapefile', filename)
        workspace = self.workspace_name
        store_id = random_string_generator(10)
        store_id_name = '{}:{}'.format(workspace, store_id)
        # store_id_name = store_id

        # Execute
        response = self.geoserver_engine.create_shapefile_resource(store_id=store_id_name,
                                                                   shapefile_base=shapefile_name,
                                                                   overwrite=True)
        # Validate response object
        self.assert_valid_response_object(response)

        # Should succeed
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn(store_id, r['name'])
        self.assertIn(store_id, r['store'])

        # TEST list_resources
        # Execute
        response = self.geoserver_engine.list_resources()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # layer listed
        self.assertIn(store_id, result)

        # TEST get_resource
        # Execute
        # Geoserver uses the store_id as the layer/resource name (not the filename)
        resource_id_name = '{}:{}'.format(workspace, store_id)
        response = self.geoserver_engine.get_resource(resource_id=resource_id_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertEqual(store_id, r['name'])
        self.assertIn(store_id, r['wfs']['shapefile'])

        # TEST delete_resource
        # Execute
        # In this case the resource id is the same as the store id.
        response = self.geoserver_engine.delete_resource(resource_id=resource_id_name,
                                                         store_id=store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        # TODO: delete_resource is returning a 403 error: not authorized.
        # self.assertTrue(response['success'])

    def test_create_shapefile_resource_zip(self):
        # call methods: create_shapefile_resource, list_layers, get_layer, delete_layer

        # TEST create_shapefile_resource: test1.zip
        # Setup
        shapefile_zip = os.path.join(self.files_root, 'shapefile', "test1.zip")
        shapefile = "test1"
        workspace = self.workspace_name
        store_id = random_string_generator(10)
        store_id_name = '{}:{}'.format(workspace, store_id)

        # Execute
        response = self.geoserver_engine.create_shapefile_resource(store_id=store_id_name,
                                                                   shapefile_zip=shapefile_zip,
                                                                   overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Should succeed
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        filename = os.path.splitext(os.path.basename(shapefile_zip))[0]
        self.assertIsInstance(r, dict)
        self.assertIn(filename, r['name'])
        self.assertIn(store_id, r['store'])

        # TEST list_layers
        # Execute
        response = self.geoserver_engine.list_layers()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # Get the last item from result
        layer_id = '{}:{}'.format(workspace, shapefile)

        # TEST get_layer
        # Execute
        response = self.geoserver_engine.get_layer(layer_id=layer_id, store_id=store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn(filename, r['name'])
        self.assertIn(self.workspace_name, r['name'])

        # TEST delete_layer
        response = self.geoserver_engine.delete_layer(layer_id=layer_id, store_id=store_id)
        # was asserting on the stale response from get_layer; assign the new one
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_shapefile_resource_upload(self):
        # call methods: create_shapefile_resource, list_stores, get_store, delete_store

        # TEST create_shapefile_resource
        # Use in memory file list: test.shp and friends
        # Setup
        shapefile_cst = os.path.join(self.files_root, 'shapefile', 'test.cst')
        shapefile_dbf = os.path.join(self.files_root, 'shapefile', 'test.dbf')
        shapefile_prj = os.path.join(self.files_root, 'shapefile', 'test.prj')
        shapefile_shp = os.path.join(self.files_root, 'shapefile', 'test.shp')
        shapefile_shx = os.path.join(self.files_root, 'shapefile', 'test.shx')

        # Workspace is given
        store_rand = random_string_generator(10)
        store_id = '{}:{}'.format(self.workspace_name, store_rand)

        with open(shapefile_cst, 'rb') as cst_upload,\
                open(shapefile_dbf, 'rb') as dbf_upload,\
                open(shapefile_prj, 'rb') as prj_upload,\
                open(shapefile_shp, 'rb') as shp_upload,\
                open(shapefile_shx, 'rb') as shx_upload:
            upload_list = [cst_upload, dbf_upload, prj_upload, shp_upload, shx_upload]
            response = self.geoserver_engine.create_shapefile_resource(store_id=store_id,
                                                                       shapefile_upload=upload_list,
                                                                       overwrite=True)

        # Should succeed
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn(store_rand, r['name'])
        self.assertIn(store_rand, r['store'])

        # TEST list_stores
        # Execute
        response = self.geoserver_engine.list_stores()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # store listed
        self.assertIn(store_rand, result)

        # TEST get_store
        # Execute
        response = self.geoserver_engine.get_store(store_id=store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(r['name'], store_rand)
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_store
        response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True)

        # Failure Check
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_coverage_resource_arcgrid(self):
        # call methods: create_coverage_resource, list_resources, get_resource, delete_resource

        # TEST create_coverage_resource: precip30min.zip
        store_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_name)
        expected_coverage_type = 'arcgrid'
        coverage_file_name = 'precip30min.zip'
        coverage_name = coverage_file_name.split('.')[0]
        coverage_file = os.path.join(self.files_root, "arc_sample", coverage_file_name)

        with open(coverage_file, 'rb') as coverage_upload:
            # Execute
            response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id,
                                                                      coverage_type=expected_coverage_type,
                                                                      coverage_upload=coverage_upload,
                                                                      overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Values
        self.assertEqual(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_resources
        # Execute
        response = self.geoserver_engine.list_resources()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # layer listed
        self.assertIn(coverage_name, result)

        # TEST get_resource
        # Execute
        resource_id = '{}:{}'.format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.get_resource(resource_id=resource_id, store_id=store_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']
        self.assertIn('ArcGrid', r['keywords'])
        self.assertEqual(coverage_name, r['title'])
        self.assertEqual('coverage', r['resource_type'])

        # TEST delete_resource
        # TODO: delete_resource is returning a 403 error: not authorized.
        # Execute
        resource_id = '{}:{}'.format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.delete_resource(resource_id=resource_id, store_id=store_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        # self.assertTrue(response['success'])

    def test_create_coverage_resource_grassgrid(self):
        # call methods: create_coverage_resource, list_layers, get_layer, delete_layer

        # TEST create_coverage_resource: my_grass.zip
        store_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_name)
        expected_coverage_type = 'grassgrid'
        coverage_file_name = 'my_grass.zip'
        coverage_name = coverage_file_name.split('.')[0]
        coverage_file = os.path.join(self.files_root, "grass_ascii", coverage_file_name)

        # Execute
        response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id,
                                                                  coverage_type=expected_coverage_type,
                                                                  coverage_file=coverage_file,
                                                                  overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Tests
        self.assertIn(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_layers
        # Execute
        response = self.geoserver_engine.list_layers()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # Check if layer is in list
        self.assertIn(coverage_name, result)

        # TEST get_layer
        # Execute
        layer_id = '{}:{}'.format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.get_layer(layer_id=layer_id, store_id=store_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn(store_name, r['store'])
        self.assertIn(self.workspace_name, r['name'])

        # TEST delete_layer
        response = self.geoserver_engine.delete_layer(layer_id=layer_id, store_id=store_name)
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_coverage_resource_geotiff(self):
        # adem.tif
        # call methods: create_coverage_resource, list_stores, get_store, delete_store

        # TEST create_coverage_resource
        store_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_name)
        expected_coverage_type = 'geotiff'
        coverage_file_name = 'adem.tif'
        coverage_name = coverage_file_name.split('.')[0]
        coverage_file = os.path.join(self.files_root, coverage_file_name)

        with open(coverage_file, 'rb') as coverage_upload:
            # Execute
            response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id,
                                                                      coverage_type=expected_coverage_type,
                                                                      coverage_upload=coverage_upload,
                                                                      overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Values
        self.assertEqual(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_stores
        # Execute
        response = self.geoserver_engine.list_stores()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # store listed
        self.assertIn(store_name, result)

        # TEST get_store
        # Execute
        response = self.geoserver_engine.get_store(store_id=expected_store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(r['name'], store_name)
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_store
        response = self.geoserver_engine.delete_store(store_id=expected_store_id, purge=True, recurse=True)

        # Failure Check
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_coverage_resource_world_file_tif(self):
        # Pk50095.zip
        # call methods: create_coverage_resource, list_layers, get_layer, delete_layer

        # TEST create_coverage_resource
        store_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_name)
        expected_coverage_type = 'worldimage'
        coverage_file_name = 'Pk50095.zip'
        coverage_name = coverage_file_name.split('.')[0]
        coverage_file = os.path.join(self.files_root, "img_sample", coverage_file_name)

        # Execute
        response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id,
                                                                  coverage_type=expected_coverage_type,
                                                                  coverage_file=coverage_file,
                                                                  overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Tests
        self.assertIn(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_layers
        # Execute
        response = self.geoserver_engine.list_layers()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # Check if layer is in list
        self.assertIn(coverage_name, result)

        # TEST get_layer
        # Execute
        layer_id = '{}:{}'.format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.get_layer(layer_id=layer_id, store_id=store_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn(store_name, r['store'])
        self.assertIn(self.workspace_name, r['name'])  # was a tautological assertIn(workspace, workspace)

        # TEST delete_layer
        response = self.geoserver_engine.delete_layer(layer_id=coverage_name, store_id=store_name)
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_coverage_resource_upload(self):
        # DO NOT MOCK
        # Use in memory file list: precip30min.prj & precip30min.asc
        # call methods: create_coverage_resource, list_resources, get_resource, delete_resource
        store_id_name = random_string_generator(10)
        expected_store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        expected_coverage_type = 'arcgrid'
        coverage_file_name = 'precip30min.asc'
        prj_file_name = 'precip30min.prj'
        coverage_name = coverage_file_name.split('.')[0]
        arc_sample = os.path.join(self.files_root, "arc_sample")
        coverage_file = os.path.join(arc_sample, coverage_file_name)
        prj_file = os.path.join(arc_sample, prj_file_name)

        with open(coverage_file, 'rb') as coverage_upload:
            with open(prj_file, 'rb') as prj_upload:
                upload_list = [coverage_upload, prj_upload]

                # Execute
                response = self.geoserver_engine.create_coverage_resource(store_id=expected_store_id,
                                                                          coverage_type=expected_coverage_type,
                                                                          coverage_upload=upload_list,
                                                                          overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Values
        self.assertEqual(coverage_name, r['name'])
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST list_resources
        # Execute
        response = self.geoserver_engine.list_resources()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # layer listed
        self.assertIn(coverage_name, result)

        # TEST get_resource
        # Execute
        resource_id = "{}:{}".format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.get_resource(resource_id=resource_id, store_id=store_id_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertEqual(coverage_name, r['name'])
        self.assertIn(coverage_name, r['wcs']['arcgrid'])

        # TEST delete_resource
        # TODO: delete_resource is returning a 403 error: not authorized.
        # Execute
        # In this case the resource id is the same as the filename.
        resource_id = '{}:{}'.format(self.workspace_name, coverage_name)
        response = self.geoserver_engine.delete_resource(resource_id=resource_id, store_id=store_id_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        # self.assertTrue(response['success'])

    def test_create_layer_group(self):
        # call methods: create_layer_group, list_layer_groups, get_layer_group, delete_layer_group

        # TEST create_layer_group
        # Use existing layers and styles in geoserver:
        #   layers: sf:roads, sf:bugsites, sf:streams
        #   styles: simple_roads, capitals, simple_streams

        # Do create
        # expected_layer_group_id = '{}:{}'.format(self.workspace_name, random_string_generator(10))
        expected_layer_group_id = random_string_generator(10)
        expected_layers = ['roads', 'bugsites', 'streams']
        expected_styles = ['simple_roads', 'capitals', 'simple_streams']

        # TODO: create_layer_group: fails on catalog.save() when workspace is given.
        response = self.geoserver_engine.create_layer_group(layer_group_id=expected_layer_group_id,
                                                            layers=expected_layers,
                                                            styles=expected_styles)

        # Should succeed
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

        # Validate
        result = response['result']
        self.assertEqual(result['name'], expected_layer_group_id)
        self.assertEqual(result['layers'], expected_layers)
        self.assertEqual(result['styles'], expected_styles)

        # TEST list_layer_groups
        # Execute
        response = self.geoserver_engine.list_layer_groups()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # layer group listed
        self.assertIn(expected_layer_group_id, result)

        # TEST get_layer_group
        # Execute
        response = self.geoserver_engine.get_layer_group(layer_group_id=expected_layer_group_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # List of dictionaries
        self.assertIn('workspace', r)
        self.assertEqual(None, r['workspace'])
        self.assertIn('layers', r)
        self.assertEqual(expected_layers, r['layers'])
        self.assertIn('styles', r)
        self.assertEqual(expected_styles, r['styles'])
        self.assertNotIn('dom', r)

        # TEST delete_layer_group
        # Clean up
        response = self.geoserver_engine.delete_layer_group(layer_group_id=expected_layer_group_id)
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
        # self.assertIsNone(response['result'])

    def test_create_workspace(self):
        # call methods: create_workspace, list_workspaces, get_workspace, delete_workspace

        # TEST create_workspace
        expected_workspace_id = random_string_generator(10)
        expected_uri = 'http://www.tethysplatform.org/{}'.format(expected_workspace_id)

        # Execute
        response = self.geoserver_engine.create_workspace(workspace_id=expected_workspace_id,
                                                          uri=expected_uri)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn('name', r)
        self.assertEqual(expected_workspace_id, r['name'])

        # TEST list_workspaces
        # Execute
        response = self.geoserver_engine.list_workspaces()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # workspace listed
        self.assertIn(expected_workspace_id, result)

        # TEST get_workspace
        # Execute
        response = self.geoserver_engine.get_workspace(workspace_id=expected_workspace_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(r['name'], expected_workspace_id)

        # TEST delete_workspace
        # Do delete
        response = self.geoserver_engine.delete_workspace(workspace_id=expected_workspace_id)

        # Should succeed
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
        self.assertIsNone(response['result'])

    def test_create_style(self):
        # call methods: create_style, list_styles, get_style, delete_style

        # TEST create_style
        expected_style_id_name = random_string_generator(10)
        expected_style_id = '{}:{}'.format(self.workspace_name, expected_style_id_name)
        style_file_name = 'point.sld'
        expected_sld = os.path.join(self.files_root, style_file_name)

        # Execute
        with open(expected_sld, 'r') as sld_file:
            sld_string = sld_file.read()

        # TODO: create_style: fails when overwrite is False.
        response = self.geoserver_engine.create_style(style_id=expected_style_id,
                                                      sld=sld_string,
                                                      overwrite=True)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # TEST list_styles
        # Execute
        response = self.geoserver_engine.list_styles(workspace=self.workspace_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # style listed
        self.assertIn(expected_style_id_name, result)

        # TEST get_style
        # Execute
        response = self.geoserver_engine.get_style(style_id=expected_style_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(r['name'], expected_style_id)
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_style
        # Do delete
        response = self.geoserver_engine.delete_style(style_id=expected_style_id)

        # Should succeed
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])
        self.assertIsNone(response['result'])

    def test_link_and_add_table(self):
        # call methods: link_sqlalchemy_db_to_geoserver, add_table_to_postgis_store, list_stores,
        # get_store, delete_store
        self.setup_postgis_table()

        # TEST link_sqlalchemy_db_to_geoserver
        store_id_name = random_string_generator(10)
        store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        sqlalchemy_engine = create_engine(self.pg_url)
        response = self.geoserver_engine.link_sqlalchemy_db_to_geoserver(store_id=store_id,
                                                                         sqlalchemy_engine=sqlalchemy_engine,
                                                                         docker=True)

        # Check for success response
        self.assertTrue(response['success'])
        sqlalchemy_engine.dispose()

        # TEST add_table_to_postgis_store
        # Execute
        response = self.geoserver_engine.add_table_to_postgis_store(store_id=store_id,
                                                                    table=self.pg_table_name)

        # Check for success response
        self.assertTrue(response['success'])

        # TEST list_stores
        # Execute
        response = self.geoserver_engine.list_stores()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # store listed
        self.assertIn(store_id_name, result)

        # TEST get_store
        # Execute
        response = self.geoserver_engine.get_store(store_id=store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(store_id_name, r['name'])
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_store
        response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True)

        # Failure Check
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_postgis_feature_resource(self):
        # call methods: create_postgis_feature_resource (with table), list_stores, get_store, delete_store
        self.setup_postgis_table()

        # TEST create_postgis_feature_resource (with table)
        store_id_name = random_string_generator(10)
        store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        response = self.geoserver_engine.create_postgis_feature_resource(store_id=store_id,
                                                                         host=self.pg_host,
                                                                         port=self.pg_port,
                                                                         database=self.pg_database,
                                                                         user=self.pg_username,
                                                                         password=self.pg_password,
                                                                         table=self.pg_table_name)
        self.assertTrue(response['success'])

        # TEST list_stores
        # Execute
        response = self.geoserver_engine.list_stores()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # store listed
        self.assertIn(store_id_name, result)

        # TEST get_store
        # Execute
        response = self.geoserver_engine.get_store(store_id=store_id)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertIn(store_id_name, r['name'])
        self.assertIn('workspace', r)
        self.assertEqual(self.workspace_name, r['workspace'])

        # TEST delete_store
        response = self.geoserver_engine.delete_store(store_id=store_id, purge=True, recurse=True)

        # Failure Check
        self.assert_valid_response_object(response)
        self.assertTrue(response['success'])

    def test_create_sql_view(self):
        # call methods: create_sql_view, list_resources, list_stores, list_layers
        self.setup_postgis_table()

        # TEST create_postgis_feature_resource (with table)
        store_id_name = random_string_generator(10)
        store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        response = self.geoserver_engine.create_postgis_feature_resource(store_id=store_id,
                                                                         host=self.pg_host,
                                                                         port=self.pg_port,
                                                                         database=self.pg_database,
                                                                         user=self.pg_username,
                                                                         password=self.pg_password,
                                                                         table=self.pg_table_name)
        self.assertTrue(response['success'])

        feature_type_name = random_string_generator(10)
        postgis_store_id = '{}:{}'.format(self.workspace_name, store_id_name)
        sql = "SELECT * FROM {}".format(self.pg_table_name)
        geometry_column = self.geometry_column
        geometry_type = self.geometry_type

        response = self.geoserver_engine.create_sql_view(feature_type_name=feature_type_name,
                                                         postgis_store_id=postgis_store_id,
                                                         sql=sql,
                                                         geometry_column=geometry_column,
                                                         geometry_type=geometry_type)
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)
        self.assertIn('name', r)
        self.assertIn(feature_type_name, r['name'])

        # TEST list_resources
        # Execute
        response = self.geoserver_engine.list_resources()

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        result = response['result']

        # Returns list
        self.assertIsInstance(result, list)

        # layer listed
        self.assertIn(feature_type_name, result)

        # TEST get_resource
        # Execute
        # Geoserver uses the store_id as the layer/resource name (not the filename)
        resource_id_name = '{}:{}'.format(self.workspace_name, feature_type_name)
        response = self.geoserver_engine.get_resource(resource_id=resource_id_name)

        # Validate response object
        self.assert_valid_response_object(response)

        # Success
        self.assertTrue(response['success'])

        # Extract Result
        r = response['result']

        # Type
        self.assertIsInstance(r, dict)

        # Properties
        self.assertIn('name', r)
        self.assertEqual(feature_type_name, r['name'])
        self.assertIn(feature_type_name, r['wfs']['shapefile'])

        # TEST delete_resource
        # Execute
        # In this case the resource id is the same as the store id.
        response = self.geoserver_engine.delete_resource(resource_id=resource_id_name,
                                                         store_id=store_id_name)

        # Validate response object
        self.assert_valid_response_object(response)
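# All engine methods exercised above return the same envelope that
# assert_valid_response_object() checks: a dict with a 'success' key, plus
# 'result' on success or 'error' on failure. A minimal consumer therefore
# looks like the following sketch, where "engine" stands for any configured
# GeoServerSpatialDatasetEngine instance (illustrative, not part of the tests).
def print_layers(engine):
    response = engine.list_layers()
    if response['success']:
        for layer in response['result']:
            print(layer)
    else:
        print('list_layers failed: {}'.format(response['error']))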
class ModifyingTests(unittest.TestCase): def setUp(self): self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD']) def testFeatureTypeSave(self): # test saving round trip rs = self.cat.get_resource("bugsites") old_abstract = rs.abstract new_abstract = "Not the original abstract" enabled = rs.enabled # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(new_abstract, rs.abstract) self.assertEqual(enabled, rs.enabled) # Change keywords on server rs.keywords = ["bugsites", "gsconfig"] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(["bugsites", "gsconfig"], rs.keywords) self.assertEqual(enabled, rs.enabled) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("bugsites") self.assertEqual(old_abstract, rs.abstract) def testDataStoreCreate(self): ds = self.cat.create_datastore("vector_gsconfig") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) def testPublishFeatureType(self): # Use the other test and store creation to load vector data into a database # @todo maybe load directly to database? try: self.testDataStoreCreateAndThenAlsoImportData() except FailedRequestError: pass try: lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. self.cat.delete(lyr) self.cat.delete(lyr.resource) ds = self.cat.get_store("gsconfig_import_test") # make sure it's gone self.assertIsNone(self.cat.get_layer('import')) self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326") # and now it's not self.assertIsNotNone(self.cat.get_layer('import')) finally: # tear stuff down to allow the other test to pass if we run first ds = self.cat.get_store("gsconfig_import_test") lyr = self.cat.get_layer('import') # Delete the existing layer and resource to allow republishing. 
try: if lyr: self.cat.delete(lyr) self.cat.delete(lyr.resource) if ds: self.cat.delete(ds) except Exception: pass def testDataStoreModify(self): ds = self.cat.get_store("sf") self.assertFalse("foo" in ds.connection_parameters) ds.connection_parameters = ds.connection_parameters ds.connection_parameters["foo"] = "bar" orig_ws = ds.workspace.name self.cat.save(ds) ds = self.cat.get_store("sf") self.assertTrue("foo" in ds.connection_parameters) self.assertEqual("bar", ds.connection_parameters["foo"]) self.assertEqual(orig_ws, ds.workspace.name) @drop_table('import') def testDataStoreCreateAndThenAlsoImportData(self): ds = self.cat.create_datastore("gsconfig_import_test") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test") self.cat.add_data_to_store( ds, "import", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) @drop_table('import2') def testVirtualTables(self): ds = self.cat.create_datastore("gsconfig_import_test2") ds.connection_parameters.update(**DBPARAMS) self.cat.save(ds) ds = self.cat.get_store("gsconfig_import_test2") self.cat.add_data_to_store( ds, "import2", { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' }) store = self.cat.get_store("gsconfig_import_test2") geom = JDBCVirtualTableGeometry('the_geom', 'MultiPolygon', '4326') ft_name = 'my_jdbc_vt_test' epsg_code = 'EPSG:4326' sql = "select * from import2 where 'STATE_NAME' = 'Illinois'" keyColumn = None parameters = None jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn, parameters) self.cat.publish_featuretype(ft_name, store, epsg_code, jdbc_virtual_table=jdbc_vt) # DISABLED; this test works only in the very particular case # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR # def testCoverageStoreCreate(self): # ds = self.cat.create_coveragestore2("coverage_gsconfig") # ds.data_url = "file:test/data/mytiff.tiff" # self.cat.save(ds) def testCoverageStoreModify(self): cs = self.cat.get_store("sfdem") self.assertEqual("GeoTIFF", cs.type) cs.type = "WorldImage" self.cat.save(cs) cs = self.cat.get_store("sfdem") self.assertEqual("WorldImage", cs.type) # not sure about order of test runs here, but it might cause problems # for other tests if this layer is misconfigured cs.type = "GeoTIFF" self.cat.save(cs) def testCoverageSave(self): # test saving round trip rs = self.cat.get_resource("Arc_Sample") old_abstract = rs.abstract new_abstract = "Not the original abstract" # # Change abstract on server rs.abstract = new_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(new_abstract, rs.abstract) # Restore abstract rs.abstract = old_abstract self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(old_abstract, rs.abstract) # Change metadata links on server rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")] enabled = rs.enabled self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual([ ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata") ], rs.metadata_links) self.assertEqual(enabled, rs.enabled) srs_before = set(['EPSG:4326']) srs_after = set(['EPSG:4326', 'EPSG:3785']) formats = set( ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF']) formats_after = set(["PNG", "GIF", "TIFF"]) # set and save request_srs_list self.assertEqual(set(rs.request_srs_list), srs_before, 
str(rs.request_srs_list)) rs.request_srs_list = rs.request_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(set(rs.request_srs_list), srs_after, str(rs.request_srs_list)) # set and save response_srs_list self.assertEqual(set(rs.response_srs_list), srs_before, str(rs.response_srs_list)) rs.response_srs_list = rs.response_srs_list + ['EPSG:3785'] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(set(rs.response_srs_list), srs_after, str(rs.response_srs_list)) # set and save supported_formats self.assertEqual(set(rs.supported_formats), formats, str(rs.supported_formats)) rs.supported_formats = ["PNG", "GIF", "TIFF"] self.cat.save(rs) rs = self.cat.get_resource("Arc_Sample") self.assertEqual(set(rs.supported_formats), formats_after, str(rs.supported_formats)) def testWmsStoreCreate(self): ws = self.cat.create_wmsstore("wmsstore_gsconfig") ws.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&" # noqa: E501 ws.type = "WMS" self.cat.save(ws) def testWmsLayer(self): self.cat.create_workspace("wmstest", "http://example.com/wmstest") wmstest = self.cat.get_workspace("wmstest") wmsstore = self.cat.create_wmsstore("wmsstore", wmstest) wmsstore.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&" # noqa: E501 wmsstore.type = "WMS" self.cat.save(wmsstore) wmsstore = self.cat.get_store("wmsstore") self.assertEqual(1, len(self.cat.get_stores(workspace=wmstest))) available_layers = wmsstore.get_resources(available=True) for layer in available_layers: # sanitize the layer name - validation will fail on newer geoservers name = layer.replace(':', '_') self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer) added_layers = wmsstore.get_resources() self.assertEqual(len(available_layers), len(added_layers)) changed_layer = added_layers[0] self.assertEqual(True, changed_layer.advertised) self.assertEqual(True, changed_layer.enabled) changed_layer.advertised = False changed_layer.enabled = False self.cat.save(changed_layer) self.cat._cache.clear() changed_layer = wmsstore.get_resources()[0] changed_layer.fetch() self.assertEqual(False, changed_layer.advertised) self.assertEqual(False, changed_layer.enabled) # Testing projection and projection policy changes changed_layer.projection = "EPSG:900913" changed_layer.projection_policy = "REPROJECT_TO_DECLARED" self.cat.save(changed_layer) self.cat._cache.clear() layer = self.cat.get_layer(changed_layer.name) self.assertEqual(layer.resource.projection_policy, changed_layer.projection_policy) self.assertEqual(layer.resource.projection, changed_layer.projection) def testFeatureTypeCreate(self): shapefile_plus_sidecars = shapefile_and_friends("test/data/states") expected = { 'shp': 'test/data/states.shp', 'shx': 'test/data/states.shx', 'dbf': 'test/data/states.dbf', 'prj': 'test/data/states.prj' } self.assertEqual(len(expected), len(shapefile_plus_sidecars)) for k, v in expected.items(): self.assertEqual(v, shapefile_plus_sidecars[k]) sf = self.cat.get_workspace("sf") self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf) self.assertIsNotNone(self.cat.get_resource("states_test", workspace=sf)) self.assertRaises( ConflictingDataError, lambda: self.cat.create_featurestore( "states_test", shapefile_plus_sidecars, sf)) self.assertRaises( UploadError, lambda: self.cat.create_coveragestore( "states_raster_test", 
shapefile_plus_sidecars, sf)) bogus_shp = { 'shp': 'test/data/Pk50095.tif', 'shx': 'test/data/Pk50095.tif', 'dbf': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } self.assertRaises( UploadError, lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)) lyr = self.cat.get_layer("states_test") self.cat.delete(lyr) self.assertIsNone(self.cat.get_layer("states_test")) def testCoverageCreate(self): tiffdata = { 'tiff': 'test/data/Pk50095.tif', 'tfw': 'test/data/Pk50095.tfw', 'prj': 'test/data/Pk50095.prj' } sf = self.cat.get_workspace("sf") self.cat.create_coveragestore("Pk50095", tiffdata, sf) self.assertIsNotNone(self.cat.get_resource("Pk50095", workspace=sf)) self.assertRaises( ConflictingDataError, lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)) self.assertRaises( UploadError, lambda: self.cat.create_featurestore( "Pk50095_vector", tiffdata, sf)) bogus_tiff = { 'tiff': 'test/data/states.shp', 'tfw': 'test/data/states.shx', 'prj': 'test/data/states.prj' } self.assertRaises( UploadError, lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)) self.cat.create_coveragestore_external_geotiff( "Pk50095_ext", 'file:test/data/Pk50095.tif', sf) def testLayerSave(self): # test saving round trip lyr = self.cat.get_layer("states") old_attribution = lyr.attribution new_attribution = { 'title': 'Not the original attribution', 'width': '123', 'height': '321', 'href': 'http://www.georchestra.org', 'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg', 'type': 'image/jpeg' } # change attribution on server lyr.attribution = new_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(new_attribution, lyr.attribution) # Restore attribution lyr.attribution = old_attribution self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(old_attribution, lyr.attribution) self.assertEqual(lyr.default_style.name, "population") old_default_style = lyr.default_style lyr.default_style = next( (s for s in lyr.styles if s.name == "pophatch")) lyr.styles = [old_default_style] self.cat.save(lyr) lyr = self.cat.get_layer("states") self.assertEqual(lyr.default_style.name, "pophatch") self.assertEqual([s.name for s in lyr.styles], ["population"]) def testStyles(self): # check count before tests (upload) count = len(self.cat.get_styles()) # upload new style, verify existence with open("test/fred.sld") as fred_sld: self.cat.create_style("fred", fred_sld.read()) fred = self.cat.get_style("fred") self.assertIsNotNone(fred) self.assertEqual("Fred", fred.sld_title) # replace style, verify changes with open("test/ted.sld") as ted_sld: self.cat.create_style("fred", ted_sld.read(), overwrite=True) fred = self.cat.get_style("fred") self.assertIsNotNone(fred) self.assertEqual("Ted", fred.sld_title) # delete style, verify non-existence self.cat.delete(fred, purge=True) self.assertIsNone(self.cat.get_style("fred")) # attempt creating new style with open("test/fred.sld") as fred_sld: self.cat.create_style("fred", fred_sld.read()) fred = self.cat.get_style("fred") self.assertEqual("Fred", fred.sld_title) # verify it can be found via URL and check the name f = self.cat.get_style_by_url(fred.href) self.assertIsNotNone(f) self.assertEqual(f.name, fred.name) # compare count after upload self.assertEqual(count + 1, len(self.cat.get_styles())) # attempt creating a new style without "title" with open("test/notitle.sld") as notitle_sld: self.cat.create_style("notitle", notitle_sld.read()) notitle = 
self.cat.get_style("notitle") self.assertEqual(None, notitle.sld_title) def testWorkspaceStyles(self): # upload new style, verify existence with open("test/fred.sld") as fred_sld: self.cat.create_style("jed", fred_sld.read(), workspace="topp") jed = self.cat.get_style("jed", workspace="blarny") self.assertIsNone(jed) jed = self.cat.get_style("jed", workspace="topp") self.assertIsNotNone(jed) self.assertEqual("Fred", jed.sld_title) jed = self.cat.get_style("topp:jed") self.assertIsNotNone(jed) self.assertEqual("Fred", jed.sld_title) # replace style, verify changes with open("test/ted.sld") as ted_sld: self.cat.create_style("jed", ted_sld.read(), overwrite=True, workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertIsNotNone(jed) self.assertEqual("Ted", jed.sld_title) # delete style, verify non-existence self.cat.delete(jed, purge=True) self.assertIsNone(self.cat.get_style("jed", workspace="topp")) # attempt creating new style with open("test/fred.sld") as fred_sld: self.cat.create_style("jed", fred_sld.read(), workspace="topp") jed = self.cat.get_style("jed", workspace="topp") self.assertEqual("Fred", jed.sld_title) # verify it can be found via URL and check the full name f = self.cat.get_style_by_url(jed.href) self.assertIsNotNone(f) self.assertEqual(f.fqn, jed.fqn) def testLayerWorkspaceStyles(self): # upload new style, verify existence with open("test/fred.sld") as fred_sld: self.cat.create_style("ned", fred_sld.read(), overwrite=True, workspace="topp") with open("test/fred.sld") as ted_sld: self.cat.create_style("zed", ted_sld.read(), overwrite=True, workspace="topp") ned = self.cat.get_style("ned", workspace="topp") zed = self.cat.get_style("zed", workspace="topp") self.assertIsNotNone(ned) self.assertIsNotNone(zed) lyr = self.cat.get_layer("states") lyr.default_style = ned lyr.styles = [zed] self.cat.save(lyr) self.assertEqual("topp:ned", lyr.default_style) self.assertEqual([zed], lyr.styles) lyr.refresh() if lyr.default_style is not None: self.assertEqual("topp:ned", lyr.default_style.fqn) self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles]) def testWorkspaceCreate(self): ws = self.cat.get_workspace("acme") self.assertEqual(None, ws) self.cat.create_workspace("acme", "http://example.com/acme") ws = self.cat.get_workspace("acme") self.assertEqual("acme", ws.name) def testWorkspaceDelete(self): self.cat.create_workspace("foo", "http://example.com/foo") ws = self.cat.get_workspace("foo") self.cat.delete(ws) ws = self.cat.get_workspace("foo") self.assertIsNone(ws) def testWorkspaceDefault(self): # save orig orig = self.cat.get_default_workspace() neu = self.cat.create_workspace("neu", "http://example.com/neu") try: # make sure setting it works self.cat.set_default_workspace("neu") ws = self.cat.get_default_workspace() self.assertEqual('neu', ws.name) finally: # cleanup and reset to the way things were self.cat.delete(neu) self.cat.set_default_workspace(orig.name) ws = self.cat.get_default_workspace() self.assertEqual(orig.name, ws.name) def testFeatureTypeDelete(self): pass def testCoverageDelete(self): pass def testDataStoreDelete(self): states = self.cat.get_store('states_shapefile') self.assertTrue(states.enabled) states.enabled = False self.assertFalse(states.enabled) self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assertFalse(states.enabled) states.enabled = True self.cat.save(states) states = self.cat.get_store('states_shapefile') self.assertTrue(states.enabled) def testLayerGroupSave(self): tas = 
self.cat.get_layergroup("tasmania") self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities' ], tas.layers) self.assertEqual(tas.styles, [None, None, None, None], tas.styles) tas.layers = tas.layers[:-1] tas.styles = tas.styles[:-1] self.cat.save(tas) # this verifies the local state self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) # force a refresh to check the remote state tas.refresh() self.assertEqual(tas.layers, [ 'tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads' ], tas.layers) self.assertEqual(tas.styles, [None, None, None], tas.styles) def testImageMosaic(self): """ Test case for Issue #110 """ # testing the mosaic creation name = 'cea_mosaic' with open('test/data/mosaic/cea.zip', 'rb') as data: self.cat.create_imagemosaic(name, data) # get the layer resource back self.cat._cache.clear() resource = self.cat.get_layer(name).resource self.assertIsNotNone(resource) # delete granule from mosaic coverage = name store = self.cat.get_store(name) granules = self.cat.list_granules(coverage, store) self.assertEqual(1, len(granules['features'])) granule_id = name + '.1' self.cat.mosaic_delete_granule(coverage, store, granule_id) granules = self.cat.list_granules(coverage, store) self.assertEqual(0, len(granules['features'])) """ testing external Image mosaic creation """ name = 'cea_mosaic_external' path = 'test/data/mosaic/external' self.cat.create_imagemosaic(name, path, workspace='topp') self.cat._cache.clear() resource = self.cat.get_layer("external").resource self.assertIsNotNone(resource) # add granule to mosaic granule_path = 'test/data/mosaic/granules/cea_20150102.tif' self.cat.add_granule(granule_path, name, workspace='topp') granules = self.cat.list_granules("external", name, 'topp') self.assertEqual(2, len(granules['features'])) # add external granule to mosaic granule_path = os.path.join( os.getcwd(), 'test/data/mosaic/granules/cea_20150103.zip') self.cat.add_granule(granule_path, name, workspace='topp') granules = self.cat.list_granules("external", name, 'topp') self.assertEqual(3, len(granules['features'])) # Delete store store = self.cat.get_store(name) self.cat.delete(store, purge=True, recurse=True) self.cat._cache.clear() def testTimeDimension(self): sf = self.cat.get_workspace("sf") files = shapefile_and_friends( os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date")) self.cat.create_featurestore("boxes_with_end_date", files, sf) get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer( 'boxes_with_end_date').resource # noqa: E501 # configure time as LIST resource = get_resource() timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date") resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) self.assertEqual("date", timeInfo.attribute) self.assertEqual("ISO8601", timeInfo.units) # disable time dimension timeInfo = resource.metadata['time'] timeInfo.enabled = False # since this is an xml property, it won't get written unless we modify it resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(False, timeInfo.enabled) # configure with interval, 
end_attribute and enable again timeInfo.enabled = True timeInfo.presentation = 'DISCRETE_INTERVAL' timeInfo.resolution = '3 days' timeInfo.end_attribute = 'enddate' resource.metadata = {'time': timeInfo} self.cat.save(resource) # and verify resource = get_resource() timeInfo = resource.metadata['time'] self.assertEqual(True, timeInfo.enabled) self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation) self.assertEqual('3 days', timeInfo.resolution_str()) self.assertEqual('enddate', timeInfo.end_attribute)
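# ---------------------------------------------------------------------------
# A minimal, self-contained sketch distilled from testTimeDimension above,
# showing the gsconfig pattern for enabling a time dimension on a feature
# type. The service URL and credentials are placeholders, not values taken
# from this suite.
def enable_time_dimension_sketch():
    from geoserver.catalog import Catalog
    from geoserver.support import DimensionInfo

    cat = Catalog("http://localhost:8080/geoserver/rest",  # hypothetical endpoint
                  username="admin", password="geoserver")
    resource = cat.get_layer("boxes_with_end_date").resource

    # enabled="true", presentation="LIST", units="ISO8601", keyed on the
    # attribute that holds the timestamps
    time_info = DimensionInfo("time", "true", "LIST", None, "ISO8601", None,
                              attribute="date")

    # metadata must be reassigned (not mutated in place) or the change
    # won't be serialized back to GeoServer
    resource.metadata = {'time': time_info}
    cat.save(resource)
# ---------------------------------------------------------------------------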
class UploaderTests(DjagnoOsgeoMixin):
    """
    Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {
            'database': settings['NAME'],
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': "True"
        }

        store = catalog.create_datastore(settings['NAME'], workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):
        if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..', 'importer-test-files')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest,
                           *ogc_server_settings.credentials)
        if self.cat.get_workspace('geonode') is None:
            self.cat.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):
        f = file
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f)
        res = self.import_file(filename, configuration_options=configuration_options)
        layer_results = []

        for result in res:
            if result[1].get('raster'):
                layerfile = result[0]
                layername = os.path.splitext(os.path.basename(layerfile))[0]
                layer = Layer.objects.get(name=layername)
                self.assertTrue(layerfile.endswith('.tif'))
                self.assertTrue(os.path.exists(layerfile))
                l = gdal.OpenEx(layerfile)
                self.assertEqual(l.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')

                if not filename.endswith('zip'):
                    self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields)

                layer_results.append(layer)

        return layer_results[0]

    def generic_raster_import(self, file, configuration_options=[{'index': 0}]):
        f = file
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f)
        res = self.import_file(filename, configuration_options=configuration_options)
        layerfile = res[0][0]
        layername = os.path.splitext(os.path.basename(layerfile))[0]
        layer = Layer.objects.get(name=layername)
        self.assertTrue(layerfile.endswith('.tif'))
        self.assertTrue(os.path.exists(layerfile))
        l = gdal.OpenEx(layerfile)
        self.assertEqual(l.GetDriver().ShortName, 'GTiff')
        return layer

    def test_raster(self):
        """
        Tests raster import
        """
        layer = self.generic_raster_import('test_grid.tif', configuration_options=[{'index': 0}])

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """
        layer = self.generic_import('boxes_with_year_field.shp',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """
        layer = self.generic_import('boxes_with_date.shp',
                                    configuration_options=[{'index': 0,
                                                            'convert_to_date': ['date'],
                                                            'start_date': 'date',
                                                            'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """
        Tests the import of test_boxes_with_date.gpkg.
        """
        layer = self.generic_import('boxes_with_date.gpkg',
                                    configuration_options=[{'index': 0,
                                                            'convert_to_date': ['date'],
                                                            'start_date': 'date',
                                                            'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """
        Tests the import of a multilayer vector + raster geopackage using index.
        """
        layer = self.generic_import('boxes_plus_raster.gpkg',
                                    configuration_options=[{'index': 0,
                                                            'convert_to_date': ['date'],
                                                            'start_date': 'date',
                                                            'configureTime': True},
                                                           {'index': 1},
                                                           {'index': 2},
                                                           {'index': 3},
                                                           {'index': 4},
                                                           {'index': 5},
                                                           {'index': 6},
                                                           {'index': 7}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """
        layer = self.generic_import('boxes_with_date.csv',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """
        Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding.
        """
        self.generic_import('missing-features.csv', configuration_options=[{'index': 0}])

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """
        Tests importing the same layer twice to ensure incrementing file names
        is properly handled.
        """
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files',
                                'boxes_with_date_iso_date.zip')
        gi = OGRImport(filename)
        layers1 = gi.handle({'index': 0, 'name': 'test'})
        layers2 = gi.handle({'index': 0, 'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """
        Ensure the launder function works as expected.
        """
        self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3')
        self.assertEqual(launder('Testing#'), 'testing_')
        self.assertEqual(launder(' '), '_')

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import('boxes_with_date_iso_date.zip',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """
        layer = self.generic_import('boxes_with_dates_bc.shp',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson
        """
        layer = self.generic_import('point_with_date.geojson',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])

        # OGR will name geojson layers 'ogrgeojson'; we rename to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import('boxes_with_end_date.shp',
                                    configuration_options=[{'index': 0,
                                                            'convert_to_date': ['date', 'enddate'],
                                                            'start_date': 'date',
                                                            'end_date': 'enddate',
                                                            'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        end_date_attr = filter(lambda attr: attr.attribute == 'enddate_as_date', layer.attributes)[0]

        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)
        self.generic_time_check(layer,
                                attribute=date_attr.attribute,
                                end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """
        Tests the import of us_states.kml.
        """
        # TODO: Support time in kmls.
        layer = self.generic_import('us_states.kml', configuration_options=[{'index': 0}])

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        """
        layer = self.generic_import('mojstrovka.gpx',
                                    configuration_options=[{'index': 0,
                                                            'convert_to_date': ['time'],
                                                            'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'time_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace)
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)

    def test_us_shootings_csv(self):
        """
        Tests the import of US_Shootings.csv.
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')

        filename = 'US_Shootings.csv'
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename)
        layer = self.generic_import(filename,
                                    configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])
        self.assertTrue(layer.name.startswith('us_shootings'))

        date_field = 'date'
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import("US_Civil_Rights_Sitins0.csv",
                                    configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename)
        self.generic_import(filename,
                            configuration_options=[{'index': 0, 'convert_to_date': ['date']}])

    def test_wfs(self):
        """
        Tests the import from a WFS Endpoint
        """
        wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
        gi = OGRImport(wfs)
        layers = gi.handle(configuration_options=[{'layer_name': 'og:bugsites'},
                                                  {'layer_name': 'topp:states'}])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """
        Tests the import from an ArcGIS Server FeatureServer JSON endpoint.
        """
        endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
        gi = OGRImport(endpoint)
        layers = gi.handle(configuration_options=[{'index': 0}])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = OGRImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)

        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(name=name)
        url = datastore.href.replace(".xml", '/featuretypes.xml')
        headers, response = catalog.http.request(url, "POST", data, headers)
        return response

    def test_file_add_view(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()

        # test login required for this view
        request = c.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path, reverse('uploads-list'))
        self.assertTrue(len(response.context['object_list']) == 1)

        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'empty_file.geojson')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """
        Tests the describe fields functionality.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv')

        with GDALInspector(f) as f:
            layers = f.describe_fields()

        self.assertTrue(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']],
                         ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City',
                          'Longitude', 'Latitude'])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """
        Tests the file type detection.
        """
        files = [
            (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv'), 'CSV'),
            (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson'), 'GeoJSON'),
            (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'mojstrovka.gpx'), 'GPX'),
            (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'us_states.kml'), 'KML'),
            (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_year_field.shp'), 'ESRI Shapefile'),
            (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'),
        ]
        from .models import NoDataSourceFound
        try:
            for path, file_type in files:
                with GDALInspector(path) as f:
                    self.assertEqual(f.file_type(), file_type)
        except NoDataSourceFound as e:
            print 'No data source found in: {0}'.format(path)
            raise e

    def test_configure_view(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True,
                    'editable': True,
                    'permissions': {'users': {'test': new_user_perms,
                                              'AnonymousUser': ["change_layer_data",
                                                                "download_resourcebase",
                                                                "view_resourcebase"]}}}]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username)

        perms = layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_username)

        # check user permissions
        for perm in [u'publish_resourcebase', u'change_resourcebase_permissions',
                     u'delete_resourcebase', u'change_resourcebase',
                     u'change_resourcebase_metadata', u'download_resourcebase',
                     u'view_resourcebase', u'change_layer_style',
                     u'change_layer_data']:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn('change_resourcebase_permissions', perms['users'][new_user])
        self.assertIn("change_layer_data", perms['users'][User.objects.get(username='******')])

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)

    def test_configure_view_convert_date(self):
        """
        Tests the configure view with a dataset that needs to be converted to a date.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['Date'],
                    'start_date': 'Date',
                    'configureTime': True,
                    'editable': True}]

        response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 405)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 400)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')
        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(layer.attributes.filter(attribute='date'), 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)

        # ensure a user who does not own the upload cannot configure an import from it.
        c.logout()
        c.login_as_admin()
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from osgeo_importer.models import UploadFile

        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv')

        with open(f, 'rb') as f:
            uploaded_file = SimpleUploadedFile('test_data', f.read())
            admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
            admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """
        Ensure users can delete their data.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """
        Ensure that administrators can delete data that isn't theirs.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))

        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """
        Tests providing a name in the configuration options.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'name': name,
                   'editable': True}

        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """
        Tests the import api.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'editable': True}

        self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict))

        response = c.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps([payload]),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        self.assertTrue('task' in response.content)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """
        Test the file extension validator.
        """
        for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions:
            self.assertIsNone(validate_file_extension(
                SimpleUploadedFile('test.{0}'.format(extension), '')))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """
        Test that the inspector rejects files without a recognizable geometry column.
        """
        with self.assertRaises(ValidationError):
            validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """
        Regression test for numeric field overflows in shapefiles.
        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import('Walmart.zip',
                            configuration_options=[{"configureTime": False,
                                                    "convert_to_date": ["W1_OPENDAT"],
                                                    "editable": True,
                                                    "index": 0,
                                                    "name": "Walmart",
                                                    "start_date": "W1_OPENDAT"}])

    def test_multipolygon_shapefile(self):
        """
        Tests shapefile with multipart polygons.
        """
        self.generic_import('PhoenixFirstDues.zip', configuration_options=[{'index': 0}])

    def test_non_4326_SR(self):
        """
        Tests a shapefile with a non-EPSG:4326 spatial reference.
        """
        res = self.generic_import('Istanbul.zip', configuration_options=[{'index': 0}])
        featuretype = self.cat.get_resource(res.name)
        self.assertEqual(featuretype.projection, 'EPSG:32635')

    def test_gwc_handler(self):
        """
        Tests the GeoWebCache handler
        """
        layer = self.generic_import('boxes_with_date.shp',
                                    configuration_options=[{'index': 0,
                                                            'convert_to_date': ['date'],
                                                            'start_date': 'date',
                                                            'configureTime': True}])

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.cat.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertTrue('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{'index': 0}])
        gs_layer = self.cat.get_layer(layer.name)
        self.cat._cache.clear()
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertFalse('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """
        Tests utf8 characters in attributes
        """
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files',
                                'china_provinces.shp')
        layer = self.generic_import('china_provinces.shp')
        gi = OGRImport(filename)
        ds, insp = gi.open_target_datastore(gi.target_store)
        sql = "select NAME_CH from %s where NAME_PY = 'An Zhou'" % (layer.name)
        res = ds.ExecuteSQL(sql)
        feat = res.GetFeature(0)
        self.assertEqual(feat.GetField('name_ch'), "安州")

    def test_non_converted_date(self):
        """
        Test converting a field as date.
        """
        results = self.generic_import('TM_WORLD_BORDERS_2005.zip',
                                      configuration_options=[{'index': 0,
                                                              'start_date': 'Year',
                                                              'configureTime': True}])
        layer = self.cat.get_layer(results.typename)
        self.assertTrue('time' in layer.resource.metadata)
        self.assertEqual('year', layer.resource.metadata['time'].attribute)

    def test_fid_field(self):
        """
        Regression test for preserving an FID field when the target layer
        supports it but the source does not.
        """
        self.generic_import('noaa_paleoclimate.zip', configuration_options=[{'index': 0}])

    def test_csv_with_wkb_geometry(self):
        """
        Tests problems with the CSV files with multiple geometries.
        """
        files = ['police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv',
                 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv',
                 'police_csv_noWKB.csv']

        for i in files:
            self.generic_import(i, {"configureTime": True,
                                    "convert_to_date": ["date_time"],
                                    "editable": True,
                                    "index": 0,
                                    "name": i.lower(),
                                    "permissions": {"users": {"AnonymousUser": ["change_layer_data",
                                                                                "download_resourcebase",
                                                                                "view_resourcebase"]}},
                                    "start_date": "date_time"})
class ModifyingTests(unittest.TestCase):

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            try:
                if lyr:
                    self.cat.delete(lyr)
                    self.cat.delete(lyr.resource)
                if ds:
                    self.cat.delete(ds)
            except:
                pass

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    @drop_table('import2')
    def testVirtualTables(self):
        ds = self.cat.create_datastore("gsconfig_import_test2")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test2")
        self.cat.add_data_to_store(ds, "import2", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })
        store = self.cat.get_store("gsconfig_import_test2")
        geom = JDBCVirtualTableGeometry('the_geom', 'MultiPolygon', '4326')
        ft_name = 'my_jdbc_vt_test'
        epsg_code = 'EPSG:4326'
        sql = "select * from import2 where 'STATE_NAME' = 'Illinois'"
        keyColumn = None
        parameters = None

        jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn, parameters)
        ft = self.cat.publish_featuretype(ft_name, store, epsg_code, jdbc_virtual_table=jdbc_vt)

    # DISABLED; this test works only in the very particular case
    # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR
    # def testCoverageStoreCreate(self):
    #     ds = self.cat.create_coveragestore2("coverage_gsconfig")
    #     ds.data_url = "file:test/data/mytiff.tiff"
    #     self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)
        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

        # Testing projection and projection policy changes
        changed_layer.projection = "EPSG:900913"
        changed_layer.projection_policy = "REPROJECT_TO_DECLARED"
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        layer = self.cat.get_layer(changed_layer.name)
        self.assertEqual(layer.resource.projection_policy, changed_layer.projection_policy)
        self.assertEqual(layer.resource.projection, changed_layer.projection)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

        ft_ext = self.cat.create_coveragestore_external_geotiff(
            "Pk50095_ext", 'file:test/data/Pk50095.tif', sf)

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = {
            'title': 'Not the original attribution',
            'width': '123',
            'height': '321',
            'href': 'http://www.georchestra.org',
            'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg',
            'type': 'image/jpeg'
        }

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # check count before tests (upload)
        count = len(self.cat.get_styles())

        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

        # compare count after upload
        self.assertEqual(count + 1, len(self.cat.get_styles()))

        # attempt creating a new style without "title"
        self.cat.create_style("notitle", open("test/notitle.sld").read())
        notitle = self.cat.get_style("notitle")
        self.assertEqual(None, notitle.sld_title)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers,
                         ['tasmania_state_boundaries', 'tasmania_water_bodies',
                          'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]
        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers,
                         ['tasmania_state_boundaries', 'tasmania_water_bodies',
                          'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers,
                         ['tasmania_state_boundaries', 'tasmania_water_bodies',
                          'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testImageMosaic(self):
        """
        Test case for Issue #110
        """
        # testing the mosaic creation
        name = 'cea_mosaic'
        data = open('test/data/mosaic/cea.zip', 'rb')
        self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assert_(resource is not None)

        # delete granule from mosaic
        coverage = name
        store = self.cat.get_store(name)
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)

    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)
        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer('boxes_with_end_date').resource

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date")
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)

        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)

        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)

        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)
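# ---------------------------------------------------------------------------
# The recurring pattern in ModifyingTests is a save/re-fetch round trip:
# mutate a live catalog object, save it, then fetch it again to verify that
# the change persisted server-side. A minimal sketch (URL and credentials
# are placeholders):
def save_roundtrip_sketch():
    from geoserver.catalog import Catalog

    cat = Catalog("http://localhost:8080/geoserver/rest",  # hypothetical endpoint
                  username="admin", password="geoserver")
    rs = cat.get_resource("bugsites")
    rs.abstract = "Not the original abstract"
    cat.save(rs)                         # push the change to GeoServer
    rs = cat.get_resource("bugsites")    # re-fetch to confirm it persisted
    assert rs.abstract == "Not the original abstract"
# ---------------------------------------------------------------------------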
class GeoGigUploaderBase(ImportHelper):

    def __init__(self, *args, **kwargs):
        super(GeoGigUploaderBase, self).__init__(*args, **kwargs)
        setUpModule()  # this isn't available when being used in other module

    def setUp(self):
        self.admin_user = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_user = self.create_user('non_admin', 'non_admin')
        self.catalog = Catalog(
            ogc_server_settings.internal_rest,
            *ogc_server_settings.credentials
        )
        if self.catalog.get_workspace('geonode') is None:
            self.catalog.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastoreNames = []

    def tearDown(self):
        """Clean up geoserver/geogig catalog.
        """
        # delete stores (will cascade to delete layers)
        for store_name in self.datastoreNames:
            self.catalog.delete(self.catalog.get_store(store_name), recurse=True)

        # delete repository reference in geoserver
        for store_name in self.datastoreNames:
            self.remove_geogig_repo(store_name)

        # geoserver can leave connections open - HACK HACK HACK
        self.free_geogig_connections()

    # HACK HACK HACK -- sometimes connections from geoserver to geogig are left
    # open. This kills the postgresql backend!
    # This is a major hammer. Once geoserver/geogig are better at cleaning up,
    # remove this.
    def free_geogig_connections(self):
        with db.connections["geogig"].cursor() as c:
            c.execute(
                "select pg_terminate_backend(pid) from pg_stat_activity where application_name = 'PostgreSQL JDBC Driver' or application_name='geogig'")

    # aggressive delete of the repo (mostly cleans up after itself)
    # call the geogig rest API DELETE
    def remove_geogig_repo(self, ref_name):
        username = ogc_server_settings.credentials.username
        password = ogc_server_settings.credentials.password
        url = ogc_server_settings.rest
        http = httplib2.Http(disable_ssl_certificate_validation=False)
        http.add_credentials(username, password)
        netloc = urlparse(url).netloc
        http.authorizations.append(
            httplib2.BasicAuthentication(
                (username, password),
                netloc,
                url,
                {},
                None,
                None,
                http
            ))
        rest_url = ogc_server_settings.LOCATION + \
            "geogig/repos/" + ref_name + "/delete.json"
        resp, content = http.request(rest_url, 'GET')
        response = json.loads(content)
        token = response["response"]["token"]
        rest_url = ogc_server_settings.LOCATION + \
            "geogig/repos/" + ref_name + "?token=" + token
        resp, content = http.request(rest_url, 'DELETE')

    # convenience method to load in the test dataset
    # return a (geonode) layer
    # the layer will be in Geoserver and Geonode
    # self.catalog.get_layer(layer.name) -- to get the Geoserver Layer
    def fully_import_file(self, path, fname, start_time_column, end_time_column=None):
        # setup time
        if end_time_column is None:
            time_config = {'convert_to_date': [start_time_column],
                           'start_date': start_time_column,
                           'configureTime': True}
        else:
            time_config = {'convert_to_date': [start_time_column, end_time_column],
                           'start_date': start_time_column,
                           'end_date': end_time_column,
                           'configureTime': True}

        name = os.path.splitext(fname)[0] + "_" + str(uuid.uuid1())[:8]
        self.datastoreNames.append(name)  # remember for future deletion

        full_fname = os.path.join(path, fname)
        configs = self.prepare_file_for_import(full_fname)
        configs[0].update({'name': name})
        configs[0].update({'layer_name': name})
        configs[0].update(time_config)

        # configure the datastore/repo
        configs[0]['geoserver_store'] = {}
        configs[0]['geoserver_store']['type'] = 'geogig'
        configs[0]['geoserver_store']['name'] = name
        configs[0]['geoserver_store']['create'] = 'true'
        configs[0]['geoserver_store']['branch'] = 'master'
        configs[0]['geoserver_store']['geogig_repository'] = "geoserver://" + name

        result = self.generic_import(fname, path=path, configs=configs)
        return result

    def import_file(self, path, configs=None):
        """Imports the file.
        """
        if configs is None:
            configs = []
        self.assertTrue(os.path.exists(path), path)

        # run ogr2ogr
        ogr = OGRImport(path)
        layers = ogr.handle(configuration_options=configs)

        return layers

    def generic_import(self, filename, path, configs=None):
        if configs is None:
            configs = [{'index': 0}]

        path = os.path.join(path, filename)
        results = self.import_file(path, configs=configs)
        layer_results = []
        for result in results:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertTrue(layer.store in self.datastoreNames)
            self.assertEqual(layer.storeType, 'dataStore')

            if not path.endswith('zip'):
                self.assertGreaterEqual(
                    layer.attributes.count(),
                    DataSource(path)[0].num_fields
                )

            layer_results.append(layer)

        return layer_results[0]

    def prepare_file_for_import(self, filepath):
        """ Prepares the file path provided for import; performs some housekeeping,
        uploads & configures the file.

        Returns a list of dicts of the form
        {'index': <layer_index>, 'upload_layer_id': <upload_layer_id>}
        these may be used as configuration options for importing all of the
        layers in the file.
        """
        # Make a copy of the test file, as it's removed in configure_upload()
        filename = os.path.basename(filepath)
        tmppath = os.path.join('/tmp', filename)
        shutil.copy(filepath, tmppath)

        # upload & configure_upload expect closed file objects
        # This is heritage from originally being closely tied to a view passing
        # request.Files
        of = open(tmppath, 'rb')
        of.close()
        files = [of]

        uploaded_data = self.upload(files, self.admin_user)
        self.configure_upload(uploaded_data, files)
        configs = [{'index': l.index, 'upload_layer_id': l.id}
                   for l in uploaded_data.uploadlayer_set.all()]
        return configs

    def create_user(self, username, password, **kwargs):
        """Convenience method for creating users.
        """
        user, created = User.objects.get_or_create(username=username, **kwargs)

        if created:
            user.set_password(password)
            user.save()

        return user
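# ---------------------------------------------------------------------------
# remove_geogig_repo() above relies on GeoGig's two-step delete handshake: a
# GET to /geogig/repos/<repo>/delete.json returns a one-time token, which is
# then echoed back on the actual DELETE. A sketch of just that exchange;
# `http` is an authenticated httplib2.Http and `base_url` stands in for
# ogc_server_settings.LOCATION.
def delete_geogig_repo_sketch(http, base_url, repo_name):
    import json

    # step 1: request a delete token for the repository
    resp, content = http.request(
        base_url + "geogig/repos/" + repo_name + "/delete.json", 'GET')
    token = json.loads(content)["response"]["token"]

    # step 2: perform the delete, echoing the token back
    return http.request(
        base_url + "geogig/repos/" + repo_name + "?token=" + token, 'DELETE')
# ---------------------------------------------------------------------------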
class UploaderTests(MapStoryTestMixin):
    """
    Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {
            'database': settings['NAME'],
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': "True"
        }

        store = catalog.create_datastore(settings['NAME'], workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):
        if not os.path.exists(os.path.join(os.path.split(__file__)[0], 'test_ogr')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest,
                           *ogc_server_settings.credentials)
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):
        f = file
        filename = os.path.join(os.path.dirname(__file__), 'test_ogr', f)
        res = self.import_file(filename, configuration_options=configuration_options)
        layer = Layer.objects.get(name=res[0][0])
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')

        if not filename.endswith('zip'):
            self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields)

        # make sure we have at least one dateTime attribute
        attribute_types = [n.attribute_type for n in layer.attributes.all()]
        self.assertTrue('xsd:dateTime' in attribute_types or 'xsd:date' in attribute_types)

        return layer

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """
        layer = self.generic_import('boxes_with_year_field.shp',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """
        layer = self.generic_import('boxes_with_date.shp',
                                    configuration_options=[{'index': 0,
                                                            'convert_to_date': ['date'],
                                                            'start_date': 'date',
                                                            'configureTime': True}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """
        layer = self.generic_import('boxes_with_date.csv',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_schema_csv(self):
        """
        Tests a CSV from schema download.
        """
        layer = self.generic_import('schema_download.csv',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """
        Tests importing the same layer twice to ensure incrementing file names
        is properly handled.
        """
        filename = os.path.join(os.path.dirname(__file__), 'test_ogr',
                                'boxes_with_date_iso_date.zip')
        gi = GDALImport(filename)
        layers1 = gi.handle({'index': 0, 'name': 'test'})
        layers2 = gi.handle({'index': 0, 'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import('boxes_with_date_iso_date.zip',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """
        layer = self.generic_import('boxes_with_dates_bc.shp',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson.
        """
        layer = self.generic_import('point_with_date.geojson',
                                    configuration_options=[{'index': 0, 'convert_to_date': ['date']}])

        # OGR will name geojson layers 'ogrgeojson'; we rename to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        """ Tests the import of boxes_with_end_date.shp.

        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import('boxes_with_end_date.shp')
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        end_date_attr = filter(lambda attr: attr.attribute == 'enddate', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:date')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:date')

        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute,
                                end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """ Tests the import of us_states.kml. """
        # TODO: Support time in KMLs.
        self.generic_import('us_states.kml')

    def test_mojstrovka_gpx(self):
        """ Tests the import of mojstrovka.gpx.

        This layer has a time field that is typed correctly.
        """
        layer = self.generic_import('mojstrovka.gpx')
        date_attr = filter(lambda attr: attr.attribute == 'time', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """ Convenience method to run generic tests on time layers. """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace)
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)

    def test_us_shootings_csv(self):
        """ Tests the import of US_shootings.csv. """
        filename = 'US_shootings.csv'
        layer = self.generic_import(filename,
                                    configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])
        self.assertEqual(layer.name, 'us_shootings')
        date_field = 'date'

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """ Tests the import of US_Civil_Rights_Sitins0.csv. """
        filename = 'US_Civil_Rights_Sitins0.csv'
        self.generic_import(filename,
                            configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])

    def get_layer_names(self, in_file):
        """ Gets layer names from a data source. """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        """ Tests the import of point_with_date.geojson. """
        self.generic_import('point_with_date.geojson',
                            configuration_options=[{'index': 0, 'convert_to_date': ['date']}])

    def import_file(self, in_file, configuration_options=None):
        """ Imports the file. """
        if configuration_options is None:
            configuration_options = []
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = GDALImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)
        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """ Exposes a PostGIS feature type in geoserver.
""" headers = {"Content-type": "application/xml"} data = "<featureType><name>{name}</name></featureType>".format(name=name) url = datastore.href.replace(".xml", '/featuretypes.xml'.format(name=name)) headers, response = catalog.http.request(url, "POST ", data, headers) return response def test_create_vrt(self): """ Tests the create_vrt function. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv') vrt = create_vrt(f) vrt.seek(0) output = vrt.read() self.assertTrue('name="US_shootings"' in output) self.assertTrue('<SrcDataSource>{0}</SrcDataSource>'.format(f) in output) self.assertTrue('<GeometryField encoding="PointFromColumns" x="Longitude" y="Latitude" />'.format(f) in output) self.assertEqual(os.path.splitext(vrt.name)[1], '.vrt') def test_file_add_view(self): """ Tests the file_add_view. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() # test login required for this view request = c.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertTrue(len(response.context['object_list']) == 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'empty_file.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """ Tests the file_add_view. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """ Tests the describe fields functionality. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv') fields = None with GDALInspector(f) as f: layers = f.describe_fields() self.assertTrue(layers[0]['name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude']) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """ Tests the describe fields functionality. 
""" files = ((os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv'), 'CSV'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson'), 'GeoJSON'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'mojstrovka.gpx'), 'GPX'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_states.kml'), 'KML'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_year_field.shp'), 'ESRI Shapefile'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'), ) for path, file_type in files: with GDALInspector(path) as f: self.assertEqual(f.file_type(), file_type) def test_append(self): """ Tests the configuration view. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'name': 'append', 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*) from append') self.assertEqual(1,cursor.fetchone()[0]) payload[0]['appendTo'] = 'geonode:append' f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date_2.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*) from append') self.assertEqual(2,cursor.fetchone()[0]) def test_trunc_append(self): f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'long_attr_name.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'name': 'append', 'convert_to_date': ['date_as_date'], 'start_date': 'date_as_date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*) from append') self.assertEqual(1, cursor.fetchone()[0]) payload[0]['appendTo'] = 'geonode:append' payload[0]['convert_to_date'] = ['date_as_da'] payload[0]['start_date'] = 'date_as_da' f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'long_attr_trunc.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), 
                          data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*), date from append group by date')
        result = cursor.fetchone()

        # ensure that the feature was added and the attribute was appended
        self.assertEqual(2, result[0])
        self.assertIsNotNone(result[1])

    def test_schema_append(self):
        """ Tests appending a file whose schema differs from the target layer. """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'schema_initial.zip')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'name': 'append',
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True,
                    'editable': True,
                    'permissions': {'users': {'test': new_user_perms,
                                              'AnonymousUser': ["change_layer_data",
                                                                "download_resourcebase",
                                                                "view_resourcebase"]}}}]
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*) from append')
        self.assertEqual(1, cursor.fetchone()[0])

        payload[0]['appendTo'] = 'geonode:append'
        payload[0]['convert_to_date'] = ['date']
        payload[0]['start_date'] = 'date'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'schema_append.zip')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*), date from append group by date')
        result = cursor.fetchone()

        # ensure that the feature was added and the attribute was appended
        self.assertEqual(2, result[0])
        self.assertIsNotNone(result[1])

    def test_configure_view(self):
        """ Tests the configuration view.
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username) perms = layer.get_all_level_info() user = User.objects.get(username=self.non_admin_username) # check user permissions for perm in [u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data']: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn('change_resourcebase_permissions', perms['users'][new_user]) self.assertIn("change_layer_data", perms['users'][User.objects.get(username='******')]) lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) def test_configure_view_convert_date(self): """ Tests the configure view with a dataset that needs to be converted to a date. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True}] response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 405) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 400) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(layer.attributes.filter(attribute='date'), 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. 
        c.logout()
        c.login_as_admin()
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from .models import UploadFile

        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv')

        with open(f, 'rb') as fp:
            uploaded_file = SimpleUploadedFile('test_data', fp.read())

        admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
        admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

        non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
        non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """ Ensure users can delete their data. """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        upload_id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """ Ensure that administrators can delete data that isn't theirs. """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        upload_id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_naming_an_import(self):
        """ Tests providing a name in the configuration options. """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'name': name,
                   'editable': True}
        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """ Tests the import api.
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = {'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True} self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict)) response = c.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertTrue('task' in response.content) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """ Test the file extension validator. """ for extension in IMPORTER_VALID_EXTENSIONS: self.assertIsNone(validate_file_extension(SimpleUploadedFile('test.{0}'.format(extension), ''))) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """ Test the file extension validator. """ with self.assertRaises(ValidationError): validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """ Regression test for numeric field overflows in shapefiles. # https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configuration_options=[{'index': 0, 'convert_to_date': []}]) def test_outside_bounds_regression(self): """ Regression where layers with features outside projection bounds fail. """ self.generic_import('Spring_2015.zip', configuration_options=[{'index': 0 }]) resource = self.cat.get_layer('spring_2015').resource self.assertEqual(resource.latlon_bbox, ('-180.0', '180.0', '-90.0', '90.0', 'EPSG:4326')) def test_multipolygon_shapefile(self): """ Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configuration_options=[{'index': 0}]) def test_gwc_handler(self): """ Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.cat.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertTrue('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0}]) gs_layer = self.cat.get_layer(layer.name) self.cat._cache.clear() gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertFalse('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200)
else:
    time.sleep(1)
    cat.reload()
    passedTime += 1

# Ensure advancedProjectionHandling is turned off; if it is left on,
# layers will not display correctly.
r_set_wms_options = session.put(
    geoserver_url + '/services/wms/settings',
    data='<wms><metadata><entry key="advancedProjectionHandling">false</entry></metadata></wms>',
    headers=headers_xml)

# Delete the old workspace and create a new one
if cat.get_workspace(workspace):
    if cat.get_store(projectName, workspace):
        cat.delete(cat.get_store(projectName, workspace=workspace), purge="all", recurse=True)
    cat.delete(cat.get_workspace(workspace), purge="all", recurse=True)
for layer in cat.get_layers():
    if layer.name.startswith(workspace):
        cat.delete(layer, recurse=True)
for style in cat.get_styles(workspaces=[workspace]):
    cat.delete(style, recurse=True)
cat.create_workspace(workspace, geoserver_url + '/' + workspace)

# Zip the ncFile
zfile = cfg['general']['workdir'] + '/outputFiles/data.zip'
logging.info('Writing Zipfile ' + zfile)
output = zipfile.ZipFile(zfile, 'w')
output.write(netcdfFile, projectName + '.nc', zipfile.ZIP_DEFLATED)
output.close()
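
# A hedged sketch (an addition, not in the original script) of the polling
# idiom the loop above implies: wait until the GeoServer catalog answers
# after a reload, giving up after max_wait seconds. `cat` is the gsconfig
# Catalog used throughout this script, and `time` is already imported above;
# get_workspaces() is a cheap REST call that only succeeds once GeoServer
# is responsive again.
def wait_for_catalog(cat, max_wait=60):
    passed = 0
    while passed < max_wait:
        try:
            cat.get_workspaces()
            return True
        except Exception:
            time.sleep(1)
            passed += 1
    return False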
ooitest = cat.get_workspace("ooi_test")
data = cat.get_resources(store="asd", workspace="geonode")
print data

layer = cat.get_layer("1k time varying test")
print layer.href
print layer

# drop those things not of interest
wmstest = cat.get_workspace("geonode")
try:
    wmsstore = cat.get_store("ooi")
    cat.delete(wmsstore)
except FailedRequestError:
    print "oops: store 'ooi' does not exist"

# create the data store
wmsstore = cat.create_datastore("ooi", wmstest)
wmsstore.capabilitiesURL = "http://www.geonode.org/"
wmsstore.type = "PostGIS"

# connection info
params = {
    'Connection timeout': '20',
    'Estimated extends': 'true',
    'Expose primary keys': 'false',
    'Loose bbox': 'true',
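# A hypothetical, minimal sketch (kept as comments; the values are
# placeholders, not recovered from the original) of finishing the datastore
# wiring with gsconfig once the connection parameters are assembled:
#
#   params = {
#       'host': 'localhost',
#       'port': '5432',
#       'database': 'geonode',
#       'user': 'geonode',
#       'passwd': 'geonode',
#       'dbtype': 'postgis',
#   }
#   wmsstore.connection_parameters.update(params)
#   cat.save(wmsstore)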
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assertTrue(self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        self.assertTrue(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        self.assertEqual(old_attribution, lyr.attribution)

    def testLayerDelete(self):
        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assertTrue(self.cat.get_layer("states_test") is None)

    def testWorkspaceDelete(self):
        pass

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        pass
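
    # A hedged sketch (an addition, not in the original) of how the stubbed
    # delete tests above might be filled in, mirroring testLayerDelete. The
    # workspace name is hypothetical; get_workspace() returning None for a
    # missing workspace matches how gsconfig is used elsewhere in this
    # document.
    def exampleWorkspaceDelete(self):
        ws = self.cat.create_workspace("gsconfig_delete_test", "http://example.org/delete_test")
        self.cat.delete(ws, recurse=True)
        self.assertTrue(self.cat.get_workspace("gsconfig_delete_test") is None)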