def set_style(layer_name, style):
    """Make *style* the default style of the layer *layer_name* on Geoserver."""
    catalog = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)
    target_layer = catalog.get_layer(layer_name)
    # Resolve the style name to a Style object before assigning it.
    target_layer.default_style = catalog.get_style(style)
    catalog.save(target_layer)
def _unpublish_from_geoserver(resource, geoserver_context, logger):
    """Contact Geoserver and unpublish a layer previously created as an
    ingested resource.

    Errors are logged, never raised, so callers can continue cleanup.
    """
    from geoserver.catalog import Catalog, FailedRequestError

    # WMS resources keep the layer name under 'wms_layer', WFS under 'wfs_layer'.
    if resource['format'] == WMSResource.FORMAT:
        layer_name = resource['wms_layer']
    else:
        layer_name = resource['wfs_layer']
    try:
        catalog = Catalog(
            geoserver_context['api_url'] + '/rest',
            username=geoserver_context['username'],
            password=geoserver_context['password'])
        # The layer is registered under the parent resource id, lowercased.
        target = catalog.get_layer(resource['parent_resource_id'].lower())
        catalog.delete(target)
        catalog.reload()
        logger.info('Unpublished layer %s from Geoserver' % (layer_name))
    except (AttributeError, FailedRequestError) as ex:
        # Both failure modes were handled identically in the original code.
        logger.error('Failed to unpublish layer %s: %s' % (layer_name, ex))
def addLayersToGeoserver(self, options):
    """Publish every table listed in ``self.urb`` as a Geoserver layer and
    apply its urban style.

    :param options: dict with "geoserveradmin" and "gpw" (credentials) and
        "alias" (name of the datastore / workspace to publish into).
    :return: list of dicts ``{"res_name", "res_title"}`` for published layers.
    :raises Exception: if the datastore cannot be fetched or the overall
        publication loop fails.
    """
    cat = Catalog(self.geoserver_rest_url, options["geoserveradmin"], options["gpw"])
    try:
        ds = cat.get_store(options["alias"])
    except Exception as e:
        raise Exception("Erreur de récupération du workspace")
    layers = []
    try:
        # connect to tables and create layers and correct urban styles
        for table in self.urb:
            try:
                style = self.urb[table]
                ft = cat.publish_featuretype(table, ds, "EPSG:31370", srs="EPSG:31370")
                gs_style = cat.get_style(style)
                # Fix: the original saved the feature type twice in a row,
                # issuing a redundant REST call; one save is sufficient.
                cat.save(ft)
                # NOTE(review): reads the pending-change dict after save;
                # assumes gsconfig keeps 'name' in ft.dirty — confirm.
                res_name = ft.dirty["name"]
                res_title = options["alias"] + "_" + table
                layer_name = ds.workspace.name + ":" + res_name
                new_layer = cat.get_layer(layer_name)
                new_layer.default_style = gs_style
                cat.save(new_layer)
                layers.append({"res_name": res_name, "res_title": res_title})
            except Exception as e:
                # a verifier une fois un possesion des styles
                print(str(e))
    except Exception as e:
        print(str(e))
        raise Exception("Erreur lors de la récupération des couches depuis Geoserver")
    return layers
def _unpublish_from_geoserver(resource_id, geoserver_context):
    """Delete the Geoserver layer published for *resource_id* and reload the
    server catalog so the change takes effect."""
    ctx = geoserver_context
    catalog = Catalog(
        ctx['geoserver_url'] + '/rest',
        username=ctx['geoserver_admin'],
        password=ctx['geoserver_password'])
    # Layers are registered under the lowercased resource id.
    catalog.delete(catalog.get_layer(resource_id.lower()))
    catalog.reload()
def _get_layer_style(self, resource_id):
    """Return the pretty-printed SLD body of the current layer's default style.

    NOTE(review): *resource_id* is unused — the layer comes from the request
    context (``c.layer_id``); confirm this is intentional.
    """
    geoserver_url = config['ckanext-vectorstorer.geoserver_url']
    catalog = Catalog(geoserver_url + "/rest")
    gs_layer = catalog.get_layer(c.layer_id)
    style = gs_layer._get_default_style()
    return minidom.parseString(style.sld_body).toprettyxml()
def geoserver_upload(self, input_file): print "upload", input_file path, file_ext = os.path.split(input_file) f = os.path.splitext(file_ext)[0] cat = Catalog(self.GEOSERVER_HOST, self.GEOSERVER_USER, self.GEOSERVER_PASSWORD) try: cat.create_coveragestore(name=f, data=input_file, workspace=cat.get_workspace(self.GEOSERVER_WORKSPACE), overwrite=True) except UploadError: print "map already exist" cat.delete(cat.get_layer(f)) cat.delete(cat.get_resources(f, self.GEOSERVER_WORKSPACE)[0]) cat.create_coveragestore(name=f, data=input_file, workspace=cat.get_workspace(self.GEOSERVER_WORKSPACE), overwrite=True)
def pre_save_maplayer(instance, sender, **kwargs): # If this object was saved via fixtures, # do not do post processing. if kwargs.get('raw', False): return try: c = Catalog(ogc_server_settings.rest, _user, _password) instance.local = isinstance(c.get_layer(instance.name),GsLayer) except EnvironmentError, e: if e.errno == errno.ECONNREFUSED: msg = 'Could not connect to catalog to verify if layer %s was local' % instance.name logger.warn(msg, e) else: raise e
def _submit_sld(self,sld_body): try: geoserver_url=config['ckanext-vectorstorer.geoserver_url'] cat = Catalog(geoserver_url+"/rest") layer = cat.get_layer(c.layer_id) default_style=layer._get_default_style() if default_style.name ==c.layer_id: cat.create_style(default_style.name, sld_body, overwrite=True) else: cat.create_style(c.layer_id, sld_body, overwrite=True) layer._set_default_style(c.layer_id) cat.save(layer) c.success=True except UploadError, e: c.sld_body=sld_body c.error=e
def fh_style_update(layer,filename): cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + 'rest', username=settings.OGC_SERVER['default']['USER'], password=settings.OGC_SERVER['default']['PASSWORD']) #layer_list = Layer.objects.filter(name__icontains='fh')#initial run of script includes all fhm layers for cleaning of styles in GN + GS #layer_list = Layer.objects.filter(name__icontains='fh').exclude(styles__name__icontains='fhm' #total_layers = len(layer_list) fhm_style = cat.get_style("fhm") ctr = 0 #for layer in layer_list: #print "[FH STYLE] {0}/{1} : {2} ".format(ctr,total_layers,layer.name) #delete thumbnail first because of permissions try: print "Layer thumbnail url: %s " % layer.thumbnail_url if "192" in settings.BASEURL: url = "geonode/uploaded/thumbs/layer-"+ layer.uuid + "-thumb.png" #if on local os.remove(url) else: url = "/var/www/geonode/uploaded/thumbs/layer-" +layer.uuid + "-thumb.png" #if on lipad os.remove(url) gs_layer = cat.get_layer(layer.name) print "GS LAYER: %s " % gs_layer.name gs_layer._set_default_style(fhm_style) cat.save(gs_layer) #save in geoserver ctr+=1 gs_style = cat.get_style(layer.name) print "GS STYLE: %s " % gs_style.name print "Geoserver: Will delete style %s " % gs_style.name cat.delete(gs_style) #erase in geoserver the default layer_list gn_style = Style.objects.get(name=layer.name) print "Geonode: Will delete style %s " % gn_style.name gn_style.delete()#erase in geonode layer.sld_body = fhm_style.sld_body layer.save() #save in geonode except Exception as e: print "%s" % e pass
def uploadGS(self):
    """Publish the simplified diff shapefile to Geoserver and, when
    available, apply the 'SJ:diff_style' style to the new layer."""
    shpFile = os.path.join(self.tempFolder, 'diff_simplify')
    catalog = Catalog("http://localhost:8080/geoserver/rest",
                      username="******", password="******")
    workspace = catalog.get_workspace("SJ")
    shapefileData = geoserver.util.shapefile_and_friends(shpFile)
    # TODO: decide on a layer-naming scheme (a timestamp-based name was
    # considered as an alternative to the resource id).
    layerName = self.resId
    catalog.create_featurestore(layerName, shapefileData, workspace)
    catalog.reload()
    published = catalog.get_layer(layerName)
    diffStyle = catalog.get_style('SJ:diff_style')
    if diffStyle is not None:
        published._set_default_style('SJ:diff_style')
        catalog.save(published)
layerslist.remove(f) grouplist.append(list(set(similarname))) g.write(str(grouplist)) layers_in_ws = [] styles_in_ws = [] for gl in grouplist: ws = get_layer_workspace(args.gs_user, args.gs_passw, args.geoserver, list(gl)[0]) if ws == args.workspace: styles = [] rlayers = [] layers_in_ws.append(list(gl)) for liws in list(gl): that_layer = cat.get_layer(liws) style = that_layer.default_style styles.append(style.name) styles_in_ws.append(styles) i = 0 lg_names = [] for lg in layers_in_ws: match_strg = lg[0] for string in lg: similarity = difflib.SequenceMatcher( None, string, match_strg).get_matching_blocks() match_strings = [] for match in similarity: match_strings.append(string[match.a:match.a + match.size]) match_strg = match_strings[0] lg_name = match_strings[0] + f'{random.randrange(1, 10**3):03}'
def submitFiles():
    """
    Send the information of the uploaded files to the Open Data Registration
    Tool as an encoded JSON string in a GET-request.

    The info is stored in a list of representations, according to the Open
    Data Registration Tool API:
    https://github.com/switchonproject/sip-html5-resource-registration/wiki
    """
    threddsAvailable = True
    geoserverAvailable = True
    # Check if Thredds server is online
    threddsAvailable = checkConnection(app.config['THREDDS_SERVER'],
                                       "Failed to connect to the THREDDS server at " + app.config['THREDDS_SERVER'] +
                                       ". NetCDF files will not be accessible using web services, only by HTTP download.")
    # Check if GeoServer is online
    geoserverAvailable = checkConnection(app.config['GEOSERVER'],
                                         "Failed to connect to the geoserver at " + app.config['GEOSERVER'] +
                                         ". Shapefiles will not be mapped with WMS and can not be downloaded by WFS.")
    datasetname = session['DATASETNAME']
    datasetFoldername = session['DATASETFOLDERNAME']
    generateDOI = session['GENERATEDOI']
    if request.form['submitButton'] == 'previous':
        return redirect('/?datasetname=' + datasetFoldername)
    if request.form['submitButton'] == 'next':
        datasetDir = os.path.join(app.config['BASE_UPLOAD_FOLDER'], datasetFoldername)
        # Every regular file in the dataset folder, minus bookkeeping files.
        files = [f for f in os.listdir(datasetDir) if os.path.isfile(os.path.join(datasetDir, f)) and f not in app.config['IGNORED_FILES']]
        if len(files) > 0:
            representation = {}
            result = []
            urlRoot = request.url_root.rstrip('/')  # url root without the trailing '/' (for string concatenation)
            # Store the root url of the dataset as the primary representation
            # if there are more than 1 file
            if len(files) > 1:
                representation['name'] = datasetname
                representation['description'] = "File download"
                representation['type'] = "original data"
                representation['contentlocation'] = '/'.join([urlRoot, 'data', datasetFoldername])
                representation['contenttype'] = "application/octet-stream"
                representation['function'] = "information"
                representation['protocol'] = "WWW:LINK-1.0-http--link"
                result.append(representation)
            # if there is only one file, store the direct link to this file
            if len(files) == 1:
                # NOTE(review): relies on `f` leaking out of the list
                # comprehension above — Python 2 only behavior; confirm this
                # module is not meant to run on Python 3.
                filename, fileExtension = os.path.splitext(f)
                # region Check if it is a zipped shapefile
                # if it is, ignore it (unless geoserver is unavailable),
                # otherwise the zip file is added twice
                zippedShapefile = False
                if fileExtension == '.zip' and geoserverAvailable:
                    zipFilePath = os.path.join(datasetDir, f)
                    zipFile = zipfile.ZipFile(zipFilePath, 'r')
                    filesInZip = zipFile.namelist()
                    zipFile.close()
                    for fileInZip in filesInZip:
                        fileInZipExtension = os.path.splitext(fileInZip)[1]
                        if fileInZipExtension == '.shp':
                            zippedShapefile = True
                #endregion
                if fileExtension != '.nc' and zippedShapefile == False:
                    representation['name'] = datasetname
                    representation['description'] = "File download"
                    representation['type'] = "original data"
                    # TODO: improve file recognition
                    if fileExtension == ".zip":
                        representation['contenttype'] = "application/zip"
                    else:
                        representation['contenttype'] = "application/octet-stream"
                    representation['contentlocation'] = '/'.join([urlRoot, 'data', datasetFoldername, f])
                    representation['function'] = "download"
                    representation['protocol'] = "WWW:DOWNLOAD-1.0-http--download"
                    result.append(representation)
            #region THREDDS
            if threddsAvailable:
                # In DEVELOP mode a fixed test catalog is used.
                if app.config['DEVELOP']:
                    threddsCatalog = '/'.join((app.config['THREDDS_SERVER'], 'netcdftest', 'catalog.xml'))
                else:
                    threddsCatalog = '/'.join((app.config['THREDDS_SERVER'], datasetFoldername, 'catalog.xml'))
                try:
                    opendapUrls = threddsclient.opendap_urls(threddsCatalog)
                    for opendapUrl in opendapUrls:
                        filepath, fileExtension = os.path.splitext(opendapUrl)
                        filename = opendapUrl.split('/')[-1]
                        # check if the file is a netCDF file; if yes, store
                        # OPeNDAP service url and html download url
                        if fileExtension == '.nc':
                            # OPeNDAP service endpoint
                            representation = {}
                            representation['name'] = filename
                            representation['description'] = "Netcdf file OPeNDAP service"
                            representation['contentlocation'] = opendapUrl
                            representation['contenttype'] = "application/x-netcdf"
                            representation['type'] = "original data"
                            representation['function'] = "service"
                            representation['protocol'] = 'OPeNDAP:OPeNDAP'
                            result.append(representation)
                            # Human-browsable HTML interface of the same service
                            representation = {}
                            representation['name'] = filename
                            representation['description'] = "HTML interface OPeNDAP service"
                            representation['contentlocation'] = opendapUrl + ".html"
                            representation['contenttype'] = "application/x-netcdf"
                            representation['type'] = "original data"
                            representation['function'] = "download"
                            representation['protocol'] = 'WWW:DOWNLOAD-1.0-http--download'
                            result.append(representation)
                            # WMS view served by THREDDS (dodsC -> wms endpoint)
                            representation = {}
                            representation['name'] = filename
                            representation['description'] = "WMS service"
                            representation['contentlocation'] = opendapUrl.replace('dodsC', 'wms') + "?service=WMS&version=1.3.0&request=GetCapabilities"
                            representation['contenttype'] = "application/xml"
                            representation['type'] = "original data"
                            representation['function'] = "service"
                            representation['protocol'] = 'OGC:WMS-1.1.1-http-get-capabilities'
                            result.append(representation)
                except:
                    # NOTE(review): bare except — any failure (including
                    # programming errors) is treated as "not a catalog".
                    app.logger.info("URL: " + threddsCatalog + " is not a THREDDS catalog")
            #endregion
            # region GEOSERVER: loop through all files to check for shapefiles
            if geoserverAvailable:
                for file in files:
                    layerName = ''
                    filename, fileExtension = os.path.splitext(file)
                    if fileExtension == '.zip':
                        zipFilePath = os.path.join(datasetDir, file)
                        zipFile = zipfile.ZipFile(zipFilePath, 'r')
                        filesInZip = zipFile.namelist()
                        for fileInZip in filesInZip:
                            fileInZipName = os.path.split(fileInZip)[1]
                            fileInZipNoExtName, fileInZipExtension = os.path.splitext(fileInZipName)
                            if fileInZipExtension == '.shp':
                                # Layer name is the file without extension
                                layerName = fileInZipNoExtName
                                # Publish .zipped shapefile on geoserver, no subdirectories
                                zipFile.extractall(datasetDir)
                                # Flatten any extracted subdirectories into datasetDir.
                                # NOTE(review): this loop rebinds `files` (the
                                # outer list being iterated) — likely a latent
                                # bug; confirm intended behavior.
                                for root, dirs, files in os.walk(datasetDir):
                                    for name in files:
                                        os.rename(os.path.join(root, name), os.path.join(datasetDir, name))
                                # create workspace (201 is success; >299 is failure)
                                r = requests.post(url=app.config['GEOSERVER'] + "/rest/workspaces",
                                                  headers={'Content-type': 'text/xml'},
                                                  data="<workspace><name>" + datasetFoldername + "</name></workspace>",
                                                  auth=HTTPBasicAuth(app.config['GEOSERVER_ADMIN'], app.config['GEOSERVER_PASS']))
                                if r.status_code > 299:
                                    app.logger.error("Error in creating geoserver workspace for " + datasetFoldername +
                                                     "; Status code: " + str(r.status_code) + ", Content: " + r.content)
                                    flash("Error in creating workspace on geoserver.")
                                    return redirect(url_for('uploadData'))
                                # for testing purposes.. uploaded file is on local machine
                                # and can only publish data that is on the data mount of web app
                                if app.config['DEVELOP']:
                                    # NOTE(review): this value already carries a
                                    # "file://" prefix, and another one is
                                    # prepended below — confirm DEVELOP mode works.
                                    shapeFile = "file://D:/sala/Downloads/sld_cookbook_polygon/sld_cookbook_polygon.shp"
                                else:
                                    shapeFile = settings['GEOSERVER_DATA_DIR'] + "/" + datasetFoldername + "/" + fileInZipName
                                # Publish shapefile on the geoserver; the datastore is
                                # automatically created and has the same name as the shapefile + ds
                                r = requests.put(url=app.config['GEOSERVER'] + "/rest/workspaces/" + datasetFoldername + "/datastores/" + datasetFoldername + "_ds/external.shp",
                                                 headers={'Content-type': 'text/plain'},
                                                 data='file://' + shapeFile,
                                                 auth=HTTPBasicAuth(app.config['GEOSERVER_ADMIN'], app.config['GEOSERVER_PASS']))
                                if r.status_code > 299:
                                    app.logger.error("Error in publishing shapefile " + datasetFoldername + " on geoserver; Status code: " +
                                                     str(r.status_code) + ", Content: " + r.content)
                                    flash("Error in publishing shapefile on geoserver.")
                                    return redirect(url_for('uploadData'))
                                # WMS representation of the original data
                                representation = {}
                                representation['name'] = layerName
                                representation['description'] = "WMS service"
                                representation['contentlocation'] = app.config['GEOSERVER'] + "/" + datasetFoldername + "/" + "wms?service=WMS&version=1.1.0&request=GetCapabilities"
                                representation['contenttype'] = "application/xml"
                                representation['type'] = "original data"
                                representation['function'] = "service"
                                representation['protocol'] = 'OGC:WMS-1.1.1-http-get-capabilities'
                                result.append(representation)
                                # Same WMS endpoint, registered as aggregated data,
                                # enriched with a WKT bounding box when derivable.
                                representation = {}
                                representation['name'] = layerName
                                representation['description'] = "WMS service"
                                representation['contentlocation'] = app.config['GEOSERVER'] + "/" + datasetFoldername + "/" + "wms?service=WMS&version=1.1.0&request=GetCapabilities"
                                representation['contenttype'] = "application/xml"
                                representation['type'] = "aggregated data"
                                representation['function'] = "service"
                                representation['protocol'] = 'OGC:WMS-1.1.1-http-get-capabilities'
                                #region Get spatial extent from getcapabilities document
                                try:
                                    root = ET.fromstring(requests.get(representation['contentlocation']).content)
                                    latlonElem = root.find('Capability/Layer/Layer/LatLonBoundingBox')
                                    latlonDict = latlonElem.attrib
                                    minx = latlonDict['minx']
                                    miny = latlonDict['miny']
                                    maxx = latlonDict['maxx']
                                    maxy = latlonDict['maxy']
                                    # WKT representation:
                                    # POLYGON((minx miny, maxx miny, maxx maxy, minx maxy, minx miny))
                                    WKTString = 'POLYGON(({0} {1}, {2} {1}, {2} {3}, {0} {3}, {0} {1}))'.format(minx, miny, maxx, maxy)
                                    representation['wktboundingbox'] = WKTString
                                except:
                                    app.logger.error("Error in deriving WKT bounding box from WMS getcapabilities document")
                                #endregion
                                result.append(representation)
                                # WFS representation
                                representation = {}
                                representation['name'] = fileInZipNoExtName
                                representation['description'] = "WFS service"
                                representation['contentlocation'] = app.config['GEOSERVER'] + "/" + datasetFoldername + "/" + "ows?service=WFS&version=1.0.0&request=GetCapabilities"
                                representation['contenttype'] = "application/xml"
                                representation['type'] = "original data"
                                representation['function'] = "service"
                                representation['protocol'] = "OGC:WFS-1.0.0-http-get-capabilities"
                                result.append(representation)
                                # Plain download of the zipped shapefile itself
                                representation = {}
                                representation['name'] = file
                                representation['description'] = "Zipped shapefile"
                                representation['contentlocation'] = '/'.join([urlRoot, 'data', datasetFoldername, file])
                                representation['contenttype'] = "application/zip"
                                representation['type'] = "original data"
                                representation['function'] = "download"
                                representation['protocol'] = "WWW:DOWNLOAD-1.0-http--download"
                                representation['uploadmessage'] = "deriveSpatialIndex:shp"
                                result.append(representation)
                        # Optional sld file (preconditions, shp uploaded, workspace created)
                        for fileInZip in filesInZip:
                            fileInZipName = os.path.split(fileInZip)[1]
                            fileInZipNoExtName, fileInZipExtension = os.path.splitext(fileInZipName)
                            if fileInZipExtension == '.sld':
                                # for testing purposes.. uploaded file is on local machine
                                # and can only publish data that is on the data mount of web app
                                if app.config['DEVELOP']:
                                    sldFile = "D:/sala/Downloads/sld_cookbook_polygon/sld_cookbook_polygon.sld"
                                else:
                                    sldFile = settings['GEOSERVER_DATA_DIR'] + "/" + datasetFoldername + "/" + fileInZipName
                                # Connect to geoserver catalogue
                                cat = Catalog(app.config['GEOSERVER'] + "/rest", app.config['GEOSERVER_ADMIN'], password=app.config['GEOSERVER_PASS'])
                                # Add or Overwrite the style with the uploaded SLD body
                                with open(sldFile) as f:
                                    style = cat.create_style(fileInZipNoExtName, f.read(), overwrite=True)
                                # Link it to the layer
                                layer = cat.get_layer(layerName)
                                layer._set_default_style(fileInZipNoExtName)
                                cat.save(layer)
                        # close zip file after looping through all files in the zip file
                        zipFile.close()
            #endregion
            # region optional DOI generation via the upload helper
            if generateDOI:
                d = DOI(files, datasetDir, datasetname, logger=app.logger)
                deposition_id = d.runUpload()
            # endregion
            # Encode the representation list and hand it to the metadata tool.
            resultString = json.dumps(result)
            text = urllib.quote_plus(resultString.encode('utf-8'))
            if generateDOI:
                url = app.config['METADATA_URL'] + text + '&deposition=' + deposition_id
            else:
                url = app.config['METADATA_URL'] + text
            # store the representation
            app.logger.info("Representations of the dataset: " + resultString)
            return redirect(url)
        else:
            flash("Please upload at least one file")
            return redirect(url_for('uploadData'))
class CatalogTests(unittest.TestCase):
    """Integration tests against a local Geoserver with the default demo data."""

    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)

    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile", topp).name)
        # Fix: the original repeated this exact assertion twice in a row.
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))
        self.assertEqual("states", self.cat.get_resource("states", states).name)
        self.assertEqual("states", self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")
        # All core metadata fields must be populated on the demo layer.
        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]
        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testLayers(self):
        expected = set(["Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies", "tasmania_state_boundaries",
            "tasmania_cities", "states", "poly_landmarks", "tiger_roads", "poi",
            "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        states = self.cat.get_layer("states")
        # Fix: self.assert_("states", states.name) always passed — the first
        # argument was a truthy literal and the name was only the msg.
        self.assertEqual("states", states.name)
        self.assertTrue(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles), set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        tas = self.cat.get_layergroup("tasmania")
        # Fix: same assert_ misuse as in testLayers.
        self.assertEqual("tasmania", tas.name)
        self.assertTrue(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

    def testStyles(self):
        self.assertEqual(20, len(self.cat.get_styles()))
        self.assertEqual("population", self.cat.get_style("population").name)
class CatalogTests(unittest.TestCase):
    """Integration tests against the Geoserver instance described by GSPARAMS."""

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD'])
        self.gs_version = self.cat.get_short_version()

    def testGSVersion(self):
        version = self.cat.get_version()
        # Fix: raw string — '\d' in a plain literal is an invalid escape
        # (DeprecationWarning, SyntaxError in future Python versions).
        pat = re.compile(r'\d\.\d+')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspaces(names="topp")[-1].name)
        self.assertEqual(2, len(self.cat.get_workspaces(names=['topp', 'sde'])))
        self.assertEqual(2, len(self.cat.get_workspaces(names='topp, sde')))
        self.assertEqual("topp", self.cat.get_workspace("topp").name)
        self.assertIsNone(self.cat.get_workspace("blahblah-"))

    def testStores(self):
        self.assertEqual(0, len(self.cat.get_stores(names="nonexistentstore")))
        topp = self.cat.get_workspaces("topp")[0]
        sf = self.cat.get_workspaces("sf")[0]
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(workspaces=topp)))
        self.assertEqual(2, len(self.cat.get_stores(workspaces=sf)))
        self.assertEqual(2, len(self.cat.get_stores(workspaces='sf')))
        self.assertEqual(2, len(self.cat.get_stores(names='states_shapefile, sfdem')))
        self.assertEqual(2, len(self.cat.get_stores(names=['states_shapefile', 'sfdem'])))
        self.assertEqual("states_shapefile", self.cat.get_stores(names="states_shapefile", workspaces=topp.name)[0].name)
        self.assertEqual("states_shapefile", self.cat.get_stores(names="states_shapefile")[0].name)
        self.assertEqual("sfdem", self.cat.get_stores(names="sfdem", workspaces=sf.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_stores(names="sfdem")[0].name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)
        self.assertIsNone(self.cat.get_store("blah+blah-"))

    def testResources(self):
        topp = self.cat.get_workspaces("topp")[0]
        sf = self.cat.get_workspaces("sf")[0]
        states = self.cat.get_stores(names="states_shapefile", workspaces=topp.name)[0]
        sfdem = self.cat.get_stores(names="sfdem", workspaces=sf.name)[0]
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(2, len(self.cat.get_resources(stores=[states.name, sfdem.name])))
        self.assertEqual(11, len(self.cat.get_resources(workspaces=[topp.name, sf.name])))
        self.assertEqual("states", self.cat.get_resources(names="states", stores=states.name)[0].name)
        self.assertEqual("states", self.cat.get_resources(names="states", workspaces=topp.name)[0].name)
        self.assertEqual("states", self.cat.get_resources(names="states")[0].name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        self.assertIsNone(self.cat.get_resource("blah+1blah-2"))
        states = self.cat.get_resources(names="states")[0]
        # All core metadata fields must be populated on the demo layer.
        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]
        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)
        self.assertEqual("sfdem", self.cat.get_resources(names="sfdem", stores=sfdem.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_resources(names="sfdem", workspaces=sf.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_resources(names="sfdem")[0].name)

    def testResourcesUpdate(self):
        res_dest = self.cat.get_resources()
        count = 0
        for rd in res_dest:
            # only wms layers
            if rd.resource_type != "wmsLayer":
                continue
            # looking for same name.
            # Fix: get_resources(names=...) returns a list, never None — the
            # original checked `is not None` and then read attributes off the
            # list itself, which would raise AttributeError.
            matches = self.cat.get_resources(names=rd.name)
            if matches:
                ro = matches[0]
                rd.title = ro.title
                rd.abstract = ro.abstract
                rd.keywords = ro.keywords
                rd.projection = ro.projection
                rd.native_bbox = ro.native_bbox
                rd.latlon_bbox = ro.latlon_bbox
                rd.projection_policy = ro.projection_policy
                rd.enabled = ro.enabled
                rd.advertised = ro.advertised
                rd.metadata_links = ro.metadata_links or None
                self.cat.save(rd)
                self.cat.reload()
                count += 1

    def testLayers(self):
        # Layer names are workspace-qualified from Geoserver 2.13 on.
        if self.gs_version >= "2.13":
            expected = set([
                'sf:roads', 'sf:sfdem', 'nurc:mosaic', 'tiger:giant_polygon',
                'sf:bugsites', 'topp:states', 'sf:streams', 'tiger:poly_landmarks',
                'tiger:poi', 'topp:tasmania_water_bodies', 'tiger:tiger_roads',
                'topp:tasmania_roads', 'nurc:Pk50095', 'topp:tasmania_cities',
                'nurc:Img_Sample', 'sf:restricted', 'nurc:Arc_Sample',
                'sf:archsites', 'topp:tasmania_state_boundaries'
            ])
        else:
            expected = set([
                "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
                "bugsites", "restricted", "streams", "archsites", "roads",
                "tasmania_roads", "tasmania_water_bodies",
                "tasmania_state_boundaries", "tasmania_cities", "states",
                "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
            ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        states = self.cat.get_layer("states")
        # Fix: self.assert_("states", states.name) always passed — the first
        # argument was a truthy literal and the name was only the msg.
        self.assertEqual("states", states.name)
        self.assertTrue(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles), set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups(names=["tasmania", "tiger-ny", "spearfish"]))
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        tas = self.cat.get_layergroups(names="tasmania")[0]
        # Fix: same assert_ misuse as in testLayers.
        self.assertEqual("tasmania", tas.name)
        self.assertTrue(isinstance(tas, LayerGroup))
        if self.gs_version >= "2.13":
            self.assertEqual(tas.layers, [
                'topp:tasmania_state_boundaries',
                'topp:tasmania_water_bodies',
                'topp:tasmania_roads',
                'topp:tasmania_cities'
            ], tas.layers)
        else:
            self.assertEqual(tas.layers, [
                'tasmania_state_boundaries',
                'tasmania_water_bodies',
                'tasmania_roads',
                'tasmania_cities'
            ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)
        # Try to create a new Layer Group into the "topp" workspace
        self.assertTrue(self.cat.get_workspaces("topp")[0] is not None)
        tas2 = self.cat.create_layergroup("tasmania_reloaded", tas.layers, workspace="topp")
        self.cat.save(tas2)
        self.assertEqual(1, len(self.cat.get_layergroups(names='tasmania_reloaded', workspaces="topp")))
        tas2 = self.cat.get_layergroups(names='tasmania_reloaded', workspaces="topp")[0]
        self.assertEqual("tasmania_reloaded", tas2.name)
        self.assertTrue(isinstance(tas2, LayerGroup))
        self.assertEqual(tas2.workspace, "topp", tas2.workspace)
        if self.gs_version >= "2.13":
            self.assertEqual(tas2.layers, [
                'topp:tasmania_state_boundaries',
                'topp:tasmania_water_bodies',
                'topp:tasmania_roads',
                'topp:tasmania_cities'
            ], tas2.layers)
        else:
            self.assertEqual(tas2.layers, [
                'tasmania_state_boundaries',
                'tasmania_water_bodies',
                'tasmania_roads',
                'tasmania_cities'
            ], tas2.layers)
        self.assertEqual(tas2.styles, [None, None, None, None], tas2.styles)

    def testStyles(self):
        self.assertEqual("population", self.cat.get_styles("population")[0].name)
        self.assertEqual("popshade.sld", self.cat.get_styles("population")[0].filename)
        self.assertEqual("population", self.cat.get_styles("population")[0].sld_name)
        self.assertEqual("population", self.cat.get_style("population").sld_name)
        self.assertIsNone(self.cat.get_style("blah+#5blah-"))
        self.assertEqual(0, len(self.cat.get_styles('non-existing-style')))

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_styles("best style ever")
        self.cat.get_workspaces("best workspace ever")
        self.assertEqual(0, len(self.cat.get_stores(workspaces="best workspace ever", names="best store ever")))
        self.cat.get_layer("best layer ever")
        self.cat.get_layergroups("best layergroup ever")

    def testUnicodeUrl(self):
        """
        Tests that the geoserver.support.url function support unicode strings.
        """
        # Test the url function with unicode
        seg = ['workspaces', 'test', 'datastores', u'operaci\xf3n_repo', 'featuretypes.xml']
        u = build_url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url + "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")
        # Test the url function with normal string
        seg = ['workspaces', 'test', 'datastores', 'test-repo', 'featuretypes.xml']
        u = build_url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url + "/workspaces/test/datastores/test-repo/featuretypes.xml")
class GsConn():
    """Thin convenience wrapper around a gsconfig ``Catalog`` connection."""

    def __init__(self, host, login, password, debug=False):
        """Open a Geoserver REST connection.

        :param host: Geoserver host name (no scheme, no path).
        :param login: REST API user name.
        :param password: REST API password.
        :param debug: If True, print progress messages.
        """
        self.host = host
        self.login = login
        self.password = password
        self.debug = debug
        # Connect to server
        self.cat = Catalog("http://%s/geoserver/rest" % host, login, password)
        if self.debug is True:
            print("Connected to geoserver")

    def crate_workspace(self, name, uri, overwrite=False):
        """Creates a workspace.

        NOTE: the method name ("crate") is a historical typo kept for
        backward compatibility; prefer the ``create_workspace`` alias below.

        :param name: Workspace name.
        :param uri: Namespace URI for the workspace.
        :param overwrite: If True, silently reuse an existing workspace.
        :return: None
        """
        workspaces = [workspace.name for workspace in self.cat.get_workspaces()]
        if name in workspaces and overwrite is True:
            # NOTE: deleting the workspace would drop all associated layers,
            # so an existing workspace is deliberately reused instead.
            # ws2del = self.cat.get_workspace(name)
            # self.cat.delete(ws2del, purge=True, recurse=True)
            return None
        elif name in workspaces and overwrite is False:
            print("ERROR: Workspace %s already exists (use overwrite=True)." % name)
            # BUGFIX: the original fell through and tried to create the
            # workspace anyway after reporting the error; abort instead.
            return None
        self.cat.create_workspace(name, uri)
        if self.debug is True:
            print("Workspace %s available." % name)
        ws = self.cat.get_workspace(name)
        ws.enabled = True

    # Correctly-spelled, backward-compatible alias for crate_workspace().
    create_workspace = crate_workspace

    def create_pg_store(self, name, workspace, host, port, login, password,
                        dbname, schema, overwrite=False):
        """Creates a PostGIS datastore.

        :param name: Name of the datastore.
        :param workspace: Name of the workspace to use.
        :param host: PostGIS server host.
        :param port: PostGIS server port.
        :param login: Database user.
        :param password: Database password.
        :param dbname: Database name.
        :param schema: Database schema.
        :param overwrite: If True, silently reuse an existing store.
        :return: None
        """
        stores = [store.name for store in self.cat.get_stores()]
        if name in stores and overwrite is True:
            # NOTE: deleting the store would drop every layer bound to it,
            # so an existing store is deliberately reused instead.
            # st2del = self.cat.get_store(name)
            # self.cat.delete(st2del, purge=True, recurse=True)
            # self.cat.reload()
            return None
        elif name in stores and overwrite is False:
            print("ERROR: Store %s already exists (use overwrite=True)." % name)
            # BUGFIX: abort instead of attempting to re-create the store.
            return None
        ds = self.cat.create_datastore(name, workspace)
        ds.connection_parameters.update(host=host, port=port, user=login,
                                        passwd=password, dbtype='postgis',
                                        database=dbname, schema=schema)
        self.cat.save(ds)
        ds = self.cat.get_store(name)
        if ds.enabled is False:
            print("ERROR: Geoserver store %s not enabled" % name)
        if self.debug is True:
            print("Datastore %s created." % name)

    def publish_pg_layer(self, layer_table, layer_name, store, srid, overwrite=True):
        """Publish a PostGIS table as a layer, replacing any existing one.

        :param layer_table: Source table name (becomes the feature type).
        :param layer_name: Human-readable layer title.
        :param store: Name of the datastore holding the table.
        :param srid: Native EPSG code of the table geometry.
        :param overwrite: Unused; kept for interface compatibility.
        """
        existing_lyr = self.cat.get_layer("participatubes:%s" % layer_table)
        if existing_lyr is not None:
            print("Layer participatubes:%s already exists, deleting it." % layer_table)
            self.cat.delete(existing_lyr)
            self.cat.reload()
        ds = self.cat.get_store(store)
        ft = self.cat.publish_featuretype(layer_table, ds, 'EPSG:%s' % srid,
                                          srs='EPSG:4326')
        # Native data is declared in `srid`, served reprojected to EPSG:4326.
        ft.projection_policy = "REPROJECT_TO_DECLARED"
        ft.title = layer_name
        self.cat.save(ft)
        if ft.enabled is False:
            # BUGFIX: the original was missing the '%' operator here, which
            # made this line raise TypeError ("'str' object is not callable").
            print("ERROR: Layer %s %s %s is not enabled." %
                  (ft.workspace.name, ft.store.name, ft.title))
        if self.debug is True:
            print("Layer %s>%s>%s published." %
                  (ft.workspace.name, ft.store.name, ft.title))

    def create_style_from_sld(self, style_name, sld_file, workspace, overwrite=True):
        """Create (or replace) a style from an SLD file on disk."""
        if self.cat.get_style(style_name) is not None:
            print("Style %s already exists, deleting it." % style_name)
            style2del = self.cat.get_style(style_name)
            self.cat.delete(style2del)
        # BUGFIX: read via a context manager instead of leaking the handle.
        with open(sld_file) as sld_fh:
            sld_body = sld_fh.read()
        self.cat.create_style(style_name, sld_body, overwrite=overwrite)
        # FIXME: if ", workspace=workspace" specified can't delete style
        if self.debug is True:
            print("Style %s created in Geoserver" % style_name)

    def apply_style_to_layer(self, layer_name, style_name):
        """Apply a geoserver style to a layer as its default style."""
        gs_layer = self.cat.get_layer(layer_name)
        gs_style = self.cat.get_style(style_name)
        # FIXME: Which works better?
        # gs_layer.default_style = gs_style / gs_layer._set_default_style(gs_style)
        # FIXME: Maybe indicate workspace when saving style then name the style as "workspace:style"
        gs_layer._set_default_style(gs_style)
        self.cat.save(gs_layer)
        if self.debug is True:
            print("Style applied to %s" % layer_name)
def delete(uid):
    """Delete a deviation map: its GeoServer layers, database tables and files.

    GET request: renders a page showing delete options.
    POST request: reads the delete options chosen by the user and deletes the
    selected parts of the deviation map — GeoServer layers via the gsconfig
    client, per-map tables via raw SQL, and uploaded files from disk.

    :param uid: unique map identifier (used in table names and file paths).
    """
    uid = uid.encode("ISO-8859-1")
    if request.method == "POST":
        dm = DevMap.query.filter_by(uid=uid).first()
        # Only the map owner or the shared placeholder account may delete.
        if ((current_user.is_authenticated() and dm.owner == current_user)
                or dm.owner == User.query.filter_by(username="******").first()):
            # (form key, DevMap WMS flag attribute, table suffix,
            #  also purge the feature type from the store)
            layer_specs = [
                ("deletemaxdevgrid", "wmsmaxdevgrid", "maxdevgrid", False),
                ("deleteposdevlines", "wmsposdevlines", "posdevlines", False),
                # absdevgrid additionally re-publishes the feature type so it
                # can be deleted from the store too (behavior kept as-is).
                ("deleteabsdevgrid", "wmsabsdevgrid", "absdevgrid", True),
                ("deletematchingrategrid", "wmsmatchingrategrid",
                 "matchingrategrid", False),
                ("deleteunmatchedref", "wmsunmatchedref", "unmatchedref", False),
                ("deleteunmatchedosm", "wmsunmatchedosm", "unmatchedosm", False),
                ("deletematchedref", "wmsmatchedref", "matchedref", False),
                ("deletematchedosm", "wmsmatchedosm", "matchedosm", False),
                ("deleteminlevenshtein", "wmsminlevenshtein",
                 "minlevenshtein", False),
                ("deletemaxlevenshtein", "wmsmaxlevenshtein",
                 "maxlevenshtein", False),
            ]
            if (dm.wmsposdevlines or dm.wmsmaxdevgrid or dm.wmsabsdevgrid
                    or dm.wmsmatchingrategrid or dm.wmsunmatchedref
                    or dm.wmsunmatchedosm or dm.wmsmatchedref
                    or dm.wmsmatchedosm or dm.wmsminlevenshtein
                    or dm.wmsmaxlevenshtein):
                cat = Catalog(gs_url + "rest")
                cat.username = gs_user
                cat.password = gs_password
                ws = None
                try:
                    ws = cat.get_workspace(gs_workspace)
                except socket.error as e:
                    detail = "GeoServer is not available. Make sure that it is running and the connection is ok."
                    return render_template("error.html", err=e, detail=detail)
                st = cat.get_store(gs_store, ws)
                for form_key, flag_attr, suffix, purge_feattype in layer_specs:
                    if form_key in request.form or "deleteall" in request.form:
                        feattype = "odf_" + uid + "_" + suffix
                        if getattr(dm, flag_attr):
                            l = cat.get_layer(feattype)
                            if l is not None:
                                cat.delete(l)
                            if purge_feattype:
                                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                                if ft is not None:
                                    cat.delete(ft)
                            # BUGFIX: the unmatchedref branch used to assign
                            # the nonexistent attribute 'deleteunmatchedref'
                            # instead of clearing the WMS flag.
                            setattr(dm, flag_attr, False)
            if "deleteall" in request.form:
                folder = secure_filename(uid)
                folder = os.path.join(app.config["UPLOAD_FOLDER"], folder)
                shutil.rmtree(folder, True)
                # Suffixes of every per-map table (trailing ';' preserved
                # exactly where the original SQL strings carried one).
                table_suffixes = [
                    "_ref", "_ref_presplitted", "_ref_splitted", "_found",
                    "_ref_junctions", "_ref_points", "_ref_cutpoints",
                    "_ref_cutcheckpoints", "_osm", "_osm_presplitted",
                    "_osm_splitted", "_osm_junctions", "_osm_points",
                    "_osm_cutpoints", "_osm_cutcheckpoints",
                    "_unmatchedref;", "_unmatchedosm;", "_minlevenshtein;",
                    "_maxlevenshtein;", "_grid;", "_maxdevgrid;",
                    "_matchingrategrid;", "_deviationlines",
                    "_junction_deviationlines",
                ]
                if DEBUG:
                    # Intermediate tables only kept around in debug runs.
                    table_suffixes += [
                        "_osm_presplitted_cutcheckpoints",
                        "_osm_presplitted_cutpoints",
                        "_osm_presplitted_junctions",
                        "_osm_presplitted_points",
                        "_ref_corrected",
                        "_ref_corrected_presplitted",
                        "_ref_corrected_presplitted_cutcheckpoints",
                        "_ref_corrected_presplitted_cutpoints",
                        "_ref_corrected_presplitted_junction_devvec",
                        "_ref_corrected_presplitted_junctions",
                        "_ref_corrected_presplitted_points",
                        "_result",
                    ]
                for suffix in table_suffixes:
                    db.engine.execute("drop table if exists odf_" + uid + suffix)
            if "deleteall" not in request.form:
                # Partial delete: persist the cleared WMS flags and re-render.
                db.session.add(dm)
                db.session.commit()
                return render_template("delete.html", uid=uid, dm=dm, error=None)
            else:
                # Full delete: remove the map record entirely.
                db.session.delete(dm)
                db.session.commit()
                return redirect(url_for("basic.index"))
        else:
            return render_template("error.html", err="You are not allowed to delete this map!")
class wrap_geoserver:
    """ Geoserver (gsconfig) wrapper

    Stateful convenience wrapper: most methods stash their result on the
    instance (self.layer, self.store, self.workspace, ...) and later methods
    read that state, so call order matters (e.g. get_layer/get_store before
    get_resource or publish_layer).
    """
    def __init__(
        self, geoserver_name, username=username, password=password, easy=False
    ):
        # `geoserver_name` is either a key into the module-level REST mapping
        # or a full REST URL itself.
        if geoserver_name in list(REST.keys()):
            self.path = REST[geoserver_name]
        else:
            self.path = geoserver_name
        self.wms = self.path.replace("rest/", "wms")
        self.name = geoserver_name
        self.catalog = Catalog(self.path, username, password)
        # `easy` skips the expensive full inventory of layers, stores,
        # workspaces and styles below.
        if not easy:
            self.layers = []
            self.layer_names = []
            for layer in self.catalog.get_layers():
                self.layers.append(layer)
                self.layer_names.append(layer.name)
            self.stores = [store for store in self.catalog.get_stores()]
            self.store_names = [store.name for store in self.stores]
            styles = []
            self.workspaces = []
            self.workspace_names = []
            for workspace in self.catalog.get_workspaces():
                # Accumulate per-workspace styles, then global styles below.
                styles = styles + self.catalog.get_styles(workspace)
                # NOTE: reads the private `_name` attribute of gsconfig
                # workspace objects.
                self.workspace_names.append(workspace._name)
                self.workspaces.append(workspace)
            self.styles = styles + [style for style in self.catalog.get_styles()]
            self.style_names = [style.name for style in self.styles]

    def unpack(self, workspace_name, store_type="datastore"):
        """List layers and their default styles for every store of a
        workspace; also cached on self as '<workspace_name>_data'."""
        layers_and_styles = {}
        features = []
        workspace = self.get_workspace(workspace_name)
        if store_type == "datastore":
            store_url = workspace.datastore_url
        elif store_type == "coveragestore":
            store_url = workspace.coveragestore_url
        else:
            # NOTE(review): falls through with store_url unbound for any
            # other store_type — the loop below would raise NameError.
            print("No correct store given")
        # `get` is a module-level helper that scrapes the REST listing.
        for datastore in tqdm(get(store_url, "name")):
            url = "{}workspaces/{}/datastores/{}".format(
                self.path, workspace.name, datastore
            )
            features = features + get(url, between_quotes=True)
        for feature in features:
            # Feature entries look like file paths; strip dir and extension.
            layer_name = os.path.basename(feature).split(".")[0]
            self.get_layer(self.get_slug(workspace.name, layer_name))
            layers_and_styles[layer_name] = self.layer.default_style
        setattr(self, workspace_name + "_data", layers_and_styles)
        return layers_and_styles

    def get_layer(self, layer, easy=False):
        """Load a layer and (unless easy) cache its resource metadata."""
        self.layer = self.catalog.get_layer(layer)
        if not easy:
            self.resource = self.layer.resource
            self.layer_name = self.layer.resource.name
            self.sld_name = self.layer.default_style.name
            self.sld_body = self.layer.default_style.sld_body
            self.layer_latlon_bbox = self.layer.resource.latlon_bbox
            self.layer_title = self.layer.resource.title
            self.layer_abstract = self.layer.resource.abstract

    def get_store(self, layer):
        # NOTE: reads the store of the previously loaded self.layer; the
        # `layer` parameter is unused. `_store` is a private gsconfig field.
        self.store = self.layer.resource._store

    def get_resource(self):
        # Requires self.layer and self.store to have been set beforehand.
        self.resource = self.catalog.get_resource(self.layer.name, self.store)

    def get_workspace(self, workspace_name):
        self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = self.workspace._name
        return self.workspace

    def write_abstract(self, data, load_resource=True):
        """Set the abstract on the current resource and save it."""
        if load_resource:
            self.get_resource()
        self.resource.abstract = data
        self.catalog.save(self.resource)

    def write_title(self, title):
        """Set the title on the already-loaded resource and save it."""
        self.resource.title = title
        self.catalog.save(self.resource)

    def get_connection_parameters(self):
        self.get_resource()
        return self.resource.store.connection_parameters

    def create_workspace(self, workspace_name):
        """Create a workspace, or reuse it if the name is already taken."""
        workspace_exists = workspace_name in self.workspace_names
        if not workspace_exists:
            self.workspace = self.catalog.create_workspace(workspace_name)
        else:
            print("workspace already exists, using existing workspace")
            self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = workspace_name

    def create_postgis_datastore(self, store_name, workspace_name, pg_data):
        """Get or create a PostGIS datastore from a pg_data credentials dict
        (keys: host, port, database, username, password)."""
        try:
            self.store = self.catalog.get_store(store_name, self.workspace_name)
            print("store within workspace exists, using existing store")
        except Exception as e:
            # EAFP: missing store raises, so create it on failure.
            print(e)
            ds = self.catalog.create_datastore(store_name, workspace_name)
            ds.connection_parameters.update(
                host=pg_data["host"],
                port=pg_data["port"],
                database=pg_data["database"],
                user=pg_data["username"],
                passwd=pg_data["password"],
                dbtype="postgis",
                schema="public",
            )
            self.save(ds)
            # NOTE(review): nesting reconstructed from a collapsed original;
            # the re-fetch and name assignment are assumed to belong to the
            # creation path — confirm against upstream history.
            self.store = self.catalog.get_store(store_name, self.workspace_name)
            self.store_name = store_name

    def publish_layer(
        self, layer_name, workspace_name, overwrite=False, epsg="3857", reload=False
    ):
        """Publish a feature type from the current self.store, optionally
        deleting a pre-existing layer of the same name first."""
        layer_exists = layer_name in self.layer_names
        # if layer_name in self.workspace_layers[workspace_name]:
        slug = self.get_slug(workspace_name, layer_name)
        if overwrite and layer_exists:
            print("Layer exists, deleting layer")
            try:
                self.layer = self.catalog.get_layer(slug)
                self.delete(self.layer)
                self.reload()
                layer_exists = False
            except Exception as e:
                print(e)
                print("Layer does not exist in workspace")
                layer_exists = False
        if not layer_exists:
            feature_type = self.catalog.publish_featuretype(
                layer_name,
                self.store,
                "EPSG:{}".format(str(epsg)),
                srs="EPSG:{}".format(str(epsg)),
            )
            self.save(feature_type)
            self.feature_type = feature_type
        else:
            print("layer already exists, using existing layer")
        if reload:
            self.get_layer(slug)
        self.layer_name = layer_name

    def publish_layergroup(self, name, layers, styles=(), bounds=None, workspace=None):
        layer_group = self.catalog.create_layergroup(
            name, layers, styles, bounds, workspace
        )
        self.save(layer_group)

    def save(self, save_object):
        return self.catalog.save(save_object)

    def close(self):
        # Drops the reference only; gsconfig keeps no persistent connection.
        self.catalog = None

    def delete(self, delete_object):
        self.catalog.delete(delete_object)

    def reload(self):
        self.catalog.reload()

    def upload_shapefile(self, layer_name, shapefile_path):
        """Create a feature store from a .shp path (with its sidecar files)."""
        path = shapefile_path.split(".shp")[0]
        shapefile = shapefile_and_friends(path)
        ft = self.catalog.create_featurestore(layer_name, shapefile, self.workspace)
        self.save(ft)

    def upload_sld(self, sld_name, workspace_name, sld, overwrite=True):
        """Upload an SLD style, trying SLD 1.1 first and falling back to
        SLD 1.0 if GeoServer rejects it."""
        style_exists = sld_name in self.style_names
        if overwrite and style_exists:
            print("Overwriting style")
            style = self.catalog.get_style(sld_name, workspace_name)
            self.delete(style)
            self.reload()
            style_exists = False
        if not style_exists:
            try:
                self.catalog.create_style(sld_name, sld, False, workspace_name, "sld11")
            except Exception as e:
                # Retry as SLD 1.0 after removing the failed upload.
                print(e)
                style = self.catalog.get_style(sld_name, workspace_name)
                self.delete(style)
                self.reload()
                self.catalog.create_style(sld_name, sld, False, workspace_name, "sld10")
            self.style_name = sld_name
        else:
            if style_exists:
                print("Style already exists, using current style")
                self.style_name = sld_name

    def set_sld_for_layer(self, workspace_name=None, style_name=None, use_custom=False):
        """Make self.style the default style of self.layer and save it.

        With use_custom=False the previously cached workspace/style names are
        used; otherwise the given names form the style slug.
        """
        if not use_custom:
            workspace_name = self.workspace_name
            style_name = self.style_name
            self.style_slug = self.get_slug(workspace_name, style_name)
        else:
            if workspace_name is None:
                self.style_slug = style_name
            else:
                self.style_slug = self.get_slug(workspace_name, style_name)
        self.style = self.catalog.get_style(self.style_slug)
        print("Setting {} for {}".format(self.style.name, self.layer.name))
        self.layer.default_style = self.style
        self.save(self.layer)

    def get_slug(self, workspace, name):
        """Return 'workspace:name'."""
        return "{}:{}".format(workspace, name)

    def get_slug_data(self, slug):
        """Split 'workspace:layer' back into its two parts."""
        workspace_name = slug.split(":")[0]
        layer_name = slug.split(":")[1]
        return workspace_name, layer_name

    def get_sld(self, layer_slug=None):
        """Return the SLD body of the given style slug (or self.layer_slug).

        NOTE(review): self.layer_slug is never assigned anywhere in this
        class — calling this with layer_slug=None looks like it would raise
        AttributeError; confirm intended usage.
        """
        if layer_slug is None:
            self.style = self.catalog.get_style(self.layer_slug)
        else:
            self.style = self.catalog.get_style(layer_slug)
        self.sld_body = self.style.sld_body
        return self.sld_body

    def get_layer_workspace(self, layer_name):
        """Return the workspace name that owns the given layer."""
        return self.catalog.get_layer(layer_name).resource.workspace.name
class CatalogTests(unittest.TestCase):
    """Integration tests for the gsconfig Catalog against a live GeoServer
    populated with the standard demo data (topp, sf, nurc, tiger, ...)."""

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'],
                           username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])
        # Short version string ("2.13", ...) drives version-dependent
        # expectations below (workspace-qualified layer names since 2.13).
        self.gs_version = self.cat.get_short_version()

    def testGSVersion(self):
        version = self.cat.get_version()
        # BUGFIX: raw string — '\d' in a plain literal is an invalid escape.
        pat = re.compile(r'\d\.\d+')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspaces(names="topp")[-1].name)
        self.assertEqual(2, len(self.cat.get_workspaces(names=['topp', 'sde'])))
        self.assertEqual(2, len(self.cat.get_workspaces(names='topp, sde')))
        self.assertEqual("topp", self.cat.get_workspace("topp").name)
        self.assertIsNone(self.cat.get_workspace("blahblah-"))

    def testStores(self):
        self.assertEqual(0, len(self.cat.get_stores(names="nonexistentstore")))
        topp = self.cat.get_workspaces("topp")[0]
        sf = self.cat.get_workspaces("sf")[0]
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(workspaces=topp)))
        self.assertEqual(2, len(self.cat.get_stores(workspaces=sf)))
        self.assertEqual(2, len(self.cat.get_stores(workspaces='sf')))
        self.assertEqual(
            2, len(self.cat.get_stores(names='states_shapefile, sfdem')))
        self.assertEqual(
            2, len(self.cat.get_stores(names=['states_shapefile', 'sfdem'])))
        self.assertEqual(
            "states_shapefile",
            self.cat.get_stores(names="states_shapefile",
                                workspaces=topp.name)[0].name)
        self.assertEqual("states_shapefile",
                         self.cat.get_stores(names="states_shapefile")[0].name)
        self.assertEqual(
            "sfdem",
            self.cat.get_stores(names="sfdem", workspaces=sf.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_stores(names="sfdem")[0].name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)
        self.assertIsNone(self.cat.get_store("blah+blah-"))

    def testResources(self):
        topp = self.cat.get_workspaces("topp")[0]
        sf = self.cat.get_workspaces("sf")[0]
        states = self.cat.get_stores(names="states_shapefile",
                                     workspaces=topp.name)[0]
        sfdem = self.cat.get_stores(names="sfdem", workspaces=sf.name)[0]
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(
            2, len(self.cat.get_resources(stores=[states.name, sfdem.name])))
        self.assertEqual(
            11, len(self.cat.get_resources(workspaces=[topp.name, sf.name])))
        self.assertEqual(
            "states",
            self.cat.get_resources(names="states", stores=states.name)[0].name)
        self.assertEqual(
            "states",
            self.cat.get_resources(names="states",
                                   workspaces=topp.name)[0].name)
        self.assertEqual("states",
                         self.cat.get_resources(names="states")[0].name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        self.assertIsNone(self.cat.get_resource("blah+1blah-2"))
        states = self.cat.get_resources(names="states")[0]
        # All core metadata fields should be populated for the demo layer.
        fields = [
            states.title, states.abstract, states.native_bbox,
            states.latlon_bbox, states.projection, states.projection_policy
        ]
        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)
        self.assertEqual(
            "sfdem",
            self.cat.get_resources(names="sfdem", stores=sfdem.name)[0].name)
        self.assertEqual(
            "sfdem",
            self.cat.get_resources(names="sfdem", workspaces=sf.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_resources(names="sfdem")[0].name)

    def testResourcesUpdate(self):
        res_dest = self.cat.get_resources()
        count = 0
        for rd in res_dest:
            # only wms layers
            if rd.resource_type != "wmsLayer":
                continue
            # looking for same name
            # BUGFIX: get_resources returns a list; the original assigned the
            # list to `ro` and then read `ro.title`, which would raise
            # AttributeError for any matching wmsLayer.
            matches = self.cat.get_resources(names=rd.name)
            ro = matches[0] if matches else None
            if ro is not None:
                rd.title = ro.title
                rd.abstract = ro.abstract
                rd.keywords = ro.keywords
                rd.projection = ro.projection
                rd.native_bbox = ro.native_bbox
                rd.latlon_bbox = ro.latlon_bbox
                rd.projection_policy = ro.projection_policy
                rd.enabled = ro.enabled
                rd.advertised = ro.advertised
                rd.metadata_links = ro.metadata_links or None
                self.cat.save(rd)
                self.cat.reload()
                count += 1

    def testLayers(self):
        # Since GeoServer 2.13 layer names are workspace-qualified.
        if self.gs_version >= "2.13":
            expected = set([
                'sf:roads', 'sf:sfdem', 'nurc:mosaic', 'tiger:giant_polygon',
                'sf:bugsites', 'topp:states', 'sf:streams',
                'tiger:poly_landmarks', 'tiger:poi',
                'topp:tasmania_water_bodies', 'tiger:tiger_roads',
                'topp:tasmania_roads', 'nurc:Pk50095', 'topp:tasmania_cities',
                'nurc:Img_Sample', 'sf:restricted', 'nurc:Arc_Sample',
                'sf:archsites', 'topp:tasmania_state_boundaries'
            ])
        else:
            expected = set([
                "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
                "bugsites", "restricted", "streams", "archsites", "roads",
                "tasmania_roads", "tasmania_water_bodies",
                "tasmania_state_boundaries", "tasmania_cities", "states",
                "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
            ])
        actual = set(layer.name for layer in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        states = self.cat.get_layer("states")
        # BUGFIX: assert_(a, b) treated the 2nd argument as a message and
        # always passed; assertEqual actually compares the values.
        self.assertEqual("states", states.name)
        self.assertIsInstance(states.resource, ResourceInfo)
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(lg.name for lg in self.cat.get_layergroups(
            names=["tasmania", "tiger-ny", "spearfish"]))
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        tas = self.cat.get_layergroups(names="tasmania")[0]
        # BUGFIX: was assert_("tasmania", tas.name) — always true.
        self.assertEqual("tasmania", tas.name)
        self.assertIsInstance(tas, LayerGroup)
        if self.gs_version >= "2.13":
            self.assertEqual(tas.layers, [
                'topp:tasmania_state_boundaries', 'topp:tasmania_water_bodies',
                'topp:tasmania_roads', 'topp:tasmania_cities'
            ], tas.layers)
        else:
            self.assertEqual(tas.layers, [
                'tasmania_state_boundaries', 'tasmania_water_bodies',
                'tasmania_roads', 'tasmania_cities'
            ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)
        # Try to create a new Layer Group into the "topp" workspace
        self.assertIsNotNone(self.cat.get_workspaces("topp")[0])
        tas2 = self.cat.create_layergroup("tasmania_reloaded", tas.layers,
                                          workspace="topp")
        self.cat.save(tas2)
        self.assertEqual(
            1,
            len(self.cat.get_layergroups(names='tasmania_reloaded',
                                         workspaces="topp")))
        tas2 = self.cat.get_layergroups(names='tasmania_reloaded',
                                        workspaces="topp")[0]
        self.assertEqual("tasmania_reloaded", tas2.name)
        self.assertIsInstance(tas2, LayerGroup)
        self.assertEqual(tas2.workspace, "topp", tas2.workspace)
        if self.gs_version >= "2.13":
            self.assertEqual(tas2.layers, [
                'topp:tasmania_state_boundaries', 'topp:tasmania_water_bodies',
                'topp:tasmania_roads', 'topp:tasmania_cities'
            ], tas2.layers)
        else:
            self.assertEqual(tas2.layers, [
                'tasmania_state_boundaries', 'tasmania_water_bodies',
                'tasmania_roads', 'tasmania_cities'
            ], tas2.layers)
        self.assertEqual(tas2.styles, [None, None, None, None], tas2.styles)

    def testStyles(self):
        self.assertEqual("population", self.cat.get_styles("population")[0].name)
        self.assertEqual("popshade.sld",
                         self.cat.get_styles("population")[0].filename)
        self.assertEqual("population",
                         self.cat.get_styles("population")[0].sld_name)
        self.assertEqual("population", self.cat.get_style("population").sld_name)
        self.assertIsNone(self.cat.get_style("blah+#5blah-"))
        self.assertEqual(0, len(self.cat.get_styles('non-existing-style')))

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_styles("best style ever")
        self.cat.get_workspaces("best workspace ever")
        self.assertEqual(
            0,
            len(self.cat.get_stores(workspaces="best workspace ever",
                                    names="best store ever")))
        self.cat.get_layer("best layer ever")
        self.cat.get_layergroups("best layergroup ever")

    def testUnicodeUrl(self):
        """
        Tests that the geoserver.support.url function support unicode strings.
        """
        # Test the url function with unicode
        seg = [
            'workspaces', 'test', 'datastores', u'operaci\xf3n_repo',
            'featuretypes.xml'
        ]
        u = build_url(base=self.cat.service_url, seg=seg)
        self.assertEqual(
            u, self.cat.service_url +
            "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")
        # Test the url function with normal string
        seg = ['workspaces', 'test', 'datastores', 'test-repo',
               'featuretypes.xml']
        u = build_url(base=self.cat.service_url, seg=seg)
        self.assertEqual(
            u, self.cat.service_url +
            "/workspaces/test/datastores/test-repo/featuretypes.xml")
class CatalogTests(unittest.TestCase):
    """Integration tests for an older gsconfig Catalog API (singular getters:
    get_workspace/get_store/get_layergroup) against a demo GeoServer."""

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'],
                           username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])

    def testAbout(self):
        about_html = self.cat.about()
        self.assertTrue(
            '<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        # BUGFIX: raw string — '\d' in a plain literal is an invalid escape.
        pat = re.compile(r'\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)

    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", topp).name)
        # NOTE: duplicate assertion removed (the same line appeared twice).
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))
        self.assertEqual("states", self.cat.get_resource("states", states).name)
        self.assertEqual("states",
                         self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")
        # All core metadata fields should be populated for the demo layer.
        fields = [
            states.title, states.abstract, states.native_bbox,
            states.latlon_bbox, states.projection, states.projection_policy
        ]
        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem",
                         self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testResourcesUpdate(self):
        res_dest = self.cat.get_resources()
        count = 0
        for rd in res_dest:
            # only wms layers
            if rd.resource_type != "wmsLayer":
                continue
            # looking for same name
            ro = self.cat.get_resource(rd.name)
            if ro is not None:
                rd.title = ro.title
                rd.abstract = ro.abstract
                rd.keywords = ro.keywords
                rd.projection = ro.projection
                rd.native_bbox = ro.native_bbox
                rd.latlon_bbox = ro.latlon_bbox
                rd.projection_policy = ro.projection_policy
                rd.enabled = ro.enabled
                rd.advertised = ro.advertised
                rd.metadata_links = ro.metadata_links or None
                self.cat.save(rd)
                self.cat.reload()
                # print "Updated layer: " + rd.name
                count += 1
        # print "Total updated layers: " + str(count)

    def testLayers(self):
        expected = set([
            "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies",
            "tasmania_state_boundaries", "tasmania_cities", "states",
            "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
        ])
        actual = set(layer.name for layer in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        states = self.cat.get_layer("states")
        # BUGFIX: assert_(a, b) treated the 2nd argument as a message and
        # always passed; assertEqual actually compares the values.
        self.assertEqual("states", states.name)
        self.assertIsInstance(states.resource, ResourceInfo)
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(lg.name for lg in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        tas = self.cat.get_layergroup("tasmania")
        self.assertEqual("tasmania", tas.name)
        self.assertIsInstance(tas, LayerGroup)
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)
        # Try to create a new Layer Group into the "topp" workspace
        self.assertIsNotNone(self.cat.get_workspace("topp"))
        tas2 = self.cat.create_layergroup("tasmania_reloaded", tas.layers,
                                          workspace="topp")
        self.cat.save(tas2)
        # The group lives in "topp", so an unqualified lookup must fail.
        self.assertIsNone(self.cat.get_layergroup("tasmania_reloaded"))
        self.assertIsNotNone(
            self.cat.get_layergroup("tasmania_reloaded", "topp"))
        tas2 = self.cat.get_layergroup("tasmania_reloaded", "topp")
        self.assertEqual("tasmania_reloaded", tas2.name)
        self.assertIsInstance(tas2, LayerGroup)
        self.assertEqual(tas2.workspace, "topp", tas2.workspace)
        self.assertEqual(tas2.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas2.layers)
        self.assertEqual(tas2.styles, [None, None, None, None], tas2.styles)

    def testStyles(self):
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld",
                         self.cat.get_style("population").filename)
        self.assertEqual("population",
                         self.cat.get_style("population").sld_name)
        self.assertIsNone(self.cat.get_style('non-existing-style'))

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        try:
            self.cat.get_store(workspace="best workspace ever",
                               name="best store ever")
            self.fail('expected exception')
        # Modernized from "except FailedRequestError, fre" (valid Py2.6+/Py3).
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
        try:
            self.cat.get_resource(workspace="best workspace ever",
                                  store="best store ever",
                                  name="best resource ever")
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
def image_basemap(link, epsg, filetype): # text = '''''' cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + 'rest', username=settings.OGC_SERVER['default']['USER'], password=settings.OGC_SERVER['default']['PASSWORD']) baseURL = settings.OGC_SERVER['default']['PUBLIC_LOCATION'] localURL = settings.OGC_SERVER['default']['LOCATION'] # get layer name layer, isPhilLidar1, isPhilLidar2 = analyze_link(link) geoserver_layer = cat.get_layer(layer.name) layer_projection = geoserver_layer.resource.projection layer_projection_epsg = layer_projection.split(':')[1] bbox_string = layer.bbox_string.split(',') x0 = float(bbox_string[0]) y0 = float(bbox_string[1]) x1 = float(bbox_string[2]) y1 = float(bbox_string[3]) # for mapfish printing to_srs = epsg # if 'lipad' in settings.SITEURL: # baseURL = settings.OGC_SERVER['default']['LOCATION'] # else: to_srs_str = 'EPSG:' + str(to_srs) # filetype_cp = filetype # transform projection from 4326 to 3857 if epsg == 4683: x0, y0, x1, y1 = prs92_projection(x0, y0, x1, y1) # bbox values bbox = [x0, y0, x1, y1] # json template for requested filetype template_path = os.path.abspath( '/opt/geonode/geonode/layers/templatetags/pdf_print_template.json') if os.path.isfile(template_path): # with open(template_path,'r') as json_file: # data = json_file.read() jsontext = json.load(open(template_path, 'r')) jsontext['mapTitle'] = layer.title.title() jsontext['abstract'] = layer.abstract jsontext['purpose'] = layer.purpose if layer.purpose is None: jsontext['purpose'] = 'No purpose provided' jsontext['srs'] = to_srs_str jsontext['outputFormat'] = filetype jsontext['outputFilename'] = layer.title.replace(',','') # jsontext['layers'][0]['baseURL'] = settings.OGC_SERVER['default']['LOCATION'] + 'wms?SERVICE=WMS&' jsontext['layers'][0]['baseURL'] = localURL + \ 'wms?SERVICE=WMS&' # baseURL for local jsontext['layers'][1]['baseURL'] = localURL + \ 'wms?SERVICE=WMS&' # baseURL for local # jsontext['layers'][0]['layers'] = [str(layer.typename)] 
jsontext['layers'][1]['layers'] = [str(layer.typename)] jsontext['pages'][0]['bbox'] = bbox legendurl = localURL + 'wms?request=GetLegendGraphic&format=image/png&LAYER=' + str(layer.typename) jsontext['legends'][0]['classes'][0]['icons'][0] = legendurl jsontext['isPhilLidar1'] = isPhilLidar1 jsontext['isPhilLidar2'] = isPhilLidar2 # print '******************** J S O N ********************' # pprint(jsontext) jsonmini = json.dumps(jsontext, separators=(',', ':')) urlencoded = urllib.quote(jsonmini) spec = baseURL + 'pdf/print.pdf?spec=' + urlencoded return spec else: print 'TEMPLATE NOT FOUND'
class UploaderBase(GeoNodeBaseTestSupport):
    """Shared fixture and helpers for integration tests that drive the GeoNode
    uploader against a live GeoNode/GeoServer pair."""

    settings_overrides = []

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        # Remove the settings file an earlier integration run may have left.
        if os.path.exists('integration_settings.py'):
            os.unlink('integration_settings.py')

    def setUp(self):
        """Wait for GeoNode to come up, build HTTP/REST clients, and point the
        OGC datastore at the test PostGIS database."""
        # super(UploaderBase, self).setUp()

        # await startup
        cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        for i in range(10):
            time.sleep(.2)
            try:
                cl.get_html('/', debug=False)
                break
            except BaseException:
                pass

        self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        self.catalog = Catalog(GEOSERVER_URL + 'rest',
                               GEOSERVER_USER, GEOSERVER_PASSWD)
        self._tempfiles = []

        # createlayer must use postgis as a datastore
        # set temporary settings to use a postgis datastore
        DB_HOST = settings.DATABASES['default']['HOST']
        DB_PORT = settings.DATABASES['default']['PORT']
        DB_NAME = settings.DATABASES['default']['NAME']
        DB_USER = settings.DATABASES['default']['USER']
        DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
        settings.DATASTORE_URL = 'postgis://{}:{}@{}:{}/{}'.format(
            DB_USER, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME)
        postgis_db = dj_database_url.parse(settings.DATASTORE_URL,
                                           conn_max_age=600)
        settings.DATABASES['datastore'] = postgis_db
        settings.OGC_SERVER['default']['DATASTORE'] = 'datastore'

    def tearDown(self):
        """Delete temp files, restore settings, and wipe uploaded objects."""
        # super(UploaderBase, self).tearDown()
        map(os.unlink, self._tempfiles)
        # move to original settings
        settings.OGC_SERVER['default']['DATASTORE'] = ''
        del settings.DATABASES['datastore']
        # Cleanup
        Layer.objects.all().delete()
        Map.objects.all().delete()
        Document.objects.all().delete()

    def check_layer_geonode_page(self, path):
        """ Check that the final layer page render's correctly after
        an layer is uploaded """
        # the final url for uploader process. This does a redirect to
        # the final layer page in geonode
        resp, _ = self.client.get_html(path)
        self.assertTrue(resp.code == 200)
        self.assertTrue('content-type' in resp.headers)

    def check_layer_geoserver_caps(self, type_name):
        """ Check that a layer shows up in GeoServer's get
        capabilities document """
        # using owslib
        wms = get_wms(type_name=type_name, username=GEOSERVER_USER,
                      password=GEOSERVER_PASSWD)
        ws, layer_name = type_name.split(':')
        self.assertTrue(layer_name in wms.contents,
                        '%s is not in %s' % (layer_name, wms.contents))

    def check_layer_geoserver_rest(self, layer_name):
        """ Check that a layer shows up in GeoServer rest api after
        the uploader is done"""
        # using gsconfig to test the geoserver rest api.
        layer = self.catalog.get_layer(layer_name)
        # BUG FIX: was assertIsNotNone(layer is not None), which asserts on a
        # boolean and therefore always passed; assert on the layer itself.
        self.assertIsNotNone(layer)

    def check_and_pass_through_timestep(self, redirect_to):
        """Drive the uploader's time-configuration step (or the SRS step if
        that is where the redirect points) and return (response, json-data)."""
        time_step = upload_step('time')
        srs_step = upload_step('srs')
        if srs_step in redirect_to:
            resp = self.client.make_request(redirect_to)
        else:
            self.assertTrue(time_step in redirect_to)
        resp = self.client.make_request(redirect_to)
        token = self.client.get_csrf_token(True)
        self.assertEquals(resp.code, 200)
        resp = self.client.make_request(
            redirect_to, {'csrfmiddlewaretoken': token}, ajax=True)
        data = json.loads(resp.read())
        return resp, data

    def complete_raster_upload(self, file_path, resp, data):
        """Raster variant of complete_upload (skips vector-only steps)."""
        return self.complete_upload(file_path, resp, data, is_raster=True)

    def check_save_step(self, resp, data):
        """Verify the initial save step"""
        self.assertEquals(resp.code, 200)
        self.assertTrue(isinstance(data, dict))
        # make that the upload returns a success True key
        self.assertTrue(data['success'], 'expected success but got %s' % data)
        self.assertTrue('redirect_to' in data)

    def complete_upload(self, file_path, resp, data, is_raster=False):
        """Method to check if a layer was correctly uploaded to the
        GeoNode.

        arguments: file path, the django http response

           Checks to see if a layer is configured in Django
           Checks to see if a layer is configured in GeoServer
               checks the Rest API
               checks the get cap document """
        layer_name, ext = os.path.splitext(os.path.basename(file_path))
        if not isinstance(data, basestring):
            self.check_save_step(resp, data)
            layer_page = self.finish_upload(
                data['redirect_to'], layer_name, is_raster)
            self.check_layer_complete(layer_page, layer_name)

    def finish_upload(self, current_step, layer_name,
                      is_raster=False, skip_srs=False):
        """Walk the remaining uploader steps and return the final layer URL
        (or the last step URL if the final response is not the expected JSON)."""
        if not is_raster and _ALLOW_TIME_STEP:
            resp, data = self.check_and_pass_through_timestep(current_step)
            self.assertEquals(resp.code, 200)
            if not isinstance(data, basestring):
                if data['success']:
                    self.assertTrue(data['success'],
                                    'expected success but got %s' % data)
                    self.assertTrue('redirect_to' in data)
                    current_step = data['redirect_to']
                    self.wait_for_progress(data.get('progress'))

        if not is_raster and not skip_srs:
            self.assertTrue(upload_step('srs') in current_step)
            # if all is good, the srs step will redirect to the final page
            resp = self.client.get(current_step)
            content = json.loads(resp.read())
            if not content.get('url') and content.get(
                    'redirect_to', current_step) == upload_step('final'):
                resp = self.client.get(content.get('redirect_to'))
        else:
            self.assertTrue(upload_step('final') in current_step)
            resp = self.client.get(current_step)

        self.assertEquals(resp.code, 200)
        resp_js = resp.read()
        try:
            c = json.loads(resp_js)
            url = c['url']
            url = urllib.unquote(url)
            # and the final page should redirect to the layer page
            # @todo - make the check match completely (endswith at least)
            # currently working around potential 'orphaned' db tables
            self.assertTrue(layer_name in url,
                            'expected %s in URL, got %s' % (layer_name, url))
            return url
        except BaseException:
            return current_step

    def check_upload_model(self, original_name):
        # we can only test this if we're using the same DB as the test instance
        if not settings.OGC_SERVER['default']['DATASTORE']:
            return
        upload = None
        try:
            # AF: TODO Headhakes here... nose is not accessing to the test
            # db!!!
            uploads = Upload.objects.all()
            if uploads:
                upload = Upload.objects.filter(name=str(original_name)).last()
        except Upload.DoesNotExist:
            self.fail('expected to find Upload object for %s' % original_name)
        # AF: TODO Headhakes here... nose is not accessing to the test db!!!
        if upload:
            self.assertTrue(upload.complete)

    def check_layer_complete(self, layer_page, original_name):
        '''check everything to verify the layer is complete'''
        self.check_layer_geonode_page(layer_page)
        # @todo use the original_name
        # currently working around potential 'orphaned' db tables
        # this grabs the name from the url (it might contain a 0)
        type_name = os.path.basename(layer_page)
        layer_name = original_name
        try:
            layer_name = type_name.split(':')[1]
        except BaseException:
            pass
        # work around acl caching on geoserver side of things
        caps_found = False
        for i in range(10):
            time.sleep(.5)
            try:
                self.check_layer_geoserver_caps(type_name)
                caps_found = True
            except BaseException:
                pass
        if caps_found:
            self.check_layer_geoserver_rest(layer_name)
            self.check_upload_model(layer_name)
        else:
            logger.warning(
                "Could not recognize Layer %s on GeoServer WMS" % original_name)

    def check_invalid_projection(self, layer_name, resp, data):
        """ Makes sure that we got the correct response from an layer
        that can't be uploaded"""
        # BUG FIX: was assertTrue(resp.code, 200), which treats 200 as the
        # failure *message* and passes for any truthy code; compare instead.
        self.assertEqual(resp.code, 200)
        if not isinstance(data, basestring):
            self.assertTrue(data['success'])
            self.assertTrue(upload_step("srs") in data['redirect_to'])
            resp, soup = self.client.get_html(data['redirect_to'])
            # grab an h2 and find the name there as part of a message saying it's
            # bad
            h2 = soup.find_all(['h2'])[0]
            self.assertTrue(str(h2).find(layer_name))

    def upload_folder_of_files(self, folder, final_check, session_ids=None):
        """Upload every .tif/.shp/.zip in *folder*, collecting upload-session
        ids into *session_ids* (when given) and running *final_check* on each."""
        mains = ('.tif', '.shp', '.zip')

        def is_main(_file):
            _, ext = os.path.splitext(_file)
            return (ext.lower() in mains)

        main_files = filter(is_main, os.listdir(folder))
        for main in main_files:
            # get the abs path to the file
            _file = os.path.join(folder, main)
            base, _ = os.path.splitext(_file)
            resp, data = self.client.upload_file(_file)
            if session_ids is not None:
                if not isinstance(data, basestring) and data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
            if not isinstance(data, basestring):
                self.wait_for_progress(data.get('progress'))
            final_check(base, resp, data)

    def upload_file(self, fname, final_check,
                    check_name=None, session_ids=None):
        """Upload a single file and run *final_check* on the result."""
        if not check_name:
            check_name, _ = os.path.splitext(fname)
        resp, data = self.client.upload_file(fname)
        if session_ids is not None:
            if not isinstance(data, basestring):
                if data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
        if not isinstance(data, basestring):
            self.wait_for_progress(data.get('progress'))
        final_check(check_name, resp, data)

    def wait_for_progress(self, progress_url):
        """Poll the progress endpoint until it stops reporting RUNNING."""
        if progress_url:
            resp = self.client.get(progress_url)
            assert resp.getcode() == 200, 'Invalid progress status code'
            raw_data = resp.read()
            json_data = json.loads(raw_data)
            # "COMPLETE" state means done
            if json_data.get('state', '') == 'RUNNING':
                time.sleep(0.1)
                self.wait_for_progress(progress_url)

    def temp_file(self, ext):
        """mkstemp a file that tearDown will delete; returns (fd, path)."""
        fd, abspath = tempfile.mkstemp(ext)
        self._tempfiles.append(abspath)
        return fd, abspath

    def make_csv(self, *rows):
        """Write *rows* to a tracked temp CSV file and return its path."""
        fd, abspath = self.temp_file('.csv')
        fp = os.fdopen(fd, 'wb')
        out = csv.writer(fp)
        for r in rows:
            out.writerow(r)
        fp.close()
        return abspath
class GeoserverHelper:
    """Thin convenience wrapper around a gsconfig Catalog plus a PostGIS
    database: upload shapefiles/styles, publish feature types, and create
    PostGIS datastores. Falls back to module-level GEOSERVER*/POSTGRE*
    constants for any constructor argument left empty."""

    def __init__(self, geoserverUrl="", geoserverUserName="", geoserverPW="",
                 geoserverWorkSpace="", postgreIP="", postgreUserName="",
                 postgrePW=""):
        """use the constructor given arguments if used"""
        self.geoserverUrl = geoserverUrl if geoserverUrl != "" else GEOSERVERURL
        self.geoserverUserName = geoserverUserName if geoserverUserName != "" else GEOSERVERUSERNAME
        self.geoserverPW = geoserverPW if geoserverPW != "" else GEOSERVERPW
        self.geoserverWorkSpace = geoserverWorkSpace if geoserverWorkSpace != "" else "crc"
        self.postgreIP = postgreIP if postgreIP != "" else POSTGREIP
        self.postgreUserName = postgreUserName if postgreUserName != "" else POSTGREUSERNAME
        self.postgrePW = postgrePW if postgrePW != "" else POSTGREPW
        if self.geoserverUrl[-1] != '/':
            self.geoserverUrl += '/'
        self.catalog = Catalog(self.geoserverUrl + "rest/")
        self.catalog.http.add_credentials(self.geoserverUserName,
                                          self.geoserverPW)
        # Probe the connection once so a bad URL/credentials fail fast.
        try:
            workspaces = self.catalog.get_workspaces()
        except Exception:  # narrowed from bare except: keep ^C working
            e = sys.exc_info()[0]
            log(str(e), Error)
            raise Exception("Init Error")
        self.cWorkSpace = self.catalog.get_workspace(self.geoserverWorkSpace)

    def getLayers(self):
        """Return all layers of the configured workspace's catalog."""
        return self.cWorkSpace.catalog.get_layers()

    def insertShapeIntoPostGis(self, shapeFile, databaseName, tableName,
                               encoding=3857):
        '''returns the returnCode of the execution of the insert script, e.g:
        helper.insertShapeIntoPostGis('/home/c815/gsTest/test.shp','crc','testingHelper2')'''
        if not os.path.isfile(shapeFile):
            print("Shape file not found")
            return -1
        cmds = "PGPASSWORD={pgPW} ./createWSFTFromSHP.sh -s {shapeFileF} -d {databaseNameF} -t {tableNameF} -u {postgreUsername} -i {postgreIP}".format(
            pgPW=self.postgrePW, shapeFileF=shapeFile,
            databaseNameF=databaseName, tableNameF=tableName,
            postgreUsername=self.postgreUserName, postgreIP=self.postgreIP)
        return subprocess.call(cmds, shell=True)

    def uploadShapeFile(self, shapeFile, storeName):
        """Create a feature store from a shapefile and its sidecar files."""
        shpPlusSidcars = geoserver.util.shapefile_and_friends(shapeFile[:-3])
        # (removed a no-op bare reference to shpPlusSidcars that was here)
        self.ft = self.catalog.create_featurestore(storeName, shpPlusSidcars,
                                                   self.cWorkSpace)

    def getStyles(self):
        """Return all styles known to the catalog."""
        return self.catalog.get_styles()

    def uploadStyleFile(self, sldFile, styleName, overWrite, workSpace=None):
        """Read an SLD file from disk and register it as *styleName*."""
        # BUG FIX: the original called uploadStyle(...) without self., which
        # raises NameError at runtime; also use a context manager so the
        # file is closed even if the upload fails.
        with open(sldFile, 'r') as f:
            styleSrc = f.read()
        self.uploadStyle(styleSrc, styleName, overWrite, workSpace)

    def uploadStyle(self, sldSrc, styleName, overWrite, workSpace=None):
        """Register SLD source text as a catalog style."""
        self.catalog.create_style(styleName, sldSrc, overWrite, workSpace)

    def publishPostGISLayer(self, postGISLayerName, storeName,
                            crs='EPSG:3857'):
        '''cat.publish_featuretype('testingstuff',crcStore,native_crs='EPSG:3857')'''
        store = self.catalog.get_store(storeName)
        if store is not None:
            return self.catalog.publish_featuretype(postGISLayerName,
                                                    store, crs)
        return None

    def setDefaultStyleForLayer(self, layerName, styleName):
        """Set *styleName* (optionally 'workspace:name') as the layer's
        default style; returns 0 on success, -1 if the style is unknown."""
        l = self.catalog.get_layer(layerName)
        sNames = [i.name for i in self.getStyles()]
        if styleName not in sNames:
            # Not a global style: only accept a workspace-qualified name.
            split = styleName.split(':')
            if len(split) == 2:
                workSpace = styleName.split(':')[0]
                newStyleName = styleName.split(':')[1]
            else:
                return -1
            style = self.catalog.get_style(newStyleName, workSpace)
            if style is None:
                return -1
        if l is not None:
            # gsconfig private setter; no public API for this in this version.
            l._set_default_style(styleName)
            self.catalog.save(l)
            return 0

    def createPostGISDataStore(self, storeName, postGisPassword, postGisUser,
                               postGisHost, postGisDatabase, workSpace=None):
        """Create a GeoServer PostGIS datastore after verifying the database
        credentials actually work; returns True on success, False when the
        connection test fails."""
        # check if connection parameter are valid
        try:
            conn = psycopg2.connect(
                "dbname='{dbName}' user='******' host='{Host}' password='******'"
                .format(dbName=postGisDatabase, dbUser=postGisUser,
                        Host=postGisHost, password=postGisPassword))
            # BUG FIX: the probe connection was never closed (leak).
            conn.close()
        except Exception:  # narrowed from bare except:
            return False
        w = self.catalog.create_datastore(storeName, workSpace)
        template = Template(
            """{'validate connections': 'true', 'port': '5432', """
            """'Support on the fly geometry simplification': 'true', """
            """'create database': 'false', 'dbtype': 'postgis', """
            """'Connection timeout': '20', """
            """'namespace': 'http://www.crcproject.com', """
            """'Max connection idle time': '300', """
            """'Expose primary keys': 'false', 'min connections': '1', """
            """'Max open prepared statements':'50', 'passwd': '$passwd', """
            """'encode functions': 'false', 'max connections': '10', """
            """'Evictor tests per run': '3', 'Loose bbox': 'true', """
            """'Evictor run periodicity': '300', 'Estimated extends': 'true', """
            """'database': '$database', 'fetch size': '1000', """
            """'Test while idle': 'true', 'host': '$host', """
            """'preparedStatements': 'false', 'schema': 'public', """
            """'user': '******'}""")
        dic = ast.literal_eval(
            template.substitute(passwd=postGisPassword, user=postGisUser,
                                host=postGisHost, database=postGisDatabase))
        w.connection_parameters = dic
        self.catalog.save(w)
        return True
class CatalogTests(unittest.TestCase):
    """Integration tests for gsconfig's Catalog against a local GeoServer
    seeded with the standard demo data (topp/sf workspaces etc.)."""

    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testAbout(self):
        about_html = self.cat.about()
        self.assertTrue(
            '<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        # BUG FIX: use a raw string so \d is a regex escape, not a (silently
        # tolerated) string escape.
        pat = re.compile(r'\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)

    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", topp).name)
        # (an exact duplicate of the next assertion was removed here)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))
        self.assertEqual("states",
                         self.cat.get_resource("states", states).name)
        self.assertEqual("states",
                         self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")
        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]
        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem",
                         self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testLayers(self):
        expected = set(["Arc_Sample", "Pk50095", "Img_Sample", "mosaic",
                        "sfdem", "bugsites", "restricted", "streams",
                        "archsites", "roads", "tasmania_roads",
                        "tasmania_water_bodies", "tasmania_state_boundaries",
                        "tasmania_cities", "states", "poly_landmarks",
                        "tiger_roads", "poi", "giant_polygon"
                        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        states = self.cat.get_layer("states")
        # BUG FIX: the originals used assert_(x, y), i.e. assertTrue with a
        # message, which always passes -- assertEqual was intended.
        self.assertEqual("states", states.name)
        self.assertTrue(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        tas = self.cat.get_layergroup("tasmania")
        # BUG FIX: same two-argument assert_ misuse as in testLayers.
        self.assertEqual("tasmania", tas.name)
        self.assertTrue(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, ['tasmania_state_boundaries',
                                      'tasmania_water_bodies',
                                      'tasmania_roads',
                                      'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

    def testStyles(self):
        self.assertEqual(20, len(self.cat.get_styles()))
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld",
                         self.cat.get_style("population").filename)
        self.assertEqual("population",
                         self.cat.get_style("population").sld_name)

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        self.cat.get_store(workspace="best workspace ever",
                           name="best store ever")
        self.assertRaises(FailedRequestError,
                          lambda: self.cat.get_resource(
                              workspace="best workspace ever",
                              store="best store ever",
                              name="best resource ever"))
        self.cat.get_layer("best layer ever")
        self.cat.get_layergroup("best layergroup ever")

    def testUnicodeUrl(self):
        """
        Tests that the geoserver.support.url function support unicode strings.
        """
        # Test the url function with unicode
        seg = ['workspaces', 'test', 'datastores', u'operaci\xf3n_repo',
               'featuretypes.xml']
        u = url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url +
                         "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")
        # Test the url function with normal string
        seg = ['workspaces', 'test', 'datastores', 'test-repo',
               'featuretypes.xml']
        u = url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url +
                         "/workspaces/test/datastores/test-repo/featuretypes.xml")
class UploaderTests(MapStoryTestMixin): """ Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): settings = connection.settings_dict params = {'database': settings['NAME'], 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': "True"} store = catalog.create_datastore(settings['NAME'], workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(settings['NAME']) def setUp(self): if not os.path.exists(os.path.join(os.path.split(__file__)[0], 'test_ogr')): self.skipTest('Skipping test due to missing test data.') # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.username, self.password = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin') self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.cat) def tearDown(self): """ Clean up geoserver. 
""" self.cat.delete(self.datastore, recurse=True) def generic_import(self, file, configuration_options=[{'index': 0}]): f = file filename = os.path.join(os.path.dirname(__file__), 'test_ogr', f) res = self.import_file(filename, configuration_options=configuration_options) layer = Layer.objects.get(name=res[0][0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not filename.endswith('zip'): self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields) # make sure we have at least one dateTime attribute self.assertTrue('xsd:dateTime' or 'xsd:date' in [n.attribute_type for n in layer.attributes.all()]) return layer def test_box_with_year_field(self): """ Tests the import of test_box_with_year_field. """ layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """ Tests the import of test_boxes_with_date. """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """ Tests a CSV with WKT polygon. 
""" layer = self.generic_import('boxes_with_date.csv', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_schema_csv(self): """ Tests a CSV from schema download. """ layer = self.generic_import('schema_download.csv', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_iso_date(self): """ Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """ Tests importing the same layer twice to ensure incrementing file names is properly handled. """ filename = os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_date_iso_date.zip') gi = GDALImport(filename) layers1 = gi.handle({'index': 0, 'name': 'test'}) layers2 = gi.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_boxes_with_date_iso_date_zip(self): """ Tests the import of test_boxes_with_iso_date. 
""" layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """ Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """ Tests the import of test_boxes_with_dates_bc. """ layer = self.generic_import('point_with_date.geojson', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """ Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. 
""" layer = self.generic_import('boxes_with_end_date.shp') date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0] end_date_attr = filter(lambda attr: attr.attribute == 'enddate', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:date') self.assertEqual(end_date_attr.attribute_type, 'xsd:date') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) def test_us_states_kml(self): """ Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. layer = self.generic_import('us_states.kml') def test_mojstrovka_gpx(self): """ Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('mojstrovka.gpx') date_attr = filter(lambda attr: attr.attribute == 'time', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """ Convenience method to run generic tests on time layers. """ self.cat._cache.clear() resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace) timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) self.assertEqual(attribute, timeInfo.attribute) self.assertEqual(end_attribute, timeInfo.end_attribute) def test_us_shootings_csv(self): """ Tests the import of US_shootings.csv. 
""" filename = 'US_shootings.csv' f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename) layer = self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}]) self.assertEqual(layer.name, 'us_shootings') date_field = 'date' configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """ Tests the import of US_shootings.csv. """ filename = 'US_Civil_Rights_Sitins0.csv' f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename) layer = self.generic_import(f, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}]) def get_layer_names(self, in_file): """ Gets layer names from a data source. """ ds = DataSource(in_file) return map(lambda layer: layer.name, ds) def test_gdal_import(self): filename = 'point_with_date.geojson' f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename) self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) def import_file(self, in_file, configuration_options=[]): """ Imports the file. """ self.assertTrue(os.path.exists(in_file)) # run ogr2ogr gi = GDALImport(in_file) layers = gi.handle(configuration_options=configuration_options) return layers @staticmethod def createFeatureType(catalog, datastore, name): """ Exposes a PostGIS feature type in geoserver. """ headers = {"Content-type": "application/xml"} data = "<featureType><name>{name}</name></featureType>".format(name=name) url = datastore.href.replace(".xml", '/featuretypes.xml'.format(name=name)) headers, response = catalog.http.request(url, "POST ", data, headers) return response def test_create_vrt(self): """ Tests the create_vrt function. 
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv') vrt = create_vrt(f) vrt.seek(0) output = vrt.read() self.assertTrue('name="US_shootings"' in output) self.assertTrue('<SrcDataSource>{0}</SrcDataSource>'.format(f) in output) self.assertTrue('<GeometryField encoding="PointFromColumns" x="Longitude" y="Latitude" />'.format(f) in output) self.assertEqual(os.path.splitext(vrt.name)[1], '.vrt') def test_file_add_view(self): """ Tests the file_add_view. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() # test login required for this view request = c.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertTrue(len(response.context['object_list']) == 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'empty_file.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """ Tests the file_add_view. 
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """ Tests the describe fields functionality. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv') fields = None with GDALInspector(f) as f: layers = f.describe_fields() self.assertTrue(layers[0]['name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude']) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """ Tests the describe fields functionality. """ files = ((os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv'), 'CSV'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson'), 'GeoJSON'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'mojstrovka.gpx'), 'GPX'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_states.kml'), 'KML'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_year_field.shp'), 'ESRI Shapefile'), (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'), ) for path, file_type in files: with GDALInspector(path) as f: self.assertEqual(f.file_type(), file_type) def test_append(self): """ Tests the configuration view. 
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'name': 'append', 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*) from append') self.assertEqual(1,cursor.fetchone()[0]) payload[0]['appendTo'] = 'geonode:append' f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date_2.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*) from append') self.assertEqual(2,cursor.fetchone()[0]) def test_trunc_append(self): f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'long_attr_name.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'name': 'append', 'convert_to_date': ['date_as_date'], 'start_date': 'date_as_date', 'configureTime': True, 'editable': True, 'permissions': 
{'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*) from append') self.assertEqual(1, cursor.fetchone()[0]) payload[0]['appendTo'] = 'geonode:append' payload[0]['convert_to_date'] = ['date_as_da'] payload[0]['start_date'] = 'date_as_da' f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'long_attr_trunc.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*), date from append group by date') result = cursor.fetchone() #ensure that the feature was added and the attribute was appended self.assertEqual(2,result[0]) self.assertNotEqual(None,result[1]) def test_schema_append(self): f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'schema_initial.zip') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'name': 'append', 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() 
cursor.execute('select count(*) from append') self.assertEqual(1, cursor.fetchone()[0]) payload[0]['appendTo'] = 'geonode:append' payload[0]['convert_to_date'] = ['date'] payload[0]['start_date'] = 'date' f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'schema_append.zip') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') cursor = db.connections['datastore'].cursor() cursor.execute('select count(*), date from append group by date') result = cursor.fetchone() import pdb;pdb.set_trace() #ensure that the feature was added and the attribute was appended self.assertEqual(2,result[0]) self.assertNotEqual(None,result[1]) def test_configure_view(self): """ Tests the configuration view. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') 
self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username) perms = layer.get_all_level_info() user = User.objects.get(username=self.non_admin_username) # check user permissions for perm in [u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data']: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn('change_resourcebase_permissions', perms['users'][new_user]) self.assertIn("change_layer_data", perms['users'][User.objects.get(username='******')]) lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) def test_configure_view_convert_date(self): """ Tests the configure view with a dataset that needs to be converted to a date. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True}] response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 405) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 400) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 
'xsd:dateTime') self.assertTrue(layer.attributes.filter(attribute='date'), 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. c.logout() c.login_as_admin() response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 404) def test_list_api(self): c = AdminClient() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.username) non_admin = User.objects.get(username=self.non_admin_username) from .models import UploadFile f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv') with open(f, 'rb') as f: uploaded_file = SimpleUploadedFile('test_data', f.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) c.login_as_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = c.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): c = AdminClient() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """ Ensure users can delete their data. 
""" c = AdminClient() f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """ Ensure that administrators can delete data that isn't theirs. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) c.logout() c.login_as_admin() id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """ Tests providing a name in the configuration options. """ f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() name = 'point-with-a-date' with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = {'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True} response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json') layer = Layer.objects.all()[0] self.assertEqual(layer.title, name.replace('-', '_')) def test_api_import(self): """ Tests the import api. 
""" f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = {'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True} self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict)) response = c.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertTrue('task' in response.content) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """ Test the file extension validator. """ for extension in IMPORTER_VALID_EXTENSIONS: self.assertIsNone(validate_file_extension(SimpleUploadedFile('test.{0}'.format(extension), ''))) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """ Test the file extension validator. """ with self.assertRaises(ValidationError): validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """ Regression test for numeric field overflows in shapefiles. 
# https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configuration_options=[{'index': 0, 'convert_to_date': []}]) def test_outside_bounds_regression(self): """ Regression where layers with features outside projection bounds fail. """ self.generic_import('Spring_2015.zip', configuration_options=[{'index': 0 }]) resource = self.cat.get_layer('spring_2015').resource self.assertEqual(resource.latlon_bbox, ('-180.0', '180.0', '-90.0', '90.0', 'EPSG:4326')) def test_multipolygon_shapefile(self): """ Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configuration_options=[{'index': 0}]) def test_gwc_handler(self): """ Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.cat.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertTrue('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0}]) gs_layer = self.cat.get_layer(layer.name) self.cat._cache.clear() gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertFalse('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200)
'BLZ', 'GHA', 'STP', 'TGO', 'NER', 'MLI', 'BEN', 'GNB', 'CPV', 'KEN', 'BDI', 'SOM', 'DJI', 'RWA', 'TZA', 'AGO', 'LSO', 'NAM', 'SWZ', 'MWI', 'MOZ', 'ZMB', 'ZWE' ] layers_to_be_created_southamerica = [ 'DOM', 'VEN', 'COL', 'ECU', 'HTI', 'GTM', 'PAN', 'PRY', 'PER', 'CHL', 'NIC', 'HND', 'DMA', 'BOL', 'CUB', 'SLV' ] #layers_to_be_created = ['BLZ','DOM'] for iso in layers_to_be_created_asia: #!!! change the list based on the continent you want to create data table_name = 'asp_trs_roads_osm' # !!change the table name based on the continent (asp_trs_roads_osm -> asia, afr_trs_roads_osm -> africa, lac_trs_roads_osm -> latin america) layer_name = iso.lower() + "_trs_roads_osm" layer_exist = cat.get_layer(layer_name) # delete layers in case of messup ''' url = "http://ogcserver.gis.wfp.org/geoserver/rest/layers/geonode:{layer_name}.xml".format(** { 'layer_name': layer_name, }) response = requests.delete(url, auth=('admin', 'osm_sparc_2017')) print response.status_code url = "http://ogcserver.gis.wfp.org/geoserver/rest/workspaces/geonode/datastores/osm_prod/featuretypes/{layer_name}.xml".format(** { 'layer_name': layer_name, }) response = requests.delete(url, auth=('admin', 'osm_sparc_2017'))
# NOTE(review): standalone gsconfig demo/scratch script (Python 2 `print`).
# `Catalog` is referenced below but not imported in this chunk -- presumably
# imported earlier in the original file (geoserver.catalog); verify.
from geoserver.store import coveragestore_from_index, datastore_from_index, \
    DataStore, CoverageStore, UnsavedDataStore, UnsavedCoverageStore
from geoserver.style import Style
from geoserver.support import prepare_upload_bundle
from geoserver.layergroup import LayerGroup, UnsavedLayerGroup
from geoserver.workspace import workspace_from_index, Workspace
from geoserver.resource import FeatureType
# NOTE(review): duplicate import of prepare_upload_bundle (already imported above).
from geoserver.support import prepare_upload_bundle, url

style_to_check = "point"
# Connect to a local GeoServer REST endpoint with default admin credentials.
cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "admin")
ooiDataStore = "ooi"
ooiworkspace = "geonode"

# update layer: toggle its `enabled` flag off, save, then back on.
that_layer = cat.get_layer("1k time varying test")
that_layer.enabled = False
# at this point that_layer is still published in GeoServer
cat.save(that_layer)
# now it is disabled; re-fetch and re-enable it
that_layer = cat.get_layer("1k time varying test")
that_layer.enabled = True
cat.save(that_layer)

ooitest = cat.get_workspace("ooi_test")
# List the resources of store "asd" in workspace "geonode".
data = cat.get_resources(store="asd",workspace="geonode")
print data
layer = cat.get_layer("1k time varying test")
print layer.href
# NOTE(review): bare attribute access -- this does NOT call the method;
# looks like leftover exploratory code.
cat.add_data_to_store
class UploaderBase(GeoNodeBaseTestSupport):
    """
    Base class for GeoNode uploader integration tests.

    Provides a GeoNode HTTP client and a GeoServer catalog, plus helper
    methods to drive the multi-step upload workflow and verify the result
    in Django, the GeoServer REST API and the WMS capabilities document.
    All checks run against live services (GEONODE_URL / GEOSERVER_URL).
    """

    # resource type exercised by these tests
    type = 'dataset'

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        # remove the generated settings file left behind by the test setup
        if os.path.exists('integration_settings.py'):
            os.unlink('integration_settings.py')

    def setUp(self):
        # await startup: poll the GeoNode front page up to 10 times so the
        # test does not start before the server is responsive
        cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        for i in range(10):
            try:
                cl.get_html('/', debug=False)
                break
            except Exception:
                pass
        self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        self.catalog = Catalog(
            f"{GEOSERVER_URL}rest",
            GEOSERVER_USER,
            GEOSERVER_PASSWD,
            retries=ogc_server_settings.MAX_RETRIES,
            backoff_factor=ogc_server_settings.BACKOFF_FACTOR)
        # point Django at the integration DB and disable atomic requests so
        # uploads are visible across connections
        settings.DATABASES['default']['NAME'] = DB_NAME
        connections['default'].settings_dict['ATOMIC_REQUESTS'] = False
        connections['default'].connect()
        # files created via temp_file()/make_csv(); removed in tearDown
        self._tempfiles = []

    def _post_teardown(self):
        pass

    def tearDown(self):
        connections.databases['default']['ATOMIC_REQUESTS'] = False
        for temp_file in self._tempfiles:
            os.unlink(temp_file)
        # Cleanup: flush geofence rules if geofence security is enabled
        if settings.OGC_SERVER['default'].get("GEOFENCE_SECURITY_ENABLED", False):
            from geonode.geoserver.security import purge_geofence_all
            purge_geofence_all()

    def check_dataset_geonode_page(self, path):
        """Check that the final dataset page renders correctly after a
        dataset is uploaded."""
        # the final url of the uploader process redirects to the dataset
        # page in geonode
        resp, _ = self.client.get_html(path)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('content-type' in resp.headers)

    def check_dataset_geoserver_caps(self, type_name):
        """Check that a dataset shows up in GeoServer's GetCapabilities
        document."""
        # using owslib
        wms = get_wms(type_name=type_name, username=GEOSERVER_USER, password=GEOSERVER_PASSWD)
        ws, dataset_name = type_name.split(':')
        self.assertTrue(dataset_name in wms.contents,
                        f'{dataset_name} is not in {wms.contents}')

    def check_dataset_geoserver_rest(self, dataset_name):
        """Check that a dataset shows up in the GeoServer REST API after
        the uploader is done."""
        # using gsconfig to test the geoserver rest api.
        dataset = self.catalog.get_layer(dataset_name)
        self.assertIsNotNone(dataset)

    def check_and_pass_through_timestep(self, redirect_to):
        # Walk through the (optional) time-configuration step of the upload
        # wizard by POSTing an empty form with just the CSRF token.
        time_step = upload_step('time')
        srs_step = upload_step('srs')
        if srs_step in redirect_to:
            resp = self.client.make_request(redirect_to)
        else:
            self.assertTrue(time_step in redirect_to)
        resp = self.client.make_request(redirect_to)
        token = self.client.get_csrf_token(True)
        self.assertEqual(resp.status_code, 200)
        resp = self.client.make_request(
            redirect_to, {'csrfmiddlewaretoken': token}, ajax=True)
        return resp, resp.json()

    def complete_raster_upload(self, file_path, resp, data):
        # raster variant of complete_upload (skips the vector-only steps)
        return self.complete_upload(file_path, resp, data, is_raster=True)

    def check_save_step(self, resp, data):
        """Verify the initial save step."""
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(isinstance(data, dict))
        # make sure the upload returns a success True key
        self.assertTrue(data['success'], f'expected success but got {data}')
        self.assertTrue('redirect_to' in data)

    def complete_upload(self, file_path, resp, data, is_raster=False):
        """Method to check if a dataset was correctly uploaded to GeoNode.

        arguments: file path, the django http response

           Checks to see if a dataset is configured in Django
           Checks to see if a dataset is configured in GeoServer
               checks the Rest API
               checks the get cap document
        """
        dataset_name, ext = os.path.splitext(os.path.basename(file_path))
        if not isinstance(data, str):
            self.check_save_step(resp, data)
            dataset_page = self.finish_upload(
                data['redirect_to'], dataset_name, is_raster)
            self.check_dataset_complete(dataset_page, dataset_name)

    def finish_upload(self, current_step, dataset_name, is_raster=False, skip_srs=False):
        # Drive the remaining wizard steps (time, srs, final) and return the
        # URL of the resulting dataset page (or the last step on failure).
        if not is_raster and _ALLOW_TIME_STEP:
            resp, data = self.check_and_pass_through_timestep(current_step)
            self.assertEqual(resp.status_code, 200)
            if not isinstance(data, str):
                if data['success']:
                    self.assertTrue(data['success'],
                                    f'expected success but got {data}')
                    self.assertTrue('redirect_to' in data)
                    current_step = data['redirect_to']
                    # self.wait_for_progress(data.get('progress'))
        if not is_raster and not skip_srs:
            self.assertTrue(upload_step('srs') in current_step)
            # if all is good, the srs step will redirect to the final page
            final_step = current_step.replace('srs', 'final')
            resp = self.client.make_request(final_step)
        else:
            self.assertTrue(
                urlsplit(upload_step('final')).path in current_step,
                f"current_step: {current_step} - upload_step('final'): {upload_step('final')}"
            )
            resp = self.client.get(current_step)
        self.assertEqual(resp.status_code, 200)
        try:
            c = resp.json()
            url = c['url']
            url = unquote(url)
            # and the final page should redirect to the dataset page
            # @todo - make the check match completely (endswith at least)
            # currently working around potential 'orphaned' db tables
            self.assertTrue(dataset_name in url,
                            f'expected {dataset_name} in URL, got {url}')
            return url
        except Exception:
            return current_step

    def check_upload_model(self, original_name):
        # we can only test this if we're using the same DB as the test instance
        if not settings.OGC_SERVER['default']['DATASTORE']:
            return
        upload = None
        try:
            upload = Upload.objects.filter(
                name__icontains=str(original_name)).last()
            # Making sure the Upload object is present on the DB and
            # the import session is COMPLETE
            if upload and not upload.complete:
                logger.warning(
                    f"Upload not complete for Dataset {original_name}")
        except Upload.DoesNotExist:
            self.fail(f'expected to find Upload object for {original_name}')

    def check_dataset_complete(self, dataset_page, original_name):
        '''check everything to verify the dataset is complete'''
        self.check_dataset_geonode_page(dataset_page)
        # @todo use the original_name
        # currently working around potential 'orphaned' db tables
        # this grabs the name from the url (it might contain a 0)
        type_name = os.path.basename(dataset_page)
        dataset_name = original_name
        try:
            dataset_name = type_name.split(':')[1]
        except Exception:
            pass
        # work around acl caching on geoserver side of things
        caps_found = False
        for i in range(10):
            try:
                self.check_dataset_geoserver_caps(type_name)
                self.check_dataset_geoserver_rest(dataset_name)
                caps_found = True
            except Exception:
                pass
        if not caps_found:
            logger.warning(
                f"Could not recognize Dataset {original_name} on GeoServer WMS Capa"
            )
        self.check_upload_model(dataset_name)

    def check_invalid_projection(self, dataset_name, resp, data):
        """Makes sure that we got the correct response from a dataset
        that can't be uploaded."""
        self.assertTrue(resp.status_code, 200)
        if not isinstance(data, str):
            self.assertTrue(data['success'])
            srs_step = upload_step("srs")
            if "srs" in data['redirect_to']:
                self.assertTrue(srs_step in data['redirect_to'])
            resp, soup = self.client.get_html(data['redirect_to'])
            # grab an h2 and find the name there as part of a message
            # saying it's bad
            h2 = soup.find_all(['h2'])[0]
            self.assertTrue(str(h2).find(dataset_name))

    def check_upload_complete(self, dataset_name, resp, data):
        """Makes sure that we got the correct response from a dataset
        that has been uploaded."""
        self.assertTrue(resp.status_code, 200)
        if not isinstance(data, str):
            self.assertTrue(data['success'])
            final_step = upload_step("final")
            if "final" in data['redirect_to']:
                self.assertTrue(final_step in data['redirect_to'])

    def check_upload_failed(self, dataset_name, resp, data):
        """Makes sure that we got the correct response from a dataset
        that can't be uploaded."""
        self.assertTrue(resp.status_code, 400)

    def upload_folder_of_files(self, folder, final_check, session_ids=None):
        # Upload every "main" file in a folder and run final_check on each.
        mains = ('.tif', '.shp', '.zip', '.asc')

        def is_main(_file):
            # main files are those whose extension drives an upload
            _, ext = os.path.splitext(_file)
            return (ext.lower() in mains)

        for main in filter(is_main, os.listdir(folder)):
            # get the abs path to the file
            _file = os.path.join(folder, main)
            base, _ = os.path.splitext(_file)
            resp, data = self.client.upload_file(_file)
            if session_ids is not None:
                # collect the upload session id from the returned url
                if not isinstance(data, str) and data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
            if not isinstance(data, str):
                self.wait_for_progress(data.get('progress'))
            final_check(base, resp, data)

    def upload_file(self, fname, final_check, check_name=None, session_ids=None):
        # Upload a single file, wait for the import to progress, then run
        # final_check(check_name, resp, data).
        if not check_name:
            check_name, _ = os.path.splitext(fname)
        logger.error(f" debug CircleCI...........upload_file: {fname}")
        resp, data = self.client.upload_file(fname)
        if session_ids is not None:
            if not isinstance(data, str):
                if data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
        if not isinstance(data, str):
            logger.error(
                f" debug CircleCI...........wait_for_progress: {data.get('progress')}"
            )
            self.wait_for_progress(data.get('progress'))
        final_check(check_name, resp, data)

    def wait_for_progress(self, progress_url, wait_for_progress_cnt=0):
        # Poll the progress endpoint recursively (up to 30 times) until the
        # import session reports COMPLETE; returns the last JSON payload.
        if progress_url:
            resp = self.client.get(progress_url)
            json_data = resp.json()
            logger.error(
                f" [{wait_for_progress_cnt}] debug CircleCI...........json_data: {json_data}"
            )
            # "COMPLETE" state means done
            if json_data and json_data.get('state', '') == 'COMPLETE':
                return json_data
            elif json_data and json_data.get('state', '') == 'RUNNING' and \
                    wait_for_progress_cnt < 30:
                logger.error(
                    f"[{wait_for_progress_cnt}] ... wait_for_progress @ {progress_url}"
                )
                json_data = self.wait_for_progress(
                    progress_url,
                    wait_for_progress_cnt=wait_for_progress_cnt + 1)
                return json_data

    def temp_file(self, ext):
        # Create a temp file that tearDown will delete; returns (fd, path).
        fd, abspath = tempfile.mkstemp(ext)
        self._tempfiles.append(abspath)
        return fd, abspath

    def make_csv(self, fieldnames, *rows):
        # Write the given rows (dicts) to a temporary CSV with a header
        # line; returns the file path.
        fd, abspath = self.temp_file('.csv')
        with open(abspath, 'w', newline='') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()
            for r in rows:
                writer.writerow(r)
        return abspath
# NOTE(review): the `else: return ...` below is the tail of a helper
# (presumably `resolve(layer, style)`) whose def line is outside this chunk.
else:
    return (layer, demo.get_layer(layer).default_style.name)

# Resolve every (layer, style) pair of the source layergroup on `demo`.
g = demo.get_layergroup("groupname")
resolved = [resolve(l, s) for (l, s) in zip(g.layers, g.styles)]

# upload all styles to live
for (l, s) in resolved:
    wayne_style = prefix + s
    style_on_server = live.get_style(wayne_style)
    sld = demo.get_style(s).sld_body
    if style_on_server is None:
        # style does not exist on the target server yet -> create it
        live.create_style(wayne_style, sld)
    else:
        # style already exists -> overwrite its SLD body
        style_on_server.update_body(sld)

backup_layernames = {}

# check that all requisite layers exist!
for (l, s) in resolved:
    assert live.get_layer(l) is not None or l in backup_layernames, l

# map each layer through the backup-name table (falling back to its own
# name) and apply the prefix to every style name
lyrs = [backup_layernames.get(x[0], x[0]) for x in resolved]
stls = [(prefix + x[1]) for x in resolved]

# create the layergroup on the target if missing, then update and save it
wayne_group = live.get_layergroup(groupname)
if wayne_group is None:
    wayne_group = live.create_layergroup(groupname)
wayne_group.layers = lyrs
wayne_group.styles = stls
live.save(wayne_group)
class ModifyingTests(unittest.TestCase):
    """Integration tests that mutate state on a live GeoServer catalog.

    Requires a running GeoServer described by GSPARAMS and, for the
    datastore tests, a database described by DBPARAMS. Each test talks
    to the server through a fresh gsconfig Catalog built in setUp.

    Changes vs. the original: deprecated unittest aliases
    (``assert_``/``assertEquals`` — removed in Python 3.12) replaced by
    ``assertTrue``/``assertEqual``, and the bare ``except:`` in
    testPublishFeatureType narrowed to ``except Exception:``. Behavior
    is otherwise unchanged.
    """

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])
        self.gs_version = self.cat.get_short_version()

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resources("bugsites")[0]
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resources("bugsites")[0]
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resources("bugsites")[0]
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resources("bugsites")[0]
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resources("bugsites")[0]
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            data_source_name = lyr.resource.native_name
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_stores("gsconfig_import_test")[0]
            # make sure it's gone
            self.assertTrue(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326",
                                         native_name=data_source_name)
            # and now it's not
            self.assertTrue(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_stores("gsconfig_import_test")[0]
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            try:
                if lyr:
                    self.cat.delete(lyr)
                    self.cat.delete(lyr.resource)
                if ds:
                    self.cat.delete(ds)
            except Exception:
                # best-effort teardown: server may already be clean
                pass

    def testDataStoreModify(self):
        ds = self.cat.get_stores("sf")[0]
        self.assertFalse("foo" in ds.connection_parameters)
        # self-assignment marks the property dirty so save() serializes it
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_stores("sf")[0]
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_stores("gsconfig_import_test")[0]
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    @drop_table('import2')
    def testVirtualTables(self):
        ds = self.cat.create_datastore("gsconfig_import_test2")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_stores("gsconfig_import_test2")[0]
        self.cat.add_data_to_store(ds, "import2", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })
        geom = JDBCVirtualTableGeometry('the_geom', 'MultiPolygon', '4326')
        ft_name = 'my_jdbc_vt_test'
        epsg_code = 'EPSG:4326'
        sql = "select * from import2 where 'STATE_NAME' = 'Illinois'"
        keyColumn = None
        parameters = None
        jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn,
                                   parameters)
        ft = self.cat.publish_featuretype(ft_name, ds, epsg_code,
                                          jdbc_virtual_table=jdbc_vt)

    # DISABLED; this test works only in the very particular case
    # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR
    # def testCoverageStoreCreate(self):
    #     ds = self.cat.create_coveragestore2("coverage_gsconfig")
    #     ds.data_url = "file:test/data/mytiff.tiff"
    #     self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_stores("sfdem")[0]
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_stores("sfdem")[0]
        self.assertEqual("WorldImage", cs.type)
        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resources("Arc_Sample")[0]
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resources("Arc_Sample")[0]
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resources("Arc_Sample")[0]
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resources("Arc_Sample")[0]
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(
            ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEqual(set(rs.request_srs_list), srs_before,
                         str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resources("Arc_Sample")[0]
        self.assertEqual(set(rs.request_srs_list), srs_after,
                         str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEqual(set(rs.response_srs_list), srs_before,
                         str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resources("Arc_Sample")[0]
        self.assertEqual(set(rs.response_srs_list), srs_after,
                         str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEqual(set(rs.supported_formats), formats,
                         str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resources("Arc_Sample")[0]
        self.assertEqual(set(rs.supported_formats), formats_after,
                         str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspaces("wmstest")[0]
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_stores("wmsstore")[0]
        self.assertEqual(1, len(self.cat.get_stores(workspaces=wmstest.name)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name,
                                                 nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

        # Testing projection and projection policy changes
        changed_layer.projection = "EPSG:900913"
        changed_layer.projection_policy = "REPROJECT_TO_DECLARED"
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        layer = self.cat.get_layer(changed_layer.name)
        self.assertEqual(layer.resource.projection_policy,
                         changed_layer.projection_policy)
        self.assertEqual(layer.resource.projection, changed_layer.projection)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }
        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.items():
            self.assertEqual(v, shapefile_plus_sidecars[k])
        sf = self.cat.get_workspaces("sf")[0]
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars,
                                     sf.name)
        self.assertTrue(
            len(self.cat.get_resources("states_test", workspaces=sf.name)) > 0)
        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore(
                "states_test", shapefile_plus_sidecars, sf))
        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assertTrue(self.cat.get_layer("states_test") is None)

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = {
            'title': 'Not the original attribution',
            'width': '123',
            'height': '321',
            'href': 'http://www.georchestra.org',
            'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg',
            'type': 'image/jpeg'
        }

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")
        old_default_style = lyr.default_style
        lyr.default_style = 'pophatch'
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # check count before tests (upload)
        count = len(self.cat.get_styles())

        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        self.cat._cache.clear()
        fred = self.cat.get_styles(names="fred")[0]
        self.assertTrue(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(),
                              overwrite=True)
        self.cat._cache.clear()
        fred = self.cat.get_styles("fred")[0]
        self.assertTrue(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.cat._cache.clear()
        self.assertTrue(len(self.cat.get_styles("fred")) == 0)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        self.cat._cache.clear()
        fred = self.cat.get_styles("fred")[0]
        self.assertEqual("Fred", fred.sld_title)

        # compare count after upload
        self.assertEqual(count + 1, len(self.cat.get_styles()))

        # attempt creating a new style without "title"
        self.cat.create_style("notitle", open("test/notitle.sld").read())
        self.cat._cache.clear()
        notitle = self.cat.get_styles("notitle")[0]
        self.assertEqual(None, notitle.sld_title)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(),
                              workspace="topp")
        self.cat._cache.clear()
        jed = self.cat.get_styles(names="jed", workspaces="blarny")
        self.assertTrue(len(jed) == 0)
        jed = self.cat.get_styles(names="jed", workspaces="topp")
        self.assertTrue(len(jed) == 1)
        self.assertEqual("Fred", jed[0].sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(),
                              overwrite=True, workspace="topp")
        self.cat._cache.clear()
        jed = self.cat.get_styles(names="jed", workspaces="topp")
        self.assertTrue(len(jed) == 1)
        self.assertEqual("Ted", jed[0].sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed[0], purge=True)
        self.assertEqual(
            0, len(self.cat.get_styles(names="jed", workspaces="topp")))

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(),
                              workspace="topp")
        self.cat._cache.clear()
        jed = self.cat.get_styles(names="jed", workspaces="topp")
        self.assertEqual("Fred", jed[0].sld_title)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(),
                              overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(),
                              overwrite=True, workspace="topp")
        self.cat._cache.clear()
        styles = self.cat.get_styles(names="ned, zed", workspaces="topp")
        self.assertEqual(2, len(styles))
        ned, zed = styles

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)
        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspaces("acme")
        self.assertEqual(0, len(ws))
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspaces("acme")[0]
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspaces("foo")[0]
        self.cat.delete(ws)
        ws = self.cat.get_workspaces("foo")
        self.assertEqual(0, len(ws))

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_stores('states_shapefile')[0]
        self.assertTrue(states.enabled == True)
        states.enabled = False
        self.assertTrue(states.enabled == False)
        self.cat.save(states)
        states = self.cat.get_stores('states_shapefile')[0]
        self.assertTrue(states.enabled == False)
        states.enabled = True
        self.cat.save(states)
        states = self.cat.get_stores('states_shapefile')[0]
        self.assertTrue(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroups("tasmania")[0]
        # GeoServer >= 2.13 reports workspace-qualified layer names
        if self.gs_version >= "2.13":
            self.assertEqual(tas.layers, [
                'topp:tasmania_state_boundaries', 'topp:tasmania_water_bodies',
                'topp:tasmania_roads', 'topp:tasmania_cities'
            ], tas.layers)
        else:
            self.assertEqual(tas.layers, [
                'tasmania_state_boundaries', 'tasmania_water_bodies',
                'tasmania_roads', 'tasmania_cities'
            ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]
        self.cat.save(tas)

        # this verifies the local state
        if self.gs_version >= "2.13":
            self.assertEqual(tas.layers, [
                'topp:tasmania_state_boundaries', 'topp:tasmania_water_bodies',
                'topp:tasmania_roads'
            ], tas.layers)
        else:
            self.assertEqual(tas.layers, [
                'tasmania_state_boundaries', 'tasmania_water_bodies',
                'tasmania_roads'
            ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        if self.gs_version >= "2.13":
            self.assertEqual(tas.layers, [
                'topp:tasmania_state_boundaries', 'topp:tasmania_water_bodies',
                'topp:tasmania_roads'
            ], tas.layers)
        else:
            self.assertEqual(tas.layers, [
                'tasmania_state_boundaries', 'tasmania_water_bodies',
                'tasmania_roads'
            ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testImageMosaic(self):
        """
        Test case for Issue #110
        """
        # testing the mosaic creation
        name = 'cea_mosaic'
        data = open('test/data/mosaic/cea.zip', 'rb')
        self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource
        self.assertTrue(resource is not None)

        # delete granule from mosaic
        coverage = name
        store = self.cat.get_stores(name)[0]
        granules = self.cat.list_granules(coverage, store)
        self.assertEqual(1, len(granules['features']))
        granule_id = name + '.1'
        self.cat.delete_granule(coverage, store, granule_id)
        granules = self.cat.list_granules(coverage, store)
        self.assertEqual(0, len(granules['features']))

        '''
        testing external Image mosaic creation
        '''
        name = 'cea_mosaic_external'
        path = os.path.join(os.getcwd(), 'test/data/mosaic/external')
        self.cat.create_imagemosaic(name, path, workspace='topp')
        self.cat._cache.clear()
        resource = self.cat.get_layer("external").resource
        self.assertTrue(resource is not None)

        # add granule to mosaic
        granule_path = os.path.join(
            os.getcwd(), 'test/data/mosaic/granules/cea_20150102.tif')
        self.cat.add_granule(granule_path, name, workspace='topp')
        granules = self.cat.list_granules("external", name, 'topp')
        self.assertEqual(2, len(granules['features']))

        # add external granule to mosaic
        granule_path = os.path.join(
            os.getcwd(), 'test/data/mosaic/granules/cea_20150103.zip')
        self.cat.add_granule(granule_path, name, workspace='topp')
        granules = self.cat.list_granules("external", name, 'topp')
        self.assertEqual(3, len(granules['features']))

        # Delete store
        store = self.cat.get_stores(name)[0]
        self.cat.delete(store, purge=True, recurse=True)
        self.cat._cache.clear()

    def testTimeDimension(self):
        sf = self.cat.get_workspaces("sf")[0]
        files = shapefile_and_friends(
            os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)
        # clearing the cache first guarantees a fresh fetch from the server
        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer(
            'boxes_with_end_date').resource

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None,
                                 attribute="date")
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)

        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)

        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)

        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)
    def handle(self, *args, **options):
        """Audit GeoServer layers' time-dimension settings and fix them.

        For each layer (all catalog layers, or the comma-separated
        ``layer_list`` option), checks the 'time' DimensionInfo and tries
        to enable it, set LIST presentation, and pick a time attribute.
        Fixes are skipped when the dry-run option is set. Unfixable
        layers are collected and printed at the end.

        NOTE(review): reads options['dry-run'] — argparse normally
        stores a --dry-run flag under 'dry_run'; confirm the parser
        declares this dest explicitly.
        """
        rest_url = ogc_server_settings.rest
        rest_user = ogc_server_settings.credentials.username
        rest_pass = ogc_server_settings.credentials.password
        dry_run = options['dry-run']
        gs = Catalog(rest_url, rest_user, rest_pass)
        layers = gs.get_layers()
        if options['layer_list'] is not None:
            # explicit list of layer names overrides the full catalog scan
            layers = options['layer_list'].split(',')
        unfixable = []
        valid = []

        def add_unfixable(layer_name, reason):
            # Record a layer that cannot be repaired, with the reason.
            layer = {'name': layer_name, 'error': reason}
            unfixable.append(layer)

        def add_valid(layer_name, logged_attributes):
            # Record a layer whose checked attributes were (or became) valid.
            valid_layer = {'name': layer_name}
            for key, value in logged_attributes.items():
                valid_layer[key] = value
            valid.append(valid_layer)

        def check_valid_attribute(validation_object, object_to_check,
                                  attribute_to_check, default_value_to_check):
            # Returns True (and logs the value) when the attribute is set to
            # something other than its "unset" default.
            # NOTE(review): uses `is not` against default_value_to_check —
            # identity, not equality. Safe for None/False but relies on
            # string interning when the default is 'LIST'; confirm intent.
            attribute = getattr(object_to_check, attribute_to_check)
            if attribute is not None and attribute is not default_value_to_check:
                validation_object[attribute_to_check] = attribute
                return True
            else:
                return False

        def fix_time_enabled(self, layer_object, validation_dict, dry_run):
            # Turn the time dimension on and verify against the server.
            resource = layer_object.resource
            metadata = dict(resource.metadata)
            metadata['time'].enabled = True
            resource.metadata = metadata
            if dry_run is False:
                resource.catalog.save(resource)
            # re-fetch to confirm the change took effect server-side
            # NOTE(review): nesting reconstructed from collapsed source —
            # the verification may belong inside the dry-run guard; confirm.
            check_layer = gs.get_layer(layer_object.name)
            if check_valid_attribute(validation_dict,
                                     check_layer.resource.metadata.get('time'),
                                     'enabled', False) is False:
                self.stdout.write('Could not enable time for {0}'.format(
                    layer_object.name))
            else:
                self.stdout.write('Time has been enabled for {0}'.format(
                    layer_object.name))

        def fix_time_presentation(self, layer_object, validation_dict, dry_run):
            # Force the time presentation to LIST and verify.
            resource = layer_object.resource
            metadata = dict(resource.metadata)
            metadata['time'].presentation = 'LIST'
            resource.metadata = metadata
            if dry_run is False:
                resource.catalog.save(resource)
            check_layer = gs.get_layer(layer_object.name)
            if check_valid_attribute(validation_dict,
                                     check_layer.resource.metadata.get('time'),
                                     'presentation', 'LIST') is False:
                self.stdout.write('Could not set presentation for {0}'.format(
                    layer_object.name))
            else:
                self.stdout.write(
                    'Presentation has been set to list for {0}'.format(
                        layer_object.name))

        def fix_time_attribute(self, layer_object, validation_dict, dry_run):
            # find date fields
            resource = layer_object.resource
            metadata = dict(resource.metadata)
            # fetch the feature type description to discover its attributes
            fields = requests.get(layer_object.resource.href.replace(
                '.xml', '.json'), auth=(rest_user, rest_pass))
            the_fields = fields.json()['featureType']['attributes']
            # candidate attributes: any whose Java binding mentions Time/Date
            dates = [
                field.get('name') for field in the_fields['attribute']
                if 'Time' in field.get('binding') or 'Date' in field.get('binding')
            ]
            if len(dates) == 0:
                add_unfixable(layer_object.name,
                              'Layer does not contain a time attribute')
                return
            elif len(dates) == 1:
                metadata['time'].attribute = dates[0]
            else:
                # multiple candidates: ask the operator interactively
                title = 'More than one date field found for {0} please select which to use for time attribute'.format(
                    layer_object.name)
                option, index = pick(dates, title)
                metadata['time'].attribute = dates[index]
            resource.metadata = metadata
            if dry_run is False:
                resource.catalog.save(resource)
            check_layer = gs.get_layer(layer_object.name)
            if check_valid_attribute(validation_dict,
                                     check_layer.resource.metadata.get('time'),
                                     'attribute', None) is False:
                self.stdout.write('Could not set attribute for {0}'.format(
                    layer_object.name))
            else:
                self.stdout.write('Attribute set to {0} for {1}'.format(
                    metadata['time'].attribute, layer_object.name))

        for lyr in layers:
            lyr_obj = lyr
            if type(lyr) is str:
                # layer_list option gives names; resolve to catalog objects
                lyr_obj = gs.get_layer(lyr)
            if lyr_obj is None:
                add_unfixable(lyr, 'Layer does not exist')
                self.stdout.write('{} does not exist in geoserver'.format(lyr))
                continue
            layer_validation = dict()
            if lyr_obj.resource.metadata:
                dimension_info = lyr_obj.resource.metadata.get('time')
                if dimension_info is not None:
                    # attempt each fix only when the attribute checks invalid
                    if check_valid_attribute(layer_validation, dimension_info,
                                             'enabled', False) is False:
                        fix_time_enabled(self, lyr_obj, layer_validation,
                                         dry_run)
                    if check_valid_attribute(layer_validation, dimension_info,
                                             'presentation', 'LIST') is False:
                        fix_time_presentation(self, lyr_obj, layer_validation,
                                              dry_run)
                    if check_valid_attribute(layer_validation, dimension_info,
                                             'attribute', None) is False:
                        fix_time_attribute(self, lyr_obj, layer_validation,
                                           dry_run)
                else:
                    add_unfixable(
                        lyr, 'Layer was not uploaded with time configured.')
            add_valid(lyr, layer_validation)
        self.print_invalid_list(unfixable)
    def handle(self, *args, **options):
        """Audit GeoServer layers' time-dimension settings and fix them.

        NOTE(review): this is a near-verbatim duplicate of another
        ``handle`` definition earlier in this file (differing only in
        line wrapping) — likely a copy/paste; consider consolidating.

        For each layer (all catalog layers, or the comma-separated
        ``layer_list`` option), checks the 'time' DimensionInfo and tries
        to enable it, set LIST presentation, and pick a time attribute.
        Fixes are skipped when the dry-run option is set; unfixable
        layers are reported at the end.
        NOTE(review): reads options['dry-run'] — argparse normally uses
        dest 'dry_run'; confirm the parser declares this key.
        """
        rest_url = ogc_server_settings.rest
        rest_user = ogc_server_settings.credentials.username
        rest_pass = ogc_server_settings.credentials.password
        dry_run = options['dry-run']
        gs = Catalog(rest_url, rest_user, rest_pass)
        layers = gs.get_layers()
        if options['layer_list'] is not None:
            # explicit name list overrides the full catalog scan
            layers = options['layer_list'].split(',')
        unfixable = []
        valid = []

        def add_unfixable(layer_name, reason):
            # Record a layer that cannot be repaired, with the reason.
            layer = {'name': layer_name, 'error': reason}
            unfixable.append(layer)

        def add_valid(layer_name, logged_attributes):
            # Record a layer whose checked attributes were (or became) valid.
            valid_layer = {'name': layer_name}
            for key, value in logged_attributes.items():
                valid_layer[key] = value
            valid.append(valid_layer)

        def check_valid_attribute(validation_object, object_to_check,
                                  attribute_to_check, default_value_to_check):
            # True (and logged) when the attribute differs from its "unset"
            # default. NOTE(review): `is not` is identity, not equality —
            # fine for None/False, fragile for the 'LIST' string default.
            attribute = getattr(object_to_check, attribute_to_check)
            if attribute is not None and attribute is not default_value_to_check:
                validation_object[attribute_to_check] = attribute
                return True
            else:
                return False

        def fix_time_enabled(self, layer_object, validation_dict, dry_run):
            # Turn the time dimension on, then verify against the server.
            resource = layer_object.resource
            metadata = dict(resource.metadata)
            metadata['time'].enabled = True
            resource.metadata = metadata
            if dry_run is False:
                resource.catalog.save(resource)
            # NOTE(review): nesting reconstructed from collapsed source —
            # the verification may belong inside the dry-run guard; confirm.
            check_layer = gs.get_layer(layer_object.name)
            if check_valid_attribute(validation_dict,
                                     check_layer.resource.metadata.get('time'),
                                     'enabled', False) is False:
                self.stdout.write(
                    'Could not enable time for {0}'.format(layer_object.name))
            else:
                self.stdout.write(
                    'Time has been enabled for {0}'.format(layer_object.name))

        def fix_time_presentation(self, layer_object, validation_dict, dry_run):
            # Force the time presentation to LIST, then verify.
            resource = layer_object.resource
            metadata = dict(resource.metadata)
            metadata['time'].presentation = 'LIST'
            resource.metadata = metadata
            if dry_run is False:
                resource.catalog.save(resource)
            check_layer = gs.get_layer(layer_object.name)
            if check_valid_attribute(validation_dict,
                                     check_layer.resource.metadata.get('time'),
                                     'presentation', 'LIST') is False:
                self.stdout.write(
                    'Could not set presentation for {0}'.format(layer_object.name))
            else:
                self.stdout.write(
                    'Presentation has been set to list for {0}'.format(layer_object.name))

        def fix_time_attribute(self, layer_object, validation_dict, dry_run):
            # find date fields
            resource = layer_object.resource
            metadata = dict(resource.metadata)
            # fetch the feature type description to discover its attributes
            fields = requests.get(layer_object.resource.href.replace(
                '.xml', '.json'), auth=(rest_user, rest_pass))
            the_fields = fields.json()['featureType']['attributes']
            # candidates: attributes whose Java binding mentions Time/Date
            dates = [field.get('name') for field in the_fields['attribute']
                     if 'Time' in field.get(
                         'binding') or 'Date' in field.get('binding')]
            if len(dates) == 0:
                add_unfixable(layer_object.name,
                              'Layer does not contain a time attribute')
                return
            elif len(dates) == 1:
                metadata['time'].attribute = dates[0]
            else:
                # multiple candidates: ask the operator interactively
                title = 'More than one date field found for {0} please select which to use for time attribute'.format(
                    layer_object.name)
                option, index = pick(dates, title)
                metadata['time'].attribute = dates[index]
            resource.metadata = metadata
            if dry_run is False:
                resource.catalog.save(resource)
            check_layer = gs.get_layer(layer_object.name)
            if check_valid_attribute(validation_dict,
                                     check_layer.resource.metadata.get('time'),
                                     'attribute', None) is False:
                self.stdout.write(
                    'Could not set attribute for {0}'.format(layer_object.name))
            else:
                self.stdout.write('Attribute set to {0} for {1}'.format(
                    metadata['time'].attribute, layer_object.name))

        for lyr in layers:
            lyr_obj = lyr
            if type(lyr) is str:
                # layer_list option gives names; resolve to catalog objects
                lyr_obj = gs.get_layer(lyr)
            if lyr_obj is None:
                add_unfixable(lyr, 'Layer does not exist')
                self.stdout.write('{} does not exist in geoserver'.format(lyr))
                continue
            layer_validation = dict()
            if lyr_obj.resource.metadata:
                dimension_info = lyr_obj.resource.metadata.get('time')
                if dimension_info is not None:
                    # attempt each fix only when the attribute checks invalid
                    if check_valid_attribute(layer_validation, dimension_info,
                                             'enabled', False) is False:
                        fix_time_enabled(
                            self, lyr_obj, layer_validation, dry_run)
                    if check_valid_attribute(layer_validation, dimension_info,
                                             'presentation', 'LIST') is False:
                        fix_time_presentation(
                            self, lyr_obj, layer_validation, dry_run)
                    if check_valid_attribute(layer_validation, dimension_info,
                                             'attribute', None) is False:
                        fix_time_attribute(
                            self, lyr_obj, layer_validation, dry_run)
                else:
                    add_unfixable(
                        lyr, 'Layer was not uploaded with time configured.')
            add_valid(lyr, layer_validation)
        self.print_invalid_list(unfixable)
def geoserver_pre_save(instance, sender, **kwargs):
    """Send information to geoserver.

    The attributes sent include:

     * Title
     * Abstract
     * Name
     * Keywords
     * Metadata Links,
     * Point of Contact name and url

    Afterwards it reads back the bounding box from GeoServer onto the
    instance and refreshes the thumbnail (without saving the model).
    Returns silently when GeoServer is unreachable or the resource is
    missing.
    """
    url = "%srest" % settings.OGC_SERVER['default']['LOCATION']
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (EnvironmentError, FailedRequestError) as e:
        gs_resource = None
        # BUG FIX: the two string pieces were concatenated without a
        # separating space ('..."%s"to save...'), and the exception was
        # passed as a %-argument to a message with no placeholder, which
        # breaks record formatting at emit time. Use lazy %-args and the
        # non-deprecated warning() method instead.
        logger.warning(
            'Could not connect to geoserver at "%s" '
            'to save information for layer "%s": %s',
            settings.OGC_SERVER['default']['LOCATION'], instance.name, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a syncronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warning('Could not get geoserver resource for %s', instance)
        return

    gs_resource.title = instance.title
    gs_resource.abstract = instance.abstract
    gs_resource.name = instance.name

    # Get metadata links
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.name, link.mime, link.url))
    gs_resource.metadata_links = metadata_links
    gs_catalog.save(gs_resource)

    gs_layer = gs_catalog.get_layer(instance.name)
    if instance.poc and instance.poc.user:
        gs_layer.attribution = str(instance.poc.user)
        profile = Profile.objects.get(user=instance.poc.user)
        # SITEURL ends with '/', profile URL starts with one: drop one of them
        gs_layer.attribution_link = settings.SITEURL[:-1] + \
            profile.get_absolute_url()
        gs_catalog.save(gs_layer)

    # Get information back from geoserver:
    #  * Bounding Box
    #  * SRID
    #  * Download links (WMS, WCS or WFS and KML)
    #  * Styles (SLD)
    # (was a stray mid-function string literal; kept as a comment so it is
    # no longer a no-op statement)
    gs_resource = gs_catalog.get_resource(instance.name)
    bbox = gs_resource.latlon_bbox

    # FIXME(Ariel): Correct srid setting below
    # self.srid = gs_resource.src

    # Set bounding box values (latlon_bbox order: minx, maxx, miny, maxy)
    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]

    instance.update_thumbnail(save=False)
class ModifyingTests(unittest.TestCase):
    """Integration tests that mutate state on a locally running GeoServer.

    Each test talks to http://localhost:8080/geoserver/rest via gsconfig's
    Catalog and generally follows a save/re-fetch/assert round-trip pattern.
    Several tests restore the original server state afterwards, so statement
    order is significant throughout.
    """

    def setUp(self):
        # Fresh catalog per test; credentials default to gsconfig's defaults.
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        """Round-trip abstract, keywords and metadata links on a featuretype."""
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        """Create a PostGIS-backed datastore from module-level DBPARAMS."""
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        """Publish a featuretype from an existing table in a datastore."""
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            self.cat.delete(ds)

    def testDataStoreModify(self):
        """Adding a connection parameter persists and keeps the workspace."""
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        # Self-assignment forces the lazy parameters to be fetched/dirty.
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        """Create a datastore and upload shapefile data into it."""
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(
            ds, "import", {
                'shp': 'test/data/states.shp',
                'shx': 'test/data/states.shx',
                'dbf': 'test/data/states.dbf',
                'prj': 'test/data/states.prj'
            })

    def testCoverageStoreCreate(self):
        """Create a coverage store pointing at a server-side file URL."""
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        """Round-trip the coverage store type, then restore it."""
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        """Round-trip coverage metadata: abstract, links, SRS lists, formats."""
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(
            ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before,
                          str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after,
                          str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before,
                          str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after,
                          str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats,
                          str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after,
                          str(rs.supported_formats))

    def testWmsStoreCreate(self):
        """Create a cascading WMS store from a capabilities URL."""
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        """Cascade every available remote WMS layer into a new workspace."""
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

    def testFeatureTypeCreate(self):
        """Create a featurestore from a shapefile; bad uploads must raise."""
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(
            self.cat.get_resource("states_test", workspace=sf) is not None)

        # Re-creating the same store must conflict ...
        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore(
                "states_test", shapefile_plus_sidecars, sf))

        # ... and vector data is not valid coverage input.
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore(
                "states_raster_test", shapefile_plus_sidecars, sf))

        # Raster files mislabelled as shapefile sidecars must be rejected.
        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf))

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)

    def testCoverageCreate(self):
        """Coverage upload validation: bad inputs must raise UploadError."""
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        # self.assertRaises(
        #     ConflictingDataError,
        #     lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore(
                "Pk50095_vector", tiffdata, sf))

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff))

    def testLayerSave(self):
        """Round-trip layer attribution and default/alternate style swap."""
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        # Swap the default style with the "pophatch" alternate style.
        old_default_style = lyr.default_style
        lyr.default_style = (
            s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        """Create, overwrite, delete and re-create a global style."""
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(),
                              overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

    def testWorkspaceStyles(self):
        """Same lifecycle as testStyles but scoped to the 'topp' workspace."""
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(),
                              workspace="topp")

        # a workspace-scoped style is invisible from other workspaces
        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(),
                              overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(),
                              workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        """Assign workspace-scoped styles to a layer and verify after refresh."""
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(),
                              overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(),
                              overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        """Setting the default workspace works and is restored afterwards."""
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        """Toggle the enabled flag off and back on, saving each time."""
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        """Drop the last layer/style pair from a group, locally and remotely."""
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)
class UploaderBase(TestCase): settings_overrides = [] @classmethod def setUpClass(cls): super(UploaderBase, cls).setUpClass() # don't accidentally delete anyone's layers if Layer.objects.all().count(): if 'DELETE_LAYERS' not in os.environ: print print 'FAIL: There are layers in the test database' print 'Will not run integration tests unless `DELETE_LAYERS`' print 'Is specified as an environment variable' print raise Exception('FAIL, SEE ABOVE') # make a test_settings module that will apply our overrides test_settings = ['from geonode.settings import *'] if os.path.exists('geonode/upload/tests/test_settings.py'): test_settings.append('from geonode.upload.tests.test_settings import *') for so in cls.settings_overrides: test_settings.append('%s=%s' % so) with open('integration_settings.py', 'w') as fp: fp.write('\n'.join(test_settings)) # runserver with settings args = ['python','manage.py','runserver','--settings=integration_settings','--verbosity=0'] # see http://www.doughellmann.com/PyMOTW/subprocess/#process-groups-sessions cls._runserver = subprocess.Popen(args, stderr=open('test.log','w') ,preexec_fn=os.setsid) # await startup cl = Client( GEONODE_URL, GEONODE_USER, GEONODE_PASSWD ) for i in range(5): try: cl.get_html('/') break except: time.sleep(.5) if cls._runserver.poll() is not None: raise Exception("Error starting server, check test.log") @classmethod def tearDownClass(cls): super(UploaderBase, cls).tearDownClass() # kill server process group os.killpg(cls._runserver.pid, signal.SIGKILL) if os.path.exists('integration_settings.py'): os.unlink('integration_settings.py') def setUp(self): super(UploaderBase, self).setUp() self._tempfiles = [] self.client = Client( GEONODE_URL, GEONODE_USER, GEONODE_PASSWD ) self.catalog = Catalog( GEOSERVER_URL + 'rest', GEOSERVER_USER, GEOSERVER_PASSWD ) # @todo - this is obviously the brute force approach - ideally, # these cases would be more declarative and delete only the things # they mess with for l in 
Layer.objects.all(): try: l.delete() except: print 'unable to delete layer', l # and destroy anything left dangling on geoserver cat = Layer.objects.gs_catalog map(lambda name: cascading_delete(cat, name), [l.name for l in cat.get_layers()]) def tearDown(self): super(UploaderBase, self).tearDown() map(os.unlink, self._tempfiles) def check_layer_geonode_page(self, path): """ Check that the final layer page render's correctly after an layer is uploaded """ # the final url for uploader process. This does a redirect to # the final layer page in geonode resp, _ = self.client.get_html(path) self.assertTrue('content-type' in resp.headers) # if we don't get a content type of html back, thats how we # know there was an error. self.assertTrue( resp.headers['content-type'].startswith('text/html') ) def check_layer_geoserver_caps(self, original_name): """ Check that a layer shows up in GeoServer's get capabilities document """ # using owslib wms = get_wms(layer_name=original_name) self.assertTrue(original_name in wms.contents, '%s is not in %s' % (original_name, wms.contents)) def check_layer_geoserver_rest(self, original_name): """ Check that a layer shows up in GeoServer rest api after the uploader is done""" # using gsconfig to test the geoserver rest api. 
layer = self.catalog.get_layer(original_name) self.assertIsNotNone(layer is not None) def check_and_pass_through_timestep(self): raise Exception('not implemented') redirect_to = data['redirect_to'] self.assertEquals(redirect_to, upload_step('time')) resp = self.client.make_request(upload_step('time')) self.assertEquals(resp.code, 200) data = {'csrfmiddlewaretoken': self.client.get_crsf_token()} resp = self.client.make_request(upload_step('time'), data) data = json.loads(resp.read()) return resp, data def complete_raster_upload(self, file_path, resp, data): return self.complete_upload(file_path, resp, data, is_raster=True) def check_save_step(self, resp, data): """Verify the initial save step""" self.assertEquals(resp.code, 200) self.assertTrue(isinstance(data, dict)) # make that the upload returns a success True key self.assertTrue(data['success'], 'expected success but got %s' % data) self.assertTrue('redirect_to' in data) def complete_upload(self, file_path, resp, data, is_raster=False): """Method to check if a layer was correctly uploaded to the GeoNode. 
arguments: file path, the django http response Checks to see if a layer is configured in Django Checks to see if a layer is configured in GeoServer checks the Rest API checks the get cap document """ layer_name, ext = os.path.splitext(os.path.basename(file_path)) self.check_save_step(resp, data) layer_page = self.finish_upload(data['redirect_to'], layer_name, is_raster) self.check_layer_complete(layer_page, layer_name) def finish_upload(self, current_step, layer_name, is_raster=False, skip_srs=False): if (not is_raster and _ALLOW_TIME_STEP): resp, data = self.check_and_pass_through_timestep() self.assertEquals(resp.code, 200) self.assertTrue(data['success'], 'expected success but got %s' % data) self.assertTrue('redirect_to' in data) current_step = data['redirect_to'] self.wait_for_progress(data.get('progress')) if not is_raster and not skip_srs: self.assertEquals(current_step, upload_step('srs')) # if all is good, the srs step will redirect to the final page resp = self.client.get(current_step) else: self.assertEquals(current_step, upload_step('final')) resp = self.client.get(current_step) self.assertEquals(resp.code, 200) url = json.loads(resp.read())['url'] # and the final page should redirect to tha layer page # @todo - make the check match completely (endswith at least) # currently working around potential 'orphaned' db tables self.assertTrue(layer_name in url, 'expected %s in URL, got %s' % (layer_name, url)) return url def check_upload_model(self, original_name): # we can only test this if we're using the same DB as the test instance if not settings.OGC_SERVER['default']['DATASTORE']: return try: upload = Upload.objects.get(layer__name=original_name) except Upload.DoesNotExist: self.fail('expected to find Upload object for %s' % original_name) self.assertTrue(upload.complete) def check_layer_complete(self, layer_page, original_name): '''check everything to verify the layer is complete''' self.check_layer_geonode_page(layer_page) # @todo use the original_name 
# currently working around potential 'orphaned' db tables # this grabs the name from the url (it might contain a 0) original_name = os.path.basename(layer_page).split(':')[1] self.check_layer_geoserver_caps(original_name) self.check_layer_geoserver_rest(original_name) self.check_upload_model(original_name) def check_invalid_projection(self, layer_name, resp, data): """ Makes sure that we got the correct response from an layer that can't be uploaded""" if _ALLOW_TIME_STEP: resp, data = self.check_and_pass_through_timestep() self.assertTrue(resp.code, 200) self.assertTrue(data['success']) self.assertEquals(upload_step("srs"), data['redirect_to']) resp, soup = self.client.get_html(data['redirect_to']) # grab an h2 and find the name there as part of a message saying it's bad h2 = soup.find_all(['h2'])[0] self.assertTrue(str(h2).find(layer_name)) def upload_folder_of_files(self, folder, final_check): mains = ('.tif', '.shp', '.zip') def is_main(_file): _, ext = os.path.splitext(_file) return (ext.lower() in mains) self.client.login() main_files = filter(is_main, os.listdir(folder)) for main in main_files: # get the abs path to the file _file = os.path.join(folder, main) base, _ = os.path.splitext(_file) resp, data = self.client.upload_file(_file) self.wait_for_progress(data.get('progress')) final_check(base, resp, data) def upload_file(self, fname, final_check, check_name=None): self.client.login() if not check_name: check_name, _ = os.path.splitext(fname) resp, data = self.client.upload_file(fname) self.wait_for_progress(data.get('progress')) final_check(check_name, resp, data) def wait_for_progress(self, progress_url): if progress_url: resp = self.client.get(progress_url) assert resp.getcode() == 200, 'Invalid progress status code' raw_data = resp.read() json_data = json.loads(raw_data) # "COMPLETE" state means done if json_data.get('state', '') == 'RUNNING': time.sleep(0.1) self.wait_for_progress(progress_url) def temp_file(self, ext): fd, abspath = 
tempfile.mkstemp(ext) self._tempfiles.append(abspath) return fd, abspath def make_csv(self, *rows): fd, abspath = self.temp_file('.csv') fp = os.fdopen(fd,'wb') out = csv.writer(fp) for r in rows: out.writerow(r) fp.close() return abspath
class UploaderTests(DjagnoOsgeoMixin): """ Basic checks to make sure pages load, etc. """ def create_datastore(self, connection, catalog): settings = connection.settings_dict params = {'database': settings['NAME'], 'passwd': settings['PASSWORD'], 'namespace': 'http://www.geonode.org/', 'type': 'PostGIS', 'dbtype': 'postgis', 'host': settings['HOST'], 'user': settings['USER'], 'port': settings['PORT'], 'enabled': "True"} store = catalog.create_datastore(settings['NAME'], workspace=self.workspace) store.connection_parameters.update(params) try: catalog.save(store) except FailedRequestError: # assuming this is because it already exists pass return catalog.get_store(settings['NAME']) def setUp(self): if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..', 'importer-test-files')): self.skipTest('Skipping test due to missing test data.') # These tests require geonode to be running on :80! self.postgis = db.connections['datastore'] self.postgis_settings = self.postgis.settings_dict self.username, self.password = self.create_user('admin', 'admin', is_superuser=True) self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin') self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials) if self.cat.get_workspace('geonode') == None: self.cat.create_workspace('geonode', 'http://www.geonode.org/') self.workspace = 'geonode' self.datastore = self.create_datastore(self.postgis, self.cat) def tearDown(self): """ Clean up geoserver. 
""" self.cat.delete(self.datastore, recurse=True) def generic_import(self, file, configuration_options=[{'index': 0}]): f = file filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f) res = self.import_file(filename, configuration_options=configuration_options) layer_results=[] for result in res: if result[1].get('raster'): layerfile = result[0] layername = os.path.splitext(os.path.basename(layerfile))[0] layer = Layer.objects.get(name=layername) self.assertTrue(layerfile.endswith('.tif')) self.assertTrue(os.path.exists(layerfile)) l = gdal.OpenEx(layerfile) self.assertTrue(l.GetDriver().ShortName, 'GTiff') layer_results.append(layer) else: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') if not filename.endswith('zip'): self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields) layer_results.append(layer) return layer_results[0] def generic_raster_import(self, file, configuration_options=[{'index': 0}]): f = file filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f) res = self.import_file(filename, configuration_options=configuration_options) layerfile = res[0][0] layername = os.path.splitext(os.path.basename(layerfile))[0] layer = Layer.objects.get(name=layername) self.assertTrue(layerfile.endswith('.tif')) self.assertTrue(os.path.exists(layerfile)) l = gdal.OpenEx(layerfile) self.assertTrue(l.GetDriver().ShortName, 'GTiff') return layer def test_raster(self): """ Tests raster import """ layer = self.generic_raster_import('test_grid.tif', configuration_options=[{'index': 0}]) def test_box_with_year_field(self): """ Tests the import of test_box_with_year_field. 
""" layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date(self): """ Tests the import of test_boxes_with_date. """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_gpkg(self): """ Tests the import of test_boxes_with_date.gpkg. 
""" layer = self.generic_import('boxes_with_date.gpkg', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_plus_raster_gpkg_by_index(self): """ Tests the import of multilayer vector + raster geopackage using index """ layer = self.generic_import('boxes_plus_raster.gpkg', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }, {'index':1}, {'index':2}, {'index':3}, {'index':4}, {'index':5}, {'index':6}, {'index':7},]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_date_csv(self): """ Tests a CSV with WKT polygon. """ layer = self.generic_import('boxes_with_date.csv', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_csv_missing_features(self): """ Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding. """ self.generic_import('missing-features.csv', configuration_options=[{'index': 0}]) def test_boxes_with_iso_date(self): """ Tests the import of test_boxes_with_iso_date. 
""" layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_duplicate_imports(self): """ Tests importing the same layer twice to ensure incrementing file names is properly handled. """ filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip') gi = OGRImport(filename) layers1 = gi.handle({'index': 0, 'name': 'test'}) layers2 = gi.handle({'index': 0, 'name': 'test'}) self.assertEqual(layers1[0][0], 'test') self.assertEqual(layers2[0][0], 'test0') def test_launder(self): """ Ensure the launder function works as expected. """ self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3') self.assertEqual(launder('Testing#'), 'testing_') self.assertEqual(launder(' '), '_') def test_boxes_with_date_iso_date_zip(self): """ Tests the import of test_boxes_with_iso_date. """ layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_dates_bc(self): """ Tests the import of test_boxes_with_dates_bc. 
""" layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_point_with_date(self): """ Tests the import of point_with_date.geojson """ layer = self.generic_import('point_with_date.geojson', configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) # OGR will name geojson layers 'ogrgeojson' we rename to the path basename self.assertTrue(layer.name.startswith('point_with_date')) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,) self.generic_time_check(layer, attribute=date_attr.attribute) def test_boxes_with_end_date(self): """ Tests the import of test_boxes_with_end_date. This layer has a date and an end date field that are typed correctly. 
""" layer = self.generic_import('boxes_with_end_date.shp',configuration_options=[{'index': 0, 'convert_to_date': ['date','enddate'], 'start_date': 'date', 'end_date': 'enddate', 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0] end_date_attr = filter(lambda attr: attr.attribute == 'enddate_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, 'xsd:dateTime') self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute) def test_us_states_kml(self): """ Tests the import of us_states_kml. This layer has a date and an end date field that are typed correctly. """ # TODO: Support time in kmls. layer = self.generic_import('us_states.kml',configuration_options=[{'index': 0}]) def test_mojstrovka_gpx(self): """ Tests the import of mojstrovka.gpx. This layer has a date and an end date field that are typed correctly. """ layer = self.generic_import('mojstrovka.gpx',configuration_options=[{'index': 0, 'convert_to_date': ['time'], 'configureTime': True }]) date_attr = filter(lambda attr: attr.attribute == 'time_as_date', layer.attributes)[0] self.assertEqual(date_attr.attribute_type, u'xsd:dateTime') configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute) self.generic_time_check(layer, attribute=date_attr.attribute) def generic_time_check(self, layer, attribute=None, end_attribute=None): """ Convenience method to run generic tests on time layers. 
""" self.cat._cache.clear() resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace) timeInfo = resource.metadata['time'] self.assertEqual("LIST", timeInfo.presentation) self.assertEqual(True, timeInfo.enabled) self.assertEqual(attribute, timeInfo.attribute) self.assertEqual(end_attribute, timeInfo.end_attribute) def test_us_shootings_csv(self): """ Tests the import of US_Shootings.csv. """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') filename = 'US_Shootings.csv' f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename) layer = self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}]) self.assertTrue(layer.name.startswith('us_shootings')) date_field = 'date' configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field) self.generic_time_check(layer, attribute=date_field) def test_sitins(self): """ Tests the import of US_Civil_Rights_Sitins0.csv """ if osgeo.gdal.__version__ < '2.0.0': self.skipTest('GDAL Version does not support open options') layer = self.generic_import("US_Civil_Rights_Sitins0.csv", configuration_options=[{'index': 0, 'convert_to_date': ['Date']}]) def get_layer_names(self, in_file): """ Gets layer names from a data source. 
""" ds = DataSource(in_file) return map(lambda layer: layer.name, ds) def test_gdal_import(self): filename = 'point_with_date.geojson' f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename) self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['date']}]) def test_wfs(self): """ Tests the import from a WFS Endpoint """ wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs' gi = OGRImport(wfs) layers = gi.handle(configuration_options=[{'layer_name':'og:bugsites'}, {'layer_name':'topp:states'}]) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def test_arcgisjson(self): """ Tests the import from a WFS Endpoint """ endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json' gi = OGRImport(endpoint) layers = gi.handle(configuration_options=[{'index':0}]) for result in layers: layer = Layer.objects.get(name=result[0]) self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') def import_file(self, in_file, configuration_options=[]): """ Imports the file. """ self.assertTrue(os.path.exists(in_file)) # run ogr2ogr gi = OGRImport(in_file) layers = gi.handle(configuration_options=configuration_options) return layers @staticmethod def createFeatureType(catalog, datastore, name): """ Exposes a PostGIS feature type in geoserver. """ headers = {"Content-type": "application/xml"} data = "<featureType><name>{name}</name></featureType>".format(name=name) url = datastore.href.replace(".xml", '/featuretypes.xml'.format(name=name)) headers, response = catalog.http.request(url, "POST ", data, headers) return response def test_file_add_view(self): """ Tests the file_add_view. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() # test login required for this view request = c.get(reverse('uploads-new')) self.assertEqual(request.status_code, 302) c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.context['request'].path, reverse('uploads-list')) self.assertTrue(len(response.context['object_list']) == 1) upload = response.context['object_list'][0] self.assertEqual(upload.user.username, 'non_admin') self.assertEqual(upload.file_type, 'GeoJSON') self.assertTrue(upload.uploadlayer_set.all()) self.assertEqual(upload.state, 'UPLOADED') self.assertIsNotNone(upload.name) uploaded_file = upload.uploadfile_set.first() self.assertTrue(os.path.exists(uploaded_file.file.path)) f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'empty_file.geojson') with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('file', response.context_data['form'].errors) def test_file_add_view_as_json(self): """ Tests the file_add_view. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True) self.assertEqual(response.status_code, 200) self.assertIn('application/json', response.get('Content-Type', '')) content = json.loads(response.content) self.assertIn('state', content) self.assertIn('id', content) def test_describe_fields(self): """ Tests the describe fields functionality. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') with GDALInspector(f) as f: layers = f.describe_fields() self.assertTrue(layers[0]['layer_name'], 'us_shootings') self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude']) self.assertEqual(layers[0]['feature_count'], 203) def test_gdal_file_type(self): """ Tests the describe fields functionality. """ files = [ (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv'), 'CSV'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson'), 'GeoJSON'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'mojstrovka.gpx'), 'GPX'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'us_states.kml'), 'KML'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_year_field.shp'), 'ESRI Shapefile'), (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'), ] from .models import NoDataSourceFound try: for path, file_type in files: with GDALInspector(path) as f: self.assertEqual(f.file_type(), file_type) except NoDataSourceFound as e: print 'No data source found in: {0}'.format(path) raise e def test_configure_view(self): """ Tests the configuration view. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') new_user = User.objects.create(username='******') new_user_perms = ['change_resourcebase_permissions'] c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True, 'permissions': {'users': {'test': new_user_perms, 'AnonymousUser': ["change_layer_data", "download_resourcebase", "view_resourcebase"]}}}] response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username) perms = layer.get_all_level_info() user = User.objects.get(username=self.non_admin_username) # check user permissions for perm in [u'publish_resourcebase', u'change_resourcebase_permissions', u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata', u'download_resourcebase', u'view_resourcebase', u'change_layer_style', u'change_layer_data']: self.assertIn(perm, perms['users'][user]) self.assertTrue(perms['users'][new_user]) self.assertIn('change_resourcebase_permissions', perms['users'][new_user]) self.assertIn("change_layer_data", perms['users'][User.objects.get(username='******')]) lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) def test_configure_view_convert_date(self): """ Tests the configure view with a dataset that needs to 
be converted to a date. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) upload = response.context['object_list'][0] payload = [{'index': 0, 'convert_to_date': ['Date'], 'start_date': 'Date', 'configureTime': True, 'editable': True}] response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 405) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 400) response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload), content_type='application/json') self.assertTrue(response.status_code, 200) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') self.assertTrue(layer.attributes.filter(attribute='date'), 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) # ensure a user who does not own the upload cannot configure an import from it. 
c.logout() c.login_as_admin() response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id)) self.assertEqual(response.status_code, 404) def test_list_api(self): c = AdminClient() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) admin = User.objects.get(username=self.username) non_admin = User.objects.get(username=self.non_admin_username) from osgeo_importer.models import UploadFile f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv') with open(f, 'rb') as f: uploaded_file = SimpleUploadedFile('test_data', f.read()) admin_upload = UploadedData.objects.create(state='Admin test', user=admin) admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin) non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file)) c.login_as_admin() response = c.get('/importer-api/data/') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 2) response = c.get('/importer-api/data/?user__username=admin') self.assertEqual(response.status_code, 200) body = json.loads(response.content) self.assertEqual(len(body['objects']), 1) def test_layer_list_api(self): c = AdminClient() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 401) c.login_as_non_admin() response = c.get('/importer-api/data-layers/') self.assertEqual(response.status_code, 200) def test_delete_from_non_admin_api(self): """ Ensure users can delete their data. 
""" c = AdminClient() f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def test_delete_from_admin_api(self): """ Ensure that administrators can delete data that isn't theirs. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) self.assertEqual(UploadedData.objects.all().count(), 1) c.logout() c.login_as_admin() id = UploadedData.objects.first().id response = c.delete('/importer-api/data/{0}/'.format(id)) self.assertEqual(response.status_code, 204) self.assertEqual(UploadedData.objects.all().count(), 0) def naming_an_import(self): """ Tests providing a name in the configuration options. """ f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() name = 'point-with-a-date' with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = {'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'name': name, 'editable': True} response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload), content_type='application/json') layer = Layer.objects.all()[0] self.assertEqual(layer.title, name.replace('-', '_')) def test_api_import(self): """ Tests the import api. 
""" f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson') c = AdminClient() c.login_as_non_admin() with open(f) as fp: response = c.post(reverse('uploads-new'), {'file': fp}, follow=True) payload = {'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True, 'editable': True} self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict)) response = c.get('/importer-api/data-layers/1/') self.assertEqual(response.status_code, 200) response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]), content_type='application/json') self.assertEqual(response.status_code, 200) self.assertTrue('task' in response.content) layer = Layer.objects.all()[0] self.assertEqual(layer.srid, 'EPSG:4326') self.assertEqual(layer.store, self.datastore.name) self.assertEqual(layer.storeType, 'dataStore') self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime') lyr = self.cat.get_layer(layer.name) self.assertTrue('time' in lyr.resource.metadata) self.assertEqual(UploadLayer.objects.first().layer, layer) self.assertTrue(UploadLayer.objects.first().task_id) def test_valid_file_extensions(self): """ Test the file extension validator. """ for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions: self.assertIsNone(validate_file_extension(SimpleUploadedFile('test.{0}'.format(extension), ''))) with self.assertRaises(ValidationError): validate_file_extension(SimpleUploadedFile('test.txt', '')) def test_no_geom(self): """ Test the file extension validator. """ with self.assertRaises(ValidationError): validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)')) # This should pass (geom type is unknown) validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)')) def test_numeric_overflow(self): """ Regression test for numeric field overflows in shapefiles. 
# https://trac.osgeo.org/gdal/ticket/5241 """ self.generic_import('Walmart.zip', configuration_options=[{"configureTime":False,"convert_to_date":["W1_OPENDAT"],"editable":True,"index":0,"name":"Walmart","start_date":"W1_OPENDAT"}]) def test_multipolygon_shapefile(self): """ Tests shapefile with multipart polygons. """ self.generic_import('PhoenixFirstDues.zip', configuration_options=[{'index': 0}]) def test_non_4326_SR(self): """ Tests shapefile with multipart polygons. """ res = self.generic_import('Istanbul.zip', configuration_options=[{'index': 0}]) featuretype = self.cat.get_resource(res.name) self.assertEqual(featuretype.projection, 'EPSG:32635') def test_gwc_handler(self): """ Tests the GeoWebCache handler """ layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0, 'convert_to_date': ['date'], 'start_date': 'date', 'configureTime': True }]) gwc = GeoWebCacheHandler(None) gs_layer = self.cat.get_layer(layer.name) self.assertTrue(gwc.time_enabled(gs_layer)) gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertTrue('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) # Don't configure time, ensure everything still works layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0}]) gs_layer = self.cat.get_layer(layer.name) self.cat._cache.clear() gs_layer.fetch() payload = self.cat.http.request(gwc.gwc_url(gs_layer)) self.assertFalse('regexParameterFilter' in payload[1]) self.assertEqual(int(payload[0]['status']), 200) def test_utf8(self): """ Tests utf8 characters in attributes """ filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'china_provinces.shp') layer = self.generic_import('china_provinces.shp') gi = OGRImport(filename) ds, insp = gi.open_target_datastore(gi.target_store) sql = str("select NAME_CH from %s where NAME_PY = 'An Zhou'" % (layer.name)) res = ds.ExecuteSQL(sql) feat = res.GetFeature(0) 
self.assertEqual(feat.GetField('name_ch'), "安州") def test_non_converted_date(self): """ Test converting a field as date. """ results = self.generic_import('TM_WORLD_BORDERS_2005.zip', configuration_options=[{'index': 0, 'start_date': 'Year', 'configureTime': True}]) layer = self.cat.get_layer(results.typename) self.assertTrue('time' in layer.resource.metadata) self.assertEqual('year', layer.resource.metadata['time'].attribute) def test_fid_field(self): """ Regression test for preserving an FID field when target layer supports it but source does not. """ self.generic_import('noaa_paleoclimate.zip', configuration_options=[{'index': 0}]) def test_csv_with_wkb_geometry(self): """ Tests problems with the CSV files with multiple geometries. """ files = ['police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv', 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv', 'police_csv_noWKB.csv'] for i in files: self.generic_import(i, {"configureTime":True,"convert_to_date":["date_time"],"editable":True,"index":0,"name":i.lower(),"permissions":{"users":{"AnonymousUser":["change_layer_data","download_resourcebase","view_resourcebase"]}},"start_date":"date_time",})
def create_geoserver_layer(name, user, srid, overwrite=False, title=None,
                           abstract=None, charset='UTF-8'):
    """
    Publish a vector feature type named *name* in GeoServer and attach a
    style to it.

    Returns the published gsconfig resource, or None when
    "geonode.geoserver" is not installed.
    """
    if "geonode.geoserver" in settings.INSTALLED_APPS:
        _user, _password = ogc_server_settings.credentials
        # Step 2. Check that it is uploading to the same resource type as
        # the existing resource
        logger.info('>>> Step 2. Make sure we are not trying to overwrite a '
                    'existing resource named [%s] with the wrong type', name)
        the_layer_type = "vector"
        # Get a short handle to the gsconfig geoserver catalog
        cat = Catalog(ogc_server_settings.internal_rest, _user, _password)
        workspace = cat.get_default_workspace()
        # Check if the store exists in geoserver
        try:
            store = get_store(cat, name, workspace=workspace)
        except FailedRequestError:
            # There is no store, ergo the road is clear
            pass
        else:
            # If we get a store, we do the following:
            resources = store.get_resources()
            # If the store is empty, we just delete it.
            if len(resources) == 0:
                cat.delete(store)
            else:
                # If our resource is already configured in the store it needs
                # to have the right resource type
                for resource in resources:
                    if resource.name == name:
                        msg = 'Name already in use and overwrite is False'
                        assert overwrite, msg
                        existing_type = resource.resource_type
                        if existing_type != the_layer_type:
                            msg = ('Type of uploaded file %s (%s) '
                                   'does not match type of existing '
                                   'resource type '
                                   '%s' % (name, the_layer_type, existing_type))
                            logger.info(msg)
                            raise GeoNodeException(msg)
        logger.debug('Creating vector layer: [%s]', name)
        ds = create_feature_store(cat, workspace)
        # NOTE(review): publishing goes through the module-level gs_catalog
        # while the rest of this function uses the local `cat` handle --
        # confirm both point at the same GeoServer instance.
        gs_resource = gs_catalog.publish_featuretype(name, ds, "EPSG:" + str(srid))
        # Step 7. Create the style and assign it to the created resource
        # FIXME: Put this in gsconfig.py
        logger.info('>>> Step 7. Creating style for [%s]' % name)
        publishing = cat.get_layer(name)
        create_style()
        sld = get_sld_for(gs_catalog, publishing)
        style = None
        if sld is not None:
            try:
                cat.create_style(name, sld)
                style = cat.get_style(name)
            except geoserver.catalog.ConflictingDataError as e:
                msg = ('There was already a style named %s in GeoServer, '
                       'try to use: "%s"' % (name + "_layer", str(e)))
                logger.warn(msg)
                e.args = (msg, )
                # retry under a suffixed name; fall back to the built-in
                # 'point' style on a second conflict
                try:
                    cat.create_style(name + '_layer', sld)
                    style = cat.get_style(name + "_layer")
                except geoserver.catalog.ConflictingDataError as e:
                    style = cat.get_style('point')
                    msg = ('There was already a style named %s in GeoServer, '
                           'cannot overwrite: "%s"' % (name, str(e)))
                    logger.error(msg)
                    e.args = (msg, )
        # FIXME: Should we use the fully qualified typename?
        publishing.default_style = style
        cat.save(publishing)
        return gs_resource
class UploaderBase(GeoNodeBaseTestSupport):
    """Shared fixture and helper assertions for uploader integration tests."""

    settings_overrides = []

    @classmethod
    def setUpClass(cls):
        # The original bootstrap (writing integration_settings.py, spawning a
        # runserver subprocess and awaiting startup) was already fully
        # commented out; the effective behavior is a no-op.
        pass

    @classmethod
    def tearDownClass(cls):
        # remove the generated settings file left over by older runs
        if os.path.exists('integration_settings.py'):
            os.unlink('integration_settings.py')

    def setUp(self):
        # await startup
        cl = Client(
            GEONODE_URL, GEONODE_USER, GEONODE_PASSWD
        )
        for i in range(10):
            time.sleep(.2)
            try:
                cl.get_html('/', debug=False)
                break
            except BaseException:
                pass
        self.client = Client(
            GEONODE_URL, GEONODE_USER, GEONODE_PASSWD
        )
        self.catalog = Catalog(
            GEOSERVER_URL + 'rest',
            GEOSERVER_USER,
            GEOSERVER_PASSWD
        )
        self._tempfiles = []
        # createlayer must use postgis as a datastore
        # set temporary settings to use a postgis datastore
        DB_HOST = settings.DATABASES['default']['HOST']
        DB_PORT = settings.DATABASES['default']['PORT']
        DB_NAME = settings.DATABASES['default']['NAME']
        DB_USER = settings.DATABASES['default']['USER']
        DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
        settings.DATASTORE_URL = 'postgis://{}:{}@{}:{}/{}'.format(
            DB_USER, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME
        )
        postgis_db = dj_database_url.parse(
            settings.DATASTORE_URL, conn_max_age=600)
        settings.DATABASES['datastore'] = postgis_db
        settings.OGC_SERVER['default']['DATASTORE'] = 'datastore'

    def tearDown(self):
        # explicit loop instead of map() so the unlinks also run under Py3,
        # where map() is lazy
        for path in self._tempfiles:
            os.unlink(path)
        # move to original settings
        settings.OGC_SERVER['default']['DATASTORE'] = ''
        del settings.DATABASES['datastore']
        # Cleanup
        Layer.objects.all().delete()
        Map.objects.all().delete()
        Document.objects.all().delete()

    def check_layer_geonode_page(self, path):
        """ Check that the final layer page render's correctly after
        an layer is uploaded """
        # the final url for uploader process. This does a redirect to
        # the final layer page in geonode
        resp, _ = self.client.get_html(path)
        self.assertTrue(resp.code == 200)
        self.assertTrue('content-type' in resp.headers)

    def check_layer_geoserver_caps(self, type_name):
        """ Check that a layer shows up in GeoServer's get
        capabilities document """
        # using owslib
        wms = get_wms(type_name=type_name)
        ws, layer_name = type_name.split(':')
        self.assertTrue(layer_name in wms.contents,
                        '%s is not in %s' % (layer_name, wms.contents))

    def check_layer_geoserver_rest(self, layer_name):
        """ Check that a layer shows up in GeoServer rest api after
        the uploader is done"""
        # using gsconfig to test the geoserver rest api.
        layer = self.catalog.get_layer(layer_name)
        # the original asserted `assertIsNotNone(layer is not None)`, which
        # passes for any value; assert on the layer itself
        self.assertIsNotNone(layer)

    def check_and_pass_through_timestep(self, redirect_to):
        time_step = upload_step('time')
        srs_step = upload_step('srs')
        if srs_step in redirect_to:
            resp = self.client.make_request(redirect_to)
        else:
            self.assertTrue(time_step in redirect_to)
        resp = self.client.make_request(redirect_to)
        token = self.client.get_csrf_token(True)
        self.assertEquals(resp.code, 200)
        resp = self.client.make_request(
            redirect_to, {'csrfmiddlewaretoken': token}, ajax=True)
        data = json.loads(resp.read())
        return resp, data

    def complete_raster_upload(self, file_path, resp, data):
        return self.complete_upload(file_path, resp, data, is_raster=True)

    def check_save_step(self, resp, data):
        """Verify the initial save step"""
        self.assertEquals(resp.code, 200)
        self.assertTrue(isinstance(data, dict))
        # make that the upload returns a success True key
        self.assertTrue(data['success'], 'expected success but got %s' % data)
        self.assertTrue('redirect_to' in data)

    def complete_upload(self, file_path, resp, data, is_raster=False):
        """Method to check if a layer was correctly uploaded to the
        GeoNode.

        arguments: file path, the django http response

        Checks to see if a layer is configured in Django
        Checks to see if a layer is configured in GeoServer
        checks the Rest API
        checks the get cap document
        """
        layer_name, ext = os.path.splitext(os.path.basename(file_path))
        if not isinstance(data, basestring):
            self.check_save_step(resp, data)
            layer_page = self.finish_upload(
                data['redirect_to'], layer_name, is_raster)
            self.check_layer_complete(layer_page, layer_name)

    def finish_upload(self, current_step, layer_name, is_raster=False,
                      skip_srs=False):
        if not is_raster and _ALLOW_TIME_STEP:
            resp, data = self.check_and_pass_through_timestep(current_step)
            self.assertEquals(resp.code, 200)
            if not isinstance(data, basestring):
                if data['success']:
                    self.assertTrue(
                        data['success'], 'expected success but got %s' % data)
                    self.assertTrue('redirect_to' in data)
                    current_step = data['redirect_to']
                    self.wait_for_progress(data.get('progress'))
        if not is_raster and not skip_srs:
            self.assertTrue(upload_step('srs') in current_step)
            # if all is good, the srs step will redirect to the final page
            resp = self.client.get(current_step)
            content = json.loads(resp.read())
            if not content.get('url') and content.get(
                    'redirect_to', current_step) == upload_step('final'):
                resp = self.client.get(content.get('redirect_to'))
        else:
            self.assertTrue(upload_step('final') in current_step)
            resp = self.client.get(current_step)
        self.assertEquals(resp.code, 200)
        resp_js = resp.read()
        try:
            c = json.loads(resp_js)
            url = c['url']
            url = urllib.unquote(url)
            # and the final page should redirect to the layer page
            # @todo - make the check match completely (endswith at least)
            # currently working around potential 'orphaned' db tables
            self.assertTrue(
                layer_name in url,
                'expected %s in URL, got %s' % (layer_name, url))
            return url
        except BaseException:
            return current_step

    def check_upload_model(self, original_name):
        # we can only test this if we're using the same DB as the test instance
        if not settings.OGC_SERVER['default']['DATASTORE']:
            return
        upload = None
        try:
            # AF: TODO Headhakes here... nose is not accessing to the test
            # db!!!
            uploads = Upload.objects.all()
            if uploads:
                upload = Upload.objects.filter(name=str(original_name)).last()
        except Upload.DoesNotExist:
            self.fail('expected to find Upload object for %s' % original_name)
        # AF: TODO Headhakes here... nose is not accessing to the test db!!!
        if upload:
            self.assertTrue(upload.complete)

    def check_layer_complete(self, layer_page, original_name):
        '''check everything to verify the layer is complete'''
        self.check_layer_geonode_page(layer_page)
        # @todo use the original_name
        # currently working around potential 'orphaned' db tables
        # this grabs the name from the url (it might contain a 0)
        type_name = os.path.basename(layer_page)
        layer_name = original_name
        try:
            layer_name = type_name.split(':')[1]
        except BaseException:
            pass
        # work around acl caching on geoserver side of things
        caps_found = False
        for i in range(10):
            time.sleep(.5)
            try:
                self.check_layer_geoserver_caps(type_name)
                caps_found = True
            except BaseException:
                pass
        if caps_found:
            self.check_layer_geoserver_rest(layer_name)
            self.check_upload_model(layer_name)
        else:
            logger.warning(
                "Could not recognize Layer %s on GeoServer WMS" % original_name)

    def check_invalid_projection(self, layer_name, resp, data):
        """ Makes sure that we got the correct response from an layer
        that can't be uploaded"""
        # NOTE(review): assertTrue(resp.code, 200) treats 200 as a message;
        # assertEqual was probably intended.
        self.assertTrue(resp.code, 200)
        if not isinstance(data, basestring):
            self.assertTrue(data['success'])
            self.assertTrue(upload_step("srs") in data['redirect_to'])
            resp, soup = self.client.get_html(data['redirect_to'])
            # grab an h2 and find the name there as part of a message saying it's
            # bad
            h2 = soup.find_all(['h2'])[0]
            self.assertTrue(str(h2).find(layer_name))

    def upload_folder_of_files(self, folder, final_check, session_ids=None):
        mains = ('.tif', '.shp', '.zip')

        def is_main(_file):
            _, ext = os.path.splitext(_file)
            return (ext.lower() in mains)

        main_files = filter(is_main, os.listdir(folder))
        for main in main_files:
            # get the abs path to the file
            _file = os.path.join(folder, main)
            base, _ = os.path.splitext(_file)
            resp, data = self.client.upload_file(_file)
            if session_ids is not None:
                if not isinstance(data, basestring) and data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
            if not isinstance(data, basestring):
                self.wait_for_progress(data.get('progress'))
            final_check(base, resp, data)

    def upload_file(self, fname, final_check, check_name=None, session_ids=None):
        if not check_name:
            check_name, _ = os.path.splitext(fname)
        resp, data = self.client.upload_file(fname)
        if session_ids is not None:
            if not isinstance(data, basestring):
                if data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
        if not isinstance(data, basestring):
            self.wait_for_progress(data.get('progress'))
        final_check(check_name, resp, data)

    def wait_for_progress(self, progress_url):
        if progress_url:
            resp = self.client.get(progress_url)
            assert resp.getcode() == 200, 'Invalid progress status code'
            raw_data = resp.read()
            json_data = json.loads(raw_data)
            # "COMPLETE" state means done
            if json_data.get('state', '') == 'RUNNING':
                time.sleep(0.1)
                self.wait_for_progress(progress_url)

    def temp_file(self, ext):
        fd, abspath = tempfile.mkstemp(ext)
        self._tempfiles.append(abspath)
        return fd, abspath

    def make_csv(self, *rows):
        fd, abspath = self.temp_file('.csv')
        # context manager guarantees the descriptor is closed even if a
        # writerow fails (the original closed it manually)
        with os.fdopen(fd, 'wb') as fp:
            out = csv.writer(fp)
            for r in rows:
                out.writerow(r)
        return abspath
class CatalogTests(unittest.TestCase):
    """Read-only integration tests against a local GeoServer catalog.

    Requires a GeoServer with the default demo data directory running at
    localhost:8080; these tests only inspect the catalog state.
    """

    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testAbout(self):
        # The "about" endpoint serves an XHTML status page.
        about_html = self.cat.about()
        self.assertTrue(
            '<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        # Accept "x.y.z", "x.y.x" (stable series) or "x.y-SNAPSHOT".
        pat = re.compile(r'\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)

    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        # Stores resolve both with and without an explicit workspace.
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", topp).name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))
        self.assertEqual("states",
                         self.cat.get_resource("states", states).name)
        self.assertEqual("states",
                         self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")
        # All core metadata fields should be populated for the demo layer.
        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]
        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem",
                         self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testLayers(self):
        expected = set([
            "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies",
            "tasmania_state_boundaries", "tasmania_cities", "states",
            "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        states = self.cat.get_layer("states")
        # BUGFIX: was self.assert_("states", states.name), which always
        # passed because the literal was used as the assertion expression.
        self.assertEqual("states", states.name)
        self.assertTrue(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        tas = self.cat.get_layergroup("tasmania")
        # BUGFIX: was self.assert_("tasmania", tas.name) — vacuous assertion.
        self.assertEqual("tasmania", tas.name)
        self.assertTrue(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries',
            'tasmania_water_bodies',
            'tasmania_roads',
            'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

    def testStyles(self):
        self.assertEqual(20, len(self.cat.get_styles()))
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld",
                         self.cat.get_style("population").filename)
        self.assertEqual("population",
                         self.cat.get_style("population").sld_name)
        self.assertTrue(self.cat.get_style('non-existing-style') is None)

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        try:
            self.cat.get_store(workspace="best workspace ever",
                               name="best store ever")
            self.fail('expected exception')
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
        try:
            self.cat.get_resource(workspace="best workspace ever",
                                  store="best store ever",
                                  name="best resource ever")
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
class ModifyingTests(unittest.TestCase):
    """Integration tests that create, modify and delete catalog objects
    against a live GeoServer at localhost:8080.

    NOTE(review): these tests mutate shared server state and several of
    them depend on / clean up after one another — statement order matters.
    """

    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        # test saving round trip: write a field, re-fetch, verify, restore.
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled
        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        # enabled flag must survive each save untouched
        self.assertEqual(enabled, rs.enabled)
        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)
        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
            [("text/xml", "TC211",
              "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)
        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        # DBPARAMS is module-level PostGIS connection config (defined
        # elsewhere in this file).
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            # store may already exist from a previous run; proceed anyway
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            self.cat.delete(ds)

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        # Re-assigning the dict marks the field dirty so the save picks it up.
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        # Creates a PostGIS store, then uploads a shapefile into it as the
        # "import" table (dropped afterwards by the decorator).
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    def testCoverageStoreCreate(self):
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)
        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)
        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)
        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211",
              "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)
        # Coverage-specific fields: SRS lists and output formats.
        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF',
                       'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])
        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before,
                          str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after,
                          str(rs.request_srs_list))
        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before,
                          str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after,
                          str(rs.response_srs_list))
        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats,
                          str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after,
                          str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        # Build a cascading-WMS store, then mirror every remote layer into it.
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }
        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])
        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test",
                                     shapefile_plus_sidecars, sf)
        self.assert_(self.cat.get_resource("states_test",
                                           workspace=sf) is not None)
        # Re-creating the same store must conflict ...
        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore("states_test",
                                                 shapefile_plus_sidecars, sf)
        )
        # ... and vector data is rejected by the coverage endpoint.
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test",
                                                  shapefile_plus_sidecars, sf)
        )
        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )
        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }
        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)
        # self.assertRaises(
        #     ConflictingDataError,
        #     lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("Pk50095_vector",
                                                 tiffdata, sf)
        )
        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"
        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)
        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)
        self.assertEqual(lyr.default_style.name, "population")
        # Swap default style and alternate-style list, then verify the swap.
        old_default_style = lyr.default_style
        lyr.default_style = (
            s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)
        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(),
                              overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)
        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)
        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)
        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(),
                              workspace="topp")
        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(),
                              overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)
        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)
        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(),
                              workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)
        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(),
                              overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(),
                              overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)
        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        # before refresh, the local object still holds the raw values we set
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)
        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        # Toggle the enabled flag off and back on, checking each round trip.
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)
        states.enabled = True
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries',
            'tasmania_water_bodies',
            'tasmania_roads',
            'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)
        # Drop the last layer/style pair and save.
        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]
        self.cat.save(tas)
        # this verifies the local state
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries',
            'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)
        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries',
            'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)
continue thread = threading.Thread(target=add_layer, args=(shp,)) thread.name = shp thread.start() layers.append(shp) threads.append(thread) while threads: print '{0} threads running ...'.format(len(threads)) time.sleep(2) threads = [ t for t in threads if t.isAlive() ] proj = None for shp in layers: # collect bbox info for layergroup layer = cat.get_layer(shp) bbox = layer.resource.native_bbox minx = sys.float_info.max maxx = 0 miny = sys.float_info.max maxy = 0 if not proj: proj = layer.resource.projection else: minx = min(minx, float(bbox[0])) maxx = max(maxx, float(bbox[1])) miny = min(miny, float(bbox[2])) maxy = max(maxy, float(bbox[3])) if proj != layer.resource.projection: print 'projection mismatch...' sys.exit(1)
class GsConn:
    """Thin convenience wrapper around a GeoServer REST catalog connection."""

    def __init__(self, host, login, password, debug=False):
        """Geoserver connection.

        :param host: GeoServer host (and port), without scheme.
        :param login: REST API user name.
        :param password: REST API password.
        :param debug: When True, print progress messages.
        """
        self.host = host
        self.login = login
        self.password = password
        self.debug = debug
        # Connect to server
        self.cat = Catalog("http://%s/geoserver/rest" % host, login, password)
        if self.debug is True:
            print("Connected to geoserver")

    def crate_workspace(self, name, overwrite=False):
        """Creates a workspace.

        :param name: Workspace name.
        :param overwrite: If True, reuse an existing workspace silently.
        :return: None
        """
        workspaces = [workspace.name
                      for workspace in self.cat.get_workspaces()]
        if name in workspaces and overwrite is True:
            # ws2del = self.cat.get_workspace(name)
            # self.cat.delete(ws2del, purge=True, recurse=True)
            # NOTE: If we delete the workspace then all associated layers
            # are lost, so the existing workspace is kept as-is.
            return None
        elif name in workspaces and overwrite is False:
            print("ERROR: Workspace %s already exists (use overwrite=True)." % name)
            # BUGFIX: previously fell through and attempted to re-create the
            # existing workspace; abort here instead.
            return None
        self.cat.create_workspace(name, "http://%s/%s" % (self.host, name))
        if self.debug is True:
            print("Workspace %s available." % name)
        ws = self.cat.get_workspace(name)
        ws.enabled = True

    def create_pg_store(self, name, workspace, host, port, login, password,
                        dbname, schema, overwrite=False):
        """Creates a PostGIS datastore.

        :param name: Name of the datastore.
        :param workspace: Name of the workspace to use.
        :param host: PostGIS server host.
        :param port: PostGIS server port.
        :param login: Database user.
        :param password: Database password.
        :param dbname: Database name.
        :param schema: Database schema.
        :param overwrite: If True, reuse an existing store silently.
        :return: None
        """
        stores = [store.name for store in self.cat.get_stores()]
        if name in stores and overwrite is True:
            # st2del = self.cat.get_store(name)
            # self.cat.delete(st2del, purge=True, recurse=True)
            # self.cat.reload()
            # NOTE: If we delete the store, every layer associated with it
            # is lost, so the existing store is kept as-is.
            return None
        elif name in stores and overwrite is False:
            print("ERROR: Store %s already exists (use overwrite=True)." % name)
            # BUGFIX: previously fell through and attempted to create a
            # duplicate store; abort here instead.
            return None
        ds = self.cat.create_datastore(name, workspace)
        ds.connection_parameters.update(
            host=host,
            port=port,
            user=login,
            passwd=password,
            dbtype="postgis",
            database=dbname,
            schema=schema
        )
        self.cat.save(ds)
        ds = self.cat.get_store(name)
        if ds.enabled is False:
            print("ERROR: Geoserver store %s not enabled" % name)
        if self.debug is True:
            print("Datastore %s created." % name)

    def publish_pg_layer(self, layer_table, layer_name, store, srid,
                         overwrite=True):
        """Publish a PostGIS table as a layer in the "ma_carte" workspace,
        reprojecting from the native SRID to EPSG:4326.

        :param layer_table: Source table name (also the feature type name).
        :param layer_name: Human-readable layer title.
        :param store: Name of the datastore holding the table.
        :param srid: Native EPSG code of the table geometry.
        :param overwrite: Unused; an existing layer is always replaced.
        """
        existing_lyr = self.cat.get_layer("ma_carte:%s" % layer_table)
        if existing_lyr is not None:
            print("Layer ma_carte:%s already exists, deleting it." % layer_table)
            self.cat.delete(existing_lyr)
            self.cat.reload()
        ds = self.cat.get_store(store)
        ft = self.cat.publish_featuretype(layer_table, ds,
                                          "EPSG:%s" % srid, srs="EPSG:4326")
        ft.projection_policy = "REPROJECT_TO_DECLARED"
        ft.title = layer_name
        self.cat.save(ft)
        if ft.enabled is False:
            # BUGFIX: the original "..."(args) called the string object
            # (TypeError) instead of %-formatting it.
            print("ERROR: Layer %s %s %s is not enabled." % (
                ft.workspace.name, ft.store.name, ft.title))
        if self.debug is True:
            print("Layer %s>%s>%s published." % (
                ft.workspace.name, ft.store.name, ft.title))

    def create_style_from_sld(self, style_name, sld_file, workspace,
                              overwrite=True):
        """Create (or replace) a global style from an SLD file.

        :param style_name: Name of the style in GeoServer.
        :param sld_file: Path to the SLD document to upload.
        :param workspace: Unused; see FIXME below.
        :param overwrite: Passed through to create_style.
        """
        if self.cat.get_style(style_name) is not None:
            print("Style %s already exists, deleting it." % style_name)
            style2del = self.cat.get_style(style_name)
            self.cat.delete(style2del)
        # FIXME: if ", workspace=workspace" specified can't delete style
        self.cat.create_style(
            style_name,
            open(sld_file).read(),
            overwrite=overwrite
        )
        if self.debug is True:
            print("Style %s created in Geoserver" % style_name)

    def apply_style_to_layer(self, layer_name, style_name):
        """Apply a geoserver style to a layer as its default style."""
        gs_layer = self.cat.get_layer(layer_name)
        gs_style = self.cat.get_style(style_name)
        # FIXME: Which works better?
        # gs_layer.default_style = gs_style / gs_layer._set_default_style(gs_style)
        # FIXME: Maybe indicate workspace when saving style then name the
        # style as "workspace:style"
        gs_layer._set_default_style(gs_style)
        self.cat.save(gs_layer)
        if self.debug is True:
            print("Style applied to %s" % layer_name)
class CatalogTests(unittest.TestCase):
    """Read-only integration tests against the catalog configured via the
    module-level GSPARAMS settings (URL plus credentials)."""

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'],
                           username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])

    def testAbout(self):
        # The "about" endpoint serves an XHTML status page.
        about_html = self.cat.about()
        self.assertTrue(
            '<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        # Accept "x.y.z", "x.y.x" (stable series) or "x.y-SNAPSHOT".
        pat = re.compile(r'\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)

    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        # Stores resolve both with and without an explicit workspace.
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", topp).name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))
        self.assertEqual("states",
                         self.cat.get_resource("states", states).name)
        self.assertEqual("states",
                         self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")
        # All core metadata fields should be populated for the demo layer.
        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]
        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem",
                         self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testResourcesUpdate(self):
        # Copy metadata from same-named non-WMS resources onto WMS layers.
        res_dest = self.cat.get_resources()
        count = 0
        for rd in res_dest:
            # only wms layers
            if rd.resource_type != "wmsLayer":
                continue
            # looking for same name
            ro = self.cat.get_resource(rd.name)
            if ro is not None:
                rd.title = ro.title
                rd.abstract = ro.abstract
                rd.keywords = ro.keywords
                rd.projection = ro.projection
                rd.native_bbox = ro.native_bbox
                rd.latlon_bbox = ro.latlon_bbox
                rd.projection_policy = ro.projection_policy
                rd.enabled = ro.enabled
                rd.advertised = ro.advertised
                rd.metadata_links = ro.metadata_links or None
                self.cat.save(rd)
                self.cat.reload()
                # print "Updated layer: " + rd.name
                count += 1
        # print "Total updated layers: " + str(count)

    def testLayers(self):
        expected = set([
            "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies",
            "tasmania_state_boundaries", "tasmania_cities", "states",
            "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        states = self.cat.get_layer("states")
        # BUGFIX: was self.assert_("states", states.name), which always
        # passed because the literal was used as the assertion expression.
        self.assertEqual("states", states.name)
        self.assertTrue(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)
        tas = self.cat.get_layergroup("tasmania")
        # BUGFIX: was self.assert_("tasmania", tas.name) — vacuous assertion.
        self.assertEqual("tasmania", tas.name)
        self.assertTrue(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries',
            'tasmania_water_bodies',
            'tasmania_roads',
            'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)
        # Try to create a new Layer Group into the "topp" workspace
        self.assertTrue(self.cat.get_workspace("topp") is not None)
        tas2 = self.cat.create_layergroup("tasmania_reloaded", tas.layers,
                                          workspace="topp")
        self.cat.save(tas2)
        # Not visible without a workspace qualifier ...
        self.assertTrue(self.cat.get_layergroup("tasmania_reloaded") is None)
        # ... but present inside "topp".
        self.assertTrue(
            self.cat.get_layergroup("tasmania_reloaded", "topp") is not None)
        tas2 = self.cat.get_layergroup("tasmania_reloaded", "topp")
        # BUGFIX: was self.assert_("tasmania_reloaded", tas2.name) — vacuous.
        self.assertEqual("tasmania_reloaded", tas2.name)
        self.assertTrue(isinstance(tas2, LayerGroup))
        self.assertEqual(tas2.workspace, "topp", tas2.workspace)
        self.assertEqual(tas2.layers, [
            'tasmania_state_boundaries',
            'tasmania_water_bodies',
            'tasmania_roads',
            'tasmania_cities'
        ], tas2.layers)
        self.assertEqual(tas2.styles, [None, None, None, None], tas2.styles)

    def testStyles(self):
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld",
                         self.cat.get_style("population").filename)
        self.assertEqual("population",
                         self.cat.get_style("population").sld_name)
        self.assertTrue(self.cat.get_style('non-existing-style') is None)

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        try:
            self.cat.get_store(workspace="best workspace ever",
                               name="best store ever")
            self.fail('expected exception')
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
        try:
            self.cat.get_resource(workspace="best workspace ever",
                                  store="best store ever",
                                  name="best resource ever")
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
class ModifyingTests(unittest.TestCase):
    """Integration tests that modify state on a live local GeoServer.

    Every test talks to the REST API at localhost:8080; tests create,
    mutate and (mostly) restore catalog objects, so ordering and a
    pre-seeded demo data directory matter.  NOTE(review): these tests
    assume the stock GeoServer sample data (sf, sfdem, states, tasmania,
    Arc_Sample ...) is loaded — confirm before running.
    """

    def setUp(self):
        # Fresh anonymous catalog connection per test (default credentials).
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        # test saving round trip: write a field, re-fetch, compare.
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled
        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        # saving must not flip the enabled flag as a side effect
        self.assertEqual(enabled, rs.enabled)
        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)
        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)
        # Restore abstract so later runs see the original value
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        # Create a PostGIS datastore; only checks that save() succeeds.
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(
            host="localhost", port="5432", database="db", user="******",
            passwd="password", dbtype="postgis")
        self.cat.save(ds)

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        # self-assignment marks connection_parameters dirty before mutation
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        # workspace must survive a save round trip
        self.assertEqual(orig_ws, ds.workspace.name)

    def testDataStoreCreateAndThenAlsoImportData(self):
        # Create an empty PostGIS store, then upload a shapefile into it.
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(
            host="localhost", port="5432", database="db", user="******",
            passwd="password", dbtype="postgis")
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    def testCoverageStoreCreate(self):
        # create_coveragestore2 builds an UnsavedCoverageStore referencing
        # external data rather than uploading a file.
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)
        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured, so restore the type
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip for a coverage resource
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)
        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)
        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)
        # Expected SRS / format lists before and after the edits below.
        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])
        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before,
                          str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after,
                          str(rs.request_srs_list))
        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before,
                          str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after,
                          str(rs.response_srs_list))
        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats,
                          str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after,
                          str(rs.supported_formats))

    def testFeatureTypeCreate(self):
        # shapefile_and_friends must collect exactly the four sidecar files
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }
        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])
        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)
        # duplicate store name must be rejected
        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )
        # vector data uploaded as a coverage must fail
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )
        # raster files mislabelled as shapefile parts must fail
        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )
        # clean up the layer created above
        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }
        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)
        # self.assertRaises(
        #     ConflictingDataError,
        #     lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )
        # raster data uploaded as a featurestore must fail
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )
        # vector files mislabelled as a tiff must fail
        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }
        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

    def testLayerSave(self):
        # test saving round trip of layer attribution
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"
        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)
        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)
        # swap default style with an alternate style and verify both lists
        self.assertEqual(lyr.default_style.name, "population")
        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)
        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)
        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)
        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        # toggle the enabled flag off and back on, saving each time
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)
        states.enabled = True
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)
        # drop the last layer/style pair and save
        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]
        self.cat.save(tas)
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)
class UploaderBase(TestCase): settings_overrides = [] @classmethod def setUpClass(cls): super(UploaderBase, cls).setUpClass() # don't accidentally delete anyone's layers if Layer.objects.all().count(): if 'DELETE_LAYERS' not in os.environ: print print 'FAIL: There are layers in the test database' print 'Will not run integration tests unless `DELETE_LAYERS`' print 'Is specified as an environment variable' print raise Exception('FAIL, SEE ABOVE') # make a test_settings module that will apply our overrides test_settings = ['from geonode.settings import *'] if os.path.exists('geonode/upload/tests/test_settings.py'): test_settings.append('from geonode.upload.tests.test_settings import *') for so in cls.settings_overrides: test_settings.append('%s=%s' % so) with open('integration_settings.py', 'w') as fp: fp.write('\n'.join(test_settings)) # runserver with settings args = ['python','manage.py','runserver','--settings=integration_settings','--verbosity=0'] # see http://www.doughellmann.com/PyMOTW/subprocess/#process-groups-sessions cls._runserver = subprocess.Popen(args, stderr=open('test.log','w') ,preexec_fn=os.setsid) # await startup cl = Client( GEONODE_URL, GEONODE_USER, GEONODE_PASSWD ) for i in range(5): try: cl.get_html('/') break except: time.sleep(.5) if cls._runserver.poll() is not None: raise Exception("Error starting server, check test.log") @classmethod def tearDownClass(cls): super(UploaderBase, cls).tearDownClass() # kill server process group os.killpg(cls._runserver.pid, signal.SIGKILL) if os.path.exists('integration_settings.py'): os.unlink('integration_settings.py') def setUp(self): super(UploaderBase, self).setUp() self._tempfiles = [] self.client = Client( GEONODE_URL, GEONODE_USER, GEONODE_PASSWD ) self.catalog = Catalog( GEOSERVER_URL + 'rest', GEOSERVER_USER, GEOSERVER_PASSWD ) # @todo - this is obviously the brute force approach - ideally, # these cases would be more declarative and delete only the things # they mess with for l in 
Layer.objects.all(): try: l.delete() except: print 'unable to delete layer', l # and destroy anything left dangling on geoserver cat = Layer.objects.gs_catalog map(lambda name: cascading_delete(cat, name), [l.name for l in cat.get_layers()]) def tearDown(self): super(UploaderBase, self).tearDown() map(os.unlink, self._tempfiles) def check_layer_geonode_page(self, path): """ Check that the final layer page render's correctly after an layer is uploaded """ # the final url for uploader process. This does a redirect to # the final layer page in geonode resp, _ = self.client.get_html(path) self.assertTrue('content-type' in resp.headers) # if we don't get a content type of html back, thats how we # know there was an error. self.assertTrue( resp.headers['content-type'].startswith('text/html') ) def check_layer_geoserver_caps(self, original_name): """ Check that a layer shows up in GeoServer's get capabilities document """ # using owslib wms = get_wms(layer_name=original_name) self.assertTrue(original_name in wms.contents, '%s is not in %s' % (original_name, wms.contents)) def check_layer_geoserver_rest(self, original_name): """ Check that a layer shows up in GeoServer rest api after the uploader is done""" # using gsconfig to test the geoserver rest api. 
layer = self.catalog.get_layer(original_name) self.assertIsNotNone(layer is not None) def check_and_pass_through_timestep(self): raise Exception('not implemented') redirect_to = data['redirect_to'] self.assertEquals(redirect_to, upload_step('time')) resp = self.client.make_request(upload_step('time')) self.assertEquals(resp.code, 200) data = {'csrfmiddlewaretoken': self.client.get_crsf_token()} resp = self.client.make_request(upload_step('time'), data) data = json.loads(resp.read()) return resp, data def complete_raster_upload(self, file_path, resp, data): return self.complete_upload(file_path, resp, data, is_raster=True) def check_save_step(self, resp, data): """Verify the initial save step""" self.assertEquals(resp.code, 200) self.assertTrue(isinstance(data, dict)) # make that the upload returns a success True key self.assertTrue(data['success'], 'expected success but got %s' % data) self.assertTrue('redirect_to' in data) def complete_upload(self, file_path, resp, data, is_raster=False): """Method to check if a layer was correctly uploaded to the GeoNode. 
arguments: file path, the django http response Checks to see if a layer is configured in Django Checks to see if a layer is configured in GeoServer checks the Rest API checks the get cap document """ layer_name, ext = os.path.splitext(os.path.basename(file_path)) self.check_save_step(resp, data) layer_page = self.finish_upload(data['redirect_to'], layer_name, is_raster) self.check_layer_complete(layer_page, layer_name) def finish_upload(self, current_step, layer_name, is_raster=False, skip_srs=False): if (not is_raster and _ALLOW_TIME_STEP): resp, data = self.check_and_pass_through_timestep() self.assertEquals(resp.code, 200) self.assertTrue(data['success'], 'expected success but got %s' % data) self.assertTrue('redirect_to' in data) current_step = data['redirect_to'] self.wait_for_progress(data.get('progress')) if not is_raster and not skip_srs: self.assertEquals(current_step, upload_step('srs')) # if all is good, the srs step will redirect to the final page resp = self.client.get(current_step) else: self.assertEquals(current_step, upload_step('final')) resp = self.client.get(current_step) self.assertEquals(resp.code, 200) url = json.loads(resp.read())['url'] # and the final page should redirect to tha layer page # @todo - make the check match completely (endswith at least) # currently working around potential 'orphaned' db tables self.assertTrue(layer_name in url, 'expected %s in URL, got %s' % (layer_name, url)) return url def check_upload_model(self, original_name): # we can only test this if we're using the same DB as the test instance if not settings.DB_DATASTORE: return try: upload = Upload.objects.get(layer__name=original_name) except Upload.DoesNotExist: self.fail('expected to find Upload object for %s' % original_name) self.assertTrue(upload.complete) def check_layer_complete(self, layer_page, original_name): '''check everything to verify the layer is complete''' self.check_layer_geonode_page(layer_page) # @todo use the original_name # currently working 
around potential 'orphaned' db tables # this grabs the name from the url (it might contain a 0) original_name = os.path.basename(layer_page).split(':')[1] self.check_layer_geoserver_caps(original_name) self.check_layer_geoserver_rest(original_name) self.check_upload_model(original_name) def check_invalid_projection(self, layer_name, resp, data): """ Makes sure that we got the correct response from an layer that can't be uploaded""" if _ALLOW_TIME_STEP: resp, data = self.check_and_pass_through_timestep() self.assertTrue(resp.code, 200) self.assertTrue(data['success']) self.assertEquals(upload_step("srs"), data['redirect_to']) resp, soup = self.client.get_html(data['redirect_to']) # grab an h2 and find the name there as part of a message saying it's bad h2 = soup.find_all(['h2'])[0] self.assertTrue(str(h2).find(layer_name)) def upload_folder_of_files(self, folder, final_check): mains = ('.tif', '.shp', '.zip') def is_main(_file): _, ext = os.path.splitext(_file) return (ext.lower() in mains) self.client.login() main_files = filter(is_main, os.listdir(folder)) for main in main_files: # get the abs path to the file _file = os.path.join(folder, main) base, _ = os.path.splitext(_file) resp, data = self.client.upload_file(_file) self.wait_for_progress(data.get('progress')) final_check(base, resp, data) def upload_file(self, fname, final_check, check_name=None): self.client.login() if not check_name: check_name, _ = os.path.splitext(fname) resp, data = self.client.upload_file(fname) self.wait_for_progress(data.get('progress')) final_check(check_name, resp, data) def wait_for_progress(self, progress_url): if progress_url: resp = self.client.get(progress_url) assert resp.getcode() == 200, 'Invalid progress status code' raw_data = resp.read() json_data = json.loads(raw_data) # "COMPLETE" state means done if json_data.get('state', '') == 'RUNNING': time.sleep(0.1) self.wait_for_progress(progress_url) def temp_file(self, ext): fd, abspath = tempfile.mkstemp(ext) 
self._tempfiles.append(abspath) return fd, abspath def make_csv(self, *rows): fd, abspath = self.temp_file('.csv') fp = os.fdopen(fd,'wb') out = csv.writer(fp) for r in rows: out.writerow(r) fp.close() return abspath
def preprogress(id):
    """Full ingest pipeline for one uploaded satellite image (Bmap ``id``).

    Steps (each helper returns an Exception INSTANCE on failure instead of
    raising — the driver at the bottom checks ``isinstance(result, Exception)``
    after every step):
      1. locate the uploaded fusion image + RPC file,
      2. stretch to 8-bit 3-channel GeoTIFF and write a thumbnail,
      3. RPC orthorectification via gdal.Warp,
      4. build overviews and publish the coverage to GeoServer,
      5. run the detector, orthorectify its label raster,
      6. vectorize the label mask into Mask records and publish per-class
         layers to GeoServer.
    On any failure, partially-published GeoServer layers/stores and the DB
    record are cleaned up and the error text is returned.
    Note: user-facing strings are intentionally in Chinese.
    """
    def getUploadFile(uploadfolder):
        """Descend into the upload folder and pick out the fusion image and
        RPC text file.  Returns (folder, fusion_name, rpc_file) or an
        Exception instance on failure."""
        try:
            uploadfiles = os.listdir(uploadfolder)  # the uploaded folder
            # descend through single-entry wrapper directories
            while(len(uploadfiles)==1):
                uploadfolder=os.path.join(uploadfolder,uploadfiles[0])
                uploadfiles=os.listdir(uploadfolder)
            for file in uploadfiles:
                if(file[-11:]=='_fusion.tif'):  # fused (pan-sharpened) image
                    fusionname=file
                    # capture date is the YYYYMMDD token in the filename
                    capture_time=re.match('[\w\_\.]+_(\d{8})_',fusionname).group(1)
                    Bmap.objects.filter(id=id).update(capture_time=time.strftime('%Y-%m-%d',time.strptime(capture_time,'%Y%m%d')))
                # elif re.search(r'PAN\d.jpg',file):  # thumbnail
                #     thumbnailname=file
                # elif re.search(r'PAN\d.xml',file):  # XML metadata
                #     XMLname=file
                elif re.search(r'_rpc.txt',file):  # RPC model file
                    rpcfile=file
                # elif re.search(r'MSS\d.rpb',file):
                #     rpbfile=file
            return uploadfolder,fusionname,rpcfile
        except Exception as e:
            return Exception("上传失败,请检查地图名称!")

    #### Convert to 8-bit / 3-channel and generate a thumbnail
    def chaneltransform():
        """Percentile-stretch each of the first 3 bands to 8-bit, write
        chaneltransform.tif (bands written in BGR->RGB order) plus a 2%%
        scale JPEG thumbnail named <id>.jpg.  Uses the enclosing
        ``uploadfiles`` tuple from the driver."""
        try:
            fusionimage=os.path.join(uploadfiles[0],uploadfiles[1])
            gdal.AllRegister()
            driver = gdal.GetDriverByName("GTiff")
            fusionimage = gdal.Open(fusionimage.encode('utf-8').decode(), gdal.GA_ReadOnly)
            im_width = fusionimage.RasterXSize
            im_height = fusionimage.RasterYSize
            transformimage = os.path.join(uploadfiles[0],"chaneltransform.tif")
            dstDS = driver.Create(transformimage, xsize=im_width, ysize=im_height, bands=3, eType=gdal.GDT_Byte)
            # thumbnail is 2% of the source dimensions
            thumbnail=np.zeros(shape=(int(im_height*0.02),int(im_width*0.02),3))
            for iband in range(1, 4):
                imgMatrix = fusionimage.GetRasterBand(iband).ReadAsArray(0, 0, im_width, im_height)
                # skip the nodata (zero) fraction when picking the low cut
                zeros = np.size(imgMatrix) - np.count_nonzero(imgMatrix)
                minVal = np.percentile(imgMatrix, float(zeros / np.size(imgMatrix) * 100 + 0.15))
                maxVal = np.percentile(imgMatrix, 99)
                idx1 = imgMatrix < minVal
                idx2 = imgMatrix > maxVal
                idx3 = ~idx1 & ~idx2
                imgMatrix[idx1] = imgMatrix[idx1] * 20 / minVal
                imgMatrix[idx2] = 255
                idx1=None
                idx2=None
                # gamma-ish stretch of the mid range into 0..255
                imgMatrix[idx3] = pow((imgMatrix[idx3] - minVal) / (maxVal - minVal), 0.9) * 255
                if iband==1:
                    dstDS.GetRasterBand(3).WriteArray(imgMatrix)
                    dstDS.FlushCache()
                    thumbnail[:, :, 2] = cv2.resize(src=imgMatrix, dsize=(thumbnail.shape[1], thumbnail.shape[0]))
                    imgMatrix = None
                elif iband==2:
                    dstDS.GetRasterBand(2).WriteArray(imgMatrix)
                    dstDS.FlushCache()
                    thumbnail[:, :, 1] = cv2.resize(src=imgMatrix, dsize=(thumbnail.shape[1], thumbnail.shape[0]))
                    imgMatrix = None
                else:
                    dstDS.GetRasterBand(1).WriteArray(imgMatrix)
                    dstDS.FlushCache()
                    thumbnail[:, :, 0] = cv2.resize(src=imgMatrix, dsize=(thumbnail.shape[1], thumbnail.shape[0]))
                    imgMatrix = None
            # drop references so GDAL flushes/closes the datasets
            fusionimage = None
            dstDS = None
            cv2.imwrite(os.path.join(uploadfiles[0],str(id)+'.jpg'),thumbnail)
            return transformimage
        except Exception as e:
            return Exception("上传失败,图像转换出错:"+str(e))

    def RPCOrthorectification(Alpha=True,is_label=False):
        """Orthorectify either the stretched image (default) or the label
        raster using its RPC model; writes <name>RPC.tif.  Alpha selects an
        alpha band vs. nodata=0 for the area outside the warped image."""
        try:
            if not is_label:
                orginalimage=os.path.join(uploadfiles[0],'chaneltransform.tif')
                transform_rpc = os.path.join(uploadfiles[0], 'chaneltransform_rpc.txt')
            else:
                orginalimage=os.path.join(uploadfiles[0],'label.tif')
                transform_rpc=os.path.join(uploadfiles[0],'label_rpc.txt')
            # GDAL finds the RPC model by filename convention, so copy the
            # original RPC file next to the image being warped
            origin_rpc=os.path.join(uploadfiles[0],uploadfiles[2])
            shutil.copyfile(origin_rpc,transform_rpc)
            # with open(rpbfile,'r') as f:
            #     for line in f.readlines():
            #         hoffLine=re.search(r'heightOffset = ([\+|\-|\d]\d+\.?\d+)',line)
            #         if hoffLine:
            #             hoff=hoffLine.group(1)
            #             break
            #     f.close()
            # RpcHeight="['RPC_HEIGHT="+str(hoff)+"]'"
            # transformerOptions=RpcHeight
            if Alpha:
                warpOP = gdal.WarpOptions(dstSRS='WGS84', rpc=True, multithread=True, errorThreshold=0.0,creationOptions=['Tiled=yes'], resampleAlg=gdal.gdalconst.GRIORA_Bilinear,dstAlpha=True)
            else:
                warpOP = gdal.WarpOptions(dstSRS='WGS84', rpc=True, multithread=True, errorThreshold=0.0,creationOptions=['Tiled=yes'], resampleAlg=gdal.gdalconst.GRIORA_Bilinear,dstNodata=0)
            image = gdal.Open(orginalimage.encode('utf-8'),gdal.GA_ReadOnly)
            RPCOrthImage = os.path.join(uploadfiles[0],os.path.basename(orginalimage).replace(".tif","RPC.tif"))
            srcDS = gdal.Warp(RPCOrthImage.encode('utf-8').decode(), image, options=warpOP)
            image=None
            srcDS=None
            return RPCOrthImage
        except Exception as e:
            return Exception("上传失败,RPC正射校正出错:"+str(e))

    def buildOverviews():
        """Build pyramid overviews on the orthorectified image and publish
        it to the GeoServer 'Map' workspace as an external GeoTIFF coverage
        store named after the Bmap id.  Returns None on success."""
        try:
            image=os.path.join(uploadfiles[0],'chaneltransformRPC.tif')
            gdal.AllRegister()
            TransformDS = gdal.Open(image.encode('utf-8').decode(), gdal.GA_ReadOnly)
            Width = TransformDS.RasterXSize
            Heigh = TransformDS.RasterYSize
            PixelNum = Width * Heigh
            # add overview levels (4, 8, 16, ...) until the decimated image
            # drops below TopNum pixels
            TopNum = 4096
            CurNum = PixelNum / 4
            anLevels = []
            nLevelCount = 0
            while (CurNum > TopNum):
                anLevels.append(pow(2, nLevelCount + 2))
                nLevelCount += 1
                CurNum /= 4
            TransformDS.BuildOverviews(overviewlist=anLevels)
            cat = Catalog(map_url,'admin', 'geoserver')
            wkspce = cat.get_workspace('Map')
            cat.create_coveragestore_external_geotiff(name=id, data='file://' + image.encode('utf-8').decode('utf-8'), workspace=wkspce)
            cat.reload()
            TransformDS = None
        except Exception as e:
            return Exception("上传失败,建立金字塔出错"+str(e))

    def fit_by_contours(img,geotransfrom):
        """Vectorize a binary class mask into a MultiPolygon in map
        coordinates.  Morphological open/close passes denoise the mask;
        cv2.findContours with RETR_CCOMP yields outer rings and holes,
        which are mapped to geo-space via the affine geotransform."""
        # 2x2 rotation/scale part and origin offset of the geotransform
        geo = np.array([[geotransfrom[1], geotransfrom[4]], [geotransfrom[2], geotransfrom[5]]])
        off = np.array([geotransfrom[0], geotransfrom[3]])
        kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 5))
        img = cv2.morphologyEx(img, cv2.MORPH_OPEN, kernel)
        img = cv2.morphologyEx(img, cv2.MORPH_CLOSE, kernel)
        img = cv2.morphologyEx(img, cv2.MORPH_OPEN, kernel)
        img = cv2.morphologyEx(img, cv2.MORPH_CLOSE, kernel)
        contours, hierarchy = cv2.findContours(img, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)[1:]
        contours = list(map(np.squeeze, contours))
        hierarchy = np.squeeze(hierarchy)
        # close each ring (append its first point) and transform to map coords
        contours = [LinearRing(np.dot(np.row_stack((single_contour, single_contour[0, :])), geo) + off) for single_contour in contours]
        hole_exclude_linering = defaultdict(list)
        # top-level rings (hierarchy parent == -1) become polygon exteriors
        for idx in np.argwhere(hierarchy[:, -1] == -1)[:, 0]:
            hole_exclude_linering[idx].append(contours[idx])
        # external_contours=[(idx,contours[idx]) for idx in np.argwhere(hierarchy[:,-1]==-1)[:,0]]
        # rings that have a child contour own holes; attach each hole ring
        # to its parent exterior
        extern_linering_idx = np.argwhere(hierarchy[:, 2] != -1)[:, 0]
        hole_idx = [np.argwhere(hierarchy[:, -1] == idx)[:, 0] for idx in extern_linering_idx]
        for e_id, h_id in zip(extern_linering_idx, hole_idx):
            holes = [contours[h] for h in h_id]
            hole_exclude_linering[e_id].extend(holes)
        return MultiPolygon([Polygon(*linering) for linering in hole_exclude_linering.values()])

    def save_mask():
        """Vectorize each class in the orthorectified label raster into a
        Mask DB record (area in km^2 via transform to EPSG:4527 — TODO
        confirm that SRID), publish a CQL-filtered featureType per class to
        GeoServer and attach its style."""
        try:
            ct=CoordTransform(SpatialReference('WGS84'), SpatialReference('4527'))
            label_path=os.path.join(uploadfiles[0],'labelRPC.tif')
            dataset = gdal.Open(label_path)
            GeoTransform = dataset.GetGeoTransform()
            # NOTE(review): this None check comes AFTER GetGeoTransform, so a
            # failed open would already have raised above.
            if dataset == None:
                return
            im_width = dataset.RasterXSize  # raster column count
            im_height = dataset.RasterYSize  # raster row count
            # pixel (row L, col C) -> map coordinates via the geotransform
            cood_trans=lambda L,C:(GeoTransform[0] + C * GeoTransform[1] + L * GeoTransform[2],GeoTransform[3] + C * GeoTransform[4] + L * GeoTransform[5])
            # store the image footprint polygon on the Bmap record
            map_polygon=Polygon(LinearRing(cood_trans(0,0),cood_trans(0,im_width),cood_trans(im_height,im_width),cood_trans(im_height,0),cood_trans(0,0)))
            Bmap.objects.filter(id=id).update(polygon=map_polygon)
            im_data = dataset.ReadAsArray(0, 0, im_width, im_height)  # read label grid
            dataset = None
            types = np.unique(im_data)
            for label_type in types:
                # if label_type in (0,):
                #     continue
                mp = fit_by_contours((im_data == label_type).astype(np.uint8), GeoTransform)
                m = Mask(map=Bmap.objects.get(id=id),type_id=int(label_type), mask=mp,area=round(mp.transform(ct,clone=True).area/1000000,2))
                m.save()
                # img[im_data == label_type]=127
                # cv2.imwrite(str(label_type)+".jpg",img)
                # class 0 is background: stored in the DB but not published
                if label_type!=0:
                    payload = "<featureType><name>" + str(id) + '_' + str(m.type_id) + "</name><nativeName>myweb_mask</nativeName>"" \
""<cqlFilter>type_id=" + str(m.type_id) + " and map_id=" + str(id) + "</cqlFilter></featureType>"
                    headers = {'Content-type': 'text/xml'}
                    resp = requests.post(mask_url, auth=('admin', 'geoserver'), data=payload, headers=headers)
                    if resp.status_code != 201:
                        raise Exception('Upload to geoserver error')
                    else:
                        cat = Catalog(map_url, 'admin', 'geoserver')
                        layer = cat.get_layer('Mask:'+str(id)+'_'+str(m.type_id))
                        layer.default_style=cat.get_style(str(label_type), 'Mask')
                        cat.save(layer)
                        cat.reload()
            return "上传成功"
        except Exception as e:
            return Exception("上传失败,拟合图斑出错:"+str(e))

    # # def makeDownload():
    #     try:
    #         cwd = os.getcwd()
    #         downloadpath=os.path.join(baseurl,str(id)+'.tar.gz')  # front-end download path
    #         downloadfile = tarfile.open(downloadpath, "w:gz")
    #         os.chdir(uploadfiles[0])
    #         downloadfile.add(uploadfiles[4],recursive=False)
    #         downloadfile.add(uploadfiles[5],recursive=False)
    #         os.chdir(tempfolder)
    #         for file in os.listdir(tempfolder):
    #             if ".json" in file:
    #                 downloadfile.add(file,recursive=False)
    #             if file=="chaneltransformRPC.tif":
    #                 downloadfile.add(file)
    #         downloadfile.close()
    #         os.chdir(cwd)
    #         return downloadpath
    #     except Exception:
    #         # if os.path.exists(downloadpath):
    #         #     os.remove(downloadpath)
    #         return "上传失败,无法创建压缩包"
    # if not os.path.exists(baseurl):
    #     os.makedirs(baseurl)
    # uploadfiles=(os.path.join(MAPBASEPATH,'GF2_PMS2_E117.4_N39.1_20170510_L1A0002351826'),)
    # result = detection.detection(uploadfiles[0] + "/", uploadfiles[1], uploadfiles[0])
    # if not isinstance(result, Exception):
    #     result =save_mask()
    # if not isinstance(result, Exception):
    #     result = save_mask()
    # if not isinstance(result, Exception):
    #     # result=makeDownload()
    #     return '上传成功'
    # buildOverviews()
    # RPCOrthorectification()

    # production code
    # uploadfolder=os.path.join(MAPBASEPATH,'GF2_PMS2_E117.4_N39.1_20170510_L1A0002351826')
    # uploadfiles=getUploadFile(uploadfolder)
    # RPCOrthorectification(Alpha=False,is_label=True)
    # Driver: run each stage; any stage returning an Exception instance
    # short-circuits into the cleanup path at the bottom.
    uploadfolder =os.path.join(MAPBASEPATH,Bmap.objects.get(id=id).name)
    uploadfiles=getUploadFile(uploadfolder)
    result = uploadfiles
    if not isinstance(result, Exception):
        result=chaneltransform()
    if not isinstance(result,Exception):
        result=RPCOrthorectification()
    if not isinstance(result,Exception):
        result=buildOverviews()
    if not isinstance(result,Exception):
        result=detection.detection(uploadfiles[0]+"/",uploadfiles[1],uploadfiles[0])
    if not isinstance(result,Exception):
        result=RPCOrthorectification(Alpha=False,is_label=True)
    if not isinstance(result,Exception):
        result=save_mask()
    if not isinstance(result,Exception):
        shutil.rmtree('./myweb/Detector/temp')
        return '上传成功'
    #### end of production path — below is failure cleanup
    # Remove any partially-published GeoServer artifacts for this id,
    # the detector temp dir, and the Bmap DB record, then report the error.
    cat = Catalog(map_url, 'admin', 'geoserver')
    if cat.get_layer('Mask:'+str(id)):
        cat.delete(cat.get_layer('Mask:'+str(id)))
        cat.reload()
    for label_type in range(1,8):
        if cat.get_layer('Mask:' + str(id)+'_'+str(label_type)):
            cat.delete(cat.get_layer('Mask:' + str(id)+'_'+str(label_type)))
            cat.reload()
    try:
        if cat.get_store(name=str(id), workspace='Map'):
            cat.delete(cat.get_store(name=str(id), workspace='Map'))
            cat.reload()
    except Exception:
        pass
    if os.path.exists('./myweb/Detector/temp'):
        shutil.rmtree('./myweb/Detector/temp')
    Bmap.objects.filter(id=id).delete()
    return str(result)
class UploaderTests(TestCase):
    """Basic checks to make sure pages load, etc.

    Integration tests for the OSGeo importer: they exercise the upload views,
    the importer REST API, and the resulting GeoServer layers/resources.
    NOTE(review): these tests talk to a live GeoServer/PostGIS configured via
    ``ogc_server_settings`` — they are integration tests, not unit tests.
    """

    def create_datastore(self, connection, catalog):
        """Convenience method for creating a datastore.

        Builds (or reuses) a PostGIS datastore in ``self.workspace`` using the
        Django database connection's settings, and returns the saved store.
        """
        settings = connection.settings_dict
        ds_name = settings['NAME']
        params = {
            'database': ds_name,
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': 'True'
        }
        store = catalog.create_datastore(ds_name, workspace=self.workspace)
        store.connection_parameters.update(params)
        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass
        return catalog.get_store(ds_name)

    def create_user(self, username, password, **kwargs):
        """Convenience method for creating users.

        Returns the existing user if one with ``username`` already exists;
        the password is only (re)set on first creation.
        """
        user, created = User.objects.get_or_create(username=username, **kwargs)
        if created:
            user.set_password(password)
            user.save()
        return user

    def setUp(self):
        # Fail fast with a clear message when the fixture directory is absent.
        self.assertTrue(
            os.path.exists(_TEST_FILES_DIR),
            'Test could not run due to missing test data at {0!r}'
            .format(_TEST_FILES_DIR)
        )
        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict
        self.admin_user = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_user = self.create_user('non_admin', 'non_admin')
        self.catalog = Catalog(
            ogc_server_settings.internal_rest,
            *ogc_server_settings.credentials
        )
        # Ensure the 'geonode' workspace exists before creating the datastore.
        if self.catalog.get_workspace('geonode') is None:
            self.catalog.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.catalog)

    def tearDown(self):
        """Clean up geoserver.
        """
        # recurse=True also removes the layers/resources under the store.
        self.catalog.delete(self.datastore, recurse=True)

    def import_file(self, path, configs=None):
        """Imports the file.

        Runs the OGR import handler on ``path`` with the given per-layer
        configuration options and returns the handler's layer results.
        """
        if configs is None:
            configs = []
        self.assertTrue(os.path.exists(path))
        # run ogr2ogr
        ogr = OGRImport(path)
        layers = ogr.handle(configuration_options=configs)
        return layers

    def generic_import(self, filename, configs=None):
        # Import a fixture file and sanity-check every resulting layer
        # (raster layers are checked on disk, vector layers in the DB/catalog).
        # Returns the first imported Layer.
        if configs is None:
            configs = [{'index': 0}]
        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_results = []
        for result in results:
            if result[1].get('raster'):
                layer_path = result[0]
                layer_name = os.path.splitext(os.path.basename(layer_path))[0]
                layer = Layer.objects.get(name=layer_name)
                self.assertTrue(layer_path.endswith('.tif'))
                self.assertTrue(os.path.exists(layer_path))
                gdal_layer = gdal.OpenEx(layer_path)
                # NOTE(review): assertTrue(x, msg) — 'GTiff' is the failure
                # message here, not a comparison; assertEqual was likely meant.
                self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')
                # Zipped sources can't be opened directly by DataSource,
                # so the attribute-count check is skipped for them.
                if not path.endswith('zip'):
                    self.assertGreaterEqual(
                        layer.attributes.count(),
                        DataSource(path)[0].num_fields
                    )
                layer_results.append(layer)
        return layer_results[0]

    def generic_api_upload(self, filenames, configs=None):
        """Helper that drives the import REST API end to end.

        Uploads ``filenames`` in one request, then configures each uploaded
        layer whose name matches a ``configs`` entry. Returns the JSON body
        of the upload response.
        """
        client = AdminClient()
        client.login_as_non_admin()
        # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o'].
        self.assertNotIsInstance(filenames, str)
        # Upload Files
        outfiles = []
        for filename in filenames:
            path = test_file(filename)
            with open(path) as stream:
                data = stream.read()
            upload = SimpleUploadedFile(filename, data)
            outfiles.append(upload)
        response = client.post(
            reverse('uploads-new-json'),
            {'file': outfiles, 'json': json.dumps(configs)},
            follow=True)
        content = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        # Assumes a clean database: the first upload gets pk 1.
        self.assertEqual(content['id'], 1)
        # Configure Uploaded Files
        upload_id = content['id']
        upload_layers = UploadLayer.objects.filter(upload_id=upload_id)
        for upload_layer in upload_layers:
            for config in configs:
                if config['upload_file_name'] == upload_layer.name:
                    payload = config['config']
                    url = '/importer-api/data-layers/{0}/configure/'.format(upload_layer.id)
                    # NOTE(review): assertTrue(status_code, 200) only checks
                    # truthiness of the status code; assertEqual was likely meant.
                    response = client.post(
                        url, data=json.dumps(payload),
                        content_type='application/json'
                    )
                    self.assertTrue(response.status_code, 200)
                    url = '/importer-api/data-layers/{0}/'.format(upload_layer.id)
                    response = client.get(url, content_type='application/json')
                    self.assertTrue(response.status_code, 200)
        return content

    def generic_raster_import(self, filename, configs=None):
        # Import a raster fixture and verify the GeoTIFF lands on disk and is
        # registered as a Layer; returns the Layer.
        if configs is None:
            configs = [{'index': 0}]
        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_path = results[0][0]
        layer_name = os.path.splitext(os.path.basename(layer_path))[0]
        layer = Layer.objects.get(name=layer_name)
        self.assertTrue(layer_path.endswith('.tif'))
        self.assertTrue(os.path.exists(layer_path))
        gdal_layer = gdal.OpenEx(layer_path)
        # NOTE(review): assertTrue used with a message, not a comparison.
        self.assertTrue(gdal_layer.GetDriver().ShortName, 'GTiff')
        return layer

    def test_multi_upload(self):
        """Tests Uploading Multiple Files
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_year_field.zip',
                'boxes_with_date.zip',
                'point_with_date.geojson'
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_year_field.shp',
                    'config': [{'index': 0}]
                },
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [{'index': 0}]
                },
                {
                    'upload_file_name': 'point_with_date.geojson',
                    'config': [{'index': 0}]
                }
            ]
        )
        # 9 = total files extracted/recorded across the two zips + geojson.
        self.assertEqual(9, upload['count'])

    def test_upload_with_slds(self):
        """Tests Uploading sld
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'boxes.sld',
                'boxes1.sld'
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [
                        {
                            'index': 0,
                            'default_style': 'boxes.sld',
                            'styles': ['boxes.sld', 'boxes1.sld']
                        }
                    ]
                }
            ]
        )
        self.assertEqual(6, upload['count'])
        upload_id = upload['id']
        upload_obj = UploadedData.objects.get(pk=upload_id)
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk
        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(6, upfiles_count)
        # Warning: this assumes that Layer pks equal UploadLayer pks
        layer = Layer.objects.get(pk=layer_id)
        gslayer = self.catalog.get_layer(layer.name)
        default_style = gslayer.default_style
        # TODO: can we use public API or omit this?
        self.catalog._cache.clear()
        # The configured default_style should have been applied in GeoServer.
        self.assertEqual('boxes.sld', default_style.filename)

    def test_upload_with_metadata(self):
        """Tests Uploading metadata
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'samplemetadata.xml',
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [
                        {
                            'index': 0,
                            'metadata': 'samplemetadata.xml'
                        }
                    ]
                }
            ]
        )
        self.assertEqual(5, upload['count'])
        upload_id = upload['id']
        upload_obj = UploadedData.objects.get(pk=upload_id)
        # TODO: why did we get upload_obj?
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk
        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(5, upfiles_count)
        layer = Layer.objects.get(pk=layer_id)
        # Language and title should come from the XML metadata document.
        self.assertEqual(layer.language, 'eng')
        self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar')

    def test_raster(self):
        """Exercise raster import.
        """
        layer = self.generic_raster_import(
            'test_grid.tif',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        self.assertTrue(layer.name.startswith('test_grid'))

    def test_box_with_year_field(self):
        """Tests the import of test_box_with_year_field.
        """
        layer = self.generic_import(
            'boxes_with_year_field.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """Tests the import of test_boxes_with_date.
        """
        layer = self.generic_import(
            'boxes_with_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """Tests the import of test_boxes_with_date.gpkg.
        """
        layer = self.generic_import(
            'boxes_with_date.gpkg',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """Tests the import of multilayer vector + raster geopackage using index
        """
        # Only the first (vector, index 0) layer is time-configured; the rest
        # are imported with default options.
        layer = self.generic_import(
            'boxes_plus_raster.gpkg',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                },
                {'index': 1},
                {'index': 2},
                {'index': 3},
                {'index': 4},
                {'index': 5},
                {'index': 6},
                {'index': 7},
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """Tests a CSV with WKT polygon.
        """
        layer = self.generic_import(
            'boxes_with_date.csv',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding.
        """
        # Success is simply that the import does not raise.
        self.generic_import(
            'missing-features.csv',
            configs=[
                {'index': 0}
            ]
        )

    def test_boxes_with_iso_date(self):
        """Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import(
            'boxes_with_date_iso_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """Import the same layer twice to ensure file names increment properly.
        """
        path = test_file('boxes_with_date_iso_date.zip')
        ogr = OGRImport(path)
        layers1 = ogr.handle({'index': 0, 'name': 'test'})
        layers2 = ogr.handle({'index': 0, 'name': 'test'})
        self.assertEqual(layers1[0][0], 'test')
        # The second import of the same name gets a numeric suffix.
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """Ensure the launder function works as expected.
        """
        # launder() lower-cases and replaces non-identifier characters with '_'.
        self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3')
        self.assertEqual(launder('Testing#'), 'testing_')
        self.assertEqual(launder(' '), '_')

    def test_boxes_with_date_iso_date_zip(self):
        """Tests the import of test_boxes_with_iso_date.
        """
        layer = self.generic_import(
            'boxes_with_date_iso_date.zip',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """Tests the import of test_boxes_with_dates_bc.
        """
        layer = self.generic_import(
            'boxes_with_dates_bc.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """Tests the import of point_with_date.geojson
        """
        layer = self.generic_import(
            'point_with_date.geojson',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        # OGR will name geojson layers 'ogrgeojson' we rename to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """Tests the import of test_boxes_with_end_date.

        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import(
            'boxes_with_end_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date', 'enddate'],
                    'start_date': 'date',
                    'end_date': 'enddate',
                    'configureTime': True
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        end_date_attr = get_layer_attr(layer, 'enddate_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
            end_attribute=end_date_attr.attribute
        )
        self.generic_time_check(
            layer,
            attribute=date_attr.attribute,
            end_attribute=end_date_attr.attribute
        )

    def test_us_states_kml(self):
        """Tests the import of us_states_kml.

        This layer has a date and an end date field that are typed correctly.
        """
        # TODO: Support time in kmls.
        layer = self.generic_import(
            'us_states.kml',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        self.assertEqual(layer.name.lower(), "us_states")

    def test_mojstrovka_gpx(self):
        """Tests the import of mojstrovka.gpx.

        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import(
            'mojstrovka.gpx',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['time'],
                    'configureTime': True
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'time_as_date')
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """Convenience method to run generic tests on time layers.

        Verifies the GeoServer time dimension is enabled on the resource and
        bound to the expected start/end attributes.
        """
        # TODO: can we use public API or omit this?
        self.catalog._cache.clear()
        resource = self.catalog.get_resource(
            layer.name, store=layer.store, workspace=self.workspace
        )
        time_info = resource.metadata['time']
        self.assertEqual('LIST', time_info.presentation)
        self.assertEqual(True, time_info.enabled)
        self.assertEqual(attribute, time_info.attribute)
        self.assertEqual(end_attribute, time_info.end_attribute)

    def test_us_shootings_csv(self):
        """Tests the import of US_Shootings.csv.
        """
        # CSV open options require GDAL >= 2.0.
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        path = test_file('US_Shootings.csv')
        layer = self.generic_import(
            path,
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['Date']
                }
            ]
        )
        self.assertTrue(layer.name.startswith('us_shootings'))
        date_field = 'date'
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_field
        )
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """Tests the import of US_Civil_Rights_Sitins0.csv
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import(
            'US_Civil_Rights_Sitins0.csv',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['Date']
                }
            ]
        )
        self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0')

    def get_layer_names(self, path):
        """Gets layer names from a data source.
        """
        data_source = DataSource(path)
        return [layer.name for layer in data_source]

    def test_gdal_import(self):
        # Smoke test: a plain GeoJSON import with a date conversion succeeds.
        path = test_file('point_with_date.geojson')
        self.generic_import(
            path,
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )

    def test_wfs(self):
        """Tests the import from a WFS Endpoint
        """
        # NOTE(review): depends on the external demo.boundlessgeo.com service.
        wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
        ogr = OGRImport(wfs)
        configs = [
            {'layer_name': 'og:bugsites'},
            {'layer_name': 'topp:states'}
        ]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """Tests the import from an ArcGIS JSON (FeatureServer) endpoint.
        """
        # NOTE(review): depends on the external sampleserver3.arcgisonline.com.
        endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
        ogr = OGRImport(endpoint)
        configs = [{'index': 0}]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_file_add_view(self):
        """Tests the file_add_view.
        """
        client = AdminClient()
        # test login required for this view
        request = client.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)
        client.login_as_non_admin()
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path, reverse('uploads-list'))
        self.assertEqual(len(response.context['object_list']), 1)
        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)
        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))
        # An empty file should be rejected with a form error, not accepted.
        with open(test_file('empty_file.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """Tests the file_add_view.
        """
        client = AdminClient()
        client.login_as_non_admin()
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new-json'),
                {'file': stream},
                follow=True
            )
        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """Tests the describe fields functionality.
        """
        path = test_file('US_Shootings.csv')
        with GDALInspector(path) as inspector:
            layers = inspector.describe_fields()
        # NOTE(review): assertTrue used with a message argument here.
        self.assertTrue(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual(
            [n['name'] for n in layers[0]['fields']],
            ['Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City', 'Longitude', 'Latitude']
        )
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """Tests GDAL driver/file-type detection for each fixture format.
        """
        filenames = {
            'US_Shootings.csv': 'CSV',
            'point_with_date.geojson': 'GeoJSON',
            'mojstrovka.gpx': 'GPX',
            'us_states.kml': 'KML',
            'boxes_with_year_field.shp': 'ESRI Shapefile',
            'boxes_with_date_iso_date.zip': 'ESRI Shapefile'
        }
        from .models import NoDataSourceFound
        try:
            for filename, file_type in sorted(filenames.items()):
                path = test_file(filename)
                with GDALInspector(path) as inspector:
                    self.assertEqual(inspector.file_type(), file_type)
        except NoDataSourceFound:
            logging.exception('No data source found in: {0}'.format(path))
            raise

    def test_configure_view(self):
        """Tests the configuration view.
        """
        path = test_file('point_with_date.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        client = AdminClient()
        client.login_as_non_admin()
        with open(path) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        upload = response.context['object_list'][0]
        payload = [
            {
                'index': 0,
                'convert_to_date': ['date'],
                'start_date': 'date',
                'configureTime': True,
                'editable': True,
                'permissions': {
                    'users': {
                        'test': new_user_perms,
                        'AnonymousUser': [
                            'change_layer_data',
                            'download_resourcebase',
                            'view_resourcebase'
                        ]
                    }
                }
            }
        ]
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json'
        )
        self.assertTrue(response.status_code, 200)
        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(
            Layer.objects.all()[0].owner.username,
            self.non_admin_user.username
        )
        perms = first_layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_user.username)
        # check user permissions
        expected_perms = [
            u'publish_resourcebase',
            u'change_resourcebase_permissions',
            u'delete_resourcebase',
            u'change_resourcebase',
            u'change_resourcebase_metadata',
            u'download_resourcebase',
            u'view_resourcebase',
            u'change_layer_style',
            u'change_layer_data'
        ]
        for perm in expected_perms:
            self.assertIn(perm, perms['users'][user])
        self.assertTrue(perms['users'][new_user])
        self.assertIn(
            'change_resourcebase_permissions',
            perms['users'][new_user]
        )
        self.assertIn(
            'change_layer_data',
            perms['users'][User.objects.get(username='******')]
        )
        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer,
                         first_layer)

    def test_configure_view_convert_date(self):
        """Tests the configure view with a dataset that needs to be converted to a date.
        """
        client = AdminClient()
        client.login_as_non_admin()
        with open(test_file('US_Shootings.csv')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        upload = response.context['object_list'][0]
        payload = [
            {
                'index': 0,
                'convert_to_date': ['Date'],
                'start_date': 'Date',
                'configureTime': True,
                'editable': True
            }
        ]
        # GET is not allowed on the configure endpoint …
        response = client.get(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 405)
        # … and POST without a body is a bad request.
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 400)
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json'
        )
        self.assertTrue(response.status_code, 200)
        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(first_layer.attributes.filter(attribute='date'), 'xsd:dateTime')
        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        # ensure a user who does not own the upload cannot configure an import from it.
        client.logout()
        client.login_as_admin()
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        # Anonymous access to the list API is rejected.
        client = AdminClient()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)
        client.login_as_non_admin()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        admin = User.objects.get(username=self.admin_user.username)
        non_admin = User.objects.get(username=self.non_admin_user.username)
        path = test_file('US_Shootings.csv')
        with open(path, 'rb') as stream:
            uploaded_file = SimpleUploadedFile('test_data', stream.read())
            admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
            admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))
            non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))
        client.login_as_admin()
        # Admin sees both uploads; filtering by username narrows to one.
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)
        response = client.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        # Anonymous access to the layer list API is rejected.
        client = AdminClient()
        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)
        client.login_as_non_admin()
        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """Ensure users can delete their data.
        """
        client = AdminClient()
        # NOTE(review): duplicate instantiation — the first client is discarded.
        client = AdminClient()
        client.login_as_non_admin()
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        self.assertEqual(UploadedData.objects.all().count(), 1)
        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """Ensure that administrators can delete data that isn't theirs.
        """
        client = AdminClient()
        client.login_as_non_admin()
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        self.assertEqual(UploadedData.objects.all().count(), 1)
        client.logout()
        client.login_as_admin()
        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """Tests providing a name in the configuration options.

        NOTE(review): the method name lacks the 'test_' prefix, so the test
        runner never executes it — confirm whether that is intentional.
        """
        client = AdminClient()
        client.login_as_non_admin()
        name = 'point-with-a-date'
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'name': name,
            'editable': True
        }
        response = client.post(
            '/importer-api/data-layers/1/configure/',
            data=json.dumps(payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 200)
        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """Tests the import api.
        """
        client = AdminClient()
        client.login_as_non_admin()
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )
        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }
        self.assertIsInstance(
            UploadLayer.objects.first().configuration_options,
            dict
        )
        response = client.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)
        response = client.post(
            '/importer-api/data-layers/1/configure/',
            data=json.dumps([payload]),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn('task', response.content)
        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertTrue(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, first_layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """Test the file extension validator.
        """
        for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions:
            filename = 'test.{0}'.format(extension)
            upload = SimpleUploadedFile(filename, '')
            self.assertIsNone(validate_file_extension(upload))
        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """Test that the inspector validator rejects CSVs without a usable geometry column.
        """
        with self.assertRaises(ValidationError):
            validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))
        # This should pass (geom type is unknown)
        validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """Regression test for numeric field overflows in shapefiles.

        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import(
            'Walmart.zip',
            configs=[
                {
                    'configureTime': False,
                    'convert_to_date': ['W1_OPENDAT'],
                    'editable': True,
                    'index': 0,
                    'name': 'Walmart',
                    'start_date': 'W1_OPENDAT'
                }
            ]
        )

    def test_multipolygon_shapefile(self):
        """Tests shapefile with multipart polygons.
        """
        self.generic_import('PhoenixFirstDues.zip', configs=[{'index': 0}])

    def test_istanbul(self):
        """Tests shapefile with multipart polygons and non-WGS84 SR.
        """
        result = self.generic_import(
            'Istanbul.zip',
            configs=[
                {'index': 0}
            ]
        )
        # The source projection must be preserved in GeoServer.
        feature_type = self.catalog.get_resource(result.name)
        self.assertEqual(feature_type.projection, 'EPSG:32635')

    def test_gwc_handler(self):
        """Tests the GeoWebCache handler
        """
        layer = self.generic_import(
            'boxes_with_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )
        gwc = GeoWebCacheHandler(None)
        gs_layer = self.catalog.get_layer(layer.name)
        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()
        # Time-enabled layers get a regex parameter filter in their GWC config.
        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)
        # Don't configure time, ensure everything still works
        layer = self.generic_import(
            'boxes_with_date_iso_date.shp',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        gs_layer = self.catalog.get_layer(layer.name)
        self.catalog._cache.clear()
        gs_layer.fetch()
        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertNotIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """Tests utf8 characters in attributes
        """
        path = test_file('china_provinces.shp')
        layer = self.generic_import(path)
        ogr = OGRImport(path)
        datastore, _ = ogr.open_target_datastore(ogr.target_store)
        sql = (
            "select NAME_CH from {0} where NAME_PY = 'An Zhou'"
            .format(layer.name)
        )
        result = datastore.ExecuteSQL(sql)
        feature = result.GetFeature(0)
        self.assertEqual(feature.GetField('name_ch'), '安州')

    def test_non_converted_date(self):
        """Test converting a field as date.
        """
        results = self.generic_import(
            'TM_WORLD_BORDERS_2005.zip',
            configs=[
                {
                    'index': 0,
                    'start_date': 'Year',
                    'configureTime': True
                }
            ]
        )
        layer = self.catalog.get_layer(results.typename)
        self.assertIn('time', layer.resource.metadata)
        # The attribute name is laundered to lower case by the importer.
        self.assertEqual('year', layer.resource.metadata['time'].attribute)

    def test_fid_field(self):
        """
        Regression test for preserving an FID field when target layer supports it but source does not.
        """
        self.generic_import(
            'noaa_paleoclimate.zip',
            configs=[
                {
                    'index': 0
                }
            ]
        )

    def test_csv_with_wkb_geometry(self):
        """Exercise import of CSV files with multiple geometries.
        """
        filenames = [
            'police_csv.csv',
            'police_csv_nOGC.csv',
            'police_csv_noLatLon.csv',
            'police_csv_WKR.csv',
            'police_csv_nFID.csv',
            'police_csv_nOGFID.csv',
            'police_csv_noWKB.csv'
        ]
        for filename in filenames:
            self.generic_import(
                filename,
                {
                    'configureTime': True,
                    'convert_to_date': ['date_time'],
                    'editable': True,
                    'index': 0,
                    'name': filename.lower(),
                    'permissions': {
                        'users': {
                            'AnonymousUser': [
                                'change_layer_data',
                                'download_resourcebase',
                                'view_resourcebase'
                            ]
                        }
                    },
                    'start_date': 'date_time',
                }
            )
def publishGeoserver(appdef, progress):
    """Publish every non-file layer of *appdef* to the configured GeoServer.

    Wipes and recreates the target datastore, uploads SLD styles and their
    icons, publishes vector layers either through PostGIS or as uploaded
    feature stores, uploads rasters as coverage stores, and reports progress
    through *progress*. Returns immediately when no layer needs GeoServer.
    """
    view_crs = appdef["Settings"]["App view CRS"]
    # GeoServer is only needed for non-file vector layers that are not
    # already served over WFS.
    needs_geoserver = any(
        app_layer.method != METHOD_FILE
        and app_layer.layer.type() == app_layer.layer.VectorLayer
        and app_layer.layer.providerType().lower() != "wfs"
        for app_layer in appdef["Layers"])
    if not needs_geoserver:
        return
    progress.setText("Publishing to GeoServer")
    progress.setProgress(0)
    deploy = appdef["Deploy"]
    gs_url = deploy["GeoServer url"] + "/rest"
    gs_password = deploy["GeoServer password"]
    gs_username = deploy["GeoServer username"]
    workspace_name = deploy["GeoServer workspace"]
    ds_name = "ds_" + workspace_name
    pg_host = deploy["PostGIS host"]
    pg_port = deploy["PostGIS port"]
    pg_username = deploy["PostGIS username"]
    pg_password = deploy["PostGIS password"]
    pg_database = deploy["PostGIS database"]
    pg_schema = deploy["PostGIS schema"]
    catalog = Catalog(gs_url, gs_username, gs_password)
    workspace = catalog.get_workspace(workspace_name)
    if workspace is None:
        workspace = catalog.create_workspace(workspace_name, workspace_name)
    # Best-effort removal of any previous datastore with the same name,
    # together with all of its layers and resources.
    try:
        store = catalog.get_store(ds_name, workspace)
        for resource in store.get_resources():
            for gs_layer in catalog.get_layers(resource):
                catalog.delete(gs_layer)
            catalog.delete(resource)
        catalog.delete(store)
    except Exception:
        pass
    try:
        store = catalog.get_store(ds_name, workspace)
    except FailedRequestError:
        store = None
    layer_count = len(appdef["Layers"])
    for idx, app_layer in enumerate(appdef["Layers"]):
        qgis_layer = app_layer.layer
        if app_layer.method not in (METHOD_FILE, METHOD_DIRECT):
            name = safeName(qgis_layer.name())
            sld, icons = getGsCompatibleSld(qgis_layer)
            if sld is not None:
                catalog.create_style(name, sld, True)
            uploadIcons(icons, gs_username, gs_password, catalog.gs_base_url)
            if qgis_layer.type() == qgis_layer.VectorLayer:
                if app_layer.method in (METHOD_WFS_POSTGIS, METHOD_WMS_POSTGIS):
                    if store is None:
                        # Create the PostGIS-backed datastore lazily, on
                        # first use.
                        store = catalog.create_datastore(ds_name, workspace)
                        store.connection_parameters.update(
                            host=pg_host,
                            port=str(pg_port),
                            database=pg_database,
                            user=pg_username,
                            schema=pg_schema,
                            passwd=pg_password,
                            dbtype="postgis")
                        catalog.save(store)
                    catalog.publish_featuretype(name, store, qgis_layer.crs().authid())
                else:
                    path = getDataFromLayer(qgis_layer, view_crs)
                    catalog.create_featurestore(name, path, workspace=workspace, overwrite=True)
                    resource = catalog.get_layer(name).resource
                    # Force the SRS of the uploaded data to the app view CRS.
                    resource.dirty['srs'] = view_crs
                    catalog.save(resource)
            elif qgis_layer.type() == qgis_layer.RasterLayer:
                path = getDataFromLayer(qgis_layer, view_crs)
                catalog.create_coveragestore(name, path, workspace=workspace, overwrite=True)
            if sld is not None:
                publishing = catalog.get_layer(name)
                publishing.default_style = catalog.get_style(name)
                catalog.save(publishing)
        progress.setProgress(int((idx + 1) * 100.0 / layer_count))
from geoserver.catalog import Catalog import geoserver.util cat = Catalog("http://localhost:9090/geoserver/rest/") cat.username = "******" cat.password = "******" workspace = cat.get_workspace("fenix") cat.create_coveragestore( "test_gsconfig2", "/home/vortex/Desktop/LAYERS/MOROCCO_MICHELA/to_publish/original/wheat_mask.tif", workspace) layer = cat.get_layer("test_gsconfig2") layer._set_default_style("mask") cat.save(layer) # print cat # print "here" # layer = cat.get_style("burg")
class ModifyingTests(unittest.TestCase):
    """Integration tests that mutate state on a live GeoServer instance.

    Each test talks to the server configured in GSPARAMS and generally
    follows a change/save/re-fetch/assert round-trip pattern. Note the
    Python 2 idioms throughout (``iteritems``, generator ``.next()``,
    ``assert_``/``assertEquals``); do not run under Python 3 without
    porting.
    """

    def setUp(self):
        # Fresh catalog connection per test; credentials come from GSPARAMS.
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD'])

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        # Create a database-backed datastore from DBPARAMS; no assertions —
        # success means no exception from save().
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            try:
                if lyr:
                    self.cat.delete(lyr)
                    self.cat.delete(lyr.resource)
                if ds:
                    self.cat.delete(ds)
            except:
                # Deliberate best-effort cleanup; failures here must not
                # mask the test outcome.
                pass

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        # Self-assignment marks connection_parameters dirty so the added
        # key is serialized on save.
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        # Create a DB datastore and bulk-load a shapefile into it as table
        # 'import' (dropped again by the decorator).
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    @drop_table('import2')
    def testVirtualTables(self):
        ds = self.cat.create_datastore("gsconfig_import_test2")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test2")
        self.cat.add_data_to_store(ds, "import2", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })
        store = self.cat.get_store("gsconfig_import_test2")
        geom = JDBCVirtualTableGeometry('the_geom','MultiPolygon','4326')
        ft_name = 'my_jdbc_vt_test'
        epsg_code = 'EPSG:4326'
        # NOTE(review): single quotes around STATE_NAME make it a string
        # literal, not a column reference — likely intentional-as-written,
        # but verify against the SQL view actually wanted.
        sql = "select * from import2 where 'STATE_NAME' = 'Illinois'"
        keyColumn = None
        parameters = None

        jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn, parameters)
        ft = self.cat.publish_featuretype(ft_name, store, epsg_code, jdbc_virtual_table=jdbc_vt)

    # DISABLED; this test works only in the very particular case
    # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR
    # def testCoverageStoreCreate(self):
    #     ds = self.cat.create_coveragestore2("coverage_gsconfig")
    #     ds.data_url = "file:test/data/mytiff.tiff"
    #     self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        # Bypass the gsconfig object cache to re-read server state.
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

        # Testing projection and projection policy changes
        changed_layer.projection = "EPSG:900913"
        changed_layer.projection_policy = "REPROJECT_TO_DECLARED"
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        layer = self.cat.get_layer(changed_layer.name)
        self.assertEqual(layer.resource.projection_policy, changed_layer.projection_policy)
        self.assertEqual(layer.resource.projection, changed_layer.projection)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

        # Re-creating the same store must conflict; uploading a shapefile as
        # a coverage must fail.
        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

        ft_ext = self.cat.create_coveragestore_external_geotiff("Pk50095_ext", 'file:test/data/Pk50095.tif', sf)

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = {
            'title': 'Not the original attribution',
            'width': '123',
            'height': '321',
            'href': 'http://www.georchestra.org',
            'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg',
            'type': 'image/jpeg'
        }

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        # Python 2 generator .next(): pick the "pophatch" style object.
        lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # check count before tests (upload)
        count = len(self.cat.get_styles())

        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

        # compare count after upload
        self.assertEqual(count +1, len(self.cat.get_styles()))

        # attempt creating a new style without "title"
        self.cat.create_style("notitle", open("test/notitle.sld").read())
        notitle = self.cat.get_style("notitle")
        self.assertEqual(None, notitle.sld_title)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testImageMosaic(self):
        """
        Test case for Issue #110
        """
        # testing the mosaic creation
        name = 'cea_mosaic'
        # NOTE(review): file handle is never closed; acceptable in a test
        # but a `with` block would be tidier.
        data = open('test/data/mosaic/cea.zip', 'rb')
        self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assert_(resource is not None)

        # delete granule from mosaic
        coverage = name
        store = self.cat.get_store(name)
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)

    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)

        # `clear() or ...` idiom: clear() returns None, so the lambda always
        # re-fetches an uncached copy of the resource.
        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer('boxes_with_end_date').resource

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date")
        resource.metadata = {'time':timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time' : timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time' : timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)
else: return (layer, demo.get_layer(layer).default_style.name) g = demo.get_layergroup("groupname") resolved = [resolve(l, s) for (l, s) in zip(g.layers, g.styles)] # upload all styles to live for (l, s) in resolved: wayne_style = prefix + s style_on_server = live.get_style(wayne_style) sld = demo.get_style(s).sld_body if style_on_server is None: live.create_style(wayne_style, sld) else: style_on_server.update_body(sld) backup_layernames = {} # check that all requisite layers exist! for (l, s) in resolved: assert live.get_layer(l) is not None or l in backup_layernames, l lyrs = [backup_layernames.get(x[0], x[0]) for x in resolved] stls = [(prefix + x[1]) for x in resolved] wayne_group = live.get_layergroup(groupname) if wayne_group is None: wayne_group = live.create_layergroup(groupname) wayne_group.layers = lyrs wayne_group.styles = stls live.save(wayne_group)
def geoserver_pre_save(instance, sender, **kwargs):
    """Send information to geoserver.

    The attributes sent include:

        * Title
        * Abstract
        * Name
        * Keywords
        * Metadata Links,
        * Point of Contact name and url

    Afterwards it reads back bounding-box information from GeoServer and
    stores it on *instance* (without saving the model).
    """
    url = ogc_server_settings.rest
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (EnvironmentError, FailedRequestError) as e:
        gs_resource = None
        # FIX: the two literals previously concatenated without a space
        # ('...%s"to save...'), and the exception was passed to logger.warn
        # as a %-format argument for a message with no placeholder, which
        # made the log record fail to format.
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' % (
                   ogc_server_settings.LOCATION, instance.name.encode('utf-8'))
               )
        logger.warning('%s: %s', msg, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warning('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.title = instance.title
    gs_resource.abstract = instance.abstract
    gs_resource.name = instance.name

    # Get metadata links
    metadata_links = [(link.name, link.mime, link.url)
                      for link in instance.link_set.metadata()]
    gs_resource.metadata_links = metadata_links

    # gs_resource should only be saved if BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    gs_layer = gs_catalog.get_layer(instance.name)

    if instance.poc and instance.poc.user:
        gs_layer.attribution = str(instance.poc.user)
        profile = Profile.objects.get(user=instance.poc.user)
        gs_layer.attribution_link = settings.SITEURL[:-1] + profile.get_absolute_url()
        # gs_layer should only be saved if BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
            gs_catalog.save(gs_layer)

    # Get information from geoserver:
    #   * Bounding Box
    #   * SRID
    #   * Download links (WMS, WCS or WFS and KML)
    #   * Styles (SLD)
    # (previously a stray string literal; made a proper comment)
    gs_resource = gs_catalog.get_resource(instance.name)

    bbox = gs_resource.latlon_bbox

    # FIXME(Ariel): Correct srid setting below
    # self.srid = gs_resource.src

    # Set bounding box values; latlon_bbox order is (minx, maxx, miny, maxy)
    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]

    instance.update_thumbnail(save=False)
def publishGeoserver(appdef, progress):
    # Publish all non-file layers of `appdef` to the GeoServer configured in
    # appdef["Deploy"], reporting progress through `progress`.
    viewCrs = appdef["Settings"]["App view CRS"]
    # GeoServer is only needed when at least one non-file vector layer is not
    # already served over WFS.
    usesGeoServer = False
    for applayer in appdef["Layers"]:
        if applayer.method != METHOD_FILE:
            if applayer.layer.type(
            ) == applayer.layer.VectorLayer and applayer.layer.providerType(
            ).lower() != "wfs":
                usesGeoServer = True
    if not usesGeoServer:
        return
    progress.setText("Publishing to GeoServer")
    progress.setProgress(0)
    geoserverUrl = appdef["Deploy"]["GeoServer url"] + "/rest"
    geoserverPassword = appdef["Deploy"]["GeoServer password"]
    geoserverUsername = appdef["Deploy"]["GeoServer username"]
    workspaceName = appdef["Deploy"]["GeoServer workspace"]
    dsName = "ds_" + workspaceName
    host = appdef["Deploy"]["PostGIS host"]
    port = appdef["Deploy"]["PostGIS port"]
    postgisUsername = appdef["Deploy"]["PostGIS username"]
    postgisPassword = appdef["Deploy"]["PostGIS password"]
    database = appdef["Deploy"]["PostGIS database"]
    schema = appdef["Deploy"]["PostGIS schema"]
    catalog = Catalog(geoserverUrl, geoserverUsername, geoserverPassword)
    workspace = catalog.get_workspace(workspaceName)
    if workspace is None:
        workspace = catalog.create_workspace(workspaceName, workspaceName)
    # Best-effort removal of a pre-existing datastore of the same name,
    # with all of its layers and resources.
    try:
        store = catalog.get_store(dsName, workspace)
        resources = store.get_resources()
        for resource in resources:
            layers = catalog.get_layers(resource)
            for layer in layers:
                catalog.delete(layer)
            catalog.delete(resource)
        catalog.delete(store)
    except Exception:
        pass
    try:
        store = catalog.get_store(dsName, workspace)
    except FailedRequestError:
        store = None
    for i, applayer in enumerate(appdef["Layers"]):
        layer = applayer.layer
        if applayer.method != METHOD_FILE and applayer.method != METHOD_DIRECT:
            name = safeName(layer.name())
            sld, icons = getGsCompatibleSld(layer)
            if sld is not None:
                catalog.create_style(name, sld, True)
            uploadIcons(icons, geoserverUsername, geoserverPassword,
                        catalog.gs_base_url)
            if layer.type() == layer.VectorLayer:
                if applayer.method == METHOD_WFS_POSTGIS or applayer.method == METHOD_WMS_POSTGIS:
                    if store is None:
                        # Create the PostGIS-backed datastore lazily on first use.
                        store = catalog.create_datastore(dsName, workspace)
                        store.connection_parameters.update(
                            host=host,
                            port=str(port),
                            database=database,
                            user=postgisUsername,
                            schema=schema,
                            passwd=postgisPassword,
                            dbtype="postgis")
                        catalog.save(store)
                    catalog.publish_featuretype(name, store, layer.crs().authid())
                else:
                    path = getDataFromLayer(layer, viewCrs)
                    catalog.create_featurestore(name,
                                                path,
                                                workspace=workspace,
                                                overwrite=True)
                    gslayer = catalog.get_layer(name)
                    r = gslayer.resource
                    # Force the SRS of the uploaded data to the app view CRS.
                    r.dirty['srs'] = viewCrs
                    catalog.save(r)
            elif layer.type() == layer.RasterLayer:
                path = getDataFromLayer(layer, viewCrs)
                catalog.create_coveragestore(name,
                                             path,
                                             workspace=workspace,
                                             overwrite=True)
            if sld is not None:
                publishing = catalog.get_layer(name)
                publishing.default_style = catalog.get_style(name)
                catalog.save(publishing)
        progress.setProgress(int((i + 1) * 100.0 / len(appdef["Layers"])))