def getAttributes(shapefile, WFS_URL):
    """
    Given a valid shapefile (WFS FeatureType as returned by getShapefiles), this function
    will make a request for one feature from the FeatureType and parse out the attributes
    that come from a namespace not associated with the normal GML schema. There may be a
    better way to determine which are shapefile dbf attributes, but this should work
    pretty well.
    """
    wfs = WebFeatureService(WFS_URL, version='1.1.0')
    feature = wfs.getfeature(typename=shapefile, maxfeatures=1, propertyname=None)
    gml = etree.parse(feature)
    gml_root = gml.getroot()
    name_spaces = gml_root.nsmap

    attributes = []

    for namespace in name_spaces.values():
        if namespace not in ['http://www.opengis.net/wfs',
                             'http://www.w3.org/2001/XMLSchema-instance',
                             'http://www.w3.org/1999/xlink',
                             'http://www.opengis.net/gml',
                             'http://www.opengis.net/ogc',
                             'http://www.opengis.net/ows']:
            custom_namespace = namespace
            for element in gml.iter('{' + custom_namespace + '}*'):
                if etree.QName(element).localname not in ['the_geom', 'Shape', shapefile.split(':')[1]]:
                    attributes.append(etree.QName(element).localname)
    return attributes
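# Hedged usage sketch for getAttributes: the endpoint and layer name below are
# placeholders (assumptions), not values taken from the code above; any GeoServer-style
# WFS 1.1.0 endpoint with a namespaced FeatureType should behave similarly.
demo_url = 'https://example.org/geoserver/wfs'           # hypothetical endpoint
attrs = getAttributes('workspace:my_layer', demo_url)    # hypothetical typename
print(attrs)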
def getWFSData():
    from owslib.wfs import WebFeatureService

    params = getWFSParams()       # Get parameters
    params = checkParams(params)  # Check parameters

    wfs = WebFeatureService(params['baseURL'].value, version=params['version'].value)
    # Contact server
    response = wfs.getfeature(typename=str(params['typeName'].value),
                              featureid=[params['featureID'].value])

    # Check for a BODC server by looking for 'bodc' anywhere in the base URL
    if 'bodc' in params['baseURL'].value:
        response = processBODCResponse(response.read(), params)  # Get data from response

    current_app.logger.debug('Jsonifying response...')  # DEBUG

    # Convert to json
    try:
        jsonData = jsonify(output=response)
    except TypeError as e:
        g.error = "Request aborted, exception encountered: %s" % e
        error_handler.setError('2-06', None, g.user.id,
                               "views/wfs.py:getWFSData - Type error, returning 500 to user. Exception %s" % e,
                               request)
        abort(500)  # If we fail to jsonify the data return 500

    current_app.logger.debug('Request complete, Sending results')  # DEBUG

    return jsonData  # return json
def get_forecast(place=None, latlon=None, timestep=60, start_time=None, end_time=None): url = 'https://opendata.fmi.fi/wfs' wfs = WebFeatureService(url=url, version='2.0.0') params = {'timestep': timestep} if latlon: params['latlon'] = '%s,%s' % (latlon[0], latlon[1]) elif place: params['place'] = place if start_time: params['starttime'] = start_time.isoformat().split('.')[0] + 'Z' query_id = 'fmi::forecast::harmonie::surface::point::multipointcoverage' resp = wfs.getfeature(storedQueryID=query_id, storedQueryParams=params) root = etree.fromstring(bytes(resp.read(), encoding='utf8')) print(str(etree.tostring(root), encoding='utf8')) result_time = root.find('.//{*}resultTime//{*}timePosition').text result_time = dateutil.parser.parse(result_time).astimezone(LOCAL_TZ) positions = root.find('.//{*}positions').text observations = root.find( './/{*}DataBlock/{*}doubleOrNilReasonTupleList').text fields = root.findall('.//{*}DataRecord/{*}field') field_names = [x.attrib['name'] for x in fields] positions = [ re.findall(r'\S+', x.strip()) for x in positions.splitlines() if x.strip() ] observations = [ re.findall(r'\S+', x.strip()) for x in observations.splitlines() if x.strip() ] data = [] last_precipitation = None for pos, obs in zip(positions, observations): d = { field_name: float(sample) for field_name, sample in zip(field_names, obs) } ts = datetime.fromtimestamp(int(pos[2])) ts.replace(tzinfo=pytz.UTC) d['time'] = LOCAL_TZ.localize(ts) if 'PrecipitationAmount' in d: if last_precipitation: val = d['PrecipitationAmount'] d['PrecipitationAmount'] -= last_precipitation last_precipitation = val else: last_precipitation = d['PrecipitationAmount'] data.append(d) return dict(observations=data, meta=dict(result_time=result_time))
def test_srsname_wfs_110():
    wfs = WebFeatureService(
        'https://www.sciencebase.gov/catalogMaps/mapping/ows/53398e51e4b0db25ad10d288',
        version='1.1.0')
    # ServiceException: Unable to support srsName: EPSG:99999999
    with pytest.raises(ServiceException):
        feature = wfs.getfeature(
            typename=['sb:Project_Area'], maxfeatures=1, propertyname=None,
            outputFormat='application/json', srsname="EPSG:99999999")

    wfs = WebFeatureService(
        'https://www.sciencebase.gov/catalogMaps/mapping/ows/53398e51e4b0db25ad10d288',
        version='1.0.0')
    feature = wfs.getfeature(
        typename=['sb:Project_Area'], maxfeatures=1, propertyname=None,
        outputFormat='application/json', srsname="urn:x-ogc:def:crs:EPSG:4326")
    assert len(json.loads(feature.read())['features']) == 1
def test_outputformat_wfs_100():
    wfs = WebFeatureService(
        'https://www.sciencebase.gov/catalogMaps/mapping/ows/53398e51e4b0db25ad10d288',
        version='1.0.0')
    feature = wfs.getfeature(typename=['sb:Project_Area'], maxfeatures=1,
                             propertyname=None, outputFormat='application/json')
    assert len(json.loads(feature.read())['features']) == 1
def getValues(shapefile, attribute, getTuples, limitFeatures, wfs_url):
    """
    Similar to getAttributes: given a shapefile and a valid attribute, this function
    will make a call to the Web Feature Service returning a list of values associated
    with the shapefile and attribute.

    If getTuples = True, will also return the tuples of [feature:id] along with
    values [feature].
    """
    wfs = WebFeatureService(wfs_url, version='1.1.0')
    feature = wfs.getfeature(typename=shapefile, maxfeatures=limitFeatures,
                             propertyname=[attribute])
    content = BytesIO(feature.read().encode())
    gml = etree.parse(content)

    values = []
    for el in gml.iter():
        if attribute in el.tag:
            if el.text not in values:
                values.append(el.text)

    if getTuples == 'true' or getTuples == 'only':
        tuples = []
        att = False

        # If features are encoded as a list of featureMember elements.
        gmlid_found = False
        for featureMember in gml.iter('{' + GML_NAMESPACE + '}featureMember'):
            for el in featureMember.iter():
                if el.get('{' + GML_NAMESPACE + '}id'):
                    gmlid = el.get('{' + GML_NAMESPACE + '}id')
                    att = True
                    gmlid_found = True
                if attribute in el.tag and att is True:
                    value = el.text
                    tuples.append((value, gmlid))
                    att = False
        if not gmlid_found:
            raise Exception('No gml:id found in source feature service. This form of GML is not supported.')

        # If features are encoded as a featureMembers element.
        for featureMember in gml.iter('{' + GML_NAMESPACE + '}featureMembers'):
            for el in featureMember.iter():
                gmlid = el.get('{' + GML_NAMESPACE + '}id')
                for feat in el.getchildren():
                    if attribute in feat.tag:
                        value = feat.text
                        tuples.append((value, gmlid))

    if getTuples == 'true':
        return sorted(values), sorted(tuples)
    elif getTuples == 'only':
        return sorted(tuples)
    else:
        return sorted(values)
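# Hedged usage sketch for getValues: GML_NAMESPACE is the module-level constant the
# function relies on, and the endpoint, typename and attribute names are placeholders.
GML_NAMESPACE = 'http://www.opengis.net/gml'
values, id_tuples = getValues('workspace:my_layer', 'STATE_NAME', 'true', 50,
                              'https://example.org/geoserver/wfs')
print(values[:5], id_tuples[:5])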
def load_layer_data(request, template='layers/layer_detail.html'):
    context_dict = {}
    data_dict = json.loads(request.POST.get('json_data'))
    layername = data_dict['layer_name']
    filtered_attributes = [
        x for x in data_dict['filtered_attributes'].split(',')
        if '/load_layer_data' not in x
    ]
    workspace, name = layername.split(':')
    location = "{location}{service}".format(** {
        'location': settings.OGC_SERVER['default']['LOCATION'],
        'service': 'wms',
    })

    try:
        # TODO: should be improved by using OAuth2 token (or at least user related to it) instead of super-powers
        username = settings.OGC_SERVER['default']['USER']
        password = settings.OGC_SERVER['default']['PASSWORD']
        wfs = WebFeatureService(location, version='1.1.0', username=username, password=password)
        response = wfs.getfeature(typename=name, propertyname=filtered_attributes,
                                  outputFormat='application/json')
        x = response.read()
        x = json.loads(x)
        features_response = json.dumps(x)
        decoded = json.loads(features_response)
        decoded_features = decoded['features']
        properties = {}
        for key in decoded_features[0]['properties']:
            properties[key] = []

        # loop the dictionary based on the values on the list and add the properties
        # in the dictionary (if doesn't exist) together with the value
        for i in range(len(decoded_features)):
            for key, value in decoded_features[i]['properties'].items():
                if value != '' and isinstance(value, (string_types, int, float)) and \
                        ('/load_layer_data' not in value):
                    properties[key].append(value)

        for key in properties:
            properties[key] = list(set(properties[key]))
            properties[key].sort()
        context_dict["feature_properties"] = properties
    except Exception:
        import traceback
        traceback.print_exc()
        print("Possible error with OWSLib.")
    return HttpResponse(json.dumps(context_dict), content_type="application/json")
def webgisfilter(mapserv, layer, maxfeatures=None, startindex=None, bbox=None, filters=None):
    """webgis wfs client

    Each filter format should look like:

    {
        'attribute': ATTRIBUTE_NAME,  # e.g. 'NAME'
        'operator': OPERATOR,         # e.g. '='
        'value': VALUE                # e.g. 'Prague'
    }

    Operators: = != ~ IN

    :param str mapserv: url to mapserver
    :param str layer: layer name
    :param int maxfeatures: number of returned features
    :param int startindex: starting feature index
    :param Tuple.<dict> filters: tuple of filters

    :return: json-encoded result
    :rtype: dict
    """
    mywfs = WebFeatureService(url=mapserv, version='1.0.0')
    fes = None

    if filters:
        if bbox:
            filters.append({'operator': 'BBOX', 'value': bbox})
        fes = get_filter_root(get_filter_fes(filters))
        fes = etree.tostring(fes, encoding='unicode')

    if bbox and not filters:
        fes = None
    elif not bbox and filters:
        bbox = None
    elif bbox and filters:
        bbox = None

    layer_data = mywfs.getfeature(typename=[layer], filter=fes, bbox=bbox,
                                  featureid=None, outputFormat="GeoJSON",
                                  maxfeatures=maxfeatures, startindex=startindex)
    data = json.load(layer_data)

    for feature in data['features']:
        feature.pop('geometry', None)

    return data
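# Hedged usage sketch for webgisfilter, following the filter-dict format documented in
# the docstring above; the MapServer URL, layer and attribute names are placeholders.
features = webgisfilter(
    'https://example.org/cgi-bin/mapserv?map=demo.map',
    'cities',
    maxfeatures=10,
    filters=[{'attribute': 'NAME', 'operator': '=', 'value': 'Prague'}])
print(len(features['features']))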
def do_query_wfs(cls, request, typename, propertyname, bbox, return_template, filter): """ Query a WFS service """ try: settings = request.registry.settings url = settings['spch_wfs_url'] version = settings['version'] srsname = settings['srsname'] localites_typename = settings['localites_typename'] cadastre_typename = settings['cadastre_typename'] communes_typename = settings['communes_typename'] wfs = WebFeatureService(url=url, version=version) response = wfs.getfeature(typename=typename, propertyname=propertyname, srsname=srsname, bbox=bbox, filter=filter) #gml = ElementTree.fromstring(response.read()) xpars = xmltodict.parse(response.read()) #wfs_json = json.dumps(xpars) formattedFeatures = [] if "wfs:FeatureCollection" in xpars and "gml:featureMember" in xpars[ "wfs:FeatureCollection"]: features = xpars["wfs:FeatureCollection"]["gml:featureMember"] for feature in features: currentTypename = None if "ms:" + localites_typename in feature: currentTypename = localites_typename elif "ms:" + cadastre_typename in feature: currentTypename = cadastre_typename elif "ms:" + communes_typename in feature: currentTypename = communes_typename if "ms:" + currentTypename in feature: atts = feature["ms:" + currentTypename] one_return_obj = cls.substitute(atts, return_template) formattedFeatures.append(json.loads(one_return_obj)) return formattedFeatures except Exception as error: raise Exception(str(error))
def fmi_request(query_id, start_time, end_time, weather_params, time_row_label):
    debug_input_filename = "pyfiles/FMIObservationSample.xml"
    ns = {
        'wfs': 'http://www.opengis.net/wfs/2.0',
        'BsWfs': 'http://xml.fmi.fi/schema/wfs/2.0',
        'gml': 'http://www.opengis.net/gml/3.2'
    }
    xml_root = ""
    if debug_input:
        xml_root = ET.parse(debug_input_filename).getroot()
    else:
        fmi_wfs = WebFeatureService(
            url='http://data.fmi.fi/fmi-apikey/' + get_config('FMI_API_KEY') + '/wfs',
            version='2.0.0')
        keys = ",".join(weather_params.keys())
        query_params = {
            'place': 'helsinki',
            'starttime': start_time,
            'endtime': end_time,
            'timestep': 60,  # minutes
            'parameters': keys
        }
        try:
            feature_read = fmi_wfs.getfeature(storedQueryID=query_id,
                                              storedQueryParams=query_params).read()
            xml_root = ET.fromstring(feature_read)
            if save_alert_sample:
                with open(debug_input_filename, "w") as data_file:
                    data_file.write(bytes(feature_read))
        except Exception as e:
            print("fmi_forecast_request exception: ", e)

    time_retrieved = xml_root.get('timeStamp')
    response_table = []
    first_name = None
    nan_counter = 0
    row_build = {'time_retrieved': time_retrieved}
    for member in xml_root.iterfind('wfs:member', ns):
        for item in list(member):
            name_elem = item.find('BsWfs:ParameterName', ns).text
            if not first_name:
                first_name = name_elem
            elif name_elem == first_name:
                response_table.append(row_build)
                row_build = {'time_retrieved': time_retrieved}
            row_build[time_row_label] = item.find('BsWfs:Time', ns).text
            value_elem = item.find('BsWfs:ParameterValue', ns).text
            if value_elem == 'NaN':
                value_elem = 0
                nan_counter += 1
            if name_elem in weather_params:
                row_build[weather_params[name_elem]] = value_elem
    if len(row_build) > 2:
        response_table.append(row_build)
    if nan_counter > 24:
        # Sometimes all values return as 'NaN' all the way from FMI.
        # Useless result - rather reload later
        response_table = None
    return response_table
def wfs_request_matching_file_pattern( imos_layer_name, filename_wfs_filter, url_column='url', geoserver_url='http://geoserver-123.aodn.org.au/geoserver/wfs', s3_bucket_url=False): """ returns a list of url matching a file pattern defined by filename_wfs_filter * if s3_bucket_url is False, returns the url as stored in WFS layer * if s3_bucket_url is True, append to its start the s3 IMOS bucket link used to download the file Examples: wfs_request_matching_file_pattern('srs_oc_ljco_wws_hourly_wqm_fv01_timeseries_map', '%') wfs_request_matching_file_pattern('srs_oc_ljco_wws_hourly_wqm_fv01_timeseries_map', '%', s3_bucket_url=True) wfs_request_matching_file_pattern('srs_oc_ljco_wws_hourly_wqm_fv01_timeseries_map', '%2014/06/%') wfs_request_matching_file_pattern('anmn_nrs_rt_meteo_timeseries_map', '%IMOS_ANMN-NRS_MT_%', url_column='file_url', s3_bucket_url=True) WARNING: Please exec $DATA_SERVICES_DIR/lib/test/python/manual_test_wfs_query.py to run unittests before modifying function """ from owslib.etree import etree from owslib.fes import PropertyIsLike from owslib.wfs import WebFeatureService import os import xml.etree.ElementTree as ET imos_layer_name = 'imos:%s' % imos_layer_name data_aodn_http_prefix = 'http://data.aodn.org.au' wfs11 = WebFeatureService(url=geoserver_url, version='1.1.0') wfs_filter = PropertyIsLike(propertyname=url_column, literal=filename_wfs_filter, wildCard='%') filterxml = etree.tostring(wfs_filter.toXML()).decode("utf-8") response = wfs11.getfeature(typename=imos_layer_name, filter=filterxml, propertyname=[url_column]) # parse XML to get list of URLS xml_wfs_output = response.read() root = ET.fromstring(xml_wfs_output) list_url = [] # parse xml if len(root) > 0: for item in root[0]: for subitem in item: file_url = subitem.text if s3_bucket_url: list_url.append( os.path.join(data_aodn_http_prefix, file_url)) else: list_url.append(file_url) return list_url
def runTest(self):
    minX = -76.766960
    minY = 39.283611
    maxX = -76.684120
    maxY = 39.338394
    filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)
    wfs = WebFeatureService('http://SDMDataAccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs',
                            version='1.0.0')
    response = wfs.getfeature(typename=('MapunitPolyExtended',), filter=filter, propertyname=None)
    self.assertTrue(response.read().find('<wfs:FeatureCollection') > 0,
                    'Unable to find feature dataset in WFS response')
def getValues(shapefile, attribute, getTuples, limitFeatures, WFS_URL):
    """
    Similar to getAttributes: given a shapefile and a valid attribute, this function
    will make a call to the Web Feature Service returning a list of values associated
    with the shapefile and attribute.

    If getTuples = True, will also return the tuples of [feature:id] along with
    values [feature].
    """
    wfs = WebFeatureService(WFS_URL, version="1.1.0")
    feature = wfs.getfeature(typename=shapefile, maxfeatures=limitFeatures,
                             propertyname=[attribute])
    gml = etree.parse(feature)

    values = []
    for el in gml.iter():
        if attribute in el.tag:
            if el.text not in values:
                values.append(el.text)

    if getTuples == "true" or getTuples == "only":
        tuples = []
        att = False  # make sure the flag exists before the first gml:id is seen

        # If features are encoded as a list of featureMember elements.
        gmlid_found = False
        for featureMember in gml.iter("{" + GML_NAMESPACE + "}featureMember"):
            for el in featureMember.iter():
                if el.get("{" + GML_NAMESPACE + "}id"):
                    gmlid = el.get("{" + GML_NAMESPACE + "}id")
                    att = True
                    gmlid_found = True
                if attribute in el.tag and att:
                    value = el.text
                    tuples.append((value, gmlid))
                    att = False
        if not gmlid_found:
            raise Exception("No gml:id found in source feature service. This form of GML is not supported.")

        # If features are encoded as a featureMembers element.
        for featureMember in gml.iter("{" + GML_NAMESPACE + "}featureMembers"):
            for el in featureMember.iter():
                gmlid = el.get("{" + GML_NAMESPACE + "}id")
                for feat in el.getchildren():
                    if attribute in feat.tag:
                        value = feat.text
                        tuples.append((value, gmlid))

    if getTuples == "true":
        return sorted(values), sorted(tuples)
    elif getTuples == "only":
        return sorted(tuples)
    else:
        return sorted(values)
def load_dataset_data(request, template='datasets/dataset_detail.html'): context_dict = {} data_dict = json.loads(request.POST.get('json_data')) layername = data_dict['dataset_name'] filtered_attributes = '' if not isinstance(data_dict['filtered_attributes'], str): filtered_attributes = [x for x in data_dict['filtered_attributes'] if '/load_dataset_data' not in x] name = layername if ':' not in layername else layername.split(':')[1] location = f"{(settings.OGC_SERVER['default']['LOCATION'])}wms" headers = {} if request and 'access_token' in request.session: access_token = request.session['access_token'] headers['Authorization'] = f'Bearer {access_token}' try: wfs = WebFeatureService( location, version='1.1.0', headers=headers ) response = wfs.getfeature( typename=name, propertyname=filtered_attributes, outputFormat='application/json') x = response.read() x = json.loads(x) features_response = json.dumps(x) decoded = json.loads(features_response) decoded_features = decoded['features'] properties = {} for key in decoded_features[0]['properties']: properties[key] = [] # loop the dictionary based on the values on the list and add the properties # in the dictionary (if doesn't exist) together with the value from collections.abc import Iterable for i in range(len(decoded_features)): for key, value in decoded_features[i]['properties'].items(): if value != '' and isinstance(value, (str, int, float)) and ( (isinstance(value, Iterable) and '/load_dataset_data' not in value) or value): properties[key].append(value) for key in properties: properties[key] = list(set(properties[key])) properties[key].sort() context_dict["feature_properties"] = properties except Exception: traceback.print_exc() logger.error("Possible error with OWSLib.") return HttpResponse(json.dumps(context_dict), content_type="application/json")
def test_xmlfilter_wfs_200():
    wfs = WebFeatureService('https://services.ga.gov.au/gis/stratunits/ows', version='2.0.0')
    filter_prop = PropertyIsLike(propertyname='stratunit:geologichistory',
                                 literal='Cisuralian - Guadalupian', matchCase=True)
    filterxml = etree.tostring(filter_prop.toXML()).decode("utf-8")
    getfeat_params = {'typename': 'stratunit:StratigraphicUnit', 'filter': filterxml}
    response = wfs.getfeature(**getfeat_params).read()
    assert b'<stratunit:geologichistory>Cisuralian - Guadalupian</stratunit:geologichistory>' in response
def load_data(bbox, max_features=1000):
    wfs = WebFeatureService(NOVO_FCAMPO_WFS, version='1.1.0',
                            auth=Authentication(verify=False))
    response = wfs.getfeature(typename='geoquimica:novo-fcampo', bbox=bbox,
                              srsname='urn:x-ogc:def:crs:EPSG:4326',
                              maxfeatures=max_features)
    data = gpd.read_file(response)
    data.rename(lambda x: str(x).lower(), axis='columns', inplace=True)
    return data
def webgisfilter(mapserv, layer, maxfeatures=None, startindex=None, bbox=None, filters=None):
    """webgis wfs client

    Each filter format should look like:

    {
        'attribute': ATTRIBUTE_NAME,  # e.g. 'NAME'
        'operator': OPERATOR,         # e.g. '='
        'value': VALUE                # e.g. 'Prague'
    }

    Operators: = != ~ IN

    :param str mapserv: url to mapserver
    :param str layer: layer name
    :param int maxfeatures: number of returned features
    :param int startindex: starting feature index
    :param Tuple.<dict> filters: tuple of filters

    :return: json-encoded result
    :rtype: dict
    """
    mywfs = WebFeatureService(url=mapserv, version='1.0.0')
    fes = None

    if filters:
        if bbox:
            filters.append({'operator': 'BBOX', 'value': bbox})
        fes = get_filter_root(get_filter_fes(filters))
        fes = etree.tostring(fes)

    if bbox and not filters:
        fes = None
    elif not bbox and filters:
        bbox = None
    elif bbox and filters:
        bbox = None

    layer_data = mywfs.getfeature(typename=[layer], filter=fes, bbox=bbox,
                                  featureid=None, outputFormat="GeoJSON",
                                  maxfeatures=maxfeatures, startindex=startindex)
    data = json.load(layer_data)

    for feature in data['features']:
        feature.pop('geometry')

    return data
def GetWFSLayer(u, p):
    start = time.time()

    # Separate the WFS URL & the layer name
    split_url = u.split('?')
    server_url = split_url[0]
    ows = server_url[-3:]
    print('The OGC standard is: ' + ows)
    spacename_wfs = split_url[1]

    tmp_chemin = p + spacename_wfs + "_.zip"
    chemin = tmp_chemin[:-5] + ".zip"
    if not os.path.exists(chemin):
        # Get the vector layer using OGC WFS standard
        wfs = WebFeatureService(server_url, version='1.0.0')
        getFeature = wfs.getfeature(typename=[spacename_wfs], outputFormat="shape-zip")
        print('Downloading... : ' + spacename_wfs)
        print("From: " + server_url)

        # Download the zipped shapefile
        data = getFeature.read()
        f = open(tmp_chemin, 'wb')
        f.write(data)
        f.close()

        # Delete .txt & .cst files from the zipped file
        zin = zipp(tmp_chemin, 'r')
        # zin.extractall(p)
        zout = zipp(chemin, 'w')
        for item in zin.infolist():
            buffer = zin.read(item.filename)
            ext = item.filename[-4:]
            if (ext != '.txt' and ext != '.cst'):
                zout.writestr(item, buffer)
        zout.close()
        zin.close()
        os.remove(tmp_chemin)

        # Unzip zipped shapefile
        os.system("unzip " + chemin + ' -d ' + p)

    # Calculate elapsed time
    temps = time.time() - start
    tps = round(temps, 2)
    temps_ms = str(tps)
    print("GetWFSLayer download time : " + temps_ms + " ms")
    return
def read_wfs_layer():
    wfs = WebFeatureService(url='https://kartta.hel.fi/ws/geoserver/avoindata/wfs',
                            version='2.0.0')
    out = wfs.getfeature(typename='avoindata:Rakennukset_alue_rekisteritiedot')
    s = out.read()
    s = s.encode('utf8')
    bio = io.BytesIO(s)
    df = gpd.read_file(bio)
    # df = df.drop(columns='geometry')
    # df['geometry'] = df.geometry.astype(str)
    return df
def test_filter():
    """A request without filtering will yield 600 entries. With filtering we expect only one.

    Note that this type of topological filtering only works (so far) with WFS 2.0.0 and
    POST requests.
    """
    wfs = WebFeatureService(SERVICE_URL, version="2.0.0")
    layer = "psf:level4"
    point = Point(id="random",
                  srsName="http://www.opengis.net/gml/srs/epsg.xml#4326",
                  pos=[-129.8, 55.44])
    f = fes2.Filter(fes2.Contains(propertyname="geom", geometry=point))
    r = wfs.getfeature(layer, outputFormat="application/json", method="POST", filter=f.toXML())
    assert json.load(r)["totalFeatures"] == 1
def downloadWFSToGeoDataFrame(url, version, bbox, typename):
    """Download a WFS from a given url

    :url: urlstring where WFS is located
    :version: WFS version to download
    :bbox: boundingbox (xmin, xmax, ymin, ymax)
    :typename: key:value pair needed on national georegister website to download certain WFS
    :returns: GDF: geodataframe with requested data
    """
    MapWFS = WebFeatureService(url=url, version=version)
    response = MapWFS.getfeature(typename=typename, bbox=bbox, maxfeatures=100,
                                 outputFormat='json', startindex=0)
    data = json.loads(response.read())
    GDF = gpd.GeoDataFrame.from_features(data['features'])
    return GDF
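# Hedged usage sketch for downloadWFSToGeoDataFrame: the endpoint, typename and bbox
# values are placeholders; any WFS able to return GeoJSON ('json') output should work
# the same way.
gdf = downloadWFSToGeoDataFrame(url='https://example.org/wfs',
                                version='2.0.0',
                                bbox=(120000, 480000, 125000, 485000),
                                typename='workspace:my_layer')
print(gdf.head())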
def test_xmlfilter_wfs_110():
    wfs = WebFeatureService('https://services.ga.gov.au/gis/stratunits/ows', version='1.1.0')
    filter_prop = PropertyIsLike(propertyname='stratunit:GEOLOGICHISTORY',
                                 literal='Cisuralian - Guadalupian', matchCase=True)
    filterxml = etree.tostring(filter_prop.toXML()).decode("utf-8")
    getfeat_params = {'typename': 'stratunit:StratigraphicUnit', 'filter': filterxml}
    response = wfs.getfeature(**getfeat_params).read()
    assert b'<stratunit:NAME>Ellen Harkins carbonaceous shale</stratunit:NAME>' in response
def get_data_wfs(tp):
    """
    Takes a string identifying the dataset to retrieve (a wfs.contents identifier),
    performs the requests against the WFS API, parses the response and returns a
    list of etree elements.
    """
    wfs = WebFeatureService(url_wfs, version=vrsn)
    response = wfs.getfeature(typename=tp)
    s = response.getvalue()
    filename = make_file(tp, s)
    tree = etree.parse(filename)
    element_list = tree.xpath(
        '//*[local-name()="FeatureCollection"]/*[local-name()="featureMember"]'
    )
    return element_list
def _download(self):
    """
    Downloads the data from the WFS.

    :return: Top element of the parsed XML document.
    :rtype: xml.etree.ElementTree
    """
    logger.info('Getting data from the server.')
    try:
        wfs = WebFeatureService(
            url='http://kartta.hel.fi/ws/geoserver/avoindata/wfs',
            version='2.0.0',
        )
        response = wfs.getfeature(
            typename='avoindata:liikennemerkkipilotti_pysakointipaikat',
        )
        return etree.fromstring(bytes(response.getvalue(), 'UTF-8'))
    except Exception:
        logger.error('Unable to get data from the server.', exc_info=True)
def __init__(self, locType, locId, deltaHours=0):
    ns = {
        "BsWfs": "http://xml.fmi.fi/schema/wfs/2.0",
        "gml": "http://www.opengis.net/gml/3.2",
        "wfs": "http://www.opengis.net/wfs/2.0"
    }
    wfs20 = WebFeatureService(
        url='https://opendata.fmi.fi/wfs?request=GetCapabilities',
        version='2.0.0')
    try:
        resp = wfs20.getfeature(
            storedQueryID='fmi::observations::weather::simple',
            storedQueryParams={locType: locId})
    except Exception:
        print("Error fetching data. Perhaps place is not known.")
        quit()

    resp_xml = resp.read().decode("utf-8")
    root = ET.fromstring(resp_xml)

    self.loctype = locType
    self.locid = locId
    lastTimestamp = root.findall(".//BsWfs:Time", ns)[-1].text
    self.timestamp = lastTimestamp
    self.pos = root.find(".//gml:pos", ns).text
    self.parameter = {}
    self.parameterDelta = {}

    for BsWfsElement in root.findall(
            "./wfs:member/BsWfs:BsWfsElement/[BsWfs:Time='" + lastTimestamp + "']", ns):
        self.parameter[BsWfsElement.findall(".//BsWfs:ParameterName", ns)[0].text] = \
            BsWfsElement.findall(".//BsWfs:ParameterValue", ns)[0].text

    if deltaHours != 0:
        self.deltaHours = deltaHours
        dtLastTimestamp = dateutil.parser.isoparse(lastTimestamp)
        dtDelta = timedelta(hours=deltaHours)
        dtdeltaTimestamp = dtLastTimestamp - dtDelta
        self.deltaTimestamp = dtdeltaTimestamp.strftime("%Y-%m-%dT%H:%M:%S") + 'Z'
        for BsWfsElement in root.findall(
                "./wfs:member/BsWfs:BsWfsElement/[BsWfs:Time='" + self.deltaTimestamp + "']", ns):
            self.parameterDelta[BsWfsElement.findall(".//BsWfs:ParameterName", ns)[0].text] = \
                BsWfsElement.findall(".//BsWfs:ParameterValue", ns)[0].text
def AddDataPoint():
    global last_timestamp
    wfs = WebFeatureService(url='http://opendata.fmi.fi/wfs', version='2.0.0')
    response = wfs.getfeature(
        storedQueryID='fmi::observations::weather::simple',
        storedQueryParams={'Place': 'vaasa'})
    xml = response.read()
    root = ET.fromstring(xml)

    for latestData in root[-11:]:
        for member in latestData:
            memberKey = member[2].text
            memberValue = member[3].text
            time = member[1].text
            if memberKey == 't2m':
                temperature = memberValue
            elif memberKey == 'ws_10min':
                windSpeed = memberValue
            elif memberKey == 'wg_10min':
                windGust = memberValue
            elif memberKey == 'wd_10min':
                windDirection = memberValue
            elif memberKey == 'rh':
                relativeHumidity = memberValue
            elif memberKey == 'td':
                dewPoint = memberValue
            elif memberKey == 'r_1h':
                precipitationAmount = memberValue
            elif memberKey == 'snow_aws':
                snowDepth = memberValue
            elif memberKey == 'p_sea':
                pressure = memberValue
            elif memberKey == 'vis':
                visibility = memberValue

    if time != last_timestamp:
        last_timestamp = time
        SendDataToIngester(temperature, time, "Temperature")
        SendDataToIngester(windSpeed, time, "WindSpeed")
        SendDataToIngester(windGust, time, "WindGust")
        SendDataToIngester(windDirection, time, "WindDirection")
        SendDataToIngester(relativeHumidity, time, "RelativeHumidity")
        SendDataToIngester(dewPoint, time, "DewPoint")
        SendDataToIngester(precipitationAmount, time, "Precipitation")
        SendDataToIngester(pressure, time, "Pressure")
        SendDataToIngester(visibility, time, "Visibility")
def _download(self):
    """!Downloads data from WFS server using OWSLib driver

    @return temp_map with downloaded data
    """
    grass.message(_("Downloading data from WFS server..."))

    if self.bbox:
        query_bbox = (
            self.bbox["minx"],
            self.bbox["miny"],
            self.bbox["maxx"],
            self.bbox["maxy"],
        )
    else:
        query_bbox = self.bbox

    wfs = WebFeatureService(url=self.o_url, version=self.o_wfs_version)
    try:
        wfs_data = wfs.getfeature(
            typename=[self.o_layers],
            srsname="EPSG:" + str(self.o_srs),
            maxfeatures=self.o_maximum_features,
            bbox=query_bbox,
        )
    # TODO do it better
    except ServiceException:
        grass.fatal(_("Server returned exception"))
        grass.debug(self.o_url)

    temp_map = self._temp()

    # download data into temporary file
    try:
        temp_map_opened = open(temp_map, "w")
        temp_map_opened.write(wfs_data.read())
    except IOError:
        grass.fatal(_("Unable to write data into tempfile"))
    finally:
        temp_map_opened.close()

    return temp_map
def wfs_prepare(bbox, URL, layer): """Takes a bounding box and a WFS service URL. Requests features in the bounding box, finds polygons that are within the bounding box or intersect it. Crops the intersecting geometries to the extents of the bounding box, and returns the contained and cropped geometries. """ A = Point(bbox[0][0], bbox[1][1]) B = Point(bbox[0][1], bbox[1][1]) C = Point(bbox[0][1], bbox[1][0]) D = Point(bbox[0][0], bbox[1][0]) bbox_lines = LineString([A, B, C, D, A]) bbox_object = box(bbox[0][0], bbox[1][0], bbox[0][1], bbox[1][1]) wfs = WebFeatureService(url=URL, version='2.0.0') response = wfs.getfeature(typename=layer, bbox=(bbox[0][0], bbox[1][0], bbox[0][1], bbox[1][1]), outputFormat='json') response_json = json.loads(response.read()) for feature in response_json['features']: rings = feature['geometry']['coordinates'] for ring_coords, i in zip(rings, range(len(rings))): ring = [vx[0:2] for vx in ring_coords] feature['geometry']['coordinates'][i] = ring out = [] for feature in response_json['features']: merger = [bbox_lines] rings = feature['geometry']['coordinates'] for ring_coords in rings: ring = Polygon(ring_coords) if ring.within(bbox_object): out.append(shape(feature['geometry'])) elif ring.intersects(bbox_object): merger.append(ring.boundary) if len(merger) != 1: if len(rings) > 1: poly = Polygon(rings[0], rings[1:]) else: poly = Polygon(rings[0]) merged = linemerge(merger) borders = unary_union(merged) polygons = polygonize(borders) for p in polygons: if p.within(bbox_object) and poly.contains(p.buffer(-1e-8)): feature['geometry']['coordinates'] = [p.exterior.coords] feature['properties']['Shape_Leng'] = p.length feature['properties']['Shape_Area'] = p.area out.append(shape(feature['geometry'])) return out
def get_roads(shape, outf):
    # get roads
    url = 'https://geodata.nationaalgeoregister.nl/nwbwegen/wfs?request=GetCapabilities'
    wfs = WebFeatureService(url=url, version='2.0.0')
    layer = list(wfs.contents)[0]
    response = wfs.getfeature(typename=layer, bbox=tuple(shape.total_bounds)).read()

    fname = outf.replace('shp', 'gml')
    with open(fname, 'wb') as file:
        file.write(response)

    gdf_road = gpd.read_file(fname)[['gml_id', 'geometry']]
    gdf_road['label'] = 0
    gdf_road = gdf_road.dissolve('label')

    if os.path.exists(fname):
        os.remove(fname)
    return gdf_road
def retrieve_bag(): # Connect to BAG WFS service and parse to ElementTree wfs11 = WebFeatureService( "https://geodata.nationaalgeoregister.nl/bag/wfs?", version="1.1.0") wfs_response = wfs11.getfeature(typename="bag:pand", bbox=(93020, 436021, 94263, 436884)) bag_parse = ET.parse(wfs_response) bag = bag_parse.getroot() bag_polygons = [] bag_ids = [] # find all BAG panden for pand in bag.findall(".//{http://bag.geonovum.nl}pand"): # find all BAG ids for bag_id in pand.findall(".//{http://bag.geonovum.nl}identificatie"): bag_ids.append(bag_id.text) # find all BAG geometries bag_polygon = [] for geom in pand.findall(".//{http://www.opengis.net/gml}posList"): # place in array to remove z-coordinates (they are always 0) and format them for a Shapely polygon np_points = (np.array(geom.text.split())).astype(float) np_points = np_points[np_points != 0] np_points = np.split(np_points, len(np_points) / 2) bag_polygon.append( list(map(tuple, np_points)) ) # some features have interiors, thus don't make a polygon yet! # in this case, the feature's geometry only has an exterior boundary if len(bag_polygon) == 1: bag_polygons.append(Polygon(bag_polygon[0])) # in this case, multiple geometries have been found for the feature, which means it has interior boundaries as well elif len(bag_polygon) > 1: bag_polygons.append(Polygon(bag_polygon[0], bag_polygon[1:])) return bag_polygons, bag_ids
def get_borehole_idents_and_urls(self, maxids=None):
    """
    Generates a dictionary containing identifiers and urls for boreholes with NVCL
    scanned data at this endpoint

    :param maxids: The maximum number of boreholes to request or None for no limit
    :type maxids: integer
    :returns: a dictionary of urls keyed by borehole identifiers
    """
    wfs = WebFeatureService(self.urls['wfsurl'], version="1.1.0")
    wfsresponse = wfs.getfeature(typename="nvcl:ScannedBoreholeCollection",
                                 maxfeatures=maxids)
    xmltree = etree.parse(wfsresponse)

    idents = {}
    bhstring = ".//{http://www.auscope.org/nvcl}scannedBorehole"
    for match in xmltree.findall(bhstring):
        idents[match.get('{http://www.w3.org/1999/xlink}title')] = \
            match.get('{http://www.w3.org/1999/xlink}href')
    return idents
def get_hydrobasins_location_wfs(coordinates=None, level=12, lakes=True):
    """Return features from the USGS HydroBASINS data set using bounding box coordinates
    and WFS 1.1.0 protocol.

    For geographic rasters, subsetting is based on WGS84 (Long, Lat) boundaries. If not
    geographic, subsetting is based on projected coordinate system (Easting, Northing)
    boundaries.

    Parameters
    ----------
    coordinates : sequence
      Geographic coordinates of the bounding box (left, down, right, up).
    level : int
      Level of granularity requested for the lakes vector (1:12). Default: 12.
    lakes : bool
      Whether or not the vector should include the delimitation of lakes.

    Returns
    -------
    str
      A GML-encoded vector feature.
    """
    from owslib.wfs import WebFeatureService

    layer = 'public:USGS_HydroBASINS_{}na_lev{}'.format('lake_' if lakes else '', level)

    if coordinates is not None:
        wfs = WebFeatureService('http://boreas.ouranos.ca/geoserver/wfs',
                                version='1.1.0', timeout=30)
        try:
            resp = wfs.getfeature(typename=layer, bbox=coordinates,
                                  srsname='urn:x-ogc:def:crs:EPSG:4326')
        except Exception as e:
            raise Exception(e)
    else:
        raise NotImplementedError

    data = resp.read()
    return data
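# Hedged usage sketch for get_hydrobasins_location_wfs: coordinates follow the
# (left, down, right, up) convention documented above; the bounding box values are
# illustrative only.
gml = get_hydrobasins_location_wfs(coordinates=(-75.0, 45.0, -73.0, 46.0), level=12, lakes=True)
print(gml[:200])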
def load_layer_data(request, template='layers/layer_detail.html'):
    context_dict = {}
    data_dict = json.loads(request.POST.get('json_data'))
    layername = data_dict['layer_name']
    filtered_attributes = data_dict['filtered_attributes']
    workspace, name = layername.split(':')
    location = "{location}{service}".format(** {
        'location': settings.OGC_SERVER['default']['LOCATION'],
        'service': 'wms',
    })

    try:
        username = settings.OGC_SERVER['default']['USER']
        password = settings.OGC_SERVER['default']['PASSWORD']
        wfs = WebFeatureService(location, version='1.1.0', username=username, password=password)
        response = wfs.getfeature(typename=name, propertyname=filtered_attributes,
                                  outputFormat='application/json')
        x = response.read()
        x = json.loads(x)
        features_response = json.dumps(x)
        decoded = json.loads(features_response)
        decoded_features = decoded['features']
        properties = {}
        for key in decoded_features[0]['properties']:
            properties[key] = []

        # loop the dictionary based on the values on the list and add the properties
        # in the dictionary (if doesn't exist) together with the value
        for i in range(len(decoded_features)):
            for key, value in decoded_features[i]['properties'].items():
                if value != '' and isinstance(value, (string_types, int, float)):
                    properties[key].append(value)

        for key in properties:
            properties[key] = list(set(properties[key]))
            properties[key].sort()
        context_dict["feature_properties"] = properties
    except Exception:
        print("Possible error with OWSLib.")
    return HttpResponse(json.dumps(context_dict), content_type="application/json")
def get_features(wfs_url, layer, verbose=False):
    """Get feature from Web Feature Service (WFS) in GeoJSON format

    Input:
        wfs_url: URL for web feature service. E.g. http://www.aifdr.org:8080/geoserver/ows?
        layer: Feature layer name as <workspace>:<layer>
        verbose [optional]: Flag controlling the verbosity level. Default is False.

    Output:
        GEOJSON dictionary or None.
    """
    if verbose:
        print('Retrieving %s from %s' % (layer, wfs_url))

    wfs = WebFeatureService(wfs_url, version='1.0.0')
    if layer not in wfs.contents.keys():
        return None

    response = wfs.getfeature(typename=[layer], outputFormat='json', maxfeatures=1)
    return geojson.loads(response.read())
def get_features(wfs_url, layer, verbose=False):
    """Get feature from Web Feature Service (WFS) in GeoJSON format

    Input:
        wfs_url: URL for web feature service. E.g. http://www.aifdr.org:8080/geoserver/ows?
        layer: Feature layer name as <workspace>:<layer>
        verbose [optional]: Flag controlling the verbosity level. Default is False.

    Output:
        GEOJSON dictionary or None.
    """
    if verbose:
        print('Retrieving %s from %s' % (layer, wfs_url))

    wfs = WebFeatureService(wfs_url, version='1.0.0')
    if layer not in wfs.contents.keys():
        return None

    # OWSLib's getfeature() expects the response format as outputFormat, not format
    response = wfs.getfeature(typename=[layer], outputFormat='json')
    return geojson.loads(response.read())
def get_wfs(server_url, spacename_wfs):
    chemin = '/home/tmp/' + spacename_wfs + '.gml'
    if not os.path.exists(chemin):
        wfs = WebFeatureService(server_url + "/wfs/", version='1.0.0')
        vector = spacename_wfs
        print("Downloading the WFS: " + spacename_wfs)
        print("From: " + server_url)
        response = wfs.getfeature(typename=[vector])
        data = response.read()
        f = open(chemin, 'wb')
        f.write(data)
        f.close()
        print("Done")
    return chemin
def download_bag_buildings(bbox, file_dir):
    """Connects to the Dutch geodata registry and retrieves all buildings of the BAG.

    // Deprecated, WFS does not supply all buildings
    """
    for i in range(0, 100):
        wfs = WebFeatureService(
            url='https://geodata.nationaalgeoregister.nl/bag/wfs?request=GetCapabilities',
            version='2.0.0')
        response = wfs.getfeature(typename='bag:pand', bbox=bbox,
                                  startindex=i * 1000)  # Max features flag does not work

        filename = file_dir + "temp_bag{}.gml".format(i)
        out = open(filename, 'wb')
        out.write(bytes(response.read(), 'UTF-8'))
        out.close()

        # Check size of response, breaks once no response is given
        # Appended to prevent locking the memory address using os.seek_end, else the file cannot be written
        if response.seek(0, os.SEEK_END) < 1000:
            os.remove(filename)
            break
def save(self):
    wfs = WebFeatureService(url=self.url, version=self.version)

    # check that typename is in the list of available feature types
    try:
        idx = list(wfs.contents).index(self.typename)
    except ValueError:
        idx = -1
        print("Invalid typename " + self.typename + ' with WFS service ' + self.url)

    if idx >= 0:
        response = wfs.getfeature(typename=self.typename, outputFormat=self.format)
        if self.maxlines != '' and self.format == 'csv':
            ans = ""
            for i in range(int(self.maxlines)):
                ans += response.readline()
        else:
            ans = ''.join(response.readlines())
        out = open(self.output, 'wb')
        out.write(bytes(ans.encode('utf-8', 'ignore')))
        out.close()
def getMapunitFeaturesForBoundingBox(outputDir, bbox,
                                     mapunitExtended=False, tileBbox=False):
    """ Query USDA Soil Data Mart for SSURGO Mapunit features with a given bounding box.
        Features will be written to one or more GML files, one file for each bboxTile tile,
        stored in the specified output directory. The filename will be returned as a string.
        Will fetch SSURGO tabular data (see ssurgolib.attributequery.attributeList for a list
        of attributes) and join those data to the features in the GML files(s).

        @note Will silently exit if features already exist.

        @param outputDir String representing the absolute/relative path of the directory into which features should be written
        @param bbox A dict containing keys: minX, minY, maxX, maxY, srs, where srs='EPSG:4326'
        @param mapunitExtended True if extended mapunit features should be fetched.
        @param tileBoundingBox True if bounding box should be tiled if extent exceeds featurequery.MAX_SSURGO_EXTENT

        @return A list of strings representing the name of the GML file(s) to which the mapunit features were saved.

        @exception IOError if output directory is not a directory
        @exception IOError if output directory is not writable
        @exception Exception if bounding box area is greater than MAX_SSURGO_EXTENT
    """
    if not os.path.isdir(outputDir):
        raise IOError(errno.ENOTDIR, "Output directory %s is not a directory" % (outputDir,))
    if not os.access(outputDir, os.W_OK):
        raise IOError(errno.EACCES, "Not allowed to write to output directory %s" % (outputDir,))
    outputDir = os.path.abspath(outputDir)

    if mapunitExtended:
        typeName = 'MapunitPolyExtended'
    else:
        typeName = 'MapunitPoly'

    if tileBbox:
        bboxes = tileBoundingBox(bbox, MAX_SSURGO_EXTENT)
        sys.stderr.write("Dividing bounding box %s into %d tiles\n" % (str(bbox), len(bboxes)))
    else:
        if calculateBoundingBoxAreaSqMeters(bbox) > MAX_SSURGO_EXTENT:
            raise Exception("Bounding box area is greater than %f sq. meters" % (MAX_SSURGO_EXTENT,))
        bboxes = [bbox]

    gmlFiles = []
    for bboxTile in bboxes:
        minX = bboxTile['minX']; minY = bboxTile['minY']; maxX = bboxTile['maxX']; maxY = bboxTile['maxY']
        bboxLabel = str(minX) + "_" + str(minY) + "_" + str(maxX) + "_" + str(maxY)

        gmlFilename = "%s_bbox_%s-attr.gml" % (typeName, bboxLabel)
        gmlFilepath = os.path.join(outputDir, gmlFilename)

        if not os.path.exists(gmlFilepath):
            sys.stderr.write("Fetching SSURGO data for sub bboxTile %s\n" % bboxLabel)

            wfs = WebFeatureService(WFS_URL, version='1.0.0')
            filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)
            gml = wfs.getfeature(typename=(typeName,), filter=filter, propertyname=None)

            # Write intermediate GML to a file
            intGmlFilename = "%s_bbox_%s.gml" % (typeName, bboxLabel)
            intGmlFilepath = os.path.join(outputDir, intGmlFilename)
            out = open(intGmlFilepath, 'w')
            out.write(gml.read())
            out.close()

            # Parse GML to get list of MUKEYs
            gmlFile = open(intGmlFilepath, 'r')
            ssurgoFeatureHandler = SSURGOFeatureHandler()
            xml.sax.parse(gmlFile, ssurgoFeatureHandler)
            gmlFile.close()
            mukeys = ssurgoFeatureHandler.mukeys

            # Get attributes (ksat, texture, %clay, %silt, and %sand) for all components in MUKEYS
            attributes = getParentMatKsatTexturePercentClaySiltSandForComponentsInMUKEYs(mukeys)

            # Compute weighted average of soil properties across all components in each map unit
            avgAttributes = computeWeightedAverageKsatClaySandSilt(attributes)

            # Join map unit component-averaged soil properties to attribute table in GML file
            gmlFile = open(intGmlFilepath, 'r')
            joinedGmlStr = joinSSURGOAttributesToFeaturesByMUKEY(gmlFile, typeName, avgAttributes)
            gmlFile.close()

            # Write joined GML to a file
            out = open(gmlFilepath, 'w')
            out.write(joinedGmlStr)
            out.close()

            # Delete intermediate GML file
            os.unlink(intGmlFilepath)

        gmlFiles.append(gmlFilename)

    # TODO: join tiled data if tileBbox
    return gmlFiles
class WoudcClient(object):
    """WOUDC Client"""

    def __init__(self, url='http://geo.woudc.org/ows', timeout=30):
        """
        Initialize a WOUDC Client.

        :returns: instance of pywoudc.WoudcClient
        """
        self.url = url
        """The URL of the WOUDC data service"""

        self.timeout = timeout
        """Time (in seconds) after which requests should timeout"""

        self.about = 'http://woudc.org/about/data-access.php'
        """The About Data Access page"""

        self.outputformat = 'application/json; subtype=geojson'
        """The default outputformat when requesting WOUDC data"""

        self.maxfeatures = 25000
        """The default limit of records to return"""

        LOGGER.info('Contacting %s', self.url)
        self.server = WebFeatureService(self.url, '1.1.0', timeout=self.timeout)
        """The main WOUDC server"""

    def get_station_metadata(self, raw=False):
        """
        Download WOUDC station metadata

        :param raw: a boolean specifying whether to return the raw GeoJSON
                    payload as a string (default is False)
        :returns: dictionary of GeoJSON payload
        """
        LOGGER.info('Fetching station metadata')
        return self._get_metadata('stations', raw)

    def get_instrument_metadata(self, raw=False):
        """
        Download WOUDC instrument metadata

        :param raw: a boolean specifying whether to return the raw GeoJSON
                    payload as a string (default is False)
        :returns: dictionary of GeoJSON payload
        """
        LOGGER.info('Fetching instrument metadata')
        return self._get_metadata('instruments', raw)

    def get_contributor_metadata(self, raw=False):
        """
        Download WOUDC contributors metadata

        :param raw: a boolean specifying whether to return the raw GeoJSON
                    payload as a string (default is False)
        :returns: dictionary of GeoJSON payload
        """
        LOGGER.info('Fetching contributor metadata')
        return self._get_metadata('contributors', raw)

    def get_data(self, typename, **kwargs):
        """
        Download WOUDC observations

        :param bbox: a list representing a bounding box spatial filter
                     (`minx, miny, maxx, maxy`)
        :param temporal: a list of two elements representing a time period
                         (start, end) which accepts the following types:
                         - :py:class:`datetime.date`
                         - :py:class:`datetime.datetime`
                         - string date (e.g. ``2012-10-30``)
                         - string datetime (e.g. ``2012-10-30 11:11:11``)
        :param property_name: a string representing the property name to apply
                              as filter against
        :param property_value: a string representing the value which filters
                               against `property_name`
        :param variables: a list of variables to return as part of the
                          response (default returns all)
        :param sort_property: a string representing the property on which to
                              sort results (default ``instance_datetime``)
        :param sort_order: a string representing sort order of response
                           (``asc`` or ``desc``). Default is ``asc``.
                           Applied if `sort_property` is specified

        :returns: list of WOUDC observations GeoJSON payload
        """
        constraints = []
        variables = '*'
        filter_string = None
        bbox = None
        temporal = None
        property_name = None
        property_value = None
        sort_property = None
        sort_order = 'asc'
        startindex = 0
        features = None
        feature_collection = None
        sort_descending = False

        LOGGER.info('Downloading dataset %s', typename)

        LOGGER.debug('Assembling query parameters')
        for key, value in kwargs.items():
            if key == 'bbox':
                bbox = value
            if key == 'temporal':
                temporal = value
            if key == 'property_name':
                property_name = value
            if key == 'property_value':
                property_value = str(value)
            if key == 'variables':
                variables = value
            if key == 'sortby':
                sort_property = value
            if key == 'sort_order':
                sort_order = value

        LOGGER.debug('Assembling constraints')
        if property_name is not None and property_value is not None:
            constraints.append(fes.PropertyIsEqualTo(property_name, property_value))
        if bbox is not None:
            if not isinstance(bbox, list) or len(bbox) != 4:
                raise ValueError('bbox must be list of minx, miny, maxx, maxy')
            LOGGER.debug('Setting spatial constraint')
            constraints.append(fes.BBox(bbox))
        if temporal is not None:
            if not isinstance(temporal, list) or len(temporal) != 2:
                msg = 'temporal must be list of start date, end date'
                raise ValueError(msg)
            LOGGER.info('Setting temporal constraint')
            temporal_start = date2string(temporal[0], 'begin')
            temporal_end = date2string(temporal[1], 'end')
            constraints.append(fes.PropertyIsBetween(
                'instance_datetime', temporal_start, temporal_end))

        if sort_order not in ['asc', 'desc']:
            raise ValueError('sort_order must be asc or desc')
        else:
            if sort_order == 'desc':
                sort_descending = True

        if variables != '*':
            if not isinstance(variables, list):
                raise ValueError('variables must be list')

        if constraints:
            LOGGER.debug('Combining constraints')
            flt = fes.FilterRequest()
            if len(constraints) == 1:
                LOGGER.debug('Single constraint')
                filter_string = flt.setConstraint(constraints[0], tostring=True)
            if len(constraints) > 1:
                LOGGER.debug('Multiple constraints')
                filter_string = flt.setConstraintList([constraints], tostring=True)

        LOGGER.info('Fetching observations')
        LOGGER.info('Filters:')
        LOGGER.info('bbox: %r', bbox)
        LOGGER.info('temporal: %r', temporal)
        LOGGER.info('attribute query: %r = %r', property_name, property_value)

        # page download and assemble single list of JSON features
        while True:
            LOGGER.debug('Fetching features %d - %d', startindex, startindex + self.maxfeatures)

            payload = self.server.getfeature(
                typename=typename,
                startindex=startindex,
                propertyname=variables,
                maxfeatures=self.maxfeatures,
                filter=filter_string,
                outputFormat=self.outputformat).read()

            LOGGER.debug('Processing response')
            if payload.isspace():
                LOGGER.debug('Empty response. Exiting')
                break

            try:
                features = json.loads(payload)
            except ValueError:
                msg = 'Query produced no results'
                LOGGER.info(msg)
                return None

            len_features = len(features['features'])
            LOGGER.debug('Found %d features', len_features)

            if feature_collection is None:
                feature_collection = features
            else:
                feature_collection['features'].extend(features['features'])

            if len_features < self.maxfeatures:
                break

            startindex = startindex + self.maxfeatures

        len_feature_collection = len(feature_collection['features'])
        LOGGER.info('Found %d total features', len_feature_collection)

        if sort_property is not None:
            LOGGER.info('Sorting response by %s', sort_property)
            feature_collection['features'].sort(
                key=lambda e: e['properties'][sort_property],
                reverse=sort_descending)

        return feature_collection

    def _get_metadata(self, typename, raw=False):
        """generic design pattern to download WOUDC metadata"""
        LOGGER.debug('Fetching data from server')
        features = self.server.getfeature(typename=typename, outputFormat=self.outputformat)

        LOGGER.debug('Processing response')
        if raw:
            LOGGER.info('Emitting raw GeoJSON response')
            return features.read()
        LOGGER.info('Emitting GeoJSON features as list')
        return json.loads(features.read())
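# Hedged usage sketch for WoudcClient.get_data: 'totalozone' and the bbox/temporal
# values are illustrative assumptions, not guaranteed layer names or extents.
client = WoudcClient()
observations = client.get_data('totalozone',
                               bbox=[-90, 40, -85, 45],
                               temporal=['2010-01-01', '2010-12-31'],
                               sort_order='asc')
if observations is not None:
    print(len(observations['features']))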
def main(OBJECTID, lck, featureclassname, idfieldname, count, length, getArea=False):
    """
    OBJECTID - the objectid of the feature from the wfs service
    lck - multiprocess lock
    featureclassname - the name of the feature class in PostGIS to intersect with the species data
    idfieldname - the unique field in the feature class that represents the primary key value
    count - how many features have been processed
    length - the total number of features to be processed
    getArea - boolean flag to indicate whether to capture the area of intersection
    """
    try:
        multiprocessing.current_process().cnt += 1
        devconn = dbconnect('species_dev')  # connect to Postgresql
        devconn.cur.execute("insert into species_wdpa_analysis_summary (objectid,wfs_requested) values (" + str(OBJECTID) + ",TRUE) RETURNING oid;")
        oid = devconn.cur.fetchone()[0]

        # create the filter for the species
        wfsfilter = "<ogc:Filter><ogc:PropertyIsEqualTo><ogc:PropertyName>OBJECTID</ogc:PropertyName><ogc:Literal>" + str(OBJECTID) + "</ogc:Literal></ogc:PropertyIsEqualTo></ogc:Filter>"
        # get the WFS Service Capabilities
        _wfs_ = WebFeatureService(WFS_URL + '?request=GetCapabilities', version='1.0.0')
        try:
            # get the species range as gml from the WFS service
            stream = _wfs_.getfeature(typename=['Andrew_SpeciesWFSLatLong:AllSpecies'], filter=wfsfilter, propertyname=[])
        except:
            raise Exception("update species_wdpa_analysis_summary set wfs_failed=TRUE where oid=" + str(oid))

        elementTree = ET.parse(stream)  # load the gml into memory
        featureMember = elementTree.find('{' + OPENGIS_NAMESPACE + '}featureMember')  # get the features
        if featureMember == None:  # error check
            raise Exception("update species_wdpa_analysis_summary set no_features_returned=TRUE where oid=" + str(oid))

        speciesid = getAttribute(elementTree, 'SpeciesID')  # get the SpeciesID
        if speciesid == None:  # error check
            raise Exception("update species_wdpa_analysis_summary set no_speciesid=TRUE where oid=" + str(oid))
        else:
            devconn.cur.execute("update species_wdpa_analysis_summary set speciesid=" + str(speciesid) + " where objectid=" + str(OBJECTID))

        presence = getAttribute(elementTree, 'PRESENCE')  # get the PRESENCE
        if presence == None:  # error check
            raise Exception("update species_wdpa_analysis_summary set no_presence_value=TRUE where oid=" + str(oid))

        polygons = elementTree.getiterator('{' + OPENGIS_NAMESPACE + '}Polygon')  # get the polygons
        if len(polygons) == 0:  # error check
            raise Exception("update species_wdpa_analysis_summary set no_wfs_polygons=TRUE where oid=" + str(oid))

        intersects = False
        liveconn = dbconnect('species_live')  # connect to Postgresql to do the intersection analysis
        for i in polygons:  # iterate through all of the species range polygons
            # intersect the species range features with the intersectingfeature features
            if getArea:
                sql = "select " + idfieldname + ", st_area(st_transform(st_intersection(ST_GeomFromGML('" + ET.tostring(i) + "',4326), geom),97099)) from " + featureclassname + " where st_intersects(ST_GeomFromGML('" + ET.tostring(i) + "',4326), geom)"
            else:
                # select the features that overlap the species range, e.g. select wdpaid from wdpa where st_intersects
                sql = "select " + idfieldname + " from " + featureclassname + " where st_intersects(ST_GeomFromGML('" + ET.tostring(i) + "',4326), geom)"
            liveconn.cur.execute(sql)  # execute the query
            intersectingfeatures = liveconn.cur.fetchall()  # get all of the records
            for intersectingfeature in intersectingfeatures:  # iterate through the intersectingfeatures
                sql = "insert into species_wdpa (speciesid, presence, wdpaid, objectid) VALUES (" + str(speciesid) + "," + str(presence) + "," + str(int(intersectingfeature[0])) + "," + str(OBJECTID) + ") RETURNING oid"
                print(sql)
                if getArea:
                    devconn.cur.execute("insert into species_wdpa (speciesid, presence, wdpaid, objectid,area) VALUES (" + str(speciesid) + "," + str(presence) + "," + str(int(intersectingfeature[0])) + "," + str(OBJECTID) + "," + str(intersectingfeature[1]) + ") RETURNING oid")
                else:
                    devconn.cur.execute("insert into species_wdpa (speciesid, presence, wdpaid, objectid) VALUES (" + str(speciesid) + "," + str(presence) + "," + str(int(intersectingfeature[0])) + "," + str(OBJECTID) + ") RETURNING oid")
                intersects = True
        if intersects == False:
            raise Exception("update species_wdpa_analysis_summary set intersecting_features=FALSE where oid=" + str(oid))
    except Exception as inst:
        devconn.cur.execute(inst.args[0])
    finally:
        devconn.cur.close()    # close the cursor
        liveconn.cur.close()   # close the cursor
        del devconn, liveconn  # release the connections
# <codecell>

from owslib.wfs import WebFeatureService

known_wfs = "http://solo.axiomalaska.com/geoserver/audubon/ows"
wfs = WebFeatureService(known_wfs, version='1.0.0')
print(sorted(wfs.contents.keys()))

# <markdowncell>

# ##### We already know that the 'audubon:audubon_ibas' layer is Important Bird Areas. Request 'geojson' response from the layer

# <codecell>

import geojson

geojson_response = wfs.getfeature(typename=['audubon:audubon_ibas'],
                                  maxfeatures=1,
                                  outputFormat="application/json",
                                  srsname="urn:x-ogc:def:crs:EPSG:4326").read()
feature = geojson.loads(geojson_response)

# <markdowncell>

# ##### Convert to Shapely geometry objects

# <codecell>

from shapely.geometry import shape

shapes = [shape(s.get("geometry")) for s in feature.get("features")]

# <markdowncell>

# ##### Map the geometry objects
from utilities import css_styles
css_styles()

# ## Load Important Bird Areas
# These are defined by [Audubon](http://web4.audubon.org/bird/iba/) and hosted by AOOS on a geoserver.
# Currently, they represent areas important for numerous species of birds.
#
# The single species core areas must be processed differently, because they have different metadata
# and include species information. They are contained in a separate notebook:
# [located here](http://127.0.0.1:8888/cc62f4c2-b95f-41a4-9b84-13c46ce9c37a).

# In[155]:

# Load our important bird areas again.
known_wfs = "http://solo.axiomalaska.com/geoserver/audubon_ibav3/ows"
wfs = WebFeatureService(known_wfs, version='1.0.0')
geojson_response = wfs.getfeature(typename=['audubon_ibav3:audubon_ibas_v3_20aug2014'],
                                  outputFormat="application/json",
                                  srsname="urn:x-ogc:def:crs:EPSG:4326").read()
geojson = json.loads(geojson_response)

# In[156]:

geometries = find_dict_keys('geometry', geojson)
shapes = [shape(g) for g in geometries]
ids = find_dict_keys('id', geojson)
ids = [(str(j).split('.', 1))[1] for j in ids]
sitenames = find_dict_keys('newsitenam', geojson)
sitenames = [str(s) for s in sitenames]
profiles = find_dict_keys('profile', geojson)
def getMapunitFeaturesForBoundingBox(config, outputDir, bbox, tileBbox=False, t_srs='EPSG:4326'):
    """ Query USDA Soil Data Mart for SSURGO MapunitPolyExtended features within a given bounding box.
        Features will be written to one or more shapefiles, one file for each bbox tile, stored in the
        specified output directory. Will fetch SSURGO tabular data (see ssurgolib.attributequery.ATTRIBUTE_LIST
        for a list of attributes) and join those data to the features in the final shapefile(s).

        @note Will silently exit if features already exist.

        @param config ConfigParser containing the section 'GDAL/OGR' and option 'PATH_OF_OGR2OGR'
        @param outputDir String representing the absolute/relative path of the directory into which features should be written
        @param bbox A dict containing keys: minX, minY, maxX, maxY, srs, where srs='EPSG:4326'
        @param tileBbox True if the bounding box should be tiled when its extent exceeds featurequery.MAX_SSURGO_EXTENT
        @param t_srs String representing the spatial reference system of the output shapefiles, of the form 'EPSG:XXXX'

        @return A list of strings representing the names of the shapefile(s) to which the mapunit features were saved.

        @exception IOError if output directory is not a directory
        @exception IOError if output directory is not writable
        @exception Exception if bounding box area is greater than MAX_SSURGO_EXTENT
        @exception Exception if no MUKEYs were returned
    """
    if not os.path.isdir(outputDir):
        raise IOError(errno.ENOTDIR, "Output directory %s is not a directory" % (outputDir,))
    if not os.access(outputDir, os.W_OK):
        raise IOError(errno.EACCES, "Not allowed to write to output directory %s" % (outputDir,))
    outputDir = os.path.abspath(outputDir)

    typeName = 'MapunitPolyExtended'

    if tileBbox:
        bboxes = tileBoundingBox(bbox, MAX_SSURGO_EXTENT)
        sys.stderr.write("Dividing bounding box %s into %d tiles\n" % (str(bbox), len(bboxes)))
    else:
        if calculateBoundingBoxArea(bbox, t_srs) > MAX_SSURGO_EXTENT:
            raise Exception("Bounding box area is greater than %f sq. meters" % (MAX_SSURGO_EXTENT,))
        bboxes = [bbox]

    outFiles = []
    for bboxTile in bboxes:
        minX = bboxTile['minX']; minY = bboxTile['minY']; maxX = bboxTile['maxX']; maxY = bboxTile['maxY']
        bboxLabel = str(minX) + "_" + str(minY) + "_" + str(maxX) + "_" + str(maxY)

        gmlFilename = "%s_bbox_%s-attr.gml" % (typeName, bboxLabel)
        gmlFilepath = os.path.join(outputDir, gmlFilename)

        if not os.path.exists(gmlFilepath):
            sys.stderr.write("Fetching SSURGO data for sub bboxTile %s\n" % bboxLabel)

            wfs = WebFeatureService(WFS_URL, version='1.0.0')
            filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)
            gml = wfs.getfeature(typename=(typeName,), filter=filter, propertyname=None)

            # Write intermediate GML to a file
            intGmlFilename = "%s_bbox_%s.gml" % (typeName, bboxLabel)
            intGmlFilepath = os.path.join(outputDir, intGmlFilename)
            out = open(intGmlFilepath, 'w')
            out.write(gml.read())
            out.close()

            # Parse GML to get list of MUKEYs
            gmlFile = open(intGmlFilepath, 'r')
            ssurgoFeatureHandler = SSURGOFeatureHandler()
            xml.sax.parse(gmlFile, ssurgoFeatureHandler)
            gmlFile.close()
            mukeys = ssurgoFeatureHandler.mukeys
            if len(mukeys) < 1:
                raise Exception("No SSURGO features returned from WFS query. SSURGO GML format may have changed.\nPlease contact the developer.")

            # Get attributes (ksat, texture, %clay, %silt, and %sand) for all components in MUKEYs
            attributes = getParentMatKsatTexturePercentClaySiltSandForComponentsInMUKEYs(mukeys)

            # Compute weighted average of soil properties across all components in each map unit
            avgAttributes = computeWeightedAverageKsatClaySandSilt(attributes)

            # Convert GML to GeoJSON so that we can add fields easily (GDAL 1.10+ validates the GML schema
            # and won't let us add fields)
            tmpGeoJSONFilename = convertGMLToGeoJSON(config, outputDir, intGmlFilepath, typeName)
            tmpGeoJSONFilepath = os.path.join(outputDir, tmpGeoJSONFilename)

            # Join map unit component-averaged soil properties to the attribute table in the GeoJSON file
            # gmlFile = open(intGmlFilepath, 'r')
            # joinedGmlStr = joinSSURGOAttributesToFeaturesByMUKEY(gmlFile, typeName, avgAttributes)
            # gmlFile.close()
            tmpGeoJSONFile = open(tmpGeoJSONFilepath, 'r')
            geojson = json.load(tmpGeoJSONFile)
            tmpGeoJSONFile.close()
            joinSSURGOAttributesToFeaturesByMUKEY_GeoJSON(geojson, typeName, avgAttributes)

            # Write joined GeoJSON to a file
            out = open(tmpGeoJSONFilepath, 'w')
            json.dump(geojson, out)
            out.close()

            # Convert GeoJSON to shapefile
            filename = os.path.splitext(intGmlFilename)[0]
            shpFilename = convertGeoJSONToShapefile(config, outputDir, tmpGeoJSONFilepath, filename, t_srs=t_srs)

            # Delete intermediate files
            os.unlink(intGmlFilepath)
            os.unlink(tmpGeoJSONFilepath)

            outFiles.append(shpFilename)

    # TODO: join tiled data if tileBbox
    return outFiles
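For illustration, a possible invocation of getMapunitFeaturesForBoundingBox; the bounding box, output directory, and ogr2ogr path are placeholders, and the config only needs the section and option named in the docstring.

try:
    from configparser import ConfigParser   # Python 3
except ImportError:
    from ConfigParser import ConfigParser   # Python 2

config = ConfigParser()
config.add_section('GDAL/OGR')
config.set('GDAL/OGR', 'PATH_OF_OGR2OGR', '/usr/bin/ogr2ogr')  # placeholder path

bbox = {'minX': -76.77, 'minY': 39.27, 'maxX': -76.70, 'maxY': 39.32, 'srs': 'EPSG:4326'}  # placeholder extent
shapefiles = getMapunitFeaturesForBoundingBox(config, '/tmp/ssurgo', bbox, tileBbox=False, t_srs='EPSG:4326')
print(shapefiles)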
from owslib.wfs import WebFeatureService

wfs11 = WebFeatureService(url='http://geoserv.weichand.de:8080/geoserver/wfs', version='1.1.0')

print(wfs11.identification.title)
[print(operation.name) for operation in wfs11.operations]
print(list(wfs11.contents))

response = wfs11.getfeature(typename='bvv:gmd_ex', bbox=(4500000, 5500000, 4500500, 5500500), srsname='urn:x-ogc:def:crs:EPSG:31468')

out = open('/tmp/data.gml', 'wb')
out.write(bytes(response.read(), 'UTF-8'))
out.close()
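Depending on the OWSLib version, read() on the GetFeature response returns either str or bytes, so the bytes() call above can fail; a small defensive variant:

data = response.read()
if not isinstance(data, bytes):
    data = data.encode('utf-8')
with open('/tmp/data.gml', 'wb') as out:
    out.write(data)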
def _getMapunitFeaturesForBoundingBoxTile(config, outputDir, bboxTile, typeName, currTile, numTiles):
    minX = bboxTile['minX']; minY = bboxTile['minY']; maxX = bboxTile['maxX']; maxY = bboxTile['maxY']
    bboxLabel = str(minX) + "_" + str(minY) + "_" + str(maxX) + "_" + str(maxY)

    gmlFilename = "%s_bbox_%s-attr.gml" % (typeName, bboxLabel)
    gmlFilepath = os.path.join(outputDir, gmlFilename)
    geoJSONLayername = "%s_bbox_%s-attr" % (typeName, bboxLabel)

    if not os.path.exists(gmlFilepath):
        sys.stderr.write("Fetching SSURGO data for tile %s of %s, bbox: %s\n" % (currTile, numTiles, bboxLabel))
        sys.stderr.flush()

        wfs = WebFeatureService(WFS_URL, version='1.1.0', timeout=SSURGO_WFS_TIMEOUT_SEC)
        filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)

        intGmlFilename = "%s_bbox_%s.gml" % (typeName, bboxLabel)
        intGmlFilepath = os.path.join(outputDir, intGmlFilename)

        ssurgoFeatureHandler = SSURGOFeatureHandler()
        downloadComplete = False
        downloadAttempts = 0
        while not downloadComplete:
            try:
                gml = wfs.getfeature(typename=typeName, filter=filter, propertyname=None)
                # Write intermediate GML to a file
                out = open(intGmlFilepath, 'w')
                out.write(gml.read())
                out.close()
                # Parse GML to get list of MUKEYs
                gmlFile = open(intGmlFilepath, 'r')
                xml.sax.parse(gmlFile, ssurgoFeatureHandler)
                gmlFile.close()
                downloadComplete = True
            except xml.sax.SAXParseException as e:
                # Try to re-download
                downloadAttempts += 1
                if downloadAttempts > SSURGO_GML_MAX_DOWNLOAD_ATTEMPTS:
                    raise Exception("Giving up on downloading tile {0} of {1} after {2} attempts. There may be something wrong with the web service. Try again later.".format(currTile, numTiles, downloadAttempts))
                else:
                    sys.stderr.write("Initial download of tile {0} of {1} possibly incomplete, error: {2}. Retrying...\n".format(currTile, numTiles, str(e)))
                    sys.stderr.flush()

        mukeys = ssurgoFeatureHandler.mukeys
        if len(mukeys) < 1:
            raise Exception("No SSURGO features returned from WFS query. SSURGO GML format may have changed.\nPlease contact the developer.")

        # Get attributes (ksat, texture, %clay, %silt, and %sand) for all components in MUKEYs
        attributes = getParentMatKsatTexturePercentClaySiltSandForComponentsInMUKEYs(mukeys)

        # Compute weighted average of soil properties across all components in each map unit
        avgAttributes = computeWeightedAverageKsatClaySandSilt(attributes)

        # Convert GML to GeoJSON so that we can add fields easily (GDAL 1.10+ validates the GML schema
        # and won't let us add fields)
        tmpGeoJSONFilename = convertGMLToGeoJSON(config, outputDir, intGmlFilepath, geoJSONLayername, flip_gml_coords=True)
        tmpGeoJSONFilepath = os.path.join(outputDir, tmpGeoJSONFilename)

        # Join map unit component-averaged soil properties to the attribute table in the GeoJSON file
        tmpGeoJSONFile = open(tmpGeoJSONFilepath, 'r')
        geojson = json.load(tmpGeoJSONFile)
        tmpGeoJSONFile.close()
        joinSSURGOAttributesToFeaturesByMUKEY_GeoJSON(geojson, typeName, avgAttributes)

        # Write joined GeoJSON to a file
        out = open(tmpGeoJSONFilepath, 'w')
        json.dump(geojson, out)
        out.close()

        # Delete intermediate files
        os.unlink(intGmlFilepath)

    return tmpGeoJSONFilepath
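A sketch of how the per-tile helper might be driven, assuming the tileBoundingBox helper and MAX_SSURGO_EXTENT constant from the same module plus a config object like the one sketched earlier; the bounding box is a placeholder.

bbox = {'minX': -76.8, 'minY': 39.2, 'maxX': -76.6, 'maxY': 39.4, 'srs': 'EPSG:4326'}  # placeholder extent
tiles = tileBoundingBox(bbox, MAX_SSURGO_EXTENT)
geojsonPaths = []
for i, tile in enumerate(tiles, start=1):
    # Each call fetches one tile, joins the soil attributes, and returns the joined GeoJSON path
    geojsonPaths.append(_getMapunitFeaturesForBoundingBoxTile(config, '/tmp/ssurgo', tile, 'MapunitPolyExtended', i, len(tiles)))
print(geojsonPaths)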
#!/usr/bin/python
# -*- coding: UTF-8 -*-

__author__ = "Juergen Weichand"

from owslib.wfs import WebFeatureService

wfs = WebFeatureService(url="http://geoserv.weichand.de:8080/geoserver/wfs", version="2.0.0", timeout=30)

# List StoredQueries
print("\nStoredQueries for %s" % wfs.identification.title)
for storedquery in wfs.storedqueries:
    print(storedquery.id, storedquery.title)

# List parameters for a given StoredQuery
storedquery = wfs.storedqueries[5]
print("\nStoredQuery parameters for %s" % storedquery.id)
for parameter in storedquery.parameters:
    print(parameter.name, parameter.type)

# GetFeature via StoredQuery
print("\nDownload data from %s" % wfs.identification.title)
response = wfs.getfeature(
    storedQueryID="GemeindeByGemeindeschluesselEpsg31468",
    storedQueryParams={"gemeindeschluessel": "09162000"}
)
out = open("/tmp/test-storedquery.gml", "wb")
out.write(response.read())
out.close()
print("... done")
# <codecell>

import numpy as np
import geojson
import cartopy.crs as ccrs

# Swap (lat, lon) to (lon, lat) ordering in place for every 'coordinates' array in a GeoJSON structure
def flip_geojson_coordinates(geo):
    if isinstance(geo, dict):
        for k, v in geo.iteritems():
            if k == "coordinates":
                z = np.asarray(geo[k])
                f = z.flatten()
                geo[k] = np.dstack((f[1::2], f[::2])).reshape(z.shape).tolist()
            else:
                flip_geojson_coordinates(v)
    elif isinstance(geo, list):
        for k in geo:
            flip_geojson_coordinates(k)

# <codecell>

# srs='EPSG:4326'  # v1.0 syntax
srs = 'urn:x-ogc:def:crs:EPSG:4326'  # v1.1 syntax
json_response = wfs.getfeature(typename=[shp[0]], propertyname=None, srsname=srs, outputFormat='application/json').read()
geo = geojson.loads(json_response)
flip_geojson_coordinates(geo)

# <codecell>

print geo.keys()

# <codecell>

print geo['type']

# <codecell>

geodetic = ccrs.Geodetic(globe=ccrs.Globe(datum='WGS84'))
def get_feature(url, typename, features):
    """Return the GeoJSON feature collection for the given typename and feature ids from a WFS 2.0.0 server."""
    wfs = WebFeatureService(url, version='2.0.0')
    resp = wfs.getfeature([typename], featureid=features, outputFormat='application/json')
    return json.loads(resp.read())
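A hypothetical call to get_feature; the endpoint, layer name, and feature ids are placeholders.

fc = get_feature('https://example.org/geoserver/wfs', 'topp:states', ['states.1', 'states.2'])  # placeholders
print(fc['type'], len(fc.get('features', [])))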
from owslib.wfs import WebFeatureService
from osgeo import ogr


class WFSDataServiceToReclineJS():

    def __init__(self, url, version="1.0.0"):
        self.wfs = WebFeatureService(url, version=version)
        self.type = self.wfs.identification.type
        self.version = self.wfs.identification.version
        self.title = self.wfs.identification.title
        self.abstract = self.wfs.identification.abstract

    def get_layer_list(self):
        return list(self.wfs.contents)

    def get_single_layer(self, layer):
        theseLayers = self.get_layer_list()
        return [i for i in theseLayers if i == layer]

    def get_service_operations(self):
        thisWFS = self.wfs.operations
        return [op.name for op in thisWFS]

    def get_GET_feature_operation(self):
        operations = self.get_service_operations()
        return [i for i in operations if i.endswith("GetFeature")][0]

    def get_service_methods(self, service_operation):
        thisWFS = self.wfs
        thisOperation = service_operation
        return thisWFS.getOperationByName(thisOperation).methods

    def get_service_method_URL(self, service_operation):
        thisWFS = self.wfs
        thisOperation = service_operation
        return thisWFS.getOperationByName('{http://www.opengis.net/wfs}GetFeature').methods['{http://www.opengis.net/wfs}Get']['url']

    def get_service_format_options(self, service_operation):
        thisWFS = self.wfs
        thisOperation = service_operation
        return thisWFS.getOperationByName(thisOperation).formatOptions

    def get_GML_format_option(self, service_operation):
        formatOptions = self.get_service_format_options(service_operation)
        return [i for i in formatOptions if i.endswith("GML2")][0]

    def get_response(self, layer):
        thisLayer = self.get_single_layer(layer)
        thisOperation = self.get_GET_feature_operation()
        thisGML = self.get_GML_format_option(thisOperation)
        response = self.wfs.getfeature(typename=thisLayer)
        return response

    def get_items(self):
        return self.wfs.items()

    def hack_up_a_layer_name(self, data_dict):
        data = data_dict.get("resource")
        if data.get("layer_name"):
            return data.get("layer_name")
        elif data.get("layer"):
            return data.get("layer")
        elif data.get("layers"):
            return data.get("layers")
        else:
            try:
                layer_list = self.get_layer_list()
                return layer_list[0]
            except Exception:
                return "Sorry, can't find a layer!"

    def make_recline_url(self, data_dict):
        data = data_dict
        thisLayer = self.hack_up_a_layer_name(data).lower()
        getMethod = self.get_GET_feature_operation()
        baseURL = self.get_service_method_URL(getMethod)
        baseURL += "&service=WFS&version=1.0.0&typeName="
        baseURL += thisLayer
        return baseURL

    def MakeReclineJSON(self, data_dict):
        json_obj = []
        attribs = []
        data = data_dict
        gml_wfs = self.make_recline_url(data)
        source = ogr.Open(gml_wfs)
        print source
        layer = source.GetLayerByIndex(0)
        print layer
        for feature in layer:
            json_obj.append(feature.ExportToJson(as_object=True))
        for i in json_obj:
            properties = i['properties']
            properties.update(dict(geometry=i['geometry']))
            attribs.append(properties)
        return attribs
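One way the class above might be exercised; the endpoint and layer name are placeholders, and the resource dict mimics the CKAN-style data_dict the methods expect.

service = WFSDataServiceToReclineJS('https://example.org/geoserver/wfs')  # placeholder endpoint
print service.title
print service.get_layer_list()

data_dict = {'resource': {'layer_name': 'example:layer'}}  # placeholder layer
print service.make_recline_url(data_dict)
records = service.MakeReclineJSON(data_dict)
print len(records)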
get_feat = wfs.getOperationByName('GetFeature')
# help(get_feat)

for layer in list(wfs.contents):
    try:
        print(u'Layer: %s, Features: %s, SR: %s...' % (wfs[layer].title, wfs[layer].abstract, wfs[layer].crsOptions))
        print(wfs[layer].boundingBox)
        print(wfs[layer].boundingBoxWGS84)
        print(wfs[layer].keywords)
        # response = wfs.describefeaturetype()
        # print dir(response)
        response = wfs.getfeature(typename=(layer,))
        print(type(response))
        numb = response.read().find('<wfs:FeatureCollection')
        print(numb)
    except UnicodeEncodeError:
        title = wfs[layer].title
        print title.encode("UTF-8")
        abstract = wfs[layer].abstract
        if abstract:
            print abstract.encode("UTF-8")
        print wfs[layer].id
        print wfs[layer].keywords
        print(wfs[layer].boundingBox)
        print(wfs[layer].boundingBoxWGS84)
        print(wfs[layer].metadataUrls)
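The commented-out describefeaturetype() call hints at schema inspection; OWSLib also ships a convenience helper for that. A short sketch, assuming the service object exposes its endpoint as wfs.url and the layer is served with WFS 1.0.0:

from owslib.feature.schema import get_schema

layer = list(wfs.contents)[0]
schema = get_schema(wfs.url, layer, version='1.0.0')
print(schema['properties'])    # field name -> type mapping
print(schema.get('geometry'))  # geometry type, if reported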
url = 'http://gis.nature.cz/arcgis/services/UzemniOchrana/ChranUzemi/MapServer/WFSServer'
chranena_uzemi_wfs = WebFeatureService(url)
for rec in chranena_uzemi_wfs.contents:
    print (rec)

identifier = u'ChranUzemi:Zonace_velkoplošného_zvláště_chráněného_území'
print (chranena_uzemi_wfs.contents[identifier])
print ('{}\n{}'.format(chranena_uzemi_wfs.contents[identifier].boundingBox, chranena_uzemi_wfs.contents[identifier].crsOptions))

# getfeature does not support UTF-8 typenames; even encoding the name this way may not work
identifier = 'ChranUzemi:Zonace_velkoplo\xc5\xa1n\xc3\xa9ho_zvl\xc3\xa1\xc5\xa1t\xc4\x9b_chr\xc3\xa1n\xc4\x9bn\xc3\xa9ho_\xc3\xbazem\xc3\xad'
features = chranena_uzemi_wfs.getfeature([identifier])
print (features)
print (features.read())

cuzk = WebFeatureService('http://geoportal.cuzk.cz/wfs_au/wfservice.aspx', version="2.0.0")
for c in cuzk.contents:
    print (c)

kraj = cuzk.getfeature(['gmgml:KRAJ'])
print (kraj.read())

vuv = WebFeatureService('http://ags.vuv.cz/arcgis/services/inspire/priority_datasets/MapServer/WFSServer', version="2.0.0")
for c in vuv.contents:
    print (c)
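Since getfeature struggles with the non-ASCII layer name above, one workaround is to build the GetFeature request by hand and let URL encoding take care of the typename. A rough Python 2 sketch, reusing the url and identifier defined above; the feature limit is a placeholder.

import urllib

params = urllib.urlencode({
    'service': 'WFS',
    'version': '1.1.0',
    'request': 'GetFeature',
    'typename': identifier,   # the UTF-8 encoded layer name defined above
    'maxFeatures': '10',      # placeholder limit
})
resp = urllib.urlopen(url + '?' + params)  # url is the ChranUzemi endpoint defined above
print (resp.read()[:500])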