Esempio n. 1
0
    def discovery(self, fmisid: int, starttime: datetime, endtime: datetime):
        """Run the configured FMI WFS stored query for one station and window.

        Parameters
        ----------
        fmisid : int
            FMI station id (e.g. Kiikala: 100967).
        starttime, endtime : datetime
            Query window, forwarded to the stored query as-is.

        Returns
        -------
        str
            The parsed WFS response re-serialized as a UTF-8 XML string.
        """
        queryparams = {
            'fmisid': fmisid,  # Kiikala fmisid: 100967
            'starttime': starttime,
            'endtime': endtime,
            'timestep': 60  # minutes between data points
        }

        # BUG FIX: a stray breakpoint() debug trap and commented-out debug
        # file dump were left in this method; both removed.
        wfs = WebFeatureService(FMIDataAccess.FMIWFS,
                                version=FMIDataAccess.FMIWFS_VERSION)
        response = wfs.getfeature(
            storedQueryID=FMIDataAccess.FMIWFS_STORED_QUERY_ID,
            storedQueryParams=queryparams)

        tree = self.tree_from_response(responsebody=response)
        return etree.tostring(tree).decode('utf-8')
Esempio n. 2
0
def available_maps(storedQueryID='fmi::radar::composite::rr',
                   resolution_scaling=1, **kws):
    """
    Query links to available radar data.

    If given, start and end times are passed as query parameters, e.g.:
    starttime='2017-10-17T07:00:00Z', endtime='2017-10-17T07:30:00Z'

    Returns:
         Series: WMS links with timezone aware index in UTC time
    """
    api_key = environ['FMI_API_KEY']
    wfs = WebFeatureService(
        url='http://data.fmi.fi/fmi-apikey/{}/wfs'.format(api_key),
        version='2.0.0')
    response = wfs.getfeature(storedQueryID=storedQueryID, storedQueryParams=kws)
    root = etree.fromstring(response.read().encode('utf8'))

    # One GridSeriesObservation per available map; map its time position
    # to the WMS file reference it carries.
    member_query = 'wfs:member/omso:GridSeriesObservation'
    file_query = 'om:result/gmlcov:RectifiedGridCoverage/gml:rangeSet/gml:File/gml:fileReference'
    time_query = 'om:resultTime/gml:TimeInstant/gml:timePosition'
    links = {
        member.find(time_query, root.nsmap).text:
            member.find(file_query, root.nsmap).text
        for member in root.findall(member_query, root.nsmap)
    }

    series = pd.Series(links)
    series.index = pd.DatetimeIndex(series.index, tz=pytz.utc)
    series = series.apply(scale_url_width_height, factor=resolution_scaling)
    return series.sort_index()
def fromNodeIdToCsv(targetNodeId):
    """Fetch "statistische Bezirke" polygons from the Dresden Kommisdd WFS
    and pass them to transformToCsv.

    Parameters
    ----------
    targetNodeId
        Forwarded unchanged to transformToCsv.
        # NOTE(review): the WFS itself is always queried with the hard-coded
        # node id '189' regardless of targetNodeId -- confirm this is intended.

    Returns
    -------
    Whatever transformToCsv returns for the collected district dict.
    """
    KOMMISDD_NS = '{http://www.cardogis.com/kommisdd}'
    GML_NS = '{http://www.opengis.net/gml/3.2}'

    nodeId = '189'
    wfs = WebFeatureService(url='https://kommisdd.dresden.de/net3/public/ogcsl.ashx?NODEID='+nodeId+'&Service=WFS&', version='2.0.0')

    featureTypes = list(wfs.contents)

    statBezirke = {}

    statBezirkTextProperties = ['autoid', 'flaeche_m2', 'obj_id']

    for featureType in featureTypes:
        root = ET.fromstring(wfs.getfeature(typename=featureType).getvalue())
        # Local element names repeat the feature type minus its 4-char prefix.
        for feature in root.iter(KOMMISDD_NS + featureType[4:]):
            blockNr = feature.find(KOMMISDD_NS + 'blocknr').text
            entry = {'Statistischer Bezirk': blockNr}
            for prop in statBezirkTextProperties:
                entry[prop] = feature.find(KOMMISDD_NS + prop).text
            # If several posList elements exist, the last one wins
            # (same behaviour as before).
            for polygon in feature.find(KOMMISDD_NS + 'PrimaryGeometry'):
                for posList in polygon.iter(GML_NS + 'posList'):
                    entry['polygon'] = posList.text
            statBezirke[blockNr] = entry

    # Convert the space-separated "x y x y ..." string into a shapely Polygon.
    for bezirk in statBezirke.values():
        coords = bezirk['polygon'].split(' ')
        points = [(float(coords[i * 2]), float(coords[i * 2 + 1]))
                  for i in range(len(coords) // 2)]
        bezirk['polygon'] = Polygon(points)

    return transformToCsv(targetNodeId, statBezirke)
Esempio n. 4
0
    def validate_wfs(self):
        """Validate input arguments with the WFS service."""
        wfs = WebFeatureService(self.url, version=self.version)

        # The layer must be provided and must exist on the service.
        known_layers = list(wfs.contents)
        if self.layer is None:
            raise MissingInputs("The layer argument is missing." +
                                " The following layers are available:\n" +
                                ", ".join(known_layers))
        if self.layer not in known_layers:
            raise InvalidInputValue("layers", known_layers)

        # The output format must be one the GetFeature operation advertises
        # (compared case-insensitively).
        getfeature_op = wfs.getOperationByName("GetFeature")
        known_formats = [
            fmt.lower()
            for fmt in getfeature_op.parameters["outputFormat"]["values"]
        ]
        if self.outformat is None:
            raise MissingInputs(
                "The outformat argument is missing." +
                " The following output formats are available:\n" +
                ", ".join(known_formats))
        if self.outformat not in known_formats:
            raise InvalidInputValue("outformat", known_formats)

        # The CRS is validated as a lower-cased "authority:code" string.
        known_crss = [
            "{}:{}".format(option.authority.lower(), option.code)
            for option in wfs[self.layer].crsOptions
        ]
        if self.crs.lower() not in known_crss:
            raise InvalidInputValue("crs", known_crss)
Esempio n. 5
0
def getAttributes(shapefile, WFS_URL):
    """
    Given a valid shapefile(WFS Featuretype as returned by getShapefiles), this function will 
    make a request for one feature from the featureType and parse out the attributes that come from
    a namespace not associated with the normal GML schema. There may be a better way to determine 
    which are shapefile dbf attributes, but this should work pretty well.
    """
    # Namespaces that belong to the standard WFS/GML machinery; anything
    # outside this set is treated as a shapefile attribute namespace.
    standard_namespaces = ('http://www.opengis.net/wfs',
                           'http://www.w3.org/2001/XMLSchema-instance',
                           'http://www.w3.org/1999/xlink',
                           'http://www.opengis.net/gml',
                           'http://www.opengis.net/ogc',
                           'http://www.opengis.net/ows')
    # Geometry/container elements that are not dbf attributes.
    skipped_names = ('the_geom', 'Shape', shapefile.split(':')[1])

    wfs = WebFeatureService(WFS_URL, version='1.1.0')
    feature = wfs.getfeature(typename=shapefile, maxfeatures=1, propertyname=None)
    gml = etree.parse(feature)

    attributes = []
    for namespace in gml.getroot().nsmap.values():
        if namespace in standard_namespaces:
            continue
        for element in gml.iter('{' + namespace + '}*'):
            localname = etree.QName(element).localname
            if localname not in skipped_names:
                attributes.append(localname)
    return attributes
Esempio n. 6
0
def get_3d(shape, thr=0):
    """Fetch 3D BAG building footprints covering *shape*'s bounding box.

    Parameters
    ----------
    shape : GeoDataFrame
        Only its ``total_bounds`` is used as the query bbox.
        # assumes EPSG:28992 coordinates -- TODO confirm with callers
    thr : float
        Minimum 'roof-0.99' height; buildings at or below are dropped.

    Returns
    -------
    GeoDataFrame
        Columns ['year', 'height', 'geometry'].
    """
    url = 'http://3dbag.bk.tudelft.nl/data/wfs?request=getcapabilities'
    wfs = WebFeatureService(url=url, version='2.0.0')
    layer = list(wfs.contents)[0]
    bbox = tuple(shape.total_bounds)

    # Probe request just to learn the total feature count.
    response = wfs.getfeature(typename=layer, outputFormat='JSON', bbox=bbox).read()
    total_feats = json.loads(response)['totalFeatures']

    # BUG FIX: the old code rounded total_feats to one significant digit
    # before paging, which could silently drop up to a full page of features
    # (e.g. 1499 -> factor 1000 -> rounded to 1000 -> only one page fetched).
    # Page directly over the exact count, 1000 features per request.
    gdf_bags = []
    for start in range(0, total_feats, 1000):
        response = wfs.getfeature(typename=layer, startindex=start,
                                  outputFormat='JSON',
                                  bbox=bbox).read()
        features = json.loads(response)['features']
        gdf_bags.append(gpd.GeoDataFrame.from_features(features, crs='EPSG:28992'))

    # axis by keyword: positional axis for pd.concat is deprecated.
    gdf_bags = pd.concat(gdf_bags, axis=0)[['bouwjaar', 'roof-0.99', 'geometry']]
    gdf_bags = gdf_bags[gdf_bags['roof-0.99'] > thr].reset_index(drop=True)
    # 'bouwjaar' may be a range like '1995-2000'; keep the leading year.
    gdf_bags.loc[:, 'bouwjaar'] = gdf_bags['bouwjaar'].str.split('-').str[0].astype(int)
    gdf_bags.columns = ['year', 'height', 'geometry']
    return gdf_bags
Esempio n. 7
0
def test_ows_interfaces_wfs():
    """Check a WFS 1.0 service instance conforms to the OWSLib OWS interfaces."""
    # BUG FIX: close the capabilities file instead of leaking the handle.
    with open(resource_file('mapserver-wfs-cap.xml'), 'rb') as capfile:
        wfsxml = capfile.read()
    service = WebFeatureService('url', version='1.0', xml=wfsxml)
    # Check each service instance conforms to OWSLib interface
    service.alias = 'CSW'
    # BUG FIX: a bare isinstance() call is a no-op in a test -- assert it.
    assert isinstance(service, owslib.feature.wfs100.WebFeatureService_1_0_0)
    # URL attribute
    assert service.url == 'url'
    # version attribute
    assert service.version == '1.0'
    # Identification object
    assert hasattr(service, 'identification')
    # Check all ServiceIdentification attributes
    assert service.identification.type == 'MapServer WFS'
    for attribute in ['type', 'version', 'title', 'abstract', 'keywords', 'accessconstraints', 'fees']:
        assert hasattr(service.identification, attribute)
    # Check all ServiceProvider attributes
    for attribute in ['name', 'url']:
        assert hasattr(service.provider, attribute)
    # Check all operations implement IOperationMetadata
    for op in service.operations:
        for attribute in ['name', 'formatOptions', 'methods']:
            assert hasattr(op, attribute)
    # BUG FIX: assert the contents-mapping type check as well.
    assert isinstance(service.contents, dict)
    # Check any item (WCS coverage, WMS layer etc) from the contents of each service
    # Check it conforms to IContentMetadata interface
    # get random item from contents dictionary -has to be a nicer way to do this!
    content = service.contents[list(service.contents.keys())[0]]
    for attribute in ['id', 'title', 'boundingBox', 'boundingBoxWGS84', 'crsOptions', 'styles', 'timepositions']:
        assert hasattr(content, attribute)
    def __init__(self, url='http://localhost:3007', map_name='', debug=False):
        """Create a WFS client for *url*, optionally scoped to *map_name*.

        The endpoint becomes "<url>/<map_name>" when map_name is non-empty,
        otherwise *url* is used as-is.  On connection failure a generic
        Exception wrapping the original error is raised; on success the
        capabilities, layer info and default response parameters are
        eagerly pre-loaded.
        """
        WfsFilter.__init__(self)
        if map_name:
            wfs_url = u"{0}/{1}".format(url, map_name)
        else:
            wfs_url = url
        self.debug = debug
        self.map_name_use = map_name

        # self.wfs_ver / self.wfs_timeout are read here but not set in this
        # method -- presumably initialized by WfsFilter.__init__ above; verify.
        self.wfs_args = {
            "url": wfs_url,
            "version": self.wfs_ver,
        }
        # Only pass a timeout when one is configured as an int.
        if isinstance(self.wfs_timeout, int):
            self.wfs_args["timeout"] = self.wfs_timeout
        try:
            self.wfs = WebFeatureService(**self.wfs_args)
        except Exception as err:
            raise Exception(u"WFS is not support in '{0}'\n{1}".format(
                wfs_url, err))
        else:
            # Connection succeeded: cache capabilities and metadata up front.
            self.capabilities = None
            self.get_capabilities()
            self.info = None
            self.get_info()
            self._set_def_resp_params()
def get_geohashes(bbox: tuple = None, crs: str = "EPSG:4326") -> list:
    """Gets all waterbody geohashes.

    Parameters
    ----------
    bbox : (xmin, ymin, xmax, ymax)
        Optional bounding box.
    crs : str
        Optional CRS for the bounding box.

    Returns
    -------
    list of str
        A list of geohashes.
    """
    # BUG FIX: the return annotation was `[str]`, which is a one-element list
    # literal evaluated at definition time, not a type; use `list` instead.
    wfs = WebFeatureService(url=WFS_ADDRESS, version="1.1.0")
    # OWSLib accepts the CRS as a trailing fifth element of the bbox tuple.
    if bbox is not None:
        bbox = tuple(bbox) + (crs, )
    response = wfs.getfeature(
        typename="DigitalEarthAustraliaWaterbodies",
        propertyname="uid",
        outputFormat="json",
        bbox=bbox,
    )
    wb_gpd = gpd.read_file(response)
    return list(wb_gpd["uid"])
Esempio n. 10
0
def test():
    """Smoke-test a filtered GetFeature request against the Geonorge WFS."""
    wfs = WebFeatureService("https://wfs.geonorge.no/skwms1/wfs.inspire-lcv",
                            version="2.0.0")
    # Renamed from `filter`, which shadowed the builtin.
    lcv_filter = PropertyIsLike(propertyname="lcv:LandCoverObservation",
                                literal="21")
    filterxml = etree.tostring(lcv_filter.toXML()).decode("utf-8")
    # The request itself is the test; the response body is not inspected.
    response = wfs.getfeature(typename="lcv:LandCoverUnit", filter=filterxml)
Esempio n. 11
0
def _get_location_wfs(
    coordinates: Tuple[Union[int, float, str], Union[str, float, int],
                       Union[str, float, int], Union[str, float, int], ],
    layer: str = True,
    geoserver: str = GEO_URL,
) -> bytes:
    """Return leveled features from a hosted data set using bounding box coordinates and WFS 1.1.0 protocol.

    For geographic rasters, subsetting is based on WGS84 (Long, Lat) boundaries. If not geographic, subsetting based
    on projected coordinate system (Easting, Northing) boundaries.

    Parameters
    ----------
    coordinates : Tuple[Union[str, float, int], Union[str, float, int], Union[str, float, int], Union[str, float, int]]
      Geographic coordinates of the bounding box (left, down, right, up).
    layer : str
      The WFS/WMS layer name requested.
    geoserver: str
      The address of the geoserver housing the layer to be queried. Default: http://pavics.ouranos.ca/geoserver/.

    Returns
    -------
    bytes
      A GML-encoded vector feature (raw response body).

    """
    # NOTE(review): the default `layer=True` cannot be a valid typename, so a
    # call without an explicit layer would fail inside getfeature -- confirm
    # callers always pass one and consider changing the default to None.
    wfs = WebFeatureService(url=urljoin(geoserver, "wfs"),
                            version="1.1.0",
                            timeout=30)
    resp = wfs.getfeature(typename=layer,
                          bbox=coordinates,
                          srsname="urn:x-ogc:def:crs:EPSG:4326")

    # resp is a file-like response; read() yields the raw GML bytes.
    data = resp.read()
    return data
Esempio n. 12
0
def getWFSData():
    """Handle a WFS request: contact the remote server, post-process BODC
    responses, and return the result as JSON (aborts 500 if it cannot be
    serialized)."""
    from owslib.wfs import WebFeatureService

    params = getWFSParams()  # Get parameters
    params = checkParams(params)  # Check parameters

    wfs = WebFeatureService(params['baseURL'].value,
                            version=params['version'].value)
    response = wfs.getfeature(typename=str(params['typeName'].value),
                              featureid=[params['featureID'].value
                                         ])  # Contact server

    # BUG FIX: the old test `if string.find(url, 'bodc', 0):` was truthy for
    # every index except 0 -- including -1 (substring absent) -- so non-BODC
    # URLs took the BODC branch. Use a plain substring membership test.
    if 'bodc' in params['baseURL'].value:
        response = processBODCResponse(response.read(),
                                       params)  # Get data from response

    current_app.logger.debug('Jsonifying response...')  # DEBUG

    # Convert to json
    try:
        jsonData = jsonify(output=response)
    except TypeError as e:
        g.error = "Request aborted, exception encountered: %s" % e
        error_handler.setError(
            '2-06', None, g.user.id,
            "views/wfs.py:getWFSData - Type error, returning 500 to user. Exception %s"
            % e, request)
        abort(500)  # If we fail to jsonify the data return 500

    current_app.logger.debug('Request complete, Sending results')  # DEBUG

    return jsonData  # return json
Esempio n. 13
0
def getWFSData():
    """Handle a WFS request: contact the remote server, post-process BODC
    responses, and return the result as JSON (aborts 500 if it cannot be
    serialized)."""
    from owslib.wfs import WebFeatureService

    params = getWFSParams() # Get parameters
    params = checkParams(params) # Check parameters

    wfs = WebFeatureService(params['baseURL'].value, version=params['version'].value)
    response = wfs.getfeature(typename=str(params['typeName'].value), featureid=[params['featureID'].value]) # Contact server

    # BUG FIX: `if string.find(url, 'bodc', 0):` was truthy for any index
    # except 0, including -1 (not found), so non-BODC URLs were processed as
    # BODC. A substring membership test expresses the intent correctly.
    if 'bodc' in params['baseURL'].value:
        response = processBODCResponse(response.read(), params) # Get data from response

    current_app.logger.debug('Jsonifying response...') # DEBUG

    # Convert to json
    try:
        jsonData = jsonify(output = response)
    except TypeError as e:
        g.error = "Request aborted, exception encountered: %s" % e
        error_handler.setError('2-06', None, g.user.id, "views/wfs.py:getWFSData - Type error, returning 500 to user. Exception %s" % e, request)
        abort(500) # If we fail to jsonify the data return 500

    current_app.logger.debug('Request complete, Sending results') # DEBUG

    return jsonData # return json
Esempio n. 14
0
    def __init__(self, url='https://geo.woudc.org/ows', timeout=30):
        """
        Initialize a WOUDC Client.

        Contacts the WFS endpoint immediately and, when the server
        advertises a DefaultMaxFeatures constraint, adopts it in place
        of the built-in 25000 default.

        :param url: URL of the WOUDC OWS endpoint
        :param timeout: request timeout in seconds
        :returns: instance of pywoudc.WoudcClient
        """

        self.url = url
        """The URL of the WOUDC data service"""

        self.timeout = timeout
        """Time (in seconds) after which requests should timeout"""

        self.about = 'https://woudc.org/about/data-access.php'
        """The About Data Access page"""

        self.outputformat = 'application/json; subtype=geojson'
        """The default outputformat when requesting WOUDC data"""

        self.maxfeatures = 25000
        """The default limit of records to return"""

        LOGGER.info('Contacting %s', self.url)
        self.server = WebFeatureService(self.url, '1.1.0',
                                        timeout=self.timeout)
        """The main WOUDC server"""

        # Prefer the server-advertised limit when the constraint exists;
        # otherwise silently keep the 25000 default set above.
        try:
            mf = int(self.server.constraints['DefaultMaxFeatures'].values[0])
            self.maxfeatures = mf
        except KeyError:
            LOGGER.info('Using default maxfeatures')
Esempio n. 15
0
def get_forecast(place=None,
                 latlon=None,
                 timestep=60,
                 start_time=None,
                 end_time=None):
    """Fetch a HARMONIE surface point forecast from the FMI open-data WFS.

    Parameters
    ----------
    place : str, optional
        Place name understood by the FMI service; used when latlon is absent.
    latlon : tuple, optional
        (lat, lon); takes precedence over place.
    timestep : int
        Forecast step in minutes.
    start_time : datetime, optional
        Start of the window (sent as ISO-8601 UTC).
    end_time : optional
        Accepted for interface compatibility but not forwarded to the query.

    Returns
    -------
    dict
        {'observations': [per-step dicts with 'time' plus the numeric
        fields], 'meta': {'result_time': localized datetime}}.
    """
    url = 'https://opendata.fmi.fi/wfs'
    wfs = WebFeatureService(url=url, version='2.0.0')
    params = {'timestep': timestep}
    if latlon:
        params['latlon'] = '%s,%s' % (latlon[0], latlon[1])
    elif place:
        params['place'] = place

    if start_time:
        params['starttime'] = start_time.isoformat().split('.')[0] + 'Z'
    query_id = 'fmi::forecast::harmonie::surface::point::multipointcoverage'

    resp = wfs.getfeature(storedQueryID=query_id, storedQueryParams=params)
    # (Removed a leftover debug print of the entire response XML.)
    root = etree.fromstring(bytes(resp.read(), encoding='utf8'))

    result_time = root.find('.//{*}resultTime//{*}timePosition').text
    result_time = dateutil.parser.parse(result_time).astimezone(LOCAL_TZ)

    positions = root.find('.//{*}positions').text
    observations = root.find(
        './/{*}DataBlock/{*}doubleOrNilReasonTupleList').text
    fields = root.findall('.//{*}DataRecord/{*}field')
    field_names = [x.attrib['name'] for x in fields]

    positions = [
        re.findall(r'\S+', x.strip()) for x in positions.splitlines()
        if x.strip()
    ]
    observations = [
        re.findall(r'\S+', x.strip()) for x in observations.splitlines()
        if x.strip()
    ]

    data = []
    last_precipitation = None
    for pos, obs in zip(positions, observations):
        d = {
            field_name: float(sample)
            for field_name, sample in zip(field_names, obs)
        }
        # pos[2] is a unix epoch; fromtimestamp() yields naive local time.
        # BUG FIX: a stray `ts.replace(tzinfo=pytz.UTC)` discarded its
        # result (datetime.replace returns a new object) -- removed.
        ts = datetime.fromtimestamp(int(pos[2]))
        d['time'] = LOCAL_TZ.localize(ts)
        if 'PrecipitationAmount' in d:
            # The service reports accumulated precipitation; convert it to a
            # per-step amount. BUG FIX: compare the baseline against None
            # rather than truthiness, so an initial 0.0 reading does not keep
            # resetting the baseline forever.
            if last_precipitation is not None:
                val = d['PrecipitationAmount']
                d['PrecipitationAmount'] -= last_precipitation
                last_precipitation = val
            else:
                last_precipitation = d['PrecipitationAmount']
        data.append(d)

    return dict(observations=data, meta=dict(result_time=result_time))
Esempio n. 16
0
def get_sortkey(table):
    """Get a field to sort by.

    Just picks the first column of the table in alphabetical order.
    Ideally we would get the primary key from the bcdc api, but it
    doesn't seem to be available.
    """
    wfs = WebFeatureService(url=bcdata.OWS_URL, version="2.0.0")
    # min() gives the alphabetically-first key directly -- no need to sort
    # the whole key set just to take element 0.
    return min(wfs.get_schema("pub:" + table)["properties"].keys())
Esempio n. 17
0
def test_schema_wfs_200():
    """The 'footprint' schema over WFS 2.0.0 has four properties and a 3D polygon geometry."""
    service = WebFeatureService(
        'https://www.sciencebase.gov/catalogMaps/mapping/ows/53398e51e4b0db25ad10d288',
        version='2.0.0')
    schema = service.get_schema('footprint')
    props = schema['properties']
    assert len(props) == 4
    assert props['summary'] == 'string'
    assert schema['geometry'] == '3D Polygon'
Esempio n. 18
0
def test_wfs3_ldproxy():
    """Basic attribute and conformance checks against a WFS 3.0 ldproxy server."""
    service = WebFeatureService(SERVICE_URL, version='3.0')

    # Service metadata resolved from the capabilities.
    assert service.url == 'https://www.ldproxy.nrw.de/rest/services/kataster/'
    assert service.version == '3.0'
    assert service.url_query_string == 'f=json'

    # The server should advertise exactly five conformance classes.
    assert len(service.conformance()['conformsTo']) == 5
Esempio n. 19
0
def test_outputformat_wfs_100():
    """A GetFeature with outputFormat=application/json yields one GeoJSON feature."""
    service = WebFeatureService(
        'https://www.sciencebase.gov/catalogMaps/mapping/ows/53398e51e4b0db25ad10d288',
        version='1.0.0')
    feature = service.getfeature(typename=['sb:Project_Area'],
                                 maxfeatures=1,
                                 propertyname=None,
                                 outputFormat='application/json')
    payload = json.loads(feature.read())
    assert len(payload['features']) == 1
Esempio n. 20
0
    def perform_request(self):
        """
        Perform the validation.
        Uses https://github.com/p1c2u/openapi-spec-validator on
        the specfile (dict) returned from the OpenAPI endpoint.

        Two-phase probe: first a sanity check that the /api endpoint
        serves a document with the top-level OpenAPI keys, then a full
        spec validation.  Each phase appends its own Result to
        self.result; validation errors each become a separate Result.
        """

        # Step 1 basic sanity check
        result = Result(True, 'OpenAPI Validation Test')
        result.start()
        api_doc = None
        try:
            wfs3 = WebFeatureService(self._resource.url, version='3.0')

            # TODO: OWSLib 0.17.1 has no call to '/api yet.
            # Build endpoint URL (may have f=json etc)
            api_url = wfs3._build_url('api')

            # Get OpenAPI spec from endpoint as dict once
            api_doc = requests.get(api_url).json()

            # Basic sanity check: these keys are mandatory in an OpenAPI doc.
            for attr in ['components', 'paths', 'openapi']:
                val = api_doc.get(attr, None)
                if val is None:
                    msg = '/api: missing attr: %s' % attr
                    result.set(False, msg)
                    break
        except Exception as err:
            # Any failure (network, JSON decode, OWSLib) fails this phase.
            result.set(False, str(err))

        result.stop()
        self.result.add_result(result)

        # No use to proceed if OpenAPI basics not complied
        if api_doc is None or result.success is False:
            return

        # ASSERTION: /api exists, next OpenAPI Validation

        # Step 2 detailed OpenAPI Compliance test
        result = Result(True, 'Validate OpenAPI Compliance')
        result.start()
        try:
            # Call the openapi-spec-validator and iterate through errors
            errors_iterator = openapi_v3_spec_validator.iter_errors(api_doc)
            for error in errors_iterator:
                # Add each validation error as separate Result object
                # (push_result returns the Result to keep accumulating on).
                result = push_result(self, result, False, str(error),
                                     'OpenAPI Compliance Result')
        except Exception as err:
            result.set(False, 'OpenAPI Validation err: e=%s' % str(err))

        result.stop()

        # Add to overall Probe result
        self.result.add_result(result)
Esempio n. 21
0
def index():
    """Return the WFS schema for body['key'] from the service at body['url'].

    Expects a JSON request body with 'url', 'version' and 'key'; aborts
    when no JSON body is supplied.
    """
    if not request.json:
        # NOTE(review): 400 (bad request) would be more conventional than 404
        # for a missing body -- kept as-is for client compatibility.
        abort(404)

    body = request.json
    # (Removed a debug print of the request body; it could leak payloads
    # into server logs.)
    wfs = WebFeatureService(url=body['url'], version=body['version'])
    response = wfs.get_schema(body['key'])
    return jsonify(response)
def load_full_month(year, mm, place):
    """Load hourly weather observations for a whole past month from the FMI WFS.

    Parameters
    ----------
    year, mm : int
        Month to fetch; asserted to lie fully in the past.
    place : str
        Place name passed to the FMI stored query.

    Returns
    -------
    dict
        Maps each feature name to the list of hourly values collected
        (values are the raw text from the time-value-pair response).
    """
    import calendar
    assert datetime.date(year, mm, calendar.monthrange(year, mm)[1]) < datetime.date.today(), \
        "wrong date {}/{} given, query only past months excluding current".format(year, mm)

    # Single source of truth for the queried features -- the old code
    # repeated this list twice, inviting drift between the two copies.
    features = [
        'Temperature', 'Humidity', 'WindDirection', 'WindSpeedMS',
        'TotalCloudCover', 'Precipitation1h'
    ]
    # dictionary of empty lists for feature values
    values = {feature: [] for feature in features}

    wfs11 = WebFeatureService(url='https://opendata.fmi.fi/wfs',
                              version='2.0.0')

    last_day = calendar.monthrange(year, mm)[1]
    # query one week at a time
    for day in range(1, last_day, 7):
        starttime = datetime.datetime(year, mm, day)
        endtime = datetime.datetime(year, mm, min(day + 6, last_day))
        endtime = endtime.strftime('%Y-%m-%d 23:00:00')

        # fetch data for given feature
        for feature in features:

            response = wfs11.getGETGetFeatureRequest(
                storedQueryID='fmi::observations::weather::timevaluepair',
                storedQueryParams={
                    'parameters': feature,
                    'place': place,
                    'timestep': 60,
                    'starttime': starttime,
                    'endtime': endtime
                })
            # save response to temp XML file
            download_file(response)

            # returns TPV pairs
            try:
                TPVs = parse_xml_fields("tempXML.xml")
            except Exception:  # was a bare except; keep the bail-out behaviour
                print("Error occurred in parsing the XML response: ", response)
                print('Place: {}, feature: {}'.format(place, feature))
                sys.exit()

            for pair in TPVs:
                #time = pair[0].text
                value = pair[1].text

                # append value to the list of the feature
                values[feature].append(value)

    return values
Esempio n. 23
0
def getValues(shapefile, attribute, getTuples, limitFeatures, wfs_url):
    """
    Similar to get attributes, given a shapefile and a valid attribute this function
    will make a call to the Web Feature Services returning a list of values associated
    with the shapefile and attribute.

    If getTuples = True, will also return the tuples of [feature:id]  along with values [feature]
    """

    wfs = WebFeatureService(wfs_url, version='1.1.0')

    # Only the requested attribute is fetched, capped at limitFeatures.
    feature = wfs.getfeature(typename=shapefile, maxfeatures=limitFeatures, propertyname=[attribute])
    content = BytesIO(feature.read().encode())
    gml = etree.parse(content)

    values = []

    # Collect distinct text values from every element whose tag *contains*
    # the attribute name (substring match, not an exact tag comparison).
    for el in gml.iter():
        if attribute in el.tag:
            if el.text not in values:
                values.append(el.text)

    # getTuples is a string flag: 'true' -> values and tuples,
    # 'only' -> tuples only, anything else -> values only.
    if getTuples == 'true' or getTuples == 'only':
        tuples = []
        # att flags that a gml:id was seen and the next matching attribute
        # element should be paired with it.
        att = False

        # If features are encoded as a list of featureMember elements.
        gmlid_found = False
        for featureMember in gml.iter('{' + GML_NAMESPACE + '}featureMember'):
            for el in featureMember.iter():
                if el.get('{' + GML_NAMESPACE + '}id'):
                    gmlid = el.get('{' + GML_NAMESPACE + '}id')
                    att = True
                    gmlid_found = True
                if attribute in el.tag and att is True:
                    value = el.text
                    tuples.append((value, gmlid))
                    att = False
            # NOTE(review): gmlid_found is never reset between members, so
            # this only raises when the *first* featureMember lacks a gml:id;
            # confirm whether later id-less members should also be rejected.
            if not gmlid_found:
                raise Exception('No gml:id found in source feature service. This form of GML is not supported.')

        # If features are encoded as a featureMembers element.
        for featureMember in gml.iter('{' + GML_NAMESPACE + '}featureMembers'):
            for el in featureMember.iter():
                gmlid = el.get('{' + GML_NAMESPACE + '}id')
                for feat in el.getchildren():
                    if attribute in feat.tag:
                        value = feat.text
                        tuples.append((value, gmlid))

    if getTuples == 'true':
        return sorted(values), sorted(tuples)
    elif getTuples == 'only':
        return sorted(tuples)
    else:
        return sorted(values)
Esempio n. 24
0
def load_layer_data(request, template='layers/layer_detail.html'):
    # NOTE: this is Python 2 code (print statement, dict.iteritems below).
    # Returns, per requested attribute, the sorted list of distinct non-empty
    # scalar values found across the layer's features, as a JSON response.
    context_dict = {}
    data_dict = json.loads(request.POST.get('json_data'))
    layername = data_dict['layer_name']
    # Drop any attribute entries that embed the '/load_layer_data' URL
    # (defensive filtering of the client-supplied comma-separated list).
    filtered_attributes = [
        x for x in data_dict['filtered_attributes'].split(',')
        if '/load_layer_data' not in x
    ]
    workspace, name = layername.split(':')
    location = "{location}{service}".format(
        **{
            'location': settings.OGC_SERVER['default']['LOCATION'],
            'service': 'wms',
        })

    try:
        # TODO: should be improved by using OAuth2 token (or at least user related to it) instead of super-powers
        username = settings.OGC_SERVER['default']['USER']
        password = settings.OGC_SERVER['default']['PASSWORD']
        wfs = WebFeatureService(location,
                                version='1.1.0',
                                username=username,
                                password=password)
        response = wfs.getfeature(typename=name,
                                  propertyname=filtered_attributes,
                                  outputFormat='application/json')
        x = response.read()
        x = json.loads(x)
        features_response = json.dumps(x)
        decoded = json.loads(features_response)
        decoded_features = decoded['features']
        # Seed one empty list per property key found on the first feature.
        properties = {}
        for key in decoded_features[0]['properties']:
            properties[key] = []

        # loop the dictionary based on the values on the list and add the properties
        # in the dictionary (if doesn't exist) together with the value
        for i in range(len(decoded_features)):
            for key, value in decoded_features[i]['properties'].iteritems():
                # Keep only non-empty scalar values that don't embed the URL.
                if value != '' and isinstance(
                        value,
                    (string_types, int, float)) and ('/load_layer_data'
                                                     not in value):
                    properties[key].append(value)

        # Deduplicate and sort each property's value list.
        for key in properties:
            properties[key] = list(set(properties[key]))
            properties[key].sort()

        context_dict["feature_properties"] = properties
    except:
        # NOTE(review): bare except swallows all errors and returns an empty
        # context; consider narrowing and logging via the app logger.
        import traceback
        traceback.print_exc()
        print "Possible error with OWSLib."
    return HttpResponse(json.dumps(context_dict),
                        content_type="application/json")
Esempio n. 25
0
    def __init__(self,
                 wfsurl,
                 wfsversion='2.0.0'):
        """Remember the endpoint URL and open a WebFeatureService client on it.

        wfsurl: WFS endpoint (e.g. an ArcGIS WFSServer service URL).
        wfsversion: WFS protocol version, '2.0.0' by default.
        """
        self.wfsurl = wfsurl
        self.wfs = WebFeatureService(url=self.wfsurl, version=wfsversion)
Esempio n. 26
0
    def do_query_wfs(cls, request, typename, propertyname, bbox,
                     return_template, filter):
        """
        Query a WFS service.

        Parameters
        ----------
        request : pyramid request; its registry settings supply the WFS
            url/version/srsname and the three known typenames.
        typename, propertyname, bbox, filter : forwarded to wfs.getfeature.
        return_template : template substituted with each feature's attributes.

        Returns
        -------
        list of dicts, one parsed JSON object per recognised feature.
        """

        try:

            settings = request.registry.settings
            url = settings['spch_wfs_url']
            version = settings['version']
            srsname = settings['srsname']

            localites_typename = settings['localites_typename']
            cadastre_typename = settings['cadastre_typename']
            communes_typename = settings['communes_typename']

            wfs = WebFeatureService(url=url, version=version)

            response = wfs.getfeature(typename=typename,
                                      propertyname=propertyname,
                                      srsname=srsname,
                                      bbox=bbox,
                                      filter=filter)

            xpars = xmltodict.parse(response.read())

            formattedFeatures = []

            if "wfs:FeatureCollection" in xpars and "gml:featureMember" in xpars[
                    "wfs:FeatureCollection"]:
                features = xpars["wfs:FeatureCollection"]["gml:featureMember"]

                for feature in features:

                    currentTypename = None

                    if "ms:" + localites_typename in feature:
                        currentTypename = localites_typename
                    elif "ms:" + cadastre_typename in feature:
                        currentTypename = cadastre_typename
                    elif "ms:" + communes_typename in feature:
                        currentTypename = communes_typename

                    # BUG FIX: an unrecognised feature previously made
                    # '"ms:" + None' raise a TypeError; skip it instead.
                    if currentTypename is None:
                        continue

                    atts = feature["ms:" + currentTypename]
                    one_return_obj = cls.substitute(atts, return_template)
                    formattedFeatures.append(json.loads(one_return_obj))

            return formattedFeatures

        except Exception as error:
            # Chain the original exception so its traceback is preserved.
            raise Exception(str(error)) from error
def fmi_request(query_id, start_time, end_time, weather_params,
                time_row_label):
    """Fetch weather data rows from the FMI (Finnish Meteorological Institute) WFS.

    :param query_id: FMI stored query id, passed as ``storedQueryID``
    :param start_time: start of the requested period (``starttime`` query param)
    :param end_time: end of the requested period (``endtime`` query param)
    :param weather_params: dict mapping FMI parameter names to the key under
        which each value is stored in the output rows
    :param time_row_label: key under which each row's observation time is stored
    :return: list of row dicts, or None when the response was mostly NaN

    NOTE(review): relies on module-level names ``debug_input``,
    ``save_alert_sample`` and ``get_config`` -- confirm their definitions.
    """
    debug_input_filename = "pyfiles/FMIObservationSample.xml"
    # XML namespaces used by the FMI simple-feature (BsWfs) response format
    ns = {
        'wfs': 'http://www.opengis.net/wfs/2.0',
        'BsWfs': 'http://xml.fmi.fi/schema/wfs/2.0',
        'gml': 'http://www.opengis.net/gml/3.2'
    }
    xml_root = ""
    if debug_input:
        # Offline/debug mode: parse a previously saved sample response
        xml_root = ET.parse(debug_input_filename).getroot()
    else:
        fmi_wfs = WebFeatureService(url='http://data.fmi.fi/fmi-apikey/' +
                                    get_config('FMI_API_KEY') + '/wfs',
                                    version='2.0.0')
        keys = ",".join(weather_params.keys())
        query_params = {
            'place': 'helsinki',
            'starttime': start_time,
            'endtime': end_time,
            'timestep': 60,  # minutes
            'parameters': keys
        }
        try:
            feature_read = fmi_wfs.getfeature(
                storedQueryID=query_id, storedQueryParams=query_params).read()
            xml_root = ET.fromstring(feature_read)
            if save_alert_sample:
                # Persist the raw response so later debug_input runs can replay it
                with open(debug_input_filename, "w") as data_file:
                    data_file.write(bytes(feature_read))
        except Exception as e:
            print "fmi_forecast_request exception: ", e
    time_retrieved = xml_root.get('timeStamp')
    response_table = []
    first_name = None
    nan_counter = 0
    row_build = {'time_retrieved': time_retrieved}
    # Each wfs:member holds one (time, parameter, value) triple. A new output
    # row starts whenever the first-seen parameter name repeats, i.e. the
    # parameter cycle for the next timestep begins.
    for member in xml_root.iterfind('wfs:member', ns):
        for item in list(member):
            name_elem = item.find('BsWfs:ParameterName', ns).text
            if not first_name: first_name = name_elem
            elif name_elem == first_name:
                response_table.append(row_build)
                row_build = {'time_retrieved': time_retrieved}
            row_build[time_row_label] = item.find('BsWfs:Time', ns).text
            value_elem = item.find('BsWfs:ParameterValue', ns).text
            if value_elem == 'NaN':
                # NaN values are zeroed but counted; too many invalidate the result
                value_elem = 0
                nan_counter += 1
            if name_elem in weather_params:
                row_build[weather_params[name_elem]] = value_elem
    # Flush the last row only if it holds data beyond the two bookkeeping keys
    if len(row_build) > 2: response_table.append(row_build)
    if nan_counter > 24:
        response_table = None  # Sometimes all values return as 'NaN' all the way from FMI. Useless result - rather reload later
    return response_table
Esempio n. 28
0
def webgisfilter(mapserv,
                 layer,
                 maxfeatures=None,
                 startindex=None,
                 bbox=None,
                 filters=None):
    """WFS client for WebGIS: return features of *layer* with geometry removed.

    Each filter dict has the form::

        {'attribute': 'NAME', 'operator': '=', 'value': 'Prague'}

    Supported operators: = != ~ IN

    :param str mapserv: URL of the MapServer/WFS endpoint
    :param str layer: layer (feature type) name
    :param int maxfeatures: maximum number of features to return
    :param int startindex: index of the first returned feature
    :param bbox: bounding box limiting the query
    :param filters: sequence of filter dicts as described above
    :return: GeoJSON-like dict whose features carry attributes only
    :rtype: dict
    """
    client = WebFeatureService(url=mapserv, version='1.0.0')

    fes = None
    if filters:
        # A bbox cannot be sent alongside an attribute filter, so fold the
        # box into the filter document before serializing it.
        if bbox:
            filters.append({'operator': 'BBOX', 'value': bbox})
        fes = etree.tostring(get_filter_root(get_filter_fes(filters)),
                             encoding='unicode')
        bbox = None  # the box now lives inside the serialized filter

    response = client.getfeature(typename=[layer],
                                 filter=fes,
                                 bbox=bbox,
                                 featureid=None,
                                 outputFormat="GeoJSON",
                                 maxfeatures=maxfeatures,
                                 startindex=startindex)
    data = json.load(response)

    # Callers only need attribute data; drop geometries where present.
    for feature in data['features']:
        feature.pop('geometry', None)

    return data
Esempio n. 29
0
def wfs_request_matching_file_pattern(
        imos_layer_name,
        filename_wfs_filter,
        url_column='url',
        geoserver_url='http://geoserver-123.aodn.org.au/geoserver/wfs',
        s3_bucket_url=False):
    """Return the list of urls in an IMOS WFS layer matching a file pattern.

    * if s3_bucket_url is False, urls are returned exactly as stored in the
      WFS layer
    * if s3_bucket_url is True, the IMOS S3 bucket prefix used to download
      the file is prepended to each url

    Examples:
    wfs_request_matching_file_pattern('srs_oc_ljco_wws_hourly_wqm_fv01_timeseries_map', '%')
    wfs_request_matching_file_pattern('srs_oc_ljco_wws_hourly_wqm_fv01_timeseries_map', '%', s3_bucket_url=True)
    wfs_request_matching_file_pattern('srs_oc_ljco_wws_hourly_wqm_fv01_timeseries_map', '%2014/06/%')
    wfs_request_matching_file_pattern('anmn_nrs_rt_meteo_timeseries_map', '%IMOS_ANMN-NRS_MT_%', url_column='file_url', s3_bucket_url=True)

    WARNING: Please exec $DATA_SERVICES_DIR/lib/test/python/manual_test_wfs_query.py to run unittests before modifying function
    """
    from owslib.etree import etree
    from owslib.fes import PropertyIsLike
    from owslib.wfs import WebFeatureService
    import os
    import xml.etree.ElementTree as ET

    layer = 'imos:%s' % imos_layer_name
    data_aodn_http_prefix = 'http://data.aodn.org.au'

    wfs = WebFeatureService(url=geoserver_url, version='1.1.0')

    # Build a LIKE filter on the url column and serialize it to filter XML.
    like_filter = PropertyIsLike(propertyname=url_column,
                                 literal=filename_wfs_filter,
                                 wildCard='%')
    filter_xml = etree.tostring(like_filter.toXML()).decode("utf-8")
    response = wfs.getfeature(typename=layer,
                              filter=filter_xml,
                              propertyname=[url_column])

    # Parse the GML response: the first child of the root holds the members,
    # each member wraps one property element whose text is the url.
    root = ET.fromstring(response.read())

    urls = []
    if len(root) > 0:
        for member in root[0]:
            for prop in member:
                if s3_bucket_url:
                    urls.append(os.path.join(data_aodn_http_prefix, prop.text))
                else:
                    urls.append(prop.text)

    return urls
Esempio n. 30
0
 def runTest(self):
     """Smoke test: a BBOX-filtered GetFeature request against the USDA Soil
     Data Mart WFS must return a wfs:FeatureCollection document."""
     min_x, min_y = -76.766960, 39.283611
     max_x, max_y = -76.684120, 39.338394

     bbox_filter = (
         "<Filter><BBOX><PropertyName>Geometry</PropertyName> "
         "<Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> "
         "</Box></BBOX></Filter>" % (min_x, min_y, max_x, max_y))

     wfs = WebFeatureService(
         'http://SDMDataAccess.nrcs.usda.gov/Spatial/SDMWGS84Geographic.wfs',
         version='1.0.0')
     body = wfs.getfeature(typename=('MapunitPolyExtended',),
                           filter=bbox_filter,
                           propertyname=None).read()
     self.assertTrue(body.find('<wfs:FeatureCollection') > 0,
                     'Unable to find feature dataset in WFS response')
Esempio n. 31
0
    def test_get_schema(self, mp_wfs_110, mp_remote_describefeaturetype):
        """Retrieve a standard feature-type schema through a WFS 1.1.0 client.

        Parameters
        ----------
        mp_wfs_110 : pytest.fixture
            Monkeypatch the call to the remote GetCapabilities request.
        mp_remote_describefeaturetype : pytest.fixture
            Monkeypatch the call to the remote DescribeFeatureType request.
        """
        client = WebFeatureService(WFS_SERVICE_URL, version='1.1.0')
        schema = client.get_schema('dov-pub:Boringen')
Esempio n. 32
0
def hydro_routing_upstream(
    fid: Union[str, float, int],
    level: int = 12,
    lakes: str = "1km",
    geoserver: str = GEO_URL,
) -> pd.Series:
    """Return a list of hydro routing features located upstream.

    Parameters
    ----------
    fid : Union[str, float, int]
      Basin feature ID code of the downstream feature.
    level : int
      Level of granularity requested for the lakes vector (range(7,13)). Default: 12.
    lakes : {"1km", "all"}
      Query the version of dataset with lakes under 1km in width removed ("1km") or return all lakes ("all").
    geoserver: str
      The address of the geoserver housing the layer to be queried. Default: https://pavics.ouranos.ca/geoserver/.

    Returns
    -------
    pd.Series
      Basins ids including `fid` and its upstream contributors.
    """
    wfs = WebFeatureService(url=urljoin(geoserver, "wfs"),
                            version="2.0.0",
                            timeout=30)
    # Layer name encodes the lakes variant and the zero-padded level,
    # e.g. "public:routing_1kmLakes_12"
    layer = f"public:routing_{lakes}Lakes_{str(level).zfill(2)}"

    # Get attributes necessary to identify upstream watersheds
    resp = wfs.getfeature(
        typename=layer,
        propertyname=["SubId", "DowSubId"],
        outputFormat="application/json",
    )
    df = gpd.read_file(resp)

    # Identify upstream features
    df_upstream = _determine_upstream_ids(
        fid=fid,
        df=df,
        basin_field="SubId",
        downstream_field="DowSubId",
    )

    # Fetch upstream features
    resp = wfs.getfeature(
        typename=layer,
        featureid=df_upstream["id"].tolist(),
        outputFormat="application/json",
    )

    # NOTE(review): the first call above hands the raw response object to
    # gpd.read_file while this one passes a decoded GeoJSON string -- confirm
    # both forms are supported by the geopandas version in use.
    return gpd.read_file(resp.read().decode())
def getODRL(serviceURL):
    """Return the first valid ODRL document linked from the service's
    access-constraints text, or None when no linked URL yields one."""
    wfs = WebFeatureService(serviceURL)
    wfs.getcapabilities()

    # The access-constraints text may embed one or more URLs; probe each one.
    candidates = re.findall(r'(https?://[^\s]+)',
                            wfs.identification.accessconstraints)
    for candidate in candidates:
        response_text = urllib2.urlopen(candidate).read()
        if is_valid_protocol(response_text):
            return response_text

    # If we are here, there is not any valid ODRL XML in any of the URLs in Access Constraints
    return None
Esempio n. 34
0
def getValues(shapefile, attribute, getTuples, limitFeatures, WFS_URL):
    """
    Similar to get attributes: given a shapefile (WFS typename) and a valid
    attribute, query the Web Feature Service and return a list of values
    associated with the shapefile and attribute.

    If getTuples == "true", also return (value, gml:id) tuples along with the
    values; if getTuples == "only", return just the tuples.

    :param shapefile: WFS typename to query
    :param attribute: property name whose values are collected
    :param getTuples: "true", "only", or anything else for values only
    :param limitFeatures: maximum number of features to request (maxfeatures)
    :param WFS_URL: endpoint of the Web Feature Service
    :return: sorted values and/or sorted (value, gml:id) tuples per getTuples
    :raises Exception: if tuples are requested and a featureMember carries no
        gml:id attribute (unsupported GML form)
    """

    wfs = WebFeatureService(WFS_URL, version="1.1.0")

    feature = wfs.getfeature(typename=shapefile, maxfeatures=limitFeatures, propertyname=[attribute])

    gml = etree.parse(feature)

    # Collect distinct values of the requested attribute across all elements.
    values = []

    for el in gml.iter():
        if attribute in el.tag:
            if el.text not in values:
                values.append(el.text)

    if getTuples == "true" or getTuples == "only":
        tuples = []
        # If features are encoded as a list of featureMember elements.
        gmlid_found = False
        for featureMember in gml.iter("{" + GML_NAMESPACE + "}featureMember"):
            # Bug fix: initialize att so an attribute element appearing before
            # any gml:id no longer raises UnboundLocalError; such values are
            # skipped since there is no id to pair them with.
            att = False
            for el in featureMember.iter():
                if el.get("{" + GML_NAMESPACE + "}id"):
                    gmlid = el.get("{" + GML_NAMESPACE + "}id")
                    att = True
                    gmlid_found = True
                if attribute in el.tag and att == True:
                    value = el.text
                    tuples.append((value, gmlid))
                    att = False
            if gmlid_found == False:
                raise Exception("No gml:id found in source feature service. This form of GML is not supported.")
        # If features are encoded as a featureMembers element.
        for featureMember in gml.iter("{" + GML_NAMESPACE + "}featureMembers"):
            for el in featureMember.iter():
                gmlid = el.get("{" + GML_NAMESPACE + "}id")
                # list(el) replaces getchildren(), removed in Python 3.9.
                for feat in list(el):
                    if attribute in feat.tag:
                        value = feat.text
                        tuples.append((value, gmlid))

    if getTuples == "true":
        return sorted(values), sorted(tuples)
    elif getTuples == "only":
        return sorted(tuples)
    else:
        return sorted(values)
Esempio n. 35
0
def webgisfilter(mapserv, layer, maxfeatures=None, startindex=None, bbox=None,
        filters=None):
    """WebGIS WFS client: fetch features of a layer and strip their geometry.

    Each filter dict should look like::

        {
            'attribute': ATTRIBUTE_NAME, # e.g. 'NAME'
            'operator': OPERATOR, # e.g. '='
            'value': VALUE # e.g. 'Prague'
        }

    Operators: = != ~ IN

    :param str mapserv: url to mapserver
    :param str layer: layer name
    :param int maxfeatures: number of returned features
    :param int startindex: starting feature index
    :param tuple.<dict> filters: tuple of filters
    :return: json-encoded result with geometry removed from each feature
    :rtype: dict
    """

    mywfs = WebFeatureService(url=mapserv, version='1.0.0')
    fes = None
    if filters:
        # bbox and filter cannot both be passed to getfeature, so fold the
        # bounding box into the filter document.
        if bbox:
            filters.append({ 'operator':'BBOX', 'value': bbox})
        fes = get_filter_root(get_filter_fes(filters))
        # NOTE(review): tostring() without encoding returns bytes on Python 3;
        # the sibling implementation uses encoding='unicode'. Confirm which
        # form the owslib version in use expects.
        fes = etree.tostring(fes)

    if bbox and not filters:
        fes = None
    elif not bbox and filters:
        bbox = None
    elif bbox and filters:
        bbox = None

    layer_data = mywfs.getfeature(typename=[layer],
                                  filter=fes,
                                  bbox=bbox,
                                  featureid=None,
                                  outputFormat="GeoJSON",
                                  maxfeatures=maxfeatures,
                                  startindex=startindex)
    data = json.load(layer_data)

    # Bug fix: pop with a default so features without a geometry key no
    # longer raise KeyError (matches the other webgisfilter implementation).
    for feature in data['features']:
        feature.pop('geometry', None)

    return data
Esempio n. 36
0
def GetWFSLayer(u, p):
  """Download a WFS layer as a zipped shapefile and unzip it locally.

  :param u: request string of the form "<server_url>?<layer_name>"
  :param p: destination directory path (with trailing separator)
  :return: None; the zip and its extracted files are written under ``p``
  """
  start = time.time()
  # Separate the WFS URL & the layer name
  split_url = u.split('?')
  server_url = split_url[0]
  ows = server_url[-3:]  # last 3 chars of the base URL, e.g. 'wfs'
  print 'The OGC standard is: '+ ows
  
  spacename_wfs = split_url[1]
  tmp_chemin = p + spacename_wfs+"_.zip"
  chemin = tmp_chemin[:-5]+".zip"
  
  # Skip the download entirely if the final zip already exists (cache).
  if not os.path.exists(chemin):
    # Get the vector layer using OGC WFS standard
    wfs = WebFeatureService(server_url ,version='1.0.0')
    getFeature = wfs.getfeature(typename = [spacename_wfs], outputFormat ="shape-zip") 
    
    print('Downloading... : '+ spacename_wfs)
    print("From: "+ server_url)
    
    # Download the zipped shapefile
    data = getFeature.read()
    f = open(tmp_chemin ,'wb')
    f.write(data)
    f.close()
    
    # Copy the archive entry by entry, dropping .txt & .cst files
    zin = zipp(tmp_chemin, 'r')
#    zin.extractall(p)
    zout = zipp(chemin, 'w')
    for item in zin.infolist():
      buffer = zin.read(item.filename)
      ext = item.filename[-4:]
      if (ext != '.txt' and ext != '.cst'):
          zout.writestr(item, buffer)
          
    zout.close()
    zin.close()
    os.remove(tmp_chemin)
    
    # Unzip the cleaned shapefile archive into the destination directory
    os.system("unzip "+ chemin + ' -d '+ p)
  
  # Report elapsed wall-clock time
  temps =time.time() - start
  tps = round(temps,2)
  temps_ms = str(tps)
  
  print "GetWFSLayer download time : " + temps_ms +" ms"
  
  return
Esempio n. 37
0
    def __init__(self, url='http://geo.woudc.org/ows', timeout=30):
        """
        Initialize a WOUDC Client.

        :param url: URL of the WOUDC OWS endpoint
                    (default: ``http://geo.woudc.org/ows``)
        :param timeout: request timeout in seconds (default: 30)
        :returns: instance of pywoudc.WoudcClient
        """

        self.url = url
        """The URL of the WOUDC data service"""

        self.timeout = timeout
        """Time (in seconds) after which requests should timeout"""

        self.about = 'http://woudc.org/about/data-access.php'
        """The About Data Access page"""

        self.outputformat = 'application/json; subtype=geojson'
        """The default outputformat when requesting WOUDC data"""

        self.maxfeatures = 25000
        """The default limit of records to return"""

        LOGGER.info('Contacting %s', self.url)
        # NOTE(review): constructing the WFS client presumably issues a
        # GetCapabilities request here, so network access happens eagerly in
        # __init__ -- confirm against the owslib version in use.
        self.server = WebFeatureService(self.url, '1.1.0',
                                        timeout=self.timeout)
        """The main WOUDC server"""
Esempio n. 38
0
class HandleWFS():
    """Thin convenience wrapper around an OWSLib WebFeatureService.

    Caches the service identification, picks a layer from a CKAN-style
    resource description, builds GetFeature URLs, and converts layers to
    GeoJSON / Recline-style JSON via OGR.
    """

    def __init__(self, url, version="1.0.0"):
        # Connects to the WFS endpoint and caches identification metadata.
        self.wfs = WebFeatureService(url, version=version)
        self.type = self.wfs.identification.type
        self.version = self.wfs.identification.version
        self.title = self.wfs.identification.title
        self.abstract = self.wfs.identification.abstract

    def do_layer_check(self, data_dict):
        """Return the layer named in data_dict["resource"]["layer"] if the
        service offers it (exact or lower-case match), otherwise the
        service's first layer; implicitly None when any lookup fails."""
        layer_list = list(self.wfs.contents)
        resource = data_dict.get("resource", {})
        this_layer = resource.get("layer")
        try:
            first_layer = layer_list[0]
            if this_layer in layer_list:
                return this_layer
            elif this_layer.lower() in layer_list:
                return this_layer.lower()
            else:
                return first_layer
        except Exception:
            # e.g. empty layer list, or this_layer is None -> fall through
            pass

    def build_url(self, typename=None, method='{http://www.opengis.net/wfs}Get',
                  operation='{http://www.opengis.net/wfs}GetFeature', maxFeatures=None):
        """Build a GetFeature request URL for *typename* against the
        operation endpoint advertised by the service capabilities."""
        service_url = self.wfs.getOperationByName(operation).methods[method]['url']
        request = {'service': 'WFS', 'version': self.version}
        try:
            assert len(typename) > 0
            request['typename'] = ','.join([typename])
        except Exception:
            # NOTE(review): joining the *string* 'ERROR_HERE' produces
            # 'E,R,R,O,R,_,H,E,R,E' -- this looks unintended; confirm the
            # intended sentinel value.
            request['typename'] = ','.join('ERROR_HERE')
            pass

        if maxFeatures: request['maxfeatures'] = str(maxFeatures)

        encoded_request = "&".join("%s=%s" % (key,value) for (key,value) in request.items())
        url = service_url + "&" + encoded_request
        return url

    def make_geojson(self, data_dict):
        """Fetch up to 100 features of the layer chosen by do_layer_check
        and return them as a list of GeoJSON feature objects (via OGR)."""
        geojson = []
        type_name = self.do_layer_check(data_dict)
        wfs_url = self.build_url(type_name, maxFeatures=100)
        source = ogr.Open(wfs_url)
        layer = source.GetLayerByIndex(0)
        for feature in layer:
            geojson.append(feature.ExportToJson(as_object=True))
        return geojson

    def make_recline_json(self, data_dict):
        """Flatten make_geojson output: one dict per feature holding its
        properties plus the geometry under a 'geometry' key."""
        recline_json = []
        geojson = self.make_geojson(data_dict)
        for i in geojson:
            properties = i['properties']
            properties.update(dict(geometry=i['geometry']))
            recline_json.append(properties)
        return recline_json
Esempio n. 39
0
def load_layer_data(request, template='layers/layer_detail.html'):
    """Return the distinct, sorted values of selected attributes of a layer.

    Expects POSTed ``json_data`` with keys ``layer_name``
    ("<workspace>:<name>") and ``filtered_attributes``; queries the
    configured OGC server over WFS and responds with JSON of the form
    ``{"feature_properties": {attribute: [values, ...]}}``.
    """
    context_dict = {}
    data_dict = json.loads(request.POST.get('json_data'))
    layername = data_dict['layer_name']
    filtered_attributes = data_dict['filtered_attributes']
    workspace, name = layername.split(':')
    # NOTE(review): the endpoint path says 'wms' although the request below
    # is WFS -- confirm the OGC server serves both from this location.
    location = "{location}{service}".format(** {
        'location': settings.OGC_SERVER['default']['LOCATION'],
        'service': 'wms',
    })

    try:
        username = settings.OGC_SERVER['default']['USER']
        password = settings.OGC_SERVER['default']['PASSWORD']
        wfs = WebFeatureService(location, version='1.1.0', username=username, password=password)
        response = wfs.getfeature(typename=name, propertyname=filtered_attributes, outputFormat='application/json')
        x = response.read()
        x = json.loads(x)
        features_response = json.dumps(x)
        decoded = json.loads(features_response)
        decoded_features = decoded['features']
        # Seed one empty list per property key from the first feature
        properties = {}
        for key in decoded_features[0]['properties']:
            properties[key] = []

        # loop the dictionary based on the values on the list and add the properties
        # in the dictionary (if doesn't exist) together with the value
        for i in range(len(decoded_features)):

            for key, value in decoded_features[i]['properties'].iteritems():
                if value != '' and isinstance(value, (string_types, int, float)):
                    properties[key].append(value)

        # Keep only distinct values per attribute, sorted ascending
        for key in properties:
            properties[key] = list(set(properties[key]))
            properties[key].sort()

        context_dict["feature_properties"] = properties
    except:
        # Best effort: any failure leaves feature_properties out of the reply
        print "Possible error with OWSLib."
    return HttpResponse(json.dumps(context_dict), content_type="application/json")
Esempio n. 40
0
def get_features(wfs_url, layer, verbose=False):
    """Fetch one feature of a layer from a Web Feature Service as GeoJSON.

    Input:
       wfs_url: URL of the web feature service, e.g. http://www.aifdr.org:8080/geoserver/ows?
       layer: feature layer name as <workspace>:<layer>
       verbose [optional]: flag controlling the verbosity level. Default is False.

    Output:
       GEOJSON dictionary, or None when the layer is not offered by the service.
    """
    if verbose:
        print('Retrieving %s from %s' % (layer, wfs_url))

    service = WebFeatureService(wfs_url, version='1.0.0')

    # Unknown layer: signal with None rather than raising.
    if layer not in service.contents:
        return None

    reply = service.getfeature(typename=[layer], outputFormat='json',
                               maxfeatures=1)
    return geojson.loads(reply.read())
def get_wfs(server_url, spacename_wfs):
    """Download a layer from a WFS server into /home/tmp as a GML file.

    :param server_url: base URL of the OGC server (``/wfs/`` is appended)
    :param spacename_wfs: layer (feature type) name; also used as file name
    :return: path of the cached GML file
    """

    chemin = '/home/tmp/'+spacename_wfs+'.gml'

    # Reuse the cached file if this layer was already downloaded
    if not os.path.exists(chemin):
        
        wfs = WebFeatureService(server_url +"/wfs/",version='1.0.0')
    
        vector = spacename_wfs
        
        print "Downloading the WFS: "+spacename_wfs
        print "From: "+server_url
        response = wfs.getfeature(typename =[vector])    
    
        # Write the raw GML response to disk
        data = response.read()
        f = open(chemin,'wb')
        f.write(data)
        f.close()
        print "Done"

    return chemin
Esempio n. 42
0
    #     print 'Layer: %s, Features: %s, SR: %s' % (layer.GetName(), layer.GetFeatureCount(), srs.ExportToWkt()[0:50])
    #     print layer.GetExtent()
    #     feat = layer.GetNextFeature()
    #     geom = feat.GetGeometryRef()
    #     print(geom.GetGeometryName())
        
    #     layer_def = layer.GetLayerDefn()
    #     print(layer_def.GetFieldCount())
    #     layer = None

    # OGR_end = time.clock()
    # print(OGR_end - OGR_start)

    OWS_start = time.clock()
    try:
        wfs = WebFeatureService(WFS_URL, version="2.0.0")
    except AttributeError:
        wfs = WebFeatureService(WFS_URL, version="1.1.0")

    print("\n\tGlobal: ", dir(wfs))
    print(wfs.version)
    print(wfs.url)
    print(wfs.items()[0][1])
    help(wfs.getSRS)
    print(wfs.timeout)

    print("\n\tIdentification: ", dir(wfs.identification))
    print(wfs.identification.type)
    print(wfs.identification.title)
    print(wfs.identification.service)
    abstract = wfs.identification.abstract
Esempio n. 43
0
def getMapunitFeaturesForBoundingBox(outputDir, bbox, \
                                     mapunitExtended=False, tileBbox=False):
    """ Query USDA Soil Data Mart for SSURGO Mapunit features with a given bounding box.
        Features will be written to one or more GML files, one file for each bboxTile tile,
        stored in the specified output directory. The filename will be returned as a string.
        Will fetch SSURGO tabular data (see ssurgolib.attributequery.attributeList for a list
        of attributes) and join those data to the features in the GML files(s).
    
        @note Will silently exit if features already exist.
    
        @param outputDir String representing the absolute/relative path of the directory into which features should be written
        @param bbox A dict containing keys: minX, minY, maxX, maxY, srs, where srs='EPSG:4326'
        @param mapunitExtended True if extended mapunit features should be fetched.
        @param tileBoundingBox True if bounding box should be tiled if extent exceeds featurequery.MAX_SSURGO_EXTENT
        
        @return A list of strings representing the name of the GML file(s) to which the mapunit features were saved.
        
        @exception IOError if output directory is not a directory
        @exception IOError if output directory is not writable
        @exception Exception if bounding box area is greater than MAX_SSURGO_EXTENT
    """
    # Validate the output directory before any network work
    if not os.path.isdir(outputDir):
        raise IOError(errno.ENOTDIR, "Output directory %s is not a directory" % (outputDir,))
    if not os.access(outputDir, os.W_OK):
        raise IOError(errno.EACCES, "Not allowed to write to output directory %s" % (outputDir,))
    outputDir = os.path.abspath(outputDir)

    if mapunitExtended:
        typeName = 'MapunitPolyExtended'
    else:
        typeName = 'MapunitPoly'

    # Either tile the bounding box, or enforce the single-request size cap
    if tileBbox:
        bboxes = tileBoundingBox(bbox, MAX_SSURGO_EXTENT)
        sys.stderr.write("Dividing bounding box %s into %d tiles\n" % (str(bbox), len(bboxes)))
    else:
        if calculateBoundingBoxAreaSqMeters(bbox) > MAX_SSURGO_EXTENT:
            raise Exception("Bounding box area is greater than %f sq. meters" % (MAX_SSURGO_EXTENT,))
        bboxes = [bbox]
    
    gmlFiles = []
    
    # Fetch + join attributes independently for every tile
    for bboxTile in bboxes:
        minX = bboxTile['minX']; minY = bboxTile['minY']; maxX = bboxTile['maxX']; maxY = bboxTile['maxY']
        bboxLabel = str(minX) + "_" + str(minY) + "_" + str(maxX) + "_" + str(maxY)
    
        gmlFilename = "%s_bbox_%s-attr.gml" % (typeName, bboxLabel)
        gmlFilepath = os.path.join(outputDir, gmlFilename)
    
        # Silently skip tiles whose joined GML already exists (see @note)
        if not os.path.exists(gmlFilepath):
            sys.stderr.write("Fetching SSURGO data for sub bboxTile %s\n" % bboxLabel)
        
            # Fetch features intersecting the tile via a WFS BBOX filter
            wfs = WebFeatureService(WFS_URL, version='1.0.0')
            filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)
            gml = wfs.getfeature(typename=(typeName,), filter=filter, propertyname=None)
    
            # Write intermediate GML to a file
            intGmlFilename = "%s_bbox_%s.gml" % (typeName, bboxLabel)
            intGmlFilepath = os.path.join(outputDir, intGmlFilename)
            out = open(intGmlFilepath, 'w')
            out.write(gml.read())
            out.close()
            
            # Parse GML to get list of MUKEYs
            gmlFile = open(intGmlFilepath, 'r')
            ssurgoFeatureHandler = SSURGOFeatureHandler()
            xml.sax.parse(gmlFile, ssurgoFeatureHandler)
            gmlFile.close()
            mukeys = ssurgoFeatureHandler.mukeys
            
            # Get attributes (ksat, texture, %clay, %silt, and %sand) for all components in MUKEYS
            attributes = getParentMatKsatTexturePercentClaySiltSandForComponentsInMUKEYs(mukeys)
            
            # Compute weighted average of soil properties across all components in each map unit
            avgAttributes = computeWeightedAverageKsatClaySandSilt(attributes)
            
            # Join map unit component-averaged soil properties to attribute table in GML file
            gmlFile = open(intGmlFilepath, 'r')
            joinedGmlStr = joinSSURGOAttributesToFeaturesByMUKEY(gmlFile, typeName, avgAttributes)
            gmlFile.close()
            
            # Write Joined GML to a file
            out = open(gmlFilepath, 'w')
            out.write(joinedGmlStr)
            out.close()
            
            # Delete intermediate GML file
            os.unlink(intGmlFilepath)
        
        gmlFiles.append(gmlFilename)
    
    # TODO: join tiled data if tileBbox
        
    return gmlFiles
    
    
Esempio n. 44
0
 def __init__(self, url, version="1.0.0"):
     self.wfs = WebFeatureService(url, version=version)
     self.type = self.wfs.identification.type
     self.version = self.wfs.identification.version
     self.title = self.wfs.identification.title
     self.abstract = self.wfs.identification.abstract
Esempio n. 45
0
def _getMapunitFeaturesForBoundingBoxTile(config, outputDir, bboxTile, typeName, currTile, numTiles):
    """Fetch SSURGO mapunit features for one bounding-box tile and join
    component-averaged soil attributes to them as GeoJSON.

    @param config ConfigParser containing the 'GDAL/OGR' section (used by the GML->GeoJSON conversion)
    @param outputDir Directory into which intermediate and final files are written
    @param bboxTile Dict with keys minX, minY, maxX, maxY (EPSG:4326)
    @param typeName WFS feature type to fetch, e.g. 'MapunitPolyExtended'
    @param currTile 1-based index of this tile, for progress messages
    @param numTiles Total number of tiles

    @return Path of the joined GeoJSON file, or None when the joined GML
            already existed (tile silently skipped)
    @exception Exception if downloads keep failing or no features are returned
    """
    minX = bboxTile['minX']; minY = bboxTile['minY']; maxX = bboxTile['maxX']; maxY = bboxTile['maxY']
    bboxLabel = str(minX) + "_" + str(minY) + "_" + str(maxX) + "_" + str(maxY)

    gmlFilename = "%s_bbox_%s-attr.gml" % (typeName, bboxLabel)
    gmlFilepath = os.path.join(outputDir, gmlFilename)
    geoJSONLayername = "%s_bbox_%s-attr" % (typeName, bboxLabel)

    if not os.path.exists(gmlFilepath):
        sys.stderr.write("Fetching SSURGO data for tile %s of %s, bbox: %s\n" % (currTile, numTiles, bboxLabel))
        sys.stderr.flush()

        wfs = WebFeatureService(WFS_URL, version='1.1.0', timeout=SSURGO_WFS_TIMEOUT_SEC)
        filter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)

        intGmlFilename = "%s_bbox_%s.gml" % (typeName, bboxLabel)
        intGmlFilepath = os.path.join(outputDir, intGmlFilename)
        # NOTE(review): the handler is shared across download attempts; if a
        # retry happens, mukeys parsed from a partial download may accumulate
        # with those of the successful parse -- confirm the handler resets.
        ssurgoFeatureHandler = SSURGOFeatureHandler()

        downloadComplete = False
        downloadAttempts = 0
        while not downloadComplete:
            try:
                gml = wfs.getfeature(typename=typeName, filter=filter, propertyname=None)

                # Write intermediate GML to a file
                with open(intGmlFilepath, 'w') as out:
                    out.write(gml.read())

                # Parse GML to get list of MUKEYs
                with open(intGmlFilepath, 'r') as gmlFile:
                    xml.sax.parse(gmlFile, ssurgoFeatureHandler)
                downloadComplete = True
            except xml.sax.SAXParseException as e:
                # A truncated/malformed download; retry up to the limit
                downloadAttempts += 1
                if downloadAttempts > SSURGO_GML_MAX_DOWNLOAD_ATTEMPTS:
                    raise Exception("Giving up on downloading tile {0} of {1} after {2} attempts.  There may be something wrong with the web service.  Try again later.".format(currTile, numTiles, downloadAttempts))
                else:
                    # Bug fix: the error placeholder was {0}, which printed the
                    # tile number instead of the parse error; also append the
                    # newline the other stderr messages carry.
                    sys.stderr.write("Initial download of tile {0} of {1} possibly incomplete, error: {2}.  Retrying...\n".format(currTile, numTiles, str(e)))
                    sys.stderr.flush()

        mukeys = ssurgoFeatureHandler.mukeys

        if len(mukeys) < 1:
            raise Exception("No SSURGO features returned from WFS query.  SSURGO GML format may have changed.\nPlease contact the developer.")

        # Get attributes (ksat, texture, %clay, %silt, and %sand) for all components in MUKEYS
        attributes = getParentMatKsatTexturePercentClaySiltSandForComponentsInMUKEYs(mukeys)

        # Compute weighted average of soil properties across all components in each map unit
        avgAttributes = computeWeightedAverageKsatClaySandSilt(attributes)

        # Convert GML to GeoJSON so that we can add fields easily (GDAL 1.10+ validates GML schema
        #   and won't let us add fields)
        tmpGeoJSONFilename = convertGMLToGeoJSON(config, outputDir, intGmlFilepath, geoJSONLayername,
                                                 flip_gml_coords=True)
        tmpGeoJSONFilepath = os.path.join(outputDir, tmpGeoJSONFilename)

        # Join map unit component-averaged soil properties to attribute table in GeoJSON file
        with open(tmpGeoJSONFilepath, 'r') as tmpGeoJSONFile:
            geojson = json.load(tmpGeoJSONFile)
        joinSSURGOAttributesToFeaturesByMUKEY_GeoJSON(geojson, typeName, avgAttributes)

        # Write joined GeoJSON to a file
        with open(tmpGeoJSONFilepath, 'w') as out:
            json.dump(geojson, out)

        # Delete intermediate GML file
        os.unlink(intGmlFilepath)

        return tmpGeoJSONFilepath
Esempio n. 46
0
# ## Q1 - Can we discover, access, and overlay Important Bird Area polygons (and therefore other similar layers for additional important resource areas) on modeled datasets in the Bering Sea?

# <markdowncell>

# <div class="error"><strong>Discovery is not possible</strong> - No Important Bird Area polygons are not discoverable at this time.  They are, however, available in a GeoServer 'known' to us.  This should be fixed.  The WFS service should be added to a queryable CSW.</div>

# <markdowncell>

# ##### Load 'known' WFS endpoint with Important Bird Area polygons

# <codecell>

from owslib.wfs import WebFeatureService
# 'Known' GeoServer endpoint hosting the Audubon Important Bird Area layers
known_wfs = "http://solo.axiomalaska.com/geoserver/audubon/ows"
wfs = WebFeatureService(known_wfs, version='1.0.0')
# Python 2 print statement: list every layer advertised by the service
print sorted(wfs.contents.keys())

# <markdowncell>

# ##### We already know that the 'audubon:audubon_ibas' layer is Important Bird Areas.  Request 'geojson' response from the layer

# <codecell>

import geojson
# Fetch a single feature as GeoJSON in WGS84 and parse it into geojson objects
geojson_response = wfs.getfeature(typename=['audubon:audubon_ibas'], maxfeatures=1, outputFormat="application/json", srsname="urn:x-ogc:def:crs:EPSG:4326").read()
feature = geojson.loads(geojson_response)

# <markdowncell>

# ##### Convert to Shapely geometry objects
Esempio n. 47
0
def getMapunitFeaturesForBoundingBox(config, outputDir, bbox, tileBbox=False, t_srs='EPSG:4326'):
    """ Query USDA Soil Data Mart for SSURGO MapunitPolyExtended features with a given bounding box.
        Features will be written to one or more shapefiles, one file for each bboxTile tile,
        stored in the specified output directory. The filename will be returned as a string.
        Will fetch SSURGO tabular data (see ssurgolib.attributequery.ATTRIBUTE_LIST for a list
        of attributes) and join those data to the features in the final shapefiles(s).
    
        @note If the attributed GML for a tile already exists, fetching is skipped and the
        shapefile from the prior run is assumed to exist.
    
        @param config ConfigParser containing the section 'GDAL/OGR' and option 'PATH_OF_OGR2OGR'
        @param outputDir String representing the absolute/relative path of the directory into which features should be written
        @param bbox A dict containing keys: minX, minY, maxX, maxY, srs, where srs='EPSG:4326'
        @param tileBbox True if bounding box should be tiled if extent exceeds featurequery.MAX_SSURGO_EXTENT
        @param t_srs String representing the spatial reference system of the output shapefiles, of the form 'EPSG:XXXX'
        
        @return A list of strings representing the name of the shapefile(s) to which the mapunit features were saved.
        
        @exception IOError if output directory is not a directory
        @exception IOError if output directory is not writable
        @exception Exception if bounding box area is greater than MAX_SSURGO_EXTENT
        @exception Exception if no MUKEYs were returned
    """
    if not os.path.isdir(outputDir):
        raise IOError(errno.ENOTDIR, "Output directory %s is not a directory" % (outputDir,))
    if not os.access(outputDir, os.W_OK):
        raise IOError(errno.EACCES, "Not allowed to write to output directory %s" % (outputDir,))
    outputDir = os.path.abspath(outputDir)

    typeName = 'MapunitPolyExtended'

    if tileBbox:
        bboxes = tileBoundingBox(bbox, MAX_SSURGO_EXTENT)
        sys.stderr.write("Dividing bounding box %s into %d tiles\n" % (str(bbox), len(bboxes)))
    else:
        if calculateBoundingBoxArea(bbox, t_srs) > MAX_SSURGO_EXTENT:
            raise Exception("Bounding box area is greater than %f sq. meters" % (MAX_SSURGO_EXTENT,))
        bboxes = [bbox]
    
    outFiles = []
    
    for bboxTile in bboxes:
        minX = bboxTile['minX']; minY = bboxTile['minY']; maxX = bboxTile['maxX']; maxY = bboxTile['maxY']
        bboxLabel = str(minX) + "_" + str(minY) + "_" + str(maxX) + "_" + str(maxY)
    
        gmlFilename = "%s_bbox_%s-attr.gml" % (typeName, bboxLabel)
        gmlFilepath = os.path.join(outputDir, gmlFilename)
        intGmlFilename = "%s_bbox_%s.gml" % (typeName, bboxLabel)
        intGmlFilepath = os.path.join(outputDir, intGmlFilename)
        # Base name shared by the intermediate GML and the final shapefile
        filename = os.path.splitext(intGmlFilename)[0]
    
        if os.path.exists(gmlFilepath):
            # Bug fix: the original skipped the whole fetch but still referenced
            # shpFilename below, raising UnboundLocalError on the first tile (or
            # silently reusing the previous tile's name).  Assume the shapefile
            # from the prior run.
            # NOTE(review): assumes convertGeoJSONToShapefile names its output
            # '<basename>.shp' -- confirm against its implementation.  Also note
            # this code never writes the '-attr.gml' file it checks for.
            shpFilename = "%s.shp" % (filename,)
        else:
            sys.stderr.write("Fetching SSURGO data for sub bboxTile %s\n" % bboxLabel)
        
            wfs = WebFeatureService(WFS_URL, version='1.0.0')
            # Renamed from 'filter' to avoid shadowing the builtin; the WFS
            # keyword argument is still 'filter'.
            bboxFilter = "<Filter><BBOX><PropertyName>Geometry</PropertyName> <Box srsName='EPSG:4326'><coordinates>%f,%f %f,%f</coordinates> </Box></BBOX></Filter>" % (minX, minY, maxX, maxY)
            gml = wfs.getfeature(typename=(typeName,), filter=bboxFilter, propertyname=None)
    
            # Write intermediate GML to a file (context manager guarantees close)
            with open(intGmlFilepath, 'w') as out:
                out.write(gml.read())
            
            # Parse GML to get list of MUKEYs
            ssurgoFeatureHandler = SSURGOFeatureHandler()
            with open(intGmlFilepath, 'r') as gmlFile:
                xml.sax.parse(gmlFile, ssurgoFeatureHandler)
            mukeys = ssurgoFeatureHandler.mukeys
            
            if len(mukeys) < 1:
                raise Exception("No SSURGO features returned from WFS query.  SSURGO GML format may have changed.\nPlease contact the developer.")
            
            # Get attributes (ksat, texture, %clay, %silt, and %sand) for all components in MUKEYS
            attributes = getParentMatKsatTexturePercentClaySiltSandForComponentsInMUKEYs(mukeys)
            
            # Compute weighted average of soil properties across all components in each map unit
            avgAttributes = computeWeightedAverageKsatClaySandSilt(attributes)
            
            # Convert GML to GeoJSON so that we can add fields easily (GDAL 1.10+ validates GML schema 
            #   and won't let us add fields)
            tmpGeoJSONFilename = convertGMLToGeoJSON(config, outputDir, intGmlFilepath, typeName)
            tmpGeoJSONFilepath = os.path.join(outputDir, tmpGeoJSONFilename)
            
            # Join map unit component-averaged soil properties to attribute table in GeoJSON file
            with open(tmpGeoJSONFilepath, 'r') as tmpGeoJSONFile:
                geojson = json.load(tmpGeoJSONFile)
            joinSSURGOAttributesToFeaturesByMUKEY_GeoJSON(geojson, typeName, avgAttributes)
            
            # Write joined GeoJSON back to the same file
            with open(tmpGeoJSONFilepath, 'w') as out:
                json.dump(geojson, out)
            
            # Convert GeoJSON to shapefile
            shpFilename = convertGeoJSONToShapefile(config, outputDir, tmpGeoJSONFilepath, filename, t_srs=t_srs)
            
            # Delete intermediate files
            os.unlink(intGmlFilepath)
            os.unlink(tmpGeoJSONFilepath)
        
        outFiles.append(shpFilename)
    
    # TODO: join tiled data if tileBbox
        
    return outFiles
    
    
Esempio n. 48
0
from owslib.wfs import WebFeatureService

# Connect to the Bavarian geoserver's WFS 1.1.0 endpoint
wfs11 = WebFeatureService(url='http://geoserv.weichand.de:8080/geoserver/wfs', version='1.1.0')
print(wfs11.identification.title)

# Plain loop instead of a list comprehension used only for its print side
# effect (the comprehension built and discarded a list of Nones).
for operation in wfs11.operations:
    print(operation.name)

print(list(wfs11.contents))

# Fetch municipality features inside a Gauss-Krueger (EPSG:31468) bounding box
response = wfs11.getfeature(typename='bvv:gmd_ex', bbox=(4500000,5500000,4500500,5500500), srsname='urn:x-ogc:def:crs:EPSG:31468')

# Context manager guarantees the file is closed even if read()/write() raises
with open('/tmp/data.gml', 'wb') as out:
    out.write(bytes(response.read(), 'UTF-8'))
Esempio n. 49
0
#!/usr/bin/python
# -*- coding: UTF-8 -*-
__author__ = "Juergen Weichand"


from owslib.wfs import WebFeatureService

wfs = WebFeatureService(url="http://geoserv.weichand.de:8080/geoserver/wfs", version="2.0.0", timeout=30)

# List StoredQueries advertised by the WFS 2.0.0 service
print("\nStoredQueries for %s" % wfs.identification.title)
for storedquery in wfs.storedqueries:
    print(storedquery.id, storedquery.title)

# List parameters for a given StoredQuery
storedquery = wfs.storedqueries[5]
print("\nStoredQuery parameters for %s" % storedquery.id)
for parameter in storedquery.parameters:
    print(parameter.name, parameter.type)

# GetFeature via StoredQuery (municipality looked up by its key)
print("\nDownload data from %s" % wfs.identification.title)
response = wfs.getfeature(
    storedQueryID="GemeindeByGemeindeschluesselEpsg31468", storedQueryParams={"gemeindeschluessel": "09162000"}
)
# Context manager guarantees the file is closed even if read()/write() raises
with open("/tmp/test-storedquery.gml", "wb") as out:
    out.write(response.read())
print("... done")
Esempio n. 50
0
# Read shapefile data from USGS ScienceBase WFS 1.1 service in JSON format
# (shapefile was previously uploaded to ScienceBase, creating the WFS service)

# <codecell>

# getCapabilities
#https://www.sciencebase.gov/catalogMaps/mapping/ows/5342c54be4b0aa151574a8dc?service=wfs&version=1.0.0&request=GetCapabilities
# containes LatLongBoundingBox for each feature

# <codecell>

# some USGS ScienceBase Geoserver WFS endpoints:
#endpoint='https://www.sciencebase.gov/catalogMaps/mapping/ows/5342c54be4b0aa151574a8dc'
endpoint='https://www.sciencebase.gov/catalogMaps/mapping/ows/5342c5fce4b0aa151574a8ed'
#endpoint='https://www.sciencebase.gov/catalogMaps/mapping/ows/5342e124e4b0aa151574a969'
# Connect with WFS 1.1.0 (Python 2 notebook: print statements below)
wfs = WebFeatureService(endpoint, version='1.1.0')
print wfs.version

# <codecell>

# List the feature types (layers) offered by this service
shp = wfs.contents.keys()
print shp

# <codecell>

# WGS84 bounding box of the 'sb:footprint' layer
a = wfs.contents['sb:footprint']
b = a.boundingBoxWGS84

# <codecell>

# Drop the footprint layer, keeping only the uploaded shapefile layers
shp = filter(lambda a: a != 'sb:footprint', shp)
Esempio n. 51
0
    def build_wfs_url(self, api_layer, srv_details, rsc_type="ds_dyn_lyr_srv", mode="complete"):
        """Reformat the input WFS url so it fits QGIS criteria.

        Tests whether all the needed information is provided in the url, and
        then builds the url in the syntax understood by QGIS.

        api_layer   -- layer metadata dict (reads 'titles' and 'id')
        srv_details -- service metadata dict (reads 'path', the WFS base URL)
        rsc_type    -- resource type tag (not used in this method's body)
        mode        -- 'quicky' builds a QgsDataSourceURI directly;
                       'complete' validates the service via OWSLib first

        Returns a list describing the layer to add on success, a tuple
        starting with 0 and an error message on failure, or None for an
        unknown mode.
        """
        # local variables
        layer_title = api_layer.get("titles")[0].get("value", "WFS Layer")
        wfs_url_getcap = srv_details.get("path")\
                         + "?request=GetCapabilities&service=WFS"
        # GeoServer gets special layer-name handling further below
        geoserver = "geoserver" in wfs_url_getcap
        layer_id = api_layer.get("id")
        # strip a leading '{namespace}' prefix from the layer id, if present
        layer_name = re.sub('\{.*?}', "", layer_id)
        # handling WFS namespaces
        if "{" in layer_id:
            namespace = layer_id[layer_id.find("{") + 1:layer_id.find("}")]
            logging.debug("WFS - Namespace: " + namespace)
        else:
            namespace = ""

        if mode == "quicky":
            # let's try a quick & dirty url build: no GetCapabilities round-trip,
            # just assemble a QGIS data-source URI from what we already know
            srs_map = plg_tools.get_map_crs()
            wfs_url_base = srv_details.get("path")
            uri = QgsDataSourceURI()
            uri.setParam("url", wfs_url_base)
            uri.setParam("typename", layer_name)
            uri.setParam("version", "auto")
            uri.setParam("srsname", srs_map)
            uri.setParam("restrictToRequestBBOX", "1")
            wfs_url_quicky = uri.uri()

            btn_lbl = "WFS : {}".format(layer_title)
            return ["WFS", layer_title, wfs_url_quicky,
                    api_layer, srv_details, btn_lbl]
        elif mode == "complete":
            # Clean, complete but slower way - OWSLib -------------------------
            # per-service cache keyed on the service path
            if srv_details.get("path") == self.cached_wfs.get("srv_path"):
                logger.debug("WFS: already in cache")
            else:
                self.cached_wfs["srv_path"] = srv_details.get("path")
                logger.debug("WFS: new service")
                pass
            # basic checks on service url
            try:
                wfs = WebFeatureService(wfs_url_getcap)
            except ServiceException as e:
                logger.error(str(e))
                return 0, "WFS - Bad operation: " + wfs_url_getcap, str(e)
            except HTTPError as e:
                logger.error(str(e))
                return 0, "WFS - Service not reached: " + wfs_url_getcap, str(e)
            except Exception as e:
                return 0, e

            # check if GetFeature and DescribeFeatureType operation are available
            if not hasattr(wfs, "getfeature") or "GetFeature" not in [op.name for op in wfs.operations]:
                self.cached_wfs["GetFeature"] = 0
                return 0, "Required GetFeature operation not available in: " + wfs_url_getcap
            else:
                self.cached_wfs["GetFeature"] = 1
                logger.info("GetFeature available")
                pass

            if "DescribeFeatureType" not in [op.name for op in wfs.operations]:
                self.cached_wfs["DescribeFeatureType"] = 0
                return 0, "Required DescribeFeatureType operation not available in: " + wfs_url_getcap
            else:
                self.cached_wfs["DescribeFeatureType"] = 1
                logger.info("DescribeFeatureType available")
                pass

            # check if required layer is present
            try:
                wfs_lyr = wfs[layer_name]
            except KeyError as e:
                logger.error("Layer {} not found in WFS service: {}"
                             .format(layer_name,
                                     wfs_url_getcap))
                # GeoServer prefixes layer names with 'workspace:'; retry with
                # the prefixed name when the bare one was not found
                if geoserver and layer_name in [l.split(":")[1] for l in list(wfs.contents)]:
                    layer_name = list(wfs.contents)[[l.split(":")[1]
                                                    for l in list(wfs.contents)].index(layer_name)]
                    try:
                        wfs_lyr = wfs[layer_name]
                    except KeyError as e:
                        return (0,
                                "Layer {} not found in WFS service: {}"
                                .format(layer_name,
                                        wfs_url_getcap),
                                e)

            # SRS definition: gather the map canvas CRS and the QGIS projection
            # settings so we can pick the best matching CRS offered by the layer
            srs_map = plg_tools.get_map_crs()
            srs_lyr_new = qsettings.value("/Projections/defaultBehaviour")
            srs_lyr_crs = qsettings.value("/Projections/layerDefaultCrs")
            srs_qgs_new = qsettings.value("/Projections/projectDefaultCrs")
            srs_qgs_otf_on = qsettings.value("/Projections/otfTransformEnabled")
            srs_qgs_otf_auto = qsettings.value("/Projections/otfTransformAutoEnable")

            # DEV
            # print("CRS: ", wms_lyr.crsOptions,
            #       "For new layers: " + srs_lyr_new + srs_lyr_crs,
            #       "For new projects: " + srs_qgs_new,
            #       "OTF enabled: " + srs_qgs_otf_on,
            #       "OTF smart enabled: " + srs_qgs_otf_auto,
            #       "Map canvas SRS:" + plg_tools.get_map_crs())

            wfs_lyr_crs_epsg = ["{}:{}".format(srs.authority, srs.code)
                                for srs in wfs_lyr.crsOptions]
            self.cached_wfs["CRS"] = wfs_lyr_crs_epsg
            # CRS preference order: map canvas > new-project default >
            # new-layer default > WGS 84 > first CRS the layer offers
            if srs_map in wfs_lyr_crs_epsg:
                logger.debug("It's a SRS match! With map canvas: " + srs_map)
                srs = srs_map
            elif srs_qgs_new in wfs_lyr_crs_epsg\
                 and srs_qgs_otf_on == "false"\
                 and srs_qgs_otf_auto == "false":
                logger.debug("It's a SRS match! With default new project: " + srs_qgs_new)
                srs = srs_qgs_new
            elif srs_lyr_crs in wfs_lyr_crs_epsg and srs_lyr_new == "useGlobal":
                logger.debug("It's a SRS match! With default new layer: " + srs_lyr_crs)
                srs = srs_lyr_crs
            elif "EPSG:4326" in wfs_lyr_crs_epsg:
                logger.debug("It's a SRS match! With standard WGS 84 (EPSG:4326)")
                srs = "EPSG:4326"
            else:
                logger.debug("Map Canvas SRS not available within service CRS.")
                srs = wfs_lyr_crs_epsg[0]

            # Style definition
            # print("Styles: ", wms_lyr.styles, type(wms_lyr.styles))
            # lyr_style = wfs_lyr.styles.keys()[0]
            # print(lyr_style)

            # GetFeature URL: take the first advertised method's endpoint and
            # make sure it ends with '&' so query params can be appended
            wfs_lyr_url = wfs.getOperationByName('GetFeature').methods
            wfs_lyr_url = wfs_lyr_url[0].get("url")
            if wfs_lyr_url[-1] != "&":
                wfs_lyr_url = wfs_lyr_url + "&"
            else:
                pass
            self.cached_wfs["url"] = wfs_lyr_url

            # url construction
            try:
                wfs_url_params = {"SERVICE": "WFS",
                                  "VERSION": "1.0.0",
                                  "TYPENAME": layer_name,
                                  "SRSNAME": srs,
                                  }
                wfs_url_final = wfs_lyr_url + unquote(urlencode(wfs_url_params, "utf8"))
            except UnicodeEncodeError:
                # retry with a latin1-decoded layer name (Python 2 str)
                wfs_url_params = {"SERVICE": "WFS",
                                  "VERSION": "1.0.0",
                                  "TYPENAME": layer_name.decode("latin1"),
                                  "SRSNAME": srs,
                                  }
                wfs_url_final = wfs_lyr_url + unquote(urlencode(wfs_url_params))
            # method ending
            logger.debug(wfs_url_final)
            # logger.debug(uri)
            return ["WFS", layer_title, wfs_url_final]
            # return ["WFS", layer_title, uri.uri()]
        else:
            return None
class HandleWFS():
    """
    Processor for WFS resources.  Requires a getCapabilities URL for the WFS and a WFS version
    passed in as a string.  Default version is '1.1.0'; other supported versions are '1.0.0'
    and '2.0.0'.
    """

    def __init__(self, url, version="1.1.0"):
        # Cache the service handle and its identification metadata.
        self.wfs = WebFeatureService(url, version=version)
        self.type = self.wfs.identification.type
        self.version = self.wfs.identification.version
        self.title = self.wfs.identification.title
        self.abstract = self.wfs.identification.abstract

    def get_service_url(self, operation='{http://www.opengis.net/wfs}GetFeature', method='{http://www.opengis.net/wfs}Get'):
        """Return a specific service URL; GetFeature over HTTP GET is the default.

        For WFS 1.1.0 the service exposes plain (non-namespaced) operation and
        method names, so the defaults are overridden in that case.
        """
        if self.version == "1.1.0":
            operation = "GetFeature"
            method = "Get"

        return self.wfs.getOperationByName(operation).methods[method]['url']

    def do_layer_check(self, data_dict):
        """Resolve the layer to use from data_dict['resource']['layer'].

        Falls back to the first advertised layer when the requested one (in its
        original or lower-cased form) is not offered by the service.  Returns
        None only if the service advertises no layers at all.
        """
        layer_list = list(self.wfs.contents)
        this_layer = data_dict.get("resource", {}).get("layer")
        if not layer_list:
            # Nothing advertised; keep the original 'return None' behavior.
            return None
        first_layer = layer_list[0]
        if this_layer in layer_list:
            return this_layer
        # Bug fix: the original called this_layer.lower() even when no layer
        # was supplied (None), raising AttributeError into a bare except and
        # returning None instead of the documented first-layer fallback.
        if isinstance(this_layer, str) and this_layer.lower() in layer_list:
            return this_layer.lower()
        return first_layer

    def build_url(self, typename=None, method='{http://www.opengis.net/wfs}Get', operation='{http://www.opengis.net/wfs}GetFeature', maxFeatures=None):
        """Build a URL for accessing service data; GetFeature is the default operation."""
        if self.version == "1.1.0":
            operation = "GetFeature"
            method = "Get"

        service_url = self.wfs.getOperationByName(operation).methods[method]['url']
        request = {
            'service': 'WFS',
            'version': self.version
        }

        if self.version == "1.1.0":
            request = {
                'service': 'WFS',
                'version': self.version,
                'request': 'GetFeature'
            }

        # Bug fix: the original fallback was ','.join('ERROR_HERE'), which joins
        # the *characters* of the string and yields 'E,R,R,O,R,_,H,E,R,E'; use
        # the sentinel verbatim.  A non-empty string typename is used as-is
        # (','.join([typename]) was an identity operation for strings).
        if isinstance(typename, str) and typename:
            request['typename'] = typename
        else:
            request['typename'] = 'ERROR_HERE'

        if maxFeatures:
            request['maxfeatures'] = str(maxFeatures)

        encoded_request = "&".join("%s=%s" % (key, value) for (key, value) in request.items())

        # WFS 1.1.0 base URLs here carry no query string yet, so start it with
        # '?'; otherwise append with '&' as before.
        if self.version == "1.1.0":
            return service_url + "?" + encoded_request
        return service_url + "&" + encoded_request

    def make_geojson(self, data_dict):
        """Fetch up to 100 features through OGR's WFS driver and return them as GeoJSON dicts."""
        geojson = []
        type_name = self.do_layer_check(data_dict)
        wfs_url = self.build_url(type_name, maxFeatures=100)
        source = ogr.Open(wfs_url)
        layer = source.GetLayerByIndex(0)
        for feature in layer:
            geojson.append(feature.ExportToJson(as_object=True))
        return geojson

    def make_recline_json(self, data_dict):
        """Convert the GeoJSON features into Recline.js-flavored spatial JSON.

        Recline.js does not consume the GeoJSON specification directly, so each
        feature's properties dict is flattened with its geometry embedded.
        """
        recline_json = []
        for item in self.make_geojson(data_dict):
            properties = item['properties']
            properties.update(dict(geometry=item['geometry']))
            recline_json.append(properties)
        return recline_json
# coding=utf-8
from owslib.wfs import WebFeatureService
# If the https connection does not work, fall back to plain http
url = 'http://gis.nature.cz/arcgis/services/UzemniOchrana/Natura2000/MapServer/WFSServer'
aopk = WebFeatureService(url)

# GetCapabilities request; geturl() shows the resolved capabilities URL
capabilities = aopk.getcapabilities()
print (capabilities.geturl())

# Basic service metadata: provider, title, first keyword, fees, abstract
print (u'{}\n{}\n{}\n{}\n{}'.format(aopk.provider.name,
                                    aopk.identification.title,
                                    aopk.identification.keywords[0],
                                    aopk.identification.fees,
                                    aopk.identification.abstract))

# List every feature type offered by the Natura 2000 service
for rec in aopk.contents:
    print (rec)

# Second service: protected areas (ChranUzemi)
url='http://gis.nature.cz/arcgis/services/UzemniOchrana/ChranUzemi/MapServer/WFSServer'
chranena_uzemi_wfs = WebFeatureService(url)
for rec in chranena_uzemi_wfs.contents:
    print (rec)

identifier = u'ChranUzemi:Zonace_velkoplošného_zvláště_chráněného_území'
print (chranena_uzemi_wfs.contents[identifier])
    
print ('{}\n{}'.format(chranena_uzemi_wfs.contents[identifier].boundingBox,
                       chranena_uzemi_wfs.contents[identifier].crsOptions))

# getfeature does not support UTF-8; even encoding the identifier this way may not work
identifier = 'ChranUzemi:Zonace_velkoplo\xc5\xa1n\xc3\xa9ho_zvl\xc3\xa1\xc5\xa1t\xc4\x9b_chr\xc3\xa1n\xc4\x9bn\xc3\xa9ho_\xc3\xbazem\xc3\xad'
Esempio n. 54
0
class WoudcClient(object):
    """WOUDC Client"""

    def __init__(self, url='http://geo.woudc.org/ows', timeout=30):
        """
        Initialize a WOUDC Client.

        :param url: WOUDC data service endpoint
        :param timeout: request timeout in seconds
        :returns: instance of pywoudc.WoudcClient
        """

        self.url = url
        """The URL of the WOUDC data service"""

        self.timeout = timeout
        """Time (in seconds) after which requests should timeout"""

        self.about = 'http://woudc.org/about/data-access.php'
        """The About Data Access page"""

        self.outputformat = 'application/json; subtype=geojson'
        """The default outputformat when requesting WOUDC data"""

        self.maxfeatures = 25000
        """The default limit of records to return"""

        LOGGER.info('Contacting %s', self.url)
        self.server = WebFeatureService(self.url, '1.1.0',
                                        timeout=self.timeout)
        """The main WOUDC server"""

    def get_station_metadata(self, raw=False):
        """
        Download WOUDC station metadata

        :param raw: a boolean specifying whether to return the raw GeoJSON
                    payload as a string (default is False)
        :returns: dictionary of GeoJSON payload
        """

        LOGGER.info('Fetching station metadata')
        return self._get_metadata('stations', raw)

    def get_instrument_metadata(self, raw=False):
        """
        Download WOUDC instrument metadata

        :param raw: a boolean specifying whether to return the raw GeoJSON
                    payload as a string (default is False)
        :returns: dictionary of GeoJSON payload
        """

        LOGGER.info('Fetching instrument metadata')
        return self._get_metadata('instruments', raw)

    def get_contributor_metadata(self, raw=False):
        """
        Download WOUDC contributors metadata

        :param raw: a boolean specifying whether to return the raw GeoJSON
                    payload as a string (default is False)
        :returns: dictionary of GeoJSON payload
        """

        LOGGER.info('Fetching contributor metadata')
        return self._get_metadata('contributors', raw)

    def get_data(self, typename, **kwargs):
        """
        Download WOUDC observations

        :param bbox: a list representing a bounding box spatial
                     filter (`minx, miny, maxx, maxy`)
        :param temporal: a list of two elements representing a time period
                         (start, end) which accepts the following types:

                          - :py:class:`datetime.date`
                          - :py:class:`datetime.datetime`
                          - string date (e.g. ``2012-10-30``)
                          - string datetime (e.g. ``2012-10-30 11:11:11``)

        :param property_name: a string representing the property name to apply
                              as filter against
        :param property_value: a string representing the value which filters
                               against `property_name`
        :param variables: a list of variables to return
                          as part of the response (default returns all)
        :param sort_property: a string representing the property on which
                              to sort results (default ``instance_datetime``)
        :param sort_order: a string representing sort order of response
                           (``asc`` or ``desc``).  Default is ``asc``.
                           Applied if `sort_property` is specified

        :returns: list of WOUDC observations GeoJSON payload, or None if the
                  query produced no results
        """

        constraints = []
        variables = '*'
        filter_string = None
        bbox = None
        temporal = None
        property_name = None
        property_value = None
        sort_property = None
        sort_order = 'asc'
        startindex = 0
        features = None
        feature_collection = None
        sort_descending = False

        LOGGER.info('Downloading dataset %s', typename)

        LOGGER.debug('Assembling query parameters')
        for key, value in kwargs.items():
            if key == 'bbox':
                bbox = value
            if key == 'temporal':
                temporal = value
            if key == 'property_name':
                property_name = value
            if key == 'property_value':
                property_value = str(value)
            if key == 'variables':
                variables = value
            if key == 'sortby':
                sort_property = value
            if key == 'sort_order':
                sort_order = value

        LOGGER.debug('Assembling constraints')
        if property_name is not None and property_value is not None:
            constraints.append(fes.PropertyIsEqualTo(property_name,
                                                     property_value))
        if bbox is not None:
            if not isinstance(bbox, list) or len(bbox) != 4:
                raise ValueError('bbox must be list of minx, miny, maxx, maxy')

            LOGGER.debug('Setting spatial constraint')
            constraints.append(fes.BBox(bbox))

        if temporal is not None:
            if not isinstance(temporal, list) or len(temporal) != 2:
                msg = 'temporal must be list of start date, end date'
                raise ValueError(msg)

            LOGGER.info('Setting temporal constraint')
            temporal_start = date2string(temporal[0], 'begin')
            temporal_end = date2string(temporal[1], 'end')

            constraints.append(fes.PropertyIsBetween(
                'instance_datetime', temporal_start, temporal_end))

        if sort_order not in ['asc', 'desc']:
            raise ValueError('sort_order must be asc or desc')
        else:
            if sort_order == 'desc':
                sort_descending = True

        if variables != '*':
            if not isinstance(variables, list):
                raise ValueError('variables must be list')

        if constraints:
            LOGGER.debug('Combining constraints')
            flt = fes.FilterRequest()
            if len(constraints) == 1:
                LOGGER.debug('Single constraint')
                filter_string = flt.setConstraint(constraints[0],
                                                  tostring=True)
            if len(constraints) > 1:
                LOGGER.debug('Multiple constraints')
                # nested list: all constraints are combined as one AND group
                filter_string = flt.setConstraintList([constraints],
                                                      tostring=True)

        LOGGER.info('Fetching observations')
        LOGGER.info('Filters:')
        LOGGER.info('bbox: %r', bbox)
        LOGGER.info('temporal: %r', temporal)
        LOGGER.info('attribute query: %r = %r', property_name, property_value)

        # page download and assemble single list of JSON features
        while True:
            LOGGER.debug('Fetching features %d - %d',
                         startindex, startindex + self.maxfeatures)

            payload = self.server.getfeature(
                typename=typename,
                startindex=startindex,
                propertyname=variables,
                maxfeatures=self.maxfeatures,
                filter=filter_string,
                outputFormat=self.outputformat).read()

            LOGGER.debug('Processing response')
            if payload.isspace():
                LOGGER.debug('Empty response. Exiting')
                break

            try:
                features = json.loads(payload)
            except ValueError:
                msg = 'Query produced no results'
                LOGGER.info(msg)
                return None

            len_features = len(features['features'])

            LOGGER.debug('Found %d features', len_features)

            if feature_collection is None:
                feature_collection = features
            else:
                feature_collection['features'].extend(features['features'])

            # a short page means the server has no more records
            if len_features < self.maxfeatures:
                break

            startindex = startindex + self.maxfeatures

        if feature_collection is None:
            # Bug fix: if the very first page came back blank the loop exited
            # with feature_collection still None, and the len() below raised
            # TypeError.  Treat it like any other empty result.
            LOGGER.info('Query produced no results')
            return None

        len_feature_collection = len(feature_collection['features'])
        LOGGER.info('Found %d total features', len_feature_collection)

        if sort_property is not None:
            LOGGER.info('Sorting response by %s', sort_property)
            feature_collection['features'].sort(
                key=lambda e: e['properties'][sort_property],
                reverse=sort_descending)

        return feature_collection

    def _get_metadata(self, typename, raw=False):
        """generic design pattern to download WOUDC metadata"""

        LOGGER.debug('Fetching data from server')
        features = self.server.getfeature(typename=typename,
                                          outputFormat=self.outputformat)

        LOGGER.debug('Processing response')
        if raw:
            LOGGER.info('Emitting raw GeoJSON response')
            return features.read()
        LOGGER.info('Emitting GeoJSON features as list')
        return json.loads(features.read())
def main(OBJECTID, lck, featureclassname, idfieldname, count, length, getArea=False):
    """
    Intersect one species range feature (pulled from the WFS service) with a
    PostGIS feature class and record the intersecting ids in species_wdpa.

    OBJECTID           - the objectid of the feature from the wfs service
    lck                - multiprocess lock
    featureclassname   - the name of the feature class in PostGIS to intersect with the species data
    idfieldname        - the unique field in the feature class that represents the primary key value
    count              - how many features have been processed
    length             - the total number of features to be processed
    getArea            - boolean flag to indicate whether to capture the area of intersection

    NOTE(review): error states are signalled by raising an Exception whose
    message is the UPDATE statement to run against the summary table; the
    except handler then executes it. Unusual, but clearly deliberate - kept.
    NOTE(review): SQL is assembled by string concatenation; speciesid and
    presence originate from the WFS response, so parameterized queries would
    be safer - confirm the dbconnect cursor supports them before changing.
    """
    # Pre-bind the connections so the except/finally blocks are safe even if
    # dbconnect itself raises (originally they could hit an unbound name).
    devconn = None
    liveconn = None
    try:
        multiprocessing.current_process().cnt += 1
        devconn = dbconnect('species_dev')                                                  # connect to Postgresql
        devconn.cur.execute("insert into species_wdpa_analysis_summary (objectid,wfs_requested) values (" + str(OBJECTID) + ",TRUE) RETURNING oid;")
        oid = devconn.cur.fetchone()[0]
        # OGC filter selecting the single feature matching this OBJECTID
        wfsfilter = "<ogc:Filter><ogc:PropertyIsEqualTo><ogc:PropertyName>OBJECTID</ogc:PropertyName><ogc:Literal>" + str(OBJECTID) + "</ogc:Literal></ogc:PropertyIsEqualTo></ogc:Filter>"
        _wfs_ = WebFeatureService(WFS_URL + '?request=GetCapabilities', version='1.0.0')    # get the WFS Service Capabilities
        try:
            stream = _wfs_.getfeature(typename=['Andrew_SpeciesWFSLatLong:AllSpecies'], filter=wfsfilter, propertyname=[])  # get the species range as gml
        except Exception:                                                                   # was a bare except; narrowed so SystemExit/KeyboardInterrupt still propagate
            raise Exception("update species_wdpa_analysis_summary set wfs_failed=TRUE where oid=" + str(oid))
        elementTree = ET.parse(stream)                                                      # load the gml into memory
        featureMember = elementTree.find('{' + OPENGIS_NAMESPACE + '}featureMember')        # get the features
        if featureMember is None:                                                           # error check
            raise Exception("update species_wdpa_analysis_summary set no_features_returned=TRUE where oid=" + str(oid))
        speciesid = getAttribute(elementTree, 'SpeciesID')                                  # get the SpeciesID
        if speciesid is None:                                                               # error check
            raise Exception("update species_wdpa_analysis_summary set no_speciesid=TRUE where oid=" + str(oid))
        devconn.cur.execute("update species_wdpa_analysis_summary set speciesid=" + str(speciesid) + " where objectid=" + str(OBJECTID))
        presence = getAttribute(elementTree, 'PRESENCE')                                    # get the PRESENCE
        if presence is None:                                                                # error check
            raise Exception("update species_wdpa_analysis_summary set no_presence_value=TRUE where oid=" + str(oid))
        # getiterator() is deprecated (removed in Python 3.9); iter() is the equivalent
        polygons = list(elementTree.iter('{' + OPENGIS_NAMESPACE + '}Polygon'))             # get the polygons
        if len(polygons) == 0:                                                              # error check
            raise Exception("update species_wdpa_analysis_summary set no_wfs_polygons=TRUE where oid=" + str(oid))
        intersects = False
        liveconn = dbconnect('species_live')                                                # connect to Postgresql to do the intersection analysis
        for polygon in polygons:                                                            # iterate through all of the species range polygons
            gml = ET.tostring(polygon)                                                      # serialise once instead of twice per query
            # intersect the species range polygon with the feature class
            if getArea:
                sql = "select " + idfieldname + ", st_area(st_transform(st_intersection(ST_GeomFromGML('" + gml + "',4326), geom),97099)) from " + featureclassname + " where st_intersects(ST_GeomFromGML('" + gml + "',4326), geom)"
            else:
                sql = "select " + idfieldname + " from " + featureclassname + " where st_intersects(ST_GeomFromGML('" + gml + "',4326), geom)"
            liveconn.cur.execute(sql)                                                       # execute the query
            intersectingfeatures = liveconn.cur.fetchall()                                  # get all of the records
            for intersectingfeature in intersectingfeatures:                                # record each intersecting feature
                # (removed a stray py2 'print sql' debug line that printed the
                # wrong statement when getArea was True)
                if getArea:
                    devconn.cur.execute("insert into species_wdpa (speciesid, presence, wdpaid, objectid,area)  VALUES (" + str(speciesid) + "," + str(presence) + "," + str(int(intersectingfeature[0])) + "," + str(OBJECTID) + "," + str(intersectingfeature[1]) + ") RETURNING oid")
                else:
                    devconn.cur.execute("insert into species_wdpa (speciesid, presence, wdpaid, objectid)  VALUES (" + str(speciesid) + "," + str(presence) + "," + str(int(intersectingfeature[0])) + "," + str(OBJECTID) + ") RETURNING oid")
                intersects = True
        if not intersects:
            raise Exception("update species_wdpa_analysis_summary set intersecting_features=FALSE where oid=" + str(oid))
    except Exception as inst:
        if devconn is not None:                                                             # connecting may have failed before devconn was bound
            devconn.cur.execute(inst.args[0])                                               # the exception message is the recovery SQL (see docstring)
    finally:
        if devconn is not None:
            devconn.cur.close()                                                             # close the cursor
        if liveconn is not None:
            liveconn.cur.close()                                                            # close the cursor
        # Original did del(conn): 'conn' was never defined, so every call hit a
        # NameError here. Drop our own references instead.
        del devconn, liveconn
from matplotlib import ticker

from utilities import css_styles
css_styles()  # apply the project's shared notebook CSS styling (defined in utilities)


# ## Load Important Bird Areas
# These are defined by [Audubon](http://web4.audubon.org/bird/iba/) and hosted by AOOS on a geoserver. Currently, they represent areas important for numerous species of birds.
# 
# The single species core areas must be processed differently, because they have different metadata and include species information. They are contained in a separate notebook: [located here](http://127.0.0.1:8888/cc62f4c2-b95f-41a4-9b84-13c46ce9c37a).

# In[155]:

#Load our important bird areas again.
# Fetch the IBA polygons from the AOOS geoserver via WFS 1.0.0 as GeoJSON
# (srsname requests coordinates in EPSG:4326 lat/long).
known_wfs = "http://solo.axiomalaska.com/geoserver/audubon_ibav3/ows"
wfs = WebFeatureService(known_wfs, version='1.0.0')
geojson_response = wfs.getfeature(typename=['audubon_ibav3:audubon_ibas_v3_20aug2014'], outputFormat="application/json", srsname="urn:x-ogc:def:crs:EPSG:4326").read()
geojson = json.loads(geojson_response)


# In[156]:

# Collect every 'geometry' object from the nested GeoJSON and turn each one
# into a shapely geometry.
geometries = find_dict_keys('geometry', geojson)
shapes = [shape(g) for g in geometries]

# Feature ids are split on the first '.' and the tail kept - presumably they
# arrive as '<layer>.<number>'; TODO confirm a dot is always present.
ids = find_dict_keys('id', geojson)
ids=[(str(j).split('.', 1))[1]  for j in ids]

# Site name attribute for each IBA feature, coerced to plain strings.
sitenames = find_dict_keys('newsitenam', geojson)
sitenames = [str(s) for s in sitenames]