def _setconstraint(self, parent, qtype=None, propertyname='csw:AnyText',
                   keywords=None, bbox=None, cql=None, identifier=None):
    """Append a csw:Constraint element to *parent* if any filter is given.

    Parameters
    ----------
    - parent: the etree element to attach the constraint to
    - qtype: optional query type forwarded to the OGC Filter
    - propertyname: property to query against (default 'csw:AnyText')
    - keywords: optional list of keyword strings
    - bbox: optional bounding box forwarded to the OGC Filter
    - cql: raw CQL query text; overrides qtype/keywords/propertyname/bbox
    - identifier: record identifier; overrides all other parameters

    If no filter parameter is supplied, no element is added.
    """
    # None sentinel instead of a mutable [] default shared across calls
    if keywords is None:
        keywords = []
    if keywords or bbox is not None or qtype is not None \
            or cql is not None or identifier is not None:
        node0 = etree.SubElement(
            parent, util.nspath_eval('csw:Constraint', namespaces))
        node0.set('version', '1.1.0')
        if identifier is not None:
            # set identifier filter, overrides all other parameters
            flt = fes.FilterRequest()
            node0.append(flt.set(identifier=identifier))
        elif cql is not None:
            # send raw CQL query; CQL passed, overrides all other parameters
            node1 = etree.SubElement(
                node0, util.nspath_eval('csw:CqlText', namespaces))
            node1.text = cql
        else:
            # construct a Filter request from the remaining parameters
            flt = fes.FilterRequest()
            node0.append(
                flt.set(qtype=qtype, keywords=keywords,
                        propertyname=propertyname, bbox=bbox))
def GetWFSLayerFilter(u, l, pwd, n, d, a, fe, s):
    """Download a filtered WFS layer as GeoJSON and save it to disk.

    Parameters
    ----------
    - u: WFS service URL
    - l: username (login)
    - pwd: password
    - n: typename (layer name) to fetch
    - d: destination file path; skipped if it already exists
    - a: attribute (property) name used to filter features
    - fe: comma-separated list of attribute values to match
    - s: SRS name for the returned geometries

    Returns None; the payload is written to *d* as a side effect.
    """
    start = datetime.now()
    idList = fe.split(",")
    chemin = d
    if not exists(chemin):
        # One equality filter per requested id, combined into a single
        # OGC Filter XML string
        filterList = [fes.PropertyIsEqualTo(a, i) for i in idList]
        fr = fes.FilterRequest()
        filter_fes = fr.setConstraintList(filterList, tostring=True)
        # Get the vector layer using OGC WFS standard version 1.0.0
        wfs = WebFeatureService(u, version='1.0.0', username=l,
                                password=pwd, timeout=10)
        # Supported outputFormat: GML2, GML3, shape-zip, application/json
        getFeature = wfs.getfeature(typename=(n, ), filter=filter_fes,
                                    outputFormat="application/json",
                                    srsname=s)  # maxfeatures=200)
        # Download the payload; 'with' guarantees the file handle is
        # closed even if the write fails
        data = getFeature.read()
        with open(chemin, 'wb') as f:
            f.write(data)
        # Report elapsed time (print() — Python 2 print statements were
        # a SyntaxError under Python 3)
        delta = datetime.now() - start
        print("\n{0} Downloaded on : {1}\n".format(n, delta))
    else:
        # fixed typo in the user-facing message ("exsists")
        print("\n{0} exists\n".format(n))
    return
def getrecords2(self, constraints=None, sortby=None, typenames='csw:Record',
                esn='summary', outputschema=namespaces['csw'],
                format=outputformat, startposition=0, maxrecords=10,
                cql=None, xml=None, resulttype='results'):
    """ Construct and process a GetRecords request

    Parameters
    ----------
    - constraints: the list of constraints (OgcExpression from owslib.fes module)
    - sortby: an OGC SortBy object (SortBy from owslib.fes module)
    - typenames: the typeNames to query against (default is csw:Record)
    - esn: the ElementSetName 'full', 'brief' or 'summary' (default is 'summary')
    - outputschema: the outputSchema (default is 'http://www.opengis.net/cat/csw/2.0.2')
    - format: the outputFormat (default is 'application/xml')
    - startposition: requests a slice of the result set, starting at this position (default is 0)
    - maxrecords: the maximum number of records to return. No records are returned if 0 (default is 10)
    - cql: common query language text. Note this overrides bbox, qtype, keywords
    - xml: raw XML request. Note this overrides all other options
    - resulttype: the resultType 'hits', 'results', 'validate' (default is 'results')
    """
    # None sentinel instead of a mutable [] default shared across calls
    if constraints is None:
        constraints = []
    if xml is not None:
        # Caller supplied a raw XML request: parse it and recover the
        # ElementSetName / outputSchema so the response can be parsed
        self.request = etree.fromstring(xml)
        val = self.request.find(
            util.nspath_eval('csw:Query/csw:ElementSetName', namespaces))
        if val is not None:
            esn = util.testXMLValue(val)
        val = self.request.attrib.get('outputSchema')
        if val is not None:
            outputschema = util.testXMLValue(val, True)
    else:  # construct request
        node0 = self._setrootelement('csw:GetRecords')
        if etree.__name__ != 'lxml.etree':  # apply nsmap manually
            node0.set('xmlns:ows', namespaces['ows'])
            node0.set('xmlns:gmd', namespaces['gmd'])
            node0.set('xmlns:dif', namespaces['dif'])
            node0.set('xmlns:fgdc', namespaces['fgdc'])
        node0.set('outputSchema', outputschema)
        node0.set('outputFormat', format)
        node0.set('version', self.version)
        node0.set('service', self.service)
        node0.set('resultType', resulttype)
        if startposition > 0:
            node0.set('startPosition', str(startposition))
        node0.set('maxRecords', str(maxrecords))
        node0.set(util.nspath_eval('xsi:schemaLocation', namespaces),
                  schema_location)
        node1 = etree.SubElement(
            node0, util.nspath_eval('csw:Query', namespaces))
        node1.set('typeNames', typenames)
        etree.SubElement(
            node1,
            util.nspath_eval('csw:ElementSetName', namespaces)).text = esn
        if any([len(constraints) > 0, cql is not None]):
            node2 = etree.SubElement(
                node1, util.nspath_eval('csw:Constraint', namespaces))
            node2.set('version', '1.1.0')
            flt = fes.FilterRequest()
            if len(constraints) > 0:
                node2.append(flt.setConstraintList(constraints))
            # Now add a CQL filter if passed in
            elif cql is not None:
                etree.SubElement(
                    node2,
                    util.nspath_eval('csw:CqlText', namespaces)).text = cql
        if sortby is not None and isinstance(sortby, fes.SortBy):
            node1.append(sortby.toXML())
        self.request = node0
    self._invoke()
    if self.exceptionreport is None:
        self.results = {}
        # process search results attributes; resolve csw:SearchResults
        # once instead of repeating the same find() three times
        search_results = self._exml.find(
            util.nspath_eval('csw:SearchResults', namespaces))
        val = search_results.attrib.get('numberOfRecordsMatched')
        self.results['matches'] = int(util.testXMLValue(val, True))
        val = search_results.attrib.get('numberOfRecordsReturned')
        self.results['returned'] = int(util.testXMLValue(val, True))
        val = search_results.attrib.get('nextRecord')
        if val is not None:
            self.results['nextrecord'] = int(util.testXMLValue(val, True))
        else:
            warnings.warn(
                """CSW Server did not supply a nextRecord value (it is optional), so the client should page through the results in another way.""")
            # For more info, see:
            # https://github.com/geopython/OWSLib/issues/100
            self.results['nextrecord'] = None
        # process list of matching records
        self.records = OrderedDict()
        self._parserecords(outputschema, esn)
def compose_query(query_string, bbox, wfs_filters):
    """Compose a wfs filter query from a string

    The query string should be composed as: "property_name operator
    literal". The property names and operators are initialized with the
    DovBoringen class. Multiple filters can be added by comma separation
    e.g.: "property_name1 operator1 literal1, property_name2 operator2
    literal2" The PropertyIsBetween operator requires a lower and upper
    boundary, it is given by a tuple in the string, e.g.:
    "diepte_tot_m << (20,100)"

    Parameters
    ----------
    query_string : str
        A string containing the query that will be used as constrained in
        the WFS call. See also: get_boringen()
    bbox : tuple of floats, or empty tuple
        The X, Y coordinates of the bounding box as (xmin, ymin, xmax, ymax)
    wfs_filters : dict
        A dictionary mapping the operator in the query string to the
        comparison operator of the wfs call

    Returns
    -------
    filterxml : str
        A string of the xml constraint for a wfs call using owslib
    """
    filters = []
    # extract criteria
    if query_string:
        query_raw = [x.strip(' ,') for x in query_string.split(' ')]
        if len(query_raw) % 3 != 0:
            raise ValueError('The query string is not correct. '
                             'It should be composed of "property operator '
                             'literal"')
        idx = 1
        for fltr in query_raw[1::3]:
            if fltr != '<<':
                filters.append(wfs_filters[fltr](
                    propertyname=query_raw[idx - 1],
                    literal=query_raw[idx + 1]))
            else:
                # Between operator: "(lower,upper)" tuple. str.strip takes
                # a string of characters, not a list — strip(['(', ')'])
                # raised TypeError and made this branch unreachable.
                lb, ub = [
                    x.strip('()') for x in query_raw[idx + 1].split(',')
                ]
                filters.append(wfs_filters[fltr](
                    propertyname=query_raw[idx - 1],
                    lowerboundary=lb, upperboundary=ub))
            idx += 3
    if bbox:
        filters.append(fes.BBox(bbox))
    # renamed from 'filter' to avoid shadowing the builtin
    if len(filters) == 1:
        filter_request = fes.FilterRequest().setConstraint(filters[0])
    elif len(filters) > 1:
        # only logical AND is evaluated (constraint = [[a, b]])
        filter_request = fes.FilterRequest().setConstraintList([filters])
    else:
        return ''
    filterxml = fes.etree.tostring(filter_request, encoding="utf-8",
                                   method='xml')
    return filterxml
def get_data(self, typename, **kwargs):
    """
    Download WOUDC observations

    :param bbox: a list representing a bounding box spatial filter
                 (`minx, miny, maxx, maxy`)
    :param temporal: a list of two elements representing a time period
                     (start, end) which accepts the following types:
                     - :py:class:`datetime.date`
                     - :py:class:`datetime.datetime`
                     - string date (e.g. ``2012-10-30``)
                     - string datetime (e.g. ``2012-10-30 11:11:11``)
    :param property_name: a string representing the property name to apply
                          as filter against
    :param property_value: a string representing the value which filters
                           against `property_name`
    :param sort_property: a string representing the property on which to
                          sort results (default ``instance_datetime``)
    :param sort_descending: a boolean of whether to sort descending
                            (default is ``False``).
                            Applied if `sort_property` is specified

    :returns: list of WOUDC observations GeoJSON payload
    """
    constraints = []
    variables = []
    filter_string = None
    bbox = None
    temporal = None
    property_name = None
    property_value = None
    sort_property = None
    sort_descending = False
    startindex = 0
    output = []
    LOGGER.info('Downloading dataset %s', typename)
    LOGGER.debug('Assembling query parameters')
    # dict.items(): kwargs.iteritems() is Python 2 only and raised
    # AttributeError on Python 3
    for key, value in kwargs.items():
        if key == 'bbox':
            bbox = value
        if key == 'temporal':
            temporal = value
        if key == 'property_name':
            property_name = value
        if key == 'property_value':
            property_value = str(value)
        if key == 'variables':
            variables = value
        if key == 'sortby':
            sort_property = value
        if key == 'sort_descending':
            sort_descending = value
    LOGGER.debug('Assembling constraints')
    if property_name is not None and property_value is not None:
        constraints.append(fes.PropertyIsEqualTo(property_name,
                                                 property_value))
    if bbox is not None:
        if not isinstance(bbox, list) or len(bbox) != 4:
            raise ValueError('bbox must be list of minx, miny, maxx, maxy')
        LOGGER.debug('Setting spatial constraint')
        constraints.append(fes.BBox(bbox))
    if temporal is not None:
        if not isinstance(temporal, list) or len(temporal) != 2:
            msg = 'temporal must be list of start date, end date'
            raise ValueError(msg)
        LOGGER.info('Setting temporal constraint')
        temporal_start = date2string(temporal[0], 'begin')
        temporal_end = date2string(temporal[1], 'end')
        constraints.append(fes.PropertyIsBetween(
            'instance_datetime', temporal_start, temporal_end))
    if sort_descending is not None:
        if not isinstance(sort_descending, bool):
            raise ValueError('sort_descending must be boolean')
    if constraints:
        LOGGER.debug('Combining constraints')
        flt = fes.FilterRequest()
        if len(constraints) == 1:
            LOGGER.debug('Single constraint')
            filter_string = flt.setConstraint(constraints[0],
                                              tostring=True)
        if len(constraints) > 1:
            LOGGER.debug('Multiple constraints')
            # nested list => logical AND of all constraints
            filter_string = flt.setConstraintList([constraints],
                                                  tostring=True)
    LOGGER.info('Fetching observations')
    LOGGER.info('Filters:')
    LOGGER.info('bbox: %r', bbox)
    LOGGER.info('temporal: %r', temporal)
    LOGGER.info('attribute query: %r = %r', property_name, property_value)
    # page download and assemble single list of JSON features
    while True:
        LOGGER.debug('Fetching features %d - %d', startindex,
                     startindex + self.maxfeatures)
        payload = self.server.getfeature(
            typename=typename,
            startindex=startindex,
            propertyname=variables,
            maxfeatures=self.maxfeatures,
            filter=filter_string,
            outputFormat=self.outputformat).read()
        LOGGER.debug('Processing response')
        if payload.isspace():
            LOGGER.debug('Empty response. Exiting')
            break
        try:
            features = json.loads(payload)['features']
        except ValueError:
            msg = 'Query produced no results'
            LOGGER.info(msg)
            return None
        len_features = len(features)
        LOGGER.debug('Found %d features', len_features)
        output.extend(features)
        # a short page means the server has no more features
        if len_features < self.maxfeatures:
            break
        startindex = startindex + self.maxfeatures
    LOGGER.info('Found %d features', len(output))
    if sort_property is not None:
        LOGGER.info('Sorting response by %s', sort_property)
        output.sort(key=lambda e: e['properties'][sort_property],
                    reverse=sort_descending)
    return output
def get_data(self, typename, **kwargs):
    """
    Download WOUDC observations

    :param bbox: a list representing a bounding box spatial filter
                 (`minx, miny, maxx, maxy`)
    :param temporal: a list of two elements representing a time period
                     (start, end) which accepts the following types:
                     - :py:class:`datetime.date`
                     - :py:class:`datetime.datetime`
                     - string date (e.g. ``2012-10-30``)
                     - string datetime (e.g. ``2012-10-30 11:11:11``)
    :param filters: `dict` of key-value pairs of property names and values.
                    Constructs exclusive search
    :param variables: a list of variables to return as part of the
                      response (default returns all)
    :param sort_property: a string representing the property on which to
                          sort results (default ``instance_datetime``)
    :param sort_order: a string representing sort order of response
                       (``asc`` or ``desc``). Default is ``asc``.
                       Applied if `sort_property` is specified

    :returns: list of WOUDC observations GeoJSON payload
    """
    constraints = []
    filters = []
    variables = '*'  # '*' propertyname means return all properties
    filter_string = None
    bbox = None
    temporal = None
    sort_property = None
    sort_order = 'asc'
    startindex = 0
    features = None
    feature_collection = None
    sort_descending = False
    LOGGER.info('Downloading dataset %s', typename)
    LOGGER.debug('Assembling query parameters')
    # unpack recognized keyword arguments; unknown keys are ignored
    for key, value in kwargs.items():
        if key == 'bbox':
            bbox = value
        if key == 'temporal':
            temporal = value
        if key == 'filters':
            filters = value
        if key == 'variables':
            variables = value
        if key == 'sortby':
            sort_property = value
        if key == 'sort_order':
            sort_order = value
    LOGGER.debug('Assembling constraints')
    if filters:
        # one equality constraint per key/value pair; combined below
        for key, value in filters.items():
            constraints.append(fes.PropertyIsEqualTo(key, value))
    if bbox is not None:
        if not isinstance(bbox, list) or len(bbox) != 4:
            raise ValueError('bbox must be list of minx, miny, maxx, maxy')
        LOGGER.debug('Setting spatial constraint')
        constraints.append(fes.BBox(bbox))
    if temporal is not None:
        if not isinstance(temporal, list) or len(temporal) != 2:
            msg = 'temporal must be list of start date, end date'
            raise ValueError(msg)
        LOGGER.info('Setting temporal constraint')
        temporal_start = date2string(temporal[0], 'begin')
        temporal_end = date2string(temporal[1], 'end')
        constraints.append(fes.PropertyIsBetween(
            'instance_datetime', temporal_start, temporal_end))
    if sort_order not in ['asc', 'desc']:
        raise ValueError('sort_order must be asc or desc')
    else:
        if sort_order == 'desc':
            sort_descending = True
    if variables != '*':
        if not isinstance(variables, list):
            raise ValueError('variables must be list')
    if constraints:
        LOGGER.debug('Combining constraints')
        flt = fes.FilterRequest()
        if len(constraints) == 1:
            LOGGER.debug('Single constraint')
            filter_string = flt.setConstraint(constraints[0],
                                              tostring=True)
        if len(constraints) > 1:
            LOGGER.debug('Multiple constraints')
            # nested list => logical AND of all constraints
            filter_string = flt.setConstraintList([constraints],
                                                  tostring=True)
    LOGGER.info('Fetching observations')
    LOGGER.info('Filters:')
    LOGGER.info('bbox: %r', bbox)
    LOGGER.info('temporal: %r', temporal)
    LOGGER.info('attribute queries: %r', filters)
    # page download and assemble single list of JSON features
    while True:
        LOGGER.debug('Fetching features %d - %d', startindex,
                     startindex + self.maxfeatures)
        payload = self.server.getfeature(
            typename=typename,
            startindex=startindex,
            propertyname=variables,
            maxfeatures=self.maxfeatures,
            filter=filter_string,
            outputFormat=self.outputformat).read()
        LOGGER.debug('Processing response')
        if payload.isspace():
            LOGGER.debug('Empty response. Exiting')
            break
        try:
            features = json.loads(payload)
        except ValueError:
            msg = 'Query produced no results'
            LOGGER.info(msg)
            return None
        len_features = len(features['features'])
        LOGGER.debug('Found %d features', len_features)
        # first page becomes the collection; later pages only contribute
        # their 'features' list
        if feature_collection is None:
            feature_collection = features
        else:
            feature_collection['features'].extend(features['features'])
        # a short page means the server has no more features
        if len_features < self.maxfeatures:
            break
        startindex = startindex + self.maxfeatures
    len_feature_collection = len(feature_collection['features'])
    LOGGER.info('Found %d total features', len_feature_collection)
    if sort_property is not None:
        LOGGER.info('Sorting response by %s', sort_property)
        feature_collection['features'].sort(
            key=lambda e: e['properties'][sort_property],
            reverse=sort_descending)
    return feature_collection
def getrecords2(
    self,
    constraints=None,
    sortby=None,
    typenames="csw:Record",
    esn="summary",
    outputschema=csw_namespaces["csw"],
    format=csw_outputformat,
    startposition=0,
    maxrecords=10,
    cql=None,
    xml=None,
    resulttype="results",
):
    """Construct and process a CSW GetRecords request.

    Parameters
    ----------
    - constraints: the list of constraints (OgcExpression from owslib.fes module)
    - sortby: an OGC SortBy object (SortBy from owslib.fes module)
    - typenames: the typeNames to query against (default is csw:Record)
    - esn: the ElementSetName 'full', 'brief' or 'summary' (default is 'summary')
    - outputschema: the outputSchema (default is csw_namespaces['csw'])
    - format: the outputFormat (default is csw_outputformat)
    - startposition: requests a slice of the result set, starting at this position (default is 0)
    - maxrecords: the maximum number of records to return (default is 10)
    - cql: common query language text; overrides constraints
    - xml: raw XML request; overrides all other options
    - resulttype: the resultType 'hits', 'results', 'validate' (default is 'results')
    """
    # None sentinel instead of a mutable [] default shared across calls
    if constraints is None:
        constraints = []
    if xml is not None:
        # Caller supplied a raw XML request: parse it and recover the
        # ElementSetName / outputSchema so the response can be parsed
        self.request = etree.fromstring(xml)
        val = self.request.find(
            util.nspath_eval("csw:Query/csw:ElementSetName", csw_namespaces))
        if val is not None:
            esn = util.testXMLValue(val)
        val = self.request.attrib.get("outputSchema")
        if val is not None:
            outputschema = util.testXMLValue(val, True)
    else:  # construct request
        node0 = self._setrootelement("csw:GetRecords")
        if etree.__name__ != "lxml.etree":  # apply nsmap manually
            node0.set("xmlns:ows", csw_namespaces["ows"])
            node0.set("xmlns:gmd", csw_namespaces["gmd"])
            node0.set("xmlns:dif", csw_namespaces["dif"])
            node0.set("xmlns:fgdc", csw_namespaces["fgdc"])
        node0.set("outputSchema", outputschema)
        node0.set("outputFormat", format)
        node0.set("version", self.version)
        node0.set("service", self.service)
        node0.set("resultType", resulttype)
        if startposition > 0:
            node0.set("startPosition", str(startposition))
        node0.set("maxRecords", str(maxrecords))
        node0.set(
            util.nspath_eval("xsi:schemaLocation", csw_namespaces),
            csw_schema_location,
        )
        node1 = etree.SubElement(
            node0, util.nspath_eval("csw:Query", csw_namespaces))
        node1.set("typeNames", typenames)
        etree.SubElement(
            node1,
            util.nspath_eval("csw:ElementSetName", csw_namespaces)).text = esn
        if any([len(constraints) > 0, cql is not None]):
            node2 = etree.SubElement(
                node1, util.nspath_eval("csw:Constraint", csw_namespaces))
            node2.set("version", "1.1.0")
            flt = fes.FilterRequest()
            if len(constraints) > 0:
                node2.append(flt.setConstraintList(constraints))
            # Now add a CQL filter if passed in
            elif cql is not None:
                etree.SubElement(
                    node2,
                    util.nspath_eval("csw:CqlText", csw_namespaces)).text = cql
        if sortby is not None and isinstance(sortby, fes.SortBy):
            node1.append(sortby.toXML())
        self.request = node0
    self._invoke()
    if self.exceptionreport is None:
        self.results = {}
        # process search results attributes; resolve csw:SearchResults
        # once instead of repeating the same find() three times
        search_results = self._exml.find(
            util.nspath_eval("csw:SearchResults", csw_namespaces))
        val = search_results.attrib.get("numberOfRecordsMatched")
        self.results["matches"] = int(util.testXMLValue(val, True))
        val = search_results.attrib.get("numberOfRecordsReturned")
        self.results["returned"] = int(util.testXMLValue(val, True))
        val = search_results.attrib.get("nextRecord")
        if val is not None:
            self.results["nextrecord"] = int(util.testXMLValue(val, True))
        else:
            warnings.warn(
                """CSW Server did not supply a nextRecord value (it is optional), so the client should page through the results in another way.""")
            # For more info, see:
            # https://github.com/geopython/OWSLib/issues/100
            self.results["nextrecord"] = None
        # process list of matching records
        self.records = OrderedDict()
        self._parserecords(outputschema, esn)