def _parse_layers(self, xml_obj):
    """ Parses all layers of a service and creates OGCWebMapLayer objects from each.

    Uses recursion on the inside to get all children.

    Args:
        xml_obj: The iterable xml tree
    Returns:
         nothing
    """
    # The topmost parent layer normally lives directly inside <Capability>
    root_layers = xml_helper.try_get_element_from_xml(
        elem="//" + GENERIC_NAMESPACE_TEMPLATE.format("Capability") +
             "/" + GENERIC_NAMESPACE_TEMPLATE.format("Layer"),
        xml_elem=xml_obj)
    all_layers = xml_helper.try_get_element_from_xml(
        elem="//" + GENERIC_NAMESPACE_TEMPLATE.format("Layer"),
        xml_elem=xml_obj)
    # Compute the async progress increment contributed by each parsed layer.
    # Fall back to 1 so we never divide by zero on an empty layer list.
    num_layers = len(all_layers) or 1
    step_size = float(PROGRESS_STATUS_AFTER_PARSING / num_layers)
    service_logger.debug(
        "Total number of layers: {}. Step size: {}".format(
            num_layers, step_size))
    self._parse_layers_recursive(root_layers, step_size=step_size)
def _overwrite_capabilities_iso_metadata_links(xml_obj: _Element, metadata: Metadata):
    """ Overwrites links in capabilities document

    Args:
        xml_obj (_Element): The xml_object of the document
        metadata (Metadata): The metadata object, holding the data
    Returns:

    """
    # Links that really exist according to the metadata object
    existing_links = metadata.get_related_metadata_uris()
    # All <MetadataURL> elements currently present in the capabilities element
    md_url_elems = xml_helper.try_get_element_from_xml("./MetadataURL", xml_obj)
    for md_url_elem in md_url_elems:
        online_resource_elem = xml_helper.try_get_element_from_xml(
            "./OnlineResource", md_url_elem)
        href = xml_helper.try_get_attribute_from_xml_element(
            online_resource_elem, "xlink:href")
        if href in existing_links:
            # Still in use. Remove it from existing_links so that whatever
            # remains afterwards must be a brand new link.
            existing_links.remove(href)
        else:
            # Not referenced by the metadata anymore -> drop it from the xml
            xml_helper.remove_element(md_url_elem)
    # Everything left over in existing_links is new and must be added to the doc
    for new_link in existing_links:
        xml_helper.add_iso_md_element(xml_obj, new_link)
def parse_projection_system(self, layer, layer_obj):
    """ Collects every <SRS> entry of the layer into the layer object's projection list. """
    srs_path = "./" + GENERIC_NAMESPACE_TEMPLATE.format("SRS")
    srs_elems = xml_helper.try_get_element_from_xml(elem=srs_path, xml_elem=layer)
    for srs_elem in srs_elems:
        layer_obj.capability_projection_system.append(srs_elem.text)
def test_get_records_sort(self):
    """ Test whether the sorting parameter is working properly

    Returns:

    """
    params = {
        "service": "CSW",
        "version": "2.0.2",
        "request": "GetRecords",
        "elementsetname": "brief",
        "resulttype": "results",
        "sortby": "dc:title:D",
    }
    response = self.client.get(reverse(CSW_PATH), data=params)
    status_code = response.status_code
    xml_response = xml_helper.parse_xml(response.content)
    self.assertEqual(response.status_code, 200,
                     WRONG_STATUS_CODE_TEMPLATE.format(status_code))
    self.assertIsNotNone(xml_response, INVALID_XML_MSG)
    # Collect all dc:title texts in the order the service returned them
    title_elems = xml_helper.try_get_element_from_xml(
        "//" + GENERIC_NAMESPACE_TEMPLATE.format("title"), xml_response)
    returned_order = [
        xml_helper.try_get_text_from_xml_element(elem) for elem in title_elems
    ]
    # Build the expected descending order and compare against the response order
    expected_order = copy(returned_order)
    expected_order.sort(reverse=True)
    self.assertEqual(expected_order, returned_order)
def _create_formats_from_md_metadata(self, md_metadata: Element) -> list:
    """ Creates a list of format names from a MD_Metadata element.

    NOTE: despite the surrounding MimeType handling, this returns the plain
    <name>/<CharacterString> texts of each MD_Format element, not MimeType
    objects. (The previous docstring claimed MimeType objects.)

    Args:
        md_metadata (Element): The xml element
    Returns:
        formats (list): Format name strings; empty if no distributionFormat exists
    """
    distribution_elem = xml_helper.try_get_single_element_from_xml(
        ".//" + GENERIC_NAMESPACE_TEMPLATE.format("distributionFormat"),
        md_metadata)
    if distribution_elem is None:
        # No format information available at all
        return []
    md_format_elems = xml_helper.try_get_element_from_xml(
        ".//" + GENERIC_NAMESPACE_TEMPLATE.format("MD_Format"), md_metadata)
    name_path = (".//" + GENERIC_NAMESPACE_TEMPLATE.format("name") +
                 "/" + GENERIC_NAMESPACE_TEMPLATE.format("CharacterString"))
    # Only keep formats that actually provide a name
    formats = []
    for md_format_elem in md_format_elems:
        name = xml_helper.try_get_text_from_xml_element(md_format_elem, name_path)
        if name is not None:
            formats.append(name)
    return formats
def _parse_xml_legal_dates(self, xml_obj: Element):
    """ Parses existing CI_Date elements from the MD_DataIdentification element

    Args:
        xml_obj (Element): The document xml element
    Returns:

    """
    data_ident_elem = xml_helper.try_get_single_element_from_xml(
        "//" + GENERIC_NAMESPACE_TEMPLATE.format("MD_DataIdentification"),
        xml_obj)
    ci_date_elems = xml_helper.try_get_element_from_xml(
        ".//" + GENERIC_NAMESPACE_TEMPLATE.format("CI_Date"),
        data_ident_elem)
    if not ci_date_elems:
        return
    date_path = ".//" + GENERIC_NAMESPACE_TEMPLATE.format("Date")
    type_code_path = ".//" + GENERIC_NAMESPACE_TEMPLATE.format("CI_DateTypeCode")
    for ci_date_elem in ci_date_elems:
        legal_date = LegalDate()
        legal_date.date = xml_helper.try_get_text_from_xml_element(
            ci_date_elem, date_path)
        legal_date.date_type_code = xml_helper.try_get_attribute_from_xml_element(
            ci_date_elem, "codeListValue", type_code_path)
        legal_date.date_type_code_list_url = xml_helper.try_get_attribute_from_xml_element(
            ci_date_elem, "codeList", type_code_path)
        self.legal_dates.append(legal_date)
def parse_keywords(self, layer, layer_obj):
    """ Appends every <Keyword> text of the layer's <KeywordList> to the layer object. """
    keyword_path = ("./" + GENERIC_NAMESPACE_TEMPLATE.format("KeywordList") +
                    "/" + GENERIC_NAMESPACE_TEMPLATE.format("Keyword"))
    keyword_elems = xml_helper.try_get_element_from_xml(elem=keyword_path, xml_elem=layer)
    for keyword_elem in keyword_elems:
        layer_obj.capability_keywords.append(keyword_elem.text)
def parse_lat_lon_bounding_box(self, layer, layer_obj):
    """ Version specific implementation of the bounding box parsing

    Args:
        layer: The xml element which holds the layer info (parsing from)
        layer_obj: The backend model which holds the layer data (parsing to)
    Returns:
        nothing
    """
    # Maps the WMS child element names onto our generic bbox keys
    bound_map = {
        "westBoundLongitude": "minx",
        "eastBoundLongitude": "maxx",
        "southBoundLatitude": "miny",
        "northBoundLatitude": "maxy",
    }
    try:
        bbox_elem = xml_helper.try_get_element_from_xml(
            "./" + GENERIC_NAMESPACE_TEMPLATE.format("EX_GeographicBoundingBox"),
            layer)[0]
    except IndexError:
        # No geographic bounding box element present for this layer
        return
    for src_name, dst_key in bound_map.items():
        text = xml_helper.try_get_text_from_xml_element(
            xml_elem=bbox_elem,
            elem="./" + GENERIC_NAMESPACE_TEMPLATE.format(src_name))
        # Missing bounds default to 0, matching the previous behavior
        layer_obj.capability_bbox_lat_lon[dst_key] = 0 if text is None else text
def parse_dimension(self, layer, layer_obj):
    """ The version specific implementation of the dimension parsing.

    In WMS 1.3.0 the extent value is the text content of <Dimension> itself.

    Args:
        layer: The xml element which holds the layer info (parsing from)
        layer_obj: The backend model which holds the layer data (parsing to)
    Returns:
        nothing
    """
    dim_list = []
    try:
        dims = xml_helper.try_get_element_from_xml(
            elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("Dimension"),
            xml_elem=layer)
        for dim in dims:
            dim_list.append({
                "type": dim.get("name"),
                "units": dim.get("units"),
                "extent": dim.text,
            })
    except (IndexError, AttributeError):
        # Dimension info is optional - a malformed or missing element is skipped.
        # (The previously bound-but-unused `as error` has been dropped.)
        pass
    layer_obj.dimension_list = dim_list
def _parse_single_layer(self, layer, parent, step_size: float = None, async_task: Task = None):
    """ Parses data from an xml <Layer> element into the OGCWebMapLayer object.

    Runs recursive through own children for further parsing

    Args:
        layer: The layer xml element
        parent: The parent OGCWebMapLayer object
        step_size (float): Progress increment reported per parsed layer
        async_task (Task): The async task to report progress on
    Returns:
        nothing
    """
    layer_obj = self._start_single_layer_parsing(layer)
    # Report progress only when running asynchronously with a known step size
    if not (step_size is None or async_task is None):
        task_helper.update_progress_by_step(async_task, step_size)
        task_helper.update_service_description(
            async_task, None, "Parsing {}".format(layer_obj.title))
    layer_obj.parent = parent
    if self.layers is None:
        self.layers = []
    self.layers.append(layer_obj)
    child_elems = xml_helper.try_get_element_from_xml(
        elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("Layer"),
        xml_elem=layer
    )
    if parent is not None:
        parent.child_layers.append(layer_obj)
    # Recurse into this layer's children with the same progress settings
    self._parse_layers_recursive(layers=child_elems,
                                 parent=layer_obj,
                                 step_size=step_size,
                                 async_task=async_task)
def get_service_operations_and_formats(self, xml_obj):
    """ Creates table records from <Capability><Request></Request></Capability> contents

    Creates MimeType records

    Args:
        xml_obj: The xml document object
    Returns:

    """
    cap_request = xml_helper.try_get_single_element_from_xml(
        "//" + GENERIC_NAMESPACE_TEMPLATE.format("Capability") +
        "/" + GENERIC_NAMESPACE_TEMPLATE.format("Request"),
        xml_obj
    )
    if cap_request is None:
        # Document advertises no <Request> section - nothing to record.
        # (Previously this crashed with AttributeError on None.)
        return
    # list(elem) iterates the direct children; getchildren() is deprecated in lxml
    for operation in list(cap_request):
        RequestOperation.objects.get_or_create(
            operation_name=operation.tag,
        )
        # Parse the supported formats of this operation
        format_elems = xml_helper.try_get_element_from_xml(
            "./" + GENERIC_NAMESPACE_TEMPLATE.format("Format"),
            operation
        )
        self.operation_format_map[operation.tag] = [f.text for f in format_elems]
def _parse_parameter_metadata(self, upper_elem):
    """ Parses the <Parameter> elements inside of <OperationsMetadata>

    Args:
        upper_elem (Element): The upper xml element
    Returns:
        parameter_map (dict): Mapped parameters and values
    """
    param_elems = xml_helper.try_get_element_from_xml(
        "./" + GENERIC_NAMESPACE_TEMPLATE.format("Parameter"),
        upper_elem
    )
    value_path = ".//" + GENERIC_NAMESPACE_TEMPLATE.format("Value")
    # Map each parameter's name attribute onto its first <Value> text
    return {
        xml_helper.try_get_attribute_from_xml_element(param, "name"):
            xml_helper.try_get_text_from_xml_element(param, value_path)
        for param in param_elems
    }
def parse_scale_hint(self, layer, layer_obj):
    """ Reads the min/max attributes of <ScaleHint> into the layer object, if present. """
    try:
        scale_elem = xml_helper.try_get_element_from_xml(
            elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("ScaleHint"),
            xml_elem=layer)[0]
    except IndexError:
        # No scale hint provided for this layer
        return
    for attr in ("min", "max"):
        layer_obj.capability_scale_hint[attr] = scale_elem.get(attr)
def test_get_records_by_id(self):
    """ Test for checking if the GetRecordsById is working fine or not.

    Returns:

    """
    get_records_param = {
        "service": "CSW",
        "version": "2.0.2",
        "request": "GetRecordById",
        "id": self.test_id,
        "elementsetname": "full",
    }
    response = self.client.get(reverse(CSW_PATH), data=get_records_param)
    status_code = response.status_code
    content = response.content
    content_xml = xml_helper.parse_xml(content)
    self.assertEqual(response.status_code, 200,
                     WRONG_STATUS_CODE_TEMPLATE.format(status_code))
    self.assertIsNotNone(content_xml, INVALID_XML_MSG)
    # Check that the results are correct in amount and quality
    num_returned_elems = int(
        xml_helper.try_get_attribute_from_xml_element(
            xml_elem=content_xml,
            attribute="numberOfRecordsMatched",
            elem="//" + GENERIC_NAMESPACE_TEMPLATE.format("SearchResults")))
    self.assertEqual(
        num_returned_elems, 1,
        "More than one element returned on GetRecordsById with only one used identifier!"
    )
    real_returned_elems = xml_helper.try_get_element_from_xml(
        "//" + GENERIC_NAMESPACE_TEMPLATE.format("Record"), content_xml)
    num_real_returned_elems = len(real_returned_elems)
    self.assertEqual(
        num_real_returned_elems, num_returned_elems,
        "csw:SearchResults contains wrong numberOfRecordsMatched! {} stated but {} returned!"
        .format(num_returned_elems, num_real_returned_elems))
    # Use a relative search (".//") so each record's OWN identifier is read.
    # The previous absolute "//" search starts at the document root in lxml
    # and returned the document's first identifier for every record,
    # silently weakening this assertion.
    identifiers = [
        xml_helper.try_get_text_from_xml_element(
            real_returned_elem,
            ".//" + GENERIC_NAMESPACE_TEMPLATE.format("identifier"))
        for real_returned_elem in real_returned_elems
    ]
    identifiers_identical = [
        identifier == self.test_id for identifier in identifiers
    ]
    self.assertTrue(
        False not in identifiers_identical,
        "Elements with not matching identifier has been returned: {}".
        format(", ".join(identifiers)))
def _parse_operations_metadata(self, upper_elem):
    """ Parses the <Operation> elements inside of <OperationsMetadata>

    Resolves GET/POST uris for the supported CSW operations and registers
    the advertised outputFormat values as MimeType records.

    Args:
        upper_elem (Element): The upper xml element
    Returns:

    """
    operations_objs = xml_helper.try_get_element_from_xml(
        ".//" + GENERIC_NAMESPACE_TEMPLATE.format("Operation"),
        upper_elem
    )
    # Maps the operation name from the document onto the attribute base name
    # on self (suffixed with '_GET'/'_POST' below)
    attribute_map = {
        OGCOperationEnum.GET_CAPABILITIES.value: 'get_capabilities_uri',
        OGCOperationEnum.DESCRIBE_RECORD.value: 'describe_record_uri',
        OGCOperationEnum.GET_RECORDS.value: 'get_records_uri',
        OGCOperationEnum.GET_RECORD_BY_ID.value: 'get_record_by_id_uri',
    }
    for operation in operations_objs:
        operation_name = xml_helper.try_get_attribute_from_xml_element(
            operation,
            "name",
        )
        # GET endpoint; stays None when the element is missing
        get_uri = xml_helper.try_get_single_element_from_xml(
            ".//" + GENERIC_NAMESPACE_TEMPLATE.format("Get"),
            operation
        )
        get_uri = xml_helper.get_href_attribute(get_uri) if get_uri is not None else None
        # POST endpoint; stays None when the element is missing
        post_uri = xml_helper.try_get_single_element_from_xml(
            ".//" + GENERIC_NAMESPACE_TEMPLATE.format("Post"),
            operation
        )
        post_uri = xml_helper.get_href_attribute(post_uri) if post_uri is not None else None
        if attribute_map.get(operation_name):
            # e.g. self.get_records_uri_GET / self.get_records_uri_POST
            setattr(self, attribute_map.get(operation_name)+'_GET', get_uri)
            setattr(self, attribute_map.get(operation_name)+'_POST', post_uri)
        else:
            # the given operation is not supported for now
            pass
        # Operation-level <Parameter> values; only outputFormat is recorded
        parameters = self._parse_parameter_metadata(operation)
        output_format = parameters.get("outputFormat", None)
        if output_format is not None:
            self.formats_list.append(
                MimeType.objects.get_or_create(
                    operation=operation_name,
                    mime_type=output_format,
                )[0]
            )
def has_dataset_metadata(self, xml):
    """ Checks whether the xml element has an iso 19115 dataset metadata record or not

    Args:
        xml: The xml etree object
    Returns:
         True if element has dataset metadata, false otherwise
    """
    metadata_url_elems = xml_helper.try_get_element_from_xml(
        xml_elem=xml,
        elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("MetadataURL"))
    # At least one <MetadataURL> child means dataset metadata is linked
    return len(metadata_url_elems) > 0
def _get_num_of_layers(self, xml_obj):
    """ Helping function to get the number of the layers in the service

    Args:
        xml_obj: The capabilities xml object
    Returns:
        The number of layer objects inside the xml object
    """
    # BUGFIX: a stray `return` placed before the docstring used to make this
    # function return the docstring string itself; the counting code below
    # was unreachable.
    layer_elems = xml_helper.try_get_element_from_xml(
        "//" + GENERIC_NAMESPACE_TEMPLATE.format("Layer"), xml_obj) or []
    return len(layer_elems)
def _get_axis_order(self, identifier: str):
    """ Returns the axis order for a given spatial reference system.

    The resolved order is cached per identifier, so the external registry
    is only contacted once for each srs.

    Args:
        identifier: The (possibly prefixed) srs identifier
    Returns:
        order (dict): Holds "first_axis" and "second_axis" direction strings
    """
    # Renamed from `id`/`type` to avoid shadowing the Python builtins
    srs_id = self.get_real_identifier(identifier)
    cached = self.cacher.get(str(srs_id))
    if cached is not None:
        return json.loads(cached)
    XML_NAMESPACES["gml"] = "http://www.opengis.net/gml/3.2"
    uri = self.registry_uri + self.id_prefix + str(srs_id)
    response = requests.request("Get", url=uri, proxies=PROXIES)
    response = xml_helper.parse_xml(str(response.content.decode()))
    crs_type = xml_helper.try_get_text_from_xml_element(xml_elem=response, elem="//epsg:type")
    if crs_type == "projected":
        # Projected crs: axis info lives in the cartesian coordinate system
        cartes_elem = xml_helper.try_get_single_element_from_xml(
            "//gml:cartesianCS", response)
        second_level_srs_uri = xml_helper.get_href_attribute(
            xml_elem=cartes_elem)
    elif crs_type == "geographic 2D":
        # Geographic crs: axis info lives in the ellipsoidal coordinate system
        geogr_elem = xml_helper.try_get_single_element_from_xml(
            "//gml:ellipsoidalCS", response)
        second_level_srs_uri = xml_helper.get_href_attribute(
            xml_elem=geogr_elem)
    else:
        second_level_srs_uri = ""
    # Second request resolves the concrete axis directions
    uri = self.registry_uri + second_level_srs_uri
    response = requests.request("Get", url=uri, proxies=PROXIES)
    response = xml_helper.parse_xml(str(response.content.decode()))
    axis_elems = xml_helper.try_get_element_from_xml("//gml:axisDirection", response)
    directions = [axis.text for axis in axis_elems]
    order = {
        "first_axis": directions[0],
        "second_axis": directions[1],
    }
    # Write this to cache, so it can be used on another request!
    self.cacher.set(str(srs_id), json.dumps(order))
    return order
def parse_projection_system(self, layer, layer_obj):
    """ Version specific implementation of the projection system parsing

    Args:
        layer: The xml element which holds the layer info (parsing from)
        layer_obj: The backend model which holds the layer data (parsing to)
    Returns:
        nothing
    """
    crs_elems = xml_helper.try_get_element_from_xml(
        "./" + GENERIC_NAMESPACE_TEMPLATE.format("CRS"), layer)
    # Append each advertised CRS code to the layer's projection list
    layer_obj.capability_projection_system.extend(
        crs_elem.text for crs_elem in crs_elems)
def _parse_xml_polygons(self, xml_obj: _Element, xpath_type: str):
    """ Parse the polygon information from the xml document

    Tries, in order: gml:MultiSurface members, a single gmd:polygon element,
    and finally falls back to the plain bounding box.

    Args:
        xml_obj (_Element): The xml element
        xpath_type (str): The element identificator which is determined by SV_ServiceIdentification or MD_DataIdentification
    Returns:
        nothing
    """
    # Preferred case: bounding polygon given as a gml:MultiSurface
    polygons = xml_helper.try_get_element_from_xml(
        xml_elem=xml_obj,
        elem=
        '//gmd:MD_Metadata/gmd:identificationInfo/{}/gmd:extent/gmd:EX_Extent/gmd:geographicElement/gmd:EX_BoundingPolygon/gmd:polygon/gml:MultiSurface'
        .format(xpath_type))
    if len(polygons) > 0:
        # Each surface member becomes one exterior polygon
        surface_elements = xml_helper.try_get_element_from_xml(
            xml_elem=xml_obj,
            elem=
            "//gmd:MD_Metadata/gmd:identificationInfo/{}/gmd:extent/gmd:EX_Extent/gmd:geographicElement/gmd:EX_BoundingPolygon/gmd:polygon/gml:MultiSurface/gml:surfaceMember"
            .format(xpath_type))
        for element in surface_elements:
            self.polygonal_extent_exterior.append(
                self.parse_polygon(element))
    else:
        # Fallback: a single gml:Polygon inside the bounding polygon element
        polygons = xml_helper.try_get_text_from_xml_element(
            xml_obj,
            '//gmd:MD_Metadata/gmd:identificationInfo/{}/gmd:extent/gmd:EX_Extent/gmd:geographicElement/gmd:EX_BoundingPolygon/gmd:polygon/gml:Polygon'
            .format(xpath_type))
        if polygons is not None:
            polygon = xml_helper.try_get_single_element_from_xml(
                xml_elem=xml_obj,
                elem=
                "//gmd:MD_Metadata/gmd:identificationInfo/{}/gmd:extent/gmd:EX_Extent/gmd:geographicElement/gmd:EX_BoundingPolygon/gmd:polygon"
                .format(xpath_type))
            self.polygonal_extent_exterior.append(
                self.parse_polygon(polygon))
        else:
            # No polygon data at all -> derive the extent from the bounding box
            self.polygonal_extent_exterior.append(
                self.parse_bbox(self.bounding_box))
def parse_lat_lon_bounding_box(self, layer, layer_obj):
    """ Version specific implementation of the bounding box parsing (pre-1.3.0 WMS).

    Reads the minx/miny/maxx/maxy attributes of <LatLonBoundingBox> into
    layer_obj.capability_bbox_lat_lon; missing attributes default to 0.

    Args:
        layer: The xml element which holds the layer info (parsing from)
        layer_obj: The backend model which holds the layer data (parsing to)
    Returns:
        nothing
    """
    try:
        bbox = xml_helper.try_get_element_from_xml(
            elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("LatLonBoundingBox"),
            xml_elem=layer)[0]
        for attr in ("minx", "miny", "maxx", "maxy"):
            # get() already falls back to 0 for missing attributes, so the
            # former `if val is None` re-check was dead code and is dropped.
            layer_obj.capability_bbox_lat_lon[attr] = bbox.get(attr, 0)
    except IndexError:
        # No <LatLonBoundingBox> present for this layer
        pass
def _get_axis_order(self, identifier: str):
    """ Returns the axis order for a given spatial result system

    Resolved orders are cached per identifier, so the registry is only
    contacted once per srs.

    Args:
        identifier: The srs identifier
    Returns:
        order (dict): Holds "first_axis" and "second_axis" direction strings
    """
    # NOTE(review): `id` and `type` below shadow Python builtins - candidates
    # for renaming in a follow-up.
    id = self.get_real_identifier(identifier)
    axis_order = self.cacher.get(str(id))
    if axis_order is not None:
        # Cache hit: stored as a json string
        axis_order = json.loads(axis_order)
        return axis_order
    XML_NAMESPACES["gml"] = "http://www.opengis.net/gml/3.2"
    XML_NAMESPACES["epsg"] = "urn:x-ogp:spec:schema-xsd:EPSG:2.2:dataset"
    # registry_uri acts as a template with a {CRS_IDENTIFIER} placeholder
    uri = self.registry_uri.replace("{CRS_IDENTIFIER}", str(id))
    # change header
    headers = {'Accept': 'application/xml'}
    response = requests.request("Get", url=uri, proxies=PROXIES, headers=headers)
    response = xml_helper.parse_xml(str(response.content.decode()))
    type = xml_helper.try_get_text_from_xml_element(xml_elem=response, elem="//epsg:type")
    if type == "projected":
        # Projected crs: axis info lives in the cartesian coordinate system
        cartes_elem = xml_helper.try_get_single_element_from_xml("//gml:cartesianCS", response)
        second_level_srs_uri = xml_helper.get_href_attribute(xml_elem=cartes_elem)
    elif type in ["geographic 2D", "geographic 2d"]:
        # Geographic crs: axis info lives in the ellipsoidal coordinate system
        geogr_elem = xml_helper.try_get_single_element_from_xml("//gml:ellipsoidalCS", response)
        second_level_srs_uri = xml_helper.get_href_attribute(xml_elem=geogr_elem)
    else:
        second_level_srs_uri = ""
    # Second request resolves the concrete axis directions.
    # NOTE(review): unlike the sibling implementation, this variant uses the
    # href as a full uri on its own - confirm the registry returns absolute urls.
    uri = second_level_srs_uri
    headers = {'Accept': 'application/xml'}
    response = requests.request("Get", url=uri, proxies=PROXIES, headers=headers)
    response = xml_helper.parse_xml(str(response.content.decode()))
    axis = xml_helper.try_get_element_from_xml("//gml:axisDirection", response)
    order = []
    for a in axis:
        order.append(a.text)
    # assumes at least two axis directions are returned - TODO confirm
    order = {
        "first_axis": order[0],
        "second_axis": order[1],
    }
    # Write this to cache, so it can be used on another request!
    self.cacher.set(str(id), json.dumps(order))
    return order
def parse_formats(self, layer, layer_obj):
    """ Collects the supported <Format> entries per operation into layer_obj.format_list. """
    supported_actions = ["GetMap", "GetCapabilities", "GetFeatureInfo",
                         "DescribeLayer", "GetLegendGraphic", "GetStyles"]
    results = {}
    for action in supported_actions:
        try:
            results[action] = []
            format_path = ("//" + GENERIC_NAMESPACE_TEMPLATE.format(action) +
                           "/" + GENERIC_NAMESPACE_TEMPLATE.format("Format"))
            format_elems = xml_helper.try_get_element_from_xml(
                elem=format_path,
                xml_elem=layer
            )
            for format_elem in format_elems:
                results[action].append(format_elem.text)
        except AttributeError:
            # Operation not advertised - keep the (possibly empty) list as-is
            pass
    layer_obj.format_list = results
def parse_dataset_md(self, layer, layer_obj):
    """ Collects ISO dataset metadata records linked from the layer's <MetadataURL> elements.

    Args:
        layer: The xml element which holds the layer info (parsing from)
        layer_obj: The backend model which holds the layer data (parsing to)
    Returns:
        nothing
    """
    # check for possible dataset metadata
    if not self.has_dataset_metadata(layer):
        return
    iso_metadata_xml_elements = xml_helper.try_get_element_from_xml(
        xml_elem=layer,
        elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("MetadataURL") +
             "/" + GENERIC_NAMESPACE_TEMPLATE.format("OnlineResource")
    )
    for iso_xml in iso_metadata_xml_elements:
        iso_uri = xml_helper.get_href_attribute(xml_elem=iso_xml)
        try:
            iso_metadata = ISOMetadata(uri=iso_uri, origin=ResourceOriginEnum.CAPABILITIES.value)
        except Exception:
            # Deliberate best-effort: some iso metadata documents are filled
            # wrongly - skip those instead of aborting the whole layer parse.
            # (Dropped the previously unused `as e` binding.)
            continue
        layer_obj.iso_metadata.append(iso_metadata)
def parse_bounding_box_generic(self, layer, layer_obj, elem_name):
    """ Stores one corner dict per <BoundingBox> element, keyed by its reference system attribute. """
    corner_attrs = ("minx", "miny", "maxx", "maxy")
    bbox_elems = xml_helper.try_get_element_from_xml(
        elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("BoundingBox"),
        xml_elem=layer)
    for bbox_elem in bbox_elems:
        srs = bbox_elem.get(elem_name)
        # Missing corner attributes end up as None, matching element.get()
        layer_obj.capability_bbox_srs[srs] = {
            attr: bbox_elem.get(attr) for attr in corner_attrs
        }
def get_layer_by_identifier(self, identifier: str):
    """ Returns the layer identified by the parameter 'identifier' as OGCWebMapServiceLayer object

    Args:
        identifier (str): The identifier as string
    Returns:
        layer_obj (OGCWebMapServiceLayer): The found and parsed layer
    """
    if self.service_capabilities_xml is None:
        # load xml, might have been forgotten
        self.get_capabilities()
    doc = xml_helper.parse_xml(xml=self.service_capabilities_xml)
    matches = xml_helper.try_get_element_from_xml(
        xml_elem=doc,
        elem="//Layer/Name[text()='{}']/parent::Layer".format(identifier))
    if not matches:
        return None
    return self._start_single_layer_parsing(matches[0])
def parse_dimension(self, layer, layer_obj):
    """ The version specific implementation of the dimension parsing.

    In pre-1.3.0 WMS the dimension values live in a sibling <Extent> element
    that is matched to the <Dimension> via its name attribute.

    Args:
        layer: The xml element which holds the layer info (parsing from)
        layer_obj: The backend model which holds the layer data (parsing to)
    Returns:
        nothing
    """
    dim_list = []
    try:
        dims = xml_helper.try_get_element_from_xml(
            elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("Dimension"),
            xml_elem=layer)
        for dim in dims:
            # Find the matching <Extent name="..."> for this dimension
            ext = xml_helper.try_get_single_element_from_xml(
                elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("Extent") +
                     '[@name="' + dim.get('name') + '"]',
                xml_elem=layer)
            dim_list.append({
                "type": dim.get("name"),
                "units": dim.get("units"),
                "extent": ext.text,
            })
    except (IndexError, AttributeError):
        # Dimension info is optional; a missing <Extent> (ext is None ->
        # AttributeError on .text) ends the collection.
        # (The previously bound-but-unused `as error` has been dropped.)
        pass
    layer_obj.dimension_list = dim_list
def test_new_service_check_reference_systems(self):
    """ Tests whether the layers have all their reference systems, which are provided by the capabilities document.

    Checks for each layer.

    Returns:

    """
    # BUGFIX: a stray `return` in front of this docstring previously ended
    # the test immediately, so none of the assertions below ever ran.
    layers = self.service_wms.get_subelements().select_related(
        'metadata').prefetch_related('metadata__reference_system')
    cap_xml = self.cap_doc_wms.content
    for layer in layers:
        xml_layer_obj = xml_helper.try_get_single_element_from_xml(
            "//Name[text()='{}']/parent::Layer".format(layer.identifier),
            cap_xml)
        if xml_layer_obj is None:
            # it is possible, that there are layers without a real identifier -> this is generally bad.
            # we have to ignore these and concentrate on those, which are identifiable
            continue
        xml_ref_systems = xml_helper.try_get_element_from_xml(
            "./" + GENERIC_NAMESPACE_TEMPLATE.format("SRS"),
            xml_layer_obj)
        xml_ref_systems_strings = []
        for xml_ref_system in xml_ref_systems:
            xml_ref_systems_strings.append(
                xml_helper.try_get_text_from_xml_element(xml_ref_system))
        layer_ref_systems = layer.metadata.reference_system.all()
        for ref_system in layer_ref_systems:
            self.assertTrue(
                ref_system.code in ALLOWED_SRS,
                msg="Unallowed reference system registered: {}".format(
                    ref_system.code))
            self.assertTrue(
                ref_system.code in xml_ref_systems_strings,
                msg=
                "Reference system registered, which was not in the service: {}"
                .format(ref_system.code))
def test_get_records_constraint(self):
    """ Test whether the constraint parameter is working properly

    Returns:

    """
    params = {
        "service": "CSW",
        "version": "2.0.2",
        "request": "GetRecords",
        "elementsetname": "brief",
        "resulttype": "results",
        "constraint": "dc:identifier like %{}%".format(self.test_id),
        "constraintlanguage": "CQL_TEXT",
    }
    response = self.client.get(reverse(CSW_PATH), data=params)
    status_code = response.status_code
    xml_response = xml_helper.parse_xml(response.content)
    self.assertEqual(response.status_code, 200,
                     WRONG_STATUS_CODE_TEMPLATE.format(status_code))
    self.assertIsNotNone(xml_response, INVALID_XML_MSG)
    # Every returned identifier must contain the constrained test id
    id_elems = xml_helper.try_get_element_from_xml(
        "//" + GENERIC_NAMESPACE_TEMPLATE.format("identifier"), xml_response)
    returned_ids = [
        xml_helper.try_get_text_from_xml_element(id_elem) for id_elem in id_elems
    ]
    self.assertTrue(
        all(self.test_id in returned_id for returned_id in returned_ids),
        "A result was returned, which does not fit to the given constraint parameter!"
    )
def _parse_single_layer(self, layer, parent, step_size: float = None):
    """ Parses data from an xml <Layer> element into the OGCWebMapLayer object.

    Runs recursive through own children for further parsing

    Args:
        layer: The layer xml element
        parent: The parent OGCWebMapLayer object
        step_size (float): Progress increment added to the task state per layer
    Returns:
        nothing
    """
    # iterate over all top level layer and find their children
    layer_obj = self._start_single_layer_parsing(layer)
    # Guard against step_size=None (its default): adding None to the current
    # progress integer would raise a TypeError when running inside a task.
    if current_task and step_size is not None:
        current_task.update_state(
            state=states.STARTED,
            meta={
                'current': AsyncResult(current_task.request.id).info.get(
                    "current", 0) + step_size,
                'phase': "Parsing {}".format(layer_obj.title),
            })
    layer_obj.parent = parent
    if self.layers is None:
        self.layers = []
    self.layers.append(layer_obj)
    sublayers = xml_helper.try_get_element_from_xml(
        elem="./" + GENERIC_NAMESPACE_TEMPLATE.format("Layer"),
        xml_elem=layer)
    if parent is not None:
        parent.child_layers.append(layer_obj)
    self._parse_layers_recursive(layers=sublayers,
                                 parent=layer_obj,
                                 step_size=step_size)