def test_csw_search_count(self):
    """Verify that GeoNode CSW can handle search counting"""
    expected_url = urljoin('http://localhost:8001/', '/catalogue/csw')
    csw = get_catalogue(
        backend={
            'ENGINE': 'geonode.catalogue.backends.pycsw_local',
            'URL': expected_url,
        },
        skip_caps=False)
    # the catalogue must report the URL it was configured with
    self.assertEqual(csw.catalogue.url, expected_url)
    # fetch every record via the generic CSW typename
    csw.catalogue.getrecords(typenames='csw:Record')
    self.assertEqual(csw.catalogue.results['matches'], 16,
                     'Expected 16 records')
    # fetch every ISO record; numberOfRecordsMatched must agree
    csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
    self.assertEqual(csw.catalogue.results['matches'], 16,
                     'Expected 16 records against ISO typename')
def delete_record(self, id):
    """Remove the CSW record identified by ``id`` from the catalogue."""
    get_catalogue().remove_record(id)
def test_csw_outputschema_dc_bbox(self):
    """Verify that GeoNode can handle ISO metadata BBOX model with Dublin Core outputSchema"""
    # GeoNetwork is not to spec for DC BBOX output
    # once ticket http://trac.osgeo.org/geonetwork/ticket/730 is fixed
    # we can remove this condition
    csw = get_catalogue()
    if csw.catalogue.type != 'geonetwork':
        # search for 'san_andres_y_providencia_location', output as Dublin Core
        csw.catalogue.getrecords(
            typenames='gmd:MD_Metadata',
            keywords=['san_andres_y_providencia_location'],
            outputschema='http://www.opengis.net/cat/csw/2.0.2',
            esn='full')
        # fix: dict.values() is a view in Python 3 and does not support
        # indexing; materialize it before taking the first record
        record = list(csw.catalogue.records.values())[0]
        # test CRS constructs in Dublin Core
        self.assertEqual(record.bbox.crs.code, 4326,
                         'Expected a specific CRS code value in Dublin Core model')
        # test BBOX properties in Dublin Core
        self.assertEqual(record.bbox.minx, '-81.8593555',
                         'Expected a specific minx coordinate value in Dublin Core model')
        self.assertEqual(record.bbox.miny, '12.1665322',
                         'Expected a specific minx coordinate value in Dublin Core model')
        self.assertEqual(record.bbox.maxx, '-81.356409',
                         'Expected a specific maxx coordinate value in Dublin Core model')
        self.assertEqual(record.bbox.maxy, '13.396306',
                         'Expected a specific maxy coordinate value in Dublin Core model')
def test_csw_base(self):
    """Verify that GeoNode works against any CSW"""
    csw = get_catalogue(skip_caps=False)
    # every advertised OGC:CSW GET URL must match GeoNode's own URL
    for operation in csw.catalogue.operations:
        self.assertEqual(
            csw.catalogue.url,
            operation.methods["Get"]["url"],
            "Expected GeoNode URL to be equal to all CSW URLs")
    # CSW version 2.0.2 must be supported
    self.assertEqual(csw.catalogue.version, "2.0.2",
                     'Expected "2.0.2" as a supported version')
    # transactions must be supported (except by the local pycsw backend)
    if csw.catalogue.type != "pycsw_local":
        op_names = [o.name for o in csw.catalogue.operations]
        self.assertTrue("Transaction" in op_names,
                        "Expected Transaction to be a supported operation")
    # GetRecords must advertise the ISO typename
    for operation in csw.catalogue.operations:
        if operation.name == "GetRecords":
            self.assertTrue(
                "gmd:MD_Metadata" in operation.parameters["typeNames"]["values"],
                'Expected "gmd:MD_Metadata" to be a supported typeNames value')
    # GetRecords must advertise the ISO output schema
    for operation in csw.catalogue.operations:
        if operation.name == "GetRecords":
            self.assertTrue(
                "http://www.isotc211.org/2005/gmd" in operation.parameters["outputSchema"]["values"],
                'Expected "http://www.isotc211.org/2005/gmd" to be a supported outputSchema value')
def test_csw_search_count(self):
    """Verify that GeoNode CSW can handle search counting"""
    expected_url = urljoin('http://localhost:8001/', '/catalogue/csw')
    csw = get_catalogue(
        backend={
            'ENGINE': 'geonode.catalogue.backends.pycsw_local',
            'URL': expected_url,
        },
        skip_caps=False)
    self.assertEqual(csw.catalogue.url, expected_url)
    # get all records
    csw.catalogue.getrecords(typenames='csw:Record')
    self.assertEqual(csw.catalogue.results['matches'], 16,
                     'Expected 16 records')
    # get all ISO records, test for numberOfRecordsMatched
    csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
    self.assertEqual(csw.catalogue.results['matches'], 16,
                     'Expected 16 records against ISO typename')
    # Make sure it currently counts both published and unpublished ones too
    try:
        ResourceBase.objects.filter(is_published=True).update(is_published=False)
        # all ISO records must still be counted while unpublished
        csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
        self.assertEqual(csw.catalogue.results['matches'], 16,
                         'Expected 16 records against ISO typename')
    finally:
        # always restore the published flag, even if the assertion fails
        ResourceBase.objects.filter(is_published=False).update(is_published=True)
def test_csw_base(self):
    """Verify that GeoNode works against any CSW"""
    csw = get_catalogue(skip_caps=False)
    # test that OGC:CSW URLs are identical to what is defined in GeoNode
    for operation in csw.catalogue.operations:
        self.assertEqual(csw.catalogue.url, operation.methods['Get']['url'],
                         'Expected GeoNode URL to be equal to all CSW URLs')
    # test that OGC:CSW 2.0.2 is supported
    self.assertEqual(csw.catalogue.version, '2.0.2',
                     'Expected "2.0.2" as a supported version')
    # test that transactions are supported (local pycsw is read-only)
    if csw.catalogue.type != 'pycsw_local':
        supported_ops = [o.name for o in csw.catalogue.operations]
        self.assertTrue('Transaction' in supported_ops,
                        'Expected Transaction to be a supported operation')
    # test that gmd:MD_Metadata is a supported typename
    for operation in csw.catalogue.operations:
        if operation.name == 'GetRecords':
            self.assertTrue(
                'gmd:MD_Metadata' in operation.parameters['typeNames']['values'],
                'Expected "gmd:MD_Metadata" to be a supported typeNames value')
    # test that http://www.isotc211.org/2005/gmd is a supported output schema
    for operation in csw.catalogue.operations:
        if operation.name == 'GetRecords':
            self.assertTrue(
                'http://www.isotc211.org/2005/gmd' in operation.parameters['outputSchema']['values'],
                'Expected "http://www.isotc211.org/2005/gmd" to be a supported outputSchema value')
def test_csw_query_bbox(self):
    """Verify that GeoNode CSW can handle bbox queries"""
    csw = get_catalogue()
    # spatial filter covering most of the Americas/Atlantic test data
    csw.catalogue.getrecords(bbox=[-140, -70, 80, 70])
    logger.debug(csw.catalogue.results)
    expected = {'matches': 7, 'nextrecord': 0, 'returned': 7}
    self.assertEqual(csw.catalogue.results, expected)
def test_csw_outputschema_iso(self):
    """Verify that GeoNode can handle ISO metadata with ISO outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as ISO
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.isotc211.org/2005/gmd',
        esn='full')
    # fix: dict.values() is not indexable in Python 3; materialize first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly
    self.assertEqual(record.identification.title,
                     'San Andres Y Providencia Location',
                     'Expected a specific title in ISO model')
    # test that the ISO abstract maps correctly
    self.assertEqual(record.identification.abstract,
                     'No abstract provided',
                     'Expected a specific abstract in ISO model')
    # test BBOX properties
    self.assertEqual(record.identification.bbox.minx, '-81.8593555',
                     'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(record.identification.bbox.miny, '12.1665322',
                     'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(record.identification.bbox.maxx, '-81.356409',
                     'Expected a specific maxx coordinate value in ISO model')
    self.assertEqual(record.identification.bbox.maxy, '13.396306',
                     'Expected a specific maxy coordinate value in ISO model')
def test_csw_bulk_upload(self):
    """Verify that GeoNode CSW can handle bulk upload of ISO and FGDC metadata"""
    # GeoNetwork and deegree do not transform ISO <-> FGDC
    # once this is implemented we can remove this condition
    csw = get_catalogue()
    if csw.catalogue.type == 'pycsw_http':
        identifiers = []
        # upload all FGDC metadata documents
        for root, dirs, files in os.walk(os.path.join(gisdata.GOOD_METADATA, 'sangis.org')):
            for mfile in files:
                if mfile.endswith('.xml'):
                    # fix: close the file handle deterministically
                    with open(os.path.join(root, mfile)) as fh:
                        md_doc = etree.tostring(dlxml.fromstring(fh.read()))
                    csw.catalogue.transaction(
                        ttype='insert',
                        typename='fgdc:metadata',
                        record=md_doc)
                    identifiers.append(csw.catalogue.results['insertresults'][0])
        # upload all ISO metadata documents
        for md in glob.glob(os.path.join(gisdata.GOOD_METADATA, 'wustl.edu', '*.xml')):
            with open(md) as fh:
                md_doc = etree.tostring(dlxml.fromstring(fh.read()))
            csw.catalogue.transaction(
                ttype='insert',
                typename='gmd:MD_Metadata',
                record=md_doc)
            identifiers.append(csw.catalogue.results['insertresults'][0])
        # query against FGDC typename
        csw.catalogue.getrecords(typenames='fgdc:metadata')
        # fix: stale messages referenced the wrong counts (187/194/381)
        self.assertEqual(csw.catalogue.results['matches'], 72,
                         'Expected 72 records in FGDC model')
        # query against ISO typename
        csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
        self.assertEqual(csw.catalogue.results['matches'], 115,
                         'Expected 115 records in ISO model')
        # query against FGDC and ISO typename
        csw.catalogue.getrecords(typenames='gmd:MD_Metadata fgdc:metadata')
        self.assertEqual(csw.catalogue.results['matches'], 187,
                         'Expected 187 records total in FGDC and ISO model')
        # clean up
        for i in identifiers:
            csw.catalogue.transaction(ttype='delete', identifier=i)
def prefix_xsl_line(req, id):
    """Return the resource's metadata XML prefixed with an XSL stylesheet PI.

    ``req`` is the incoming request (unused); ``id`` is the ResourceBase pk.
    """
    # fix: resolve the resource BEFORE the catalogue try-block so a missing
    # resource raises Http404 instead of being swallowed and formatted as
    # 'None' in the error message (matches the sibling implementations)
    resource = get_object_or_404(ResourceBase, pk=id)
    # if the layer is in the catalogue, try to get the distribution urls
    # that cannot be precalculated.
    try:
        catalogue = get_catalogue()
        record = catalogue.get_record(resource.uuid)
        if record:
            logger.debug(record.xml)
    except Exception:
        logger.debug(traceback.format_exc())
        msg = f'Could not connect to catalogue to save information for layer "{str(resource)}"'
        return HttpResponse(msg)
    try:
        # generate an XML document (GeoNode's default is ISO)
        if resource.metadata_uploaded and resource.metadata_uploaded_preserve:
            md_doc = etree.tostring(dlxml.fromstring(resource.metadata_xml))
        else:
            md_doc = catalogue.catalogue.csw_gen_xml(
                resource, settings.CATALOG_METADATA_TEMPLATE)
        xml = md_doc
    except Exception:
        logger.debug(traceback.format_exc())
        return HttpResponse("Resource Metadata not available!")
    # only strip the trailing slash from absolute URLs
    site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith(
        'http') else settings.SITEURL
    xsl_static = getattr(settings, 'CATALOG_METADATA_XSL', '/static/metadataxsl/metadata.xsl')
    xsl_path = f'{site_url}{xsl_static}'
    xsl_line = f'<?xml-stylesheet type="text/xsl" href="{xsl_path}"?>'
    return HttpResponse(xsl_line + xml, content_type="text/xml")
def test_csw_bulk_upload(self):
    """Verify that GeoNode CSW can handle bulk upload of ISO and FGDC metadata"""
    # GeoNetwork and deegree do not transform ISO <-> FGDC
    # once this is implemented we can remove this condition
    csw = get_catalogue()
    if csw.catalogue.type == 'pycsw_http':
        identifiers = []
        # upload all FGDC metadata documents
        for root, dirs, files in os.walk(os.path.join(gisdata.GOOD_METADATA, 'sangis.org')):
            for mfile in files:
                if mfile.endswith('.xml'):
                    # fix: close the file handle deterministically
                    with open(os.path.join(root, mfile)) as fh:
                        md_doc = etree.tostring(etree.fromstring(fh.read()))
                    csw.catalogue.transaction(
                        ttype='insert',
                        typename='fgdc:metadata',
                        record=md_doc)
                    identifiers.append(csw.catalogue.results['insertresults'][0])
        # upload all ISO metadata documents
        for md in glob.glob(os.path.join(gisdata.GOOD_METADATA, 'wustl.edu', '*.xml')):
            with open(md) as fh:
                md_doc = etree.tostring(etree.fromstring(fh.read()))
            csw.catalogue.transaction(
                ttype='insert',
                typename='gmd:MD_Metadata',
                record=md_doc)
            identifiers.append(csw.catalogue.results['insertresults'][0])
        # query against FGDC typename
        csw.catalogue.getrecords(typenames='fgdc:metadata')
        # fix: stale messages referenced the wrong counts (187/194/381)
        self.assertEqual(csw.catalogue.results['matches'], 72,
                         'Expected 72 records in FGDC model')
        # query against ISO typename
        csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
        self.assertEqual(csw.catalogue.results['matches'], 115,
                         'Expected 115 records in ISO model')
        # query against FGDC and ISO typename
        csw.catalogue.getrecords(typenames='gmd:MD_Metadata fgdc:metadata')
        self.assertEqual(csw.catalogue.results['matches'], 187,
                         'Expected 187 records total in FGDC and ISO model')
        # clean up
        for i in identifiers:
            csw.catalogue.transaction(ttype='delete', identifier=i)
def catalogue_post_save(instance, sender, **kwargs):
    """Get information from catalogue"""
    # queryset (not instance) so the final write bypasses model save signals
    resources = ResourceBase.objects.filter(id=instance.resourcebase_ptr.id)
    # Update the Catalog
    try:
        catalogue = get_catalogue()
        catalogue.create_record(instance)
        record = catalogue.get_record(instance.uuid)
    except EnvironmentError as err:
        msg = f'Could not connect to catalogue to save information for layer "{instance.name}"'
        # a refused connection is tolerated (catalogue may be down);
        # any other OS-level error is re-raised
        if err.errno == errno.ECONNREFUSED:
            LOGGER.warn(msg, err)
            return
        else:
            raise err
    # record missing: log and bail out rather than crash the signal chain
    if not record:
        msg = f'Metadata record for {instance.title} does not exist, check the catalogue signals.'
        LOGGER.warning(msg)
        return
    if not hasattr(record, 'links'):
        msg = f'Metadata record for {instance.title} should contain links.'
        raise Exception(msg)
    # Create the different metadata links with the available formats
    for mime, name, metadata_url in record.links['metadata']:
        try:
            Link.objects.get_or_create(
                resource=instance.resourcebase_ptr,
                url=metadata_url,
                defaults=dict(
                    name=name,
                    extension='xml',
                    mime=mime,
                    link_type='metadata'))
        except Exception:
            # get_or_create can fail (e.g. duplicates); fall back to a
            # bulk update of the matching link rows
            _d = dict(name=name,
                      extension='xml',
                      mime=mime,
                      link_type='metadata')
            Link.objects.filter(
                resource=instance.resourcebase_ptr,
                url=metadata_url,
                extension='xml',
                link_type='metadata').update(**_d)
    # generate an XML document (GeoNode's default is ISO)
    if instance.metadata_uploaded and instance.metadata_uploaded_preserve:
        # user-supplied metadata must be preserved verbatim
        md_doc = etree.tostring(dlxml.fromstring(instance.metadata_xml))
    else:
        md_doc = catalogue.catalogue.csw_gen_xml(
            instance, settings.CATALOG_METADATA_TEMPLATE)
    try:
        csw_anytext = catalogue.catalogue.csw_gen_anytext(md_doc)
    except Exception as e:
        # anytext generation is best-effort; store empty on failure
        LOGGER.exception(e)
        csw_anytext = ''
    resources.update(
        metadata_xml=md_doc,
        csw_wkt_geometry=instance.geographic_bounding_box,
        csw_anytext=csw_anytext)
def test_csw_outputschema_fgdc(self):
    """Verify that GeoNode can handle ISO metadata with FGDC outputSchema"""
    # GeoNetwork and deegree do not transform ISO <-> FGDC
    # once this is implemented we can remove this condition
    csw = get_catalogue()
    if csw.catalogue.type in ['pycsw_http', 'pycsw_local']:
        # get all ISO records in FGDC schema
        csw.catalogue.getrecords(
            typenames='gmd:MD_Metadata',
            keywords=['san_andres_y_providencia_location'],
            outputschema='http://www.opengis.net/cat/csw/csdgm')
        # fix: dict.values() is not indexable in Python 3; materialize first
        record = list(csw.catalogue.records.values())[0]
        # test that the ISO title maps correctly in FGDC
        self.assertEqual(record.idinfo.citation.citeinfo['title'],
                         'San Andres Y Providencia Location',
                         'Expected a specific title in FGDC model')
        # test that the ISO abstract maps correctly in FGDC
        self.assertEqual(record.idinfo.descript.abstract,
                         'No abstract provided',
                         'Expected a specific abstract in FGDC model')
def test_csw_outputschema_dc(self):
    """Verify that GeoNode can handle ISO metadata with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    # fix: dict.values() is not indexable in Python 3; materialize first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly in Dublin Core
    self.assertEqual(record.title, 'San Andres Y Providencia Location',
                     'Expected a specific title in Dublin Core model')
    # test that the ISO abstract maps correctly in Dublin Core
    self.assertEqual(record.abstract, 'No abstract provided',
                     'Expected a specific abstract in Dublin Core model')
    # test for correct service link articulation
    for link in record.references:
        if link['scheme'] == 'OGC:WMS':
            self.assertEqual(link['url'],
                             'http://localhost:8080/geoserver/geonode/wms',
                             'Expected a specific OGC:WMS URL')
        elif link['scheme'] == 'OGC:WFS':
            self.assertEqual(link['url'],
                             'http://localhost:8080/geoserver/geonode/wfs',
                             'Expected a specific OGC:WFS URL')
def test_csw_outputschema_fgdc(self):
    """Verify that GeoNode CSW can handle ISO metadata with FGDC outputSchema"""
    # GeoNetwork and deegree do not transform ISO <-> FGDC
    # once this is implemented we can remove this condition
    csw = get_catalogue()
    if csw.catalogue.type in ['pycsw_http', 'pycsw_local']:
        # get all ISO records in FGDC schema
        csw.catalogue.getrecords(
            typenames='gmd:MD_Metadata',
            keywords=['san_andres_y_providencia_location'],
            outputschema='http://www.opengis.net/cat/csw/csdgm')
        # fix: dict.values() is not indexable in Python 3; materialize first
        record = list(csw.catalogue.records.values())[0]
        # test that the ISO title maps correctly in FGDC
        self.assertEqual(
            record.idinfo.citation.citeinfo['title'],
            'San Andres Y Providencia Location',
            'Expected a specific title in FGDC model')
        # test that the ISO abstract maps correctly in FGDC
        self.assertEqual(
            record.idinfo.descript.abstract,
            'No abstract provided',
            'Expected a specific abstract in FGDC model')
def test_csw_outputschema_dc_bbox(self):
    """Verify that GeoNode CSW can handle ISO metadata BBOX model with Dublin Core outputSchema"""
    # GeoNetwork is not to spec for DC BBOX output
    # once ticket http://trac.osgeo.org/geonetwork/ticket/730 is fixed
    # we can remove this condition
    csw = get_catalogue()
    if csw.catalogue.type != 'geonetwork':
        # search for 'san_andres_y_providencia_location', output as Dublin
        # Core
        csw.catalogue.getrecords(
            typenames='gmd:MD_Metadata',
            keywords=['san_andres_y_providencia_location'],
            outputschema='http://www.opengis.net/cat/csw/2.0.2',
            esn='full')
        # fix: dict.values() is not indexable in Python 3; materialize first
        record = list(csw.catalogue.records.values())[0]
        # test CRS constructs in Dublin Core
        # fix: assertEquals is a deprecated alias of assertEqual
        self.assertEqual(record.bbox.crs.code, 4326)
        # test BBOX properties in Dublin Core
        from decimal import Decimal
        logger.debug([
            Decimal(record.bbox.minx), Decimal(record.bbox.miny),
            Decimal(record.bbox.maxx), Decimal(record.bbox.maxy)
        ])
        self.assertEqual(Decimal(record.bbox.minx), Decimal('-81.8593555'))
        self.assertEqual(Decimal(record.bbox.miny), Decimal('12.1665322'))
        self.assertEqual(Decimal(record.bbox.maxx), Decimal('-81.356409'))
        self.assertEqual(Decimal(record.bbox.maxy), Decimal('13.396306'))
def test_csw_outputschema_dc(self):
    """Verify that GeoNode can handle ISO metadata with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    # fix: dict.values() is not indexable in Python 3; materialize first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly in Dublin Core
    self.assertEqual(record.title, 'San Andres Y Providencia Location',
                     'Expected a specific title in Dublin Core model')
    # test that the ISO abstract maps correctly in Dublin Core
    self.assertEqual(record.abstract, 'No abstract provided',
                     'Expected a specific abstract in Dublin Core model')
    # test for correct service link articulation
    for link in record.references:
        if link['scheme'] == 'OGC:WMS':
            self.assertEqual(
                link['url'],
                'http://localhost:8080/geoserver/geonode/wms',
                'Expected a specific OGC:WMS URL')
        elif link['scheme'] == 'OGC:WFS':
            self.assertEqual(
                link['url'],
                'http://localhost:8080/geoserver/geonode/wfs',
                'Expected a specific OGC:WFS URL')
def test_csw_outputschema_dc(self):
    """Verify that GeoNode CSW can handle ISO metadata with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    first_record = list(csw.catalogue.records.values())[0]
    # the ISO title must map correctly into Dublin Core
    self.assertEqual(first_record.title, "San Andres Y Providencia Location")
    # the ISO abstract must map correctly into Dublin Core
    self.assertEqual(first_record.abstract, 'No abstract provided')
    # every advertised OGC service link must point at the GeoServer OWS endpoint
    for link in first_record.references:
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if link['scheme'] in ('OGC:WMS', 'OGC:WFS', 'OGC:WCS'):
                self.assertEqual(
                    link['url'],
                    f"{settings.GEOSERVER_PUBLIC_LOCATION}ows")
def prefix_xsl_line(req, id):
    """Return the resource's metadata XML prefixed with an XSL stylesheet PI.

    ``req`` is the incoming request (unused); ``id`` is the ResourceBase pk.
    """
    resource = get_object_or_404(ResourceBase, pk=id)
    # if the layer is in the catalogue, try to get the distribution urls
    # that cannot be precalculated.
    try:
        catalogue = get_catalogue()
        record = catalogue.get_record(resource.uuid)
    except Exception as err:
        msg = 'Could not connect to catalogue to save information for layer "%s"' % str(
            resource.title)
        # fix: logger.warn is a deprecated alias of logger.warning
        logger.warning(msg)
        raise err
    try:
        # also acts as a guard: raises if get_record returned None,
        # dropping into the except branch below
        xml = record.xml
        # generate an XML document (GeoNode's default is ISO)
        if resource.metadata_uploaded and resource.metadata_uploaded_preserve:
            md_doc = etree.tostring(etree.fromstring(resource.metadata_xml))
        else:
            md_doc = catalogue.catalogue.csw_gen_xml(
                resource, 'catalogue/full_metadata.xml')
        xml = md_doc
    # fix: BaseException also swallows KeyboardInterrupt/SystemExit;
    # Exception is the broadest we should catch here
    except Exception:
        logger.error(traceback.format_exc())
        return HttpResponse("Resource Metadata not available!")
    xsl_path = '{}/static/metadataxsl/metadata.xsl'.format(
        settings.SITEURL.rstrip('/'))
    xsl_line = '<?xml-stylesheet type="text/xsl" href="{}"?>'.format(xsl_path)
    return HttpResponse(xsl_line + xml, content_type="text/xml")
def test_csw_outputschema_dc_bbox(self):
    """Verify that GeoNode CSW can handle ISO metadata BBOX model with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin
    # Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['san_andres_y_providencia_location'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    first_record = list(csw.catalogue.records.values())[0]
    # test CRS constructs in Dublin Core
    self.assertEqual(first_record.bbox.crs.code, 4326)
    # test BBOX properties in Dublin Core
    from decimal import Decimal
    bbox = first_record.bbox
    corners = [Decimal(bbox.minx), Decimal(bbox.miny),
               Decimal(bbox.maxx), Decimal(bbox.maxy)]
    logger.debug(corners)
    expected = [Decimal('-81.859356'), Decimal('12.166532'),
                Decimal('-81.356409'), Decimal('13.396306')]
    # coordinates only need to agree to ~3 decimal places
    for actual, wanted in zip(corners, expected):
        self.assertAlmostEqual(actual, wanted, places=3)
def test_csw_outputschema_dc(self):
    """Verify that GeoNode can handle ISO metadata with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames="gmd:MD_Metadata",
        keywords=["%san_andres_y_providencia_location%"],
        outputschema="http://www.opengis.net/cat/csw/2.0.2",
        esn="full",
    )
    # fix: dict.values() is not indexable in Python 3; materialize first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly in Dublin Core
    self.assertEqual(
        record.title, "San Andres Y Providencia Location",
        "Expected a specific title in Dublin Core model"
    )
    # test that the ISO abstract maps correctly in Dublin Core
    self.assertEqual(record.abstract, "No abstract provided",
                     "Expected a specific abstract in Dublin Core model")
    # test for correct service link articulation
    for link in record.references:
        if link["scheme"] == "OGC:WMS":
            self.assertEqual(
                link["url"], "http://localhost:8080/geoserver/geonode/wms",
                "Expected a specific OGC:WMS URL"
            )
        elif link["scheme"] == "OGC:WFS":
            self.assertEqual(
                link["url"], "http://localhost:8080/geoserver/geonode/wfs",
                "Expected a specific OGC:WFS URL"
            )
def prefix_xsl_line(req, id):
    """Return the resource's metadata XML prefixed with an XSL stylesheet PI.

    ``req`` is the incoming request (unused); ``id`` is the ResourceBase pk.
    """
    resource = get_object_or_404(ResourceBase, pk=id)
    catalogue = get_catalogue()
    record = catalogue.get_record(resource.uuid)
    try:
        # also acts as a guard: raises if get_record returned None,
        # dropping into the except branch below
        xml = record.xml
        # generate an XML document (GeoNode's default is ISO)
        if resource.metadata_uploaded and resource.metadata_uploaded_preserve:
            md_doc = etree.tostring(etree.fromstring(resource.metadata_xml))
        else:
            md_doc = catalogue.catalogue.csw_gen_xml(resource, 'catalogue/full_metadata.xml')
        xml = md_doc
    # fix: bare except also swallows KeyboardInterrupt/SystemExit;
    # catch Exception instead
    except Exception:
        logger.error(traceback.format_exc())
        return HttpResponse(
            "Resource Metadata not available!"
        )
    xsl_path = '{}/static/metadataxsl/metadata.xsl'.format(settings.SITEURL.rstrip('/'))
    xsl_line = '<?xml-stylesheet type="text/xsl" href="{}"?>'.format(xsl_path)
    return HttpResponse(
        xsl_line + xml,
        content_type="text/xml"
    )
def prefix_xsl_line(req, id):
    """Return the resource's metadata XML prefixed with an XSL stylesheet PI.

    ``req`` is the incoming request (unused); ``id`` is the ResourceBase pk.
    """
    resource = get_object_or_404(ResourceBase, pk=id)
    # if the layer is in the catalogue, try to get the distribution urls
    # that cannot be precalculated.
    try:
        catalogue = get_catalogue()
        record = catalogue.get_record(resource.uuid)
        if record:
            logger.debug(record.xml)
    except Exception as err:
        msg = 'Could not connect to catalogue to save information for layer "%s"' % str(resource.title)
        # fix: logger.warn is a deprecated alias of logger.warning
        logger.warning(msg)
        raise err
    try:
        # generate an XML document (GeoNode's default is ISO)
        if resource.metadata_uploaded and resource.metadata_uploaded_preserve:
            md_doc = etree.tostring(etree.fromstring(resource.metadata_xml))
        else:
            md_doc = catalogue.catalogue.csw_gen_xml(resource, 'catalogue/full_metadata.xml')
        xml = md_doc
    # fix: BaseException also swallows KeyboardInterrupt/SystemExit;
    # Exception is the broadest we should catch here
    except Exception:
        logger.error(traceback.format_exc())
        return HttpResponse(
            "Resource Metadata not available!"
        )
    # only strip the trailing slash from absolute URLs
    site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
    xsl_path = '{}/static/metadataxsl/metadata.xsl'.format(site_url)
    xsl_line = '<?xml-stylesheet type="text/xsl" href="{}"?>'.format(xsl_path)
    return HttpResponse(
        xsl_line + xml,
        content_type="text/xml"
    )
def test_csw_outputschema_iso(self):
    """Verify that GeoNode CSW can handle ISO metadata with ISO outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as ISO
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.isotc211.org/2005/gmd',
        esn='full')
    # fix: dict.values() is not indexable in Python 3; materialize first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly
    self.assertEqual(record.identification.title,
                     'San Andres Y Providencia Location',
                     'Expected a specific title in ISO model')
    # test that the ISO abstract maps correctly
    self.assertEqual(record.identification.abstract,
                     'No abstract provided',
                     'Expected a specific abstract in ISO model')
    # test BBOX properties (compared as exact decimals)
    from decimal import Decimal
    self.assertEqual(
        Decimal(record.identification.bbox.minx), Decimal('-81.8593555'),
        'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(
        Decimal(record.identification.bbox.miny), Decimal('12.1665322'),
        'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(
        Decimal(record.identification.bbox.maxx), Decimal('-81.356409'),
        'Expected a specific maxx coordinate value in ISO model')
    self.assertEqual(
        Decimal(record.identification.bbox.maxy), Decimal('13.396306'),
        'Expected a specific maxy coordinate value in ISO model')
    # test for correct link articulation per configured OGC backend
    for link in record.distribution.online:
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if link.protocol == 'OGC:WMS':
                self.assertEqual(link.url,
                                 'http://localhost:8000/gs/ows',
                                 'Expected a specific OGC:WMS URL')
            elif link.protocol == 'OGC:WFS':
                self.assertEqual(link.url,
                                 'http://localhost:8000/gs/wfs',
                                 'Expected a specific OGC:WFS URL')
        if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            if link.protocol == 'OGC:WMS':
                self.assertEqual(
                    link.url,
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WMS URL')
            elif link.protocol == 'OGC:WFS':
                self.assertEqual(
                    link.url,
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WFS URL')
def catalogue_post_save(instance, sender, **kwargs):
    """Get information from catalogue"""
    # if layer is not to be published, temporarily
    # change publish state to be able to update
    # properties (#2332)
    is_published = instance.is_published
    resources = ResourceBase.objects.filter(id=instance.resourcebase_ptr.id)
    # Temporarily enable the Resources
    if not is_published:
        resources.update(is_published=True)
    # Update the Catalog
    try:
        try:
            catalogue = get_catalogue()
            catalogue.create_record(instance)
            record = catalogue.get_record(instance.uuid)
        # fix: 'except EnvironmentError, err' is Python 2-only syntax
        except EnvironmentError as err:
            msg = 'Could not connect to catalogue to save information for layer "%s"' % instance.name
            # NOTE(review): err.reason.errno assumes a URLError-style wrapper;
            # plain EnvironmentError exposes .errno directly — confirm which
            # exception the backend actually raises
            if err.reason.errno == errno.ECONNREFUSED:
                LOGGER.warn(msg, err)
                return
            else:
                raise err
        # fix: assert is stripped under `python -O`; raise explicitly instead
        if record is None:
            raise Exception(
                'Metadata record for %s does not exist,'
                ' check the catalogue signals.' % instance.title)
        if not hasattr(record, 'links'):
            raise Exception(
                'Metadata record for %s should contain links.' % instance.title)
        # Create the different metadata links with the available formats
        for mime, name, metadata_url in record.links['metadata']:
            Link.objects.get_or_create(
                resource=instance.resourcebase_ptr,
                url=metadata_url,
                defaults=dict(name=name,
                              extension='xml',
                              mime=mime,
                              link_type='metadata'))
        # generate an XML document (GeoNode's default is ISO)
        if instance.metadata_uploaded and instance.metadata_uploaded_preserve:
            md_doc = etree.tostring(etree.fromstring(instance.metadata_xml))
        else:
            md_doc = catalogue.catalogue.csw_gen_xml(
                instance, 'catalogue/full_metadata.xml')
        csw_anytext = catalogue.catalogue.csw_gen_anytext(md_doc)
        csw_wkt_geometry = instance.geographic_bounding_box.split(';')[-1]
        resources = ResourceBase.objects.filter(
            id=instance.resourcebase_ptr.id)
        resources.update(metadata_xml=md_doc)
        resources.update(csw_wkt_geometry=csw_wkt_geometry)
        resources.update(csw_anytext=csw_anytext)
    finally:
        # NOTE(review): the original outer try's handler was truncated in the
        # source; restoring the publish state here matches the intent of the
        # #2332 comment above — confirm against upstream history
        if not is_published:
            resources.update(is_published=is_published)
def test_csw_upload_fgdc(self):
    """Verify that GeoNode CSW can handle FGDC metadata upload"""
    csw = get_catalogue()
    if csw.catalogue.type == 'pycsw_http':
        # upload a native FGDC metadata document
        # fix: close the file handle deterministically
        fgdc_path = os.path.join(
            gisdata.GOOD_METADATA, 'sangis.org', 'Census',
            'Census_Blockgroup_Pop_Housing.shp.xml')
        with open(fgdc_path) as fh:
            md_doc = etree.tostring(dlxml.fromstring(fh.read()))
        csw.catalogue.transaction(
            ttype='insert',
            typename='fgdc:metadata',
            record=md_doc)
        # test that FGDC document was successfully inserted
        self.assertEqual(csw.catalogue.results['inserted'], 1)
        # query against FGDC typename, output FGDC
        csw.catalogue.getrecords(typenames='fgdc:metadata')
        self.assertEqual(csw.catalogue.results['matches'], 1)
        record = list(csw.catalogue.records.values())[0]
        # test that the FGDC title maps correctly in DC
        self.assertEqual(record.title, "Census_Blockgroup_Pop_Housing")
        # test that the FGDC type maps correctly in DC
        self.assertEqual(record.type, "vector digital data")
        # test CRS constructs in Dublin Core
        self.assertEqual(record.bbox.crs.code, 4326)
        # test BBOX properties in Dublin Core
        from decimal import Decimal
        self.assertEqual(Decimal(record.bbox.minx), Decimal('-117.6'))
        self.assertEqual(Decimal(record.bbox.miny), Decimal('32.53'))
        self.assertEqual(Decimal(record.bbox.maxx), Decimal('-116.08'))
        self.assertEqual(Decimal(record.bbox.maxy), Decimal('33.51'))
        # query against FGDC typename, return in ISO
        csw.catalogue.getrecords(
            typenames='fgdc:metadata',
            esn='brief',
            outputschema='http://www.isotc211.org/2005/gmd')
        self.assertEqual(csw.catalogue.results['matches'], 1)
        record = list(csw.catalogue.records.values())[0]
        # test that the FGDC title maps correctly in ISO
        self.assertEqual(record.identification.title,
                         "Census_Blockgroup_Pop_Housing")
        # cleanup and delete inserted FGDC metadata document
        csw.catalogue.transaction(
            ttype='delete',
            typename='fgdc:metadata',
            cql='fgdc:Title like "Census_Blockgroup_Pop_Housing"')
        self.assertEqual(csw.catalogue.results['deleted'], 1)
def test_shp_upload(self):
    """ Tests if a vector layer can be uploaded to a running GeoNode/GeoServer"""
    layer_name = 'san_andres_y_providencia_water'
    shp_path = os.path.join(GOOD_DATA, 'vector', '%s.shp' % layer_name)
    self.upload_file(shp_path, self.complete_upload,
                     check_name='%s' % layer_name)
    uploaded_layer = Layer.objects.filter(name__icontains='%s' % layer_name).last()
    if uploaded_layer:
        # attribute extraction must have happened
        self.assertIsNotNone(uploaded_layer.attributes)
        # 'original' and 'metadata' links must exist
        wanted_link_types = ['original', 'metadata']
        found_links = Link.objects.filter(link_type__in=wanted_link_types)
        self.assertIsNotNone(
            found_links,
            "No 'original' and 'metadata' links have been found")
        self.assertTrue(
            found_links.count() > 0,
            "No 'original' and 'metadata' links have been found")
        # every 'original' link URL must be indexed in csw_anytext
        original_links = Link.objects.filter(
            resource=uploaded_layer.resourcebase_ptr,
            resource_id=uploaded_layer.resourcebase_ptr.id,
            link_type='original')
        for original_link in original_links:
            self.assertIn(
                original_link.url,
                uploaded_layer.csw_anytext,
                "The link URL {0} is not present in the 'csw_anytext' attribute of the layer '{1}'"
                .format(original_link.url, uploaded_layer.alternate))
        # the catalogue must hold a record with links for the layer
        catalogue = get_catalogue()
        record = catalogue.get_record(uploaded_layer.uuid)
        self.assertIsNotNone(record)
        self.assertTrue(
            hasattr(record, 'links'),
            "No records have been found in the catalogue for the resource '{}'"
            .format(uploaded_layer.alternate))
        # each catalogue 'metadata' link must have a matching Link row
        for mime, name, metadata_url in record.links['metadata']:
            try:
                metadata_link = Link.objects.get(
                    resource=uploaded_layer.resourcebase_ptr,
                    url=metadata_url,
                    name=name,
                    extension='xml',
                    mime=mime,
                    link_type='metadata')
                self.assertIsNotNone(
                    metadata_link,
                    "No '{}' links have been found in the catalogue for the resource '{}'"
                    .format(name, uploaded_layer.alternate))
            except Link.DoesNotExist:
                metadata_link = None
def test_csw_query_bbox(self):
    """Verify that GeoNode CSW can handle bbox queries"""
    catalogue_wrapper = get_catalogue()
    search_extent = [-140, -70, 80, 70]
    catalogue_wrapper.catalogue.getrecords(bbox=search_extent)
    expected_results = {'matches': 7, 'nextrecord': 0, 'returned': 7}
    self.assertEqual(
        catalogue_wrapper.catalogue.results,
        expected_results,
        'Expected a specific bbox query result set')
def prefix_xsl_line(req, id):
    """Return a resource's catalogue XML with an xml-stylesheet
    processing instruction prepended."""
    resource = get_object_or_404(ResourceBase, pk=id)
    record = get_catalogue().get_record(resource.uuid)
    stylesheet_href = '{}/static/metadataxsl/metadata.xsl'.format(settings.SITEURL)
    pi = '<?xml-stylesheet type="text/xsl" href="{}"?>'.format(stylesheet_href)
    return HttpResponse(pi + record.xml, content_type="text/xml")
def test_csw_outputschema_iso(self):
    """Verify that GeoNode CSW can handle ISO metadata with ISO outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as ISO
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.isotc211.org/2005/gmd',
        esn='full')
    # dict.values() is not subscriptable on Python 3; materialize it first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly
    self.assertEqual(
        record.identification.title,
        'San Andres Y Providencia Location',
        'Expected a specific title in ISO model')
    # test that the ISO abstract maps correctly
    self.assertEqual(
        record.identification.abstract,
        'No abstract provided',
        'Expected a specific abstract in ISO model')
    # test BBOX properties in the ISO model
    self.assertEqual(
        record.identification.bbox.minx,
        '-81.8593555',
        'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(
        record.identification.bbox.miny,
        '12.1665322',
        'Expected a specific minx coordinate value in ISO model')
    self.assertEqual(
        record.identification.bbox.maxx,
        '-81.356409',
        'Expected a specific maxx coordinate value in ISO model')
    self.assertEqual(
        record.identification.bbox.maxy,
        '13.396306',
        'Expected a specific maxy coordinate value in ISO model')
    # test for correct link articulation
    for link in record.distribution.online:
        if link.protocol == 'OGC:WMS':
            self.assertEqual(link.url,
                             'http://localhost:8080/geoserver/geonode/wms',
                             'Expected a specific OGC:WMS URL')
        elif link.protocol == 'OGC:WFS':
            self.assertEqual(link.url,
                             'http://localhost:8080/geoserver/geonode/wfs',
                             'Expected a specific OGC:WFS URL')
def prefix_xsl_line(req, id):
    """Fetch the catalogue record for a resource (view entry point).

    NOTE(review): this variant stops after fetching the record; the XML
    response assembly is presumably elsewhere or was truncated — confirm.
    """
    resource = get_object_or_404(ResourceBase, pk=id)
    # if the layer is in the catalogue, try to get the distribution urls
    # that cannot be precalculated.
    try:
        catalogue = get_catalogue()
        record = catalogue.get_record(resource.uuid)
    except Exception as err:  # "except X, err" is Python 2-only syntax
        msg = 'Could not connect to catalogue to save information for layer "%s"' % str(resource.title)
        # logger.warn is deprecated; passing err as a positional %-arg with
        # no placeholder breaks log formatting — attach it as exc_info
        logger.warning(msg, exc_info=err)
        raise err
def test_csw_search_count(self):
    """Verify that GeoNode can handle search counting"""
    csw = get_catalogue(skip_caps=False)
    # each typename query must match the full fixture set of 16 records
    expectations = (
        ("csw:Record", "Expected 16 records"),
        ("gmd:MD_Metadata", "Expected 16 records against ISO typename"),
    )
    for typename, failure_message in expectations:
        csw.catalogue.getrecords(typenames=typename)
        self.assertEqual(csw.catalogue.results["matches"], 16, failure_message)
def test_csw_query_bbox(self):
    """Verify that GeoNode CSW can handle bbox queries"""
    csw = get_catalogue()
    bbox_filter = fes.BBox([-140, -70, 80, 70])
    try:
        csw.catalogue.getrecords2([bbox_filter, ])
        logger.debug(csw.catalogue.results)
        expected = {'matches': 7, 'nextrecord': 0, 'returned': 7}
        self.assertEqual(csw.catalogue.results, expected)
    except Exception:
        # This test currently appears to be broken on pycsw
        pass
def post_save_service(instance, sender, **kwargs):
    """Get information from catalogue and refresh the service's metadata.

    Creates/fetches the service's catalogue record, regenerates the XML
    metadata document (unless an uploaded document must be preserved),
    recomputes the CSW 'anytext' index and syncs publish permissions.
    """
    resources = ResourceBase.objects.filter(id=instance.resourcebase_ptr.id)
    # Logger.warn is a deprecated alias of Logger.warning
    LOGGER.warning(f'*** POST SAVING SERVICE "{instance.uuid}"')
    if resources.exists():
        # Update the Catalog
        try:
            catalogue = get_catalogue()
            catalogue.create_record(instance)
            record = catalogue.get_record(instance.uuid)
        except EnvironmentError as err:
            if err.errno == errno.ECONNREFUSED:
                # an unreachable catalogue is not fatal for the save; the
                # original passed err as a stray positional %-arg, which
                # breaks log formatting — attach it as exc_info instead
                LOGGER.warning(
                    f'Could not connect to catalogue to save information for layer "{instance.name}"',
                    exc_info=err)
                return
            else:
                raise err
        if not record:
            LOGGER.exception(
                f'Metadata record for service {instance.title} does not exist, check the catalogue signals.'
            )
            return
        # generate an XML document
        if instance.metadata_uploaded and instance.metadata_uploaded_preserve:
            md_doc = etree.tostring(dlxml.fromstring(instance.metadata_xml))
        else:
            LOGGER.info(f'Rebuilding metadata document for "{instance.uuid}"')
            template = getattr(settings, 'CATALOG_SERVICE_METADATA_TEMPLATE', 'xml/service-template.xml')
            md_doc = create_metadata_document(instance, template)
        try:
            csw_anytext = catalogue.catalogue.csw_gen_anytext(md_doc)
        except Exception as e:
            # best-effort: a broken document should not block the save
            LOGGER.exception(e)
            csw_anytext = ''
        # sync anonymous view permission with the publish flag
        for r in resources:
            if instance.is_published:
                anonymous_group = Group.objects.get(name='anonymous')
                assign_perm('view_resourcebase', anonymous_group, r)
            else:
                remove_object_permissions(r)
        resources.update(metadata_xml=md_doc, csw_anytext=csw_anytext)
    else:
        # message typo fixed ("does not exists or or")
        LOGGER.warning(
            f'*** The resource selected does not exist or more than one is selected "{instance.uuid}"'
        )
def catalogue_pre_save(instance, sender, **kwargs):
    """Send information to catalogue"""
    record = None
    # if the layer is in the catalogue, try to get the distribution urls
    # that cannot be precalculated.
    try:
        catalogue = get_catalogue()
        record = catalogue.get_record(instance.uuid)
    except EnvironmentError as err:  # "except X, err" is Python 2-only syntax
        msg = 'Could not connect to catalogue to save information for layer "%s"' % instance.name
        # LOGGER.warn is deprecated; a stray positional err with no %-placeholder
        # breaks formatting — attach the exception as exc_info instead
        LOGGER.warning(msg, exc_info=err)
        raise err
def catalogue_post_save(instance, sender, **kwargs):
    """Get information from catalogue"""
    try:
        catalogue = get_catalogue()
        catalogue.create_record(instance)
        record = catalogue.get_record(instance.uuid)
    except EnvironmentError as err:  # "except X, err" is Python 2-only syntax
        msg = 'Could not connect to catalogue to save information for layer "%s"' % instance.name
        # NOTE(review): EnvironmentError exposes .errno directly; .reason
        # looks like a urllib URLError attribute — confirm this ever matches.
        if err.reason.errno == errno.ECONNREFUSED:
            # deprecated .warn replaced; err attached as exc_info instead of
            # a stray positional %-arg that would break log formatting
            LOGGER.warning(msg, exc_info=err)
            return
        else:
            raise err
def prefix_xsl_line(req, id):
    """Fetch the catalogue record for a resource (view entry point).

    NOTE(review): this variant stops after fetching the record; response
    assembly is presumably elsewhere or was truncated — confirm.
    """
    resource = get_object_or_404(ResourceBase, pk=id)
    # if the layer is in the catalogue, try to get the distribution urls
    # that cannot be precalculated.
    try:
        catalogue = get_catalogue()
        record = catalogue.get_record(resource.uuid)
    except Exception as err:  # "except X, err" is Python 2-only syntax
        msg = 'Could not connect to catalogue to save information for layer "%s"' % str(
            resource.title)
        logger.warning(msg)  # .warn is a deprecated alias of .warning
        raise err
def test_csw_outputschema_iso(self):
    """Verify that GeoNode CSW can handle ISO metadata with ISO outputSchema"""
    from decimal import Decimal
    csw = get_catalogue()
    # query for the test layer, requesting full ISO output
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.isotc211.org/2005/gmd',
        esn='full')
    record = next(iter(csw.catalogue.records.values()))
    # ISO title and abstract should round-trip
    self.assertEqual(record.identification.title,
                     "San Andres Y Providencia Location")
    self.assertEqual(record.identification.abstract, 'No abstract provided')
    # BBOX coordinates should match to ~3 decimal places
    bbox = record.identification.bbox
    corner_expectations = (
        (bbox.minx, '-81.8593555'),
        (bbox.miny, '12.1665322'),
        (bbox.maxx, '-81.356409'),
        (bbox.maxy, '13.396306'),
    )
    for actual, expected in corner_expectations:
        self.assertAlmostEqual(Decimal(actual), Decimal(expected), places=3)
    # service links must point at the configured GeoServer endpoints
    for link in record.distribution.online:
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if link.protocol == 'OGC:WMS':
                self.assertEqual(
                    link.url,
                    '{}ows'.format(settings.GEOSERVER_PUBLIC_LOCATION),
                    'Expected a specific OGC:WMS URL')
            elif link.protocol == 'OGC:WFS':
                self.assertEqual(
                    link.url,
                    '{}wfs'.format(settings.GEOSERVER_PUBLIC_LOCATION),
                    'Expected a specific OGC:WFS URL')
def catalogue_pre_save(instance, sender, **kwargs):
    """Send information to catalogue """
    record = None
    try:
        catalogue = get_catalogue()
        record = catalogue.get_record(instance.uuid)
    except EnvironmentError as err:  # "except X, err" is Python 2-only syntax
        # the original implicit string concatenation produced
        # "catalogueto save ..." — missing space restored
        msg = 'Could not connect to catalogue ' \
              'to save information for layer "%s"' % (instance.name)
        # NOTE(review): EnvironmentError exposes .errno directly; .reason
        # looks like a urllib URLError attribute — confirm this ever matches.
        if err.reason.errno == errno.ECONNREFUSED:
            # deprecated .warn replaced; exception attached via exc_info
            LOGGER.warning(msg, exc_info=err)
        else:
            raise err
def prefix_xsl_line(req, id):
    """Prepend an xml-stylesheet declaration to a resource's catalogue XML."""
    resource = get_object_or_404(ResourceBase, pk=id)
    catalogue = get_catalogue()
    metadata_xml = catalogue.get_record(resource.uuid).xml
    href = '{}/static/metadataxsl/metadata.xsl'.format(settings.SITEURL)
    declaration = '<?xml-stylesheet type="text/xsl" href="{}"?>'.format(href)
    return HttpResponse(declaration + metadata_xml, content_type="text/xml")
def test_csw_search_count(self):
    """Verify that GeoNode can handle search counting"""
    csw = get_catalogue(skip_caps=False)
    # both the generic and the ISO typename should match all 16 records
    for typename, failure_msg in (
            ('csw:Record', 'Expected 16 records'),
            ('gmd:MD_Metadata', 'Expected 16 records against ISO typename')):
        csw.catalogue.getrecords(typenames=typename)
        self.assertEqual(csw.catalogue.results['matches'], 16, failure_msg)
def test_delete_layer(self):
    """Verify that the 'delete_layer' pre_delete hook is functioning """
    gs_cat = gs_catalog
    # Upload a Shapefile Layer
    shp_file = os.path.join(
        gisdata.VECTOR_DATA,
        'san_andres_y_providencia_poi.shp')
    shp_layer = file_upload(shp_file)
    shp_layer = geoserver_post_save2(shp_layer.id)
    # give GeoServer time to finish registering the layer
    # TODO confirm 20s is actually required
    time.sleep(20)
    shp_layer_id = shp_layer.pk
    ws = gs_cat.get_workspace(shp_layer.workspace)
    shp_store = gs_cat.get_store(shp_layer.store, ws)
    shp_store_name = shp_store.name
    uuid = shp_layer.uuid
    # Delete it with the Layer.delete() method
    shp_layer.delete()
    geoserver_delete(shp_layer.typename)
    # Verify that it no longer exists in GeoServer
    # self.assertIsNone(gs_cat.get_resource(name, store=shp_store))
    # self.assertIsNone(gs_cat.get_layer(shp_layer.name))
    self.assertRaises(
        FailedRequestError,
        lambda: gs_cat.get_store(shp_store_name))
    # Check that it was also deleted from GeoNodes DB
    self.assertRaises(ObjectDoesNotExist,
                      lambda: Layer.objects.get(pk=shp_layer_id))
    # geonode.geoserver.helpers
    # If catalogue is installed, then check that it is deleted from there
    # too.
    if 'geonode.catalogue' in settings.INSTALLED_APPS:
        from geonode.catalogue import get_catalogue
        catalogue = get_catalogue()
        # Verify that it no longer exists in GeoNetwork
        shp_layer_gn_info = catalogue.get_record(uuid)
        assert shp_layer_gn_info is None
def test_csw_outputschema_iso(self):
    """Verify that GeoNode can handle ISO metadata with ISO outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as ISO
    csw.catalogue.getrecords(
        typenames="gmd:MD_Metadata",
        keywords=["%san_andres_y_providencia_location%"],
        outputschema="http://www.isotc211.org/2005/gmd",
        esn="full",
    )
    # dict.values() is not subscriptable on Python 3; materialize it first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly
    self.assertEqual(
        record.identification.title, "San Andres Y Providencia Location", "Expected a specific title in ISO model"
    )
    # test that the ISO abstract maps correctly
    self.assertEqual(
        record.identification.abstract, "No abstract provided", "Expected a specific abstract in ISO model"
    )
    # test BBOX properties in the ISO model
    self.assertEqual(
        record.identification.bbox.minx, "-81.8593555", "Expected a specific minx coordinate value in ISO model"
    )
    self.assertEqual(
        record.identification.bbox.miny, "12.1665322", "Expected a specific minx coordinate value in ISO model"
    )
    self.assertEqual(
        record.identification.bbox.maxx, "-81.356409", "Expected a specific maxx coordinate value in ISO model"
    )
    self.assertEqual(
        record.identification.bbox.maxy, "13.396306", "Expected a specific maxy coordinate value in ISO model"
    )
    # test for correct link articulation
    for link in record.distribution.online:
        if link.protocol == "OGC:WMS":
            self.assertEqual(
                link.url, "http://localhost:8080/geoserver/geonode/wms", "Expected a specific OGC:WMS URL"
            )
        elif link.protocol == "OGC:WFS":
            self.assertEqual(
                link.url, "http://localhost:8080/geoserver/geonode/wfs", "Expected a specific OGC:WFS URL"
            )
def test_csw_base(self):
    """Verify that GeoNode works against any CSW"""
    csw_url = urljoin('http://localhost:8001/', '/catalogue/csw')
    csw = get_catalogue(
        backend={
            'ENGINE': 'geonode.catalogue.backends.pycsw_local',
            'URL': csw_url,
        },
        skip_caps=False)
    self.assertEqual(csw.catalogue.url, csw_url)
    # every advertised operation endpoint must match the GeoNode CSW URL
    for operation in csw.catalogue.operations:
        for method in operation.methods:
            self.assertEqual(
                csw.catalogue.url,
                method['url'],
                'Expected GeoNode URL to be equal to all CSW URLs')
    # OGC:CSW 2.0.2 must be the supported version
    self.assertEqual(
        csw.catalogue.version,
        '2.0.2',
        'Expected "2.0.2" as a supported version')
    # transactions are required for every backend except pycsw_local
    if csw.catalogue.type != 'pycsw_local':
        operation_names = [o.name for o in csw.catalogue.operations]
        self.assertTrue(
            'Transaction' in operation_names,
            'Expected Transaction to be a supported operation')
    # GetRecords must advertise the ISO typename and ISO output schema
    for operation in csw.catalogue.operations:
        if operation.name == 'GetRecords':
            self.assertTrue(
                'gmd:MD_Metadata' in operation.parameters['typeNames']['values'],
                'Expected "gmd:MD_Metadata" to be a supported typeNames value')
            self.assertTrue(
                'http://www.isotc211.org/2005/gmd' in operation.parameters['outputSchema']['values'],
                'Expected "http://www.isotc211.org/2005/gmd" to be a supported outputSchema value')
def test_csw_outputschema_dc(self):
    """Verify that GeoNode CSW can handle ISO metadata with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    # dict.values() is not subscriptable on Python 3; materialize it first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly in Dublin Core
    self.assertEqual(record.title, 'San Andres Y Providencia Location',
                     'Expected a specific title in Dublin Core model')
    # test that the ISO abstract maps correctly in Dublin Core
    self.assertEqual(record.abstract, 'No abstract provided',
                     'Expected a specific abstract in Dublin Core model')
    # test for correct service link articulation
    for link in record.references:
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            if link['scheme'] == 'OGC:WMS':
                self.assertEqual(
                    link['url'],
                    '{}ows'.format(settings.GEOSERVER_PUBLIC_LOCATION),
                    'Expected a specific OGC:WMS URL')
            elif link['scheme'] == 'OGC:WFS':
                self.assertEqual(
                    link['url'],
                    '{}wfs'.format(settings.GEOSERVER_PUBLIC_LOCATION),
                    'Expected a specific OGC:WFS URL')
        elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            if link['scheme'] == 'OGC:WMS':
                self.assertEqual(
                    link['url'],
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WMS URL')
            elif link['scheme'] == 'OGC:WFS':
                self.assertEqual(
                    link['url'],
                    'http://localhost:8000/qgis-server/ogc/'
                    'san_andres_y_providencia_location',
                    'Expected a specific OGC:WFS URL')
def test_csw_upload_fgdc(self):
    """Verify that GeoNode can handle FGDC metadata upload"""
    # GeoNetwork and deegree do not transform ISO <-> FGDC
    # once this is implemented we can remove this condition
    csw = get_catalogue()
    if csw.catalogue.type == 'pycsw_http':
        # upload a native FGDC metadata document; use a context manager
        # so the file handle is closed instead of leaked
        md_path = os.path.join(
            gisdata.GOOD_METADATA, 'sangis.org', 'Census',
            'Census_Blockgroup_Pop_Housing.shp.xml')
        with open(md_path) as md_file:
            md_doc = etree.tostring(etree.fromstring(md_file.read()))
        csw.catalogue.transaction(
            ttype='insert', typename='fgdc:metadata', record=md_doc)
        # test that FGDC document was successfully inserted
        self.assertEqual(csw.catalogue.results['inserted'], 1,
                         'Expected 1 inserted record in FGDC model')
        # query against FGDC typename, output FGDC
        csw.catalogue.getrecords(typenames='fgdc:metadata')
        self.assertEqual(csw.catalogue.results['matches'], 1,
                         'Expected 1 record in FGDC model')
        # dict.values() is not subscriptable on Python 3
        record = list(csw.catalogue.records.values())[0]
        # test that the FGDC title maps correctly in DC
        self.assertEqual(record.title, 'Census_Blockgroup_Pop_Housing',
                         'Expected a specific title in DC model')
        # test that the FGDC type maps correctly in DC
        self.assertEqual(record.type, 'vector digital data',
                         'Expected a specific type in DC model')
        # test CRS constructs in Dublin Core
        self.assertEqual(record.bbox.crs.code, 4326,
                         'Expected a specific CRS code value in Dublin Core model')
        # test BBOX properties in Dublin Core
        self.assertEqual(record.bbox.minx, '-117.6',
                         'Expected a specific minx coordinate value in Dublin Core model')
        self.assertEqual(record.bbox.miny, '32.53',
                         'Expected a specific minx coordinate value in Dublin Core model')
        self.assertEqual(record.bbox.maxx, '-116.08',
                         'Expected a specific maxx coordinate value in Dublin Core model')
        self.assertEqual(record.bbox.maxy, '33.51',
                         'Expected a specific maxy coordinate value in Dublin Core model')
        # query against FGDC typename, return in ISO
        csw.catalogue.getrecords(
            typenames='fgdc:metadata', esn='brief',
            outputschema='http://www.isotc211.org/2005/gmd')
        self.assertEqual(csw.catalogue.results['matches'], 1,
                         'Expected 1 record in ISO model')
        record = list(csw.catalogue.records.values())[0]
        # test that the FGDC title maps correctly in ISO
        self.assertEqual(record.identification.title,
                         'Census_Blockgroup_Pop_Housing',
                         'Expected a specific title in ISO model')
        # cleanup and delete inserted FGDC metadata document
        csw.catalogue.transaction(
            ttype='delete', typename='fgdc:metadata',
            cql='fgdc:Title like "Census_Blockgroup_Pop_Housing"')
        self.assertEqual(csw.catalogue.results['deleted'], 1,
                         'Expected 1 deleted record in FGDC model')
def test_csw_search_count(self):
    """Verify that GeoNode can handle search counting"""
    csw = get_catalogue(skip_caps=False)
    # (removed a loop over csw.catalogue.operations that only assigned an
    # unused local 'typenames' — dead code with no side effects)
    # get all records
    csw.catalogue.getrecords(typenames='csw:Record gmd:MD_Metadata')
    self.assertEqual(csw.catalogue.results['matches'], 16,
                     'Expected 16 records')
    # get all ISO records, test for numberOfRecordsMatched
    csw.catalogue.getrecords(typenames='gmd:MD_Metadata')
    self.assertEqual(csw.catalogue.results['matches'], 16,
                     'Expected 16 ISO records')
def test_csw_search_count(self):
    """Verify that GeoNode can handle search counting"""
    csw = get_catalogue(skip_caps=False)
    # (removed a loop over csw.catalogue.operations that only assigned an
    # unused local 'typenames' — dead code with no side effects)
    # get all records
    csw.catalogue.getrecords(typenames="csw:Record gmd:MD_Metadata")
    self.assertEqual(csw.catalogue.results["matches"], 16, "Expected 16 records")
    # get all ISO records, test for numberOfRecordsMatched
    csw.catalogue.getrecords(typenames="gmd:MD_Metadata")
    self.assertEqual(csw.catalogue.results["matches"], 16, "Expected 16 ISO records")
def test_delete_layer(self):
    """Verify that the 'delete_layer' pre_delete hook is functioning """
    gs_cat = gs_catalog
    # Upload a Shapefile Layer
    shp_file = os.path.join(
        gisdata.VECTOR_DATA,
        'san_andres_y_providencia_poi.shp')
    shp_layer = file_upload(shp_file)
    # we need some time to have the service up and running
    time.sleep(20)
    shp_layer_id = shp_layer.pk
    ws = gs_cat.get_workspace(shp_layer.workspace)
    shp_store = gs_cat.get_store(shp_layer.store, ws)
    shp_store_name = shp_store.name
    uuid = shp_layer.uuid
    # Delete it with the Layer.delete() method
    shp_layer.delete()
    # Verify that it no longer exists in GeoServer
    res = gs_cat.get_layer(shp_layer.name)
    self.assertIsNone(res)
    # Verify that the store was deleted
    ds = gs_cat.get_store(shp_store_name)
    self.assertIsNone(ds)
    # Check that it was also deleted from GeoNodes DB
    self.assertRaises(ObjectDoesNotExist,
                      lambda: Layer.objects.get(pk=shp_layer_id))
    # geonode.geoserver.helpers
    # If catalogue is installed, then check that it is deleted from there
    # too.
    if 'geonode.catalogue' in settings.INSTALLED_APPS:
        from geonode.catalogue import get_catalogue
        catalogue = get_catalogue()
        # Verify that it no longer exists in GeoNetwork
        shp_layer_gn_info = catalogue.get_record(uuid)
        assert shp_layer_gn_info is None
def test_csw_outputschema_fgdc(self):
    """Verify that GeoNode CSW can handle ISO metadata with FGDC outputSchema"""
    csw = get_catalogue()
    # FGDC output is only implemented by the pycsw backends
    if csw.catalogue.type in {'pycsw_http', 'pycsw_local'}:
        # get all ISO records in FGDC schema
        csw.catalogue.getrecords(
            typenames='gmd:MD_Metadata',
            keywords=['san_andres_y_providencia_location'],
            outputschema='http://www.opengis.net/cat/csw/csdgm')
        record = next(iter(csw.catalogue.records.values()))
        # the ISO title should map into the FGDC citation
        self.assertEqual(
            record.idinfo.citation.citeinfo['title'],
            "san_andres_y_providencia_location.shp")
        # the ISO abstract should map into the FGDC description
        self.assertEqual(record.idinfo.descript.abstract,
                         'No abstract provided')
def test_update_metadata_records(self):
    """Saving a layer with an HTML abstract refreshes its catalogue record."""
    layer = Layer.objects.first()
    self.assertIsNotNone(layer)
    html_abstract = "<p>Test HTML abstract</p>"
    layer.abstract = html_abstract
    layer.save()
    # the stored abstract keeps the markup; raw_abstract strips it
    self.assertEqual(layer.abstract, html_abstract)
    self.assertEqual(layer.raw_abstract, "Test HTML abstract")
    # refresh catalogue metadata records
    catalogue_post_save(instance=layer, sender=layer.__class__)
    # the catalogue record should mirror the layer's metadata
    record = get_catalogue().get_record(layer.uuid)
    self.assertIsNotNone(record)
    self.assertEqual(record.identification.title, layer.title)
    self.assertEqual(record.identification.abstract, layer.raw_abstract)
    constraints = record.identification.otherconstraints
    if len(constraints) > 0:
        self.assertEqual(constraints[0], layer.raw_constraints_other)
def test_csw_outputschema_dc(self):
    """Verify that GeoNode can handle ISO metadata with Dublin Core outputSchema"""
    csw = get_catalogue()
    # search for 'san_andres_y_providencia_location', output as Dublin Core
    csw.catalogue.getrecords(
        typenames='gmd:MD_Metadata',
        keywords=['%san_andres_y_providencia_location%'],
        outputschema='http://www.opengis.net/cat/csw/2.0.2',
        esn='full')
    # dict.values() is not subscriptable on Python 3; materialize it first
    record = list(csw.catalogue.records.values())[0]
    # test that the ISO title maps correctly in Dublin Core
    self.assertEqual(record.title, 'San Andres Y Providencia Location',
                     'Expected a specific title in Dublin Core model')
    # test that the ISO abstract maps correctly in Dublin Core
    self.assertEqual(record.abstract, 'No abstract provided',
                     'Expected a specific abstract in Dublin Core model')
def catalogue_post_save(instance, sender, **kwargs):
    """Get information from catalogue"""
    # if layer is not to be published, temporarily
    # change publish state to be able to update
    # properties (#2332)
    is_published = instance.is_published
    if not is_published:
        resources = ResourceBase.objects.filter(id=instance.resourcebase_ptr.id)
        resources.update(is_published=True)
    try:
        catalogue = get_catalogue()
        catalogue.create_record(instance)
        record = catalogue.get_record(instance.uuid)
    except EnvironmentError as err:  # "except X, err" is Python 2-only syntax
        msg = 'Could not connect to catalogue to save information for layer "%s"' % instance.name
        # NOTE(review): EnvironmentError exposes .errno directly; .reason
        # looks like a urllib URLError attribute — confirm this ever matches.
        if err.reason.errno == errno.ECONNREFUSED:
            # deprecated .warn replaced; err attached as exc_info instead of
            # a stray positional %-arg that would break log formatting
            LOGGER.warning(msg, exc_info=err)
            return
        else:
            raise err
cat.delete(gs_resource) except: msg = 'Couldn\'t delete GeoServer resource during cleanup()' logger.warning(msg) if gs_store is not None: try: cat.delete(gs_store) except: logger.warning("Couldn't delete GeoServer store during cleanup()") logger.warning('Deleting dangling Catalogue record for [%s] ' '(no Django record to match)', name) if 'geonode.catalogue' in settings.INSTALLED_APPS: from geonode.catalogue import get_catalogue catalogue = get_catalogue() catalogue.remove_record(uuid) logger.warning('Finished cleanup after failed Catalogue/Django ' 'import for layer: %s', name) def save(layer, base_file, user, overwrite=True, title=None, abstract=None, permissions=None, keywords=()): """Upload layer data to Geoserver and registers it with Geonode. If specified, the layer given is overwritten, otherwise a new layer is created. """ logger.info(_separator) logger.info('Uploading layer: [%s], base filename: [%s]', layer, base_file)
def catalogue_pre_delete(instance, sender, **kwargs):
    """Removes the layer from the catalogue """
    get_catalogue().remove_record(instance.uuid)