def test_lookup(self):
    """get_boundary resolves a known statistical-geography URI to a boundary."""
    # Removed unused `import json` — nothing in this test touches JSON directly.
    # Peterborough
    b = geo.get_boundary(
        "http://statistics.data.gov.uk/doc/statistical-geography/E06000031"
    )
    assert b is not None
def test_organization_extent(self):
    """A fetched GeoJSON boundary converts to a WKT spatial element (SRID 4326)."""
    # Removed unused `import json` — get_boundary already returns parsed GeoJSON.
    # Peterborough
    geojson = get_boundary(
        "http://statistics.data.gov.uk/doc/statistical-geography/E06000031"
    )
    shape = asShape(geojson)
    w = WKTSpatialElement(shape.wkt, 4326)
    assert w is not None
def test_organization_extent(self):
    """A fetched GeoJSON boundary converts to a WKT spatial element (SRID 4326)."""
    # Removed unused `import json` — get_boundary already returns parsed GeoJSON.
    # Peterborough
    geojson = get_boundary(
        "http://statistics.data.gov.uk/doc/statistical-geography/E06000031"
    )
    shape = asShape(geojson)
    w = WKTSpatialElement(shape.wkt, 4326)
    assert w is not None
# NOTE(review): fragment of a harvest gather stage — the enclosing `def` is
# outside this view, so the indentation below is reconstructed from the
# syntax; confirm against the full method before relying on it.
return None
try:
    doc = InventoryDocument(req.content)
except InventoryXmlError, e:
    # Record why the inventory XML could not be parsed/validated, then abort
    # this gather run (returning None signals no datasets to fetch).
    self.save_gather_error(
        'Failed to parse or validate the XML document: %s %s' % (
            e.__class__.__name__, e), harvest_job)
    return None

doc_metadata = doc.top_level_metadata()

# TODO: Somehow update the publisher details with the geo boundary
spatial_coverage_url = doc_metadata.get('spatial-coverage-url')
if spatial_coverage_url:
    boundary = get_boundary(spatial_coverage_url)
    if boundary:
        # don't import dgulocal_model until here, to allow tests that
        # don't need postgis to run under sqlite
        from ckanext.dgulocal import model as dgulocal_model
        try:
            dgulocal_model.set_organization_polygon(
                harvest_job.source.publisher_id, boundary)
        except Exception, e:
            # Failure to store the polygon is logged but deliberately
            # non-fatal — the harvest itself continues.
            log.exception(e)
            # but carry on anyway?

# Find any previous harvests and store. If modified since then continue
# otherwise bail. Store the last process date so we can check the
# datasets
doc_last_modified = doc_metadata['modified']
def test_lookup(self):
    """get_boundary resolves a known statistical-geography URI to a boundary."""
    # Removed unused `import json` — nothing in this test touches JSON directly.
    # Peterborough
    b = geo.get_boundary(
        "http://statistics.data.gov.uk/doc/statistical-geography/E06000031"
    )
    assert b is not None
# NOTE(review): fragment of a harvest gather stage — both the statement this
# `harvest_job)` closes and the enclosing `def` are outside this view; the
# indentation below is reconstructed from the syntax — confirm against the
# full method before relying on it.
    harvest_job)
return None
try:
    doc = InventoryDocument(req.content)
except InventoryXmlError, e:
    # Record why the inventory XML could not be parsed/validated, then abort
    # this gather run (returning None signals no datasets to fetch).
    self.save_gather_error(
        'Failed to parse or validate the XML document: %s %s' % (
            e.__class__.__name__, e), harvest_job)
    return None

doc_metadata = doc.top_level_metadata()

# TODO: Somehow update the publisher details with the geo boundary
spatial_coverage_url = doc_metadata.get('spatial-coverage-url')
if spatial_coverage_url:
    boundary = get_boundary(spatial_coverage_url)
    if boundary:
        # don't import dgulocal_model until here, to allow tests that
        # don't need postgis to run under sqlite
        from ckanext.dgulocal import model as dgulocal_model
        try:
            dgulocal_model.set_organization_polygon(
                harvest_job.source.publisher_id, boundary)
        except Exception, e:
            # Failure to store the polygon is logged but deliberately
            # non-fatal — the harvest itself continues.
            log.exception(e)
            # but carry on anyway?

# Find any previous harvests and store. If modified since then continue
# otherwise bail. Store the last process date so we can check the
# datasets