def _clip_vector_layer(
        layer,
        extent,
        extra_keywords=None,
        explode_flag=True,
        hard_clip_flag=False,
        explode_attribute=None):
    """Clip a Hazard or Exposure layer to the extents provided.

    The layer must be a vector layer or an exception will be thrown.

    The output layer will always be in WGS84/Geographic.

    :param layer: A valid QGIS vector layer.
    :type layer: QgsVectorLayer

    :param extent: Either an array representing the exposure layer extents
        in the form [xmin, ymin, xmax, ymax]. It is assumed that the
        coordinates are in EPSG:4326 although currently no checks are made to
        enforce this.
        or:
        A QgsGeometry of type polygon.
        **Polygon clipping is currently only supported for vector datasets.**
    :type extent: list(float, float, float, float), QgsGeometry

    :param extra_keywords: Optional keywords dictionary to be added to output
        layer.
    :type extra_keywords: dict

    :param explode_flag: A bool specifying whether multipart features
        should be 'exploded' into singleparts.
        **This parameter is ignored for raster layer clipping.**
    :type explode_flag: bool

    :param hard_clip_flag: A bool specifying whether line and polygon
        features that extend beyond the extents should be clipped such that
        they are reduced in size to the part of the geometry that intersects
        the extent only. Default is False.
        **This parameter is ignored for raster layer clipping.**
    :type hard_clip_flag: bool

    :param explode_attribute: A str specifying to which attribute #1, #2 and
        so on will be added in case of explode_flag being true. The attribute
        is modified only if there are at least 2 parts.
    :type explode_attribute: str

    :returns: Clipped layer (placed in the system temp dir). The output layer
        will be reprojected to EPSG:4326 if needed.
    :rtype: QgsVectorLayer

    :raises: InvalidParameterError, InvalidClipGeometryError,
        NoFeaturesInExtentError
    """
    if not layer or not extent:
        message = tr('Layer or Extent passed to clip is None.')
        raise InvalidParameterError(message)

    if layer.type() != QgsMapLayer.VectorLayer:
        message = tr(
            'Expected a vector layer but received a %s.'
            % str(layer.type()))
        raise InvalidParameterError(message)

    # handle, file_name = tempfile.mkstemp('.sqlite', 'clip_',
    #     temp_dir())
    handle, file_name = tempfile.mkstemp(
        '.shp', 'clip_', temp_dir())

    # Ensure the file is deleted before we try to write to it
    # fixes windows specific issue where you get a message like this
    # ERROR 1: c:\temp\inasafe\clip_jpxjnt.shp is not a directory.
    # This is because mkstemp creates the file handle and leaves
    # the file open.
    os.close(handle)
    os.remove(file_name)

    # Get the clip extents in the layer's native CRS
    geo_crs = QgsCoordinateReferenceSystem()
    geo_crs.createFromSrid(4326)
    transform = QgsCoordinateTransform(geo_crs, layer.crs())
    allowed_clip_values = [QGis.WKBPolygon, QGis.WKBPolygon25D]
    if isinstance(extent, list):
        rectangle = QgsRectangle(
            extent[0], extent[1],
            extent[2], extent[3])
        # noinspection PyCallByClass
        # noinspection PyTypeChecker
        polygon = QgsGeometry.fromRect(rectangle)
    elif (isinstance(extent, QgsGeometry) and
            extent.wkbType() in allowed_clip_values):
        # FIX: wkbType is a method - the original compared the bound method
        # object itself against the WKB constants, which is always False, so
        # polygon geometry extents were incorrectly rejected.
        # FIX: transformBoundingBox expects a QgsRectangle; the original
        # converted it to a QRectF via toRectF() which would fail downstream.
        rectangle = extent.boundingBox()
        polygon = extent
    else:
        raise InvalidClipGeometryError(
            tr(
                'Clip geometry must be an extent or a single part '
                'polygon based geometry.'))
    projected_extent = transform.transformBoundingBox(rectangle)

    # Get vector layer
    provider = layer.dataProvider()
    if provider is None:
        message = tr(
            'Could not obtain data provider from '
            'layer "%s"' % layer.source())
        raise Exception(message)

    # Get the layer field list, select by our extent then write to disk
    # .. todo:: FIXME - for different geometry types we should implement
    #     different clipping behaviour e.g. reject polygons that
    #     intersect the edge of the bbox. Tim
    request = QgsFeatureRequest()
    if not projected_extent.isEmpty():
        request.setFilterRect(projected_extent)
        request.setFlags(QgsFeatureRequest.ExactIntersect)

    field_list = provider.fields()

    writer = QgsVectorFileWriter(
        file_name,
        None,
        field_list,
        layer.wkbType(),
        geo_crs,
        # 'SQLite')  # FIXME (Ole): This works but is far too slow
        'ESRI Shapefile')
    if writer.hasError() != QgsVectorFileWriter.NoError:
        message = tr(
            'Error when creating shapefile: <br>Filename:'
            '%s<br>Error: %s' % (file_name, writer.hasError()))
        raise Exception(message)

    # Reverse the coordinate xform now so that we can convert
    # geometries from layer crs to geocrs.
    transform = QgsCoordinateTransform(layer.crs(), geo_crs)
    # Retrieve every feature with its geometry and attributes
    count = 0
    has_multipart = False

    for feature in provider.getFeatures(request):
        geometry = feature.geometry()

        # Loop through the parts adding them to the output file
        # we write out single part features unless explode_flag is False
        if explode_flag:
            geometry_list = explode_multipart_geometry(geometry)
        else:
            geometry_list = [geometry]

        for part_index, part in enumerate(geometry_list):
            part.transform(transform)
            if hard_clip_flag:
                # Remove any dangling bits so only intersecting area is
                # kept.
                part = clip_geometry(polygon, part)
            if part is None:
                continue

            feature.setGeometry(part)
            # There are multiple parts and we want to show it in the
            # explode_attribute
            if part_index > 0 and explode_attribute is not None:
                has_multipart = True

            writer.addFeature(feature)
        count += 1
    del writer  # Flush to disk

    if count < 1:
        message = tr(
            'No features fall within the clip extents. Try panning / zooming '
            'to an area containing data and then try to run your analysis '
            'again. If hazard and exposure data doesn\'t overlap at all, it '
            'is not possible to do an analysis. Another possibility is that '
            'the layers do overlap but because they may have different '
            'spatial references, they appear to be disjointed. If this is the '
            'case, try to turn on reproject on-the-fly in QGIS.')
        raise NoFeaturesInExtentError(message)

    keyword_io = KeywordIO()
    if extra_keywords is None:
        extra_keywords = {}
    extra_keywords[multipart_polygon_key] = has_multipart
    keyword_io.copy_keywords(
        layer, file_name, extra_keywords=extra_keywords)

    base_name = '%s clipped' % layer.name()
    layer = QgsVectorLayer(file_name, base_name, 'ogr')

    return layer
def _clip_raster_layer(
        layer, extent, cell_size=None, extra_keywords=None):
    """Clip a Hazard or Exposure raster layer to the extents provided.

    The layer must be a raster layer or an exception will be thrown.

    .. note:: The extent *must* be in EPSG:4326.

    The output layer will always be in WGS84/Geographic.

    :param layer: A valid QGIS raster layer in EPSG:4326
    :type layer: QgsRasterLayer

    :param extent: An array representing the exposure layer extents in the
        form [xmin, ymin, xmax, ymax]. It is assumed that the coordinates
        are in EPSG:4326 although currently no checks are made to enforce
        this.
        or:
        A QgsGeometry of type polygon.
        **Polygon clipping currently only supported for vector datasets.**
    :type extent: list(float), QgsGeometry

    :param cell_size: Cell size (in GeoCRS) which the layer should be
        resampled to. If not provided for a raster layer (i.e.
        cell_size=None), the native raster cell size will be used.
    :type cell_size: float

    :param extra_keywords: Optional keywords dictionary to be added to the
        output layer.
    :type extra_keywords: dict

    :returns: Output clipped layer (placed in the system temp dir).
    :rtype: QgsRasterLayer

    :raises: InvalidProjectionError - if input layer is a density
        layer in projected coordinates. See issue #123.
    """
    if not layer or not extent:
        message = tr('Layer or Extent passed to clip is None.')
        raise InvalidParameterError(message)

    if layer.type() != QgsMapLayer.RasterLayer:
        message = tr(
            'Expected a raster layer but received a %s.'
            % str(layer.type()))
        raise InvalidParameterError(message)

    working_layer = layer.source()

    # Check for existence of keywords file
    base, _ = os.path.splitext(working_layer)
    keywords_path = base + '.xml'
    message = tr(
        'Input file to be clipped "%s" does not have the '
        'expected keywords file %s' % (
            working_layer,
            keywords_path
        ))
    verify(os.path.isfile(keywords_path), message)

    # Raise exception if layer is projected and refers to density (issue #123)
    # FIXME (Ole): Need to deal with it - e.g. by automatically reprojecting
    # the layer at this point and setting the native resolution accordingly
    # in its keywords.
    try:
        keywords = read_iso19115_metadata(working_layer)
    except (MetadataReadError, NoKeywordsFoundError):
        # Fall back to the legacy .keywords file and migrate it to ISO 19115.
        keywords = read_keywords(base + '.keywords')
        keywords = write_read_iso_19115_metadata(working_layer, keywords)
    if 'datatype' in keywords and keywords['datatype'] == 'count':
        if str(layer.crs().authid()) != 'EPSG:4326':
            # This layer is not WGS84 geographic
            # FIX: wrap in tr() - this was the only user facing message in
            # this function that was not marked for translation.
            message = tr(
                'Layer %s represents count but has spatial reference "%s". '
                'Count layers must be given in WGS84 geographic coordinates, '
                'so please reproject and try again. For more information, '
                'see issue https://github.com/AIFDR/inasafe/issues/123' % (
                    working_layer,
                    layer.crs().toProj4()
                ))
            raise InvalidProjectionError(message)

    # We need to provide gdalwarp with a dataset for the clip
    # because unlike gdal_translate, it does not take projwin.
    clip_kml = extent_to_kml(extent)

    # Create a filename for the clipped, resampled and reprojected layer
    handle, filename = tempfile.mkstemp('.tif', 'clip_', temp_dir())
    os.close(handle)
    os.remove(filename)

    # If no cell size is specified, we need to run gdalwarp without
    # specifying the output pixel size to ensure the raster dims
    # remain consistent.
    binary_list = which('gdalwarp')
    LOGGER.debug('Path for gdalwarp: %s' % binary_list)
    if len(binary_list) < 1:
        raise CallGDALError(
            tr('gdalwarp could not be found on your computer'))
    # Use the first matching gdalwarp found
    binary = binary_list[0]
    if cell_size is None:
        command = (
            '"%s" -q -t_srs EPSG:4326 -r near -cutline %s -crop_to_cutline '
            '-ot Float64 -of GTiff "%s" "%s"' % (
                binary, clip_kml, working_layer, filename))
    else:
        command = (
            '"%s" -q -t_srs EPSG:4326 -r near -tr %s %s -cutline %s '
            '-crop_to_cutline -ot Float64 -of GTiff "%s" "%s"' % (
                binary, repr(cell_size), repr(cell_size), clip_kml,
                working_layer, filename))

    LOGGER.debug(command)
    result = QProcess().execute(command)

    # For QProcess exit codes see
    # http://qt-project.org/doc/qt-4.8/qprocess.html#execute
    if result == -2:  # cannot be started
        message_detail = tr('Process could not be started.')
        message = tr(
            '<p>Error while executing the following shell command:'
            '</p><pre>%s</pre><p>Error message: %s'
            % (command, message_detail))
        raise CallGDALError(message)
    elif result == -1:  # process crashed
        message_detail = tr('Process crashed.')
        message = tr(
            '<p>Error while executing the following shell command:</p>'
            '<pre>%s</pre><p>Error message: %s'
            % (command, message_detail))
        raise CallGDALError(message)

    # .. todo:: Check the result of the shell call is ok
    keyword_io = KeywordIO()
    keyword_io.copy_keywords(layer, filename, extra_keywords=extra_keywords)
    base_name = '%s clipped' % layer.name()
    layer = QgsRasterLayer(filename, base_name)
    return layer
class KeywordIOTest(unittest.TestCase):
    """Tests for reading and writing of raster and vector data
    """

    def setUp(self):
        # Fresh KeywordIO instance per test; unittest runs setUp before
        # every test method so fixture state never leaks between tests.
        self.keyword_io = KeywordIO()

        # SQLite Layer
        uri = QgsDataSourceURI()
        sqlite_building_path = standard_data_path(
            'exposure', 'exposure.sqlite')
        uri.setDatabase(sqlite_building_path)
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        self.sqlite_layer = QgsVectorLayer(
            uri.uri(), 'OSM Buildings', 'spatialite')
        self.expected_sqlite_keywords = {
            'datatype': 'OSM'
        }

        # Raster Layer keywords
        hazard_path = standard_data_path('hazard', 'tsunami_wgs84.tif')
        self.raster_layer, _ = load_layer(hazard_path)
        # NOTE(review): these expectations match the generic_continuous_flood
        # clone used in test_read_raster_file_keywords, not the tsunami layer
        # loaded above - confirm against the fixture data.
        self.expected_raster_keywords = {
            'hazard_category': 'single_event',
            'title': 'Generic Continuous Flood',
            'hazard': 'flood',
            'continuous_hazard_unit': 'generic',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': '3.5'
        }

        # Vector Layer keywords
        vector_path = standard_data_path('exposure', 'buildings_osm_4326.shp')
        self.vector_layer, _ = load_layer(vector_path)
        self.expected_vector_keywords = {
            'keyword_version': '3.5',
            'structure_class_field': 'FLOODED',
            'value_mapping': {},
            'title': 'buildings_osm_4326',
            'layer_geometry': 'polygon',
            'layer_purpose': 'exposure',
            'layer_mode': 'classified',
            'exposure': 'structure',
        }
        # Keyword less layer
        keywordless_path = standard_data_path('other', 'keywordless_layer.shp')
        self.keywordless_layer, _ = load_layer(keywordless_path)
        # Keyword file
        self.keyword_path = standard_data_path(
            'exposure', 'buildings_osm_4326.xml')

    def test_read_raster_file_keywords(self):
        """Can we read raster file keywords using generic readKeywords method
        """
        # Clone so the on-disk fixture is never modified by the test run.
        layer = clone_raster_layer(
            name='generic_continuous_flood',
            extension='.asc',
            include_keywords=True,
            source_directory=standard_data_path('hazard'))
        keywords = self.keyword_io.read_keywords(layer)
        expected_keywords = self.expected_raster_keywords
        self.assertDictEqual(keywords, expected_keywords)

    def test_read_vector_file_keywords(self):
        """Test read vector file keywords with the generic readKeywords method.
        """
        # maxDiff=None makes assertDictEqual print the full diff on failure.
        self.maxDiff = None
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        expected_keywords = self.expected_vector_keywords
        self.assertDictEqual(keywords, expected_keywords)

    def test_read_keywordless_layer(self):
        """Test read 'keyword' file from keywordless layer.
        """
        self.assertRaises(
            NoKeywordsFoundError,
            self.keyword_io.read_keywords,
            self.keywordless_layer,
        )

    def test_update_keywords(self):
        """Test append file keywords with update_keywords method."""
        self.maxDiff = None
        layer = clone_raster_layer(
            name='tsunami_wgs84',
            extension='.tif',
            include_keywords=True,
            source_directory=standard_data_path('hazard'))
        new_keywords = {
            'hazard_category': 'multiple_event'
        }
        # update_keywords should merge the new key over the existing set.
        self.keyword_io.update_keywords(layer, new_keywords)
        keywords = self.keyword_io.read_keywords(layer)
        expected_keywords = {
            'hazard_category': 'multiple_event',
            'title': 'Tsunami',
            'hazard': 'tsunami',
            'continuous_hazard_unit': 'metres',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': inasafe_keyword_version
        }
        # Stored keywords come back as unicode, so normalise the expectation.
        expected_keywords = {
            k: get_unicode(v) for k, v in expected_keywords.iteritems()
        }
        self.assertDictEqual(keywords, expected_keywords)

    def test_copy_keywords(self):
        """Test we can copy the keywords."""
        self.maxDiff = None
        out_path = unique_filename(
            prefix='test_copy_keywords', suffix='.shp')
        layer = clone_raster_layer(
            name='generic_continuous_flood',
            extension='.asc',
            include_keywords=True,
            source_directory=standard_data_path('hazard'))
        self.keyword_io.copy_keywords(layer, out_path)
        # copied_keywords = read_file_keywords(out_path.split('.')[0] + 'xml')
        copied_keywords = read_iso19115_metadata(out_path)
        expected_keywords = self.expected_raster_keywords
        # Copying rewrites metadata at the current keyword version.
        expected_keywords['keyword_version'] = inasafe_keyword_version
        self.assertDictEqual(copied_keywords, expected_keywords)

    def test_definition(self):
        """Test we can get definitions for keywords.

        .. versionadded:: 3.2
        """
        keyword = 'hazards'
        keyword_definition = definition(keyword)
        self.assertTrue('description' in keyword_definition)

    def test_to_message(self):
        """Test we can convert keywords to a message object.

        .. versionadded:: 3.2
        """
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        # to_text() renders the message; check a known fragment is present.
        message = self.keyword_io.to_message(keywords).to_text()
        self.assertIn('*Exposure*, structure------', message)

    def test_layer_to_message(self):
        """Test to show augmented keywords if KeywordsIO ctor passed a layer.

        .. versionadded:: 3.3
        """
        keywords = KeywordIO(self.vector_layer)
        message = keywords.to_message().to_text()
        self.assertIn('*Reference system*, ', message)

    def test_dict_to_row(self):
        """Test the dict to row helper works.

        .. versionadded:: 3.2
        """
        # _dict_to_row accepts a string representation of a dict ...
        keyword_value = (
            "{'high': ['Kawasan Rawan Bencana III'], "
            "'medium': ['Kawasan Rawan Bencana II'], "
            "'low': ['Kawasan Rawan Bencana I']}")
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*high*, Kawasan Rawan Bencana III------',
            table.to_text())
        # should also work passing a dict
        keyword_value = {
            'high': ['Kawasan Rawan Bencana III'],
            'medium': ['Kawasan Rawan Bencana II'],
            'low': ['Kawasan Rawan Bencana I']}
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*high*, Kawasan Rawan Bencana III------',
            table.to_text())

    def test_keyword_io(self):
        """Test read keywords directly from keywords file

        .. versionadded:: 3.2
        """
        self.maxDiff = None
        keywords = self.keyword_io.read_keywords_file(self.keyword_path)
        expected_keywords = self.expected_vector_keywords
        self.assertDictEqual(keywords, expected_keywords)
class KeywordIOTest(unittest.TestCase):
    """Tests for reading and writing of raster and vector data
    """

    def setUp(self):
        # Fresh KeywordIO instance per test run.
        self.keyword_io = KeywordIO()

        # SQLite Layer
        uri = QgsDataSourceURI()
        sqlite_building_path = test_data_path('exposure', 'exposure.sqlite')
        uri.setDatabase(sqlite_building_path)
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        self.sqlite_layer = QgsVectorLayer(
            uri.uri(), 'OSM Buildings', 'spatialite')
        self.expected_sqlite_keywords = {
            'category': 'exposure',
            'datatype': 'OSM',
            'subcategory': 'building'}

        # Raster Layer keywords
        hazard_path = test_data_path('hazard', 'tsunami_wgs84.tif')
        self.raster_layer, _ = load_layer(hazard_path)
        self.expected_raster_keywords = {
            'hazard_category': 'single_event',
            'title': 'Tsunami',
            'hazard': 'tsunami',
            'continuous_hazard_unit': 'metres',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': inasafe_keyword_version
        }

        # Vector Layer keywords
        vector_path = test_data_path('exposure', 'buildings_osm_4326.shp')
        self.vector_layer, _ = load_layer(vector_path)
        self.expected_vector_keywords = {
            'keyword_version': inasafe_keyword_version,
            'structure_class_field': 'FLOODED',
            'title': 'buildings_osm_4326',
            'layer_geometry': 'polygon',
            'layer_purpose': 'exposure',
            'layer_mode': 'classified',
            'exposure': 'structure'
        }
        # Keyword less layer
        keywordless_path = test_data_path('other', 'keywordless_layer.shp')
        self.keywordless_layer, _ = load_layer(keywordless_path)

    def tearDown(self):
        pass

    def test_get_hash_for_datasource(self):
        """Test we can reliably get a hash for a uri"""
        hash_value = self.keyword_io.hash_for_datasource(PG_URI)
        expected_hash = '7cc153e1b119ca54a91ddb98a56ea95e'
        message = "Got: %s\nExpected: %s" % (hash_value, expected_hash)
        self.assertEqual(hash_value, expected_hash, message)

    def test_write_read_keyword_from_uri(self):
        """Test we can set and get keywords for a non local datasource"""
        handle, filename = tempfile.mkstemp(
            '.db', 'keywords_', temp_dir())

        # Ensure the file is deleted before we try to write to it
        # fixes windows specific issue where you get a message like this
        # ERROR 1: c:\temp\inasafe\clip_jpxjnt.shp is not a directory.
        # This is because mkstemp creates the file handle and leaves
        # the file open.
        os.close(handle)
        os.remove(filename)
        expected_keywords = {
            'category': 'exposure',
            'datatype': 'itb',
            'subcategory': 'building'}
        # SQL insert test
        # On first write schema is empty and there is no matching hash
        self.keyword_io.set_keyword_db_path(filename)
        self.keyword_io.write_keywords_for_uri(PG_URI, expected_keywords)
        # SQL Update test
        # On second write schema is populated and we update matching hash
        expected_keywords = {
            'category': 'exposure',
            'datatype': 'OSM',  # <--note the change here!
            'subcategory': 'building'}
        self.keyword_io.write_keywords_for_uri(PG_URI, expected_keywords)
        # Test getting all keywords
        keywords = self.keyword_io.read_keyword_from_uri(PG_URI)
        message = 'Got: %s\n\nExpected %s\n\nDB: %s' % (
            keywords, expected_keywords, filename)
        self.assertDictEqual(keywords, expected_keywords, message)
        # Test getting just a single keyword
        keyword = self.keyword_io.read_keyword_from_uri(PG_URI, 'datatype')
        expected_keyword = 'OSM'
        message = 'Got: %s\n\nExpected %s\n\nDB: %s' % (
            keyword, expected_keyword, filename)
        # FIX: the original re-asserted the full keywords dict here, so the
        # single-keyword lookup result was never actually checked.
        self.assertEqual(keyword, expected_keyword, message)
        # Test deleting keywords actually does delete
        self.keyword_io.delete_keywords_for_uri(PG_URI)
        try:
            _ = self.keyword_io.read_keyword_from_uri(PG_URI, 'datatype')
            # FIX: the original did `assert message` on a non-empty string,
            # which always passes - a missing HashNotFoundError went
            # undetected. Fail explicitly instead.
            self.fail('Expected a HashNotFoundError to be raised')
        except HashNotFoundError:
            # we expect this outcome so good!
            pass

    def test_are_keywords_file_based(self):
        """Can we correctly determine if keywords should be written to file or
        to database?"""
        assert not self.keyword_io.are_keywords_file_based(self.sqlite_layer)
        assert self.keyword_io.are_keywords_file_based(self.raster_layer)
        assert self.keyword_io.are_keywords_file_based(self.vector_layer)

    def test_read_raster_file_keywords(self):
        """Can we read raster file keywords using generic readKeywords method
        """
        keywords = self.keyword_io.read_keywords(self.raster_layer)
        expected_keywords = self.expected_raster_keywords
        source = self.raster_layer.source()
        message = 'Got:\n%s\nExpected:\n%s\nSource:\n%s' % (
            keywords, expected_keywords, source)
        self.assertDictEqual(keywords, expected_keywords, message)

    def test_read_vector_file_keywords(self):
        """Test read vector file keywords with the generic readKeywords method.
        """
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        expected_keywords = self.expected_vector_keywords
        source = self.vector_layer.source()
        message = 'Got: %s\n\nExpected %s\n\nSource: %s' % (
            keywords, expected_keywords, source)
        self.assertDictEqual(keywords, expected_keywords, message)

    def test_read_keywordless_layer(self):
        """Test read 'keyword' file from keywordless layer.
        """
        self.assertRaises(
            NoKeywordsFoundError,
            self.keyword_io.read_keywords,
            self.keywordless_layer,
        )

    def test_update_keywords(self):
        """Test append file keywords with update_keywords method."""
        # Clone so the on-disk fixture is never modified by the test run.
        layer = clone_raster_layer(
            name='tsunami_wgs84',
            extension='.tif',
            include_keywords=True,
            source_directory=test_data_path('hazard'))
        new_keywords = {'category': 'exposure', 'test': 'TEST'}
        self.keyword_io.update_keywords(layer, new_keywords)
        keywords = self.keyword_io.read_keywords(layer)
        expected_keywords = {
            'category': 'exposure',
            'hazard_category': 'single_event',
            'title': 'Tsunami',
            'hazard': 'tsunami',
            'continuous_hazard_unit': 'metres',
            'test': 'TEST',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': inasafe_keyword_version
        }
        message = 'Got:\n%s\nExpected:\n%s' % (keywords, expected_keywords)
        self.assertDictEqual(keywords, expected_keywords, message)

    def test_read_db_keywords(self):
        """Can we read sqlite kw with the generic read_keywords method
        """
        db_path = test_data_path('other', 'test_keywords.db')
        self.read_db_keywords(db_path)

    def read_db_keywords(self, db_path):
        """Can we read sqlite keywords with the generic readKeywords method
        """
        self.keyword_io.set_keyword_db_path(db_path)
        # We need to use relative path so that the hash from URI will match
        local_path = os.path.join(
            os.path.dirname(__file__), 'exposure.sqlite')
        sqlite_building_path = test_data_path('exposure', 'exposure.sqlite')
        shutil.copy2(sqlite_building_path, local_path)
        uri = QgsDataSourceURI()
        uri.setDatabase('exposure.sqlite')
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        sqlite_layer = QgsVectorLayer(uri.uri(), 'OSM Buildings', 'spatialite')
        expected_source = (
            'dbname=\'exposure.sqlite\' table="buildings_osm_4326" ('
            'Geometry) sql=')
        message = 'Got source: %s\n\nExpected %s\n' % (
            sqlite_layer.source(), expected_source)
        self.assertEqual(sqlite_layer.source(), expected_source, message)
        keywords = self.keyword_io.read_keywords(sqlite_layer)
        expected_keywords = self.expected_sqlite_keywords
        message = 'Got: %s\n\nExpected %s\n\nSource: %s' % (
            keywords, expected_keywords, self.sqlite_layer.source())
        self.assertDictEqual(keywords, expected_keywords, message)
        # Delete SQL Layer so that we can delete the file
        del sqlite_layer
        os.remove(local_path)

    def test_copy_keywords(self):
        """Test we can copy the keywords."""
        out_path = unique_filename(
            prefix='test_copy_keywords', suffix='.keywords')
        self.keyword_io.copy_keywords(self.raster_layer, out_path)
        copied_keywords = read_file_keywords(out_path)
        expected_keywords = self.expected_raster_keywords
        message = 'Got:\n%s\nExpected:\n%s\nSource:\n%s' % (
            copied_keywords, expected_keywords, out_path)
        self.assertDictEqual(copied_keywords, expected_keywords, message)

    def test_definition(self):
        """Test we can get definitions for keywords.

        .. versionadded:: 3.2
        """
        keyword = 'hazards'
        definition = self.keyword_io.definition(keyword)
        self.assertTrue('description' in definition)

    def test_to_message(self):
        """Test we can convert keywords to a message object.

        .. versionadded:: 3.2
        """
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        message = self.keyword_io.to_message(keywords).to_text()
        self.assertIn('*Exposure*, structure------', message)

    def test_dict_to_row(self):
        """Test the dict to row helper works.

        .. versionadded:: 3.2
        """
        # _dict_to_row accepts a string representation of a dict ...
        keyword_value = (
            "{'high': ['Kawasan Rawan Bencana III'], "
            "'medium': ['Kawasan Rawan Bencana II'], "
            "'low': ['Kawasan Rawan Bencana I']}")
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*high*, Kawasan Rawan Bencana III------',
            table.to_text())
        # should also work passing a dict
        keyword_value = {
            'high': ['Kawasan Rawan Bencana III'],
            'medium': ['Kawasan Rawan Bencana II'],
            'low': ['Kawasan Rawan Bencana I']}
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*high*, Kawasan Rawan Bencana III------',
            table.to_text())
class KeywordIOTest(unittest.TestCase):
    """Tests for reading and writing of raster and vector data
    """

    def setUp(self):
        # Fresh KeywordIO instance per test; unittest runs setUp before
        # every test method so fixture state never leaks between tests.
        self.keyword_io = KeywordIO()

        # SQLite Layer
        uri = QgsDataSourceURI()
        sqlite_building_path = test_data_path('exposure', 'exposure.sqlite')
        uri.setDatabase(sqlite_building_path)
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        self.sqlite_layer = QgsVectorLayer(
            uri.uri(), 'OSM Buildings', 'spatialite')
        self.expected_sqlite_keywords = {
            'datatype': 'OSM'
        }

        # Raster Layer keywords
        hazard_path = test_data_path('hazard', 'tsunami_wgs84.tif')
        self.raster_layer, _ = load_layer(hazard_path)
        # NOTE(review): these expectations match the generic_continuous_flood
        # clone used in test_read_raster_file_keywords, not the tsunami layer
        # loaded above - confirm against the fixture data.
        self.expected_raster_keywords = {
            'hazard_category': 'single_event',
            'title': 'Generic Continuous Flood',
            'hazard': 'flood',
            'continuous_hazard_unit': 'generic',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': '3.2'
        }

        # Vector Layer keywords
        vector_path = test_data_path('exposure', 'buildings_osm_4326.shp')
        self.vector_layer, _ = load_layer(vector_path)
        self.expected_vector_keywords = {
            'keyword_version': '3.3',
            'structure_class_field': 'FLOODED',
            'title': 'buildings_osm_4326',
            'layer_geometry': 'polygon',
            'layer_purpose': 'exposure',
            'layer_mode': 'classified',
            'exposure': 'structure'
        }
        # Keyword less layer
        keywordless_path = test_data_path('other', 'keywordless_layer.shp')
        self.keywordless_layer, _ = load_layer(keywordless_path)
        # Keyword file
        self.keyword_path = test_data_path(
            'exposure', 'buildings_osm_4326.xml')

    def tearDown(self):
        pass

    def test_get_hash_for_datasource(self):
        """Test we can reliably get a hash for a uri"""
        hash_value = self.keyword_io.hash_for_datasource(PG_URI)
        expected_hash = '7cc153e1b119ca54a91ddb98a56ea95e'
        message = "Got: %s\nExpected: %s" % (hash_value, expected_hash)
        self.assertEqual(hash_value, expected_hash, message)

    def test_are_keywords_file_based(self):
        """Can we correctly determine if keywords should be written to file or
        to database?"""
        # Database-backed layers store keywords in the keyword db, not files.
        assert not self.keyword_io.are_keywords_file_based(self.sqlite_layer)
        assert self.keyword_io.are_keywords_file_based(self.raster_layer)
        assert self.keyword_io.are_keywords_file_based(self.vector_layer)

    def test_read_raster_file_keywords(self):
        """Can we read raster file keywords using generic readKeywords method
        """
        # Clone so the on-disk fixture is never modified by the test run.
        layer = clone_raster_layer(
            name='generic_continuous_flood',
            extension='.asc',
            include_keywords=True,
            source_directory=test_data_path('hazard'))
        keywords = self.keyword_io.read_keywords(layer)
        expected_keywords = self.expected_raster_keywords
        self.assertDictEqual(keywords, expected_keywords)

    def test_read_vector_file_keywords(self):
        """Test read vector file keywords with the generic readKeywords method.
        """
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        expected_keywords = self.expected_vector_keywords
        self.assertDictEqual(keywords, expected_keywords)

    def test_read_keywordless_layer(self):
        """Test read 'keyword' file from keywordless layer.
        """
        self.assertRaises(
            NoKeywordsFoundError,
            self.keyword_io.read_keywords,
            self.keywordless_layer,
        )

    def test_update_keywords(self):
        """Test append file keywords with update_keywords method."""
        layer = clone_raster_layer(
            name='tsunami_wgs84',
            extension='.tif',
            include_keywords=True,
            source_directory=test_data_path('hazard'))
        new_keywords = {
            'hazard_category': 'multiple_event'
        }
        # update_keywords should merge the new key over the existing set.
        self.keyword_io.update_keywords(layer, new_keywords)
        keywords = self.keyword_io.read_keywords(layer)
        expected_keywords = {
            'hazard_category': 'multiple_event',
            'title': 'Tsunami',
            'hazard': 'tsunami',
            'continuous_hazard_unit': 'metres',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': inasafe_keyword_version
        }
        # Stored keywords come back as unicode, so normalise the expectation.
        expected_keywords = {
            k: get_unicode(v) for k, v in expected_keywords.iteritems()
        }
        # maxDiff=None makes assertDictEqual print the full diff on failure.
        self.maxDiff = None
        self.assertDictEqual(keywords, expected_keywords)

    @unittest.skip('No longer used in the new metadata.')
    def test_read_db_keywords(self):
        """Can we read sqlite kw with the generic read_keywords method
        """
        db_path = test_data_path('other', 'test_keywords.db')
        self.read_db_keywords(db_path)

    def read_db_keywords(self, db_path):
        """Can we read sqlite keywords with the generic readKeywords method
        """
        self.keyword_io.set_keyword_db_path(db_path)
        # We need to use relative path so that the hash from URI will match
        local_path = os.path.join(
            os.path.dirname(__file__), 'exposure.sqlite')
        sqlite_building_path = test_data_path('exposure', 'exposure.sqlite')
        shutil.copy2(sqlite_building_path, local_path)
        uri = QgsDataSourceURI()
        uri.setDatabase('exposure.sqlite')
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        sqlite_layer = QgsVectorLayer(uri.uri(), 'OSM Buildings', 'spatialite')
        expected_source = (
            'dbname=\'exposure.sqlite\' table="buildings_osm_4326" ('
            'Geometry) sql=')
        self.assertEqual(sqlite_layer.source(), expected_source)
        keywords = self.keyword_io.read_keywords(sqlite_layer)
        expected_keywords = self.expected_sqlite_keywords
        self.assertDictEqual(keywords, expected_keywords)
        # Delete SQL Layer so that we can delete the file
        del sqlite_layer
        os.remove(local_path)

    def test_copy_keywords(self):
        """Test we can copy the keywords."""
        out_path = unique_filename(
            prefix='test_copy_keywords', suffix='.shp')
        layer = clone_raster_layer(
            name='generic_continuous_flood',
            extension='.asc',
            include_keywords=True,
            source_directory=test_data_path('hazard'))
        self.keyword_io.copy_keywords(layer, out_path)
        # copied_keywords = read_file_keywords(out_path.split('.')[0] + 'xml')
        copied_keywords = read_iso19115_metadata(out_path)
        expected_keywords = self.expected_raster_keywords
        # Copying rewrites metadata at the current keyword version.
        expected_keywords['keyword_version'] = inasafe_keyword_version
        self.maxDiff = None
        self.assertDictEqual(copied_keywords, expected_keywords)

    def test_definition(self):
        """Test we can get definitions for keywords.

        .. versionadded:: 3.2
        """
        keyword = 'hazards'
        keyword_definition = definition(keyword)
        self.assertTrue('description' in keyword_definition)

    def test_to_message(self):
        """Test we can convert keywords to a message object.

        .. versionadded:: 3.2
        """
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        # to_text() renders the message; check a known fragment is present.
        message = self.keyword_io.to_message(keywords).to_text()
        self.assertIn('*Exposure*, structure------', message)

    def test_layer_to_message(self):
        """Test to show augmented keywords if KeywordsIO ctor passed a layer.

        .. versionadded:: 3.3
        """
        keywords = KeywordIO(self.vector_layer)
        message = keywords.to_message().to_text()
        self.assertIn('*Reference system*, ', message)

    def test_dict_to_row(self):
        """Test the dict to row helper works.

        .. versionadded:: 3.2
        """
        # _dict_to_row accepts a string representation of a dict ...
        keyword_value = (
            "{'high': ['Kawasan Rawan Bencana III'], "
            "'medium': ['Kawasan Rawan Bencana II'], "
            "'low': ['Kawasan Rawan Bencana I']}")
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*high*, Kawasan Rawan Bencana III------',
            table.to_text())
        # should also work passing a dict
        keyword_value = {
            'high': ['Kawasan Rawan Bencana III'],
            'medium': ['Kawasan Rawan Bencana II'],
            'low': ['Kawasan Rawan Bencana I']}
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*high*, Kawasan Rawan Bencana III------',
            table.to_text())

    def test_keyword_io(self):
        """Test read keywords directly from keywords file

        .. versionadded:: 3.2
        """
        keywords = self.keyword_io.read_keywords_file(self.keyword_path)
        expected_keywords = self.expected_vector_keywords
        message = 'Got:\n%s\nExpected:\n%s\nSource:\n%s' % (
            keywords, expected_keywords, self.keyword_path)
        self.assertDictEqual(keywords, expected_keywords, message)
def _clip_vector_layer(
        layer,
        extent,
        extra_keywords=None,
        explode_flag=True,
        hard_clip_flag=False,
        explode_attribute=None):
    """Clip a Hazard or Exposure layer to the extents provided.

    The layer must be a vector layer or an exception will be thrown.

    The output layer will always be in WGS84/Geographic.

    :param layer: A valid QGIS vector layer.
    :type layer: QgsVectorLayer

    :param extent: Either an array representing the exposure layer extents
        in the form [xmin, ymin, xmax, ymax]. It is assumed that the
        coordinates are in EPSG:4326 although currently no checks are made
        to enforce this.
        or:
        A QgsGeometry of type polygon.
        **Polygon clipping is currently only supported for vector datasets.**
    :type extent: list(float, float, float, float), QgsGeometry

    :param extra_keywords: Optional keywords dictionary to be added to
        output layer.
    :type extra_keywords: dict

    :param explode_flag: A bool specifying whether multipart features
        should be 'exploded' into singleparts.
        **This parameter is ignored for raster layer clipping.**
    :type explode_flag: bool

    :param hard_clip_flag: A bool specifying whether line and polygon
        features that extend beyond the extents should be clipped such that
        they are reduced in size to the part of the geometry that intersects
        the extent only. Default is False.
        **This parameter is ignored for raster layer clipping.**
    :type hard_clip_flag: bool

    :param explode_attribute: A str specifying to which attribute #1, #2 and
        so on will be added in case of explode_flag being true. The
        attribute is modified only if there are at least 2 parts.
    :type explode_attribute: str

    :returns: Clipped layer (placed in the system temp dir). The output
        layer will be reprojected to EPSG:4326 if needed.
    :rtype: QgsVectorLayer

    :raises: InvalidParameterError, InvalidClipGeometryError,
        NoFeaturesInExtentError
    """
    if not layer or not extent:
        message = tr('Layer or Extent passed to clip is None.')
        raise InvalidParameterError(message)

    if layer.type() != QgsMapLayer.VectorLayer:
        message = tr(
            'Expected a vector layer but received a %s.'
            % str(layer.type()))
        raise InvalidParameterError(message)

    # handle, file_name = tempfile.mkstemp('.sqlite', 'clip_',
    #     temp_dir())
    handle, file_name = tempfile.mkstemp(
        '.shp', 'clip_', temp_dir())

    # Ensure the file is deleted before we try to write to it
    # fixes windows specific issue where you get a message like this
    # ERROR 1: c:\temp\inasafe\clip_jpxjnt.shp is not a directory.
    # This is because mkstemp creates the file handle and leaves
    # the file open.
    os.close(handle)
    os.remove(file_name)

    # Get the clip extents in the layer's native CRS
    geo_crs = QgsCoordinateReferenceSystem()
    geo_crs.createFromSrid(4326)
    transform = QgsCoordinateTransform(geo_crs, layer.crs())
    allowed_clip_values = [QGis.WKBPolygon, QGis.WKBPolygon25D]
    # FIX: use isinstance() for type checks rather than `type(x) is`.
    if isinstance(extent, list):
        rectangle = QgsRectangle(
            extent[0], extent[1], extent[2], extent[3])
        # noinspection PyCallByClass
        # noinspection PyTypeChecker
        polygon = QgsGeometry.fromRect(rectangle)
    elif (isinstance(extent, QgsGeometry) and
          extent.wkbType() in allowed_clip_values):
        # FIX: wkbType is a method - the original tested the *bound method
        # object* for membership in the WKB enum list, which is always
        # False, so a polygon QgsGeometry extent could never be accepted.
        # FIX: use the QgsRectangle bounding box directly;
        # transformBoundingBox() below expects a QgsRectangle, not the
        # QRectF that toRectF() would return.
        rectangle = extent.boundingBox()
        polygon = extent
    else:
        raise InvalidClipGeometryError(
            tr(
                'Clip geometry must be an extent or a single part '
                'polygon based geometry.'))
    projected_extent = transform.transformBoundingBox(rectangle)

    # Get vector layer
    provider = layer.dataProvider()
    if provider is None:
        message = tr(
            'Could not obtain data provider from '
            'layer "%s"' % layer.source())
        raise Exception(message)

    # Get the layer field list, select by our extent then write to disk
    # .. todo:: FIXME - for different geometry types we should implement
    #     different clipping behaviour e.g. reject polygons that
    #     intersect the edge of the bbox.
    request = QgsFeatureRequest()
    if not projected_extent.isEmpty():
        request.setFilterRect(projected_extent)
        request.setFlags(QgsFeatureRequest.ExactIntersect)
    field_list = provider.fields()

    writer = QgsVectorFileWriter(
        file_name,
        'UTF-8',
        field_list,
        layer.wkbType(),
        geo_crs,
        # 'SQLite')  # FIXME (Ole): This works but is far too slow
        'ESRI Shapefile')
    if writer.hasError() != QgsVectorFileWriter.NoError:
        message = tr(
            'Error when creating shapefile: <br>Filename:'
            '%s<br>Error: %s' % (file_name, writer.hasError()))
        raise Exception(message)

    # Reverse the coordinate xform now so that we can convert
    # geometries from layer crs to geocrs.
    transform = QgsCoordinateTransform(layer.crs(), geo_crs)
    # Retrieve every feature with its geometry and attributes
    count = 0
    has_multipart = False
    for feature in provider.getFeatures(request):
        geometry = feature.geometry()

        # Loop through the parts adding them to the output file
        # we write out single part features unless explode_flag is False
        if explode_flag:
            geometry_list = explode_multipart_geometry(geometry)
        else:
            geometry_list = [geometry]

        for part_index, part in enumerate(geometry_list):
            part.transform(transform)
            if hard_clip_flag:
                # Remove any dangling bits so only intersecting area is
                # kept.
                part = clip_geometry(polygon, part)
            if part is None:
                continue

            feature.setGeometry(part)
            # There are multiple parts and we want to show it in the
            # explode_attribute
            if part_index > 0 and explode_attribute is not None:
                has_multipart = True
            writer.addFeature(feature)
        count += 1

    del writer  # Flush to disk

    if count < 1:
        message = tr(
            'No features fall within the clip extents. Try panning / zooming '
            'to an area containing data and then try to run your analysis '
            'again. If hazard and exposure data doesn\'t overlap at all, it '
            'is not possible to do an analysis. Another possibility is that '
            'the layers do overlap but because they may have different '
            'spatial references, they appear to be disjointed. If this is the '
            'case, try to turn on reproject on-the-fly in QGIS.')
        raise NoFeaturesInExtentError(message)

    keyword_io = KeywordIO()
    if extra_keywords is None:
        extra_keywords = {}
    extra_keywords['had multipart polygon'] = has_multipart
    keyword_io.copy_keywords(
        layer, file_name, extra_keywords=extra_keywords)
    base_name = '%s clipped' % layer.name()
    layer = QgsVectorLayer(file_name, base_name, 'ogr')

    return layer
class KeywordIOTest(unittest.TestCase):
    """Tests for reading and writing of raster and vector data
    """

    def setUp(self):
        """Build the layers and expected keyword dicts shared by tests."""
        self.keyword_io = KeywordIO()

        # SQLite Layer
        uri = QgsDataSourceURI()
        sqlite_building_path = test_data_path('exposure', 'exposure.sqlite')
        uri.setDatabase(sqlite_building_path)
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        self.sqlite_layer = QgsVectorLayer(
            uri.uri(), 'OSM Buildings', 'spatialite')
        self.expected_sqlite_keywords = {
            'category': 'exposure',
            'datatype': 'OSM',
            'subcategory': 'building'
        }

        # Raster Layer keywords
        hazard_path = test_data_path('hazard', 'padang_tsunami_mw8.tif')
        self.raster_layer, _ = load_layer(hazard_path)
        self.expected_raster_keywords = {
            'category': 'hazard',
            'subcategory': 'tsunami',
            'unit': 'm',
            'title': 'A tsunami in Padang (Mw 8.8)'
        }

        # Vector Layer keywords
        vector_path = test_data_path('exposure', 'buildings_osm_4326.shp')
        self.vector_layer, _ = load_layer(vector_path)
        self.expected_vector_keywords = {
            'category': 'exposure',
            'datatype': 'osm',
            'subcategory': 'structure',
            'title': 'buildings_osm_4326',
            'purpose': 'dki'
        }

        # Keyword less layer
        keywordless_path = test_data_path('other', 'keywordless_layer.shp')
        self.keywordless_layer, _ = load_layer(keywordless_path)

    def tearDown(self):
        pass

    def test_get_hash_for_datasource(self):
        """Test we can reliably get a hash for a uri"""
        hash_value = self.keyword_io.hash_for_datasource(PG_URI)
        expected_hash = '7cc153e1b119ca54a91ddb98a56ea95e'
        message = "Got: %s\nExpected: %s" % (hash_value, expected_hash)
        assert hash_value == expected_hash, message

    def test_write_read_keyword_from_uri(self):
        """Test we can set and get keywords for a non local datasource"""
        handle, filename = tempfile.mkstemp('.db', 'keywords_', temp_dir())

        # Ensure the file is deleted before we try to write to it
        # fixes windows specific issue where you get a message like this
        # ERROR 1: c:\temp\inasafe\clip_jpxjnt.shp is not a directory.
        # This is because mkstemp creates the file handle and leaves
        # the file open.
        os.close(handle)
        os.remove(filename)
        expected_keywords = {
            'category': 'exposure',
            'datatype': 'itb',
            'subcategory': 'building'
        }
        # SQL insert test
        # On first write schema is empty and there is no matching hash
        self.keyword_io.set_keyword_db_path(filename)
        self.keyword_io.write_keywords_for_uri(PG_URI, expected_keywords)
        # SQL Update test
        # On second write schema is populated and we update matching hash
        expected_keywords = {
            'category': 'exposure',
            'datatype': 'OSM',  # <--note the change here!
            'subcategory': 'building'
        }
        self.keyword_io.write_keywords_for_uri(PG_URI, expected_keywords)
        # Test getting all keywords
        keywords = self.keyword_io.read_keyword_from_uri(PG_URI)
        message = 'Got: %s\n\nExpected %s\n\nDB: %s' % (
            keywords, expected_keywords, filename)
        assert keywords == expected_keywords, message
        # Test getting just a single keyword
        keyword = self.keyword_io.read_keyword_from_uri(PG_URI, 'datatype')
        expected_keyword = 'OSM'
        message = 'Got: %s\n\nExpected %s\n\nDB: %s' % (
            keyword, expected_keyword, filename)
        assert keyword == expected_keyword, message
        # Test deleting keywords actually does delete
        self.keyword_io.delete_keywords_for_uri(PG_URI)
        try:
            _ = self.keyword_io.read_keyword_from_uri(PG_URI, 'datatype')
            # if the above didnt cause an exception then bad
            message = 'Expected a HashNotFoundError to be raised'
            # BUGFIX: the original used `assert message`, which is always
            # true for a non-empty string, so the test could never fail
            # when the expected exception was not raised.
            assert False, message
        except HashNotFoundError:
            # we expect this outcome so good!
            pass

    def test_are_keywords_file_based(self):
        """Can we correctly determine if keywords should be written to file
        or to database?"""
        assert not self.keyword_io.are_keywords_file_based(self.sqlite_layer)
        assert self.keyword_io.are_keywords_file_based(self.raster_layer)
        assert self.keyword_io.are_keywords_file_based(self.vector_layer)

    def test_read_raster_file_keywords(self):
        """Can we read raster file keywords using generic readKeywords method
        """
        keywords = self.keyword_io.read_keywords(self.raster_layer)
        expected_keywords = self.expected_raster_keywords
        source = self.raster_layer.source()
        message = 'Got:\n%s\nExpected:\n%s\nSource:\n%s' % (
            keywords, expected_keywords, source)
        # assertEquals is a deprecated alias - use assertEqual.
        self.assertEqual(keywords, expected_keywords, message)

    def test_read_vector_file_keywords(self):
        """Test read vector file keywords with the generic readKeywords
        method.
        """
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        expected_keywords = self.expected_vector_keywords
        source = self.vector_layer.source()
        message = 'Got: %s\n\nExpected %s\n\nSource: %s' % (
            keywords, expected_keywords, source)
        assert keywords == expected_keywords, message

    def test_read_keywordless_layer(self):
        """Test read 'keyword' file from keywordless layer.
        """
        self.assertRaises(
            NoKeywordsFoundError,
            self.keyword_io.read_keywords,
            self.keywordless_layer,
        )

    def test_update_keywords(self):
        """Test append file keywords with update_keywords method."""
        layer = clone_raster_layer(
            name='padang_tsunami_mw8',
            extension='.tif',
            include_keywords=True,
            source_directory=test_data_path('hazard'))
        new_keywords = {'category': 'exposure', 'test': 'TEST'}
        self.keyword_io.update_keywords(layer, new_keywords)
        keywords = self.keyword_io.read_keywords(layer)
        expected_keywords = {
            'category': 'exposure',
            'test': 'TEST',
            'subcategory': 'tsunami',
            'unit': 'm',
            'title': 'A tsunami in Padang (Mw 8.8)'
        }
        message = 'Keywords: %s. Expected: %s' % (
            keywords, expected_keywords)
        self.assertEqual(keywords, expected_keywords, message)

    def test_read_db_keywords(self):
        """Can we read sqlite kw with the generic read_keywords method
        """
        db_path = test_data_path('other', 'test_keywords.db')
        self.read_db_keywords(db_path)

    def read_db_keywords(self, db_path):
        """Can we read sqlite keywords with the generic readKeywords method
        """
        self.keyword_io.set_keyword_db_path(db_path)
        # We need to use relative path so that the hash from URI will match
        local_path = os.path.join(
            os.path.dirname(__file__), 'exposure.sqlite')
        sqlite_building_path = test_data_path('exposure', 'exposure.sqlite')
        shutil.copy2(sqlite_building_path, local_path)
        uri = QgsDataSourceURI()
        uri.setDatabase('exposure.sqlite')
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        sqlite_layer = QgsVectorLayer(
            uri.uri(), 'OSM Buildings', 'spatialite')
        expected_source = (
            'dbname=\'exposure.sqlite\' table="buildings_osm_4326" ('
            'Geometry) sql=')
        message = 'Got source: %s\n\nExpected %s\n' % (
            sqlite_layer.source(), expected_source)
        self.assertEqual(sqlite_layer.source(), expected_source, message)
        keywords = self.keyword_io.read_keywords(sqlite_layer)
        expected_keywords = self.expected_sqlite_keywords
        message = 'Got: %s\n\nExpected %s\n\nSource: %s' % (
            keywords, expected_keywords, self.sqlite_layer.source())
        self.assertEqual(keywords, expected_keywords, message)
        # Delete SQL Layer so that we can delete the file
        del sqlite_layer
        os.remove(local_path)

    def test_copy_keywords(self):
        """Test we can copy the keywords."""
        out_path = unique_filename(
            prefix='test_copy_keywords', suffix='.keywords')
        self.keyword_io.copy_keywords(self.raster_layer, out_path)
        copied_keywords = read_file_keywords(out_path)
        expected_keywords = self.expected_raster_keywords
        message = 'Got:\n%s\nExpected:\n%s\nSource:\n%s' % (
            copied_keywords, expected_keywords, out_path)
        # assertEquals is a deprecated alias - use assertEqual.
        self.assertEqual(copied_keywords, expected_keywords, message)
def _clip_raster_layer(layer, extent, cell_size=None, extra_keywords=None):
    """Clip a Hazard or Exposure raster layer to the extents provided.

    The layer must be a raster layer or an exception will be thrown.

    .. note:: The extent *must* be in EPSG:4326.

    The output layer will always be in WGS84/Geographic.

    :param layer: A valid QGIS raster layer in EPSG:4326
    :type layer: QgsRasterLayer

    :param extent: An array representing the exposure layer extents in the
        form [xmin, ymin, xmax, ymax]. It is assumed that the coordinates
        are in EPSG:4326 although currently no checks are made to enforce
        this. or: A QgsGeometry of type polygon.
        **Polygon clipping currently only supported for vector datasets.**
    :type extent: list(float), QgsGeometry

    :param cell_size: Cell size (in GeoCRS) which the layer should be
        resampled to. If not provided for a raster layer (i.e.
        theCellSize=None), the native raster cell size will be used.
    :type cell_size: float

    :param extra_keywords: Optional keywords dictionary to be added to
        the output layer's keywords.
    :type extra_keywords: dict

    :returns: Output clipped layer (placed in the system temp dir).
    :rtype: QgsRasterLayer

    :raises: InvalidProjectionError - if input layer is a density layer in
        projected coordinates. See issue #123.
    """
    # Guard clauses: reject missing inputs and non-raster layers up front.
    if not layer or not extent:
        raise InvalidParameterError(
            tr('Layer or Extent passed to clip is None.'))
    if layer.type() != QgsMapLayer.RasterLayer:
        raise InvalidParameterError(
            tr('Expected a raster layer but received a %s.' %
               str(layer.type())))

    source_path = layer.source()

    # The source dataset must carry its keywords sidecar file - the clipped
    # copy is useless without them.
    base, _ = os.path.splitext(source_path)
    metadata_path = base + '.xml'
    missing_metadata = tr(
        'Input file to be clipped "%s" does not have the '
        'expected keywords file %s' % (source_path, metadata_path))
    verify(os.path.isfile(metadata_path), missing_metadata)

    # Raise exception if layer is projected and refers to density (issue
    # #123). FIXME (Ole): Need to deal with it - e.g. by automatically
    # reprojecting the layer at this point and setting the native
    # resolution accordingly in its keywords.
    keywords = read_iso19115_metadata(source_path)
    is_count_data = 'datatype' in keywords and keywords['datatype'] == 'count'
    if is_count_data and str(layer.crs().authid()) != 'EPSG:4326':
        raise InvalidProjectionError(
            'Layer %s represents count but has spatial reference "%s". '
            'Count layers must be given in WGS84 geographic coordinates, '
            'so please reproject and try again. For more information, '
            'see issue https://github.com/AIFDR/inasafe/issues/123' % (
                source_path, layer.crs().toProj4()))

    # gdalwarp (unlike gdal_translate) does not take projwin, so express
    # the clip extent as a KML cutline dataset instead.
    cutline_kml = extent_to_kml(extent)

    # Reserve a name for the clipped, resampled and reprojected layer.
    # mkstemp creates and opens the file, so close and remove it before
    # gdalwarp writes the real output there.
    handle, output_path = tempfile.mkstemp('.tif', 'clip_', temp_dir())
    os.close(handle)
    os.remove(output_path)

    candidates = which('gdalwarp')
    LOGGER.debug('Path for gdalwarp: %s' % candidates)
    if not candidates:
        raise CallGDALError(tr('gdalwarp could not be found on your computer'))
    # Use the first matching gdalwarp found
    gdal_warp = candidates[0]

    # When no cell size is given, omit -tr so the output keeps the native
    # raster dimensions.
    # NOTE(review): the command is assembled by string interpolation and
    # handed to a shell-style executor; paths containing quotes would break
    # it. Flagged only - behaviour deliberately unchanged.
    if cell_size is None:
        command = (
            '"%s" -q -t_srs EPSG:4326 -r near -cutline %s -crop_to_cutline '
            '-ot Float64 -of GTiff "%s" "%s"' % (
                gdal_warp, cutline_kml, source_path, output_path))
    else:
        command = (
            '"%s" -q -t_srs EPSG:4326 -r near -tr %s %s -cutline %s '
            '-crop_to_cutline -ot Float64 -of GTiff "%s" "%s"' % (
                gdal_warp, repr(cell_size), repr(cell_size), cutline_kml,
                source_path, output_path))
    LOGGER.debug(command)
    result = QProcess().execute(command)

    # For QProcess exit codes see
    # http://qt-project.org/doc/qt-4.8/qprocess.html#execute
    failure_details = {
        -2: tr('Process could not be started.'),  # cannot be started
        -1: tr('Process crashed.'),  # process crashed
    }
    if result in failure_details:
        raise CallGDALError(tr(
            '<p>Error while executing the following shell command:'
            '</p><pre>%s</pre><p>Error message: %s' % (
                command, failure_details[result])))

    # .. todo:: Check the result of the shell call is ok

    keyword_io = KeywordIO()
    keyword_io.copy_keywords(layer, output_path, extra_keywords=extra_keywords)
    base_name = '%s clipped' % layer.name()
    return QgsRasterLayer(output_path, base_name)
class KeywordIOTest(unittest.TestCase):
    """Tests for reading and writing of raster and vector data
    """

    def setUp(self):
        # Reader/writer under test, shared by all cases.
        self.keyword_io = KeywordIO()

        # SQLite Layer
        uri = QgsDataSourceURI()
        sqlite_building_path = standard_data_path(
            'exposure', 'exposure.sqlite')
        uri.setDatabase(sqlite_building_path)
        uri.setDataSource('', 'buildings_osm_4326', 'Geometry')
        self.sqlite_layer = QgsVectorLayer(
            uri.uri(), 'OSM Buildings', 'spatialite')
        self.expected_sqlite_keywords = {
            'datatype': 'OSM'
        }

        # Raster Layer keywords
        hazard_path = standard_data_path('hazard', 'tsunami_wgs84.tif')
        self.raster_layer, _ = load_layer(hazard_path)
        self.expected_raster_keywords = {
            'hazard_category': 'single_event',
            'title': 'Generic Continuous Flood',
            'hazard': 'flood',
            'continuous_hazard_unit': 'generic',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': '3.5'
        }

        # Vector Layer keywords
        vector_path = standard_data_path('exposure', 'buildings_osm_4326.shp')
        self.vector_layer, _ = load_layer(vector_path)
        self.expected_vector_keywords = {
            'keyword_version': '3.5',
            'value_map': {},
            'title': 'buildings_osm_4326',
            'layer_geometry': 'polygon',
            'layer_purpose': 'exposure',
            'layer_mode': 'classified',
            'exposure': 'structure',
        }

        # Keyword less layer
        keywordless_path = standard_data_path('other', 'keywordless_layer.shp')
        self.keywordless_layer, _ = load_layer(keywordless_path)

        # Keyword file
        self.keyword_path = standard_data_path(
            'exposure', 'buildings_osm_4326.xml')

    def test_read_raster_file_keywords(self):
        """Can we read raster file keywords using generic readKeywords method
        """
        layer = clone_raster_layer(
            name='generic_continuous_flood',
            extension='.asc',
            include_keywords=True,
            source_directory=standard_data_path('hazard'))
        keywords = self.keyword_io.read_keywords(layer)
        expected_keywords = self.expected_raster_keywords
        self.assertDictEqual(keywords, expected_keywords)

    def test_read_vector_file_keywords(self):
        """Test read vector file keywords with the generic readKeywords
        method.
        """
        self.maxDiff = None
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        expected_keywords = self.expected_vector_keywords
        self.assertDictEqual(keywords, expected_keywords)

    def test_read_keywordless_layer(self):
        """Test read 'keyword' file from keywordless layer.
        """
        self.assertRaises(
            NoKeywordsFoundError,
            self.keyword_io.read_keywords,
            self.keywordless_layer,
        )

    def test_update_keywords(self):
        """Test append file keywords with update_keywords method."""
        self.maxDiff = None
        layer = clone_raster_layer(
            name='tsunami_wgs84',
            extension='.tif',
            include_keywords=True,
            source_directory=standard_data_path('hazard'))
        layer.keywords = {
            'hazard_category': u'single_event',
            'title': u'tsunami_wgs84',
            'keyword_version': u'3.5',
            'hazard': u'tsunami',
            'continuous_hazard_unit': u'metres',
            'inasafe_fields': {},
            'layer_geometry': u'raster',
            'layer_purpose': u'hazard',
            'layer_mode': u'continuous',
        }
        new_keywords = {
            'hazard_category': 'multiple_event'
        }
        self.keyword_io.update_keywords(layer, new_keywords)
        keywords = self.keyword_io.read_keywords(layer)
        # NOTE(review): update_keywords appears to inject default tsunami
        # hazard class thresholds for every exposure type - the same class
        # boundaries are expected under each exposure key below.
        expected_keywords = {
            'hazard_category': 'multiple_event',
            'title': 'tsunami_wgs84',
            'hazard': 'tsunami',
            'continuous_hazard_unit': 'metres',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'thresholds': {
                'road': {
                    'tsunami_hazard_classes': {
                        'active': True,
                        'classes': {
                            'dry': [0.0, 0.1],
                            'high': [3.0, 8.0],
                            'medium': [1.0, 3.0],
                            'low': [0.1, 1.0],
                            'very high': [8.0, 16.68]
                        }
                    }
                },
                'structure': {
                    'tsunami_hazard_classes': {
                        'active': True,
                        'classes': {
                            'dry': [0.0, 0.1],
                            'high': [3.0, 8.0],
                            'medium': [1.0, 3.0],
                            'low': [0.1, 1.0],
                            'very high': [8.0, 16.68]
                        }
                    }
                },
                'place': {
                    'tsunami_hazard_classes': {
                        'active': True,
                        'classes': {
                            'dry': [0.0, 0.1],
                            'high': [3.0, 8.0],
                            'medium': [1.0, 3.0],
                            'low': [0.1, 1.0],
                            'very high': [8.0, 16.68]
                        }
                    }
                },
                'land_cover': {
                    'tsunami_hazard_classes': {
                        'active': True,
                        'classes': {
                            'dry': [0.0, 0.1],
                            'high': [3.0, 8.0],
                            'medium': [1.0, 3.0],
                            'low': [0.1, 1.0],
                            'very high': [8.0, 16.68]
                        }
                    }
                },
                'population': {
                    'tsunami_hazard_classes': {
                        'active': True,
                        'classes': {
                            'dry': [0.0, 0.1],
                            'high': [3.0, 8.0],
                            'medium': [1.0, 3.0],
                            'low': [0.1, 1.0],
                            'very high': [8.0, 16.68]
                        }
                    }
                }
            },
            'keyword_version': inasafe_keyword_version
        }
        # Compare thresholds separately: the flat keywords are coerced to
        # unicode for comparison but the nested thresholds dict is not.
        expected_thresholds = expected_keywords.pop('thresholds')
        expected_keywords = {
            k: get_unicode(v) for k, v in expected_keywords.iteritems()
        }
        thresholds_keywords = keywords.pop('thresholds')
        self.assertDictEqual(expected_keywords, keywords)
        self.assertDictEqual(expected_thresholds, thresholds_keywords)

    def test_copy_keywords(self):
        """Test we can copy the keywords."""
        self.maxDiff = None
        out_path = unique_filename(
            prefix='test_copy_keywords', suffix='.shp')
        layer = clone_raster_layer(
            name='generic_continuous_flood',
            extension='.asc',
            include_keywords=True,
            source_directory=standard_data_path('hazard'))
        self.keyword_io.copy_keywords(layer, out_path)
        # copied_keywords = read_file_keywords(out_path.split('.')[0] + 'xml')
        copied_keywords = read_iso19115_metadata(out_path)
        expected_keywords = self.expected_raster_keywords
        # copy_keywords stamps the current keyword version on the copy.
        expected_keywords['keyword_version'] = inasafe_keyword_version
        self.assertDictEqual(copied_keywords, expected_keywords)

    def test_to_message(self):
        """Test we can convert keywords to a message object.

        .. versionadded:: 3.2
        """
        keywords = self.keyword_io.read_keywords(self.vector_layer)
        message = self.keyword_io.to_message(keywords).to_text()
        self.assertIn('*Exposure*, structure------', message)

    def test_layer_to_message(self):
        """Test to show augmented keywords if KeywordsIO ctor passed a layer.

        .. versionadded:: 3.3
        """
        keywords = KeywordIO(self.vector_layer)
        message = keywords.to_message().to_text()
        self.assertIn('*Reference system*, ', message)

    def test_dict_to_row(self):
        """Test the dict to row helper works.

        .. versionadded:: 3.2
        """
        # _dict_to_row accepts the repr() string form of a dict ...
        keyword_value = (
            "{'high': ['Kawasan Rawan Bencana III'], "
            "'medium': ['Kawasan Rawan Bencana II'], "
            "'low': ['Kawasan Rawan Bencana I']}")
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*High*, Kawasan Rawan Bencana III------',
            table.to_text())
        # should also work passing a dict
        keyword_value = {
            'high': ['Kawasan Rawan Bencana III'],
            'medium': ['Kawasan Rawan Bencana II'],
            'low': ['Kawasan Rawan Bencana I']}
        table = self.keyword_io._dict_to_row(keyword_value)
        self.assertIn(
            u'\n---\n*High*, Kawasan Rawan Bencana III------',
            table.to_text())

    def test_keyword_io(self):
        """Test read keywords directly from keywords file

        .. versionadded:: 3.2
        """
        self.maxDiff = None
        keywords = self.keyword_io.read_keywords_file(self.keyword_path)
        expected_keywords = self.expected_vector_keywords
        self.assertDictEqual(keywords, expected_keywords)