def __init__(self, message=None):
    """General constructor.

    :param message: The optional error message.
    :type message: str, unicode, MessageElement

    :raises TypeError: If message is not None and not one of the
        supported types.
    """
    # Note: the original docstring was malformed (four/five quote marks);
    # fixed to a regular triple-quoted docstring.
    if isinstance(message, unicode):
        super(InaSAFEError, self).__init__(get_string(message))
        self.message = message
    elif isinstance(message, str):
        super(InaSAFEError, self).__init__(message)
        self.message = get_unicode(message)
    elif isinstance(message, MessageElement):
        super(InaSAFEError, self).__init__(message.to_text())
        self.message = get_unicode(message.to_text())
    elif message is None:
        pass
    elif isinstance(message, BaseException):
        super(InaSAFEError, self).__init__(unicode(message))
        self.message = unicode(message)
    # This shouldn't happen...
    else:
        # Give the developer a hint about what was actually passed.
        raise TypeError(
            'message must be str, unicode, MessageElement or an '
            'exception, got %s' % type(message))
def generate_insufficient_overlap_message(
        e, exposure_geoextent, exposure_layer, hazard_geoextent,
        hazard_layer, viewport_geoextent):
    """Generate insufficient overlap message.

    :param e: An exception.
    :type e: Exception

    :param exposure_geoextent: Extent of the exposure layer in the form
        [xmin, ymin, xmax, ymax] in EPSG:4326.
    :type exposure_geoextent: list

    :param exposure_layer: Exposure layer.
    :type exposure_layer: QgsMapLayer

    :param hazard_geoextent: Extent of the hazard layer in the form
        [xmin, ymin, xmax, ymax] in EPSG:4326.
    :type hazard_geoextent: list

    :param hazard_layer: Hazard layer instance.
    :type hazard_layer: QgsMapLayer

    :param viewport_geoextent: Viewport extents as a list
        [xmin, ymin, xmax, ymax] in EPSG:4326.
    :type viewport_geoextent: list

    :return: An InaSAFE message object.
    :rtype: safe.messaging.Message
    """
    description = tr(
        'There was insufficient overlap between the input layers and / or the '
        'layers and the viewable area. Please select two overlapping layers '
        'and zoom or pan to them or disable viewable area clipping in the '
        'options dialog. Full details follow:')
    message = m.Message(description)
    message.add(m.Paragraph(tr('Failed to obtain the optimal extent given:')))
    analysis_inputs = m.BulletedList()
    # We must use Qt string interpolators for tr to work properly
    bullet_entries = [
        (tr('Hazard: %s'), hazard_layer.source()),
        (tr('Exposure: %s'), exposure_layer.source()),
        (tr('Viewable area Geo Extent: %s'), get_unicode(viewport_geoextent)),
        (tr('Hazard Geo Extent: %s'), get_unicode(hazard_geoextent)),
        (tr('Exposure Geo Extent: %s'), get_unicode(exposure_geoextent)),
        (tr('Details: %s'), get_unicode(e)),
    ]
    for template, detail in bullet_entries:
        analysis_inputs.add(template % detail)
    message.add(analysis_inputs)
    return message
def test_get_unicode(self):
    """Test get_unicode function."""
    text = 'Test á, é, í, ó, ú, ü, ñ, ¿'
    unicode_repr = u'Test \xe1, \xe9, \xed, \xf3, \xfa, \xfc, \xf1, \xbf'
    # Bug fix: the expected value (unicode_repr) goes first in the
    # failure message; the original printed actual and expected swapped.
    message = 'It should return %s, but it returned %s' % (
        unicode_repr, get_unicode(text))
    self.assertEqual(get_unicode(text), unicode_repr, message)
def generate_insufficient_overlap_message(e, exposure_geoextent,
                                          exposure_layer, hazard_geoextent,
                                          hazard_layer, viewport_geoextent):
    """Generate insufficient overlap message.

    :param e: An exception.
    :type e: Exception

    :param exposure_geoextent: Extent of the exposure layer in the form
        [xmin, ymin, xmax, ymax] in EPSG:4326.
    :type exposure_geoextent: list

    :param exposure_layer: Exposure layer.
    :type exposure_layer: QgsMapLayer

    :param hazard_geoextent: Extent of the hazard layer in the form
        [xmin, ymin, xmax, ymax] in EPSG:4326.
    :type hazard_geoextent: list

    :param hazard_layer: Hazard layer instance.
    :type hazard_layer: QgsMapLayer

    :param viewport_geoextent: Viewport extents as a list
        [xmin, ymin, xmax, ymax] in EPSG:4326.
    :type viewport_geoextent: list

    :return: An InaSAFE message object.
    :rtype: Message
    """
    description = tr(
        'There was insufficient overlap between the input layers and / or the '
        'layers and the viewable area. Please select two overlapping layers '
        'and zoom or pan to them or disable viewable area clipping in the '
        'options dialog. Full details follow:')
    message = m.Message(description)
    intro = m.Paragraph(tr('Failed to obtain the optimal extent given:'))
    message.add(intro)
    inputs_list = m.BulletedList()
    # We must use Qt string interpolators for tr to work properly
    inputs_list.add(tr('Hazard: %s') % hazard_layer.source())
    inputs_list.add(tr('Exposure: %s') % exposure_layer.source())
    inputs_list.add(
        tr('Viewable area Geo Extent: %s') % get_unicode(viewport_geoextent))
    inputs_list.add(
        tr('Hazard Geo Extent: %s') % get_unicode(hazard_geoextent))
    inputs_list.add(
        tr('Exposure Geo Extent: %s') % get_unicode(exposure_geoextent))
    inputs_list.add(tr('Details: %s') % get_unicode(e))
    message.add(inputs_list)
    return message
def test_update_keywords(self):
    """Test append file keywords with update_keywords method."""
    self.maxDiff = None
    layer = clone_raster_layer(
        name='tsunami_wgs84',
        extension='.tif',
        include_keywords=True,
        source_directory=standard_data_path('hazard'))
    self.keyword_io.update_keywords(
        layer, {'hazard_category': 'multiple_event'})
    stored_keywords = self.keyword_io.read_keywords(layer)
    expected = {
        'hazard_category': 'multiple_event',
        'title': 'Tsunami',
        'hazard': 'tsunami',
        'continuous_hazard_unit': 'metres',
        'layer_geometry': 'raster',
        'layer_purpose': 'hazard',
        'layer_mode': 'continuous',
        'keyword_version': inasafe_keyword_version,
    }
    # Stored keyword values come back as unicode, so normalise the
    # expectation the same way before comparing.
    expected = {
        key: get_unicode(value) for key, value in expected.iteritems()}
    self.assertDictEqual(stored_keywords, expected)
def test_str_unicode_str(self):
    """Test if str(unicode(str)) works correctly."""
    # NOTE(review): under Python 2 without unicode_literals, calling
    # .encode('utf-8') on a non-ASCII byte-string literal performs an
    # implicit ASCII decode first and would raise UnicodeDecodeError —
    # verify this module imports unicode_literals (a sibling copy of this
    # test omits the .encode call entirely).
    text = 'Test á, é, í, ó, ú, ü, ñ, ¿'.encode('utf-8')
    # Round-trip: bytes -> unicode -> bytes should be lossless.
    unicode_repr = get_unicode(text)
    str_repr = get_string(unicode_repr)
    message = 'It should return %s, but it returned %s' % (text, str_repr)
    self.assertEqual(text, str_repr, message)
def test_update_keywords(self):
    """Test append file keywords with update_keywords method."""
    self.maxDiff = None
    layer = clone_raster_layer(
        name='tsunami_wgs84',
        extension='.tif',
        include_keywords=True,
        source_directory=standard_data_path('hazard'))
    new_keywords = {'hazard_category': 'multiple_event'}
    self.keyword_io.update_keywords(layer, new_keywords)
    actual_keywords = self.keyword_io.read_keywords(layer)
    # Every value read back is unicode; coerce the expectation to match.
    expected_keywords = {
        raw_key: get_unicode(raw_value)
        for raw_key, raw_value in {
            'hazard_category': 'multiple_event',
            'title': 'Tsunami',
            'hazard': 'tsunami',
            'continuous_hazard_unit': 'metres',
            'layer_geometry': 'raster',
            'layer_purpose': 'hazard',
            'layer_mode': 'continuous',
            'keyword_version': inasafe_keyword_version,
        }.iteritems()
    }
    self.assertDictEqual(actual_keywords, expected_keywords)
def tr(text, context='@default'):
    """We define a tr() alias here since the utilities implementation below
    is not a class and does not inherit from QObject.

    .. note:: see http://tinyurl.com/pyqt-differences

    :param text: String to be translated
    :type text: str, unicode

    :param context: A context for the translation. Since a same can be
        translated to different text depends on the context.
    :type context: str

    :returns: Translated version of the given string if available,
        otherwise the original string.
    :rtype: str, unicode
    """
    # Ensure it's in unicode
    text = get_unicode(text)
    # noinspection PyCallByClass,PyTypeChecker,PyArgumentList
    translated_text = QCoreApplication.translate(context, text)
    # Check if there is missing container. If so, return the original text.
    # See #3164
    if text.count('%') == translated_text.count('%'):
        return translated_text
    else:
        content = ('There is a problem in the translation text.\n'
                   'The original text: "%s".\n'
                   'The translation: "%s".\n'
                   'The number of %% character does not match (%s and %s).'
                   'Please check the translation in transifex for %s.'
                   % (text, translated_text, text.count('%'),
                      # Bug fix: count literal '%' characters (was
                      # counting '%s'), so the logged counts match the
                      # comparison above.
                      translated_text.count('%'), locale()))
        LOGGER.warning(content)
        return text
def test_str_unicode_str(self):
    """Test if str(unicode(str)) works correctly."""
    original = 'Test á, é, í, ó, ú, ü, ñ, ¿'
    # Round-trip through unicode and back to a byte string.
    round_tripped = get_string(get_unicode(original))
    message = 'It should return %s, but it returned %s' % (
        original, round_tripped)
    self.assertEqual(original, round_tripped, message)
def html_to_file(html, file_path=None, open_browser=False):
    """Save the html to an html file adapting the paths to the filesystem.

    :param html: the html for the output file.
    :type html: str

    :param file_path: the path for the html output file. A unique
        temporary name is generated when this is None.
    :type file_path: str

    :param open_browser: if true open the generated html in an external
        browser
    :type open_browser: bool
    """
    if file_path is None:
        file_path = unique_filename(suffix='.html')
    # The codecs writer needs unicode input to encode as utf-8.
    content = get_unicode(html)
    with codecs.open(file_path, 'w', encoding='utf-8') as handle:
        handle.write(content)
    if open_browser:
        open_in_browser(file_path)
def _keywords_to_string(keywords, sublayer=None):
    """Render a keywords dict as a flat ``key: value`` string.

    :param keywords: Dictionary containing the keywords to stringify.
        Only simple (non-nested) keyword dicts should be passed here,
        not multilayer dicts.
    :type keywords: dict

    :param sublayer: Optional group marker for a sub layer. When given,
        the output is prefixed with a ``[sublayer]`` header line.
    :type sublayer: str

    :returns: The rendered keywords list, one ``key: value`` per line.
    :rtype: unicode

    :raises: Any exceptions are propagated.
    """
    result = get_unicode("")
    if sublayer is not None:
        result = "[%s]\n" % sublayer
    for key, value in keywords.items():
        # Keys must be plain strings without the reserved ':' separator.
        msg = (
            "Key in keywords dictionary must be a string. "
            "I got %s with type %s" % (key, str(type(key))[1:-1]))
        verify(isinstance(key, basestring), msg)
        msg = (
            'Key in keywords dictionary must not contain the ":" '
            'character. I got "%s"' % key)
        verify(":" not in key, msg)
        result += "%s: %s\n" % (key, value)
    return result
def toNewlineFreeString(self):
    """Return a string representation of the table which contains no
    newlines.

    .. note:: any preformatted <pre> blocks will be adversely affected by
        this.
    """
    rendered = self.__str__()
    return get_unicode(rendered.replace('\n', ''))
def add_list_entry(self, key, value):
    """Add an item to the keywords list given its key/value.

    The key and value must both be valid, non empty strings or an
    InvalidKVPError will be raised. If an entry with the same key exists,
    it's value will be replaced with value.

    It will add the current key/value pair to the list if it is not
    already present. The kvp will also be stored in the data of the
    listwidgetitem as a simple string delimited with a bar ('|').

    :param key: The key part of the key value pair (kvp).
    :type key: str, unicode

    :param value: Value part of the key value pair (kvp).
    :type value: str, unicode
    """
    if key is None or key == '':
        return
    # Both halves of the pair are handled as unicode.
    key = get_unicode(key)
    value = get_unicode(value)
    # Colons are the key/value separator, so replace any embedded ones
    # and tell the user about it.
    message = ''
    if ':' in key:
        key = key.replace(':', '.')
        message = self.tr('Colons are not allowed, replaced with "."')
    if ':' in value:
        value = value.replace(':', '.')
        message = self.tr('Colons are not allowed, replaced with "."')
    if message:
        self.lblMessage.setText(message)
        self.lblMessage.show()
    else:
        self.lblMessage.setText('')
        self.lblMessage.hide()
    item = QtGui.QListWidgetItem(key + ':' + value)
    # We are going to replace, so remove it if it exists already
    self.remove_item_by_key(key)
    item.setData(QtCore.Qt.UserRole, key + '|' + value)
    self.lstKeywords.insertItem(0, item)
def set_widgets(self):
    """Set widgets on the Source tab."""
    def _populate(line_edit, keyword):
        # Show the keyword value (treating 0 as a real value, like the
        # original `if value or value == 0` test); clear otherwise.
        value = self.parent.get_existing_keyword(keyword)
        if value or value == 0:
            line_edit.setText(get_unicode(value))
        else:
            line_edit.clear()

    # Just set values based on existing keywords
    _populate(self.leSource, 'source')
    _populate(self.leSource_scale, 'scale')
    source_date = self.parent.get_existing_keyword('date')
    if source_date:
        self.ckbSource_date.setChecked(True)
        self.dtSource_date.setDateTime(
            QDateTime.fromString(
                get_unicode(source_date), 'yyyy-MM-dd HH:mm:ss'))
    else:
        self.ckbSource_date.setChecked(False)
        self.dtSource_date.clear()
    source_url = self.parent.get_existing_keyword('url')
    try:
        # QUrl-like values need explicit conversion to a string.
        source_url = source_url.toString()
    except AttributeError:
        pass
    if source_url or source_url == 0:
        self.leSource_url.setText(get_unicode(source_url))
    else:
        self.leSource_url.clear()
    _populate(self.leSource_license, 'license')
def __init__(self, text, **kwargs):
    """Create a PlainText message element.

    :param text: Text to add to the message.
    :type text: str

    Remaining keyword arguments are forwarded to the base class so an
    exception is raised if invalid keywords were passed. See:

    http://stackoverflow.com/questions/13124961/
    how-to-pass-arguments-efficiently-kwargs-in-python
    """
    super(PlainText, self).__init__(**kwargs)
    # Stored text is always unicode.
    self.text = get_unicode(text)
def tr(text):
    """We define a tr() alias here since the utilities implementation below
    is not a class and does not inherit from QObject.

    .. note:: see http://tinyurl.com/pyqt-differences

    :param text: String to be translated
    :type text: str, unicode

    :returns: Translated version of the given string if available,
        otherwise the original string.
    :rtype: str, unicode
    """
    # QCoreApplication.translate expects unicode input.
    # noinspection PyCallByClass,PyTypeChecker,PyArgumentList
    return QCoreApplication.translate('@default', get_unicode(text))
def xml(self):
    """The xml string representation.

    :return: the xml
    :rtype: str
    """
    # Start from the open tag produced by the base serialiser, append one
    # element per impact-function field, then close the step element.
    xml = self._get_xml(False)
    for field_name in self.impact_functions_fields:
        element = Element(field_name)
        element.text = get_unicode(self.data(field_name))
        xml += tostring(element)
    return xml + '</provenance_step>'
def tr(text, context='@default'):
    """We define a tr() alias here since the utilities implementation below
    is not a class and does not inherit from QObject.

    .. note:: see http://tinyurl.com/pyqt-differences

    :param text: String to be translated
    :type text: str, unicode

    :param context: A context for the translation. Since a same can be
        translated to different text depends on the context.
    :type context: str

    :returns: Translated version of the given string if available,
        otherwise the original string.
    :rtype: str, unicode
    """
    # Ensure it's in unicode
    text = get_unicode(text)
    # noinspection PyCallByClass,PyTypeChecker,PyArgumentList
    translated_text = QCoreApplication.translate(context, text)
    # Check if there is missing container. If so, return the original text.
    # See #3164
    if text.count('%') == translated_text.count('%'):
        return translated_text
    else:
        content = (
            'There is a problem in the translation text.\n'
            'The original text: "%s".\n'
            'The translation: "%s".\n'
            'The number of %% character does not match (%s and %s).'
            'Please check the translation in transifex for %s.' % (
                text,
                translated_text,
                text.count('%'),
                # Bug fix: count literal '%' characters (was counting
                # '%s'), so the logged counts match the comparison above.
                translated_text.count('%'),
                locale()
            ))
        LOGGER.warning(content)
        return text
def __init__(self, layer, name=None):
    """Init function.

    :param layer: A layer. Can be QgsMapLayer or safe.storage.layer.
    :type layer: QgsMapLayer, Layer

    :param name: A layer's name
    :type name: Basestring or None
    """
    # Merely initialization
    self._layer = None
    self._keywords = {}
    self.layer = layer
    self._name = None
    if name:
        self._name = name
        return
    try:
        # RM: convert title to string. Makes sure it is str
        self._name = get_unicode(self.keyword("title"))
    except KeywordNotFoundError:
        self._name = ""
def __init__(self, layer, name=None):
    """Init function.

    :param layer: A layer. Can be QgsMapLayer or safe.storage.layer.
    :type layer: QgsMapLayer, Layer

    :param name: A layer's name
    :type name: Basestring or None
    """
    # Merely initialization
    self._layer = None
    self._keywords = {}
    self.layer = layer
    self._name = None
    fallback = None
    if not name:
        try:
            # RM: convert title to string. Makes sure it is str
            fallback = get_unicode(self.keyword('title'))
        except KeywordNotFoundError:
            fallback = ''
    self._name = name if name else fallback
def test_update_keywords(self):
    """Test append file keywords with update_keywords method."""
    self.maxDiff = None
    layer = clone_raster_layer(
        name='tsunami_wgs84',
        extension='.tif',
        include_keywords=True,
        source_directory=standard_data_path('hazard'))
    layer.keywords = {
        'hazard_category': u'single_event',
        'title': u'tsunami_wgs84',
        'keyword_version': u'3.5',
        'hazard': u'tsunami',
        'continuous_hazard_unit': u'metres',
        'inasafe_fields': {},
        'layer_geometry': u'raster',
        'layer_purpose': u'hazard',
        'layer_mode': u'continuous',
    }
    self.keyword_io.update_keywords(
        layer, {'hazard_category': 'multiple_event'})
    keywords = self.keyword_io.read_keywords(layer)
    # The same class thresholds are expected for every exposure type, so
    # build the per-exposure mapping once with a comprehension.
    tsunami_classes = {
        'tsunami_hazard_classes': {
            'active': True,
            'classes': {
                'dry': [0.0, 0.1],
                'low': [0.1, 1.0],
                'medium': [1.0, 3.0],
                'high': [3.0, 8.0],
                'very high': [8.0, 16.68],
            }
        }
    }
    expected_thresholds = {
        exposure: tsunami_classes
        for exposure in [
            'road', 'structure', 'place', 'land_cover', 'population']
    }
    expected_keywords = {
        'hazard_category': 'multiple_event',
        'title': 'tsunami_wgs84',
        'hazard': 'tsunami',
        'continuous_hazard_unit': 'metres',
        'layer_geometry': 'raster',
        'layer_purpose': 'hazard',
        'layer_mode': 'continuous',
        'keyword_version': inasafe_keyword_version,
    }
    # Scalar keyword values are stored as unicode.
    expected_keywords = {
        k: get_unicode(v) for k, v in expected_keywords.iteritems()}
    thresholds_keywords = keywords.pop('thresholds')
    self.assertDictEqual(expected_keywords, keywords)
    self.assertDictEqual(expected_thresholds, thresholds_keywords)
def get_keywords(self):
    """Obtain the state of the dialog as a keywords dict.

    :returns: Keywords reflecting the state of the dialog.
    :rtype: dict
    """
    # NOTE(review): indentation reconstructed from a flattened source —
    # nesting of the conditional blocks below should be verified against
    # the original wizard dialog module.
    keywords = {}
    inasafe_fields = {}
    keywords['layer_geometry'] = self.get_layer_geometry_key()
    if self.step_kw_purpose.selected_purpose():
        keywords['layer_purpose'] = self.step_kw_purpose.\
            selected_purpose()['key']
    if self.step_kw_subcategory.selected_subcategory():
        key = self.step_kw_purpose.selected_purpose()['key']
        keywords[key] = self.step_kw_subcategory.\
            selected_subcategory()['key']
    # Raster layers additionally record which band carries the data.
    if self.get_layer_geometry_key() == layer_geometry_raster['key']:
        if self.step_kw_band_selector.selected_band():
            keywords['active_band'] = self.step_kw_band_selector.\
                selected_band()
    if keywords['layer_purpose'] == layer_purpose_hazard['key']:
        if self.step_kw_hazard_category.selected_hazard_category():
            keywords['hazard_category'] \
                = self.step_kw_hazard_category.\
                selected_hazard_category()['key']
    if self.step_kw_layermode.selected_layermode():
        keywords['layer_mode'] = self.step_kw_layermode.\
            selected_layermode()['key']
    if self.step_kw_unit.selected_unit():
        # Hazard layers use the continuous-hazard unit keyword; exposure
        # layers use the exposure unit keyword.
        if self.step_kw_purpose.selected_purpose() == layer_purpose_hazard:
            key = continuous_hazard_unit['key']
        else:
            key = exposure_unit['key']
        keywords[key] = self.step_kw_unit.selected_unit()['key']
    if self.step_kw_field.selected_fields():
        field_key = self.field_keyword_for_the_layer()
        inasafe_fields[field_key] = self.step_kw_field.selected_fields()
    if self.step_kw_classification.selected_classification():
        keywords['classification'] = self.step_kw_classification.\
            selected_classification()['key']
    if keywords['layer_purpose'] == layer_purpose_hazard['key']:
        # Hazard layers can carry several classifications at once.
        multi_classifications = self.step_kw_multi_classifications.\
            get_current_state()
        value_maps = multi_classifications.get('value_maps')
        if value_maps is not None:
            keywords['value_maps'] = value_maps
        thresholds = multi_classifications.get('thresholds')
        if thresholds is not None:
            keywords['thresholds'] = thresholds
    else:
        # Non-hazard layers: thresholds for continuous data, a value map
        # for classified data.
        if self.step_kw_layermode.selected_layermode():
            layer_mode = self.step_kw_layermode.selected_layermode()
            if layer_mode == layer_mode_continuous:
                thresholds = self.step_kw_threshold.get_threshold()
                if thresholds:
                    keywords['thresholds'] = thresholds
            elif layer_mode == layer_mode_classified:
                value_map = self.step_kw_classify.selected_mapping()
                if value_map:
                    keywords['value_map'] = value_map
    # Source metadata: only non-empty widgets contribute keywords.
    if self.step_kw_source.leSource.text():
        keywords['source'] = get_unicode(
            self.step_kw_source.leSource.text())
    if self.step_kw_source.leSource_url.text():
        keywords['url'] = get_unicode(
            self.step_kw_source.leSource_url.text())
    if self.step_kw_source.leSource_scale.text():
        keywords['scale'] = get_unicode(
            self.step_kw_source.leSource_scale.text())
    if self.step_kw_source.ckbSource_date.isChecked():
        keywords['date'] = self.step_kw_source.dtSource_date.dateTime()
    if self.step_kw_source.leSource_license.text():
        keywords['license'] = get_unicode(
            self.step_kw_source.leSource_license.text())
    if self.step_kw_title.leTitle.text():
        keywords['title'] = get_unicode(self.step_kw_title.leTitle.text())
    inasafe_fields.update(self.step_kw_inasafe_fields.get_inasafe_fields())
    inasafe_fields.update(
        self.step_kw_default_inasafe_fields.get_inasafe_fields())
    inasafe_fields.update(
        self.step_kw_fields_mapping.get_field_mapping()['fields'])
    if inasafe_fields:
        keywords['inasafe_fields'] = inasafe_fields
    inasafe_default_values = {}
    if keywords['layer_geometry'] == layer_geometry_raster['key']:
        pass
        # Notes(IS): Skipped assigning raster inasafe default value for
        # now.
        # inasafe_default_values = self.\
        #     step_kw_inasafe_raster_default_values.\
        #     get_inasafe_default_values()
    else:
        inasafe_default_values.update(self.step_kw_default_inasafe_fields.
                                      get_inasafe_default_values())
        inasafe_default_values.update(
            self.step_kw_fields_mapping.get_field_mapping()['values'])
    if inasafe_default_values:
        keywords['inasafe_default_values'] = inasafe_default_values
    return keywords
def run(self):
    """Risk plugin for classified polygon hazard on building/structure.

    Counts number of building exposed to each hazard zones.

    :returns: Impact vector layer building exposed to each hazard zones.
        Table with number of buildings affected
    :rtype: Vector
    """
    # NOTE(review): indentation reconstructed from a flattened source —
    # verify block nesting against the original impact function module.
    # Value from layer's keywords
    self.hazard_class_attribute = self.hazard.keyword('field')
    self.hazard_class_mapping = self.hazard.keyword('value_map')
    self.exposure_class_attribute = self.exposure.keyword(
        'structure_class_field')
    try:
        exposure_value_mapping = self.exposure.keyword('value_mapping')
    except KeywordNotFoundError:
        # Generic IF, the keyword might not be defined base.py
        exposure_value_mapping = {}
    # Retrieve the classification that is used by the hazard layer.
    vector_hazard_classification = self.hazard.keyword(
        'vector_hazard_classification')
    # Get the dictionary that contains the definition of the classification
    vector_hazard_classification = definition(vector_hazard_classification)
    # Get the list classes in the classification
    vector_hazard_classes = vector_hazard_classification['classes']
    # Iterate over vector hazard classes
    hazard_classes = []
    for vector_hazard_class in vector_hazard_classes:
        # Check if the key of class exist in hazard_class_mapping
        if vector_hazard_class['key'] in self.hazard_class_mapping.keys():
            # Replace the key with the name as we need to show the human
            # friendly name in the report.
            self.hazard_class_mapping[vector_hazard_class['name']] = \
                self.hazard_class_mapping.pop(vector_hazard_class['key'])
            # Adding the class name as a key in affected_building
            hazard_classes.append(vector_hazard_class['name'])
    hazard_zone_attribute_index = self.hazard.layer.fieldNameIndex(
        self.hazard_class_attribute)
    # Check if hazard_zone_attribute exists in hazard_layer
    if hazard_zone_attribute_index < 0:
        message = (
            'Hazard data %s does not contain expected attribute %s ' %
            (self.hazard.layer.name(), self.hazard_class_attribute))
        # noinspection PyExceptionInherit
        raise InaSAFEError(message)
    # Hazard zone categories from hazard layer
    unique_values = self.hazard.layer.uniqueValues(
        hazard_zone_attribute_index)
    # Values might be integer or float, we should have unicode. #2626
    self.hazard_zones = [get_unicode(val) for val in unique_values]
    self.init_report_var(hazard_classes)
    wgs84_extent = QgsRectangle(self.requested_extent[0],
                                self.requested_extent[1],
                                self.requested_extent[2],
                                self.requested_extent[3])
    # Run interpolation function for polygon2polygon
    interpolated_layer = interpolate_polygon_polygon(
        self.hazard.layer, self.exposure.layer, wgs84_extent)
    # Add the target attribute that will hold the hazard class per feature.
    new_field = QgsField(self.target_field, QVariant.String)
    interpolated_layer.dataProvider().addAttributes([new_field])
    interpolated_layer.updateFields()
    target_field_index = interpolated_layer.fieldNameIndex(
        self.target_field)
    changed_values = {}
    if interpolated_layer.featureCount() < 1:
        raise ZeroImpactException()
    # Extract relevant interpolated data
    for feature in interpolated_layer.getFeatures():
        # Get the hazard value based on the value mapping in keyword
        hazard_value = get_key_for_value(
            feature[self.hazard_class_attribute],
            self.hazard_class_mapping)
        if not hazard_value:
            hazard_value = self._not_affected_value
        changed_values[feature.id()] = {target_field_index: hazard_value}
        usage = feature[self.exposure_class_attribute]
        usage = main_type(usage, exposure_value_mapping)
        affected = False
        if hazard_value in self.hazard_class_mapping.keys():
            affected = True
        self.classify_feature(hazard_value, usage, affected)
    interpolated_layer.dataProvider().changeAttributeValues(changed_values)
    self.reorder_dictionaries()
    # Create style
    categories = self.affected_buildings.keys()
    categories.append(self._not_affected_value)
    colours = color_ramp(len(categories))
    style_classes = []
    for i, hazard_zone in enumerate(self.affected_buildings.keys()):
        style_class = dict()
        style_class['label'] = tr(hazard_zone)
        style_class['transparency'] = 0
        style_class['value'] = hazard_zone
        style_class['size'] = 1
        style_class['colour'] = colours[i]
        style_classes.append(style_class)
    # Override style info with new classes and name
    style_info = dict(target_field=self.target_field,
                      style_classes=style_classes,
                      style_type='categorizedSymbol')
    impact_data = self.generate_data()
    extra_keywords = {
        'target_field': self.target_field,
        'map_title': self.map_title(),
        'legend_notes': self.metadata().key('legend_notes'),
        'legend_units': self.metadata().key('legend_units'),
        'legend_title': self.metadata().key('legend_title')
    }
    impact_layer_keywords = self.generate_impact_keywords(extra_keywords)
    # Create vector layer and return
    impact_layer = Vector(data=interpolated_layer,
                          name=self.map_title(),
                          keywords=impact_layer_keywords,
                          style_info=style_info)
    impact_layer.impact_data = impact_data
    self._impact = impact_layer
    return impact_layer
def run(self):
    """Risk plugin for classified polygon hazard on building/structure.

    Counts number of building exposed to each hazard zones.

    :returns: Impact vector layer building exposed to each hazard zones.
        Table with number of buildings affected
    :rtype: Vector
    """
    # NOTE(review): indentation reconstructed from a flattened source —
    # verify block nesting against the original impact function module.
    # Value from layer's keywords
    self.hazard_class_attribute = self.hazard.keyword('field')
    self.hazard_class_mapping = self.hazard.keyword('value_map')
    self.exposure_class_attribute = self.exposure.keyword(
        'structure_class_field')
    try:
        exposure_value_mapping = self.exposure.keyword('value_mapping')
    except KeywordNotFoundError:
        # Generic IF, the keyword might not be defined base.py
        exposure_value_mapping = {}
    # Retrieve the classification that is used by the hazard layer.
    vector_hazard_classification = self.hazard.keyword(
        'vector_hazard_classification')
    # Get the dictionary that contains the definition of the classification
    vector_hazard_classification = definition(vector_hazard_classification)
    # Get the list classes in the classification
    vector_hazard_classes = vector_hazard_classification['classes']
    # Iterate over vector hazard classes
    hazard_classes = []
    for vector_hazard_class in vector_hazard_classes:
        # Check if the key of class exist in hazard_class_mapping
        if vector_hazard_class['key'] in self.hazard_class_mapping.keys():
            # Replace the key with the name as we need to show the human
            # friendly name in the report.
            self.hazard_class_mapping[vector_hazard_class['name']] = \
                self.hazard_class_mapping.pop(vector_hazard_class['key'])
            # Adding the class name as a key in affected_building
            hazard_classes.append(vector_hazard_class['name'])
    hazard_zone_attribute_index = self.hazard.layer.fieldNameIndex(
        self.hazard_class_attribute)
    # Check if hazard_zone_attribute exists in hazard_layer
    if hazard_zone_attribute_index < 0:
        message = (
            'Hazard data %s does not contain expected attribute %s ' %
            (self.hazard.layer.name(), self.hazard_class_attribute))
        # noinspection PyExceptionInherit
        raise InaSAFEError(message)
    # Hazard zone categories from hazard layer
    unique_values = self.hazard.layer.uniqueValues(
        hazard_zone_attribute_index)
    # Values might be integer or float, we should have unicode. #2626
    self.hazard_zones = [get_unicode(val) for val in unique_values]
    self.init_report_var(hazard_classes)
    wgs84_extent = QgsRectangle(
        self.requested_extent[0], self.requested_extent[1],
        self.requested_extent[2], self.requested_extent[3])
    # Run interpolation function for polygon2polygon
    interpolated_layer = interpolate_polygon_polygon(
        self.hazard.layer, self.exposure.layer, wgs84_extent)
    # Add the target attribute that will hold the hazard class per feature.
    new_field = QgsField(self.target_field, QVariant.String)
    interpolated_layer.dataProvider().addAttributes([new_field])
    interpolated_layer.updateFields()
    target_field_index = interpolated_layer.fieldNameIndex(
        self.target_field)
    changed_values = {}
    if interpolated_layer.featureCount() < 1:
        raise ZeroImpactException()
    # Extract relevant interpolated data
    for feature in interpolated_layer.getFeatures():
        # Get the hazard value based on the value mapping in keyword
        hazard_value = get_key_for_value(
            feature[self.hazard_class_attribute],
            self.hazard_class_mapping)
        if not hazard_value:
            hazard_value = self._not_affected_value
        changed_values[feature.id()] = {target_field_index: hazard_value}
        usage = feature[self.exposure_class_attribute]
        usage = main_type(usage, exposure_value_mapping)
        affected = False
        if hazard_value in self.hazard_class_mapping.keys():
            affected = True
        self.classify_feature(hazard_value, usage, affected)
    interpolated_layer.dataProvider().changeAttributeValues(changed_values)
    self.reorder_dictionaries()
    # Create style
    categories = self.affected_buildings.keys()
    categories.append(self._not_affected_value)
    colours = color_ramp(len(categories))
    style_classes = []
    for i, hazard_zone in enumerate(self.affected_buildings.keys()):
        style_class = dict()
        style_class['label'] = tr(hazard_zone)
        style_class['transparency'] = 0
        style_class['value'] = hazard_zone
        style_class['size'] = 1
        style_class['colour'] = colours[i]
        style_classes.append(style_class)
    # Override style info with new classes and name
    style_info = dict(
        target_field=self.target_field,
        style_classes=style_classes,
        style_type='categorizedSymbol'
    )
    impact_data = self.generate_data()
    extra_keywords = {
        'target_field': self.target_field,
        'map_title': self.map_title(),
        'legend_notes': self.metadata().key('legend_notes'),
        'legend_units': self.metadata().key('legend_units'),
        'legend_title': self.metadata().key('legend_title')
    }
    impact_layer_keywords = self.generate_impact_keywords(extra_keywords)
    # Create vector layer and return
    impact_layer = Vector(
        data=interpolated_layer,
        name=self.map_title(),
        keywords=impact_layer_keywords,
        style_info=style_info)
    impact_layer.impact_data = impact_data
    self._impact = impact_layer
    return impact_layer
def get_keywords(self):
    """Obtain the state of the dialog as a keywords dict.

    Walks every completed wizard step and collects its selection into a
    flat keywords dictionary.  Steps that have no selection simply
    contribute nothing, so the result only contains keys the user set.

    :returns: Keywords reflecting the state of the dialog.
    :rtype: dict
    """
    keywords = {}
    keywords['layer_geometry'] = self.get_layer_geometry_id()
    if self.step_kw_purpose.selected_purpose():
        keywords['layer_purpose'] = self.step_kw_purpose.\
            selected_purpose()['key']
        # Aggregation layers carry extra attribute keywords of their own.
        if keywords['layer_purpose'] == 'aggregation':
            keywords.update(
                self.step_kw_aggregation.get_aggregation_attributes())
    if self.step_kw_subcategory.selected_subcategory():
        # NOTE(review): assumes a purpose is always selected whenever a
        # subcategory is — confirm the wizard enforces this ordering.
        key = self.step_kw_purpose.selected_purpose()['key']
        keywords[key] = self.step_kw_subcategory.\
            selected_subcategory()['key']
    if self.step_kw_hazard_category.selected_hazard_category():
        keywords['hazard_category'] \
            = self.step_kw_hazard_category.\
            selected_hazard_category()['key']
    if self.step_kw_layermode.selected_layermode():
        keywords['layer_mode'] = self.step_kw_layermode.\
            selected_layermode()['key']
    if self.step_kw_unit.selected_unit():
        # The unit keyword name differs for hazard vs exposure layers.
        if self.step_kw_purpose.selected_purpose() == layer_purpose_hazard:
            key = continuous_hazard_unit['key']
        else:
            key = exposure_unit['key']
        keywords[key] = self.step_kw_unit.selected_unit()['key']
    if self.step_kw_resample.selected_allowresampling() is not None:
        # Legacy and/or idiom: maps True -> 'true', False -> 'false'
        # (keywords are stored as strings).
        keywords['allow_resampling'] = (
            self.step_kw_resample.selected_allowresampling() and
            'true' or 'false')
    if self.step_kw_field.lstFields.currentItem():
        field_keyword = self.field_keyword_for_the_layer()
        keywords[field_keyword] = self.step_kw_field.\
            lstFields.currentItem().text()
    if self.step_kw_classification.selected_classification():
        # Classification keyword name encodes geometry and purpose,
        # e.g. 'raster_hazard_classification'.
        geom = 'raster' if is_raster_layer(self.layer) else 'vector'
        key = '%s_%s_classification' % (
            geom, self.step_kw_purpose.selected_purpose()['key'])
        keywords[key] = self.step_kw_classification.\
            selected_classification()['key']
    value_map = self.step_kw_classify.selected_mapping()
    if value_map:
        if self.step_kw_classification.selected_classification():
            # hazard mapping
            keyword = 'value_map'
        else:
            # exposure mapping
            keyword = 'value_mapping'
        # Stored JSON-serialised so the keywords file stays plain text.
        keywords[keyword] = json.dumps(value_map)
    name_field = self.step_kw_name_field.selected_field()
    if name_field:
        keywords['name_field'] = name_field
    population_field = self.step_kw_population_field.selected_field()
    if population_field:
        keywords['population_field'] = population_field
    extra_keywords = self.step_kw_extrakeywords.selected_extra_keywords()
    for key in extra_keywords:
        keywords[key] = extra_keywords[key]
    # Source metadata: each free-text field is only stored when non-empty,
    # normalised to unicode.
    if self.step_kw_source.leSource.text():
        keywords['source'] = get_unicode(
            self.step_kw_source.leSource.text())
    if self.step_kw_source.leSource_url.text():
        keywords['url'] = get_unicode(
            self.step_kw_source.leSource_url.text())
    if self.step_kw_source.leSource_scale.text():
        keywords['scale'] = get_unicode(
            self.step_kw_source.leSource_scale.text())
    if self.step_kw_source.ckbSource_date.isChecked():
        keywords['date'] = self.step_kw_source.dtSource_date.dateTime()
    if self.step_kw_source.leSource_license.text():
        keywords['license'] = get_unicode(
            self.step_kw_source.leSource_license.text())
    if self.step_kw_title.leTitle.text():
        keywords['title'] = get_unicode(self.step_kw_title.leTitle.text())
    return keywords
def get_keywords(self):
    """Obtain the state of the dialog as a keywords dict.

    Collects the selection of every completed wizard step into a flat
    keywords dictionary, plus nested ``inasafe_fields`` /
    ``inasafe_default_values`` sub-dicts for field mappings.

    :returns: Keywords reflecting the state of the dialog.
    :rtype: dict
    """
    keywords = {}
    inasafe_fields = {}
    keywords['layer_geometry'] = self.get_layer_geometry_key()
    if self.step_kw_purpose.selected_purpose():
        keywords['layer_purpose'] = self.step_kw_purpose.\
            selected_purpose()['key']
    if self.step_kw_subcategory.selected_subcategory():
        # NOTE(review): assumes a purpose is always selected whenever a
        # subcategory is — confirm the wizard enforces this ordering.
        key = self.step_kw_purpose.selected_purpose()['key']
        keywords[key] = self.step_kw_subcategory.\
            selected_subcategory()['key']
    if self.get_layer_geometry_key() == layer_geometry_raster['key']:
        # Rasters additionally record which band the user picked.
        if self.step_kw_band_selector.selected_band():
            keywords['active_band'] = self.step_kw_band_selector.\
                selected_band()
    # NOTE(review): if no purpose was selected above, this raises
    # KeyError on 'layer_purpose' — verify the wizard guarantees a
    # purpose before this method runs.
    if keywords['layer_purpose'] == layer_purpose_hazard['key']:
        if self.step_kw_hazard_category.selected_hazard_category():
            keywords['hazard_category'] \
                = self.step_kw_hazard_category.\
                selected_hazard_category()['key']
    if self.step_kw_layermode.selected_layermode():
        keywords['layer_mode'] = self.step_kw_layermode.\
            selected_layermode()['key']
    if self.step_kw_unit.selected_unit():
        # The unit keyword name differs for hazard vs exposure layers.
        if self.step_kw_purpose.selected_purpose() == layer_purpose_hazard:
            key = continuous_hazard_unit['key']
        else:
            key = exposure_unit['key']
        keywords[key] = self.step_kw_unit.selected_unit()['key']
    if self.step_kw_field.selected_fields():
        field_key = self.field_keyword_for_the_layer()
        inasafe_fields[field_key] = self.step_kw_field.selected_fields()
    if self.step_kw_classification.selected_classification():
        keywords['classification'] = self.step_kw_classification.\
            selected_classification()['key']
    if keywords['layer_purpose'] == layer_purpose_hazard['key']:
        # Hazard layers may carry one classification per exposure type.
        multi_classifications = self.step_kw_multi_classifications.\
            get_current_state()
        value_maps = multi_classifications.get('value_maps')
        if value_maps is not None:
            keywords['value_maps'] = value_maps
        thresholds = multi_classifications.get('thresholds')
        if thresholds is not None:
            keywords['thresholds'] = thresholds
    else:
        # Exposure layers: a single thresholds or value_map keyword,
        # depending on whether the data is continuous or classified.
        if self.step_kw_layermode.selected_layermode():
            layer_mode = self.step_kw_layermode.selected_layermode()
            if layer_mode == layer_mode_continuous:
                thresholds = self.step_kw_threshold.get_threshold()
                if thresholds:
                    keywords['thresholds'] = thresholds
            elif layer_mode == layer_mode_classified:
                value_map = self.step_kw_classify.selected_mapping()
                if value_map:
                    keywords['value_map'] = value_map
    # Source metadata: each free-text field is only stored when non-empty,
    # normalised to unicode.
    if self.step_kw_source.leSource.text():
        keywords['source'] = get_unicode(
            self.step_kw_source.leSource.text())
    if self.step_kw_source.leSource_url.text():
        keywords['url'] = get_unicode(
            self.step_kw_source.leSource_url.text())
    if self.step_kw_source.leSource_scale.text():
        keywords['scale'] = get_unicode(
            self.step_kw_source.leSource_scale.text())
    if self.step_kw_source.ckbSource_date.isChecked():
        keywords['date'] = self.step_kw_source.dtSource_date.dateTime()
    if self.step_kw_source.leSource_license.text():
        keywords['license'] = get_unicode(
            self.step_kw_source.leSource_license.text())
    if self.step_kw_title.leTitle.text():
        keywords['title'] = get_unicode(self.step_kw_title.leTitle.text())
    # Merge field selections from the dedicated field-mapping steps.
    inasafe_fields.update(self.step_kw_inasafe_fields.get_inasafe_fields())
    inasafe_fields.update(
        self.step_kw_default_inasafe_fields.get_inasafe_fields())
    inasafe_fields.update(
        self.step_kw_fields_mapping.get_field_mapping()['fields'])
    if inasafe_fields:
        keywords['inasafe_fields'] = inasafe_fields
    inasafe_default_values = {}
    if keywords['layer_geometry'] == layer_geometry_raster['key']:
        pass
        # Notes(IS): Skipped assigning raster inasafe default value for
        # now.
        # inasafe_default_values = self.\
        #     step_kw_inasafe_raster_default_values.\
        #     get_inasafe_default_values()
    else:
        inasafe_default_values.update(
            self.step_kw_default_inasafe_fields.get_inasafe_default_values(
            ))
        inasafe_default_values.update(
            self.step_kw_fields_mapping.get_field_mapping()['values'])
    if inasafe_default_values:
        keywords['inasafe_default_values'] = inasafe_default_values
    return keywords
def run(self):
    """Risk plugin for classified polygon hazard on building/structure.

    Counts number of building exposed to each hazard zones.

    :returns: Impact vector layer building exposed to each hazard zones.
        Table with number of buildings affected
    :rtype: Vector

    :raises: InaSAFEError if the hazard layer lacks the expected class
        attribute, ZeroImpactException if no features intersect.
    """
    self.validate()
    self.prepare()

    self.provenance.append_step(
        'Calculating Step',
        'Impact function is calculating the impact.')

    # Value from layer's keywords
    self.hazard_class_attribute = self.hazard.keyword('field')
    self.hazard_class_mapping = self.hazard.keyword('value_map')
    # Try to get the value from keyword, if not exist, it will not fail,
    # but use the old get_osm_building_usage
    try:
        self.exposure_class_attribute = self.exposure.keyword(
            'structure_class_field')
    except KeywordNotFoundError:
        self.exposure_class_attribute = None

    # Retrieve the classification that is used by the hazard layer.
    vector_hazard_classification = self.hazard.keyword(
        'vector_hazard_classification')
    # Get the dictionary that contains the definition of the classification
    vector_hazard_classification = definition(vector_hazard_classification)
    # Get the list classes in the classification
    vector_hazard_classes = vector_hazard_classification['classes']

    # Initialize OrderedDict of affected buildings
    self.affected_buildings = OrderedDict()
    # Iterate over vector hazard classes
    for vector_hazard_class in vector_hazard_classes:
        # Check if the key of class exist in hazard_class_mapping
        if vector_hazard_class['key'] in self.hazard_class_mapping.keys():
            # Replace the key with the name as we need to show the human
            # friendly name in the report.
            self.hazard_class_mapping[vector_hazard_class['name']] = \
                self.hazard_class_mapping.pop(vector_hazard_class['key'])
            # Adding the class name as a key in affected_building
            self.affected_buildings[vector_hazard_class['name']] = {}

    hazard_zone_attribute_index = self.hazard.layer.fieldNameIndex(
        self.hazard_class_attribute)

    # Check if hazard_zone_attribute exists in hazard_layer
    if hazard_zone_attribute_index < 0:
        message = (
            'Hazard data %s does not contain expected attribute %s ' %
            (self.hazard.layer.name(), self.hazard_class_attribute))
        # noinspection PyExceptionInherit
        raise InaSAFEError(message)

    # Hazard zone categories from hazard layer
    unique_values = self.hazard.layer.uniqueValues(
        hazard_zone_attribute_index)
    # Values might be integer or float, we should have unicode. #2626
    self.hazard_zones = [get_unicode(val) for val in unique_values]

    self.buildings = {}

    wgs84_extent = QgsRectangle(
        self.requested_extent[0], self.requested_extent[1],
        self.requested_extent[2], self.requested_extent[3])

    # Run interpolation function for polygon2polygon
    interpolated_layer = interpolate_polygon_polygon(
        self.hazard.layer, self.exposure.layer, wgs84_extent)

    # Add the target field that will carry the per-feature hazard class.
    new_field = QgsField(self.target_field, QVariant.String)
    interpolated_layer.dataProvider().addAttributes([new_field])
    interpolated_layer.updateFields()

    attribute_names = [
        field.name() for field in interpolated_layer.pendingFields()]
    target_field_index = interpolated_layer.fieldNameIndex(
        self.target_field)
    changed_values = {}

    if interpolated_layer.featureCount() < 1:
        raise ZeroImpactException()

    # Extract relevant interpolated data
    for feature in interpolated_layer.getFeatures():
        # Get the hazard value based on the value mapping in keyword
        hazard_value = get_key_for_value(
            feature[self.hazard_class_attribute],
            self.hazard_class_mapping)
        if not hazard_value:
            hazard_value = self._not_affected_value
        changed_values[feature.id()] = {target_field_index: hazard_value}

        if (self.exposure_class_attribute and
                self.exposure_class_attribute in attribute_names):
            usage = feature[self.exposure_class_attribute]
        else:
            # Fall back to the legacy OSM attribute sniffing.
            usage = get_osm_building_usage(attribute_names, feature)

        if usage is None:
            usage = tr('Unknown')

        if usage not in self.buildings:
            # First time we see this usage: register a zeroed counter for
            # every hazard category so the report table is rectangular.
            self.buildings[usage] = 0
            for category in self.hazard_class_mapping.keys():
                self.affected_buildings[category][usage] = OrderedDict([
                    (tr('Buildings Affected'), 0)])

        self.buildings[usage] += 1

        if hazard_value in self.hazard_class_mapping.keys():
            self.affected_buildings[hazard_value][usage][
                tr('Buildings Affected')] += 1

    interpolated_layer.dataProvider().changeAttributeValues(changed_values)

    # Lump small entries and 'unknown' into 'other' category
    # Building threshold #2468
    postprocessors = self.parameters['postprocessors']
    building_postprocessors = postprocessors['BuildingType'][0]
    self.building_report_threshold = building_postprocessors.value[0].value
    self._consolidate_to_other()

    # Generate simple impact report
    impact_summary = impact_table = self.html_report()

    # Create style
    categories = self.affected_buildings.keys()
    categories.append(self._not_affected_value)
    colours = color_ramp(len(categories))
    style_classes = []

    # Use enumerate instead of a manual counter.
    for i, hazard_zone in enumerate(self.affected_buildings.keys()):
        style_class = dict()
        style_class['label'] = tr(hazard_zone)
        style_class['transparency'] = 0
        style_class['value'] = hazard_zone
        style_class['size'] = 1
        style_class['colour'] = colours[i]
        style_classes.append(style_class)

    # Override style info with new classes and name
    style_info = dict(
        target_field=self.target_field,
        style_classes=style_classes,
        style_type='categorizedSymbol')

    # For printing map purpose
    map_title = tr('Buildings affected by each hazard zone')
    legend_title = tr('Building count')
    legend_units = tr('(building)')
    # Interpolate AFTER tr() so the untranslated template is what gets
    # looked up in the message catalog.
    legend_notes = tr(
        'Thousand separator is represented by %s') % (
            get_thousand_separator())

    extra_keywords = {
        'impact_summary': impact_summary,
        'impact_table': impact_table,
        'target_field': self.target_field,
        'map_title': map_title,
        'legend_notes': legend_notes,
        'legend_units': legend_units,
        'legend_title': legend_title}

    self.set_if_provenance()

    impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

    # Create vector layer and return
    impact_layer = Vector(
        data=interpolated_layer,
        name=tr('Buildings affected by each hazard zone'),
        keywords=impact_layer_keywords,
        style_info=style_info)
    self._impact = impact_layer
    return impact_layer
def run(self, layers=None):
    """Risk plugin for flood population evacuation.

    :param layers: List of layers expected to contain
        * hazard_layer : Vector polygon layer of flood depth
        * exposure_layer : Raster layer of population data on the same
          grid as hazard_layer

    Counts number of people exposed to areas identified as flood prone

    :returns: Map of population exposed to flooding
        Table with number of people evacuated and supplies required.
    :rtype: tuple

    :raises: Exception if the hazard is not polygon data,
        ZeroImpactException when no population is affected.
    """
    self.validate()
    self.prepare(layers)

    # Get the IF parameters
    affected_field = self.parameters['affected_field']
    affected_value = self.parameters['affected_value']
    evacuation_percentage = self.parameters['evacuation_percentage']

    # Identify hazard and exposure layers
    hazard_layer = self.hazard
    exposure_layer = self.exposure

    # Check that hazard is polygon type
    if not hazard_layer.is_polygon_data:
        message = (
            'Input hazard must be a polygon layer. I got %s with layer '
            'type %s' % (
                hazard_layer.get_name(),
                hazard_layer.get_geometry_name()))
        raise Exception(message)

    # Remember whether the exposure grid has no-data cells so the report
    # can warn about them.
    nan_warning = False
    if has_no_data(exposure_layer.get_data(nan=True)):
        nan_warning = True

    # Check that affected field exists in hazard layer
    if affected_field in hazard_layer.get_attribute_names():
        self.use_affected_field = True

    # Run interpolation function for polygon2raster
    interpolated_layer, covered_exposure = \
        assign_hazard_values_to_exposure_data(
            hazard_layer,
            exposure_layer,
            attribute_name=self.target_field)

    # Data for manipulating the covered_exposure layer
    new_covered_exposure_data = covered_exposure.get_data()
    covered_exposure_top_left = numpy.array([
        covered_exposure.get_geotransform()[0],
        covered_exposure.get_geotransform()[3]])
    covered_exposure_dimension = numpy.array([
        covered_exposure.get_geotransform()[1],
        covered_exposure.get_geotransform()[5]])

    # Count affected population per polygon, per category and total
    total_affected_population = 0
    for attr in interpolated_layer.get_data():
        affected = False
        if self.use_affected_field:
            row_affected_value = attr[affected_field]
            if row_affected_value is not None:
                if isinstance(row_affected_value, Number):
                    # Compare numerically, coercing the configured value
                    # to the row's own numeric type.
                    type_func = type(row_affected_value)
                    affected = row_affected_value == type_func(
                        affected_value)
                else:
                    # Case-insensitive unicode comparison for strings.
                    affected = \
                        get_unicode(affected_value).lower() == \
                        get_unicode(row_affected_value).lower()
        else:
            # assume that every polygon is affected (see #816)
            affected = True

        if affected:
            # Get population at this location
            population = attr[self.target_field]
            if not numpy.isnan(population):
                population = float(population)
                total_affected_population += population
        else:
            # If it's not affected, set the value of the impact layer to 0
            grid_point = attr['grid_point']
            index = numpy.floor(
                (grid_point - covered_exposure_top_left) / (
                    covered_exposure_dimension)).astype(int)
            new_covered_exposure_data[index[1]][index[0]] = 0

    # Estimate number of people in need of evacuation
    evacuated = (
        total_affected_population * evacuation_percentage / 100.0)

    total_population = int(
        numpy.nansum(exposure_layer.get_data(scaling=False)))

    minimum_needs = [
        parameter.serialize() for parameter in
        self.parameters['minimum needs']
    ]

    # Rounding
    total_affected_population, rounding = population_rounding_full(
        total_affected_population)
    total_population = population_rounding(total_population)
    evacuated, rounding_evacuated = population_rounding_full(evacuated)

    # Generate impact report for the pdf map
    table_body, total_needs = self._tabulate(
        total_affected_population, evacuated, minimum_needs,
        self.question, rounding, rounding_evacuated)

    impact_table = Table(table_body).toNewlineFreeString()

    self._tabulate_action_checklist(
        table_body, total_population, nan_warning)

    impact_summary = Table(table_body).toNewlineFreeString()

    # Create style
    colours = [
        '#FFFFFF', '#38A800', '#79C900', '#CEED00',
        '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
    classes = create_classes(
        new_covered_exposure_data.flat[:], len(colours))

    # check for zero impact
    if min(classes) == 0 == max(classes):
        table_body = [
            self.question,
            TableRow(
                [tr('People affected'),
                 '%s' % format_int(total_affected_population)],
                header=True)]
        message = Table(table_body).toNewlineFreeString()
        raise ZeroImpactException(message)

    interval_classes = humanize_class(classes)

    # Define style info for output polygons showing population counts
    style_classes = []
    for i in xrange(len(colours)):
        style_class = dict()
        # Interpolate AFTER tr() so the untranslated template is what
        # gets looked up in the message catalog.
        if i == 1:
            label = create_label(
                interval_classes[i],
                tr('Low Population [%i people/cell]') % classes[i])
        elif i == 4:
            label = create_label(
                interval_classes[i],
                tr('Medium Population [%i people/cell]') % classes[i])
        elif i == 7:
            label = create_label(
                interval_classes[i],
                tr('High Population [%i people/cell]') % classes[i])
        else:
            label = create_label(interval_classes[i])

        # Hide the zero class completely.
        if i == 0:
            transparency = 100
        else:
            transparency = 0

        style_class['label'] = label
        style_class['quantity'] = classes[i]
        style_class['colour'] = colours[i]
        style_class['transparency'] = transparency
        style_classes.append(style_class)

    # Override style info with new classes and name
    style_info = dict(
        target_field=None,
        style_classes=style_classes,
        style_type='rasterStyle')

    # For printing map purpose
    map_title = tr('People affected by flood prone areas')
    legend_notes = tr('Thousand separator is represented by \'.\'')
    legend_units = tr('(people per polygon)')
    legend_title = tr('Population Count')

    # Create raster layer and return
    impact_layer = Raster(
        data=new_covered_exposure_data,
        projection=covered_exposure.get_projection(),
        geotransform=covered_exposure.get_geotransform(),
        name=tr('People affected by flood prone areas'),
        keywords={
            'impact_summary': impact_summary,
            'impact_table': impact_table,
            'target_field': self.target_field,
            'map_title': map_title,
            'legend_notes': legend_notes,
            'legend_units': legend_units,
            'legend_title': legend_title,
            'affected_population': total_affected_population,
            'total_population': total_population,
            'total_needs': total_needs},
        style_info=style_info)
    self._impact = impact_layer
    return impact_layer
def run(self, layers=None):
    """Risk plugin for flood population evacuation.

    :param layers: List of layers expected to contain
        * hazard_layer : Vector polygon layer of flood depth
        * exposure_layer : Raster layer of population data on the same
          grid as hazard_layer

    Counts number of people exposed to areas identified as flood prone

    :returns: Map of population exposed to flooding
        Table with number of people evacuated and supplies required.
    :rtype: tuple

    :raises: Exception if the hazard is not polygon data,
        ZeroImpactException when no population is affected.
    """
    self.validate()
    self.prepare(layers)

    # Get the IF parameters
    affected_field = self.parameters['affected_field']
    affected_value = self.parameters['affected_value']
    evacuation_percentage = self.parameters['evacuation_percentage']

    # Identify hazard and exposure layers
    hazard_layer = self.hazard
    exposure_layer = self.exposure

    # Check that hazard is polygon type
    if not hazard_layer.is_polygon_data:
        message = (
            'Input hazard must be a polygon layer. I got %s with layer '
            'type %s' % (
                hazard_layer.get_name(),
                hazard_layer.get_geometry_name()))
        raise Exception(message)

    # Remember whether the exposure grid has no-data cells so the report
    # can warn about them.
    nan_warning = False
    if has_no_data(exposure_layer.get_data(nan=True)):
        nan_warning = True

    # Check that affected field exists in hazard layer
    if affected_field in hazard_layer.get_attribute_names():
        self.use_affected_field = True

    # Run interpolation function for polygon2raster
    interpolated_layer, covered_exposure = \
        assign_hazard_values_to_exposure_data(
            hazard_layer,
            exposure_layer,
            attribute_name=self.target_field)

    # Data for manipulating the covered_exposure layer
    new_covered_exposure_data = covered_exposure.get_data()
    covered_exposure_top_left = numpy.array([
        covered_exposure.get_geotransform()[0],
        covered_exposure.get_geotransform()[3]])
    covered_exposure_dimension = numpy.array([
        covered_exposure.get_geotransform()[1],
        covered_exposure.get_geotransform()[5]])

    # Count affected population per polygon, per category and total
    total_affected_population = 0
    for attr in interpolated_layer.get_data():
        affected = False
        if self.use_affected_field:
            row_affected_value = attr[affected_field]
            if row_affected_value is not None:
                if isinstance(row_affected_value, Number):
                    # Compare numerically, coercing the configured value
                    # to the row's own numeric type.
                    type_func = type(row_affected_value)
                    affected = row_affected_value == type_func(
                        affected_value)
                else:
                    # Case-insensitive unicode comparison for strings.
                    affected = \
                        get_unicode(affected_value).lower() == \
                        get_unicode(row_affected_value).lower()
        else:
            # assume that every polygon is affected (see #816)
            affected = True

        if affected:
            # Get population at this location
            population = attr[self.target_field]
            if not numpy.isnan(population):
                population = float(population)
                total_affected_population += population
        else:
            # If it's not affected, set the value of the impact layer to 0
            grid_point = attr['grid_point']
            index = numpy.floor(
                (grid_point - covered_exposure_top_left) / (
                    covered_exposure_dimension)).astype(int)
            new_covered_exposure_data[index[1]][index[0]] = 0

    # Estimate number of people in need of evacuation
    evacuated = (
        total_affected_population * evacuation_percentage / 100.0)

    total_population = int(
        numpy.nansum(exposure_layer.get_data(scaling=False)))

    minimum_needs = [
        parameter.serialize() for parameter in
        self.parameters['minimum needs']
    ]

    # Rounding
    total_affected_population, rounding = population_rounding_full(
        total_affected_population)
    total_population = population_rounding(total_population)
    evacuated, rounding_evacuated = population_rounding_full(evacuated)

    # Generate impact report for the pdf map
    table_body, total_needs = self._tabulate(
        total_affected_population, evacuated, minimum_needs,
        self.question, rounding, rounding_evacuated)

    impact_table = Table(table_body).toNewlineFreeString()

    self._tabulate_action_checklist(
        table_body, total_population, nan_warning)

    impact_summary = Table(table_body).toNewlineFreeString()

    # Create style
    colours = [
        '#FFFFFF', '#38A800', '#79C900', '#CEED00',
        '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
    classes = create_classes(
        new_covered_exposure_data.flat[:], len(colours))

    # check for zero impact
    if min(classes) == 0 == max(classes):
        table_body = [
            self.question,
            TableRow(
                [tr('People affected'),
                 '%s' % format_int(total_affected_population)],
                header=True)]
        message = Table(table_body).toNewlineFreeString()
        raise ZeroImpactException(message)

    interval_classes = humanize_class(classes)

    # Define style info for output polygons showing population counts
    style_classes = []
    for i in xrange(len(colours)):
        style_class = dict()
        # Interpolate AFTER tr() so the untranslated template is what
        # gets looked up in the message catalog.
        if i == 1:
            label = create_label(
                interval_classes[i],
                tr('Low Population [%i people/cell]') % classes[i])
        elif i == 4:
            label = create_label(
                interval_classes[i],
                tr('Medium Population [%i people/cell]') % classes[i])
        elif i == 7:
            label = create_label(
                interval_classes[i],
                tr('High Population [%i people/cell]') % classes[i])
        else:
            label = create_label(interval_classes[i])

        # Hide the zero class completely.
        if i == 0:
            transparency = 100
        else:
            transparency = 0

        style_class['label'] = label
        style_class['quantity'] = classes[i]
        style_class['colour'] = colours[i]
        style_class['transparency'] = transparency
        style_classes.append(style_class)

    # Override style info with new classes and name
    style_info = dict(
        target_field=None,
        style_classes=style_classes,
        style_type='rasterStyle')

    # For printing map purpose
    map_title = tr('People affected by flood prone areas')
    legend_notes = tr('Thousand separator is represented by \'.\'')
    legend_units = tr('(people per polygon)')
    legend_title = tr('Population Count')

    # Create raster layer and return
    impact_layer = Raster(
        data=new_covered_exposure_data,
        projection=covered_exposure.get_projection(),
        geotransform=covered_exposure.get_geotransform(),
        name=tr('People affected by flood prone areas'),
        keywords={
            'impact_summary': impact_summary,
            'impact_table': impact_table,
            'target_field': self.target_field,
            'map_title': map_title,
            'legend_notes': legend_notes,
            'legend_units': legend_units,
            'legend_title': legend_title,
            'affected_population': total_affected_population,
            'total_population': total_population,
            'total_needs': total_needs},
        style_info=style_info)
    self._impact = impact_layer
    return impact_layer
def run(self):
    """Risk plugin for classified polygon hazard on building/structure.

    Counts number of building exposed to each hazard zones.

    :returns: Impact vector layer building exposed to each hazard zones.
        Table with number of buildings affected
    :rtype: Vector

    :raises: InaSAFEError if the hazard layer lacks the expected class
        attribute, ZeroImpactException if no features intersect.
    """
    self.validate()
    self.prepare()

    self.provenance.append_step(
        'Calculating Step',
        'Impact function is calculating the impact.')

    # Value from layer's keywords
    self.hazard_class_attribute = self.hazard.keyword('field')
    self.hazard_class_mapping = self.hazard.keyword('value_map')
    # Try to get the value from keyword, if not exist, it will not fail,
    # but use the old get_osm_building_usage
    try:
        self.exposure_class_attribute = self.exposure.keyword(
            'structure_class_field')
    except KeywordNotFoundError:
        self.exposure_class_attribute = None

    # Retrieve the classification that is used by the hazard layer.
    vector_hazard_classification = self.hazard.keyword(
        'vector_hazard_classification')
    # Get the dictionary that contains the definition of the classification
    vector_hazard_classification = definition(vector_hazard_classification)
    # Get the list classes in the classification
    vector_hazard_classes = vector_hazard_classification['classes']

    # Initialize OrderedDict of affected buildings
    self.affected_buildings = OrderedDict()
    # Iterate over vector hazard classes
    for vector_hazard_class in vector_hazard_classes:
        # Check if the key of class exist in hazard_class_mapping
        if vector_hazard_class['key'] in self.hazard_class_mapping.keys():
            # Replace the key with the name as we need to show the human
            # friendly name in the report.
            self.hazard_class_mapping[vector_hazard_class['name']] = \
                self.hazard_class_mapping.pop(vector_hazard_class['key'])
            # Adding the class name as a key in affected_building
            self.affected_buildings[vector_hazard_class['name']] = {}

    hazard_zone_attribute_index = self.hazard.layer.fieldNameIndex(
        self.hazard_class_attribute)

    # Check if hazard_zone_attribute exists in hazard_layer
    if hazard_zone_attribute_index < 0:
        message = (
            'Hazard data %s does not contain expected attribute %s ' %
            (self.hazard.layer.name(), self.hazard_class_attribute))
        # noinspection PyExceptionInherit
        raise InaSAFEError(message)

    # Hazard zone categories from hazard layer
    unique_values = self.hazard.layer.uniqueValues(
        hazard_zone_attribute_index)
    # Values might be integer or float, we should have unicode. #2626
    self.hazard_zones = [get_unicode(val) for val in unique_values]

    self.buildings = {}

    wgs84_extent = QgsRectangle(
        self.requested_extent[0], self.requested_extent[1],
        self.requested_extent[2], self.requested_extent[3])

    # Run interpolation function for polygon2polygon
    interpolated_layer = interpolate_polygon_polygon(
        self.hazard.layer, self.exposure.layer, wgs84_extent)

    # Add the target field that will carry the per-feature hazard class.
    new_field = QgsField(self.target_field, QVariant.String)
    interpolated_layer.dataProvider().addAttributes([new_field])
    interpolated_layer.updateFields()

    attribute_names = [
        field.name() for field in interpolated_layer.pendingFields()]
    target_field_index = interpolated_layer.fieldNameIndex(
        self.target_field)
    changed_values = {}

    if interpolated_layer.featureCount() < 1:
        raise ZeroImpactException()

    # Extract relevant interpolated data
    for feature in interpolated_layer.getFeatures():
        # Get the hazard value based on the value mapping in keyword
        hazard_value = get_key_for_value(
            feature[self.hazard_class_attribute],
            self.hazard_class_mapping)
        if not hazard_value:
            hazard_value = self._not_affected_value
        changed_values[feature.id()] = {target_field_index: hazard_value}

        if (self.exposure_class_attribute and
                self.exposure_class_attribute in attribute_names):
            usage = feature[self.exposure_class_attribute]
        else:
            # Fall back to the legacy OSM attribute sniffing.
            usage = get_osm_building_usage(attribute_names, feature)

        if usage is None:
            usage = tr('Unknown')

        if usage not in self.buildings:
            # First time we see this usage: register a zeroed counter for
            # every hazard category so the report table is rectangular.
            self.buildings[usage] = 0
            for category in self.hazard_class_mapping.keys():
                self.affected_buildings[category][usage] = OrderedDict(
                    [(tr('Buildings Affected'), 0)])

        self.buildings[usage] += 1

        if hazard_value in self.hazard_class_mapping.keys():
            self.affected_buildings[hazard_value][usage][
                tr('Buildings Affected')] += 1

    interpolated_layer.dataProvider().changeAttributeValues(changed_values)

    # Lump small entries and 'unknown' into 'other' category
    # Building threshold #2468
    postprocessors = self.parameters['postprocessors']
    building_postprocessors = postprocessors['BuildingType'][0]
    self.building_report_threshold = building_postprocessors.value[0].value
    self._consolidate_to_other()

    # Generate simple impact report
    impact_summary = impact_table = self.html_report()

    # Create style
    categories = self.affected_buildings.keys()
    categories.append(self._not_affected_value)
    colours = color_ramp(len(categories))
    style_classes = []

    # Use enumerate instead of a manual counter.
    for i, hazard_zone in enumerate(self.affected_buildings.keys()):
        style_class = dict()
        style_class['label'] = tr(hazard_zone)
        style_class['transparency'] = 0
        style_class['value'] = hazard_zone
        style_class['size'] = 1
        style_class['colour'] = colours[i]
        style_classes.append(style_class)

    # Override style info with new classes and name
    style_info = dict(
        target_field=self.target_field,
        style_classes=style_classes,
        style_type='categorizedSymbol')

    # For printing map purpose
    map_title = tr('Buildings affected by each hazard zone')
    legend_title = tr('Building count')
    legend_units = tr('(building)')
    # Interpolate AFTER tr() so the untranslated template is what gets
    # looked up in the message catalog.
    legend_notes = tr(
        'Thousand separator is represented by %s') % (
            get_thousand_separator())

    extra_keywords = {
        'impact_summary': impact_summary,
        'impact_table': impact_table,
        'target_field': self.target_field,
        'map_title': map_title,
        'legend_notes': legend_notes,
        'legend_units': legend_units,
        'legend_title': legend_title}

    self.set_if_provenance()

    impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

    # Create vector layer and return
    impact_layer = Vector(
        data=interpolated_layer,
        name=tr('Buildings affected by each hazard zone'),
        keywords=impact_layer_keywords,
        style_info=style_info)
    self._impact = impact_layer
    return impact_layer
def read_keywords(keyword_filename, sublayer=None, all_blocks=False):
    """Read a keywords dictionary from a .keywords file.

    :param keyword_filename: Name of keywords file. Extension expected to
        be .keywords or .xml metadata. The format of one line is expected
        to be either string: string or string
    :type keyword_filename: str

    :param sublayer: Optional sublayer applicable only to multilayer
        formats such as sqlite or netcdf which can potentially hold more
        than one layer. The string should map to the layer group as per
        the example below. If the keywords file contains sublayer
        definitions but no sublayer was defined, the first layer group
        will be returned.
    :type sublayer: str

    :param all_blocks: Optional, defaults to False. If True will return
        a dict of dicts, where the top level dict entries each represent
        a sublayer, and the values of that dict will be dicts of keyword
        entries.
    :type all_blocks: bool

    :returns: Dictionary of keyword, value pairs. An empty dictionary is
        returned when the file does not exist (or is not a .keywords
        file); None is returned when a requested sublayer is not present.
    :rtype: dict or None

    A keyword layer with sublayers may look like this:

        [osm_buildings]
        datatype: osm
        category: exposure
        subcategory: building
        purpose: dki
        title: buildings_osm_4326

        [osm_flood]
        datatype: flood
        category: hazard
        subcategory: building
        title: flood_osm_4326

    Whereas a simple keywords file would look like this:

        datatype: flood
        category: hazard
        subcategory: building
        title: flood_osm_4326

    Blank lines are ignored. Surrounding whitespace is removed from
    values, but keys are unmodified. If a line contains no ':', the
    keyword is treated as a key with value None.
    """
    # Input checks
    basename, ext = os.path.splitext(keyword_filename)

    msg = ('Unknown extension for file %s. '
           'Expected %s.keywords or %s.xml' % (
               keyword_filename, basename, basename))
    verify(ext == '.keywords' or ext == '.xml', msg)

    # Missing file (or non-.keywords extension) yields an empty dict,
    # as documented above.
    keywords_file = os.path.isfile(keyword_filename) \
        and ext == '.keywords'
    if not keywords_file:
        return {}

    # Read all entries
    with open(keyword_filename, 'r') as fid:
        lines = fid.readlines()

    blocks = {}
    keywords = {}
    current_block = None
    first_keywords = None
    for line in lines:
        # Remove trailing (but not preceeding!) whitespace
        text = line.rstrip()
        text = get_unicode(text)

        # Ignore blank lines
        if text == '':
            continue

        # Check if it is an ini style group header, e.g. [osm_buildings]
        block_flag = re.search(r'^\[.*]$', text, re.M | re.I)

        if block_flag:
            # Write the old block if it exists - must have a current
            # block to prevent orphans
            if len(keywords) > 0 and current_block is not None:
                blocks[current_block] = keywords
            if first_keywords is None and len(keywords) > 0:
                first_keywords = keywords
            # Now set up for a new block; strip the surrounding brackets
            current_block = text[1:-1]
            # Reset the keywords each time we encounter a new block
            # until we know we are on the desired one
            keywords = {}
            continue

        if ':' not in text:
            # A bare keyword becomes a key with no value
            key = text.strip()
            val = None
        else:
            # Take key as everything up to the first ':' (unmodified),
            # value as everything after it (stripped)
            idx = text.find(':')
            key = text[:idx]
            textval = text[idx + 1:].strip()
            try:
                # Take care of python structures like
                # booleans, None, lists, dicts etc
                val = literal_eval(textval)
            except (ValueError, SyntaxError):
                # literal_eval cannot parse OrderedDict(...) reprs, so
                # evaluate the inner item list and rebuild one manually.
                if 'OrderedDict(' == textval[:12]:
                    try:
                        val = OrderedDict(
                            literal_eval(textval[12:-1]))
                    except (ValueError, SyntaxError, TypeError):
                        val = textval
                else:
                    val = textval

        # Add entry to dictionary
        keywords[key] = val

    # Write out any unfinalised block data
    if len(keywords) > 0 and current_block is not None:
        blocks[current_block] = keywords
    if first_keywords is None:
        first_keywords = keywords

    # We have now generated a structure that looks like this:
    # blocks = {'foo': {'a': 'b', 'c': 'd'},
    #           'bar': {'d': 'e', 'f': 'g'}}
    # where foo and bar are sublayers and their dicts are the sublayer
    # keywords
    if all_blocks:
        return blocks

    if sublayer is not None:
        if sublayer in blocks:
            sublayer_keywords = blocks[sublayer]
            # Consistency fix: normalise keyword_version to a string here
            # too, matching the default (no sublayer) branch below.
            if 'keyword_version' in sublayer_keywords:
                sublayer_keywords['keyword_version'] = str(
                    sublayer_keywords['keyword_version'])
            return sublayer_keywords
        # Requested sublayer is missing: preserve the historical
        # behaviour of returning None (previously an implicit
        # fall-through), now made explicit.
        return None

    # keyword_version may have been parsed by literal_eval into a float
    # (e.g. 3.2); force it back to a string for callers.
    if 'keyword_version' in first_keywords:
        first_keywords['keyword_version'] = str(
            first_keywords['keyword_version'])
    return first_keywords
def _keywords_to_string(keywords, sublayer=None):
    """Render a simple keywords dict as 'key: value' lines.

    Args:
        * keywords: A required dictionary containing the keywords to
              stringify.
        * sublayer: str optional group marker for a sub layer; when
              given, a '[sublayer]' header precedes the keyword lines.

    Returns:
        str: a String containing the rendered keywords list

    Raises:
        Any exceptions are propogated.

    .. note: Only simple keyword dicts should be passed here, not
        multilayer dicts. For example you pass a dict like this::

            {'datatype': 'osm',
             'category': 'exposure',
             'title': 'buildings_osm_4326',
             'subcategory': 'building',
             'purpose': 'dki'}

        and the following string would be returned:

            datatype: osm
            category: exposure
            title: buildings_osm_4326
            subcategory: building
            purpose: dki

        If sublayer is provided e.g.
        _keywords_to_string(keywords, sublayer='foo'), the following:

            [foo]
            datatype: osm
            category: exposure
            title: buildings_osm_4326
            subcategory: building
            purpose: dki
    """
    # Start from the group header when a sublayer is requested,
    # otherwise from an empty unicode string.
    if sublayer is None:
        result = get_unicode('')
    else:
        result = '[%s]\n' % sublayer

    # Sorted iteration keeps the rendered output deterministic for tests.
    for key in sorted(keywords.iterkeys()):
        # Each key must be a plain string without the separator character.
        verify(
            isinstance(key, basestring),
            'Key in keywords dictionary must be a string. '
            'I got %s with type %s' % (key, str(type(key))[1:-1]))
        verify(
            ':' not in key,
            'Key in keywords dictionary must not contain the ":" '
            'character. I got "%s"' % key)

        # Append the rendered entry.
        result = result + '%s: %s\n' % (key, keywords[key])

    return result