def test_pre_processors_nearby_places(self):
    """Check the condition of the nearby-places pre-processor."""
    # A building exposure must not trigger the pre-processor.
    hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
    exposure = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')

    function = ImpactFunction()
    function.exposure = exposure
    function.hazard = hazard
    function.crs = QgsCoordinateReferenceSystem(4326)
    code, msg = function.prepare()
    self.assertEqual(PREPARE_SUCCESS, code, msg)
    # The exposure is not place but buildings
    self.assertFalse(
        pre_processors_nearby_places['condition'](function))

    # An earthquake on places must trigger the pre-processor.
    hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
    exposure = load_test_vector_layer(
        'gisv4', 'exposure', 'places.geojson')

    function = ImpactFunction()
    function.exposure = exposure
    function.hazard = hazard
    function.crs = QgsCoordinateReferenceSystem(4326)
    code, msg = function.prepare()
    self.assertEqual(PREPARE_SUCCESS, code, msg)
    # EQ on places, it must be OK.
    self.assertTrue(
        pre_processors_nearby_places['condition'](function))
def test_pre_processors_earthquake_contour(self):
    """Check the condition of the earthquake-contour pre-processor."""
    # An earthquake hazard must trigger the contour pre-processor.
    hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
    exposure = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')

    function = ImpactFunction()
    function.exposure = exposure
    function.hazard = hazard
    function.crs = QgsCoordinateReferenceSystem(4326)
    code, msg = function.prepare()
    self.assertEqual(PREPARE_SUCCESS, code, msg)
    self.assertTrue(
        pre_processor_earthquake_contour['condition'](function))

    # not ok, since the hazard is flood, not earthquake
    hazard = load_test_raster_layer('hazard', 'classified_flood_20_20.asc')
    exposure = load_test_vector_layer('gisv4', 'exposure', 'places.geojson')

    function = ImpactFunction()
    function.exposure = exposure
    function.hazard = hazard
    function.crs = QgsCoordinateReferenceSystem(4326)
    code, msg = function.prepare()
    self.assertEqual(PREPARE_SUCCESS, code, msg)
    self.assertFalse(
        pre_processor_earthquake_contour['condition'](function))
def test_pre_processors_nearby_places(self):
    """Check the condition of the nearby-places pre-processor."""

    def condition(exposure_name):
        # Prepare an earthquake impact function over the given exposure
        # and evaluate the pre-processor condition on it.
        hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
        exposure = load_test_vector_layer('gisv4', 'exposure', exposure_name)
        function = ImpactFunction()
        function.exposure = exposure
        function.hazard = hazard
        function.crs = QgsCoordinateReferenceSystem(4326)
        code, msg = function.prepare()
        self.assertEqual(PREPARE_SUCCESS, code, msg)
        return pre_processors_nearby_places['condition'](function)

    # The exposure is not place but buildings
    self.assertFalse(condition('building-points.geojson'))
    # EQ on places, it must be OK.
    self.assertTrue(condition('places.geojson'))
def test_pre_processors_earthquake_contour(self):
    """Check the condition of the earthquake-contour pre-processor."""

    def condition(hazard_args, exposure_name):
        # Prepare an impact function and evaluate the contour
        # pre-processor condition against it.
        hazard = load_test_raster_layer(*hazard_args)
        exposure = load_test_vector_layer('gisv4', 'exposure', exposure_name)
        function = ImpactFunction()
        function.exposure = exposure
        function.hazard = hazard
        function.crs = QgsCoordinateReferenceSystem(4326)
        code, msg = function.prepare()
        self.assertEqual(PREPARE_SUCCESS, code, msg)
        return pre_processor_earthquake_contour['condition'](function)

    # Earthquake hazard: the contour pre-processor applies.
    self.assertTrue(condition(
        ('gisv4', 'hazard', 'earthquake.asc'), 'building-points.geojson'))
    # not ok, since the hazard is flood, not earthquake
    self.assertFalse(condition(
        ('hazard', 'classified_flood_20_20.asc'), 'places.geojson'))
def test_old_fields_keywords(self):
    """The IF is not ready with we have some wrong inasafe_fields."""
    hazard = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson', clone=True)
    aggregation = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    function = ImpactFunction()
    function.aggregation = aggregation
    function.exposure = exposure
    function.hazard = hazard

    # With the pristine keywords, the preparation must succeed.
    code, msg = function.prepare()
    self.assertEqual(PREPARE_SUCCESS, code, msg)

    # Drop one of the attributes referenced by inasafe_fields.
    exposure.startEditing()
    removed = list(exposure.keywords['inasafe_fields'].values())[0]
    exposure.deleteAttribute(exposure.fieldNameIndex(removed))
    exposure.commitChanges()

    # The keywords now reference a missing field: not ready anymore.
    code, msg = function.prepare()
    self.assertNotEqual(PREPARE_SUCCESS, code, msg)
def test_analysis_earthquake_summary(self):
    """Test we can compute summary after an EQ on population."""
    hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
    exposure = load_test_raster_layer(
        'gisv4', 'exposure', 'raster', 'population.asc')
    aggregation = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    function = ImpactFunction()
    function.hazard = hazard
    function.exposure = exposure
    function.aggregation = aggregation

    code, msg = function.prepare()
    self.assertEqual(PREPARE_SUCCESS, code, msg)
    code, msg = function.run()
    self.assertEqual(ANALYSIS_SUCCESS, code, msg)

    # Every hazard class must have its count column on the summary.
    summary = function.analysis_impacted
    classes = definition(hazard.keywords['classification'])['classes']
    for hazard_class in classes:
        column = hazard_count_field['field_name'] % hazard_class['key']
        msg = '%s is not found in the EQ summary layer.' % column
        self.assertNotEqual(-1, summary.fieldNameIndex(column), msg)

    check_inasafe_fields(function.analysis_impacted)
    check_inasafe_fields(function.aggregation_summary)
def test_profiling(self):
    """Test running impact function on test data."""
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')
    aggregation = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    function = ImpactFunction()
    function.aggregation = aggregation
    function.exposure = exposure
    function.hazard = hazard_layer

    # Running without preparing first must be reported as bad input.
    status, message = function.run()
    self.assertEqual(ANALYSIS_FAILED_BAD_INPUT, status, message)

    function.prepare()
    status, message = function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    # Every non-empty control line must appear in the profiling log.
    log = function.performance_log_message().to_text()
    for line in get_control_text('test-profiling-logs.txt'):
        line = line.replace('\n', '')
        if line in ('', '-'):
            continue
        self.assertIn(line, log)

    # Notes(IS): For some unknown reason I need to do this to make
    # test_provenance pass
    del hazard_layer
def test_analysis_earthquake_summary(self):
    """Test we can compute summary after an EQ on population."""
    hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')

    impact_function = ImpactFunction()
    impact_function.hazard = hazard
    impact_function.exposure = load_test_raster_layer(
        'gisv4', 'exposure', 'raster', 'population.asc')
    impact_function.aggregation = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    layer = impact_function.analysis_impacted
    classification = definition(hazard.keywords['classification'])
    # One count field per hazard class is expected on the summary layer.
    expected_fields = [
        hazard_count_field['field_name'] % hazard_class['key']
        for hazard_class in classification['classes']]
    for field_name in expected_fields:
        message = '%s is not found in the EQ summary layer.' % field_name
        self.assertNotEqual(-1, layer.fieldNameIndex(field_name), message)

    check_inasafe_fields(impact_function.analysis_impacted)
    check_inasafe_fields(impact_function.aggregation_summary)
def test_old_fields_keywords(self):
    """The IF is not ready with we have some wrong inasafe_fields."""
    layers = {
        'hazard': load_test_vector_layer(
            'gisv4', 'hazard', 'classified_vector.geojson'),
        'exposure': load_test_vector_layer(
            'gisv4', 'exposure', 'building-points.geojson', clone=True),
        'aggregation': load_test_vector_layer(
            'gisv4', 'aggregation', 'small_grid.geojson'),
    }

    impact_function = ImpactFunction()
    impact_function.aggregation = layers['aggregation']
    impact_function.exposure = layers['exposure']
    impact_function.hazard = layers['hazard']

    status, message = impact_function.prepare()
    # A pristine layer set must prepare successfully.
    self.assertEqual(PREPARE_SUCCESS, status, message)

    # Remove one attribute that inasafe_fields points at.
    exposure = layers['exposure']
    exposure.startEditing()
    doomed_field = exposure.keywords['inasafe_fields'].values()[0]
    exposure.deleteAttribute(exposure.fieldNameIndex(doomed_field))
    exposure.commitChanges()

    # The keywords are now stale, so the preparation must fail.
    status, message = impact_function.prepare()
    self.assertNotEqual(PREPARE_SUCCESS, status, message)
def prepare_impact_function(self):
    """Create analysis as a representation of current situation of IFCW."""
    # Impact function wired to the wizard's progress reporting.
    function = ImpactFunction()
    function.callback = self.progress_callback

    # Input layers come straight from the parent wizard.
    function.hazard = self.parent.hazard_layer
    function.exposure = self.parent.exposure_layer

    aggregation = self.parent.aggregation_layer
    if aggregation:
        function.aggregation = aggregation
        function.use_selected_features_only = setting(
            'useSelectedFeaturesOnly', False, bool)
    else:
        mode = setting('analysis_extents_mode')
        user_extent = self.extent.user_extent
        if user_extent:
            # This like a hack to transform a geometry to a rectangle.
            # user_extent is a QgsGeometry while requested_extent needs
            # a QgsRectangle, so we round-trip through WKT.
            function.requested_extent = wkt_to_rectangle(
                user_extent.exportToWkt())
            function.requested_extent_crs = self.extent.crs
        elif mode == HAZARD_EXPOSURE_VIEW:
            function.requested_extent = self.iface.mapCanvas().extent()
            function.requested_extent_crs = self.extent.crs

    # We don't have any checkbox in the wizard for the debug mode.
    function.debug_mode = False
    return function
def test_profiling(self):
    """Test running impact function on test data."""
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    # Set up impact function
    function = ImpactFunction()
    function.aggregation = aggregation_layer
    function.exposure = exposure_layer
    function.hazard = hazard_layer

    # A run before prepare() must be rejected as bad input.
    status, message = function.run()
    self.assertEqual(ANALYSIS_FAILED_BAD_INPUT, status, message)

    function.prepare()
    status, message = function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    # Each meaningful control line must be found in the profiling log.
    profile_text = function.performance_log_message().to_text()
    expected_lines = [
        text.replace('\n', '')
        for text in get_control_text('test-profiling-logs.txt')]
    for expected in expected_lines:
        if expected == '' or expected == '-':
            continue
        self.assertIn(expected, profile_text)

    # Notes(IS): For some unknown reason I need to do this to make
    # test_provenance pass
    del hazard_layer
def test_provenance_without_aggregation(self):
    """Test provenance of impact function without aggregation."""
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')

    # Definitions looked up from the layer keywords.
    hazard = definition(hazard_layer.keywords['hazard'])
    exposure = definition(exposure_layer.keywords['exposure'])
    hazard_category = definition(hazard_layer.keywords['hazard_category'])

    # Provenance entries that are known before the analysis runs.
    # All aggregation entries are None: no aggregation layer is set.
    expected_provenance = {
        'gdal_version': gdal.__version__,
        'host_name': gethostname(),
        'map_title': get_map_title(hazard, exposure, hazard_category),
        'map_legend_title': exposure['layer_legend_title'],
        'inasafe_version': get_version(),
        'pyqt_version': PYQT_VERSION_STR,
        'qgis_version': QGis.QGIS_VERSION,
        'qt_version': QT_VERSION_STR,
        'user': getpass.getuser(),
        'os': readable_os_version(),
        'aggregation_layer': None,
        'aggregation_layer_id': None,
        'exposure_layer': exposure_layer.source(),
        'exposure_layer_id': exposure_layer.id(),
        'hazard_layer': hazard_layer.source(),
        'hazard_layer_id': hazard_layer.id(),
        'analysis_question': get_analysis_question(hazard, exposure),
        'aggregation_keywords': None,
        'exposure_keywords': deepcopy(exposure_layer.keywords),
        'hazard_keywords': deepcopy(hazard_layer.keywords),
    }

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    # Show the full diff on mismatch: the provenance dict is large.
    self.maxDiff = None

    # These entries only exist after a successful run, so they are
    # added to the expectation from the impact function itself.
    expected_provenance.update({
        'action_checklist': impact_function.action_checklist(),
        'analysis_extent': impact_function.analysis_extent.exportToWkt(),
        'impact_function_name': impact_function.name,
        'impact_function_title': impact_function.title,
        'notes': impact_function.notes(),
        'requested_extent': impact_function.requested_extent,
        'data_store_uri': impact_function.datastore.uri_path,
        'start_datetime': impact_function.start_datetime,
        'end_datetime': impact_function.end_datetime,
        'duration': impact_function.duration
    })

    self.assertDictEqual(expected_provenance, impact_function.provenance)
def test_impact_function_behaviour(self):
    """Test behaviour of impact function."""
    hazard_layer = load_test_vector_layer(
        'hazard', 'flood_multipart_polygons.shp')
    exposure_layer = load_test_vector_layer('exposure', 'roads.shp')

    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.prepare()
    # NOTE(review): another copy of this test in the file expects
    # 'Flood Polygon On Road Line' (singular 'Road') — confirm which
    # name the impact function actually produces.
    self.assertEqual(impact_function.name, 'Flood Polygon On Roads Line')
    self.assertEqual(impact_function.title, 'be affected')
def test_impact_function_behaviour(self):
    """Test behaviour of impact function."""
    flood = load_test_vector_layer('hazard', 'flood_multipart_polygons.shp')
    roads = load_test_vector_layer('exposure', 'roads.shp')

    function = ImpactFunction()
    function.exposure = roads
    function.hazard = flood
    function.prepare()

    # Name and title are derived from the hazard/exposure combination.
    self.assertEqual('Flood Polygon On Road Line', function.name)
    self.assertEqual('be affected', function.title)
def run_impact_function(cli_arguments): """Runs an analysis and delegates producing pdf and .geojson output layers. .. versionadded:: 3.2 :param cli_arguments: User inputs. :type cli_arguments: CommandLineArguments """ hazard = get_layer(cli_arguments.hazard, 'Hazard Layer') exposure = get_layer(cli_arguments.exposure, 'Exposure Layer') aggregation = None if cli_arguments.aggregation: aggregation = get_layer(cli_arguments.aggregation, 'Aggregation Layer') # Set up impact function impact_function = ImpactFunction() impact_function.hazard = hazard impact_function.exposure = exposure impact_function.aggregation = aggregation # Set the datastore impact_function.datastore = Folder(cli_arguments.output_dir) impact_function.datastore.default_vector_format = 'geojson' # Set the extent if cli_arguments.extent: impact_function.requested_extent_crs = \ QgsCoordinateReferenceSystem(4326) try: impact_function.requested_extent = QgsRectangle( float(cli_arguments.extent[0]), float(cli_arguments.extent[1]), float(cli_arguments.extent[2]), float(cli_arguments.extent[3]) ) except AttributeError: print "Extent is not valid..." pass # Prepare impact function status, message = impact_function.prepare() if status != PREPARE_SUCCESS: print message.to_text() return status, message, None status, message = impact_function.run() if status != ANALYSIS_SUCCESS: print message.to_text() return status, message, None return status, message, impact_function
def test_raster_post_minimum_needs_value_generation(self):
    """Test minimum needs postprocessors on raster exposure.

    Minimum needs postprocessors is defined to only generate values
    when exposure contains population data. Especially important to
    test, since on raster exposure the population field is generated
    on the fly, and the postprocessors need to expect that generated
    population field exists.
    """
    # Raster exposure with population_exposure_count available.
    hazard = load_test_raster_layer('hazard', 'tsunami_wgs84.tif')
    population = load_test_raster_layer(
        'exposure', 'pop_binary_raster_20_20.asc')

    function = ImpactFunction()
    function.exposure = population
    function.hazard = hazard
    function.prepare()
    code, msg = function.run()
    self.assertEqual(code, ANALYSIS_SUCCESS, msg)

    # minimum needs fields should exists in the results
    self._check_minimum_fields_exists(function)

    # TODO: should include demographic postprocessor value too
    expected_value = {
        u'total_affected': 9.208200000039128,
        u'minimum_needs__rice': 25,
        u'minimum_needs__toilets': 0,
        u'minimum_needs__drinking_water': 161,
        u'minimum_needs__clean_water': 616,
        u'male': 4,
        u'female': 4,
        u'youth': 2,
        u'adult': 6,
        u'elderly': 0,
        u'total': 162.7667000000474,
        u'minimum_needs__family_kits': 1,
        u'total_not_affected': 153.55850000000828,
    }
    self._check_minimum_fields_value(expected_value, function)
def test_vector_post_minimum_needs_value_generation(self):
    """Test minimum needs postprocessors on vector exposure.

    Test with vector exposure data with population_count_field exists.
    Minimum needs postprocessors is defined to only generate values
    when exposure contains population data.
    """
    hazard = load_test_vector_layer(
        'gisv4', 'hazard', 'tsunami_vector.geojson')
    population = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')
    grid = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    function = ImpactFunction()
    function.aggregation = grid
    function.exposure = population
    function.hazard = hazard

    code, msg = function.prepare()
    self.assertEqual(PREPARE_SUCCESS, code, msg)
    code, msg = function.run()
    self.assertEqual(code, ANALYSIS_SUCCESS, msg)

    # minimum needs fields should exists in the results
    self._check_minimum_fields_exists(function)

    expected_value = {
        u'population': 69,
        u'total': 9.0,
        u'minimum_needs__rice': 491,
        u'minimum_needs__clean_water': 11763,
        u'minimum_needs__toilets': 8,
        u'minimum_needs__drinking_water': 3072,
        u'minimum_needs__family_kits': 35,
        u'male': 34,
        u'female': 34,
        u'youth': 17,
        u'adult': 45,
        u'elderly': 6,
        u'total_affected': 6.0,
    }
    self._check_minimum_fields_value(expected_value, function)
def test_vector_post_minimum_needs_value_generation(self):
    """Test minimum needs postprocessors on vector exposure.

    Test with vector exposure data with population_count_field exists.
    Minimum needs postprocessors is defined to only generate values
    when exposure contains population data.
    """
    impact_function = ImpactFunction()
    impact_function.aggregation = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')
    impact_function.exposure = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')
    impact_function.hazard = load_test_vector_layer(
        'gisv4', 'hazard', 'tsunami_vector.geojson')

    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    return_code, message = impact_function.run()
    self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

    # The generated minimum needs columns must all be present.
    self._check_minimum_fields_exists(impact_function)

    expected_value = {
        u'population': 69,
        u'total': 9.0,
        u'minimum_needs__rice': 491,
        u'minimum_needs__clean_water': 11763,
        u'minimum_needs__toilets': 8,
        u'minimum_needs__drinking_water': 3072,
        u'minimum_needs__family_kits': 35,
        u'male': 34,
        u'female': 34,
        u'youth': 17,
        u'adult': 45,
        u'elderly': 6,
        u'total_affected': 6.0,
    }
    self._check_minimum_fields_value(expected_value, impact_function)
def impact_function_setup(command_line_arguments, hazard, exposure, aggregation=None): """Sets up an analysis object. .. versionadded:: 3.2 :param command_line_arguments: User inputs. :type command_line_arguments: CommandLineArguments :param hazard: Hazard layer :type hazard: QgsLayer :param exposure: Exposure Layer :type exposure: QgsLayer :param aggregation: Aggregation Layer :type aggregation: QgsLayer :raises: Exception """ # IF impact_function = ImpactFunction() impact_function.hazard = hazard impact_function.exposure = exposure impact_function.aggregation = aggregation impact_function.map_canvas = CANVAS # QSetting context settings = QSettings() crs = settings.value('inasafe/user_extent_crs', '', type=str) impact_function.requested_extent_crs = QgsCoordinateReferenceSystem(crs) try: impact_function.requested_extent = QgsRectangle( float(command_line_arguments.extent[0]), float(command_line_arguments.extent[1]), float(command_line_arguments.extent[2]), float(command_line_arguments.extent[3])) except AttributeError: print "No extents" pass return impact_function
def impact_function_setup( command_line_arguments, hazard, exposure, aggregation=None): """Sets up an analysis object. .. versionadded:: 3.2 :param command_line_arguments: User inputs. :type command_line_arguments: CommandLineArguments :param hazard: Hazard layer :type hazard: QgsLayer :param exposure: Exposure Layer :type exposure: QgsLayer :param aggregation: Aggregation Layer :type aggregation: QgsLayer :raises: Exception """ # IF impact_function = ImpactFunction() impact_function.hazard = hazard impact_function.exposure = exposure impact_function.aggregation = aggregation impact_function.map_canvas = CANVAS # QSetting context settings = QSettings() crs = settings.value('inasafe/user_extent_crs', '', type=str) impact_function.requested_extent_crs = QgsCoordinateReferenceSystem(crs) try: impact_function.requested_extent = QgsRectangle( float(command_line_arguments.extent[0]), float(command_line_arguments.extent[1]), float(command_line_arguments.extent[2]), float(command_line_arguments.extent[3]) ) except AttributeError: print "No extents" pass return impact_function
def test_keyword_monkey_patch(self):
    """Test behaviour of generating keywords."""
    exposure_path = standard_data_path('exposure', 'building-points.shp')
    # noinspection PyCallingNonCallable
    exposure_layer = QgsVectorLayer(exposure_path, 'Building', 'ogr')

    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function._check_layer(impact_function.exposure, 'exposure')

    # The monkey patched keywords must map the exposure type field.
    expected_inasafe_fields = {exposure_type_field['key']: 'TYPE'}
    self.assertDictEqual(
        exposure_layer.keywords['inasafe_fields'], expected_inasafe_fields)

    # And the mapped field must exist on the data provider.
    fields = impact_function.exposure.dataProvider().fieldNameMap().keys()
    self.assertIn(
        exposure_layer.keywords['inasafe_fields']['exposure_type_field'],
        fields)

    inasafe_fields = exposure_layer.keywords['inasafe_fields']
def test_keyword_monkey_patch(self):
    """Test behaviour of generating keywords."""
    path = standard_data_path('exposure', 'building-points.shp')
    # noinspection PyCallingNonCallable
    building_layer = QgsVectorLayer(path, 'Building', 'ogr')

    function = ImpactFunction()
    function.exposure = building_layer
    function._check_layer(function.exposure, 'exposure')

    # Checking the layer must have patched its keywords with the
    # exposure type field mapping.
    expected = {exposure_type_field['key']: 'TYPE'}
    self.assertDictEqual(building_layer.keywords['inasafe_fields'], expected)

    # The mapped field must exist on the underlying data provider.
    provider_fields = (
        function.exposure.dataProvider().fieldNameMap().keys())
    self.assertIn(
        building_layer.keywords['inasafe_fields']['exposure_type_field'],
        provider_fields)

    inasafe_fields = building_layer.keywords['inasafe_fields']
def run_task(self, task_item, status_item, count=0, index=''):
    """Run a single task.

    :param task_item: Table task_item containing task name / details.
    :type task_item: QTableWidgetItem

    :param status_item: Table task_item that holds the task status.
    :type status_item: QTableWidgetItem

    :param count: Count of scenarios that have been run already.
    :type count:

    :param index: The index for the table item that will be run.
    :type index: int

    :returns: Flag indicating if the task succeeded or not.
    :rtype: bool
    """
    self.enable_busy_cursor()
    # Hide the layer groups created by previous runs.
    for layer_group in self.layer_group_container:
        layer_group.setVisible(False)

    # set status to 'running'
    status_item.setText(self.tr('Running'))

    # .. see also:: :func:`appendRow` to understand the next 2 lines
    variant = task_item.data(QtCore.Qt.UserRole)
    value = variant[0]

    result = True

    if isinstance(value, str):
        # A plain string task is the path of a macro script to execute.
        filename = value
        # run script
        try:
            self.run_script(filename)
            # set status to 'OK'
            status_item.setText(self.tr('Script OK'))
        except Exception as e:  # pylint: disable=W0703
            # set status to 'fail'
            status_item.setText(self.tr('Script Fail'))
            LOGGER.exception(
                'Running macro failed. The exception: ' + str(e))
            result = False
    elif isinstance(value, dict):
        # start in new project if toggle is active
        if self.start_in_new_project:
            self.iface.newProject()

        # create layer group
        group_name = value['scenario_name']
        self.layer_group = self.root.addGroup(group_name)
        self.layer_group_container.append(self.layer_group)

        # Its a dict containing files for a scenario
        success, parameters = self.prepare_task(value)
        if not success:
            # set status to 'running'
            status_item.setText(self.tr('Please update scenario'))
            self.disable_busy_cursor()
            return False

        # If impact function parameters loaded successfully, initiate IF.
        impact_function = ImpactFunction()
        impact_function.hazard = parameters[layer_purpose_hazard['key']]
        impact_function.exposure = (
            parameters[layer_purpose_exposure['key']])
        # An aggregation layer takes priority; otherwise fall back to a
        # requested extent when one was supplied by the scenario.
        if parameters[layer_purpose_aggregation['key']]:
            impact_function.aggregation = (
                parameters[layer_purpose_aggregation['key']])
        elif parameters['extent']:
            impact_function.requested_extent = parameters['extent']
            impact_function.requested_extent_crs = parameters['crs']

        prepare_status, prepare_message = impact_function.prepare()
        if prepare_status == PREPARE_SUCCESS:
            LOGGER.info('Impact function ready')
            status, message = impact_function.run()
            if status == ANALYSIS_SUCCESS:
                status_item.setText(self.tr('Analysis Success'))
                impact_layer = impact_function.impact
                if impact_layer.isValid():
                    # Register all scenario layers and attach them to
                    # the scenario's layer group.
                    layer_list = [
                        impact_layer,
                        parameters[layer_purpose_hazard['key']],
                        parameters[layer_purpose_exposure['key']],
                        parameters[layer_purpose_aggregation['key']]]
                    QgsMapLayerRegistry.instance().addMapLayers(
                        layer_list, False)
                    for layer in layer_list:
                        self.layer_group.addLayer(layer)
                    map_canvas = QgsMapLayerRegistry.instance().mapLayers()
                    for layer in map_canvas:
                        # turn of layer visibility if not impact layer
                        if map_canvas[layer].id() == impact_layer.id():
                            self.legend.setLayerVisible(
                                map_canvas[layer], True)
                        else:
                            self.legend.setLayerVisible(
                                map_canvas[layer], False)

                    # generate map report and impact report
                    try:
                        # this line is to save the impact report in
                        # default InaSAFE directory.
                        generate_impact_report(impact_function, self.iface)
                        generate_impact_map_report(
                            impact_function, self.iface)
                        # this line is to save the report in user
                        # specified directory.
                        self.generate_pdf_report(
                            impact_function, self.iface, group_name)
                    except:
                        # TODO(review): bare except swallows everything,
                        # including SystemExit/KeyboardInterrupt —
                        # consider narrowing to `except Exception`.
                        status_item.setText(
                            self.tr('Report failed to generate.'))
                else:
                    LOGGER.info('Impact layer is invalid')
            elif status == ANALYSIS_FAILED_BAD_INPUT:
                LOGGER.info('Bad input detected')
            elif status == ANALYSIS_FAILED_BAD_CODE:
                LOGGER.info('Impact function encountered a bug')
        else:
            LOGGER.warning('Impact function not ready')
            send_error_message(self, prepare_message)
    else:
        LOGGER.exception('Data type not supported: "%s"' % value)
        result = False

    self.disable_busy_cursor()
    return result
def run_task(self, task_item, status_item, count=0, index=''):
    """Run a single task.

    :param task_item: Table task_item containing task name / details.
    :type task_item: QTableWidgetItem

    :param status_item: Table task_item that holds the task status.
    :type status_item: QTableWidgetItem

    :param count: Count of scenarios that have been run already.
    :type count:

    :param index: The index for the table item that will be run.
    :type index: int

    :returns: Flag indicating if the task succeeded or not.
    :rtype: bool
    """
    self.enable_busy_cursor()
    # Hide the layer groups created by previous runs.
    for layer_group in self.layer_group_container:
        layer_group.setItemVisibilityChecked(False)

    # set status to 'running'
    status_item.setText(self.tr('Running'))

    # .. see also:: :func:`appendRow` to understand the next 2 lines
    variant = task_item.data(QtCore.Qt.UserRole)
    value = variant[0]

    result = True

    if isinstance(value, str):
        # A plain string task is the path of a macro script to execute.
        filename = value
        # run script
        try:
            self.run_script(filename)
            # set status to 'OK'
            status_item.setText(self.tr('Script OK'))
        except Exception as e:  # pylint: disable=W0703
            # set status to 'fail'
            status_item.setText(self.tr('Script Fail'))
            LOGGER.exception(
                'Running macro failed. The exception: ' + str(e))
            result = False
    elif isinstance(value, dict):
        # start in new project if toggle is active
        if self.start_in_new_project:
            self.iface.newProject()

        # create layer group
        group_name = value['scenario_name']
        self.layer_group = self.root.addGroup(group_name)
        self.layer_group_container.append(self.layer_group)

        # Its a dict containing files for a scenario
        success, parameters = self.prepare_task(value)
        if not success:
            # set status to 'running'
            status_item.setText(self.tr('Please update scenario'))
            self.disable_busy_cursor()
            return False

        # The output goes to a per-scenario sub directory of either the
        # configured output directory or the scenario source directory.
        directory = self.output_directory.text()
        if self.scenario_directory_radio.isChecked():
            directory = self.source_directory.text()
        output_directory = os.path.join(directory, group_name)
        if not os.path.exists(output_directory):
            os.makedirs(output_directory)

        # If impact function parameters loaded successfully, initiate IF.
        impact_function = ImpactFunction()
        impact_function.datastore = Folder(output_directory)
        impact_function.datastore.default_vector_format = "geojson"
        impact_function.hazard = parameters[layer_purpose_hazard['key']]
        impact_function.exposure = (
            parameters[layer_purpose_exposure['key']])
        # An aggregation layer takes priority; otherwise fall back to a
        # requested extent when one was supplied by the scenario.
        if parameters[layer_purpose_aggregation['key']]:
            impact_function.aggregation = (
                parameters[layer_purpose_aggregation['key']])
        elif parameters['extent']:
            impact_function.requested_extent = parameters['extent']
            impact_function.crs = parameters['crs']

        prepare_status, prepare_message = impact_function.prepare()
        if prepare_status == PREPARE_SUCCESS:
            LOGGER.info('Impact function ready')
            status, message = impact_function.run()
            if status == ANALYSIS_SUCCESS:
                status_item.setText(self.tr('Analysis Success'))
                impact_layer = impact_function.impact
                if impact_layer.isValid():
                    # Register all scenario layers and attach them to
                    # the scenario's layer group.
                    layer_list = [
                        impact_layer,
                        impact_function.analysis_impacted,
                        parameters[layer_purpose_hazard['key']],
                        parameters[layer_purpose_exposure['key']],
                        parameters[layer_purpose_aggregation['key']]]
                    QgsProject.instance().addMapLayers(
                        layer_list, False)
                    for layer in layer_list:
                        self.layer_group.addLayer(layer)
                    map_canvas = QgsProject.instance().mapLayers()
                    for layer in map_canvas:
                        # turn of layer visibility if not impact layer
                        if map_canvas[layer].id() == impact_layer.id():
                            self.set_layer_visible(
                                map_canvas[layer], True)
                        else:
                            self.set_layer_visible(
                                map_canvas[layer], False)

                    # we need to set analysis_impacted as an active layer
                    # because we need to get all qgis variables that we
                    # need from this layer for infographic.
                    if self.iface:
                        self.iface.setActiveLayer(
                            impact_function.analysis_impacted)

                    report_directory = os.path.join(
                        output_directory, 'output')

                    # generate map report and impact report
                    try:
                        error_code, message = (
                            impact_function.generate_report(
                                all_default_report_components,
                                report_directory))
                    except BaseException:
                        # NOTE(review): BaseException also covers
                        # SystemExit/KeyboardInterrupt — consider
                        # narrowing to `except Exception`.
                        status_item.setText(
                            self.tr('Report failed to generate.'))
                else:
                    LOGGER.info('Impact layer is invalid')
            elif status == ANALYSIS_FAILED_BAD_INPUT:
                LOGGER.info('Bad input detected')
            elif status == ANALYSIS_FAILED_BAD_CODE:
                LOGGER.info(
                    'Impact function encountered a bug: %s' % message)
        else:
            LOGGER.warning('Impact function not ready')
            send_error_message(self, prepare_message)
    else:
        LOGGER.exception('Data type not supported: "%s"' % value)
        result = False

    self.disable_busy_cursor()
    return result
def test_ratios_with_vector_exposure(self):
    """Test if we can add defaults to a vector exposure.

    Three scenarios are exercised:
    1. No aggregation layer: the global default ratio should be used.
    2. Aggregation layer with a monkey-patched custom default ratio.
    3. Aggregation layer where the exposure has a count field, so the
       ratio should be computed from the exposure counts.
    """
    # First test, if we do not provide an aggregation,
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.prepare()

    # Let's remove one field from keywords.
    # We monkey patch keywords for testing after `prepare` & before `run`.
    fields = impact_function.exposure.keywords['inasafe_fields']
    del fields[female_count_field['key']]

    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # The ratio field must exist after the IF with only one unique value:
    # the global default ratio.
    field = impact.fieldNameIndex(
        female_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertEqual(1, len(unique_ratio), unique_ratio)
    self.assertEqual(
        unique_ratio[0], female_ratio_default_value['default_value'])

    # Second test, if we provide an aggregation layer and override the
    # elderly default ratio with a custom value (monkey-patched below).
    expected_ratio = 1.0
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.aggregation = aggregation_layer
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.debug_mode = True
    impact_function.prepare()

    # The `prepare` reads keywords from the file, so we can only patch
    # the default values after calling it.
    impact_function.aggregation.keywords['inasafe_default_values'] = {
        elderly_ratio_field['key']: expected_ratio
    }
    fields = impact_function.exposure.keywords['inasafe_fields']
    del fields[female_count_field['key']]
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # We check the field exist after the IF with only original values.
    field = impact.fieldNameIndex(
        female_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertEqual(3, len(unique_ratio), unique_ratio)

    # We check the field exist after the IF with only one value:
    # the patched custom default.
    field = impact.fieldNameIndex(
        elderly_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertEqual(1, len(unique_ratio), unique_ratio)
    self.assertEqual(expected_ratio, unique_ratio[0])

    # Third test, if we provide an aggregation with a ratio and the
    # exposure has a count, we should a have a ratio from the exposure
    # count.
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.debug_mode = True
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.aggregation = aggregation_layer
    impact_function.prepare()
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # Check that we have don't have only one unique value since the ratio
    # depends on the "population / female count" and we should have at
    # least different ratios.
    field = impact.fieldNameIndex(
        female_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertNotEqual(1, len(unique_ratio), unique_ratio)
def test_provenance_without_aggregation(self):
    """Test provenance of impact function without aggregation.

    Builds the expected provenance dictionary in two stages: static
    environment/layer facts before the run, then runtime facts
    (extent, datastore, timings) after the run, and checks it is a
    subset of the impact function's reported provenance.
    """
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')

    # Resolve the definitions referenced by the layer keywords.
    hazard = definition(hazard_layer.keywords['hazard'])
    exposure = definition(exposure_layer.keywords['exposure'])
    hazard_category = definition(hazard_layer.keywords['hazard_category'])

    # Aggregation entries are expected to be None: no aggregation layer
    # is used in this test.
    expected_provenance = {
        provenance_gdal_version['provenance_key']: gdal.__version__,
        provenance_host_name['provenance_key']: gethostname(),
        provenance_map_title['provenance_key']: get_map_title(
            hazard, exposure, hazard_category),
        provenance_map_legend_title['provenance_key']: exposure[
            'layer_legend_title'],
        provenance_user['provenance_key']: getpass.getuser(),
        provenance_os['provenance_key']: readable_os_version(),
        provenance_pyqt_version['provenance_key']: PYQT_VERSION_STR,
        provenance_qgis_version['provenance_key']: QGis.QGIS_VERSION,
        provenance_qt_version['provenance_key']: QT_VERSION_STR,
        provenance_inasafe_version['provenance_key']: get_version(),
        provenance_aggregation_layer['provenance_key']: None,
        provenance_aggregation_layer_id['provenance_key']: None,
        provenance_exposure_layer['provenance_key']:
            exposure_layer.source(),
        provenance_exposure_layer_id['provenance_key']:
            exposure_layer.id(),
        provenance_hazard_layer['provenance_key']: hazard_layer.source(),
        provenance_hazard_layer_id['provenance_key']: hazard_layer.id(),
        provenance_analysis_question['provenance_key']:
            get_analysis_question(hazard, exposure),
        provenance_aggregation_keywords['provenance_key']: None,
        # deepcopy: keywords may be mutated by prepare/run, compare
        # against a snapshot taken now.
        provenance_exposure_keywords['provenance_key']:
            deepcopy(exposure_layer.keywords),
        provenance_hazard_keywords['provenance_key']:
            deepcopy(hazard_layer.keywords),
    }

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    self.maxDiff = None

    # Runtime provenance, only available after the analysis has run.
    expected_provenance.update({
        provenance_action_checklist['provenance_key']:
            impact_function.action_checklist(),
        provenance_analysis_extent['provenance_key']:
            impact_function.analysis_extent.exportToWkt(),
        provenance_impact_function_name['provenance_key']:
            impact_function.name,
        provenance_impact_function_title['provenance_key']:
            impact_function.title,
        provenance_notes['provenance_key']: impact_function.notes(),
        provenance_requested_extent['provenance_key']:
            impact_function.requested_extent,
        provenance_data_store_uri['provenance_key']:
            impact_function.datastore.uri_path,
        provenance_start_datetime['provenance_key']:
            impact_function.start_datetime,
        provenance_end_datetime['provenance_key']:
            impact_function.end_datetime,
        provenance_duration['provenance_key']: impact_function.duration
    })

    # Subset check: the provenance may contain extra keys beyond these.
    self.assertDictContainsSubset(
        expected_provenance, impact_function.provenance)

    # Output layer keys must be present; their values are paths that
    # vary per run, so only membership is asserted.
    output_layer_provenance_keys = [
        provenance_layer_exposure_summary['provenance_key'],
        provenance_layer_aggregate_hazard_impacted['provenance_key'],
        provenance_layer_aggregation_summary['provenance_key'],
        provenance_layer_analysis_impacted['provenance_key'],
        provenance_layer_exposure_summary_table['provenance_key']
    ]
    for key in output_layer_provenance_keys:
        self.assertIn(key, impact_function.provenance.keys())
def test_ratios_with_raster_exposure(self):
    """Test if we can add defaults to a raster exposure.

    See ticket #3851 how to manage ratios with a raster exposure.

    Two scenarios: first with no aggregation layer (global defaults),
    then with a special aggregation layer carrying per-feature ratios.
    """
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'tsunami_vector.geojson')
    exposure_layer = load_test_raster_layer(
        'gisv4', 'exposure', 'raster', 'population.asc')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.debug_mode = True
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.prepare()
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    # Pick the analysis and aggregate-hazard layers out of the outputs.
    for layer in impact_function.outputs:
        if layer.keywords['layer_purpose'] == (
                layer_purpose_analysis_impacted['key']):
            analysis = layer
        if layer.keywords['layer_purpose'] == (
                layer_purpose_aggregate_hazard_impacted['key']):
            impact = layer

    # We check in the impact layer if we have :
    # female default ratio with the default value
    index = impact.fieldNameIndex(female_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    unique_values = impact.uniqueValues(index)
    self.assertEqual(1, len(unique_values))
    female_ratio = unique_values[0]

    # female displaced count and youth displaced count
    self.assertNotEqual(
        -1,
        impact.fieldNameIndex(
            female_displaced_count_field['field_name']))
    self.assertNotEqual(
        -1,
        impact.fieldNameIndex(
            youth_displaced_count_field['field_name']))

    # Check that we have more than 0 female displaced in the analysis layer
    index = analysis.fieldNameIndex(
        female_displaced_count_field['field_name'])
    female_displaced = analysis.uniqueValues(index)[0]
    self.assertGreater(female_displaced, 0)

    # Let's check computation
    index = analysis.fieldNameIndex(
        displaced_field['field_name'])
    displaced_population = analysis.uniqueValues(index)[0]
    self.assertEqual(
        int(displaced_population * female_ratio), female_displaced)

    # Check that we have more than 0 youth displaced in the analysis layer
    # Bug fix: this previously re-read the female field; it now reads
    # the youth field, which is what the assertion is meant to cover.
    index = analysis.fieldNameIndex(
        youth_displaced_count_field['field_name'])
    value = analysis.uniqueValues(index)[0]
    self.assertGreater(value, 0)

    # Let do another test with the special aggregation layer
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'tsunami_vector.geojson')
    exposure_layer = load_test_raster_layer(
        'gisv4', 'exposure', 'raster', 'population.asc')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid_ratios.geojson')
    # This aggregation layer has :
    # * a field for female ratio : 1, 0.5 and 0
    # * use global default for youth ratio
    # * do not use it for adult ratio
    # * use custom 0.75 for elderly ratio

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.debug_mode = True
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.aggregation = aggregation_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # We should have a female_ratio with many values
    index = impact.fieldNameIndex(female_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    values = impact.uniqueValues(index)
    self.assertEqual(3, len(values))

    # We should have a youth_ratio with global default
    index = impact.fieldNameIndex(youth_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    values = impact.uniqueValues(index)
    self.assertEqual(1, len(values))

    # We should not have an adult_ratio
    index = impact.fieldNameIndex(adult_ratio_field['field_name'])
    self.assertEqual(-1, index)

    # We should have a elderly_ratio = 0.75
    index = impact.fieldNameIndex(elderly_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    values = impact.uniqueValues(index)
    self.assertEqual(1, len(values))
    self.assertEqual(0.75, values[0])
def test_provenance_without_aggregation(self):
    """Test provenance of impact function without aggregation.

    .. note:: NOTE(review): a method with this exact name is defined
        more than once in this module; under unittest only the last
        definition runs, shadowing the earlier ones. Confirm whether
        the duplicates are intended.
    """
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')

    # Resolve the definitions referenced by the layer keywords.
    hazard = definition(hazard_layer.keywords['hazard'])
    exposure = definition(exposure_layer.keywords['exposure'])
    hazard_category = definition(hazard_layer.keywords['hazard_category'])

    # Aggregation entries are expected to be None: no aggregation layer
    # is used in this test.
    expected_provenance = {
        provenance_gdal_version['provenance_key']: gdal.__version__,
        provenance_host_name['provenance_key']: gethostname(),
        provenance_map_title['provenance_key']: get_map_title(
            hazard, exposure, hazard_category),
        provenance_map_legend_title['provenance_key']: exposure[
            'layer_legend_title'],
        provenance_user['provenance_key']: getpass.getuser(),
        provenance_os['provenance_key']: readable_os_version(),
        provenance_pyqt_version['provenance_key']: PYQT_VERSION_STR,
        provenance_qgis_version['provenance_key']: QGis.QGIS_VERSION,
        provenance_qt_version['provenance_key']: QT_VERSION_STR,
        provenance_inasafe_version['provenance_key']: get_version(),
        provenance_aggregation_layer['provenance_key']: None,
        provenance_aggregation_layer_id['provenance_key']: None,
        provenance_exposure_layer['provenance_key']:
            exposure_layer.source(),
        provenance_exposure_layer_id['provenance_key']:
            exposure_layer.id(),
        provenance_hazard_layer['provenance_key']: hazard_layer.source(),
        provenance_hazard_layer_id['provenance_key']: hazard_layer.id(),
        provenance_analysis_question['provenance_key']:
            get_analysis_question(hazard, exposure),
        provenance_aggregation_keywords['provenance_key']: None,
        # deepcopy: keywords may be mutated by prepare/run, compare
        # against a snapshot taken now.
        provenance_exposure_keywords['provenance_key']:
            deepcopy(exposure_layer.keywords),
        provenance_hazard_keywords['provenance_key']:
            deepcopy(hazard_layer.keywords),
    }

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    self.maxDiff = None

    # Runtime provenance, only available after the analysis has run.
    expected_provenance.update({
        provenance_action_checklist['provenance_key']:
            impact_function.action_checklist(),
        provenance_analysis_extent['provenance_key']:
            impact_function.analysis_extent.exportToWkt(),
        provenance_impact_function_name['provenance_key']:
            impact_function.name,
        provenance_impact_function_title['provenance_key']:
            impact_function.title,
        provenance_notes['provenance_key']: impact_function.notes(),
        provenance_requested_extent['provenance_key']:
            impact_function.requested_extent,
        provenance_data_store_uri['provenance_key']:
            impact_function.datastore.uri_path,
        provenance_start_datetime['provenance_key']:
            impact_function.start_datetime,
        provenance_end_datetime['provenance_key']:
            impact_function.end_datetime,
        provenance_duration['provenance_key']: impact_function.duration
    })

    # Subset check: the provenance may contain extra keys beyond these.
    self.assertDictContainsSubset(
        expected_provenance, impact_function.provenance)

    # Output layer keys must be present; their values are paths that
    # vary per run, so only membership is asserted.
    output_layer_provenance_keys = [
        provenance_layer_exposure_summary['provenance_key'],
        provenance_layer_aggregate_hazard_impacted['provenance_key'],
        provenance_layer_aggregation_summary['provenance_key'],
        provenance_layer_analysis_impacted['provenance_key'],
        provenance_layer_exposure_summary_table['provenance_key']
    ]
    for key in output_layer_provenance_keys:
        self.assertIn(key, impact_function.provenance.keys())
def test_ratios_with_raster_exposure(self):
    """Test if we can add defaults to a raster exposure.

    See ticket #3851 how to manage ratios with a raster exposure.

    Two scenarios: first with no aggregation layer (global defaults),
    then with a special aggregation layer carrying per-feature ratios.
    """
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'tsunami_vector.geojson')
    exposure_layer = load_test_raster_layer(
        'gisv4', 'exposure', 'raster', 'population.asc')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.debug_mode = True
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.prepare()
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    # Pick the analysis and aggregate-hazard layers out of the outputs.
    for layer in impact_function.outputs:
        if layer.keywords['layer_purpose'] == (
                layer_purpose_analysis_impacted['key']):
            analysis = layer
        if layer.keywords['layer_purpose'] == (
                layer_purpose_aggregate_hazard_impacted['key']):
            impact = layer

    # We check in the impact layer if we have :
    # female default ratio with the default value
    index = impact.fieldNameIndex(female_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    unique_values = impact.uniqueValues(index)
    self.assertEqual(1, len(unique_values))
    female_ratio = unique_values[0]

    # female displaced count and youth displaced count
    self.assertNotEqual(
        -1,
        impact.fieldNameIndex(
            female_displaced_count_field['field_name']))
    self.assertNotEqual(
        -1,
        impact.fieldNameIndex(
            youth_displaced_count_field['field_name']))

    # Check that we have more than 0 female displaced in the analysis layer
    index = analysis.fieldNameIndex(
        female_displaced_count_field['field_name'])
    female_displaced = analysis.uniqueValues(index)[0]
    self.assertGreater(female_displaced, 0)

    # Let's check computation
    index = analysis.fieldNameIndex(displaced_field['field_name'])
    displaced_population = analysis.uniqueValues(index)[0]
    self.assertEqual(
        int(displaced_population * female_ratio), female_displaced)

    # Check that we have more than 0 youth displaced in the analysis layer
    # Bug fix: this previously re-read the female field; it now reads
    # the youth field, which is what the assertion is meant to cover.
    index = analysis.fieldNameIndex(
        youth_displaced_count_field['field_name'])
    value = analysis.uniqueValues(index)[0]
    self.assertGreater(value, 0)

    # Let do another test with the special aggregation layer
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'tsunami_vector.geojson')
    exposure_layer = load_test_raster_layer(
        'gisv4', 'exposure', 'raster', 'population.asc')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid_ratios.geojson')
    # This aggregation layer has :
    # * a field for female ratio : 1, 0.5 and 0
    # * use global default for youth ratio
    # * do not use it for adult ratio
    # * use custom 0.75 for elderly ratio

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.debug_mode = True
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.aggregation = aggregation_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # We should have a female_ratio with many values
    index = impact.fieldNameIndex(female_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    values = impact.uniqueValues(index)
    self.assertEqual(3, len(values))

    # We should have a youth_ratio with global default
    index = impact.fieldNameIndex(youth_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    values = impact.uniqueValues(index)
    self.assertEqual(1, len(values))

    # We should not have an adult_ratio
    index = impact.fieldNameIndex(adult_ratio_field['field_name'])
    self.assertEqual(-1, index)

    # We should have a elderly_ratio = 0.75
    index = impact.fieldNameIndex(elderly_ratio_field['field_name'])
    self.assertNotEqual(-1, index)
    values = impact.uniqueValues(index)
    self.assertEqual(1, len(values))
    self.assertEqual(0.75, values[0])
def test_ratios_with_vector_exposure(self):
    """Test if we can add defaults to a vector exposure.

    .. note:: NOTE(review): a method with this exact name is defined
        more than once in this module; under unittest only the last
        definition runs, shadowing the earlier ones. Confirm whether
        the duplicates are intended.
    """
    # First test, if we do not provide an aggregation,
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.prepare()

    # Let's remove one field from keywords.
    # We monkey patch keywords for testing after `prepare` & before `run`.
    fields = impact_function.exposure.keywords['inasafe_fields']
    del fields[female_count_field['key']]

    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # The ratio field must exist after the IF with only one unique value:
    # the global default ratio.
    field = impact.fieldNameIndex(female_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertEqual(1, len(unique_ratio), unique_ratio)
    self.assertEqual(
        unique_ratio[0], female_ratio_default_value['default_value'])

    # Second test, if we provide an aggregation layer and override the
    # elderly default ratio with a custom value (monkey-patched below).
    expected_ratio = 1.0
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.aggregation = aggregation_layer
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.debug_mode = True
    impact_function.prepare()

    # The `prepare` reads keywords from the file, so we can only patch
    # the default values after calling it.
    impact_function.aggregation.keywords['inasafe_default_values'] = {
        elderly_ratio_field['key']: expected_ratio
    }
    fields = impact_function.exposure.keywords['inasafe_fields']
    del fields[female_count_field['key']]
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # We check the field exist after the IF with only original values.
    field = impact.fieldNameIndex(female_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertEqual(3, len(unique_ratio), unique_ratio)

    # We check the field exist after the IF with only one value:
    # the patched custom default.
    field = impact.fieldNameIndex(elderly_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertEqual(1, len(unique_ratio), unique_ratio)
    self.assertEqual(expected_ratio, unique_ratio[0])

    # Third test, if we provide an aggregation with a ratio and the
    # exposure has a count, we should a have a ratio from the exposure
    # count.
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'population.geojson')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    # Set up impact function
    impact_function = ImpactFunction()
    impact_function.debug_mode = True
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.aggregation = aggregation_layer
    impact_function.prepare()
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)
    impact = impact_function.impact

    # Check that we have don't have only one unique value since the ratio
    # depends on the "population / female count" and we should have at
    # least different ratios.
    field = impact.fieldNameIndex(female_ratio_field['field_name'])
    self.assertNotEqual(-1, field)
    unique_ratio = impact.uniqueValues(field)
    self.assertNotEqual(1, len(unique_ratio), unique_ratio)
def test_minimum_extent(self):
    """Test we can compute the minimum extent in the IF.

    Four scenarios are checked against exact expected WKT extents:
    1. No aggregation layer.
    2. No aggregation layer but a requested extent.
    3. An aggregation layer, ignoring the feature selection.
    4. An aggregation layer, honouring the feature selection.
    """
    # Without aggregation layer
    hazard_layer = load_test_vector_layer(
        'hazard', 'flood_multipart_polygons.shp')
    exposure_layer = load_test_vector_layer('exposure', 'roads.shp')
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    message = (
        'Test about the minimum extent without an aggregation layer is '
        'failing.')
    self.assertTrue(
        compare_wkt(
            'Polygon (('
            '106.8080099999999959 -6.19531000000000009, '
            '106.8080099999999959 -6.16752599999999962, '
            '106.83456946836641066 -6.16752599999999962, '
            '106.83456946836641066 -6.19531000000000009, '
            '106.8080099999999959 -6.19531000000000009))',
            impact_function.analysis_extent.exportToWkt()),
        message
    )

    # Without aggregation layer but with a requested_extent
    hazard_layer = load_test_vector_layer(
        'hazard', 'flood_multipart_polygons.shp')
    exposure_layer = load_test_vector_layer('exposure', 'roads.shp')
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.requested_extent = wkt_to_rectangle(
        'POLYGON (('
        '106.772279 -6.237576, '
        '106.772279 -6.165415, '
        '106.885165 -6.165415, '
        '106.885165 -6.237576, '
        '106.772279 -6.237576'
        '))')
    impact_function.requested_extent_crs = QgsCoordinateReferenceSystem(
        4326)
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    message = (
        'Test about the minimum extent without an aggregation layer but '
        'with a requested extent is failing.')
    # The requested extent contains the hazard/exposure overlap, so the
    # computed extent is the same as in the first scenario.
    self.assertTrue(
        compare_wkt(
            'Polygon (('
            '106.8080099999999959 -6.19531000000000009, '
            '106.8080099999999959 -6.16752599999999962, '
            '106.83456946836641066 -6.16752599999999962, '
            '106.83456946836641066 -6.19531000000000009, '
            '106.8080099999999959 -6.19531000000000009))',
            impact_function.analysis_extent.exportToWkt()),
        message
    )

    # With an aggregation layer, without selection
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')
    impact_function = ImpactFunction()
    impact_function.aggregation = aggregation_layer
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    # A feature is selected, but the selection must be ignored here.
    impact_function.use_selected_features_only = False
    impact_function.aggregation.select(0)
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    message = (
        'Test about the minimum extent with an aggregation layer is '
        'failing.')
    self.assertTrue(
        compare_wkt(
            'Polygon ((106.9033179652593617 -6.18324454090033182, '
            '106.90331796525939012 -6.2725478115989306, '
            '106.72365490843547775 -6.2725478115989306, '
            '106.72365490843547775 -6.18324645462287137, '
            '106.72365490843547775 -6.09392810187095257, '
            '106.81348643684744104 -6.09392810187095257, '
            '106.9033179652593617 -6.09392810187095257, '
            '106.9033179652593617 -6.18324454090033182))',
            impact_function.analysis_extent.exportToWkt()),
        message
    )

    # With an aggregation layer, with selection
    # Reuses the selection made above; now it must be honoured.
    impact_function.use_selected_features_only = True
    impact_function.aggregation = aggregation_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    message = (
        'Test about the minimum extent with an aggregation layer and '
        'a selection is failing.')
    self.assertTrue(
        compare_wkt(
            'Polygon ((106.72365490843547775 -6.09392810187095257, '
            '106.81348643684744104 -6.09392810187095257, '
            '106.81348643684744104 -6.18324645462287137, '
            '106.72365490843547775 -6.18324645462287137, '
            '106.72365490843547775 -6.09392810187095257))',
            impact_function.analysis_extent.exportToWkt()),
        message
    )
def test_provenance_without_aggregation(self):
    """Test provenance of impact function without aggregation.

    Assembles the full expected provenance dictionary (environment,
    layer, then runtime facts) and compares it for exact equality
    against the impact function's reported provenance.
    """
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')

    # Resolve the definitions referenced by the layer keywords.
    hazard_def = definition(hazard_layer.keywords['hazard'])
    exposure_def = definition(exposure_layer.keywords['exposure'])
    category_def = definition(hazard_layer.keywords['hazard_category'])

    # Environment related provenance, known before the analysis runs.
    environment_provenance = {
        'gdal_version': gdal.__version__,
        'host_name': gethostname(),
        'map_title': get_map_title(hazard_def, exposure_def, category_def),
        'map_legend_title': exposure_def['layer_legend_title'],
        'inasafe_version': get_version(),
        'pyqt_version': PYQT_VERSION_STR,
        'qgis_version': QGis.QGIS_VERSION,
        'qt_version': QT_VERSION_STR,
        'user': getpass.getuser(),
        'os': readable_os_version(),
    }
    # Layer related provenance. No aggregation layer is used here, so
    # every aggregation entry is expected to be None.
    layer_provenance = {
        'aggregation_layer': None,
        'aggregation_layer_id': None,
        'exposure_layer': exposure_layer.source(),
        'exposure_layer_id': exposure_layer.id(),
        'hazard_layer': hazard_layer.source(),
        'hazard_layer_id': hazard_layer.id(),
        'analysis_question': get_analysis_question(
            hazard_def, exposure_def),
        'aggregation_keywords': None,
        # Snapshot the keywords now; prepare/run may mutate them.
        'exposure_keywords': deepcopy(exposure_layer.keywords),
        'hazard_keywords': deepcopy(hazard_layer.keywords),
    }
    expected_provenance = dict(environment_provenance)
    expected_provenance.update(layer_provenance)

    # Set up and run the impact function.
    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)
    status, message = impact_function.run()
    self.assertEqual(ANALYSIS_SUCCESS, status, message)

    # Show the full diff if the big dict comparison fails.
    self.maxDiff = None

    # Runtime provenance, only available after the analysis has run.
    expected_provenance.update({
        'action_checklist': impact_function.action_checklist(),
        'analysis_extent': impact_function.analysis_extent.exportToWkt(),
        'impact_function_name': impact_function.name,
        'impact_function_title': impact_function.title,
        'notes': impact_function.notes(),
        'requested_extent': impact_function.requested_extent,
        'data_store_uri': impact_function.datastore.uri_path,
        'start_datetime': impact_function.start_datetime,
        'end_datetime': impact_function.end_datetime,
        'duration': impact_function.duration
    })
    self.assertDictEqual(expected_provenance, impact_function.provenance)
def _resolve_scenario_path(purpose, value):
    """Resolve a scenario entry to an existing file path.

    The entry may be a plain existing path, a file name under the
    standard data directory for the given purpose, or a '/'-separated
    path relative to the standard data directory root.

    :param purpose: Layer purpose: 'exposure', 'hazard' or 'aggregation'.
        Also used in the error message.
    :type purpose: str

    :param value: Path or file name from the scenario definition.
    :type value: str

    :returns: An existing file path.
    :rtype: str

    :raises IOError: If none of the candidate locations exists.
    """
    if os.path.exists(value):
        return value
    candidate = standard_data_path(purpose, value)
    if os.path.exists(candidate):
        return candidate
    candidate = standard_data_path(*(value.split('/')))
    if os.path.exists(candidate):
        return candidate
    raise IOError('No %s file' % purpose)


def _load_layer(path, title):
    """Load a path as a vector layer, falling back to raster.

    :param path: File path of the layer.
    :type path: str

    :param title: Layer title.
    :type title: str

    :returns: The loaded layer. May be invalid if neither provider
        can read the path.
    :rtype: QgsMapLayer
    """
    layer = QgsVectorLayer(path, title, 'ogr')
    if not layer.isValid():
        layer = QgsRasterLayer(path, title)
    return layer


def run_scenario(scenario, use_debug=False):
    """Run scenario.

    :param scenario: Dictionary of hazard, exposure, and aggregation.
    :type scenario: dict

    :param use_debug: If we should use debug_mode when we run the scenario.
    :type use_debug: bool

    :returns: Tuple(status, Flow dictionary, outputs).
    :rtype: tuple
    """
    # Resolve inputs first so a missing file fails fast with IOError.
    exposure_path = _resolve_scenario_path('exposure', scenario['exposure'])
    hazard_path = _resolve_scenario_path('hazard', scenario['hazard'])
    if scenario['aggregation']:
        aggregation_path = _resolve_scenario_path(
            'aggregation', scenario['aggregation'])
    else:
        aggregation_path = None

    impact_function = ImpactFunction()
    impact_function.debug_mode = use_debug
    # Hazard and exposure may be vector or raster; aggregation is
    # always a vector layer.
    impact_function.hazard = _load_layer(hazard_path, 'Hazard')
    impact_function.exposure = _load_layer(exposure_path, 'Exposure')
    if aggregation_path:
        impact_function.aggregation = QgsVectorLayer(
            aggregation_path, 'Aggregation', 'ogr')

    # Bail out early with (status, message, None) on any failure.
    status, message = impact_function.prepare()
    if status != 0:
        return status, message, None
    status, message = impact_function.run()
    if status != 0:
        return status, message, None

    # Sanity check the InaSAFE fields of every vector output.
    for layer in impact_function.outputs:
        if layer.type() == QgsMapLayer.VectorLayer:
            check_inasafe_fields(layer)
    return status, impact_function.state, impact_function.outputs
def test_earthquake_population_without_aggregation(self):
    """Testing Earthquake in Population without aggregation.

    Runs the earthquake-on-population impact function end to end, then
    renders both the standard HTML report and the infographic report and
    checks the generated general-report context and the population pie
    chart slices against hard-coded expected values.

    .. versionadded:: 4.0
    """
    output_folder = self.fixtures_dir('../output/earthquake_population')

    # Classified vector with building-points
    # Start from a clean output folder so stale artifacts cannot pass.
    shutil.rmtree(output_folder, ignore_errors=True)

    hazard_layer = load_test_raster_layer(
        'hazard', 'earthquake.tif')
    exposure_layer = load_test_raster_layer(
        'exposure', 'pop_binary_raster_20_20.asc')

    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.prepare()
    return_code, message = impact_function.run()

    self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

    report_metadata = ReportMetadata(
        metadata_dict=standard_impact_report_metadata_html)

    impact_report = ImpactReport(
        IFACE,
        report_metadata,
        impact_function=impact_function)
    impact_report.output_folder = output_folder
    return_code, message = impact_report.process_components()

    self.assertEqual(
        return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

    """Checking generated context"""
    empty_component_output_message = 'Empty component output'

    # Check Analysis Summary
    analysis_summary = impact_report.metadata.component_by_key(
        general_report_component['key'])
    """:type: safe.report.report_metadata.Jinja2ComponentsMetadata"""

    # Expected general-report context: all 200 people fall in MMI zone
    # VIII; every other MMI class is empty. Note the mixed types: empty
    # classes use int 0 while populated rows use formatted strings.
    expected_context = {
        'table_header': (
            u'Estimated Number of people affected per MMI intensity'),
        'header': u'General Report',
        'summary': [
            {
                'header_label': u'Hazard Zone',
                'rows': [
                    {'value': 0, 'name': u'X', 'key': 'X'},
                    {'value': 0, 'name': u'IX', 'key': 'IX'},
                    {'value': '200', 'name': u'VIII', 'key': 'VIII'},
                    {'value': 0, 'name': u'VII', 'key': 'VII'},
                    {'value': 0, 'name': u'VI', 'key': 'VI'},
                    {'value': 0, 'name': u'V', 'key': 'V'},
                    {'value': 0, 'name': u'IV', 'key': 'IV'},
                    {'value': 0, 'name': u'III', 'key': 'III'},
                    {'value': 0, 'name': u'II', 'key': 'II'},
                    {'value': 0, 'name': u'I', 'key': 'I'},
                    {
                        'as_header': True,
                        'key': 'total_field',
                        'name': u'Total',
                        'value': '200'
                    }
                ],
                'value_label': u'Count'
            },
            {
                'header_label': u'Population',
                'rows': [
                    {
                        'value': '200',
                        'name': u'Affected',
                        'key': 'total_affected_field',
                    },
                    {
                        'key': 'total_not_affected_field',
                        'name': u'Not Affected',
                        'value': '0'
                    },
                    {
                        'key': 'total_not_exposed_field',
                        'name': u'Not Exposed',
                        'value': '0'},
                    {
                        'value': '200',
                        'name': u'Displaced',
                        'key': 'displaced_field'
                    },
                    {
                        'value': '0 - 100',
                        'name': u'Fatalities',
                        'key': 'fatalities_field'
                    }],
                'value_label': u'Count'
            }
        ],
        'notes': [
            'Exposed People: People who are present in hazard zones and '
            'are thereby subject to potential losses. In InaSAFE, people '
            'who are exposed are those people who are within the extent '
            'of the hazard.',
            'Affected People: People who are affected by a hazardous '
            'event. People can be affected directly or indirectly. '
            'Affected people may experience short-term or long-term '
            'consequences to their lives, livelihoods or health and in '
            'the economic, physical, social, cultural and environmental '
            'assets. In InaSAFE, people who are killed during the event '
            'are also considered affected.',
            'Displaced People: Displaced people are people who, for '
            'different reasons and circumstances because of risk or '
            'disaster, have to leave their place of residence. '
            'In InaSAFE, demographic and minimum needs reports are based '
            'on displaced / evacuated people.'
        ]
    }
    actual_context = analysis_summary.context

    self.assertDictEqual(expected_context, actual_context)
    self.assertTrue(
        analysis_summary.output, empty_component_output_message)

    # Render the infographic report with the same impact function.
    report_metadata = ReportMetadata(
        metadata_dict=infographic_report)
    infographic_impact_report = ImpactReport(
        IFACE,
        report_metadata,
        impact_function=impact_function)
    infographic_impact_report.output_folder = output_folder
    return_code, message = infographic_impact_report.process_components()
    self.assertEqual(
        return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

    # check population pie chart if we have 100% donut slice
    population_chart_svg = (
        infographic_impact_report.metadata.component_by_key(
            population_chart_svg_component['key'])
    )

    # Expected donut slices: the VIII slice covers 100% of the chart; the
    # value-100 / empty-label slice is presumably an internal spacer slice
    # emitted by the chart generator — TODO confirm against the chart code.
    expected_slices = [
        {
            'value': 200,
            'show_label': True,
            'center': (224.0, 128.0),
            'stroke_opacity': 1,
            'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                    '0.000000,256.000000l-0.000000,-64.000000a64.000000,'
                    '64.000000 0 0 0 0.000000,-128.000000Z',
            'percentage': 100,
            'label': u'VIII',
            'stroke': u'#ff7000',
            'label_position': (256, 0),
            'fill': u'#ff7000'
        },
        {
            'value': 100,
            'show_label': False,
            'center': (32.0, 128.0),
            'stroke_opacity': 1,
            'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                    '-0.000000,-256.000000l0.000000,64.000000a64.000000,'
                    '64.000000 0 0 0 0.000000,128.000000Z',
            'percentage': 50.0,
            'label': '',
            'stroke': u'#ff7000',
            'label_position': (256, 0),
            'fill': u'#ff7000'
        },
        {
            'value': 0,
            'show_label': False,
            'center': (128.0, 224.0),
            'stroke_opacity': 1,
            'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                    '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                    '64.000000 0 0 0 0.000000,0.000000Z',
            'percentage': 0.0,
            'label': u'Total Not Affected',
            'stroke': '#fff',
            'label_position': (256, 0),
            'fill': u'#1a9641'
        }]

    actual_context = population_chart_svg.context['context']
    actual_slices = actual_context.slices

    self.assertEqual(expected_slices, actual_slices)
    self.assertTrue(
        population_chart_svg.output,
        empty_component_output_message)

    # Clean up the generated report artifacts.
    shutil.rmtree(output_folder, ignore_errors=True)
def run_scenario(scenario, use_debug=False):
    """Run scenario.

    :param scenario: Dictionary of hazard, exposure, and aggregation.
    :type scenario: dict

    :param use_debug: If we should use debug_mode when we run the scenario.
    :type use_debug: bool

    :returns: Tuple(status, Flow dictionary, outputs).
    :rtype: tuple

    :raises IOError: If the hazard, exposure or aggregation entry cannot be
        resolved to an existing file.
    """
    def _resolve_path(purpose, value):
        # Resolve a scenario entry to an existing file path. Try, in order:
        # the value as given, the value inside the standard data folder for
        # this layer purpose, then the value interpreted as a slash-separated
        # sub-path of the standard data root.
        if os.path.exists(value):
            return value
        candidate = standard_data_path(purpose, value)
        if os.path.exists(candidate):
            return candidate
        candidate = standard_data_path(*(value.split('/')))
        if os.path.exists(candidate):
            return candidate
        raise IOError('No %s file' % purpose)

    exposure_path = _resolve_path('exposure', scenario['exposure'])
    hazard_path = _resolve_path('hazard', scenario['hazard'])

    if not scenario['aggregation']:
        aggregation_path = None
    else:
        aggregation_path = _resolve_path(
            'aggregation', scenario['aggregation'])

    impact_function = ImpactFunction()
    impact_function.debug_mode = use_debug

    # A layer may be either vector or raster: try the OGR provider first and
    # fall back to a raster layer when the file is not readable as vector.
    layer = QgsVectorLayer(hazard_path, 'Hazard', 'ogr')
    if not layer.isValid():
        layer = QgsRasterLayer(hazard_path, 'Hazard')
    impact_function.hazard = layer

    layer = QgsVectorLayer(exposure_path, 'Exposure', 'ogr')
    if not layer.isValid():
        layer = QgsRasterLayer(exposure_path, 'Exposure')
    impact_function.exposure = layer

    if aggregation_path:
        impact_function.aggregation = QgsVectorLayer(
            aggregation_path, 'Aggregation', 'ogr')

    status, message = impact_function.prepare()
    if status != 0:
        # Preparation failed: propagate the status and diagnostic message.
        return status, message, None

    status, message = impact_function.run()
    if status != 0:
        return status, message, None

    # Sanity-check every vector output for well-formed InaSAFE fields.
    for layer in impact_function.outputs:
        if layer.type() == QgsMapLayer.VectorLayer:
            check_inasafe_fields(layer)

    return status, impact_function.state, impact_function.outputs
def test_earthquake_population_without_aggregation(self):
    """Testing Earthquake in Population without aggregation.

    Runs the earthquake-on-population impact function, renders the
    standard HTML report and checks the general-report context plus all
    MMI pie chart slices against hard-coded expected values.

    .. versionadded:: 4.0
    """
    output_folder = self.fixtures_dir('../output/earthquake_population')

    # Classified vector with building-points
    # Start from a clean output folder so stale artifacts cannot pass.
    shutil.rmtree(output_folder, ignore_errors=True)

    hazard_layer = load_test_raster_layer(
        'hazard', 'earthquake.tif')
    exposure_layer = load_test_raster_layer(
        'exposure', 'pop_binary_raster_20_20.asc')

    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.prepare()
    return_code, message = impact_function.run()

    self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

    report_metadata = ReportMetadata(
        metadata_dict=standard_impact_report_metadata_html)

    impact_report = ImpactReport(
        IFACE,
        report_metadata,
        impact_function=impact_function)
    impact_report.output_folder = output_folder
    # NOTE(review): the other versions of this test in this file call
    # process_components() (plural); confirm this singular spelling is a
    # real ImpactReport method and not a typo.
    return_code, message = impact_report.process_component()

    self.assertEqual(
        return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

    """Checking generated context"""
    empty_component_output_message = 'Empty component output'

    # Check Analysis Summary
    analysis_summary = impact_report.metadata.component_by_key(
        general_report_component['key'])
    """:type: safe.report.report_metadata.Jinja2ComponentsMetadata"""

    # Expected context: all 200 people in MMI zone VIII; all values are
    # formatted strings in this report schema.
    expected_context = {
        'table_header': u'Estimated Number of people',
        'header': u'General Report',
        'summary': [
            {
                'header_label': u'Hazard Zone',
                'rows': [
                    {'value': '0', 'name': u'X', 'key': 'X'},
                    {'value': '0', 'name': u'IX', 'key': 'IX'},
                    {'value': '200', 'name': u'VIII', 'key': 'VIII'},
                    {'value': '0', 'name': u'VII', 'key': 'VII'},
                    {'value': '0', 'name': u'VI', 'key': 'VI'},
                    {'value': '0', 'name': u'V', 'key': 'V'},
                    {'value': '0', 'name': u'IV', 'key': 'IV'},
                    {'value': '0', 'name': u'III', 'key': 'III'},
                    {'value': '0', 'name': u'II', 'key': 'II'},
                    {'value': '0', 'name': u'I', 'key': 'I'}],
                'value_label': u'Count'
            },
            {
                'header_label': u'Population',
                'rows': [
                    {'value': '200',
                     'name': u'Displaced',
                     'key': 'displaced_field'},
                    {'value': '0 - 100',
                     'name': u'Fatalities',
                     'key': 'fatalities_field'}],
                'value_label': u'Count'
            }
        ]
    }
    actual_context = analysis_summary.context

    self.assertDictEqual(expected_context, actual_context)
    self.assertTrue(
        analysis_summary.output, empty_component_output_message)

    # check population pie chart if we have 100% donut slice
    population_chart_svg = impact_report.metadata.component_by_key(
        population_chart_svg_component['key'])

    # Expected donut slices per MMI class. The value-100 / empty-label
    # slice between VIII and VII is presumably an internal spacer slice
    # emitted by the chart generator — TODO confirm against the chart code.
    expected_slices = [
        {'value': 0,
         'show_label': False,
         'center': (128.0, 32.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'X',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#dd0000'},
        {'value': 0,
         'show_label': False,
         'center': (128.0, 32.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'IX',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#ff0000'},
        {'value': 200,
         'show_label': True,
         'center': (224.0, 128.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,256.000000l-0.000000,-64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,-128.000000Z',
         'percentage': 100,
         'label': u'VIII',
         'stroke': u'#ff7000',
         'label_position': (256, 0),
         'fill': u'#ff7000'},
        {'value': 100,
         'show_label': False,
         'center': (32.0, 128.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                 '-0.000000,-256.000000l0.000000,64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,128.000000Z',
         'percentage': 50.0,
         'label': '',
         'stroke': u'#ff7000',
         'label_position': (256, 0),
         'fill': u'#ff7000'},
        {'value': 0,
         'show_label': False,
         'center': (128.0, 224.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'VII',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#ffa800'},
        {'value': 0,
         'show_label': False,
         'center': (128.0, 224.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'VI',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#fff000'},
        {'value': 0,
         'show_label': False,
         'center': (128.0, 224.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'V',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#aaffff'},
        {'value': 0,
         'show_label': False,
         'center': (128.0, 224.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'IV',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#55ffff'},
        {'value': 0,
         'show_label': False,
         'center': (128.0, 224.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'III',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#00cfff'},
        {'value': 0,
         'show_label': False,
         'center': (128.0, 224.0),
         'stroke_opacity': 1,
         'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                 '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                 '64.000000 0 0 0 0.000000,0.000000Z',
         'percentage': 0.0,
         'label': u'II',
         'stroke': '#fff',
         'label_position': (256, 0),
         'fill': u'#209fff'}]

    actual_context = population_chart_svg.context['context']
    actual_slices = actual_context.slices

    self.assertEqual(expected_slices, actual_slices)
    self.assertTrue(
        population_chart_svg.output,
        empty_component_output_message)

    # Clean up the generated report artifacts.
    shutil.rmtree(output_folder, ignore_errors=True)
def test_earthquake_population_without_aggregation(self):
    """Testing Earthquake in Population without aggregation.

    Runs the earthquake-on-population impact function (with an explicit
    analysis CRS), renders the standard HTML report and the infographic
    report, and checks the general-report context and population pie
    chart slices against hard-coded expected values.

    .. versionadded:: 4.0
    """
    output_folder = self.fixtures_dir('../output/earthquake_population')

    # Classified vector with building-points
    # Start from a clean output folder so stale artifacts cannot pass.
    shutil.rmtree(output_folder, ignore_errors=True)

    hazard_layer = load_test_raster_layer('hazard', 'earthquake.tif')
    exposure_layer = load_test_raster_layer(
        'exposure', 'pop_binary_raster_20_20.asc')

    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.crs = QgsCoordinateReferenceSystem(4326)
    impact_function.prepare()
    return_code, message = impact_function.run()

    self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

    report_metadata = ReportMetadata(
        metadata_dict=standard_impact_report_metadata_html)

    impact_report = ImpactReport(
        IFACE, report_metadata, impact_function=impact_function)
    impact_report.output_folder = output_folder
    return_code, message = impact_report.process_components()

    self.assertEqual(
        return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

    """Checking generated context."""
    empty_component_output_message = 'Empty component output'

    # Check Analysis Summary
    analysis_summary = impact_report.metadata.component_by_key(
        general_report_component['key'])
    """:type: safe.report.report_metadata.Jinja2ComponentsMetadata"""

    # Expected context: all 200 people in MMI zone VIII. This report
    # schema carries per-row lists ('numbers' / 'value_labels') instead
    # of the scalar 'value' / 'value_label' of older revisions.
    expected_context = {
        'table_header': (
            u'Estimated Number of people affected per MMI intensity'),
        'header': u'General Report',
        'summary': [{
            'header_label': u'Hazard Zone',
            'rows': [{
                'numbers': ['0'],
                'name': u'X',
                'key': 'X'
            }, {
                'numbers': ['0'],
                'name': u'IX',
                'key': 'IX'
            }, {
                'numbers': ['200'],
                'name': u'VIII',
                'key': 'VIII'
            }, {
                'numbers': ['0'],
                'name': u'VII',
                'key': 'VII'
            }, {
                'numbers': ['0'],
                'name': u'VI',
                'key': 'VI'
            }, {
                'numbers': ['0'],
                'name': u'V',
                'key': 'V'
            }, {
                'numbers': ['0'],
                'name': u'IV',
                'key': 'IV'
            }, {
                'numbers': ['0'],
                'name': u'III',
                'key': 'III'
            }, {
                'numbers': ['0'],
                'name': u'II',
                'key': 'II'
            }, {
                'numbers': ['0'],
                'name': u'I',
                'key': 'I'
            }, {
                'as_header': True,
                'key': 'total_exposed_field',
                'name': u'Total Exposed',
                'numbers': ['200']
            }],
            'value_labels': [u'Count']
        }, {
            'header_label': u'Population',
            'rows': [{
                'numbers': ['200'],
                'name': u'Affected',
                'key': 'total_affected_field',
            }, {
                'key': 'total_not_affected_field',
                'name': u'Not Affected',
                'numbers': ['0']
            }, {
                'key': 'total_not_exposed_field',
                'name': u'Not Exposed',
                'numbers': ['0']
            }, {
                'numbers': ['200'],
                'name': u'Displaced',
                'key': 'displaced_field'
            }, {
                'numbers': ['0 - 100'],
                'name': u'Fatalities',
                'key': 'fatalities_field'
            }],
            'value_labels': [u'Count']
        }],
        'notes': [
            u'Exposed People: People who are present in hazard zones and '
            u'are thereby subject to potential losses. In InaSAFE, people '
            u'who are exposed are those people who are within the extent '
            u'of the hazard.',
            u'Affected People: People who are affected by a hazardous '
            u'event. People can be affected directly or indirectly. '
            u'Affected people may experience short-term or long-term '
            u'consequences to their lives, livelihoods or health and in '
            u'the economic, physical, social, cultural and environmental '
            u'assets. In InaSAFE, people who are killed during the event '
            u'are also considered affected.',
            u'Displaced People: Displaced people are people who, for '
            u'different reasons and circumstances because of risk or '
            u'disaster, have to leave their place of residence. '
            u'In InaSAFE, demographic and minimum needs reports are based '
            u'on displaced / evacuated people.'
        ]
    }
    actual_context = analysis_summary.context

    self.assertDictEqual(expected_context, actual_context)
    self.assertTrue(analysis_summary.output, empty_component_output_message)

    # Render the infographic report with the same impact function.
    report_metadata = ReportMetadata(metadata_dict=infographic_report)
    infographic_impact_report = ImpactReport(
        IFACE, report_metadata, impact_function=impact_function)
    infographic_impact_report.output_folder = output_folder
    return_code, message = infographic_impact_report.process_components()
    self.assertEqual(
        return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

    # check population pie chart if we have 100% donut slice
    population_chart_svg = (
        infographic_impact_report.metadata.component_by_key(
            population_chart_svg_component['key']))

    # Expected donut slices: the VIII slice covers 100% of the chart; the
    # value-100 / empty-label slice is presumably an internal spacer slice
    # emitted by the chart generator — TODO confirm against the chart code.
    expected_slices = [{
        'value': 200,
        'show_label': True,
        'center': (224.0, 128.0),
        'stroke_opacity': 1,
        'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                '0.000000,256.000000l-0.000000,-64.000000a64.000000,'
                '64.000000 0 0 0 0.000000,-128.000000Z',
        'percentage': 100,
        'label': u'VIII',
        'stroke': u'#ff7000',
        'label_position': (256, 0),
        'fill': u'#ff7000'
    }, {
        'value': 100,
        'show_label': False,
        'center': (32.0, 128.0),
        'stroke_opacity': 1,
        'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                '-0.000000,-256.000000l0.000000,64.000000a64.000000,'
                '64.000000 0 0 0 0.000000,128.000000Z',
        'percentage': 50.0,
        'label': '',
        'stroke': u'#ff7000',
        'label_position': (256, 0),
        'fill': u'#ff7000'
    }, {
        'value': 0,
        'show_label': False,
        'center': (128.0, 224.0),
        'stroke_opacity': 1,
        'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                '64.000000 0 0 0 0.000000,0.000000Z',
        'percentage': 0.0,
        'label': u'Total Not Affected',
        'stroke': '#fff',
        'label_position': (256, 0),
        'fill': u'#1a9641'
    }]

    actual_context = population_chart_svg.context['context']
    actual_slices = actual_context.slices

    self.assertEqual(expected_slices, actual_slices)
    self.assertTrue(population_chart_svg.output,
                    empty_component_output_message)

    # Clean up the generated report artifacts.
    shutil.rmtree(output_folder, ignore_errors=True)
def test_minimum_extent(self):
    """Test we can compute the minimum extent in the IF.

    Exercises analysis-extent computation in four configurations:
    without an aggregation layer, with a requested extent, with an
    aggregation layer ignoring selection, and with an aggregation layer
    honouring a selection. Expected extents are compared as WKT.
    """
    # Without aggregation layer
    hazard_layer = load_test_vector_layer(
        'hazard', 'flood_multipart_polygons.shp')
    exposure_layer = load_test_vector_layer('exposure', 'roads.shp')

    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)

    message = (
        'Test about the minimum extent without an aggregation layer is '
        'failing.')
    # Without aggregation, the analysis extent is the hazard/exposure
    # intersection.
    self.assertTrue(
        compare_wkt(
            'Polygon (('
            '106.8080099999999959 -6.19531000000000009, '
            '106.8080099999999959 -6.16752599999999962, '
            '106.83456946836641066 -6.16752599999999962, '
            '106.83456946836641066 -6.19531000000000009, '
            '106.8080099999999959 -6.19531000000000009))',
            impact_function.analysis_extent.exportToWkt()),
        message)

    # Without aggregation layer but with a requested_extent
    hazard_layer = load_test_vector_layer(
        'hazard', 'flood_multipart_polygons.shp')
    exposure_layer = load_test_vector_layer('exposure', 'roads.shp')

    impact_function = ImpactFunction()
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    # Requested extent covers the whole layers here, so the resulting
    # analysis extent matches the no-aggregation case above.
    impact_function.requested_extent = wkt_to_rectangle(
        'POLYGON (('
        '106.772279 -6.237576, '
        '106.772279 -6.165415, '
        '106.885165 -6.165415, '
        '106.885165 -6.237576, '
        '106.772279 -6.237576'
        '))')
    impact_function.requested_extent_crs = QgsCoordinateReferenceSystem(
        4326)
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)

    message = (
        'Test about the minimum extent without an aggregation layer but '
        'with a requested extent is failing.')
    self.assertTrue(
        compare_wkt(
            'Polygon (('
            '106.8080099999999959 -6.19531000000000009, '
            '106.8080099999999959 -6.16752599999999962, '
            '106.83456946836641066 -6.16752599999999962, '
            '106.83456946836641066 -6.19531000000000009, '
            '106.8080099999999959 -6.19531000000000009))',
            impact_function.analysis_extent.exportToWkt()),
        message)

    # With an aggregation layer, without selection
    hazard_layer = load_test_vector_layer(
        'gisv4', 'hazard', 'classified_vector.geojson')
    exposure_layer = load_test_vector_layer(
        'gisv4', 'exposure', 'building-points.geojson')
    aggregation_layer = load_test_vector_layer(
        'gisv4', 'aggregation', 'small_grid.geojson')

    impact_function = ImpactFunction()
    impact_function.aggregation = aggregation_layer
    impact_function.exposure = exposure_layer
    impact_function.hazard = hazard_layer
    impact_function.use_selected_features_only = False
    # A feature is selected, but with use_selected_features_only False
    # the selection must be ignored and the full grid used.
    impact_function.aggregation.select(0)
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)

    message = (
        'Test about the minimum extent with an aggregation layer is '
        'failing.')
    self.assertTrue(
        compare_wkt(
            'Polygon ((106.9033179652593617 -6.18324454090033182, '
            '106.90331796525939012 -6.2725478115989306, '
            '106.72365490843547775 -6.2725478115989306, '
            '106.72365490843547775 -6.18324645462287137, '
            '106.72365490843547775 -6.09392810187095257, '
            '106.81348643684744104 -6.09392810187095257, '
            '106.9033179652593617 -6.09392810187095257, '
            '106.9033179652593617 -6.18324454090033182))',
            impact_function.analysis_extent.exportToWkt()),
        message)

    # With an aggregation layer, with selection
    # The selection from the previous step is still active; now it must
    # shrink the extent to the selected feature only.
    impact_function.use_selected_features_only = True
    impact_function.aggregation = aggregation_layer
    status, message = impact_function.prepare()
    self.assertEqual(PREPARE_SUCCESS, status, message)

    message = (
        'Test about the minimum extent with an aggregation layer and '
        'a selection is failing.')
    self.assertTrue(
        compare_wkt(
            'Polygon ((106.72365490843547775 -6.09392810187095257, '
            '106.81348643684744104 -6.09392810187095257, '
            '106.81348643684744104 -6.18324645462287137, '
            '106.72365490843547775 -6.18324645462287137, '
            '106.72365490843547775 -6.09392810187095257))',
            impact_function.analysis_extent.exportToWkt()),
        message)