def test_size_post_processor(self):
    """Test the size and the size rate post processors."""
    # Clone to memory so the on-disk fixture is never modified.
    impact_layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    # The size rate post processor reads the exposure type from keywords,
    # so inject it on the fly.
    impact_layer.keywords['exposure_keywords'] = {'exposure': 'population'}
    self.assertIsNotNone(impact_layer)

    # Test the size post processor.
    result, message = run_single_post_processor(impact_layer, post_processor_size)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(size_field['field_name'], impact_fields)

    # Test the size rate post processor.
    result, message = run_single_post_processor(impact_layer, post_processor_size_rate)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(feature_value_field['field_name'], impact_fields)
def test_size_post_processor(self):
    """Test the size and the size rate post processors."""
    # Clone the fixture into memory so the source file stays untouched.
    layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    layer.keywords['exposure_keywords'] = {'exposure': 'population'}
    self.assertIsNotNone(layer)

    # Each processor must succeed and create its output field.
    expectations = [
        (post_processor_size, size_field),
        (post_processor_size_rate, feature_value_field),
    ]
    for processor, expected_field in expectations:
        succeeded, report = run_single_post_processor(layer, processor)
        self.assertTrue(succeeded, report)
        field_names = layer.dataProvider().fieldNameMap().keys()
        self.assertIn(expected_field['field_name'], field_names)
def minimum_needs(self, input_layer): """Compute minimum needs given a layer and a column containing pop. :param input_layer: Vector layer assumed to contain population counts. :type input_layer: QgsVectorLayer :returns: A tuple containing True and the vector layer if post processor success. Or False and an error message if something went wrong. :rtype: tuple(bool,QgsVectorLayer or basetring) """ # Create a new layer for output layer output_layer = self.prepare_new_layer(input_layer) # count each minimum needs for every features for needs in minimum_needs_post_processors: is_success, message = run_single_post_processor( output_layer, needs) # check if post processor not running successfully if not is_success: LOGGER.debug(message) display_critical_message_box( title=self.tr('Error while running post processor'), message=message) return False, None return True, output_layer
def minimum_needs(self, input_layer):
    """Compute minimum needs given a layer and a column containing pop.

    :param input_layer: Vector layer assumed to contain population counts.
    :type input_layer: QgsVectorLayer

    :returns: A tuple of (True, output layer) when every post processor
        succeeded, or (False, None) when one failed.
    :rtype: tuple(bool, QgsVectorLayer or None)
    """
    # Derive a fresh output layer from the input.
    result_layer = self.prepare_new_layer(input_layer)

    # Apply every minimum-needs post processor to the output layer.
    for post_processor in minimum_needs_post_processors:
        ok, error_message = run_single_post_processor(
            result_layer, post_processor)
        if ok:
            continue
        # A post processor failed: log it, tell the user, and bail out.
        LOGGER.debug(error_message)
        display_critical_message_box(
            title=self.tr('Error while running post processor'),
            message=error_message)
        return False, None

    return True, result_layer
def test_gender_post_processor(self):
    """Test gender post processor."""
    # Work on a memory clone of the impact fixture.
    layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    self.assertIsNotNone(layer)

    # Run the male then the female post processor; each must succeed.
    for processor in (post_processor_male, post_processor_female):
        succeeded, report = run_single_post_processor(layer, processor)
        self.assertTrue(succeeded, report)

    # Both displaced-count fields must now exist on the layer.
    field_names = layer.dataProvider().fieldNameMap().keys()
    self.assertIn(female_displaced_count_field['field_name'], field_names)
    self.assertIn(male_displaced_count_field['field_name'], field_names)
def test_additional_rice_post_processor(self):
    """Test additional rice post processor."""
    impact_layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    self.assertIsNotNone(impact_layer)

    # Need to run gender post processor first
    result, message = run_single_post_processor(impact_layer, post_processor_female)
    self.assertTrue(result, message)

    # Run the additional rice post processor.
    result, message = run_single_post_processor(
        impact_layer, post_processor_additional_rice)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(additional_rice_count_field['field_name'], impact_fields)
def test_weekly_hygiene_post_processor(self):
    """Test weekly hygiene post processor."""
    layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    self.assertIsNotNone(layer)

    # The female post processor has to run first so its output is
    # available to the hygiene packs post processor.
    succeeded, report = run_single_post_processor(
        layer, post_processor_female)
    self.assertTrue(succeeded, report)

    # Now run the hygiene packs post processor itself.
    succeeded, report = run_single_post_processor(
        layer, post_processor_hygiene_packs)
    self.assertTrue(succeeded, report)

    # The hygiene packs count field must now exist on the layer.
    field_names = layer.dataProvider().fieldNameMap().keys()
    self.assertIn(hygiene_packs_count_field['field_name'], field_names)
def test_gender_post_processor(self):
    """Test gender post processor."""
    # Clone to memory so the on-disk fixture is never modified.
    impact_layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    self.assertIsNotNone(impact_layer)

    # Run the male post processor.
    result, message = run_single_post_processor(
        impact_layer, post_processor_male)
    self.assertTrue(result, message)

    # Run the female post processor.
    result, message = run_single_post_processor(
        impact_layer, post_processor_female)
    self.assertTrue(result, message)

    # Check if new fields were added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(
        female_displaced_count_field['field_name'], impact_fields)
    self.assertIn(
        male_displaced_count_field['field_name'], impact_fields)
def test_affected_post_processor(self):
    """Test affected post processor."""
    layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)

    # The affected post processor needs the hazard classification
    # keyword, so add it on the fly.
    layer.keywords['hazard_keywords'] = {
        'classification': 'flood_hazard_classes'}

    succeeded, report = run_single_post_processor(
        layer, post_processor_affected)
    self.assertTrue(succeeded, report)

    # The affected field must now exist on the layer.
    field_names = layer.dataProvider().fieldNameMap().keys()
    self.assertIn(affected_field['field_name'], field_names)
def test_affected_post_processor(self):
    """Test affected post processor."""
    # Clone to memory so the on-disk fixture is never modified.
    impact_layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)

    # Need to add keywords on the fly.
    impact_layer.keywords['hazard_keywords'] = {
        'classification': 'flood_hazard_classes'
    }

    result, message = run_single_post_processor(
        impact_layer, post_processor_affected)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(affected_field['field_name'], impact_fields)
def prepare_vector_layer(layer, callback=None):
    """This function will prepare the layer to be used in InaSAFE:

    * Make a local copy of the layer.
    * Make sure that we have an InaSAFE ID column.
    * Rename fields according to our definitions.
    * Remove fields which are not used.

    :param layer: The layer to prepare.
    :type layer: QgsVectorLayer

    :param callback: A function to call to indicate progress. The function
        should accept params 'current' (int), 'maximum' (int) and 'step'
        (str). Defaults to None. Currently unused; kept for interface
        compatibility.
    :type callback: function

    :return: Cleaned memory layer.
    :rtype: QgsVectorLayer

    :raises InvalidKeywordsForProcessingAlgorithm: If the layer keywords
        do not contain 'inasafe_fields'.
    :raises NoFeaturesInExtentError: If no feature remains after cleaning.

    .. versionadded:: 4.0
    """
    output_layer_name = prepare_vector_steps['output_layer_name']
    output_layer_name = output_layer_name % layer.keywords['layer_purpose']

    if not layer.keywords.get('inasafe_fields'):
        msg = 'inasafe_fields is missing in keywords from %s' % layer.name()
        raise InvalidKeywordsForProcessingAlgorithm(msg)

    # Work on a memory copy so the source layer is left untouched.
    cleaned = create_memory_layer(
        output_layer_name, layer.geometryType(), layer.crs(), layer.fields())

    # We transfer keywords to the output.
    cleaned.keywords = layer.keywords

    copy_layer(layer, cleaned)
    _remove_features(cleaned)

    # After removing rows, let's check if there is still a feature.
    request = QgsFeatureRequest().setFlags(QgsFeatureRequest.NoGeometry)
    iterator = cleaned.getFeatures(request)
    try:
        next(iterator)
    except StopIteration:
        LOGGER.warning(
            tr('No feature has been found in the {purpose}'
               .format(purpose=layer.keywords['layer_purpose'])))
        raise NoFeaturesInExtentError

    _add_id_column(cleaned)
    rename_remove_inasafe_fields(cleaned)

    if _size_is_needed(cleaned):
        LOGGER.info(
            'We noticed some counts in your exposure layer. Before to update '
            'geometries, we compute the original size for each feature.')
        run_single_post_processor(cleaned, post_processor_size)

    if cleaned.keywords['layer_purpose'] == 'exposure':
        fields = cleaned.keywords['inasafe_fields']
        if exposure_type_field['key'] not in fields:
            _add_default_exposure_class(cleaned)

        # Check value mapping
        _check_value_mapping(cleaned)

    cleaned.keywords['title'] = output_layer_name

    check_layer(cleaned)
    return cleaned
def prepare_vector_layer(layer, callback=None):
    """This function will prepare the layer to be used in InaSAFE:

    * Make a local copy of the layer.
    * Make sure that we have an InaSAFE ID column.
    * Rename fields according to our definitions.
    * Remove fields which are not used.

    :param layer: The layer to prepare.
    :type layer: QgsVectorLayer

    :param callback: A function to call to indicate progress. The function
        should accept params 'current' (int), 'maximum' (int) and 'step'
        (str). Defaults to None.
    :type callback: function

    :return: Cleaned memory layer.
    :rtype: QgsVectorLayer

    :raises InvalidKeywordsForProcessingAlgorithm: If the layer keywords
        do not contain 'inasafe_fields'.
    :raises NoFeaturesInExtentError: If no feature remains after cleaning.

    .. versionadded:: 4.0
    """
    output_layer_name = prepare_vector_steps['output_layer_name']
    output_layer_name = output_layer_name % layer.keywords['layer_purpose']
    # NOTE(review): 'processing_step' is never used below — presumably
    # reserved for callback progress reporting; confirm before removing.
    processing_step = prepare_vector_steps['step_name']

    if not layer.keywords.get('inasafe_fields'):
        msg = 'inasafe_fields is missing in keywords from %s' % layer.name()
        raise InvalidKeywordsForProcessingAlgorithm(msg)

    # NOTE(review): 'feature_count' is also unused in this function.
    feature_count = layer.featureCount()

    # Work on a memory copy so the source layer is left untouched.
    cleaned = create_memory_layer(
        output_layer_name, layer.geometryType(), layer.crs(), layer.fields())

    # We transfer keywords to the output.
    cleaned.keywords = layer.keywords

    copy_layer(layer, cleaned)
    _remove_features(cleaned)

    # After removing rows, let's check if there is still a feature.
    request = QgsFeatureRequest().setFlags(QgsFeatureRequest.NoGeometry)
    iterator = cleaned.getFeatures(request)
    try:
        next(iterator)
    except StopIteration:
        LOGGER.warning(
            tr('No feature has been found in the {purpose}'
               .format(purpose=layer.keywords['layer_purpose'])))
        raise NoFeaturesInExtentError

    _add_id_column(cleaned)
    clean_inasafe_fields(cleaned)

    if _size_is_needed(cleaned):
        LOGGER.info(
            'We noticed some counts in your exposure layer. Before to update '
            'geometries, we compute the original size for each feature.')
        run_single_post_processor(cleaned, post_processor_size)

    if cleaned.keywords['layer_purpose'] == 'exposure':
        fields = cleaned.keywords['inasafe_fields']
        if exposure_type_field['key'] not in fields:
            _add_default_exposure_class(cleaned)

        # Check value mapping
        _check_value_mapping(cleaned)

    cleaned.keywords['title'] = output_layer_name

    check_layer(cleaned)
    return cleaned
def test_productivity_post_processors(self):
    """Test for productivity, prod cost, and prod value post processors."""
    # Clone to memory so the on-disk fixture is never modified.
    impact_layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    # The size rate post processor reads the exposure type from keywords.
    impact_layer.keywords['exposure_keywords'] = {'exposure': 'population'}
    self.assertIsNotNone(impact_layer)

    # Test the size post processor. It must run first because the
    # productivity values below are checked against the size field.
    result, message = run_single_post_processor(impact_layer, post_processor_size)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(size_field['field_name'], impact_fields)

    # Test for productivity rate
    result, message = run_single_post_processor(
        impact_layer, post_processor_productivity)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(productivity_field['field_name'], impact_fields)

    # Test for production cost rate
    result, message = run_single_post_processor(
        impact_layer, post_processor_production_cost)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(production_cost_field['field_name'], impact_fields)

    # Test for production value rate
    result, message = run_single_post_processor(
        impact_layer, post_processor_production_value)
    self.assertTrue(result, message)

    # Check if new field is added
    impact_fields = impact_layer.dataProvider().fieldNameMap().keys()
    self.assertIn(production_value_field['field_name'], impact_fields)

    # Checking the values: each output equals size times its rate.
    for feature in impact_layer.getFeatures():
        size_value = feature.attribute(size_field['field_name'])

        # Inputs
        productivity_rate = feature.attribute(
            productivity_rate_field['field_name'])
        production_cost_rate = feature.attribute(
            production_cost_rate_field['field_name'])
        production_value_rate = feature.attribute(
            production_value_rate_field['field_name'])

        # result
        productivity = feature.attribute(productivity_field['field_name'])
        production_cost = feature.attribute(
            production_cost_field['field_name'])
        production_value = feature.attribute(
            production_value_field['field_name'])

        # Checking
        self.assertAlmostEqual(productivity, size_value * productivity_rate)
        self.assertAlmostEqual(production_cost, size_value * production_cost_rate)
        self.assertAlmostEqual(production_value, size_value * production_value_rate)
def test_productivity_post_processors(self):
    """Test for productivity, prod cost, and prod value post processors."""
    layer = load_test_vector_layer(
        'impact', 'indivisible_polygon_impact.geojson', clone_to_memory=True)
    layer.keywords['exposure_keywords'] = {
        'exposure': exposure_population['key']}
    self.assertIsNotNone(layer)

    def run_and_check(post_processor, output_field):
        # Run one post processor and verify its output field was created.
        succeeded, report = run_single_post_processor(layer, post_processor)
        self.assertTrue(succeeded, report)
        field_names = list(layer.dataProvider().fieldNameMap().keys())
        self.assertIn(output_field['field_name'], field_names)

    # Size must run first: the checks below compare against the size field.
    run_and_check(post_processor_size, size_field)
    run_and_check(post_processor_productivity, productivity_field)
    run_and_check(post_processor_production_cost, production_cost_field)
    run_and_check(post_processor_production_value, production_value_field)

    # Every feature's output must equal its size times the matching rate.
    rate_to_output = (
        (productivity_rate_field, productivity_field),
        (production_cost_rate_field, production_cost_field),
        (production_value_rate_field, production_value_field),
    )
    for feature in layer.getFeatures():
        area = feature.attribute(size_field['field_name'])
        for rate_field, output_field in rate_to_output:
            rate = feature.attribute(rate_field['field_name'])
            value = feature.attribute(output_field['field_name'])
            self.assertAlmostEqual(value, area * rate)