Example #1
    def test_analysis_earthquake_summary(self):
        """Test we can compute summary after an EQ on population."""
        hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
        exposure = load_test_raster_layer('gisv4', 'exposure', 'raster',
                                          'population.asc')
        aggregation = load_test_vector_layer('gisv4', 'aggregation',
                                             'small_grid.geojson')

        impact_function = ImpactFunction()
        impact_function.hazard = hazard
        impact_function.exposure = exposure
        impact_function.aggregation = aggregation
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)
        status, message = impact_function.run()
        self.assertEqual(ANALYSIS_SUCCESS, status, message)

        layer = impact_function.analysis_impacted
        classification = hazard.keywords['classification']
        classes = definition(classification)['classes']
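        # Each hazard class should have its own count field in the summary
        # layer, named via the hazard_count_field template and class key.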
        for hazard_class in classes:
            field_name = hazard_count_field['field_name'] % hazard_class['key']
            message = '%s is not found in the EQ summary layer.' % field_name
            self.assertNotEqual(-1, layer.fieldNameIndex(field_name), message)

        check_inasafe_fields(impact_function.analysis_impacted)
        check_inasafe_fields(impact_function.aggregation_summary)
    def test_pre_processors_nearby_places(self):
        """Test the pre_processors_nearby_places"""
        hazard_layer = load_test_raster_layer('gisv4', 'hazard',
                                              'earthquake.asc')
        exposure_layer = load_test_vector_layer('gisv4', 'exposure',
                                                'building-points.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        # The exposure is buildings, not places.
        self.assertFalse(
            pre_processors_nearby_places['condition'](impact_function))

        hazard_layer = load_test_raster_layer('gisv4', 'hazard',
                                              'earthquake.asc')
        exposure_layer = load_test_vector_layer('gisv4', 'exposure',
                                                'places.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        # EQ on places, it must be OK.
        self.assertTrue(
            pre_processors_nearby_places['condition'](impact_function))
    def test_pre_processors_nearby_places(self):
        """Test the pre_processors_nearby_places"""
        hazard_layer = load_test_raster_layer(
            'gisv4', 'hazard', 'earthquake.asc')
        exposure_layer = load_test_vector_layer(
            'gisv4', 'exposure', 'building-points.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        # The exposure is buildings, not places.
        self.assertFalse(
            pre_processors_nearby_places['condition'](impact_function))

        hazard_layer = load_test_raster_layer(
            'gisv4', 'hazard', 'earthquake.asc')
        exposure_layer = load_test_vector_layer(
            'gisv4', 'exposure', 'places.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        # EQ on places, it must be OK.
        self.assertTrue(
            pre_processors_nearby_places['condition'](impact_function))
Example #4
    def test_analysis_earthquake_summary(self):
        """Test we can compute summary after an EQ on population."""
        hazard = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
        exposure = load_test_raster_layer(
            'gisv4', 'exposure', 'raster', 'population.asc')
        aggregation = load_test_vector_layer(
            'gisv4', 'aggregation', 'small_grid.geojson')

        impact_function = ImpactFunction()
        impact_function.hazard = hazard
        impact_function.exposure = exposure
        impact_function.aggregation = aggregation
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)
        status, message = impact_function.run()
        self.assertEqual(ANALYSIS_SUCCESS, status, message)

        layer = impact_function.analysis_impacted
        classification = hazard.keywords['classification']
        classes = definition(classification)['classes']
        for hazard_class in classes:
            field_name = hazard_count_field['field_name'] % hazard_class['key']
            message = '%s is not found in the EQ summary layer.' % field_name
            self.assertNotEqual(-1, layer.fieldNameIndex(field_name), message)

        check_inasafe_fields(impact_function.analysis_impacted)
        check_inasafe_fields(impact_function.aggregation_summary)
    def test_pre_processors_earthquake_contour(self):
        """Test the pre_processors_earthquake_contour"""
        hazard_layer = load_test_raster_layer('gisv4', 'hazard',
                                              'earthquake.asc')
        exposure_layer = load_test_vector_layer('gisv4', 'exposure',
                                                'building-points.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        self.assertTrue(
            pre_processor_earthquake_contour['condition'](impact_function))

        hazard_layer = load_test_raster_layer('hazard',
                                              'classified_flood_20_20.asc')
        exposure_layer = load_test_vector_layer('gisv4', 'exposure',
                                                'places.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        # not ok, since the hazard is flood, not earthquake
        self.assertFalse(
            pre_processor_earthquake_contour['condition'](impact_function))
    def test_pre_processors_earthquake_contour(self):
        """Test the pre_processors_earthquake_contour"""
        hazard_layer = load_test_raster_layer(
            'gisv4', 'hazard', 'earthquake.asc')
        exposure_layer = load_test_vector_layer(
            'gisv4', 'exposure', 'building-points.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        self.assertTrue(
            pre_processor_earthquake_contour['condition'](impact_function))

        hazard_layer = load_test_raster_layer(
            'hazard', 'classified_flood_20_20.asc')
        exposure_layer = load_test_vector_layer(
            'gisv4', 'exposure', 'places.geojson')
        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)

        # not ok, since the hazard is flood, not earthquake
        self.assertFalse(
            pre_processor_earthquake_contour['condition'](impact_function))
    def test_raster_post_minimum_needs_value_generation(self):
        """Test minimum needs postprocessors on raster exposure.

        Minimum needs postprocessors are defined to only generate values
        when the exposure contains population data.
        This is especially important to test, since with a raster exposure
        the population field is generated on the fly.
        The postprocessors need to expect that the generated population
        field exists.
        """

        # # #
        # Test with raster exposure data where population_exposure_count
        # exists.
        # # #

        hazard_layer = load_test_raster_layer(
            'hazard', 'tsunami_wgs84.tif')
        exposure_layer = load_test_raster_layer(
            'exposure', 'pop_binary_raster_20_20.asc')

        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.prepare()
        return_code, message = impact_function.run()

        self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

        # Minimum needs fields should exist in the results
        self._check_minimum_fields_exists(impact_function)

        # TODO: should include demographic postprocessor value too
        expected_value = {
            u'total_affected': 9.208200000039128,
            u'minimum_needs__rice': 25,
            u'minimum_needs__toilets': 0,
            u'minimum_needs__drinking_water': 161,
            u'minimum_needs__clean_water': 616,
            u'male': 4,
            u'female': 4,
            u'youth': 2,
            u'adult': 6,
            u'elderly': 0,
            u'total': 162.7667000000474,
            u'minimum_needs__family_kits': 1,
            u'total_not_affected': 153.55850000000828,
        }

        self._check_minimum_fields_value(expected_value, impact_function)
Example #9
    def test_polygonize_raster(self):
        """Test we can polygonize a raster layer."""
        layer = load_test_raster_layer('hazard', 'classified_flood_20_20.asc')

        expected_keywords = layer.keywords.copy()
        title = polygonize_steps['output_layer_name'] % (
            layer.keywords['layer_purpose'])
        expected_keywords[
            layer_geometry['key']] = layer_geometry_polygon['key']
        expected_keywords['title'] = title

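        # Note: the field name is truncated to 10 characters, matching the
        # ESRI Shapefile field name limit (the polygonized output is
        # presumably written to a shapefile).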
        expected_keywords['inasafe_fields'] = {
            hazard_value_field['key']: hazard_value_field['field_name'][0:10]}

        polygonized = polygonize(layer)

        self.assertDictEqual(polygonized.keywords, expected_keywords)

        self.assertEqual(polygonized.featureCount(), 400)

        expected_count = {
            '1': 133,
            '2': 134,
            '3': 133,
        }

        inasafe_fields = polygonized.keywords.get('inasafe_fields')
        field_name = inasafe_fields.get(hazard_value_field['key'])

        for value, count in expected_count.iteritems():
            expression = '"%s" = \'%s\'' % (field_name, value)
            request = QgsFeatureRequest().setFilterExpression(expression)
            self.assertEqual(
                sum(1 for _ in polygonized.getFeatures(request)), count)
Example #11
    def test_smoothing(self):
        """Test smoothing method."""
        # Load shake map layer
        shakemap_layer = load_test_raster_layer(
            'hazard', 'shake_data', '20131105060809', 'output',
            'grid-use_ascii.tif')
        smoothed_shakemap_path = smooth_shakemap(shakemap_layer.source())
        self.assertTrue(os.path.exists(smoothed_shakemap_path))
Example #12
    def test_clip_raster(self):
        """Test we can clip a raster layer."""
        layer = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
        expected = QgsRectangle(106.75, -6.2, 106.80, -6.1)
        new_layer = clip_by_extent(layer, expected)

        extent = new_layer.extent()
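        # The third positional argument of assertAlmostEqual is places=0, so
        # each bound only has to match once the difference is rounded to
        # zero decimal places.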
        self.assertAlmostEqual(expected.xMinimum(), extent.xMinimum(), 0)
        self.assertAlmostEqual(expected.xMaximum(), extent.xMaximum(), 0)
        self.assertAlmostEqual(expected.yMinimum(), extent.yMinimum(), 0)
        self.assertAlmostEqual(expected.yMaximum(), extent.yMaximum(), 0)
Example #13
    def test_smoothing(self):
        """Test smoothing method."""
        # Load shake map layer
        shakemap_layer = load_test_raster_layer(
            'hazard',
            'shake_data',
            '20131105060809',
            'output',
            'grid-use_ascii.tif')
        smoothed_shakemap_path = smooth_shakemap(shakemap_layer.source())
        self.assertTrue(os.path.exists(smoothed_shakemap_path))
Example #15
    def test_zonal_statistics_raster(self):
        """Test we can do zonal statistics."""
        # Same projection
        raster = load_test_raster_layer(
            'exposure', 'pop_binary_raster_20_20.asc')
        raster.keywords['inasafe_default_values'] = {}
        vector = load_test_vector_layer(
            'aggregation', 'grid_jakarta_4326.geojson')

        vector.keywords['hazard_keywords'] = {}
        vector.keywords['aggregation_keywords'] = {}

        number_fields = vector.fields().count()
        vector = zonal_stats(raster, vector)

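        # zonal_stats is expected to append exactly one field to the
        # aggregation layer (a sum of the raster values per polygon),
        # hence the + 1 below.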
        self.assertEqual(vector.fields().count(), number_fields + 1)
        self.assertEqual(vector.geometryType(), QgsWkbTypes.PolygonGeometry)

        # With different projections
        raster = load_test_raster_layer(
            'exposure', 'pop_binary_raster_20_20.asc')
        raster.keywords['inasafe_default_values'] = {}
        vector_b = load_test_vector_layer(
            'aggregation', 'grid_jakarta_4326.geojson')

        vector_b.keywords['hazard_keywords'] = {}
        vector_b.keywords['aggregation_keywords'] = {}

        number_fields = vector_b.fields().count()
        vector_b = reproject(vector, QgsCoordinateReferenceSystem(3857))
        vector_b = zonal_stats(raster, vector_b)

        self.assertEqual(vector_b.fields().count(), number_fields + 1)
        self.assertEqual(vector_b.geometryType(), QgsWkbTypes.PolygonGeometry)

        # We compare the results between these 2 zonal stats.
        for feature_a, feature_b in zip(
                vector.getFeatures(), vector_b.getFeatures()):
            self.assertEqual(feature_a.attributes(), feature_b.attributes())
Example #16
    def test_contour(self):
        """Test create contour"""
        output_file_path = unique_filename(suffix='-contour.shp')
        # Load shake map layer
        shakemap_layer = load_test_raster_layer(
            'hazard', 'shake_data', '20131105060809', 'output',
            'grid-use_ascii.tif')
        create_smooth_contour(
            shakemap_layer, output_file_path=output_file_path)
        self.assertTrue(os.path.exists(output_file_path))
        ext = os.path.splitext(output_file_path)[1]
        metadata_path = output_file_path.replace(ext, '.xml')
        self.assertTrue(os.path.exists(metadata_path))
        self.assertTrue(metadata_path.endswith('.xml'))
Example #17
    def test_zonal_statistics_raster(self):
        """Test we can do zonal statistics."""
        raster = load_test_raster_layer('exposure',
                                        'pop_binary_raster_20_20.asc')
        raster.keywords['inasafe_default_values'] = {}
        vector = load_test_vector_layer('aggregation',
                                        'grid_jakarta_4326.geojson')

        vector.keywords['hazard_keywords'] = {}
        vector.keywords['aggregation_keywords'] = {}

        number_fields = vector.fields().count()
        vector = zonal_stats(raster, vector)

        self.assertEqual(vector.fields().count(), number_fields + 1)
        self.assertEqual(vector.geometryType(), QGis.Polygon)
    def test_zonal_statistics_raster(self):
        """Test we can do zonal statistics."""
        raster = load_test_raster_layer(
            'exposure', 'pop_binary_raster_20_20.asc')
        raster.keywords['inasafe_default_values'] = {}
        vector = load_test_vector_layer(
            'aggregation', 'grid_jakarta_4326.geojson')

        vector.keywords['hazard_keywords'] = {}
        vector.keywords['aggregation_keywords'] = {}

        number_fields = vector.fields().count()
        vector = zonal_stats(raster, vector)

        self.assertEqual(vector.fields().count(), number_fields + 1)
        self.assertEqual(vector.geometryType(), QGis.Polygon)
Example #19
    def test_contour(self):
        """Test create contour"""
        output_file_path = unique_filename(suffix='-contour.shp')
        # Load shake map layer
        shakemap_layer = load_test_raster_layer(
            'hazard',
            'shake_data',
            '20131105060809',
            'output',
            'grid-use_ascii.tif')
        create_smooth_contour(
            shakemap_layer,
            output_file_path=output_file_path)
        self.assertTrue(os.path.exists(output_file_path))
        ext = os.path.splitext(output_file_path)[1]
        metadata_path = output_file_path.replace(ext, '.xml')
        self.assertTrue(os.path.exists(metadata_path))
        self.assertTrue(metadata_path.endswith('.xml'))
Example #20
    def test_reclassify_raster(self):
        """Test we can reclassify a raster layer."""
        layer = load_test_raster_layer('hazard', 'continuous_flood_20_20.asc')

        classes = {
            'low': [None, 0.2],  # value <= 0.2
            'medium': [0.2, 1],  # 0.2 < value <= 1
            'high': [1, None],  # 1 < value
        }

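        # Thresholds are nested per exposure key, then per classification key.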
        ranges = {
            exposure_structure['key']: {
                generic_hazard_classes['key']: {
                    'active': True,
                    'classes': classes
                }
            }
        }

        layer.keywords['thresholds'] = ranges

        expected_keywords = layer.keywords.copy()
        title = reclassify_raster_steps['output_layer_name'] % (
            layer.keywords['layer_purpose'])
        expected_keywords['layer_mode'] = 'classified'
        expected_keywords['value_map'] = {
            'high': [3],
            'low': [1],
            'medium': [2]
        }
        expected_keywords['title'] = title
        expected_keywords['classification'] = generic_hazard_classes['key']
        expected_keywords['thresholds'] = classes

        reclassified = reclassify(layer, exposure_structure['key'])

        self.assertDictEqual(reclassified.keywords, expected_keywords)

        stats = reclassified.dataProvider().bandStatistics(
            1, QgsRasterBandStats.Min | QgsRasterBandStats.Max)
        self.assertEqual(stats.minimumValue, 1.0)
        self.assertEqual(stats.maximumValue, 3.0)
    def test_earthquake_population_without_aggregation(self):
        """Testing Earthquake in Population without aggregation.

        .. versionadded:: 4.0
        """
        output_folder = self.fixtures_dir('../output/earthquake_population')

        # Clean up any outputs from a previous run.
        shutil.rmtree(output_folder, ignore_errors=True)

        hazard_layer = load_test_raster_layer(
            'hazard', 'earthquake.tif')
        exposure_layer = load_test_raster_layer(
            'exposure', 'pop_binary_raster_20_20.asc')

        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.prepare()
        return_code, message = impact_function.run()

        self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

        report_metadata = ReportMetadata(
            metadata_dict=standard_impact_report_metadata_html)

        impact_report = ImpactReport(
            IFACE,
            report_metadata,
            impact_function=impact_function)
        impact_report.output_folder = output_folder
        return_code, message = impact_report.process_components()

        self.assertEqual(
            return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

        """Checking generated context"""
        empty_component_output_message = 'Empty component output'

        # Check Analysis Summary
        analysis_summary = impact_report.metadata.component_by_key(
            general_report_component['key'])
        """:type: safe.report.report_metadata.Jinja2ComponentsMetadata"""

        expected_context = {
            'table_header': u'Estimated Number of people',
            'header': u'General Report',
            'summary': [
                {
                    'header_label': u'Hazard Zone',
                    'rows': [{'value': '0', 'name': u'X', 'key': 'X'},
                             {'value': '0', 'name': u'IX', 'key': 'IX'},
                             {'value': '200', 'name': u'VIII', 'key': 'VIII'},
                             {'value': '0', 'name': u'VII', 'key': 'VII'},
                             {'value': '0', 'name': u'VI', 'key': 'VI'},
                             {'value': '0', 'name': u'V', 'key': 'V'},
                             {'value': '0', 'name': u'IV', 'key': 'IV'},
                             {'value': '0', 'name': u'III', 'key': 'III'},
                             {'value': '0', 'name': u'II', 'key': 'II'},
                             {'value': '0', 'name': u'I', 'key': 'I'}],
                    'value_label': u'Count'
                },
                {
                    'header_label': u'Population',
                    'rows': [{'value': '200',
                              'name': u'Displaced',
                              'key':
                                  'displaced_field'},
                             {'value': '0 - 100',
                              'name':
                                  u'Fatalities',
                              'key':
                                  'fatalities_field'}],
                    'value_label': u'Count'
                }
            ]
        }
        actual_context = analysis_summary.context

        self.assertDictEqual(expected_context, actual_context)
        self.assertTrue(
            analysis_summary.output, empty_component_output_message)

        # check population pie chart if we have 100% donut slice
        population_chart_svg = impact_report.metadata.component_by_key(
            population_chart_svg_component['key'])

        expected_slices = [
            {'value': 0, 'show_label': False, 'center': (128.0, 32.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'X', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#dd0000'},
            {'value': 0, 'show_label': False, 'center': (128.0, 32.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'IX', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#ff0000'},
            {'value': 200, 'show_label': True, 'center': (224.0, 128.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,256.000000l-0.000000,-64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,-128.000000Z',
             'percentage': 100, 'label': u'VIII', 'stroke': u'#ff7000',
             'label_position': (256, 0), 'fill': u'#ff7000'},
            {'value': 100, 'show_label': False, 'center': (32.0, 128.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                     '-0.000000,-256.000000l0.000000,64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,128.000000Z',
             'percentage': 50.0, 'label': '', 'stroke': u'#ff7000',
             'label_position': (256, 0), 'fill': u'#ff7000'},
            {'value': 0, 'show_label': False, 'center': (128.0, 224.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'VII', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#ffa800'},
            {'value': 0, 'show_label': False, 'center': (128.0, 224.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'VI', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#fff000'},
            {'value': 0, 'show_label': False, 'center': (128.0, 224.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'V', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#aaffff'},
            {'value': 0, 'show_label': False, 'center': (128.0, 224.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'IV', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#55ffff'},
            {'value': 0, 'show_label': False, 'center': (128.0, 224.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'III', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#00cfff'},
            {'value': 0, 'show_label': False, 'center': (128.0, 224.0),
             'stroke_opacity': 1,
             'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                     '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                     '64.000000 0 0 0 0.000000,0.000000Z',
             'percentage': 0.0, 'label': u'II', 'stroke': '#fff',
             'label_position': (256, 0), 'fill': u'#209fff'}]

        actual_context = population_chart_svg.context['context']
        actual_slices = actual_context.slices

        self.assertEqual(expected_slices, actual_slices)
        self.assertTrue(
            population_chart_svg.output, empty_component_output_message)

        shutil.rmtree(output_folder, ignore_errors=True)
    def test_raster_y_inverted(self):
        """Test if we can detect an upside-down raster."""
        # Ideally we would also test with an inverted raster, but that case
        # is unusual so it is not covered here.
        layer = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
        self.assertFalse(is_raster_y_inverted(layer))
    def test_earthquake_population_without_aggregation(self):
        """Testing Earthquake in Population without aggregation.

        .. versionadded:: 4.0
        """
        output_folder = self.fixtures_dir('../output/earthquake_population')

        # Clean up any outputs from a previous run.
        shutil.rmtree(output_folder, ignore_errors=True)

        hazard_layer = load_test_raster_layer('hazard', 'earthquake.tif')
        exposure_layer = load_test_raster_layer('exposure',
                                                'pop_binary_raster_20_20.asc')

        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.crs = QgsCoordinateReferenceSystem(4326)
        impact_function.prepare()
        return_code, message = impact_function.run()

        self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

        report_metadata = ReportMetadata(
            metadata_dict=standard_impact_report_metadata_html)

        impact_report = ImpactReport(IFACE,
                                     report_metadata,
                                     impact_function=impact_function)
        impact_report.output_folder = output_folder
        return_code, message = impact_report.process_components()

        self.assertEqual(return_code, ImpactReport.REPORT_GENERATION_SUCCESS,
                         message)
        """Checking generated context."""
        empty_component_output_message = 'Empty component output'

        # Check Analysis Summary
        analysis_summary = impact_report.metadata.component_by_key(
            general_report_component['key'])
        """:type: safe.report.report_metadata.Jinja2ComponentsMetadata"""

        expected_context = {
            'table_header':
            (u'Estimated Number of people affected per MMI intensity'),
            'header':
            u'General Report',
            'summary': [{
                'header_label':
                u'Hazard Zone',
                'rows': [{
                    'numbers': ['0'],
                    'name': u'X',
                    'key': 'X'
                }, {
                    'numbers': ['0'],
                    'name': u'IX',
                    'key': 'IX'
                }, {
                    'numbers': ['200'],
                    'name': u'VIII',
                    'key': 'VIII'
                }, {
                    'numbers': ['0'],
                    'name': u'VII',
                    'key': 'VII'
                }, {
                    'numbers': ['0'],
                    'name': u'VI',
                    'key': 'VI'
                }, {
                    'numbers': ['0'],
                    'name': u'V',
                    'key': 'V'
                }, {
                    'numbers': ['0'],
                    'name': u'IV',
                    'key': 'IV'
                }, {
                    'numbers': ['0'],
                    'name': u'III',
                    'key': 'III'
                }, {
                    'numbers': ['0'],
                    'name': u'II',
                    'key': 'II'
                }, {
                    'numbers': ['0'],
                    'name': u'I',
                    'key': 'I'
                }, {
                    'as_header': True,
                    'key': 'total_exposed_field',
                    'name': u'Total Exposed',
                    'numbers': ['200']
                }],
                'value_labels': [u'Count']
            }, {
                'header_label':
                u'Population',
                'rows': [{
                    'numbers': ['200'],
                    'name': u'Affected',
                    'key': 'total_affected_field',
                }, {
                    'key': 'total_not_affected_field',
                    'name': u'Not Affected',
                    'numbers': ['0']
                }, {
                    'key': 'total_not_exposed_field',
                    'name': u'Not Exposed',
                    'numbers': ['0']
                }, {
                    'numbers': ['200'],
                    'name': u'Displaced',
                    'key': 'displaced_field'
                }, {
                    'numbers': ['0 - 100'],
                    'name': u'Fatalities',
                    'key': 'fatalities_field'
                }],
                'value_labels': [u'Count']
            }],
            'notes': [
                u'Exposed People: People who are present in hazard zones and '
                u'are thereby subject to potential losses. In InaSAFE, people '
                u'who are exposed are those people who are within the extent '
                u'of the hazard.',
                u'Affected People: People who are affected by a hazardous '
                u'event. People can be affected directly or indirectly. '
                u'Affected people may experience short-term or long-term '
                u'consequences to their lives, livelihoods or health and in '
                u'the economic, physical, social, cultural and environmental '
                u'assets. In InaSAFE, people who are killed during the event '
                u'are also considered affected.',
                u'Displaced People: Displaced people are people who, for '
                u'different reasons and circumstances because of risk or '
                u'disaster, have to leave their place of residence. '
                u'In InaSAFE, demographic and minimum needs reports are based '
                u'on displaced / evacuated people.'
            ]
        }
        actual_context = analysis_summary.context

        self.assertDictEqual(expected_context, actual_context)
        self.assertTrue(analysis_summary.output,
                        empty_component_output_message)

        report_metadata = ReportMetadata(metadata_dict=infographic_report)
        infographic_impact_report = ImpactReport(
            IFACE, report_metadata, impact_function=impact_function)

        infographic_impact_report.output_folder = output_folder
        return_code, message = infographic_impact_report.process_components()

        self.assertEqual(return_code, ImpactReport.REPORT_GENERATION_SUCCESS,
                         message)

        # check population pie chart if we have 100% donut slice
        population_chart_svg = (
            infographic_impact_report.metadata.component_by_key(
                population_chart_svg_component['key']))

        expected_slices = [{
            'value':
            200,
            'show_label':
            True,
            'center': (224.0, 128.0),
            'stroke_opacity':
            1,
            'path':
            'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
            '0.000000,256.000000l-0.000000,-64.000000a64.000000,'
            '64.000000 0 0 0 0.000000,-128.000000Z',
            'percentage':
            100,
            'label':
            u'VIII',
            'stroke':
            u'#ff7000',
            'label_position': (256, 0),
            'fill':
            u'#ff7000'
        }, {
            'value':
            100,
            'show_label':
            False,
            'center': (32.0, 128.0),
            'stroke_opacity':
            1,
            'path':
            'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
            '-0.000000,-256.000000l0.000000,64.000000a64.000000,'
            '64.000000 0 0 0 0.000000,128.000000Z',
            'percentage':
            50.0,
            'label':
            '',
            'stroke':
            u'#ff7000',
            'label_position': (256, 0),
            'fill':
            u'#ff7000'
        }, {
            'value':
            0,
            'show_label':
            False,
            'center': (128.0, 224.0),
            'stroke_opacity':
            1,
            'path':
            'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
            '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
            '64.000000 0 0 0 0.000000,0.000000Z',
            'percentage':
            0.0,
            'label':
            u'Total Not Affected',
            'stroke':
            '#fff',
            'label_position': (256, 0),
            'fill':
            u'#1a9641'
        }]

        actual_context = population_chart_svg.context['context']
        actual_slices = actual_context.slices

        self.assertEqual(expected_slices, actual_slices)
        self.assertTrue(population_chart_svg.output,
                        empty_component_output_message)

        shutil.rmtree(output_folder, ignore_errors=True)
    def test_earthquake_population_without_aggregation(self):
        """Testing Earthquake in Population without aggregation.

        .. versionadded:: 4.0
        """
        output_folder = self.fixtures_dir('../output/earthquake_population')

        # Clean up any outputs from a previous run.
        shutil.rmtree(output_folder, ignore_errors=True)

        hazard_layer = load_test_raster_layer(
            'hazard', 'earthquake.tif')
        exposure_layer = load_test_raster_layer(
            'exposure', 'pop_binary_raster_20_20.asc')

        impact_function = ImpactFunction()
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.prepare()
        return_code, message = impact_function.run()

        self.assertEqual(return_code, ANALYSIS_SUCCESS, message)

        report_metadata = ReportMetadata(
            metadata_dict=standard_impact_report_metadata_html)

        impact_report = ImpactReport(
            IFACE,
            report_metadata,
            impact_function=impact_function)
        impact_report.output_folder = output_folder
        return_code, message = impact_report.process_components()

        self.assertEqual(
            return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

        """Checking generated context"""
        empty_component_output_message = 'Empty component output'

        # Check Analysis Summary
        analysis_summary = impact_report.metadata.component_by_key(
            general_report_component['key'])
        """:type: safe.report.report_metadata.Jinja2ComponentsMetadata"""

        expected_context = {
            'table_header': (
                u'Estimated Number of people affected per MMI intensity'),
            'header': u'General Report',
            'summary': [
                {
                    'header_label': u'Hazard Zone',
                    'rows': [
                        {'value': 0, 'name': u'X', 'key': 'X'},
                        {'value': 0, 'name': u'IX', 'key': 'IX'},
                        {'value': '200', 'name': u'VIII', 'key': 'VIII'},
                        {'value': 0, 'name': u'VII', 'key': 'VII'},
                        {'value': 0, 'name': u'VI', 'key': 'VI'},
                        {'value': 0, 'name': u'V', 'key': 'V'},
                        {'value': 0, 'name': u'IV', 'key': 'IV'},
                        {'value': 0, 'name': u'III', 'key': 'III'},
                        {'value': 0, 'name': u'II', 'key': 'II'},
                        {'value': 0, 'name': u'I', 'key': 'I'},
                        {
                            'as_header': True,
                            'key': 'total_field',
                            'name': u'Total',
                            'value': '200'
                        }
                    ],
                    'value_label': u'Count'
                },
                {
                    'header_label': u'Population',
                    'rows': [
                        {
                            'value': '200',
                            'name': u'Affected',
                            'key': 'total_affected_field',
                        }, {
                            'key': 'total_not_affected_field',
                            'name': u'Not Affected',
                            'value': '0'
                        }, {
                            'key': 'total_not_exposed_field',
                            'name': u'Not Exposed',
                            'value': '0'},
                        {
                            'value': '200',
                            'name': u'Displaced',
                            'key': 'displaced_field'
                        }, {
                            'value': '0 - 100',
                            'name': u'Fatalities',
                            'key': 'fatalities_field'
                        }],
                    'value_label': u'Count'
                }
            ],
            'notes': [
                'Exposed People: People who are present in hazard zones and '
                'are thereby subject to potential losses. In InaSAFE, people '
                'who are exposed are those people who are within the extent '
                'of the hazard.',
                'Affected People: People who are affected by a hazardous '
                'event. People can be affected directly or indirectly. '
                'Affected people may experience short-term or long-term '
                'consequences to their lives, livelihoods or health and in '
                'the economic, physical, social, cultural and environmental '
                'assets. In InaSAFE, people who are killed during the event '
                'are also considered affected.',
                'Displaced People: Displaced people are people who, for '
                'different reasons and circumstances because of risk or '
                'disaster, have to leave their place of residence. '
                'In InaSAFE, demographic and minimum needs reports are based '
                'on displaced / evacuated people.'
            ]
        }
        actual_context = analysis_summary.context

        self.assertDictEqual(expected_context, actual_context)
        self.assertTrue(
            analysis_summary.output, empty_component_output_message)

        report_metadata = ReportMetadata(
            metadata_dict=infographic_report)
        infographic_impact_report = ImpactReport(
            IFACE,
            report_metadata,
            impact_function=impact_function)

        infographic_impact_report.output_folder = output_folder
        return_code, message = infographic_impact_report.process_components()

        self.assertEqual(
            return_code, ImpactReport.REPORT_GENERATION_SUCCESS, message)

        # check population pie chart if we have 100% donut slice
        population_chart_svg = (
            infographic_impact_report.metadata.component_by_key(
                population_chart_svg_component['key'])
        )

        expected_slices = [
            {
                'value': 200,
                'show_label': True,
                'center': (224.0, 128.0),
                'stroke_opacity': 1,
                'path': 'M128.000000,0.000000a128.000000,128.000000 0 0 1 '
                        '0.000000,256.000000l-0.000000,-64.000000a64.000000,'
                        '64.000000 0 0 0 0.000000,-128.000000Z',
                'percentage': 100,
                'label': u'VIII',
                'stroke': u'#ff7000',
                'label_position': (256, 0),
                'fill': u'#ff7000'
            }, {
                'value': 100,
                'show_label': False,
                'center': (32.0, 128.0),
                'stroke_opacity': 1,
                'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                        '-0.000000,-256.000000l0.000000,64.000000a64.000000,'
                        '64.000000 0 0 0 0.000000,128.000000Z',
                'percentage': 50.0,
                'label': '',
                'stroke': u'#ff7000',
                'label_position': (256, 0),
                'fill': u'#ff7000'
            }, {
                'value': 0,
                'show_label': False,
                'center': (128.0, 224.0),
                'stroke_opacity': 1,
                'path': 'M128.000000,256.000000a128.000000,128.000000 0 0 1 '
                        '0.000000,0.000000l-0.000000,-64.000000a64.000000,'
                        '64.000000 0 0 0 0.000000,0.000000Z',
                'percentage': 0.0,
                'label': u'Total Not Affected',
                'stroke': '#fff',
                'label_position': (256, 0),
                'fill': u'#1a9641'
            }]

        actual_context = population_chart_svg.context['context']
        actual_slices = actual_context.slices

        self.assertEqual(expected_slices, actual_slices)
        self.assertTrue(
            population_chart_svg.output, empty_component_output_message)

        shutil.rmtree(output_folder, ignore_errors=True)
    def setUp(self):
        self.vector = load_test_vector_layer('exposure', 'airports.shp')
        self.raster = load_test_raster_layer(
            'gisv4', 'hazard', 'earthquake.asc')
    def test_ratios_with_raster_exposure(self):
        """Test if we can add defaults to a raster exposure.

        See ticket #3851 for how to manage ratios with a raster exposure.
        """
        hazard_layer = load_test_vector_layer(
            'gisv4', 'hazard', 'tsunami_vector.geojson')
        exposure_layer = load_test_raster_layer(
            'gisv4', 'exposure', 'raster', 'population.asc')

        # Set up impact function
        impact_function = ImpactFunction()
        impact_function.debug_mode = True
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.prepare()
        status, message = impact_function.run()
        self.assertEqual(ANALYSIS_SUCCESS, status, message)

        for layer in impact_function.outputs:
            if layer.keywords['layer_purpose'] == (
                    layer_purpose_analysis_impacted['key']):
                analysis = layer
            if layer.keywords['layer_purpose'] == (
                    layer_purpose_aggregate_hazard_impacted['key']):
                impact = layer

        # We check that the impact layer has the female ratio field,
        # filled with the default value.
        index = impact.fieldNameIndex(female_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        unique_values = impact.uniqueValues(index)
        self.assertEqual(1, len(unique_values))
        female_ratio = unique_values[0]

        # female displaced count and youth displaced count
        self.assertNotEqual(
            -1, impact.fieldNameIndex(
                female_displaced_count_field['field_name']))
        self.assertNotEqual(
            -1, impact.fieldNameIndex(
                youth_displaced_count_field['field_name']))

        # Check that we have more than 0 female displaced in the analysis layer
        index = analysis.fieldNameIndex(
            female_displaced_count_field['field_name'])
        female_displaced = analysis.uniqueValues(index)[0]
        self.assertGreater(female_displaced, 0)

        # Let's check computation
        index = analysis.fieldNameIndex(
            displaced_field['field_name'])
        displaced_population = analysis.uniqueValues(index)[0]
        self.assertEqual(
            int(displaced_population * female_ratio), female_displaced)

        # Check that we have more than 0 youth displaced in the analysis layer
        index = analysis.fieldNameIndex(
            youth_displaced_count_field['field_name'])
        value = analysis.uniqueValues(index)[0]
        self.assertGreater(value, 0)

        # Let's do another test with a special aggregation layer
        hazard_layer = load_test_vector_layer(
            'gisv4', 'hazard', 'tsunami_vector.geojson')
        exposure_layer = load_test_raster_layer(
            'gisv4', 'exposure', 'raster', 'population.asc')

        aggregation_layer = load_test_vector_layer(
            'gisv4', 'aggregation', 'small_grid_ratios.geojson')
        # This aggregation layer has:
        # * a field for the female ratio: 1, 0.5 and 0
        # * the global default for the youth ratio
        # * no adult ratio (not used)
        # * a custom 0.75 for the elderly ratio

        # Set up impact function
        impact_function = ImpactFunction()
        impact_function.debug_mode = True
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.aggregation = aggregation_layer
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)
        status, message = impact_function.run()
        self.assertEqual(ANALYSIS_SUCCESS, status, message)

        impact = impact_function.impact

        # We should have a female_ratio with many values
        index = impact.fieldNameIndex(female_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        values = impact.uniqueValues(index)
        self.assertEqual(3, len(values))

        # We should have a youth_ratio with global default
        index = impact.fieldNameIndex(youth_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        values = impact.uniqueValues(index)
        self.assertEqual(1, len(values))

        # We should not have an adult_ratio
        index = impact.fieldNameIndex(adult_ratio_field['field_name'])
        self.assertEqual(-1, index)

        # We should have a elderly_ratio = 0.75
        index = impact.fieldNameIndex(elderly_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        values = impact.uniqueValues(index)
        self.assertEqual(1, len(values))
        self.assertEqual(0.75, values[0])
Example #28
    def test_folder_datastore(self):
        """Test if we can store shapefiles."""
        path = QDir(mkdtemp())
        data_store = Folder(path)
        self.assertTrue(data_store.is_writable())

        path = mkdtemp()
        data_store = Folder(path)

        # We do not have any layer yet.
        self.assertEqual(len(data_store.layers()), 0)

        # Let's add a vector layer.
        layer = load_test_vector_layer('hazard',
                                       'flood_multipart_polygons.shp')
        vector_layer_name = 'flood_test'

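        # add_layer returns a (success flag, layer name) tuple; the third
        # argument appears to control whether a .qml style file is written
        # alongside the layer (see the style checks below).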
        result = data_store.add_layer(layer, vector_layer_name, True)
        self.assertTrue(result[0])
        self.assertEqual(result[1], vector_layer_name)

        # We try to add the layer twice with the same name.
        result = data_store.add_layer(layer, vector_layer_name)
        self.assertFalse(result[0])

        # We have imported one layer.
        self.assertEqual(len(data_store.layers()), 1)

        # Check if we have the correct URI.
        # self.assertIsNone(data_store.layer_uri(layer_name))
        expected = str(
            normcase(normpath(join(path, vector_layer_name + '.shp'))))
        self.assertEqual(
            normcase(normpath(data_store.layer_uri(vector_layer_name))),
            expected)

        # The style must be there
        expected = unicode(
            normcase(normpath(join(path, vector_layer_name + '.qml'))))
        self.assertTrue(exists(expected))
        self.assertTrue(isfile(expected))

        # This layer does not exist
        self.assertIsNone(data_store.layer_uri('fake_layer'))

        # Let's add a raster layer.
        layer = load_test_raster_layer('hazard', 'classified_hazard.tif')
        result = data_store.add_layer(layer, vector_layer_name)
        self.assertFalse(result[0])

        raster_layer_name = 'flood_raster'
        result = data_store.add_layer(layer, raster_layer_name, False)
        self.assertTrue(result[0])

        # The style must not be there
        expected = unicode(
            normcase(normpath(join(path, raster_layer_name + '.qml'))))
        self.assertFalse(exists(expected))
        self.assertFalse(isfile(expected))

        # The datastore should have two layers.
        self.assertEqual(len(data_store.layers()), 2)

        # Check the URI for the raster layer.
        expected = normcase(normpath(join(path, raster_layer_name)))
        self.assertEqual(
            normcase(normpath(data_store.layer_uri(raster_layer_name))),
            expected + '.tif')

        # Check keywords files
        data_store.uri.setNameFilters(['*.xml'])
        files = data_store.uri.entryList()
        data_store.uri.setNameFilters([])
        self.assertIn(raster_layer_name + '.xml', files)
        self.assertIn(vector_layer_name + '.xml', files)

        # Test layer without geometry
        layer = load_test_vector_layer('gisv4', 'impacts',
                                       'exposure_summary_table.csv')
        tabular_layer_name = 'breakdown'
        result = data_store.add_layer(layer, tabular_layer_name)
        self.assertTrue(result[0])

        self.assertIsNotNone(
            data_store.layer_keyword('layer_purpose', 'hazard'))
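Outside the test, the same Folder datastore API can be driven directly: add_layer() returns a (success, name or error) tuple and layer_uri() returns the stored dataset path. A minimal usage sketch based only on what the test above exercises (the safe.datastore.folder import path is an assumption, not verified here):

from tempfile import mkdtemp

# Assumed import path for the Folder datastore exercised in the test above.
from safe.datastore.folder import Folder

def store_in_folder(layer, name, directory=None):
    """Store a QGIS layer in a folder datastore and return its URI."""
    data_store = Folder(directory or mkdtemp())
    if not data_store.is_writable():
        raise IOError('Datastore directory is not writable.')
    success, detail = data_store.add_layer(layer, name)
    if not success:
        # On failure, 'detail' carries the error message.
        raise RuntimeError('Could not store %s: %s' % (name, detail))
    # On success, 'detail' is the layer name actually used.
    return data_store.layer_uri(detail)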
    def test_ratios_with_raster_exposure(self):
        """Test if we can add defaults to a raster exposure.

        See ticket #3851 on how to manage ratios with a raster exposure.
        """
        hazard_layer = load_test_vector_layer('gisv4', 'hazard',
                                              'tsunami_vector.geojson')
        exposure_layer = load_test_raster_layer('gisv4', 'exposure', 'raster',
                                                'population.asc')

        # Set up impact function
        impact_function = ImpactFunction()
        impact_function.debug_mode = True
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.prepare()
        status, message = impact_function.run()
        self.assertEqual(ANALYSIS_SUCCESS, status, message)

        for layer in impact_function.outputs:
            if layer.keywords['layer_purpose'] == (
                    layer_purpose_analysis_impacted['key']):
                analysis = layer
            if layer.keywords['layer_purpose'] == (
                    layer_purpose_aggregate_hazard_impacted['key']):
                impact = layer

        # We check that the impact layer has:
        # the female ratio field, filled with the global default value
        index = impact.fieldNameIndex(female_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        unique_values = impact.uniqueValues(index)
        self.assertEqual(1, len(unique_values))
        female_ratio = unique_values[0]

        # the female displaced count and the youth displaced count fields
        self.assertNotEqual(
            -1,
            impact.fieldNameIndex(female_displaced_count_field['field_name']))
        self.assertNotEqual(
            -1,
            impact.fieldNameIndex(youth_displaced_count_field['field_name']))

        # Check that we have more than 0 female displaced in the analysis layer
        index = analysis.fieldNameIndex(
            female_displaced_count_field['field_name'])
        female_displaced = analysis.uniqueValues(index)[0]
        self.assertGreater(female_displaced, 0)

        # Let's check the computation.
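        # Worked example with illustrative numbers only (not values taken
        # from this dataset): if the displaced population were 1000 and the
        # female ratio 0.5, we would expect int(1000 * 0.5) = 500 female
        # displaced people, which is what the assertion below verifies.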
        index = analysis.fieldNameIndex(displaced_field['field_name'])
        displaced_population = analysis.uniqueValues(index)[0]
        self.assertEqual(int(displaced_population * female_ratio),
                         female_displaced)

        # Check that we have more than 0 youth displaced in the analysis layer
        index = analysis.fieldNameIndex(
            youth_displaced_count_field['field_name'])
        value = analysis.uniqueValues(index)[0]
        self.assertGreater(value, 0)

        # Let's do another test with the special aggregation layer.
        hazard_layer = load_test_vector_layer('gisv4', 'hazard',
                                              'tsunami_vector.geojson')
        exposure_layer = load_test_raster_layer('gisv4', 'exposure', 'raster',
                                                'population.asc')

        aggregation_layer = load_test_vector_layer(
            'gisv4', 'aggregation', 'small_grid_ratios.geojson')
        # This aggregation layer has:
        # * a field for the female ratio: 1, 0.5 and 0
        # * the global default for the youth ratio
        # * 'do not use' for the adult ratio
        # * a custom 0.75 for the elderly ratio

        # Set up impact function
        impact_function = ImpactFunction()
        impact_function.debug_mode = True
        impact_function.exposure = exposure_layer
        impact_function.hazard = hazard_layer
        impact_function.aggregation = aggregation_layer
        status, message = impact_function.prepare()
        self.assertEqual(PREPARE_SUCCESS, status, message)
        status, message = impact_function.run()
        self.assertEqual(ANALYSIS_SUCCESS, status, message)

        impact = impact_function.impact

        # We should have a female_ratio field with three distinct values.
        index = impact.fieldNameIndex(female_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        values = impact.uniqueValues(index)
        self.assertEqual(3, len(values))

        # We should have a youth_ratio field with a single value:
        # the global default.
        index = impact.fieldNameIndex(youth_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        values = impact.uniqueValues(index)
        self.assertEqual(1, len(values))

        # We should not have an adult_ratio field ('do not use').
        index = impact.fieldNameIndex(adult_ratio_field['field_name'])
        self.assertEqual(-1, index)

        # We should have an elderly_ratio field with the custom value 0.75.
        index = impact.fieldNameIndex(elderly_ratio_field['field_name'])
        self.assertNotEqual(-1, index)
        values = impact.uniqueValues(index)
        self.assertEqual(1, len(values))
        self.assertEqual(0.75, values[0])
Exemple #30
0
    def test_raster_y_inverted(self):
        """Test if we can detect an upside down raster."""
        # We should also have a test with an inverted raster, but as that is
        # not a usual case, we do not cover it here.
        layer = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc')
        self.assertFalse(is_raster_y_inverted(layer))
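For reference, an upside-down raster is one whose rows are stored south to north, so the Y pixel size in its geotransform is positive instead of the usual negative value. A hedged sketch of one way to perform the same check with GDAL (this is not the is_raster_y_inverted implementation):

from osgeo import gdal

def raster_y_inverted(path):
    """Return True if the raster at 'path' is stored upside down."""
    dataset = gdal.Open(path)
    # Geotransform item 5 is the pixel height; it is negative for the
    # common north-up orientation and positive for an inverted raster.
    pixel_height = dataset.GetGeoTransform()[5]
    return pixel_height > 0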
Exemple #31
0
    def test_folder_datastore(self):
        """Test if we can store shapefiles."""
        path = QDir(mkdtemp())
        data_store = Folder(path)
        self.assertTrue(data_store.is_writable())

        path = mkdtemp()
        data_store = Folder(path)

        # We do not have any layer yet.
        self.assertEqual(len(data_store.layers()), 0)

        # Let's add a vector layer.
        layer = load_test_vector_layer(
            'hazard', 'flood_multipart_polygons.shp')
        vector_layer_name = 'flood_test'

        result = data_store.add_layer(layer, vector_layer_name)
        self.assertTrue(result[0])
        self.assertEqual(result[1], vector_layer_name)

        # We try to add the layer twice with the same name.
        result = data_store.add_layer(layer, vector_layer_name)
        self.assertFalse(result[0])

        # We have imported one layer.
        self.assertEqual(len(data_store.layers()), 1)

        # Check if we have the correct URI.
        # self.assertIsNone(data_store.layer_uri(layer_name))
        expected = unicode(
            normcase(normpath(join(path, vector_layer_name + '.shp'))))
        self.assertEqual(
            normcase(normpath(
                data_store.layer_uri(vector_layer_name))), expected)

        # This layer does not exist.
        self.assertIsNone(data_store.layer_uri('fake_layer'))

        # Let's add a raster layer.
        layer = load_test_raster_layer('hazard', 'classified_hazard.tif')
        result = data_store.add_layer(layer, vector_layer_name)
        self.assertFalse(result[0])

        raster_layer_name = 'flood_raster'
        result = data_store.add_layer(layer, raster_layer_name)
        self.assertTrue(result[0])

        # The datastore should have two layers.
        self.assertEqual(len(data_store.layers()), 2)

        # Check the URI for the raster layer.
        expected = normcase(normpath(join(path, raster_layer_name)))
        self.assertEqual(
            normcase(normpath(data_store.layer_uri(raster_layer_name))),
            expected + '.tif')

        # Check keywords files
        data_store.uri.setNameFilters(['*.xml'])
        files = data_store.uri.entryList()
        data_store.uri.setNameFilters([])
        self.assertIn(raster_layer_name + '.xml', files)
        self.assertIn(vector_layer_name + '.xml', files)

        # Test layer without geometry
        layer = load_test_vector_layer(
            'gisv4', 'impacts', 'exposure_summary_table.csv')
        tabular_layer_name = 'breakdown'
        result = data_store.add_layer(layer, tabular_layer_name)
        self.assertTrue(result[0])

        self.assertIsNotNone(
            data_store.layer_keyword('layer_purpose', 'hazard')
        )
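One detail worth calling out from the keywords check above: QDir.setNameFilters() expects a list of glob patterns, which is why the corrected calls pass ['*.xml'] to filter and [] to clear the filter. A small hedged sketch of listing the keyword sidecar files in a datastore directory (the PyQt import path is an assumption; the tests get QDir from their own imports):

from PyQt4.QtCore import QDir  # on newer QGIS: from qgis.PyQt.QtCore import QDir

def keyword_sidecars(directory_path):
    """Return the *.xml keyword files found in a datastore directory."""
    directory = QDir(directory_path)
    # setNameFilters takes a list of glob patterns, not a bare string.
    directory.setNameFilters(['*.xml'])
    return directory.entryList()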