Example #1
    def run(self):
        """Run volcano point population evacuation Impact Function.

        Counts the number of people exposed to the volcano event.

        :returns: Map of population exposed to the volcano hazard zone.
            The returned dict will include a table with number of people
            evacuated and supplies required.
        :rtype: dict

        :raises:
            * Exception - When hazard layer is not vector layer
            * RadiiException - When radii are not valid (they need to be
                monotonically increasing)
        """

        # Parameters
        radii = self.parameters['distances'].value

        # Get parameters from layer's keywords
        volcano_name_attribute = self.hazard.keyword('volcano_name_field')

        data_table = self.hazard.layer.get_data()

        # Get names of volcanoes considered
        if volcano_name_attribute in self.hazard.layer.get_attribute_names():
            # Run through all polygons and collect the unique names
            volcano_name_list = []
            for row in data_table:
                name = row[volcano_name_attribute]
                if name not in volcano_name_list:
                    volcano_name_list.append(name)

            self.volcano_names = ', '.join(volcano_name_list)

        # Run interpolation function for polygon2raster
        interpolated_layer, covered_exposure_layer = \
            assign_hazard_values_to_exposure_data(
                self.hazard.layer,
                self.exposure.layer,
                attribute_name=self.target_field
            )

        # Initialise affected population per categories
        for radius in radii:
            category = 'Radius %s km ' % format_int(radius)
            self.affected_population[category] = 0

        if has_no_data(self.exposure.layer.get_data(nan=True)):
            self.no_data_warning = True
        # Count affected population per polygon and total
        for row in interpolated_layer.get_data():
            # Get population at this location
            population = row[self.target_field]
            if not numpy.isnan(population):
                population = float(population)
                # Update population count for this category
                category = 'Radius %s km ' % format_int(
                    row[self.hazard_zone_attribute])
                self.affected_population[category] += population

        # Count totals
        self.total_population = population_rounding(
            int(numpy.nansum(self.exposure.layer.get_data())))

        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]

        # Create style
        colours = ['#FFFFFF', '#38A800', '#79C900', '#CEED00',
                   '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(
            covered_exposure_layer.get_data().flat[:], len(colours))
        interval_classes = humanize_class(classes)
        # Define style info for output polygons showing population counts
        style_classes = []
        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])

            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['colour'] = colours[i]
            style_class['transparency'] = 0
            style_classes.append(style_class)

        # Override style info with new classes and name
        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        # Create vector layer and return
        extra_keywords = {
            'target_field': self.target_field,
            'map_title': self.metadata().key('map_title'),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': self.total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        impact_layer = Raster(
            data=covered_exposure_layer.get_data(),
            projection=covered_exposure_layer.get_projection(),
            geotransform=covered_exposure_layer.get_geotransform(),
            name=self.metadata().key('layer_name'),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
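The pattern worth noting above is the per-category accumulation: each row from the interpolated layer carries a population value in the target field plus the radius category it fell into, and the counts are summed per category while NaN cells are skipped. A minimal standalone sketch of that pattern, with a plain list of dicts standing in for interpolated_layer.get_data() and made-up field names ('population' and 'radius_km' are illustrative, not the InaSAFE attribute names):

import math

# Toy stand-in for interpolated_layer.get_data(): one dict per exposure cell
# that fell inside a hazard buffer.
rows = [
    {'population': 120.0, 'radius_km': 3},
    {'population': 80.0, 'radius_km': 3},
    {'population': float('nan'), 'radius_km': 5},  # no-data cell is skipped
    {'population': 40.0, 'radius_km': 5},
]

radii = [3, 5, 10]

# Initialise one counter per radius category, as the impact function does.
affected_population = {'Radius %s km' % r: 0.0 for r in radii}

for row in rows:
    population = row['population']
    if not math.isnan(population):
        category = 'Radius %s km' % row['radius_km']
        affected_population[category] += float(population)

print(affected_population)
# {'Radius 3 km': 200.0, 'Radius 5 km': 40.0, 'Radius 10 km': 0.0}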
Example #2
    def run(self):
        """Plugin for impact of population as derived by continuous hazard.

        Hazard is reclassified into 3 classes based on the extrema provided
        as impact function parameters.

        Counts the number of people exposed to each category of the hazard.

        :returns:
          Map of population exposed to high category
          Table with number of people in each category
        """

        thresholds = [
            p.value for p in self.parameters['Categorical thresholds'].value]

        # There must be exactly 3 thresholds
        if len(thresholds) != 3:
            raise FunctionParametersError(
                'The thresholds must consist of 3 values.')

        # Thresholds must be monotonically increasing
        monotonically_increasing_flag = all(
            x < y for x, y in zip(thresholds, thresholds[1:]))
        if not monotonically_increasing_flag:
            raise FunctionParametersError(
                'Each threshold should be larger than the previous.')

        # The 3 categories
        low_t = thresholds[0]
        medium_t = thresholds[1]
        high_t = thresholds[2]

        # Extract data as numeric arrays
        hazard_data = self.hazard.layer.get_data(nan=True)  # Category
        if has_no_data(hazard_data):
            self.no_data_warning = True

        # Calculate impact as population exposed to each category
        exposure_data = self.exposure.layer.get_data(nan=True, scaling=True)
        if has_no_data(exposure_data):
            self.no_data_warning = True

        # Build one exposure array per hazard zone: keep the exposure value
        # where it falls in that zone, else assign 0
        low_exposure = numpy.where(hazard_data < low_t, exposure_data, 0)
        medium_exposure = numpy.where(
            (hazard_data >= low_t) & (hazard_data < medium_t),
            exposure_data, 0)
        high_exposure = numpy.where(
            (hazard_data >= medium_t) & (hazard_data <= high_t),
            exposure_data, 0)
        impacted_exposure = low_exposure + medium_exposure + high_exposure

        # Count totals
        self.total_population = int(numpy.nansum(exposure_data))
        self.affected_population[
            tr('Population in high hazard areas')] = int(
                numpy.nansum(high_exposure))
        self.affected_population[
            tr('Population in medium hazard areas')] = int(
                numpy.nansum(medium_exposure))
        self.affected_population[
            tr('Population in low hazard areas')] = int(
                numpy.nansum(low_exposure))
        self.unaffected_population = (
            self.total_population - self.total_affected_population)

        # check for zero impact
        if self.total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        # Don't show figures of less than 1000
        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]
        total_needs = self.total_needs

        # Style for impact layer
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00',
            '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(impacted_exposure.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []

        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])
            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 0
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'map_title': self.metadata().key('map_title'),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            data=impacted_exposure,
            projection=self.hazard.layer.get_projection(),
            geotransform=self.hazard.layer.get_geotransform(),
            name=self.metadata().key('layer_name'),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
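The reclassification here is plain numpy: exposure is kept only where the hazard value falls in the matching band, each band is totalled with nansum, and the three bands are summed back into one impacted-exposure grid. A minimal sketch with synthetic arrays (the threshold values are arbitrary; NaN cells compare as False, so they drop out of every band):

import numpy

hazard = numpy.array([[0.2, 1.5, 3.5],
                      [numpy.nan, 2.0, 0.8]])
population = numpy.array([[10., 20., 30.],
                          [40., 50., 60.]])

low_t, medium_t, high_t = 1.0, 2.0, 4.0

# Keep the population value only where the hazard lies in the band, else 0.
low = numpy.where(hazard < low_t, population, 0)
medium = numpy.where((hazard >= low_t) & (hazard < medium_t), population, 0)
high = numpy.where((hazard >= medium_t) & (hazard <= high_t), population, 0)

impacted = low + medium + high

print(int(numpy.nansum(high)))      # people in the high band
print(int(numpy.nansum(impacted)))  # total affected population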
Example #3
    def run(self):
        """Plugin for impact of population as derived by continuous hazard.

        Hazard is reclassified into 3 classes based on the extrema provided
        as impact function parameters.

        Counts the number of people exposed to each category of the hazard.

        :returns:
          Map of population exposed to high category
          Table with number of people in each category
        """

        thresholds = [
            p.value for p in self.parameters['Categorical thresholds'].value
        ]

        # There must be exactly 3 thresholds
        if len(thresholds) != 3:
            raise FunctionParametersError(
                'The thresholds must consist of 3 values.')

        # Thresholds must be monotonically increasing
        monotonically_increasing_flag = all(
            x < y for x, y in zip(thresholds, thresholds[1:]))
        if not monotonically_increasing_flag:
            raise FunctionParametersError(
                'Each threshold should be larger than the previous.')

        # The 3 categories
        low_t = thresholds[0]
        medium_t = thresholds[1]
        high_t = thresholds[2]

        # Extract data as numeric arrays
        hazard_data = self.hazard.layer.get_data(nan=True)  # Category
        if has_no_data(hazard_data):
            self.no_data_warning = True

        # Calculate impact as population exposed to each category
        exposure_data = self.exposure.layer.get_data(nan=True, scaling=True)
        if has_no_data(exposure_data):
            self.no_data_warning = True

        # Build one exposure array per hazard zone: keep the exposure value
        # where it falls in that zone, else assign 0
        low_exposure = numpy.where(hazard_data < low_t, exposure_data, 0)
        medium_exposure = numpy.where(
            (hazard_data >= low_t) & (hazard_data < medium_t), exposure_data,
            0)
        high_exposure = numpy.where(
            (hazard_data >= medium_t) & (hazard_data <= high_t), exposure_data,
            0)
        impacted_exposure = low_exposure + medium_exposure + high_exposure

        # Count totals
        self.total_population = int(numpy.nansum(exposure_data))
        self.affected_population[tr('Population in high hazard zones')] = int(
            numpy.nansum(high_exposure))
        self.affected_population[tr(
            'Population in medium hazard zones')] = int(
                numpy.nansum(medium_exposure))
        self.affected_population[tr('Population in low hazard zones')] = int(
            numpy.nansum(low_exposure))
        self.unaffected_population = (self.total_population -
                                      self.total_affected_population)

        # check for zero impact
        if self.total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        # Don't show figures of less than 1000
        self.minimum_needs = [
            parameter.serialize() for parameter in filter_needs_parameters(
                self.parameters['minimum needs'])
        ]
        total_needs = self.total_needs

        # Style for impact layer
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00', '#FFCC00', '#FF6600',
            '#FF0000', '#7A0000'
        ]
        classes = create_classes(impacted_exposure.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []

        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])
            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 0
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(target_field=None,
                          style_classes=style_classes,
                          style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            data=impacted_exposure,
            projection=self.hazard.layer.get_projection(),
            geotransform=self.hazard.layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
Example #4
    def run(self):
        """Risk plugin for flood population evacuation.

        Counts the number of people exposed to flood levels exceeding the
        specified threshold.

        :returns: Map of population exposed to flood levels exceeding the
            threshold. Table with number of people evacuated and supplies
            required.
        :rtype: tuple
        """

        # Determine depths above which people are regarded affected [m]
        # Use thresholds from inundation layer if specified
        thresholds = self.parameters['thresholds'].value

        verify(isinstance(thresholds, list),
               'Expected thresholds to be a list. Got %s' % str(thresholds))

        # Extract data as numeric arrays

        data = self.hazard.layer.get_data(nan=True)  # Depth
        if has_no_data(data):
            self.no_data_warning = True

        # Calculate impact as population exposed to depths > max threshold
        population = self.exposure.layer.get_data(nan=True, scaling=True)
        total = int(numpy.nansum(population))
        if has_no_data(population):
            self.no_data_warning = True

        # merely initialize
        impact = None

        for i, lo in enumerate(thresholds):
            if i == len(thresholds) - 1:
                # The last threshold
                thresholds_name = tr('People in >= %.1f m of water') % lo
                self.impact_category_ordering.append(thresholds_name)
                self._evacuation_category = thresholds_name
                impact = medium = numpy.where(data >= lo, population, 0)
            else:
                # Intermediate thresholds
                hi = thresholds[i + 1]
                thresholds_name = tr('People in %.1f m to %.1f m of water' %
                                     (lo, hi))
                self.impact_category_ordering.append(thresholds_name)
                medium = numpy.where((data >= lo) * (data < hi), population, 0)

            # Count
            val = int(numpy.nansum(medium))
            self.affected_population[thresholds_name] = val

        # Put the deepest area on top #2385
        self.impact_category_ordering.reverse()

        self.total_population = total
        self.unaffected_population = total - self.total_affected_population

        # Carry the no data values forward to the impact layer.
        impact = numpy.where(numpy.isnan(population), numpy.nan, impact)
        impact = numpy.where(numpy.isnan(data), numpy.nan, impact)

        # Count totals
        evacuated = self.total_evacuated

        self.minimum_needs = [
            parameter.serialize()
            for parameter in self.parameters['minimum needs']
        ]

        total_needs = self.total_needs

        # check for zero impact
        if numpy.nanmax(impact) == 0 == numpy.nanmin(impact):
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        # Create style
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00', '#FFCC00', '#FF6600',
            '#FF0000', '#7A0000'
        ]
        classes = create_classes(impact.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []

        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(interval_classes[i], 'Low')
            elif i == 4:
                label = create_label(interval_classes[i], 'Medium')
            elif i == 7:
                label = create_label(interval_classes[i], 'High')
            else:
                label = create_label(interval_classes[i])
            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 0
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(target_field=None,
                          style_classes=style_classes,
                          style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'evacuated': evacuated,
            'total_needs': total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            impact,
            projection=self.hazard.layer.get_projection(),
            geotransform=self.hazard.layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
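The depth-threshold loop above produces one population count per interval [lo, hi), with the last threshold treated as open-ended. A standalone sketch of that binning with synthetic depth and population arrays (threshold values are illustrative):

import numpy

depth = numpy.array([0.2, 0.7, 1.4, 2.5, numpy.nan])    # metres of water
population = numpy.array([10., 20., 30., 40., 50.])

thresholds = [0.5, 1.0, 2.0]

affected = {}
for i, lo in enumerate(thresholds):
    if i == len(thresholds) - 1:
        # The last threshold is an open-ended interval.
        name = 'People in >= %.1f m of water' % lo
        exposed = numpy.where(depth >= lo, population, 0)
    else:
        hi = thresholds[i + 1]
        name = 'People in %.1f m to %.1f m of water' % (lo, hi)
        exposed = numpy.where((depth >= lo) & (depth < hi), population, 0)
    affected[name] = int(numpy.nansum(exposed))

print(affected)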
Example #5
    def run(self):
        """Run volcano population evacuation Impact Function.

        Counts the number of people exposed to the volcano event.

        :returns: Map of population exposed to the volcano hazard zone.
            The returned dict will include a table with number of people
            evacuated and supplies required.
        :rtype: dict

        :raises:
            * Exception - When hazard layer is not vector layer
            * RadiiException - When radii are not valid (they need to be
                monotonically increasing)
        """

        # Parameters
        self.hazard_class_attribute = self.hazard.keyword('field')
        name_attribute = self.hazard.keyword('volcano_name_field')
        self.hazard_class_mapping = self.hazard.keyword('value_map')

        if has_no_data(self.exposure.layer.get_data(nan=True)):
            self.no_data_warning = True

        # Input checks
        if not self.hazard.layer.is_polygon_data:
            message = tr(
                'Input hazard must be a polygon layer. I got %s with layer '
                'type %s' % (
                    self.hazard.layer.get_name(),
                    self.hazard.layer.get_geometry_name()))
            raise Exception(message)

        # Check if hazard_class_attribute exists in hazard_layer
        if (self.hazard_class_attribute not in
                self.hazard.layer.get_attribute_names()):
            message = tr(
                'Hazard data %s did not contain expected attribute %s' % (
                    self.hazard.layer.get_name(),
                    self.hazard_class_attribute))
            # noinspection PyExceptionInherit
            raise InaSAFEError(message)

        features = self.hazard.layer.get_data()

        # Get names of volcanoes considered
        if name_attribute in self.hazard.layer.get_attribute_names():
            # Run through all polygons and get unique names
            for row in features:
                self.volcano_names.add(row[name_attribute])

        # Retrieve the classification that is used by the hazard layer.
        vector_hazard_classification = self.hazard.keyword(
            'vector_hazard_classification')
        # Get the dictionary that contains the definition of the classification
        vector_hazard_classification = definition(vector_hazard_classification)
        # Get the list of classes in the classification
        vector_hazard_classes = vector_hazard_classification['classes']
        # Initialize OrderedDict of affected population
        self.affected_population = OrderedDict()
        # Iterate over vector hazard classes
        for vector_hazard_class in vector_hazard_classes:
            # Check if the key of the class exists in hazard_class_mapping
            if vector_hazard_class['key'] in self.hazard_class_mapping.keys():
                # Replace the key with the name as we need to show the human
                # friendly name in the report.
                self.hazard_class_mapping[vector_hazard_class['name']] = \
                    self.hazard_class_mapping.pop(vector_hazard_class['key'])
                # Add the class name as a key in affected_population
                self.affected_population[vector_hazard_class['name']] = 0

        # Run interpolation function for polygon2raster
        interpolated_layer, covered_exposure_layer = \
            assign_hazard_values_to_exposure_data(
                self.hazard.layer,
                self.exposure.layer,
                attribute_name=self.target_field)

        # Count affected population per polygon and total
        for row in interpolated_layer.get_data():
            # Get population at this location
            population = row[self.target_field]
            if not numpy.isnan(population):
                population = float(population)
                # Update population count for this hazard zone
                hazard_value = get_key_for_value(
                    row[self.hazard_class_attribute],
                    self.hazard_class_mapping)
                if not hazard_value:
                    hazard_value = self._not_affected_value
                self.affected_population[hazard_value] += population

        # Count totals
        self.total_population = int(
            numpy.nansum(self.exposure.layer.get_data()))
        self.unaffected_population = (
            self.total_population - self.total_affected_population)

        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]

        # check for zero impact
        if self.total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        # Create style
        colours = ['#FFFFFF', '#38A800', '#79C900', '#CEED00',
                   '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(
            covered_exposure_layer.get_data().flat[:], len(colours))
        interval_classes = humanize_class(classes)
        # Define style info for output polygons showing population counts
        style_classes = []
        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])

            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['colour'] = colours[i]
            style_class['transparency'] = 0
            style_classes.append(style_class)

        # Override style info with new classes and name
        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'target_field': self.target_field,
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': self.total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create vector layer and return
        impact_layer = Raster(
            data=covered_exposure_layer.get_data(),
            projection=covered_exposure_layer.get_projection(),
            geotransform=covered_exposure_layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info
        )

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
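The keyword-handling step above re-keys the hazard value map from machine keys to the human-friendly class names defined in the classification, and seeds a zero counter per class so the report lists every mapped class even if no one falls in it. A minimal sketch of that remapping with made-up class definitions and a made-up value map (the keys, names and attribute values below are illustrative, not the actual InaSAFE classification):

from collections import OrderedDict

# Illustrative classification: ordered list of class definitions.
vector_hazard_classes = [
    {'key': 'high', 'name': 'High Hazard Zone'},
    {'key': 'medium', 'name': 'Medium Hazard Zone'},
    {'key': 'low', 'name': 'Low Hazard Zone'},
]

# Illustrative value map from the hazard layer keywords:
# class key -> attribute values used in the layer.
hazard_class_mapping = {'high': ['KRB III'], 'medium': ['KRB II']}

affected_population = OrderedDict()
for hazard_class in vector_hazard_classes:
    if hazard_class['key'] in hazard_class_mapping:
        # Re-key the value map by display name and seed a zero counter.
        hazard_class_mapping[hazard_class['name']] = \
            hazard_class_mapping.pop(hazard_class['key'])
        affected_population[hazard_class['name']] = 0

print(hazard_class_mapping)
print(affected_population)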
Example #6
    def run(self):
        """Indonesian Earthquake Fatality Model."""
        displacement_rate = self.hardcoded_parameters['displacement_rate']
        fatality_rate = self.compute_fatality_rate()

        # Extract data grids
        hazard = self.hazard.layer.get_data()   # Ground Shaking
        # Population Density
        exposure = self.exposure.layer.get_data(scaling=True)

        # Calculate people affected by each MMI level
        mmi_range = self.hardcoded_parameters['mmi_range']
        number_of_exposed = {}
        number_of_displaced = {}
        number_of_fatalities = {}
        # Calculate fatality rates for observed intensity values (hazard)
        # based on the ITB power model
        mask = numpy.zeros(hazard.shape)
        for mmi in mmi_range:
            # Identify cells where MMI is in class i and
            # count people affected by this shake level
            step = self.hardcoded_parameters['step']
            mmi_matches = numpy.where(
                (hazard > mmi - step) * (hazard <= mmi + step), exposure, 0)

            # Calculate expected number of fatalities per level
            exposed = numpy.nansum(mmi_matches)
            fatalities = fatality_rate[mmi] * exposed

            # Calculate expected number of displaced people per level
            displacements = displacement_rate[mmi] * (
                exposed - numpy.median(fatalities))

            # Adjust displaced people to disregard fatalities.
            # Set to zero if there are more fatalities than displaced.
            # displacements = numpy.where(
            #    displacements > fatalities, displacements - fatalities, 0)

            # Sum up numbers for map
            # We need to use matrices here and not just numbers #2235
            # filter out NaN to avoid overflow additions
            mmi_matches = numpy.nan_to_num(mmi_matches)
            mask += mmi_matches   # Displaced

            # Generate text with result for this study
            # This is what is used in the real time system exposure table
            number_of_exposed[mmi] = exposed
            number_of_displaced[mmi] = displacements
            # noinspection PyUnresolvedReferences
            number_of_fatalities[mmi] = fatalities

        # Total statistics
        total_fatalities_raw = numpy.nansum(
            number_of_fatalities.values(), axis=0)

        # Compute probability of fatality in each magnitude bin
        if (self.__class__.__name__ == 'PAGFatalityFunction') or (
                self.__class__.__name__ == 'ITBBayesianFatalityFunction'):
            prob_fatality_mag = self.compute_probability(total_fatalities_raw)
        else:
            prob_fatality_mag = None

        # Compute number of fatalities
        self.total_population = numpy.nansum(number_of_exposed.values())
        self.total_fatalities = numpy.median(total_fatalities_raw)
        total_displaced = numpy.nansum(number_of_displaced.values())

        # As per email discussion with Ole, Trevor, Hadi, total fatalities < 50
        # will be rounded down to 0 - Tim
        # Needs to revisit but keep it alive for the time being - Hyeuk, Jono
        if self.total_fatalities < 50:
            self.total_fatalities = 0

        affected_population = self.affected_population
        affected_population[tr('Number of fatalities')] = self.total_fatalities
        affected_population[
            tr('Number of people displaced')] = total_displaced
        self.unaffected_population = (
            self.total_population - total_displaced - self.total_fatalities)
        self._evacuation_category = tr('Number of people displaced')

        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]
        total_needs = self.total_needs

        # Create style
        colours = ['#EEFFEE', '#FFFF7F', '#E15500', '#E4001B', '#730000']
        classes = create_classes(mask.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []
        for i in xrange(len(interval_classes)):
            style_class = dict()
            style_class['label'] = create_label(interval_classes[i])
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 30
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(target_field=None,
                          style_classes=style_classes,
                          style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'exposed_per_mmi': number_of_exposed,
            'total_population': self.total_population,
            'total_fatalities': population_rounding(self.total_fatalities),
            'total_fatalities_raw': self.total_fatalities,
            'fatalities_per_mmi': number_of_fatalities,
            'total_displaced': population_rounding(total_displaced),
            'displaced_per_mmi': number_of_displaced,
            'map_title': self.metadata().key('map_title'),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': total_needs,
            'prob_fatality_mag': prob_fatality_mag,
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            mask,
            projection=self.exposure.layer.get_projection(),
            geotransform=self.exposure.layer.get_geotransform(),
            keywords=impact_layer_keywords,
            name=self.metadata().key('layer_name'),
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
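Stripped of the model-specific coefficients, the loop above bins the exposure grid by MMI (within +/- step of each integer intensity), sums the exposed population per bin, and applies a per-MMI fatality rate. A simplified sketch with synthetic grids; the step and rate values are placeholders, not the calibrated ITB model parameters:

import numpy

shaking = numpy.array([[5.2, 6.1, 7.4],
                       [6.8, numpy.nan, 8.0]])   # MMI per cell
density = numpy.array([[100., 200., 300.],
                       [150., 250., 50.]])       # people per cell

mmi_range = range(5, 9)                          # MMI 5..8
step = 0.5                                       # half-width of each MMI bin
fatality_rate = {5: 0.0, 6: 0.0001, 7: 0.001, 8: 0.01}  # placeholder rates

exposed_per_mmi = {}
fatalities_per_mmi = {}
for mmi in mmi_range:
    # Population in cells whose shaking falls inside this MMI bin.
    matches = numpy.where(
        (shaking > mmi - step) & (shaking <= mmi + step), density, 0)
    exposed = numpy.nansum(matches)
    exposed_per_mmi[mmi] = exposed
    fatalities_per_mmi[mmi] = fatality_rate[mmi] * exposed

print(exposed_per_mmi)
print(fatalities_per_mmi)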
Example #7
    def run(self):
        """Run the impact function.
        """
        # Range for ash hazard
        group_parameters = self.parameters['group_threshold']
        unaffected_max = group_parameters.value_map[
            'unaffected_threshold'].value
        very_low_max = group_parameters.value_map['very_low_threshold'].value
        low_max = group_parameters.value_map['low_threshold'].value
        medium_max = group_parameters.value_map['moderate_threshold'].value
        high_max = group_parameters.value_map['high_threshold'].value

        # Extract hazard data as numeric arrays
        ash = self.hazard.layer.get_data(nan=True)  # Thickness
        if has_no_data(ash):
            self.no_data_warning = True

        # Extract exposure data as numeric arrays
        population = self.exposure.layer.get_data(nan=True, scaling=True)
        if has_no_data(population):
            self.no_data_warning = True

        # Build one exposure array per hazard level: keep the exposure value
        # where it falls in that level's range, else assign 0
        unaffected_exposure = numpy.where(ash < unaffected_max, population, 0)
        very_low_exposure = numpy.where(
            (ash >= unaffected_max) & (ash < very_low_max), population, 0)
        low_exposure = numpy.where(
            (ash >= very_low_max) & (ash < low_max), population, 0)
        medium_exposure = numpy.where(
            (ash >= low_max) & (ash < medium_max), population, 0)
        high_exposure = numpy.where(
            (ash >= medium_max) & (ash < high_max), population, 0)
        very_high_exposure = numpy.where(ash >= high_max, population, 0)

        impacted_exposure = (
            very_low_exposure +
            low_exposure +
            medium_exposure +
            high_exposure +
            very_high_exposure
        )

        # Count totals
        self.total_population = int(numpy.nansum(population))
        self.affected_population[
            tr('Population in very low hazard zone')] = int(
            numpy.nansum(very_low_exposure))
        self.affected_population[
            tr('Population in low hazard zone')] = int(
            numpy.nansum(low_exposure))
        self.affected_population[
            tr('Population in medium hazard zone')] = int(
            numpy.nansum(medium_exposure))
        self.affected_population[
            tr('Population in high hazard zone')] = int(
            numpy.nansum(high_exposure))
        self.affected_population[
            tr('Population in very high hazard zone')] = int(
            numpy.nansum(very_high_exposure))
        self.unaffected_population = int(
            numpy.nansum(unaffected_exposure))

        # check for zero impact
        if self.total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        # Don't show figures of less than 1000
        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]
        total_needs = self.total_needs

        # Style for impact layer
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00',
            '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(impacted_exposure.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []

        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])
            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 0
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            data=impacted_exposure,
            projection=self.hazard.layer.get_projection(),
            geotransform=self.hazard.layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
Example #8
    def run(self):
        """Plugin for impact of population as derived by classified hazard.

        Counts the number of people exposed to each class of the hazard.

        :returns: Map of population exposed to high class
            Table with number of people in each class
        """

        # The 3 classes
        # TODO (3.2): shouldn't these be defined in keywords rather? TS
        categorical_hazards = self.parameters['Categorical hazards'].value
        low_class = categorical_hazards[0].value
        medium_class = categorical_hazards[1].value
        high_class = categorical_hazards[2].value

        # The classes must be different to each other
        unique_classes_flag = all(x != y for x, y in list(
            itertools.combinations([low_class, medium_class, high_class], 2)))
        if not unique_classes_flag:
            raise FunctionParametersError(
                'There are hazard classes with the same value. Please check '
                'the parameters.')

        # Extract data as numeric arrays
        hazard_data = self.hazard.layer.get_data(nan=True)  # Class
        if has_no_data(hazard_data):
            self.no_data_warning = True

        # Calculate impact as population exposed to each class
        population = self.exposure.layer.get_data(scaling=True)

        # Get all population data that falls in each hazard class
        high_hazard_population = numpy.where(hazard_data == high_class,
                                             population, 0)
        medium_hazard_population = numpy.where(hazard_data == medium_class,
                                               population, 0)
        low_hazard_population = numpy.where(hazard_data == low_class,
                                            population, 0)
        affected_population = (high_hazard_population +
                               medium_hazard_population +
                               low_hazard_population)

        # Carry the no data values forward to the impact layer.
        affected_population = numpy.where(numpy.isnan(population), numpy.nan,
                                          affected_population)
        affected_population = numpy.where(numpy.isnan(hazard_data), numpy.nan,
                                          affected_population)

        # Count totals
        self.total_population = int(numpy.nansum(population))
        self.affected_population[tr('Population in low hazard zone')] = int(
            numpy.nansum(low_hazard_population))
        self.affected_population[tr('Population in medium hazard zone')] = int(
            numpy.nansum(medium_hazard_population))
        self.affected_population[tr('Population in high hazard zone')] = int(
            numpy.nansum(high_hazard_population))
        self.unaffected_population = (self.total_population -
                                      self.total_affected_population)

        # check for zero impact
        if self.total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        self.minimum_needs = [
            parameter.serialize()
            for parameter in self.parameters['minimum needs']
        ]

        total_needs = self.total_needs

        # Create style
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00', '#FFCC00', '#FF6600',
            '#FF0000', '#7A0000'
        ]
        classes = create_classes(affected_population.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []

        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])
            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 0
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(target_field=None,
                          style_classes=style_classes,
                          style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            data=affected_population,
            projection=self.exposure.layer.get_projection(),
            geotransform=self.exposure.layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
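One detail worth calling out in this example is the no-data carry-forward: wherever either input grid is NaN, the impact grid is forced to NaN as well, so no-data cells are not reported as zero impact and nansum still ignores them. A minimal sketch:

import numpy

hazard = numpy.array([1., 2., numpy.nan, 3.])
population = numpy.array([10., numpy.nan, 30., 40.])
affected = numpy.array([10., 0., 30., 40.])   # some computed impact grid

# Wherever either input was no-data, force the impact cell to no-data too.
affected = numpy.where(numpy.isnan(population), numpy.nan, affected)
affected = numpy.where(numpy.isnan(hazard), numpy.nan, affected)

print(affected)                      # [10. nan nan 40.]
print(int(numpy.nansum(affected)))   # totals still skip the no-data cells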
Example #9
    def run(self):
        """Run classified population evacuation Impact Function.

        Counts the number of people exposed to each hazard zone.

        :returns: Map of population exposed to each hazard zone.
            The returned dict will include a table with number of people
            evacuated and supplies required.
        :rtype: dict

        :raises:
            * Exception - When hazard layer is not vector layer
        """

        # Value from layer's keywords
        self.hazard_class_attribute = self.hazard.keyword('field')
        self.hazard_class_mapping = self.hazard.keyword('value_map')
        # TODO: Remove check to self.validate (Ismail)
        # Input checks
        message = tr(
            'Input hazard must be a polygon layer. I got %s with layer type '
            '%s' % (self.hazard.name, self.hazard.layer.get_geometry_name()))
        if not self.hazard.layer.is_polygon_data:
            raise Exception(message)

        # Check if hazard_class_attribute exists in hazard_layer
        if (self.hazard_class_attribute
                not in self.hazard.layer.get_attribute_names()):
            message = tr(
                'Hazard data %s does not contain the expected hazard zone '
                'attribute "%s". Please change it in the options.' %
                (self.hazard.name, self.hazard_class_attribute))
            # noinspection PyExceptionInherit
            raise InaSAFEError(message)

        # Retrieve the classification that is used by the hazard layer.
        vector_hazard_classification = self.hazard.keyword(
            'vector_hazard_classification')
        # Get the dictionary that contains the definition of the classification
        vector_hazard_classification = definition(vector_hazard_classification)
        # Get the list of classes in the classification
        vector_hazard_classes = vector_hazard_classification['classes']
        # Initialize OrderedDict of affected population
        self.affected_population = OrderedDict()
        # Iterate over vector hazard classes
        for vector_hazard_class in vector_hazard_classes:
            # Check if the key of the class exists in hazard_class_mapping
            if vector_hazard_class['key'] in self.hazard_class_mapping.keys():
                # Replace the key with the name as we need to show the human
                # friendly name in the report.
                self.hazard_class_mapping[vector_hazard_class['name']] = \
                    self.hazard_class_mapping.pop(vector_hazard_class['key'])
                # Add the class name as a key in affected_population
                self.affected_population[vector_hazard_class['name']] = 0

        # The interpolated layer represents grid cells that lie within the
        # hazard polygons
        interpolated_layer, covered_exposure_layer = \
            assign_hazard_values_to_exposure_data(
                self.hazard.layer,
                self.exposure.layer,
                attribute_name=self.target_field
            )

        # Count total affected population per hazard zone
        for row in interpolated_layer.get_data():
            # Get population at this location
            population = row[self.target_field]
            if not numpy.isnan(population):
                population = float(population)
                # Update population count for this hazard zone
                hazard_value = get_key_for_value(
                    row[self.hazard_class_attribute],
                    self.hazard_class_mapping)
                if not hazard_value:
                    hazard_value = self._not_affected_value
                else:
                    self.affected_population[hazard_value] += population

        # Count total population from exposure layer
        self.total_population = int(
            numpy.nansum(self.exposure.layer.get_data()))

        # Count total affected population
        total_affected_population = self.total_affected_population
        self.unaffected_population = (self.total_population -
                                      total_affected_population)

        self.minimum_needs = [
            parameter.serialize() for parameter in filter_needs_parameters(
                self.parameters['minimum needs'])
        ]

        # check for zero impact
        if total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        # Create style
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00', '#FFCC00', '#FF6600',
            '#FF0000', '#7A0000'
        ]
        classes = create_classes(covered_exposure_layer.get_data().flat[:],
                                 len(colours))
        interval_classes = humanize_class(classes)
        # Define style info for output polygons showing population counts
        style_classes = []
        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])

            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['colour'] = colours[i]
            style_class['transparency'] = 0
            style_classes.append(style_class)

        # Override style info with new classes and name
        style_info = dict(target_field=None,
                          style_classes=style_classes,
                          style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'target_field': self.target_field,
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title')
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create vector layer and return
        impact_layer = Raster(
            data=covered_exposure_layer.get_data(),
            projection=covered_exposure_layer.get_projection(),
            geotransform=covered_exposure_layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
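The per-row counting above leans on get_key_for_value() to map a raw attribute value back to the (renamed) class key in the value map. Assuming that helper does a reverse lookup over the value map — the real InaSAFE implementation may differ — the idea can be sketched with a hypothetical stand-in:

def lookup_key_for_value(value, value_map):
    """Return the first key whose value list contains `value`, else None.

    Hypothetical stand-in for InaSAFE's get_key_for_value(); the real
    helper's behaviour and signature may differ.
    """
    for key, values in value_map.items():
        if value in values:
            return key
    return None

# Value map after re-keying by display name (illustrative values).
hazard_class_mapping = {
    'High Hazard Zone': ['KRB III'],
    'Medium Hazard Zone': ['KRB II'],
}

print(lookup_key_for_value('KRB II', hazard_class_mapping))   # Medium Hazard Zone
print(lookup_key_for_value('unknown', hazard_class_mapping))  # None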
Example #10
    def run(self):
        """Indonesian Earthquake Fatality Model."""
        displacement_rate = self.hardcoded_parameters['displacement_rate']
        fatality_rate = self.compute_fatality_rate()

        # Extract data grids
        hazard = self.hazard.layer.get_data()  # Ground Shaking
        # Population Density
        exposure = self.exposure.layer.get_data(scaling=True)

        # Calculate people affected by each MMI level
        mmi_range = self.hardcoded_parameters['mmi_range']
        number_of_exposed = {}
        number_of_displaced = {}
        number_of_fatalities = {}
        # Calculate fatality rates for observed intensity values (hazard)
        # based on the ITB power model
        mask = numpy.zeros(hazard.shape)
        for mmi in mmi_range:
            # Identify cells where MMI is in class i and
            # count people affected by this shake level
            step = self.hardcoded_parameters['step']
            mmi_matches = numpy.where(
                (hazard > mmi - step) * (hazard <= mmi + step), exposure, 0)

            # Calculate expected number of fatalities per level
            exposed = numpy.nansum(mmi_matches)
            fatalities = fatality_rate[mmi] * exposed

            # Calculate expected number of displaced people per level
            displacements = displacement_rate[mmi] * (exposed -
                                                      numpy.median(fatalities))

            # Adjust displaced people to disregard fatalities.
            # Set to zero if there are more fatalities than displaced.
            # displacements = numpy.where(
            #    displacements > fatalities, displacements - fatalities, 0)

            # Sum up numbers for map
            # We need to use matrices here and not just numbers #2235
            # filter out NaN to avoid overflow additions
            mmi_matches = numpy.nan_to_num(mmi_matches)
            mask += mmi_matches  # Displaced

            # Generate text with result for this study
            # This is what is used in the real time system exposure table
            number_of_exposed[mmi] = exposed
            number_of_displaced[mmi] = displacements
            # noinspection PyUnresolvedReferences
            number_of_fatalities[mmi] = fatalities

        # Total statistics
        total_fatalities_raw = numpy.nansum(number_of_fatalities.values(),
                                            axis=0)

        # Compute probability of fatality in each magnitude bin
        if (self.__class__.__name__ == 'PAGFatalityFunction') or (
                self.__class__.__name__ == 'ITBBayesianFatalityFunction'):
            prob_fatality_mag = self.compute_probability(total_fatalities_raw)
        else:
            prob_fatality_mag = None

        # Compute number of fatalities
        self.total_population = numpy.nansum(number_of_exposed.values())
        self.total_fatalities = numpy.median(total_fatalities_raw)
        total_displaced = numpy.nansum(number_of_displaced.values())

        # As per email discussion with Ole, Trevor, Hadi, total fatalities < 50
        # will be rounded down to 0 - Tim
        # Needs to revisit but keep it alive for the time being - Hyeuk, Jono
        if self.total_fatalities < 50:
            self.total_fatalities = 0

        affected_population = self.affected_population
        affected_population[tr('Number of fatalities')] = self.total_fatalities
        affected_population[tr('Number of people displaced')] = total_displaced
        self.unaffected_population = (self.total_population - total_displaced -
                                      self.total_fatalities)
        self._evacuation_category = tr('Number of people displaced')

        self.minimum_needs = [
            parameter.serialize() for parameter in filter_needs_parameters(
                self.parameters['minimum needs'])
        ]
        total_needs = self.total_needs

        # Create style
        colours = ['#EEFFEE', '#FFFF7F', '#E15500', '#E4001B', '#730000']
        classes = create_classes(mask.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []
        for i in xrange(len(interval_classes)):
            style_class = dict()
            style_class['label'] = create_label(interval_classes[i])
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 30
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(target_field=None,
                          style_classes=style_classes,
                          style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'exposed_per_mmi': number_of_exposed,
            'total_population': self.total_population,
            'total_fatalities': population_rounding(self.total_fatalities),
            'total_fatalities_raw': self.total_fatalities,
            'fatalities_per_mmi': number_of_fatalities,
            'total_displaced': population_rounding(total_displaced),
            'displaced_per_mmi': number_of_displaced,
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': total_needs,
            'prob_fatality_mag': prob_fatality_mag,
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            mask,
            projection=self.exposure.layer.get_projection(),
            geotransform=self.exposure.layer.get_geotransform(),
            keywords=impact_layer_keywords,
            name=self.map_title(),
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
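
A minimal standalone sketch of the per-MMI binning used above, with toy grids and made-up rate tables (the real function uses calibrated model coefficients and the hardcoded parameters shown in the snippet):

import numpy

# Toy grids: MMI shaking per cell and people per cell (NaN marks no data)
hazard = numpy.array([[4.2, 5.1], [6.8, numpy.nan]])
exposure = numpy.array([[100.0, 250.0], [80.0, 40.0]])

# Placeholder rates per MMI class, not the calibrated values
fatality_rate = {4: 0.0, 5: 0.0001, 6: 0.001, 7: 0.01}
displacement_rate = {4: 0.0, 5: 0.1, 6: 0.5, 7: 1.0}

step = 0.5
affected_map = numpy.zeros(hazard.shape)
exposed_per_mmi = {}
for mmi in sorted(fatality_rate):
    # Cells whose shaking falls in the band (mmi - step, mmi + step]
    in_band = (hazard > mmi - step) & (hazard <= mmi + step)
    mmi_matches = numpy.where(in_band, exposure, 0)

    exposed = numpy.nansum(mmi_matches)
    fatalities = fatality_rate[mmi] * exposed
    displaced = displacement_rate[mmi] * (exposed - fatalities)
    exposed_per_mmi[mmi] = exposed

    # Accumulate the per-cell grid used for the output map, dropping NaN first
    affected_map += numpy.nan_to_num(mmi_matches)

# exposed_per_mmi == {4: 100.0, 5: 250.0, 6: 0.0, 7: 80.0}

Each cell falls into at most one band as long as consecutive MMI values are at least 2 * step apart.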
Example #11
    def run(self):
        """Plugin for impact of population as derived by classified hazard.

        Counts number of people exposed to each class of the hazard

        :returns: Map of population exposed to the high hazard class and a
            table with the number of people in each class.
        """

        # The 3 classes
        # TODO (3.2): shouldn't these be defined in keywords rather? TS
        categorical_hazards = self.parameters['Categorical hazards'].value
        low_class = categorical_hazards[0].value
        medium_class = categorical_hazards[1].value
        high_class = categorical_hazards[2].value

        # The classes must be different to each other
        unique_classes_flag = all(
            x != y for x, y in list(
                itertools.combinations(
                    [low_class, medium_class, high_class], 2)))
        if not unique_classes_flag:
            raise FunctionParametersError(
                'There are hazard classes with the same value. Please check '
                'the parameters.')

        # Extract data as numeric arrays
        hazard_data = self.hazard.layer.get_data(nan=True)  # Class
        if has_no_data(hazard_data):
            self.no_data_warning = True

        # Calculate impact as population exposed to each class
        population = self.exposure.layer.get_data(scaling=True)

        # Get all population data that falls in each hazard class
        high_hazard_population = numpy.where(
            hazard_data == high_class, population, 0)
        medium_hazard_population = numpy.where(
            hazard_data == medium_class, population, 0)
        low_hazard_population = numpy.where(
            hazard_data == low_class, population, 0)
        affected_population = (
            high_hazard_population + medium_hazard_population +
            low_hazard_population)

        # Carry the no data values forward to the impact layer.
        affected_population = numpy.where(
            numpy.isnan(population),
            numpy.nan,
            affected_population)
        affected_population = numpy.where(
            numpy.isnan(hazard_data),
            numpy.nan,
            affected_population)

        # Count totals
        self.total_population = int(numpy.nansum(population))
        self.affected_population[
            tr('Population in low hazard zone')] = int(
                numpy.nansum(low_hazard_population))
        self.affected_population[
            tr('Population in medium hazard zone')] = int(
                numpy.nansum(medium_hazard_population))
        self.affected_population[
            tr('Population in high hazard zone')] = int(
                numpy.nansum(high_hazard_population))
        self.unaffected_population = (
            self.total_population - self.total_affected_population)

        # check for zero impact
        if self.total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        self.minimum_needs = [
            parameter.serialize() for parameter in
            self.parameters['minimum needs']
        ]

        total_needs = self.total_needs

        # Create style
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00',
            '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(affected_population.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []

        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])
            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 0
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            data=affected_population,
            projection=self.exposure.layer.get_projection(),
            geotransform=self.exposure.layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
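
The class masking and no-data carry-over above can be tried in isolation; a toy sketch with hypothetical arrays (class values and grids are made up):

import numpy

# Toy classified hazard (1 = low, 2 = medium, 3 = high; NaN = no data)
hazard_data = numpy.array([[1.0, 2.0], [3.0, numpy.nan]])
population = numpy.array([[10.0, 20.0], [30.0, numpy.nan]])

low_class, medium_class, high_class = 1, 2, 3

high = numpy.where(hazard_data == high_class, population, 0)
medium = numpy.where(hazard_data == medium_class, population, 0)
low = numpy.where(hazard_data == low_class, population, 0)
affected = high + medium + low

# Carry no-data cells from either input forward to the impact grid
affected = numpy.where(numpy.isnan(population), numpy.nan, affected)
affected = numpy.where(numpy.isnan(hazard_data), numpy.nan, affected)

total_population = int(numpy.nansum(population))   # 60
population_in_high = int(numpy.nansum(high))       # 30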
Example #12
    def run(self):
        """Risk plugin for tsunami population evacuation.

        Counts number of people exposed to tsunami levels exceeding
        specified threshold.

        :returns: Map of population exposed to tsunami levels exceeding the
            threshold. Table with number of people evacuated and supplies
            required.
        :rtype: tuple
        """

        # Determine depths above which people are regarded affected [m]
        # Use thresholds from inundation layer if specified
        thresholds = self.parameters['thresholds'].value

        verify(
            isinstance(thresholds, list),
            'Expected thresholds to be a list. Got %s' % str(thresholds))

        # Extract data as numeric arrays
        data = self.hazard.layer.get_data(nan=True)  # Depth
        if has_no_data(data):
            self.no_data_warning = True

        # Calculate impact as population exposed to depths > max threshold
        population = self.exposure.layer.get_data(nan=True, scaling=True)
        if has_no_data(population):
            self.no_data_warning = True

        # merely initialize
        impact = None
        for i, lo in enumerate(thresholds):
            if i == len(thresholds) - 1:
                # The last threshold
                thresholds_name = tr(
                    'People in >= %.1f m of water') % lo
                impact = medium = numpy.where(data >= lo, population, 0)
                self.impact_category_ordering.append(thresholds_name)
                self._evacuation_category = thresholds_name
            else:
                # Intermediate thresholds
                hi = thresholds[i + 1]
                thresholds_name = tr(
                    'People in %.1f m to %.1f m of water' % (lo, hi))
                medium = numpy.where((data >= lo) * (data < hi), population, 0)

            # Count
            val = int(numpy.nansum(medium))
            self.affected_population[thresholds_name] = val

        # Put the deepest area on top #2385
        self.impact_category_ordering.reverse()

        # Carry the no data values forward to the impact layer.
        impact = numpy.where(numpy.isnan(population), numpy.nan, impact)
        impact = numpy.where(numpy.isnan(data), numpy.nan, impact)

        # Count totals
        self.total_population = int(numpy.nansum(population))
        self.unaffected_population = (
            self.total_population - self.total_affected_population)

        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]

        # check for zero impact
        if numpy.nanmax(impact) == 0 == numpy.nanmin(impact):
            message = m.Message()
            message.add(self.question)
            message.add(tr('No people in %.1f m of water') % thresholds[-1])
            message = message.to_html(suppress_newlines=True)
            raise ZeroImpactException(message)

        # Create style
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00',
            '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(impact.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []

        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(interval_classes[i], 'Low')
            elif i == 4:
                label = create_label(interval_classes[i], 'Medium')
            elif i == 7:
                label = create_label(interval_classes[i], 'High')
            else:
                label = create_label(interval_classes[i])
            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['transparency'] = 0
            style_class['colour'] = colours[i]
            style_classes.append(style_class)

        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'map_title': self.metadata().key('map_title'),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'evacuated': self.total_evacuated,
            'total_needs': self.total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            impact,
            projection=self.hazard.layer.get_projection(),
            geotransform=self.hazard.layer.get_geotransform(),
            name=self.metadata().key('layer_name'),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
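
A self-contained sketch of the depth-threshold binning above, assuming toy depth and population grids (the threshold values are made up):

import numpy

# Toy inundation depths [m] and people per cell (NaN = no data)
data = numpy.array([[0.2, 0.8], [1.5, numpy.nan]])
population = numpy.array([[5.0, 10.0], [20.0, 3.0]])

thresholds = [0.5, 1.0, 1.5]  # must be monotonically increasing

affected_per_band = {}
impact = None
for i, lo in enumerate(thresholds):
    if i == len(thresholds) - 1:
        # Open-ended top band: everything at or above the last threshold
        name = '>= %.1f m' % lo
        impact = band = numpy.where(data >= lo, population, 0)
    else:
        hi = thresholds[i + 1]
        name = '%.1f m to %.1f m' % (lo, hi)
        band = numpy.where((data >= lo) & (data < hi), population, 0)
    affected_per_band[name] = int(numpy.nansum(band))

# affected_per_band == {'0.5 m to 1.0 m': 10, '1.0 m to 1.5 m': 0, '>= 1.5 m': 20}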
Example #13
    def run(self):
        """Run volcano point population evacuation Impact Function.

        Counts number of people exposed to volcano event.

        :returns: Map of population exposed to the volcano hazard zone.
            The returned dict will include a table with number of people
            evacuated and supplies required.
        :rtype: dict

        :raises:
            * Exception - When hazard layer is not vector layer
            * RadiiException - When radii are not valid (they need to be
                monotonically increasing)
        """

        # Parameters
        radii = self.parameters['distances'].value

        # Get parameters from layer's keywords
        volcano_name_attribute = self.hazard.keyword('volcano_name_field')

        data_table = self.hazard.layer.get_data()

        # Get names of volcanoes considered
        if volcano_name_attribute in self.hazard.layer.get_attribute_names():
            # Run through all polygons and get unique names
            for row in data_table:
                self.volcano_names.add(row[volcano_name_attribute])

        # Run interpolation function for polygon2raster
        interpolated_layer, covered_exposure_layer = \
            assign_hazard_values_to_exposure_data(
                self.hazard.layer,
                self.exposure.layer,
                attribute_name=self.target_field
            )

        # Initialise affected population per categories
        impact_category_ordering = []
        for radius in radii:
            category = tr('Radius %s km ' % format_int(radius))
            self.affected_population[category] = 0
            impact_category_ordering.append(category)

        self.impact_category_ordering = impact_category_ordering

        if has_no_data(self.exposure.layer.get_data(nan=True)):
            self.no_data_warning = True
        # Count affected population per polygon and total
        for row in interpolated_layer.get_data():
            # Get population at this location
            population = row[self.target_field]
            if not numpy.isnan(population):
                population = float(population)
                # Update population count for this category
                category = tr('Radius %s km ' %
                              format_int(row[self.hazard_zone_attribute]))
                self.affected_population[category] += population

        # Count totals
        self.total_population = population_rounding(
            int(numpy.nansum(self.exposure.layer.get_data())))

        self.minimum_needs = [
            parameter.serialize() for parameter in filter_needs_parameters(
                self.parameters['minimum needs'])
        ]

        # Create style
        colours = [
            '#FFFFFF', '#38A800', '#79C900', '#CEED00', '#FFCC00', '#FF6600',
            '#FF0000', '#7A0000'
        ]
        classes = create_classes(covered_exposure_layer.get_data().flat[:],
                                 len(colours))
        interval_classes = humanize_class(classes)
        # Define style info for output polygons showing population counts
        style_classes = []
        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])

            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['colour'] = colours[i]
            style_class['transparency'] = 0
            style_classes.append(style_class)

        # Override style info with new classes and name
        style_info = dict(target_field=None,
                          style_classes=style_classes,
                          style_type='rasterStyle')

        impact_data = self.generate_data()

        # Create vector layer and return
        extra_keywords = {
            'target_field': self.target_field,
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'total_needs': self.total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        impact_layer = Raster(
            data=covered_exposure_layer.get_data(),
            projection=covered_exposure_layer.get_projection(),
            geotransform=covered_exposure_layer.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
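
The per-radius accumulation above amounts to grouping interpolated point records by hazard-zone category; a toy sketch with hypothetical row dictionaries (the real rows come from assign_hazard_values_to_exposure_data and use the function's own field names):

import numpy

# Hypothetical interpolated records: people at a point and the radius [km]
# of the hazard zone that point fell into
rows = [
    {'population': 120.0, 'radius': 3},
    {'population': 80.0, 'radius': 5},
    {'population': numpy.nan, 'radius': 5},   # no-data cell is skipped
    {'population': 40.0, 'radius': 3},
]

radii = [3, 5, 10]
affected_population = dict(('Radius %s km' % r, 0) for r in radii)

for row in rows:
    population = row['population']
    if not numpy.isnan(population):
        affected_population['Radius %s km' % row['radius']] += float(population)

# affected_population == {'Radius 3 km': 160.0, 'Radius 5 km': 80.0, 'Radius 10 km': 0}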
Example #14
    def run(self):
        """Risk plugin for flood population evacuation.

        Counts number of people exposed to areas identified as flood prone

        :returns: Map of population exposed to flooding and a table with the
            number of people evacuated and supplies required.
        :rtype: tuple
        """

        # Get parameters from layer's keywords
        self.hazard_class_attribute = self.hazard.keyword('field')
        self.hazard_class_mapping = self.hazard.keyword('value_map')

        # Get the IF parameters
        self._evacuation_percentage = (
            self.parameters['evacuation_percentage'].value)

        # Check that hazard is polygon type
        if not self.hazard.layer.is_polygon_data:
            message = (
                'Input hazard must be a polygon layer. I got %s with layer '
                'type %s' % (
                    self.hazard.name,
                    self.hazard.layer.get_geometry_name()))
            raise Exception(message)

        if has_no_data(self.exposure.layer.get_data(nan=True)):
            self.no_data_warning = True

        # Check that affected field exists in hazard layer
        if (self.hazard_class_attribute in
                self.hazard.layer.get_attribute_names()):
            self.use_affected_field = True

        # Run interpolation function for polygon2raster
        interpolated_layer, covered_exposure = \
            assign_hazard_values_to_exposure_data(
                self.hazard.layer,
                self.exposure.layer,
                attribute_name=self.target_field)

        # Data for manipulating the covered_exposure layer
        new_covered_exposure_data = covered_exposure.get_data()
        covered_exposure_top_left = numpy.array([
            covered_exposure.get_geotransform()[0],
            covered_exposure.get_geotransform()[3]])
        covered_exposure_dimension = numpy.array([
            covered_exposure.get_geotransform()[1],
            covered_exposure.get_geotransform()[5]])

        # Count affected population per polygon, per category and total
        total_affected_population = 0
        for attr in interpolated_layer.get_data():
            affected = False
            if self.use_affected_field:
                row_affected_value = attr[self.hazard_class_attribute]
                if row_affected_value is not None:
                    affected = get_key_for_value(
                        row_affected_value, self.hazard_class_mapping)
            else:
                # assume that every polygon is affected (see #816)
                affected = self.wet

            if affected == self.wet:
                # Get population at this location
                population = attr[self.target_field]
                if not numpy.isnan(population):
                    population = float(population)
                    total_affected_population += population
            else:
                # If it's not affected, set the value of the impact layer to 0
                grid_point = attr['grid_point']
                index = numpy.floor(
                    (grid_point - covered_exposure_top_left) / (
                        covered_exposure_dimension)).astype(int)
                new_covered_exposure_data[index[1]][index[0]] = 0

        # Estimate number of people in need of evacuation
        if self.use_affected_field:
            affected_population = tr(
                'People within hazard field ("%s") of value "%s"') % (
                    self.hazard_class_attribute,
                    ','.join([
                        unicode(hazard_class) for
                        hazard_class in self.hazard_class_mapping[self.wet]
                    ]))
        else:
            affected_population = tr('People within any hazard polygon.')

        self.affected_population[affected_population] = (
            total_affected_population)

        self.total_population = int(
            numpy.nansum(self.exposure.layer.get_data(scaling=False)))
        self.unaffected_population = (
            self.total_population - self.total_affected_population)

        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]

        # Create style
        colours = ['#FFFFFF', '#38A800', '#79C900', '#CEED00',
                   '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(
            new_covered_exposure_data.flat[:], len(colours))

        # check for zero impact
        if total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        interval_classes = humanize_class(classes)
        # Define style info for output polygons showing population counts
        style_classes = []
        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])

            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['colour'] = colours[i]
            style_class['transparency'] = 0
            style_classes.append(style_class)

        # Override style info with new classes and name
        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'target_field': self.target_field,
            'map_title': self.metadata().key('map_title'),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'affected_population': total_affected_population,
            'total_population': self.total_population,
            'total_needs': self.total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster layer and return
        impact_layer = Raster(
            data=new_covered_exposure_data,
            projection=covered_exposure.get_projection(),
            geotransform=covered_exposure.get_geotransform(),
            name=self.metadata().key('layer_name'),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
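
The grid_point to array-index conversion above follows standard GDAL geotransform arithmetic; a small sketch with a made-up geotransform:

import numpy

# GDAL-style geotransform:
# (top-left x, pixel width, 0, top-left y, 0, pixel height)
# Pixel height is negative for north-up rasters
geotransform = (106.80, 0.01, 0.0, -6.10, 0.0, -0.01)

top_left = numpy.array([geotransform[0], geotransform[3]])
cell_size = numpy.array([geotransform[1], geotransform[5]])

def grid_index(point):
    """Return (column, row) of the cell containing an (x, y) point."""
    return numpy.floor((numpy.array(point) - top_left) / cell_size).astype(int)

col, row = grid_index((106.835, -6.125))
# col == 3, row == 2, so data[row][col] addresses the matching cell, mirroring
# the new_covered_exposure_data[index[1]][index[0]] access above.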
Example #15
    def run(self):
        """Risk plugin for flood population evacuation.

        Counts number of people exposed to areas identified as flood prone

        :returns: Map of population exposed to flooding and a table with the
            number of people evacuated and supplies required.
        :rtype: tuple
        """

        # Get parameters from layer's keywords
        self.hazard_class_attribute = self.hazard.keyword('field')
        self.hazard_class_mapping = self.hazard.keyword('value_map')
        # Abort early if the wet class does not appear in the class mapping
        if self.wet not in self.hazard_class_mapping:
            raise ZeroImpactException(tr(
                'There is no flooded area in the hazard layers, thus there '
                'is no affected population.'))

        # Get the IF parameters
        self._evacuation_percentage = (
            self.parameters['evacuation_percentage'].value)

        # Check that hazard is polygon type
        if not self.hazard.layer.is_polygon_data:
            message = (
                'Input hazard must be a polygon layer. I got %s with layer '
                'type %s' % (
                    self.hazard.name,
                    self.hazard.layer.get_geometry_name()))
            raise Exception(message)

        if has_no_data(self.exposure.layer.get_data(nan=True)):
            self.no_data_warning = True

        # Check that affected field exists in hazard layer
        if (self.hazard_class_attribute in
                self.hazard.layer.get_attribute_names()):
            self.use_affected_field = True

        # Run interpolation function for polygon2raster
        interpolated_layer, covered_exposure = \
            assign_hazard_values_to_exposure_data(
                self.hazard.layer,
                self.exposure.layer,
                attribute_name=self.target_field)

        # Data for manipulating the covered_exposure layer
        new_covered_exposure_data = covered_exposure.get_data()
        covered_exposure_top_left = numpy.array([
            covered_exposure.get_geotransform()[0],
            covered_exposure.get_geotransform()[3]])
        covered_exposure_dimension = numpy.array([
            covered_exposure.get_geotransform()[1],
            covered_exposure.get_geotransform()[5]])

        # Count affected population per polygon, per category and total
        total_affected_population = 0
        for attr in interpolated_layer.get_data():
            affected = False
            if self.use_affected_field:
                row_affected_value = attr[self.hazard_class_attribute]
                if row_affected_value is not None:
                    affected = get_key_for_value(
                        row_affected_value, self.hazard_class_mapping)
            else:
                # assume that every polygon is affected (see #816)
                affected = self.wet

            if affected == self.wet:
                # Get population at this location
                population = attr[self.target_field]
                if not numpy.isnan(population):
                    population = float(population)
                    total_affected_population += population
            else:
                # If it's not affected, set the value of the impact layer to 0
                grid_point = attr['grid_point']
                index = numpy.floor(
                    (grid_point - covered_exposure_top_left) / (
                        covered_exposure_dimension)).astype(int)
                new_covered_exposure_data[index[1]][index[0]] = 0

        # Estimate number of people in need of evacuation
        if self.use_affected_field:
            affected_population = tr(
                'People within hazard field ("%s") of value "%s"') % (
                    self.hazard_class_attribute,
                    ','.join([
                        unicode(hazard_class) for
                        hazard_class in self.hazard_class_mapping[self.wet]
                    ]))
        else:
            affected_population = tr('People within any hazard polygon.')

        self.affected_population[affected_population] = (
            total_affected_population)

        self.total_population = int(
            numpy.nansum(self.exposure.layer.get_data(scaling=False)))
        self.unaffected_population = (
            self.total_population - self.total_affected_population)

        self.minimum_needs = [
            parameter.serialize() for parameter in
            filter_needs_parameters(self.parameters['minimum needs'])
        ]

        # Create style
        colours = ['#FFFFFF', '#38A800', '#79C900', '#CEED00',
                   '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        classes = create_classes(
            new_covered_exposure_data.flat[:], len(colours))

        # check for zero impact
        if total_affected_population == 0:
            message = no_population_impact_message(self.question)
            raise ZeroImpactException(message)

        interval_classes = humanize_class(classes)
        # Define style info for output polygons showing population counts
        style_classes = []
        for i in xrange(len(colours)):
            style_class = dict()
            if i == 1:
                label = create_label(
                    interval_classes[i],
                    tr('Low Population [%i people/cell]' % classes[i]))
            elif i == 4:
                label = create_label(
                    interval_classes[i],
                    tr('Medium Population [%i people/cell]' % classes[i]))
            elif i == 7:
                label = create_label(
                    interval_classes[i],
                    tr('High Population [%i people/cell]' % classes[i]))
            else:
                label = create_label(interval_classes[i])

            style_class['label'] = label
            style_class['quantity'] = classes[i]
            style_class['colour'] = colours[i]
            style_class['transparency'] = 0
            style_classes.append(style_class)

        # Override style info with new classes and name
        style_info = dict(
            target_field=None,
            style_classes=style_classes,
            style_type='rasterStyle')

        impact_data = self.generate_data()

        extra_keywords = {
            'target_field': self.target_field,
            'map_title': self.map_title(),
            'legend_notes': self.metadata().key('legend_notes'),
            'legend_units': self.metadata().key('legend_units'),
            'legend_title': self.metadata().key('legend_title'),
            'affected_population': total_affected_population,
            'total_population': self.total_population,
            'total_needs': self.total_needs
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster layer and return
        impact_layer = Raster(
            data=new_covered_exposure_data,
            projection=covered_exposure.get_projection(),
            geotransform=covered_exposure.get_geotransform(),
            name=self.map_title(),
            keywords=impact_layer_keywords,
            style_info=style_info)

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
Example #16
    def run(self):
        """Indonesian Earthquake Fatality Model.

        Some additional notes to clarify behaviour:

        * Total population = all people within the analysis area
        * Affected population = displaced people + people killed
        * Displaced = people * displacement rate for MMI level
        * Killed = people * mortality rate for MMI level
        * Impact layer produced = affected population

        """
        displacement_rate = self.hardcoded_parameters["displacement_rate"]
        fatality_rate = self.compute_fatality_rate()

        # Extract data grids
        hazard = self.hazard.layer.get_data()  # Ground Shaking
        # Population Density
        exposure = self.exposure.layer.get_data(scaling=True)

        # Calculate people affected by each MMI level
        mmi_range = self.hardcoded_parameters["mmi_range"]
        number_of_exposed = {}
        number_of_displaced = {}
        number_of_fatalities = {}
        # Calculate fatality rates for observed intensity values (hazard),
        # based on the ITB power model
        mask = numpy.zeros(hazard.shape)
        for mmi in mmi_range:
            # Identify cells where MMI is in class i and
            # count people affected by this shake level
            step = self.hardcoded_parameters["step"]
            mmi_matches = numpy.where((hazard > mmi - step) * (hazard <= mmi + step), exposure, 0)

            # Calculate expected number of fatalities per level
            exposed = numpy.nansum(mmi_matches)
            fatalities = fatality_rate[mmi] * exposed

            # Calculate expected number of displaced people per level
            displacements = displacement_rate[mmi] * (exposed - numpy.median(fatalities))

            # Adjust displaced people to disregard fatalities.
            # Set to zero if there are more fatalities than displaced.
            # displacements = numpy.where(
            #    displacements > fatalities, displacements - fatalities, 0)

            # Sum up numbers for map
            # We need to use matrices here and not just numbers #2235
            # filter out NaN to avoid overflow additions
            # Changed in 3.5.3 for Issue #3489 to correct the mask
            # so that it returns affected (displaced + fatalities)
            mmi_matches = displacement_rate[mmi] * numpy.nan_to_num(mmi_matches)
            mask += mmi_matches  # Displaced

            # Generate text with result for this study
            # This is what is used in the real time system exposure table
            number_of_exposed[mmi] = exposed
            number_of_displaced[mmi] = displacements
            # noinspection PyUnresolvedReferences
            number_of_fatalities[mmi] = fatalities

        # Total statistics
        total_fatalities_raw = numpy.nansum(number_of_fatalities.values(), axis=0)

        # Compute probability of fatality in each magnitude bin
        if (self.__class__.__name__ == "PAGFatalityFunction") or (
            self.__class__.__name__ == "ITBBayesianFatalityFunction"
        ):
            prob_fatality_mag = self.compute_probability(total_fatalities_raw)
        else:
            prob_fatality_mag = None

        # Compute number of fatalities
        self.total_population = numpy.nansum(number_of_exposed.values())
        self.total_fatalities = numpy.median(total_fatalities_raw)
        total_displaced = numpy.nansum(number_of_displaced.values())

        # As per email discussion with Ole, Trevor, Hadi, total fatalities < 50
        # will be rounded down to 0 - Tim
        # Needs to revisit but keep it alive for the time being - Hyeuk, Jono
        if self.total_fatalities < 50:
            self.total_fatalities = 0

        affected_population = self.affected_population
        affected_population[tr("Number of fatalities")] = self.total_fatalities
        affected_population[tr("Number of people displaced")] = total_displaced
        self.unaffected_population = self.total_population - total_displaced - self.total_fatalities
        self._evacuation_category = tr("Number of people displaced")

        self.minimum_needs = [
            parameter.serialize() for parameter in filter_needs_parameters(self.parameters["minimum needs"])
        ]
        total_needs = self.total_needs

        # Create style
        colours = ["#EEFFEE", "#FFFF7F", "#E15500", "#E4001B", "#730000"]
        classes = create_classes(mask.flat[:], len(colours))
        interval_classes = humanize_class(classes)
        style_classes = []
        for i in xrange(len(interval_classes)):
            style_class = dict()
            style_class["label"] = create_label(interval_classes[i])
            style_class["quantity"] = classes[i]
            style_class["transparency"] = 30
            style_class["colour"] = colours[i]
            style_classes.append(style_class)

        style_info = dict(target_field=None, style_classes=style_classes, style_type="rasterStyle")

        impact_data = self.generate_data()

        extra_keywords = {
            "exposed_per_mmi": number_of_exposed,
            "total_population": self.total_population,
            "total_fatalities": population_rounding(self.total_fatalities),
            "total_fatalities_raw": self.total_fatalities,
            "fatalities_per_mmi": number_of_fatalities,
            "total_displaced": population_rounding(total_displaced),
            "displaced_per_mmi": number_of_displaced,
            "map_title": self.map_title(),
            "legend_notes": self.metadata().key("legend_notes"),
            "legend_units": self.metadata().key("legend_units"),
            "legend_title": self.metadata().key("legend_title"),
            "total_needs": total_needs,
            "prob_fatality_mag": prob_fatality_mag,
        }

        impact_layer_keywords = self.generate_impact_keywords(extra_keywords)

        # Create raster object and return
        impact_layer = Raster(
            mask,
            projection=self.exposure.layer.get_projection(),
            geotransform=self.exposure.layer.get_geotransform(),
            keywords=impact_layer_keywords,
            name=self.map_title(),
            style_info=style_info,
        )

        impact_layer.impact_data = impact_data
        self._impact = impact_layer
        return impact_layer
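
One reading of the numpy.median calls above is that fatality_rate[mmi] can be a vector of sampled rates in the probabilistic variants (the compute_probability branch); the toy sketch below, with made-up numbers, shows how per-sample totals collapse to a single central estimate:

import numpy

# Per-MMI exposure totals and, per MMI, a hypothetical vector of sampled rates
number_of_exposed = {5: 1000.0, 6: 500.0}
fatality_rate = {
    5: numpy.array([0.0001, 0.0002, 0.0004]),
    6: numpy.array([0.001, 0.002, 0.004]),
}

number_of_fatalities = dict(
    (mmi, fatality_rate[mmi] * number_of_exposed[mmi])
    for mmi in number_of_exposed)

# Summing across MMI keeps the per-sample vector; the median then yields one
# central estimate of total fatalities
total_fatalities_raw = numpy.nansum(list(number_of_fatalities.values()), axis=0)
total_fatalities = numpy.median(total_fatalities_raw)
# total_fatalities_raw == [0.6, 1.2, 2.4]; total_fatalities == 1.2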