def run(self, layers):
    """Risk plugin for flood population evacuation.

    Input
      layers: List of layers expected to contain
          H: Raster layer of flood depth
          P: Raster layer of population data on the same grid as H

    Counts number of people exposed to flood levels exceeding
    specified threshold.

    Return
      Map of population exposed to flood levels exceeding the threshold
      Table with number of people evacuated and supplies required
    """

    # Depth above which people are regarded affected [m]
    threshold = 1.0  # Threshold [m]

    # Identify hazard and exposure layers
    inundation = get_hazard_layer(layers)  # Flood inundation [m]
    population = get_exposure_layer(layers)

    question = get_question(inundation.get_name(),
                            population.get_name(),
                            self)

    # Extract data as numeric arrays (NaN cells treated as zero depth)
    D = inundation.get_data(nan=0.0)  # Depth

    # Calculate impact as population exposed to depths > threshold.
    # M and L use hardcoded 0.5 m and 0.3 m bands respectively.
    P = population.get_data(nan=0.0, scaling=True)
    I = numpy.where(D > threshold, P, 0)
    M = numpy.where(D > 0.5, P, 0)
    L = numpy.where(D > 0.3, P, 0)

    # Count totals. medium/low are band counts obtained by subtracting
    # the cumulative exposure of the next-deeper band.
    total = int(numpy.sum(P))
    evacuated = int(numpy.sum(I))
    medium = int(numpy.sum(M)) - int(numpy.sum(I))
    low = int(numpy.sum(L)) - int(numpy.sum(M))

    # Don't show digits less than a 1000
    # NOTE(review): values of exactly 1000 are not rounded (strict >);
    # confirm whether >= was intended.
    if total > 1000:
        total = total // 1000 * 1000
    if evacuated > 1000:
        evacuated = evacuated // 1000 * 1000
    if medium > 1000:
        medium = medium // 1000 * 1000
    if low > 1000:
        low = low // 1000 * 1000

    # Calculate estimated needs based on BNPB Perka 7/2008 minimum bantuan
    # (per-person weekly rates; family_kits/toilets are per 5/20 people)
    rice = evacuated * 2.8
    drinking_water = evacuated * 17.5
    water = evacuated * 67
    family_kits = evacuated / 5
    toilets = evacuated / 20

    # Generate impact report for the pdf map
    table_body = [question,
                  TableRow([_('People needing evacuation'),
                            '%i' % evacuated],
                           header=True),
                  TableRow(_('Map shows population density needing '
                             'evacuation'))]
    #,
    ##               TableRow([_('People in 50cm to 1m of water '),
    ##                         '%i' % medium],
    ##                        header=True),
    ##               TableRow([_('People in 30cm to 50cm of water'),
    ##                         '%i' % low],
    ##                        header=True)]
    ##
    ##               TableRow([_('Needs per week'), _('Total')],
    ##                        header=True),
    ##               [_('Rice [kg]'), int(rice)],
    ##               [_('Drinking Water [l]'), int(drinking_water)],
    ##               [_('Clean Water [l]'), int(water)],
    ##               [_('Family Kits'), int(family_kits)],
    ##               [_('Toilets'), int(toilets)]]
    impact_table = Table(table_body).toNewlineFreeString()

    # Extend impact report for on-screen display
    table_body.extend([TableRow(_('Notes:'), header=True),
                       _('Total population: %i') % total,
                       _('People need evacuation if flood levels '
                         'exceed %(eps)i m') % {'eps': threshold},
                       _('People in 50cm to 1m of water: %i') % medium,
                       _('People in 30cm to 50cm of water: %i') % low])
    ##                 _('Minimum needs are defined in BNPB '
    ##                   'regulation 7/2008')])
    impact_summary = Table(table_body).toNewlineFreeString()
    map_title = _('People in need of evacuation')

    # NOTE(review): style_info is never defined in this method; this line
    # raises NameError unless it is a global or class attribute — confirm
    # and define a style dict here if needed.
    style_info['legend_title'] = _('Population Density')

    # Create raster object and return
    R = Raster(I,
               projection=inundation.get_projection(),
               geotransform=inundation.get_geotransform(),
               name=_('Population which %s') % get_function_title(self),
               keywords={'impact_summary': impact_summary,
                         'impact_table': impact_table,
                         'map_title': map_title},
               style_info=style_info)
    return R
def run(self, layers):
    """Risk plugin for Padang building survey.

    Input
      layers: List of layers expected to contain
          H: Raster layer of ground shaking (MMI)
          E: Vector layer of building locations

    Return
      Vector layer of buildings with estimated percent damage and
      an impact report keyed into the layer's keywords.
    """

    # Extract data
    H = get_hazard_layer(layers)    # Ground shaking
    E = get_exposure_layer(layers)  # Building locations

    question = get_question(H.get_name(), E.get_name(), self)

    # Map from different kinds of datasets to Padang vulnerability classes
    datatype = E.get_keywords()['datatype']
    vclass_tag = 'VCLASS'
    if datatype.lower() == 'osm':
        # Map from OSM attributes
        Emap = osm2padang(E)
    elif datatype.lower() == 'sigab':
        # Map from SIGAB attributes
        Emap = sigab2padang(E)
    else:
        # Assume data already carries Padang vulnerability classes
        Emap = E

    # Interpolate hazard level to building locations
    I = H.interpolate(Emap, attribute_name='MMI')

    # Extract relevant numerical data
    attributes = I.get_data()
    N = len(I)

    # Calculate building damage using lognormal fragility curves per
    # vulnerability class (damage_curves presumably maps class -> params;
    # TODO confirm it covers every class Emap can produce).
    count_high = count_medium = count_low = count_none = 0
    for i in range(N):
        mmi = float(attributes[i]['MMI'])
        building_type = Emap.get_data(vclass_tag, i)
        damage_params = damage_curves[building_type]
        beta = damage_params['beta']
        median = damage_params['median']
        percent_damage = lognormal_cdf(mmi,
                                       median=median,
                                       sigma=beta) * 100

        # Add calculated impact to existing attributes
        attributes[i][self.target_field] = percent_damage

        # Calculate statistics (bins: <10, [10,33), [33,66), >=66)
        if percent_damage < 10:
            count_none += 1
        if 10 <= percent_damage < 33:
            count_low += 1
        if 33 <= percent_damage < 66:
            count_medium += 1
        if 66 <= percent_damage:
            count_high += 1

    # Generate impact report
    table_body = [question,
                  TableRow([_('Buildings'), _('Total')],
                           header=True),
                  TableRow([_('All'), N]),
                  TableRow([_('No damage'), count_none]),
                  TableRow([_('Low damage'), count_low]),
                  TableRow([_('Medium damage'), count_medium]),
                  TableRow([_('High damage'), count_high])]
    table_body.append(TableRow(_('Notes:'), header=True))
    table_body.append(_('Levels of impact are defined by post 2009 '
                        'Padang earthquake survey conducted by Geoscience '
                        'Australia and Institute of Teknologi Bandung.'))
    table_body.append(_('Unreinforced masonry is assumed where no '
                        'structural information is available.'))
    impact_summary = Table(table_body).toNewlineFreeString()
    impact_table = impact_summary
    map_title = _('Earthquake damage to buildings')

    # Create style — class boundaries match the statistics bins above
    style_classes = [dict(label=_('No damage'), min=0, max=10,
                          colour='#00ff00', transparency=1),
                     dict(label=_('Low damage'), min=10, max=33,
                          colour='#ffff00', transparency=1),
                     dict(label=_('Medium damage'), min=33, max=66,
                          colour='#ffaa00', transparency=1),
                     dict(label=_('High damage'), min=66, max=100,
                          colour='#ff0000', transparency=1)]
    style_info = dict(target_field=self.target_field,
                      style_classes=style_classes)

    # Create vector layer and return
    V = Vector(data=attributes,
               projection=E.get_projection(),
               geometry=E.get_geometry(),
               name='Estimated pct damage',
               keywords={'impact_summary': impact_summary,
                         'impact_table': impact_table,
                         'map_title': map_title},
               style_info=style_info)
    return V
def run(self, layers, x=0.62275231, y=8.03314466, zeta=2.15): """Gender specific earthquake impact model Input layers: List of layers expected to contain H: Raster layer of MMI ground shaking P: Raster layer of population density """ # Define percentages of people being displaced at each mmi level displacement_rate = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0.1, 8: 0.5, 9: 0.75, 10: 1.0} # Extract input layers intensity = get_hazard_layer(layers) population = get_exposure_layer(layers) question = get_question(intensity.get_name(), population.get_name(), self) # Extract data grids H = intensity.get_data() # Ground Shaking P = population.get_data() # Population Density # Calculate population affected by each MMI level # FIXME (Ole): this range is 2-9. Should 10 be included? mmi_range = range(2, 10) number_of_exposed = {} number_of_fatalities = {} # Calculate fatality rates for observed Intensity values (H # based on ITB power model R = numpy.zeros(H.shape) for mmi in mmi_range: # Identify cells where MMI is in class i mask = (H > mmi - 0.5) * (H <= mmi + 0.5) # Count population affected by this shake level I = numpy.where(mask, P, 0) # Calculate expected number of fatalities per level fatality_rate = numpy.power(10.0, x * mmi - y) F = fatality_rate * I # Sum up fatalities to create map R += F # Generate text with result for this study # This is what is used in the real time system exposure table number_of_exposed[mmi] = numpy.nansum(I.flat) number_of_fatalities[mmi] = numpy.nansum(F.flat) # Set resulting layer to zero when less than a threshold. This is to # achieve transparency (see issue #126). 
R[R < 1] = numpy.nan # Total statistics total = numpy.nansum(P.flat) # Compute number of fatalities fatalities = numpy.nansum(number_of_fatalities.values()) # Compute number of people displaced due to building collapse displaced = 0 for mmi in mmi_range: displaced += displacement_rate[mmi] * number_of_exposed[mmi] displaced_women = displaced * 0.52 # Could be made province dependent displaced_pregnant_women = displaced_women * 0.01387 # CHECK # Generate impact report table_body = [question] # Add total fatality estimate s = str(int(fatalities)).rjust(10) table_body.append(TableRow([_("Number of fatalities"), s], header=True)) # Add total estimate of people displaced s = str(int(displaced)).rjust(10) table_body.append(TableRow([_("Number of people displaced"), s], header=True)) s = str(int(displaced_women)).rjust(10) table_body.append(TableRow([_("Number of women displaced"), s], header=True)) s = str(int(displaced_pregnant_women)).rjust(10) table_body.append(TableRow([_("Number of pregnant women displaced"), s], header=True)) table_body.append(TableRow(_("Action Checklist:"), header=True)) table_body.append(_("Are enough shelters available for %i women?") % displaced_women) table_body.append( _("Are enough facilities available to assist %i " "pregnant women?") % displaced_pregnant_women ) table_body.append(TableRow(_("Notes:"), header=True)) table_body.append(_("Fatality model is from " "Institute of Teknologi Bandung 2012.")) impact_summary = Table(table_body).toNewlineFreeString() impact_table = impact_summary map_title = _("Earthquake impact to population") # Create new layer and return L = Raster( R, projection=population.get_projection(), geotransform=population.get_geotransform(), keywords={ "impact_summary": impact_summary, "total_population": total, "total_fatalities": fatalities, "impact_table": impact_table, "map_title": map_title, }, name=_("Estimated fatalities"), style_info=style_info, ) # Maybe return a shape file with contours instead return L
def run(self, layers, x=0.62275231, y=8.03314466, zeta=2.15):
    """Indonesian Earthquake Fatality Model.

    Input
      layers: List of layers expected to contain
          H: Raster layer of MMI ground shaking
          P: Raster layer of population density
      x, y: ITB power-model coefficients (fatality rate = 10^(x*mmi - y))
      zeta: model parameter (unused in this method — TODO confirm intent)

    NOTE(review): this method ends after the per-MMI accumulation loop;
    report generation and the return value presumably follow elsewhere —
    confirm against the full file.
    """

    # Define percentages of people being displaced at each mmi level
    displacement_rate = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0,
                         6: 0, 7: 0.1, 8: 0.5, 9: 0.75, 10: 1.0}

    # Extract input layers
    intensity = get_hazard_layer(layers)
    population = get_exposure_layer(layers)

    question = get_question(intensity.get_name(),
                            population.get_name(),
                            self)

    # Extract data grids
    H = intensity.get_data()   # Ground Shaking
    P = population.get_data()  # Population Density

    # Calculate population affected by each MMI level
    # FIXME (Ole): this range is 2-9. Should 10 be included?
    mmi_range = range(2, 10)
    number_of_exposed = {}
    number_of_displaced = {}
    number_of_fatalities = {}

    # Calculate fatality rates for observed Intensity values (H
    # based on ITB power model
    R = numpy.zeros(H.shape)
    for mmi in mmi_range:
        # Identify cells where MMI is in class i (half-open band around mmi)
        mask = (H > mmi - 0.5) * (H <= mmi + 0.5)

        # Count population affected by this shake level
        I = numpy.where(mask, P, 0)

        # Calculate expected number of fatalities per level
        fatality_rate = numpy.power(10.0, x * mmi - y)
        F = fatality_rate * I

        # Calculate expected number of displaced people per level.
        # Predefine D so that the nansum below cannot hit a NameError
        # if the guarded line raises (previously D was unbound on a
        # first-iteration failure).
        D = numpy.zeros(I.shape)
        try:
            D = displacement_rate[mmi] * I
        except Exception as e:
            # HACK(review): best-effort debug dump to a hardcoded Windows
            # path; swallows the error and continues with D == zeros.
            # Consider proper logging instead.
            msg = 'mmi = %i, I = %s, Error msg: %s' % (mmi, str(I), str(e))
            with open('C:\\error_message.txt', 'wb') as fid:
                fid.write(msg)

        # Sum up numbers for map
        R += F  # Fatalities
        #R += D  # Displaced

        # Generate text with result for this study
        # This is what is used in the real time system exposure table
        number_of_exposed[mmi] = numpy.nansum(I.flat)
        number_of_displaced[mmi] = numpy.nansum(D.flat)
        number_of_fatalities[mmi] = numpy.nansum(F.flat)
def run(self, layers):
    """Flood impact to buildings (e.g. from Open Street Map).

    Input
      layers: List of layers expected to contain
          H: Hazard layer — raster of flood depth [m] or vector of
             flood-prone polygons
          E: Vector layer of building locations

    Return
      Vector layer of buildings with a boolean affected flag in
      self.target_field and an impact report in the keywords.
    """
    threshold = 1.0  # Flood threshold [m]

    # Extract data
    H = get_hazard_layer(layers)    # Depth
    E = get_exposure_layer(layers)  # Building locations

    question = get_question(H.get_name(), E.get_name(), self)

    # Interpolate hazard level to building locations.
    # Raster hazard -> interpolated depth; vector hazard -> polygon
    # attributes (flood prone).
    if H.is_raster:
        I = H.interpolate(E, attribute_name='depth')
        hazard_type = 'depth'
    else:
        I = H.interpolate(E)
        hazard_type = 'floodprone'

    # Extract relevant exposure data
    attribute_names = I.get_attribute_names()
    attributes = I.get_data()
    N = len(I)

    # Calculate building impact
    count = 0
    buildings = {}
    affected_buildings = {}
    for i in range(N):
        if hazard_type == 'depth':
            # Get the interpolated depth; affected iff above threshold
            x = float(attributes[i]['depth'])
            x = x > threshold
        elif hazard_type == 'floodprone':
            # Use interpolated polygon attribute
            atts = attributes[i]
            if 'FLOODPRONE' in atts:
                res = atts['FLOODPRONE']
                if res is None:
                    x = False
                else:
                    x = res.lower() == 'yes'
            else:
                # If there isn't a flood prone attribute,
                # assume that building is wet if inside polygon
                # as flag by generic attribute AFFECTED
                res = atts['Affected']
                if res is None:
                    x = False
                else:
                    x = res
        else:
            msg = (_('Unknown hazard type %s. '
                     'Must be either "depth" or "floodprone"') % hazard_type)
            raise Exception(msg)

        # Count affected buildings by usage type if available
        if 'type' in attribute_names:
            usage = attributes[i]['type']
        else:
            usage = None
        if usage is not None and usage != 0:
            key = usage
        else:
            key = 'unknown'
        if key not in buildings:
            buildings[key] = 0
            affected_buildings[key] = 0

        # Count all buildings by type
        buildings[key] += 1
        if x is True:
            # Count affected buildings by type
            affected_buildings[key] += 1
            # Count total affected buildings
            count += 1

        # Add calculated impact to existing attributes
        attributes[i][self.target_field] = x

    # Lump small entries and 'unknown' into 'other' category.
    # NOTE(review): deleting keys while iterating .keys() is safe under
    # Python 2 (list snapshot) but would raise under Python 3.
    for usage in buildings.keys():
        x = buildings[usage]
        if x < 25 or usage == 'unknown':
            if 'other' not in buildings:
                buildings['other'] = 0
                affected_buildings['other'] = 0
            buildings['other'] += x
            affected_buildings['other'] += affected_buildings[usage]
            del buildings[usage]
            del affected_buildings[usage]

    # Generate csv file of results
    ## fid = open('C:\dki_table_%s.csv' % H.get_name(), 'wb')
    ## fid.write('%s, %s, %s\n' % (_('Building type'),
    ##                             _('Temporarily closed'),
    ##                             _('Total')))
    ## fid.write('%s, %i, %i\n' % (_('All'), count, N))

    # Generate simple impact report
    table_body = [question,
                  TableRow([_('Building type'),
                            _('Temporarily closed'),
                            _('Total')],
                           header=True),
                  TableRow([_('All'), count, N])]
    ## fid.write('%s, %s, %s\n' % (_('Building type'),
    ##                             _('Temporarily closed'),
    ##                             _('Total')))

    # Generate break down by building usage type is available
    if 'type' in attribute_names:
        # Make list of building types
        building_list = []
        for usage in buildings:
            building_type = usage.replace('_', ' ')

            # Lookup internationalised value if available
            if building_type in internationalised_values:
                building_type = internationalised_values[building_type]
            else:
                print ('WARNING: %s could not be translated'
                       % building_type)
            building_list.append([building_type.capitalize(),
                                  affected_buildings[usage],
                                  buildings[usage]])
            ## fid.write('%s, %i, %i\n' % (building_type.capitalize(),
            ##                             affected_buildings[usage],
            ##                             buildings[usage]))

        # Sort alphabetically
        building_list.sort()

        #table_body.append(TableRow([_('Building type'),
        #                            _('Temporarily closed'),
        #                            _('Total')], header=True))
        table_body.append(TableRow(_('Breakdown by building type'),
                                   header=True))
        for row in building_list:
            s = TableRow(row)
            table_body.append(s)
        ## fid.close()

    table_body.append(TableRow(_('Action Checklist:'), header=True))
    table_body.append(TableRow(_('Are the critical facilities still '
                                 'open?')))
    table_body.append(TableRow(_('Notes:'), header=True))

    # Explain the affected criterion actually used above
    assumption = _('Buildings are said to be flooded when ')
    if hazard_type == 'depth':
        assumption += _('flood levels exceed %.1f m') % threshold
    else:
        assumption += _('in areas marked as flood prone')
    table_body.append(assumption)

    impact_summary = Table(table_body).toNewlineFreeString()
    impact_table = impact_summary
    map_title = _('Buildings inundated')

    # Create style (binary flooded / not flooded classes)
    style_classes = [dict(label=_('Not Flooded'), min=0, max=0,
                          colour='#1EFC7C', transparency=0, size=1),
                     dict(label=_('Flooded'), min=1, max=1,
                          colour='#F31A1C', transparency=0, size=1)]
    style_info = dict(target_field=self.target_field,
                      style_classes=style_classes)

    # Create vector layer and return
    V = Vector(data=attributes,
               projection=I.get_projection(),
               geometry=I.get_geometry(),
               name=_('Estimated buildings affected'),
               keywords={'impact_summary': impact_summary,
                         'impact_table': impact_table,
                         'map_title': map_title},
               style_info=style_info)
    return V