Example #1
def damageState(building_types, fields, records, dmgstate):
    """
    Calculate probability of damage states for all building types.

    :param dict building_types: Dict of building type data, including the
                                parameters for the fragility curves. 
    :param list fields: List of field names from the input file.
    :param list records: List of lists of records from the input file.
    :param str dmgstate: Damage state to calculate. Must be one of
                         'slight', 'moderate', 'extensive' or 'complete'.

    """
    if dmgstate not in ['slight', 'moderate', 'extensive', 'complete']:
        raise KeyError("Invalid damage state requested: {0}".format(dmgstate))

    vintage = getField('ERA_CONST', fields, records, str)
    
    if dmgstate == 'slight':
        state = 'sl'
    elif dmgstate == 'moderate':
        state = 'mod'
    elif dmgstate == 'extensive':
        state = 'ext'
    elif dmgstate == 'complete':
        state = 'c'
        
    vmask = np.array(vintage) == 'Post-1992'
    wind_speed = getField('vmax', fields, records)
    prob_state = np.empty((len(building_types.keys()), len(records)))
    LOG.debug("Appending required fields for damage state metrics")

    for i, bld_type in enumerate(building_types.keys()):
        LOG.debug("New field name is: {0}".format('_'.join([bld_type, state])))
        fields.append(['_'.join([bld_type, state]), "N", 10, 6])

        # Calculate the probability of being in a damage state:
        mu = building_types[bld_type][state+'_mu'] * \
          np.ones(len(wind_speed))
        sigma = building_types[bld_type][state+'_sd'] * \
          np.ones(len(wind_speed))
        scale = building_types[bld_type][state+'_scale'] * \
          np.ones(len(wind_speed))

        mu, sigma, scale = adjustFragilityCurves(bld_type, vmask, mu,
                                                 sigma, scale, dmgstate)

        prob_state[i, :] = damage(wind_speed, mu, sigma, scale)
        
    for i, record in enumerate(records):
        record.extend(prob_state[:, i])

    return fields, records
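The damage() helper used above is defined elsewhere in the module. A minimal sketch of the lognormal fragility-curve evaluation it appears to perform (the use of scipy.stats.lognorm, the reading of mu as the median wind speed and of scale as a probability multiplier are assumptions, not taken from the source):

import numpy as np
from scipy.stats import lognorm

def damage(wind_speed, mu, sigma, scale):
    # Hypothetical sketch only: probability of reaching or exceeding a damage
    # state, modelled as a lognormal CDF of wind speed. 'mu' is treated as the
    # median wind speed, 'sigma' as the lognormal shape parameter and 'scale'
    # as a multiplier on the resulting probability.
    wind_speed = np.asarray(wind_speed, dtype=float)
    return scale * lognorm.cdf(wind_speed, sigma, scale=mu)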
Example #2
def calculatePopulation(fields, records, building_types):
    """
    Calculate the population affected statistic for the event.
    "Population affected" is defined to be the population in building types
    that have 70% or greater probability of being in a moderate damage state,
    or 30% or greater probability of being in an extensive damage state.

    The population in a land parcel is proportionally distributed amongst all
    building types in the parcel.

    :param list fields: List of fields in the dataset.
    :param list records: List of lists of records from the input file.
    :param dict building_types: Dict of building type data, including the
                                parameters for the fragility curves.
    """

    LOG.info("Calculating affected population")
    nrecords = len(records)
    n_bldg_type = len(building_types.keys())
    flarea = getField('FLAREA_SUM', fields, records)
    pop = getField('POP_EST', fields, records)
    pop_affected = np.zeros((n_bldg_type, nrecords))
    
    for i, bld_type in enumerate(building_types.keys()):
        mod_dmg_name = bld_type + '_mod'
        ext_dmg_name = bld_type + '_ext'
        mod_dmg_prob = getField(mod_dmg_name, fields, records)
        ext_dmg_prob = getField(ext_dmg_name, fields, records)

        # Damage thresholds: >= 70% probability of moderate damage;
        #                    >= 30% probability of extensive damage.
        mod_idx = np.where(mod_dmg_prob >= 0.7)
        ext_idx = np.where(ext_dmg_prob >= 0.3)

        bld_flarea = getField(bld_type, fields, records)
        xx = bld_flarea * pop / flarea

        pop_affected[i, mod_idx] = xx[mod_idx]
        pop_affected[i, ext_idx] = xx[ext_idx]

    population_affected = np.sum(pop_affected, axis=0)

    np.putmask(population_affected, population_affected > pop, pop)

    fields.append(['POP_AFFECT', "N", 10, 2])
    for i, record in enumerate(records):
        record.extend([population_affected[i]])


    return fields, records
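The per-parcel population is shared among building types in proportion to floor area (xx = bld_flarea * pop / flarea). A small worked check with invented numbers:

import numpy as np

# Hypothetical parcel: 1000 residents, 5000 sq m of total floor area,
# of which 2000 sq m belongs to the building type of interest.
pop = np.array([1000.])
flarea = np.array([5000.])
bld_flarea = np.array([2000.])

# Population attributed to that building type: 2000/5000 * 1000 = 400.
xx = bld_flarea * pop / flarea
print(xx)  # [400.]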
Example #3
def calculateAverageLoss(records, fields, output_folder):
    """
    Calculate the average loss and total cost for the region for a
    single event.
    """

    fh = open(pjoin(output_folder, 'annual_loss.csv'), 'w')

    fh.write("Return period, average loss, cost\n")

    loss = getField('loss', fields, records)
    cost = getField('cost', fields, records)
    values = getField('bldg_value', fields, records)
    avg_loss = np.sum(loss * cost) / np.sum(cost)

    fh.write("Average loss: {0:f}\n".format(avg_loss))
    fh.write("Cost: P{0:,} (total value: P{1:,})".
             format(int(np.sum(cost)), int(np.sum(values))))
    fh.close()

    return
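The average loss here is a cost-weighted mean of the per-feature loss ratios. A short illustration with invented numbers:

import numpy as np

# Two hypothetical features: loss ratios 0.10 and 0.30, with replacement
# costs of 1,000,000 and 250,000.
loss = np.array([0.10, 0.30])
cost = np.array([1.0e6, 2.5e5])

# (0.10*1e6 + 0.30*2.5e5) / 1.25e6 = 0.14
avg_loss = np.sum(loss * cost) / np.sum(cost)
print(avg_loss)  # 0.14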
Example #4
def loadZonesFromRecords(records, fields, featureid, zoneid):
    """
    Build a mapping from each feature id to its region and sub-region, as
    recorded in the input shapefile.
    """
    LOG.debug("Extracting list of unique regions from records")

    output = dict()

    featurerecs = getField(featureid, fields, records, dtype=int)
    zonerecs = getField(zoneid, fields, records, dtype=str)

    for feature, zone in zip(featurerecs, zonerecs):
        try:
            region, subregion = zone.split(' - ', 1)
        except ValueError:
            # No ' - ' separator: use the zone name for both levels.
            region = subregion = zone
        output[feature] = {'region': region, 'subregion': subregion}

    return output
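The zone identifier is expected to be a 'Region - Subregion' string; when no ' - ' separator is present, the zone name is used for both levels. A brief standalone illustration of the splitting logic (the zone strings are invented):

# Hypothetical zone strings as they might appear in the shapefile.
for zone in ['Region A - Coastal', 'Region B']:
    try:
        region, subregion = zone.split(' - ', 1)
    except ValueError:
        region = subregion = zone
    print('{0} | {1}'.format(region, subregion))
# Region A | Coastal
# Region B | Region B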
Example #5
def calculateAverageLoss(records, fields, return_periods, output_folder):
    """
    Calculate average loss (for return periods and annualised loss)
    for the region.
    """

    avg_loss = np.empty(len(return_periods))
    tot_cost = np.empty(len(return_periods))

    fh = open(pjoin(output_folder, 'annual_loss.csv'), 'w')

    fh.write("Return period, average loss, cost\n")
    for i, ret_per in enumerate(return_periods):
        dmg_key = 'dmg' + str(int(ret_per))
        cost_key = 'cost' + str(int(ret_per))

        dmg = getField(dmg_key, fields, records)
        cost = getField(cost_key, fields, records)
        avg_loss[i] = np.sum(dmg * cost) / np.sum(cost)
        tot_cost[i] = np.sum(cost)
        fh.write("{0:d}, {1:f}, P{2:,d}\n".format(int(ret_per), avg_loss[i],
                                                  int(tot_cost[i])))

    ann_loss = getField('ann_loss', fields, records)
    ann_cost = getField('ann_cost', fields, records)
    values = getField('bldg_value', fields, records)
    avg_annloss = np.sum(ann_loss * ann_cost) / np.sum(ann_cost)

    fh.write("Average annualised loss: {0:f}\n".format(avg_annloss))
    fh.write("Annualised cost: P{0:,} (total value: P{1:,})".format(
        int(np.sum(ann_cost)), int(np.sum(values))))
    fh.close()

    plotResults(avg_loss, tot_cost, return_periods, output_folder)

    return
Example #6
def totalDamage(building_types, fields, records, return_periods,
                building_costs):
    """
    Calculate the total damage for each feature, for each return
    period.  Total damage is the weighted sum of damage to each
    building type, weighted by the fraction of floor area for that
    building type.
    """

    LOG.info("Calculating total damage to building stock")
    LOG.debug("Calculating total value of building stock in each feature")

    nrecords = len(records)
    n_bldg_type = len(building_types.keys())

    lu4 = getField('L4_USE', fields, records, str)
    lu5 = getField('L5_USE', fields, records, str)
    vintage = getField('ERA_CONST', fields, records, str)
    area_sqm = getField('AREA_SQM', fields, records)

    values = np.empty((n_bldg_type, nrecords))

    for i, bld_type in enumerate(building_types.keys()):
        flarea = getField(bld_type, fields, records)
        values[i, :] = calculateValue(flarea, lu4, lu5, bld_type,
                                      building_costs)

    totvalue = np.sum(values, axis=0)
    fields.append(['bldg_value', "N", 19, 0])

    for i, record in enumerate(records):
        record.append(totvalue[i])

    LOG.debug("Appending required fields for return period losses")
    for ret_per in return_periods:
        fields.append(['dmg' + str(int(ret_per)), "N", 9, 6])
        fields.append(['cost' + str(int(ret_per)), "N", 10, 0])
        fields.append(['dmgf' + str(int(ret_per)), "N", 10, 6])

    fields.append(['ann_loss', "N", 9, 6])
    fields.append(['ann_cost', "N", 10, 0])
    fields.append(['ann_dmgf', "N", 10, 6])

    # Create a mask to allow us to modify the vulnerability for
    # buildings of different ages. The key time is 1992. This applies
    # to the MWS, CWS, CHB and C1-L building types, where there were
    # changes to the construction materials used.
    vmask = np.array(vintage) == 'Post-1992'

    rp_damage = np.empty((len(return_periods), nrecords))
    rp_costs = np.empty((len(return_periods), nrecords))
    rp_dmgfl = np.empty((len(return_periods), nrecords))

    for n, ret_per in enumerate(return_periods):
        LOG.info("Processing return period {0}".format(ret_per.astype(int)))
        wind_speed = getField('V' + str(ret_per.astype(int)), fields, records)

        costs = np.empty((len(building_types.keys()), nrecords))
        dmg_flarea = np.empty((len(building_types.keys()), nrecords))

        for i, bld_type in enumerate(building_types.keys()):
            # Calculate corresponding value of built assets:
            flarea = getField(bld_type, fields, records)
            value = calculateValue(flarea, lu4, lu5, bld_type, building_costs)

            # Calculate the damage (as a fraction of replacement cost):
            mu = building_types[bld_type]['mu'] * np.ones(len(wind_speed))
            sigma = building_types[bld_type]['sigma'] * np.ones(
                len(wind_speed))
            scale = building_types[bld_type]['scale'] * np.ones(
                len(wind_speed))

            mu, sigma, scale = adjustDamageCurves(bld_type, vmask, mu, sigma,
                                                  scale)

            d = damage(wind_speed, mu, sigma, scale)

            costs[i, :] = d * value
            dmg_flarea[i, :] = 100. * d * flarea / area_sqm

        totcost = np.sum(costs, axis=0)
        totdmg = totcost / totvalue
        np.putmask(totdmg, totvalue == 0, 0)

        # Store the damaged floor area equivalent in units of hectare/sq km.
        totdmgfl = np.sum(dmg_flarea, axis=0)

        del costs
        del dmg_flarea

        for i, record in enumerate(records):
            record.extend([totdmg[i], totcost[i], totdmgfl[i]])

        rp_damage[n, :] = totdmg
        rp_costs[n, :] = totcost
        rp_dmgfl[n, :] = totdmgfl

        #LOG.info("Size of records = {0} MB".format(
        #            total_size(records)/1024**2))

    LOG.info("Calculating annualised losses")
    # Calculate annual probability:
    annual_prob = probability(return_periods)
    for i, record in enumerate(records):
        annualised_loss = integrateLoss(annual_prob, rp_damage[:, i])
        annualised_costs = integrateLoss(annual_prob, rp_costs[:, i])
        annualised_dmgfl = integrateLoss(annual_prob, rp_dmgfl[:, i])
        record.extend([annualised_loss, annualised_costs, annualised_dmgfl])

    return fields, records
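probability() and integrateLoss() are defined elsewhere. A plausible sketch under stated assumptions: the annual exceedance probability is taken as 1 - exp(-1/RP) and the annualised value is a trapezoidal integral of loss against that probability (both choices are assumptions, not confirmed by the source):

import numpy as np

def probability(return_periods):
    # Assumed: convert return periods (years) to annual exceedance probability.
    return 1.0 - np.exp(-1.0 / np.asarray(return_periods, dtype=float))

def integrateLoss(annual_prob, losses):
    # Assumed: trapezoidal integration of loss against exceedance probability.
    # Sorting by increasing probability keeps the integral non-negative.
    order = np.argsort(annual_prob)
    return np.trapz(np.asarray(losses)[order], np.asarray(annual_prob)[order])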
Example #7
def totalDamage(building_types, fields, records, building_costs):
    """
    Calculate the total damage for each feature, for each return
    period.  Total damage is the weighted sum of damage to each
    building type, weighted by the fraction of floor area for that
    building type.
    """

    LOG.info("Calculating total damage to building stock")
    LOG.debug("Calculating total value of building stock in each feature")

    lu4 = getField('L4_USE', fields, records, str)
    lu5 = getField('L5_USE', fields, records, str)
    vintage = getField('ERA_CONST', fields, records, str)
    area_sqm = getField('AREA_SQM', fields, records)

    values = np.empty((len(building_types.keys()), len(records)))

    for i, bld_type in enumerate(building_types.keys()):
        flarea = getField(bld_type, fields, records)
        values[i, :] = calculateValue(flarea, lu4, lu5, bld_type,
                                      building_costs)

    totvalue = np.sum(values, axis=0)
    fields.append(['bldg_value', "N", 19, 0])

    for i, record in enumerate(records):
        record.append(totvalue[i])

    LOG.debug("Appending required fields for loss metrics")
    
    fields.append(['loss', "N", 9, 6])
    fields.append(['cost', "N", 10, 0])
    fields.append(['dmgf', "N", 10, 6])

    # Create a mask to allow us to modify the vulnerability for
    # buildings of different ages. The key time is 1992. This applies
    # to the MWS, CWS, CHB and C1-L building types, where there were
    # changes to the construction materials used.
    vmask = np.array(vintage) == 'Post-1992'

    LOG.info("Processing wind speed")
    wind_speed = getField('vmax', fields, records)

    costs = np.empty((len(building_types.keys()), len(records)))
    dmg_flarea = np.empty((len(building_types.keys()), len(records)))

    for i, bld_type in enumerate(building_types.keys()):
        # Calculate corresponding value of built assets:
        flarea = getField(bld_type, fields, records)
        value = calculateValue(flarea, lu4, lu5, bld_type, building_costs)

        # Calculate the damage (as a fraction of replacement cost):
        mu = building_types[bld_type]['mu'] * np.ones(len(wind_speed))
        sigma = building_types[bld_type]['sigma'] * np.ones(len(wind_speed))
        scale = building_types[bld_type]['scale'] * np.ones(len(wind_speed))

        mu, sigma, scale = adjustDamageCurves(bld_type, vmask, mu, sigma, scale)
        dmg = damage(wind_speed, mu, sigma, scale)
        costs[i, :] = dmg * value
        dmg_flarea[i, :] = 100. * dmg * flarea / area_sqm

    totcost = np.sum(costs, axis=0)
    totdmg = totcost / totvalue
    np.putmask(totdmg, totvalue == 0, 0)

    # Store the damaged floor area equivalent in units of hectare/sq km.
    totdmgfl = np.sum(dmg_flarea, axis=0)

    del costs
    del dmg_flarea

    for i, record in enumerate(records):
        record.extend([totdmg[i], totcost[i], totdmgfl[i]])

    return fields, records
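adjustDamageCurves() is an external helper; based on the comment above about post-1992 construction changes, a hedged sketch of the kind of adjustment it might apply (the affected building types come from the comment, but the 10% shift in the median and the direction of the change are invented placeholders):

import numpy as np

def adjustDamageCurves(bld_type, vmask, mu, sigma, scale):
    # Hypothetical sketch only: nudge the damage-curve median upwards for
    # post-1992 buildings of the types named in the comment, leaving sigma
    # and scale unchanged.
    if bld_type in ('MWS', 'CWS', 'CHB', 'C1-L'):
        mu = np.where(vmask, mu * 1.1, mu)
    return mu, sigma, scale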
Example #8
def aggregate(records, fields, featureid, features, return_periods):
    """
    'records' - all records contained in the shapefile
    'fields' - list of fields held in the shapefile
    'featureid' - name of the unique feature id field in the records
    'features' - dict keyed by all the available feature id numbers (which
        should contain all those in 'featureid'). This is built from the
        complete list of feature id numbers using loadZonesFromRecords().
    'return_periods' - array of return period values for which to calculate
        aggregated losses.
    """
    LOG.info("Calculating aggregated losses")

    # Extract the required values from the shapefile
    oid = getField(featureid, fields, records, dtype=int)
    value = getField('bldg_value', fields, records)
    flarea = getField('FLAREA_SUM', fields, records)
    polygon_area = getField('AREA_SQM', fields, records)
    aac = getField('ann_cost', fields, records)

    # Aggregate the values to region and subregion:
    r_value, sr_value = extractZoneValues(oid, features, value)
    r_aac, sr_aac = extractZoneValues(oid, features, aac)
    r_area, sr_area = extractZoneValues(oid, features, polygon_area)
    r_flarea, sr_flarea = extractZoneValues(oid, features, flarea)
    output = AvDict()
    sroutput = AvDict()

    for region in r_value.keys():
        region_value = np.sum(r_value[region])
        # Floor area of the region in hectares:
        region_flarea = np.sum(r_flarea[region]) / np.power(10., 4)
        # Total area of the region in square kilometres:
        region_area = np.sum(r_area[region]) / np.power(10., 6)

        output[region]['VALUE'] = region_value
        output[region]['flarea_sum'] = region_flarea
        output[region]['area_sum'] = region_area

        output[region]['ann_cost'] = np.sum(r_aac[region])
        output[region]['ann_loss'] = output[region]['ann_cost'] / region_value
        output[region]['ann_dmg'] = output[region]['ann_loss'] * \
                                            region_flarea/region_area

    LOG.debug("Calculating zonal totals for each return period")
    for ret_per in return_periods:
        dmg_key = 'dmg' + str(int(ret_per))
        cost_key = 'cost' + str(int(ret_per))
        flarea_key = 'flarea' + str(int(ret_per))

        cost = getField(cost_key, fields, records)
        r_cost, sr_cost = extractZoneValues(oid, features, cost)

        for region in r_value.keys():
            cost_sum = np.sum(r_cost[region])
            dmg_sum = cost_sum / np.sum(r_value[region])
            output[region][dmg_key] = dmg_sum
            output[region][flarea_key] = dmg_sum * \
                                         output[region]['flarea_sum'] / \
                                         output[region]['area_sum']

            output[region][cost_key] = cost_sum

    for region in sr_value.keys():
        region_value = np.sum(sr_value[region])
        # Floor area of the region in hectares:
        region_flarea = np.sum(sr_flarea[region]) / np.power(10., 4)
        # Total area of the region in square kilometres:
        region_area = np.sum(sr_area[region]) / np.power(10., 6)

        sroutput[region]['VALUE'] = region_value
        sroutput[region]['ann_cost'] = np.sum(sr_aac[region])
        sroutput[region]['ann_loss'] = sroutput[region]['ann_cost'] / \
                                        region_value
        sroutput[region]['ann_dmg'] = sroutput[region]['ann_loss'] * \
                                                region_flarea/region_area
        sroutput[region]['flarea_sum'] = region_flarea
        sroutput[region]['area_sum'] = region_area

    for ret_per in return_periods:
        dmg_key = 'dmg' + str(int(ret_per))
        flarea_key = 'flarea' + str(int(ret_per))
        cost_key = 'cost' + str(int(ret_per))
        cost = getField(cost_key, fields, records)
        r_cost, sr_cost = extractZoneValues(oid, features, cost)

        for region in sr_value.keys():
            cost_sum = np.sum(sr_cost[region])
            dmg_sum = cost_sum / np.sum(sr_value[region])
            sroutput[region][dmg_key] = dmg_sum
            sroutput[region][flarea_key] = dmg_sum * \
                                        sroutput[region]['flarea_sum'] / \
                                        sroutput[region]['area_sum']
            sroutput[region][cost_key] = cost_sum

    return output, sroutput
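extractZoneValues() and AvDict are defined elsewhere. A minimal sketch of the behaviour the aggregation code relies on, assuming AvDict is an auto-vivifying dictionary and extractZoneValues groups per-feature values under the region and sub-region names produced by loadZonesFromRecords (the implementation details are assumptions):

from collections import defaultdict

class AvDict(defaultdict):
    # Assumed: auto-vivifying dict, so output[region]['VALUE'] = ... works
    # without creating the inner dict first.
    def __init__(self, *args, **kwargs):
        super(AvDict, self).__init__(dict, *args, **kwargs)

def extractZoneValues(oid, features, values):
    # Assumed: collect each feature's value under its region and sub-region.
    rvals, srvals = defaultdict(list), defaultdict(list)
    for fid, val in zip(oid, values):
        rvals[features[fid]['region']].append(val)
        srvals[features[fid]['subregion']].append(val)
    return rvals, srvals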
Example #9
def aggregate_events(records, fields, featureid, features):
    """
    'records' - all records contained in the shapefile
    'fields' - list of fields held in the shapefile
    'featureid' - name of the unique feature id field in the records
    'features' - dict keyed by all the available feature id numbers (which
        should contain all those in 'featureid'). This is built from the
        complete list of feature id numbers using loadZonesFromRecords().

    """
    LOG.info("Calculating aggregated losses")
    # Extract the required values from the shapefile
    oid = getField(featureid, fields, records, dtype=int)
    value = getField('bldg_value', fields, records)
    flarea = getField('FLAREA_SUM', fields, records)
    polygon_area = getField('AREA_SQM', fields, records)
    aac = getField('cost', fields, records)
    popn = getField('POP_EST', fields, records)
    popa = getField('POP_AFFECT', fields, records, dtype=float)

    # Aggregate the values to region and subregion:
    r_value, sr_value = extractZoneValues(oid, features, value)
    r_aac, sr_aac = extractZoneValues(oid, features, aac)
    r_area, sr_area = extractZoneValues(oid, features, polygon_area)
    r_flarea, sr_flarea = extractZoneValues(oid, features, flarea)
    r_popn, sr_popn = extractZoneValues(oid, features, popn)
    r_popa, sr_popa = extractZoneValues(oid, features, popa)

    output = AvDict()
    sroutput = AvDict()

    for region in r_value.keys():
        region_value = np.sum(r_value[region])
        # Floor area of the region in hectares:
        region_flarea = np.sum(r_flarea[region]) / np.power(10., 4)
        # Total area of the region in square kilometres:
        region_area = np.sum(r_area[region]) / np.power(10., 6)

        # Total population in the region:
        region_pop = np.sum(r_popn[region])

        output[region]['VALUE'] = region_value
        output[region]['flarea_sum'] = region_flarea
        output[region]['area_sum'] = region_area
        output[region]['POP_EST'] = region_pop

        output[region]['cost'] = np.sum(r_aac[region])
        output[region]['loss'] = output[region]['cost'] / region_value
        output[region]['dmg'] = output[region]['loss'] * \
                                            region_flarea/region_area
        output[region]['POP_AFFECT'] = np.nansum(r_popa[region])

    for region in sr_value.keys():
        sregion_value = np.sum(sr_value[region])
        # Floor area of the region in hectares:
        sregion_flarea = np.sum(sr_flarea[region]) / np.power(10., 4)
        # Total area of the region in square kilometres:
        sregion_area = np.sum(sr_area[region]) / np.power(10., 6)

        # Total population in the region:
        sregion_pop = np.sum(sr_popn[region])

        sroutput[region]['VALUE'] = sregion_value
        sroutput[region]['flarea_sum'] = sregion_flarea
        sroutput[region]['area_sum'] = sregion_area
        sroutput[region]['POP_EST'] = sregion_pop

        sroutput[region]['cost'] = np.sum(sr_aac[region])
        sroutput[region]['loss'] = sroutput[region]['cost'] / sregion_value
        sroutput[region]['dmg'] = sroutput[region]['loss'] * \
                                            sregion_flarea/sregion_area
        sroutput[region]['POP_AFFECT'] = np.nansum(sr_popa[region])

    return output, sroutput